Mirror of https://github.com/asmjit/asmjit.git (synced 2025-12-17 04:24:37 +03:00)

Commit: Added support for indirect jumps within a function (Compiler) (#286)
.travis.yml — 32 changed lines

@@ -57,6 +57,33 @@ matrix:
           - sourceline: "ppa:ubuntu-toolchain-r/test"
         packages: [clang++-9]

+    - name: "Linux Clang Default [64-bit] [REL] [NoBuilder]"
+      env: BUILD_MATRIX="BUILD_TYPE=Release && CC=clang-9 && CXX=clang++-9" EXTRA_OPTIONS="-DASMJIT_NO_BUILDER=1"
+      os: linux
+      addons:
+        apt:
+          sources:
+            - sourceline: "ppa:ubuntu-toolchain-r/test"
+          packages: [clang++-9]
+
+    - name: "Linux Clang Default [64-bit] [REL] [NoCompiler]"
+      env: BUILD_MATRIX="BUILD_TYPE=Release && CC=clang-9 && CXX=clang++-9" EXTRA_OPTIONS="-DASMJIT_NO_COMPILER=1"
+      os: linux
+      addons:
+        apt:
+          sources:
+            - sourceline: "ppa:ubuntu-toolchain-r/test"
+          packages: [clang++-9]
+
+    - name: "Linux Clang Default [64-bit] [REL] [NoLogging]"
+      env: BUILD_MATRIX="BUILD_TYPE=Release && CC=clang-9 && CXX=clang++-9" EXTRA_OPTIONS="-DASMJIT_NO_LOGGING=1"
+      os: linux
+      addons:
+        apt:
+          sources:
+            - sourceline: "ppa:ubuntu-toolchain-r/test"
+          packages: [clang++-9]
+
     - name: "Linux GCC 4.8 [32-bit] [DBG]"
       env: BUILD_MATRIX="BUILD_TYPE=Debug && CC=gcc-4.8 && CXX=g++-4.8" CXXFLAGS=-m32 LDFLAGS=-m32
       os: linux

@@ -269,5 +296,8 @@ script:
   - eval "$RUN_CMD ./asmjit_test_unit --quick"
   - eval "$RUN_CMD ./asmjit_test_opcode > /dev/null"
   - eval "$RUN_CMD ./asmjit_test_x86_asm"
-  - eval "$RUN_CMD ./asmjit_test_x86_cc"
   - eval "$RUN_CMD ./asmjit_test_x86_sections"
+  - |
+    if [ -f ./asmjit_test_x86_cc ]; then
+      eval "$RUN_CMD ./asmjit_test_x86_cc"
+    fi
@@ -464,7 +464,6 @@ if (NOT ASMJIT_EMBED)

   foreach(_target asmjit_test_opcode
                   asmjit_test_x86_asm
-                  asmjit_test_x86_cc
                   asmjit_test_x86_sections)
     asmjit_add_target(${_target} TEST
                       SOURCES test/${_target}.cpp

@@ -474,6 +473,15 @@ if (NOT ASMJIT_EMBED)
                       CFLAGS_REL ${ASMJIT_PRIVATE_CFLAGS_REL})
   endforeach()

+  if (NOT (ASMJIT_NO_BUILDER OR ASMJIT_NO_COMPILER))
+    asmjit_add_target(asmjit_test_x86_cc TEST
+                      SOURCES test/asmjit_test_x86_cc.cpp
+                      LIBRARIES AsmJit::AsmJit
+                      CFLAGS ${ASMJIT_PRIVATE_CFLAGS}
+                      CFLAGS_DBG ${ASMJIT_PRIVATE_CFLAGS_DBG}
+                      CFLAGS_REL ${ASMJIT_PRIVATE_CFLAGS_REL})
+  endif()
+
   foreach(_target asmjit_bench_x86)
     asmjit_add_target(${_target} EXECUTABLE
                       SOURCES test/${_target}.cpp
README.md — 81 changed lines

@@ -1460,6 +1460,87 @@ static void exampleUseOfConstPool(x86::Compiler& cc) {
 }
 ```
+
+### Jump Tables
+
+**Compiler** supports the `jmp` instruction with a reg/mem operand, a pattern commonly used to implement indirect jumps within a function, for example to implement a `switch()` statement in a programming language. By default AsmJit assumes that every basic block is a possible jump target, as it cannot deduce targets from an instruction's operands. This is a very pessimistic default that should be avoided if possible, as it is costly and very unfriendly to liveness analysis and register allocation. Instead of relying on such a pessimistic default, use **JumpAnnotation** to annotate indirect jumps:
+
+```c++
+#include <asmjit/asmjit.h>
+
+using namespace asmjit;
+
+static void exampleUseOfIndirectJump(x86::Compiler& cc) {
+  cc.addFunc(FuncSignatureT<float, float, float, uint32_t>(CallConv::kIdHost));
+
+  // Function arguments.
+  x86::Xmm a = cc.newXmmSs("a");
+  x86::Xmm b = cc.newXmmSs("b");
+  x86::Gp op = cc.newUInt32("op");
+
+  x86::Gp target = cc.newIntPtr("target");
+  x86::Gp offset = cc.newIntPtr("offset");
+
+  Label L_Table = cc.newLabel();
+  Label L_Add = cc.newLabel();
+  Label L_Sub = cc.newLabel();
+  Label L_Mul = cc.newLabel();
+  Label L_Div = cc.newLabel();
+  Label L_End = cc.newLabel();
+
+  cc.setArg(0, a);
+  cc.setArg(1, b);
+  cc.setArg(2, op);
+
+  // Jump annotation is a building block that allows annotating all
+  // possible targets where `jmp()` can jump. It then drives the CFG
+  // construction and liveness analysis, which impacts register allocation.
+  JumpAnnotation* annotation = cc.newJumpAnnotation();
+  annotation->addLabel(L_Add);
+  annotation->addLabel(L_Sub);
+  annotation->addLabel(L_Mul);
+  annotation->addLabel(L_Div);
+
+  // Most likely not the common indirect jump approach, but it doesn't
+  // really matter how the final address is calculated. What matters is
+  // using JumpAnnotation together with `jmp()`.
+  cc.lea(offset, x86::ptr(L_Table));
+  if (cc.is64Bit())
+    cc.movsxd(target, x86::dword_ptr(offset, op.cloneAs(offset), 2));
+  else
+    cc.mov(target, x86::dword_ptr(offset, op.cloneAs(offset), 2));
+  cc.add(target, offset);
+  cc.jmp(target, annotation);
+
+  // Acts like a switch() statement in C.
+  cc.bind(L_Add);
+  cc.addss(a, b);
+  cc.jmp(L_End);
+
+  cc.bind(L_Sub);
+  cc.subss(a, b);
+  cc.jmp(L_End);
+
+  cc.bind(L_Mul);
+  cc.mulss(a, b);
+  cc.jmp(L_End);
+
+  cc.bind(L_Div);
+  cc.divss(a, b);
+
+  cc.bind(L_End);
+  cc.ret(a);
+
+  cc.endFunc();
+
+  // Relative int32_t offsets of `L_XXX - L_Table`.
+  cc.bind(L_Table);
+  cc.embedLabelDelta(L_Add, L_Table, 4);
+  cc.embedLabelDelta(L_Sub, L_Table, 4);
+  cc.embedLabelDelta(L_Mul, L_Table, 4);
+  cc.embedLabelDelta(L_Div, L_Table, 4);
+}
+```

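For orientation, the following is a minimal, hypothetical sketch (not part of this commit) of how a function built by `exampleUseOfIndirectJump()` could be compiled and called through asmjit's `JitRuntime`; error handling is omitted and the `main()` wrapper is an assumption for illustration only:

```c++
#include <asmjit/asmjit.h>

using namespace asmjit;

typedef float (*IndirectJumpFunc)(float, float, uint32_t);

int main() {
  JitRuntime rt;                      // Owns and releases the generated code.
  CodeHolder code;
  code.init(rt.codeInfo());

  x86::Compiler cc(&code);
  exampleUseOfIndirectJump(cc);       // Emits the function shown above.
  cc.finalize();                      // Runs register allocation and serializes to `code`.

  IndirectJumpFunc fn;
  rt.add(&fn, &code);                 // Relocates the code and returns a callable pointer.

  float result = fn(3.0f, 4.0f, 2);   // op == 2 selects L_Mul via the jump table -> 12.0f.
  rt.release(fn);
  return result == 12.0f ? 0 : 1;
}
```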

Advanced Features
-----------------
@@ -26,6 +26,16 @@
 #define ASMJIT_EXPORTS
+
+// Only turn-off these warnings when building asmjit itself.
+#ifdef _MSC_VER
+#ifndef _CRT_SECURE_NO_DEPRECATE
+#define _CRT_SECURE_NO_DEPRECATE
+#endif
+#ifndef _CRT_SECURE_NO_WARNINGS
+#define _CRT_SECURE_NO_WARNINGS
+#endif
+#endif
+
 // Dependencies only required for asmjit build, but never exposed through public headers.
 #ifdef _WIN32
 #ifndef WIN32_LEAN_AND_MEAN

@@ -37,8 +47,23 @@
 #include <windows.h>
 #endif
+
+// ============================================================================
+// [asmjit::Build - Globals - Build-Only]
+// ============================================================================
+
 #include "./api-config.h"
+
+#if !defined(ASMJIT_BUILD_DEBUG) && ASMJIT_CXX_GNU >= ASMJIT_CXX_MAKE_VER(4, 4, 0)
+#define ASMJIT_FAVOR_SIZE __attribute__((__optimize__("Os")))
+#define ASMJIT_FAVOR_SPEED __attribute__((__optimize__("O3")))
+#elif ASMJIT_CXX_HAS_ATTRIBUTE(__minsize__, 0)
+#define ASMJIT_FAVOR_SIZE __attribute__((__minsize__))
+#define ASMJIT_FAVOR_SPEED
+#else
+#define ASMJIT_FAVOR_SIZE
+#define ASMJIT_FAVOR_SPEED
+#endif
+
 // Make sure '#ifdef'ed unit tests are properly highlighted in IDE.
 #if !defined(ASMJIT_TEST) && defined(__INTELLISENSE__)
 #define ASMJIT_TEST
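ASMJIT_FAVOR_SIZE / ASMJIT_FAVOR_SPEED (moved above) are attached to function definitions to bias per-function optimization. A small illustrative sketch of that usage — the helper below is hypothetical, but the pattern matches definitions such as `ASMJIT_FAVOR_SIZE Error CallConv::init(...)` that appear later in this commit:

```c++
// Compiled for size on GCC/Clang builds where the attribute is available;
// the macro expands to nothing on compilers that don't support it.
ASMJIT_FAVOR_SIZE static const char* errorToString(int err) noexcept {
  return err == 0 ? "ok" : "error";
}
```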
@@ -82,9 +82,14 @@
 // #define ASMJIT_NO_TEXT // Disable everything that contains text
 // // representation (instructions, errors, ...).
 // #define ASMJIT_NO_VALIDATION // Disable validation API and options.
-// #define ASMJIT_NO_INTROSPECTION // Disable API related to instruction database
+// #define ASMJIT_NO_INTROSPECTION // Disable API related to instruction database.
 // // (validation, cpu features, rw-info, etc).

+// ASMJIT_NO_BUILDER implies ASMJIT_NO_COMPILER.
+#if defined(ASMJIT_NO_BUILDER) && !defined(ASMJIT_NO_COMPILER)
+#define ASMJIT_NO_COMPILER
+#endif
+
 // Prevent compile-time errors caused by misconfiguration.
 #if defined(ASMJIT_NO_TEXT) && !defined(ASMJIT_NO_LOGGING)
 #pragma "ASMJIT_NO_TEXT can only be defined when ASMJIT_NO_LOGGING is defined."

@@ -421,8 +426,6 @@
 #define ASMJIT_FALLTHROUGH ((void)0) /* fallthrough */
 #endif

-#define ASMJIT_UNUSED(X) (void)(X)
-
 // Utilities.
 #define ASMJIT_OFFSET_OF(STRUCT, MEMBER) ((int)(intptr_t)((const char*)&((const STRUCT*)0x100)->MEMBER) - 0x100)
 #define ASMJIT_ARRAY_SIZE(X) uint32_t(sizeof(X) / sizeof(X[0]))

@@ -514,44 +517,17 @@
   __VA_ARGS__& operator=(const __VA_ARGS__& other) = delete; \
 public:

-// ============================================================================
-// [asmjit::Build - Globals - Build-Only]
-// ============================================================================
-
-// Internal macros that are only used when building AsmJit itself.
-#ifdef ASMJIT_EXPORTS
-#if !defined(ASMJIT_BUILD_DEBUG) && ASMJIT_CXX_GNU >= ASMJIT_CXX_MAKE_VER(4, 4, 0)
-#define ASMJIT_FAVOR_SIZE __attribute__((__optimize__("Os")))
-#define ASMJIT_FAVOR_SPEED __attribute__((__optimize__("O3")))
-#elif ASMJIT_CXX_HAS_ATTRIBUTE(__minsize__, 0)
-#define ASMJIT_FAVOR_SIZE __attribute__((__minsize__))
-#define ASMJIT_FAVOR_SPEED
-#else
-#define ASMJIT_FAVOR_SIZE
-#define ASMJIT_FAVOR_SPEED
-#endif
-
-// Only turn-off these warnings when building asmjit itself.
-#ifdef _MSC_VER
-#ifndef _CRT_SECURE_NO_DEPRECATE
-#define _CRT_SECURE_NO_DEPRECATE
-#endif
-#ifndef _CRT_SECURE_NO_WARNINGS
-#define _CRT_SECURE_NO_WARNINGS
-#endif
-#endif
-#endif
-
 // ============================================================================
 // [asmjit::Build - Globals - Cleanup]
 // ============================================================================

-// Undefine everything that is not used by AsmJit outside of `build.h` and that
-// is considered private.
+// Try to cleanup things not used in other public headers.
+#ifndef ASMJIT_EXPORTS
 #undef ASMJIT_CXX_CLANG
 #undef ASMJIT_CXX_GNU
 #undef ASMJIT_CXX_INTEL
 #undef ASMJIT_CXX_MSC
 #undef ASMJIT_CXX_MAKE_VER
+#endif

 #endif // ASMJIT_CORE_API_CONFIG_H_INCLUDED
@@ -76,7 +76,7 @@ ASMJIT_FAVOR_SIZE Error ArchUtils::typeIdToRegInfo(uint32_t archId, uint32_t& ty
   regInfo._signature = 0;

   // TODO: Move to X86 backend.
 #ifdef ASMJIT_BUILD_X86
   if (ArchInfo::isX86Family(archId)) {
     // Passed RegType instead of TypeId?
     if (typeId <= BaseReg::kTypeMax)

@@ -168,7 +168,7 @@ ASMJIT_FAVOR_SIZE Error ArchUtils::typeIdToRegInfo(uint32_t archId, uint32_t& ty
       regInfo._signature = x86::opData.archRegs.regInfo[regType].signature();
       return kErrorOk;
     }
 #endif

   return DebugUtils::errored(kErrorInvalidArch);
 }
@@ -85,19 +85,19 @@ public:
     // ARM sub-types.
     kSubIdA32_Thumb = 8, //!< THUMB|THUMBv2 sub-type (only ARM in 32-bit mode).

 #if (ASMJIT_ARCH_X86) && defined(__AVX512VL__)
     kSubIdHost = kSubIdX86_AVX512VL
 #elif (ASMJIT_ARCH_X86) && defined(__AVX512F__)
     kSubIdHost = kSubIdX86_AVX512
 #elif (ASMJIT_ARCH_X86) && defined(__AVX2__)
     kSubIdHost = kSubIdX86_AVX2
 #elif (ASMJIT_ARCH_X86) && defined(__AVX__)
     kSubIdHost = kSubIdX86_AVX
 #elif (ASMJIT_ARCH_ARM == 32) && (defined(_M_ARMT) || defined(__thumb__) || defined(__thumb2__))
     kSubIdHost = kSubIdA32_Thumb
 #else
     kSubIdHost = 0
 #endif
   };

   //! \name Construction & Destruction
@@ -99,10 +99,10 @@ Error BaseAssembler::section(Section* section) {
   if (!_code->isSectionValid(section->id()) || _code->_sections[section->id()] != section)
     return reportError(DebugUtils::errored(kErrorInvalidSection));

 #ifndef ASMJIT_NO_LOGGING
   if (hasEmitterOption(kOptionLoggingEnabled))
     _code->_logger->logf(".section %s {#%u}\n", section->name(), section->id());
 #endif

   BaseAssembler_initSection(this, section);
   return kErrorOk;

@@ -142,10 +142,10 @@ Error BaseAssembler::bind(const Label& label) {

   Error err = _code->bindLabel(label, _section->id(), offset());

 #ifndef ASMJIT_NO_LOGGING
   if (hasEmitterOption(kOptionLoggingEnabled))
     BaseAssembler_logLabel(this, label);
 #endif

   resetInlineComment();
   if (err)

@@ -277,6 +277,7 @@ Error BaseAssembler::_emitFailed(
 // [asmjit::BaseAssembler - Embed]
 // ============================================================================

+#ifndef ASMJIT_NO_LOGGING
 struct DataSizeByPower {
   char str[4];
 };

@@ -287,6 +288,7 @@ static const DataSizeByPower dataSizeByPowerTable[] = {
   { "dd" },
   { "dq" }
 };
+#endif

 Error BaseAssembler::embed(const void* data, uint32_t dataSize) {
   if (ASMJIT_UNLIKELY(!_code))

@@ -300,10 +302,10 @@ Error BaseAssembler::embed(const void* data, uint32_t dataSize) {

   writer.emitData(data, dataSize);

 #ifndef ASMJIT_NO_LOGGING
   if (ASMJIT_UNLIKELY(hasEmitterOption(kOptionLoggingEnabled)))
     _code->_logger->logBinary(data, dataSize);
 #endif

   writer.done(this);
   return kErrorOk;

@@ -326,7 +328,7 @@ Error BaseAssembler::embedLabel(const Label& label) {
   CodeBufferWriter writer(this);
   ASMJIT_PROPAGATE(writer.ensureSpace(this, dataSize));

 #ifndef ASMJIT_NO_LOGGING
   if (ASMJIT_UNLIKELY(hasEmitterOption(kOptionLoggingEnabled))) {
     StringTmp<256> sb;
     sb.appendFormat(".%s ", dataSizeByPowerTable[Support::ctz(dataSize)].str);

@@ -334,7 +336,7 @@ Error BaseAssembler::embedLabel(const Label& label) {
     sb.appendChar('\n');
     _code->_logger->log(sb);
   }
 #endif

   // TODO: Does it make sense to calculate the address here if everything is known?
   /*

@@ -388,7 +390,7 @@ Error BaseAssembler::embedLabelDelta(const Label& label, const Label& base, uint
   CodeBufferWriter writer(this);
   ASMJIT_PROPAGATE(writer.ensureSpace(this, dataSize));

 #ifndef ASMJIT_NO_LOGGING
   if (ASMJIT_UNLIKELY(hasEmitterOption(kOptionLoggingEnabled))) {
     StringTmp<256> sb;
     sb.appendFormat(".%s (", dataSizeByPowerTable[Support::ctz(dataSize)].str);

@@ -398,7 +400,7 @@ Error BaseAssembler::embedLabelDelta(const Label& label, const Label& base, uint
     sb.appendString(")\n");
     _code->_logger->log(sb);
   }
 #endif

   // If both labels are bound within the same section it means the delta can be calculated now.
   if (labelEntry->isBound() && baseEntry->isBound() && labelEntry->section() == baseEntry->section()) {

@@ -447,10 +449,10 @@ Error BaseAssembler::embedConstPool(const Label& label, const ConstPool& pool) {

   pool.fill(writer.cursor());

 #ifndef ASMJIT_NO_LOGGING
   if (ASMJIT_UNLIKELY(hasEmitterOption(kOptionLoggingEnabled)))
     _code->_logger->logBinary(writer.cursor(), size);
 #endif

   writer.advance(size);
   writer.done(this);

@@ -466,17 +468,16 @@ Error BaseAssembler::comment(const char* data, size_t size) {
   if (ASMJIT_UNLIKELY(!_code))
     return DebugUtils::errored(kErrorNotInitialized);

 #ifndef ASMJIT_NO_LOGGING
   if (hasEmitterOption(kOptionLoggingEnabled)) {
     Logger* logger = _code->logger();
     logger->log(data, size);
     logger->log("\n", 1);
     return kErrorOk;
   }
 #else
-  ASMJIT_UNUSED(data);
-  ASMJIT_UNUSED(size);
-#endif
+  DebugUtils::unused(data, size);
+#endif

   return kErrorOk;
 }
@@ -120,7 +120,7 @@ public:
   ASMJIT_API Error _emitOpArray(uint32_t instId, const Operand_* operands, size_t count) override;

 protected:
 #ifndef ASMJIT_NO_LOGGING
   void _emitLog(
     uint32_t instId, uint32_t options, const Operand_& o0, const Operand_& o1, const Operand_& o2, const Operand_& o3,
     uint32_t relSize, uint32_t immSize, uint8_t* afterCursor);

@@ -128,23 +128,17 @@ protected:
   Error _emitFailed(
     Error err,
     uint32_t instId, uint32_t options, const Operand_& o0, const Operand_& o1, const Operand_& o2, const Operand_& o3);
 #else
   inline Error _emitFailed(
     uint32_t err,
     uint32_t instId, uint32_t options, const Operand_& o0, const Operand_& o1, const Operand_& o2, const Operand_& o3) {

-    ASMJIT_UNUSED(instId);
-    ASMJIT_UNUSED(options);
-    ASMJIT_UNUSED(o0);
-    ASMJIT_UNUSED(o1);
-    ASMJIT_UNUSED(o2);
-    ASMJIT_UNUSED(o3);
-
+    DebugUtils::unused(instId, options, o0, o1, o2, o3);

     resetInstOptions();
     resetInlineComment();
     return reportError(err);
   }
 #endif
 public:
   //! \}
   //! \endcond
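Throughout this commit the per-argument `ASMJIT_UNUSED(X)` macro is replaced by a variadic `DebugUtils::unused(...)` helper. As a rough illustration of what such a helper typically looks like (a hedged sketch; the actual asmjit definition may differ), it can be written as a variadic template that simply swallows its arguments:

```c++
namespace asmjit {
namespace DebugUtils {

// Accepts any number of arguments of any type and does nothing with them.
// Calling unused(a, b, c) silences "unused variable/parameter" warnings
// without needing one macro invocation per argument.
template<typename... Args>
static inline void unused(Args&&...) noexcept {}

} // namespace DebugUtils
} // namespace asmjit
```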
@@ -40,9 +40,7 @@ ASMJIT_BEGIN_NAMESPACE
 class PostponedErrorHandler : public ErrorHandler {
 public:
   void handleError(Error err, const char* message, BaseEmitter* origin) override {
-    ASMJIT_UNUSED(err);
-    ASMJIT_UNUSED(origin);
-
+    DebugUtils::unused(err, origin);
     _message.assignString(message);
   }

@@ -128,7 +126,7 @@ InstNode* BaseBuilder::newInstNode(uint32_t instId, uint32_t instOptions, const

   node = new(node) InstNode(this, instId, instOptions, opCount, opCapacity);
   node->setOp(0, o0);
-  for (uint32_t i = opCount; i < opCapacity; i++) node->resetOp(i);
+  node->resetOps(opCount, opCapacity);
   return node;
 }

@@ -144,7 +142,7 @@ InstNode* BaseBuilder::newInstNode(uint32_t instId, uint32_t instOptions, const
   node = new(node) InstNode(this, instId, instOptions, opCount, opCapacity);
   node->setOp(0, o0);
   node->setOp(1, o1);
-  for (uint32_t i = opCount; i < opCapacity; i++) node->resetOp(i);
+  node->resetOps(opCount, opCapacity);
   return node;
 }

@@ -161,7 +159,7 @@ InstNode* BaseBuilder::newInstNode(uint32_t instId, uint32_t instOptions, const
   node->setOp(0, o0);
   node->setOp(1, o1);
   node->setOp(2, o2);
-  for (uint32_t i = opCount; i < opCapacity; i++) node->resetOp(i);
+  node->resetOps(opCount, opCapacity);
   return node;
 }

@@ -179,7 +177,7 @@ InstNode* BaseBuilder::newInstNode(uint32_t instId, uint32_t instOptions, const
   node->setOp(1, o1);
   node->setOp(2, o2);
   node->setOp(3, o3);
-  for (uint32_t i = opCount; i < opCapacity; i++) node->resetOp(i);
+  node->resetOps(opCount, opCapacity);
   return node;
 }

@@ -657,8 +655,8 @@ Error BaseBuilder::_emit(uint32_t instId, const Operand_& o0, const Operand_& o1
   if (ASMJIT_UNLIKELY(!_code))
     return DebugUtils::errored(kErrorNotInitialized);

+#ifndef ASMJIT_NO_VALIDATION
   // Strict validation.
-#ifndef ASMJIT_NO_VALIDATION
   if (hasEmitterOption(kOptionStrictValidation)) {
     Operand_ opArray[4];
     opArray[0].copyFrom(o0);

@@ -674,7 +672,7 @@ Error BaseBuilder::_emit(uint32_t instId, const Operand_& o0, const Operand_& o1
       return reportError(err);
     }
   }
 #endif

   // Clear options that should never be part of `InstNode`.
   options &= ~BaseInst::kOptionReserved;

@@ -684,10 +682,13 @@ Error BaseBuilder::_emit(uint32_t instId, const Operand_& o0, const Operand_& o1
   ASMJIT_ASSERT(opCapacity >= 4);

   InstNode* node = _allocator.allocT<InstNode>(InstNode::nodeSizeOfOpCapacity(opCapacity));
+  const char* comment = inlineComment();
+
+  resetInstOptions();
+  resetInlineComment();
+
   if (ASMJIT_UNLIKELY(!node)) {
-    resetInstOptions();
     resetExtraReg();
-    resetInlineComment();
     return reportError(DebugUtils::errored(kErrorOutOfMemory));
   }

@@ -697,19 +698,13 @@ Error BaseBuilder::_emit(uint32_t instId, const Operand_& o0, const Operand_& o1
   node->setOp(1, o1);
   node->setOp(2, o2);
   node->setOp(3, o3);
+  node->resetOps(4, opCapacity);

-  for (uint32_t i = 4; i < InstNode::kBaseOpCapacity; i++)
-    node->resetOp(i);
-
-  const char* comment = inlineComment();
   if (comment)
     node->setInlineComment(static_cast<char*>(_dataZone.dup(comment, strlen(comment), true)));

-  resetInstOptions();
-  resetExtraReg();
-  resetInlineComment();
-
   addNode(node);
+  resetExtraReg();
   return kErrorOk;
 }

@@ -726,8 +721,8 @@ Error BaseBuilder::_emit(uint32_t instId, const Operand_& o0, const Operand_& o1
   if (ASMJIT_UNLIKELY(!_code))
     return DebugUtils::errored(kErrorNotInitialized);

+#ifndef ASMJIT_NO_VALIDATION
   // Strict validation.
-#ifndef ASMJIT_NO_VALIDATION
   if (hasEmitterOption(kOptionStrictValidation)) {
     Operand_ opArray[Globals::kMaxOpCount];
     opArray[0].copyFrom(o0);

@@ -745,7 +740,7 @@ Error BaseBuilder::_emit(uint32_t instId, const Operand_& o0, const Operand_& o1
       return reportError(err);
     }
   }
 #endif

   // Clear options that should never be part of `InstNode`.
   options &= ~BaseInst::kOptionReserved;

@@ -755,10 +750,13 @@ Error BaseBuilder::_emit(uint32_t instId, const Operand_& o0, const Operand_& o1
   ASMJIT_ASSERT(opCapacity >= opCount);

   InstNode* node = _allocator.allocT<InstNode>(InstNode::nodeSizeOfOpCapacity(opCapacity));
+  const char* comment = inlineComment();
+
+  resetInstOptions();
+  resetInlineComment();
+
   if (ASMJIT_UNLIKELY(!node)) {
-    resetInstOptions();
     resetExtraReg();
-    resetInlineComment();
     return reportError(DebugUtils::errored(kErrorOutOfMemory));
   }

@@ -773,15 +771,11 @@ Error BaseBuilder::_emit(uint32_t instId, const Operand_& o0, const Operand_& o1
   if (opCapacity > 5)
     node->setOp(5, o5);

-  const char* comment = inlineComment();
   if (comment)
     node->setInlineComment(static_cast<char*>(_dataZone.dup(comment, strlen(comment), true)));

-  resetInstOptions();
-  resetExtraReg();
-  resetInlineComment();
-
   addNode(node);
+  resetExtraReg();
   return kErrorOk;
 }
@@ -61,6 +61,9 @@ class CommentNode;
 class SentinelNode;
 class LabelDeltaNode;

+// Only used by Compiler infrastructure.
+class JumpAnnotation;
+
 // ============================================================================
 // [asmjit::BaseBuilder]
 // ============================================================================

@@ -345,9 +348,9 @@ public:
   //! \name Logging
   //! \{

 #ifndef ASMJIT_NO_LOGGING
   ASMJIT_API Error dump(String& sb, uint32_t flags = 0) const noexcept;
 #endif

   //! \}

@@ -471,6 +474,8 @@ public:

     // [BaseCompiler]

+    //! Node is `JumpNode` (acts as InstNode).
+    kNodeJump = 15,
     //! Node is `FuncNode` (acts as LabelNode).
     kNodeFunc = 16,
     //! Node is `FuncRetNode` (acts as InstNode).

@@ -767,6 +772,11 @@ public:
     _opArray[index].reset();
   }

+  inline void resetOps(uint32_t start, uint32_t end) noexcept {
+    for (uint32_t i = start; i < end; i++)
+      _opArray[i].reset();
+  }
+
   //! \}

   //! \name Utilities
@@ -43,15 +43,15 @@ ASMJIT_BEGIN_NAMESPACE
 ASMJIT_FAVOR_SIZE Error CallConv::init(uint32_t ccId) noexcept {
   reset();

 #ifdef ASMJIT_BUILD_X86
   if (CallConv::isX86Family(ccId))
     return x86::CallConvInternal::init(*this, ccId);
 #endif

 #ifdef ASMJIT_BUILD_ARM
   if (CallConv::isArmFamily(ccId))
     return arm::CallConvInternal::init(*this, ccId);
 #endif

   return DebugUtils::errored(kErrorInvalidArgument);
 }
@@ -175,7 +175,7 @@ struct CallConv {
   // [Host]
   // ------------------------------------------------------------------------

 #if defined(ASMJIT_DOCGEN)

   //! Default calling convention based on the current C++ compiler's settings.
   //!

@@ -197,31 +197,31 @@ struct CallConv {
   //! \note If not defined by the host then it's the same as `kIdHostCDecl`.
   kIdHostFastCall = DETECTED_AT_COMPILE_TIME

 #elif ASMJIT_ARCH_X86 == 32

   kIdHost = kIdX86CDecl,
   kIdHostCDecl = kIdX86CDecl,
   kIdHostStdCall = kIdX86StdCall,

-#if defined(_MSC_VER)
+# if defined(_MSC_VER)
   kIdHostFastCall = kIdX86MsFastCall,
-#elif defined(__GNUC__)
+# elif defined(__GNUC__)
   kIdHostFastCall = kIdX86GccFastCall,
-#else
+# else
   kIdHostFastCall = kIdHost,
-#endif
+# endif

   kIdHostLightCall2 = kIdX86LightCall2,
   kIdHostLightCall3 = kIdX86LightCall3,
   kIdHostLightCall4 = kIdX86LightCall4

 #elif ASMJIT_ARCH_X86 == 64

-#if defined(_WIN32)
+# if defined(_WIN32)
   kIdHost = kIdX86Win64,
-#else
+# else
   kIdHost = kIdX86SysV64,
-#endif
+# endif

   kIdHostCDecl = kIdHost, // Doesn't exist, redirected to host.
   kIdHostStdCall = kIdHost, // Doesn't exist, redirected to host.

@@ -231,26 +231,26 @@ struct CallConv {
   kIdHostLightCall3 = kIdX64LightCall3,
   kIdHostLightCall4 = kIdX64LightCall4

 #elif ASMJIT_ARCH_ARM == 32

-#if defined(__SOFTFP__)
+# if defined(__SOFTFP__)
   kIdHost = kIdArm32SoftFP,
-#else
+# else
   kIdHost = kIdArm32HardFP,
-#endif
+# endif
   // These don't exist on ARM.
   kIdHostCDecl = kIdHost, // Doesn't exist, redirected to host.
   kIdHostStdCall = kIdHost, // Doesn't exist, redirected to host.
   kIdHostFastCall = kIdHost // Doesn't exist, redirected to host.

 #else

   kIdHost = kIdNone,
   kIdHostCDecl = kIdHost,
   kIdHostStdCall = kIdHost,
   kIdHostFastCall = kIdHost

 #endif
   };

   //! Strategy used to assign registers to function arguments.
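For reference, `CallConv::kIdHost` resolved by the #if chain above is what the README example passes when declaring its function signature. A minimal sketch of that usage (it restates API usage already visible in this commit rather than introducing anything new):

```c++
#include <asmjit/asmjit.h>

using namespace asmjit;

// Signature of: float fn(float a, float b, uint32_t op), using the host calling
// convention. kIdHost is resolved at compile time to kIdX86SysV64, kIdX86Win64,
// kIdX86CDecl, etc., depending on the target and compiler.
static FuncSignatureT<float, float, float, uint32_t> makeSignature() {
  return FuncSignatureT<float, float, float, uint32_t>(CallConv::kIdHost);
}
```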
@@ -289,13 +289,13 @@ void CodeHolder::clearEmitterOptions(uint32_t options) noexcept {
 // ============================================================================

 void CodeHolder::setLogger(Logger* logger) noexcept {
 #ifndef ASMJIT_NO_LOGGING
   _logger = logger;
   uint32_t option = !logger ? uint32_t(0) : uint32_t(BaseEmitter::kOptionLoggingEnabled);
   CodeHolder_modifyEmitterOptions(this, BaseEmitter::kOptionLoggingEnabled, option);
 #else
-  ASMJIT_UNUSED(logger);
+  DebugUtils::unused(logger);
 #endif
 }

 // ============================================================================
@@ -623,7 +623,7 @@ Error CodeHolder::newNamedLabelEntry(LabelEntry** entryOut, const char* name, si

 uint32_t CodeHolder::labelIdByName(const char* name, size_t nameSize, uint32_t parentId) noexcept {
   // TODO: Finalize - parent id is not used here?
-  ASMJIT_UNUSED(parentId);
+  DebugUtils::unused(parentId);

   uint32_t hashCode = CodeHolder_hashNameAndGetSize(name, nameSize);
   if (ASMJIT_UNLIKELY(!nameSize)) return 0;
@@ -46,8 +46,7 @@ class GlobalConstPoolPass : public Pass {
   GlobalConstPoolPass() noexcept : Pass("GlobalConstPoolPass") {}

   Error run(Zone* zone, Logger* logger) noexcept override {
-    ASMJIT_UNUSED(zone);
-    ASMJIT_UNUSED(logger);
+    DebugUtils::unused(zone, logger);

     // Flush the global constant pool.
     BaseCompiler* compiler = static_cast<BaseCompiler*>(_cb);

@@ -285,7 +284,7 @@ FuncCallNode* BaseCompiler::addCall(uint32_t instId, const Operand_& o0, const F
 // [asmjit::BaseCompiler - Vars]
 // ============================================================================

-static void CodeCompiler_assignGenericName(BaseCompiler* self, VirtReg* vReg) {
+static void BaseCompiler_assignGenericName(BaseCompiler* self, VirtReg* vReg) {
   uint32_t index = unsigned(Operand::virtIdToIndex(vReg->_id));

   char buf[64];

@@ -311,12 +310,14 @@ VirtReg* BaseCompiler::newVirtReg(uint32_t typeId, uint32_t signature, const cha

   vReg = new(vReg) VirtReg(Operand::indexToVirtId(index), signature, size, alignment, typeId);

 #ifndef ASMJIT_NO_LOGGING
   if (name && name[0] != '\0')
     vReg->_name.setData(&_dataZone, name, SIZE_MAX);
   else
-    CodeCompiler_assignGenericName(this, vReg);
-#endif
+    BaseCompiler_assignGenericName(this, vReg);
+#else
+  DebugUtils::unused(name);
+#endif

   _vRegArray.appendUnsafe(vReg);
   return vReg;
@@ -544,10 +545,67 @@ void BaseCompiler::rename(const BaseReg& reg, const char* fmt, ...) {
     vReg->_name.setData(&_dataZone, buf, SIZE_MAX);
   }
   else {
-    CodeCompiler_assignGenericName(this, vReg);
+    BaseCompiler_assignGenericName(this, vReg);
   }
 }

+// ============================================================================
+// [asmjit::BaseCompiler - Jump Annotations]
+// ============================================================================
+
+JumpNode* BaseCompiler::newJumpNode(uint32_t instId, uint32_t instOptions, const Operand_& o0, JumpAnnotation* annotation) noexcept {
+  uint32_t opCount = 1;
+  JumpNode* node = _allocator.allocT<JumpNode>();
+  if (ASMJIT_UNLIKELY(!node))
+    return nullptr;
+
+  node = new(node) JumpNode(this, instId, instOptions, opCount, annotation);
+  node->setOp(0, o0);
+  node->resetOps(opCount, JumpNode::kBaseOpCapacity);
+  return node;
+}
+
+Error BaseCompiler::emitAnnotatedJump(uint32_t instId, const Operand_& o0, JumpAnnotation* annotation) {
+  uint32_t options = instOptions() | globalInstOptions();
+  const char* comment = inlineComment();
+
+  JumpNode* node = newJumpNode(instId, options, o0, annotation);
+
+  resetInstOptions();
+  resetInlineComment();
+
+  if (ASMJIT_UNLIKELY(!node)) {
+    resetExtraReg();
+    return reportError(DebugUtils::errored(kErrorOutOfMemory));
+  }
+
+  node->setExtraReg(extraReg());
+  if (comment)
+    node->setInlineComment(static_cast<char*>(_dataZone.dup(comment, strlen(comment), true)));
+
+  addNode(node);
+  resetExtraReg();
+  return kErrorOk;
+}
+
+JumpAnnotation* BaseCompiler::newJumpAnnotation() {
+  if (_jumpAnnotations.grow(&_allocator, 1) != kErrorOk) {
+    reportError(DebugUtils::errored(kErrorOutOfMemory));
+    return nullptr;
+  }
+
+  uint32_t id = _jumpAnnotations.size();
+  JumpAnnotation* jumpAnnotation = _allocator.newT<JumpAnnotation>(this, id);
+
+  if (!jumpAnnotation) {
+    reportError(DebugUtils::errored(kErrorOutOfMemory));
+    return nullptr;
+  }
+
+  _jumpAnnotations.appendUnsafe(jumpAnnotation);
+  return jumpAnnotation;
+}
+
 // ============================================================================
 // [asmjit::BaseCompiler - Events]
 // ============================================================================
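The README example calls `cc.jmp(target, annotation)`, which presumably ends up in `emitAnnotatedJump()` added above via a small overload on the x86-specific Compiler front-end. A hypothetical sketch of that glue (the member name `jmp` and the use of `x86::Inst::kIdJmp` are assumptions for illustration, not a quote of the actual x86::Compiler code in this commit):

```c++
// Hypothetical overload on the x86 Compiler front-end: a jump with a
// register operand plus an annotation listing every label it may land on.
inline asmjit::Error jmp(const asmjit::x86::Gp& target, asmjit::JumpAnnotation* annotation) {
  return emitAnnotatedJump(asmjit::x86::Inst::kIdJmp, target, annotation);
}
```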
@@ -46,6 +46,9 @@ ASMJIT_BEGIN_NAMESPACE
 struct RATiedReg;
 class RAWorkReg;

+class JumpAnnotation;
+
+class JumpNode;
 class FuncNode;
 class FuncRetNode;
 class FuncCallNode;

@@ -208,6 +211,8 @@ public:
   Zone _vRegZone;
   //! Stores array of `VirtReg` pointers.
   ZoneVector<VirtReg*> _vRegArray;
+  //! Stores jump annotations.
+  ZoneVector<JumpAnnotation*> _jumpAnnotations;

   //! Local constant pool, flushed at the end of each function.
   ConstPoolNode* _localConstPool;

@@ -326,9 +331,26 @@ public:

   //! \}

+  //! \name Jump Annotations
+  //! \{
+
+  inline const ZoneVector<JumpAnnotation*>& jumpAnnotations() const noexcept {
+    return _jumpAnnotations;
+  }
+
+  ASMJIT_API JumpNode* newJumpNode(uint32_t instId, uint32_t instOptions, const Operand_& o0, JumpAnnotation* annotation) noexcept;
+  ASMJIT_API Error emitAnnotatedJump(uint32_t instId, const Operand_& o0, JumpAnnotation* annotation);
+
+  //! Returns a new `JumpAnnotation` instance, which can be used to aggregate
+  //! possible targets of a jump where the target is not a label, for example
+  //! to implement jump tables.
+  ASMJIT_API JumpAnnotation* newJumpAnnotation();
+
+  //! \}
+
   // TODO: These should be removed
-  inline void alloc(BaseReg& reg) { ASMJIT_UNUSED(reg); }
-  inline void spill(BaseReg& reg) { ASMJIT_UNUSED(reg); }
+  inline void alloc(BaseReg& reg) { DebugUtils::unused(reg); }
+  inline void spill(BaseReg& reg) { DebugUtils::unused(reg); }

   //! \name Events
   //! \{

@@ -339,6 +361,70 @@ public:
   //! \}
 };

+// ============================================================================
+// [asmjit::JumpAnnotation]
+// ============================================================================
+
+class JumpAnnotation {
+public:
+  ASMJIT_NONCOPYABLE(JumpAnnotation)
+
+  BaseCompiler* _compiler;
+  uint32_t _annotationId;
+  ZoneVector<uint32_t> _labelIds;
+
+  inline JumpAnnotation(BaseCompiler* compiler, uint32_t annotationId) noexcept
+    : _compiler(compiler),
+      _annotationId(annotationId) {}
+
+  inline BaseCompiler* compiler() const noexcept { return _compiler; }
+  inline uint32_t annotationId() const noexcept { return _annotationId; }
+  const ZoneVector<uint32_t>& labelIds() const noexcept { return _labelIds; }
+
+  inline bool hasLabel(const Label& label) const noexcept { return hasLabelId(label.id()); }
+  inline bool hasLabelId(uint32_t labelId) const noexcept { return _labelIds.contains(labelId); }
+
+  inline Error addLabel(const Label& label) noexcept { return addLabelId(label.id()); }
+  inline Error addLabelId(uint32_t labelId) noexcept { return _labelIds.append(&_compiler->_allocator, labelId); }
+};
+
+// ============================================================================
+// [asmjit::JumpNode]
+// ============================================================================
+
+//! Jump instruction with \ref JumpAnnotation.
+//!
+//! \note This node should only be used to represent a jump whose target cannot
+//! be deduced by examining the instruction's operands, for example when the
+//! jump target is a register or a memory location. This pattern is often used
+//! to perform indirect jumps through a jump table, e.g. to implement `switch`.
+class JumpNode : public InstNode {
+public:
+  ASMJIT_NONCOPYABLE(JumpNode)
+
+  JumpAnnotation* _annotation;
+
+  //! \name Construction & Destruction
+  //! \{
+
+  ASMJIT_INLINE JumpNode(BaseCompiler* cc, uint32_t instId, uint32_t options, uint32_t opCount, JumpAnnotation* annotation) noexcept
+    : InstNode(cc, instId, options, opCount, kBaseOpCapacity),
+      _annotation(annotation) {
+    setType(kNodeJump);
+  }
+
+  //! \}
+
+  //! \name Accessors
+  //! \{
+
+  inline bool hasAnnotation() const noexcept { return _annotation != nullptr; }
+  inline JumpAnnotation* annotation() const noexcept { return _annotation; }
+  inline void setAnnotation(JumpAnnotation* annotation) noexcept { _annotation = annotation; }
+
+  //! \}
+};
+
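To connect the pieces: `labelIds()` on a `JumpAnnotation` is what later analysis can consult to obtain the successors of an annotated `JumpNode`, instead of treating an indirect jump as "may go anywhere" (the pessimistic default described in the README section above). A conceptual sketch of that consumption — hypothetical helper, not asmjit's actual register-allocation pass:

```c++
// Collects the label ids of a node's possible jump targets into `out`.
// Returns false when the node is not an annotated jump and the caller
// must fall back to the pessimistic assumption.
static bool appendJumpTargets(asmjit::BaseNode* node,
                              asmjit::ZoneVector<uint32_t>& out,
                              asmjit::ZoneAllocator* allocator) noexcept {
  using namespace asmjit;

  if (node->type() != BaseNode::kNodeJump)
    return false;

  JumpAnnotation* annotation = node->as<JumpNode>()->annotation();
  if (!annotation)
    return false;

  for (uint32_t labelId : annotation->labelIds())
    out.append(allocator, labelId);
  return true;
}
```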
 // ============================================================================
 // [asmjit::FuncNode]
 // ============================================================================
@@ -78,13 +78,13 @@ const CpuInfo& CpuInfo::host() noexcept {
   if (!cpuInfoInitialized) {
     CpuInfo cpuInfoLocal;

 #if defined(ASMJIT_BUILD_X86) && ASMJIT_ARCH_X86
     x86::detectCpu(cpuInfoLocal);
 #endif

 #if defined(ASMJIT_BUILD_ARM) && ASMJIT_ARCH_ARM
     arm::detectCpu(cpuInfoLocal);
 #endif

     cpuInfoLocal._hwThreadCount = detectHWThreadCount();
     cpuInfoGlobal = cpuInfoLocal;
@@ -136,15 +136,15 @@ ASMJIT_FAVOR_SIZE Error BaseEmitter::emitProlog(const FuncFrame& frame) {
   if (ASMJIT_UNLIKELY(!_code))
     return DebugUtils::errored(kErrorNotInitialized);

 #ifdef ASMJIT_BUILD_X86
   if (archInfo().isX86Family())
     return x86::X86Internal::emitProlog(as<x86::Emitter>(), frame);
 #endif

 #ifdef ASMJIT_BUILD_ARM
   if (archInfo().isArmFamily())
     return arm::ArmInternal::emitProlog(as<arm::Emitter>(), frame);
 #endif

   return DebugUtils::errored(kErrorInvalidArch);
 }

@@ -153,15 +153,15 @@ ASMJIT_FAVOR_SIZE Error BaseEmitter::emitEpilog(const FuncFrame& frame) {
   if (ASMJIT_UNLIKELY(!_code))
     return DebugUtils::errored(kErrorNotInitialized);

 #ifdef ASMJIT_BUILD_X86
   if (archInfo().isX86Family())
     return x86::X86Internal::emitEpilog(as<x86::Emitter>(), frame);
 #endif

 #ifdef ASMJIT_BUILD_ARM
   if (archInfo().isArmFamily())
     return arm::ArmInternal::emitEpilog(as<arm::Emitter>(), frame);
 #endif

   return DebugUtils::errored(kErrorInvalidArch);
 }
@@ -170,15 +170,15 @@ ASMJIT_FAVOR_SIZE Error BaseEmitter::emitArgsAssignment(const FuncFrame& frame,
|
|||||||
if (ASMJIT_UNLIKELY(!_code))
|
if (ASMJIT_UNLIKELY(!_code))
|
||||||
return DebugUtils::errored(kErrorNotInitialized);
|
return DebugUtils::errored(kErrorNotInitialized);
|
||||||
|
|
||||||
#ifdef ASMJIT_BUILD_X86
|
#ifdef ASMJIT_BUILD_X86
|
||||||
if (archInfo().isX86Family())
|
if (archInfo().isX86Family())
|
||||||
return x86::X86Internal::emitArgsAssignment(as<x86::Emitter>(), frame, args);
|
return x86::X86Internal::emitArgsAssignment(as<x86::Emitter>(), frame, args);
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef ASMJIT_BUILD_ARM
|
#ifdef ASMJIT_BUILD_ARM
|
||||||
if (archInfo().isArmFamily())
|
if (archInfo().isArmFamily())
|
||||||
return arm::ArmInternal::emitArgsAssignment(as<arm::Emitter>(), frame, args);
|
return arm::ArmInternal::emitArgsAssignment(as<arm::Emitter>(), frame, args);
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
return DebugUtils::errored(kErrorInvalidArch);
|
return DebugUtils::errored(kErrorInvalidArch);
|
||||||
}
|
}
|
||||||
@@ -191,7 +191,7 @@ Error BaseEmitter::commentf(const char* fmt, ...) {
|
|||||||
if (ASMJIT_UNLIKELY(!_code))
|
if (ASMJIT_UNLIKELY(!_code))
|
||||||
return DebugUtils::errored(kErrorNotInitialized);
|
return DebugUtils::errored(kErrorNotInitialized);
|
||||||
|
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
StringTmp<1024> sb;
|
StringTmp<1024> sb;
|
||||||
|
|
||||||
va_list ap;
|
va_list ap;
|
||||||
@@ -203,17 +203,17 @@ Error BaseEmitter::commentf(const char* fmt, ...) {
|
|||||||
return err;
|
return err;
|
||||||
|
|
||||||
return comment(sb.data(), sb.size());
|
return comment(sb.data(), sb.size());
|
||||||
#else
|
#else
|
||||||
ASMJIT_UNUSED(fmt);
|
DebugUtils::unused(fmt);
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
Error BaseEmitter::commentv(const char* fmt, va_list ap) {
|
Error BaseEmitter::commentv(const char* fmt, va_list ap) {
|
||||||
if (ASMJIT_UNLIKELY(!_code))
|
if (ASMJIT_UNLIKELY(!_code))
|
||||||
return DebugUtils::errored(kErrorNotInitialized);
|
return DebugUtils::errored(kErrorNotInitialized);
|
||||||
|
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
StringTmp<1024> sb;
|
StringTmp<1024> sb;
|
||||||
|
|
||||||
Error err = sb.appendVFormat(fmt, ap);
|
Error err = sb.appendVFormat(fmt, ap);
|
||||||
@@ -221,11 +221,10 @@ Error BaseEmitter::commentv(const char* fmt, va_list ap) {
|
|||||||
return err;
|
return err;
|
||||||
|
|
||||||
return comment(sb.data(), sb.size());
|
return comment(sb.data(), sb.size());
|
||||||
#else
|
#else
|
||||||
ASMJIT_UNUSED(fmt);
|
DebugUtils::unused(fmt, ap);
|
||||||
ASMJIT_UNUSED(ap);
|
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -242,7 +241,7 @@ Error BaseEmitter::onAttach(CodeHolder* code) noexcept {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Error BaseEmitter::onDetach(CodeHolder* code) noexcept {
|
Error BaseEmitter::onDetach(CodeHolder* code) noexcept {
|
||||||
ASMJIT_UNUSED(code);
|
DebugUtils::unused(code);
|
||||||
|
|
||||||
_flags = 0;
|
_flags = 0;
|
||||||
_emitterOptions = 0;
|
_emitterOptions = 0;
|
||||||
|
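For reference, a minimal sketch of how the emitter-level comment helpers above are typically called. The emitter instance and the format argument are illustrative only and are not taken from this diff:

  #include <asmjit/asmjit.h>
  using namespace asmjit;

  void annotate(BaseEmitter* emitter, uint32_t spillSize) {
    // Formatted comments are swallowed (see the DebugUtils::unused() path above)
    // when the library is built with ASMJIT_NO_LOGGING, so the call is safe to
    // leave in release builds. `spillSize` is a made-up example value.
    emitter->commentf("spill area = %u bytes", spillSize);
  }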
|||||||
@@ -69,15 +69,15 @@ ASMJIT_FAVOR_SIZE Error FuncDetail::init(const FuncSignature& sign) {
    _retCount = 1;
  }

#ifdef ASMJIT_BUILD_X86
  if (CallConv::isX86Family(ccId))
    return x86::X86Internal::initFuncDetail(*this, sign, gpSize);
#endif

#ifdef ASMJIT_BUILD_ARM
  if (CallConv::isArmFamily(ccId))
    return arm::ArmInternal::initFuncDetail(*this, sign, gpSize);
#endif

  // We should never bubble here as if `cc.init()` succeeded then there has to
  // be an implementation for the current architecture. However, stay safe.
@@ -91,29 +91,29 @@ ASMJIT_FAVOR_SIZE Error FuncDetail::init(const FuncSignature& sign) {
ASMJIT_FAVOR_SIZE Error FuncFrame::init(const FuncDetail& func) noexcept {
  uint32_t ccId = func.callConv().id();

#ifdef ASMJIT_BUILD_X86
  if (CallConv::isX86Family(ccId))
    return x86::X86Internal::initFuncFrame(*this, func);
#endif

#ifdef ASMJIT_BUILD_ARM
  if (CallConv::isArmFamily(ccId))
    return arm::ArmInternal::initFuncFrame(*this, func);
#endif

  return DebugUtils::errored(kErrorInvalidArgument);
}

ASMJIT_FAVOR_SIZE Error FuncFrame::finalize() noexcept {
#ifdef ASMJIT_BUILD_X86
  if (ArchInfo::isX86Family(archId()))
    return x86::X86Internal::finalizeFuncFrame(*this);
#endif

#ifdef ASMJIT_BUILD_ARM
  if (ArchInfo::isArmFamily(archId()))
    return arm::ArmInternal::finalizeFuncFrame(*this);
#endif

  return DebugUtils::errored(kErrorInvalidArgument);
}
@@ -128,15 +128,15 @@ ASMJIT_FAVOR_SIZE Error FuncArgsAssignment::updateFuncFrame(FuncFrame& frame) co

  uint32_t ccId = func->callConv().id();

#ifdef ASMJIT_BUILD_X86
  if (CallConv::isX86Family(ccId))
    return x86::X86Internal::argsToFuncFrame(*this, frame);
#endif

#ifdef ASMJIT_BUILD_ARM
  if (CallConv::isArmFamily(ccId))
    return arm::ArmInternal::argsToFuncFrame(*this, frame);
#endif

  return DebugUtils::errored(kErrorInvalidArch);
}
|||||||
@@ -103,7 +103,7 @@ ASMJIT_FAVOR_SIZE const char* DebugUtils::errorAsString(Error err) noexcept {
    "Unknown error\0";
  return Support::findPackedString(errorMessages, Support::min<Error>(err, kErrorCount));
#else
-  ASMJIT_UNUSED(err);
+  DebugUtils::unused(err);
  static const char noMessage[] = "";
  return noMessage;
#endif
@@ -370,6 +370,10 @@ static inline void* func_as_ptr(Func func) noexcept { return Support::ptr_cast_i
//! Debugging utilities.
namespace DebugUtils {

+//! Used to silence warnings about unused arguments or variables.
+template<typename... Args>
+static ASMJIT_INLINE void unused(Args&&...) noexcept {}
+
//! Returns the error `err` passed.
//!
//! Provided for debugging purposes. Putting a breakpoint inside `errored` can
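A small sketch of what the new variadic helper above replaces (the callback itself is made up for illustration; only DebugUtils::unused() comes from this diff):

  #include <asmjit/asmjit.h>

  // Before: every unused parameter needed its own ASMJIT_UNUSED(x) statement.
  // After: one DebugUtils::unused(...) call accepts any number of arguments.
  static void onDiagnostic(asmjit::Error err, const char* message, void* userData) noexcept {
    asmjit::DebugUtils::unused(err, message, userData); // silences unused-parameter warnings
  }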
|||||||
@@ -296,9 +296,13 @@ public:
    kOptionReserved = 0x00000001u,

    //! Used only by Assembler to mark that `_op4` and `_op5` are used (internal).
+    //!
+    //! TODO: This should be removed in the future.
    kOptionOp4Op5Used = 0x00000002u,

    //! Prevents following a jump during compilation (BaseCompiler).
+    //!
+    //! TODO: This should be renamed to kOptionNoReturn.
    kOptionUnfollow = 0x00000010u,

    //! Overwrite the destination operand(s) (BaseCompiler).
|||||||
@@ -470,7 +470,7 @@ static JitAllocatorBlock* JitAllocatorImpl_newBlock(JitAllocatorPrivateImpl* imp
}

static void JitAllocatorImpl_deleteBlock(JitAllocatorPrivateImpl* impl, JitAllocatorBlock* block) noexcept {
-  ASMJIT_UNUSED(impl);
+  DebugUtils::unused(impl);

  if (block->flags & JitAllocatorBlock::kFlagDualMapped)
    VirtMem::releaseDualMapping(&block->mapping, block->blockSize);
|||||||
@@ -39,8 +39,7 @@ static inline void JitRuntime_flushInstructionCache(const void* p, size_t size)
  // Windows has a built-in support in `kernel32.dll`.
  ::FlushInstructionCache(::GetCurrentProcess(), p, size);
#else
-  ASMJIT_UNUSED(p);
-  ASMJIT_UNUSED(size);
+  DebugUtils::unused(p, size);
#endif
}

|||||||
@@ -146,7 +146,7 @@ Error Logging::formatLabel(
  const BaseEmitter* emitter,
  uint32_t labelId) noexcept {

-  ASMJIT_UNUSED(flags);
+  DebugUtils::unused(flags);

  const LabelEntry* le = emitter->code()->labelEntry(labelId);
  if (ASMJIT_UNLIKELY(!le))
@@ -181,15 +181,15 @@ Error Logging::formatRegister(
  uint32_t regType,
  uint32_t regId) noexcept {

#ifdef ASMJIT_BUILD_X86
  if (ArchInfo::isX86Family(archId))
    return x86::LoggingInternal::formatRegister(sb, flags, emitter, archId, regType, regId);
#endif

#ifdef ASMJIT_BUILD_ARM
  if (ArchInfo::isArmFamily(archId))
    return arm::LoggingInternal::formatRegister(sb, flags, emitter, archId, regType, regId);
#endif

  return kErrorInvalidArch;
}
@@ -201,15 +201,15 @@ Error Logging::formatOperand(
  uint32_t archId,
  const Operand_& op) noexcept {

#ifdef ASMJIT_BUILD_X86
  if (ArchInfo::isX86Family(archId))
    return x86::LoggingInternal::formatOperand(sb, flags, emitter, archId, op);
#endif

#ifdef ASMJIT_BUILD_ARM
  if (ArchInfo::isArmFamily(archId))
    return arm::LoggingInternal::formatOperand(sb, flags, emitter, archId, op);
#endif

  return kErrorInvalidArch;
}
@@ -221,15 +221,15 @@ Error Logging::formatInstruction(
  uint32_t archId,
  const BaseInst& inst, const Operand_* operands, uint32_t opCount) noexcept {

#ifdef ASMJIT_BUILD_X86
  if (ArchInfo::isX86Family(archId))
    return x86::LoggingInternal::formatInstruction(sb, flags, emitter, archId, inst, operands, opCount);
#endif

#ifdef ASMJIT_BUILD_ARM
  if (ArchInfo::isArmFamily(archId))
    return arm::LoggingInternal::formatInstruction(sb, flags, emitter, archId, inst, operands, opCount);
#endif

  return kErrorInvalidArch;
}
@@ -309,12 +309,14 @@ static Error formatFuncRets(
    if (i) ASMJIT_PROPAGATE(sb.appendString(", "));
    ASMJIT_PROPAGATE(formatFuncValue(sb, flags, emitter, fd.ret(i)));

#ifndef ASMJIT_NO_COMPILER
    if (vRegs) {
      static const char nullRet[] = "<none>";
      ASMJIT_PROPAGATE(sb.appendFormat(" %s", vRegs[i] ? vRegs[i]->name() : nullRet));
    }
-#endif
+#else
+    DebugUtils::unused(vRegs);
+#endif
  }

  return kErrorOk;
@@ -335,12 +337,14 @@ static Error formatFuncArgs(
    if (i) ASMJIT_PROPAGATE(sb.appendString(", "));
    ASMJIT_PROPAGATE(formatFuncValue(sb, flags, emitter, fd.arg(i)));

#ifndef ASMJIT_NO_COMPILER
    if (vRegs) {
      static const char nullArg[] = "<none>";
      ASMJIT_PROPAGATE(sb.appendFormat(" %s", vRegs[i] ? vRegs[i]->name() : nullArg));
    }
-#endif
+#else
+    DebugUtils::unused(vRegs);
+#endif
  }

  return kErrorOk;
@@ -356,7 +360,8 @@ Error Logging::formatNode(
    ASMJIT_PROPAGATE(sb.appendFormat("<%05u> ", node_->position()));

  switch (node_->type()) {
-    case BaseNode::kNodeInst: {
+    case BaseNode::kNodeInst:
+    case BaseNode::kNodeJump: {
      const InstNode* node = node_->as<InstNode>();
      ASMJIT_PROPAGATE(
        Logging::formatInstruction(sb, flags, cb,
@@ -437,7 +442,7 @@ Error Logging::formatNode(
      break;
    }

#ifndef ASMJIT_NO_COMPILER
    case BaseNode::kNodeFunc: {
      const FuncNode* node = node_->as<FuncNode>();

@@ -473,7 +478,7 @@ Error Logging::formatNode(
          node->baseInst(), node->operands(), node->opCount()));
      break;
    }
#endif

    default: {
      ASMJIT_PROPAGATE(sb.appendFormat("[User:%u]", node_->type()));
|||||||
@@ -58,25 +58,25 @@ public:
  uint8_t _indentation[4];

  enum Flags : uint32_t {
-    //!< Show also binary form of each logged instruction (assembler).
+    //! Show also binary form of each logged instruction (assembler).
    kFlagMachineCode = 0x00000001u,
-    //!< Show a text explanation of some immediate values.
+    //! Show a text explanation of some immediate values.
    kFlagExplainImms = 0x00000002u,
-    //!< Use hexadecimal notation of immediate values.
+    //! Use hexadecimal notation of immediate values.
    kFlagHexImms = 0x00000004u,
-    //!< Use hexadecimal notation of address offsets.
+    //! Use hexadecimal notation of address offsets.
    kFlagHexOffsets = 0x00000008u,
-    //!< Show casts between virtual register types (compiler).
+    //! Show casts between virtual register types (compiler).
    kFlagRegCasts = 0x00000010u,
-    //!< Show positions associated with nodes (compiler).
+    //! Show positions associated with nodes (compiler).
    kFlagPositions = 0x00000020u,
-    //!< Annotate nodes that are lowered by passes.
+    //! Annotate nodes that are lowered by passes.
    kFlagAnnotations = 0x00000040u,

    // TODO: These must go, keep this only for formatting.
-    //!< Show an additional output from passes.
+    //! Show an additional output from passes.
    kFlagDebugPasses = 0x00000080u,
-    //!< Show an additional output from RA.
+    //! Show an additional output from RA.
    kFlagDebugRA = 0x00000100u
  };

@@ -324,16 +324,16 @@ struct Logging {
    String& sb,
    uint32_t typeId) noexcept;

#ifndef ASMJIT_NO_BUILDER
  ASMJIT_API static Error formatNode(
    String& sb,
    uint32_t flags,
    const BaseBuilder* cb,
    const BaseNode* node_) noexcept;
#endif

  // Only used by AsmJit internals, not available to users.
#ifdef ASMJIT_EXPORTS
  enum {
    // Has to be big to be able to hold all metadata compiler can assign to a
    // single instruction.
@@ -344,7 +344,7 @@ struct Logging {
  static Error formatLine(
    String& sb,
    const uint8_t* binData, size_t binSize, size_t dispSize, size_t immSize, const char* comment) noexcept;
#endif
};
#endif

|||||||
@@ -172,7 +172,7 @@ public:
  inline uint32_t dirty(uint32_t group) const noexcept { return _physToWorkMap->dirty[group]; }

  inline uint32_t workToPhysId(uint32_t group, uint32_t workId) const noexcept {
-    ASMJIT_UNUSED(group);
+    DebugUtils::unused(group);
    ASMJIT_ASSERT(workId != kWorkNone);
    ASMJIT_ASSERT(workId < _layout.workCount);
    return _workToPhysMap->physIds[workId];
@@ -289,15 +289,13 @@ public:
  }

  inline void makeClean(uint32_t group, uint32_t workId, uint32_t physId) noexcept {
-    ASMJIT_UNUSED(workId);
-
+    DebugUtils::unused(workId);
    uint32_t regMask = Support::bitMask(physId);
    _physToWorkMap->dirty[group] &= ~regMask;
  }

  inline void makeDirty(uint32_t group, uint32_t workId, uint32_t physId) noexcept {
-    ASMJIT_UNUSED(workId);
-
+    DebugUtils::unused(workId);
    uint32_t regMask = Support::bitMask(physId);
    _physToWorkMap->dirty[group] |= regMask;
  }
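The dirty tracking touched above is a plain per-group bit set: one bit per physical register. A minimal stand-alone model of the same idea (the names below are mine, not asmjit's):

  #include <cstdint>

  // Each physical register occupies one bit; makeDirty()/makeClean() above do the
  // equivalent of these two helpers on _physToWorkMap->dirty[group].
  inline uint32_t bitMask(uint32_t physId) noexcept { return 1u << physId; }

  inline void markDirty(uint32_t& dirty, uint32_t physId) noexcept { dirty |=  bitMask(physId); }
  inline void markClean(uint32_t& dirty, uint32_t physId) noexcept { dirty &= ~bitMask(physId); }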
|||||||
@@ -44,8 +44,26 @@ class RACFGBuilder {
public:
  RAPass* _pass;
  BaseCompiler* _cc;

  RABlock* _curBlock;
  RABlock* _retBlock;
+  FuncNode* _funcNode;
+  RARegsStats _blockRegStats;
+  uint32_t _exitLabelId;
+  ZoneVector<uint32_t> _sharedAssignmentsMap;
+
+  // Only used by logging, it's fine to be here to prevent more #ifdefs...
+  bool _hasCode;
+  RABlock* _lastLoggedBlock;
+
+#ifndef ASMJIT_NO_LOGGING
+  Logger* _logger;
+  uint32_t _logFlags;
+  StringTmp<512> _sb;
+#endif
+
+  static constexpr uint32_t kRootIndentation = 2;
+  static constexpr uint32_t kCodeIndentation = 4;
+
  // NOTE: This is a bit hacky. There are some nodes which are processed twice
  // (see `onBeforeCall()` and `onBeforeRet()`) as they can insert some nodes
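For orientation, a hypothetical user-level sketch (not part of this diff) of how the indirect-jump support that the CFG-builder changes below enable is meant to be used with the Compiler: the possible targets of an indirect jump are announced through a JumpAnnotation so the register allocator can connect the jump to every candidate successor block. The exact Compiler calls are assumed from this change, not quoted from it:

  #include <asmjit/asmjit.h>
  using namespace asmjit;

  void emitJumpTable(x86::Compiler& cc, Label L_Case0, Label L_Case1) {
    x86::Gp target = cc.newIntPtr("target");

    JumpAnnotation* annotation = cc.newJumpAnnotation(); // assumed API added by this change
    annotation->addLabel(L_Case0);
    annotation->addLabel(L_Case1);

    // ... compute `target` from a table of label addresses ...
    cc.jmp(target, annotation);                          // annotated indirect jump
  }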
@@ -57,241 +75,264 @@ public:
|
|||||||
: _pass(pass),
|
: _pass(pass),
|
||||||
_cc(pass->cc()),
|
_cc(pass->cc()),
|
||||||
_curBlock(nullptr),
|
_curBlock(nullptr),
|
||||||
_retBlock(nullptr) {}
|
_retBlock(nullptr),
|
||||||
|
_funcNode(nullptr),
|
||||||
|
_blockRegStats{},
|
||||||
|
_exitLabelId(Globals::kInvalidId),
|
||||||
|
_hasCode(false),
|
||||||
|
_lastLoggedBlock(nullptr) {
|
||||||
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
|
_logger = _pass->debugLogger();
|
||||||
|
_logFlags = FormatOptions::kFlagPositions;
|
||||||
|
|
||||||
|
if (_logger)
|
||||||
|
_logFlags |= _logger->flags();
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
inline BaseCompiler* cc() const noexcept { return _cc; }
|
inline BaseCompiler* cc() const noexcept { return _cc; }
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// [Run]
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
//! Called per function by an architecture-specific CFG builder.
|
||||||
Error run() noexcept {
|
Error run() noexcept {
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
log("[RAPass::BuildCFG]\n");
|
||||||
Logger* logger = _pass->debugLogger();
|
ASMJIT_PROPAGATE(prepare());
|
||||||
uint32_t flags = FormatOptions::kFlagPositions;
|
|
||||||
RABlock* lastPrintedBlock = nullptr;
|
|
||||||
StringTmp<512> sb;
|
|
||||||
#endif
|
|
||||||
|
|
||||||
ASMJIT_RA_LOG_FORMAT("[RAPass::BuildCFG]\n");
|
logNode(_funcNode, kRootIndentation);
|
||||||
|
logBlock(_curBlock, kRootIndentation);
|
||||||
|
|
||||||
FuncNode* func = _pass->func();
|
BaseNode* node = _funcNode->next();
|
||||||
BaseNode* node = nullptr;
|
|
||||||
|
|
||||||
// Create entry and exit blocks.
|
|
||||||
_retBlock = _pass->newBlockOrExistingAt(func->exitNode(), &node);
|
|
||||||
if (ASMJIT_UNLIKELY(!_retBlock))
|
|
||||||
return DebugUtils::errored(kErrorOutOfMemory);
|
|
||||||
ASMJIT_PROPAGATE(_pass->addExitBlock(_retBlock));
|
|
||||||
|
|
||||||
if (node != func) {
|
|
||||||
_curBlock = _pass->newBlock();
|
|
||||||
if (ASMJIT_UNLIKELY(!_curBlock))
|
|
||||||
return DebugUtils::errored(kErrorOutOfMemory);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
// Function that has no code at all.
|
|
||||||
_curBlock = _retBlock;
|
|
||||||
}
|
|
||||||
|
|
||||||
ASMJIT_PROPAGATE(_pass->addBlock(_curBlock));
|
|
||||||
|
|
||||||
RARegsStats blockRegStats;
|
|
||||||
blockRegStats.reset();
|
|
||||||
RAInstBuilder ib;
|
|
||||||
|
|
||||||
bool hasCode = false;
|
|
||||||
uint32_t exitLabelId = func->exitNode()->id();
|
|
||||||
|
|
||||||
ASMJIT_RA_LOG_COMPLEX({
|
|
||||||
flags |= logger->flags();
|
|
||||||
|
|
||||||
Logging::formatNode(sb, flags, cc(), func);
|
|
||||||
logger->logf(" %s\n", sb.data());
|
|
||||||
|
|
||||||
lastPrintedBlock = _curBlock;
|
|
||||||
logger->logf(" {#%u}\n", lastPrintedBlock->blockId());
|
|
||||||
});
|
|
||||||
|
|
||||||
node = func->next();
|
|
||||||
if (ASMJIT_UNLIKELY(!node))
|
if (ASMJIT_UNLIKELY(!node))
|
||||||
return DebugUtils::errored(kErrorInvalidState);
|
return DebugUtils::errored(kErrorInvalidState);
|
||||||
|
|
||||||
_curBlock->setFirst(node);
|
_curBlock->setFirst(node);
|
||||||
_curBlock->setLast(node);
|
_curBlock->setLast(node);
|
||||||
|
|
||||||
|
RAInstBuilder ib;
|
||||||
|
ZoneVector<RABlock*> blocksWithUnknownJumps;
|
||||||
|
|
||||||
for (;;) {
|
for (;;) {
|
||||||
BaseNode* next = node->next();
|
BaseNode* next = node->next();
|
||||||
ASMJIT_ASSERT(node->position() == 0 || node->position() == kNodePositionDidOnBefore);
|
ASMJIT_ASSERT(node->position() == 0 || node->position() == kNodePositionDidOnBefore);
|
||||||
|
|
||||||
if (node->isInst()) {
|
if (node->isInst()) {
|
||||||
|
// Instruction | Jump | Invoke | Return
|
||||||
|
// ------------------------------------
|
||||||
|
|
||||||
|
// Handle `InstNode`, `FuncCallNode`, and `FuncRetNode`. All of them
|
||||||
|
// share the same interface that provides operands that have read/write
|
||||||
|
// semantics.
|
||||||
if (ASMJIT_UNLIKELY(!_curBlock)) {
|
if (ASMJIT_UNLIKELY(!_curBlock)) {
|
||||||
// If this code is unreachable then it has to be removed.
|
// Unreachable code has to be removed, we cannot allocate registers
|
||||||
ASMJIT_RA_LOG_COMPLEX({
|
// in such code as we cannot do proper liveness analysis in such case.
|
||||||
sb.clear();
|
removeNode(node);
|
||||||
Logging::formatNode(sb, flags, cc(), node);
|
|
||||||
logger->logf(" <Removed> %s\n", sb.data());
|
|
||||||
});
|
|
||||||
cc()->removeNode(node);
|
|
||||||
node = next;
|
node = next;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
else {
|
|
||||||
// Handle `InstNode`, `FuncCallNode`, and `FuncRetNode`. All of
|
|
||||||
// these share the `InstNode` interface and contain operands.
|
|
||||||
hasCode = true;
|
|
||||||
|
|
||||||
if (node->type() != BaseNode::kNodeInst) {
|
_hasCode = true;
|
||||||
if (node->position() != kNodePositionDidOnBefore) {
|
|
||||||
// Call and Reg are complicated as they may insert some surrounding
|
|
||||||
// code around them. The simplest approach is to get the previous
|
|
||||||
// node, call the `onBefore()` handlers and then check whether
|
|
||||||
// anything changed and restart if so. By restart we mean that the
|
|
||||||
// current `node` would go back to the first possible inserted node
|
|
||||||
// by `onBeforeCall()` or `onBeforeRet()`.
|
|
||||||
BaseNode* prev = node->prev();
|
|
||||||
if (node->type() == BaseNode::kNodeFuncCall) {
|
|
||||||
ASMJIT_PROPAGATE(static_cast<This*>(this)->onBeforeCall(node->as<FuncCallNode>()));
|
|
||||||
}
|
|
||||||
else if (node->type() == BaseNode::kNodeFuncRet) {
|
|
||||||
ASMJIT_PROPAGATE(static_cast<This*>(this)->onBeforeRet(node->as<FuncRetNode>()));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (prev != node->prev()) {
|
if (node->isFuncCall() || node->isFuncRet()) {
|
||||||
// If this was the first node in the block and something was
|
if (node->position() != kNodePositionDidOnBefore) {
|
||||||
// inserted before it then we have to update the first block.
|
// Call and Reg are complicated as they may insert some surrounding
|
||||||
if (_curBlock->first() == node)
|
// code around them. The simplest approach is to get the previous
|
||||||
_curBlock->setFirst(prev->next());
|
// node, call the `onBefore()` handlers and then check whether
|
||||||
|
// anything changed and restart if so. By restart we mean that the
|
||||||
|
// current `node` would go back to the first possible inserted node
|
||||||
|
// by `onBeforeCall()` or `onBeforeRet()`.
|
||||||
|
BaseNode* prev = node->prev();
|
||||||
|
|
||||||
node->setPosition(kNodePositionDidOnBefore);
|
if (node->type() == BaseNode::kNodeFuncCall)
|
||||||
node = prev->next();
|
ASMJIT_PROPAGATE(static_cast<This*>(this)->onBeforeCall(node->as<FuncCallNode>()));
|
||||||
|
else
|
||||||
|
ASMJIT_PROPAGATE(static_cast<This*>(this)->onBeforeRet(node->as<FuncRetNode>()));
|
||||||
|
|
||||||
// `onBeforeCall()` and `onBeforeRet()` can only insert instructions.
|
if (prev != node->prev()) {
|
||||||
ASMJIT_ASSERT(node->isInst());
|
// If this was the first node in the block and something was
|
||||||
}
|
// inserted before it then we have to update the first block.
|
||||||
|
if (_curBlock->first() == node)
|
||||||
|
_curBlock->setFirst(prev->next());
|
||||||
|
|
||||||
// Necessary if something was inserted after `node`, but nothing before.
|
node->setPosition(kNodePositionDidOnBefore);
|
||||||
next = node->next();
|
node = prev->next();
|
||||||
|
|
||||||
|
// `onBeforeCall()` and `onBeforeRet()` can only insert instructions.
|
||||||
|
ASMJIT_ASSERT(node->isInst());
|
||||||
}
|
}
|
||||||
else {
|
|
||||||
// Change the position back to its original value.
|
// Necessary if something was inserted after `node`, but nothing before.
|
||||||
node->setPosition(0);
|
next = node->next();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Change the position back to its original value.
|
||||||
|
node->setPosition(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
InstNode* inst = node->as<InstNode>();
|
||||||
|
logNode(inst, kCodeIndentation);
|
||||||
|
|
||||||
|
uint32_t controlType = BaseInst::kControlNone;
|
||||||
|
ib.reset();
|
||||||
|
ASMJIT_PROPAGATE(static_cast<This*>(this)->onInst(inst, controlType, ib));
|
||||||
|
|
||||||
|
if (node->isFuncCall()) {
|
||||||
|
ASMJIT_PROPAGATE(static_cast<This*>(this)->onCall(inst->as<FuncCallNode>(), ib));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (node->isFuncRet()) {
|
||||||
|
ASMJIT_PROPAGATE(static_cast<This*>(this)->onRet(inst->as<FuncRetNode>(), ib));
|
||||||
|
controlType = BaseInst::kControlReturn;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (controlType == BaseInst::kControlJump) {
|
||||||
|
uint32_t fixedRegCount = 0;
|
||||||
|
for (RATiedReg& tiedReg : ib) {
|
||||||
|
RAWorkReg* workReg = _pass->workRegById(tiedReg.workId());
|
||||||
|
if (workReg->group() == BaseReg::kGroupGp) {
|
||||||
|
uint32_t useId = tiedReg.useId();
|
||||||
|
if (useId == BaseReg::kIdBad) {
|
||||||
|
useId = _pass->_scratchRegIndexes[fixedRegCount++];
|
||||||
|
tiedReg.setUseId(useId);
|
||||||
|
}
|
||||||
|
_curBlock->addExitScratchGpRegs(Support::bitMask<uint32_t>(useId));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
InstNode* inst = node->as<InstNode>();
|
ASMJIT_PROPAGATE(_pass->assignRAInst(inst, _curBlock, ib));
|
||||||
ASMJIT_RA_LOG_COMPLEX({
|
_blockRegStats.combineWith(ib._stats);
|
||||||
sb.clear();
|
|
||||||
Logging::formatNode(sb, flags, cc(), node);
|
|
||||||
logger->logf(" %s\n", sb.data());
|
|
||||||
});
|
|
||||||
|
|
||||||
uint32_t controlType = BaseInst::kControlNone;
|
if (controlType != BaseInst::kControlNone) {
|
||||||
ib.reset();
|
// Support for conditional and unconditional jumps.
|
||||||
ASMJIT_PROPAGATE(static_cast<This*>(this)->onInst(inst, controlType, ib));
|
if (controlType == BaseInst::kControlJump || controlType == BaseInst::kControlBranch) {
|
||||||
|
_curBlock->setLast(node);
|
||||||
|
_curBlock->addFlags(RABlock::kFlagHasTerminator);
|
||||||
|
_curBlock->makeConstructed(_blockRegStats);
|
||||||
|
|
||||||
if (node->type() != BaseNode::kNodeInst) {
|
if (!(inst->instOptions() & BaseInst::kOptionUnfollow)) {
|
||||||
if (node->type() == BaseNode::kNodeFuncCall) {
|
// Jmp/Jcc/Call/Loop/etc...
|
||||||
ASMJIT_PROPAGATE(static_cast<This*>(this)->onCall(inst->as<FuncCallNode>(), ib));
|
uint32_t opCount = inst->opCount();
|
||||||
}
|
const Operand* opArray = inst->operands();
|
||||||
else if (node->type() == BaseNode::kNodeFuncRet) {
|
|
||||||
ASMJIT_PROPAGATE(static_cast<This*>(this)->onRet(inst->as<FuncRetNode>(), ib));
|
|
||||||
controlType = BaseInst::kControlReturn;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ASMJIT_PROPAGATE(_pass->assignRAInst(inst, _curBlock, ib));
|
// Cannot jump anywhere without operands.
|
||||||
blockRegStats.combineWith(ib._stats);
|
if (ASMJIT_UNLIKELY(!opCount))
|
||||||
|
return DebugUtils::errored(kErrorInvalidState);
|
||||||
|
|
||||||
if (controlType != BaseInst::kControlNone) {
|
if (opArray[opCount - 1].isLabel()) {
|
||||||
// Support for conditional and unconditional jumps.
|
// Labels are easy for constructing the control flow.
|
||||||
if (controlType == BaseInst::kControlJump || controlType == BaseInst::kControlBranch) {
|
LabelNode* labelNode;
|
||||||
_curBlock->setLast(node);
|
ASMJIT_PROPAGATE(cc()->labelNodeOf(&labelNode, opArray[opCount - 1].as<Label>()));
|
||||||
_curBlock->addFlags(RABlock::kFlagHasTerminator);
|
|
||||||
_curBlock->makeConstructed(blockRegStats);
|
|
||||||
|
|
||||||
if (!(inst->instOptions() & BaseInst::kOptionUnfollow)) {
|
RABlock* targetBlock = _pass->newBlockOrExistingAt(labelNode);
|
||||||
// Jmp/Jcc/Call/Loop/etc...
|
|
||||||
uint32_t opCount = inst->opCount();
|
|
||||||
const Operand* opArray = inst->operands();
|
|
||||||
|
|
||||||
// The last operand must be label (this supports also instructions
|
|
||||||
// like jecx in explicit form).
|
|
||||||
if (ASMJIT_UNLIKELY(opCount == 0 || !opArray[opCount - 1].isLabel()))
|
|
||||||
return DebugUtils::errored(kErrorInvalidState);
|
|
||||||
|
|
||||||
LabelNode* cbLabel;
|
|
||||||
ASMJIT_PROPAGATE(cc()->labelNodeOf(&cbLabel, opArray[opCount - 1].as<Label>()));
|
|
||||||
|
|
||||||
RABlock* targetBlock = _pass->newBlockOrExistingAt(cbLabel);
|
|
||||||
if (ASMJIT_UNLIKELY(!targetBlock))
|
if (ASMJIT_UNLIKELY(!targetBlock))
|
||||||
return DebugUtils::errored(kErrorOutOfMemory);
|
return DebugUtils::errored(kErrorOutOfMemory);
|
||||||
|
|
||||||
ASMJIT_PROPAGATE(_curBlock->appendSuccessor(targetBlock));
|
ASMJIT_PROPAGATE(_curBlock->appendSuccessor(targetBlock));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (controlType == BaseInst::kControlJump) {
|
|
||||||
// Unconditional jump makes the code after the jump unreachable,
|
|
||||||
// which will be removed instantly during the CFG construction;
|
|
||||||
// as we cannot allocate registers for instructions that are not
|
|
||||||
// part of any block. Of course we can leave these instructions
|
|
||||||
// as they are, however, that would only postpone the problem as
|
|
||||||
// assemblers can't encode instructions that use virtual registers.
|
|
||||||
_curBlock = nullptr;
|
|
||||||
}
|
|
||||||
else {
|
else {
|
||||||
node = next;
|
// Not a label - could be jump with reg/mem operand, which
|
||||||
if (ASMJIT_UNLIKELY(!node))
|
// means that it can go anywhere. Such jumps must either be
|
||||||
return DebugUtils::errored(kErrorInvalidState);
|
// annotated so the CFG can be properly constructed, otherwise
|
||||||
|
// we assume the worst case - can jump to every basic block.
|
||||||
|
JumpAnnotation* jumpAnnotation = nullptr;
|
||||||
|
if (inst->type() == BaseNode::kNodeJump)
|
||||||
|
jumpAnnotation = inst->as<JumpNode>()->annotation();
|
||||||
|
|
||||||
RABlock* consecutiveBlock;
|
if (jumpAnnotation) {
|
||||||
if (node->type() == BaseNode::kNodeLabel) {
|
uint64_t timestamp = _pass->nextTimestamp();
|
||||||
if (node->hasPassData()) {
|
for (uint32_t id : jumpAnnotation->labelIds()) {
|
||||||
consecutiveBlock = node->passData<RABlock>();
|
LabelNode* labelNode;
|
||||||
}
|
ASMJIT_PROPAGATE(cc()->labelNodeOf(&labelNode, id));
|
||||||
else {
|
|
||||||
consecutiveBlock = _pass->newBlock(node);
|
RABlock* targetBlock = _pass->newBlockOrExistingAt(labelNode);
|
||||||
if (ASMJIT_UNLIKELY(!consecutiveBlock))
|
if (ASMJIT_UNLIKELY(!targetBlock))
|
||||||
return DebugUtils::errored(kErrorOutOfMemory);
|
return DebugUtils::errored(kErrorOutOfMemory);
|
||||||
node->setPassData<RABlock>(consecutiveBlock);
|
|
||||||
|
// Prevents adding basic-block successors multiple times.
|
||||||
|
if (!targetBlock->hasTimestamp(timestamp)) {
|
||||||
|
targetBlock->setTimestamp(timestamp);
|
||||||
|
ASMJIT_PROPAGATE(_curBlock->appendSuccessor(targetBlock));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
ASMJIT_PROPAGATE(shareAssignmentAcrossSuccessors(_curBlock));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
ASMJIT_PROPAGATE(blocksWithUnknownJumps.append(_pass->allocator(), _curBlock));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (controlType == BaseInst::kControlJump) {
|
||||||
|
// Unconditional jump makes the code after the jump unreachable,
|
||||||
|
// which will be removed instantly during the CFG construction;
|
||||||
|
// as we cannot allocate registers for instructions that are not
|
||||||
|
// part of any block. Of course we can leave these instructions
|
||||||
|
// as they are, however, that would only postpone the problem as
|
||||||
|
// assemblers can't encode instructions that use virtual registers.
|
||||||
|
_curBlock = nullptr;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
node = next;
|
||||||
|
if (ASMJIT_UNLIKELY(!node))
|
||||||
|
return DebugUtils::errored(kErrorInvalidState);
|
||||||
|
|
||||||
|
RABlock* consecutiveBlock;
|
||||||
|
if (node->type() == BaseNode::kNodeLabel) {
|
||||||
|
if (node->hasPassData()) {
|
||||||
|
consecutiveBlock = node->passData<RABlock>();
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
consecutiveBlock = _pass->newBlock(node);
|
consecutiveBlock = _pass->newBlock(node);
|
||||||
if (ASMJIT_UNLIKELY(!consecutiveBlock))
|
if (ASMJIT_UNLIKELY(!consecutiveBlock))
|
||||||
return DebugUtils::errored(kErrorOutOfMemory);
|
return DebugUtils::errored(kErrorOutOfMemory);
|
||||||
|
node->setPassData<RABlock>(consecutiveBlock);
|
||||||
}
|
}
|
||||||
|
|
||||||
_curBlock->addFlags(RABlock::kFlagHasConsecutive);
|
|
||||||
ASMJIT_PROPAGATE(_curBlock->prependSuccessor(consecutiveBlock));
|
|
||||||
|
|
||||||
_curBlock = consecutiveBlock;
|
|
||||||
hasCode = false;
|
|
||||||
blockRegStats.reset();
|
|
||||||
|
|
||||||
if (_curBlock->isConstructed())
|
|
||||||
break;
|
|
||||||
ASMJIT_PROPAGATE(_pass->addBlock(consecutiveBlock));
|
|
||||||
|
|
||||||
ASMJIT_RA_LOG_COMPLEX({
|
|
||||||
lastPrintedBlock = _curBlock;
|
|
||||||
logger->logf(" {#%u}\n", lastPrintedBlock->blockId());
|
|
||||||
});
|
|
||||||
|
|
||||||
continue;
|
|
||||||
}
|
}
|
||||||
}
|
else {
|
||||||
|
consecutiveBlock = _pass->newBlock(node);
|
||||||
|
if (ASMJIT_UNLIKELY(!consecutiveBlock))
|
||||||
|
return DebugUtils::errored(kErrorOutOfMemory);
|
||||||
|
}
|
||||||
|
|
||||||
if (controlType == BaseInst::kControlReturn) {
|
_curBlock->addFlags(RABlock::kFlagHasConsecutive);
|
||||||
_curBlock->setLast(node);
|
ASMJIT_PROPAGATE(_curBlock->prependSuccessor(consecutiveBlock));
|
||||||
_curBlock->makeConstructed(blockRegStats);
|
|
||||||
ASMJIT_PROPAGATE(_curBlock->appendSuccessor(_retBlock));
|
|
||||||
|
|
||||||
_curBlock = nullptr;
|
_curBlock = consecutiveBlock;
|
||||||
|
_hasCode = false;
|
||||||
|
_blockRegStats.reset();
|
||||||
|
|
||||||
|
if (_curBlock->isConstructed())
|
||||||
|
break;
|
||||||
|
ASMJIT_PROPAGATE(_pass->addBlock(consecutiveBlock));
|
||||||
|
|
||||||
|
logBlock(_curBlock, kRootIndentation);
|
||||||
|
continue;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (controlType == BaseInst::kControlReturn) {
|
||||||
|
_curBlock->setLast(node);
|
||||||
|
_curBlock->makeConstructed(_blockRegStats);
|
||||||
|
ASMJIT_PROPAGATE(_curBlock->appendSuccessor(_retBlock));
|
||||||
|
|
||||||
|
_curBlock = nullptr;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else if (node->type() == BaseNode::kNodeLabel) {
|
else if (node->type() == BaseNode::kNodeLabel) {
|
||||||
|
// Label - Basic-Block Management
|
||||||
|
// ------------------------------
|
||||||
|
|
||||||
if (!_curBlock) {
|
if (!_curBlock) {
|
||||||
// If the current code is unreachable the label makes it reachable again.
|
// If the current code is unreachable the label makes it reachable
|
||||||
|
// again. We may remove the whole block in the future if it's not
|
||||||
|
// referenced.
|
||||||
_curBlock = node->passData<RABlock>();
|
_curBlock = node->passData<RABlock>();
|
||||||
|
|
||||||
if (_curBlock) {
|
if (_curBlock) {
|
||||||
// If the label has a block assigned we can either continue with
|
// If the label has a block assigned we can either continue with
|
||||||
// it or skip it if the block has been constructed already.
|
// it or skip it if the block has been constructed already.
|
||||||
@@ -306,8 +347,8 @@ public:
          node->setPassData<RABlock>(_curBlock);
        }

-        hasCode = false;
-        blockRegStats.reset();
+        _hasCode = false;
+        _blockRegStats.reset();
        ASMJIT_PROPAGATE(_pass->addBlock(_curBlock));
      }
      else {
@@ -317,7 +358,7 @@ public:
        // The label currently processed is part of the current block. This
        // is only possible for multiple labels that are right next to each
        // other, or are separated by non-code nodes like directives and comments.
-        if (ASMJIT_UNLIKELY(hasCode))
+        if (ASMJIT_UNLIKELY(_hasCode))
          return DebugUtils::errored(kErrorInvalidState);
      }
      else {
@@ -327,25 +368,25 @@ public:
        ASMJIT_ASSERT(_curBlock->last() != node);
        _curBlock->setLast(node->prev());
        _curBlock->addFlags(RABlock::kFlagHasConsecutive);
-        _curBlock->makeConstructed(blockRegStats);
+        _curBlock->makeConstructed(_blockRegStats);

        ASMJIT_PROPAGATE(_curBlock->appendSuccessor(consecutive));
        ASMJIT_PROPAGATE(_pass->addBlock(consecutive));

        _curBlock = consecutive;
-        hasCode = false;
-        blockRegStats.reset();
+        _hasCode = false;
+        _blockRegStats.reset();
      }
    }
    else {
      // First time we see this label.
-      if (hasCode) {
+      if (_hasCode) {
        // Cannot continue the current block if it already contains some
        // code. We need to create a new block and make it a successor.
        ASMJIT_ASSERT(_curBlock->last() != node);
        _curBlock->setLast(node->prev());
        _curBlock->addFlags(RABlock::kFlagHasConsecutive);
-        _curBlock->makeConstructed(blockRegStats);
+        _curBlock->makeConstructed(_blockRegStats);

        RABlock* consecutive = _pass->newBlock(node);
        if (ASMJIT_UNLIKELY(!consecutive))
@@ -355,43 +396,35 @@ public:
        ASMJIT_PROPAGATE(_pass->addBlock(consecutive));

        _curBlock = consecutive;
-        hasCode = false;
-        blockRegStats.reset();
+        _hasCode = false;
+        _blockRegStats.reset();
      }

      node->setPassData<RABlock>(_curBlock);
    }
  }

-  ASMJIT_RA_LOG_COMPLEX({
-    if (_curBlock && _curBlock != lastPrintedBlock) {
-      lastPrintedBlock = _curBlock;
-      logger->logf(" {#%u}\n", lastPrintedBlock->blockId());
-    }
-
-    sb.clear();
-    Logging::formatNode(sb, flags, cc(), node);
-    logger->logf(" %s\n", sb.data());
-  });
+  if (_curBlock && _curBlock != _lastLoggedBlock)
+    logBlock(_curBlock, kRootIndentation);
+  logNode(node, kRootIndentation);

  // Unlikely: Assume that the exit label is reached only once per function.
-  if (ASMJIT_UNLIKELY(node->as<LabelNode>()->id() == exitLabelId)) {
+  if (ASMJIT_UNLIKELY(node->as<LabelNode>()->id() == _exitLabelId)) {
    _curBlock->setLast(node);
-    _curBlock->makeConstructed(blockRegStats);
+    _curBlock->makeConstructed(_blockRegStats);
    ASMJIT_PROPAGATE(_pass->addExitBlock(_curBlock));

    _curBlock = nullptr;
  }
}
else {
-  ASMJIT_RA_LOG_COMPLEX({
-    sb.clear();
-    Logging::formatNode(sb, flags, cc(), node);
-    logger->logf(" %s\n", sb.data());
-  });
+  // Other Nodes | Function Exit
+  // ---------------------------
+
+  logNode(node, kCodeIndentation);

  if (node->type() == BaseNode::kNodeSentinel) {
-    if (node == func->endNode()) {
+    if (node == _funcNode->endNode()) {
      // Make sure we didn't flow here if this is the end of the function sentinel.
      if (ASMJIT_UNLIKELY(_curBlock))
        return DebugUtils::errored(kErrorInvalidState);
@@ -401,7 +434,7 @@ public:
    else if (node->type() == BaseNode::kNodeFunc) {
      // RAPass can only compile a single function at a time. If we
      // encountered a function it must be the current one, bail if not.
-      if (ASMJIT_UNLIKELY(node != func))
+      if (ASMJIT_UNLIKELY(node != _funcNode))
        return DebugUtils::errored(kErrorInvalidState);
      // PASS if this is the first node.
    }
||||||
@@ -424,8 +457,170 @@ public:
|
|||||||
if (_pass->hasDanglingBlocks())
|
if (_pass->hasDanglingBlocks())
|
||||||
return DebugUtils::errored(kErrorInvalidState);
|
return DebugUtils::errored(kErrorInvalidState);
|
||||||
|
|
||||||
|
for (RABlock* block : blocksWithUnknownJumps)
|
||||||
|
handleBlockWithUnknownJump(block);
|
||||||
|
|
||||||
|
return _pass->initSharedAssignments(_sharedAssignmentsMap);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// [Prepare]
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
//! Prepares the CFG builder of the current function.
|
||||||
|
Error prepare() noexcept {
|
||||||
|
FuncNode* func = _pass->func();
|
||||||
|
BaseNode* node = nullptr;
|
||||||
|
|
||||||
|
// Create entry and exit blocks.
|
||||||
|
_funcNode = func;
|
||||||
|
_retBlock = _pass->newBlockOrExistingAt(func->exitNode(), &node);
|
||||||
|
|
||||||
|
if (ASMJIT_UNLIKELY(!_retBlock))
|
||||||
|
return DebugUtils::errored(kErrorOutOfMemory);
|
||||||
|
ASMJIT_PROPAGATE(_pass->addExitBlock(_retBlock));
|
||||||
|
|
||||||
|
if (node != func) {
|
||||||
|
_curBlock = _pass->newBlock();
|
||||||
|
if (ASMJIT_UNLIKELY(!_curBlock))
|
||||||
|
return DebugUtils::errored(kErrorOutOfMemory);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Function that has no code at all.
|
||||||
|
_curBlock = _retBlock;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset everything we may need.
|
||||||
|
_blockRegStats.reset();
|
||||||
|
_exitLabelId = func->exitNode()->id();
|
||||||
|
|
||||||
|
// Initially we assume there is no code in the function body.
|
||||||
|
_hasCode = false;
|
||||||
|
|
||||||
|
return _pass->addBlock(_curBlock);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// [Utilities]
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
//! Called when a `node` is removed, e.g. because of dead-code elimination.
|
||||||
|
void removeNode(BaseNode* node) noexcept {
|
||||||
|
logNode(node, kRootIndentation, "<Removed>");
|
||||||
|
cc()->removeNode(node);
|
||||||
|
}
|
||||||
|
|
||||||
|
//! Handles block with unknown jump, which could be a jump to a jump table.
|
||||||
|
//!
|
||||||
|
//! If we encounter such block we basically insert all existing blocks as
|
||||||
|
//! successors except the function entry block and a natural successor, if
|
||||||
|
//! such block exists.
|
||||||
|
Error handleBlockWithUnknownJump(RABlock* block) noexcept {
|
||||||
|
RABlocks& blocks = _pass->blocks();
|
||||||
|
size_t blockCount = blocks.size();
|
||||||
|
|
||||||
|
// NOTE: Iterate from `1` as the first block is the entry block, we don't
|
||||||
|
// allow the entry to be a successor of block that ends with unknown jump.
|
||||||
|
RABlock* consecutive = block->consecutive();
|
||||||
|
for (size_t i = 1; i < blockCount; i++) {
|
||||||
|
RABlock* successor = blocks[i];
|
||||||
|
if (successor == consecutive)
|
||||||
|
continue;
|
||||||
|
block->appendSuccessor(successor);
|
||||||
|
}
|
||||||
|
|
||||||
|
return shareAssignmentAcrossSuccessors(block);
|
||||||
|
}
|
||||||
|
|
||||||
|
Error shareAssignmentAcrossSuccessors(RABlock* block) noexcept {
|
||||||
|
if (block->successors().size() <= 1)
|
||||||
|
return kErrorOk;
|
||||||
|
|
||||||
|
RABlock* consecutive = block->consecutive();
|
||||||
|
uint32_t sharedAssignmentId = Globals::kInvalidId;
|
||||||
|
|
||||||
|
for (RABlock* successor : block->successors()) {
|
||||||
|
if (successor == consecutive)
|
||||||
|
continue;
|
||||||
|
|
||||||
|
if (successor->hasSharedAssignmentId()) {
|
||||||
|
if (sharedAssignmentId == Globals::kInvalidId)
|
||||||
|
sharedAssignmentId = successor->sharedAssignmentId();
|
||||||
|
else
|
||||||
|
_sharedAssignmentsMap[successor->sharedAssignmentId()] = sharedAssignmentId;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (sharedAssignmentId == Globals::kInvalidId)
|
||||||
|
ASMJIT_PROPAGATE(newSharedAssignmentId(&sharedAssignmentId));
|
||||||
|
successor->setSharedAssignmentId(sharedAssignmentId);
|
||||||
|
}
|
||||||
|
}
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Error newSharedAssignmentId(uint32_t* out) noexcept {
|
||||||
|
uint32_t id = _sharedAssignmentsMap.size();
|
||||||
|
ASMJIT_PROPAGATE(_sharedAssignmentsMap.append(_pass->allocator(), id));
|
||||||
|
|
||||||
|
*out = id;
|
||||||
|
return kErrorOk;
|
||||||
|
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// [Logging]
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
|
template<typename... Args>
|
||||||
|
inline void log(const char* fmt, Args&&... args) noexcept {
|
||||||
|
if (_logger)
|
||||||
|
_logger->logf(fmt, std::forward<Args>(args)...);
|
||||||
|
}
|
||||||
|
|
||||||
|
inline void logBlock(RABlock* block, uint32_t indentation = 0) noexcept {
|
||||||
|
if (_logger)
|
||||||
|
_logBlock(block, indentation);
|
||||||
|
}
|
||||||
|
|
||||||
|
inline void logNode(BaseNode* node, uint32_t indentation = 0, const char* action = nullptr) noexcept {
|
||||||
|
if (_logger)
|
||||||
|
_logNode(node, indentation, action);
|
||||||
|
}
|
||||||
|
|
||||||
|
void _logBlock(RABlock* block, uint32_t indentation) noexcept {
|
||||||
|
_sb.clear();
|
||||||
|
_sb.appendChars(' ', indentation);
|
||||||
|
_sb.appendFormat("{#%u}\n", block->blockId());
|
||||||
|
_logger->log(_sb);
|
||||||
|
_lastLoggedBlock = block;
|
||||||
|
}
|
||||||
|
|
||||||
|
void _logNode(BaseNode* node, uint32_t indentation, const char* action) noexcept {
|
||||||
|
_sb.clear();
|
||||||
|
_sb.appendChars(' ', indentation);
|
||||||
|
if (action) {
|
||||||
|
_sb.appendString(action);
|
||||||
|
_sb.appendChar(' ');
|
||||||
|
}
|
||||||
|
Logging::formatNode(_sb, _logFlags, cc(), node);
|
||||||
|
_sb.appendChar('\n');
|
||||||
|
_logger->log(_sb);
|
||||||
|
}
|
||||||
|
#else
|
||||||
|
template<typename... Args>
|
||||||
|
inline void log(const char* fmt, Args&&... args) noexcept {
|
||||||
|
DebugUtils::unused(fmt);
|
||||||
|
DebugUtils::unused(std::forward<Args>(args)...);
|
||||||
|
}
|
||||||
|
|
||||||
|
inline void logBlock(RABlock* block, uint32_t indentation = 0) noexcept {
|
||||||
|
DebugUtils::unused(block, indentation);
|
||||||
|
}
|
||||||
|
|
||||||
|
inline void logNode(BaseNode* node, uint32_t indentation = 0, const char* action = nullptr) noexcept {
|
||||||
|
DebugUtils::unused(node, indentation, action);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
};
|
};
|
||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
|
|||||||
@@ -398,10 +398,10 @@ public:
};

// ============================================================================
-// [asmjit::LiveInterval]
+// [asmjit::RALiveInterval]
// ============================================================================

-struct LiveInterval {
+struct RALiveInterval {
  uint32_t a, b;

  enum Misc : uint32_t {
@@ -412,15 +412,15 @@ struct LiveInterval {
  //! \name Construction & Destruction
  //! \{

-  inline LiveInterval() noexcept : a(0), b(0) {}
-  inline LiveInterval(uint32_t a, uint32_t b) noexcept : a(a), b(b) {}
-  inline LiveInterval(const LiveInterval& other) noexcept : a(other.a), b(other.b) {}
+  inline RALiveInterval() noexcept : a(0), b(0) {}
+  inline RALiveInterval(uint32_t a, uint32_t b) noexcept : a(a), b(b) {}
+  inline RALiveInterval(const RALiveInterval& other) noexcept : a(other.a), b(other.b) {}

  inline void init(uint32_t aVal, uint32_t bVal) noexcept {
    a = aVal;
    b = bVal;
  }
-  inline void init(const LiveInterval& other) noexcept { init(other.a, other.b); }
+  inline void init(const RALiveInterval& other) noexcept { init(other.a, other.b); }
  inline void reset() noexcept { init(0, 0); }

  //! \}
@@ -428,7 +428,7 @@ struct LiveInterval {
  //! \name Overloaded Operators
  //! \{

-  inline LiveInterval& operator=(const LiveInterval& other) = default;
+  inline RALiveInterval& operator=(const RALiveInterval& other) = default;

  //! \}

@@ -446,31 +446,31 @@ struct LiveInterval {
// ============================================================================

template<typename T>
-class RALiveSpan : public LiveInterval, public T {
+class RALiveSpan : public RALiveInterval, public T {
public:
  typedef T DataType;

  //! \name Construction & Destruction
  //! \{

-  inline RALiveSpan() noexcept : LiveInterval(), T() {}
-  inline RALiveSpan(const RALiveSpan<T>& other) noexcept : LiveInterval(other), T() {}
-  inline RALiveSpan(const LiveInterval& interval, const T& data) noexcept : LiveInterval(interval), T(data) {}
-  inline RALiveSpan(uint32_t a, uint32_t b) noexcept : LiveInterval(a, b), T() {}
-  inline RALiveSpan(uint32_t a, uint32_t b, const T& data) noexcept : LiveInterval(a, b), T(data) {}
+  inline RALiveSpan() noexcept : RALiveInterval(), T() {}
+  inline RALiveSpan(const RALiveSpan<T>& other) noexcept : RALiveInterval(other), T() {}
+  inline RALiveSpan(const RALiveInterval& interval, const T& data) noexcept : RALiveInterval(interval), T(data) {}
+  inline RALiveSpan(uint32_t a, uint32_t b) noexcept : RALiveInterval(a, b), T() {}
+  inline RALiveSpan(uint32_t a, uint32_t b, const T& data) noexcept : RALiveInterval(a, b), T(data) {}

  inline void init(const RALiveSpan<T>& other) noexcept {
-    LiveInterval::init(static_cast<const LiveInterval&>(other));
+    RALiveInterval::init(static_cast<const RALiveInterval&>(other));
    T::init(static_cast<const T&>(other));
  }

  inline void init(const RALiveSpan<T>& span, const T& data) noexcept {
-    LiveInterval::init(static_cast<const LiveInterval&>(span));
+    RALiveInterval::init(static_cast<const RALiveInterval&>(span));
    T::init(data);
  }

inline void init(const LiveInterval& interval, const T& data) noexcept {
|
inline void init(const RALiveInterval& interval, const T& data) noexcept {
|
||||||
LiveInterval::init(interval);
|
RALiveInterval::init(interval);
|
||||||
T::init(data);
|
T::init(data);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -520,7 +520,7 @@ public:
|
|||||||
|
|
||||||
inline bool isOpen() const noexcept {
|
inline bool isOpen() const noexcept {
|
||||||
uint32_t size = _data.size();
|
uint32_t size = _data.size();
|
||||||
return size > 0 && _data[size - 1].b == LiveInterval::kInf;
|
return size > 0 && _data[size - 1].b == RALiveInterval::kInf;
|
||||||
}
|
}
|
||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
|
|||||||
@@ -370,6 +370,21 @@ Cleared:
|
|||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Error RALocalAllocator::spillGpScratchRegsBeforeEntry(uint32_t scratchRegs) noexcept {
|
||||||
|
uint32_t group = BaseReg::kGroupGp;
|
||||||
|
Support::BitWordIterator<uint32_t> it(scratchRegs);
|
||||||
|
|
||||||
|
while (it.hasNext()) {
|
||||||
|
uint32_t physId = it.next();
|
||||||
|
if (_curAssignment.isPhysAssigned(group, physId)) {
|
||||||
|
uint32_t workId = _curAssignment.physToWorkId(group, physId);
|
||||||
|
ASMJIT_PROPAGATE(onSpillReg(group, workId, physId));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return kErrorOk;
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// [asmjit::RALocalAllocator - Allocation]
|
// [asmjit::RALocalAllocator - Allocation]
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -847,7 +862,7 @@ Error RALocalAllocator::spillAfterAllocation(InstNode* node) noexcept {
|
|||||||
|
|
||||||
Error RALocalAllocator::allocBranch(InstNode* node, RABlock* target, RABlock* cont) noexcept {
|
Error RALocalAllocator::allocBranch(InstNode* node, RABlock* target, RABlock* cont) noexcept {
|
||||||
// TODO: This should be used to make the branch allocation better.
|
// TODO: This should be used to make the branch allocation better.
|
||||||
ASMJIT_UNUSED(cont);
|
DebugUtils::unused(cont);
|
||||||
|
|
||||||
// The cursor must point to the previous instruction for a possible instruction insertion.
|
// The cursor must point to the previous instruction for a possible instruction insertion.
|
||||||
_cc->_setCursor(node->prev());
|
_cc->_setCursor(node->prev());
|
||||||
@@ -863,6 +878,7 @@ Error RALocalAllocator::allocBranch(InstNode* node, RABlock* target, RABlock* co
|
|||||||
}
|
}
|
||||||
|
|
||||||
ASMJIT_PROPAGATE(allocInst(node));
|
ASMJIT_PROPAGATE(allocInst(node));
|
||||||
|
ASMJIT_PROPAGATE(spillRegsBeforeEntry(target));
|
||||||
|
|
||||||
if (target->hasEntryAssignment()) {
|
if (target->hasEntryAssignment()) {
|
||||||
BaseNode* injectionPoint = _pass->extraBlock()->prev();
|
BaseNode* injectionPoint = _pass->extraBlock()->prev();
|
||||||
@@ -879,7 +895,7 @@ Error RALocalAllocator::allocBranch(InstNode* node, RABlock* target, RABlock* co
|
|||||||
BaseNode* curCursor = _cc->cursor();
|
BaseNode* curCursor = _cc->cursor();
|
||||||
if (curCursor != injectionPoint) {
|
if (curCursor != injectionPoint) {
|
||||||
// Additional instructions emitted to switch from the current state to
|
// Additional instructions emitted to switch from the current state to
|
||||||
// the `target`s state. This means that we have to move these instructions
|
// the `target` state. This means that we have to move these instructions
|
||||||
// into an independent code block and patch the jump location.
|
// into an independent code block and patch the jump location.
|
||||||
Operand& targetOp(node->opType(node->opCount() - 1));
|
Operand& targetOp(node->opType(node->opCount() - 1));
|
||||||
if (ASMJIT_UNLIKELY(!targetOp.isLabel()))
|
if (ASMJIT_UNLIKELY(!targetOp.isLabel()))
|
||||||
@@ -911,13 +927,50 @@ Error RALocalAllocator::allocBranch(InstNode* node, RABlock* target, RABlock* co
|
|||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Error RALocalAllocator::allocJumpTable(InstNode* node, const RABlocks& targets, RABlock* cont) noexcept {
|
||||||
|
if (targets.empty())
|
||||||
|
return DebugUtils::errored(kErrorInvalidState);
|
||||||
|
|
||||||
|
if (targets.size() == 1)
|
||||||
|
return allocBranch(node, targets[0], cont);
|
||||||
|
|
||||||
|
// The cursor must point to the previous instruction for a possible instruction insertion.
|
||||||
|
_cc->_setCursor(node->prev());
|
||||||
|
|
||||||
|
// All `targets` should have the same sharedAssignmentId, we just read the first.
|
||||||
|
RABlock* anyTarget = targets[0];
|
||||||
|
if (!anyTarget->hasSharedAssignmentId())
|
||||||
|
return DebugUtils::errored(kErrorInvalidState);
|
||||||
|
|
||||||
|
RASharedAssignment& sharedAssignment = _pass->_sharedAssignments[anyTarget->sharedAssignmentId()];
|
||||||
|
|
||||||
|
ASMJIT_PROPAGATE(allocInst(node));
|
||||||
|
|
||||||
|
if (!sharedAssignment.empty()) {
|
||||||
|
ASMJIT_PROPAGATE(switchToAssignment(
|
||||||
|
sharedAssignment.physToWorkMap(),
|
||||||
|
sharedAssignment.workToPhysMap(),
|
||||||
|
sharedAssignment.liveIn(),
|
||||||
|
true, // Read-only.
|
||||||
|
false // Try-mode.
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
ASMJIT_PROPAGATE(spillRegsBeforeEntry(anyTarget));
|
||||||
|
|
||||||
|
if (sharedAssignment.empty()) {
|
||||||
|
ASMJIT_PROPAGATE(_pass->setBlockEntryAssignment(anyTarget, block(), _curAssignment));
|
||||||
|
}
|
||||||
|
|
||||||
|
return kErrorOk;
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// [asmjit::RALocalAllocator - Decision Making]
|
// [asmjit::RALocalAllocator - Decision Making]
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
||||||
uint32_t RALocalAllocator::decideOnAssignment(uint32_t group, uint32_t workId, uint32_t physId, uint32_t allocableRegs) const noexcept {
|
uint32_t RALocalAllocator::decideOnAssignment(uint32_t group, uint32_t workId, uint32_t physId, uint32_t allocableRegs) const noexcept {
|
||||||
ASMJIT_UNUSED(group);
|
DebugUtils::unused(group, physId);
|
||||||
ASMJIT_UNUSED(physId);
|
|
||||||
ASMJIT_ASSERT(allocableRegs != 0);
|
ASMJIT_ASSERT(allocableRegs != 0);
|
||||||
|
|
||||||
RAWorkReg* workReg = workRegById(workId);
|
RAWorkReg* workReg = workRegById(workId);
|
||||||
@@ -945,10 +998,7 @@ uint32_t RALocalAllocator::decideOnUnassignment(uint32_t group, uint32_t workId,
|
|||||||
ASMJIT_ASSERT(allocableRegs != 0);
|
ASMJIT_ASSERT(allocableRegs != 0);
|
||||||
|
|
||||||
// TODO:
|
// TODO:
|
||||||
ASMJIT_UNUSED(allocableRegs);
|
DebugUtils::unused(allocableRegs, group, workId, physId);
|
||||||
ASMJIT_UNUSED(group);
|
|
||||||
ASMJIT_UNUSED(workId);
|
|
||||||
ASMJIT_UNUSED(physId);
|
|
||||||
|
|
||||||
// if (!_curAssignment.isPhysDirty(group, physId)) {
|
// if (!_curAssignment.isPhysDirty(group, physId)) {
|
||||||
// }
|
// }
|
||||||
@@ -959,7 +1009,7 @@ uint32_t RALocalAllocator::decideOnUnassignment(uint32_t group, uint32_t workId,
|
|||||||
|
|
||||||
uint32_t RALocalAllocator::decideOnSpillFor(uint32_t group, uint32_t workId, uint32_t spillableRegs, uint32_t* spillWorkId) const noexcept {
|
uint32_t RALocalAllocator::decideOnSpillFor(uint32_t group, uint32_t workId, uint32_t spillableRegs, uint32_t* spillWorkId) const noexcept {
|
||||||
// May be used in the future to decide which register would be best to spill so `workId` can be assigned.
|
// May be used in the future to decide which register would be best to spill so `workId` can be assigned.
|
||||||
ASMJIT_UNUSED(workId);
|
DebugUtils::unused(workId);
|
||||||
ASMJIT_ASSERT(spillableRegs != 0);
|
ASMJIT_ASSERT(spillableRegs != 0);
|
||||||
|
|
||||||
Support::BitWordIterator<uint32_t> it(spillableRegs);
|
Support::BitWordIterator<uint32_t> it(spillableRegs);
|
||||||
|
|||||||
@@ -154,6 +154,12 @@ public:
|
|||||||
bool dstReadOnly,
|
bool dstReadOnly,
|
||||||
bool tryMode) noexcept;
|
bool tryMode) noexcept;
|
||||||
|
|
||||||
|
inline Error spillRegsBeforeEntry(RABlock* block) noexcept {
|
||||||
|
return spillGpScratchRegsBeforeEntry(block->entryScratchGpRegs());
|
||||||
|
}
|
||||||
|
|
||||||
|
Error spillGpScratchRegsBeforeEntry(uint32_t scratchRegs) noexcept;
|
||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
|
|
||||||
//! \name Allocation
|
//! \name Allocation
|
||||||
@@ -163,6 +169,7 @@ public:
|
|||||||
Error spillAfterAllocation(InstNode* node) noexcept;
|
Error spillAfterAllocation(InstNode* node) noexcept;
|
||||||
|
|
||||||
Error allocBranch(InstNode* node, RABlock* target, RABlock* cont) noexcept;
|
Error allocBranch(InstNode* node, RABlock* target, RABlock* cont) noexcept;
|
||||||
|
Error allocJumpTable(InstNode* node, const RABlocks& targets, RABlock* cont) noexcept;
|
||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
|
|
||||||
|
|||||||
@@ -86,12 +86,14 @@ RAPass::RAPass() noexcept
|
|||||||
_pov(),
|
_pov(),
|
||||||
_instructionCount(0),
|
_instructionCount(0),
|
||||||
_createdBlockCount(0),
|
_createdBlockCount(0),
|
||||||
|
_sharedAssignments(),
|
||||||
_lastTimestamp(0),
|
_lastTimestamp(0),
|
||||||
_archRegsInfo(nullptr),
|
_archRegsInfo(nullptr),
|
||||||
_archTraits(),
|
_archTraits(),
|
||||||
_physRegIndex(),
|
_physRegIndex(),
|
||||||
_physRegCount(),
|
_physRegCount(),
|
||||||
_physRegTotal(0),
|
_physRegTotal(0),
|
||||||
|
_scratchRegIndexes{},
|
||||||
_availableRegs(),
|
_availableRegs(),
|
||||||
_availableRegCount(),
|
_availableRegCount(),
|
||||||
_clobberedRegs(),
|
_clobberedRegs(),
|
||||||
@@ -119,6 +121,8 @@ static void RAPass_reset(RAPass* self, FuncDetail* funcDetail) noexcept {
|
|||||||
self->_workRegs.reset();
|
self->_workRegs.reset();
|
||||||
self->_instructionCount = 0;
|
self->_instructionCount = 0;
|
||||||
self->_createdBlockCount = 0;
|
self->_createdBlockCount = 0;
|
||||||
|
|
||||||
|
self->_sharedAssignments.reset();
|
||||||
self->_lastTimestamp = 0;
|
self->_lastTimestamp = 0;
|
||||||
|
|
||||||
self->_archRegsInfo = nullptr;
|
self->_archRegsInfo = nullptr;
|
||||||
@@ -127,6 +131,9 @@ static void RAPass_reset(RAPass* self, FuncDetail* funcDetail) noexcept {
|
|||||||
self->_physRegCount.reset();
|
self->_physRegCount.reset();
|
||||||
self->_physRegTotal = 0;
|
self->_physRegTotal = 0;
|
||||||
|
|
||||||
|
for (size_t i = 0; i < ASMJIT_ARRAY_SIZE(self->_scratchRegIndexes); i++)
|
||||||
|
self->_scratchRegIndexes[i] = BaseReg::kIdBad;
|
||||||
|
|
||||||
self->_availableRegs.reset();
|
self->_availableRegs.reset();
|
||||||
self->_availableRegCount.reset();
|
self->_availableRegCount.reset();
|
||||||
self->_clobberedRegs.reset();
|
self->_clobberedRegs.reset();
|
||||||
@@ -157,7 +164,7 @@ static void RAPass_resetVirtRegData(RAPass* self) noexcept {
|
|||||||
Error RAPass::runOnFunction(Zone* zone, Logger* logger, FuncNode* func) noexcept {
|
Error RAPass::runOnFunction(Zone* zone, Logger* logger, FuncNode* func) noexcept {
|
||||||
_allocator.reset(zone);
|
_allocator.reset(zone);
|
||||||
|
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
_logger = logger;
|
_logger = logger;
|
||||||
_debugLogger = nullptr;
|
_debugLogger = nullptr;
|
||||||
|
|
||||||
@@ -166,7 +173,9 @@ Error RAPass::runOnFunction(Zone* zone, Logger* logger, FuncNode* func) noexcept
|
|||||||
if (_loggerFlags & FormatOptions::kFlagDebugPasses)
|
if (_loggerFlags & FormatOptions::kFlagDebugPasses)
|
||||||
_debugLogger = logger;
|
_debugLogger = logger;
|
||||||
}
|
}
|
||||||
#endif
|
#else
|
||||||
|
DebugUtils::unused(logger);
|
||||||
|
#endif
|
||||||
|
|
||||||
// Initialize all core structures to use `zone` and `func`.
|
// Initialize all core structures to use `zone` and `func`.
|
||||||
BaseNode* end = func->endNode();
|
BaseNode* end = func->endNode();
|
||||||
@@ -193,11 +202,11 @@ Error RAPass::runOnFunction(Zone* zone, Logger* logger, FuncNode* func) noexcept
|
|||||||
RAPass_reset(this, nullptr);
|
RAPass_reset(this, nullptr);
|
||||||
_allocator.reset(nullptr);
|
_allocator.reset(nullptr);
|
||||||
|
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
_logger = nullptr;
|
_logger = nullptr;
|
||||||
_debugLogger = nullptr;
|
_debugLogger = nullptr;
|
||||||
_loggerFlags = 0;
|
_loggerFlags = 0;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
_func = nullptr;
|
_func = nullptr;
|
||||||
_stop = nullptr;
|
_stop = nullptr;
|
||||||
@@ -223,10 +232,10 @@ Error RAPass::onPerformAllSteps() noexcept {
|
|||||||
ASMJIT_PROPAGATE(buildLiveness());
|
ASMJIT_PROPAGATE(buildLiveness());
|
||||||
ASMJIT_PROPAGATE(assignArgIndexToWorkRegs());
|
ASMJIT_PROPAGATE(assignArgIndexToWorkRegs());
|
||||||
|
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
if (logger() && logger()->hasFlag(FormatOptions::kFlagAnnotations))
|
if (logger() && logger()->hasFlag(FormatOptions::kFlagAnnotations))
|
||||||
ASMJIT_PROPAGATE(annotateCode());
|
ASMJIT_PROPAGATE(annotateCode());
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
ASMJIT_PROPAGATE(runGlobalAllocator());
|
ASMJIT_PROPAGATE(runGlobalAllocator());
|
||||||
ASMJIT_PROPAGATE(runLocalAllocator());
|
ASMJIT_PROPAGATE(runLocalAllocator());
|
||||||
@@ -350,6 +359,38 @@ Error RAPass::addBlock(RABlock* block) noexcept {
|
|||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// [asmjit::RAPass - CFG - Build]
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
Error RAPass::initSharedAssignments(const ZoneVector<uint32_t>& sharedAssignmentsMap) noexcept {
|
||||||
|
if (sharedAssignmentsMap.empty())
|
||||||
|
return kErrorOk;
|
||||||
|
|
||||||
|
uint32_t count = 0;
|
||||||
|
for (RABlock* block : _blocks) {
|
||||||
|
if (block->hasSharedAssignmentId()) {
|
||||||
|
uint32_t sharedAssignmentId = sharedAssignmentsMap[block->sharedAssignmentId()];
|
||||||
|
block->setSharedAssignmentId(sharedAssignmentId);
|
||||||
|
count = Support::max(count, sharedAssignmentId + 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ASMJIT_PROPAGATE(_sharedAssignments.resize(allocator(), count));
|
||||||
|
|
||||||
|
// Aggregate all entry scratch GP regs from blocks of the same assignment to
|
||||||
|
// the assignment itself. It will then be used instead of RABlock's own scratch
|
||||||
|
// regs mask, as shared assignments have precedence.
|
||||||
|
for (RABlock* block : _blocks) {
|
||||||
|
if (block->hasSharedAssignmentId()) {
|
||||||
|
RASharedAssignment& sa = _sharedAssignments[block->sharedAssignmentId()];
|
||||||
|
sa.addScratchGpRegs(block->_entryScratchGpRegs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return kErrorOk;
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// [asmjit::RAPass - CFG - Views Order]
|
// [asmjit::RAPass - CFG - Views Order]
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -374,11 +415,10 @@ public:
|
|||||||
};
|
};
|
||||||
|
|
||||||
Error RAPass::buildViews() noexcept {
|
Error RAPass::buildViews() noexcept {
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
Logger* logger = debugLogger();
|
Logger* logger = debugLogger();
|
||||||
#endif
|
|
||||||
|
|
||||||
ASMJIT_RA_LOG_FORMAT("[RAPass::BuildViews]\n");
|
ASMJIT_RA_LOG_FORMAT("[RAPass::BuildViews]\n");
|
||||||
|
#endif
|
||||||
|
|
||||||
uint32_t count = blockCount();
|
uint32_t count = blockCount();
|
||||||
if (ASMJIT_UNLIKELY(!count)) return kErrorOk;
|
if (ASMJIT_UNLIKELY(!count)) return kErrorOk;
|
||||||
@@ -425,6 +465,22 @@ Error RAPass::buildViews() noexcept {
|
|||||||
i = top.index();
|
i = top.index();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ASMJIT_RA_LOG_COMPLEX({
|
||||||
|
StringTmp<1024> sb;
|
||||||
|
for (RABlock* block : blocks()) {
|
||||||
|
sb.clear();
|
||||||
|
if (block->hasSuccessors()) {
|
||||||
|
sb.appendFormat(" #%u -> {", block->blockId());
|
||||||
|
_dumpBlockIds(sb, block->successors());
|
||||||
|
sb.appendString("}\n");
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
sb.appendFormat(" #%u -> {Exit}\n", block->blockId());
|
||||||
|
}
|
||||||
|
logger->log(sb);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
visited.release(allocator());
|
visited.release(allocator());
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
}
|
}
|
||||||
@@ -443,11 +499,10 @@ static ASMJIT_INLINE RABlock* intersectBlocks(RABlock* b1, RABlock* b2) noexcept
|
|||||||
|
|
||||||
// Based on "A Simple, Fast Dominance Algorithm".
|
// Based on "A Simple, Fast Dominance Algorithm".
|
||||||
Error RAPass::buildDominators() noexcept {
|
Error RAPass::buildDominators() noexcept {
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
Logger* logger = debugLogger();
|
Logger* logger = debugLogger();
|
||||||
#endif
|
|
||||||
|
|
||||||
ASMJIT_RA_LOG_FORMAT("[RAPass::BuildDominators]\n");
|
ASMJIT_RA_LOG_FORMAT("[RAPass::BuildDominators]\n");
|
||||||
|
#endif
|
||||||
|
|
||||||
if (_blocks.empty())
|
if (_blocks.empty())
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
@@ -556,10 +611,10 @@ Error RAPass::removeUnreachableBlocks() noexcept {
|
|||||||
if (numAllBlocks == numReachableBlocks)
|
if (numAllBlocks == numReachableBlocks)
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
|
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
Logger* logger = debugLogger();
|
Logger* logger = debugLogger();
|
||||||
#endif
|
|
||||||
ASMJIT_RA_LOG_FORMAT("[RAPass::RemoveUnreachableBlocks (%u of %u unreachable)]\n", numAllBlocks - numReachableBlocks, numAllBlocks);
|
ASMJIT_RA_LOG_FORMAT("[RAPass::RemoveUnreachableBlocks (%u of %u unreachable)]\n", numAllBlocks - numReachableBlocks, numAllBlocks);
|
||||||
|
#endif
|
||||||
|
|
||||||
for (uint32_t i = 0; i < numAllBlocks; i++) {
|
for (uint32_t i = 0; i < numAllBlocks; i++) {
|
||||||
RABlock* block = _blocks[i];
|
RABlock* block = _blocks[i];
|
||||||
@@ -689,7 +744,7 @@ namespace LiveOps {
|
|||||||
|
|
||||||
struct In {
|
struct In {
|
||||||
static ASMJIT_INLINE BitWord op(BitWord dst, BitWord out, BitWord gen, BitWord kill) noexcept {
|
static ASMJIT_INLINE BitWord op(BitWord dst, BitWord out, BitWord gen, BitWord kill) noexcept {
|
||||||
ASMJIT_UNUSED(dst);
|
DebugUtils::unused(dst);
|
||||||
return (out | gen) & ~kill;
|
return (out | gen) & ~kill;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -758,10 +813,10 @@ namespace LiveOps {
|
|||||||
}
|
}
|
||||||
|
|
||||||
ASMJIT_FAVOR_SPEED Error RAPass::buildLiveness() noexcept {
|
ASMJIT_FAVOR_SPEED Error RAPass::buildLiveness() noexcept {
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
Logger* logger = debugLogger();
|
Logger* logger = debugLogger();
|
||||||
StringTmp<512> sb;
|
StringTmp<512> sb;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
ASMJIT_RA_LOG_FORMAT("[RAPass::BuildLiveness]\n");
|
ASMJIT_RA_LOG_FORMAT("[RAPass::BuildLiveness]\n");
|
||||||
|
|
||||||
@@ -1072,6 +1127,7 @@ Error RAPass::assignArgIndexToWorkRegs() noexcept {
|
|||||||
// [asmjit::RAPass - Allocation - Global]
|
// [asmjit::RAPass - Allocation - Global]
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
||||||
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
static void RAPass_dumpSpans(String& sb, uint32_t index, const LiveRegSpans& liveSpans) noexcept {
|
static void RAPass_dumpSpans(String& sb, uint32_t index, const LiveRegSpans& liveSpans) noexcept {
|
||||||
sb.appendFormat(" %02u: ", index);
|
sb.appendFormat(" %02u: ", index);
|
||||||
|
|
||||||
@@ -1083,6 +1139,7 @@ static void RAPass_dumpSpans(String& sb, uint32_t index, const LiveRegSpans& liv
|
|||||||
|
|
||||||
sb.appendChar('\n');
|
sb.appendChar('\n');
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
Error RAPass::runGlobalAllocator() noexcept {
|
Error RAPass::runGlobalAllocator() noexcept {
|
||||||
ASMJIT_PROPAGATE(initGlobalLiveSpans());
|
ASMJIT_PROPAGATE(initGlobalLiveSpans());
|
||||||
@@ -1115,15 +1172,15 @@ ASMJIT_FAVOR_SPEED Error RAPass::binPack(uint32_t group) noexcept {
|
|||||||
if (workRegCount(group) == 0)
|
if (workRegCount(group) == 0)
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
|
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
Logger* logger = debugLogger();
|
Logger* logger = debugLogger();
|
||||||
StringTmp<512> sb;
|
StringTmp<512> sb;
|
||||||
#endif
|
|
||||||
|
|
||||||
ASMJIT_RA_LOG_FORMAT("[RAPass::BinPack] Available=%u (0x%08X) Count=%u\n",
|
ASMJIT_RA_LOG_FORMAT("[RAPass::BinPack] Available=%u (0x%08X) Count=%u\n",
|
||||||
Support::popcnt(_availableRegs[group]),
|
Support::popcnt(_availableRegs[group]),
|
||||||
_availableRegs[group],
|
_availableRegs[group],
|
||||||
workRegCount(group));
|
workRegCount(group));
|
||||||
|
#endif
|
||||||
|
|
||||||
uint32_t i;
|
uint32_t i;
|
||||||
uint32_t physCount = _physRegCount[group];
|
uint32_t physCount = _physRegCount[group];
|
||||||
@@ -1222,6 +1279,9 @@ ASMJIT_FAVOR_SPEED Error RAPass::binPack(uint32_t group) noexcept {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Maybe unused if logging is disabled.
|
||||||
|
DebugUtils::unused(physCount);
|
||||||
|
|
||||||
if (workRegs.empty()) {
|
if (workRegs.empty()) {
|
||||||
ASMJIT_RA_LOG_FORMAT(" Completed.\n");
|
ASMJIT_RA_LOG_FORMAT(" Completed.\n");
|
||||||
}
|
}
|
||||||
@@ -1307,8 +1367,11 @@ Error RAPass::runLocalAllocator() noexcept {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
else if (successors.size() > 1) {
|
else if (successors.size() > 1) {
|
||||||
// TODO: Jump table.
|
RABlock* cont = block->hasConsecutive() ? successors.first() : nullptr;
|
||||||
ASMJIT_ASSERT(false);
|
ASMJIT_PROPAGATE(lra.allocJumpTable(inst, successors, cont));
|
||||||
|
|
||||||
|
node = next;
|
||||||
|
continue;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
// Otherwise this is an unconditional jump, special handling isn't required.
|
// Otherwise this is an unconditional jump, special handling isn't required.
|
||||||
@@ -1326,10 +1389,10 @@ Error RAPass::runLocalAllocator() noexcept {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (consecutive) {
|
if (consecutive) {
|
||||||
if (consecutive->hasEntryAssignment()) {
|
BaseNode* prev = afterLast ? afterLast->prev() : cc()->lastNode();
|
||||||
BaseNode* prev = afterLast ? afterLast->prev() : cc()->lastNode();
|
cc()->_setCursor(unconditionalJump ? prev->prev() : prev);
|
||||||
cc()->_setCursor(unconditionalJump ? prev->prev() : prev);
|
|
||||||
|
|
||||||
|
if (consecutive->hasEntryAssignment()) {
|
||||||
ASMJIT_PROPAGATE(
|
ASMJIT_PROPAGATE(
|
||||||
lra.switchToAssignment(
|
lra.switchToAssignment(
|
||||||
consecutive->entryPhysToWorkMap(),
|
consecutive->entryPhysToWorkMap(),
|
||||||
@@ -1339,6 +1402,7 @@ Error RAPass::runLocalAllocator() noexcept {
|
|||||||
false));
|
false));
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
|
ASMJIT_PROPAGATE(lra.spillRegsBeforeEntry(consecutive));
|
||||||
ASMJIT_PROPAGATE(setBlockEntryAssignment(consecutive, block, lra._curAssignment));
|
ASMJIT_PROPAGATE(setBlockEntryAssignment(consecutive, block, lra._curAssignment));
|
||||||
lra._curAssignment.copyFrom(consecutive->entryPhysToWorkMap(), consecutive->entryWorkToPhysMap());
|
lra._curAssignment.copyFrom(consecutive->entryPhysToWorkMap(), consecutive->entryWorkToPhysMap());
|
||||||
}
|
}
|
||||||
@@ -1371,7 +1435,7 @@ Error RAPass::runLocalAllocator() noexcept {
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
// If we switched to some block we have to update `lra`.
|
// If we switched to some block we have to update the local allocator.
|
||||||
lra.replaceAssignment(block->entryPhysToWorkMap(), block->entryWorkToPhysMap());
|
lra.replaceAssignment(block->entryPhysToWorkMap(), block->entryWorkToPhysMap());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1380,6 +1444,18 @@ Error RAPass::runLocalAllocator() noexcept {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Error RAPass::setBlockEntryAssignment(RABlock* block, const RABlock* fromBlock, const RAAssignment& fromAssignment) noexcept {
|
Error RAPass::setBlockEntryAssignment(RABlock* block, const RABlock* fromBlock, const RAAssignment& fromAssignment) noexcept {
|
||||||
|
if (block->hasSharedAssignmentId()) {
|
||||||
|
uint32_t sharedAssignmentId = block->sharedAssignmentId();
|
||||||
|
|
||||||
|
// Shouldn't happen. Entry assignment of a block that has a shared-state
|
||||||
|
// will assign to all blocks with the same sharedAssignmentId. It's a bug if
|
||||||
|
// the shared state has been already assigned.
|
||||||
|
if (!_sharedAssignments[sharedAssignmentId].empty())
|
||||||
|
return DebugUtils::errored(kErrorInvalidState);
|
||||||
|
|
||||||
|
return setSharedAssignment(sharedAssignmentId, fromAssignment);
|
||||||
|
}
|
||||||
|
|
||||||
PhysToWorkMap* physToWorkMap = clonePhysToWorkMap(fromAssignment.physToWorkMap());
|
PhysToWorkMap* physToWorkMap = clonePhysToWorkMap(fromAssignment.physToWorkMap());
|
||||||
WorkToPhysMap* workToPhysMap = cloneWorkToPhysMap(fromAssignment.workToPhysMap());
|
WorkToPhysMap* workToPhysMap = cloneWorkToPhysMap(fromAssignment.workToPhysMap());
|
||||||
|
|
||||||
@@ -1388,17 +1464,22 @@ Error RAPass::setBlockEntryAssignment(RABlock* block, const RABlock* fromBlock,
|
|||||||
|
|
||||||
block->setEntryAssignment(physToWorkMap, workToPhysMap);
|
block->setEntryAssignment(physToWorkMap, workToPhysMap);
|
||||||
|
|
||||||
// Must be first block, otherwise impossible.
|
// True if this is the first (entry) block, nothing to do in this case.
|
||||||
if (block == fromBlock)
|
if (block == fromBlock) {
|
||||||
return kErrorOk;
|
// Entry block should never have a shared state.
|
||||||
|
if (block->hasSharedAssignmentId())
|
||||||
|
return DebugUtils::errored(kErrorInvalidState);
|
||||||
|
|
||||||
const ZoneBitVector& liveOut = fromBlock->liveOut();
|
return kErrorOk;
|
||||||
const ZoneBitVector& liveIn = block->liveIn();
|
}
|
||||||
|
|
||||||
RAAssignment as;
|
RAAssignment as;
|
||||||
as.initLayout(_physRegCount, workRegs());
|
as.initLayout(_physRegCount, workRegs());
|
||||||
as.initMaps(physToWorkMap, workToPhysMap);
|
as.initMaps(physToWorkMap, workToPhysMap);
|
||||||
|
|
||||||
|
const ZoneBitVector& liveOut = fromBlock->liveOut();
|
||||||
|
const ZoneBitVector& liveIn = block->liveIn();
|
||||||
|
|
||||||
// It's possible that `fromBlock` has LIVE-OUT regs that `block` doesn't
|
// It's possible that `fromBlock` has LIVE-OUT regs that `block` doesn't
|
||||||
// have in LIVE-IN, these have to be unassigned.
|
// have in LIVE-IN, these have to be unassigned.
|
||||||
{
|
{
|
||||||
@@ -1415,22 +1496,94 @@ Error RAPass::setBlockEntryAssignment(RABlock* block, const RABlock* fromBlock,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Complex allocation strategy: Record register assignments upon block entry.
|
return blockEntryAssigned(as);
|
||||||
{
|
}
|
||||||
for (uint32_t group = 0; group < BaseReg::kGroupVirt; group++) {
|
|
||||||
if (_strategy[group].isComplex()) {
|
Error RAPass::setSharedAssignment(uint32_t sharedAssignmentId, const RAAssignment& fromAssignment) noexcept {
|
||||||
Support::BitWordIterator<uint32_t> it(as.assigned(group));
|
ASMJIT_ASSERT(_sharedAssignments[sharedAssignmentId].empty());
|
||||||
|
|
||||||
|
PhysToWorkMap* physToWorkMap = clonePhysToWorkMap(fromAssignment.physToWorkMap());
|
||||||
|
WorkToPhysMap* workToPhysMap = cloneWorkToPhysMap(fromAssignment.workToPhysMap());
|
||||||
|
|
||||||
|
if (ASMJIT_UNLIKELY(!physToWorkMap || !workToPhysMap))
|
||||||
|
return DebugUtils::errored(kErrorOutOfMemory);
|
||||||
|
|
||||||
|
_sharedAssignments[sharedAssignmentId].assignMaps(physToWorkMap, workToPhysMap);
|
||||||
|
ZoneBitVector& sharedLiveIn = _sharedAssignments[sharedAssignmentId]._liveIn;
|
||||||
|
ASMJIT_PROPAGATE(sharedLiveIn.resize(allocator(), workRegCount()));
|
||||||
|
|
||||||
|
RAAssignment as;
|
||||||
|
as.initLayout(_physRegCount, workRegs());
|
||||||
|
|
||||||
|
uint32_t sharedAssigned[BaseReg::kGroupVirt] {};
|
||||||
|
|
||||||
|
for (RABlock* block : blocks()) {
|
||||||
|
if (block->sharedAssignmentId() == sharedAssignmentId) {
|
||||||
|
ASMJIT_ASSERT(!block->hasEntryAssignment());
|
||||||
|
|
||||||
|
PhysToWorkMap* entryPhysToWorkMap = clonePhysToWorkMap(fromAssignment.physToWorkMap());
|
||||||
|
WorkToPhysMap* entryWorkToPhysMap = cloneWorkToPhysMap(fromAssignment.workToPhysMap());
|
||||||
|
|
||||||
|
if (ASMJIT_UNLIKELY(!entryPhysToWorkMap || !entryWorkToPhysMap))
|
||||||
|
return DebugUtils::errored(kErrorOutOfMemory);
|
||||||
|
|
||||||
|
block->setEntryAssignment(entryPhysToWorkMap, entryWorkToPhysMap);
|
||||||
|
as.initMaps(entryPhysToWorkMap, entryWorkToPhysMap);
|
||||||
|
|
||||||
|
const ZoneBitVector& liveIn = block->liveIn();
|
||||||
|
sharedLiveIn.or_(liveIn);
|
||||||
|
|
||||||
|
for (uint32_t group = 0; group < BaseReg::kGroupVirt; group++) {
|
||||||
|
sharedAssigned[group] |= entryPhysToWorkMap->assigned[group];
|
||||||
|
Support::BitWordIterator<uint32_t> it(entryPhysToWorkMap->assigned[group]);
|
||||||
|
|
||||||
while (it.hasNext()) {
|
while (it.hasNext()) {
|
||||||
uint32_t physId = it.next();
|
uint32_t physId = it.next();
|
||||||
uint32_t workId = as.physToWorkId(group, physId);
|
uint32_t workId = as.physToWorkId(group, physId);
|
||||||
|
|
||||||
RAWorkReg* workReg = workRegById(workId);
|
if (!liveIn.bitAt(workId))
|
||||||
workReg->addAllocatedMask(Support::bitMask(physId));
|
as.unassign(group, workId, physId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
as.initMaps(physToWorkMap, workToPhysMap);
|
||||||
|
|
||||||
|
for (uint32_t group = 0; group < BaseReg::kGroupVirt; group++) {
|
||||||
|
Support::BitWordIterator<uint32_t> it(_availableRegs[group] & ~sharedAssigned[group]);
|
||||||
|
|
||||||
|
while (it.hasNext()) {
|
||||||
|
uint32_t physId = it.next();
|
||||||
|
if (as.isPhysAssigned(group, physId)) {
|
||||||
|
uint32_t workId = as.physToWorkId(group, physId);
|
||||||
|
as.unassign(group, workId, physId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return blockEntryAssigned(as);
|
||||||
|
}
|
||||||
|
|
||||||
|
Error RAPass::blockEntryAssigned(const RAAssignment& as) noexcept {
|
||||||
|
// Complex allocation strategy requires to record register assignments upon
|
||||||
|
// block entry (or per shared state).
|
||||||
|
for (uint32_t group = 0; group < BaseReg::kGroupVirt; group++) {
|
||||||
|
if (!_strategy[group].isComplex())
|
||||||
|
continue;
|
||||||
|
|
||||||
|
Support::BitWordIterator<uint32_t> it(as.assigned(group));
|
||||||
|
while (it.hasNext()) {
|
||||||
|
uint32_t physId = it.next();
|
||||||
|
uint32_t workId = as.physToWorkId(group, physId);
|
||||||
|
|
||||||
|
RAWorkReg* workReg = workRegById(workId);
|
||||||
|
workReg->addAllocatedMask(Support::bitMask(physId));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1591,11 +1744,11 @@ Error RAPass::insertPrologEpilog() noexcept {
|
|||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
||||||
Error RAPass::rewrite() noexcept {
|
Error RAPass::rewrite() noexcept {
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
Logger* logger = debugLogger();
|
Logger* logger = debugLogger();
|
||||||
#endif
|
|
||||||
|
|
||||||
ASMJIT_RA_LOG_FORMAT("[RAPass::Rewrite]\n");
|
ASMJIT_RA_LOG_FORMAT("[RAPass::Rewrite]\n");
|
||||||
|
#endif
|
||||||
|
|
||||||
return _rewrite(_func, _stop);
|
return _rewrite(_func, _stop);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1758,21 +1911,15 @@ ASMJIT_FAVOR_SIZE Error RAPass::annotateCode() noexcept {
|
|||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
}
|
}
|
||||||
|
|
||||||
ASMJIT_FAVOR_SIZE Error RAPass::_logBlockIds(const RABlocks& blocks) noexcept {
|
ASMJIT_FAVOR_SIZE Error RAPass::_dumpBlockIds(String& sb, const RABlocks& blocks) noexcept {
|
||||||
// Can only be called if the `Logger` is present.
|
|
||||||
ASMJIT_ASSERT(debugLogger());
|
|
||||||
|
|
||||||
StringTmp<1024> sb;
|
|
||||||
sb.appendString(" [Succ] {");
|
|
||||||
|
|
||||||
for (uint32_t i = 0, size = blocks.size(); i < size; i++) {
|
for (uint32_t i = 0, size = blocks.size(); i < size; i++) {
|
||||||
const RABlock* block = blocks[i];
|
const RABlock* block = blocks[i];
|
||||||
if (i != 0) sb.appendString(", ");
|
if (i != 0)
|
||||||
sb.appendFormat("#%u", block->blockId());
|
ASMJIT_PROPAGATE(sb.appendFormat(", #%u", block->blockId()));
|
||||||
|
else
|
||||||
|
ASMJIT_PROPAGATE(sb.appendFormat("#%u", block->blockId()));
|
||||||
}
|
}
|
||||||
|
return kErrorOk;
|
||||||
sb.appendString("}\n");
|
|
||||||
return debugLogger()->log(sb.data(), sb.size());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
ASMJIT_FAVOR_SIZE Error RAPass::_dumpBlockLiveness(String& sb, const RABlock* block) noexcept {
|
ASMJIT_FAVOR_SIZE Error RAPass::_dumpBlockLiveness(String& sb, const RABlock* block) noexcept {
|
||||||
|
|||||||
@@ -95,6 +95,7 @@ public:
|
|||||||
uint32_t _weight;
|
uint32_t _weight;
|
||||||
//! Post-order view order, used during POV construction.
|
//! Post-order view order, used during POV construction.
|
||||||
uint32_t _povOrder;
|
uint32_t _povOrder;
|
||||||
|
|
||||||
//! Basic statistics about registers.
|
//! Basic statistics about registers.
|
||||||
RARegsStats _regsStats;
|
RARegsStats _regsStats;
|
||||||
//! Maximum live-count per register group.
|
//! Maximum live-count per register group.
|
||||||
@@ -124,6 +125,14 @@ public:
|
|||||||
//! Liveness in/out/use/kill.
|
//! Liveness in/out/use/kill.
|
||||||
ZoneBitVector _liveBits[kLiveCount];
|
ZoneBitVector _liveBits[kLiveCount];
|
||||||
|
|
||||||
|
//! Shared assignment it or `Globals::kInvalidId` if this block doesn't
|
||||||
|
//! have shared assignment. See `RASharedAssignment` for more details.
|
||||||
|
uint32_t _sharedAssignmentId;
|
||||||
|
//! Scratch registers that cannot be allocated upon block entry.
|
||||||
|
uint32_t _entryScratchGpRegs;
|
||||||
|
//! Scratch registers used at exit, by a terminator instruction.
|
||||||
|
uint32_t _exitScratchGpRegs;
|
||||||
|
|
||||||
//! Register assignment (PhysToWork) on entry.
|
//! Register assignment (PhysToWork) on entry.
|
||||||
PhysToWorkMap* _entryPhysToWorkMap;
|
PhysToWorkMap* _entryPhysToWorkMap;
|
||||||
//! Register assignment (WorkToPhys) on entry.
|
//! Register assignment (WorkToPhys) on entry.
|
||||||
@@ -148,6 +157,10 @@ public:
|
|||||||
_idom(nullptr),
|
_idom(nullptr),
|
||||||
_predecessors(),
|
_predecessors(),
|
||||||
_successors(),
|
_successors(),
|
||||||
|
_doms(),
|
||||||
|
_sharedAssignmentId(Globals::kInvalidId),
|
||||||
|
_entryScratchGpRegs(0),
|
||||||
|
_exitScratchGpRegs(0),
|
||||||
_entryPhysToWorkMap(nullptr),
|
_entryPhysToWorkMap(nullptr),
|
||||||
_entryWorkToPhysMap(nullptr) {}
|
_entryWorkToPhysMap(nullptr) {}
|
||||||
|
|
||||||
@@ -205,6 +218,15 @@ public:
|
|||||||
|
|
||||||
inline uint32_t povOrder() const noexcept { return _povOrder; }
|
inline uint32_t povOrder() const noexcept { return _povOrder; }
|
||||||
|
|
||||||
|
inline uint32_t entryScratchGpRegs() const noexcept;
|
||||||
|
inline uint32_t exitScratchGpRegs() const noexcept { return _exitScratchGpRegs; }
|
||||||
|
|
||||||
|
inline void addExitScratchGpRegs(uint32_t regMask) noexcept { _exitScratchGpRegs |= regMask; }
|
||||||
|
|
||||||
|
inline bool hasSharedAssignmentId() const noexcept { return _sharedAssignmentId != Globals::kInvalidId; }
|
||||||
|
inline uint32_t sharedAssignmentId() const noexcept { return _sharedAssignmentId; }
|
||||||
|
inline void setSharedAssignmentId(uint32_t id) noexcept { _sharedAssignmentId = id; }
|
||||||
|
|
||||||
inline uint64_t timestamp() const noexcept { return _timestamp; }
|
inline uint64_t timestamp() const noexcept { return _timestamp; }
|
||||||
inline bool hasTimestamp(uint64_t ts) const noexcept { return _timestamp == ts; }
|
inline bool hasTimestamp(uint64_t ts) const noexcept { return _timestamp == ts; }
|
||||||
inline void setTimestamp(uint64_t ts) const noexcept { _timestamp = ts; }
|
inline void setTimestamp(uint64_t ts) const noexcept { _timestamp = ts; }
|
||||||
@@ -425,6 +447,12 @@ public:
|
|||||||
//! Returns the number of tied registers added to the builder.
|
//! Returns the number of tied registers added to the builder.
|
||||||
inline uint32_t tiedRegCount() const noexcept { return uint32_t((size_t)(_cur - _tiedRegs)); }
|
inline uint32_t tiedRegCount() const noexcept { return uint32_t((size_t)(_cur - _tiedRegs)); }
|
||||||
|
|
||||||
|
inline RATiedReg* begin() noexcept { return _tiedRegs; }
|
||||||
|
inline RATiedReg* end() noexcept { return _cur; }
|
||||||
|
|
||||||
|
inline const RATiedReg* begin() const noexcept { return _tiedRegs; }
|
||||||
|
inline const RATiedReg* end() const noexcept { return _cur; }
|
||||||
|
|
||||||
//! Returns `RATiedReg` at the given `index`.
|
//! Returns `RATiedReg` at the given `index`.
|
||||||
inline RATiedReg* operator[](uint32_t index) noexcept {
|
inline RATiedReg* operator[](uint32_t index) noexcept {
|
||||||
ASMJIT_ASSERT(index < tiedRegCount());
|
ASMJIT_ASSERT(index < tiedRegCount());
|
||||||
@@ -573,6 +601,54 @@ public:
|
|||||||
//! \}
|
//! \}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// [asmjit::RASharedAssignment]
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
class RASharedAssignment {
|
||||||
|
public:
|
||||||
|
typedef RAAssignment::PhysToWorkMap PhysToWorkMap;
|
||||||
|
typedef RAAssignment::WorkToPhysMap WorkToPhysMap;
|
||||||
|
|
||||||
|
//! Bit-mask of registers that cannot be used upon a block entry, for each
|
||||||
|
//! block that has this shared assignment. Scratch registers can come from
|
||||||
|
//! ISA limits (like jecx/loop instructions on x86) or because the registers
|
||||||
|
//! are used by jump/branch instruction that uses registers to perform an
|
||||||
|
//! indirect jump.
|
||||||
|
uint32_t _entryScratchGpRegs;
|
||||||
|
//! Union of all live-in registers.
|
||||||
|
ZoneBitVector _liveIn;
|
||||||
|
//! Register assignment (PhysToWork).
|
||||||
|
PhysToWorkMap* _physToWorkMap;
|
||||||
|
//! Register assignment (WorkToPhys).
|
||||||
|
WorkToPhysMap* _workToPhysMap;
|
||||||
|
|
||||||
|
//! Provided for clarity, most likely never called as we initialize a vector
|
||||||
|
//! of shared assignments to zero.
|
||||||
|
inline RASharedAssignment() noexcept
|
||||||
|
: _entryScratchGpRegs(0),
|
||||||
|
_liveIn(),
|
||||||
|
_physToWorkMap(nullptr),
|
||||||
|
_workToPhysMap(nullptr) {}
|
||||||
|
|
||||||
|
inline uint32_t entryScratchGpRegs() const noexcept { return _entryScratchGpRegs; }
|
||||||
|
inline void addScratchGpRegs(uint32_t mask) noexcept { _entryScratchGpRegs |= mask; }
|
||||||
|
|
||||||
|
inline const ZoneBitVector& liveIn() const noexcept { return _liveIn; }
|
||||||
|
|
||||||
|
inline PhysToWorkMap* physToWorkMap() const noexcept { return _physToWorkMap; }
|
||||||
|
inline WorkToPhysMap* workToPhysMap() const noexcept { return _workToPhysMap; }
|
||||||
|
|
||||||
|
inline bool empty() const noexcept {
|
||||||
|
return _physToWorkMap == nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
|
inline void assignMaps(PhysToWorkMap* physToWorkMap, WorkToPhysMap* workToPhysMap) noexcept {
|
||||||
|
_physToWorkMap = physToWorkMap;
|
||||||
|
_workToPhysMap = workToPhysMap;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// [asmjit::RAPass]
|
// [asmjit::RAPass]
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -617,6 +693,10 @@ public:
|
|||||||
uint32_t _instructionCount;
|
uint32_t _instructionCount;
|
||||||
//! Number of created blocks (internal).
|
//! Number of created blocks (internal).
|
||||||
uint32_t _createdBlockCount;
|
uint32_t _createdBlockCount;
|
||||||
|
|
||||||
|
//! SharedState blocks.
|
||||||
|
ZoneVector<RASharedAssignment> _sharedAssignments;
|
||||||
|
|
||||||
//! Timestamp generator (incremental).
|
//! Timestamp generator (incremental).
|
||||||
mutable uint64_t _lastTimestamp;
|
mutable uint64_t _lastTimestamp;
|
||||||
|
|
||||||
@@ -630,6 +710,8 @@ public:
|
|||||||
RARegCount _physRegCount;
|
RARegCount _physRegCount;
|
||||||
//! Total number of physical registers.
|
//! Total number of physical registers.
|
||||||
uint32_t _physRegTotal;
|
uint32_t _physRegTotal;
|
||||||
|
//! Indexes of a possible scratch registers that can be selected if necessary.
|
||||||
|
uint8_t _scratchRegIndexes[2];
|
||||||
|
|
||||||
//! Registers available for allocation.
|
//! Registers available for allocation.
|
||||||
RARegMask _availableRegs;
|
RARegMask _availableRegs;
|
||||||
@@ -689,6 +771,9 @@ public:
|
|||||||
//! Returns `ZoneAllocator` used by the register allocator.
|
//! Returns `ZoneAllocator` used by the register allocator.
|
||||||
inline ZoneAllocator* allocator() const noexcept { return const_cast<ZoneAllocator*>(&_allocator); }
|
inline ZoneAllocator* allocator() const noexcept { return const_cast<ZoneAllocator*>(&_allocator); }
|
||||||
|
|
||||||
|
inline const ZoneVector<RASharedAssignment>& sharedAssignments() const { return _sharedAssignments; }
|
||||||
|
inline uint32_t sharedAssignmentCount() const noexcept { return _sharedAssignments.size(); }
|
||||||
|
|
||||||
//! Returns the current function node.
|
//! Returns the current function node.
|
||||||
inline FuncNode* func() const noexcept { return _func; }
|
inline FuncNode* func() const noexcept { return _func; }
|
||||||
//! Returns the stop of the current function.
|
//! Returns the stop of the current function.
|
||||||
@@ -750,6 +835,11 @@ public:
|
|||||||
return _blocks[0];
|
return _blocks[0];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//! Returns all basic blocks of this function.
|
||||||
|
inline RABlocks& blocks() noexcept { return _blocks; }
|
||||||
|
//! \overload
|
||||||
|
inline const RABlocks& blocks() const noexcept { return _blocks; }
|
||||||
|
|
||||||
//! Returns the count of basic blocks (returns size of `_blocks` array).
|
//! Returns the count of basic blocks (returns size of `_blocks` array).
|
||||||
inline uint32_t blockCount() const noexcept { return _blocks.size(); }
|
inline uint32_t blockCount() const noexcept { return _blocks.size(); }
|
||||||
//! Returns the count of reachable basic blocks (returns size of `_pov` array).
|
//! Returns the count of reachable basic blocks (returns size of `_pov` array).
|
||||||
@@ -857,6 +947,9 @@ public:
|
|||||||
//! Use `RACFGBuilder` template that provides the necessary boilerplate.
|
//! Use `RACFGBuilder` template that provides the necessary boilerplate.
|
||||||
virtual Error buildCFG() noexcept = 0;
|
virtual Error buildCFG() noexcept = 0;
|
||||||
|
|
||||||
|
//! Called after the CFG is built.
|
||||||
|
Error initSharedAssignments(const ZoneVector<uint32_t>& sharedAssignmentsMap) noexcept;
|
||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
|
|
||||||
//! \name CFG - Views Order
|
//! \name CFG - Views Order
|
||||||
@@ -1016,6 +1109,12 @@ public:
|
|||||||
//! Runs a local register allocator.
|
//! Runs a local register allocator.
|
||||||
Error runLocalAllocator() noexcept;
|
Error runLocalAllocator() noexcept;
|
||||||
Error setBlockEntryAssignment(RABlock* block, const RABlock* fromBlock, const RAAssignment& fromAssignment) noexcept;
|
Error setBlockEntryAssignment(RABlock* block, const RABlock* fromBlock, const RAAssignment& fromAssignment) noexcept;
|
||||||
|
Error setSharedAssignment(uint32_t sharedAssignmentId, const RAAssignment& fromAssignment) noexcept;
|
||||||
|
|
||||||
|
//! Called after the RA assignment has been assigned to a block.
|
||||||
|
//!
|
||||||
|
//! This cannot change the assignment, but can examine it.
|
||||||
|
Error blockEntryAssigned(const RAAssignment& as) noexcept;
|
||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
|
|
||||||
@@ -1044,18 +1143,18 @@ public:
|
|||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
|
|
||||||
#ifndef ASMJIT_NO_LOGGING
|
#ifndef ASMJIT_NO_LOGGING
|
||||||
//! \name Logging
|
//! \name Logging
|
||||||
//! \{
|
//! \{
|
||||||
|
|
||||||
Error annotateCode() noexcept;
|
Error annotateCode() noexcept;
|
||||||
|
|
||||||
Error _logBlockIds(const RABlocks& blocks) noexcept;
|
Error _dumpBlockIds(String& sb, const RABlocks& blocks) noexcept;
|
||||||
Error _dumpBlockLiveness(String& sb, const RABlock* block) noexcept;
|
Error _dumpBlockLiveness(String& sb, const RABlock* block) noexcept;
|
||||||
Error _dumpLiveSpans(String& sb) noexcept;
|
Error _dumpLiveSpans(String& sb) noexcept;
|
||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
//! \name Emit
|
//! \name Emit
|
||||||
//! \{
|
//! \{
|
||||||
@@ -1074,6 +1173,13 @@ public:
|
|||||||
|
|
||||||
inline ZoneAllocator* RABlock::allocator() const noexcept { return _ra->allocator(); }
|
inline ZoneAllocator* RABlock::allocator() const noexcept { return _ra->allocator(); }
|
||||||
|
|
||||||
|
inline uint32_t RABlock::entryScratchGpRegs() const noexcept {
|
||||||
|
uint32_t regs = _entryScratchGpRegs;
|
||||||
|
if (hasSharedAssignmentId())
|
||||||
|
regs = _ra->_sharedAssignments[_sharedAssignmentId].entryScratchGpRegs();
|
||||||
|
return regs;
|
||||||
|
}
|
||||||
|
|
||||||
//! \}
|
//! \}
|
||||||
//! \endcond
|
//! \endcond
|
||||||
|
|
||||||
|
|||||||
@@ -920,8 +920,8 @@ static inline void writeU64uBE(void* p, uint64_t x) noexcept { writeU64xBE<1>(p,
|
|||||||
// [asmjit::Support - Operators]
|
// [asmjit::Support - Operators]
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
||||||
struct Set { template<typename T> static inline T op(T x, T y) noexcept { ASMJIT_UNUSED(x); return y; } };
|
struct Set { template<typename T> static inline T op(T x, T y) noexcept { DebugUtils::unused(x); return y; } };
|
||||||
struct SetNot { template<typename T> static inline T op(T x, T y) noexcept { ASMJIT_UNUSED(x); return ~y; } };
|
struct SetNot { template<typename T> static inline T op(T x, T y) noexcept { DebugUtils::unused(x); return ~y; } };
|
||||||
struct And { template<typename T> static inline T op(T x, T y) noexcept { return x & y; } };
|
struct And { template<typename T> static inline T op(T x, T y) noexcept { return x & y; } };
|
||||||
struct AndNot { template<typename T> static inline T op(T x, T y) noexcept { return x & ~y; } };
|
struct AndNot { template<typename T> static inline T op(T x, T y) noexcept { return x & ~y; } };
|
||||||
struct NotAnd { template<typename T> static inline T op(T x, T y) noexcept { return ~x & y; } };
|
struct NotAnd { template<typename T> static inline T op(T x, T y) noexcept { return ~x & y; } };
|
||||||
@@ -1274,9 +1274,13 @@ namespace Internal {
|
|||||||
ASMJIT_ASSERT(stackptr <= stack + kStackSize);
|
ASMJIT_ASSERT(stackptr <= stack + kStackSize);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
iSort(base, (size_t)(end - base), cmp);
|
// UB sanitizer doesn't like applying offset to a nullptr base.
|
||||||
|
if (base != end)
|
||||||
|
iSort(base, (size_t)(end - base), cmp);
|
||||||
|
|
||||||
if (stackptr == stack)
|
if (stackptr == stack)
|
||||||
break;
|
break;
|
||||||
|
|
||||||
end = *--stackptr;
|
end = *--stackptr;
|
||||||
base = *--stackptr;
|
base = *--stackptr;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -143,7 +143,7 @@ Error VirtMem::alloc(void** p, size_t size, uint32_t flags) noexcept {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Error VirtMem::release(void* p, size_t size) noexcept {
|
Error VirtMem::release(void* p, size_t size) noexcept {
|
||||||
ASMJIT_UNUSED(size);
|
DebugUtils::unused(size);
|
||||||
if (ASMJIT_UNLIKELY(!::VirtualFree(p, 0, MEM_RELEASE)))
|
if (ASMJIT_UNLIKELY(!::VirtualFree(p, 0, MEM_RELEASE)))
|
||||||
return DebugUtils::errored(kErrorInvalidArgument);
|
return DebugUtils::errored(kErrorInvalidArgument);
|
||||||
return kErrorOk;
|
return kErrorOk;
|
||||||
@@ -196,7 +196,7 @@ Error VirtMem::allocDualMapping(DualMapping* dm, size_t size, uint32_t flags) no
|
|||||||
}
|
}
|
||||||
|
|
||||||
Error VirtMem::releaseDualMapping(DualMapping* dm, size_t size) noexcept {
|
Error VirtMem::releaseDualMapping(DualMapping* dm, size_t size) noexcept {
|
||||||
ASMJIT_UNUSED(size);
|
DebugUtils::unused(size);
|
||||||
bool failed = false;
|
bool failed = false;
|
||||||
|
|
||||||
if (!::UnmapViewOfFile(dm->ro))
|
if (!::UnmapViewOfFile(dm->ro))
|
||||||
@@ -346,7 +346,7 @@ static ASMJIT_INLINE uint32_t VirtMem_appleSpecificMMapFlags(uint32_t flags) {
|
|||||||
}
|
}
|
||||||
#else
|
#else
|
||||||
static ASMJIT_INLINE uint32_t VirtMem_appleSpecificMMapFlags(uint32_t flags) {
|
static ASMJIT_INLINE uint32_t VirtMem_appleSpecificMMapFlags(uint32_t flags) {
|
||||||
ASMJIT_UNUSED(flags);
|
DebugUtils::unused(flags);
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
@@ -380,7 +380,7 @@ static Error VirtMem_openAnonymousMemory(int* fd, bool preferTmpOverDevShm) noex
|
|||||||
|
|
||||||
#if defined(SHM_ANON)
|
#if defined(SHM_ANON)
|
||||||
// Originally FreeBSD extension, apparently works in other BSDs too.
|
// Originally FreeBSD extension, apparently works in other BSDs too.
|
||||||
ASMJIT_UNUSED(preferTmpOverDevShm);
|
DebugUtils::unused(preferTmpOverDevShm);
|
||||||
*fd = shm_open(SHM_ANON, O_RDWR | O_CREAT | O_EXCL, S_IRUSR | S_IWUSR);
|
*fd = shm_open(SHM_ANON, O_RDWR | O_CREAT | O_EXCL, S_IRUSR | S_IWUSR);
|
||||||
|
|
||||||
if (ASMJIT_LIKELY(*fd >= 0))
|
if (ASMJIT_LIKELY(*fd >= 0))
|
||||||
|
|||||||
@@ -357,7 +357,7 @@ void* ZoneAllocator::_allocZeroed(size_t size, size_t& allocatedSize) noexcept {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void ZoneAllocator::_releaseDynamic(void* p, size_t size) noexcept {
|
void ZoneAllocator::_releaseDynamic(void* p, size_t size) noexcept {
|
||||||
ASMJIT_UNUSED(size);
|
DebugUtils::unused(size);
|
||||||
ASMJIT_ASSERT(isInitialized());
|
ASMJIT_ASSERT(isInitialized());
|
||||||
|
|
||||||
// Pointer to `DynamicBlock` is stored at [-1].
|
// Pointer to `DynamicBlock` is stored at [-1].
|
||||||
|
|||||||
@@ -246,7 +246,7 @@ ZoneHashNode* ZoneHashBase::_insert(ZoneAllocator* allocator, ZoneHashNode* node
|
|||||||
}
|
}
|
||||||
|
|
||||||
ZoneHashNode* ZoneHashBase::_remove(ZoneAllocator* allocator, ZoneHashNode* node) noexcept {
|
ZoneHashNode* ZoneHashBase::_remove(ZoneAllocator* allocator, ZoneHashNode* node) noexcept {
|
||||||
ASMJIT_UNUSED(allocator);
|
DebugUtils::unused(allocator);
|
||||||
uint32_t hashMod = _calcMod(node->_hashCode);
|
uint32_t hashMod = _calcMod(node->_hashCode);
|
||||||
|
|
||||||
ZoneHashNode** pPrev = &_data[hashMod];
|
ZoneHashNode** pPrev = &_data[hashMod];
|
||||||
|
|||||||
@@ -410,11 +410,11 @@ public:
 
   ASMJIT_ASSERT(immSize == 1 || immSize == 4);
 
 #if ASMJIT_ARCH_BITS >= 64
   uint64_t imm = uint64_t(immValue);
 #else
   uint32_t imm = uint32_t(immValue & 0xFFFFFFFFu);
 #endif
 
   // Many instructions just use a single byte immediate, so make it fast.
   emit8(imm & 0xFFu);

@@ -432,11 +432,11 @@ public:
   if (!immSize)
     return;
 
 #if ASMJIT_ARCH_BITS >= 64
   uint64_t imm = uint64_t(immValue);
 #else
   uint32_t imm = uint32_t(immValue & 0xFFFFFFFFu);
 #endif
 
   // Many instructions just use a single byte immediate, so make it fast.
   emit8(imm & 0xFFu);

@@ -457,12 +457,12 @@ public:
   // Can be 1, 2, 4 or 8 bytes, this handles the remaining high DWORD of an 8-byte immediate.
   ASMJIT_ASSERT(immSize == 4);
 
 #if ASMJIT_ARCH_BITS >= 64
   imm >>= 8;
   emit32uLE(uint32_t(imm));
 #else
   emit32uLE(uint32_t((uint64_t(immValue) >> 32) & 0xFFFFFFFFu));
 #endif
 }
 };
 
@@ -611,8 +611,8 @@ ASMJIT_FAVOR_SPEED Error Assembler::_emit(uint32_t instId, const Operand_& o0, c
   if (ASMJIT_UNLIKELY(err))
     goto Failed;
 
+#ifndef ASMJIT_NO_VALIDATION
   // Strict validation.
-#ifndef ASMJIT_NO_VALIDATION
   if (hasEmitterOption(kOptionStrictValidation)) {
     Operand_ opArray[Globals::kMaxOpCount];
 

@@ -633,7 +633,7 @@ ASMJIT_FAVOR_SPEED Error Assembler::_emit(uint32_t instId, const Operand_& o0, c
     err = InstAPI::validate(archId(), BaseInst(instId, options, _extraReg), opArray, Globals::kMaxOpCount);
     if (ASMJIT_UNLIKELY(err)) goto Failed;
   }
 #endif
 
   uint32_t iFlags = instInfo->flags();
 

@@ -4581,10 +4581,10 @@ EmitRel:
 
 EmitDone:
   if (ASMJIT_UNLIKELY(options & Inst::kOptionReserved)) {
 #ifndef ASMJIT_NO_LOGGING
     if (hasEmitterOption(kOptionLoggingEnabled))
       _emitLog(instId, options, o0, o1, o2, o3, relSize, immSize, writer.cursor());
 #endif
   }
 
   resetInstOptions();

@@ -4698,7 +4698,7 @@ Error Assembler::align(uint32_t alignMode, uint32_t alignment) {
     writer.done(this);
   }
 
 #ifndef ASMJIT_NO_LOGGING
   if (hasEmitterOption(kOptionLoggingEnabled)) {
     Logger* logger = _code->logger();
     StringTmp<128> sb;

@@ -4706,7 +4706,7 @@ Error Assembler::align(uint32_t alignMode, uint32_t alignment) {
     sb.appendFormat("align %u\n", alignment);
     logger->log(sb);
   }
 #endif
 
   return kErrorOk;
 }
@@ -22,7 +22,7 @@
 // 3. This notice may not be removed or altered from any source distribution.
 
 #include "../core/api-build_p.h"
-#if defined(ASMJIT_BUILD_X86) && !defined(ASMJIT_NO_COMPILER)
+#if defined(ASMJIT_BUILD_X86) && !defined(ASMJIT_NO_BUILDER)
 
 #include "../x86/x86assembler.h"
 #include "../x86/x86builder.h"

@@ -66,4 +66,4 @@ Error Builder::onAttach(CodeHolder* code) noexcept {
 
 ASMJIT_END_SUB_NAMESPACE
 
-#endif // ASMJIT_BUILD_X86 && !ASMJIT_NO_COMPILER
+#endif // ASMJIT_BUILD_X86 && !ASMJIT_NO_BUILDER
@@ -65,7 +65,8 @@ public:
     _newRegFmt(OUT, PARAM, FORMAT, ARGS)
 #else
 # define ASMJIT_NEW_REG_FMT(OUT, PARAM, FORMAT, ARGS) \
-    ASMJIT_UNUSED(FORMAT); \
+    DebugUtils::unused(FORMAT); \
+    DebugUtils::unused(std::forward<Args>(args)...); \
     _newReg(OUT, PARAM)
 #endif
 

@@ -235,15 +236,15 @@ public:
   //! \{
 
   //! Call a function.
-  inline FuncCallNode* call(const Gp& dst, const FuncSignature& sign) { return addCall(Inst::kIdCall, dst, sign); }
+  inline FuncCallNode* call(const Gp& target, const FuncSignature& sign) { return addCall(Inst::kIdCall, target, sign); }
   //! \overload
-  inline FuncCallNode* call(const Mem& dst, const FuncSignature& sign) { return addCall(Inst::kIdCall, dst, sign); }
+  inline FuncCallNode* call(const Mem& target, const FuncSignature& sign) { return addCall(Inst::kIdCall, target, sign); }
   //! \overload
-  inline FuncCallNode* call(const Label& label, const FuncSignature& sign) { return addCall(Inst::kIdCall, label, sign); }
+  inline FuncCallNode* call(const Label& target, const FuncSignature& sign) { return addCall(Inst::kIdCall, target, sign); }
   //! \overload
-  inline FuncCallNode* call(const Imm& dst, const FuncSignature& sign) { return addCall(Inst::kIdCall, dst, sign); }
+  inline FuncCallNode* call(const Imm& target, const FuncSignature& sign) { return addCall(Inst::kIdCall, target, sign); }
   //! \overload
-  inline FuncCallNode* call(uint64_t dst, const FuncSignature& sign) { return addCall(Inst::kIdCall, Imm(int64_t(dst)), sign); }
+  inline FuncCallNode* call(uint64_t target, const FuncSignature& sign) { return addCall(Inst::kIdCall, Imm(int64_t(target)), sign); }
 
   //! Return.
   inline FuncRetNode* ret() { return addRet(Operand(), Operand()); }

@@ -254,6 +255,16 @@ public:
 
   //! \}
 
+  //! \name Jump Tables Support
+  //! \{
+
+  using EmitterExplicitT<Compiler>::jmp;
+
+  inline Error jmp(const BaseReg& target, JumpAnnotation* annotation) { return emitAnnotatedJump(Inst::kIdJmp, target, annotation); }
+  inline Error jmp(const BaseMem& target, JumpAnnotation* annotation) { return emitAnnotatedJump(Inst::kIdJmp, target, annotation); }
+
+  //! \}
+
   //! \name Finalize
   //! \{
 
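The hunk above is what makes indirect jumps usable with the Compiler: jmp() can now carry a JumpAnnotation that lists every label the jump may reach, so the register allocator can treat all of them as successors. A rough usage sketch built only from the calls visible in this diff (the function and label names are illustrative):

    #include <asmjit/asmjit.h>
    using namespace asmjit;

    // Emits an indirect jump whose possible targets are declared up front.
    static void emitIndirectDispatch(x86::Compiler& cc, const x86::Gp& target) {
      Label L_CaseA = cc.newLabel();
      Label L_CaseB = cc.newLabel();
      Label L_End   = cc.newLabel();

      JumpAnnotation* annotation = cc.newJumpAnnotation();
      annotation->addLabel(L_CaseA);   // every label `target` can point to
      annotation->addLabel(L_CaseB);
      cc.jmp(target, annotation);      // the overload added above

      cc.bind(L_CaseA);
      // ... case A body ...
      cc.jmp(L_End);

      cc.bind(L_CaseB);
      // ... case B body ...

      cc.bind(L_End);
    }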
@@ -244,57 +244,57 @@ struct EmitterExplicitT {
 
   //! Creates an `intptr_t` memory operand depending on the current architecture.
   inline Mem intptr_ptr(const Gp& base, int32_t offset = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, offset, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr(const Gp& base, const Gp& index, uint32_t shift = 0, int32_t offset = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, index, shift, offset, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr(const Gp& base, const Vec& index, uint32_t shift = 0, int32_t offset = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, index, shift, offset, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr(const Label& base, int32_t offset = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, offset, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr(const Label& base, const Gp& index, uint32_t shift, int32_t offset = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, index, shift, offset, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr(const Label& base, const Vec& index, uint32_t shift, int32_t offset = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, index, shift, offset, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr(const Rip& rip, int32_t offset = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(rip, offset, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr(uint64_t base) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr(uint64_t base, const Gp& index, uint32_t shift = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, index, shift, nativeGpSize);
   }
   //! \overload
   inline Mem intptr_ptr_abs(uint64_t base) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, nativeGpSize, BaseMem::kSignatureMemAbs);
   }
   //! \overload
   inline Mem intptr_ptr_abs(uint64_t base, const Gp& index, uint32_t shift = 0) const noexcept {
-    uint32_t nativeGpSize = static_cast<const This*>(this)->gpSize();
+    uint32_t nativeGpSize = _emitter()->gpSize();
     return Mem(base, index, shift, nativeGpSize, BaseMem::kSignatureMemAbs);
   }
 
@@ -304,57 +304,57 @@ struct EmitterExplicitT {
   //! \{
 
   //! Adds 8-bit integer data to the CodeBuffer.
-  inline Error db(uint8_t x) { return static_cast<This*>(this)->embed(&x, 1); }
+  inline Error db(uint8_t x) { return _emitter()->embed(&x, 1); }
   //! Adds 16-bit integer data to the CodeBuffer.
-  inline Error dw(uint16_t x) { return static_cast<This*>(this)->embed(&x, 2); }
+  inline Error dw(uint16_t x) { return _emitter()->embed(&x, 2); }
   //! Adds 32-bit integer data to the CodeBuffer.
-  inline Error dd(uint32_t x) { return static_cast<This*>(this)->embed(&x, 4); }
+  inline Error dd(uint32_t x) { return _emitter()->embed(&x, 4); }
   //! Adds 64-bit integer data to the CodeBuffer.
-  inline Error dq(uint64_t x) { return static_cast<This*>(this)->embed(&x, 8); }
+  inline Error dq(uint64_t x) { return _emitter()->embed(&x, 8); }
 
   //! Adds 8-bit integer data to the CodeBuffer.
-  inline Error dint8(int8_t x) { return static_cast<This*>(this)->embed(&x, sizeof(int8_t)); }
+  inline Error dint8(int8_t x) { return _emitter()->embed(&x, sizeof(int8_t)); }
   //! Adds 8-bit integer data to the CodeBuffer.
-  inline Error duint8(uint8_t x) { return static_cast<This*>(this)->embed(&x, sizeof(uint8_t)); }
+  inline Error duint8(uint8_t x) { return _emitter()->embed(&x, sizeof(uint8_t)); }
 
   //! Adds 16-bit integer data to the CodeBuffer.
-  inline Error dint16(int16_t x) { return static_cast<This*>(this)->embed(&x, sizeof(int16_t)); }
+  inline Error dint16(int16_t x) { return _emitter()->embed(&x, sizeof(int16_t)); }
   //! Adds 16-bit integer data to the CodeBuffer.
-  inline Error duint16(uint16_t x) { return static_cast<This*>(this)->embed(&x, sizeof(uint16_t)); }
+  inline Error duint16(uint16_t x) { return _emitter()->embed(&x, sizeof(uint16_t)); }
 
   //! Adds 32-bit integer data to the CodeBuffer.
-  inline Error dint32(int32_t x) { return static_cast<This*>(this)->embed(&x, sizeof(int32_t)); }
+  inline Error dint32(int32_t x) { return _emitter()->embed(&x, sizeof(int32_t)); }
   //! Adds 32-bit integer data to the CodeBuffer.
-  inline Error duint32(uint32_t x) { return static_cast<This*>(this)->embed(&x, sizeof(uint32_t)); }
+  inline Error duint32(uint32_t x) { return _emitter()->embed(&x, sizeof(uint32_t)); }
 
   //! Adds 64-bit integer data to the CodeBuffer.
-  inline Error dint64(int64_t x) { return static_cast<This*>(this)->embed(&x, sizeof(int64_t)); }
+  inline Error dint64(int64_t x) { return _emitter()->embed(&x, sizeof(int64_t)); }
   //! Adds 64-bit integer data to the CodeBuffer.
-  inline Error duint64(uint64_t x) { return static_cast<This*>(this)->embed(&x, sizeof(uint64_t)); }
+  inline Error duint64(uint64_t x) { return _emitter()->embed(&x, sizeof(uint64_t)); }
 
   //! Adds float data to the CodeBuffer.
-  inline Error dfloat(float x) { return static_cast<This*>(this)->embed(&x, sizeof(float)); }
+  inline Error dfloat(float x) { return _emitter()->embed(&x, sizeof(float)); }
   //! Adds double data to the CodeBuffer.
-  inline Error ddouble(double x) { return static_cast<This*>(this)->embed(&x, sizeof(double)); }
+  inline Error ddouble(double x) { return _emitter()->embed(&x, sizeof(double)); }
 
   //! Adds MMX data to the CodeBuffer.
-  inline Error dmm(const Data64& x) { return static_cast<This*>(this)->embed(&x, sizeof(Data64)); }
+  inline Error dmm(const Data64& x) { return _emitter()->embed(&x, sizeof(Data64)); }
   //! Adds XMM data to the CodeBuffer.
-  inline Error dxmm(const Data128& x) { return static_cast<This*>(this)->embed(&x, sizeof(Data128)); }
+  inline Error dxmm(const Data128& x) { return _emitter()->embed(&x, sizeof(Data128)); }
   //! Adds YMM data to the CodeBuffer.
-  inline Error dymm(const Data256& x) { return static_cast<This*>(this)->embed(&x, sizeof(Data256)); }
+  inline Error dymm(const Data256& x) { return _emitter()->embed(&x, sizeof(Data256)); }
 
   //! Adds data in a given structure instance to the CodeBuffer.
   template<typename T>
-  inline Error dstruct(const T& x) { return static_cast<This*>(this)->embed(&x, uint32_t(sizeof(T))); }
+  inline Error dstruct(const T& x) { return _emitter()->embed(&x, uint32_t(sizeof(T))); }
 
   //! \}
 
 protected:
   //! \cond
   inline This& _addInstOptions(uint32_t options) noexcept {
-    static_cast<This*>(this)->addInstOptions(options);
-    return *static_cast<This*>(this);
+    _emitter()->addInstOptions(options);
+    return *_emitter();
   }
   //! \endcond
 
@@ -403,7 +403,7 @@ public:
   //!
   //! \note This is the same as using `repe()` or `repz()` prefix.
   inline This& rep(const Gp& zcx) noexcept {
-    static_cast<This*>(this)->_extraReg.init(zcx);
+    _emitter()->_extraReg.init(zcx);
     return _addInstOptions(Inst::kOptionRep);
   }
 

@@ -421,7 +421,7 @@ public:
   //!
   //! \note This is the same as using `bnd()` or `repnz()` prefix.
   inline This& repne(const Gp& zcx) noexcept {
-    static_cast<This*>(this)->_extraReg.init(zcx);
+    _emitter()->_extraReg.init(zcx);
     return _addInstOptions(Inst::kOptionRepne);
   }
 

@@ -467,8 +467,8 @@ public:
 
   //! Use masking {k} (AVX512+).
   inline This& k(const KReg& kreg) noexcept {
-    static_cast<This*>(this)->_extraReg.init(kreg);
-    return *static_cast<This*>(this);
+    _emitter()->_extraReg.init(kreg);
+    return *_emitter();
   }
 
   //! Use zeroing instead of merging (AVX512+).
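These hunks consistently replace the explicit static_cast<This*>(this) downcasts with an _emitter() helper. A minimal sketch of the CRTP pattern this relies on (the helper name and mixin shape follow the diff; the exact asmjit definition is assumed):

    #include <cstdint>

    // CRTP mixin: the derived emitter passes itself as `This`, so the mixin can
    // reach the full interface through one small downcast helper instead of
    // repeating static_cast<This*>(this) at every call site.
    template<typename This>
    struct EmitterMixinSketch {
      inline This* _emitter() noexcept { return static_cast<This*>(this); }
      inline const This* _emitter() const noexcept { return static_cast<const This*>(this); }

      inline uint32_t pointerSize() const noexcept { return _emitter()->gpSize(); }
    };

    struct Emitter64 : EmitterMixinSketch<Emitter64> {
      inline uint32_t gpSize() const noexcept { return 8; } // 64-bit GP registers
    };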
@@ -59,7 +59,7 @@ ASMJIT_BEGIN_SUB_NAMESPACE(x86)
 
 #ifndef ASMJIT_NO_TEXT
 Error InstInternal::instIdToString(uint32_t archId, uint32_t instId, String& output) noexcept {
-  ASMJIT_UNUSED(archId);
+  DebugUtils::unused(archId);
 
   if (ASMJIT_UNLIKELY(!Inst::isDefinedId(instId)))
     return DebugUtils::errored(kErrorInvalidInstruction);

@@ -69,7 +69,7 @@ Error InstInternal::instIdToString(uint32_t archId, uint32_t instId, String& out
 }
 
 uint32_t InstInternal::stringToInstId(uint32_t archId, const char* s, size_t len) noexcept {
-  ASMJIT_UNUSED(archId);
+  DebugUtils::unused(archId);
 
   if (ASMJIT_UNLIKELY(!s))
     return Inst::kIdNone;

@@ -776,7 +776,7 @@ static ASMJIT_INLINE void rwZeroExtendGp(OpRWInfo& opRwInfo, const Gp& reg, uint
 }
 
 static ASMJIT_INLINE void rwZeroExtendAvxVec(OpRWInfo& opRwInfo, const Vec& reg) noexcept {
-  ASMJIT_UNUSED(reg);
+  DebugUtils::unused(reg);
 
   uint64_t msk = ~Support::fillTrailingBits(opRwInfo.writeByteMask());
   if (msk) {

@@ -1361,7 +1361,7 @@ static RegAnalysis InstInternal_regAnalysis(const Operand_* operands, uint32_t o
 
 Error InstInternal::queryFeatures(uint32_t archId, const BaseInst& inst, const Operand_* operands, uint32_t opCount, BaseFeatures& out) noexcept {
   // Only called when `archId` matches X86 family.
-  ASMJIT_UNUSED(archId);
+  DebugUtils::unused(archId);
   ASMJIT_ASSERT(ArchInfo::isX86Family(archId));
 
   // Get the instruction data.
@@ -68,7 +68,7 @@ static inline uint32_t x86KmovFromSize(uint32_t size) noexcept {
 // ============================================================================
 
 ASMJIT_FAVOR_SIZE Error X86Internal::initFuncDetail(FuncDetail& func, const FuncSignature& sign, uint32_t gpSize) noexcept {
-  ASMJIT_UNUSED(sign);
+  DebugUtils::unused(sign);
 
   const CallConv& cc = func.callConv();
   uint32_t archId = cc.archId();

@@ -333,10 +333,10 @@ public:
   }
 
   inline void unassign(uint32_t varId, uint32_t regId) noexcept {
-    ASMJIT_UNUSED(varId);
     ASMJIT_ASSERT(isAssigned(regId));
     ASMJIT_ASSERT(_physToVarId[regId] == varId);
 
+    DebugUtils::unused(varId);
     _physToVarId[regId] = uint8_t(kVarIdNone);
     _assignedRegs ^= Support::bitMask(regId);
   }

@@ -1390,13 +1390,13 @@ ASMJIT_FAVOR_SIZE Error X86Internal::emitArgsAssignment(Emitter* emitter, const
   X86FuncArgsContext ctx;
   ASMJIT_PROPAGATE(ctx.initWorkData(frame, args));
 
 #ifdef ASMJIT_DUMP_ARGS_ASSIGNMENT
   {
     String sb;
     dumpAssignment(sb, ctx);
     printf("%s\n", sb.data());
   }
 #endif
 
   uint32_t archId = ctx.archId();
   uint32_t varCount = ctx._varCount;
@@ -387,7 +387,7 @@ ASMJIT_FAVOR_SIZE static Error LoggingInternal_explainConst(
   uint32_t vecSize,
   const Imm& imm) noexcept {
 
-  ASMJIT_UNUSED(flags);
+  DebugUtils::unused(flags);
 
   static const char vcmpx[] =
     "EQ_OQ\0" "LT_OS\0" "LE_OS\0" "UNORD_Q\0" "NEQ_UQ\0" "NLT_US\0" "NLE_US\0" "ORD_Q\0"

@@ -625,7 +625,7 @@ ASMJIT_FAVOR_SIZE static Error LoggingInternal_explainConst(
 // ============================================================================
 
 ASMJIT_FAVOR_SIZE Error LoggingInternal::formatRegister(String& sb, uint32_t flags, const BaseEmitter* emitter, uint32_t archId, uint32_t rType, uint32_t rId) noexcept {
-  ASMJIT_UNUSED(archId);
+  DebugUtils::unused(archId);
   const RegFormatInfo& info = x86RegFormatInfo;
 
 #ifndef ASMJIT_NO_COMPILER

@@ -653,7 +653,7 @@ ASMJIT_FAVOR_SIZE Error LoggingInternal::formatRegister(String& sb, uint32_t fla
     }
   }
 #else
-  ASMJIT_UNUSED(flags);
+  DebugUtils::unused(emitter, flags);
 #endif
 
   if (ASMJIT_LIKELY(rType <= BaseReg::kTypeMax)) {
@@ -99,7 +99,8 @@ public:
     : RACFGBuilder<X86RACFGBuilder>(pass),
       _archId(pass->cc()->archId()),
       _is64Bit(pass->gpSize() == 8),
-      _avxEnabled(pass->_avxEnabled) {}
+      _avxEnabled(pass->_avxEnabled) {
+  }
 
   inline Compiler* cc() const noexcept { return static_cast<Compiler*>(_cc); }
 

@@ -330,9 +331,8 @@ Error X86RACFGBuilder::onInst(InstNode* inst, uint32_t& controlType, RAInstBuild
 
   // Handle X86 constraints.
   if (hasGpbHiConstraint) {
-    for (uint32_t i = 0; i < ib.tiedRegCount(); i++) {
-      RATiedReg* tiedReg = ib[i];
-      tiedReg->_allocableRegs &= tiedReg->hasFlag(RATiedReg::kX86Gpb) ? 0x0Fu : 0xFFu;
+    for (RATiedReg& tiedReg : ib) {
+      tiedReg._allocableRegs &= tiedReg.hasFlag(RATiedReg::kX86Gpb) ? 0x0Fu : 0xFFu;
     }
   }
 

@@ -590,7 +590,7 @@ Error X86RACFGBuilder::onCall(FuncCallNode* call, RAInstBuilder& ib) noexcept {
 // ============================================================================
 
 Error X86RACFGBuilder::moveImmToRegArg(FuncCallNode* call, const FuncValue& arg, const Imm& imm_, BaseReg* out) noexcept {
-  ASMJIT_UNUSED(call);
+  DebugUtils::unused(call);
   ASMJIT_ASSERT(arg.isReg());
 
   Imm imm(imm_);

@@ -634,7 +634,7 @@ MovU32:
 // ============================================================================
 
 Error X86RACFGBuilder::moveImmToStackArg(FuncCallNode* call, const FuncValue& arg, const Imm& imm_) noexcept {
-  ASMJIT_UNUSED(call);
+  DebugUtils::unused(call);
   ASMJIT_ASSERT(arg.isStack());
 
   Mem mem = ptr(_pass->_sp.as<Gp>(), arg.stackOffset());

@@ -696,7 +696,7 @@ MovU32:
 // ============================================================================
 
 Error X86RACFGBuilder::moveRegToStackArg(FuncCallNode* call, const FuncValue& arg, const BaseReg& reg) noexcept {
-  ASMJIT_UNUSED(call);
+  DebugUtils::unused(call);
   ASMJIT_ASSERT(arg.isStack());
 
   Mem mem = ptr(_pass->_sp.as<Gp>(), arg.stackOffset());

@@ -1015,6 +1015,9 @@ void X86RAPass::onInit() noexcept {
   _availableRegs[Reg::kGroupMm  ] = Support::lsbMask<uint32_t>(_physRegCount.get(Reg::kGroupMm  ));
   _availableRegs[Reg::kGroupKReg] = Support::lsbMask<uint32_t>(_physRegCount.get(Reg::kGroupKReg));
 
+  _scratchRegIndexes[0] = uint8_t(Gp::kIdCx);
+  _scratchRegIndexes[1] = uint8_t(baseRegCount - 1);
+
   // The architecture specific setup makes implicitly all registers available. So
   // make unavailable all registers that are special and cannot be used in general.
   bool hasFP = _func->frame().hasPreservedFP();

@@ -1048,12 +1051,12 @@ Error X86RAPass::onEmitMove(uint32_t workId, uint32_t dstPhysId, uint32_t srcPhy
 
   const char* comment = nullptr;
 
 #ifndef ASMJIT_NO_LOGGING
   if (_loggerFlags & FormatOptions::kFlagAnnotations) {
     _tmpString.assignFormat("<MOVE> %s", workRegById(workId)->name());
     comment = _tmpString.data();
   }
 #endif
 
   return X86Internal::emitRegMove(cc()->as<Emitter>(), dst, src, wReg->typeId(), _avxEnabled, comment);
 }

@@ -1066,12 +1069,12 @@ Error X86RAPass::onEmitSwap(uint32_t aWorkId, uint32_t aPhysId, uint32_t bWorkId
   uint32_t sign = is64Bit ? uint32_t(RegTraits<Reg::kTypeGpq>::kSignature)
                           : uint32_t(RegTraits<Reg::kTypeGpd>::kSignature);
 
 #ifndef ASMJIT_NO_LOGGING
   if (_loggerFlags & FormatOptions::kFlagAnnotations) {
     _tmpString.assignFormat("<SWAP> %s, %s", waReg->name(), wbReg->name());
     cc()->setInlineComment(_tmpString.data());
   }
 #endif
 
   return cc()->emit(Inst::kIdXchg, Reg(sign, aPhysId), Reg(sign, bPhysId));
 }

@@ -1083,12 +1086,12 @@ Error X86RAPass::onEmitLoad(uint32_t workId, uint32_t dstPhysId) noexcept {
 
   const char* comment = nullptr;
 
 #ifndef ASMJIT_NO_LOGGING
   if (_loggerFlags & FormatOptions::kFlagAnnotations) {
     _tmpString.assignFormat("<LOAD> %s", workRegById(workId)->name());
     comment = _tmpString.data();
   }
 #endif
 
   return X86Internal::emitRegMove(cc()->as<Emitter>(), dstReg, srcMem, wReg->typeId(), _avxEnabled, comment);
 }

@@ -1100,12 +1103,12 @@ Error X86RAPass::onEmitSave(uint32_t workId, uint32_t srcPhysId) noexcept {
 
   const char* comment = nullptr;
 
 #ifndef ASMJIT_NO_LOGGING
   if (_loggerFlags & FormatOptions::kFlagAnnotations) {
     _tmpString.assignFormat("<SAVE> %s", workRegById(workId)->name());
     comment = _tmpString.data();
   }
 #endif
 
   return X86Internal::emitRegMove(cc()->as<Emitter>(), dstMem, srcReg, wReg->typeId(), _avxEnabled, comment);
 }
@@ -25,9 +25,12 @@
 #include <stdio.h>
 #include <string.h>
 
-#include "./asmjit_test_misc.h"
 #include "./asmjit_test_opcode.h"
 
+#ifndef ASMJIT_NO_COMPILER
+#include "./asmjit_test_misc.h"
+#endif
+
 using namespace asmjit;
 
 // ============================================================================

@@ -131,6 +134,7 @@ static void benchX86(uint32_t archId) noexcept {
     asmtest::generateOpcodes(a.as<x86::Emitter>());
   });
 
+#ifndef ASMJIT_NO_BUILDER
   BenchUtils::bench<x86::Builder>(code, archId, "[raw]", [](x86::Builder& cb) {
     asmtest::generateOpcodes(cb.as<x86::Emitter>());
   });

@@ -139,7 +143,9 @@ static void benchX86(uint32_t archId) noexcept {
     asmtest::generateOpcodes(cb.as<x86::Emitter>());
     cb.finalize();
   });
+#endif
 
+#ifndef ASMJIT_NO_COMPILER
   BenchUtils::bench<x86::Compiler>(code, archId, "[raw]", [](x86::Compiler& cc) {
     asmtest::generateAlphaBlend(cc);
   });

@@ -148,17 +154,15 @@ static void benchX86(uint32_t archId) noexcept {
     asmtest::generateAlphaBlend(cc);
     cc.finalize();
   });
+#endif
 }
 #endif
 
-int main(int argc, char* argv[]) {
-  ASMJIT_UNUSED(argc);
-  ASMJIT_UNUSED(argv);
-
+int main() {
 #ifdef ASMJIT_BUILD_X86
   benchX86(ArchInfo::kIdX86);
   benchX86(ArchInfo::kIdX64);
 #endif
 
   return 0;
 }
@@ -61,10 +61,7 @@ struct TestErrorHandler : public ErrorHandler {
 
 typedef void (*VoidFunc)(void);
 
-int main(int argc, char* argv[]) {
-  ASMJIT_UNUSED(argc);
-  ASMJIT_UNUSED(argv);
+int main() {
 
   TestErrorHandler eh;
 
   OpcodeDumpInfo infoList[] = {

@@ -87,11 +84,11 @@ int main(int argc, char* argv[]) {
     code.init(CodeInfo(info.archId));
     code.setErrorHandler(&eh);
 
 #ifndef ASMJIT_NO_LOGGING
     FileLogger logger(stdout);
     logger.addFlags(FormatOptions::kFlagMachineCode);
     code.setLogger(&logger);
 #endif
 
     x86::Assembler a(&code);
     asmtest::generateOpcodes(a.as<x86::Emitter>(), info.useRex1, info.useRex2);
@@ -71,7 +71,7 @@ static void dumpCpu(void) noexcept {
   // [X86]
   // --------------------------------------------------------------------------
 
 #if ASMJIT_ARCH_X86
   static const DumpCpuFeature x86FeaturesList[] = {
     { x86::Features::kNX , "NX" },
     { x86::Features::kMT , "MT" },

@@ -168,13 +168,13 @@ static void dumpCpu(void) noexcept {
   INFO("X86 Features:");
   dumpFeatures(cpu, x86FeaturesList, ASMJIT_ARRAY_SIZE(x86FeaturesList));
   INFO("");
 #endif
 
   // --------------------------------------------------------------------------
   // [ARM]
   // --------------------------------------------------------------------------
 
 #if ASMJIT_ARCH_ARM
   static const DumpCpuFeature armFeaturesList[] = {
     { arm::Features::kARMv6 , "ARMv6" },
     { arm::Features::kARMv7 , "ARMv7" },

@@ -198,17 +198,17 @@ static void dumpCpu(void) noexcept {
   INFO("ARM Features:");
   dumpFeatures(cpu, armFeaturesList, ASMJIT_ARRAY_SIZE(armFeaturesList));
   INFO("");
 #endif
 }
 
 // ============================================================================
 // [DumpSizeOf]
 // ============================================================================
 
-static void dumpSizeOf(void) noexcept {
-  #define DUMP_TYPE(...) \
-    INFO(" %-26s: %u", #__VA_ARGS__, uint32_t(sizeof(__VA_ARGS__)))
+#define DUMP_TYPE(...) \
+  INFO(" %-26s: %u", #__VA_ARGS__, uint32_t(sizeof(__VA_ARGS__)))
 
+static void dumpSizeOf(void) noexcept {
   INFO("Size of C++ types:");
   DUMP_TYPE(int8_t);
   DUMP_TYPE(int16_t);

@@ -260,7 +260,7 @@ static void dumpSizeOf(void) noexcept {
   DUMP_TYPE(FuncArgsAssignment);
   INFO("");
 
 #ifndef ASMJIT_NO_BUILDER
   INFO("Size of builder classes:");
   DUMP_TYPE(BaseBuilder);
   DUMP_TYPE(BaseNode);

@@ -274,18 +274,18 @@ static void dumpSizeOf(void) noexcept {
   DUMP_TYPE(CommentNode);
   DUMP_TYPE(SentinelNode);
   INFO("");
 #endif
 
 #ifndef ASMJIT_NO_COMPILER
   INFO("Size of compiler classes:");
   DUMP_TYPE(BaseCompiler);
   DUMP_TYPE(FuncNode);
   DUMP_TYPE(FuncRetNode);
   DUMP_TYPE(FuncCallNode);
   INFO("");
 #endif
 
 #ifdef ASMJIT_BUILD_X86
   INFO("Size of x86-specific classes:");
   DUMP_TYPE(x86::Assembler);
 #ifndef ASMJIT_NO_BUILDER

@@ -299,11 +299,11 @@ static void dumpSizeOf(void) noexcept {
   DUMP_TYPE(x86::InstDB::OpSignature);
   DUMP_TYPE(x86::InstDB::InstSignature);
   INFO("");
 #endif
 
-  #undef DUMP_TYPE
 }
 
+#undef DUMP_TYPE
+
 // ============================================================================
 // [Main]
 // ============================================================================

@@ -314,11 +314,11 @@ static void onBeforeRun(void) noexcept {
 }
 
 int main(int argc, const char* argv[]) {
 #if defined(ASMJIT_BUILD_DEBUG)
   const char buildType[] = "Debug";
 #else
   const char buildType[] = "Release";
 #endif
 
   INFO("AsmJit Unit-Test v%u.%u.%u [Arch=%s] [Mode=%s]\n\n",
     unsigned((ASMJIT_LIBRARY_VERSION >> 16) ),
@@ -75,6 +75,7 @@ static void makeRawFunc(x86::Emitter* emitter) noexcept {
   emitter->emitEpilog(frame);
 }
 
+#ifndef ASMJIT_NO_COMPILER
 // This function works with x86::Compiler, provided for comparison.
 static void makeCompiledFunc(x86::Compiler* cc) noexcept {
   x86::Gp dst = cc->newIntPtr();

@@ -95,13 +96,19 @@ static void makeCompiledFunc(x86::Compiler* cc) noexcept {
   cc->movdqu(x86::ptr(dst), vec0);
   cc->endFunc();
 }
+#endif
 
 static uint32_t testFunc(JitRuntime& rt, uint32_t emitterType) noexcept {
+#ifndef ASMJIT_NO_LOGGING
   FileLogger logger(stdout);
+#endif
 
   CodeHolder code;
   code.init(rt.codeInfo());
 
+#ifndef ASMJIT_NO_LOGGING
   code.setLogger(&logger);
+#endif
 
   Error err = kErrorOk;
   switch (emitterType) {

@@ -112,6 +119,7 @@ static uint32_t testFunc(JitRuntime& rt, uint32_t emitterType) noexcept {
       break;
     }
 
+#ifndef ASMJIT_NO_BUILDER
     case BaseEmitter::kTypeBuilder: {
       printf("Using x86::Builder:\n");
       x86::Builder cb(&code);

@@ -124,7 +132,9 @@ static uint32_t testFunc(JitRuntime& rt, uint32_t emitterType) noexcept {
       }
       break;
     }
+#endif
 
+#ifndef ASMJIT_NO_COMPILER
     case BaseEmitter::kTypeCompiler: {
       printf("Using x86::Compiler:\n");
       x86::Compiler cc(&code);

@@ -137,6 +147,7 @@ static uint32_t testFunc(JitRuntime& rt, uint32_t emitterType) noexcept {
       }
       break;
     }
+#endif
   }
 
   // Add the code generated to the runtime.

@@ -161,16 +172,19 @@ static uint32_t testFunc(JitRuntime& rt, uint32_t emitterType) noexcept {
   return !(out[0] == 5 && out[1] == 8 && out[2] == 4 && out[3] == 9);
 }
 
-int main(int argc, char* argv[]) {
-  ASMJIT_UNUSED(argc);
-  ASMJIT_UNUSED(argv);
+int main() {
 
   unsigned nFailed = 0;
   JitRuntime rt;
 
   nFailed += testFunc(rt, BaseEmitter::kTypeAssembler);
 
+#ifndef ASMJIT_NO_BUILDER
   nFailed += testFunc(rt, BaseEmitter::kTypeBuilder);
+#endif
 
+#ifndef ASMJIT_NO_COMPILER
   nFailed += testFunc(rt, BaseEmitter::kTypeCompiler);
+#endif
 
   if (!nFailed)
     printf("[PASSED] All tests passed\n");
@@ -65,7 +65,7 @@ class SimpleErrorHandler : public ErrorHandler {
 public:
   SimpleErrorHandler() : _err(kErrorOk) {}
   virtual void handleError(Error err, const char* message, BaseEmitter* origin) {
-    ASMJIT_UNUSED(origin);
+    DebugUtils::unused(origin);
     _err = err;
     _message.assignString(message);
   }

@@ -154,7 +154,7 @@ void X86TestApp::showInfo() {
 }
 
 int X86TestApp::run() {
 #ifndef ASMJIT_NO_LOGGING
   uint32_t kFormatFlags = FormatOptions::kFlagMachineCode |
                           FormatOptions::kFlagExplainImms |
                           FormatOptions::kFlagRegCasts |

@@ -167,7 +167,7 @@ int X86TestApp::run() {
 
   StringLogger stringLogger;
   stringLogger.addFlags(kFormatFlags);
 #endif
 
   for (X86Test* test : _tests) {
     JitRuntime runtime;

@@ -177,7 +177,7 @@ int X86TestApp::run() {
     code.init(runtime.codeInfo());
     code.setErrorHandler(&errorHandler);
 
 #ifndef ASMJIT_NO_LOGGING
     if (_verbose) {
       code.setLogger(&fileLogger);
     }

@@ -185,13 +185,13 @@ int X86TestApp::run() {
       stringLogger.clear();
       code.setLogger(&stringLogger);
     }
 #endif
 
     printf("[Test] %s", test->name());
 
 #ifndef ASMJIT_NO_LOGGING
     if (_verbose) printf("\n");
 #endif
 
     x86::Compiler cc(&code);
     test->compile(cc);

@@ -202,7 +202,7 @@ int X86TestApp::run() {
     if (!err)
       err = cc.finalize();
 
 #ifndef ASMJIT_NO_LOGGING
     if (_dumpAsm) {
       if (!_verbose) printf("\n");
 

@@ -210,7 +210,7 @@ int X86TestApp::run() {
       cc.dump(sb, kFormatFlags);
       printf("%s", sb.data());
     }
 #endif
 
     if (err == kErrorOk)
       err = runtime.add(&func, &code);

@@ -230,9 +230,9 @@ int X86TestApp::run() {
     else {
       if (!_verbose) printf(" [FAILED]\n");
 
 #ifndef ASMJIT_NO_LOGGING
       if (!_verbose) printf("%s", stringLogger.data());
 #endif
 
       printf("[Status]\n");
       printf("  Returned: %s\n", result.data());

@@ -249,9 +249,9 @@ int X86TestApp::run() {
     else {
       if (!_verbose) printf(" [FAILED]\n");
 
 #ifndef ASMJIT_NO_LOGGING
       if (!_verbose) printf("%s", stringLogger.data());
 #endif
 
       printf("[Status]\n");
       printf("  ERROR 0x%08X: %s\n", unsigned(err), errorHandler._message.data());
@@ -458,8 +458,7 @@ public:
   }
 
   virtual bool run(void* _func, String& result, String& expect) {
-    ASMJIT_UNUSED(result);
-    ASMJIT_UNUSED(expect);
+    DebugUtils::unused(result, expect);
 
     typedef void(*Func)(void);
     Func func = ptr_as_func<Func>(_func);

@@ -489,8 +488,7 @@ public:
   }
 
   virtual bool run(void* _func, String& result, String& expect) {
-    ASMJIT_UNUSED(result);
-    ASMJIT_UNUSED(expect);
+    DebugUtils::unused(result, expect);
 
     typedef void (*Func)(void);
     Func func = ptr_as_func<Func>(_func);

@@ -598,8 +596,7 @@ public:
   }
 
   virtual bool run(void* _func, String& result, String& expect) {
-    ASMJIT_UNUSED(result);
-    ASMJIT_UNUSED(expect);
+    DebugUtils::unused(result, expect);
 
     typedef void (*Func)(void);
     Func func = ptr_as_func<Func>(_func);
@@ -760,6 +757,125 @@ public:
   }
 };
 
+// ============================================================================
+// [X86Test_JumpTable]
+// ============================================================================
+
+class X86Test_JumpTable : public X86Test {
+public:
+  bool _annotated;
+
+  X86Test_JumpTable(bool annotated)
+    : X86Test("X86Test_JumpTable"),
+      _annotated(annotated) {
+    _name.assignFormat("JumpTable {%s}", annotated ? "Annotated" : "Unknown Reg/Mem");
+  }
+
+  enum Operator {
+    kOperatorAdd = 0,
+    kOperatorSub = 1,
+    kOperatorMul = 2,
+    kOperatorDiv = 3
+  };
+
+  static void add(X86TestApp& app) {
+    app.add(new X86Test_JumpTable(false));
+    app.add(new X86Test_JumpTable(true));
+  }
+
+  virtual void compile(x86::Compiler& cc) {
+    cc.addFunc(FuncSignatureT<float, float, float, uint32_t>(CallConv::kIdHost));
+
+    x86::Xmm a = cc.newXmmSs("a");
+    x86::Xmm b = cc.newXmmSs("b");
+    x86::Gp op = cc.newUInt32("op");
+    x86::Gp target = cc.newIntPtr("target");
+    x86::Gp offset = cc.newIntPtr("offset");
+
+    Label L_End = cc.newLabel();
+
+    Label L_Table = cc.newLabel();
+    Label L_Add = cc.newLabel();
+    Label L_Sub = cc.newLabel();
+    Label L_Mul = cc.newLabel();
+    Label L_Div = cc.newLabel();
+
+    cc.setArg(0, a);
+    cc.setArg(1, b);
+    cc.setArg(2, op);
+
+    cc.lea(offset, x86::ptr(L_Table));
+    if (cc.is64Bit())
+      cc.movsxd(target, x86::dword_ptr(offset, op.cloneAs(offset), 2));
+    else
+      cc.mov(target, x86::dword_ptr(offset, op.cloneAs(offset), 2));
+    cc.add(target, offset);
+
+    // JumpAnnotation allows to annotate all possible jump targets of
+    // instructions where it cannot be deduced from operands.
+    if (_annotated) {
+      JumpAnnotation* annotation = cc.newJumpAnnotation();
+      annotation->addLabel(L_Add);
+      annotation->addLabel(L_Sub);
+      annotation->addLabel(L_Mul);
+      annotation->addLabel(L_Div);
+      cc.jmp(target, annotation);
+    }
+    else {
+      cc.jmp(target);
+    }
+
+    cc.bind(L_Add);
+    cc.addss(a, b);
+    cc.jmp(L_End);
+
+    cc.bind(L_Sub);
+    cc.subss(a, b);
+    cc.jmp(L_End);
+
+    cc.bind(L_Mul);
+    cc.mulss(a, b);
+    cc.jmp(L_End);
+
+    cc.bind(L_Div);
+    cc.divss(a, b);
+
+    cc.bind(L_End);
+    cc.ret(a);
+
+    cc.endFunc();
+
+    cc.bind(L_Table);
+    cc.embedLabelDelta(L_Add, L_Table, 4);
+    cc.embedLabelDelta(L_Sub, L_Table, 4);
+    cc.embedLabelDelta(L_Mul, L_Table, 4);
+    cc.embedLabelDelta(L_Div, L_Table, 4);
+  }
+
+  virtual bool run(void* _func, String& result, String& expect) {
+    typedef float (*Func)(float, float, uint32_t);
+    Func func = ptr_as_func<Func>(_func);
+
+    float results[4];
+    float expected[4];
+
+    results[0] = func(33.0f, 14.0f, kOperatorAdd);
+    results[1] = func(33.0f, 14.0f, kOperatorSub);
+    results[2] = func(10.0f, 6.0f, kOperatorMul);
+    results[3] = func(80.0f, 8.0f, kOperatorDiv);
+
+    expected[0] = 47.0f;
+    expected[1] = 19.0f;
+    expected[2] = 60.0f;
+    expected[3] = 10.0f;
+
+    result.assignFormat("ret={%f, %f, %f, %f}", results[0], results[1], results[2], results[3]);
+    expect.assignFormat("ret={%f, %f, %f, %f}", expected[0], expected[1], expected[2], expected[3]);
+
+    return result == expect;
+  }
+};
+
 // ============================================================================
 // [X86Test_AllocBase]
 // ============================================================================
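The table emitted with embedLabelDelta() stores, per operator, the 32-bit distance of the target label from L_Table, which is what the movsxd/add sequence above turns back into an absolute address at runtime. A plain C++ model of that lookup, assuming only the layout shown in the test:

    #include <cstdint>

    // Each entry is `label - tableBase` truncated to 32 bits; the dispatch
    // address is recovered by adding the entry back to the table's address.
    static const void* jumpTableTarget(const int32_t* tableBase, uint32_t op) {
      const uint8_t* base = reinterpret_cast<const uint8_t*>(tableBase);
      return base + tableBase[op]; // mirrors: lea offset, [L_Table]; add target, table[op]
    }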
@@ -3984,6 +4100,7 @@ int main(int argc, char* argv[]) {
   app.addT<X86Test_JumpMany>();
   app.addT<X86Test_JumpUnreachable1>();
   app.addT<X86Test_JumpUnreachable2>();
+  app.addT<X86Test_JumpTable>();
 
   // Alloc tests.
   app.addT<X86Test_AllocBase>();
@@ -52,19 +52,21 @@ static void fail(const char* message, Error err) {
   exit(1);
 }
 
-int main(int argc, char* argv[]) {
-  ASMJIT_UNUSED(argc);
-  ASMJIT_UNUSED(argv);
+int main() {
 
   CodeInfo codeInfo(ArchInfo::kIdHost);
   JitAllocator allocator;
 
+#ifndef ASMJIT_NO_LOGGING
   FileLogger logger(stdout);
   logger.setIndentation(FormatOptions::kIndentationCode, 2);
+#endif
 
   CodeHolder code;
   code.init(codeInfo);
 
+#ifndef ASMJIT_NO_LOGGING
   code.setLogger(&logger);
+#endif
 
   Section* dataSection;
   Error err = code.newSection(&dataSection, ".data", SIZE_MAX, 0, 8);