diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 159a6e3..5293bb3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,14 +3,22 @@ name: CI jobs: build: + timeout-minutes: 60 runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Use Node.js 20.x + with: + submodules: recursive + - name: Use Rust + uses: raftario/setup-rust-action@v1 + with: + rust-channel: nightly + rust-host: x86_64-unknown-linux-gnu + - name: Use Node.js 22.x uses: actions/setup-node@v4 with: - node-version: 20.x + node-version: 22.x - name: Use Python 3.10 uses: actions/setup-python@v5 with: @@ -20,11 +28,6 @@ jobs: with: path: .venv key: ${{ runner.os }}-venv-3_10 - - uses: actions/checkout@v4 - with: - repository: pytorch/executorch - path: executorch - submodules: recursive - name: Cached build uses: actions/cache@v2 with: @@ -51,6 +54,6 @@ jobs: - name: Install dependencies run: yarn install - name: Build - run: yarn build --CDCMAKE_PREFIX_PATH=$(realpath executorch/cmake-out) --CDEXECUTORCH_SRC_ROOT=$(realpath executorch) + run: yarn build - name: Run tests - run: yarn test + run: yarn test-all diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..787d1bb --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,112 @@ +name: Release +on: + push: + tags: + - 'v*' + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.event.repository.name }}-${{ github.ref_name }} + cancel-in-progress: true + +jobs: + build: + timeout-minutes: 120 + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + arch: [x86_64, aarch64] + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Use Rust + uses: raftario/setup-rust-action@v1 + with: + rust-channel: nightly + rust-host: ${{ matrix.arch }}-${{ matrix.os == 'windows-latest' && 'pc-windows-gnullvm' || matrix.os == 'macos-latest' && 
'apple-darwin' || 'unknown-linux-gnu' }} + - name: Use Node.js 22.x + uses: actions/setup-node@v4 + with: + node-version: 22.x + - name: Use Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + - name: Cached pip + uses: actions/cache@v4 + with: + path: .venv + key: ${{ runner.os }}-venv-3_10 + - name: Download QNN SDK + if: ${{ matrix.os == 'windows-latest' }} + shell: bash + env: + QNN_VERSION: '2.26.0.240828' + run: | + curl -L -o qnn_sdk.zip https://softwarecenter.qualcomm.com/api/download/software/qualcomm_neural_processing_sdk/v${QNN_VERSION}.zip + unzip qnn_sdk.zip + rm qnn_sdk.zip + - name: Build executorch + shell: bash + env: + QNN_SDK_ROOT: ${{ github.workspace }}/qairt + OS: ${{ matrix.os }} + ARCH: ${{ matrix.arch }} + run: | + python -m venv .venv + source .venv/bin/activate 2>/dev/null || source .venv/Scripts/activate + cd executorch + pip install tomli zstd setuptools wheel + pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/cpu + if [ ! -d cmake-out ]; then + ./install_requirements.sh + EXTRA_CMAKE_ARGS="" + if [[ "$OS" == "windows-latest" ]]; then + EXTRA_CMAKE_ARGS="-DCMAKE_TOOLCHAIN_FILE=../cmake/mingw-w64-${ARCH}.clang.toolchain.cmake" + if [[ "$ARCH" == "aarch64" ]]; then + EXTRA_CMAKE_ARGS="${EXTRA_CMAKE_ARGS} -DEXECUTORCH_BUILD_QNN=ON" + fi + elif [[ "$OS" == "macos-latest" ]]; then + if [[ "$ARCH" == "x86_64" ]]; then + EXTRA_CMAKE_ARGS="-DCMAKE_OSX_ARCHITECTURES=x86_64" + else + EXTRA_CMAKE_ARGS="-DCMAKE_OSX_ARCHITECTURES=arm64" + fi + EXTRA_CMAKE_ARGS="${EXTRA_CMAKE_ARGS} -DEXECUTORCH_BUILD_COREML=ON" + elif [[ "$OS" == "ubuntu-latest" ]] && [[ "$ARCH" == "aarch64" ]]; then + EXTRA_CMAKE_ARGS="-DCMAKE_TOOLCHAIN_FILE=../cmake/aarch64-linux-gnu.clang.toolchain.cmake" + fi + cmake \ + -S . 
\ + -B cmake-out \ + -DCMAKE_INSTALL_PREFIX=cmake-out \ + -DEXECUTORCH_BUILD_XNNPACK=ON \ + -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \ + -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \ + -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \ + -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \ + -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \ + -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \ + -DQNN_SDK_ROOT=$QNN_SDK_ROOT \ + -DEXECUTORCH_BUILD_CPUINFO=ON \ + -DEXECUTORCH_BUILD_XNNPACK=ON \ + -DEXECUTORCH_BUILD_PTHREADPOOL=ON \ + -DEXECUTORCH_BUILD_SDK=ON \ + $EXTRA_CMAKE_ARGS + cmake --build cmake-out --target install --config Release -j$(nproc) + fi + - name: Install dependencies + run: yarn install + - name: Build + shell: bash + env: + TARGET: ${{ matrix.arch }}-${{ matrix.os == 'windows-latest' && 'pc-windows-gnullvm' || matrix.os == 'macos-latest' && 'apple-darwin' || 'unknown-linux-gnu' }} + run: yarn build --target $TARGET + - name: Upload build artifacts + uses: actions/upload-artifact@v4 + with: + name: bin-${{ matrix.os }}-${{ matrix.arch }} + path: bin diff --git a/.gitignore b/.gitignore index c89ecf4..b74212a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,11 @@ -node_modules/ -build/ -bin/ -executorch/ -*.js -.venv/ \ No newline at end of file +target +*.node +*.dll +*.dylib +**/node_modules +**/.DS_Store +npm-debug.log +cargo.log +cross.log +lib/*.js +bin/**/* \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..4d11d38 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "executorch"] + path = executorch + url = https://github.com/mybigday/executorch.git + branch = windows-qnn diff --git a/CMakeLists.txt b/CMakeLists.txt deleted file mode 100644 index dc8476f..0000000 --- a/CMakeLists.txt +++ /dev/null @@ -1,275 +0,0 @@ -cmake_minimum_required(VERSION 3.15) -cmake_policy(SET CMP0091 NEW) -cmake_policy(SET CMP0042 NEW) - -project(node-executorch) - -set(CMAKE_CXX_STANDARD 17) - -if (NOT CMAKE_BUILD_TYPE) - set(CMAKE_BUILD_TYPE Release) -endif() - -if(NOT DEFINED 
napi_build_version) - set(napi_build_version 6) -endif() -add_definitions(-DNAPI_VERSION=${napi_build_version}) -message(STATUS "NAPI_VERSION: ${napi_build_version}") -string(TOLOWER ${CMAKE_SYSTEM_NAME} PLATFORM) -string(TOLOWER ${CMAKE_SYSTEM_PROCESSOR} ARCH) - -# normalize the platform name to match the node platform names -if(PLATFORM STREQUAL "windows") - set(PLATFORM "win32") -endif() - -# normalize the arch name to match the node arch names -if(ARCH STREQUAL "x86_64" OR ARCH STREQUAL "amd64") - set(ARCH "x64") -elseif(ARCH STREQUAL "i386") - set(ARCH "x86") -elseif(ARCH STREQUAL "arm64x" OR ARCH STREQUAL "aarch64") - set(ARCH "arm64") -endif() - -set(OUTPUT_BIN_DIR ${CMAKE_CURRENT_SOURCE_DIR}/bin/${PLATFORM}/${ARCH}) - -message(STATUS "Platform: ${PLATFORM}") -message(STATUS "Arch: ${ARCH}") - -include(FetchContent) -include(cmake/Utils.cmake) - -file( - GLOB SOURCES - src/addon.cc - src/Module.h - src/Module.cpp - src/Tensor.h - src/Tensor.cpp - src/Sampler.h - src/Sampler.cpp - src/utils.h - src/utils.cpp - "${EXECUTORCH_SRC_ROOT}/examples/models/llama2/sampler/sampler.cpp" -) - -if(MINGW) - if (CMAKE_HOST_SYSTEM_NAME STREQUAL "Windows") - string(REPLACE "/DELAYLOAD:NODE.EXE" "" CMAKE_SHARED_LINKER_FLAGS ${CMAKE_SHARED_LINKER_FLAGS}) - endif() - file(GLOB WIN_DELAY_LOAD node_modules/cmake-js/src/win_delay_load_hook.cc) - list(APPEND SOURCES ${WIN_DELAY_LOAD}) - - if(NOT CMAKE_JS_NODELIB_TARGET) - set(CMAKE_JS_NODELIB_TARGET "${CMAKE_BINARY_DIR}/node.lib") - endif() - if(NOT CMAKE_JS_NODELIB_DEF) - set(CMAKE_JS_NODELIB_DEF "${CMAKE_SOURCE_DIR}/node_modules/node-api-headers/def/node_api.def") - endif() - execute_process(COMMAND ${TOOLCHAIN_PREFIX}-dlltool -d ${CMAKE_JS_NODELIB_DEF} -l ${CMAKE_JS_NODELIB_TARGET}) -endif() - -if(NOT EXECUTORCH_SRC_ROOT) - FetchContent_Declare( - executorch - GIT_REPOSITORY https://github.com/pytorch/executorch.git - GIT_TAG main - ) - FetchContent_MakeAvailable(executorch) - set(EXECUTORCH_SRC_ROOT ${executorch_SOURCE_DIR}) 
-endif() - -find_package(executorch REQUIRED CONFIG) -set(link_libraries executorch extension_module extension_data_loader) - -# copy assets -add_custom_target( - copy_assets ALL - COMMAND ${CMAKE_COMMAND} -E make_directory ${OUTPUT_BIN_DIR} - COMMENT "Creating output directory" -) - -# copy libextension_module.so or libextension_module.dll -add_custom_command( - TARGET copy_assets - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy ${LIB_extension_module} ${OUTPUT_BIN_DIR} - COMMENT "Copying extension_module" -) - -if(TARGET optimized_native_cpu_ops_lib) - message(STATUS "Optimized kernels enabled") - list( - APPEND - link_libraries - optimized_native_cpu_ops_lib - optimized_kernels - portable_kernels - cpublas - eigen_blas - ) - target_link_options_shared_lib(optimized_native_cpu_ops_lib) -else() - list(APPEND link_libraries portable_ops_lib portable_kernels) - target_link_options_shared_lib(portable_ops_lib) -endif() - -if(TARGET quantized_ops_lib) - message(STATUS "Quantized kernels enabled") - list(APPEND link_libraries quantized_kernels quantized_ops_lib) - target_link_options_shared_lib(quantized_ops_lib) -endif() - -if(TARGET cpuinfo) - list(APPEND link_libraries cpuinfo) -endif() - -if(TARGET pthreadpool) - list(APPEND link_libraries pthreadpool) -endif() - -# custom_ops -if(TARGET custom_ops) - message(STATUS "Custom ops enabled") - list(APPEND link_libraries custom_ops) - target_link_options_shared_lib(custom_ops) -endif() - -# XNNPACK -if(TARGET xnnpack_backend) - message(STATUS "XNNPACK backend enabled") - list(APPEND link_libraries xnnpack_backend XNNPACK) - target_link_options_shared_lib(xnnpack_backend) -endif() - -# Vulkan backend -if(TARGET vulkan_backend) - message(STATUS "Vulkan backend enabled") - list(APPEND link_libraries vulkan_backend) - target_link_options_shared_lib(vulkan_backend) -endif() - -# Qnn backend -if(TARGET qnn_executorch_backend) - message(STATUS "QNN backend enabled") - list(APPEND link_libraries qnn_executorch_backend) - 
target_link_options_shared_lib(qnn_executorch_backend) - add_custom_command( - TARGET copy_assets - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy ${LIB_qnn_executorch_backend} ${OUTPUT_BIN_DIR} - COMMENT "Copying assets" - ) - if(QNN_SDK_ROOT) - # copy QNN SDK libraries - set(QNN_PLATFORM "") - if(WIN32 AND ARCH STREQUAL "arm64") - set(QNN_PLATFORM "aarch64-windows-msvc") - elseif(LINUX AND ARCH STREQUAL "arm64") - set(QNN_PLATFORM "aarch64-ubuntu-gcc9.4") - endif() - if(QNN_PLATFORM) - file( - GLOB QNN_LIBS - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/libQnnSystem.so - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/libQnnSaver.so - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/libQnnCpu.so - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/libQnnDsp.so - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/libQnnDspV66Stub.so - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/libQnnHtp.so - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/libQnnHtpPrepare.so - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/libQnnHtpV68Stub.so - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnSystem.dll - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnSaver.dll - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnCpu.dll - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnDsp.dll - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnDspV66Stub.dll - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnHtp.dll - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnHtpPrepare.dll - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnHtpV68Stub.dll - ${QNN_SDK_ROOT}/lib/${QNN_PLATFORM}/QnnHtpV73Stub.dll - ${QNN_SDK_ROOT}/lib/hexagon-v66/unsigned/libQnnHtpV66Skel.so - ${QNN_SDK_ROOT}/lib/hexagon-v68/unsigned/libQnnHtpV68Skel.so - ${QNN_SDK_ROOT}/lib/hexagon-v73/unsigned/libQnnHtpV73Skel.so - ${QNN_SDK_ROOT}/lib/hexagon-v73/unsigned/libqnnhtpv73.cat - - ) - add_custom_command( - TARGET copy_assets - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy ${QNN_LIBS} ${OUTPUT_BIN_DIR} - COMMENT "Copying assets" - ) - endif() - endif() -endif() - -# MPS backend -if(TARGET mpsdelegate) - message(STATUS "MPS backend enabled") - list( - APPEND - link_libraries - mpsdelegate - 
"-framework Foundation" - "-weak_framework MetalPerformanceShaders" - "-weak_framework MetalPerformanceShadersGraph" - "-weak_framework Metal" - ) - target_link_options_shared_lib(mpsdelegate) -endif() - -include_directories( - ${executorch_INCLUDE_DIRS} - "${EXECUTORCH_SRC_ROOT}/.." - ${CMAKE_JS_INC} -) - -if(CMAKE_BUILD_TYPE STREQUAL "Release") - add_compile_options(-O3) -endif() - -add_library(${PROJECT_NAME} SHARED ${SOURCES} ${CMAKE_JS_SRC}) -set_target_properties(${PROJECT_NAME} PROPERTIES PREFIX "" SUFFIX ".node") -target_link_libraries( - ${PROJECT_NAME} - ${link_libraries} - ${CMAKE_JS_LIB} -) - -if(CMAKE_JS_NODELIB_TARGET AND MINGW) - set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS "-lnode -L ${CMAKE_BINARY_DIR}") -endif() - -add_dependencies(copy_assets ${PROJECT_NAME}) - -add_custom_command( - TARGET copy_assets - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy $ ${OUTPUT_BIN_DIR}/$ - COMMENT "Copying assets" -) - -if(MINGW) - # copy libunwind.dll, libc++.dll form the MinGW directory - find_program(MINGW_CC ${CMAKE_C_COMPILER}) - get_filename_component(MINGW_DIR ${MINGW_CC} DIRECTORY) - get_filename_component(MINGW_DIR ${MINGW_DIR} DIRECTORY) - if(ARCH STREQUAL "arm64") - set(MINGW_DIR ${MINGW_DIR}/aarch64-w64-mingw32) - else() - set(MINGW_DIR ${MINGW_DIR}/x86_64-w64-mingw32) - endif() - file( - GLOB MINGW_DLLS - ${MINGW_DIR}/bin/libunwind.dll - ${MINGW_DIR}/bin/libc++.dll - ) - add_custom_command( - TARGET copy_assets - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy ${MINGW_DLLS} ${OUTPUT_BIN_DIR} - COMMENT "Copying MinGW DLLs" - ) -endif() diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..0b2d425 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,320 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "build-target" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "832133bbabbbaa9fbdba793456a2827627a7d2b8fb96032fa1e7666d7895832b" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "cc" +version = "1.0.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cpp" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa65869ef853e45c60e9828aa08cdd1398cb6e13f3911d9cb2a079b144fcd64" +dependencies = [ + "cpp_macros", +] + +[[package]] +name = "cpp_build" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e361fae2caf9758164b24da3eedd7f7d7451be30d90d8e7b5d2be29a2f0cf5b" +dependencies = [ + "cc", + "cpp_common", + "lazy_static", + "proc-macro2", + "regex", + "syn", + "unicode-xid", +] + +[[package]] +name = "cpp_common" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e1a2532e4ed4ea13031c13bc7bc0dbca4aae32df48e9d77f0d1e743179f2ea1" +dependencies = [ + "lazy_static", + "proc-macro2", + "syn", +] + +[[package]] +name = "cpp_macros" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"47ec9cc90633446f779ef481a9ce5a0077107dd5b87016440448d908625a83fd" +dependencies = [ + "aho-corasick", + "byteorder", + "cpp_common", + "lazy_static", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "libloading" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +dependencies = [ + "cfg-if", + "windows-targets", +] + +[[package]] +name = "memchr" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" + +[[package]] +name = "neon" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d75440242411c87dc39847b0e33e961ec1f10326a9d8ecf9c1ea64a3b3c13dc" +dependencies = [ + "libloading", + "neon-macros", + "once_cell", + "semver", + "send_wrapper", + "smallvec", +] + +[[package]] +name = "neon-macros" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6813fde79b646e47e7ad75f480aa80ef76a5d9599e2717407961531169ee38b" +dependencies = [ + "quote", + "syn", + "syn-mid", +] + +[[package]] +name = "node-executorch" +version = "0.1.0-alpha.1" +dependencies = [ + "build-target", + "cpp", + "cpp_build", + "cpp_macros", + "libc", + "neon", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = 
"proc-macro2" +version = "1.0.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "regex" +version = "1.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" + +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" + +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "syn" +version = "2.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn-mid" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5dc35bb08dd1ca3dfb09dce91fd2d13294d6711c88897d9a9d60acf39bce049" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..01bd028 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "node-executorch" +version = "0.1.0-alpha.1" +description = "Node.js binding for ExecuTorch" +authors = ["Hans "] +license = "BSD-3-Clause" +edition = "2021" +exclude = ["index.node"] +build = "build.rs" + +[lib] +crate-type = ["cdylib"] +test = false + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +neon = { version = "1", default-features = false, features = ["napi-6"] } +cpp = "0.5" +cpp_macros = "0.5" +libc = "0.2" + +[build-dependencies] +cpp_build = "0.5" +build-target = "0.4" diff --git a/README.md b/README.md index 9e19753..6ba8ccb 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,11 @@ -executorch-node -=== +# node-executorch -Node.js binding for ExecuTorch +**node-executorch:** Node.js binding for ExecuTorch -# Installation +# Install ```sh -npm i node-executorch +npm install node-executorch ``` # Usage @@ -25,22 +24,89 @@ input.dispose(); 
model.dispose(); ``` -# Build From Source +## Building node-executorch -Note: For Windows currently only support cross-compile. +Building node-executorch requires a [supported version of Node and Rust](https://github.com/neon-bindings/neon#platform-support). -1. Fetch ExecuTorch Source -2. Build ExecuTorch and install to any path -3. Build this project +To run the build, run: ```sh -# Install dependency -yarn +$ npm run build +``` + +This command uses the [@neon-rs/cli](https://www.npmjs.com/package/@neon-rs/cli) utility to assemble the binary Node addon from the output of `cargo`. + +## Available Scripts + +In the project directory, you can run: + +#### `yarn install` + +Installs the project, including running `yarn build`. + +#### `yarn build` + +Builds the Node addon (`bin///executorch.node`) from source, generating a release build with `cargo --release`. + +Additional [`cargo build`](https://doc.rust-lang.org/cargo/commands/cargo-build.html) arguments may be passed to `npm run build` and similar commands. For example, to enable a [cargo feature](https://doc.rust-lang.org/cargo/reference/features.html): + +``` +yarn build --feature=beetle +``` + +#### `yarn debug` + +Similar to `yarn build` but generates a debug build with `cargo`. + +#### `yarn cross` + +Similar to `yarn build` but uses [cross-rs](https://github.com/cross-rs/cross) to cross-compile for another platform. Use the [`CARGO_BUILD_TARGET`](https://doc.rust-lang.org/cargo/reference/config.html#buildtarget) environment variable to select the build target. + +#### `yarn test` + +Runs the unit tests by calling `cargo test`. You can learn more about [adding tests to your Rust code](https://doc.rust-lang.org/book/ch11-01-writing-tests.html) from the [Rust book](https://doc.rust-lang.org/book/). 
+ +## Project Layout + +The directory structure of this project is: -# Build -yarn build --CDCMAKE_PREFIX_PATH=/path/to/install/dir \ - --CDEXECUTORCH_SRC_ROOT=/path/to/executorch/src_root ``` +node-executorch/ +├── Cargo.toml +├── README.md +├── src/ +| └── lib.rs +├── lib/ +| ├── index.ts +| └── binding.ts +├── bin/ +| └── //executorch.node +├── scripts/ +| └── postneon-dist.js +├── package.json +└── target/ +``` + +| Entry | Purpose | +|----------------|------------------------------------------------------------------------------------------------------------------------------------------| +| `Cargo.toml` | The Cargo [manifest file](https://doc.rust-lang.org/cargo/reference/manifest.html), which informs the `cargo` command. | +| `README.md` | This file. | +| `src/` | The directory tree containing the Rust source code for the project. | +| `lib.rs` | Entry point for the Rust source code. | +| `lib/` | The directory tree containing the TypeScript source code for the project. | +| `executorch.node` | [Node addon](https://nodejs.org/api/addons.html) generated by the build. | +| `scripts/` | Utility scripts for the project. | +| `postneon-dist.js` | Utility script for post-processing the Neon build. | +| `package.json` | The npm [manifest file](https://docs.npmjs.com/cli/v7/configuring-npm/package-json), which informs the `npm` command. | +| `target/` | Binary artifacts generated by the Rust build. | + +## Learn More + +Learn more about: + +- [Neon](https://neon-bindings.com). +- [Rust](https://www.rust-lang.org). +- [Node](https://nodejs.org). 
## License diff --git a/build.rs b/build.rs new file mode 100644 index 0000000..3da8325 --- /dev/null +++ b/build.rs @@ -0,0 +1,112 @@ +extern crate cpp_build; +extern crate build_target; +use std::path::Path; +use build_target::Os; + +fn link_lib(lib_path: &Path, lib: &str, whole_link: bool) -> Result<(), ()> { + let so_ext = match build_target::target_os().unwrap() { + Os::Linux => "so", + Os::MacOs => "dylib", + Os::Windows => "dll", + _ => panic!("Unsupported OS"), + }; + let filename = match lib { + "extension_module" => format!("lib{}.{}", lib, so_ext), + "qnn_executorch_backend" => format!("lib{}.{}", lib, so_ext), + _ => format!("lib{}.a", lib), + }; + if lib_path.join(&filename).exists() { + if filename.ends_with(so_ext) { + println!("cargo:rustc-link-lib=dylib={}", lib); + } else { + if whole_link { + println!("cargo:rustc-link-lib=static:+whole-archive={}", lib); + } else { + println!("cargo:rustc-link-lib=static={}", lib); + } + } + return Ok(()); + } + Err(()) +} + +fn main() { + println!("cargo:rerun-if-changed=src/sampler.rs"); + println!("cargo:rerun-if-changed=src/tensor.rs"); + println!("cargo:rerun-if-changed=src/tensor.hpp"); + println!("cargo:rerun-if-changed=src/module.rs"); + println!("cargo:rerun-if-changed=src/module.hpp"); + println!("cargo:rerun-if-changed=src/method_meta.rs"); + println!("cargo:rerun-if-changed=src/evalue.rs"); + println!("cargo:rerun-if-changed=src/evalue.hpp"); + println!("cargo:rerun-if-changed=src/eterror.rs"); + println!("cargo:rerun-if-changed=src/lib.rs"); + + let install_prefix = std::env::var("EXECUTORCH_INSTALL_PREFIX").unwrap_or_else(|_| "executorch/cmake-out".to_string()); + let lib_path = Path::new(&install_prefix).join("lib"); + + let node_platform = match std::env::var("CARGO_CFG_TARGET_OS").unwrap().as_str() { + "linux" => "linux", + "macos" => "darwin", + "windows" => "win32", + _ => panic!("Unsupported platform"), + }; + let node_arch = match std::env::var("CARGO_CFG_TARGET_ARCH").unwrap().as_str() { + 
"x86_64" => "x64", + "aarch64" => "arm64", + _ => panic!("Unsupported arch"), + }; + + println!("cargo:rustc-link-search=native={}", lib_path.display()); + + // for nodejs/electron usage + println!("cargo:rustc-link-arg=-Wl,-rpath,bin/{}/{}", node_platform, node_arch); + println!("cargo:rustc-link-arg=-Wl,-rpath,node_modules/bin/{}/{}", node_platform, node_arch); + println!("cargo:rustc-link-arg=-Wl,-rpath,resources/node_modules/bin/{}/{}", node_platform, node_arch); + + assert!(link_lib(&lib_path, "executorch", false).is_ok()); + assert!(link_lib(&lib_path, "executorch_no_prim_ops", false).is_ok()); + assert!(link_lib(&lib_path, "extension_module", false).is_ok()); + assert!(link_lib(&lib_path, "extension_data_loader", false).is_ok()); + + // Optimized Kernels + if link_lib(&lib_path, "optimized_native_cpu_ops_lib", true).is_ok() { + assert!(link_lib(&lib_path, "optimized_kernels", false).is_ok()); + assert!(link_lib(&lib_path, "portable_kernels", false).is_ok()); + // assert!(link_lib(&lib_path, "cpublas", false).is_ok()); + assert!(link_lib(&lib_path, "eigen_blas", false).is_ok()); + } else { + assert!(link_lib(&lib_path, "portable_ops_lib", true).is_ok()); + assert!(link_lib(&lib_path, "portable_kernels", false).is_ok()); + } + + // Quantized Kernels + if link_lib(&lib_path, "quantized_ops_lib", true).is_ok() { + assert!(link_lib(&lib_path, "quantized_kernels", false).is_ok()); + } + + // Tensor extension + let _ = link_lib(&lib_path, "extension_tensor", false); + + // Runner Util extension + let _ = link_lib(&lib_path, "extension_runner_util", false); + + // misc. 
+ let _ = link_lib(&lib_path, "cpuinfo", false); + let _ = link_lib(&lib_path, "pthreadpool", false); + + // XNNPACK + if link_lib(&lib_path, "xnnpack_backend", true).is_ok() { + assert!(link_lib(&lib_path, "XNNPACK", false).is_ok()); + } + + // Vulkan + let _ = link_lib(&lib_path, "vulkan_backend", true); + + // QNN + let _ = link_lib(&lib_path, "qnn_executorch_backend", true); + + cpp_build::Config::new() + .flag("-std=c++17") + .build("src/lib.rs"); +} diff --git a/cmake/Utils.cmake b/cmake/Utils.cmake deleted file mode 100644 index f5bfbcc..0000000 --- a/cmake/Utils.cmake +++ /dev/null @@ -1,40 +0,0 @@ -# This is the funtion to use -Wl, --whole-archive to link static library NB: -# target_link_options is broken for this case, it only append the interface link -# options of the first library. -function(kernel_link_options target_name) - # target_link_options(${target_name} INTERFACE - # "$") - target_link_options( - ${target_name} INTERFACE "SHELL:LINKER:--whole-archive \ - $ \ - LINKER:--no-whole-archive" - ) -endfunction() - -# Same as kernel_link_options but it's for MacOS linker -function(macos_kernel_link_options target_name) - target_link_options( - ${target_name} INTERFACE - "SHELL:LINKER:-force_load,$" - ) -endfunction() - -# Ensure that the load-time constructor functions run. By default, the linker -# would remove them since there are no other references to them. 
-function(target_link_options_shared_lib target_name) - if(APPLE) - macos_kernel_link_options(${target_name}) - else() - kernel_link_options(${target_name}) - endif() -endfunction() - -function(download_node_lib version arch target_path) - # Download node.lib for Windows cross-compilation - if(WIN32 AND target_path) - if(NOT EXISTS ${target_path}) - file(DOWNLOAD https://nodejs.org/dist/v${version}/win-${arch}/node.lib ${target_path}) - endif() - endif() -endfunction() - diff --git a/cmake/aarch64-linux-gnu.clang.toolchain.cmake b/cmake/aarch64-linux-gnu.clang.toolchain.cmake new file mode 100644 index 0000000..aa14c29 --- /dev/null +++ b/cmake/aarch64-linux-gnu.clang.toolchain.cmake @@ -0,0 +1,12 @@ +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_PROCESSOR aarch64) + +set(CMAKE_C_COMPILER "clang") +set(CMAKE_CXX_COMPILER "clang++") + +set(CMAKE_C_FLAGS "-march=armv8-a -target aarch64-linux-gnu") +set(CMAKE_CXX_FLAGS "-march=armv8-a -target aarch64-linux-gnu") + +# cache flags +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}" CACHE STRING "c flags") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}" CACHE STRING "c++ flags") \ No newline at end of file diff --git a/cmake/clang-aarch64-linux-gnu.toolchain.cmake b/cmake/clang-aarch64-linux-gnu.toolchain.cmake deleted file mode 100644 index 60e3b96..0000000 --- a/cmake/clang-aarch64-linux-gnu.toolchain.cmake +++ /dev/null @@ -1,31 +0,0 @@ -set(CMAKE_SYSTEM_NAME Linux) -set(CMAKE_SYSTEM_PROCESSOR aarch64) - -set(CMAKE_C_COMPILER "clang") -set(CMAKE_CXX_COMPILER "clang++") -set(CMAKE_ASM_COMPILER "aarch64-linux-gnu-as") - -if(NOT CMAKE_FIND_ROOT_PATH_MODE_PROGRAM) - set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) -endif() -if(NOT CMAKE_FIND_ROOT_PATH_MODE_LIBRARY) - set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) -endif() -if(NOT CMAKE_FIND_ROOT_PATH_MODE_INCLUDE) - set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) -endif() -if(NOT CMAKE_FIND_ROOT_PATH_MODE_PACKAGE) - set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) -endif() - -set(SYSROOT 
/usr/aarch64-linux-gnu) - -set(CMAKE_C_FLAGS "--target=aarch64-linux-gnu -isysroot=${SYSROOT}") -set(CMAKE_CXX_FLAGS "--target=aarch64-linux-gnu -isysroot=${SYSROOT}") - -set(CMAKE_ASM_FLAGS "-march=armv8-a") - -# cache flags -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}" CACHE STRING "c flags") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}" CACHE STRING "c++ flags") -set(CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS}" CACHE STRING "asm flags") diff --git a/cmake/mingw-w64-aarch64.clang.toolchain.cmake b/cmake/mingw-w64-aarch64.clang.toolchain.cmake new file mode 100644 index 0000000..6a18b2e --- /dev/null +++ b/cmake/mingw-w64-aarch64.clang.toolchain.cmake @@ -0,0 +1,10 @@ +set(CMAKE_SYSTEM_NAME Windows) +set(CMAKE_SYSTEM_PROCESSOR arm64) + +set(CMAKE_C_COMPILER aarch64-w64-mingw32-clang) +set(CMAKE_CXX_COMPILER aarch64-w64-mingw32-clang++) +set(CMAKE_RC_COMPILER aarch64-w64-mingw32-windres) +set(CMAKE_AR aarch64-w64-mingw32-ar) +set(CMAKE_RANLIB aarch64-w64-mingw32-ranlib) +set(CMAKE_STRIP aarch64-w64-mingw32-strip) +set(CMAKE_LINKER aarch64-w64-mingw32-ld) diff --git a/cmake/mingw-w64-aarch64.toolchain.cmake b/cmake/mingw-w64-aarch64.toolchain.cmake deleted file mode 100644 index ce163c5..0000000 --- a/cmake/mingw-w64-aarch64.toolchain.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(CMAKE_SYSTEM_NAME Windows) -set(CMAKE_SYSTEM_PROCESSOR aarch64) -set(TOOLCHAIN_PREFIX aarch64-w64-mingw32) - -# cross compilers to use for C, C++ and Fortran -set(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc) -set(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++) -set(CMAKE_Fortran_COMPILER ${TOOLCHAIN_PREFIX}-gfortran) -set(CMAKE_RC_COMPILER ${TOOLCHAIN_PREFIX}-windres) -set(CMAKE_LINKER ${TOOLCHAIN_PREFIX}-ld) - -# modify default behavior of FIND_XXX() commands -set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) -set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) diff --git a/cmake/mingw-w64-x86_64.clang.toolchain.cmake b/cmake/mingw-w64-x86_64.clang.toolchain.cmake new file mode 100644 
index 0000000..4623d70 --- /dev/null +++ b/cmake/mingw-w64-x86_64.clang.toolchain.cmake @@ -0,0 +1,10 @@ +set(CMAKE_SYSTEM_NAME Windows) +set(CMAKE_SYSTEM_PROCESSOR x86_64) + +set(CMAKE_C_COMPILER x86_64-w64-mingw32-clang) +set(CMAKE_CXX_COMPILER x86_64-w64-mingw32-clang++) +set(CMAKE_RC_COMPILER x86_64-w64-mingw32-windres) +set(CMAKE_AR x86_64-w64-mingw32-ar) +set(CMAKE_RANLIB x86_64-w64-mingw32-ranlib) +set(CMAKE_STRIP x86_64-w64-mingw32-strip) +set(CMAKE_LINKER x86_64-w64-mingw32-ld) diff --git a/cmake/mingw-w64-x86_64.toolchain.cmake b/cmake/mingw-w64-x86_64.toolchain.cmake deleted file mode 100644 index 5430ba7..0000000 --- a/cmake/mingw-w64-x86_64.toolchain.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(CMAKE_SYSTEM_NAME Windows) -set(CMAKE_SYSTEM_PROCESSOR x86_64) -set(TOOLCHAIN_PREFIX x86_64-w64-mingw32) - -# cross compilers to use for C, C++ and Fortran -set(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc) -set(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++) -set(CMAKE_Fortran_COMPILER ${TOOLCHAIN_PREFIX}-gfortran) -set(CMAKE_RC_COMPILER ${TOOLCHAIN_PREFIX}-windres) -set(CMAKE_LINKER ${TOOLCHAIN_PREFIX}-ld) - -# modify default behavior of FIND_XXX() commands -set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) -set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) diff --git a/executorch b/executorch new file mode 160000 index 0000000..25d7751 --- /dev/null +++ b/executorch @@ -0,0 +1 @@ +Subproject commit 25d77516938dff31f519d915513b7196f924a1d5 diff --git a/lib/__snapshots__/index.test.ts.snap b/lib/__snapshots__/index.test.ts.snap index 82d6688..71e9e8e 100644 --- a/lib/__snapshots__/index.test.ts.snap +++ b/lib/__snapshots__/index.test.ts.snap @@ -1,17 +1,6 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP exports[`Module 1`] = ` -Float32Array [ - 0, - 0, - 0, - 0, - 0, - 0, -] -`; - -exports[`Module 2`] = ` Float32Array [ 1, 4, diff --git a/lib/binding.ts b/lib/binding.ts new file mode 100644 index 0000000..a1bcddc --- /dev/null +++ 
b/lib/binding.ts @@ -0,0 +1,28 @@ +import type { ExternalObject, InternalEValue, MethodMeta, Optional, TensorPtrInfo, DType } from "./types"; + +interface Binding { + // module methods + moduleLoad(path: string, load_mode: number): Promise; + moduleLoadMethod(ptr: ExternalObject, name: string): Promise; + moduleExecute(ptr: ExternalObject, method_name: string, inputs: InternalEValue[]): Promise; + moduleGetMethodMeta(ptr: ExternalObject, method_name: string): MethodMeta; + moduleMethodNames(ptr: ExternalObject): string[]; + // tensor methods + createTensor(dtype: DType, shape: number[], data: ArrayBuffer): ExternalObject; + tensorGetDtype(ptr: ExternalObject): DType; + tensorGetShape(ptr: ExternalObject): number[]; + tensorGetData(ptr: ExternalObject): ArrayBuffer; + tensorSetData(ptr: ExternalObject, data: ArrayBuffer): void; + tensorSetValue(ptr: ExternalObject, position: Array, data: number | boolean): void; + tensorConcat(ptrs: ExternalObject[], axis: number): TensorPtrInfo; + tensorSlice(ptr: ExternalObject, slice_position: Array>|number>>): TensorPtrInfo; + tensorReshape(ptr: ExternalObject, shape: number[]): TensorPtrInfo; +} + +const moduleBasePath = `../bin/${process.platform}/${process.arch}`; + +if (process.platform === "win32") { + process.env.PATH = `${moduleBasePath};${process.env.PATH}`; +} + +export const mod = require(`${moduleBasePath}/executorch.node`) as Binding; diff --git a/lib/index.test.ts b/lib/index.test.ts index 78cacc3..4360245 100644 --- a/lib/index.test.ts +++ b/lib/index.test.ts @@ -1,5 +1,5 @@ import path from "path"; -import { Module, Tensor, Sampler } from "./index"; +import { Module, Tensor, Sampler, EValueTag, DType } from "./index"; const model = path.resolve(__dirname, "__fixtures__/mul.pte"); @@ -9,20 +9,13 @@ it("Module", async () => { expect(mod.getMethodMeta("forward")).toEqual({ name: "forward", inputs: [ - { tag: "tensor", tensor_info: { dtype: "float32", shape: [3, 2] } }, - { tag: "tensor", tensor_info: { dtype: 
"float32", shape: [3, 2] } }, + { tag: EValueTag.Tensor, tensor_info: { dtype: DType.float32, shape: [3, 2] } }, + { tag: EValueTag.Tensor, tensor_info: { dtype: DType.float32, shape: [3, 2] } }, ], - outputs: [{ tag: "tensor", tensor_info: { dtype: "float32", shape: [3, 2] } }], + outputs: [{ tag: EValueTag.Tensor, tensor_info: { dtype: DType.float32, shape: [3, 2] } }], }); - { // execute without inputs - const outputs = await mod.execute("forward"); - expect(outputs[0]).toBeInstanceOf(Tensor); - if (outputs[0] instanceof Tensor) { - expect(outputs[0].dtype).toBe("float32"); - expect(outputs[0].shape).toEqual([3, 2]); - expect(outputs[0].data).toMatchSnapshot(); - } - } + // forward without inputs + expect(async () => await mod.execute("forward")).rejects.toThrow("Failed to execute method: InvalidArgument"); { // forward const input = new Tensor("float32", [3, 2], new Float32Array([1, 2, 3, 4, 5, 6])); const outputs = await mod.forward([input, input]); @@ -55,20 +48,8 @@ it("Tensor", async () => { expect(concat.data).toMatchSnapshot(); // reshape - input.reshape([2, 3]); - expect(input.dtype).toBe("float32"); - expect(input.shape).toEqual([2, 3]); - expect(input.data).toMatchSnapshot(); -}); - -it("Sampler", async () => { - const mockTensor = new Tensor("float32", [1, 2, 10], Float32Array.from({ length: 20 }, (_, i) => i)); - const sampler = new Sampler(10); - - // sample - const sample = sampler.sample(mockTensor); - expect(sample).toBeGreaterThanOrEqual(0); - expect(sample).toBeLessThan(10); - - sampler.dispose(); + const reshaped = input.reshape([2, 3]); + expect(reshaped.dtype).toBe("float32"); + expect(reshaped.shape).toEqual([2, 3]); + expect(reshaped.data).toMatchSnapshot(); }); diff --git a/lib/index.ts b/lib/index.ts index 21c2365..2e59417 100644 --- a/lib/index.ts +++ b/lib/index.ts @@ -1,98 +1,173 @@ -type DType = - | "float32" - | "float64" - | "int32" - | "uint8" - | "int8" - | "int16" - | "int64" - | "bool"; - -type TensorData = - | boolean[] - | 
Float32Array - | Float64Array - | Int32Array - | Uint8Array - | Int8Array - | Int16Array - | Int32Array - | BigInt64Array; - -type Optional = T | null | undefined; - -interface TensorImpl { - get dtype(): DType; - get shape(): number[]; - get data(): TensorData; - setValue(position: Array, data: number | boolean): void - slice(...slice_position: Array>|number>>): TensorImpl; - reshape(shape: number[]): TensorImpl; - dispose(): void; -} - -interface Tensor { - new(dtype: DType, shape: number[], data: TensorData): TensorImpl; - concat(tensors: TensorImpl[], axis: number): TensorImpl; -} - -type EValue = null | string | number | boolean | TensorImpl; - -type TensorInfo = { - dtype: DType; - shape: number[]; -}; - -type EValueSpec = { - tag: "null" | "string" | "number" | "boolean" | "tensor"; - tensor_info?: TensorInfo; +import { mod } from "./binding"; +import { EValueTag, DType, ModuleLoadMode } from "./types"; +import type { ExternalObject, MethodMeta, TensorData, Optional, TensorPtrInfo, InternalEValue } from "./types"; + +export * from "./types"; + +export type DTypeStr = keyof typeof DType; + +const dtypeTypedArrayMap = { + [DType.float32]: Float32Array, + [DType.float64]: Float64Array, + [DType.uint8]: Uint8Array, + [DType.int8]: Int8Array, + [DType.int16]: Int16Array, + [DType.int32]: Int32Array, + [DType.int64]: BigInt64Array, }; -type MethodMeta = { - name: string; - inputs: Array; - outputs: Array; -}; - -interface ModuleImpl { - forward(inputs: EValue[]): Promise | undefined; - execute(method_name: string, inputs?: EValue[]): Promise | undefined; - getMethodMeta(method_name: string): MethodMeta | undefined; - get method_names(): string[]; - dispose(): void; -} - -interface Module { - load(path: string): Promise; +const toArrayBuffer = (data: TensorData): ArrayBuffer => { + if (Array.isArray(data)) { + return Uint8Array.from(data.map((v) => v ? 
1 : 0)).buffer; + } else { + return data.buffer; + } } -interface SamplerImpl { - sample(tensor: TensorImpl): number; - dispose(): void; +class Tensor { + _ptr: ExternalObject; + _dtype: DType; + + constructor(dtype: DTypeStr | DType, shape: number[], data: TensorData | ExternalObject) { + this._dtype = typeof dtype === "string" ? DType[dtype] : dtype; + if (!Array.isArray(data) && !ArrayBuffer.isView(data)) { + this._ptr = data; + return; + } + this._ptr = mod.createTensor(this._dtype, shape, toArrayBuffer(data as TensorData)); + } + + static fromPtr(ptrInfo: TensorPtrInfo): Tensor { + const { shape, dtype, ptr } = ptrInfo; + + return new Tensor(dtype, shape, ptr); + } + + static concat(tensors: Tensor[], axis: number): Tensor { + const ptrs = tensors.map((t) => t._ptr); + return Tensor.fromPtr(mod.tensorConcat(ptrs, axis)); + } + + get shape(): number[] { + return mod.tensorGetShape(this._ptr); + } + + get dtype(): DTypeStr { + return DType[this._dtype] as DTypeStr; + } + + get data(): TensorData { + const buf = mod.tensorGetData(this._ptr); + if (this._dtype === DType.bool) { + return Array.from(new Uint8Array(buf)).map((v) => v === 1); + } else { + return new dtypeTypedArrayMap[this._dtype](buf); + } + } + + set data(data: TensorData) { + mod.tensorSetData(this._ptr, toArrayBuffer(data)); + } + + slice(...slice_position: Array>|number>>): Tensor { + return Tensor.fromPtr(mod.tensorSlice(this._ptr, slice_position)); + } + + reshape(shape: number[]): Tensor { + return Tensor.fromPtr(mod.tensorReshape(this._ptr, shape)); + } + + setValue(position: Array, data: number | boolean): void { + mod.tensorSetValue(this._ptr, position, data); + } + + dispose() { + delete this._ptr; + } } -interface Sampler { - new(vocab_size: number, temperature?: number, top_p?: number, seed?: number): SamplerImpl; +export type EValue = null | string | number | boolean | Tensor | undefined; + +const toInternalEValue = (value: EValue): InternalEValue => { + if (value === null) { + return { 
tag: EValueTag.None, data: null }; + } else if (typeof value === "string") { + return { tag: EValueTag.String, data: value }; + } else if (typeof value === "number") { + if (Number.isInteger(value)) { + return { tag: EValueTag.Int, data: value }; + } else { + return { tag: EValueTag.Double, data: value }; + } + } else if (typeof value === "boolean") { + return { tag: EValueTag.Bool, data: value }; + } else if (value instanceof Tensor) { + return { + tag: EValueTag.Tensor, + data: value._ptr, + }; + } else { + throw new Error(`Unsupported type: ${typeof value}`); + } } -interface Binding { - Module: Module; - Tensor: Tensor; - Sampler: Sampler; +const fromInternalEValue = (value: InternalEValue): EValue => { + switch (value.tag) { + case EValueTag.None: + return null; + case EValueTag.String: + case EValueTag.Int: + case EValueTag.Double: + case EValueTag.Bool: + return value.data as string | number | boolean; + case EValueTag.Tensor: + return Tensor.fromPtr(value.data as TensorPtrInfo); + default: + return undefined; + } } -const moduleBasePath = `../bin/${process.platform}/${process.arch}`; - -if (process.platform === "linux") { - process.env.LD_LIBRARY_PATH = `${moduleBasePath}:${process.env.LD_LIBRARY_PATH}`; -} else if (process.platform === "darwin") { - process.env.DYLD_LIBRARY_PATH = `${moduleBasePath}:${process.env.DYLD_LIBRARY_PATH}`; -} else if (process.platform === "win32") { - process.env.PATH = `${moduleBasePath};${process.env.PATH}`; +class Module { + _ptr: ExternalObject; + + constructor(ptr: ExternalObject) { + this._ptr = ptr; + } + + static async load(path: string, load_mode: ModuleLoadMode = ModuleLoadMode.MmapUseMlock): Promise { + const ptr = await mod.moduleLoad(path, load_mode); + return new Module(ptr); + } + + get method_names(): string[] { + return mod.moduleMethodNames(this._ptr); + } + + getMethodMeta(method_name: string): MethodMeta { + return mod.moduleGetMethodMeta(this._ptr, method_name); + } + + async loadMethod(name: string): 
Promise { + await mod.moduleLoadMethod(this._ptr, name); + } + + async forward(inputs: EValue[]): Promise { + return this.execute("forward", inputs); + } + + async execute(method_name: string, inputs: EValue[] = []): Promise { + return ( + await mod.moduleExecute( + this._ptr, + method_name, + inputs.map(toInternalEValue) + ) + ).map(fromInternalEValue); + } + + dispose() { + delete this._ptr; + } } -const mod = require(`${moduleBasePath}/node-executorch.node`) as Binding; - -export const Module = mod.Module; -export const Tensor = mod.Tensor; -export const Sampler = mod.Sampler; +export { Tensor, Module }; diff --git a/lib/types.ts b/lib/types.ts new file mode 100644 index 0000000..c7a1a9f --- /dev/null +++ b/lib/types.ts @@ -0,0 +1,75 @@ +export enum ModuleLoadMode { + File = 0, + Mmap = 1, + MmapUseMlock = 2, + MmapUseMlockIgnoreErrors = 3, +} + +export enum DType { + uint8 = 0, + int8 = 1, + int16 = 2, + int32 = 3, + int64 = 4, + float16 = 5, + float32 = 6, + float64 = 7, + bool = 11, +} + +export type TensorData = + | boolean[] + | Float32Array + | Float64Array + | Int32Array + | Uint8Array + | Int8Array + | Int16Array + | Int32Array + | BigInt64Array; + +export type Optional = T | null | undefined; + +export type ExternalObject = any; + +export type TensorPtrInfo = { + shape: number[]; + dtype: DType; + ptr: ExternalObject; +}; + +export enum EValueTag { + None = 0, + Tensor = 1, + String = 2, + Double = 3, + Int = 4, + Bool = 5, + ListBool = 6, + ListDouble = 7, + ListInt = 8, + ListTensor = 9, + ListScalar = 10, + ListOptionalTensor = 11, +} + +export type InternalEValue = { + tag: EValueTag; + data: null | string | number | boolean | ExternalObject | TensorPtrInfo | undefined; +} + +export type TensorInfo = { + dtype: DType; + shape: number[]; +}; + +export type EValueSpec = { + tag: EValueTag; + tensor_info?: TensorInfo; +}; + +export type MethodMeta = { + name: string; + inputs: Array; + outputs: Array; +}; diff --git a/package.json b/package.json index 
b8604f1..bcc4696 100644 --- a/package.json +++ b/package.json @@ -4,38 +4,49 @@ "description": "Node.js binding for ExecuTorch", "main": "lib/index.js", "scripts": { - "test": "jest", "build-js": "tsc", - "build": "cmake-js build", - "prepack": "npm run build-js" + "test-all": "yarn test-rs && yarn test", + "test": "jest", + "test-rs": "cargo test", + "prepack": "npm run build-js", + "cargo-build": "cargo build --message-format=json > cargo.log", + "cross-build": "cross build --message-format=json > cross.log", + "postcargo-build": "yarn neon-dist < cargo.log", + "postcross-build": "yarn neon-dist -m /target < cross.log", + "neon-dist": "neon dist --name node_executorch", + "postneon-dist": "node ./scripts/postneon-dist.js", + "debug": "yarn cargo-build", + "build": "yarn cargo-build --release", + "cross": "yarn cross-build --release" }, "author": "Hans ", "license": "BSD-3-Clause", - "files": [ - "lib", - "bin", - "src", - "cmake", - "CMakeLists.txt" - ], - "binary": { - "napi_versions": [ - 8 - ] - }, "devDependencies": { "@babel/preset-env": "^7.24.6", "@babel/preset-typescript": "^7.24.6", "@jest/globals": "^29.7.0", "@types/jest": "^29.5.12", "@types/node": "^20.12.12", - "cmake-js": "^7.3.0", + "@neon-rs/cli": "0.1.68", "jest": "^29.7.0", "typescript": "^5.4.5" }, - "dependencies": { - "node-addon-api": "^8.0.0" + "repository": { + "type": "git", + "url": "git+https://github.com/mybigday/node-executorch.git" + }, + "keywords": [ + "executorch", + "ai" + ], + "bugs": { + "url": "https://github.com/mybigday/node-executorch/issues" }, + "homepage": "https://github.com/mybigday/node-executorch#readme", + "files": [ + "lib", + "bin" + ], "jest": { "testEnvironment": "node", "testMatch": [ diff --git a/scripts/build-all.sh b/scripts/build-all.sh deleted file mode 100755 index ff10be3..0000000 --- a/scripts/build-all.sh +++ /dev/null @@ -1,122 +0,0 @@ -#!/bin/bash - -set -e - -if [ -z "$1" ]; then - echo "Usage: $0 " - exit 1 -fi - -ET_SRC=$(realpath $1) 
-PROJECT_DIR=$(realpath $(dirname $0)) - -if [ ! -d "$ET_SRC/cmake-linux-x64-out" ]; then - pushd $ET_SRC - cmake . \ - -B cmake-linux-x64-out -DCMAKE_INSTALL_PREFIX=cmake-linux-x64-out \ - -DCMAKE_BUILD_TYPE=Release \ - -DEXECUTORCH_ENABLE_LOGGING=ON \ - -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \ - -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \ - -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \ - -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \ - -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \ - -DEXECUTORCH_BUILD_CPUINFO=ON \ - -DEXECUTORCH_BUILD_XNNPACK=ON \ - -DEXECUTORCH_BUILD_PTHREADPOOL=ON - cmake --build cmake-linux-x64-out --config Release --target install -j4 - popd -fi - -rm -rf build && \ - yarn build --CDCMAKE_PREFIX_PATH=$ET_SRC/cmake-linux-x64-out \ - --CDEXECUTORCH_SRC_ROOT=$ET_SRC - -if [ ! -d "$ET_SRC/cmake-linux-arm64-out" ]; then - pushd $ET_SRC - cmake . \ - -B cmake-linux-arm64-out -DCMAKE_INSTALL_PREFIX=cmake-linux-arm64-out \ - -DCMAKE_BUILD_TYPE=Release \ - -DEXECUTORCH_ENABLE_LOGGING=ON \ - -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \ - -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \ - -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \ - -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \ - -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \ - -DEXECUTORCH_BUILD_CPUINFO=ON \ - -DEXECUTORCH_BUILD_XNNPACK=ON \ - -DEXECUTORCH_BUILD_PTHREADPOOL=ON \ - -DCMAKE_TOOLCHAIN_FILE="$PROJECT_DIR/cmake/clang-aarch64-linux-gnu.toolchain.cmake" - cmake --build cmake-linux-arm64-out --config Release --target install -j4 - popd -fi - -rm -rf build && \ -yarn build --CDCMAKE_PREFIX_PATH=$ET_SRC/cmake-linux-arm64-out \ - --CDEXECUTORCH_SRC_ROOT=$ET_SRC --CDCMAKE_TOOLCHAIN_FILE=cmake/clang-aarch64-linux-gnu.toolchain.cmake - -if [ ! -d "$ET_SRC/cmake-win-x64-out" ]; then - pushd $ET_SRC - cmake . 
\ - -B cmake-win-x64-out -DCMAKE_INSTALL_PREFIX=cmake-win-x64-out \ - -DEXECUTORCH_ENABLE_LOGGING=1 \ - -DCMAKE_BUILD_TYPE=Release \ - -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \ - -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \ - -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \ - -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \ - -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \ - -DEXECUTORCH_BUILD_CPUINFO=ON \ - -DEXECUTORCH_BUILD_XNNPACK=ON \ - -DEXECUTORCH_BUILD_PTHREADPOOL=ON \ - -DCMAKE_TOOLCHAIN_FILE="$PROJECT_DIR/cmake/mingw-w64-x86_64.toolchain.cmake" - cmake --build cmake-win-x64-out --config Release --target install -j4 - popd -fi - -rm -rf build && \ - yarn build --CDCMAKE_PREFIX_PATH=$ET_SRC/cmake-win-arm64-out \ - --CDEXECUTORCH_SRC_ROOT=$ET_SRC --CDCMAKE_TOOLCHAIN_FILE=cmake/mingw-w64-aarch64.toolchain.cmake - -if [ ! -d "$ET_SRC/cmake-win-arm64-out" ]; then - pushd $ET_SRC - if [ -z "$QNN_SDK_ROOT" ]; then - cmake . \ - -B cmake-win-arm64-out -DCMAKE_INSTALL_PREFIX=cmmake-win-arm64-out \ - -DEXECUTORCH_ENABLE_LOGGING=1 \ - -DCMAKE_BUILD_TYPE=Release \ - -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \ - -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \ - -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \ - -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \ - -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \ - -DEXECUTORCH_BUILD_CPUINFO=ON \ - -DEXECUTORCH_BUILD_XNNPACK=ON \ - -DEXECUTORCH_BUILD_PTHREADPOOL=ON \ - -DCMAKE_TOOLCHAIN_FILE="$PROJECT_DIR/cmake/mingw-w64-aarch64.toolchain.cmake" - cmake --build cmake-win-arm64-out --config Release --target install -j4 - else - cmake . 
\ - -B cmake-win-arm64-out -DCMAKE_INSTALL_PREFIX=cmmake-win-arm64-out \ - -DEXECUTORCH_ENABLE_LOGGING=1 \ - -DCMAKE_BUILD_TYPE=Release \ - -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \ - -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \ - -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \ - -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \ - -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \ - -DEXECUTORCH_BUILD_QNN=ON \ - -DQNN_SDK_ROOT=$QNN_SDK_ROOT \ - -DEXECUTORCH_BUILD_CPUINFO=ON \ - -DEXECUTORCH_BUILD_XNNPACK=ON \ - -DEXECUTORCH_BUILD_PTHREADPOOL=ON \ - -DEXECUTORCH_BUILD_SDK=ON \ - -DCMAKE_TOOLCHAIN_FILE="$PROJECT_DIR/cmake/mingw-w64-aarch64.toolchain.cmake" - cmake --build cmake-win-arm64-out --config Release --target install -j4 - fi - popd -fi - -rm -rf build && \ -yarn build --CDCMAKE_PREFIX_PATH=$ET_SRC/cmake-win-x64-out \ - --CDEXECUTORCH_SRC_ROOT=$ET_SRC --CDCMAKE_TOOLCHAIN_FILE=cmake/mingw-w64-x86_64.toolchain.cmake diff --git a/scripts/postneon-dist.js b/scripts/postneon-dist.js new file mode 100644 index 0000000..6826f20 --- /dev/null +++ b/scripts/postneon-dist.js @@ -0,0 +1,59 @@ +const fs = require('fs'); + +let platform = 'unknown'; +let arch = 'unknown'; + +// Parse file header to determine platform and architecture +const content = fs.readFileSync('index.node').subarray(0, 256); + +if (content[0] === 0x7f && content[1] === 0x45 && content[2] === 0x4c && content[3] === 0x46) { // Linux: ELF x86_64/aarch64 + platform = 'linux'; + if (content[18] === 0xb7) { + arch = 'arm64'; + } else if (content[18] === 0x3E) { + arch = 'x64'; + } else { + console.error('Unknown ELF arch code:', content[18].toString(16)); + } +} else if (content[0] === 0x4d && content[1] === 0x5a) { // Windows: PE x86_64/aarch64 + const pePos = content.indexOf('PE\0\0'); + platform = 'win32'; + const code = content[pePos + 4] + (content[pePos + 5] << 8); + if (code === 0x8664) { + arch = 'x64'; + } else if (code === 0xaa64) { + arch = 'arm64'; + } else { + console.error('Unknown PE arch code:', code.toString(16)); 
+ } +} else if (content[0] === 0xfe && content[1] === 0xed && content[2] === 0xfa && content[3] === 0xce) { // MacOS: Mach-O x86_64/aarch64 + platform = 'darwin'; + if (content[4] === 0x07) { + arch = 'x64'; + } else if (content[4] === 0x0c) { + arch = 'arm64'; + } +} + +// Create the directory for the platform and architecture if it doesn't exist +if (!fs.existsSync(`bin/${platform}/${arch}`)) { + fs.mkdirSync(`bin/${platform}/${arch}`, { recursive: true }); +} + +// Rename the index.node file to executorch.node +fs.renameSync('index.node', `bin/${platform}/${arch}/executorch.node`); + +// Copy the shared libraries to the bin directory +const installPrefix = process.env.EXECUTORCH_INSTALL_PREFIX || 'executorch/cmake-out'; + +const shared_libs = ['libextension_module', 'qnn_executorch_backend']; + +for (const lib of shared_libs) { + if (fs.existsSync(`${installPrefix}/lib/${lib}.so`)) { + fs.copyFileSync(`${installPrefix}/lib/${lib}.so`, `bin/${platform}/${arch}/${lib}.so`); + } else if (fs.existsSync(`${installPrefix}/lib/${lib}.dylib`)) { + fs.copyFileSync(`${installPrefix}/lib/${lib}.dylib`, `bin/${platform}/${arch}/${lib}.dylib`); + } else if (fs.existsSync(`${installPrefix}/lib/${lib}.dll`)) { + fs.copyFileSync(`${installPrefix}/lib/${lib}.dll`, `bin/${platform}/${arch}/${lib}.dll`); + } +} diff --git a/src/Module.cpp b/src/Module.cpp deleted file mode 100644 index 0eccd8a..0000000 --- a/src/Module.cpp +++ /dev/null @@ -1,352 +0,0 @@ -#include "common.h" -#include "Module.h" -#include "utils.h" -#include -#include - -namespace executorch::node { - -const std::unordered_map dtypeMap = { - {exec_aten::ScalarType::Byte, "uint8"}, - {exec_aten::ScalarType::Char, "int8"}, - {exec_aten::ScalarType::Short, "int16"}, - {exec_aten::ScalarType::Int, "int32"}, - {exec_aten::ScalarType::Long, "int64"}, - {exec_aten::ScalarType::Float, "float32"}, - {exec_aten::ScalarType::Double, "float64"}, - {exec_aten::ScalarType::Bool, "bool"}}; - -const std::unordered_map tagMap = { 
- {torch::executor::Tag::Int, "int"}, - {torch::executor::Tag::Double, "double"}, - {torch::executor::Tag::Bool, "bool"}, - {torch::executor::Tag::String, "string"}, - {torch::executor::Tag::Tensor, "tensor"}}; - -/* LoadWorker */ - -class LoadWorker : public Napi::AsyncWorker, public Napi::Promise::Deferred { -public: - LoadWorker(Napi::Env env, const std::string &path) - : Napi::AsyncWorker(env), Napi::Promise::Deferred(env), path_(path) {} - -protected: - void Execute() { - try { - auto *module = new torch::executor::Module( - path_, torch::executor::Module::MlockConfig::NoMlock); - module_ = std::make_unique(module); - } catch (const std::exception &e) { - SetError(e.what()); - } - } - - void OnOK() { - Resolve(Module::New(Napi::External::New( - Napi::AsyncWorker::Env(), module_.release()))); - } - - void OnError(const Napi::Error &e) { Reject(e.Value()); } - -private: - const std::string path_; - std::unique_ptr module_; -}; - -/* ExecuteWorker */ -class ExecuteWorker : public Napi::AsyncWorker, public Napi::Promise::Deferred { -public: - ExecuteWorker(Napi::Env env, ModuleHolder *module, std::string method, - std::vector inputs) - : Napi::AsyncWorker(env), Napi::Promise::Deferred(env), module_(module), - method_(method), inputs_(std::move(inputs)) {} - -protected: - void Execute() { - try { - auto result = (*module_)->execute(method_, inputs_); - if (result.ok()) { - outputs_ = std::move(result.get()); - } else { - throw std::runtime_error("Failed to execute method: " + - errorString(result.error())); - } - } catch (const std::exception &e) { - SetError(e.what()); - } - } - - void OnOK() { - try { - auto results = - Napi::Array::New(Napi::AsyncWorker::Env(), outputs_.size()); - for (size_t i = 0; i < outputs_.size(); i++) { - results.Set(i, - napiValueFromEValue(Napi::AsyncWorker::Env(), outputs_[i])); - } - Resolve(results); - } catch (const std::exception &e) { - Reject(Napi::Error::New(Napi::AsyncWorker::Env(), e.what()).Value()); - } - } - - void 
OnError(const Napi::Error &e) { Reject(e.Value()); } - -private: - ModuleHolder *module_; - const std::string method_; - const std::vector inputs_; - std::vector outputs_; -}; - -/* LoadMethodWorker */ - -class LoadMethodWorker : public Napi::AsyncWorker, - public Napi::Promise::Deferred { -public: - LoadMethodWorker(Napi::Env env, ModuleHolder *module, std::string method) - : Napi::AsyncWorker(env), Napi::Promise::Deferred(env), module_(module), - method_(method) {} - -protected: - void Execute() { - try { - auto error = (*module_)->load_method(method_); - if (error != torch::executor::Error::Ok) { - throw std::runtime_error("Failed to load method: " + - errorString(error)); - } - } catch (const std::exception &e) { - SetError(e.what()); - } - } - - void OnOK() { Resolve(Napi::AsyncWorker::Env().Undefined()); } - - void OnError(const Napi::Error &e) { Reject(e.Value()); } - -private: - ModuleHolder *module_; - const std::string method_; -}; - -/* Module */ - -Module::Module(const Napi::CallbackInfo &info) - : Napi::ObjectWrap(info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - THROW_IF_NOT(env, info.Length() >= 1 && info[0].IsExternal(), - "Expected an external"); - - auto module = info[0].As>(); - module_.reset(std::move(module.Data())); - - auto method_names = (*module_)->method_names(); - THROW_IF_NOT(env, method_names.ok(), - "Failed to get method names: " + - errorString(method_names.error())); - method_names_ = method_names.get(); -} - -// execute(method: string, inputs?: EValue[]): Promise | undefined -Napi::Value Module::Execute(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, module_ != nullptr, "Module is disposed"); - RETURN_IF_NOT(env, info.Length() >= 1 && info[0].IsString(), - "Argument 0 must be a string"); - - std::string method = info[0].As().Utf8Value(); - if (method_names_.count(method) == 0) { - return env.Undefined(); - } - - if (info.Length() > 1) { - 
RETURN_IF_NOT(env, info[1].IsArray(), "Argument 1 must be an array"); - std::vector inputs; - auto inputsArray = info[1].As(); - for (size_t i = 0; i < inputsArray.Length(); i++) { - inputs.push_back(evalueFromNapiValue(inputsArray.Get(i))); - } - - auto worker = new ExecuteWorker(env, module_.get(), method, inputs); - worker->Queue(); - return worker->Promise(); - } else { - auto worker = new ExecuteWorker(env, module_.get(), method, {}); - worker->Queue(); - return worker->Promise(); - } -} - -Napi::Value Module::Load(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, info.Length() >= 1 && info[0].IsString(), - "Argument 0 must be a string"); - - std::string path = info[0].As().Utf8Value(); - auto worker = new LoadWorker(env, path); - worker->Queue(); - return worker->Promise(); -} - -Napi::Value Module::LoadMethod(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, module_ != nullptr, "Module is disposed"); - RETURN_IF_NOT(env, info.Length() >= 1 && info[0].IsString(), - "Argument 0 must be a string"); - - std::string method = info[0].As().Utf8Value(); - if (method_names_.count(method) == 0) { - return env.Undefined(); - } - if ((*module_)->is_method_loaded(method)) { - return env.Undefined(); - } - - auto worker = new LoadMethodWorker(env, module_.get(), method); - worker->Queue(); - return worker->Promise(); -} - -Napi::Value Module::Forward(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, module_ != nullptr, "Module is disposed"); - RETURN_IF_NOT(env, method_names_.count("forward") > 0, - "Method 'forward' is not available"); - RETURN_IF_NOT(env, info.Length() >= 1 && info[0].IsArray(), - "Argument 0 must be an array"); - - std::vector inputs; - auto inputsArray = info[0].As(); - for (size_t i = 0; i < inputsArray.Length(); i++) { - 
inputs.push_back(evalueFromNapiValue(inputsArray.Get(i))); - } - - auto worker = new ExecuteWorker(env, module_.get(), "forward", inputs); - worker->Queue(); - return worker->Promise(); -} - -Napi::Value Module::GetMethodNames(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, module_ != nullptr, "Module is disposed"); - - auto result = (*module_)->method_names(); - RETURN_IF_NOT(env, result.ok(), "Failed to get method names: " + - errorString(result.error())); - auto names = result.get(); - auto js_results = Napi::Array::New(env, names.size()); - size_t i = 0; - for (const auto &name : names) { - js_results.Set(i++, Napi::String::New(env, name)); - } - return js_results; -} - -Napi::Value toNapiValue(const Napi::Env &env, const torch::executor::TensorInfo &tensor_info) { - auto shape = tensor_info.sizes(); - auto dtype = tensor_info.scalar_type(); - auto obj = Napi::Object::New(env); - auto shapeArray = Napi::Array::New(env, shape.size()); - for (size_t i = 0; i < shape.size(); i++) { - shapeArray.Set(i, shape[i]); - } - obj.Set("shape", shapeArray); - obj.Set("dtype", dtypeMap.at(dtype)); - return obj; -} - -Napi::Value toNapiValue(const Napi::Env &env, const torch::executor::MethodMeta &meta) { - auto obj = Napi::Object::New(env); - obj.Set("name", meta.name()); - auto inputs = Napi::Array::New(env, meta.num_inputs()); - for (size_t i = 0; i < meta.num_inputs(); i++) { - auto tag = meta.input_tag(i); - if (tag.ok()) { - auto info = Napi::Object::New(env); - info.Set("tag", tagMap.at(tag.get())); - if (tag.get() == torch::executor::Tag::Tensor) { - info.Set("tensor_info", toNapiValue(env, meta.input_tensor_meta(i).get())); - } - inputs.Set(i, info); - } else { - inputs.Set(i, env.Undefined()); - } - } - obj.Set("inputs", inputs); - auto outputs = Napi::Array::New(env, meta.num_outputs()); - for (size_t i = 0; i < meta.num_outputs(); i++) { - auto tag = meta.output_tag(i); - if (tag.ok()) { - auto 
info = Napi::Object::New(env); - info.Set("tag", tagMap.at(tag.get())); - if (tag.get() == torch::executor::Tag::Tensor) { - info.Set("tensor_info", toNapiValue(env, meta.output_tensor_meta(i).get())); - } - outputs.Set(i, info); - } else { - outputs.Set(i, env.Undefined()); - } - } - obj.Set("outputs", outputs); - return obj; -} - -// getMethodMeta(method: string): MethodMeta -Napi::Value Module::GetMethodMeta(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, module_ != nullptr, "Module is disposed"); - RETURN_IF_NOT(env, info.Length() >= 1 && info[0].IsString(), - "Argument 0 must be a string"); - - std::string method = info[0].As().Utf8Value(); - if (method_names_.count(method) == 0) { - return env.Undefined(); - } - - auto result = (*module_)->method_meta(method); - RETURN_IF_NOT(env, result.ok(), "Failed to get method meta: " + - errorString(result.error())); - auto meta = result.get(); - return toNapiValue(env, meta); -} - -void Module::Dispose(const Napi::CallbackInfo &info) { - module_.reset(); -} - -Napi::Object Module::Init(Napi::Env env, Napi::Object exports) { - Napi::Function func = DefineClass( - env, "Module", - {StaticMethod("load", &Module::Load), - InstanceAccessor("method_names", &Module::GetMethodNames, nullptr), - InstanceMethod("loadMethod", &Module::LoadMethod), - InstanceMethod("forward", &Module::Forward), - InstanceMethod("execute", &Module::Execute), - InstanceMethod("getMethodMeta", &Module::GetMethodMeta), - InstanceMethod("dispose", &Module::Dispose)}); - - constructor = Napi::Persistent(func); - constructor.SuppressDestruct(); - exports.Set("Module", func); - - return exports; -} - -Napi::FunctionReference Module::constructor; - -} // namespace executorch::node diff --git a/src/Module.h b/src/Module.h deleted file mode 100644 index 146750f..0000000 --- a/src/Module.h +++ /dev/null @@ -1,50 +0,0 @@ -#pragma once - -#include -#include -#include -#include - -namespace 
executorch { -namespace node { - -// Wrap torch::executor::Module to avoid memory leak -class ModuleHolder { -public: - ModuleHolder(torch::executor::Module *module) { module_.reset(module); } - - torch::executor::Module *operator->() { return module_.get(); } - - torch::executor::Module &operator*() { return *module_; } - -private: - std::unique_ptr module_; -}; - -class Module : public Napi::ObjectWrap { -public: - static Napi::Object Init(Napi::Env env, Napi::Object exports); - - static inline Napi::Object New(Napi::External module) { - return constructor.New({module}); - } - - Module(const Napi::CallbackInfo &info); - -protected: - static Napi::Value Load(const Napi::CallbackInfo &info); - Napi::Value LoadMethod(const Napi::CallbackInfo &info); - Napi::Value Forward(const Napi::CallbackInfo &info); - Napi::Value Execute(const Napi::CallbackInfo &info); - Napi::Value GetMethodNames(const Napi::CallbackInfo &info); - Napi::Value GetMethodMeta(const Napi::CallbackInfo &info); - void Dispose(const Napi::CallbackInfo &info); - -private: - static Napi::FunctionReference constructor; - std::unique_ptr module_; - std::unordered_set method_names_; -}; - -} // namespace node -} // namespace executorch diff --git a/src/Sampler.cpp b/src/Sampler.cpp deleted file mode 100644 index f979186..0000000 --- a/src/Sampler.cpp +++ /dev/null @@ -1,86 +0,0 @@ -#include "common.h" -#include "Sampler.h" -#include "Tensor.h" - -namespace executorch::node { - -// new Sampler(vocab_size: number, temperature?: number = 0.7, top_p?: number = 0.9, seed?: number = 0): Sampler -Sampler::Sampler(const Napi::CallbackInfo &info) - : Napi::ObjectWrap(info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - THROW_IF_NOT(env, info.Length() >= 1, "Expected at least 1 argument"); - THROW_IF_NOT(env, info[0].IsNumber(), "Argument 0 must be a number"); - if (info.Length() >= 2) { - THROW_IF_NOT(env, info[1].IsNumber(), "Argument 1 must be a number"); - } - if (info.Length() >= 3) { - 
THROW_IF_NOT(env, info[2].IsNumber(), "Argument 2 must be a number"); - } - if (info.Length() >= 4) { - THROW_IF_NOT(env, info[3].IsNumber(), "Argument 3 must be a number"); - } - - vocab_size_ = info[0].As().Int32Value(); - double temperature = info.Length() >= 2 ? info[1].As().DoubleValue() : 0.7; - double top_p = info.Length() >= 3 ? info[2].As().DoubleValue() : 0.9; - int64_t seed = info.Length() >= 4 ? info[3].As().Int64Value() : 0; - - sampler_ = std::make_unique(vocab_size_, temperature, top_p, seed); -} - -// sample(tensor: Tensor): number -Napi::Value Sampler::Sample(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - THROW_IF_NOT(env, sampler_ != nullptr, "Sampler is disposed"); - - THROW_IF_NOT(env, info.Length() >= 1, "Expected at least 1 argument"); - THROW_IF_NOT(env, Tensor::IsInstance(info[0]), "Argument 0 must be a Tensor"); - - auto tensor = Napi::ObjectWrap::Unwrap(info[0].As())->GetTensor(); - - // check shape: [1, ?, vocab_size] - THROW_IF_NOT(env, tensor.dim() == 3, "Expected a 3D tensor"); - THROW_IF_NOT(env, tensor.size(0) == 1, "Batch size must be 1"); - THROW_IF_NOT(env, tensor.size(2) == vocab_size_, "Vocab size mismatch"); - - void *data = tensor.mutable_data_ptr(); - - if (tensor.size(1) > 1) { - data = (void *)((char *) data + tensor.element_size() * (tensor.size(1) - 1)); - } - - if (tensor.scalar_type() == exec_aten::ScalarType::Float) { - auto result = sampler_->sample((float *) data); - return Napi::Number::New(env, result); - } else if (tensor.scalar_type() == exec_aten::ScalarType::Half) { - auto result = sampler_->sample((exec_aten::Half *) data); - } else { - THROW_IF_NOT(env, false, "Unsupported tensor type"); - } - return env.Undefined(); -} - -void Sampler::Dispose(const Napi::CallbackInfo &info) { - sampler_.reset(); -} - -Napi::Object Sampler::Init(Napi::Env env, Napi::Object exports) { - Napi::Function func = - DefineClass(env, "Sampler", - {InstanceMethod("sample", 
&Sampler::Sample), - InstanceMethod("dispose", &Sampler::Dispose)}); - - constructor = Napi::Persistent(func); - constructor.SuppressDestruct(); - exports.Set("Sampler", func); - - return exports; -} - -Napi::FunctionReference Sampler::constructor; - -} // namespace executorch::node diff --git a/src/Sampler.h b/src/Sampler.h deleted file mode 100644 index b89d8c6..0000000 --- a/src/Sampler.h +++ /dev/null @@ -1,29 +0,0 @@ -#pragma once - -#include -#include -#include - -namespace executorch { -namespace node { - -class Sampler : public Napi::ObjectWrap { -public: - static Napi::Object Init(Napi::Env env, Napi::Object exports); - - Sampler(const Napi::CallbackInfo &info); - -protected: - Napi::Value Sample(const Napi::CallbackInfo &info); - void Dispose(const Napi::CallbackInfo &info); - - static Napi::Value Concat(const Napi::CallbackInfo &info); - -private: - static Napi::FunctionReference constructor; - int32_t vocab_size_ = 0; - std::unique_ptr sampler_ = nullptr; -}; - -} // namespace node -} // namespace executorch diff --git a/src/Tensor.cpp b/src/Tensor.cpp deleted file mode 100644 index 87e1f04..0000000 --- a/src/Tensor.cpp +++ /dev/null @@ -1,474 +0,0 @@ -#include "common.h" -#include "Tensor.h" -#include "utils.h" -#include -#include - -namespace executorch::node { - -const std::unordered_map dtypeSize = { - {exec_aten::ScalarType::Byte, 1}, - {exec_aten::ScalarType::Char, 1}, - {exec_aten::ScalarType::Short, 2}, - {exec_aten::ScalarType::Int, 4}, - {exec_aten::ScalarType::Long, 8}, - {exec_aten::ScalarType::Float, 4}, - {exec_aten::ScalarType::Double, 8}, - {exec_aten::ScalarType::Bool, 1}}; - -const std::unordered_map dtypeMap = { - {"uint8", exec_aten::ScalarType::Byte}, - {"int8", exec_aten::ScalarType::Char}, - {"int16", exec_aten::ScalarType::Short}, - {"int32", exec_aten::ScalarType::Int}, - {"int64", exec_aten::ScalarType::Long}, - {"float16", exec_aten::ScalarType::Half}, - {"float32", exec_aten::ScalarType::Float}, - {"float64", 
exec_aten::ScalarType::Double}, - {"bool", exec_aten::ScalarType::Bool}}; - -exec_aten::ScalarType getType(std::string dtype) { - auto it = dtypeMap.find(dtype); - if (it == dtypeMap.end()) { - throw std::runtime_error("Unsupported dtype"); - } - return it->second; -} - -std::string getTypeName(exec_aten::ScalarType type) { - for (auto &pair : dtypeMap) { - if (pair.second == type) { - return pair.first; - } - } - throw std::runtime_error("Unsupported dtype"); -} - -void *getData(const Napi::Env &env, const Napi::Value &value, size_t size) { - if (value.IsBuffer()) { - Napi::Buffer buffer = value.As>(); - THROW_IF_NOT(env, buffer.Length() == size, "Invalid buffer size"); - char *data = new char[size]; - memcpy(data, buffer.Data(), size); - return data; - } else if (value.IsTypedArray()) { - Napi::TypedArray typedArray = value.As(); - THROW_IF_NOT(env, typedArray.ByteLength() == size, "Invalid typed array size"); - char *data = new char[size]; - memcpy(data, typedArray.ArrayBuffer().Data(), size); - return data; - } else { - THROW_IF_NOT(env, false, "Expected buffer or typed array"); - } - return nullptr; -} - -size_t calcSize(exec_aten::ScalarType type, size_t rank, exec_aten::SizesType *dims) { - size_t size = dtypeSize.at(type); - for (size_t i = 0; i < rank; i++) { - size *= dims[i]; - } - return size; -} - -Tensor::Tensor(const Napi::CallbackInfo &info) - : Napi::ObjectWrap(info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - if (info.Length() == 0) { - return; - } - - THROW_IF_NOT(env, info.Length() >= 3, "Expected 3 arguments"); - THROW_IF_NOT(env, info[0].IsString(), "Argument 0 must be a string"); - THROW_IF_NOT(env, info[1].IsArray(), "Argument 1 must be an array"); - - std::string dtype = info[0].As().Utf8Value(); - - Napi::Array jsDims = info[1].As(); - size_t rank = jsDims.Length(); - exec_aten::SizesType *dims = new exec_aten::SizesType[rank]; - for (size_t i = 0; i < rank; i++) { - Napi::Value value = jsDims.Get(i); - 
THROW_IF_NOT(env, value.IsNumber(), "Dimension must be a number"); - dims[i] = value.ToNumber().Int32Value(); - } - - try { - auto type = getType(dtype); - tensor_ = std::make_unique(new exec_aten::TensorImpl( - getType(dtype), rank, dims, getData(env, info[2], calcSize(type, rank, dims)))); - } catch (std::exception &e) { - Napi::Error::New(env, e.what()).ThrowAsJavaScriptException(); - } -} - -size_t getSlicePos(Napi::Env &env, Napi::Value val, size_t dimSize, size_t default_value) { - if (val.IsNumber()) { - auto num = val.ToNumber().Int32Value(); - if (num < 0) - num += dimSize; - if (num < 0 || num >= dimSize) { - Napi::TypeError::New(env, "Index out of range").ThrowAsJavaScriptException(); - } - return num; - } else { - return default_value; - } -} - -Napi::Value Tensor::Shape(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - if (tensor_ == nullptr) { - Napi::TypeError::New(env, "Tensor is disposed").ThrowAsJavaScriptException(); - return env.Undefined(); - } - - Napi::Array shape = Napi::Array::New(env, tensor_->dim()); - for (size_t i = 0; i < tensor_->dim(); i++) { - shape.Set(i, Napi::Number::New(env, tensor_->size(i))); - } - - return shape; -} - -Napi::Value Tensor::Dtype(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - return Napi::String::New(env, getTypeName(tensor_->scalar_type())); -} - -Napi::Value Tensor::GetData(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - if (tensor_ == nullptr) { - Napi::TypeError::New(env, "Tensor is disposed").ThrowAsJavaScriptException(); - return env.Undefined(); - } - - size_t size = tensor_->nbytes(); - size_t n_elem = tensor_->numel(); - const void *data = tensor_->const_data_ptr(); - - switch (tensor_->scalar_type()) { - case exec_aten::ScalarType::Byte: { - Napi::Uint8Array array = Napi::Uint8Array::New(env, n_elem); - memcpy(array.Data(), data, size); - return 
array; - } break; - case exec_aten::ScalarType::Char: { - Napi::Int8Array array = Napi::Int8Array::New(env, n_elem); - memcpy(array.Data(), data, size); - return array; - } break; - case exec_aten::ScalarType::Short: { - Napi::Int16Array array = Napi::Int16Array::New(env, n_elem); - memcpy(array.Data(), data, size); - return array; - } break; - case exec_aten::ScalarType::Int: { - Napi::Int32Array array = Napi::Int32Array::New(env, n_elem); - memcpy(array.Data(), data, size); - return array; - } break; - case exec_aten::ScalarType::Long: { - Napi::BigInt64Array array = Napi::BigInt64Array::New(env, n_elem); - memcpy(array.Data(), data, size); - return array; - } - case exec_aten::ScalarType::Float: { - Napi::Float32Array array = Napi::Float32Array::New(env, n_elem); - memcpy(array.Data(), data, size); - return array; - } break; - case exec_aten::ScalarType::Double: { - Napi::Float64Array array = Napi::Float64Array::New(env, n_elem); - memcpy(array.Data(), data, size); - return array; - } break; - case exec_aten::ScalarType::Bool: { - Napi::Array array = Napi::Array::New(env, n_elem); - auto boolData = static_cast(data); - for (size_t i = 0; i < n_elem; i++) { - array.Set(i, Napi::Boolean::New(env, boolData[i])); - } - return array; - } break; - default: - throw std::runtime_error("Unsupported dtype"); - } -} - -void Tensor::SetData(const Napi::CallbackInfo &info, const Napi::Value &value) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - auto data = getData(env, value, tensor_->nbytes()); - memcpy(tensor_->mutable_data_ptr(), data, tensor_->nbytes()); -} - -void Tensor::SetValue(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - THROW_IF_NOT(env, tensor_ != nullptr, "Tensor is disposed"); - THROW_IF_NOT(env, info.Length() == 2, "Expected 2 arguments"); - THROW_IF_NOT(env, info[0].IsArray(), "Argument 0 must be an array"); - THROW_IF_NOT(env, info[1].IsNumber() || info[1].IsBoolean(), "Argument 1 
must be a number or boolean"); - - size_t pos = 0; - - Napi::Array jsPosition = info[0].As(); - size_t rank = tensor_->dim(); - THROW_IF_NOT(env, jsPosition.Length() == rank, "Invalid position"); - - for (size_t i = 0; i < rank; i++) { - Napi::Value value = jsPosition.Get(i); - THROW_IF_NOT(env, value.IsNumber(), "Position must be a number"); - pos += value.ToNumber().Int32Value() * (i == 0 ? 1 : tensor_->size(i - 1)); - } - - void *data = tensor_->mutable_data_ptr(); - switch (tensor_->scalar_type()) { - case exec_aten::ScalarType::Byte: - static_cast(data)[pos] = - (uint8_t)info[1].ToNumber().Int32Value(); - break; - case exec_aten::ScalarType::Char: - static_cast(data)[pos] = - (int8_t)info[1].ToNumber().Int32Value(); - break; - case exec_aten::ScalarType::Short: - static_cast(data)[pos] = - (int16_t)info[1].ToNumber().Int32Value(); - break; - case exec_aten::ScalarType::Int: - static_cast(data)[pos] = info[1].ToNumber().Int32Value(); - break; - case exec_aten::ScalarType::Long: - static_cast(data)[pos] = info[1].ToNumber().Int64Value(); - break; - case exec_aten::ScalarType::Float: - static_cast(data)[pos] = info[1].ToNumber().FloatValue(); - break; - case exec_aten::ScalarType::Half: - static_cast(data)[pos] = info[1].ToNumber().FloatValue(); - break; - case exec_aten::ScalarType::Double: - static_cast(data)[pos] = info[1].ToNumber().DoubleValue(); - break; - case exec_aten::ScalarType::Bool: - static_cast(data)[pos] = info[1].ToBoolean().Value(); - break; - default: - throw std::runtime_error("Unsupported dtype"); - } -} - -// slice(...slice_position: Array>|number>>): Tensor -Napi::Value Tensor::Slice(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, tensor_ != nullptr, "Tensor is disposed"); - RETURN_IF_NOT(env, info.Length() >= 1, "Expected at least 1 argument"); - RETURN_IF_NOT(env, info.Length() <= tensor_->dim(), "Invalid position, too many dimensions"); - - size_t rank = 
tensor_->dim(); - size_t n_elem = 1; - - std::vector startVec(rank); - std::vector endVec(rank); - for (size_t i = 0; i < rank; i++) { - Napi::Value sliceDim = info.Length() > i ? info[i] : env.Undefined(); - auto dimSize = tensor_->size(i); - if (sliceDim.IsArray()) { - Napi::Array dim = sliceDim.As(); - RETURN_IF_NOT(env, dim.Length() == 2, "Invalid slice position, expected 2 elements"); - startVec[i] = getSlicePos(env, dim.Get(Napi::Number::New(env, 0)), dimSize, 0); - endVec[i] = - getSlicePos(env, dim.Get(Napi::Number::New(env, 1)), dimSize, dimSize); - } else if (sliceDim.IsNumber()) { - size_t pos = getSlicePos(env, sliceDim, dimSize, 0); - startVec[i] = pos; - endVec[i] = pos + 1; - } else if (sliceDim.IsUndefined() || sliceDim.IsNull()) { - startVec[i] = 0; - endVec[i] = dimSize; - } - n_elem *= endVec[i] - startVec[i]; - } - - ssize_t elem_size = tensor_->element_size(); - char *newData = new char[n_elem * elem_size]; - - const char *data = (char*) tensor_->const_data_ptr(); - - for (size_t i = 0; i < n_elem; i++) { - size_t offset = 0; - size_t pos = i; - for (size_t j = 0; j < rank; j++) { - size_t stride = tensor_->size(j) - 1; - size_t dim_size = endVec[j] - startVec[j]; - size_t dim_pos = pos % dim_size; - pos /= dim_size; - offset += (startVec[j] + dim_pos) * stride; - } - memcpy(newData + i * elem_size, - data + offset * elem_size, - elem_size); - } - - auto dims = new exec_aten::SizesType[rank]; - for (size_t i = 0; i < rank; i++) { - dims[i] = endVec[i] - startVec[i]; - } - exec_aten::Tensor tensor( - new exec_aten::TensorImpl(tensor_->scalar_type(), rank, dims, newData)); - return Tensor::New(tensor); -} - -// static concat(tensors: Array, axis?: number = 0): Tensor -Napi::Value Tensor::Concat(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, info.Length() >= 1, "Expected at least 1 argument"); - RETURN_IF_NOT(env, info[0].IsArray(), "Argument 0 must be an array"); - if 
(info.Length() > 1) { - RETURN_IF_NOT(env, info[1].IsNumber(), "Argument 1 must be a number"); - } - - auto js_tensors = info[0].As(); - size_t n_tensors = js_tensors.Length(); - - RETURN_IF_NOT(env, n_tensors > 0, "Expected non-empty array"); - - size_t axis = info.Length() > 1 ? info[1].ToNumber().Int32Value() : 0; - std::vector tensors(n_tensors); - std::vector sizes; - size_t rank = 0; - exec_aten::ScalarType dtype; - - for (size_t i = 0; i < n_tensors; i++) { - auto item = js_tensors.Get(i); - RETURN_IF_NOT(env, Tensor::IsInstance(item), "Item is not a Tensor"); - auto tensor = Napi::ObjectWrap::Unwrap(item.As())-> - GetTensorPtr(); - RETURN_IF_NOT(env, tensor != nullptr, "Tensor is disposed"); - tensors[i] = tensor; - if (i == 0) { - dtype = tensor->scalar_type(); - rank = tensor->dim(); - sizes.resize(rank); - for (size_t j = 0; j < rank; j++) { - sizes[j] = tensor->size(j); - } - RETURN_IF_NOT(env, axis < rank, "Invalid axis"); - } else if (dtype != tensor->scalar_type()) { - RETURN_IF_NOT(env, false, "Tensors have different dtypes"); - } else if (rank != tensor->dim()) { - RETURN_IF_NOT(env, false, "Tensors have different ranks"); - } else { - for (size_t j = 0; j < rank; j++) { - if (j == axis) { - sizes[j] += tensor->size(j); - continue; - } - RETURN_IF_NOT(env, sizes[j] == tensor->size(j) || j == axis, "Tensors have different sizes"); - } - } - } - - size_t n_elem = 1; - for (size_t i = 0; i < rank; i++) { - n_elem *= sizes[i]; - } - ssize_t elem_size = tensors[0]->element_size(); - char *newData = new char[n_elem * elem_size]; - - size_t trip_step = 1; - for (size_t j = 0; j < axis; j++) { - trip_step *= tensors[0]->size(j); - } - - size_t chunk_size = elem_size; - for (size_t k = axis; k < rank; k++) { - chunk_size *= tensors[0]->size(k); - } - - for (size_t i = 0; i < trip_step; i++) { - for (size_t j = 0; j < n_tensors; j++) { - const char *data = (char*) tensors[j]->const_data_ptr(); - memcpy(newData + j * chunk_size + - i * n_tensors * chunk_size, 
- data + chunk_size * i, - chunk_size); - } - } - - auto *dims = new exec_aten::SizesType[rank]; - for (size_t i = 0; i < rank; i++) { - dims[i] = sizes[i]; - } - exec_aten::Tensor tensor( - new exec_aten::TensorImpl(dtype, rank, dims, newData)); - return Tensor::New(tensor); -} - -Napi::Value Tensor::Reshape(const Napi::CallbackInfo &info) { - Napi::Env env = info.Env(); - Napi::HandleScope scope(env); - - RETURN_IF_NOT(env, tensor_ != nullptr, "Tensor is disposed"); - RETURN_IF_NOT(env, info.Length() == 1, "Expected 1 argument"); - RETURN_IF_NOT(env, info[0].IsArray(), "Argument 0 must be an array"); - - auto jsDims = info[0].As(); - size_t rank = jsDims.Length(); - exec_aten::SizesType *dims = new exec_aten::SizesType[rank]; - size_t n_elem = 1; - for (size_t i = 0; i < rank; i++) { - Napi::Value value = jsDims.Get(i); - RETURN_IF_NOT(env, value.IsNumber(), "Dimension must be a number"); - dims[i] = value.ToNumber().Int32Value(); - n_elem *= dims[i]; - } - RETURN_IF_NOT(env, n_elem == tensor_->numel(), "Expected the same number of elements"); - - tensor_ = std::make_unique( - new exec_aten::TensorImpl(tensor_->scalar_type(), rank, dims, tensor_->mutable_data_ptr())); - return info.This(); -} - -void Tensor::Dispose(const Napi::CallbackInfo &info) { - tensor_.reset(); -} - -Napi::Object Tensor::Init(Napi::Env env, Napi::Object exports) { - Napi::Function func = - DefineClass(env, "Tensor", - {StaticMethod("concat", &Tensor::Concat), - InstanceAccessor("shape", &Tensor::Shape, nullptr), - InstanceAccessor("dtype", &Tensor::Dtype, nullptr), - InstanceAccessor("data", &Tensor::GetData, &Tensor::SetData), - InstanceMethod("setValue", &Tensor::SetValue), - InstanceMethod("slice", &Tensor::Slice), - InstanceMethod("reshape", &Tensor::Reshape), - InstanceMethod("dispose", &Tensor::Dispose)}); - - constructor = Napi::Persistent(func); - constructor.SuppressDestruct(); - exports.Set("Tensor", func); - - return exports; -} - -Napi::FunctionReference Tensor::constructor; - 
-} // namespace executorch::node diff --git a/src/Tensor.h b/src/Tensor.h deleted file mode 100644 index 0091595..0000000 --- a/src/Tensor.h +++ /dev/null @@ -1,49 +0,0 @@ -#pragma once - -#include -#include -#include - -namespace executorch { -namespace node { - -class Tensor : public Napi::ObjectWrap { -public: - static Napi::Object Init(Napi::Env env, Napi::Object exports); - - Tensor(const Napi::CallbackInfo &info); - - static inline bool IsInstance(const Napi::Value &value) { - return value.IsObject() && - value.As().InstanceOf(constructor.Value()); - } - - static Napi::Object New(const exec_aten::Tensor &tensor) { - auto instance = constructor.New({}); - auto *obj = Napi::ObjectWrap::Unwrap(instance); - obj->tensor_ = std::make_unique(std::move(tensor.unsafeGetTensorImpl())); - return instance; - } - - inline exec_aten::Tensor GetTensor() { return *tensor_; } - inline exec_aten::Tensor* GetTensorPtr() { return tensor_.get(); } - -protected: - Napi::Value Shape(const Napi::CallbackInfo &info); - Napi::Value Dtype(const Napi::CallbackInfo &info); - Napi::Value GetData(const Napi::CallbackInfo &info); - void SetData(const Napi::CallbackInfo &info, const Napi::Value &value); - void SetValue(const Napi::CallbackInfo &info); - Napi::Value Slice(const Napi::CallbackInfo &info); - Napi::Value Reshape(const Napi::CallbackInfo &info); - void Dispose(const Napi::CallbackInfo &info); - - static Napi::Value Concat(const Napi::CallbackInfo &info); - -private: - static Napi::FunctionReference constructor; - std::unique_ptr tensor_ = nullptr; -}; - -} // namespace node -} // namespace executorch diff --git a/src/addon.cc b/src/addon.cc deleted file mode 100644 index 834f290..0000000 --- a/src/addon.cc +++ /dev/null @@ -1,13 +0,0 @@ -#include "Module.h" -#include "Tensor.h" -#include "Sampler.h" -#include - -Napi::Object Init(Napi::Env env, Napi::Object exports) { - exports = executorch::node::Tensor::Init(env, exports); - exports = executorch::node::Module::Init(env, 
exports); - exports = executorch::node::Sampler::Init(env, exports); - return exports; -} - -NODE_API_MODULE(executorch, Init) diff --git a/src/common.h b/src/common.h deleted file mode 100644 index 9878f31..0000000 --- a/src/common.h +++ /dev/null @@ -1,12 +0,0 @@ -#pragma once - -#define RETURN_IF_NOT(env, condition, message) \ - if (!(condition)) { \ - Napi::Error::New(env, message).ThrowAsJavaScriptException(); \ - return env.Undefined(); \ - } - -#define THROW_IF_NOT(env, condition, message) \ - if (!(condition)) { \ - Napi::Error::New(env, message).ThrowAsJavaScriptException(); \ - } diff --git a/src/eterror.rs b/src/eterror.rs new file mode 100644 index 0000000..ab0795b --- /dev/null +++ b/src/eterror.rs @@ -0,0 +1,66 @@ +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum ETError { + Ok = 0, + Internal = 1, + InvalidState = 2, + EndOfMethod = 3, + NotSupported = 16, + NotImplemented = 17, + InvalidArgument = 18, + InvalidType = 19, + OperatorMissing = 20, + NotFound = 32, + MemoryAllocationFailed = 33, + AccessFailed = 34, + InvalidProgram = 35, + DelegateInvalidCompatibility = 48, + DelegateMemoryAllocationFailed = 49, + DelegateInvalidHandle = 50, +} + +impl From for ETError { + fn from(value: i32) -> Self { + match value { + 0 => ETError::Ok, + 1 => ETError::Internal, + 2 => ETError::InvalidState, + 3 => ETError::EndOfMethod, + 16 => ETError::NotSupported, + 17 => ETError::NotImplemented, + 18 => ETError::InvalidArgument, + 19 => ETError::InvalidType, + 20 => ETError::OperatorMissing, + 32 => ETError::NotFound, + 33 => ETError::MemoryAllocationFailed, + 34 => ETError::AccessFailed, + 35 => ETError::InvalidProgram, + 48 => ETError::DelegateInvalidCompatibility, + 49 => ETError::DelegateMemoryAllocationFailed, + 50 => ETError::DelegateInvalidHandle, + _ => panic!("Unknown error code"), + } + } +} + +impl From for i32 { + fn from(value: ETError) -> Self { + match value { + ETError::Ok => 0, + ETError::Internal => 1, + ETError::InvalidState => 2, + 
ETError::EndOfMethod => 3, + ETError::NotSupported => 16, + ETError::NotImplemented => 17, + ETError::InvalidArgument => 18, + ETError::InvalidType => 19, + ETError::OperatorMissing => 20, + ETError::NotFound => 32, + ETError::MemoryAllocationFailed => 33, + ETError::AccessFailed => 34, + ETError::InvalidProgram => 35, + ETError::DelegateInvalidCompatibility => 48, + ETError::DelegateMemoryAllocationFailed => 49, + ETError::DelegateInvalidHandle => 50, + } + } +} diff --git a/src/evalue.hpp b/src/evalue.hpp new file mode 100644 index 0000000..c2d2d7b --- /dev/null +++ b/src/evalue.hpp @@ -0,0 +1,13 @@ +#include +#include + +using Tag = torch::executor::Tag; + +const std::unordered_map TAG_ID_MAP = { + {Tag::None, 0}, {Tag::Tensor, 1}, {Tag::String, 2}, + {Tag::Double, 3}, {Tag::Int, 4}, {Tag::Bool, 5}, + {Tag::ListBool, 6}, {Tag::ListDouble, 7}, {Tag::ListInt, 8}, + {Tag::ListTensor, 9}, {Tag::ListScalar, 10}, {Tag::ListOptionalTensor, 11}, +}; + +inline int32_t tag_to_int(Tag tag) { return TAG_ID_MAP.at(tag); } diff --git a/src/evalue.rs b/src/evalue.rs new file mode 100644 index 0000000..0ebdd54 --- /dev/null +++ b/src/evalue.rs @@ -0,0 +1,261 @@ +use crate::create_bigint_array; +use crate::create_bool_array; +use crate::create_num_array; +use crate::eterror::ETError; +use crate::evalue_tag::EValueTag; +use crate::tensor::AtenTensor; +use crate::tensor::Tensor; +use cpp::{cpp, cpp_class}; +use neon::prelude::*; +use neon::types::JsBigInt; + +cpp! 
{{ + #include "executorch/runtime/core/evalue.h" + #include "src/evalue.hpp" +}} + +cpp_class!(pub unsafe struct EValue as "torch::executor::EValue"); + +impl EValue { + // Constructors + + pub fn from_tensor(value: &Tensor) -> Self { + unsafe { + cpp!([value as "const TensorHolder*"] -> EValue as "torch::executor::EValue" { + return torch::executor::EValue(value->get_tensor()); + }) + } + } + + pub fn from_string(value: String) -> Self { + let cstr = std::ffi::CString::new(value).unwrap(); + unsafe { + let cstr_ptr = cstr.as_ptr(); + cpp!([cstr_ptr as "const char*"] -> EValue as "torch::executor::EValue" { + return torch::executor::EValue(cstr_ptr, strlen(cstr_ptr)); + }) + } + } + + pub fn from_double(value: f64) -> Self { + unsafe { + cpp!([value as "double"] -> EValue as "torch::executor::EValue" { + return torch::executor::EValue(value); + }) + } + } + + pub fn from_int(value: i64) -> Self { + unsafe { + cpp!([value as "int64_t"] -> EValue as "torch::executor::EValue" { + return torch::executor::EValue(value); + }) + } + } + + pub fn from_bool(value: bool) -> Self { + unsafe { + cpp!([value as "bool"] -> EValue as "torch::executor::EValue" { + return torch::executor::EValue(value); + }) + } + } + + pub fn null() -> Self { + unsafe { + cpp!([] -> EValue as "torch::executor::EValue" { + return torch::executor::EValue(); + }) + } + } + + // Methods + + pub fn tag(&self) -> EValueTag { + unsafe { + cpp!([self as "const torch::executor::EValue*"] -> i32 as "int32_t" { + return tag_to_int(self->tag); + }) + } + .into() + } + + pub fn get_tensor(&self) -> Tensor { + let aten_tensor = unsafe { + cpp!([self as "const torch::executor::EValue*"] -> AtenTensor as "exec_aten::Tensor" { + return self->toTensor(); + }) + }; + Tensor::from(aten_tensor) + } + + pub fn get_string(&self) -> &str { + let len: usize = 0; + let c_str = unsafe { + let len_ptr = &len as *const usize; + cpp!([self as "const torch::executor::EValue*", len_ptr as "size_t*"] -> *const 
std::os::raw::c_char as "const char*" { + auto str_view = self->toString(); + *len_ptr = str_view.size(); + return str_view.data(); + }) + }; + unsafe { std::str::from_utf8(std::slice::from_raw_parts(c_str as *const u8, len)).unwrap() } + } + + pub fn get_double(&self) -> f64 { + unsafe { + cpp!([self as "const torch::executor::EValue*"] -> f64 as "double" { + return self->toDouble(); + }) + } + } + + pub fn get_int(&self) -> i64 { + unsafe { + cpp!([self as "const torch::executor::EValue*"] -> i64 as "int64_t" { + return self->toInt(); + }) + } + } + + pub fn get_bool(&self) -> bool { + unsafe { + cpp!([self as "const torch::executor::EValue*"] -> bool as "bool" { + return self->toBool(); + }) + } + } + + pub fn get_bool_list(&self) -> &[bool] { + let len: usize = 0; + let c_data = unsafe { + let len_ptr = &len as *const usize; + cpp!([self as "const torch::executor::EValue*", len_ptr as "size_t*"] -> *const bool as "const bool*" { + auto list = self->toBoolList(); + *len_ptr = list.size(); + return list.data(); + }) + }; + unsafe { std::slice::from_raw_parts(c_data, len) } + } + + pub fn get_double_list(&self) -> &[f64] { + let len: usize = 0; + let c_data = unsafe { + let len_ptr = &len as *const usize; + cpp!([self as "const torch::executor::EValue*", len_ptr as "size_t*"] -> *const f64 as "const double*" { + auto list = self->toDoubleList(); + *len_ptr = list.size(); + return list.data(); + }) + }; + unsafe { std::slice::from_raw_parts(c_data, len) } + } + + pub fn get_int_list(&self) -> &[i64] { + let len: usize = 0; + let c_data = unsafe { + let len_ptr = &len as *const usize; + cpp!([self as "const torch::executor::EValue*", len_ptr as "size_t*"] -> *const i64 as "const int64_t*" { + auto list = self->toIntList(); + *len_ptr = list.size(); + return list.data(); + }) + }; + unsafe { std::slice::from_raw_parts(c_data, len) } + } + + // JS conversion + + pub fn to_js<'cx, C>(&self, cx: &mut C) -> JsResult<'cx, JsObject> + where + C: Context<'cx>, + { + let tag 
= self.tag(); + let js_evalue = cx.empty_object(); + let js_tag = cx.number(tag.clone() as i32); + js_evalue.set(cx, "tag", js_tag)?; + let value: Handle<'_, JsValue> = match tag { + EValueTag::None => cx.null().as_value(cx), + EValueTag::Tensor => self.get_tensor().to_js(cx)?.as_value(cx), + EValueTag::String => cx.string(self.get_string()).as_value(cx), + EValueTag::Double => cx.number(self.get_double()).as_value(cx), + EValueTag::Int => JsBigInt::from_i64(cx, self.get_int()).as_value(cx), + EValueTag::Bool => cx.boolean(self.get_bool()).as_value(cx), + EValueTag::ListBool => { + let list = self.get_bool_list(); + create_bool_array!(cx, cx, js_list, list); + js_list.as_value(cx) + } + EValueTag::ListDouble => { + let list = self.get_double_list(); + create_num_array!(cx, cx, js_list, list); + js_list.as_value(cx) + } + EValueTag::ListInt => { + let list = self.get_int_list(); + create_bigint_array!(cx, cx, js_list, list); + js_list.as_value(cx) + } + _ => cx.undefined().as_value(cx), + }; + js_evalue.set(cx, "data", value)?; + Ok(js_evalue) + } + + pub fn from_js<'cx, C>( + cx: &mut FunctionContext, + js_evalue: Handle<'cx, JsObject>, + ) -> Result + where + C: Context<'cx>, + { + if let Ok(tag) = js_evalue.get::(cx, "tag") { + match tag.value(cx) as i32 { + 0 => Ok(Self::null()), + 1 => { + if let Ok(value) = js_evalue.get::, _, _>(cx, "data") { + Ok(Self::from_tensor(&value)) + } else { + Err(ETError::InvalidArgument) + } + } + 2 => { + if let Ok(value) = js_evalue.get::(cx, "data") { + Ok(Self::from_string(value.value(cx))) + } else { + Err(ETError::InvalidArgument) + } + } + 3 => { + if let Ok(value) = js_evalue.get::(cx, "data") { + Ok(Self::from_double(value.value(cx))) + } else { + Err(ETError::InvalidArgument) + } + } + 4 => { + if let Ok(value) = js_evalue.get::(cx, "data") { + if let Ok(value) = value.to_i64(cx) { + Ok(Self::from_int(value)) + } else { + Err(ETError::InvalidArgument) + } + } else { + Err(ETError::InvalidArgument) + } + } + 5 => { + if 
let Ok(value) = js_evalue.get::(cx, "data") { + Ok(Self::from_bool(value.value(cx))) + } else { + Err(ETError::InvalidArgument) + } + } + _ => Err(ETError::NotSupported), + } + } else { + Err(ETError::InvalidArgument) + } + } +} diff --git a/src/evalue_tag.rs b/src/evalue_tag.rs new file mode 100644 index 0000000..e9d4c00 --- /dev/null +++ b/src/evalue_tag.rs @@ -0,0 +1,54 @@ +#[derive(Clone, PartialEq)] +pub enum EValueTag { + None = 0, + Tensor = 1, + String = 2, + Double = 3, + Int = 4, + Bool = 5, + ListBool = 6, + ListDouble = 7, + ListInt = 8, + ListTensor = 9, + ListScalar = 10, + ListOptionalTensor = 11, +} + +impl From for EValueTag { + fn from(value: i32) -> Self { + match value { + 0 => EValueTag::None, + 1 => EValueTag::Tensor, + 2 => EValueTag::String, + 3 => EValueTag::Double, + 4 => EValueTag::Int, + 5 => EValueTag::Bool, + 6 => EValueTag::ListBool, + 7 => EValueTag::ListDouble, + 8 => EValueTag::ListInt, + 9 => EValueTag::ListTensor, + 10 => EValueTag::ListScalar, + 11 => EValueTag::ListOptionalTensor, + _ => panic!("Unknown EValueTag"), + } + } +} + +impl From for i32 { + fn from(value: EValueTag) -> Self { + match value { + EValueTag::None => 0, + EValueTag::Tensor => 1, + EValueTag::String => 2, + EValueTag::Double => 3, + EValueTag::Int => 4, + EValueTag::Bool => 5, + EValueTag::ListBool => 6, + EValueTag::ListDouble => 7, + EValueTag::ListInt => 8, + EValueTag::ListTensor => 9, + EValueTag::ListScalar => 10, + EValueTag::ListOptionalTensor => 11, + } + } +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..d0a4207 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,31 @@ +mod eterror; +mod evalue; +mod evalue_tag; +mod macros; +mod method_meta; +mod module; +mod tensor; +mod tensor_type; + +use neon::prelude::*; + +#[neon::main] +fn main(mut cx: ModuleContext) -> NeonResult<()> { + // Tensor + cx.export_function("createTensor", tensor::create)?; + cx.export_function("tensorGetDtype", tensor::get_dtype)?; + 
cx.export_function("tensorGetShape", tensor::get_shape)?; + cx.export_function("tensorGetData", tensor::get_data)?; + cx.export_function("tensorSetData", tensor::set_data)?; + cx.export_function("tensorSetValue", tensor::set_value)?; + cx.export_function("tensorConcat", tensor::concat)?; + cx.export_function("tensorSlice", tensor::slice)?; + cx.export_function("tensorReshape", tensor::reshape)?; + // Module + cx.export_function("moduleLoad", module::load)?; + cx.export_function("moduleLoadMethod", module::load_method)?; + cx.export_function("moduleExecute", module::execute)?; + cx.export_function("moduleGetMethodMeta", module::get_method_meta)?; + cx.export_function("moduleMethodNames", module::method_names)?; + Ok(()) +} diff --git a/src/macros.rs b/src/macros.rs new file mode 100644 index 0000000..782046b --- /dev/null +++ b/src/macros.rs @@ -0,0 +1,75 @@ +#[macro_export] +macro_rules! arg_get_value_vec { + ($cx:ident, $index:expr, $js_elem_type:ty, $elem_type:ty) => { + $cx.argument::($index)? + .to_vec(&mut $cx)? + .iter() + .map(|value| { + value + .downcast::<$js_elem_type, _>(&mut $cx) + .unwrap() + .value(&mut $cx) as $elem_type + }) + .collect() + }; +} + +#[macro_export] +macro_rules! arg_get_value { + ($cx:ident, $index:expr, $js_type:ty, $type:ty) => { + $cx.argument::<$js_type>($index)?.value(&mut $cx) as $type + }; +} + +#[macro_export] +macro_rules! array_get_value { + ($cx:ident, $array:ident, $index:expr, $js_elem_type:ty, $elem_type:ty) => { + $array + .get::<$js_elem_type, _, _>(&mut $cx, $index)? + .value(&mut $cx) as $elem_type + }; +} + +#[macro_export] +macro_rules! create_bool_array { + ($cx:ident, $mut_cx:expr, $array:ident, $vec:expr) => { + let $array = $cx.empty_array(); + for (i, &elem) in $vec.iter().enumerate() { + let elem = $cx.boolean(elem); + $array.set($mut_cx, i as u32, elem)?; + } + }; +} + +#[macro_export] +macro_rules! 
create_num_array { + ($cx:ident, $mut_cx:expr, $array:ident, $vec:expr) => { + let $array = $cx.empty_array(); + for (i, &elem) in $vec.iter().enumerate() { + let elem = $cx.number(elem); + $array.set($mut_cx, i as u32, elem)?; + } + }; +} + +#[macro_export] +macro_rules! create_bigint_array { + ($cx:ident, $mut_cx:expr, $array:ident, $vec:expr) => { + let $array = $cx.empty_array(); + for (i, &elem) in $vec.iter().enumerate() { + let elem = JsBigInt::from_i64($mut_cx, elem); + $array.set($mut_cx, i as u32, elem)?; + } + }; +} + +#[macro_export] +macro_rules! create_string_array { + ($cx:ident, $mut_cx:expr, $array:ident, $vec:expr) => { + let $array = $cx.empty_array(); + for (i, elem) in $vec.iter().enumerate() { + let elem = $cx.string(elem); + $array.set($mut_cx, i as u32, elem)?; + } + }; +} diff --git a/src/method_meta.rs b/src/method_meta.rs new file mode 100644 index 0000000..b82e41c --- /dev/null +++ b/src/method_meta.rs @@ -0,0 +1,162 @@ +use crate::create_num_array; +use crate::evalue_tag::EValueTag; +use crate::tensor_type::TensorType; +use cpp::{cpp, cpp_class}; +use neon::prelude::*; + +cpp! 
{{ + #include "executorch/runtime/executor/method_meta.h" +}} + +pub struct TensorInfo<'a> { + dtype: TensorType, + shape: &'a [i32], +} + +cpp_class!(pub unsafe struct MethodMeta as "torch::executor::MethodMeta"); + +impl MethodMeta { + pub fn name(&self) -> String { + let c_str = unsafe { + cpp!([self as "const torch::executor::MethodMeta*"] -> *const libc::c_char as "const char*" { + return self->name(); + }) + }; + unsafe { + std::ffi::CStr::from_ptr(c_str) + .to_str() + .unwrap() + .to_string() + } + } + + pub fn num_inputs(&self) -> usize { + println!("num_inputs"); + unsafe { + cpp!([self as "const torch::executor::MethodMeta*"] -> usize as "size_t" { + return self->num_inputs(); + }) + } + } + + pub fn num_outputs(&self) -> usize { + unsafe { + cpp!([self as "const torch::executor::MethodMeta*"] -> usize as "size_t" { + return self->num_outputs(); + }) + } + } + + pub fn input_tag(&self, index: usize) -> EValueTag { + unsafe { + cpp!([self as "const torch::executor::MethodMeta*", index as "size_t"] -> i32 as "int32_t" { + return static_cast(self->input_tag(index).get()); + }) + }.into() + } + + pub fn output_tag(&self, index: usize) -> EValueTag { + unsafe { + cpp!([self as "const torch::executor::MethodMeta*", index as "size_t"] -> i32 as "int32_t" { + return static_cast(self->output_tag(index).get()); + }) + }.into() + } + + pub fn input_tensor_info(&self, index: usize) -> TensorInfo { + let mut dtype: i32 = 0; + let mut dim: usize = 0; + let dtype_ptr: *mut i32 = &mut dtype; + let dim_ptr: *mut usize = &mut dim; + let c_shape = unsafe { + cpp!([ + self as "const torch::executor::MethodMeta*", + index as "size_t", + dtype_ptr as "int32_t*", + dim_ptr as "size_t*" + ] -> *const i32 as "const int32_t*" { + auto tensor_info = self->input_tensor_meta(index).get(); + *dtype_ptr = static_cast(tensor_info.scalar_type()); + auto shape = tensor_info.sizes(); + *dim_ptr = shape.size(); + return shape.data(); + }) + }; + TensorInfo { + dtype: dtype.into(), + shape: 
unsafe { std::slice::from_raw_parts(c_shape, dim) }, + } + } + + pub fn output_tensor_info(&self, index: usize) -> TensorInfo { + let mut dtype: i32 = 0; + let mut dim: usize = 0; + let dtype_ptr: *mut i32 = &mut dtype; + let dim_ptr: *mut usize = &mut dim; + let c_shape = unsafe { + cpp!([ + self as "const torch::executor::MethodMeta*", + index as "size_t", + dtype_ptr as "int32_t*", + dim_ptr as "size_t*" + ] -> *const i32 as "const int32_t*" { + auto tensor_info = self->output_tensor_meta(index).get(); + *dtype_ptr = static_cast(tensor_info.scalar_type()); + auto shape = tensor_info.sizes(); + *dim_ptr = shape.size(); + return shape.data(); + }) + }; + TensorInfo { + dtype: dtype.into(), + shape: unsafe { std::slice::from_raw_parts(c_shape, dim) }, + } + } + + pub fn to_js<'cx, C>(&self, cx: &mut C) -> JsResult<'cx, JsObject> + where + C: Context<'cx>, + { + let obj = cx.empty_object(); + let name = cx.string(self.name()); + obj.set(cx, "name", name)?; + println!("name: {}", self.name()); + let inputs = cx.empty_array(); + for i in 0..self.num_inputs() { + let input = cx.empty_object(); + let tag = self.input_tag(i); + let tag_num = cx.number(tag.clone() as i32); + input.set(cx, "tag", tag_num)?; + if tag == EValueTag::Tensor { + let tensor_info = self.input_tensor_info(i); + let tensor = cx.empty_object(); + let dtype_num = cx.number(tensor_info.dtype as i32); + tensor.set(cx, "dtype", dtype_num)?; + create_num_array!(cx, cx, shape, tensor_info.shape); + tensor.set(cx, "shape", shape)?; + input.set(cx, "tensor_info", tensor)?; + } + inputs.set(cx, i as u32, input)?; + } + obj.set(cx, "inputs", inputs)?; + let outputs = cx.empty_array(); + for i in 0..self.num_outputs() { + let output = cx.empty_object(); + let tag = self.output_tag(i); + let tag_num = cx.number(tag.clone() as i32); + output.set(cx, "tag", tag_num)?; + if tag == EValueTag::Tensor { + let tensor_info = self.output_tensor_info(i); + let tensor = cx.empty_object(); + let dtype_num = 
cx.number(tensor_info.dtype as i32); + tensor.set(cx, "dtype", dtype_num)?; + create_num_array!(cx, cx, shape, tensor_info.shape); + tensor.set(cx, "shape", shape)?; + output.set(cx, "tensor_info", tensor)?; + } + outputs.set(cx, i as u32, output)?; + } + obj.set(cx, "outputs", outputs)?; + Ok(obj) + } +} diff --git a/src/module.hpp b/src/module.hpp new file mode 100644 index 0000000..35a07dc --- /dev/null +++ b/src/module.hpp @@ -0,0 +1,40 @@ +#include +#include +#include +#include +#include +#include + +class ModuleHolder { + using Module = torch::executor::Module; + +public: + ModuleHolder(std::string path, Module::LoadMode load_mode = Module::LoadMode::MmapUseMlock) { + module_ = std::make_shared(path, load_mode); + auto method_names = module_->method_names(); + if (method_names.ok()) { + auto names = method_names.get(); + for (auto &name : names) { + method_names_.push_back(name); + } + } + } + + ModuleHolder(const ModuleHolder &other) { + module_ = other.module_; + method_names_ = other.method_names_; + } + + Module &get_module() const { return *module_; } + + const std::vector &method_names() const { return method_names_; } + + bool has_method(const std::string &method_name) const { + return std::find(method_names_.begin(), method_names_.end(), method_name) != + method_names_.end(); + } + +private: + std::shared_ptr module_ = nullptr; + std::vector method_names_; +}; diff --git a/src/module.rs b/src/module.rs new file mode 100644 index 0000000..e1c4e24 --- /dev/null +++ b/src/module.rs @@ -0,0 +1,243 @@ +use crate::arg_get_value; +use crate::create_string_array; +use crate::eterror::ETError; +use crate::evalue::EValue; +use crate::method_meta::MethodMeta; +use cpp::{cpp, cpp_class}; +use neon::prelude::*; +use neon::types::Finalize; + +cpp! 
{{ + #include "src/module.hpp" +}} + +cpp_class!(pub unsafe struct Module as "ModuleHolder"); + +impl Finalize for Module {} + +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum ModuleLoadMode { + File = 0, + Mmap = 1, + MmapUseMlock = 2, + MmapUseMlockIgnoreErrors = 3, +} + +impl From for ModuleLoadMode { + fn from(value: i32) -> Self { + match value { + 0 => ModuleLoadMode::File, + 1 => ModuleLoadMode::Mmap, + 2 => ModuleLoadMode::MmapUseMlock, + 3 => ModuleLoadMode::MmapUseMlockIgnoreErrors, + _ => panic!("Invalid ModuleLoadMode value"), + } + } +} + +impl Module { + pub fn copy(&self) -> Self { + unsafe { + cpp!([self as "ModuleHolder*"] -> Module as "ModuleHolder" { + return *self; + }) + } + } + + pub fn new(path: String, load_mode: ModuleLoadMode) -> Self { + let cpath = std::ffi::CString::new(path.as_bytes()).unwrap(); + let load_mode = load_mode as u8; + unsafe { + let cpath_ptr = cpath.as_ptr(); + cpp!([cpath_ptr as "const char*", load_mode as "uint8_t"] -> Module as "ModuleHolder" { + return ModuleHolder( + std::string(cpath_ptr), + static_cast(load_mode) + ); + }) + } + } + + pub fn load(&self) -> Result<(), ETError> { + let code = unsafe { + cpp!([self as "ModuleHolder*"] -> i32 as "int32_t" { + return static_cast(self->get_module().load()); + }) + }; + match code { + 0 => Ok(()), + _ => Err(ETError::from(code)), + } + } + + pub fn load_method(&self, name: String) -> Result<(), ETError> { + let cname = std::ffi::CString::new(name).unwrap(); + let code = unsafe { + let cname_ptr = cname.as_ptr(); + cpp!([self as "ModuleHolder*", cname_ptr as "const char*"] -> i32 as "int32_t" { + return static_cast(self->get_module().load_method(std::string(cname_ptr))); + }) + }; + match code { + 0 => Ok(()), + _ => Err(ETError::from(code)), + } + } + + pub fn has_method(&self, method_name: String) -> bool { + let cname = std::ffi::CString::new(method_name).unwrap(); + let cname_ptr = cname.as_ptr(); + let result = unsafe { + cpp!([self as "ModuleHolder*", cname_ptr 
as "const char*"] -> bool as "bool" { + return self->has_method(std::string(cname_ptr)); + }) + }; + result + } + + pub fn method_meta(&self, method_name: String) -> Result { + if !self.has_method(method_name.clone()) { + return Err(ETError::InvalidArgument); + } + let cname = std::ffi::CString::new(method_name).unwrap(); + let cname_ptr = cname.as_ptr(); + let meta = unsafe { + cpp!([self as "ModuleHolder*", cname_ptr as "const char*"] -> MethodMeta as "torch::executor::MethodMeta" { + return self->get_module().method_meta(std::string(cname_ptr)).get(); + }) + }; + Ok(meta) + } + + pub fn method_names(&self) -> Vec { + let nums: usize = unsafe { + cpp!([self as "ModuleHolder*"] -> usize as "size_t" { + return self->method_names().size(); + }) + }; + let mut names = Vec::with_capacity(nums); + for i in 0..nums { + let name = unsafe { + cpp!([self as "ModuleHolder*", i as "size_t"] -> *const libc::c_char as "const char*" { + return self->method_names()[i].c_str(); + }) + }; + names.push(unsafe { std::ffi::CStr::from_ptr(name).to_str().unwrap().to_string() }); + } + names + } + + pub fn execute( + &self, + method_name: String, + inputs: Vec, + ) -> Result, ETError> { + let cname = std::ffi::CString::new(method_name).unwrap(); + let cname_ptr = cname.as_ptr(); + let inputs_ptr = inputs.as_ptr(); + let ninputs = inputs.len(); + let nouts = unsafe { + cpp!([self as "ModuleHolder*", cname_ptr as "const char*"] -> usize as "size_t" { + auto meta = self->get_module().method_meta(std::string(cname_ptr)); + return meta.ok() ? 
meta.get().num_outputs() : 0; + }) + }; + let mut outputs = vec![EValue::null(); nouts]; + let outputs_ptr = outputs.as_mut_ptr(); + let code = unsafe { + cpp!([ + self as "ModuleHolder*", + cname_ptr as "const char*", + inputs_ptr as "const torch::executor::EValue*", + ninputs as "size_t", + outputs_ptr as "torch::executor::EValue*" + ] -> i32 as "int32_t" { + std::vector inputs(inputs_ptr, inputs_ptr + ninputs); + auto result = self->get_module().execute(std::string(cname_ptr), inputs); + if (result.ok()) { + auto outputs = result.get(); + for (size_t i = 0; i < outputs.size(); i++) { + outputs_ptr[i] = outputs[i]; + } + } + return static_cast(result.error()); + }) + }; + match code { + 0 => Ok(outputs), + _ => Err(ETError::from(code)), + } + } +} + +// JS interface + +pub fn load(mut cx: FunctionContext) -> JsResult { + let path = arg_get_value!(cx, 0, JsString, String); + let load_mode = arg_get_value!(cx, 1, JsNumber, i32); + let promise = cx.task(move || path).promise(move |mut cx, path| { + let module = Module::new(path, load_mode.into()); + match module.load() { + Ok(()) => Ok(cx.boxed(module)), + Err(e) => cx.throw_error(format!("Error: {:?}", e)), + } + }); + Ok(promise) +} + +pub fn load_method(mut cx: FunctionContext) -> JsResult { + let module = cx.argument::>(0)?.copy(); + let name = arg_get_value!(cx, 1, JsString, String); + let promise = cx + .task(move || module.load_method(name)) + .promise(move |mut cx, result| match result { + Ok(()) => Ok(cx.undefined()), + Err(e) => cx.throw_error(format!("Failed to load method: {:?}", e)), + }); + Ok(promise) +} + +pub fn execute(mut cx: FunctionContext) -> JsResult { + let module = cx.argument::>(0)?.copy(); + // = Arc::new(cx.argument::>(0)?); + let method_name = arg_get_value!(cx, 1, JsString, String); + let mut inputs = Vec::::new(); + let inputs_js = cx.argument::(2)?; + for i in 0..inputs_js.len(&mut cx) { + let input_js = inputs_js.get(&mut cx, i)?; + match EValue::from_js::(&mut cx, input_js) { + 
Ok(input) => inputs.push(input), + Err(e) => return cx.throw_error(format!("Failed to parse input: {:?}", e)), + } + } + let promise = cx + .task(move || module.execute(method_name, inputs)) + .promise(move |mut cx, result| match result { + Ok(outputs) => { + let outputs_js = cx.empty_array(); + for (i, output) in outputs.iter().enumerate() { + let output_js = output.to_js(&mut cx)?; + outputs_js.set(&mut cx, i as u32, output_js)?; + } + Ok(outputs_js) + } + Err(e) => cx.throw_error(format!("Failed to execute method: {:?}", e)), + }); + Ok(promise) +} + +pub fn get_method_meta(mut cx: FunctionContext) -> JsResult { + let module = cx.argument::>(0)?; + let method_name = arg_get_value!(cx, 1, JsString, String); + match module.method_meta(method_name) { + Ok(meta) => meta.to_js(&mut cx), + Err(e) => cx.throw_error(format!("Failed to get method meta: {:?}", e)), + } +} + +pub fn method_names(mut cx: FunctionContext) -> JsResult { + let module = cx.argument::>(0)?; + let names = module.method_names(); + create_string_array!(cx, &mut cx, array, names); + Ok(array) +} diff --git a/src/tensor.hpp b/src/tensor.hpp new file mode 100644 index 0000000..dc33929 --- /dev/null +++ b/src/tensor.hpp @@ -0,0 +1,34 @@ +#include +#include +#include + +class TensorHolder { +public: + TensorHolder(size_t dtype, int64_t dim, const exec_aten::SizesType *shape, + const uint8_t *data, size_t data_nelem) { + shape_ = std::make_shared>(dim); + memcpy(shape_->data(), shape, dim * sizeof(exec_aten::SizesType)); + data_.reset(malloc(data_nelem), free); + memcpy(data_.get(), data, data_nelem); + tensor_ = std::make_shared( + new exec_aten::TensorImpl(static_cast(dtype), + dim, shape_->data(), data_.get())); + } + + TensorHolder(exec_aten::Tensor tensor) { + tensor_ = std::make_shared(tensor); + } + + void set_data(const uint8_t *data, size_t data_nelem) { + data_.reset(malloc(data_nelem), free); + memcpy(data_.get(), data, data_nelem); + tensor_->unsafeGetTensorImpl()->set_data(data_.get()); + } 
+ + exec_aten::Tensor &get_tensor() const { return *tensor_; } + +private: + std::shared_ptr data_ = nullptr; + std::shared_ptr> shape_ = nullptr; + std::shared_ptr tensor_ = nullptr; +}; diff --git a/src/tensor.rs b/src/tensor.rs new file mode 100644 index 0000000..492f2ff --- /dev/null +++ b/src/tensor.rs @@ -0,0 +1,463 @@ +use crate::arg_get_value; +use crate::arg_get_value_vec; +use crate::create_num_array; +use crate::tensor_type::TensorType; +use cpp::{cpp, cpp_class}; +use neon::prelude::*; +use neon::types::buffer::TypedArray; +use neon::types::Finalize; + +cpp! {{ + #include "src/tensor.hpp" +}} + +cpp_class!(pub unsafe struct Tensor as "TensorHolder"); + +impl Finalize for Tensor {} + +cpp_class!(pub unsafe struct AtenTensor as "exec_aten::Tensor"); + +impl From for Tensor { + fn from(tensor: AtenTensor) -> Self { + unsafe { + cpp!([tensor as "exec_aten::Tensor"] -> Tensor as "TensorHolder" { + return TensorHolder(tensor); + }) + } + } +} + +impl Tensor { + fn new( + dtype: TensorType, + dim: i64, + shape: *const i32, + data: *const u8, + data_nelem: usize, + ) -> Self { + let dtype_num = dtype as i32; + unsafe { + cpp!([ + dtype_num as "int32_t", + dim as "int64_t", + shape as "const int32_t*", + data as "const uint8_t*", + data_nelem as "size_t" + ] -> Tensor as "TensorHolder" { + return TensorHolder(dtype_num, dim, shape, data, data_nelem); + }) + } + } + + pub fn create(dtype: TensorType, shape: &[i32], data: &[u8]) -> Result { + let mut numel = usize::from(dtype); + if numel == 0 { + return Err("Unsupported dtype".to_string()); + } + for dim in shape { + numel *= *dim as usize; + } + if numel != data.len() { + return Err("Data length does not match shape".to_string()); + } + let dim = shape.len() as i64; + let shape_ptr = shape.as_ptr(); + let data_ptr = data.as_ptr(); + let data_nelem = data.len(); + Ok(Tensor::new(dtype, dim, shape_ptr, data_ptr, data_nelem)) + } + + fn dim(&self) -> i64 { + unsafe { + cpp!([self as "const TensorHolder*"] -> i64 as 
"int64_t" { + return self->get_tensor().dim(); + }) + } + } + + pub fn dtype(&self) -> TensorType { + let dtype = unsafe { + cpp!([self as "const TensorHolder*"] -> i32 as "int32_t" { + return static_cast(self->get_tensor().scalar_type()); + }) + }; + TensorType::from(dtype) + } + + pub fn sizes(&self) -> &[i32] { + let dim = self.dim(); + let shape_ptr = unsafe { + cpp!([self as "const TensorHolder*"] -> *const i32 as "const int32_t*" { + return self->get_tensor().sizes().begin(); + }) + }; + unsafe { std::slice::from_raw_parts(shape_ptr, dim as usize) } + } + + pub fn numel(&self) -> i64 { + unsafe { + cpp!([self as "const TensorHolder*"] -> i64 as "int64_t" { + return self->get_tensor().numel(); + }) + } + } + + fn element_size(&self) -> i64 { + unsafe { + cpp!([self as "const TensorHolder*"] -> i64 as "int64_t" { + return self->get_tensor().element_size(); + }) + } + } + + fn data_ptr(&self) -> *const u8 { + unsafe { + cpp!([self as "const TensorHolder*"] -> *mut u8 as "const uint8_t*" { + return self->get_tensor().const_data_ptr(); + }) + } + } + + pub fn get_data(&self) -> &[T] { + let data_ptr = self.data_ptr(); + let data_len = self.nbytes(); + unsafe { + std::slice::from_raw_parts(data_ptr as *const T, data_len / std::mem::size_of::()) + } + } + + pub fn set_data(&self, data: &[T]) { + let data_ptr = data.as_ptr() as *const u8; + let data_nelem = data.len() * std::mem::size_of::(); + unsafe { + cpp!([self as "TensorHolder*", data_ptr as "const uint8_t*", data_nelem as "size_t"] { + self->set_data(data_ptr, data_nelem); + }) + } + } + + fn nbytes(&self) -> usize { + unsafe { + cpp!([self as "const TensorHolder*"] -> usize as "size_t" { + return self->get_tensor().nbytes(); + }) + } + } + + fn set_value_impl(&self, position: *const i32, value: *const u8, value_size: usize) { + unsafe { + cpp!([self as "const TensorHolder*", position as "const int32_t*", value as "const uint8_t*", value_size as "size_t"] { + size_t index = 0; + size_t stride = 1; + for (int i 
= self->get_tensor().dim() - 1; i >= 0; i--) { + index += position[i] * stride; + stride *= self->get_tensor().sizes()[i]; + } + memcpy(self->get_tensor().mutable_data_ptr() + index * value_size, value, value_size); + }) + } + } + + pub fn set_value(&self, position: &[i32], value: T) { + let position_ptr = position.as_ptr(); + let value_ptr = &value as *const T as *const u8; + let value_size = std::mem::size_of::(); + self.set_value_impl(position_ptr, value_ptr, value_size); + } + + pub fn reshape(&self, shape: &[i32]) -> Result { + let numel = self.numel(); + let mut shape_prod = 1; + for dim in shape { + shape_prod *= *dim as i64; + } + if numel != shape_prod { + return Err("New shape must have the same number of elements".to_string()); + } + Tensor::create(self.dtype(), shape, self.get_data()) + } + + pub fn slice(&self, slices: Vec<(Option, Option)>) -> Result { + let shape = self.sizes(); + let dim = shape.len(); + + // Compute actual start and end indices + let mut start_indices = Vec::::with_capacity(dim); + + let new_shape: Vec = slices + .iter() + .zip(shape.iter()) + .map(|((start, end), &len)| { + let start_idx = match start { + Some(idx) => *idx % len, + None => 0, + }; + let end_idx = match end { + Some(idx) => *idx % len, + None => len, + }; + start_indices.push(start_idx as usize); + (end_idx - start_idx) as i32 + }) + .collect(); + + let elem_size = self.element_size() as usize; + + let mut new_data = + Vec::with_capacity(new_shape.iter().product::() as usize * elem_size); + + let data = self.get_data::(); + + for i in 0..new_data.capacity() / elem_size { + let mut offset = 0; + let mut pos = i; + for j in 0..dim { + let stride = if j == dim - 1 { + 1 + } else { + shape[j + 1..].iter().product::() as usize + }; + let dim_size = new_shape[j] as usize; + let dim_pos = pos % dim_size; + pos /= dim_size; + offset += (start_indices[j] + dim_pos) * stride; + } + new_data.extend_from_slice(&data[offset * elem_size..(offset + 1) * elem_size]); + } + + 
Tensor::create(self.dtype(), new_shape.as_slice(), &new_data) + } + + pub fn concat(tensors: Vec>>, axis: i64) -> Result { + if tensors.len() == 0 { + return Err("Expected non-empty array of tensors".to_string()); + } + let axis = axis as usize; + + let dtype = tensors[0].dtype(); + let rank = tensors[0].dim() as usize; + let sizes = tensors[0].sizes().to_vec(); + let mut new_sizes = sizes.clone(); + + if axis >= rank { + return Err("Invalid axis".to_string()); + } + + for &tensor in tensors.iter().skip(1) { + if dtype != tensor.dtype() { + return Err("Tensors have different dtypes".to_string()); + } + if rank != tensor.dim() as usize { + return Err("Tensors have different ranks".to_string()); + } + for j in 0..rank { + if j == axis { + new_sizes[j] += tensor.sizes()[j]; + } else if sizes[j] != tensor.sizes()[j] { + return Err("Tensors have different sizes".to_string()); + } + } + } + + let elem_size = tensors[0].element_size() as usize; + let numel: usize = new_sizes.iter().map(|&s| s as usize).product(); + let mut new_data = vec![0u8; numel * elem_size]; + + let trip_step: usize = sizes.iter().take(axis).map(|&s| s as usize).product(); + let chunk_size: usize = sizes + .iter() + .skip(axis) + .map(|&s| s as usize) + .product::() + * elem_size; + let n_tensors = tensors.len(); + + for i in 0..trip_step { + for (j, tensor) in tensors.iter().enumerate() { + let data = tensor.get_data::(); + let dst_data_offset = i * n_tensors * chunk_size + j * chunk_size; + let src_data_offset = i * chunk_size; + new_data[dst_data_offset..dst_data_offset + chunk_size] + .copy_from_slice(&data[src_data_offset..src_data_offset + chunk_size]); + } + } + + Tensor::create(dtype, &new_sizes, &new_data) + } + + pub fn to_js<'cx, C>(&self, cx: &mut C) -> JsResult<'cx, JsObject> + where + C: Context<'cx>, + { + let dtype = self.dtype(); + let shape = self.sizes(); + let info = cx.empty_object(); + let dtype_num = cx.number(dtype as i32); + info.set(cx, "dtype", dtype_num)?; + 
create_num_array!(cx, cx, js_shape, shape); + info.set(cx, "shape", js_shape)?; + let ptr = cx.boxed(self.clone()); + info.set(cx, "ptr", ptr)?; + Ok(info) + } +} + +// JS interface + +pub fn create(mut cx: FunctionContext) -> JsResult> { + let dtype: TensorType = (cx.argument::(0)?.value(&mut cx) as i32).into(); + let shape: Vec = arg_get_value_vec!(cx, 1, JsNumber, i32); + let data = cx.argument::(2)?.as_slice(&cx); + match Tensor::create(dtype, shape.as_slice(), &data) { + Ok(tensor) => Ok(cx.boxed(tensor)), + Err(e) => cx.throw_error(e), + } +} + +pub fn get_dtype(mut cx: FunctionContext) -> JsResult { + let tensor = cx.argument::>(0)?; + Ok(cx.number(tensor.dtype() as i32)) +} + +pub fn get_shape(mut cx: FunctionContext) -> JsResult { + let tensor = cx.argument::>(0)?; + let shape = tensor.sizes(); + let js_shape = cx.empty_array(); + for (i, &dim) in shape.iter().enumerate() { + let n = cx.number(dim); + js_shape.set(&mut cx, i as u32, n)?; + } + Ok(js_shape) +} + +pub fn get_data(mut cx: FunctionContext) -> JsResult { + let tensor = cx.argument::>(0)?; + let data = tensor.get_data::(); + let js_data = JsArrayBuffer::from_slice(&mut cx, data)?; + Ok(js_data) +} + +pub fn set_data(mut cx: FunctionContext) -> JsResult { + let tensor = cx.argument::>(0)?; + let data = cx.argument::(1)?.as_slice(&cx); + tensor.set_data(data); + Ok(cx.undefined()) +} + +pub fn set_value(mut cx: FunctionContext) -> JsResult { + let tensor = cx.argument::>(0)?; + let position: Vec = arg_get_value_vec!(cx, 1, JsNumber, i32); + let position = position.as_slice(); + match tensor.dtype() { + TensorType::UInt8 => { + let value = arg_get_value!(cx, 2, JsNumber, u8); + tensor.set_value(position, value); + } + TensorType::Int8 => { + let value = arg_get_value!(cx, 2, JsNumber, i8); + tensor.set_value(position, value); + } + TensorType::Int16 => { + let value = arg_get_value!(cx, 2, JsNumber, i16); + tensor.set_value(position, value); + } + TensorType::Int32 => { + let value = 
arg_get_value!(cx, 2, JsNumber, i32); + tensor.set_value(position, value); + } + TensorType::Int64 => { + let value = arg_get_value!(cx, 2, JsNumber, i64); + tensor.set_value(position, value); + } + TensorType::Float32 => { + let value = arg_get_value!(cx, 2, JsNumber, f32); + tensor.set_value(position, value); + } + TensorType::Float64 => { + let value = arg_get_value!(cx, 2, JsNumber, f64); + tensor.set_value(position, value); + } + TensorType::Bool => { + let value = arg_get_value!(cx, 2, JsBoolean, bool); + tensor.set_value(position, value); + } + _ => return cx.throw_error("Unsupported dtype"), + } + Ok(cx.undefined()) +} + +pub fn concat(mut cx: FunctionContext) -> JsResult { + let tensors: Vec<_> = cx + .argument::(0)? + .to_vec(&mut cx)? + .iter() + .map(|tensor| tensor.downcast::, _>(&mut cx).unwrap()) + .collect(); + let axis = arg_get_value!(cx, 1, JsNumber, i64); + match Tensor::concat(tensors, axis) { + Ok(tensor) => tensor.to_js(&mut cx), + Err(e) => cx.throw_error(e), + } +} + +pub fn slice(mut cx: FunctionContext) -> JsResult { + let tensor = cx.argument::>(0)?; + let dim = tensor.dim() as u32; + let mut slices = Vec::<(Option, Option)>::with_capacity(dim as usize); + let slices_js = cx.argument::(1)?; + let len = slices_js.len(&mut cx); + for i in 0..len { + let index = cx.number(i); + let slice = slices_js.get::(&mut cx, index)?; + if slice.is_a::(&mut cx) { + let slice = slice.downcast::(&mut cx).unwrap(); + let first = slice.get::(&mut cx, 0).unwrap(); + let second = slice.get::(&mut cx, 1).unwrap(); + let start = if first.is_a::(&mut cx) { + Some( + first + .downcast::(&mut cx) + .unwrap() + .value(&mut cx) as i32, + ) + } else { + None + }; + let end = if second.is_a::(&mut cx) { + Some( + second + .downcast::(&mut cx) + .unwrap() + .value(&mut cx) as i32, + ) + } else { + None + }; + slices.push((start, end)); + } else if slice.is_a::(&mut cx) { + let start = slice + .downcast::(&mut cx) + .unwrap() + .value(&mut cx) as i32; + 
slices.push((Some(start), Some(start + 1))); + } else { + slices.push((None, None)); + } + } + for _ in len..dim { + slices.push((None, None)); + } + match tensor.slice(slices) { + Ok(tensor) => tensor.to_js(&mut cx), + Err(e) => cx.throw_error(e), + } +} + +pub fn reshape(mut cx: FunctionContext) -> JsResult { + let tensor = cx.argument::>(0)?; + let shape_vec: Vec = arg_get_value_vec!(cx, 1, JsNumber, i32); + match tensor.reshape(shape_vec.as_slice()) { + Ok(tensor) => tensor.to_js(&mut cx), + Err(e) => cx.throw_error(e), + } +} diff --git a/src/tensor_type.rs b/src/tensor_type.rs new file mode 100644 index 0000000..04934ad --- /dev/null +++ b/src/tensor_type.rs @@ -0,0 +1,103 @@ +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum TensorType { + UInt8 = 0, + Int8 = 1, + Int16 = 2, + Int32 = 3, + Int64 = 4, + Float16 = 5, + Float32 = 6, + Float64 = 7, + ComplexFloat16 = 8, + ComplexFloat32 = 9, + ComplexFloat64 = 10, + Bool = 11, + QInt8 = 12, + QUInt8 = 13, + QInt32 = 14, + BFloat16 = 15, + QUInt4x2 = 16, + QUInt2x4 = 17, + Bits1x8 = 18, + Bits2x4 = 19, + Bits4x2 = 20, + Bits8 = 21, + Bits16 = 22, +} + +impl From for TensorType { + fn from(value: i32) -> Self { + match value { + 0 => TensorType::UInt8, + 1 => TensorType::Int8, + 2 => TensorType::Int16, + 3 => TensorType::Int32, + 4 => TensorType::Int64, + 5 => TensorType::Float16, + 6 => TensorType::Float32, + 7 => TensorType::Float64, + 8 => TensorType::ComplexFloat16, + 9 => TensorType::ComplexFloat32, + 10 => TensorType::ComplexFloat64, + 11 => TensorType::Bool, + 12 => TensorType::QInt8, + 13 => TensorType::QUInt8, + 14 => TensorType::QInt32, + 15 => TensorType::BFloat16, + 16 => TensorType::QUInt4x2, + 17 => TensorType::QUInt2x4, + 18 => TensorType::Bits1x8, + 19 => TensorType::Bits2x4, + 20 => TensorType::Bits4x2, + 21 => TensorType::Bits8, + 22 => TensorType::Bits16, + _ => panic!("Unknown tensor type"), + } + } +} + +impl From for i32 { + fn from(value: TensorType) -> Self { + match value { + 
TensorType::UInt8 => 0, + TensorType::Int8 => 1, + TensorType::Int16 => 2, + TensorType::Int32 => 3, + TensorType::Int64 => 4, + TensorType::Float16 => 5, + TensorType::Float32 => 6, + TensorType::Float64 => 7, + TensorType::ComplexFloat16 => 8, + TensorType::ComplexFloat32 => 9, + TensorType::ComplexFloat64 => 10, + TensorType::Bool => 11, + TensorType::QInt8 => 12, + TensorType::QUInt8 => 13, + TensorType::QInt32 => 14, + TensorType::BFloat16 => 15, + TensorType::QUInt4x2 => 16, + TensorType::QUInt2x4 => 17, + TensorType::Bits1x8 => 18, + TensorType::Bits2x4 => 19, + TensorType::Bits4x2 => 20, + TensorType::Bits8 => 21, + TensorType::Bits16 => 22, + } + } +} + +impl From for usize { + fn from(value: TensorType) -> Self { + match value { + TensorType::UInt8 => std::mem::size_of::(), + TensorType::Int8 => std::mem::size_of::(), + TensorType::Int16 => std::mem::size_of::(), + TensorType::Int32 => std::mem::size_of::(), + TensorType::Int64 => std::mem::size_of::(), + TensorType::Float32 => std::mem::size_of::(), + TensorType::Float64 => std::mem::size_of::(), + TensorType::Bool => std::mem::size_of::(), + _ => 0, + } + } +} diff --git a/src/utils.cpp b/src/utils.cpp deleted file mode 100644 index 85eaac9..0000000 --- a/src/utils.cpp +++ /dev/null @@ -1,120 +0,0 @@ -#include "utils.h" -#include "Tensor.h" - -using namespace torch::executor; - -EValue evalueFromNapiValue(const Napi::Value &value) { - EValue evalue; - if (value.IsNull() || value.IsUndefined()) { - evalue.tag = Tag::None; - } else if (value.IsNumber()) { - evalue.payload.copyable_union.as_double = value.ToNumber().DoubleValue(); - evalue.tag = Tag::Double; - } else if (value.IsBoolean()) { - evalue.payload.copyable_union.as_bool = value.ToBoolean().Value(); - evalue.tag = Tag::Bool; - } else if (value.IsString()) { - auto str = value.ToString().Utf8Value(); - char *buf = strdup(str.c_str()); - evalue.payload.copyable_union.as_string = ArrayRef(buf, str.size()); - evalue.tag = Tag::String; - } else if 
(executorch::node::Tensor::IsInstance(value)) { - evalue.payload.as_tensor = - Napi::ObjectWrap::Unwrap( - value.As()) - ->GetTensor(); - evalue.tag = Tag::Tensor; - return evalue; - } else { - throw std::runtime_error("Unsupported value type"); - } - return evalue; -} - -Napi::Value napiValueFromEValue(const Napi::Env &env, const EValue &evalue) { - switch (evalue.tag) { - case Tag::None: - return env.Null(); - break; - case Tag::Int: - return Napi::Number::New(env, evalue.payload.copyable_union.as_int); - break; - case Tag::Double: - return Napi::Number::New(env, evalue.payload.copyable_union.as_double); - break; - case Tag::Bool: - return Napi::Boolean::New(env, evalue.payload.copyable_union.as_bool); - break; - case Tag::String: { - auto chars = evalue.payload.copyable_union.as_string; - std::string str(chars.data(), chars.size()); - return Napi::String::New(env, str); - } break; - case Tag::Tensor: { - return executorch::node::Tensor::New(evalue.payload.as_tensor); - } break; - case Tag::ListBool: { - auto list = evalue.payload.copyable_union.as_bool_list; - auto array = Napi::Array::New(env, list.size()); - for (size_t i = 0; i < list.size(); i++) { - array.Set(i, list[i]); - } - return array; - } break; - case Tag::ListDouble: { - auto list = evalue.payload.copyable_union.as_double_list; - auto array = Napi::Array::New(env, list.size()); - for (size_t i = 0; i < list.size(); i++) { - array.Set(i, list[i]); - } - return array; - } break; - case Tag::ListInt: { - auto list = evalue.payload.copyable_union.as_int_list.get(); - auto array = Napi::Array::New(env, list.size()); - for (size_t i = 0; i < list.size(); i++) { - array.Set(i, list[i]); - } - return array; - } break; - default: - throw std::runtime_error("Unsupported value type"); - } -} - -std::string errorString(const Error &error) { - switch (error) { - case Error::Internal: - return "Internal"; - case Error::InvalidState: - return "InvalidState"; - case Error::EndOfMethod: - return "EndOfMethod"; - 
case Error::NotSupported: - return "NotSupported"; - case Error::NotImplemented: - return "NotImplemented"; - case Error::InvalidArgument: - return "InvalidArgument"; - case Error::InvalidType: - return "InvalidType"; - case Error::OperatorMissing: - return "OperatorMissing"; - case Error::NotFound: - return "NotFound"; - case Error::MemoryAllocationFailed: - return "MemoryAllocationFailed"; - case Error::AccessFailed: - return "AccessFailed"; - case Error::InvalidProgram: - return "InvalidProgram"; - case Error::DelegateInvalidCompatibility: - return "DelegateInvalidCompatibility"; - case Error::DelegateMemoryAllocationFailed: - return "DelegateMemoryAllocationFailed"; - case Error::DelegateInvalidHandle: - return "DelegateInvalidHandle"; - default: - return "UnknownError"; - } -} diff --git a/src/utils.h b/src/utils.h deleted file mode 100644 index 77b6b5d..0000000 --- a/src/utils.h +++ /dev/null @@ -1,10 +0,0 @@ -#include -#include -#include - -torch::executor::EValue evalueFromNapiValue(const Napi::Value &value); - -Napi::Value napiValueFromEValue(const Napi::Env &env, - const torch::executor::EValue &evalue); - -std::string errorString(const torch::executor::Error &error); diff --git a/tsconfig.json b/tsconfig.json index 2b31304..297d332 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -3,5 +3,5 @@ "esModuleInterop": true }, "include": ["lib/**/*"], - "exclude": ["node_modules", "executorch", "build"] + "exclude": ["node_modules", "executorch", "target"] } \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index b0d7118..8fad859 100644 --- a/yarn.lock +++ b/yarn.lock @@ -997,6 +997,41 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@cargo-messages/android-arm-eabi@0.1.67": + version "0.1.67" + resolved 
"https://registry.yarnpkg.com/@cargo-messages/android-arm-eabi/-/android-arm-eabi-0.1.67.tgz#9cb86a8dd3cc31b6e6ed99c90ac4337cab327c5f" + integrity sha512-L2u1zuPIy7iTB4AQyzXHTPlHtCF9L1VKsyfTLr8TBL7kXXnW+juorxujuYTKKO2Gw2ndb+NjPY27ZFqpPjtaXA== + +"@cargo-messages/darwin-arm64@0.1.67": + version "0.1.67" + resolved "https://registry.yarnpkg.com/@cargo-messages/darwin-arm64/-/darwin-arm64-0.1.67.tgz#7ebcf3d1395295b2f7ae313b43dba69a72732ea3" + integrity sha512-LjJiPbxn1TzvYnCH4Vrq0O8leLX1koVnmeLiJsdO5INBUrfJZeV/7tijLEtS6ZZhAR00B1nurJL6ycrUHY6F3g== + +"@cargo-messages/darwin-x64@0.1.67": + version "0.1.67" + resolved "https://registry.yarnpkg.com/@cargo-messages/darwin-x64/-/darwin-x64-0.1.67.tgz#3e32c696747499ebe525a71f58f0a812aac5d587" + integrity sha512-XHKSnBLO8heVUuWPzYmyZMTFTc5pp4YREHNaviFoukVl6DPw2nUz3eclwI7EesUXtdjYiHegKB+9RgkpvzWijw== + +"@cargo-messages/linux-arm-gnueabihf@0.1.67": + version "0.1.67" + resolved "https://registry.yarnpkg.com/@cargo-messages/linux-arm-gnueabihf/-/linux-arm-gnueabihf-0.1.67.tgz#8bdd9dd7ed6ffd88ef9d21652231fb2812f527ec" + integrity sha512-CnrdvDEU9KfXh66UqYnJBw9h/JnstYHAZiTdf05HEPdv+T8HNVnFvalQ5nYWd2kezezvqpz7MVHNEnqtKdxEPQ== + +"@cargo-messages/linux-x64-gnu@0.1.67": + version "0.1.67" + resolved "https://registry.yarnpkg.com/@cargo-messages/linux-x64-gnu/-/linux-x64-gnu-0.1.67.tgz#cf0cf0de819895cebad31e4426e4341621c43da6" + integrity sha512-UzJ7rCx1Ch8gOrdXSnnPS1CmzYcPr6suIvPo/vx6+xGJQuCpNA38PpjhKul9vwH9cDXHwNUq5/3JbEIxBTjCyw== + +"@cargo-messages/win32-arm64-msvc@0.1.67": + version "0.1.67" + resolved "https://registry.yarnpkg.com/@cargo-messages/win32-arm64-msvc/-/win32-arm64-msvc-0.1.67.tgz#e5d5650ab2afa366586ebafbb8942a5d5ed377a1" + integrity sha512-Cbp9aBu1T3BhjSNUN/3v51Qr7bkSUNo5cwYMCZlkOdjepxNqTBhCEAzm2CHD9W0osdJrMF9/c8RX5yu0UkjxFQ== + +"@cargo-messages/win32-x64-msvc@0.1.67": + version "0.1.67" + resolved 
"https://registry.yarnpkg.com/@cargo-messages/win32-x64-msvc/-/win32-x64-msvc-0.1.67.tgz#c7c92c4bb7c4e7c9f0393d1bba04b6eae305adbc" + integrity sha512-ASs8v5cWzbyDU+OsMHRmkOMy0IW+ZRMWyLvbanTp68cDsInyPbQc0JMAcMY1GIMKhCZCUmg3ClJt5I+ytUxTUg== + "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" @@ -1237,6 +1272,19 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" +"@neon-rs/cli@0.1.68": + version "0.1.68" + resolved "https://registry.yarnpkg.com/@neon-rs/cli/-/cli-0.1.68.tgz#2907698e67792bf1dc638cc708a1386a73dadd4e" + integrity sha512-K3U/DUC7oxBKAGfU3P6tDW8K2ir39Peqaeyrgm+xRxJ2sRuYp/PzbJNuYnjOA/7GWGYt/xDOeEdh6QOV1PGRTg== + optionalDependencies: + "@cargo-messages/android-arm-eabi" "0.1.67" + "@cargo-messages/darwin-arm64" "0.1.67" + "@cargo-messages/darwin-x64" "0.1.67" + "@cargo-messages/linux-arm-gnueabihf" "0.1.67" + "@cargo-messages/linux-x64-gnu" "0.1.67" + "@cargo-messages/win32-arm64-msvc" "0.1.67" + "@cargo-messages/win32-x64-msvc" "0.1.67" + "@sinclair/typebox@^0.27.8": version "0.27.8" resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" @@ -1324,9 +1372,9 @@ pretty-format "^29.0.0" "@types/node@*", "@types/node@^20.12.12": - version "20.12.12" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.12.tgz#7cbecdf902085cec634fdb362172dfe12b8f2050" - integrity sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw== + version "20.14.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.14.1.tgz#2434dbcb1f039e31f2c0e9969da93f52cf6348f3" + integrity sha512-T2MzSGEu+ysB/FkWfqmhV3PLyQlowdptmmgD20C6QxsS8Fmv5SjpZ1ayXaEC0S21/h5UJ9iA6W/5vSNU5l00OA== dependencies: undici-types "~5.26.4" @@ -1386,19 +1434,6 @@ anymatch@^3.0.3: normalize-path "^3.0.0" picomatch "^2.0.4" 
-"aproba@^1.0.3 || ^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc" - integrity sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ== - -are-we-there-yet@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz#679df222b278c64f2cdba1175cdc00b0d96164bd" - integrity sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg== - dependencies: - delegates "^1.0.0" - readable-stream "^3.6.0" - argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -1406,20 +1441,6 @@ argparse@^1.0.7: dependencies: sprintf-js "~1.0.2" -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - -axios@^1.6.5: - version "1.7.2" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.2.tgz#b625db8a7051fbea61c35a3cbb3a1daa7b9c7621" - integrity sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw== - dependencies: - follow-redirects "^1.15.6" - form-data "^4.0.0" - proxy-from-env "^1.1.0" - babel-jest@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-29.7.0.tgz#f4369919225b684c56085998ac63dbd05be020d5" @@ -1562,9 +1583,9 @@ camelcase@^6.2.0: integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== caniuse-lite@^1.0.30001587: - version "1.0.30001625" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001625.tgz#ead1b155ea691d6a87938754d3cb119c24465b03" - integrity 
sha512-4KE9N2gcRH+HQhpeiRZXd+1niLB/XNLAhSy4z7fI8EzcbcPoAqjNInxVHTiTwWfTIV4w096XG8OtCOCQQKPv3w== + version "1.0.30001627" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001627.tgz#8071c42d468e06ed2fb2c545efe79a663fd326ab" + integrity sha512-4zgNiB8nTyV/tHhwZrFs88ryjls/lHiqFhrxCW4qSTeuRByBVnPYpDInchOIySWknznucaf31Z4KYqjfbrecVw== chalk@^2.4.2: version "2.4.2" @@ -1588,11 +1609,6 @@ char-regex@^1.0.2: resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== -chownr@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" - integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== - ci-info@^3.2.0: version "3.9.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4" @@ -1612,25 +1628,6 @@ cliui@^8.0.1: strip-ansi "^6.0.1" wrap-ansi "^7.0.0" -cmake-js@^7.3.0: - version "7.3.0" - resolved "https://registry.yarnpkg.com/cmake-js/-/cmake-js-7.3.0.tgz#6fd6234b7aeec4545c1c806f9e3f7ffacd9798b2" - integrity sha512-dXs2zq9WxrV87bpJ+WbnGKv8WUBXDw8blNiwNHoRe/it+ptscxhQHKB1SJXa1w+kocLMeP28Tk4/eTCezg4o+w== - dependencies: - axios "^1.6.5" - debug "^4" - fs-extra "^11.2.0" - lodash.isplainobject "^4.0.6" - memory-stream "^1.0.0" - node-api-headers "^1.1.0" - npmlog "^6.0.2" - rc "^1.2.7" - semver "^7.5.4" - tar "^6.2.0" - url-join "^4.0.1" - which "^2.0.2" - yargs "^17.7.2" - co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" @@ -1665,28 +1662,11 @@ color-name@~1.1.4: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity 
sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -color-support@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" - integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== - -combined-stream@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== -console-control-strings@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" - integrity sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ== - convert-source-map@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" @@ -1721,10 +1701,10 @@ cross-spawn@^7.0.3: shebang-command "^2.0.0" which "^2.0.1" -debug@^4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== +debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: + version "4.3.5" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.5.tgz#e83444eceb9fedd4a1da56d671ae2446a01a6e1e" + integrity 
sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg== dependencies: ms "2.1.2" @@ -1733,26 +1713,11 @@ dedent@^1.0.0: resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.5.3.tgz#99aee19eb9bae55a67327717b6e848d0bf777e5a" integrity sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ== -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - deepmerge@^4.2.2: version "4.3.1" resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - -delegates@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" - integrity sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ== - detect-newline@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" @@ -1764,9 +1729,9 @@ diff-sequences@^29.6.3: integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== electron-to-chromium@^1.4.668: - version "1.4.783" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.783.tgz#933887165b8b6025a81663d2d97cf4b85cde27b2" - integrity 
sha512-bT0jEz/Xz1fahQpbZ1D7LgmPYZ3iHVY39NcWWro1+hA2IvjiPeaXtfSqrQ+nXjApMvQRE2ASt1itSLRrebHMRQ== + version "1.4.789" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.789.tgz#fec941cb753ee139da562a5a8ff31fc3e828b411" + integrity sha512-0VbyiaXoT++Fi2vHGo2ThOeS6X3vgRCWrjPeO2FeIAWL6ItiSJ9BqlH8LfCXe3X1IdcG+S0iLoNaxQWhfZoGzQ== emittery@^0.13.1: version "0.13.1" @@ -1868,36 +1833,6 @@ find-up@^4.0.0, find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" -follow-redirects@^1.15.6: - version "1.15.6" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" - integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== - -form-data@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" - integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - -fs-extra@^11.2.0: - version "11.2.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.2.0.tgz#e70e17dfad64232287d01929399e0ea7c86b0e5b" - integrity sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw== - dependencies: - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-minipass@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" - integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== - dependencies: - minipass "^3.0.0" - fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -1913,20 +1848,6 @@ function-bind@^1.1.2: resolved 
"https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== -gauge@^4.0.3: - version "4.0.4" - resolved "https://registry.yarnpkg.com/gauge/-/gauge-4.0.4.tgz#52ff0652f2bbf607a989793d53b751bef2328dce" - integrity sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg== - dependencies: - aproba "^1.0.3 || ^2.0.0" - color-support "^1.1.3" - console-control-strings "^1.1.0" - has-unicode "^2.0.1" - signal-exit "^3.0.7" - string-width "^4.2.3" - strip-ansi "^6.0.1" - wide-align "^1.1.5" - gensync@^1.0.0-beta.2: version "1.0.0-beta.2" resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" @@ -1964,7 +1885,7 @@ globals@^11.1.0: resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== -graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.9: +graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -1979,11 +1900,6 @@ has-flag@^4.0.0: resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== -has-unicode@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" - integrity sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ== - hasown@^2.0.0: version "2.0.2" resolved 
"https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" @@ -2022,16 +1938,11 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@^2.0.3: +inherits@2: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== -ini@~1.3.0: - version "1.3.8" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -2513,15 +2424,6 @@ json5@^2.2.3: resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== -jsonfile@^6.0.1: - version "6.1.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" - integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== - dependencies: - universalify "^2.0.0" - optionalDependencies: - graceful-fs "^4.1.6" - kleur@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" @@ -2549,11 +2451,6 @@ lodash.debounce@^4.0.8: resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== -lodash.isplainobject@^4.0.6: - version "4.0.6" - resolved 
"https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" - integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== - lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -2575,13 +2472,6 @@ makeerror@1.0.12: dependencies: tmpl "1.0.5" -memory-stream@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/memory-stream/-/memory-stream-1.0.0.tgz#481dfd259ccdf57b03ec2c9632960044180e73c2" - integrity sha512-Wm13VcsPIMdG96dzILfij09PvuS3APtcKNh7M28FsCA/w6+1mjR7hhPmfFNoilX9xU7wTdhsH5lJAm6XNzdtww== - dependencies: - readable-stream "^3.4.0" - merge-stream@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" @@ -2595,18 +2485,6 @@ micromatch@^4.0.4: braces "^3.0.3" picomatch "^2.3.1" -mime-db@1.52.0: - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12: - version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -2619,36 +2497,6 @@ minimatch@^3.0.4, minimatch@^3.1.1: dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0: - version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" - integrity 
sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== - -minipass@^3.0.0: - version "3.3.6" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" - integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== - dependencies: - yallist "^4.0.0" - -minipass@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" - integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== - -minizlib@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" - integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== - dependencies: - minipass "^3.0.0" - yallist "^4.0.0" - -mkdirp@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" - integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== - ms@2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" @@ -2659,16 +2507,6 @@ natural-compare@^1.4.0: resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== -node-addon-api@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-8.0.0.tgz#5453b7ad59dd040d12e0f1a97a6fa1c765c5c9d2" - integrity sha512-ipO7rsHEBqa9STO5C5T10fj732ml+5kLN1cAG8/jdHd56ldQeGj3Q7+scUS+VHK/qy1zLEwC4wMK5+yM0btPvw== - -node-api-headers@^1.1.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-1.1.0.tgz#3f9dd7bb10b29e1c3e3db675979605a308b2373c" - integrity sha512-ucQW+SbYCUPfprvmzBsnjT034IGRB2XK8rRc78BgjNKhTdFKgAwAmgW704bKIBmcYW48it0Gkjpkd39Azrwquw== - node-int64@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" @@ -2691,16 +2529,6 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" -npmlog@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.2.tgz#c8166017a42f2dea92d6453168dd865186a70830" - integrity sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg== - dependencies: - are-we-there-yet "^3.0.0" - console-control-strings "^1.1.0" - gauge "^4.0.3" - set-blocking "^2.0.0" - once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" @@ -2810,40 +2638,16 @@ prompts@^2.0.1: kleur "^3.0.3" sisteransi "^1.0.5" -proxy-from-env@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" - integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== - pure-rand@^6.0.0: version "6.1.0" resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-6.1.0.tgz#d173cf23258231976ccbdb05247c9787957604f2" integrity sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA== -rc@^1.2.7: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - react-is@^18.0.0: version "18.3.1" resolved 
"https://registry.yarnpkg.com/react-is/-/react-is-18.3.1.tgz#e83557dc12eae63a99e003a46388b1dcbb44db7e" integrity sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg== -readable-stream@^3.4.0, readable-stream@^3.6.0: - version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" - integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - regenerate-unicode-properties@^10.1.0: version "10.1.1" resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz#6b0e05489d9076b04c436f318d9b067bba459480" @@ -2918,11 +2722,6 @@ resolve@^1.14.2, resolve@^1.20.0: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - semver@^6.3.0, semver@^6.3.1: version "6.3.1" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" @@ -2933,11 +2732,6 @@ semver@^7.5.3, semver@^7.5.4: resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13" integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w== -set-blocking@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== - shebang-command@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" @@ -2998,7 +2792,7 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -3007,13 +2801,6 @@ string-length@^4.0.1: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -3036,11 +2823,6 @@ strip-json-comments@^3.1.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== - supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" @@ -3067,18 +2849,6 @@ supports-preserve-symlinks-flag@^1.0.0: 
resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== -tar@^6.2.0: - version "6.2.1" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" - integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^5.0.0" - minizlib "^2.1.1" - mkdirp "^1.0.3" - yallist "^4.0.0" - test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -3148,11 +2918,6 @@ unicode-property-aliases-ecmascript@^2.0.0: resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== -universalify@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d" - integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw== - update-browserslist-db@^1.0.13: version "1.0.16" resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz#f6d489ed90fb2f07d67784eb3f53d7891f736356" @@ -3161,16 +2926,6 @@ update-browserslist-db@^1.0.13: escalade "^3.1.2" picocolors "^1.0.1" -url-join@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7" - integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA== - -util-deprecate@^1.0.1: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== - v8-to-istanbul@^9.0.1: version "9.2.0" resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz#2ed7644a245cddd83d4e087b9b33b3e62dfd10ad" @@ -3187,20 +2942,13 @@ walker@^1.0.8: dependencies: makeerror "1.0.12" -which@^2.0.1, which@^2.0.2: +which@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" -wide-align@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3" - integrity sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg== - dependencies: - string-width "^1.0.2 || 2 || 3 || 4" - wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" @@ -3233,17 +2981,12 @@ yallist@^3.0.2: resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - yargs-parser@^21.1.1: version "21.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" integrity 
sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== -yargs@^17.3.1, yargs@^17.7.2: +yargs@^17.3.1: version "17.7.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==