# Fix escape characters in datatype definition (#5971) (#18872)
# Top-level workflow configuration: triggers, shared environment for all jobs,
# and concurrency control (one active run per PR/branch).
name: CI Workflow
on:
  pull_request:
    branches:
      - master
  push:
    branches:
      - master
  workflow_dispatch:
    inputs:
      auto_checkpoint:
        description: 'Database config - auto checkpoint'
        required: false
        default: true
        type: boolean
      buffer_pool_size:
        description: 'Database config - buffer pool size'
        required: false
        type: number
      max_num_threads:
        description: 'Database config - max number of threads'
        required: false
        default: 2
        type: number
      enable_compression:
        description: 'Database config - enable compression'
        required: false
        default: true
        type: boolean
      checkpoint_threshold:
        description: 'Database config - checkpoint threshold'
        required: false
        default: 16777216
        type: number
      force_checkpoint_on_close:
        description: "Database config - force checkpoint on close"
        required: false
        default: true
        type: boolean
env:
  RUNTIME_CHECKS: 1
  USE_EXISTING_BINARY_DATASET: 1
  # Inputs are empty strings on pull_request/push events; only set on workflow_dispatch.
  AUTO_CHECKPOINT: ${{ github.event.inputs.auto_checkpoint }}
  BUFFER_POOL_SIZE: ${{ github.event.inputs.buffer_pool_size }}
  MAX_NUM_THREADS: ${{ github.event.inputs.max_num_threads }}
  ENABLE_COMPRESSION: ${{ github.event.inputs.enable_compression }}
  CHECKPOINT_THRESHOLD: ${{ github.event.inputs.checkpoint_threshold }}
  FORCE_CHECKPOINT_ON_CLOSE: ${{ github.event.inputs.force_checkpoint_on_close }}
  WERROR: 1
  RUSTFLAGS: --deny warnings
  PIP_BREAK_SYSTEM_PACKAGES: 1
# Only allow one run in this group to run at a time, and cancel any runs in progress in this group.
# We use the workflow name and then add the pull request number, or (if it's a push to master), we use the name of the branch.
# See github's docs[1] and a relevant stack overflow answer[2]
# [1]: https://docs.github.com/en/actions/using-jobs/using-concurrency
# [2]: https://stackoverflow.com/questions/66335225/how-to-cancel-previous-runs-in-the-pr-when-you-push-new-commitsupdate-the-curre
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  # Builds a release binary once and generates/uploads the binary test datasets
  # (binary-demo artifact, tinysnb and ldbc-sf01 to S3/GCS) consumed by the
  # platform test jobs below.
  generate-binary-datasets:
    name: generate binary dataset
    needs: [ sanity-checks, python-lint-check ]
    runs-on: kuzu-self-hosted-testing
    env:
      NUM_THREADS: 32
      GEN: Ninja
      CC: gcc
      CXX: g++
      GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }}
      GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }}
    steps:
      - uses: actions/checkout@v4
      - name: Build
        run: make release
      # Generate the binary db once to test that it can be read on all platforms.
      - name: Generate datasets
        run: bash scripts/generate_binary_demo.sh
      - name: Upload binary-demo
        uses: actions/upload-artifact@v4
        with:
          name: binary-demo
          path: dataset/binary-demo
      - name: Generate and upload tinysnb
        run: |
          version_current="$(python3 benchmark/version.py)"
          echo "0" > version_s3.txt
          echo "0" > version_gcs.txt
          s3cmd get --force s3://kuzu-test/tinysnb/version.txt version_s3.txt || true
          s3cmd --access_key=${GCS_ACCESS_KEY_ID} --secret_key=${GCS_SECRET_ACCESS_KEY} --host=storage.googleapis.com --host-bucket="%(bucket)s.storage.googleapis.com" get --force s3://kuzudb-test/tinysnb/version.txt version_gcs.txt || true
          if [[ "$version_current" == "$(cat version_s3.txt)" ]] && [[ "$version_current" == "$(cat version_gcs.txt)" ]]; then
            echo "TinySNB dataset is up to date, skipping upload"
            rm -rf tinysnb version_s3.txt version_gcs.txt
            exit 0
          fi
          echo "TinySNB dataset is outdated, uploading..."
          bash scripts/generate_binary_tinysnb.sh
          s3cmd del -r s3://kuzu-test/tinysnb/
          s3cmd sync ./tinysnb s3://kuzu-test/
          # s3cmd del -r doesn't work on GCS so we individually delete each object in the directory
          for gcs_file in $(s3cmd --access_key=${GCS_ACCESS_KEY_ID} --secret_key=${GCS_SECRET_ACCESS_KEY} --host=storage.googleapis.com --host-bucket="%(bucket)s.storage.googleapis.com" ls s3://kuzudb-test/tinysnb/ | awk '{ print $4 }');
          do
            s3cmd --access_key=${GCS_ACCESS_KEY_ID} --secret_key=${GCS_SECRET_ACCESS_KEY} --host=storage.googleapis.com --host-bucket="%(bucket)s.storage.googleapis.com" del ${gcs_file}
          done
          s3cmd --access_key=${GCS_ACCESS_KEY_ID} --secret_key=${GCS_SECRET_ACCESS_KEY} --host=storage.googleapis.com --host-bucket="%(bucket)s.storage.googleapis.com" sync ./tinysnb s3://kuzudb-test/
          rm -rf tinysnb version_s3.txt version_gcs.txt
      - name: Generate and upload ldbc-sf01
        run: |
          version_current="$(python3 benchmark/version.py)"
          echo "0" > version_s3.txt
          s3cmd get --force s3://kuzu-test/ldbc01/version.txt version_s3.txt || true
          if [[ "$version_current" == "$(cat version_s3.txt)" ]]; then
            echo "LDBC-SF01 dataset is up to date, skipping upload"
            rm -rf ldbc01 version_s3.txt
            exit 0
          fi
          echo "LDBC-SF01 dataset is outdated, uploading..."
          bash scripts/generate_binary_ldbc-sf01.sh
          s3cmd del -r s3://kuzu-test/ldbc01/
          s3cmd sync ./ldbc01 s3://kuzu-test/
          rm -rf ldbc01 version_s3.txt
gcc-build-test: | |
name: gcc build & test | |
needs: [ sanity-checks, python-lint-check, generate-binary-datasets ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
CLANGD_DIAGNOSTIC_JOBS: 32 | |
CLANGD_DIAGNOSTIC_INSTANCES: 6 | |
GEN: Ninja | |
CC: gcc | |
CXX: g++ | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }} | |
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }} | |
RUN_ID: "$(hostname)-$(date +%s)" | |
HTTP_CACHE_FILE: TRUE | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: ${{ github.workspace }}/dataset/binary-demo | |
- name: Test | |
run: | | |
make test | |
- name: Test in mem | |
env: | |
IN_MEM_MODE: true | |
run: | | |
make test | |
- name: Test checkpoint at threshold 0 | |
env: | |
AUTO_CHECKPOINT: true | |
CHECKPOINT_THRESHOLD: 0 | |
run: | | |
make test | |
- name: Test recovery | |
env: | |
AUTO_CHECKPOINT: false | |
CHECKPOINT_ON_CLOSE: false | |
run: | | |
make test | |
- name: Test sparse frontier threshold at 2 | |
env: | |
SPARSE_FRONTIER_THRESHOLD: 2 | |
run: | | |
make test | |
- name: Test with checksums disabled | |
env: | |
ENABLE_CHECKSUMS: false | |
run: | | |
make test | |
- name: Ensure Python dependencies | |
run: pip install --user -r tools/python_api/requirements_dev.txt | |
- name: Python test | |
run: make pytest | |
- name: Ensure Node.js dependencies | |
run: make nodejs-deps | |
- name: Node.js test | |
run: make nodejstest | |
- name: Java test | |
run: make javatest | |
- name: Rust test with pre-built library | |
env: | |
KUZU_SHARED: 1 | |
KUZU_INCLUDE_DIR: ${{ github.workspace }}/build/release/src | |
KUZU_LIBRARY_DIR: ${{ github.workspace }}/build/release/src | |
KUZU_LOCAL_EXTENSIONS: ${{ github.workspace }}/extension | |
run: | | |
make extension-release | |
make rusttest | |
gcc-build-test-vector-size: | |
name: gcc build & test in-mem only with various vector size | |
needs: [ gcc-build-test ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
CLANGD_DIAGNOSTIC_JOBS: 32 | |
CLANGD_DIAGNOSTIC_INSTANCES: 6 | |
GEN: Ninja | |
CC: gcc | |
CXX: g++ | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }} | |
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }} | |
RUN_ID: "$(hostname)-$(date +%s)" | |
HTTP_CACHE_FILE: TRUE | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: ${{ github.workspace }}/dataset/binary-demo | |
- name: Test vector 1024 | |
env: | |
IN_MEM_MODE: true | |
run: | | |
make test VECTOR_CAPACITY_LOG2=10 | |
- name: Test vector capacity of 1024 under in-memory mode | |
env: | |
IN_MEM_MODE: true | |
run: | | |
make test VECTOR_CAPACITY_LOG2=10 | |
- name: Test vector capacity of 64 | |
run: | | |
make test VECTOR_CAPACITY_LOG2=6 | |
- name: Test vector capacity of 64 under in-memory mode | |
env: | |
IN_MEM_MODE: true | |
run: | | |
make test VECTOR_CAPACITY_LOG2=6 | |
- name: Test vector capacity of 2 | |
run: | | |
make test VECTOR_CAPACITY_LOG2=1 | |
- name: Test vector capacity of 2 under in-memory mode | |
env: | |
IN_MEM_MODE: true | |
run: | | |
make test VECTOR_CAPACITY_LOG2=1 | |
code-coverage: | |
name: code coverage | |
runs-on: kuzu-self-hosted-testing | |
needs: [ sanity-checks, python-lint-check, generate-binary-datasets ] | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: dataset/binary-demo | |
- name: Test coverage for on-disk mode | |
run: make lcov | |
- name: Generate coverage report for on-disk mode | |
run: | | |
lcov --config-file .lcovrc -c -d ./ --no-external -o cover_on_disk.info &&\ | |
lcov --remove cover_on_disk.info $(< .github/workflows/lcov_exclude) -o cover_on_disk.info | |
- name: Upload coverage report for on-disk mode | |
uses: codecov/[email protected] | |
with: | |
file: cover_on_disk.info | |
flags: on-disk | |
name: on-disk-coverage | |
- name: Clean coverage data | |
run: | | |
find . -name "*.gcda" -delete | |
lcov --zerocounters --directory ./ | |
- name: Test coverage for in-mem mode | |
env: | |
IN_MEM_MODE: true | |
run: make lcov | |
- name: Generate coverage report for in-mem mode | |
run: | | |
lcov --config-file .lcovrc -c -d ./ --no-external -o cover_in_mem.info &&\ | |
lcov --remove cover_in_mem.info $(< .github/workflows/lcov_exclude) -o cover_in_mem.info | |
- name: Upload coverage report for in-mem mode | |
uses: codecov/[email protected] | |
with: | |
file: cover_in_mem.info | |
flags: in-mem | |
name: in-mem-coverage | |
- name: Clean coverage data | |
run: | | |
find . -name "*.gcda" -delete | |
lcov --zerocounters --directory ./ | |
- name: Test coverage for recovery | |
env: | |
AUTO_CHECKPOINT: false | |
CHECKPOINT_ON_CLOSE: false | |
run: make lcov | |
- name: Generate coverage report for recovery | |
run: | | |
lcov --config-file .lcovrc -c -d ./ --no-external -o cover_recovery.info &&\ | |
lcov --remove cover_recovery.info $(< .github/workflows/lcov_exclude) -o cover_recovery.info | |
- name: Upload coverage report for recovery | |
uses: codecov/[email protected] | |
with: | |
file: cover_recovery.info | |
flags: recovery | |
name: recovery-coverage | |
- name: Clean coverage data | |
run: | | |
find . -name "*.gcda" -delete | |
lcov --zerocounters --directory ./ | |
webassembly-build-test: | |
name: webassembly build & test | |
needs: [ sanity-checks, generate-binary-datasets ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
WERROR: 0 | |
TEST_JOBS: 8 | |
NUM_THREADS: 32 | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: dataset/binary-demo | |
- name: Install dependencies | |
working-directory: tools/wasm | |
run: npm i | |
- name: Build WebAssembly package | |
working-directory: tools/wasm | |
run: | | |
source /home/runner/emsdk/emsdk_env.sh | |
npm run build | |
- name: API test | |
working-directory: tools/wasm | |
run: npm run test | |
- name: Clean up | |
run: make clean | |
- name: Kernel build & test | |
run: | | |
source /home/runner/emsdk/emsdk_env.sh | |
make wasmtest | |
- name: Kernel build & test in memory | |
run: | | |
source /home/runner/emsdk/emsdk_env.sh | |
make wasmtest | |
env: | |
IN_MEM_MODE: true | |
- name: Kernel build & test (single-threaded) | |
env: | |
SINGLE_THREADED: true | |
run: | | |
make clean | |
source /home/runner/emsdk/emsdk_env.sh | |
make wasmtest | |
rust-build-test: | |
name: rust build & test | |
runs-on: kuzu-self-hosted-testing | |
env: | |
# CARGO_BUILD_JOBS is set using this in the Makefile | |
NUM_THREADS: 32 | |
CC: gcc | |
CXX: g++ | |
KUZU_LOCAL_EXTENSIONS: ${{ github.workspace }}/extension | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Rust API test | |
run: | | |
make extension-release | |
make rusttest | |
- name: Rust example build | |
working-directory: examples/rust | |
run: cargo build --locked --all-features | |
- name: Rust API clippy | |
working-directory: tools/rust_api | |
run: cargo clippy --release --all-targets --all-features | |
clang-build-test-with-tsan: | |
name: clang build & test with tsan | |
needs: [ clang-build-test, generate-binary-datasets ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
GEN: Ninja | |
CC: clang | |
CXX: clang++ | |
WERROR: 0 | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: dataset/binary-demo | |
- name: Test with TSAN | |
run: | | |
make test | |
env: | |
TSAN_OPTIONS: "halt_on_error=1" | |
# 32GB | |
MAX_DB_SIZE: 34359738368 | |
TSAN: 1 | |
RUNTIME_CHECKS: 0 | |
gcc-build-test-with-asan: | |
name: gcc build & test with asan | |
needs: [ gcc-build-test, generate-binary-datasets ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
GEN: Ninja | |
CC: gcc | |
CXX: g++ | |
WERROR: 0 | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: dataset/binary-demo | |
- name: Test with ASAN | |
run: | | |
make test ASAN=1 BM_MALLOC=ON | |
env: | |
ASAN_OPTIONS: "detect_leaks=1" | |
- name: Test fwd only rel tables with ASAN | |
env: | |
GTEST_FILTER: "*~*:RelScanTest*" | |
DEFAULT_REL_STORAGE_DIRECTION: FWD | |
run: | | |
make test | |
- name: Test compression disabled with ASAN | |
env: | |
ENABLE_COMPRESSION: false | |
run: | | |
make test DEFAULT_REL_STORAGE_DIRECTION=BOTH | |
clang-build-test: | |
name: clang build & test | |
needs: [ sanity-checks, python-lint-check, generate-binary-datasets ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
CC: clang | |
CXX: clang++ | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }} | |
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }} | |
RUN_ID: "$(hostname)-$(date +%s)" | |
HTTP_CACHE_FILE: TRUE | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: dataset/binary-demo | |
- name: Test | |
run: | | |
make test | |
- name: Test fwd only rel tables | |
env: | |
# We assume that any test name containing '~' is part of e2e_test | |
GTEST_FILTER: "*~*:RelScanTest*" | |
DEFAULT_REL_STORAGE_DIRECTION: FWD | |
run: | | |
make test | |
- name: Test compression disabled | |
env: | |
ENABLE_COMPRESSION: false | |
run: | | |
make test DEFAULT_REL_STORAGE_DIRECTION=BOTH | |
- name: Test recovery | |
env: | |
AUTO_CHECKPOINT: false | |
CHECKPOINT_ON_CLOSE: false | |
run: | | |
make test | |
- name: Ensure Python dependencies | |
run: pip install --user -r tools/python_api/requirements_dev.txt | |
- name: Python test | |
run: make pytest | |
- name: Ensure Node.js dependencies | |
run: make nodejs-deps | |
- name: Node.js test | |
run: make nodejstest | |
- name: Java test | |
run: make javatest | |
- name: Rust test with pre-built library | |
env: | |
KUZU_SHARED: 1 | |
KUZU_INCLUDE_DIR: ${{ github.workspace }}/build/release/src | |
KUZU_LIBRARY_DIR: ${{ github.workspace }}/build/release/src | |
KUZU_LOCAL_EXTENSIONS: ${{ github.workspace }}/extension | |
run: | | |
make extension-release | |
make rusttest | |
clang-build-test-various-page-sizes: | |
strategy: | |
matrix: | |
page_size_log2: [ 16, 18 ] | |
name: clang build & test with page_size_log2=${{ matrix.page_size_log2 }} | |
needs: [ clang-build-test ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
CC: clang | |
CXX: clang++ | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }} | |
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }} | |
RUN_ID: "$(hostname)-$(date +%s)" | |
HTTP_CACHE_FILE: TRUE | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Build | |
run: | | |
make release PAGE_SIZE_LOG2=${{ matrix.page_size_log2 }} | |
- name: Generate binary demo | |
run: | | |
bash scripts/generate_binary_demo.sh | |
- name: Test | |
run: | | |
make test PAGE_SIZE_LOG2=${{ matrix.page_size_log2 }} | |
clang-build-test-various-node-group-sizes: | |
strategy: | |
matrix: | |
node_group_size_log2: [ 4, 12, 19 ] | |
name: clang build & test with node_group_size_log2=${{ matrix.node_group_size_log2 }} | |
needs: [ clang-build-test ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
CC: clang | |
CXX: clang++ | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
RUN_ID: "$(hostname)-$(date +%s)" | |
HTTP_CACHE_FILE: TRUE | |
BUFFER_POOL_SIZE: 1073741824 | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Build | |
run: | | |
if [ ${{ matrix.node_group_size_log2 }} -ge 11 ]; then | |
make release VECTOR_CAPACITY_LOG2=11 NODE_GROUP_SIZE_LOG2=${{ matrix.node_group_size_log2 }} | |
else | |
make release VECTOR_CAPACITY_LOG2=${{ matrix.node_group_size_log2 }} NODE_GROUP_SIZE_LOG2=${{ matrix.node_group_size_log2 }} | |
fi | |
- name: Generate binary demo | |
run: | | |
bash scripts/generate_binary_demo.sh | |
- name: Test | |
run: | | |
if [ ${{ matrix.node_group_size_log2 }} -ge 11 ]; then | |
make test VECTOR_CAPACITY_LOG2=11 NODE_GROUP_SIZE_LOG2=${{ matrix.node_group_size_log2 }} | |
else | |
make test VECTOR_CAPACITY_LOG2=${{ matrix.node_group_size_log2 }} NODE_GROUP_SIZE_LOG2=${{ matrix.node_group_size_log2 }} | |
fi | |
msvc-build-test: | |
name: msvc build & test | |
needs: [ sanity-checks, python-lint-check, generate-binary-datasets ] | |
runs-on: self-hosted-windows | |
env: | |
# Shorten build path as much as possible | |
CARGO_TARGET_DIR: ${{ github.workspace }}/rs | |
NUM_THREADS: 18 | |
TEST_JOBS: 9 | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }} | |
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }} | |
PG_HOST: ${{ secrets.PG_HOST }} | |
RUN_ID: "$(hostname)-$([Math]::Floor((Get-Date).TimeOfDay.TotalSeconds))" | |
HTTP_CACHE_FILE: TRUE | |
WERROR: 0 | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: dataset/binary-demo | |
- name: Visual Studio Generator Build | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make all GEN="Visual Studio 17 2022" | |
make clean | |
- name: Test | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make test | |
- name: Test compression disabled | |
shell: cmd | |
env: | |
ENABLE_COMPRESSION: false | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make test | |
- name: Test fwd only rel tables | |
shell: cmd | |
env: | |
DEFAULT_REL_STORAGE_DIRECTION: FWD | |
GTEST_FILTER: "*~*:RelScanTest*" | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make test | |
- name: Test in mem | |
shell: cmd | |
env: | |
IN_MEM_MODE: true | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make test DEFAULT_REL_STORAGE_DIRECTION=BOTH | |
- name: Test recovery | |
shell: cmd | |
env: | |
AUTO_CHECKPOINT: false | |
CHECKPOINT_ON_CLOSE: false | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make test | |
- name: Test sparse frontier threshold at 2 | |
shell: cmd | |
env: | |
SPARSE_FRONTIER_THRESHOLD: 2 | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make test | |
- name: Ensure Python dependencies | |
run: pip install --user -r tools/python_api/requirements_dev.txt | |
- name: Python test | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make pytest | |
- name: Ensure Node.js dependencies | |
run: make nodejs-deps | |
- name: Node.js test | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make nodejstest | |
- name: Java test | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make javatest | |
- name: Rust test | |
shell: cmd | |
env: | |
KUZU_LOCAL_EXTENSIONS: ${{ github.workspace }}/extension | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make extension-release | |
make rusttest | |
- name: Rust test with pre-built library | |
env: | |
KUZU_SHARED: 1 | |
KUZU_INCLUDE_DIR: ${{ github.workspace }}/build/release/src | |
KUZU_LIBRARY_DIR: ${{ github.workspace }}/build/release/src | |
KUZU_LOCAL_EXTENSIONS: ${{ github.workspace }}/extension | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
set PATH=%PATH%;${{ github.workspace }}/build/release/src | |
make extension-release | |
make rusttest | |
sanity-checks: | |
name: sanity checks | |
runs-on: ubuntu-24.04 | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Check source headers for include guards | |
run: ./scripts/check-include-guards.sh src/include | |
./scripts/check-include-guards.sh test/include | |
- name: Check extension headers for include guards | |
run: | | |
for dir in extension/*/src/include ;do | |
./scripts/check-include-guards.sh "$dir" | |
done | |
- name: Checks source files for std::assert | |
run: ./scripts/check-no-std-assert.sh src | |
- name: Check extension files for std::assert | |
run: ./scripts/check-no-std-assert.sh extension | |
- name: Ensure generated grammar files are up to date | |
run: | | |
python3 scripts/antlr4/hash.py src/antlr4/keywords.txt src/antlr4/Cypher.g4 > tmphashfile | |
cmp tmphashfile scripts/antlr4/hash.md5 | |
rm tmphashfile | |
code-format: | |
name: code format | |
runs-on: ubuntu-24.04 | |
steps: | |
- uses: actions/checkout@v4 | |
with: | |
repository: ${{ github.event.pull_request.head.repo.full_name }} | |
ref: ${{ github.event.pull_request.head.ref }} | |
- name: Install clang-format | |
run: | | |
sudo apt-get update | |
sudo apt-get install -y clang-format-18 | |
- name: Check and fix source format | |
run: python3 scripts/run-clang-format.py --in-place --clang-format-executable /usr/bin/clang-format-18 -r src/ | |
- name: Check and fix test format | |
run: python3 scripts/run-clang-format.py --in-place --clang-format-executable /usr/bin/clang-format-18 -r test/ | |
- name: Check and fix tools format | |
run: python3 scripts/run-clang-format.py --in-place --clang-format-executable /usr/bin/clang-format-18 -r tools/ | |
- name: Check and fix extension format | |
run: python3 scripts/run-clang-format.py --in-place --clang-format-executable /usr/bin/clang-format-18 -r extension/ | |
- name: Format Python API | |
working-directory: tools/python_api | |
run: make format | |
- name: Update Rust | |
run: rustup update | |
- name: Format Rust API | |
working-directory: tools/rust_api | |
run: cargo fmt --all | |
- name: Fail if any change is detected on the master branch | |
if: github.ref == 'refs/heads/master' | |
run: git diff --exit-code | |
- name: Commit changes on non-master branches | |
uses: EndBug/add-and-commit@v9 | |
if: github.ref != 'refs/heads/master' | |
with: | |
author_name: "CI Bot" | |
message: "ci: auto code format" | |
python-lint-check: | |
name: python lint check | |
runs-on: macos-14 | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Setup Python | |
uses: actions/setup-python@v5 | |
with: | |
python-version: "3.11" | |
- name: Run Python lint | |
working-directory: tools/python_api | |
run: make check | |
benchmark: | |
name: benchmark | |
needs: [ gcc-build-test, clang-build-test ] | |
uses: ./.github/workflows/benchmark-workflow.yml | |
report-benchmark-result: | |
name: report benchmark result | |
needs: [ benchmark ] | |
runs-on: ubuntu-24.04 | |
if: github.event_name == 'pull_request' | |
steps: | |
- name: Download comparison results | |
uses: actions/download-artifact@v4 | |
with: | |
name: comparison-results | |
- name: Report benchmark result | |
uses: thollander/actions-comment-pull-request@v3 | |
with: | |
file-path: compare_result.md | |
comment-tag: benchmark-execution | |
clang-tidy: | |
name: clang tidy & clangd diagnostics check | |
needs: [ sanity-checks ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
GEN: Ninja | |
NUM_THREADS: 32 | |
CC: clang | |
CXX: clang++ | |
steps: | |
- uses: actions/checkout@v4 | |
# For `napi.h` header. | |
- name: Ensure Node.js dependencies | |
run: make nodejs-deps | |
- name: Check for clangd diagnostics | |
run: make clangd-diagnostics | |
- name: Run clang-tidy | |
run: make tidy | |
- name: Run clang-tidy analyzer | |
run: make tidy-analyzer | |
macos-build-test: | |
name: apple clang build & test | |
needs: [ sanity-checks, python-lint-check, generate-binary-datasets ] | |
runs-on: [ self-hosted, macOS ] | |
env: | |
TEST_JOBS: 16 | |
GEN: Ninja | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }} | |
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }} | |
PG_HOST: ${{ secrets.PG_HOST }} | |
RUN_ID: "$(hostname)-$(date +%s)" | |
HTTP_CACHE_FILE: TRUE | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Determine NUM_THREADS | |
run: | | |
export NUM_THREADS=$(($(sysctl -n hw.logicalcpu) * 2 / 3)) | |
if [ $NUM_THREADS -lt 12 ]; then | |
export NUM_THREADS=12 | |
fi | |
echo "NUM_THREADS=$NUM_THREADS" >> $GITHUB_ENV | |
echo "NUM_THREADS=$NUM_THREADS" | |
- name: Download binary-demo | |
uses: actions/download-artifact@v4 | |
with: | |
name: binary-demo | |
path: dataset/binary-demo | |
- name: Test | |
run: | | |
ulimit -n 10240 | |
make test | |
- name: Test in mem | |
env: | |
IN_MEM_MODE: true | |
run: | | |
make test | |
- name: Test checkpoint at threshold 0 | |
env: | |
AUTO_CHECKPOINT: true | |
CHECKPOINT_THRESHOLD: 0 | |
run: | | |
make test | |
- name: Test sparse frontier threshold at 2 | |
env: | |
SPARSE_FRONTIER_THRESHOLD: 2 | |
run: | | |
make test | |
- name: Test with deserializer debugging info | |
env: | |
ENABLE_DESER_DEBUG: 1 | |
# don't run any tests that use existing serialized data | |
# or any tests that serializing debugging info would make too slow | |
# also skip FSM-related tests as the debugging info can mess with database sizes | |
GTEST_FILTER: "*~*-*binary_demo*:*hash_leak*:*Reclaim*" | |
run: | | |
make test | |
- name: C and C++ Examples | |
run: | | |
ulimit -n 10240 | |
make example | |
- name: Ensure Python dependencies | |
run: pip3 install --user -r tools/python_api/requirements_dev.txt | |
- name: Python test | |
env: | |
PYBIND11_PYTHON_VERSION: 3.12 | |
run: | | |
ulimit -n 10240 | |
make pytest | |
- name: Ensure Node.js dependencies | |
run: make nodejs-deps | |
- name: Node.js test | |
run: | | |
ulimit -n 10240 | |
make nodejstest | |
- name: Java test | |
run: | | |
ulimit -n 10240 | |
export JAVA_HOME=`/usr/libexec/java_home` | |
make javatest | |
- name: Rust test | |
env: | |
KUZU_LOCAL_EXTENSIONS: ${{ github.workspace }}/extension | |
run: | | |
ulimit -n 10240 | |
source /Users/runner/.cargo/env | |
# `extension-release` disables KUZU_BUILD, but it is a dependency of `BUILD_EXAMPLES`, so disable it too. | |
make extension-release EXTRA_CMAKE_FLAGS="-DBUILD_EXAMPLES=OFF" | |
make rusttest | |
- name: Rust test with pre-built library | |
env: | |
KUZU_SHARED: 1 | |
KUZU_INCLUDE_DIR: ${{ github.workspace }}/build/release/src | |
KUZU_LIBRARY_DIR: ${{ github.workspace }}/build/release/src | |
KUZU_LOCAL_EXTENSIONS: ${{ github.workspace }}/extension | |
run: | | |
ulimit -n 10240 | |
source /Users/runner/.cargo/env | |
make extension-release | |
make rusttest | |
- name: Rust example | |
working-directory: examples/rust | |
run: | | |
ulimit -n 10240 | |
source /Users/runner/.cargo/env | |
cargo build --locked --all-features | |
  # Smoke-tests the interactive shell (CLI) on a stock GitHub-hosted runner.
  # Unlike the extension jobs below, this uses no secrets and no self-hosted hardware.
  shell-test:
    name: shell test
    runs-on: ubuntu-24.04
    needs: [ sanity-checks ]
    steps:
      - uses: actions/checkout@v4
      - name: Build
        # USE_STD_FORMAT=1 presumably switches the build to std::format — confirm against the Makefile.
        run: make release NUM_THREADS=$(nproc) USE_STD_FORMAT=1
      - name: Test
        # The shell tests are driven from Python via pexpect.
        run: |
          pip3 install pytest pexpect
          make shell-test
linux-extension-test: | |
name: linux extension test | |
needs: [ gcc-build-test, clang-build-test ] | |
runs-on: kuzu-self-hosted-testing | |
env: | |
NUM_THREADS: 32 | |
TEST_JOBS: 16 | |
CLANGD_DIAGNOSTIC_JOBS: 32 | |
CLANGD_DIAGNOSTIC_INSTANCES: 6 | |
GEN: Ninja | |
CC: gcc | |
CXX: g++ | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }} | |
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }} | |
AZURE_CONNECTION_STRING: ${{ format('DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix=core.windows.net', secrets.AZURE_ACCOUNT_NAME, secrets.AZURE_ACCOUNT_KEY) }} | |
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }} | |
AZURE_PUBLIC_CONTAINER: ${{ secrets.AZURE_PUBLIC_CONTAINER }} | |
RUN_ID: "$(hostname)-$(date +%s)" | |
HTTP_CACHE_FILE: FALSE | |
ASAN_OPTIONS: detect_leaks=1 | |
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Update PostgreSQL host | |
working-directory: extension/postgres/test/test_files | |
env: | |
PG_FNAME: postgres.test | |
SQL_FNAME: sql_query.test | |
FIND: "localhost" | |
run: | | |
node -e 'fs=require("fs");fs.readFile(process.env.PG_FNAME,"utf8",(err,data)=>{if(err!=null)throw err;fs.writeFile(process.env.PG_FNAME,data.replaceAll(process.env.FIND,process.env.PG_HOST),"utf8",e=>{if(e!=null)throw e;});});' | |
node -e 'fs=require("fs");fs.readFile(process.env.SQL_FNAME,"utf8",(err,data)=>{if(err!=null)throw err;fs.writeFile(process.env.SQL_FNAME,data.replaceAll(process.env.FIND,process.env.PG_HOST),"utf8",e=>{if(e!=null)throw e;});});' | |
- name: Install dependencies | |
run: pip install rangehttpserver requests | |
# shell needs to be built first to generate the dataset provided by the server | |
- name: Extension test build | |
run: make extension-test-build | |
- name: Extension test | |
run: | | |
python3 scripts/generate-tinysnb.py | |
python3 scripts/setup-extension-repo.py & | |
make extension-test | |
- name: Extension test in mem | |
env: | |
IN_MEM_MODE: true | |
HTTP_CACHE_FILE: false | |
run: | | |
make extension-test | |
- name: Extension test with checkpoint at threshold 0 | |
env: | |
AUTO_CHECKPOINT: true | |
CHECKPOINT_THRESHOLD: 0 | |
run: | | |
make extension-test | |
- name: Extension test recovery | |
env: | |
AUTO_CHECKPOINT: false | |
CHECKPOINT_ON_CLOSE: false | |
run: | | |
make extension-test && make clean | |
# TODO(Royi) there is currently a known memory leak issue in the DuckDB postgres/delta/iceberg extension so we avoid running the postgres/delta extension tests with Leak Sanitizer | |
- name: Extension test with asan | |
run: | | |
make clean && make extension-test ASAN=1 WERROR=0 EXTENSION_TEST_EXCLUDE_FILTER="(delta|postgres|iceberg)" && make clean | |
- name: Extension test coverage | |
run: make extension-lcov | |
- name: Generate coverage report | |
run: | | |
lcov --config-file .lcovrc -c -d ./ --no-external -o cover_extension.info &&\ | |
lcov --remove cover_extension.info $(< .github/workflows/lcov_exclude) -o cover_extension.info | |
- name: Upload coverage report | |
uses: codecov/[email protected] | |
with: | |
file: cover_extension.info | |
flags: extension | |
name: extension-coverage | |
- name: Static link extension test | |
run: | |
make extension-static-link-test | |
- name: Static link extension test in mem mode | |
env: | |
IN_MEM_MODE: true | |
HTTP_CACHE_FILE: false | |
run: | | |
make extension-static-link-test && make clean | |
  # macOS counterpart of the Linux extension-test job: full extension suite plus
  # the in-memory, checkpoint-threshold-0, recovery, and static-link variants.
  macos-extension-test:
    name: macos extension test
    needs: [ macos-build-test ]
    runs-on: [ self-hosted, macOS ]
    env:
      TEST_JOBS: 16
      GEN: Ninja
      UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }}
      UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }}
      AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
      AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
      # Generic AWS_* variables deliberately reuse the AWS_S3_* secrets.
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
      GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }}
      GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }}
      AZURE_CONNECTION_STRING: ${{ format('DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix=core.windows.net', secrets.AZURE_ACCOUNT_NAME, secrets.AZURE_ACCOUNT_KEY) }}
      AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
      AZURE_PUBLIC_CONTAINER: ${{ secrets.AZURE_PUBLIC_CONTAINER }}
      PG_HOST: ${{ secrets.PG_HOST }}
      OLLAMA_URL: ${{ secrets.OLLAMA_URL }}
      # NOTE(review): Actions does not shell-expand env values, so RUN_ID is the
      # literal string "$(hostname)-$(date +%s)" — presumably expanded later by a
      # shell consumer; confirm.
      RUN_ID: "$(hostname)-$(date +%s)"
      HTTP_CACHE_FILE: FALSE
    steps:
      - uses: actions/checkout@v4
      - name: Determine NUM_THREADS
        # Use 2/3 of the logical CPUs, floored at 12, and export via GITHUB_ENV
        # so later steps in this job see it.
        run: |
          export NUM_THREADS=$(($(sysctl -n hw.logicalcpu) * 2 / 3))
          if [ $NUM_THREADS -lt 12 ]; then
            export NUM_THREADS=12
          fi
          echo "NUM_THREADS=$NUM_THREADS" >> $GITHUB_ENV
          echo "NUM_THREADS=$NUM_THREADS"
      - name: Update PostgreSQL host
        # Rewrites "localhost" to the real PG host inside the two test files.
        working-directory: extension/postgres/test/test_files
        env:
          PG_TEST_FNAME: postgres.test
          SQL_TEST_FNAME: sql_query.test
          FIND: "localhost"
        run: |
          node -e 'fs=require("fs");fs.readFile(process.env.PG_TEST_FNAME,"utf8",(err,data)=>{if(err!=null)throw err;fs.writeFile(process.env.PG_TEST_FNAME,data.replaceAll(process.env.FIND,process.env.PG_HOST),"utf8",e=>{if(e!=null)throw e;});});' | |
          node -e 'fs=require("fs");fs.readFile(process.env.SQL_TEST_FNAME,"utf8",(err,data)=>{if(err!=null)throw err;fs.writeFile(process.env.SQL_TEST_FNAME,data.replaceAll(process.env.FIND,process.env.PG_HOST),"utf8",e=>{if(e!=null)throw e;});});' | |
      - name: Install dependencies
        run: pip3 install rangehttpserver requests
      # shell needs to be built first to generate the dataset provided by the server
      - name: Extension test build
        run: make extension-test-build
      - name: Extension test
        run: |
          python3 scripts/generate-tinysnb.py
          # Local extension-repo server runs in the background for the tests.
          python3 scripts/setup-extension-repo.py &
          make extension-test
      - name: Extension test in mem
        env:
          IN_MEM_MODE: true
          HTTP_CACHE_FILE: false
        run: |
          make extension-test
      - name: Extension test with checkpoint at threshold 0
        env:
          AUTO_CHECKPOINT: true
          CHECKPOINT_THRESHOLD: 0
        run: |
          make extension-test
      - name: Extension test recovery
        env:
          AUTO_CHECKPOINT: false
          CHECKPOINT_ON_CLOSE: false
        run: |
          make extension-test && make clean
      - name: Static link extension test
        run: |
          python3 scripts/setup-extension-repo.py &
          make extension-static-link-test
      - name: Static link extension test in mem mode
        env:
          IN_MEM_MODE: true
          HTTP_CACHE_FILE: false
        run: |
          make extension-static-link-test && make clean
windows-extension-test: | |
name: windows extension test | |
needs: [ msvc-build-test ] | |
runs-on: self-hosted-windows | |
env: | |
# Shorten build path as much as possible | |
CARGO_TARGET_DIR: ${{ github.workspace }}/rs | |
NUM_THREADS: 18 | |
TEST_JOBS: 9 | |
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }} | |
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }} | |
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} | |
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} | |
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }} | |
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }} | |
AZURE_CONNECTION_STRING: ${{ format('DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix=core.windows.net', secrets.AZURE_ACCOUNT_NAME, secrets.AZURE_ACCOUNT_KEY) }} | |
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }} | |
AZURE_PUBLIC_CONTAINER: ${{ secrets.AZURE_PUBLIC_CONTAINER }} | |
OLLAMA_URL: ${{ secrets.OLLAMA_URL }} | |
PG_HOST: ${{ secrets.PG_HOST }} | |
RUN_ID: "$(hostname)-$([Math]::Floor((Get-Date).TimeOfDay.TotalSeconds))" | |
HTTP_CACHE_FILE: FALSE | |
WERROR: 0 | |
steps: | |
- uses: actions/checkout@v4 | |
- name: Install dependencies | |
run: pip install rangehttpserver requests | |
- name: Update PostgreSQL host | |
working-directory: extension/postgres/test/test_files | |
env: | |
PG_FNAME: postgres.test | |
SQL_FNAME: sql_query.test | |
FIND: "localhost" | |
run: | | |
$fname = $env:PG_FNAME | |
$find = $env:FIND | |
$replace = $env:PG_HOST | |
$content = Get-Content -Path $fname | |
$content = $content -replace [regex]::Escape($find), $replace | |
Set-Content -Path $fname -Value $content | |
$fname = $env:SQL_FNAME | |
$find = $env:FIND | |
$replace = $env:PG_HOST | |
$content = Get-Content -Path $fname | |
$content = $content -replace [regex]::Escape($find), $replace | |
Set-Content -Path $fname -Value $content | |
- name: Extension test build | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make extension-test-build | |
- name: Extension test | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
python3 scripts/generate-tinysnb.py | |
if %errorlevel% neq 0 exit /b %errorlevel% | |
start /b python scripts/setup-extension-repo.py | |
make extension-test | |
- name: Extension test in mem | |
shell: cmd | |
env: | |
IN_MEM_MODE: true | |
HTTP_CACHE_FILE: false | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
python3 scripts/generate-tinysnb.py | |
if %errorlevel% neq 0 exit /b %errorlevel% | |
start /b python scripts/setup-extension-repo.py | |
make extension-test | |
- name: Extension test with checkpoint at threshold 0 | |
shell: cmd | |
env: | |
AUTO_CHECKPOINT: true | |
CHECKPOINT_THRESHOLD: 0 | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
python3 scripts/generate-tinysnb.py | |
if %errorlevel% neq 0 exit /b %errorlevel% | |
start /b python scripts/setup-extension-repo.py | |
make extension-test | |
- name: Extension test recovery | |
shell: cmd | |
env: | |
AUTO_CHECKPOINT: false | |
CHECKPOINT_ON_CLOSE: false | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
python3 scripts/generate-tinysnb.py | |
if %errorlevel% neq 0 exit /b %errorlevel% | |
start /b python scripts/setup-extension-repo.py | |
make extension-test | |
- name: Clean | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make clean | |
- name: Static link extension test | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
if %errorlevel% neq 0 exit /b %errorlevel% | |
start /b make extension-static-link-test | |
- name: Static link extension test in mem mode | |
shell: cmd | |
env: | |
IN_MEM_MODE: true | |
HTTP_CACHE_FILE: false | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
if %errorlevel% neq 0 exit /b %errorlevel% | |
start /b make extension-static-link-test | |
- name: Clean | |
shell: cmd | |
run: | | |
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat" | |
make clean |