diff --git a/.eslintrc b/.eslintrc
index 786b7e5a..92813260 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -7,19 +7,18 @@
     "jest": true
   },
   "parser": "@typescript-eslint/parser",
-  "extends": [
-    "eslint:recommended",
-    "plugin:@typescript-eslint/recommended",
-    "plugin:prettier/recommended",
-    "prettier"
-  ],
-  "plugins": [
-    "import"
-  ],
   "parserOptions": {
     "project": "tsconfig.json",
     "sourceType": "module"
   },
+  "plugins": [
+    "import"
+  ],
+  "extends": [
+    "eslint:recommended",
+    "plugin:@typescript-eslint/recommended",
+    "plugin:prettier/recommended"
+  ],
   "rules": {
     "linebreak-style": ["error", "unix"],
     "no-empty": 1,
diff --git a/.github/workflows/codesee-arch-diagram.yml b/.github/workflows/codesee-arch-diagram.yml
deleted file mode 100644
index 80f58e63..00000000
--- a/.github/workflows/codesee-arch-diagram.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-# This workflow was added by CodeSee. Learn more at https://codesee.io/
-# This is v2.0 of this workflow file
-on:
-  push:
-    branches:
-      - staging
-  pull_request_target:
-    types: [opened, synchronize, reopened]
-
-name: CodeSee
-
-permissions: read-all
-
-jobs:
-  codesee:
-    runs-on: ubuntu-latest
-    continue-on-error: true
-    name: Analyze the repo with CodeSee
-    steps:
-      - uses: Codesee-io/codesee-action@v2
-        with:
-          codesee-token: ${{ secrets.CODESEE_ARCH_DIAG_API_TOKEN }}
-          codesee-url: https://app.codesee.io
diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml
new file mode 100644
index 00000000..ef95fe0b
--- /dev/null
+++ b/.github/workflows/staging.yml
@@ -0,0 +1,27 @@
+name: "CI / Staging"
+
+on:
+  push:
+    branches:
+      - staging
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  use-native-library-js-staging:
+    permissions:
+      contents: write
+      actions: write
+      checks: write
+      pull-requests: write
+    uses: MatrixAI/.github/.github/workflows/native-library-js-staging.yml@master
+    secrets:
+      GH_TOKEN: ${{ secrets.GH_TOKEN }}
+      GIT_AUTHOR_EMAIL: ${{ secrets.GIT_AUTHOR_EMAIL }}
+      GIT_AUTHOR_NAME: ${{ secrets.GIT_AUTHOR_NAME }}
+      GIT_COMMITTER_EMAIL: ${{ secrets.GIT_COMMITTER_EMAIL }}
+      GIT_COMMITTER_NAME: ${{ secrets.GIT_COMMITTER_NAME }}
+
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
deleted file mode 100644
index 41f22e38..00000000
--- a/.gitlab-ci.yml
+++ /dev/null
@@ -1,371 +0,0 @@
-workflow:
-  rules:
-    # Disable merge request pipelines
-    - if: $CI_MERGE_REQUEST_ID
-      when: never
-    - when: always
-
-variables:
-  GIT_SUBMODULE_STRATEGY: recursive
-  GH_PROJECT_PATH: "MatrixAI/${CI_PROJECT_NAME}"
-  GH_PROJECT_URL: "https://${GITHUB_TOKEN}@github.com/${GH_PROJECT_PATH}.git"
-  # Cache .npm
-  npm_config_cache: "${CI_PROJECT_DIR}/tmp/npm"
-  # Prefer offline node module installation
-  npm_config_prefer_offline: "true"
-  # Homebrew cache only used by macos runner
-  HOMEBREW_CACHE: "${CI_PROJECT_DIR}/tmp/Homebrew"
-
-default:
-  interruptible: true
-  before_script:
-    # Replace this in windows runners that use powershell
-    # with `mkdir -Force "$CI_PROJECT_DIR/tmp"`
-    - mkdir -p "$CI_PROJECT_DIR/tmp"
-
-# Cached directories shared between jobs & pipelines per-branch per-runner
-cache:
-  key: $CI_COMMIT_REF_SLUG
-  # Preserve cache even if job fails
-  when: 'always'
-  paths:
-    - ./tmp/npm/
-    # Homebrew cache is only used by the macos runner
-    - ./tmp/Homebrew
-    # Chocolatey cache is only used by the windows runner
-    - ./tmp/chocolatey/
-    # `jest` cache is configured in jest.config.js
-    - ./tmp/jest/
-    # `npm_config_devdir` cache for `node-gyp` headers and libraries used by windows and macos
-    - ./tmp/node-gyp
-
-stages:
-  - check        # Linting, unit tests
-  - build        # Cross-platform library compilation, unit tests
-  - integration  # Cross-platform application bundling, integration tests, and pre-release
-  - release      # Cross-platform distribution and deployment
-
-image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner
-
-check:lint:
-  stage: check
-  needs: []
-  script:
-    - >
-      nix-shell --arg ci true --run $'
-      npm run lint;
-      npm run lint-native;
-      npm run lint-shell;
-      '
-  rules:
-    # Runs on feature and staging commits and ignores version commits
-    - if: $CI_COMMIT_BRANCH =~ /^(?:feature.*|staging)$/ && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Runs on tag pipeline where the tag is a prerelease or release version
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Manually run on commits other than master and ignore version commits
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != 'master' && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-      when: manual
-
-check:test:
-  stage: check
-  needs: []
-  script:
-    - >
-      nix-shell --arg ci true --run $'
-      npm run prebuild --verbose;
-      npm test -- --ci --coverage;
-      '
-  artifacts:
-    when: always
-    reports:
-      junit:
-        - ./tmp/junit/junit.xml
-      coverage_report:
-        coverage_format: cobertura
-        path: ./tmp/coverage/cobertura-coverage.xml
-  coverage: '/All files[^|]*\|[^|]*\s+([\d\.]+)/'
-  rules:
-    # Runs on feature commits and ignores version commits
-    - if: $CI_COMMIT_BRANCH =~ /^feature.*$/ && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Manually run on commits other than master and staging and ignore version commits
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH !~ /^(?:master|staging)$/ && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-      when: manual
-
-build:merge:
-  stage: build
-  needs: []
-  allow_failure: true
-  script:
-    # Required for `gh pr create`
-    - git remote add upstream "$GH_PROJECT_URL"
-    - >
-      nix-shell --arg ci true --run $'
-      gh pr create \
-        --head staging \
-        --base master \
-        --title "ci: merge staging to master" \
-        --body "This is an automatic PR generated by the pipeline CI/CD. This will be automatically fast-forward merged if successful." \
-        --assignee "@me" \
-        --no-maintainer-edit \
-        --repo "$GH_PROJECT_PATH" || true;
-      printf "Pipeline Attempt on ${CI_PIPELINE_ID} for ${CI_COMMIT_SHA}\n\n${CI_PIPELINE_URL}" \
-      | gh pr comment staging \
-        --body-file - \
-        --repo "$GH_PROJECT_PATH";
-      '
-  rules:
-    # Runs on staging commits and ignores version commits
-    - if: $CI_COMMIT_BRANCH == 'staging' && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Runs on tag pipeline where the tag is a prerelease or release version
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-
-build:dist:
-  stage: build
-  needs: []
-  script:
-    - >
-      nix-shell --arg ci true --run $'
-      npm run build --ignore-scripts --verbose;
-      '
-  artifacts:
-    when: always
-    paths:
-      - ./dist
-  rules:
-    # Runs on staging commits and ignores version commits
-    - if: $CI_COMMIT_BRANCH == 'staging' && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Runs on tag pipeline where the tag is a prerelease or release version
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-
-build:linux:
-  stage: build
-  needs: []
-  variables:
-    # Only x64 architecture is needed
-    npm_config_arch: "x64"
-  script:
-    - >
-      nix-shell --arg ci true --run $'
-      npm run prebuild --verbose;
-      npm test -- --ci --coverage;
-      npm run bench;
-      '
-  artifacts:
-    when: always
-    reports:
-      junit:
-        - ./tmp/junit/junit.xml
-      coverage_report:
-        coverage_format: cobertura
-        path: ./tmp/coverage/cobertura-coverage.xml
-      metrics: ./benches/results/metrics.txt
-    paths:
-      - ./prebuilds/
-  coverage: '/All files[^|]*\|[^|]*\s+([\d\.]+)/'
-  rules:
-    # Runs on staging commits and ignores version commits
-    - if: $CI_COMMIT_BRANCH == 'staging' && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Runs on tag pipeline where the tag is a prerelease or release version
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-
-build:windows:
-  stage: build
-  needs: []
-  tags:
-    - windows
-  variables:
-    # Location node-gyp installed headers and static libraries
-    npm_config_devdir: "${CI_PROJECT_DIR}/tmp/node-gyp"
-    # Only x64 architecture is needed
-    npm_config_arch: "x64"
-  before_script:
-    - mkdir -Force "$CI_PROJECT_DIR/tmp"
-    - Import-Module $env:ChocolateyInstall\helpers\chocolateyProfile.psm1 # Added line
-  script:
-    - ./scripts/choco-install.ps1
-    - refreshenv
-    - npm install --ignore-scripts
-    - $env:Path = "$(npm root)\.bin;" + $env:Path
-    - npm run prebuild --verbose
-    - npm test -- --ci --coverage
-    - npm run bench
-  artifacts:
-    when: always
-    reports:
-      junit:
-        - ./tmp/junit/junit.xml
-      coverage_report:
-        coverage_format: cobertura
-        path: ./tmp/coverage/cobertura-coverage.xml
-      metrics: ./benches/results/metrics.txt
-    paths:
-      - ./prebuilds/
-  rules:
-    # Runs on staging commits and ignores version commits
-    - if: $CI_COMMIT_BRANCH == 'staging' && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Runs on tag pipeline where the tag is a prerelease or release version
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-
-build:macos:
-  stage: build
-  needs: []
-  tags:
-    - saas-macos-medium-m1
-  image: macos-12-xcode-14
-  variables:
-    # Location node-gyp installed headers and static libraries
-    npm_config_devdir: "${CI_PROJECT_DIR}/tmp/node-gyp"
-    # Produce universal binary
-    npm_config_arch: 'x64+arm64'
-  script:
-    - eval "$(brew shellenv)"
-    - ./scripts/brew-install.sh
-    - hash -r
-    - npm install --ignore-scripts
-    - export PATH="$(npm root)/.bin:$PATH"
-    - npm run prebuild --verbose
-    - npm test -- --ci --coverage
-    - npm run bench
-  artifacts:
-    when: always
-    reports:
-      junit:
-        - ./tmp/junit/junit.xml
-      coverage_report:
-        coverage_format: cobertura
-        path: ./tmp/coverage/cobertura-coverage.xml
-      metrics: ./benches/results/metrics.txt
-    paths:
-      - ./prebuilds/
-  rules:
-    # Runs on staging commits and ignores version commits
-    - if: $CI_COMMIT_BRANCH == 'staging' && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Runs on tag pipeline where the tag is a prerelease or release version
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-
-build:prerelease:
-  stage: build
-  needs:
-    - build:dist
-    - build:linux
-    - build:windows
-    - build:macos
-  # Don't interrupt publishing job
-  interruptible: false
-  script:
-    - echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" > ./.npmrc
-    - echo 'Publishing library prerelease'
-    - >
-      nix-shell --arg ci true --run $'
-      npm publish --tag prerelease --access public;
-      '
-    - >
-      for d in prebuilds/*; do
-        tar \
-          --create \
-          --verbose \
-          --file="prebuilds/$(basename $d).tar" \
-          --directory=prebuilds \
-          "$(basename $d)";
-      done
-    - >
-      nix-shell --arg ci true --run $'
-      gh release \
-        create "$CI_COMMIT_TAG" \
-        prebuilds/*.tar \
-        --title "${CI_COMMIT_TAG}-$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
-        --notes "" \
-        --prerelease \
-        --target staging \
-        --repo "$GH_PROJECT_PATH";
-      '
-  after_script:
-    - rm -f ./.npmrc
-  rules:
-    # Only runs on tag pipeline where the tag is a prerelease version
-    # This requires dependencies to also run on tag pipeline
-    # However version tag comes with a version commit
-    # Dependencies must not run on the version commit
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+-.*[0-9]+$/
-
-integration:merge:
-  stage: integration
-  needs:
-    - build:merge
-    - job: build:linux
-      optional: true
-    - job: build:windows
-      optional: true
-    - job: build:macos
-      optional: true
-  # Requires mutual exclusion
-  resource_group: integration:merge
-  allow_failure: true
-  variables:
-    # Ensure that CI/CD is fetching all commits
-    # this is necessary to checkout origin/master
-    # and to also merge origin/staging
-    GIT_DEPTH: 0
-  script:
-    - >
-      nix-shell --arg ci true --run $'
-      printf "Pipeline Succeeded on ${CI_PIPELINE_ID} for ${CI_COMMIT_SHA}\n\n${CI_PIPELINE_URL}" \
-      | gh pr comment staging \
-        --body-file - \
-        --repo "$GH_PROJECT_PATH";
-      '
-    - git remote add upstream "$GH_PROJECT_URL"
-    - git checkout origin/master
-    # Merge up to the current commit (not the latest commit)
-    - git merge --ff-only "$CI_COMMIT_SHA"
-    - git push upstream HEAD:master
-  rules:
-    # Runs on staging commits and ignores version commits
-    - if: $CI_COMMIT_BRANCH == 'staging' && $CI_COMMIT_TITLE !~ /^[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-    # Runs on tag pipeline where the tag is a prerelease or release version
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(?:-.*[0-9]+)?$/
-
-release:distribution:
-  stage: release
-  needs:
-    - build:dist
-    - build:linux
-    - build:windows
-    - build:macos
-    - integration:merge
-  # Don't interrupt publishing job
-  interruptible: false
-  script:
-    - echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" > ./.npmrc
-    - echo 'Publishing library'
-    - >
-      nix-shell --arg ci true --run $'
-      npm publish --access public;
-      '
-    - >
-      for d in prebuilds/*; do
-        tar \
-          --create \
-          --verbose \
-          --file="prebuilds/$(basename $d).tar" \
-          --directory=prebuilds \
-          "$(basename $d)";
-      done
-    - >
-      nix-shell --arg ci true --run $'
-      gh release \
-        create "$CI_COMMIT_TAG" \
-        prebuilds/*.tar \
-        --title "${CI_COMMIT_TAG}-$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
-        --notes "" \
-        --target master \
-        --repo "$GH_PROJECT_PATH";
-      '
-  after_script:
-    - rm -f ./.npmrc
-  rules:
-    # Only runs on tag pipeline where the tag is a release version
-    # This requires dependencies to also run on tag pipeline
-    # However version tag comes with a version commit
-    # Dependencies must not run on the version commit
-    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/
diff --git a/README.md b/README.md
index 4e0ad772..8ec7e273 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,5 @@
 # js-db
 
-staging: [![pipeline status](https://gitlab.com/MatrixAI/open-source/js-db/badges/staging/pipeline.svg)](https://gitlab.com/MatrixAI/open-source/js-db/commits/staging)
-master: [![pipeline status](https://gitlab.com/MatrixAI/open-source/js-db/badges/master/pipeline.svg)](https://gitlab.com/MatrixAI/open-source/js-db/commits/master)
-
 DB is library managing key value state for MatrixAI's JavaScript/TypeScript applications.
 
 This forks classic-level's C++ binding code around LevelDB 1.20. Differences from classic-level:
@@ -89,7 +86,7 @@ If you already cloned, run this:
 git submodule update --init --recursive
 ```
 
-Run `nix-shell`, and once you're inside, you can use:
+Run `nix develop`, and once you're inside, you can use:
 
 ```sh
 # install (or reinstall packages from package.json)
@@ -97,7 +94,7 @@ npm install
 # build the dist
 npm run build
 # run the repl (this allows you to import from ./src)
-npm run ts-node
+npm run tsx
 # run the tests
 npm run test
 # lint the source code
diff --git a/benches/db_1KiB.ts b/benches/db_1KiB.ts
index 35cd596e..f2522c6b 100644
--- a/benches/db_1KiB.ts
+++ b/benches/db_1KiB.ts
@@ -2,12 +2,14 @@ import os from 'os';
 import fs from 'fs';
 import path from 'path';
 import crypto from 'crypto';
+import url from 'node:url';
 import b from 'benny';
 import Logger, { LogLevel, StreamHandler } from '@matrixai/logger';
-import DB from '@/DB';
-import { suiteCommon } from './utils';
+import { suiteCommon } from './utils/utils.js';
+import DB from '#DB.js';
 
 const logger = new Logger('DB1KiB Bench', LogLevel.WARN, [new StreamHandler()]);
+const filename = url.fileURLToPath(new URL(import.meta.url));
 
 async function main() {
   const dataDir = await fs.promises.mkdtemp(
@@ -18,7 +20,7 @@
   const data0 = crypto.randomBytes(0);
   const data1KiB = crypto.randomBytes(1024);
   const summary = await b.suite(
-    path.basename(__filename, path.extname(__filename)),
+    path.basename(filename, path.extname(filename)),
     b.add('get 1 KiB of data', async () => {
       await db.put('1kib', data1KiB, true);
       return async () => {
@@ -44,8 +46,7 @@
   });
   return summary;
 }
-
-if (require.main === module) {
+if (process.argv[1] === url.fileURLToPath(import.meta.url)) {
   void main();
 }
 
diff --git a/benches/db_1MiB.ts b/benches/db_1MiB.ts
index fe000678..9eabc380 100644
--- a/benches/db_1MiB.ts
+++ b/benches/db_1MiB.ts
@@ -2,12 +2,14 @@ import os from 'os';
 import fs from 'fs';
 import path from 'path';
 import crypto from 'crypto';
+import url from 'node:url';
 import b from 'benny';
 import Logger, { LogLevel, StreamHandler } from '@matrixai/logger';
-import DB from '@/DB';
-import { suiteCommon } from './utils';
+import { suiteCommon } from './utils/utils.js';
+import DB from '#DB.js';
 
 const logger = new Logger('DB1MiB Bench', LogLevel.WARN, [new StreamHandler()]);
+const filename = url.fileURLToPath(new URL(import.meta.url));
 
 async function main() {
   const dataDir = await fs.promises.mkdtemp(
@@ -18,7 +20,7 @@ async function main() {
   const data0 = crypto.randomBytes(0);
   const data1MiB = crypto.randomBytes(1024 * 1024);
   const summary = await b.suite(
-    path.basename(__filename, path.extname(__filename)),
+    path.basename(filename, path.extname(filename)),
     b.add('get 1 MiB of data', async () => {
       await db.put('1mib', data1MiB, true);
       return async () => {
@@ -45,7 +47,7 @@
   return summary;
 }
 
-if (require.main === module) {
+if (process.argv[1] === url.fileURLToPath(import.meta.url)) {
   void main();
 }
 
diff --git a/benches/index.ts b/benches/index.ts
index 5c96be72..fce10d30 100644
--- a/benches/index.ts
+++ b/benches/index.ts
@@ -2,26 +2,29 @@
 
 import fs from 'fs';
 import path from 'path';
+import url from 'node:url';
 import si from 'systeminformation';
-import DB1KiB from './db_1KiB';
-import DB1MiB from './db_1MiB';
+import DB1KiB from './db_1KiB.js';
+import DB1MiB from './db_1MiB.js';
+
+const dirname = url.fileURLToPath(new URL('.', import.meta.url));
 
 async function main(): Promise<void> {
-  await fs.promises.mkdir(path.join(__dirname, 'results'), { recursive: true });
+  await fs.promises.mkdir(path.join(dirname, 'results'), { recursive: true });
   await DB1KiB();
   await DB1MiB();
   const resultFilenames = await fs.promises.readdir(
-    path.join(__dirname, 'results'),
+    path.join(dirname, 'results'),
   );
   const metricsFile = await fs.promises.open(
-    path.join(__dirname, 'results', 'metrics.txt'),
+    path.join(dirname, 'results', 'metrics.txt'),
     'w',
   );
   let concatenating = false;
   for (const resultFilename of resultFilenames) {
     if (/.+_metrics\.txt$/.test(resultFilename)) {
       const metricsData = await fs.promises.readFile(
-        path.join(__dirname, 'results', resultFilename),
+        path.join(dirname, 'results', resultFilename),
       );
       if (concatenating) {
         await metricsFile.write('\n');
@@ -37,7 +40,7 @@ async function main(): Promise<void> {
     system: 'model, manufacturer',
   });
   await fs.promises.writeFile(
-    path.join(__dirname, 'results', 'system.json'),
+    path.join(dirname, 'results', 'system.json'),
     JSON.stringify(systemData, null, 2),
   );
 }
diff --git a/benches/results/db_1KiB.chart.html b/benches/results/db_1KiB.chart.html
index c29540ff..25b68a2b 100644
--- a/benches/results/db_1KiB.chart.html
+++ b/benches/results/db_1KiB.chart.html
@@ -28,7 +28,7 @@