diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 6418320..9422350 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -113,3 +113,28 @@ jobs:
         with:
           files: dist/*.dawn.node
 
+  package:
+    needs: build
+    name: package
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: Use Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20.x'
+
+      - name: Print env
+        run: |
+          echo github.event.action: ${{ github.event.action }}
+          echo github.event_name: ${{ github.event_name }}
+
+      - name: Download and package
+        shell: bash
+        run: |
+          npm ci
+          node build/download-run-artifacts.js "--repo=${{ github.repository }}" "--run_id=${{ github.run_id }}"
diff --git a/build/download-run-artifacts.js b/build/download-run-artifacts.js
new file mode 100644
index 0000000..bca7123
--- /dev/null
+++ b/build/download-run-artifacts.js
@@ -0,0 +1,105 @@
+import fs from 'node:fs';
+import path from 'node:path';
+import {unzip} from 'unzipit';
+
+import * as github from './github.js';
+import {
+  parseArgs,
+} from './utils.js'
+
+async function downloadFileFromZip(url, filepath) {
+  const res = await fetch(url);
+  const zipData = await res.arrayBuffer();
+  const {entries} = await unzip(zipData);
+  return Promise.all(Object.entries(entries).map(async ([name, entry]) => {
+    const data = await entry.arrayBuffer();
+    const filename = path.join(filepath, name);
+    console.log('downloaded:', filename);
+    fs.mkdirSync(filepath, {recursive: true});
+    fs.writeFileSync(filename, new Uint8Array(data));
+    return filename;
+  }));
+}
+
+const options = {
+  repo: { type: 'string', inlineValue: true, required: true, description: 'owner/name of repo' },
+  run_id: { type: 'string', inlineValue: true, required: true, description: 'run_id from action' },
+};
+const { values: args } = parseArgs({ args: process.argv.slice(2), options });
+
+/*
+const data = {
+  "total_count": 3,
+  "artifacts": [
+    {
+      "id": 2386423695,
+      "node_id": "MDg6QXJ0aWZhY3QyMzg2NDIzNjk1",
+      "name": "darwin-arm64.dawn.node",
+      "size_in_bytes": 10841703,
+      "url": "https://api.github.com/repos/greggman/node-webgpu/actions/artifacts/2386423695",
+      "archive_download_url": "https://api.github.com/repos/greggman/node-webgpu/actions/artifacts/2386423695/zip",
+      "expired": false,
+      "created_at": "2025-01-04T23:01:06Z",
+      "updated_at": "2025-01-04T23:01:06Z",
+      "expires_at": "2025-04-04T22:49:48Z",
+      "workflow_run": {
+        "id": 12614358725,
+        "repository_id": 911859581,
+        "head_repository_id": 911859581,
+        "head_branch": "main",
+        "head_sha": "3bb7a9fec4559ddc789c424b92a92122ad09c1f4"
+      }
+    },
+    {
+      "id": 2386431784,
+      "node_id": "MDg6QXJ0aWZhY3QyMzg2NDMxNzg0",
+      "name": "linux-x64.dawn.node",
+      "size_in_bytes": 120766916,
+      "url": "https://api.github.com/repos/greggman/node-webgpu/actions/artifacts/2386431784",
+      "archive_download_url": "https://api.github.com/repos/greggman/node-webgpu/actions/artifacts/2386431784/zip",
+      "expired": false,
+      "created_at": "2025-01-04T23:08:23Z",
+      "updated_at": "2025-01-04T23:08:23Z",
+      "expires_at": "2025-04-04T22:49:48Z",
+      "workflow_run": {
+        "id": 12614358725,
+        "repository_id": 911859581,
+        "head_repository_id": 911859581,
+        "head_branch": "main",
+        "head_sha": "3bb7a9fec4559ddc789c424b92a92122ad09c1f4"
+      }
+    },
+    {
+      "id": 2386436118,
+      "node_id": "MDg6QXJ0aWZhY3QyMzg2NDM2MTE4",
+      "name": "win32-x64.dawn.node",
+      "size_in_bytes": 11265694,
+      "url": "https://api.github.com/repos/greggman/node-webgpu/actions/artifacts/2386436118",
+      "archive_download_url": "https://api.github.com/repos/greggman/node-webgpu/actions/artifacts/2386436118/zip",
+      "expired": false,
+      "created_at": "2025-01-04T23:12:55Z",
+      "updated_at": "2025-01-04T23:12:55Z",
+      "expires_at": "2025-04-04T22:49:48Z",
+      "workflow_run": {
+        "id": 12614358725,
+        "repository_id": 911859581,
+        "head_repository_id": 911859581,
+        "head_branch": "main",
+        "head_sha": "3bb7a9fec4559ddc789c424b92a92122ad09c1f4"
+      }
+    }
+  ]
+}
+*/
+
+const [owner, repo] = args.repo.split('/');
+const data = await github.getRunArtifacts({
+  owner,
+  repo,
+  run_id: args.run_id,
+});
+const filenames = await Promise.all(
+  data.artifacts
+    .filter(({name}) => name?.endsWith('.node'))
+    .map(({archive_download_url}) => downloadFileFromZip(archive_download_url, 'dist'))
+);
diff --git a/build/github.js b/build/github.js
index dd6f0d5..f292eec 100644
--- a/build/github.js
+++ b/build/github.js
@@ -1,9 +1,27 @@
-export async function getLatestRelease({owner, repo}) {
-  const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/releases/latest`, {
+function fail(msg) {
+  throw new Error(msg);
+}
+
+async function fetchGithub(url, params) {
+  url = `https://api.github.com/${url.replaceAll(/\{(.*?)\}/g, (_, id) => params[id] ?? fail(`no: ${id}`))}`;
+  console.log(url);
+  const res = await fetch(url, {
     headers: {
       'Accept': 'application/vnd.github+json',
       'X-GitHub-Api-Version': '2022-11-28',
-    }
+    },
   });
   return await res.json();
 }
+
+export async function getLatestRelease(params) {
+  return fetchGithub('repos/{owner}/{repo}/releases/latest', params);
+}
+
+export async function getLatestArtifacts(params) {
+  return fetchGithub('repos/{owner}/{repo}/actions/artifacts', params);
+}
+
+export async function getRunArtifacts(params) {
+  return fetchGithub('repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', params);
+}
\ No newline at end of file
diff --git a/build/publish.js b/build/publish.js
index ea0449e..4365b7e 100644
--- a/build/publish.js
+++ b/build/publish.js
@@ -1,5 +1,4 @@
-import path from 'path';
-import fs from 'fs';
+
 import {execute} from './execute.js';
 import * as github from './github.js';
 
@@ -12,27 +11,12 @@ function executeL(cmd, args) {
   execute(cmd, args);
 }
 
-async function downloadFile(name, url, filepath) {
-  const res = await fetch(url);
-  const data = await res.arrayBuffer();
-  const filename = path.join(filepath, name);
-  console.log('download:', filename);
-  fs.mkdirSync(filepath, {recursive: true});
-  fs.writeFileSync(filename, new Uint8Array(data));
-  return filename;
-}
-
 async function main() {
   const data = await github.getLatestRelease({
     owner,
     repo,
   });
-  //const vsixFilenames = await Promise.all(
-  //  data.assets
-  //    .filter(({name}) => name?.endsWith('.vsix'))
-  //    .map(({name, browser_download_url}) => downloadFile(name, browser_download_url, 'dist'))
-  //);
-  //executeL('./node_modules/.bin/vsce', ['publish', '--packagePath', ...vsixFilenames]);
+  executeL('./node_modules/.bin/vsce', ['publish', '--packagePath', ...vsixFilenames]);
 }
 
 main();
\ No newline at end of file
diff --git a/build/utils.js b/build/utils.js
index 9d71823..139676c 100644
--- a/build/utils.js
+++ b/build/utils.js
@@ -1,5 +1,6 @@
-import fs from 'fs';
-import path from 'path';
+import fs from 'node:fs';
+import path from 'node:path';
+import util from 'node:util';
 
 export function exists(filename) {
   try {
@@ -24,4 +25,36 @@ export function appendPathIfItExists(filepath) {
 
 export function addElemIf(cond, elem) {
   return cond ? [elem] : [];
-}
\ No newline at end of file
+}
+
+function formatOption(key, { type, inlineValue }) {
+  return type === 'bool'
+    ? `--${key}`
+    : inlineValue
+      ? `--${key}=value`
+      : `--${key} value`;
+}
+
+export function showHelp(options) {
+  const longest = Object.entries(options).reduce((max, [k, v]) => Math.max(max, formatOption(k, v).length), 0);
+  const help = Object.entries(options).map(([k, v]) => `${formatOption(k, v).padEnd(longest + 1)} : ${v.description ?? ''}`);
+  console.log(help.join('\n'));
+}
+
+export function parseArgs({options, args}) {
+  const { values, positionals } = util.parseArgs({ args, options });
+  for (const [k, {required}] of Object.entries(options)) {
+    if (required && values[k] === undefined) {
+      console.error(`missing required option: ${k}`);
+      showHelp(options);
+      process.exit(1);
+    }
+  }
+  const ndx = positionals.findIndex(v => v.startsWith('-'));
+  if (ndx >= 0) {
+    console.error(`unknown option: ${positionals[ndx]}`);
+    showHelp(options);
+    process.exit(1);
+  }
+  return { values, positionals };
+}
\ No newline at end of file
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..03817f2
--- /dev/null
+++ b/index.js
@@ -0,0 +1,9 @@
+import { dirname, join } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { createRequire } from 'module';
+const require = createRequire(import.meta.url);
+
+const __dirname = dirname(fileURLToPath(import.meta.url));
+const dawnNodePath = join(__dirname, `${process.platform}-${process.arch}.node`);
+const { create, globals } = require(dawnNodePath);
+export { create, globals }
diff --git a/package-lock.json b/package-lock.json
index f2f1b8a..d2892a9 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,7 +9,8 @@
       "version": "0.0.6",
       "license": "MIT",
       "devDependencies": {
-        "mocha": "^11.0.1"
+        "mocha": "^11.0.1",
+        "unzipit": "^1.4.3"
       }
     },
     "node_modules/@isaacs/cliui": {
@@ -1038,6 +1039,24 @@
         "node": ">=8.0"
       }
     },
+    "node_modules/unzipit": {
+      "version": "1.4.3",
+      "resolved": "https://registry.npmjs.org/unzipit/-/unzipit-1.4.3.tgz",
+      "integrity": "sha512-gsq2PdJIWWGhx5kcdWStvNWit9FVdTewm4SEG7gFskWs+XCVaULt9+BwuoBtJiRE8eo3L1IPAOrbByNLtLtIlg==",
+      "dev": true,
+      "dependencies": {
+        "uzip-module": "^1.0.2"
+      },
+      "engines": {
+        "node": ">=12"
+      }
+    },
+    "node_modules/uzip-module": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/uzip-module/-/uzip-module-1.0.3.tgz",
+      "integrity": "sha512-AMqwWZaknLM77G+VPYNZLEruMGWGzyigPK3/Whg99B3S6vGHuqsyl5ZrOv1UUF3paGK1U6PM0cnayioaryg/fA==",
+      "dev": true
+    },
     "node_modules/which": {
       "version": "2.0.2",
       "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
diff --git a/package.json b/package.json
index 4a59658..95662a2 100644
--- a/package.json
+++ b/package.json
@@ -24,7 +24,12 @@
     "url": "https://github.com/greggman/node-webgpu/issues"
   },
   "homepage": "https://github.com/greggman/node-webgpu#readme",
+  "files": [
+    "dist/**/*",
+    "index.js"
+  ],
   "devDependencies": {
-    "mocha": "^11.0.1"
+    "mocha": "^11.0.1",
+    "unzipit": "^1.4.3"
   }
 }
diff --git a/test/test.js b/test/test.js
index 2dfebfe..b2b3aea 100644
--- a/test/test.js
+++ b/test/test.js
@@ -1,7 +1,22 @@
 import Mocha from 'mocha';
 
-const mocha = new Mocha({
-});
+import { createRequire } from 'module';
+const require = createRequire(import.meta.url);
+
+const isDev = process.argv[2] !== 'dev';
+const isWin = process.platform === 'win32';
+const dawnNodePath = isDev
+  ? isWin
+    ? `${process.cwd()}/third_party/dawn/out/cmake-release/Debug/dawn.node`
+    : `${process.cwd()}/third_party/dawn/out/cmake-release/dawn.node`
+  : `${process.cwd()}/dist/${process.platform}-${process.arch}.node`;
+
+const { create, globals } = require(dawnNodePath);
+
+Object.assign(globalThis, globals);
+globalThis.navigator = { gpu: create([]) };
+
+const mocha = new Mocha({});
 
 mocha.addFile('./test/tests/basic-tests.js');
 
diff --git a/test/tests/basic-tests.js b/test/tests/basic-tests.js
index ac3d2f9..46031ce 100644
--- a/test/tests/basic-tests.js
+++ b/test/tests/basic-tests.js
@@ -1,15 +1,4 @@
-import { createRequire } from 'module';
-const require = createRequire(import.meta.url);
-
-const isWin = process.platform === 'win32';
-const dawnNodePath = isWin
-  ? `${process.cwd()}/third_party/dawn/out/cmake-release/gen/node/NapiSymbols.def`.replaceAll('\\', '/')
-  : `${process.cwd()}/third_party/dawn/out/cmake-release/dawn.node`;
-
-const { create, globals } = require(dawnNodePath);
-
-Object.assign(globalThis, globals);
-const navigator = { gpu: create([]) };
 
 function assert(cond, msg = '') {
   if (!cond) {