diff --git a/package-lock.json b/package-lock.json index 072f862..376f137 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,8 +9,8 @@ "version": "0.0.0-dev", "license": "MIT", "dependencies": { - "fdir": "^6.4.3", - "gunshi": "^0.14.0" + "gunshi": "^0.14.0", + "tinyglobby": "^0.2.12" }, "bin": { "sourcemap-publisher": "cli.js" @@ -2890,8 +2890,6 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", "license": "MIT", - "optional": true, - "peer": true, "engines": { "node": ">=12" }, @@ -3355,6 +3353,22 @@ "dev": true, "license": "MIT" }, + "node_modules/tinyglobby": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.12.tgz", + "integrity": "sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww==", + "license": "MIT", + "dependencies": { + "fdir": "^6.4.3", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, "node_modules/tinypool": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", diff --git a/package.json b/package.json index 69ac39c..3bb0028 100644 --- a/package.json +++ b/package.json @@ -52,7 +52,7 @@ "vitest": "^3.1.1" }, "dependencies": { - "fdir": "^6.4.3", - "gunshi": "^0.14.0" + "gunshi": "^0.14.0", + "tinyglobby": "^0.2.12" } } diff --git a/src/commands/publish.ts b/src/commands/publish.ts index c009aff..9611176 100644 --- a/src/commands/publish.ts +++ b/src/commands/publish.ts @@ -1,6 +1,7 @@ import {x} from 'tinyexec'; import {Command, define} from 'gunshi'; import path from 'node:path'; +import {glob} from 'tinyglobby'; import * as prompts from '@clack/prompts'; import {rm} from 'node:fs/promises'; import { @@ -8,12 +9,12 @@ import { preparePackageJson, readPackageJson } from 
'../utils/package-json.js'; +import {copyFileToDir, getTempDir} from '../utils/fs.js'; import { - copyRelativeFilesToDir, - getSourceFilesFromPaths, - getTempDir -} from '../utils/fs.js'; -import {updateSourceMapUrls} from '../utils/sourcemaps.js'; + ExtractedSourceMapSuccess, + extractSourceMaps, + updateSourceMapUrls +} from '../utils/sourcemaps.js'; const filesToKeep = ['.npmrc', '.npmignore', 'package.json']; @@ -37,43 +38,76 @@ export const publishCommand: Command = define({ prompts.intro('Publishing sourcemaps...'); const cwd = process.cwd(); - const paths = ctx.positionals.length > 0 ? ctx.positionals : ['dist/']; const dryRun = ctx.values['dry-run']; const provenance = ctx.values.provenance; - const tempDir = await getTempDir(cwd, '.sourcemap-publish'); + const packageJsonPath = path.join(cwd, 'package.json'); + let packageJson: PackageJson; try { - await copyRelativeFilesToDir([...filesToKeep, ...paths], cwd, tempDir); + packageJson = await readPackageJson(packageJsonPath); + } catch (err) { + prompts.log.error(`${err}`); + prompts.cancel( + 'Failed to read package.json. Please ensure you run this command in the project directory' + ); + return; + } - const packageJsonPath = path.join(tempDir, 'package.json'); - let packageJson: PackageJson | null; + let paths: string[]; - try { - packageJson = await readPackageJson(packageJsonPath); - } catch (err) { - prompts.log.error(`${err}`); - prompts.cancel( - 'Failed to read package.json. Please ensure you run this command in the project directory' - ); + try { + paths = await glob(packageJson.files, { + absolute: true, + cwd, + onlyFiles: true + }); + } catch (err) { + prompts.cancel( + 'Failed to load files from `files` array in package.json.' 
+ ); + prompts.log.message(String(err)); + return; + } + + let tempDir: string | undefined; + + try { + tempDir = await getTempDir(cwd, '.sourcemap-publish'); + + const tempPackageJsonPath = path.join(tempDir, 'package.json'); + + const sourcePaths = paths.filter((p) => p.endsWith('.js')); + const sourceMaps = await extractSourceMaps(sourcePaths); + + if (sourceMaps.length === 0) { + prompts.cancel('No sourcemap files were found to publish!'); return; } - const resolvedSourcePaths = paths.map((p) => path.join(cwd, p)); + const successfulSourceMaps: ExtractedSourceMapSuccess[] = []; - const files = await getSourceFilesFromPaths(cwd, resolvedSourcePaths); + for (const sourceMap of sourceMaps) { + if (sourceMap.success === false) { + prompts.log.warn( + `Skipping source file "${sourceMap.source}" (${sourceMap.reason})` + ); + continue; + } - if (files.length === 0) { - prompts.cancel('No files were found to publish!'); - return; + successfulSourceMaps.push(sourceMap); + await copyFileToDir(sourceMap.path, cwd, tempDir); + } + + for (const file of filesToKeep) { + await copyFileToDir(path.join(cwd, file), cwd, tempDir); } try { packageJson = await preparePackageJson( tempDir, - packageJsonPath, - packageJson, - paths + tempPackageJsonPath, + packageJson ); } catch (err) { prompts.log.error(`${err}`); @@ -82,26 +116,19 @@ export const publishCommand: Command = define({ } try { + const totalSuccessfulSourceMaps = successfulSourceMaps.length; + const totalFailedSourceMaps = + sourceMaps.length - totalSuccessfulSourceMaps; + if (dryRun) { prompts.log.info( - `Updated ${files.length} sourcemap URLs, skipped 0 files (dry run)` + `Updated ${totalSuccessfulSourceMaps} sourcemap URLs, skipped ${totalFailedSourceMaps} files (dry run)` ); } else { - const updateResult = await updateSourceMapUrls( - cwd, - files, - packageJson - ); - const totalSkipped = updateResult.skipped.length; - const totalUpdated = files.length - totalSkipped; + await updateSourceMapUrls(cwd, 
successfulSourceMaps, packageJson); prompts.log.info( - `Updated ${totalUpdated} sourcemap URLs, skipped ${totalSkipped} files` + `Updated ${totalSuccessfulSourceMaps} sourcemap URLs, skipped ${totalFailedSourceMaps} files` ); - for (const skippedFile of updateResult.skipped) { - prompts.log.warn( - `Skipped ${skippedFile} (could not load file or sourcemap)` - ); - } } } catch (err) { prompts.log.error(`${err}`); @@ -152,7 +179,9 @@ export const publishCommand: Command = define({ `Published sourcemaps successfully!${dryRun ? ' (dry run)' : ''}` ); } finally { - await rm(tempDir, {force: true, recursive: true}); + if (tempDir) { + await rm(tempDir, {force: true, recursive: true}); + } } } }); diff --git a/src/utils/__snapshots__/fs.test.ts.snap b/src/utils/__snapshots__/fs.test.ts.snap deleted file mode 100644 index ee32bd2..0000000 --- a/src/utils/__snapshots__/fs.test.ts.snap +++ /dev/null @@ -1,8 +0,0 @@ -// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html - -exports[`getSourceFilesFromPaths > should find all js/ts files in the specified paths 1`] = ` -[ - "/lib/js-file.js", - "/lib/nested/js-file.js", -] -`; diff --git a/src/utils/__snapshots__/sourcemaps.test.ts.snap b/src/utils/__snapshots__/sourcemaps.test.ts.snap index 9f2577b..23a6bf7 100644 --- a/src/utils/__snapshots__/sourcemaps.test.ts.snap +++ b/src/utils/__snapshots__/sourcemaps.test.ts.snap @@ -1,23 +1,37 @@ // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`updateSourceMapUrls > replaces urls with CDN urls 1`] = ` -" -// This is a test file -//# sourceMappingURL=https://unpkg.com/test-package@1.0.0/foo.js.map" +exports[`extractSourceMap > retrieves sourcemap URL 1`] = ` +{ + "path": "TEMP_DIR/foo.js.map", + "range": [ + 21, + 31, + ], + "source": "TEMP_DIR/foo.js", + "success": true, +} `; -exports[`updateSourceMapUrls > replaces urls with CDN urls 2`] = ` +exports[`extractSourceMaps > extracts sourcemaps from files 1`] = ` +{ + "path": "TEMP_DIR/foo.js.map", + 
"range": [ + 21, + 31, + ], + "source": "TEMP_DIR/foo.js", + "success": true, +} +`; + +exports[`updateSourceMapUrls > replaces urls with CDN urls 1`] = ` " // This is a test file //# sourceMappingURL=https://unpkg.com/test-package@1.0.0/foo.js.map" `; -exports[`updateSourceMapUrls > replaces urls with CDN urls 3`] = `"// x"`; - -exports[`updateSourceMapUrls > replaces urls with CDN urls 4`] = `"// x"`; - -exports[`updateSourceMapUrls > skips on non-existent sourcemap 1`] = ` +exports[`updateSourceMapUrls > replaces urls with CDN urls 2`] = ` " // This is a test file -//# sourceMappingURL=https://unpkg.com/test-package@1.0.0/foo.js.map" +//# sourceMappingURL=https://unpkg.com/test-package@1.0.0/bar.js.map" `; diff --git a/src/utils/fs.test.ts b/src/utils/fs.test.ts index 9d1f0c4..1a981b4 100644 --- a/src/utils/fs.test.ts +++ b/src/utils/fs.test.ts @@ -1,18 +1,12 @@ import {suite, beforeEach, afterEach, test, expect} from 'vitest'; import {writeFile, rm, mkdtemp, mkdir, stat} from 'node:fs/promises'; -import { - copyRelativeFilesToDir, - getSourceFilesFromPaths, - getTempDir -} from './fs.js'; +import {copyFileToDir, getTempDir} from './fs.js'; import path from 'node:path'; import {tmpdir} from 'node:os'; const mockFs: Record = { - 'lib/js-file.js': '// foo', - 'lib/ts-file.ts': '// foo', - 'lib/dts-file.d.ts': '// foo', - 'lib/nested/js-file.js': '// foo' + 'lib/file.js': '// foo', + 'lib/file.d.ts': '// foo' }; const writeMockFs = async (tempDir: string) => { @@ -23,29 +17,6 @@ const writeMockFs = async (tempDir: string) => { } }; -suite('getSourceFilesFromPaths', () => { - let tempDir: string; - - beforeEach(async () => { - tempDir = await mkdtemp(path.join(tmpdir(), 'smpub')); - await writeMockFs(tempDir); - }); - - afterEach(async () => { - await rm(tempDir, {force: true, recursive: true}); - }); - - test('should find all js/ts files in the specified paths', async () => { - const results = await getSourceFilesFromPaths(tempDir, [ - path.join(tempDir, 'lib/') - 
]); - - expect( - results.map((p) => p.replace(tempDir, '')) - ).toMatchSnapshot(); - }); -}); - suite('getTempDir', () => { let tempDir: string; @@ -65,7 +36,7 @@ suite('getTempDir', () => { }); }); -suite('copyRelativeFilesToDir', () => { +suite('copyFileToDir', () => { let tempDir: string; let targetDir: string; @@ -80,27 +51,21 @@ suite('copyRelativeFilesToDir', () => { await rm(targetDir, {force: true, recursive: true}); }); - test('copies files to target directory', async () => { - const files = ['lib/js-file.js', 'lib/ts-file.ts']; - await copyRelativeFilesToDir(files, tempDir, targetDir); + test('copies file to target directory', async () => { + const file = path.join(tempDir, 'lib/file.js'); + await copyFileToDir(file, tempDir, targetDir); await expect( - stat(path.join(targetDir, 'lib/js-file.js')) - ).resolves.not.toThrow(); - await expect( - stat(path.join(targetDir, 'lib/ts-file.ts')) + stat(path.join(targetDir, 'lib/file.js')) ).resolves.not.toThrow(); }); test('ignores non-existent files', async () => { - const files = ['lib/js-file.js', 'lib/non-existent-file.js']; - await copyRelativeFilesToDir(files, tempDir, targetDir); + const file = path.join(tempDir, 'lib/non-existent.js'); + await copyFileToDir(file, tempDir, targetDir); await expect( - stat(path.join(targetDir, 'lib/js-file.js')) - ).resolves.not.toThrow(); - await expect( - stat(path.join(targetDir, 'lib/non-existent-file.js')) + stat(path.join(targetDir, 'lib/non-existent.js')) ).rejects.toThrow(); }); }); diff --git a/src/utils/fs.ts b/src/utils/fs.ts index eb8a77a..ecb8ae4 100644 --- a/src/utils/fs.ts +++ b/src/utils/fs.ts @@ -1,25 +1,6 @@ -import {fdir} from 'fdir'; import path from 'node:path'; import {mkdir, rm, stat, cp} from 'node:fs/promises'; -export async function getSourceFilesFromPaths( - cwd: string, - paths: string[] -): Promise { - const crawler = new fdir(); - const files = await crawler - .withFullPaths() - .exclude((_dirName, dirPath) => { - return !paths.some((p) => 
dirPath.startsWith(p)); - }) - .filter((file) => { - return paths.some((p) => file.startsWith(p)) && file.endsWith('.js'); - }) - .crawl(cwd) - .withPromise(); - return files; -} - export async function getTempDir(cwd: string, name: string): Promise { const tempDir = path.join(cwd, name); @@ -29,19 +10,23 @@ export async function getTempDir(cwd: string, name: string): Promise { return tempDir; } -export async function copyRelativeFilesToDir( - files: string[], +export async function copyFileToDir( + file: string, sourceDir: string, targetDir: string ): Promise { - for (const file of files) { - const sourcePath = path.join(sourceDir, file); - const targetPath = path.join(targetDir, file); - try { - await stat(sourcePath); - await cp(sourcePath, targetPath, {recursive: true}); - } catch { - continue; - } + const targetPath = path.join(targetDir, path.relative(sourceDir, file)); + const dir = path.dirname(targetPath); + try { + await stat(file); + } catch { + // Ignore if it doesn't exist, treat this like a "force copy" + return; + } + try { + await mkdir(dir, {recursive: true}); + } catch { + // ignore if the dir already exists } + await cp(file, targetPath, {recursive: true}); } diff --git a/src/utils/package-json.test.ts b/src/utils/package-json.test.ts index 0f47a18..0970a69 100644 --- a/src/utils/package-json.test.ts +++ b/src/utils/package-json.test.ts @@ -62,8 +62,15 @@ suite('readPackageJson', () => { }).rejects.toThrow('Invalid `package.json` file: missing version'); }); + test('throws when package.json file list is missing', async () => { + await writeFile(pkgPath, JSON.stringify({name: 'test', version: '1.0.0'})); + await expect(async () => { + await readPackageJson(pkgPath); + }).rejects.toThrow('Invalid `package.json` file: missing files list'); + }); + test('returns valid package.json object', async () => { - const pkg = {name: 'test', version: '1.0.0'}; + const pkg = {name: 'test', version: '1.0.0', files: []}; await writeFile(pkgPath, 
JSON.stringify(pkg)); const result = await readPackageJson(pkgPath); expect(result).toEqual(pkg); @@ -87,11 +94,9 @@ suite('preparePackageJson', () => { bin: { foo: './lib/cli.js' }, - scripts: { - build: 'build' - } + scripts: {} }; - tempDir = await mkdtemp('smpub'); + tempDir = await mkdtemp(path.join(tmpdir(), 'smpub')); pkgPath = path.join(tempDir, 'package.json'); await writeFile(pkgPath, JSON.stringify(pkg)); }); @@ -101,7 +106,7 @@ suite('preparePackageJson', () => { }); test('prepares package correctly', async () => { - await preparePackageJson(tempDir, pkgPath, pkg, ['lib']); + await preparePackageJson(tempDir, pkgPath, pkg); const newPkg = await JSON.parse(await readFile(pkgPath, 'utf8')); @@ -109,17 +114,15 @@ suite('preparePackageJson', () => { name: 'test-package', version: '1.0.0-sourcemaps', main: './stub.js', - files: ['./stub.js', 'lib/**/*.map'], - scripts: { - build: 'build' - } + files: ['./stub.js', './**/*.map'], + scripts: {} }); }); test('handles prerelease versions', async () => { pkg.version = '1.0.0-alpha'; - await preparePackageJson(tempDir, pkgPath, pkg, ['lib']); + await preparePackageJson(tempDir, pkgPath, pkg); const newPkg = await JSON.parse(await readFile(pkgPath, 'utf8')); diff --git a/src/utils/package-json.ts b/src/utils/package-json.ts index e02fbd9..e468b4a 100644 --- a/src/utils/package-json.ts +++ b/src/utils/package-json.ts @@ -4,7 +4,7 @@ import path from 'node:path'; export interface PackageJson { name: string; version: string; - files?: string[]; + files: string[]; [key: string]: unknown; } @@ -25,7 +25,7 @@ export const readPackageJson = async (p: string): Promise => { throw new Error('Could not parse `package.json` file'); } - if (typeof obj !== 'object' || obj === null) { + if (typeof obj !== 'object' || obj === null || Array.isArray(obj)) { throw new Error('Invalid `package.json` file'); } @@ -37,6 +37,10 @@ export const readPackageJson = async (p: string): Promise => { throw new Error('Invalid `package.json` file: 
missing version'); } + if (!Array.isArray(obj.files)) { + throw new Error('Invalid `package.json` file: missing files list'); + } + return obj as PackageJson; }; @@ -45,10 +49,9 @@ const packageJsonKeysToStrip = ['exports', 'bin']; export async function preparePackageJson( cwd: string, packageJsonPath: string, - packageJson: PackageJson, - paths: string[] + packageJson: PackageJson ): Promise { - const files: string[] = ['./stub.js']; + const files: string[] = ['./stub.js', './**/*.map']; const isPreRelease = packageJson.version.includes('-'); const versionSep = isPreRelease ? '.' : '-'; const version = `${packageJson.version}${versionSep}sourcemaps`; @@ -56,13 +59,10 @@ export async function preparePackageJson( ...packageJson, files, main: './stub.js', - version + version, + scripts: {} }; - for (const path of paths) { - files.push(`${path}/**/*.map`); - } - for (const key of packageJsonKeysToStrip) { newPackageJson[key] = undefined; } diff --git a/src/utils/sourcemaps.test.ts b/src/utils/sourcemaps.test.ts index bb8e2b0..a08a387 100644 --- a/src/utils/sourcemaps.test.ts +++ b/src/utils/sourcemaps.test.ts @@ -1,5 +1,10 @@ import {suite, test, expect, beforeEach, afterEach, vi} from 'vitest'; -import {createExternalSourcemapUrl, updateSourceMapUrls} from './sourcemaps.js'; +import { + createExternalSourcemapUrl, + ExtractedSourceMapSuccess, + extractSourceMap, + updateSourceMapUrls +} from './sourcemaps.js'; import type {PackageJson} from './package-json.js'; import {mkdtemp, readFile, rm, writeFile} from 'fs/promises'; import path from 'path'; @@ -10,7 +15,8 @@ suite('createExternalSourcemapUrl', () => { const file = 'foo/bar.js.map'; const pkg: PackageJson = { name: 'test-package', - version: '1.0.0-sourcemaps' + version: '1.0.0-sourcemaps', + files: [] }; expect(createExternalSourcemapUrl(file, pkg)).toBe( 'https://unpkg.com/test-package@1.0.0-sourcemaps/foo/bar.js.map' @@ -35,7 +41,8 @@ suite('updateSourceMapUrls', () => { beforeEach(async () => { pkg = { name: 
'test-package', - version: '1.0.0' + version: '1.0.0', + files: [] }; tempDir = await mkdtemp(path.join(tmpdir(), 'smpub')); @@ -46,7 +53,7 @@ suite('updateSourceMapUrls', () => { //# sourceMappingURL=foo.js.map`, 'bar.js': ` // This is a test file -//# sourceMappingURL=foo.js.map`, +//# sourceMappingURL=bar.js.map`, 'bar.js.map': '// x', 'foo.js.map': '// x' }; @@ -59,126 +66,212 @@ suite('updateSourceMapUrls', () => { vi.restoreAllMocks(); }); - test('skips on non-existent file', async () => { - const paths = [path.join(tempDir, 'non-existent.js')]; - const result = await updateSourceMapUrls(tempDir, paths, pkg); - - expect(result).toEqual({skipped: paths}); - }); - - test('ignores files with no source maps', async () => { - const filePath = path.join(tempDir, 'no-sourcemap.js'); - const contents = '// This is a test file'; - - files['no-sourcemap.js'] = contents; - - await writeFiles(files, tempDir); + test('replaces urls with CDN urls', async () => { + const sourceMaps: ExtractedSourceMapSuccess[] = []; + + for (const [file, source] of Object.entries(files)) { + if (file.endsWith('.map')) { + continue; + } + const substr = 'sourceMappingURL='; + const rangeStart = source.indexOf(substr) + substr.length; + sourceMaps.push({ + success: true, + path: path.join(tempDir, `${file}.map`), + source: path.join(tempDir, file), + range: [rangeStart, source.length] + }); + } - await updateSourceMapUrls(tempDir, [filePath], pkg); + await updateSourceMapUrls(tempDir, sourceMaps, pkg); - const actualContents = await readFile(filePath, 'utf8'); + for (const sourceMap of sourceMaps) { + const contents = await readFile(sourceMap.source, 'utf8'); - expect(actualContents).toBe(contents); + expect(contents).toMatchSnapshot(); + } }); +}); - test('ignores sourcemaps in weird places', async () => { - const filePath = path.join(tempDir, 'funky-sourcemaps.js'); - const contents = ` -303; -//# sourceMappingURL=funky-sourcemaps.js.map -808;`; - - files['funky-sourcemaps.js'] = contents; 
- - await writeFiles(files, tempDir); - - await updateSourceMapUrls(tempDir, [filePath], pkg); - - const actualContents = await readFile(filePath, 'utf8'); +suite('extractSourceMap', () => { + let tempDir: string; - expect(actualContents).toBe(contents); + beforeEach(async () => { + tempDir = await mkdtemp(path.join(tmpdir(), 'smpub')); }); - test('ignores absolute URLs', async () => { - const filePath = path.join(tempDir, 'absolute-url.js'); - const contents = ` -// This is a test file -//# sourceMappingURL=/absolute/path/to/sourcemap.js.map`; - - files['absolute-url.js'] = contents; - - await writeFiles(files, tempDir); - - await updateSourceMapUrls(tempDir, [filePath], pkg); - - const actualContents = await readFile(filePath, 'utf8'); - - expect(actualContents).toBe(contents); + afterEach(async () => { + await rm(tempDir, {recursive: true, force: true}); }); - test('ignores URLs with a protocol', async () => { - const filePath = path.join(tempDir, 'protocol.js'); - const contents = ` -// This is a test file -//# sourceMappingURL=https://example.com/sourcemap.js.map`; - - files['protocol.js'] = contents; - - await writeFiles(files, tempDir); - - await updateSourceMapUrls(tempDir, [filePath], pkg); + test('errors when file does not exist', async () => { + const source = path.join(tempDir, 'non-existent.js'); + const result = await extractSourceMap(source); - const actualContents = await readFile(filePath, 'utf8'); - - expect(actualContents).toBe(contents); + expect(result).toEqual({ + source, + success: false, + reason: 'could not load source file' + }); }); - test('ignores inline sourcemaps', async () => { - const filePath = path.join(tempDir, 'inline.js'); - const contents = ` -// This is a test file -//# sourceMappingURL=data:application/json;base64,wooowooowooo`; - - files['inline.js'] = contents; - - await writeFiles(files, tempDir); + test('errors when no sourcemap URL', async () => { + await writeFiles( + { + 'foo.js': '// foo' + }, + tempDir + ); + const 
source = path.join(tempDir, 'foo.js'); + const result = await extractSourceMap(source); + + expect(result).toEqual({ + source, + success: false, + reason: 'no sourcemap found' + }); + }); - await updateSourceMapUrls(tempDir, [filePath], pkg); + test('errors when absolute sourcemap URL', async () => { + await writeFiles( + { + 'foo.js': `// foo +//# sourceMappingURL=/absolute/sourcemap.js.map` + }, + tempDir + ); + const source = path.join(tempDir, 'foo.js'); + const result = await extractSourceMap(source); + + expect(result).toEqual({ + source, + success: false, + reason: 'absolute and external URLs not supported' + }); + }); - const actualContents = await readFile(filePath, 'utf8'); + test('errors when external sourcemap URL', async () => { + await writeFiles( + { + 'foo.js': `// foo +//# sourceMappingURL=https://example.com/sourcemap.js.map` + }, + tempDir + ); + const source = path.join(tempDir, 'foo.js'); + const result = await extractSourceMap(source); + + expect(result).toEqual({ + source, + success: false, + reason: 'absolute and external URLs not supported' + }); + }); - expect(actualContents).toBe(contents); + test('errors when data URL', async () => { + await writeFiles( + { + 'foo.js': `// foo +//# sourceMappingURL=data:application/json;base64,woowoo` + }, + tempDir + ); + const source = path.join(tempDir, 'foo.js'); + const result = await extractSourceMap(source); + + expect(result).toEqual({ + source, + success: false, + reason: 'data URLs not supported' + }); }); - test('skips on non-existent sourcemap', async () => { - files['non-existent-map.js'] = ` -// This is a test file -//# sourceMappingURL=non-existent-map.js.map`; + test('errors when sourcemap does not exist', async () => { + await writeFiles( + { + 'foo.js': `// foo +//# sourceMappingURL=foo.js.map` + }, + tempDir + ); + const source = path.join(tempDir, 'foo.js'); + const result = await extractSourceMap(source); + + expect(result).toEqual({ + source, + success: false, + reason: 'sourcemap 
not found' + }); + }); - await writeFiles(files, tempDir); + test('ignores sourcemaps in weird places', async () => { + await writeFiles( + { + 'foo.js': `// foo +303; +//# sourceMappingURL=funky-sourcemaps.js.map +808;` + }, + tempDir + ); + const source = path.join(tempDir, 'foo.js'); + const result = await extractSourceMap(source); + + expect(result).toEqual({ + source, + success: false, + reason: 'no sourcemap found' + }); + }); - const paths = ['non-existent-map.js', 'foo.js'].map((file) => - path.join(tempDir, file) + test('retrieves sourcemap URL', async () => { + await writeFiles( + { + 'foo.js': `// foo +//# sourceMappingURL=foo.js.map`, + 'foo.js.map': '// foo' + }, + tempDir ); - const result = await updateSourceMapUrls(tempDir, paths, pkg); + const source = path.join(tempDir, 'foo.js'); + const result = (await extractSourceMap( + source + )) as ExtractedSourceMapSuccess; + + result.source = result.source.replace(tempDir, 'TEMP_DIR'); + result.path = result.path.replace(tempDir, 'TEMP_DIR'); + expect(result).toMatchSnapshot(); + }); +}); - expect(result).toEqual({skipped: [paths[0]]}); +suite('extractSourceMaps', () => { + let tempDir: string; - const fooContents = await readFile(path.join(tempDir, 'foo.js'), 'utf8'); + beforeEach(async () => { + tempDir = await mkdtemp(path.join(tmpdir(), 'smpub')); + }); - expect(fooContents).toMatchSnapshot(); + afterEach(async () => { + await rm(tempDir, {recursive: true, force: true}); }); - test('replaces urls with CDN urls', async () => { - const paths = [...Object.keys(files)].map((file) => - path.join(tempDir, file) + test('extracts sourcemaps from files', async () => { + await writeFiles( + { + 'foo.js': `// foo +//# sourceMappingURL=foo.js.map`, + 'foo.js.map': '// foo', + 'bar.js': '// i have no sourcemap' + }, + tempDir ); - await updateSourceMapUrls(tempDir, paths, pkg); - - for (const p of paths) { - const contents = await readFile(p, 'utf8'); - - expect(contents).toMatchSnapshot(); - } + const source = 
path.join(tempDir, 'foo.js'); + const result = (await extractSourceMap( + source + )) as ExtractedSourceMapSuccess; + + result.source = result.source.replace(tempDir, 'TEMP_DIR'); + result.path = result.path.replace(tempDir, 'TEMP_DIR'); + expect(result).toMatchSnapshot(); }); }); diff --git a/src/utils/sourcemaps.ts b/src/utils/sourcemaps.ts index 1eb789a..254ab7c 100644 --- a/src/utils/sourcemaps.ts +++ b/src/utils/sourcemaps.ts @@ -9,76 +9,115 @@ export function createExternalSourcemapUrl( return `https://unpkg.com/${packageJson.name}@${packageJson.version}/${p}`; } -export interface UpdateSourceMapUrlsResult { - skipped: string[]; -} - export async function updateSourceMapUrls( cwd: string, - files: string[], + sourceMaps: ExtractedSourceMapSuccess[], packageJson: PackageJson -): Promise { - const result: UpdateSourceMapUrlsResult = {skipped: []}; +): Promise { // TODO (jg): maybe one day paralellise this with a concurrency limit - for (const file of files) { + for (const sourceMap of sourceMaps) { + // TODO (43081j): we will already have read this file as part of + parsing source maps.
ideally we shouldn't read it again here, but + // storing all of the sources in memory is not a good idea either let contents: string; try { - contents = await readFile(file, 'utf8'); + contents = await readFile(sourceMap.source, 'utf8'); } catch { - result.skipped.push(file); continue; } - const trimmedContents = contents.trim(); - const lastLine = trimmedContents.slice( - trimmedContents.lastIndexOf('\n') + 1 + const sourcemapRelativePath = path.relative(cwd, sourceMap.path); + const sourcemapNewPath = createExternalSourcemapUrl( + sourcemapRelativePath, + packageJson ); - const sourcemapPattern = /^\/\/# sourceMappingURL=(.+)/d; - const sourcemapMatch = lastLine.match(sourcemapPattern); - if (!sourcemapMatch || !sourcemapMatch.indices) { - continue; - } + await writeFile( + sourceMap.source, + contents.slice(0, sourceMap.range[0]) + + sourcemapNewPath + + contents.slice(sourceMap.range[1]) + ); + } +} - const sourcemapURL = sourcemapMatch[1]; +export interface ExtractedSourceMapSuccess { + success: true; + range: [number, number]; + path: string; + source: string; +} - // Don't support absolute URLs, or URLs with a protocol - if (sourcemapURL.startsWith('/') || /^\w+:\/\//.test(sourcemapURL)) { - continue; - } +export interface ExtractedSourceMapError { + success: false; + source: string; + reason: string; +} - // Ignore inline maps - if (sourcemapURL.startsWith('data:')) { - continue; - } +export type ExtractedSourceMap = + | ExtractedSourceMapSuccess + | ExtractedSourceMapError; - const sourcemapPath = path.join(path.dirname(file), sourcemapURL); +export async function extractSourceMap( + source: string +): Promise { + let contents: string; - try { - await stat(sourcemapPath); - } catch { - result.skipped.push(file); - continue; - } + try { + contents = await readFile(source, 'utf8'); + } catch { + return {source, success: false, reason: 'could not load source file'}; + } - const sourcemapRelativePath = path.relative(cwd, sourcemapPath); - // TODO (43081j): 
get pkg-name from somewhere - const sourcemapNewPath = createExternalSourcemapUrl( - sourcemapRelativePath, - packageJson - ); + const trimmedContents = contents.trim(); + const lastLine = trimmedContents.slice(trimmedContents.lastIndexOf('\n') + 1); + const sourcemapPattern = /^\/\/# sourceMappingURL=(.+)/d; + const sourcemapMatch = lastLine.match(sourcemapPattern); - const newSourcemapLine = - sourcemapMatch[0].slice(0, sourcemapMatch.indices[1][0]) + - sourcemapNewPath + - sourcemapMatch[0].slice(sourcemapMatch.indices[1][1]); + if (!sourcemapMatch || !sourcemapMatch.indices) { + return {source, success: false, reason: 'no sourcemap found'}; + } - await writeFile( - file, - contents.slice(0, contents.lastIndexOf('\n') + 1) + newSourcemapLine - ); + const sourcemapURL = sourcemapMatch[1]; + + // Don't support absolute URLs, or URLs with a protocol + if (sourcemapURL.startsWith('/') || /^\w+:\/\//.test(sourcemapURL)) { + return { + source, + success: false, + reason: 'absolute and external URLs not supported' + }; + } + + // Ignore inline maps + if (sourcemapURL.startsWith('data:')) { + return {source, success: false, reason: 'data URLs not supported'}; + } + + const sourcemapPath = path.join(path.dirname(source), sourcemapURL); + + try { + await stat(sourcemapPath); + } catch { + return {source, success: false, reason: 'sourcemap not found'}; } - return result; + return { + success: true, + range: sourcemapMatch.indices[1], + path: sourcemapPath, + source + }; +} + +export async function extractSourceMaps( + files: string[] +): Promise { + const results: ExtractedSourceMap[] = []; + for (const file of files) { + const extracted = await extractSourceMap(file); + results.push(extracted); + } + return results; }