diff --git a/Gruntfile.js b/Gruntfile.js index f151bad399df..54092f99d1b7 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -52,16 +52,20 @@ module.exports = function (grunt) { cmd: 'node', args: ['tools/gen_cache', 'src/webgpu', '--validate'], }, + // Note these generate `cts*.https.html` directly into the out-wpt/ directory rather than + // the gen/ directory (as well as generating a `webgpu_variant_list*.json` file in gen/). 'write-out-wpt-cts-html': { - // Note this generates directly into the out-wpt/ directory rather than the gen/ directory. cmd: 'node', args: ['tools/gen_wpt_cts_html', 'tools/gen_wpt_cfg_unchunked.json'], }, 'write-out-wpt-cts-html-chunked2sec': { - // Note this generates directly into the out-wpt/ directory rather than the gen/ directory. cmd: 'node', args: ['tools/gen_wpt_cts_html', 'tools/gen_wpt_cfg_chunked2sec.json'], }, + 'write-out-wpt-cts-html-withsomeworkers': { + cmd: 'node', + args: ['tools/gen_wpt_cts_html', 'tools/gen_wpt_cfg_withsomeworkers.json'], + }, unittest: { cmd: 'node', args: ['tools/run_node', 'unittests:*'], @@ -199,7 +203,11 @@ module.exports = function (grunt) { concurrent: { 'write-out-wpt-cts-html-all': { - tasks: ['run:write-out-wpt-cts-html', 'run:write-out-wpt-cts-html-chunked2sec'], + tasks: [ + 'run:write-out-wpt-cts-html', + 'run:write-out-wpt-cts-html-chunked2sec', + 'run:write-out-wpt-cts-html-withsomeworkers', + ], }, 'all-builds': { tasks: ['build-standalone', 'build-wpt', 'run:build-out-node'], diff --git a/src/common/tools/gen_wpt_cts_html.ts b/src/common/tools/gen_wpt_cts_html.ts index 35eac195b33c..8e5460f6b44f 100644 --- a/src/common/tools/gen_wpt_cts_html.ts +++ b/src/common/tools/gen_wpt_cts_html.ts @@ -2,7 +2,10 @@ import { promises as fs } from 'fs'; import * as path from 'path'; import { DefaultTestFileLoader } from '../internal/file_loader.js'; +import { compareQueries, Ordering } from '../internal/query/compare.js'; +import { parseQuery } from '../internal/query/parseQuery.js'; import { + 
TestQuery, TestQueryMultiCase, TestQueryMultiFile, TestQueryMultiTest, @@ -68,8 +71,11 @@ interface ConfigJSON { * (a typical default time limit in WPT test executors). */ maxChunkTimeMS?: number; - /** List of argument prefixes (what comes before the test query). Defaults to `['?q=']`. */ - argumentsPrefixes?: string[]; + /** + * List of argument prefixes (what comes before the test query), and optionally a list of + * test queries to run under that prefix. Defaults to `['?q=']`. + */ + argumentsPrefixes?: ArgumentsPrefixConfigJSON[]; expectations?: { /** File containing a list of WPT paths to suppress. */ file: string; @@ -81,9 +87,10 @@ interface ConfigJSON { file: string; prefix: string; }; - /*No long path assert */ + /** Allow generating long variant names that could result in long filenames on some runners. */ noLongPathAssert?: boolean; } +type ArgumentsPrefixConfigJSON = string | { prefixes: string[]; filters?: string[] }; interface Config { suite: string; @@ -91,7 +98,7 @@ interface Config { outVariantList?: string; template: string; maxChunkTimeMS: number; - argumentsPrefixes: string[]; + argumentsPrefixes: ArgumentsPrefixConfig[]; noLongPathAssert: boolean; expectations?: { file: string; @@ -102,6 +109,26 @@ interface Config { prefix: string; }; } +interface ArgumentsPrefixConfig { + readonly prefix: string; + readonly filters?: readonly TestQuery[]; +} + +/** Process the `argumentsPrefixes` config section into a format that will be useful later. 
*/ +function* reifyArgumentsPrefixesConfig( + argumentsPrefixes: ArgumentsPrefixConfigJSON[] +): Generator<ArgumentsPrefixConfig> { + for (const item of argumentsPrefixes) { + if (typeof item === 'string') { + yield { prefix: item, filters: undefined }; + } else { + const filters = item.filters?.map(f => parseQuery(f)); + for (const prefix of item.prefixes) { + yield { prefix, filters }; + } + } + } +} let config: Config; @@ -118,7 +145,9 @@ let config: Config; out: path.resolve(jsonFileDir, configJSON.out), template: path.resolve(jsonFileDir, configJSON.template), maxChunkTimeMS: configJSON.maxChunkTimeMS ?? Infinity, - argumentsPrefixes: configJSON.argumentsPrefixes ?? ['?q='], + argumentsPrefixes: configJSON.argumentsPrefixes + ? [...reifyArgumentsPrefixesConfig(configJSON.argumentsPrefixes)] + : [{ prefix: '?q=' }], noLongPathAssert: configJSON.noLongPathAssert ?? false, }; if (configJSON.outVariantList) { @@ -153,17 +182,18 @@ let config: Config; ] = process.argv; config = { + suite, out: outFile, template: templateFile, - suite, maxChunkTimeMS: Infinity, - argumentsPrefixes: ['?q='], + argumentsPrefixes: [{ prefix: '?q=' }], noLongPathAssert: false, }; if (process.argv.length >= 7) { config.argumentsPrefixes = (await fs.readFile(argsPrefixesFile, 'utf8')) .split(/\r?\n/) - .filter(a => a.length); + .filter(a => a.length) + .map(prefix => ({ prefix })); config.expectations = { file: expectationsFile, prefix: expectationsPrefix, }; @@ -179,7 +209,7 @@ let config: Config; const useChunking = Number.isFinite(config.maxChunkTimeMS); // Sort prefixes from longest to shortest - config.argumentsPrefixes.sort((a, b) => b.length - a.length); + config.argumentsPrefixes.sort((a, b) => b.prefix.length - a.prefix.length); // Load expectations (if any) const expectations: Map<string, string[]> = await loadQueryFile( @@ -196,10 +226,26 @@ let config: Config; const loader = new DefaultTestFileLoader(); const lines = []; const tooLongQueries = []; - for (const prefix of config.argumentsPrefixes) { + // MAINTENANCE_TODO: 
Doing all this work for each prefix is inefficient, + // especially if there are no expectations. + for (const { prefix, filters } of config.argumentsPrefixes) { const rootQuery = new TestQueryMultiFile(config.suite, []); + const subqueriesToExpand = expectations.get(prefix) ?? []; + if (filters) { + // Make sure any queries we want to filter will show up in the output. + // Important: This also checks that all queries actually exist (no typos, correct suite). + for (const q of filters) { + // subqueriesToExpand doesn't error if this happens, so check it first: + assert(q.suite === config.suite, () => `Filter is for the wrong suite: ${q}`); + if (q.level >= 2) { + // No need to expand since it will be already expanded. + subqueriesToExpand.push(q.toString()); + } + } + } + const tree = await loader.loadTree(rootQuery, { - subqueriesToExpand: expectations.get(prefix), + subqueriesToExpand, fullyExpandSubtrees: fullyExpand.get(prefix), maxChunkTime: config.maxChunkTimeMS, }); @@ -213,10 +259,23 @@ let config: Config; let variantCount = 0; const alwaysExpandThroughLevel = 2; // expand to, at minimum, every test. - for (const { query, subtreeCounts } of tree.iterateCollapsedNodes({ + loopOverNodes: for (const { query, subtreeCounts } of tree.iterateCollapsedNodes({ alwaysExpandThroughLevel, })) { assert(query instanceof TestQueryMultiCase); + + const queryMatchesFilter = (filter: TestQuery) => { + const compare = compareQueries(filter, query); + // StrictSubset should not happen because we pass these to subqueriesToExpand so + // they should always be expanded (and therefore iterated more finely than this). + assert(compare !== Ordering.StrictSubset); + return compare === Ordering.Equal || compare === Ordering.StrictSuperset; + }; + // MAINTENANCE_TODO: Looping this inside another loop is inefficient. 
+ if (filters && !filters.some(queryMatchesFilter)) { + continue loopOverNodes; + } + if (!config.noLongPathAssert) { const queryString = query.toString(); // Check for a safe-ish path length limit. Filename must be <= 255, and on Windows the whole @@ -270,7 +329,7 @@ ${[...queryStrings.values()].join('\n')}` }); async function loadQueryFile( - argumentsPrefixes: string[], + argumentsPrefixes: ArgumentsPrefixConfig[], queryFile?: { file: string; prefix: string; @@ -284,13 +343,13 @@ async function loadQueryFile( } const result: Map<string, string[]> = new Map(); - for (const prefix of argumentsPrefixes) { + for (const { prefix } of argumentsPrefixes) { result.set(prefix, []); } expLoop: for (const exp of lines) { // Take each expectation for the longest prefix it matches. - for (const argsPrefix of argumentsPrefixes) { + for (const { prefix: argsPrefix } of argumentsPrefixes) { const prefix = queryFile!.prefix + argsPrefix; if (exp.startsWith(prefix)) { result.get(argsPrefix)!.push(exp.substring(prefix.length)); diff --git a/tools/gen_wpt_cfg_withsomeworkers.json b/tools/gen_wpt_cfg_withsomeworkers.json new file mode 100644 index 000000000000..422f8f3f1229 --- /dev/null +++ b/tools/gen_wpt_cfg_withsomeworkers.json @@ -0,0 +1,34 @@ +{ + "suite": "webgpu", + "argumentsPrefixes": [ + { + "_comment": "On the main thread, run all tests...", + "prefixes": ["?q="] + }, + { + "_comment": "... 
and run a subset of tests on each type of worker.", + "prefixes": ["?worker=dedicated&q=", "?worker=shared&q=", "?worker=service&q="], + "filters": [ + "webgpu:print_environment:*", + "webgpu:api,operation,buffers,map:*", + "webgpu:api,operation,buffers,map_detach:*", + "webgpu:api,operation,buffers,map_ArrayBuffer:*", + "webgpu:api,operation,buffers,map_oom:*", + "webgpu:api,operation,command_buffer,basic:*", + "webgpu:api,operation,command_buffer,copyBufferToBuffer:*", + "webgpu:api,operation,compute,basic:*", + "webgpu:api,operation,rendering,basic:*", + "webgpu:api,operation,render_pass,storeOp:*", + "webgpu:api,operation,render_pass,storeop2:*", + "webgpu:api,operation,onSubmittedWorkDone:*", + "webgpu:api,validation,buffer,destroy:*", + "webgpu:api,validation,buffer,mapping:*", + "webgpu:idl,*", + "webgpu:web_platform,*" + ] + } + ], + "out": "../out-wpt/cts-withsomeworkers.https.html", + "outVariantList": "../gen/webgpu_variant_list_withsomeworkers.json", + "template": "../src/common/templates/cts.https.html" +}