Skip to content
Merged
Show file tree
Hide file tree
Changes from 12 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions core/audits/seo/crawlable-anchors.js
Original file line number Diff line number Diff line change
Expand Up @@ -57,12 +57,10 @@ class CrawlableAnchors extends Audit {
href,
attributeNames = [],
listeners = [],
ancestorListeners = [],
}) => {
rawHref = rawHref.replace( /\s/g, '');
name = name.trim();
role = role.trim();
const hasListener = Boolean(listeners.length || ancestorListeners.length);

if (role.length > 0) return;
// Ignore mailto links even if they use one of the failing patterns. See https://github.com/GoogleChrome/lighthouse/issues/11443#issuecomment-694898412
Expand All @@ -86,7 +84,8 @@ class CrawlableAnchors extends Audit {
!attributeNames.includes('href') &&
hrefAssociatedAttributes.every(attribute => !attributeNames.includes(attribute))
) {
return hasListener;
// If it has an event listener (e.g. onclick) then we can't assume it's a placeholder. Therefore we consider it failing.
return Boolean(listeners.length);
}

if (href === '') return true;
Expand Down
32 changes: 8 additions & 24 deletions core/gather/gatherers/anchor-elements.js
Original file line number Diff line number Diff line change
Expand Up @@ -157,33 +157,17 @@ class AnchorElements extends BaseGatherer {

// DOM.getDocument is necessary for pushNodesByBackendIdsToFrontend to properly retrieve nodeIds if the `DOM` domain was enabled before this gatherer, invoke it to be safe.
await session.sendCommand('DOM.getDocument', {depth: -1, pierce: true});
const anchorsWithEventListeners = anchors.map(async anchor => {
const listeners = await getEventListeners(session, anchor.node.devtoolsNodePath);

/** @type {Set<{type: string}>} */
const ancestorListeners = new Set();
const splitPath = anchor.node.devtoolsNodePath.split(',');
const ancestorListenerPromises = [];
while (splitPath.length >= 2) {
splitPath.length -= 2;
const path = splitPath.join(',');
const promise = getEventListeners(session, path).then(listeners => {
for (const listener of listeners) {
ancestorListeners.add(listener);
}
}).catch(() => {});
ancestorListenerPromises.push(promise);
}

await Promise.all(ancestorListenerPromises);

return {
...anchor,
listeners,
ancestorListeners: Array.from(ancestorListeners),
};
const anchorsWithEventListeners = anchors.map( anchor => {
return getEventListeners(session, anchor.node.devtoolsNodePath).then(listeners => {
return {
...anchor,
listeners,
};
});
});


const result = await Promise.all(anchorsWithEventListeners);
await session.sendCommand('DOM.disable');
return result;
Expand Down
14 changes: 13 additions & 1 deletion core/scripts/generate-timing-trace.js
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,19 @@ function saveTraceFromCLI() {
printErrorAndQuit('Lighthouse JSON results not found.');
}

const lhrObject = JSON.parse(fs.readFileSync(filename, 'utf8'));
let text = fs.readFileSync(filename, 'utf8');
// Support reading from HTML too.
if (filename.endsWith('.html') || text.startsWith('<')) {
const startMarker = '__LIGHTHOUSE_JSON__ = ';
const start = text.indexOf(startMarker) + startMarker.length;
const end = text.indexOf(';</script>', start);
if (start === -1 || end === -1) {
printErrorAndQuit('No Lighthouse JSON found in HTML file.');
}
text = text.slice(start, end);
}

const lhrObject = JSON.parse(text);
const jsonStr = createTraceString(lhrObject);

const pathObj = path.parse(filename);
Expand Down
10 changes: 1 addition & 9 deletions core/test/audits/seo/crawlable-anchors-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ function runAudit({
(rawHref || href) ? 'href' : null, role ? 'role' : null, name ? 'name' : null,
].filter(Boolean),
listeners = onclick.trim().length ? [{type: 'click'}] : [],
ancestorListeners = [],
node = {
snippet: '',
devtoolsNodePath: '',
Expand All @@ -34,7 +33,6 @@ function runAudit({
href,
name,
listeners,
ancestorListeners,
onclick,
role,
node,
Expand Down Expand Up @@ -105,19 +103,13 @@ describe('SEO: Crawlable anchors audit', () => {
rawHref: 'javascript:void(0)',
listeners: [{type: 'nope'}, {type: 'another'}, {type: 'click'}],
});
assert.equal(auditResultWithInvalidHref, 0, 'invalid href with any event listener is a faile');
assert.equal(auditResultWithInvalidHref, 0, 'invalid href with any event listener is a fail');

const auditResultWithNoHref = runAudit({
listeners: [{type: 'nope'}, {type: 'another'}, {type: 'click'}],
});
assert.equal(auditResultWithNoHref, 0, 'no href with any event listener is a fail');

const auditResultWithParentListenerNoHref = runAudit({
ancestorListeners: [{type: 'nope'}, {type: 'another'}, {type: 'click'}],
});
assert.equal(auditResultWithParentListenerNoHref, 0,
'no href with any event listener on a parent is a fail');

const auditResultNoListener = runAudit({
rawHref: '/validPath',
});
Expand Down
3 changes: 0 additions & 3 deletions types/artifacts.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -363,9 +363,6 @@ declare module Artifacts {
listeners?: Array<{
type: Crdp.DOMDebugger.EventListener['type']
}>
ancestorListeners?: Array<{
type: Crdp.DOMDebugger.EventListener['type']
}>
}

type BFCacheReasonMap = {
Expand Down
Loading