-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcrawl.js
More file actions
94 lines (75 loc) · 2.51 KB
/
crawl.js
File metadata and controls
94 lines (75 loc) · 2.51 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
const { JSDOM } = require("jsdom")
/**
 * Recursively crawls pages reachable from currentURL, staying on the same
 * hostname as baseURL, and counts how many times each normalized URL is seen.
 * @param {string} baseURL - the starting URL of the crawl; defines the allowed host
 * @param {string} currentURL - the page to fetch on this step
 * @param {Object<string, number>} pages - accumulator mapping normalized URL -> visit count (mutated)
 * @returns {Promise<Object<string, number>>} the updated pages accumulator
 */
const crawlPage = async (baseURL, currentURL, pages) => {
    console.log(`actively crawling: ${currentURL}`)
    // Parse both URLs defensively: a malformed URL should skip this page,
    // not reject the entire crawl (original code could throw here uncaught).
    let baseURLObj
    let currentURLObj
    try {
        baseURLObj = new URL(baseURL)
        currentURLObj = new URL(currentURL)
    }
    catch (err){
        console.log(`error parsing url: ${err.message}, on page: ${currentURL}`)
        return pages
    }
    // Never leave the starting site's host.
    if (baseURLObj.hostname !== currentURLObj.hostname){
        return pages
    }
    const normalizedCurrentURL = normalizedURL(currentURL)
    // Already visited: just bump the counter, do not re-fetch.
    if (pages[normalizedCurrentURL] > 0){
        pages[normalizedCurrentURL]++
        return pages
    }
    pages[normalizedCurrentURL] = 1
    try {
        const response = await fetch(currentURL)
        if (response.status > 399){
            console.log(`error in fetch with status code: ${response.status} on page: ${currentURL}`)
            return pages
        }
        const contentType = response.headers.get("content-type")
        // headers.get() returns null when the header is absent — guard before .includes
        if (!contentType || !contentType.includes("text/html")){
            console.log(`non html response, content type: ${contentType} on page: ${currentURL}`)
            return pages
        }
        const htmlBody = await response.text()
        const nextURLs = getURLsFromHTML(htmlBody, baseURL)
        // Crawl discovered links sequentially (depth-first).
        for (const nextURL of nextURLs){
            pages = await crawlPage(baseURL, nextURL, pages)
        }
    }
    catch (err){
        console.log(`error in fetch: ${err.message}, on page: ${currentURL}`)
    }
    return pages
}
/**
 * Extracts all absolute URLs from the anchor tags in an HTML document.
 * Relative hrefs (starting with "/") are resolved against baseURL;
 * hrefs that fail to parse are logged and skipped.
 * @param {string} htmlBody - raw HTML to parse
 * @param {string} baseURL - base URL used to resolve relative hrefs
 * @returns {string[]} absolute URL strings found in the document
 */
const getURLsFromHTML = (htmlBody, baseURL) => {
    const urls = []
    const dom = new JSDOM(htmlBody)
    const linkElements = dom.window.document.querySelectorAll("a")
    for (const linkElement of linkElements){
        if (linkElement.href.slice(0, 1) === "/"){
            try {
                // Resolve against the base with the two-argument URL constructor.
                // The original `${baseURL}${href}` concatenation produced a
                // double slash ("https://x.com//path") when baseURL ended in "/".
                const urlObj = new URL(linkElement.href, baseURL)
                urls.push(urlObj.href)
            }
            catch(err){
                console.log(`error with relative url: ${err.message}, path: ${linkElement.href}`)
            }
        }
        else{
            try {
                const urlObj = new URL(linkElement.href)
                urls.push(urlObj.href)
            }
            catch(err){
                console.log(`error with absolute url: ${err.message}, path: ${linkElement.href}`)
            }
        }
    }
    return urls
}
/**
 * Normalizes a URL to "hostname/path" form so different spellings of the
 * same page (trailing slash, scheme, query string) compare equal.
 * @param {string} urlString - absolute URL; must be parseable by the URL constructor
 * @returns {string} hostname + pathname with any trailing "/" removed
 * @throws {TypeError} if urlString is not a valid URL
 */
const normalizedURL = (urlString) => {
    const { hostname, pathname } = new URL(urlString)
    let hostPath = `${hostname}${pathname}`
    // Strip a single trailing slash so "/path/" and "/path" collapse together.
    if (hostPath.endsWith("/")){
        hostPath = hostPath.slice(0, -1)
    }
    return hostPath
}
// Public CommonJS API of this module.
module.exports = {
    normalizedURL,
    getURLsFromHTML,
    crawlPage
}