Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add script to find broken links in shownotes #174

Draft
wants to merge 1 commit into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
104 changes: 104 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache

# Next.js build output
.next

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment the public line back in if your project uses Gatsby and *not* Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port
61 changes: 61 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

27 changes: 27 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
{
"name": "show-notes",
"version": "1.0.0",
"description": "The repository of [Changelog](https://changelog.com) episode show notes in Markdown format.",
"main": "checklinks.js",
"type": "module",
"scripts": {
"checklinks": "node src/checklinks.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git+https://github.com/simeydk/show-notes.git"
},
"keywords": [],
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/simeydk/show-notes/issues"
},
"homepage": "https://github.com/simeydk/show-notes#readme",
"dependencies": {
"abort-controller": "^3.0.0",
"lowdb": "^3.0.0",
"node-fetch": "^3.0.0"
}
}
32 changes: 32 additions & 0 deletions src/checklinks.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import { fileURLToPath } from 'url';

import { glob} from './utils/files.js';
import { asyncChunkedForEach, asyncForEach, asyncMap} from './utils/asyncArray.js'
import { getLinksFromMdFile, checkLink } from './utils/links.js';
import {withLowCache} from './utils/cache.js'

// Run main() only when this module is the script being executed directly.
// process.argv[1] has no file:// prefix and may omit the extension, hence
// startsWith rather than strict equality — TODO confirm this matches all
// invocation styles (e.g. `node src/checklinks`).
if (fileURLToPath(import.meta.url).startsWith(process.argv[1])) {
    const pattern = process.argv[2] || '**/*.md'
    // main is a hoisted function declaration, so calling it before its
    // definition is safe. BUG FIX: the promise was previously floating —
    // surface failures and set a non-zero exit code instead.
    main(pattern).catch(err => {
        console.error(err)
        process.exitCode = 1
    })
}

// Number of links checked concurrently per batch.
const CHUNK_SIZE = 50;

// checkLink wrapped in a persistent on-disk cache (linkcache.json, 24h TTL).
const cachedCheckLink = await withLowCache(checkLink, 'linkcache.json', 24 * 60 * 60 * 1000)


/**
 * Check every link found in Markdown files matching the glob pattern and
 * report broken ones (non-ok responses) with status, source file and URL.
 * @param {string} [globPattern] - pattern forwarded to glob(); defaults to
 *   all Markdown files in the repository.
 * @returns {Promise<void>} results are written to stdout only
 */
async function main(globPattern = '**/*.md') {

    const filenames = await glob(globPattern)
    const numFiles = filenames.length
    console.log(`Running ${filenames.length} files:`)
    console.log('----------------------------')
    // Show at most 20 filenames, then summarise the remainder.
    console.log(filenames.slice(0,20).join('\n') + (numFiles > 20 ? `\n(...${numFiles - 20} more)` : '') + '\n')
    const links = await asyncMap(filenames, getLinksFromMdFile).then(arr => arr.flat())
    console.log(`Found ${links.length} links`)
    // Annotate each link object in place with the (cached) check result,
    // CHUNK_SIZE links at a time to bound concurrent requests.
    await asyncChunkedForEach(links, async link => Object.assign(link, await cachedCheckLink(link.url)), CHUNK_SIZE)
    const brokenLinks = links.filter(x => !x.ok)
    // BUG FIX: the report previously printed the literal text "$(unknown)"
    // instead of interpolating the source filename of each broken link.
    console.log(brokenLinks.map(({filename, status, url}) => `${status} ${filename} ${url}`).join('\n'))
    console.log(brokenLinks.length , links.length )
}

16 changes: 16 additions & 0 deletions src/utils/asyncArray.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
/**
 * Map over an array with an async callback, running all calls in parallel.
 * @param {Array} array - items to transform
 * @param {Function} fn - async mapper (item, index, array) => Promise<value>
 * @returns {Promise<Array>} resolved results, in input order
 */
export async function asyncMap(array, fn) {
  const pending = array.map(fn);
  return await Promise.all(pending);
}

/**
 * Invoke an async callback for every element in parallel; resolves once
 * every call has settled successfully. Results are discarded.
 * @param {Array} array - items to visit
 * @param {Function} fn - async callback (item, index, array) => Promise
 */
export async function asyncForEach(array, fn) {
  const tasks = array.map(fn);
  await Promise.all(tasks);
}

/**
 * Visit the array in sequential chunks of n items, with the items inside
 * each chunk processed in parallel. Logs timing per chunk.
 * @param {Array} array - items to visit
 * @param {Function} fn - async callback applied to each item
 * @param {number} [n=10] - chunk size (max parallelism)
 */
export async function asyncChunkedForEach(array, fn, n = 10) {
  let start = 0;
  while (start < array.length) {
    const began = Date.now();
    console.log(`starting chunk ${start} to ${start + n - 1} out of ${array.length}`);
    const chunk = array.slice(start, start + n);
    await Promise.all(chunk.map(fn));
    console.log(`finished with chunk in ${(Date.now() - began)/1000}s`);
    start += n;
  }
}
61 changes: 61 additions & 0 deletions src/utils/cache.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import { join, dirname } from 'path'
import { Low, JSONFile } from 'lowdb'
import { fileURLToPath } from 'url'

// ES modules have no __filename/__dirname globals; reconstruct them from
// import.meta.url so bare cache filenames can be resolved next to this file.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

/**
 * Wrap an async function with a persistent JSON-file cache backed by lowdb.
 * Results are keyed by JSON.stringify of the argument list and expire after
 * TTLms. Disk writes are debounced to at most one per saveFreq ms.
 *
 * @param {Function} fn - async function whose results should be cached
 * @param {string} dbName - cache file; a bare name (no path separator) is
 *   resolved next to this module
 * @param {number} [TTLms=3600_000] - entry time-to-live in milliseconds
 * @param {number} [saveFreq=1000] - debounce window for disk writes, ms
 * @returns {Promise<Function>} cached wrapper, with .flush() (evict expired
 *   entries and persist) and .clear() (drop everything and persist) attached
 */
export async function withLowCache(fn, dbName, TTLms = 3600_000, saveFreq = 1000) {
    // A bare filename is stored alongside this module, not the CWD.
    if (!(dbName.includes('/') || dbName.includes('\\'))) {
        dbName = join(__dirname, dbName)
    }

    const db = new Low(new JSONFile(dbName))
    await db.read()

    if (!db.data) {
        // First run (or empty file): start with a fresh cache object.
        db.data = {}
    } else {
        // Existing cache on disk: evict expired entries before use.
        // BUG FIX: this promise was previously floating; await it so errors
        // surface and setup completes deterministically.
        await flush()
    }

    // Debounced persistence: the first call schedules a write saveFreq ms
    // out; further calls while one is pending are no-ops.
    db.save = () => {
        db.saver = db.saver || setTimeout(() => {
            db.write()
            db.saver = undefined
        }, saveFreq)
    }

    async function cachedFn(...args) {
        const key = JSON.stringify(args)
        const cachedResult = db.data[key]
        // BUG FIX: removed a dead, empty `if (cachedResult) {}` block here.
        if (cachedResult && (Date.now() < cachedResult.time + TTLms)) {
            return cachedResult.value
        }
        const result = await fn(...args)
        db.data[key] = { value: result, time: Date.now() }
        db.save()
        return result
    }

    cachedFn.flush = flush
    cachedFn.clear = clear

    // Remove expired entries and persist immediately.
    async function flush() {
        const now = Date.now()
        Object.entries(db.data).forEach(([key, { time }]) => {
            if (now > time + TTLms) delete db.data[key]
        })
        await db.write()
    }

    // Drop the entire cache and persist immediately.
    // BUG FIX: db.write() was previously not awaited (floating promise).
    async function clear() {
        db.data = {}
        await db.write()
    }

    return cachedFn
}
18 changes: 18 additions & 0 deletions src/utils/files.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import {promises as fs } from 'fs';
import util from 'util';
import child_process from 'child_process'

// Promise-returning child_process.exec: resolves to { stdout, stderr }.
export const exec = util.promisify(child_process.exec);

// Minimal implementation of glob - avoiding adding 'glob' dependency.
// NOTE(review): matching is actually done by the shell expanding `pattern`
// before `find` runs; `find` then also recurses into matched directories.
// `**` is not globstar under /bin/sh — verify the depth behaviour is intended.
export async function glob(pattern = "*") {
    // SECURITY FIX: `pattern` is interpolated into a shell command line and
    // can come from process.argv, so reject anything outside a conservative
    // glob character set to prevent command injection.
    if (!/^[\w.*/?-]+$/.test(pattern)) {
        throw new Error(`glob: refusing unsafe pattern ${JSON.stringify(pattern)}`);
    }
    const { stdout } = await exec('find ' + pattern);
    return stdout
        .split('\n')     // one path per line
        .filter(x => x); // drop the trailing blank entry
}

// Read a file from disk and return its entire contents as a string.
export async function readFileString(filename) {
  const data = await fs.readFile(filename);
  return data.toString();
}
Loading