From ec24253aec8cdbcc54204ece4b73b8dc630666e0 Mon Sep 17 00:00:00 2001
From: Zhell <35771353+Zhell1@users.noreply.github.com>
Date: Sun, 28 Feb 2021 18:27:44 +0100
Subject: [PATCH 1/2] add support for port in domain passed in cli

---
 src/cli.js | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/src/cli.js b/src/cli.js
index d4e0f07..3b6bc2a 100644
--- a/src/cli.js
+++ b/src/cli.js
@@ -19,6 +19,13 @@ export default () => {
     domain = 'localhost',
     outputDir = buildDir,
   } = program.optsObj
+
+  var port = 0, domain_final = domain
+  if(domain.split(':')[1]){
+    port = domain.split(':')[1].split('/')[0]
+    domain_final = domain.split(':')[0]
+  }
+  console.log("domain_final = ",domain_final,"port = ",port)
 
   const pkg = JSON.parse(fs.readFileSync(path.join(process.cwd(), 'package.json')))
   const basename = ((p) => p.endsWith('/') ? p : p + '/')(pkg.homepage ? url.parse(pkg.homepage).pathname : '')
@@ -41,7 +48,7 @@ export default () => {
 
   const server = new Server(buildDirPath, basename, 0, pkg.proxy)
   server.start().then(() => {
-    const crawler = new Crawler(`http://${domain}:${server.port()}${basename}`, options.snapshotDelay, options)
+    const crawler = new Crawler(`http://${domain_final}:${server.port()}${basename}`, options.snapshotDelay, options)
     return crawler.crawl(({ urlPath, html }) => {
      if (!urlPath.startsWith(basename)) {
        console.log(`❗ Refusing to crawl ${urlPath} because it is outside of the ${basename} sub-folder`)

From 81b7fef8ce6ef5d7a57407d6fa2ca480d8138638 Mon Sep 17 00:00:00 2001
From: Zhell <35771353+Zhell1@users.noreply.github.com>
Date: Sun, 28 Feb 2021 18:29:41 +0100
Subject: [PATCH 2/2] Update cli.js

---
 src/cli.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/cli.js b/src/cli.js
index 3b6bc2a..a01b67a 100644
--- a/src/cli.js
+++ b/src/cli.js
@@ -46,7 +46,7 @@ export default () => {
   const writer = new Writer(buildDirPath, outputDirPath)
   writer.move('index.html', '200.html')
 
-  const server = new Server(buildDirPath, basename, 0, pkg.proxy)
+  const server = new Server(buildDirPath, basename, port, pkg.proxy)
   server.start().then(() => {
     const crawler = new Crawler(`http://${domain_final}:${server.port()}${basename}`, options.snapshotDelay, options)
     return crawler.crawl(({ urlPath, html }) => {
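
Note: the two patches above split an optional ":port" (and any trailing path) off the domain value passed on the CLI, then forward that port to the Server constructor instead of the hard-coded 0, while the crawl URL keeps using server.port() together with the stripped host. The standalone sketch below mirrors that string-splitting logic outside the CLI so its behaviour can be checked in isolation; the wrapper function splitDomain and the sample inputs are illustrative only and are not part of the patch.

    // Sketch of the host/port split from PATCH 1/2, as a pure function.
    // The splitting logic is taken from the patch; the function name and
    // example inputs are hypothetical.
    function splitDomain(domain) {
      let port = 0        // default when no ':port' is present, as in the patch
      let host = domain
      if (domain.split(':')[1]) {
        // keep everything between ':' and the first '/', e.g. 'localhost:3000/app' -> '3000'
        port = domain.split(':')[1].split('/')[0]
        host = domain.split(':')[0]
      }
      return { host, port }
    }

    console.log(splitDomain('localhost'))          // { host: 'localhost', port: 0 }
    console.log(splitDomain('localhost:3000'))     // { host: 'localhost', port: '3000' }
    console.log(splitDomain('127.0.0.1:8080/app')) // { host: '127.0.0.1', port: '8080' }

Two things worth noting about this approach: the extracted port stays a string (e.g. '3000') while the default remains the number 0, and the split assumes a bare host with no scheme, such as the default 'localhost'; a value like 'http://localhost:3000' would be split at the wrong colon.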