diff --git a/build-contract b/build-contract
index cd7ed1f..de97be4 100755
--- a/build-contract
+++ b/build-contract
@@ -53,7 +53,7 @@ function wait_for_contract {
 }
 
 MONOREPO_PRE=$(cat package.json | grep '"build-contract-predockerbuild"' | awk -F '"' '{ print $4 }')
-if [[ "$MONOREPO_PRE" == "#" ]]; then $DIR/nodejs/build-contract-predockerbuild
+if [[ "$MONOREPO_PRE" == "#" ]]; then y-monorepo-build-prepare
 elif [[ ! -z "$MONOREPO_PRE" ]]; then npm run build-contract-predockerbuild; fi
 
 CONTRACTS_DIR="build-contracts/"
diff --git a/nodejs/build-contract-packagelock b/nodejs/build-contract-packagelock
deleted file mode 100755
index 6bb13d7..0000000
--- a/nodejs/build-contract-packagelock
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-set -e
-
-SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
-
-$SCRIPTPATH/build-contract-predockerbuild
-
-cd npm-monorepo/ci
-npm install --production --package-lock-only --ignore-scripts
-echo "------- packagelock details -------"
-cat ../../npm-monorepo/ci/package.json
-echo ""
-shasum -a 256 ../../npm-monorepo/ci/npm-monorepo/* || echo "The module has no monorepo deps"
-echo "------- packagelock details -------"
-mv package-lock.json ../../
-cd ../../
diff --git a/nodejs/build-contract-predockerbuild b/nodejs/build-contract-predockerbuild
deleted file mode 100755
index 9bd6979..0000000
--- a/nodejs/build-contract-predockerbuild
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/bin/env node
-const path = require('path');
-const fs = require('fs');
-const mnpm = require('./mnpm');
-
-const npmLib = new Promise((resolve,reject) => {
-  // ls -la $(which npm), or we could do something like https://stackoverflow.com/a/25106648/113009
-  const guesses = ['./node_modules/npm', '/usr/local/lib/node_modules/npm', '/usr/lib/node_modules/npm'];
-  const check = (path) => path ? fs.stat(path, (err, stats) => {
-    if (err) return check(guesses.shift());
-    let installed = /^(?:\.\/)?node_modules\/(.*)/.exec(path);
-    if (installed) path = installed[1];
-    require(installed ? installed[1] : path).load((err, loaded) => {
-      if (err) throw err;
-      resolve(loaded);
-    });
-  }) : reject(new Error("Failed to guess the npm lib\'s install path. Try `npm (link|install) npm`."));
-  check(guesses.shift());
-});
-
-let dir = path.resolve('.');
-let mdir = path.join(dir, 'npm-monorepo');
-let cidir = path.join(mdir, 'ci');
-let cimdir = path.join(cidir, 'npm-monorepo');
-
-/**
- * Gets a minimal package.json with only the stuff that should
- * trigger an invalidation of docker build cache for the npm ci layer.
- */
-function getCiPackage(packageJson) {
-  return {
-    name: packageJson.name,
-    version: packageJson.version,
-    dependencies: packageJson.dependencies
-  }
-}
-
-/**
- * Produces a package tarball.
- * @param modulePath In case we find a way to avoid depending on process.cwd
- */
-function npmPackage(modulePath, cb) {
-  if (process.cwd() !== modulePath) throw new Error('npm expected to run in ' + modulePath + ', not ' + process.cwd());
-  npmLib.then(npm => {
-    npm.commands.pack([], (err, result) => {
-      if (err) return cb(err);
-      const name = result[0].filename;
-      fs.stat(name, (err, stats) => {
-        if (err) console.error('# npm pack failed to produce the result file', npm, process.cwd());
-        cb(err, err ? undefined : name);
-      });
-    });
-  });
-}
-
-let package = require(path.join(dir,'package.json'));
-let monorepoDeps = Object.keys(package.dependencies).filter(
-  dep => /^file:\.\.\//.test(package.dependencies[dep]));
-
-fs.mkdir(dir, err1 => fs.mkdir(mdir, err2 => fs.mkdir(cidir, err3 => fs.mkdir(cimdir, err => {
-  if (err) {
-    if (err.code !== 'EEXIST') throw err;
-    console.log('# Monorepo dir structure already present', cimdir);
-  }
-
-  const completed = () => {
-    process.chdir(dir); // restore after npm
-    if (monorepoDeps.length) {
-      console.log('# Zero monorepo dependencies found');
-      fs.unlink(path.join(cimdir, 'package.json'), err => err && console.error('Failed to clean up after sourceless tgz pack', err));
-    }
-    fs.writeFile(path.join(mdir, 'package.json'), mnpm.stringifyPackageJson(package),
-      err => { if (err) throw err; });
-    const ciPackage = getCiPackage(package);
-    fs.writeFile(path.join(cidir, 'package.json'), mnpm.stringifyPackageJson(ciPackage),
-      err => { if (err) throw err; });
-    fs.unlink(path.join(cimdir, '.npmignore'), err => err && console.error(err));
-  };
-
-  // Needed for the depCiPackage part in the callback stack below
-  fs.writeFile(path.join(cimdir, '.npmignore'), "*.tgz\n", err => err && console.error(err));
-
-  const next = dep => {
-    if (!dep) return completed();
-
-    let uri = package.dependencies[dep];
-    let urimatch = /^file:(\.\.\/.*)/.exec(uri);
-    if (!urimatch) return console.error('# Unrecognized monorepo dependency URI', uri);
-    let depdir = path.normalize(path.join(dir, urimatch[1]));
-
-    process.chdir(dir); process.chdir(depdir); // for npm
-    let depPackage = require(path.resolve('./package.json'));
-    npmPackage(depdir, (err, tgzname) => {
-      if (err) throw err;
-      console.log('# Packed', tgzname, 'in', process.cwd());
-      fs.rename(tgzname, path.join(mdir, tgzname), err => {
-        if (err) throw err;
-        console.log('# Created monorepo tarball', mdir, tgzname);
-        package.dependencies[dep] = `file:npm-monorepo/${tgzname}`;
-
-        let depCiPackage = getCiPackage(depPackage);
-        // We could probably speed things up here by using tar-stream, and maybe set permissions
-        let depPpJson = path.join(cimdir, 'package.json');
-        fs.writeFile(depPpJson, mnpm.stringifyPackageJson(depCiPackage), err => {
-          mnpm.writeProdPackageTgzWithDeterministicHash({
-            packageJsonObject: getCiPackage(depPackage),
-            filePath: path.join(cimdir, tgzname)
-          }).then(() => {
-            console.log('# Created monorepo sourceless tarball for npm ci', cimdir, tgzname);
-            console.log('------- debug info -------');
-            process.chdir(cimdir); // for npm
-            require('child_process').execSync('tar xvzf ' + tgzname, {stdio:[0,1,2]});
-            require('child_process').execSync('ls -la package/', {stdio:[0,1,2]});
-            require('child_process').execSync('shasum package/package.json', {stdio:[0,1,2]});
-            require('child_process').execSync('rm -rf package/', {stdio:[0,1,2]});
-            console.log('------- ---------- -------');
-            next(monorepoDeps.shift());
-          });
-        });
-      });
-    });
-  };
-  next(monorepoDeps.shift());
-
-}))));
-
-process.on('uncaughtException', err => {
-  console.error('Uncaught exception', err);
-  process.exit(1);
-});
-
-process.on('unhandledRejection', (err, p) => {
-  console.error('Unhandled rejection', err);
-  process.exit(1);
-});
diff --git a/nodejs/mnpm.js b/nodejs/mnpm.js
deleted file mode 100644
index cf7ee52..0000000
--- a/nodejs/mnpm.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const fs = require('fs');
-const tar = require('tar-stream');
-const zlib = require('./zlib-choice');
-
-const tarContentMtime = new Date(946684800000);
-
-function stringifyPackageJson(packageJsonObject) {
-  return JSON.stringify(packageJsonObject, null, '  ') + '\n';
-}
-
-async function writeProdPackageTgzWithDeterministicHash({packageJsonObject, filePath}) {
-  const content = stringifyPackageJson(packageJsonObject);
-  const pack = tar.pack();
-  const p = pack.entry({
-    name: 'package/package.json',
-    mtime: tarContentMtime
-  }, content);
-  const fileStream = fs.createWriteStream(filePath);
-  const completed = new Promise((resolve, reject) => {
-    fileStream.on('close', resolve);
-  });
-  pack.finalize();
-  pack
-    .pipe(zlib.createGzip())
-    .pipe(fileStream);
-  return completed;
-};
-
-module.exports = {
-  stringifyPackageJson,
-  writeProdPackageTgzWithDeterministicHash
-};
diff --git a/nodejs/mnpm.spec.js b/nodejs/mnpm.spec.js
deleted file mode 100644
index 6124df4..0000000
--- a/nodejs/mnpm.spec.js
+++ /dev/null
@@ -1,179 +0,0 @@
-const os = require('os');
-const path = require('path');
-const fs = require('fs');
-const crypto = require('crypto');
-const zlib = require('./zlib-choice');
-const stream = require('stream');
-const tar = require('tar-stream');
-
-const mnpm = require('./mnpm');
-
-describe("stringifyPackageJson", () => {
-
-  it("Uses two whitespaces to indent (the Yolean convention) and adds a trailing newline", () => {
-    const string = mnpm.stringifyPackageJson({name: 'test-module'});
-    expect(string).toBe('{\n  "name": "test-module"\n}\n');
-  });
-
-});
-
-describe("Our choice of gzip function", () => {
-
-  it("Is platform independent wrt result checksum", done => {
-    const blob = new stream.PassThrough();
-    const sha256 = crypto.createHash('sha256');
-    const result = new stream.PassThrough();
-    result.on('data', d => sha256.update(d));
-    result.on('end', () => expect(sha256.digest('hex')).toBe(
-      'b13627bbeee31ae666d6696cf11e411ee6b0e40d4b235cb2a02da32693ba2d3c'));
-    result.on('end', done);
-    // Note that this differs from `echo 'x' | gzip - | shasum -a 256 -`
-    blob.pipe(zlib.createGzip()).pipe(result);
-    blob.end('x\n');
-  });
-
-  // https://github.com/nodejs/node/issues/12244
-  it("Results may depend on zlib version", () => {
-    expect(process.versions.zlib).toBe('1.2.11');
-  });
-
-  it("Results may depend on zlib options", done => {
-    const options = {
-      windowBits: 14, memLevel: 7,
-      level: zlib.constants.Z_BEST_SPEED,
-      strategy: zlib.constants.Z_FIXED
-    };
-    const blob = new stream.PassThrough();
-    const sha256 = crypto.createHash('sha256');
-    const result = new stream.PassThrough();
-    result.on('data', d => sha256.update(d));
-    result.on('end', () => expect(sha256.digest('hex')).toBe(
-      'dd8dbe0ba323ab288d9e9272efc1f2bf52f495a812122c6ee9f9c5e7d765fda5'));
-    result.on('end', done);
-    blob.pipe(zlib.createGzip(options)).pipe(result);
-    blob.end('x\n');
-  });
-
-  it("Results may depend on zlib compression level", done => {
-    const options = {
-      level: zlib.constants.Z_BEST_COMPRESSION
-    };
-    const blob = new stream.PassThrough();
-    const sha256 = crypto.createHash('sha256');
-    const result = new stream.PassThrough();
-    result.on('data', d => sha256.update(d));
-    result.on('end', () => expect(sha256.digest('hex')).toBe(
-      '6cda46810118792ed89f1e1662549186b5c851e4ce240be861780bc646e850c6'));
-    result.on('end', done);
-    blob.pipe(zlib.createGzip(options)).pipe(result);
-    blob.end('x\n');
-  });
-
-  it("Results may be more platform independent with no compression", done => {
-    const options = {
-      level: zlib.constants.Z_NO_COMPRESSION
-    };
-    const blob = new stream.PassThrough();
-    const sha256 = crypto.createHash('sha256');
-    const result = new stream.PassThrough();
-    result.on('data', d => sha256.update(d));
-    result.on('end', () => expect(sha256.digest('hex')).toBe(
-      'f2b18200cd38c0d2c3dff4d3e2be9fd83069acd6b73cbc835c708fc3693c45d9'));
-    result.on('end', done);
-    blob.pipe(zlib.createGzip(options)).pipe(result);
-    blob.end('x\n');
-  });
-
-  xit("Results may be better with inflate instead of gzip", done => {
-    const blob = new stream.PassThrough();
-    const sha256 = crypto.createHash('sha256');
-    const result = new stream.PassThrough();
-    result.on('data', d => sha256.update(d));
-    result.on('end', () => expect(sha256.digest('hex')).toBe(
-      'c5f9a2352dadba9488900ba6ede0133270e12350ffa6d6ebbdefef9ee6aa2238'));
-    result.on('end', done);
-    blob.pipe(zlib.createInflate()).pipe(result);
-    blob.end('x\n');
-  });
-
-});
-
-describe("writeProdPackageTgzWithDeterministicHash", () => {
-
-  it("Writes a file", async () => {
-    const filePath = path.join(os.tmpdir(), 'build-contract-test-mnpm-' + Date.now() + '.tgz');
-    await mnpm.writeProdPackageTgzWithDeterministicHash({
-      packageJsonObject: {
-        "dependencies": {
-          "build-contract": "1.5.0"
-        }
-      },
-      filePath
-    });
-    const stat = await fs.promises.stat(filePath);
-    // we base these assertions on a test result, not on npm pack output (which differs)
-    // and use the assertions to see if something changes over time or across platforms
-    expect(stat.size).toBe(154);
-    const tgz = await fs.promises.readFile(filePath);
-    const sha256 = crypto.createHash('sha256');
-    sha256.update(tgz);
-    expect(sha256.digest('hex')).toBe('ebfa2ce786383196d29b927d3f0a51539655ebe56d4bac52b96f7e13749ba79c');
-    const sha512 = crypto.createHash('sha512');
-    sha512.update(tgz);
-    expect(sha512.digest('base64')).toBe('4aKJrQoeaGZuY8IDk/LmKX9drIVPoeQG00phQ7kZoR+SXHtrgeA19uUnBrclpm4Sm6xIv8/50V5u/dPxWg62Iw==');
-    await fs.promises.unlink(filePath);
-  });
-
-  it("Entries are deterministic", done => {
-    const filePath = path.join(os.tmpdir(), 'build-contract-test-mnpm-' + Date.now() + '.tgz');
-    const packageJsonObject = {
-      "dependencies": {
-        "build-contract": "1.5.0"
-      }
-    };
-    mnpm.writeProdPackageTgzWithDeterministicHash({
-      packageJsonObject,
-      filePath
-    }).then(() => {
-      const extract = tar.extract();
-      let count = 0;
-
-      extract.on('entry', function(header, stream, next) {
-        count++;
-        expect(header.name).toBe('package/package.json');
-        expect(header.mode).toBe(parseInt('0644',8));
-        expect(header.uid).toBe(0);
-        expect(header.gid).toBe(0);
-        expect(header.size).toBe(mnpm.stringifyPackageJson(packageJsonObject).length);
-        expect(header.type).toBe('file');
-        expect(header.linkname).toBeNull();
-        expect(header.uname).toBe('');
-        expect(header.gname).toBe('');
-        expect(header.devmajor).toBe(0);
-        expect(header.devminor).toBe(0);
-        expect(header.mtime).toBeInstanceOf(Date);
-        expect(header.mtime.getTime()).toBe(946684800000);
-        expect(Object.keys(header).length).toBe(12);
-
-        stream.on('end', function() {
-          // previous test asserted tgz checksum so we don't need to check content here
-          next();
-        })
-
-        stream.resume();
-      });
-
-      extract.on('finish', () => {
-        expect(count).toBe(1);
-        fs.promises.unlink(filePath).then(done);
-      });
-
-      fs.createReadStream(filePath)
-        .pipe(zlib.createGunzip({
-
-        }))
-        .pipe(extract);
-    });
-  });
-
-});
diff --git a/nodejs/zlib-choice.js b/nodejs/zlib-choice.js
deleted file mode 100644
index b7a3939..0000000
--- a/nodejs/zlib-choice.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const zlib = require('zlib');
-const pakoStreams = require('browserify-zlib');
-
-module.exports.createGzip = pakoStreams.createGzip;
-module.exports.createGunzip = zlib.createGunzip;
-module.exports.constants = zlib.constants;