diff --git a/.github/actions/setup-pnpm/action.yml b/.github/actions/setup-pnpm/action.yml index 71a722c8f..a8f98e8e9 100644 --- a/.github/actions/setup-pnpm/action.yml +++ b/.github/actions/setup-pnpm/action.yml @@ -24,13 +24,13 @@ runs: run: | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV - - uses: actions/cache@v3 - name: Setup pnpm cache - with: - path: ${{ env.STORE_PATH }} - key: ${{ runner.os }}-${{ inputs.node-version }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-${{ inputs.node-version }}-pnpm-store- + # - uses: actions/cache@v3 + # name: Setup pnpm cache + # with: + # path: ${{ env.STORE_PATH }} + # key: ${{ runner.os }}-${{ inputs.node-version }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + # restore-keys: | + # ${{ runner.os }}-${{ inputs.node-version }}-pnpm-store- - name: Install dependencies shell: bash diff --git a/internals/upscaler-cli/bin/cli.ts b/internals/upscaler-cli/bin/cli.ts deleted file mode 100755 index 9ffa799b6..000000000 --- a/internals/upscaler-cli/bin/cli.ts +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env ts-node -import { CLI } from '../src/lib/cli/CLI.js'; - -(async () => { - const cli = new CLI(); - await cli.run(); -})(); diff --git a/internals/upscaler-cli/package.json b/internals/upscaler-cli/package.json deleted file mode 100644 index 0c196faad..000000000 --- a/internals/upscaler-cli/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "@internals/upscaler-cli", - "private": true, - "type": "module", - "version": "0.1.0", - "description": "CLI for managing the UpscalerJS repo", - "author": "Kevin Scott", - "bin": { - "upscaler-cli": "./bin/cli.ts" - }, - "license": "MIT", - "dependencies": { - "@types/fast-levenshtein": "^0.0.2", - "@internals/common": "workspace:*", - "chalk": "^5.3.0", - "commander": "^11.0.0", - "fast-levenshtein": "^3.0.0", - "inquirer": "^9.2.10", - "ts-node": "^10.9.1", - "tsc-alias": "^1.8.7" - }, - "devDependencies": { - "@commander-js/extra-typings": "^11.0.0", - "@types/node": "^20.5.1", - "@types/uglify-js": "^3.17.1", - "vitest": "^0.34.2" - }, - "scripts": { - "test": "vitest" - }, - "engines": { - "node": ">=20.0.0" - } -} diff --git a/internals/upscaler-cli/src/commands/guide/index.ts b/internals/upscaler-cli/src/commands/guide/index.ts deleted file mode 100644 index 389f1e0ce..000000000 --- a/internals/upscaler-cli/src/commands/guide/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -import { Command } from 'commander'; - -export default (program: Command) => program.command('guide') - .description('Commands related to guides & examples'); diff --git a/internals/upscaler-cli/src/commands/guide/start.ts b/internals/upscaler-cli/src/commands/guide/start.ts deleted file mode 100644 index f1206e34a..000000000 --- a/internals/upscaler-cli/src/commands/guide/start.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { Command } from '@commander-js/extra-typings'; -import path from 'path'; -import { runPNPMScript } from '@internals/common'; -import { EXAMPLES_DIR, isValidGuide } from '../../lib/guides/isValidGuide.js'; -import { runNPMCommand } from '../../lib/utils/run-npm-command.js'; -import { pluralize } from '../../lib/utils/pluralize.js'; -import { getTFJSLibraryTarget } from '../../lib/utils/get-tfjs-library-target.js'; -import { getAllDirectories } from '../../lib/utils/get-all-directories.js'; -import { findSimilarFiles } from '../../lib/utils/find-similar-files.js'; -import { info, verbose } from '../../lib/utils/log.js'; - -interface Options { - skipUpscalerBuild?: 
boolean; -} - -type Action = (...args: T) => Promise; - -export const start: Action<[string, Options]> = async (guide, { skipUpscalerBuild, }) => { - info(`Starting guide: "${guide}"`) - if (!await isValidGuide(guide)) { - const examples = await getAllDirectories(EXAMPLES_DIR); - const similarFiles = await findSimilarFiles(examples, guide, { n: 3, distance: 5 }); - - throw new Error([ - `"${guide}" is not a valid guide, and was not found in the examples directory.`, - similarFiles.length > 0 ? `Did you mean ${pluralize(similarFiles)}?` : undefined, - ].filter(Boolean).join(' ')); - } - - const guidePath = path.resolve(EXAMPLES_DIR, guide); - - // get package name from directory - const platform = await getTFJSLibraryTarget(guidePath); - - if (skipUpscalerBuild !== true) { - await runPNPMScript(`build:${platform}`, 'upscaler') - verbose(`** built upscaler: ${platform}`) - } - - await runNPMCommand(['install', '--no-package-lock'], guidePath); - await runNPMCommand(['run', 'dev'], guidePath); -} - -export default (program: Command) => program.command('start') - .description('Start an example') - .argument('', 'example to start') - .option('--skipUpscalerBuild', 'if true, skip building UpscalerJS when starting up') - .action(start); - diff --git a/internals/upscaler-cli/src/lib/cli/CLI.ts b/internals/upscaler-cli/src/lib/cli/CLI.ts deleted file mode 100644 index 50bb52aee..000000000 --- a/internals/upscaler-cli/src/lib/cli/CLI.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { program } from 'commander'; -import path from 'path'; -import fsExtra from 'fs-extra'; -import { ROOT_DIR } from '../package-scripts/utils/constants.js'; -import { buildCommandsTree } from './build-commands-tree.js'; -import { DEFAULT_LOG_LEVEL, setLogLevel } from '../utils/log.js'; -const { readFileSync, } = fsExtra; - - // "scripts": { - // "docs:build-api": "pnpm __run_command ./package-scripts/docs/build-api.ts", - // "docs:build-guides": "pnpm __run_command ./package-scripts/docs/build-guides.ts", - // "docs:link-model-readmes": "pnpm __run_command ./package-scripts/docs/link-model-readmes.ts", - // "docs:tense-checks": "pnpm __run_command ./package-scripts/docs/tense-checks.ts", - // "model:benchmark:performance": "pnpm __run_command ./package-scripts/benchmark/performance/index.ts", - // "find-all-packages": "pnpm __run_command ./package-scripts/find-all-packages.ts", - // "model:benchmark:speed": "pnpm __run_command ./package-scripts/benchmark/speed/index.ts", - // "model:build": "pnpm __run_command ./package-scripts/build-model.ts", - // "model:clean": "pnpm __run_command ./package-scripts/clean-model.ts", - // "model:convert-python-model": "pnpm __run_command ./package-scripts/convert-python-model.ts", - // "model:convert-python-model-folder": "pnpm __run_command ./package-scripts/convert-python-model-folder.ts", - // "model:create": "pnpm __run_command ./package-scripts/create-new-model-folder.ts", - // "model:demo:create": "pnpm __run_command ./package-scripts/create-model-demo.ts", - // "model:write-docs": "pnpm __run_command ./package-scripts/write-model-docs.ts", - // "test:integration:browserstack": "pnpm __run_command ./test.ts --kind integration --platform browser --runner browserstack", - // "test:integration:browser": "pnpm __run_command ./test.ts --kind integration --platform browser", - // "test:integration:node": "pnpm __run_command ./test.ts --kind integration --platform node", - // "test:memory-leaks": "pnpm __run_command ./test.ts --kind memory --platform browser", - // "test:model": "pnpm 
__run_command ./test.ts --kind model", - // "update:version": "pnpm __run_command ./package-scripts/update-version.ts", - // "update:tfjs": "pnpm __run_command ./package-scripts/update-tfjs.ts", - // "update:dependency": "pnpm __run_command ./package-scripts/update-dependency.ts", - // "update:npm:dependencies": "pnpm __run_command ./package-scripts/update-npm-dependencies.ts", - // }, - - -export class CLI { - constructor() { - const packageJSON = path.resolve(ROOT_DIR, './package.json'); - const { name, description, version } = JSON.parse(readFileSync(packageJSON, 'utf-8')); - - program - .name(name) - .description(description) - .option('-l, --log-level ', 'What level to log at', DEFAULT_LOG_LEVEL) - .on('option:log-level', () => { - const { logLevel } = program.opts(); - setLogLevel(logLevel); - }) - .version(version); - - } - - run = async () => { // skipcq: JS-0105 - const srcDir = path.resolve(ROOT_DIR, './internals/upscaler-cli/src/commands'); - const root = await buildCommandsTree(srcDir); - await root.registerProgram(program); - return program.parseAsync(); - }; -} diff --git a/internals/upscaler-cli/src/lib/cli/build-commands-tree.test.ts b/internals/upscaler-cli/src/lib/cli/build-commands-tree.test.ts deleted file mode 100644 index 5abbcd6b0..000000000 --- a/internals/upscaler-cli/src/lib/cli/build-commands-tree.test.ts +++ /dev/null @@ -1,288 +0,0 @@ -import { buildCommandsTree } from "./build-commands-tree"; -import { vi } from 'vitest'; -import fsExtra from "fs-extra"; -import { Command } from "commander"; -// import fakeFileImport from 'foo/index.js'; -const { readdir } = fsExtra; - -vi.mock('fs-extra', () => { - return { - default: { - readdir: vi.fn().mockImplementation(() => Promise.resolve([])), - stat: vi.fn().mockImplementation((name) => Promise.resolve({ - isDirectory: () => { - return (name as string).split('.').length === 1; - }, - })), - }, - } -}); - -vi.mock('foo/index.js', () => ({ default: vi.fn(), })); -vi.mock('foo/guide/index.js', () => ({ default: vi.fn(), })); -vi.mock('foo/guide/file1.ts', () => ({ default: vi.fn(), })); -vi.mock('foo/model/index.js', () => ({ default: vi.fn(), })); - -describe('buildCommandsTree', () => { - it('returns a single node for an empty directory, where that node is the directory', async () => { - const mockReadDir = () => Promise.resolve([ - ]); - - vi.mocked(readdir).mockImplementation(mockReadDir); - const node = await buildCommandsTree('foo'); - expect(Object.values(node.children).length).toBe(0); - }); - - it('returns a root node with one child for a directory with one file', async () => { - const mockReadDir = () => Promise.resolve([ - 'foo.json', - ]); - - vi.mocked(readdir).mockImplementation(mockReadDir); - const root = await buildCommandsTree('foo'); - expect(Object.values(root.children).length).toBe(1); - const child = root.getChild('foo'); - expect(child.name).toBe('foo.json'); - expect(child.fullPath).toContain('foo/foo.json'); - expect(Object.values(child.children).length).toBe(0); - expect(child.parent).toBe(root); - }); - - it('ignores a .DS_Store', async () => { - const mockReadDir = () => Promise.resolve([ - 'foo.json', - '.DS_Store', - ]); - - vi.mocked(readdir).mockImplementation(mockReadDir); - const root = await buildCommandsTree('foo'); - expect(Object.values(root.children).length).toBe(1); - const child = root.getChild('foo'); - expect(child.name).toBe('foo.json'); - expect(child.fullPath).toContain('foo/foo.json'); - expect(Object.values(child.children).length).toBe(0); - 
expect(child.parent).toBe(root); - expect(await child.isDirectory).toBe(false); - }); - - it('reads a directory as well', async () => { - vi.mocked(readdir).mockImplementation((name) => { - if (name === 'foo') { - return Promise.resolve([ - 'directory', - ]); - } - return []; - }); - const root = await buildCommandsTree('foo'); - expect(Object.values(root.children).length).toBe(1); - const child = root.getChild('directory'); - expect(child.name).toBe('directory'); - expect(child.fullPath).toContain('foo/directory'); - expect(Object.values(child.children).length).toBe(0); - expect(child.parent).toBe(root); - expect(await child.isDirectory).toBe(true); - }); - - it('reads a directory and its contents', async () => { - vi.mocked(readdir).mockImplementation((name) => { - if (name === 'foo') { - return Promise.resolve([ - 'directory', - ]); - } - return Promise.resolve([ - 'file1.ts', - 'file2.ts', - ]); - }); - const root = await buildCommandsTree('foo'); - expect(Object.values(root.children).length).toBe(1); - const directory = root.getChild('directory'); - expect(directory.name).toBe('directory'); - expect(directory.fullPath).toContain('foo/directory'); - expect(directory.parent).toBe(root); - expect(await directory.isDirectory).toBe(true); - - expect(Object.values(directory.children).length).toBe(2); - - const file1 = directory.getChild('file1'); - expect(file1.name).toBe('file1.ts'); - expect(file1.fullPath).toContain('foo/directory/file1.ts'); - expect(file1.parent).toBe(directory); - expect(await file1.isDirectory).toBe(false); - expect(Object.values(file1.children).length).toBe(0); - - const file2 = directory.getChild('file2'); - expect(file2.name).toBe('file2.ts'); - expect(file2.fullPath).toContain('foo/directory/file2.ts'); - expect(file2.parent).toBe(directory); - expect(await file2.isDirectory).toBe(false); - expect(Object.values(file2.children).length).toBe(0); - }); - - it('reads a directory and its contents and _its_ contents', async () => { - vi.mocked(readdir).mockImplementation((name) => { - if (name.toString().endsWith('foo')) { - return Promise.resolve([ - 'directory', - 'rootFile.ts', - ]); - } - if (name.toString().endsWith('directory')) { - return Promise.resolve([ - 'subdirectory1', - 'subdirectory2', - 'file1.ts', - 'file2.ts', - ]); - } - if (name.toString().endsWith('subdirectory1')) { - return Promise.resolve([ - 'file3.ts', - ]); - } - if (name.toString().endsWith('subdirectory2')) { - return Promise.resolve([ - 'file4.ts', - ]); - } - throw new Error(`Unexpected directory: ${name}`); - }); - const root = await buildCommandsTree('foo'); - expect(Object.values(root.children).length).toBe(2); - - const rootFile = root.getChild('rootFile'); - expect(rootFile.name).toBe('rootFile.ts'); - expect(rootFile.fullPath).toContain('foo/rootFile.ts'); - expect(rootFile.parent).toBe(root); - expect(await rootFile.isDirectory).toBe(false); - expect(Object.values(rootFile.children).length).toBe(0); - - const directory = root.getChild('directory'); - expect(directory.name).toBe('directory'); - expect(directory.fullPath).toContain('foo/directory'); - expect(directory.parent).toBe(root); - expect(await directory.isDirectory).toBe(true); - expect(Object.values(directory.children).length).toBe(4); - - const file1 = directory.getChild('file1'); - expect(file1.name).toBe('file1.ts'); - expect(file1.fullPath).toContain('foo/directory/file1.ts'); - expect(file1.parent).toBe(directory); - expect(await file1.isDirectory).toBe(false); - expect(Object.values(file1.children).length).toBe(0); - 
- const file2 = directory.getChild('file2'); - expect(file2.name).toBe('file2.ts'); - expect(file2.fullPath).toContain('foo/directory/file2.ts'); - expect(file2.parent).toBe(directory); - expect(await file2.isDirectory).toBe(false); - expect(Object.values(file2.children).length).toBe(0); - - const subdirectory1 = directory.getChild('subdirectory1'); - expect(subdirectory1.name).toBe('subdirectory1'); - expect(subdirectory1.fullPath).toContain('foo/directory/subdirectory1'); - expect(subdirectory1.parent).toBe(directory); - expect(await subdirectory1.isDirectory).toBe(true); - expect(Object.values(subdirectory1.children).length).toBe(1); - - const file3 = subdirectory1.getChild('file3'); - expect(file3.name).toBe('file3.ts'); - expect(file3.fullPath).toContain('foo/directory/subdirectory1/file3.ts'); - expect(file3.parent).toBe(subdirectory1); - expect(await file3.isDirectory).toBe(false); - expect(Object.values(file3.children).length).toBe(0); - - const subdirectory2 = directory.getChild('subdirectory2'); - expect(subdirectory2.name).toBe('subdirectory2'); - expect(subdirectory2.fullPath).toContain('foo/directory/subdirectory2'); - expect(subdirectory2.parent).toBe(directory); - expect(await subdirectory2.isDirectory).toBe(true); - expect(Object.values(subdirectory2.children).length).toBe(1); - - const file4 = subdirectory2.getChild('file4.ts'); - expect(file4.name).toBe('file4.ts'); - expect(file4.fullPath).toContain('foo/directory/subdirectory2/file4.ts'); - expect(file4.parent).toBe(subdirectory2); - expect(await file4.isDirectory).toBe(false); - expect(Object.values(file4.children).length).toBe(0); - - }); - - const mockFakeFile = (pathname: string) => import(pathname); - describe('commands', () => { - - it('a directory returns the command for its index file', async () => { - const fakeFile = await mockFakeFile('foo/index.js'); - const mockReadDir = () => Promise.resolve([ - 'index.js', - ]); - - function registerFunction () { }; - fakeFile.default = registerFunction; - - vi.mocked(readdir).mockImplementation(mockReadDir); - const root = await buildCommandsTree('foo'); - expect(await root.getRegistrationFunction()).toEqual(registerFunction); - }); - - it('a file returns the command for itself', async () => { - const fakeFile = await mockFakeFile('foo/index.js'); - const mockReadDir = () => Promise.resolve([ - 'index.js', - ]); - - function registerFunction () { }; - fakeFile.default = registerFunction; - - vi.mocked(readdir).mockImplementation(mockReadDir); - const root = await buildCommandsTree('foo'); - expect(await root.getChild('index').getRegistrationFunction()).toEqual(registerFunction); - }); - }); - - it('registers a program', async () => { - const guideIndex = await mockFakeFile('foo/guide/index.js'); - const modelIndex = await mockFakeFile('foo/model/index.js'); - const guideFile1 = await mockFakeFile('foo/guide/file1.ts'); - guideIndex.default = (program: Command) => program.command('guide') - modelIndex.default = (program: Command) => program.command('model') - guideFile1.default = (program: Command) => program.command('file1') - vi.mocked(readdir).mockImplementation((name) => { - if (name.toString().endsWith('foo')) { - return Promise.resolve([ - 'guide', - 'model', - ]); - } - if (name.toString().endsWith('guide')) { - return Promise.resolve([ - 'index.js', - 'file1.ts', - ]); - } - if (name.toString().endsWith('model')) { - return Promise.resolve([ - 'index.js', - ]); - } - throw new Error(`Unexpected directory: ${name}`); - }); - const root = await 
buildCommandsTree('foo'); - const subFakePrograms: FakeProgram[] = []; - class FakeProgram { - command = vi.fn().mockImplementation(() => { - const subFakeProgram = new FakeProgram(); - subFakePrograms.push(subFakeProgram); - return subFakeProgram; - }); - } - const fakeProgram = new FakeProgram(); - await root.registerProgram(fakeProgram as unknown as Command); - expect(fakeProgram.command).toHaveBeenCalledWith('guide'); - expect(fakeProgram.command).toHaveBeenCalledWith('model'); - expect(subFakePrograms.length).toBeGreaterThanOrEqual(2); - expect(subFakePrograms[0].command).toHaveBeenCalledWith('file1'); - }); -}); diff --git a/internals/upscaler-cli/src/lib/cli/build-commands-tree.ts b/internals/upscaler-cli/src/lib/cli/build-commands-tree.ts deleted file mode 100644 index 1b6f3dcc5..000000000 --- a/internals/upscaler-cli/src/lib/cli/build-commands-tree.ts +++ /dev/null @@ -1,112 +0,0 @@ -import { Command } from "commander"; -import fsExtra from "fs-extra"; -import path from 'path'; -const { readdir, stat } = fsExtra; - -const ignoreFiles = (file: string) => !['.DS_Store'].includes(file); - -const VALID_EXTENSIONS = ['ts', 'js', 'json', '']; - -type Action = (...args: T) => Promise; -interface CommandDefinition { - default: (program: Command) => Command; - action?: Action; -} -function isCommandDefinition(obj: unknown): obj is CommandDefinition { - return typeof obj === 'object' && obj !== null && 'default' in obj; -}; - -async function dynamicallyLoadCommand(fullPath: string): Promise> { - const contents = await import(fullPath); - if (!isCommandDefinition(contents)) { - throw new Error(`Loaded file at ${fullPath} is not a valid command definition`); - } - return contents; -}; - -class Node { - name: string; - fullPath: string; - parent?: Node; - children: Record = {}; - isDirectory: Promise; - - constructor(fullPath: string, parent?: Node) { - this.fullPath = fullPath; - const name = this.fullPath.split('/').pop(); - if (!name) { - throw new Error(`Bad full path provided: ${fullPath}`); - } - this.name = name; - this.parent = parent; - this.isDirectory = stat(this.fullPath).then(r => r.isDirectory()); - } - - addChild(child: Node) { - if (this.children[child.name]) { - throw new Error(`Duplicate child for name ${child.name}`); - } - this.children[child.name] = child; - } - - getChild(name: string) { - for (const ext of VALID_EXTENSIONS) { - const filename = [name, ext].filter(Boolean).join('.'); - const child = this.children[filename]; - if (child) { - return child; - } - } - throw new Error(`No child found for name ${name}`); - } - - getIndex() { - try { - return this.getChild('index'); - } catch (err) { - throw new Error(`No index file was found for directory ${this.fullPath}. 
Children were: ${JSON.stringify(Object.keys(this.children))}`); - } - } - - async getRegistrationFunction() { - if (await this.isDirectory) { - const index = await this.getIndex(); - return dynamicallyLoadCommand(index.fullPath); - } - return dynamicallyLoadCommand(this.fullPath); - } - - async registerProgram(program: Command) { - let subprogram = program; - if (this.parent !== undefined) { - // register myself - const { default: registrationFunction, } = await this.getRegistrationFunction(); - subprogram = await registrationFunction(program); - } - await Promise.all(this.getChildren().map(child => child.registerProgram(subprogram))); - } - - getChildren() { - return Object.values(this.children).filter(child => !child.name.includes('index')); - } -} - -export const buildCommandsTree = async (fullPath: string, rootNode?: Node, depth = 0): Promise => { - const root = rootNode || new Node(fullPath); - if (depth > 15) { - throw new Error(`Too deep: ${fullPath}`); - } - - const files = await readdir(fullPath); - await Promise.all(files.filter(ignoreFiles).map(async file => { - const fullFilePath = path.join(fullPath, file); - const child = new Node(fullFilePath, root); - root.addChild(child); - const stats = await stat(fullFilePath); - if (stats.isDirectory()) { - await buildCommandsTree(fullFilePath, child, depth + 1); - } - })); - - return root; -}; diff --git a/internals/upscaler-cli/src/lib/cli/index.ts b/internals/upscaler-cli/src/lib/cli/index.ts deleted file mode 100644 index 9074ba6b4..000000000 --- a/internals/upscaler-cli/src/lib/cli/index.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { program } from 'commander'; -import path from 'path'; -import fsExtra from 'fs-extra'; -import { ROOT_DIR } from '../package-scripts/utils/constants.js'; -import { buildCommandsTree } from './build-commands-tree.js'; -const { readFileSync, } = fsExtra; - - // "scripts": { - // "docs:build-api": "pnpm __run_command ./package-scripts/docs/build-api.ts", - // "docs:build-guides": "pnpm __run_command ./package-scripts/docs/build-guides.ts", - // "docs:link-model-readmes": "pnpm __run_command ./package-scripts/docs/link-model-readmes.ts", - // "docs:tense-checks": "pnpm __run_command ./package-scripts/docs/tense-checks.ts", - // "model:benchmark:performance": "pnpm __run_command ./package-scripts/benchmark/performance/index.ts", - // "find-all-packages": "pnpm __run_command ./package-scripts/find-all-packages.ts", - // "model:benchmark:speed": "pnpm __run_command ./package-scripts/benchmark/speed/index.ts", - // "model:build": "pnpm __run_command ./package-scripts/build-model.ts", - // "model:clean": "pnpm __run_command ./package-scripts/clean-model.ts", - // "model:convert-python-model": "pnpm __run_command ./package-scripts/convert-python-model.ts", - // "model:convert-python-model-folder": "pnpm __run_command ./package-scripts/convert-python-model-folder.ts", - // "model:create": "pnpm __run_command ./package-scripts/create-new-model-folder.ts", - // "model:demo:create": "pnpm __run_command ./package-scripts/create-model-demo.ts", - // "model:write-docs": "pnpm __run_command ./package-scripts/write-model-docs.ts", - // "test:integration:browserstack": "pnpm __run_command ./test.ts --kind integration --platform browser --runner browserstack", - // "test:integration:browser": "pnpm __run_command ./test.ts --kind integration --platform browser", - // "test:integration:node": "pnpm __run_command ./test.ts --kind integration --platform node", - // "test:memory-leaks": "pnpm __run_command ./test.ts --kind memory 
--platform browser", - // "test:model": "pnpm __run_command ./test.ts --kind model", - // "update:version": "pnpm __run_command ./package-scripts/update-version.ts", - // "update:tfjs": "pnpm __run_command ./package-scripts/update-tfjs.ts", - // "update:dependency": "pnpm __run_command ./package-scripts/update-dependency.ts", - // "update:npm:dependencies": "pnpm __run_command ./package-scripts/update-npm-dependencies.ts", - // }, - - -export class CLI { - constructor() { - const packageJSON = new URL('../package.json', import.meta.url); - const { name, description, version } = JSON.parse(readFileSync(packageJSON, 'utf-8')); - - program - .name(name) - .description(description) - .option('-', '--log-level', 'log level') - .on('option:log-level', (l) => { - console.log('YO WHATUP', l) - }) - .version(version); - - } - - run = async () => { // skipcq: JS-0105 - const srcDir = path.resolve(ROOT_DIR, './internals/upscaler-cli/src/commands'); - const root = await buildCommandsTree(srcDir); - await root.registerProgram(program); - return program.parseAsync(); - }; -} diff --git a/internals/upscaler-cli/src/lib/guides/isValidGuide.ts b/internals/upscaler-cli/src/lib/guides/isValidGuide.ts deleted file mode 100644 index a6ddb68fc..000000000 --- a/internals/upscaler-cli/src/lib/guides/isValidGuide.ts +++ /dev/null @@ -1,18 +0,0 @@ -import path from 'path'; -import fsExtra from 'fs-extra'; -const { stat } = fsExtra; -import * as url from 'url'; -// const __filename = url.fileURLToPath(import.meta.url); -const __dirname = url.fileURLToPath(new URL('.', import.meta.url)); -export const EXAMPLES_DIR = path.resolve(__dirname, '../../../../../examples'); - -export const isValidGuide = async (guide: string) => { - const guidePath = path.resolve(EXAMPLES_DIR, guide); - try { - const stats = await stat(guidePath); - stats.isDirectory(); - } catch(err) { - return false; - } - return true; -}; diff --git a/internals/upscaler-cli/src/lib/package-scripts/create-model-demo.ts b/internals/upscaler-cli/src/lib/package-scripts/create-model-demo.ts deleted file mode 100644 index e81a00ecb..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/create-model-demo.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { writeFile, copy, mkdirp, } from 'fs-extra'; -import path from 'path'; -import yargs from 'yargs'; -import { ifDefined as _ifDefined } from './prompt/ifDefined'; -import { MODELS_DIR } from './utils/constants'; - -/*** - * Types - */ - -/**** - * Utility functions - */ - -const makeDemoFolder = async (model: string) => { - await mkdirp(path.resolve(MODELS_DIR, model, 'demo')); -}; - -const copyFixture = async (model: string) => { - const modelRoot = path.resolve(MODELS_DIR, model); - await copy(path.resolve(modelRoot, 'assets/fixture.png'), path.resolve(modelRoot, 'demo', 'fixture.png')); -}; - -const getIndexJSFile = (model: string) => ` -import Upscaler from "upscaler"; -import * as models from '@upscalerjs/${model}'; -import fixture from "./fixture.png"; - -const upscaler = new Upscaler({ - model: models.small, -}); - -upscaler.upscale(fixture).then((upscaledImgSrc) => { - const img = document.createElement("img"); - img.src = upscaledImgSrc; - document.getElementById("target").appendChild(img); -}); -`; - -const getIndexHTMLFile = (model: string) => ` - - - @upscalerjs/${model} - - - - - - - - - - - - -
-    <tr>
-      <td>Original</td>
-      <td>Upscaled</td>
-    </tr>
-    <tr>
-      <td>
-        <img src="./fixture.png" />
-      </td>
-      <td>
-        <div id="target"></div>
- - - - -`; - -const getStackBlitz = () => ` -{ - "installDependencies": true, - "startCommand": "npm run dev" -} -`; -const getPackageJSONFile = (model: string) => ` -{ - "name": "@upscalerjs/demo.${model}", - "private": true, - "version": "1.0.0-beta.1", - "main": "index.js", - "scripts": { - "dev": "vite" - }, - "devDependencies": { - "vite": "*" - }, - "author": "Kevin Scott", - "license": "MIT", - "dependencies": { - "@tensorflow/tfjs": "~4.11.0", - "seedrandom": "^3.0.5", - "@upscalerjs/${model}": "^0.1.0", - "upscaler": "^1.0.0-beta.8" - }, - "engines": { - "npm": ">8.0.0" - } -}`; - -const writeDemoFile = async (model: string, file: string, contents: string) => { - const demoRoot = path.resolve(MODELS_DIR, model, 'demo'); - await writeFile(path.resolve(demoRoot, file), contents.trim(), 'utf-8'); -} - -const writeDemoFiles = (model: string) => Promise.all([ - ['index.js', getIndexJSFile(model)], - ['index.html', getIndexHTMLFile(model)], - ['package.json', getPackageJSONFile(model)], - ['.stackblitzrc', getStackBlitz()], -].map(([file, contents]) => writeDemoFile(model, file, contents))); - -/**** - * Main function - */ - -const createModelDemo = async ( - model: string, -) => { - await makeDemoFolder(model); - await copyFixture(model); - await writeDemoFiles(model); -} - -export default createModelDemo; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Answers { - model: string; -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('create model demo', '', yargs => { - yargs.positional('model', { - describe: 'The model demo to create', - }); - }) - .help() - .argv; - - return { - model: argv._[0] as string, - } - -} - -if (require.main === module) { - (async () => { - const { model } = await getArgs(); - await createModelDemo(model); - })(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/docs/build-api.ts b/internals/upscaler-cli/src/lib/package-scripts/docs/build-api.ts deleted file mode 100644 index d3b8c4dba..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/docs/build-api.ts +++ /dev/null @@ -1,878 +0,0 @@ -import path from 'path'; -import { - mkdirp, - writeFile, -} from 'fs-extra'; -import { - Application, - DeclarationReflection as TypedocDeclarationReflection, - TSConfigReader, - TypeDocReader, - ReflectionKind, -} from 'typedoc'; -import { Platform } from '../prompt/types'; -import { DOCS_DIR, UPSCALER_DIR } from '../utils/constants'; -import { clearOutMarkdownFiles } from './utils/clear-out-markdown-files'; -import { getSharedArgs, SharedArgs } from './types'; -import { - CommentDisplayPart, - CommentTag, - ParameterReflection, - ArrayType, - UnionType, - IntersectionType, - IntrinsicType, - LiteralType, - ReferenceType, - SomeType, - Comment, - SignatureReflection, - SourceReference, - DeclarationReflection, - TypeParameterReflection, -} from 'typedoc/dist/lib/serialization/schema'; - -const REPO_ROOT = 'https://github.com/thekevinscott/UpscalerJS'; -/**** - * Types - */ -type DecRef = DeclarationReflection | PlatformSpecificDeclarationReflection; -interface Definitions { - constructors: Record; - methods: Record; - interfaces: Record; - types: Record; - classes: Record; - functions: Record; - enums: Record; -} - -interface PlatformSpecificDeclarationReflection extends Omit { - kind: 'Platform Specific Type'; - node: DeclarationReflection; - browser: DeclarationReflection; -} - -const getSummary = (comment?: Comment) => comment?.summary.map(({ text }) => text).join(''); -const 
rewriteURL = (url: string) => { - const parts = url.split(/blob\/(?[^/]+)/) - if (parts.length !== 3) { - throw new Error(`Error with the regex: ${url}`); - } - return [ - parts[0], - 'tree/main', - parts[2], - ].join(''); -}; - - -const isDeclarationReflection = (reflection?: DecRef): reflection is DeclarationReflection => reflection?.kind !== 'Platform Specific Type'; -const isArrayType = (type: SomeType): type is ArrayType => type.type === 'array'; -const isReferenceType = (type: SomeType): type is ReferenceType => type.type === 'reference'; -const isLiteralType = (type: SomeType): type is LiteralType => type.type === 'literal'; -const isInstrinsicType = (type: SomeType): type is IntrinsicType => type.type === 'intrinsic'; -const isUnionType = (type: SomeType): type is UnionType => type.type === 'union'; -const isIntersectionType = (type: SomeType): type is IntersectionType => type.type === 'intersection'; - -const getURLFromSources = (matchingType: undefined | DecRef | TypeParameterReflection) => { - if (!matchingType) { - return undefined; - } - if ('sources' in matchingType) { - const sources = matchingType.sources; - if (sources?.length) { - const { url } = sources?.[0] || {}; - if (url?.startsWith(REPO_ROOT)) { - return rewriteURL(url); - } - return url; - } - } - - return undefined; -}; - -const getLiteralTypeValue = (type: LiteralType): string => { - const { value } = type; - if (typeof value === 'number') { - return `${value}`; - } else if (typeof value === 'string') { - return value; - } - - throw new Error('Not yet implemented for literal'); -} - - -const getReferenceTypeOfParameter = (_type?: SomeType, definitions?: Definitions): { - type: 'reference' | 'array' | 'literal' | 'intrinsic' | 'union', - name: string; - includeURL?: boolean; -} => { - if (!_type) { - throw new Error('Define a type'); - } - if (isArrayType(_type)) { - const { elementType } = _type; - if (isReferenceType(elementType)) { - return { - type: _type.type, - name: elementType.name, - } - } else if (isUnionType(elementType)) { - return { - type: 'union', - name: elementType.types.map(t => { - if ('name' in t) { - return t.name; - } - throw new Error('unimplemented'); - }).join(' | '), - }; - } - - console.error(_type); - - throw new Error('Not yet implemented'); - } - - if (isReferenceType(_type)) { - const { name } = _type; - if (name === 'ModelDefinitionObjectOrFn') { - return { - type: _type.type, - name: "ModelDefinition", - }; - } - return { - type: _type.type, - name, - }; - } - - if (isLiteralType(_type)) { - return { - type: 'literal', - name: getLiteralTypeValue(_type), - }; - } - - if (isInstrinsicType(_type)) { - return { - type: 'intrinsic', - name: _type.name, - } - } - - if (isIntersectionType(_type)) { - const refType = _type.types.filter(t => t.type === 'reference').pop(); - if (!refType || !isReferenceType(refType)) { - throw new Error('No reference type found on intersection type.'); - } - // if (definitions === undefined) { - // throw new Error('Intersection type was provided and a reference type was found in the union, but no definitions are present.') - // } - const typeArg = refType.typeArguments?.filter(typeArg => typeArg.type === 'reference').pop(); - if (!typeArg || !('name' in typeArg)) { - throw new Error('No type arguments found on intersection type.'); - } - return { - type: 'literal', - name: typeArg.name, - }; - } - - if (isUnionType(_type)) { - let includeURL = true; - - const getNameFromUnionType = (type: UnionType): string => type.types.map(t => { - if (isReferenceType(t)) 
{ - if (definitions === undefined) { - console.warn('Union type was provided and a reference type was found in the union, but no definitions are present.'); - return t.name; - } - const { interfaces, types } = definitions; - const matchingType = interfaces[t.name] || types[t.name]; - if (!isDeclarationReflection(matchingType)) { - throw new Error('Is a platform specific type'); - } - if (!matchingType?.type) { - return t.name; - // throw new Error(`No matching type found for literal ${t.name} in union`); - } - const matchingTypeType = matchingType.type; - if (isLiteralType(matchingTypeType)) { - // if any literal types are included, don't include the URL - includeURL = false; - return JSON.stringify(matchingTypeType.value); - } - if (matchingTypeType.type === 'reflection') { - // Ignore reflection types - return t.name; - } - if (matchingTypeType.type === 'union') { - return getNameFromUnionType(matchingTypeType); - } - if (matchingTypeType.type === 'tuple') { - console.log('matchingTypeType tuple', matchingTypeType); - return `[${matchingTypeType.elements?.map(e => { - if ('name' in e) { - return e.name; - } - throw new Error('Array type not yet implemented'); - }).join(',')}]`; - } - console.error('matchingTypeType', JSON.stringify(matchingTypeType, null, 2)); - - throw new Error(`Unsupported type of matching type ${matchingTypeType.type} in reference type of union type ${t.name}.`); - } else if (isInstrinsicType(t)) { - if (t.name === 'undefined') { - // ignore an explicit undefined type; this should be better represented to the user as an optional flag. - return undefined; - } - return t.name; - } else if (isLiteralType(t)) { - return `${t.value}`; - } else if (t.type === 'indexedAccess') { - const objectType = t.objectType; - if ('name' in objectType) { - return objectType.name; - } - return ''; - } else if (t.type === 'array') { - if ('name' in t.elementType) { - return `${t.elementType.name}[]`; - } - console.warn('Unknown element type', t); - // throw new Error('Unknown element type'); - return ''; - } - console.error(t); - throw new Error(`Unsupported type in union type: ${t.type}`); - }).filter(Boolean).join(' | '); - - const name = getNameFromUnionType(_type); - - return { - type: 'literal', - includeURL, - name, - }; - } - - console.error(_type) - - throw new Error(`Unsupported type: ${_type.type}`) -}; - -const writePlatformSpecificParameter = (platform: string, parameter: DeclarationReflection, definitions: Definitions) => { - const comment = getSummary(parameter.comment); - const { type, name } = getReferenceTypeOfParameter(parameter.type, definitions); - const url = getURLFromSources(parameter); - const parsedName = `${name}${type === 'array' ? '[]' : ''}`; - return [ - '-', - `**[${platform}](${url})**:`, - `\`${parsedName}\``, - comment ? 
` - ${comment}` : undefined, - ].filter(Boolean).join(' '); -}; - - -const writePlatformSpecificDefinitions = (definitions: Definitions): string => { - const platformSpecificTypes: PlatformSpecificDeclarationReflection[] = []; - for (const type of Object.values(definitions.types)) { - if (!isDeclarationReflection(type)) { - platformSpecificTypes.push(type); - } - } - return platformSpecificTypes.map(parameter => [ - writePlatformSpecificParameter('Browser', parameter.browser, definitions), - writePlatformSpecificParameter('Node', parameter.node, definitions), - ].join('\n')).join('\n'); -}; - - -/**** - * Constants - */ -const UPSCALER_TSCONFIG_PATH = path.resolve(UPSCALER_DIR, 'tsconfig.browser.esm.json'); -const UPSCALER_SRC_PATH = path.resolve(UPSCALER_DIR, 'src/browser/esm'); -const EXAMPLES_DOCS_DEST = path.resolve(DOCS_DIR, 'docs/documentation/api'); -const VALID_EXPORTS_FOR_WRITING_DOCS = ['default']; -const VALID_METHODS_FOR_WRITING_DOCS = [ - 'constructor', - 'upscale', - 'execute', - 'warmup', - 'abort', - 'dispose', - 'getModel', -]; -const INTRINSIC_TYPES = [ - 'string', - 'number', - 'boolean', -]; -const TYPES_TO_EXPAND: Record = { - 'upscale': ['Input', 'Progress'], - 'warmup': ['WarmupSizes'], -} -const EXPANDED_TYPE_CONTENT: Record) => string> = { - 'Input': (definitions) => writePlatformSpecificDefinitions(definitions), - 'WarmupSizes': () => ([ - "- `number` - a number representing both the size (width and height) of the patch.", - "- `{patchSize: number; padding?: number}` - an object with the `patchSize` and optional `padding` properties.", - "- `number[]` - an array of numbers representing the size (width and height) of the patch.", - "- `{patchSize: number; padding?: number}[]` - an array of objects with the `patchSize` and optional `padding` properties.", - ].join('\n')), - 'Progress': () => ([ - 'The progress callback function has the following four parameters:', - '- `progress` - a number between 0 and 1 representing the progress of the upscale.', - '- `slice` - a string or 3D tensor representing the current slice of the image being processed. 
The type returned is specified by the `progressOutput` option, or if not present, the `output` option, or if not present, string for the browser and tensor for node.', - '- `row` - the row of the image being processed.', - '- `col` - the column of the image being processed.', - '', - '[See the guide on progress for more information.](/documentation/guides/browser/usage/progress)', - ].join('\n')), -}; -// define special type information that is external -const makeNewExternalType = (name: string, _url: string): DeclarationReflection => { - const type = new TypedocDeclarationReflection(name, ReflectionKind['SomeType']); - // const source = new SourceReference('', 0, 0); - // source.url = url; - type.sources = []; - return type as DeclarationReflection; -}; - -const EXTERNALLY_DEFINED_TYPES: Record = { - 'AbortSignal': makeNewExternalType( - 'AbortSignal', - 'https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal' - ), - 'SerializableConstructor': makeNewExternalType( - 'SerializableConstructor', - 'https://github.com/tensorflow/tfjs/blob/38f8462fe642011ff1b7bcbb52e018f3451be58b/tfjs-core/src/serialization.ts#L54', - ), -} - -/**** - * Utility functions - */ - - -const getPackageAsTree = (entryPoint: string, tsconfig: string, projectRoot: string) => { - const app = new Application(); - - app.options.addReader(new TSConfigReader()); - app.options.addReader(new TypeDocReader()); - - app.bootstrap({ - entryPoints: [entryPoint], - tsconfig, - }); - - const project = app.convert(); - - if (project) { - return app.serializer.projectToObject(project, projectRoot); - } - throw new Error('No project was converted.') -} - -const getTypeFromPlatformSpecificFiles = async (fileName: string, typeName: string) => { - const platforms: Platform[] = ['browser', 'node']; - const platformSpecificTypes: DeclarationReflection[] = []; - for (const platform of platforms) { - const imageBrowser = getPackageAsTree( - path.resolve(UPSCALER_DIR, 'src', platform, `${fileName}.${platform}.ts`), - path.resolve(UPSCALER_DIR, `tsconfig.docs.${platform}.json`), - UPSCALER_DIR, - ); - const matchingType = imageBrowser.children?.filter(child => child.name === typeName).pop(); - if (!matchingType) { - throw new Error(`Could not find input from ${fileName}.${platform}.ts`); - } - platformSpecificTypes.push(matchingType); - } - - const platformSpecificType: PlatformSpecificDeclarationReflection = { - name: typeName, - variant: 'declaration', - kind: 'Platform Specific Type', - browser: platformSpecificTypes[0], - node: platformSpecificTypes[1], - children: [], - type: platformSpecificTypes[0].type, - } - - return platformSpecificType; -} - -const getTypesFromPlatformSpecificFiles = async (): Promise<{ - children: PlatformSpecificDeclarationReflection[]; -}> => { - return { - children: await Promise.all([ - getTypeFromPlatformSpecificFiles('image', 'Input'), - ]), - }; -} - -function getAsObj (arr: T[], getKey: (item: T) => string) { - return arr.reduce((obj, item) => ({ - ...obj, - [getKey(item)]: item, - }), {} as Record); -} - -const getKindStringKey = (kindString: 'Platform Specific Type' | ReflectionKind) => { - switch (kindString) { - case 'Platform Specific Type': - return 'types'; - case ReflectionKind.Constructor: - return 'constructors'; - case ReflectionKind.Method: - return 'methods'; - case ReflectionKind.Interface: - return 'interfaces'; - case ReflectionKind.TypeAlias: - return 'types'; - case ReflectionKind.Class: - return 'classes'; - case ReflectionKind.Function: - return 'functions'; - case 
ReflectionKind.Enum: - return 'enums'; - default: - throw new Error(`Unexpected kind string: ${kindString}`); - } -} - -const getDefinitions = async (): Promise => { - const upscalerTree = getPackageAsTree( - UPSCALER_SRC_PATH, - UPSCALER_TSCONFIG_PATH, - UPSCALER_DIR, - ); - console.log('***** 2') - const platformSpecificTypes = await getTypesFromPlatformSpecificFiles(); - if (!upscalerTree.children) { - throw new Error('No children were found on upscaler tree object. Indicates an error in the returned structure from getPackageAsTree'); - } - const children = [ - ...upscalerTree.children, - ...(platformSpecificTypes.children || []), - ]; - - const parsedChildren = children.reduce((obj, child) => { - const { kind } = child; - const key = getKindStringKey(kind); - if (!key) { - throw new Error(`Unexpected kind string: ${kind}`); - } - return { - ...obj, - [key]: obj[key].concat(child), - }; - }, { - constructors: [] as DecRef[], - methods: [] as DecRef[], - functions: [] as DecRef[], - interfaces: [] as DecRef[], - types: [] as DecRef[], - classes: [] as DecRef[], - enums: [] as DecRef[], - }); - - return { - methods: getAsObj(parsedChildren.methods, i => i.name), - constructors: getAsObj(parsedChildren.constructors, i => i.name), - functions: getAsObj(parsedChildren.functions, i => i.name), - types: getAsObj(parsedChildren.types, i => i.name), - interfaces: getAsObj(parsedChildren.interfaces, i => i.name), - classes: getAsObj(parsedChildren.classes, i => i.name), - enums: getAsObj(parsedChildren.enums, i => i.name), - }; -}; - -const getTextSummary = (name: string, comment?: Comment): { - codeSnippet?: string; - description?: string; - blockTags?: Record; -} => { - if (comment === undefined) { - return {}; - } - const { summary, blockTags } = comment; - const expectedCodeSnippet = summary.pop(); - if (expectedCodeSnippet?.kind !== 'code') { - throw new Error(`Expected code snippet not found for ${name}`); - } - // const { text, code } = summary.reduce((obj, item) => { - // return { - // ...obj, - // [item.kind]: item.text.trim(), - // } - // }, { - // text: '', - // code: '', - // }); - const text = summary.map(({ text }) => text).join(''); - return { - blockTags: blockTags?.reduce((obj, blockTag) => { - return { - ...obj, - [blockTag.tag]: blockTag.content, - }; - }, {}), - description: text.trim(), - codeSnippet: expectedCodeSnippet.text.trim(), - } -}; - -const getSource = ([source]: SourceReference[]) => { - let { - fileName, - line, - // character, - url, - } = source; - url = `${REPO_ROOT}/blob/main/${fileName}#L${line}`; - // if (!url) { - // throw new Error(`No URL defined for source ${fileName} at line ${line}`); - // } - const prettyFileName = fileName.split('packages/upscalerjs/src/shared/').pop(); - return `Defined in ${prettyFileName}:${line}`; -}; - - -function sortChildrenByLineNumber(children: T[]) { - return children.sort(({ sources: aSrc }, { sources: bSrc }) => { - if (!aSrc?.length) { - return 1; - } - if (!bSrc?.length) { - return -1; - } - return aSrc[0].line - bSrc[0].line; - }); -}; - -// const isTypeParameterReflection = (reflection: DecRef | TypeParameterReflection): reflection is TypeParameterReflection => { -// return 'parent' in reflection; -// } - -const writeParameter = (methodName: string, parameter: ParameterReflection | DeclarationReflection, matchingType: undefined | DecRef | TypeParameterReflection, definitions: Definitions, childParameters: string) => { - // if (matchingType !== undefined && !isTypeParameterReflection(matchingType) && 
!isDeclarationReflection(matchingType)) { - // // this is a platform-specify type specification. likely it is the input definition. - // const comment = getSummary(parameter.comment); - // const { type, name } = getReferenceTypeOfParameter(parameter.type, definitions); - // const parsedName = `\`${name}${type === 'array' ? '[]' : ''}\``; - // return [ - // '-', - // `**${parameter.name}${parameter.flags?.isOptional ? '?' : ''}**:`, - // childParameters ? undefined : `[${parsedName}](#${name.toLowerCase()})`, // only show the type information if we're not expanding it - // comment ? ` - ${comment}` : undefined, - // ].filter(Boolean).join(' '); - // } - const comment = getSummary(parameter.comment); - const { type, name, includeURL = true } = getReferenceTypeOfParameter(parameter.type, definitions); - const parsedName = `${name}${type === 'array' ? '[]' : ''}`; - - let url: string | undefined; - const typesToExpand = TYPES_TO_EXPAND[methodName === 'constructor' ? '_constructor' : methodName] || []; - if (typesToExpand.includes(name)) { - url = `#${name.toLowerCase()}`; - } else if (includeURL) { - url = getURLFromSources(matchingType); - } - const linkedName = url ? `[\`${parsedName}\`](${url})` : `\`${parsedName}\``; - return [ - '-', - `**${parameter.name}${parameter.flags?.isOptional ? '?' : ''}**:`, - childParameters === '' ? linkedName : undefined, // only show the type information if we're not expanding it - comment ? ` - ${comment.split('\n').join(" ")}` : undefined, - ].filter(Boolean).join(' '); -}; - -const getMatchingType = (parameter: ParameterReflection | DeclarationReflection, definitions: Definitions, typeParameters: Record = {}) => { - const { classes, interfaces, types } = definitions; - let { name: nameOfTypeDefinition } = getReferenceTypeOfParameter(parameter.type, definitions); - let matchingType: undefined | PlatformSpecificDeclarationReflection | DeclarationReflection | TypeParameterReflection; - if (!INTRINSIC_TYPES.includes(nameOfTypeDefinition) && parameter.type !== undefined && !isLiteralType(parameter.type)) { - // first, check if it is a specially defined external type - matchingType = EXTERNALLY_DEFINED_TYPES[nameOfTypeDefinition] || interfaces[nameOfTypeDefinition] || types[nameOfTypeDefinition]; - // console.log('matchingType', matchingType); - if (!matchingType) { - // it's possible that this type is a generic type; in which case, replace the generic with the actual type it's extending - matchingType = typeParameters[nameOfTypeDefinition]; - if (matchingType) { - nameOfTypeDefinition = matchingType.type.name; - matchingType = interfaces[nameOfTypeDefinition] || types[nameOfTypeDefinition]; - parameter.type = matchingType.type; - } - } - if (!matchingType && (parameter.type === undefined || !isUnionType(parameter.type))) { - console.warn('------') - console.warn(parameter.type); - console.warn([ - `No matching type could be found for ${nameOfTypeDefinition}.`, - `- Available interfaces: ${Object.keys(interfaces).join(', ')}`, - `- Available types: ${Object.keys(types).join(', ')}`, - `- Available classes: ${Object.keys(classes).join(', ')}` - ].join('\n')); - console.warn('------') - } - } - return matchingType; -} - -const getParameters = (methodName: string, parameters: (ParameterReflection | DeclarationReflection)[], definitions: Definitions, typeParameters: Record = {}, depth = 0): string => { - if (depth > 5) { - throw new Error('Too many levels of depth'); - } - return parameters.map((parameter) => { - const matchingType = getMatchingType(parameter, 
definitions, typeParameters); - const { children = [] } = matchingType || {}; - const childParameters = getParameters(methodName, sortChildrenByLineNumber(children), definitions, typeParameters, depth + 1); - return [ - writeParameter(methodName, parameter, matchingType, definitions, childParameters), - childParameters, - ].filter(Boolean).map(line => Array(depth * 2).fill(' ').join('') + line).join('\n'); - }).filter(Boolean).join('\n'); -}; - -const getReturnType = (signatures: (SignatureReflection & { typeParameter?: TypeParameterReflection[] })[], blockTags?: Record) => { - if (signatures.length === 1) { - const { type } = signatures[0]; - if (type === undefined) { - return 'void'; - } - - if (isReferenceType(type)) { - const { name, typeArguments } = type; - let nameOfType = name; - if (typeArguments?.length) { - nameOfType = `${nameOfType}<${typeArguments.map(t => getReferenceTypeOfParameter(t)).map(({ name }) => name).join(', ')}>`; - } - const returnDescription = blockTags?.['@returns']?.map(({ text }) => text).join(''); - return `\`${nameOfType}\`${returnDescription ? ` - ${returnDescription}` : ''}`; - } - - if (isInstrinsicType(type)) { - const nameOfType = type.name; - const returnDescription = blockTags?.['@returns']?.map(({ text }) => text).join(''); - return `\`${nameOfType}\`${returnDescription ? ` - ${returnDescription}` : ''}`; - } - - console.error(type); - throw new Error(`Return Type function not yet implemented for type ${type.type}`) - } - - let comment: Comment; - const validReturnTypes = new Set(); - let returnType = ''; - signatures.forEach(signature => { - if (signature.comment) { - if (comment !== undefined) { - throw new Error('Multiple comments defined for return signatures'); - } - comment = signature.comment; - } - const { type } = signature; - if (type === undefined) { - throw new Error('No type defined for signature'); - } - if (!isReferenceType(type)) { - throw new Error(`Unsupported type: ${type.type}`); - } - if (returnType !== '' && returnType !== type.name) { - throw new Error(`Conflicting return types in signatures: ${returnType} vs ${type.name}}`) - } - returnType = type.name; - if (!('typeArguments' in type)) { - throw new Error('No type arguments defined for type'); - } - const { typeArguments } = type; - typeArguments?.forEach(type => { - if (isUnionType(type)) { - type.types.forEach(t => { - if (isInstrinsicType(t) || isReferenceType(t)) { - validReturnTypes.add(t.name); - } else { - throw new Error(`Unsupported type when trying to handle union type while collecting valid signatures: ${type.type} ${t.type}`); - } - }); - } else if (isInstrinsicType(type)) { - validReturnTypes.add(type.name); - } else if (isReferenceType(type)) { - validReturnTypes.add(type.name); - } else { - throw new Error(`Unsupported type when trying to collect valid signatures: ${type.type}`); - } - }); - }) - - const nameOfType = `${returnType}<${Array.from(validReturnTypes).join(' | ')}>`; - const returnDescription = blockTags?.['@returns']?.map(({ text }) => text).join(''); - return `\`${nameOfType}\`${returnDescription ? ` - ${returnDescription}` : ''}`; -} - -const writeExpandedTypeDefinitions = (methodName: string, definitions: Definitions, typeParameters: Record = {}): string => { - // this method is for writing out additional information on the types, below the parameters - const typesToExpand = TYPES_TO_EXPAND[methodName === 'constructor' ? 
'_constructor' : methodName] || []; - return typesToExpand.map(type => [ - `### \`${type}\``, - EXPANDED_TYPE_CONTENT[type](definitions, typeParameters), - ].join('\n')).join('\n'); -} - -const getContentForMethod = (method: DeclarationReflection, definitions: Definitions, i: number) => { - const { - name, - signatures, - sources, - } = method; - - if (name === 'upscale') { - return [ - [ - '---', - `title: ${name}`, - `sidebar_position: ${i}`, - `sidebar_label: ${name}`, - '---', - ].join('\n'), - - `# ${name}`, - "Alias for [`execute`](execute)", - ].filter(Boolean).join('\n\n'); - - } - - if (!sources?.length) { - throw new Error(`No sources found for ${name}`); - } - if (!signatures?.length) { - const { type: _type, ...m } = method; - console.log(JSON.stringify(m, null, 2)) - throw new Error(`No signatures found in ${name}`); - } - const signature = signatures[0] as SignatureReflection & { typeParameter?: TypeParameterReflection[] }; - const { comment, parameters, typeParameter: typeParameters } = signature; - // if (!comment) { - // throw new Error(`No comment found in method ${name}`); - // } - - const { description, codeSnippet, blockTags } = getTextSummary(name, comment); - let source; - try { - source = getSource(sources); - } catch(e) { - console.error(JSON.stringify(method, null, 2)); - throw e; - } - - const content = [ - [ - '---', - `title: ${name}`, - `sidebar_position: ${i}`, - `sidebar_label: ${name}`, - '---', - ].join('\n'), -`# \`${name}\``, - description, - ...(codeSnippet ? [ - '## Example', - codeSnippet, - ] : []), - source, - ...(parameters ? [ - '## Parameters', - getParameters(name, parameters, definitions, getAsObj(typeParameters || [], t => t.name)), - ] : []), - writeExpandedTypeDefinitions(name, definitions, getAsObj(typeParameters || [], t => t.name)), - '## Returns', - getReturnType(signatures, blockTags), - ].filter(Boolean).join('\n\n'); - return content; -}; - -const getSortedMethodsForWriting = (definitions: Definitions) => { - const exports = Object.values(definitions.classes); - const methods: DeclarationReflection[] = []; - for (const xport of exports) { - if (VALID_EXPORTS_FOR_WRITING_DOCS.includes(xport.name)) { - const { children } = xport; - if (!children) { - throw new Error(`No methods found in export ${xport.name}`); - } - sortChildrenByLineNumber(children).forEach(method => { - if (VALID_METHODS_FOR_WRITING_DOCS.includes(method.name)) { - methods.push(method); - } else { - console.log(`** Ignoring method ${method.name}`); - } - }); - } - } - return methods; -}; - -const writeAPIDocumentationFiles = async (methods: DeclarationReflection[], definitions: Definitions) => { - await Promise.all(methods.map(async (method, i) => { - const content = getContentForMethod(method, definitions, i); - if (content) { - const target = path.resolve(EXAMPLES_DOCS_DEST, `${method.name}.md`); - await mkdirp(path.dirname(target)); - await writeFile(target, content.trim(), 'utf-8'); - } else { - throw new Error(`No content for method ${method.name}`); - } - })) -}; - -const writeIndexFile = async (methods: DeclarationReflection[]) => { - const contents = [ - '# API', - '', - 'API Documentation for UpscalerJS.', - '', - 'Available methods:', - '', - ...methods.map(method => `- [\`${method.name}\`](./${method.name})`), - ].join('\n') - await writeFile(path.resolve(EXAMPLES_DOCS_DEST, 'index.md'), contents, 'utf-8'); -} - -/**** - * Main function - */ -async function main({ shouldClearMarkdown }: SharedArgs = {}) { - await mkdirp(EXAMPLES_DOCS_DEST); - if 
(shouldClearMarkdown) { - await clearOutMarkdownFiles(EXAMPLES_DOCS_DEST); - } - - const definitions = await getDefinitions(); - const methods = getSortedMethodsForWriting(definitions); - - await Promise.all([ - writeAPIDocumentationFiles(methods, definitions), - writeIndexFile(methods), - ]); -} - -/**** - * Functions to expose the main function as a CLI tool - */ - -if (require.main === module) { - (async () => { - const sharedArgs = await getSharedArgs(); - await main({ ...sharedArgs }); - })(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/docs/build-guides.ts b/internals/upscaler-cli/src/lib/package-scripts/docs/build-guides.ts deleted file mode 100644 index f94f8cf28..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/docs/build-guides.ts +++ /dev/null @@ -1,282 +0,0 @@ -import path from 'path'; -import { copyFile, existsSync, mkdirp, readdir, readdirSync, readFile, readFileSync, statSync, writeFile } from 'fs-extra'; -import { DOCS_DIR, EXAMPLES_DIR } from '../utils/constants'; -import { getPackageJSON } from '../utils/packages'; -import fm from 'front-matter'; -import { clearOutMarkdownFiles } from './utils/clear-out-markdown-files'; -import { getSharedArgs, SharedArgs } from './types'; - -/**** - * Types - */ -interface FrontMatter { - [index: string]: string | number | FrontMatter; -} -interface ExampleContent { - title: string; - frontmatter: FrontMatter; -} -type Category = 'browser' | 'node' | 'other'; - -/**** - * Constants - */ -const DEFAULT_EMBED_FOR_NODE = 'codesandbox'; -const DEFAULT_EMBED_FOR_BROWSER = 'codesandbox'; -// const DEFAULT_EMBED_FOR_BROWSER = 'stackblitz'; -const EXAMPLES_DOCS_DEST = path.resolve(DOCS_DIR, 'docs/documentation/guides'); - -/**** - * Utility functions - */ -const isCategory = (category: unknown): category is Category => typeof category === 'string' && ['browser', 'node', 'other'].includes(category); -const isDirectory = (root: string) => (folder: string) => statSync(path.resolve(root, folder)).isDirectory(); -const getExampleFolders = (root: string) => readdirSync(root).filter(isDirectory(root)); - -const getDefaultCodeEmbedParameters = (category: Category, params: Record = {}) => { - if (category === 'node') { - return 'view=split,preview&module=index.js&hidenavigation=1'; - }; - return Object.entries({ - embed: 1, - file: 'index.js', - hideExplorer: 1, - ...params, - }).map(([key, val]) => `${key}=${val}`).join('&'); -} - -const getFrontmatter = (key: string): ExampleContent => { - const packageJSON = getPackageJSON(path.resolve(EXAMPLES_DIR, key, 'package.json')); - const readmePath = path.resolve(EXAMPLES_DIR, key, 'README.md'); - const readmeContents = readFileSync(readmePath, 'utf-8'); - const { body } = fm(readmeContents); - const bodyParts = body.split('\n'); - let title: undefined | string; - for (const line of bodyParts) { - if (line.startsWith('#')) { - title = line.split('#')?.pop()?.trim() ?? ''; - break; - } - } - - if (!title) { - throw new Error(`No title found in file ${readmePath}`); - } - - const { - category = 'browser', - code_embed, - ...frontmatter - } = packageJSON['@upscalerjs']?.guide?.frontmatter || {}; - - const codeEmbed = code_embed !== false ? { - params: getDefaultCodeEmbedParameters(category, frontmatter.params), - type: category ? 
DEFAULT_EMBED_FOR_NODE : DEFAULT_EMBED_FOR_BROWSER, - url: `/examples/${key}`, - ...code_embed, - } : {}; - - return { - frontmatter: { - category, - hide_table_of_contents: true, - ...frontmatter, - code_embed: codeEmbed, - }, - title, - } -}; - -const getExamplesWithFrontmatter = (): ({ key: string; } & ExampleContent)[] => getExampleFolders(EXAMPLES_DIR).filter(key => { - const readmePath = path.resolve(EXAMPLES_DIR, key, 'README.md'); - return existsSync(readmePath); -}).map(key => { - return { - key, - ...getFrontmatter(key), - }; -}); - -const getExampleOrder = (examples: ({ key: string; } & ExampleContent)[]) => { - return examples.sort((a, b) => { - const aPos = Number(a.frontmatter.sidebar_position); - const bPos = Number(b.frontmatter.sidebar_position); - if (Number.isNaN(aPos)) { - return 1; - } - if (Number.isNaN(bPos)) { - return -1; - } - return aPos - bPos; - }).map(({ key }) => key); -} - -const getExamplesByName = () => { - const examplesWithFrontmatter = getExamplesWithFrontmatter(); - const exampleOrder = getExampleOrder(examplesWithFrontmatter); - - return { - examplesByName: examplesWithFrontmatter.reduce((obj, { key, ...rest }) => { - if (obj[key]) { - throw new Error(`Example already exists for key ${key}`); - } - return { - ...obj, - [key]: rest, - }; - }, {} as Record), - exampleOrder, - }; -} - -const indent = (str: string, depth = 0) => [...Array(depth * 2).fill(''), str].join(' '); -const uppercase = (str: string) => str[0].toUpperCase() + str.slice(1); - -const buildFrontmatter = (frontmatter: FrontMatter = {}, depth = 0): string[] => Object.entries(frontmatter).reduce((arr, [key, val]) => { - if (typeof val === 'object') { - return arr.concat(...[ - `${key}:`, - ...buildFrontmatter(val, depth + 1), - ].map(str => indent(str, depth))); - } - return arr.concat(indent(`${key}: ${val}`, depth)); -}, [] as string[]); - -const parseContents = async (key: string, frontmatter: FrontMatter = {}) => { - const readmePath = path.resolve(EXAMPLES_DIR, key, 'README.md'); - const contents = await readFile(readmePath, 'utf-8'); - const frontmatterContents = [ - ...buildFrontmatter(frontmatter), - ]; - return [ - '---', - ...frontmatterContents, - '---', - '', - contents, - ].filter(Boolean).join('\n'); -} - -const copyAssets = async (targetDir: string, key: string) => { - const srcAssetsDir = path.resolve(EXAMPLES_DIR, key, 'assets'); - if (existsSync(srcAssetsDir)) { - const targetAssetsDir = path.resolve(targetDir, 'assets'); - await mkdirp(targetAssetsDir); - const assets = await readdir(srcAssetsDir); - await Promise.all(assets.map(async asset => { - const assetPath = path.resolve(srcAssetsDir, asset); - await copyFile(assetPath, path.resolve(targetAssetsDir, asset)); - })); - } -} - -const copyReadmesToDocs = async (exampleOrder: string[], examplesByName: Record, dest: string) => { - await Promise.all(exampleOrder.map(async (key) => { - const example = examplesByName[key]; - if (!example) { - throw new Error(`No example found for key ${key}`); - } - const { - frontmatter, - } = example; - - const { - parent, - category, - } = frontmatter; - if (!isCategory(category)) { - throw new Error(`Category is not valid: ${category}, for key ${key}`); - } - if (parent !== undefined && typeof parent !== 'string') { - throw new Error(`Parent is not of type string: ${parent}`); - } - const targetDir = path.resolve(...[dest, category, parent].filter(Boolean)); - - // copy assets - await copyAssets(targetDir, key); - - // write readme - const targetPath = path.resolve(targetDir, 
`${key}.md`); - await mkdirp(path.dirname(targetPath)); - const fileContents = await parseContents(key, frontmatter); - await writeFile(targetPath, fileContents, 'utf-8'); - })); -} - -const writeIndexFile = async (exampleOrder: string[], examplesByName: Record, dest: string) => { - const examplesByCategory = exampleOrder.reduce((obj, example) => { - const { frontmatter: { parent, category } } = examplesByName[example]; - if (!isCategory(category)) { - throw new Error(`Category is not valid: ${category}, for key ${example}`); - } - if (parent !== undefined && typeof parent !== 'string') { - throw new Error(`Parent is not of type string: ${parent}`); - } - return { - ...obj, - [category]: (obj[category] || []).concat([[parent ? uppercase(parent) : undefined, example]]), - } - }, {} as Record>); - - const content = [ - '---', - 'hide_table_of_contents: true', - '---', - '# Guides', - 'This page contains a list of guides and examples for using various features of UpscalerJS.', - '', - 'The first two guides discuss the basics of UpscalerJS and how to use it in a project. The [Models](browser/models) and [Working with Tensors](browser/tensors) guides discuss useful configuration options of UpscalerJS.', - '', - 'There are also guides on [improving the performance](#performance) of UpscalerJS, [specific examples of implementations](#implementations), and [Node.js-specific](#node) guides.', - '', - ...Object.entries(examplesByCategory).map(([category, examples]) => { - let activeParent: undefined | string; - return `\n## ${uppercase(category)}\n\n${examples.map(([parent, example]) => { - const { title } = examplesByName[example]; - const url = [ - '/documentation', - 'guides', - category, - parent, - example - ].filter(Boolean).join('/'); - const strings: string[] = []; - if (activeParent !== parent) { - activeParent = parent; - strings.push(`- ### ${parent}`); - } - strings.push(indent(`- [${title}](${url})`, activeParent ? 
1 : 0)); - return strings.join('\n'); - }).join('\n')}`; - }), - ].join('\n'); - - await writeFile(path.resolve(dest, 'index.md'), content, 'utf-8'); -} - -/**** - * Main function - */ -export const buildGuides = async (dest: string, { shouldClearMarkdown }: SharedArgs = {}) => { - await mkdirp(dest) - if (shouldClearMarkdown) { - await clearOutMarkdownFiles(dest); - } - const { exampleOrder, examplesByName } = getExamplesByName(); - - await Promise.all([ - copyReadmesToDocs(exampleOrder, examplesByName, dest), - writeIndexFile(exampleOrder, examplesByName, dest), - ]); -} - -/**** - * Functions to expose the main function as a CLI tool - */ - -if (require.main === module) { - (async () => { - const sharedArgs = await getSharedArgs(); - await buildGuides(EXAMPLES_DOCS_DEST, { ...sharedArgs }); - })(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/docs/link-model-readmes.ts b/internals/upscaler-cli/src/lib/package-scripts/docs/link-model-readmes.ts deleted file mode 100644 index 8b4fc650e..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/docs/link-model-readmes.ts +++ /dev/null @@ -1,235 +0,0 @@ -/***** - * Script for linking model readmes locally in docs folder - */ -import path from 'path'; -import { copy, existsSync, mkdirp, readFile, writeFile } from 'fs-extra'; -import { DOCS_DIR, MODELS_DIR } from '../utils/constants'; -import { getAllAvailableModelPackages } from "../utils/getAllAvailableModels"; -import { getSharedArgs, SharedArgs } from './types'; -import { clearOutMarkdownFiles } from './utils/clear-out-markdown-files'; - -/**** - * Types - */ - -interface PackageWithMetadata { - description: string; - sidebarPosition: number; - enhancedSrc: string; - unenhancedSrc: string; - category: string; - packageName: string; -} - -/**** - * Utility functions - */ - -const copyAssets = async (packageName: string, targetDir: string) => { - const packagePath = path.resolve(MODELS_DIR, packageName, 'assets'); - const targetPath = path.resolve(targetDir, packageName); - await copy(packagePath, targetPath); -} - -const createMarkdown = (contents: string, targetPath: string) => writeFile(targetPath, contents, 'utf-8'); - -const getCategory = (packageName: string, readmeContents: string) => { - const lines = readmeContents.split('\n'); - for (const line of lines) { - if (line.startsWith('category: ')) { - return line.split('category: ').pop() ?? 
''; - } - } - - throw new Error(`Could not find category for package name ${packageName}`); -}; - -const linkAllModelReadmes = async (packages: string[], targetAssetDir: string, targetDocDir: string, verbose?: boolean) => { - for (const packageName of packages) { - const packagePath = path.resolve(MODELS_DIR, packageName); - const docMdxPath = path.resolve(packagePath, 'DOC.mdx'); - - if (existsSync(docMdxPath)) { - const docMdxContents = await readFile(docMdxPath, 'utf-8'); - const category = getCategory(packageName, docMdxContents); - - const targetPath = path.resolve(targetDocDir, category, `${packageName}.mdx`); - await mkdirp(path.dirname(targetPath)); - // try { - // unlinkSync(targetPath); - // } catch (err) { } - await copyAssets(packageName, targetAssetDir); - await createMarkdown(await readFile(docMdxPath, 'utf-8'), targetPath); - if (verbose) { - console.log(`** Linked: ${packageName}`); - } - } else if (verbose) { - console.log(`** Does not have a DOC.mdx file: ${packageName}`) - } - } -}; - -const getDescription = (readmeContents: string) => { - const lines = readmeContents.split('\n'); - let description = ''; - let startedDescription = false; - for (const line of lines) { - if (line.startsWith('# ')) { - startedDescription = true; - } else if (line.startsWith('## ')) { - startedDescription = false; - break; - } else if (!line.startsWith(' `${part[0].toUpperCase()}${part.slice(1)}`; - -const getSidebarPosition = (packageName: string, readmeContents: string) => { - const lines = readmeContents.split('\n'); - for (const line of lines) { - if (line.startsWith('sidebar_position: ')) { - const pos = line.split('sidebar_position: ').pop() ?? ''; - return parseInt(pos, 10); - } - } - throw new Error(`Could not find sidebar position for package name ${packageName}`); -}; - -const getEnhancedSrc = (packageName: string, readmeContents: string) => { - const lines = readmeContents.split('\n'); - for (const line of lines) { - if (line.startsWith('enhanced_src: ')) { - return line.split('enhanced_src: ').pop() ?? 
''; - } - } - - throw new Error(`Could not find enhanced_src for package name ${packageName}`); -}; - -const getPackageMetadata = async (packageName: string) => { - const packagePath = path.resolve(MODELS_DIR, packageName); - const docMdxPath = path.resolve(packagePath, 'DOC.mdx'); - const docMdxContents = await readFile(docMdxPath, 'utf-8'); - return { - description: getDescription(docMdxContents), - sidebarPosition: getSidebarPosition(packageName, docMdxContents), - enhancedSrc: getEnhancedSrc(packageName, docMdxContents), - unenhancedSrc: `${packageName}/fixture.png`, - category: getCategory(packageName, docMdxContents), - }; -}; - -const getAllPackagesWithMetadata = async (packageNames: string[]): Promise => { - const packagesWithValidReadme = packageNames.filter(packageName => { - const packagePath = path.resolve(MODELS_DIR, packageName); - const readmePath = path.resolve(packagePath, 'DOC.mdx'); - return existsSync(readmePath); - }); - const packagesWithMetadata = await Promise.all(packagesWithValidReadme.map(async (packageName) => ({ - packageName, - ...(await getPackageMetadata(packageName)), - }))); - - return packagesWithMetadata; -}; - - -const getAllPackagesOrganizedByCategory = async (packageNames: string[]): Promise<{ category: string, packages: PackageWithMetadata[] }[]> => { - const packages = await getAllPackagesWithMetadata(packageNames); - - const packagesByCategory = packages.reduce>>((obj, pkg) => { - const { category, sidebarPosition } = pkg; - if (!obj[category]) { - obj[category] = {}; - } - obj[category][sidebarPosition] = pkg; - return obj; - }, {}); - - return Object.keys(packagesByCategory).map(category => { - const packageSidebarPositions = Object.keys(packagesByCategory[category]).sort(); - const packages = packagesByCategory[category]; - - return { - category, - packages: packageSidebarPositions.map(position => packages[position]), - } - }); -}; - -const writeModelIndexFile = async (packageNames: string[]) => { - const packagesByCategory = getAllPackagesOrganizedByCategory(packageNames); - const contents = ` ---- -title: Models -description: An overview of available UpscalerJS Models -sidebar_position: 1 -sidebar_label: Overview -pagination_next: null -pagination_prev: null -hide_title: true ---- -View this page on the UpscalerJS website - -# Models - -UpscalerJS offers a number of available models. With the exception of \`default-model\`, these models must be explicitly installed alongside UpscalerJS. 
- -import ModelCard from '@site/src/components/modelCards/modelCard/modelCard'; -import ModelCards from '@site/src/components/modelCards/modelCards'; - -${(await packagesByCategory).map(({ category, packages }) => ` -## ${category.split('-').map(uppercase)} - - - ${packages.map(({ packageName, description, unenhancedSrc, enhancedSrc } ) => ` - - `).join('\n')} - -`).join('\n')} - - `; - await writeFile(path.resolve(DOCS_DIR, 'docs', 'models', 'index.md'), contents.trim(), 'utf-8'); -}; - -/**** - * Main function - */ -const linkModelReadmes = async ({ shouldClearMarkdown, verbose }: SharedArgs = {}) => { - const packages = getAllAvailableModelPackages(); - const targetAssetDir = path.resolve(DOCS_DIR, 'assets/assets/sample-images'); - const targetDocDir = path.resolve(DOCS_DIR, 'docs/models/available'); - if (shouldClearMarkdown) { - await clearOutMarkdownFiles(targetDocDir, verbose); - } - - await writeModelIndexFile(packages); - if (verbose) { - console.log('Wrote model index file'); - } - await linkAllModelReadmes(packages, targetAssetDir, targetDocDir, verbose); -}; - -/**** - * Functions to expose the main function as a CLI tool - */ - -if (require.main === module) { - (async () => { - const sharedArgs = await getSharedArgs(); - await linkModelReadmes({ ...sharedArgs }); - })(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/docs/tense-checks.ts b/internals/upscaler-cli/src/lib/package-scripts/docs/tense-checks.ts deleted file mode 100644 index e207339b6..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/docs/tense-checks.ts +++ /dev/null @@ -1,111 +0,0 @@ -/***** - * Script for checking tense in docs markdown files - */ -import { sync } from 'glob'; -import { readFile } from 'fs-extra'; -import path from 'path'; -import { DOCS_DIR, } from '../utils/constants'; - -/**** - * Constants - */ - -const EXCLUDED_DIRECTORIES = [ - 'node_modules', - 'blog', -]; - -/**** - * Utility functions - */ - -const getDocumentationFiles = (): string[] => { - return sync(path.resolve(DOCS_DIR, '**/*.{md,mdx}')).filter(file => { - return EXCLUDED_DIRECTORIES.reduce((include, dir) => { - return !include ? 
false : !file.includes(dir); - }, true); - }); -}; - -// split a markdown file's contents into two concatenated strings, -// one containing the main content of the file, the other containing -// just the asides -const splitFileContents = (contents: string): [string, string] => { - const nonAsides = []; - const asides = []; - let isAside = false; - for (const line of contents.split('\n')) { - if (line.startsWith(':::')) { - isAside = !isAside; - } else { - if (isAside) { - asides.push(line); - } else { - nonAsides.push(line); - } - } - } - return [nonAsides.join('\n'), asides.join('\n')]; -}; - -// check that a chunk of text matches a specific tense -const checkTense = (contents: string, expectedTense: 'third' | 'second') => { - if (expectedTense === 'third') { - // const matches = contents.match(/(Y|y)ou|(Y|y)our|(M|m)ine|(M|m)y/g); - return contents.match(/\b(I |I'm|me|my|mine|you|your|yours|yourself|yourselves)\b/g); - } else if (expectedTense === 'second') { - return contents.match(/\b(I |I'm|me|my|mine|we|us|our|ours|ourselves)\b/g); - } - throw new Error(`Unexpected tense: ${expectedTense}`); -} - -const checkFileForTense = async (file: string) => { - const contents = await readFile(file, 'utf-8'); - if (file.includes('documentation/api') || file.includes('troubleshooting')) { - const matches = checkTense(contents, 'second'); - if (matches !== null) { - return [ - `Found inconsistent tenses in file ${file}:`, - '', - `Main content should be second person, found following keywords: ${matches.join('|')}`, - ].join('\n'); - } - } else { - const [mainContents, asides] = splitFileContents(contents); - const mainMatches = checkTense(mainContents, 'third'); - const asidesMatches = checkTense(asides, 'second'); - if (mainMatches !== null || asidesMatches !== null) { - return [ - `Found inconsistent tenses in file ${file}:`, - '', - ...(mainMatches !== null ? [ - `Main content should be third person, found following keywords: ${mainMatches.join('|')}`, - ] : []), - ...(asidesMatches !== null ? 
[ - `Asides content should be second person, found following keywords: ${asidesMatches.join('|')}`, - ] : []), - ].join('\n'); - } - } - return undefined; -} - -/**** - * Main function - */ -const tenseChecks = async () => { - const files = getDocumentationFiles(); - const errors = (await Promise.all(files.map(checkFileForTense))).filter(Boolean); - - if (errors.length) { - throw new Error(errors.join('\n\n\n')); - } -} - -/**** - * Functions to expose the main function as a CLI tool - */ - -if (require.main === module) { - tenseChecks(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/docs/types.ts b/internals/upscaler-cli/src/lib/package-scripts/docs/types.ts deleted file mode 100644 index 77766cb56..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/docs/types.ts +++ /dev/null @@ -1,17 +0,0 @@ -import yargs from 'yargs'; - -export interface SharedArgs { - shouldClearMarkdown?: boolean; - verbose?: boolean; -} - -export const getSharedArgs = async (): Promise => { - const argv = await yargs(process.argv.slice(2)).options({ - shouldClearMarkdown: { type: 'boolean' }, - verbose: { type: 'boolean' }, - }).argv; - - return { - ...argv, - } -}; diff --git a/internals/upscaler-cli/src/lib/package-scripts/docs/utils/clear-out-markdown-files.ts b/internals/upscaler-cli/src/lib/package-scripts/docs/utils/clear-out-markdown-files.ts deleted file mode 100644 index 08d120ed5..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/docs/utils/clear-out-markdown-files.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { glob } from 'glob'; -import { unlink } from 'fs-extra'; - -const getAllMarkdownFiles = (target: string) => glob(`${target}/**/*.md?(x)`); - -export const clearOutMarkdownFiles = async (target: string, verbose?: boolean) => { - const files = await getAllMarkdownFiles(target); - if (files.length > 0) { - await Promise.all(files.map(file => unlink(file))); - if (verbose) { - console.log([ - `Cleared out ${files.length} markdown files, including:`, - ...files.map(file => file.split(/docs\/documentation\//gi).pop()).map(file => `- ${file}`), - ].join('\n')); - } - } -}; diff --git a/internals/upscaler-cli/src/lib/package-scripts/find-all-packages.ts b/internals/upscaler-cli/src/lib/package-scripts/find-all-packages.ts deleted file mode 100644 index e0a6a933d..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/find-all-packages.ts +++ /dev/null @@ -1,34 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { ROOT_DIR } from './utils/constants.js'; - -const ROOT = ROOT_DIR; -const EXCLUDED = ['node_modules', 'scratch']; -const MAX_DEPTH = 100; - -const findAllPackages = (dir: string, excluded: string[] = [], depth = 0): Array => { - let packages: Array = []; - if (depth > MAX_DEPTH) { - throw new Error('Maximum depth reached'); - } - const files = fs.readdirSync(dir); - for (const file of files) { - const fullFile = path.resolve(dir, file); - if (file === 'package.json') { - const strippedFile = fullFile.split(`${ROOT}/`).pop(); - if (!strippedFile) { - throw new Error(`Error with file ${fullFile}`); - } - packages.push(strippedFile); - } else if (!EXCLUDED.includes(file) && !excluded.includes(fullFile)) { - const stat = fs.statSync(fullFile); - if (stat?.isDirectory()) { - const dirFiles = findAllPackages(fullFile, excluded, depth + 1); - packages = packages.concat(dirFiles); - } - } - } - return packages; -}; - -export default findAllPackages; diff --git a/internals/upscaler-cli/src/lib/package-scripts/prompt/getModel.ts 
b/internals/upscaler-cli/src/lib/package-scripts/prompt/getModel.ts deleted file mode 100644 index 0f71bdc41..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/prompt/getModel.ts +++ /dev/null @@ -1,31 +0,0 @@ -import inquirer from 'inquirer'; -import { getAllAvailableModelPackages } from '../utils/getAllAvailableModels'; - -export const AVAILABLE_MODELS = getAllAvailableModelPackages(); - -export const getModel = async (model?: string | number | (string | number)[], all?: unknown) => { - if (all === true) { - const modelPackages = getAllAvailableModelPackages(); - return modelPackages; - } - - if (typeof model === 'string') { - return [model]; - } - - if (Array.isArray(model)) { - return model.map(m => `${m}`); - } - - const { models } = await inquirer.prompt<{ - models: string[] - }>([ - { - type: 'checkbox', - name: 'models', - message: 'Which models do you want to build?', - choices: AVAILABLE_MODELS, - }, - ]); - return models; -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/prompt/getNumber.ts b/internals/upscaler-cli/src/lib/package-scripts/prompt/getNumber.ts deleted file mode 100644 index d49243bb5..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/prompt/getNumber.ts +++ /dev/null @@ -1,17 +0,0 @@ -import inquirer from 'inquirer'; - -export const getNumber = async (message: string, arg?: unknown) => { - if (typeof arg === 'number') { - return arg; - } - - const response = await inquirer.prompt<{ - arg: number; - }>([ - { - name: 'arg', - message, - }, - ]); - return response.arg; -}; diff --git a/internals/upscaler-cli/src/lib/package-scripts/prompt/getOutputFormats.ts b/internals/upscaler-cli/src/lib/package-scripts/prompt/getOutputFormats.ts deleted file mode 100644 index 022e24cb9..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/prompt/getOutputFormats.ts +++ /dev/null @@ -1,37 +0,0 @@ -import inquirer from 'inquirer'; -import { OutputFormat } from './types'; - -export const DEFAULT_OUTPUT_FORMATS: Array = ['cjs', 'esm', 'umd']; - -const isValidOutputFormat = (outputFormat: string): outputFormat is OutputFormat => { - for (const f of DEFAULT_OUTPUT_FORMATS) { - if (f === outputFormat) { - return true; - } - } - return false; -} - -export const getOutputFormats = async (outputFormat?: unknown, defaultToAll?: boolean) => { - if (typeof outputFormat === 'string' && isValidOutputFormat(outputFormat)) { - return [outputFormat] - } - if (Array.isArray(outputFormat)) { - return outputFormat; - } - if (defaultToAll) { - return DEFAULT_OUTPUT_FORMATS; - } - const { outputFormats } = await inquirer.prompt<{ - outputFormats: string[]; - }>([ - { - type: 'checkbox', - name: 'outputFormats', - message: 'Which output formats do you want to build?', - choices: DEFAULT_OUTPUT_FORMATS, - }, - ]); - return outputFormats; -} - diff --git a/internals/upscaler-cli/src/lib/package-scripts/prompt/getPlatform.ts b/internals/upscaler-cli/src/lib/package-scripts/prompt/getPlatform.ts deleted file mode 100644 index 0a38bc087..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/prompt/getPlatform.ts +++ /dev/null @@ -1,25 +0,0 @@ -import inquirer from 'inquirer'; -import { Platform } from "./types"; - -export const AVAILABLE_PLATFORMS = ['node', 'node-gpu', 'browser']; - -const isValidPlatform = (platform?: string | number): platform is Platform => { - return typeof platform === 'string' && AVAILABLE_PLATFORMS.includes(platform); -}; - -export const getPlatform = async (platforms?: string | number) => { - if (isValidPlatform(platforms)) { 
- return platforms; - } - - const { value } = await inquirer.prompt([ - { - type: 'list', - name: 'value', - message: 'Which platforms do you want to build for?', - choices: AVAILABLE_PLATFORMS, - }, - ]); - return value; -} - diff --git a/internals/upscaler-cli/src/lib/package-scripts/prompt/getString.ts b/internals/upscaler-cli/src/lib/package-scripts/prompt/getString.ts deleted file mode 100644 index e982c6967..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/prompt/getString.ts +++ /dev/null @@ -1,37 +0,0 @@ -import inquirer from 'inquirer'; - -export const getString = async (message: string, arg?: unknown) => { - if (typeof arg === 'string') { - return arg; - } - - const response = await inquirer.prompt<{ - arg: string - }>([ - { - name: 'arg', - message, - }, - ]); - return response.arg; -} - -export const getStringArray = async (message: string, arg?: unknown) => { - if (typeof arg === 'string') { - return [arg]; - } - - if (Array.isArray(arg) && arg.length > 0) { - return arg; - } - - const response = await inquirer.prompt<{ - arg: string - }>([ - { - name: 'arg', - message, - }, - ]); - return response.arg.split(' ').filter(Boolean); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/prompt/ifDefined.ts b/internals/upscaler-cli/src/lib/package-scripts/prompt/ifDefined.ts deleted file mode 100644 index 5e2c9891d..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/prompt/ifDefined.ts +++ /dev/null @@ -1 +0,0 @@ -export function ifDefined(argv: Record, key: string, type: string) { return typeof argv[key] === type ? argv[key] as T: undefined; } diff --git a/internals/upscaler-cli/src/lib/package-scripts/prompt/prompt.ts b/internals/upscaler-cli/src/lib/package-scripts/prompt/prompt.ts deleted file mode 100644 index e463669f2..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/prompt/prompt.ts +++ /dev/null @@ -1,42 +0,0 @@ -import yargs, { Options } from 'yargs'; -import inquirer, { QuestionCollection } from 'inquirer'; - -export interface PromptOption extends Options { - name: string; - isValidType?: IsValidType; - prompt?: QuestionCollection; -}; - -async function prompt(...options: PromptOption[]) { - const yargsOptions: { - [key: string]: Options; - } = options.reduce((obj, option) => { - const { name, isValidType: _isValidType, ...yargsOption } = option; - return { - ...obj, - [name]: yargsOption, - }; - }, {}); - const argv = await yargs(process.argv.slice(2)).options(yargsOptions).argv; - - for (const option of options) { - if (option.isValidType && option.prompt) { - argv[option.name] = await getArg(argv[option.name], option.isValidType, option.prompt); - } - } - return argv; -} - -export type IsValidType = (arg: unknown) => arg is ExpectedType; - -async function getArg(defaultArg: ExpectedType, isValidType: IsValidType, promptOption: QuestionCollection): Promise { - if (isValidType(defaultArg)) { - return defaultArg; - } - const { arg } = await inquirer.prompt<{ - arg: ExpectedType; - }>(promptOption); - return arg; -} - -export default prompt; diff --git a/internals/upscaler-cli/src/lib/package-scripts/prompt/types.ts b/internals/upscaler-cli/src/lib/package-scripts/prompt/types.ts deleted file mode 100644 index fd3a31cc9..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/prompt/types.ts +++ /dev/null @@ -1,2 +0,0 @@ -export type OutputFormat = 'cjs' | 'esm' | 'umd'; -export type Platform = 'node' | 'node-gpu' | 'browser'; diff --git a/internals/upscaler-cli/src/lib/package-scripts/scaffold-dependencies.ts 
b/internals/upscaler-cli/src/lib/package-scripts/scaffold-dependencies.ts deleted file mode 100644 index 6c9861cd4..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/scaffold-dependencies.ts +++ /dev/null @@ -1,197 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { getPackageJSON } from './utils/packages.js'; -import { JSONSchemaForNPMPackageJsonFiles } from '@schemastore/package'; -import { ROOT_DIR } from './utils/constants.js'; - -/**** - * Type Definitions - */ -export type Platform = 'browser' | 'node' | 'node-gpu'; - -export type TFJSDependency = '@tensorflow/tfjs' | '@tensorflow/tfjs-node' | '@tensorflow/tfjs-node-gpu'; - -type ContentFn = (arg: { - tfjs?: TFJSDependency; - platform?: Platform; - packageJSON: JSONSchemaForNPMPackageJsonFiles; -}) => string; -type Content = string | ContentFn; -interface File { - name: string; - contents: Content[]; -} - -export interface ScaffoldDependenciesConfig { - scaffoldPlatformFiles?: boolean; - files: File[]; -} - -/**** - * Constants - */ -const ROOT = ROOT_DIR; - -/**** - * Dependency-specific utility functions - */ -export const writeTFJSDependency: ContentFn = ({ tfjs, }) => { - if (tfjs === undefined) { - throw new Error('TFJS Platform was undefined'); - } - return `export * as tf from '${tfjs}';`; -}; - -export const getPlatformSpecificTensorflow = (platform?: Platform): TFJSDependency | undefined => { - if (platform === undefined) { - return undefined; - } - if (platform === 'node') { - return '@tensorflow/tfjs-node'; - } - if (platform === 'node-gpu') { - return '@tensorflow/tfjs-node-gpu'; - } - return '@tensorflow/tfjs'; -} - -/**** - * File OS utility functions - */ - -const writeFile = (filename: string, content: string) => fs.writeFileSync(filename, content); - -const writeLines = (filename: string, content: Array) => writeFile(filename, `${content.map(l => l.trim()).join('\n')}\n`); - -/**** - * Functions for scaffolding platform-specific files - */ -const getFilePath = (file: string, platform: Platform) => `${file}.${platform === 'browser' ? 
'browser' : 'node'}.ts`; - -const findPlatformSpecificFiles = (folder: string) => new Set(fs.readdirSync(folder).filter(file => { - return /(.*).(browser|node).ts$/.test(file) -}).map(file => file.split('.').slice(0, -2).join('.'))); - -const scaffoldPlatformSpecificFile = (src: string, file: string, platform: Platform) => { - const srcFile = path.resolve(src, getFilePath(file, platform)); - if (!fs.existsSync(srcFile)) { - throw new Error(`File ${srcFile} does not exist`) - } - const targetFile = path.resolve(src, `${file}.generated.ts`); - try { fs.unlinkSync(targetFile); } catch(err) { - // ignore - } - fs.symlinkSync(srcFile, targetFile, 'file'); -}; - -export const scaffoldPlatformSpecificFiles = (folderSrc: string, platform: Platform, { verbose }: { verbose?: boolean } = {}) => { - const files = findPlatformSpecificFiles(folderSrc); - if (verbose) { - console.log([ - 'Scaffolding the following files:', - ...Array.from(files).map(file => `- ${file}.generated.ts`), - ].join('\n')) - } - files.forEach(file => scaffoldPlatformSpecificFile(folderSrc, file, platform)); -} - -/**** - * Utility methods - */ -export function loadScaffoldDependenciesConfig(filePath: string): Promise<{ - default: ScaffoldDependenciesConfig -}> { - return import(filePath); -} - -/**** - * Main function - */ - -type ScaffoldDependencies = ( - packageRoot: string, - config: ScaffoldDependenciesConfig, - platform?: Platform, - opts?: { verbose?: boolean }, -) => void; -const scaffoldDependencies: ScaffoldDependencies = ( - packageRoot, - { - files, - scaffoldPlatformFiles, - }, - platform, { verbose = false } = {}) => { - const PACKAGE_ROOT = path.resolve(ROOT, packageRoot); - const PACKAGE_SRC = path.resolve(PACKAGE_ROOT, 'src'); - if (scaffoldPlatformFiles) { - if (!platform) { - throw new Error('You must provide a platform to scaffold platform specific files'); - } - scaffoldPlatformSpecificFiles(PACKAGE_SRC, platform, { verbose }); - } - const tfjs = getPlatformSpecificTensorflow(platform); - const packageJSON = getPackageJSON(PACKAGE_ROOT); - files.forEach(({ name, contents }) => { - const filePath = path.resolve(PACKAGE_SRC, `${name}.generated.ts`); - const lines = contents.map(line => typeof line === 'string' ? 
line : line({ - tfjs, - packageJSON, - platform, - })); - writeLines(filePath, lines); - }); -} - -export default scaffoldDependencies; - -/**** - * Functions to expose the main function as a CLI tool - */ - -// interface Args { -// targetPackage: string; -// platform?: Platform; -// config: string; -// } - -// const isPlatform = (platform?: unknown): platform is Platform => typeof platform === 'string' && ['browser', 'node', 'node-gpu'].includes(platform); - -// const getPlatform = (platform?: unknown): Platform | undefined => { -// if (isPlatform(platform)) { -// return platform; -// } -// } - -// const getArgs = async (): Promise => { -// const argv = await yargs.command('scaffold-dependencies [platform]', 'scaffold dependencies for a specific platform', yargs => { -// yargs.positional('platform', { -// describe: 'The platform to target', -// }).options({ -// src: { type: 'string', demandOption: true }, -// config: { type: 'string', demandOption: true }, -// }); -// }) -// .help() -// .argv; - -// if (typeof argv.src !== 'string') { -// throw new Error(`Invalid src, should be a string: ${argv.src}`); -// } - -// if (typeof argv.config !== 'string') { -// throw new Error(`Invalid config, should be a string: ${argv.config}`); -// } - -// return { -// targetPackage: argv.src, -// config: argv.config, -// platform: getPlatform(argv['_'][0]), -// } -// } - - - // (async () => { - // const argv = await getArgs(); - // const { default: config } = await loadScaffoldDependenciesConfig(path.resolve(ROOT, argv.config)); - // await scaffoldDependencies(argv.targetPackage, config, argv.platform); - // })(); diff --git a/internals/upscaler-cli/src/lib/package-scripts/update-dependency.ts b/internals/upscaler-cli/src/lib/package-scripts/update-dependency.ts deleted file mode 100644 index 6d75570f3..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/update-dependency.ts +++ /dev/null @@ -1,188 +0,0 @@ -import yargs from 'yargs'; -import inquirer from 'inquirer'; -import isValidVersion from './utils/isValidVersion'; -import { - AVAILABLE_PACKAGES, - DIRECTORIES, - getPackageJSONPath, - getPackageJSONValue, - getPreparedFolderName, - Package, - TransformPackageJsonFn, - updateMultiplePackages, - updatePackageJSONForKey, - updateSinglePackage, - } from './utils/packages'; -import { Dependency } from '@schemastore/package'; - -/**** - * Utility functions - */ - -class Logger { - updates: Array = []; - constructor(msg: string) { - this.push(msg); - } - - push(msg: string) { - this.updates.push(msg); - } - - write() { - if (this.updates.length) { - this.updates.forEach(message => console.log(message)) - } - } -} - -const makeSetVersionForPackageJSON = (dependencies: string[], version: string): TransformPackageJsonFn => (packageJSON, dir) => { - const packageJSONKeys = ['dependencies', 'peerDependencies', 'devDependencies', 'pnpm.overrides']; - const logger = new Logger(`- Updated ${getPreparedFolderName(getPackageJSONPath(dir))}`); - for (const packageJSONKey of packageJSONKeys) { - const packageJSONListOfDependencies = getPackageJSONValue(packageJSON, packageJSONKey); - if (packageJSONListOfDependencies) { - const gen = getMatchingDependency(dependencies, packageJSONListOfDependencies); - let value = gen.next().value; - while (value) { - const [key] = value; - if (!key) { - throw new Error(`No key found in ${JSON.stringify(value)}`) - } - packageJSONListOfDependencies[key] = version; - value = gen.next().value; - logger.push(` - ${packageJSONKey}: ${key}`); - } - packageJSON = 
updatePackageJSONForKey(packageJSON, packageJSONKey, packageJSONListOfDependencies) - } - } - logger.write(); - return packageJSON; -} - -export function* getMatchingDependency(matchingDependencies: string[], packageJSONListOfDependencies?: Dependency) { - if (packageJSONListOfDependencies) { - const entries = Object.entries(packageJSONListOfDependencies); - for (const [key, val] of entries) { - for (const matchingDependency of matchingDependencies) { - if (key === matchingDependency) { - yield [key, val]; - break; - } - } - } - } -} - -/**** - * Main function - */ -const updateDependency = async (dependencies: string[], version: string, packages: string[]) => { - if (!isValidVersion(version)) { - throw new Error(`Version is not in the format x.x.x. You specified: ${version}`); - } - if (packages.length === 0) { - console.log('No packages selected, nothing to do.') - return; - } - - const setVersionForPackageJSON = makeSetVersionForPackageJSON(dependencies, version); - - await Promise.all(packages.map(packageKey => { - const pkg = DIRECTORIES[packageKey]; - if (pkg === undefined) { - throw new Error(`Package ${packageKey} is not defined.`); - } - const { multiple, directory } = pkg; - const fn = multiple ? updateMultiplePackages : updateSinglePackage; - return fn(directory, setVersionForPackageJSON); - })); -}; - -export default updateDependency; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Args { - dependency: string; - version: string; - packages: Package[]; -} - -const getDependency = (dependency?: unknown) => { - if (typeof dependency === 'string') { - return dependency; - } - - return inquirer.prompt([ - { - name: 'dependency', - message: 'Specify the dependency to update', - }, - ]).then(r => r.dependency); -} - -const getVersion = (version?: unknown) => { - if (typeof version === 'string') { - return version; - } - - return inquirer.prompt([ - { - name: 'version', - message: 'Specify the version to update to', - }, - ]).then(r => r.version); -}; - -const isPackages = (packages?: unknown): packages is Package[] => { - return Boolean(Array.isArray(packages) && packages.length && typeof packages[0] === 'string'); -} - -const getPackages = (packages?: unknown) => { - if (isPackages(packages)) { - return packages; - } - - return inquirer.prompt([ - { - type: 'checkbox', - name: 'packages', - message: 'Which packages do you want to update?', - choices: AVAILABLE_PACKAGES, - }, - ]).then(r => r.packages); -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('update-dependency ', 'update dependency', yargs => { - yargs.positional('dependency', { - describe: 'The dependency to update', - }).positional('version', { - describe: 'The version to update to', - }).options({ - packages: { type: 'string' }, - }); - }) - .help() - .argv; - - const dependency = await getDependency(argv._[0]); - const version = await getVersion(argv.version); - const packages = await getPackages(argv.packages); - - return { - dependency, - version, - packages, - } -} - -if (require.main === module) { - (async () => { - const { dependency, version, packages } = await getArgs(); - await updateDependency([dependency], version, packages); - })(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/update-npm-dependencies.ts b/internals/upscaler-cli/src/lib/package-scripts/update-npm-dependencies.ts deleted file mode 100644 index 8e6b91fd0..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/update-npm-dependencies.ts +++ /dev/null @@ 
-1,119 +0,0 @@ -import { exec as _exec, ExecOptions } from 'child_process'; -import yargs from 'yargs'; -import path from 'path'; -import { ifDefined as _ifDefined } from './prompt/ifDefined'; -import { glob } from 'glob'; -import { ROOT_DIR } from './utils/constants'; -import asyncPool from "tiny-async-pool"; - -/**** - * Types - */ -interface PNPMLSItem { - name: string; - version: string; - path: string; - private: boolean; -} - -/**** - * Constants - */ -const NUMBER_OF_CONCURRENT_THREADS = 5; - -/**** - * Utility functions - */ -const exec = (cmd: string, { verbose, ...opts }: { verbose?: boolean; } & ExecOptions = {}) => new Promise((resolve, reject) => { - const spawnedProcess = _exec(cmd, opts, (error) => { - if (error) { - reject(error); - } else { - resolve(); - } - }); - - if (verbose) { - spawnedProcess.stdout?.pipe(process.stdout); - } -}); -const getOutput = (cmd: string, { ...opts }: ExecOptions = {}) => new Promise((resolve, reject) => { - let output = ''; - const spawnedProcess = _exec(cmd, opts, (error) => { - if (error) { - reject(error); - } else { - resolve(output); - } - }); - - spawnedProcess.stdout?.on('data', chunk => { - output += chunk; - }); -}); - -const getPNPMPackages = async (): Promise => JSON.parse(await getOutput('pnpm m ls --json --depth=-1')); - -const getAllNonPNPMPackages = async () => { - const packages = new Set((await getPNPMPackages()).map((pkg) => `${pkg.path.split(`${ROOT_DIR}/`).pop()}/package.json`)); - const files = await glob('**/package.json', { - ignore: [ - 'node_modules/**', - '**/node_modules/**', - '**/scratch/**', - '**/dev/browser/public/**', - '**/examples/react/**', - ], - }); - return files.filter(file => !packages.has(file) && file !== 'package.json'); -} - -/**** - * Main function - */ -const updateNPMDependencies = async ({ verbose }: Args) => { - const filteredFiles = await getAllNonPNPMPackages(); - for await (const _ of asyncPool(NUMBER_OF_CONCURRENT_THREADS, filteredFiles, async (file: string) => { - await exec('npm update --save', { - cwd: path.resolve(ROOT_DIR, path.dirname(file)), - verbose, - }); - })) { - // empty - } -}; - -export default updateNPMDependencies; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Args { - verbose?: boolean; -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('update-dependency ', 'update dependency', yargs => { - yargs.option('v', { - alias: 'verbose', - type: 'boolean', - packages: { type: 'string' }, - }); - }) - .help() - .argv; - - function ifDefined(key: string, type: string) { return _ifDefined(argv, key, type) as T; } - - return { - verbose: ifDefined('v', 'boolean'), - } -} - -if (require.main === module) { - (async () => { - const { verbose } = await getArgs(); - await updateNPMDependencies({ verbose }); - })(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/update-tfjs.ts b/internals/upscaler-cli/src/lib/package-scripts/update-tfjs.ts deleted file mode 100644 index 0dab55d7e..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/update-tfjs.ts +++ /dev/null @@ -1,100 +0,0 @@ -import yargs from 'yargs'; -import inquirer, { DistinctQuestion } from 'inquirer'; -import { - AVAILABLE_PACKAGES, - getPackageJSON, - Package, -} from './utils/packages'; -import updateDependency, { getMatchingDependency } from './update-dependency'; -import { ROOT_DIR } from './utils/constants'; - -/**** - * Constants - */ -const TFJS_PACKAGES = [ - '@tensorflow/tfjs', - '@tensorflow/tfjs-node', - 
'@tensorflow/tfjs-node-gpu', - '@tensorflow/tfjs-layers', - '@tensorflow/tfjs-core', -]; - -/**** - * Main function - */ - -const updateTFJS = (version: string, packages: Package[]) => updateDependency(TFJS_PACKAGES, version, packages); - -export default updateTFJS; - -/**** - * Functions to expose the main function as a CLI tool - */ -const getTFJSVersion = (dir: string = ROOT_DIR): string => { - const packageJSON = getPackageJSON(dir); - const deps = packageJSON.peerDependencies; - const gen = getMatchingDependency(TFJS_PACKAGES, deps); - const matchingTFJS = gen.next().value; - if (!matchingTFJS) { - throw new Error(`Could not find a dependency matching @tensorflow/tfjs in ${dir}`); - } - const [_, val] = matchingTFJS; - if (!val) { - throw new Error(`No value was found in ${JSON.stringify(matchingTFJS)}`); - } - return val; -}; - -type TypecheckFunction = (value?: unknown) => value is T; -function getArg(typecheckFunction: TypecheckFunction, question: { name: string } & DistinctQuestion) { - return (value?: unknown) => typecheckFunction(value) ? value : inquirer.prompt(question).then(r => r[question.name]); -}; - -const isVersion = (version?: unknown): version is string => typeof version === 'string'; -const getVersion = getArg(isVersion, { - name: 'value', - message: 'Specify the version to update to', - default: getTFJSVersion(), -}); - -const isPackages = (packages?: unknown): packages is Package[] => { - return Boolean(Array.isArray(packages) && packages.length && typeof packages[0] === 'string'); -} -const getPackages = getArg(isPackages, { - type: 'checkbox', - name: 'packages', - message: 'Which packages do you want to update?', - choices: AVAILABLE_PACKAGES, -}); - -interface Args { - version: string; - packages: Package[]; -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('update-dependency ', 'update dependency', yargs => { - yargs.positional('version', { - describe: 'The version to update to', - }).options({ - packages: { type: 'string' }, - }); - }) - .help() - .argv; - - const version = await getVersion(argv.version); - const packages = await getPackages(argv.packages); - - return { - version, - packages, - } -} - -if (require.main === module) { - (async () => { - const { version, packages } = await getArgs(); - await updateTFJS(version, packages); - })(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/update-version.ts b/internals/upscaler-cli/src/lib/package-scripts/update-version.ts deleted file mode 100644 index 06e536c0f..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/update-version.ts +++ /dev/null @@ -1,126 +0,0 @@ -import inquirer from 'inquirer'; -import isValidVersion from './utils/isValidVersion'; -import { AVAILABLE_PACKAGES, DIRECTORIES, EXAMPLES, getPackageJSON, getPackageJSONPath, getPreparedFolderName, Package, PackageUpdaterLogger, ROOT, TransformPackageJsonFn, updateMultiplePackages, updateSinglePackage, UPSCALER_JS, WRAPPER } from './utils/packages'; -import { ROOT_DIR } from './utils/constants'; - -/**** - * Type Definitions - */ -type Answers = { packages: Array, version: string, updateDependencies?: boolean, } - -/**** - * Constants - */ -const logger: PackageUpdaterLogger = (file: string) => { - return `- Updated ${getPreparedFolderName(getPackageJSONPath(file))}`; -} - -const makeSetVersionForPackageJSON = (version: string): TransformPackageJsonFn => (packageJSON) => { - packageJSON.version = version; - return packageJSON; -} - -const getVersion = (dir: string) => { - return 
getPackageJSON(dir).version; -}; - -const getCurrentVersions = () => { - const upscalerJSVersion = getVersion(DIRECTORIES[UPSCALER_JS].directory); - const rootVersion = getVersion(DIRECTORIES[ROOT].directory); - return [ - `root: ${rootVersion}`, - `upscaler: ${upscalerJSVersion}`, - ].join(' | '); -}; - -const updateVersion = (): Promise => new Promise(resolve => { - inquirer.prompt([ - { - name: 'version', - message: `Specify the version you wish to change to:\n(${getCurrentVersions()})\n`, - default: getVersion(ROOT_DIR), - }, - { - type: 'checkbox', - name: 'packages', - message: 'Which packages do you want to update?', - choices: AVAILABLE_PACKAGES, - }, - { - name: 'updateDependencies', - message: "Since UpscalerJS's version will be updated, do you also want to update packages (like examples) that reference it?", - type: 'confirm', - default: true, - when: ({ packages }: Omit) => packages.includes('UpscalerJS'), - }, - // { - // name: 'commit', - // message: `Do you wish to commit changes`, - // type: 'confirm', - // default: true, - // }, - ]).then(async ({ version, packages, - // commit, - updateDependencies }) => { - if (!isValidVersion(version)) { - throw new Error(`Version is not in the format x.x.x. You specified: ${version}`); - } - if (packages.length === 0) { - console.log('No packages selected, nothing to do.') - return; - } - - const setVersionForPackageJSON = makeSetVersionForPackageJSON(version); - - for (const packageKey of packages) { - const pkg = DIRECTORIES[packageKey]; - if (pkg === undefined) { - throw new Error(`Package ${packageKey} is not defined.`); - } - const { multiple, directory } = pkg; - const fn = multiple ? updateMultiplePackages : updateSinglePackage; - - await fn(directory, setVersionForPackageJSON, logger); - } - if (updateDependencies) { - const dependencyDirectories = [DIRECTORIES[EXAMPLES], DIRECTORIES[WRAPPER]]; - const dependencyUpdater: TransformPackageJsonFn = packageJSON => { - const deps = packageJSON.dependencies; - if (deps) { - return { - ...packageJSON, - dependencies: { - ...deps, - 'upscaler': version, - } - }; - } - return packageJSON; - } - const dependencyLogger: PackageUpdaterLogger = dir => { - return `- Updated "upscaler" dependency in ${getPreparedFolderName(dir)}`; - }; - for (const { directory, multiple } of dependencyDirectories) { - const fn = multiple ? 
updateMultiplePackages : updateSinglePackage; - fn(directory, dependencyUpdater, dependencyLogger); - } - } - // if (commit) { - // const cmd = `git commit -m "Updated version to ${version} for ${formatArray(packages)}"`; - // await new Promise(resolve => setTimeout(resolve, 100)); - // try { - // await execute(cmd); - // } catch(err) { - // console.error('*******', err) - // throw err; - // } - // } - resolve(); - }); -}); - -export default updateVersion; - -if (require.main === module) { - updateVersion(); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/upscalerjs-rollup-config.ts b/internals/upscaler-cli/src/lib/package-scripts/upscalerjs-rollup-config.ts deleted file mode 100644 index 144dce183..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/upscalerjs-rollup-config.ts +++ /dev/null @@ -1,43 +0,0 @@ -import commonjs from '@rollup/plugin-commonjs'; -import { nodeResolve, } from '@rollup/plugin-node-resolve'; -import type { InputOptions, OutputOptions, } from 'rollup'; -import DefaultUpscalerModel from '../../../../../models/default-model/umd-names.json' assert { type: "json" };; - -const isValidUMDNameFile = (contents: unknown): contents is { - '.': string -} => typeof contents === 'object' && contents !== undefined && contents !== null && '.' in contents; - -const getModelUmdName = () => { - if (isValidUMDNameFile(DefaultUpscalerModel)) { - return DefaultUpscalerModel['.']; - } - throw new Error('Bad umd-names.json file for @upscalerjs/default-model'); -}; - -export const inputOptions: InputOptions = { - context: 'window', - external: [ - '@tensorflow/tfjs', - '@upscalerjs/default-model', - ], - plugins: [ - nodeResolve({ - preferBuiltins: true, - resolveOnly: [ - /^(?!.*(@tensorflow\/tfjs))/, //skipcq: js-0113 - /^(?!.*(@tensorflow\/tfjs-core))/, //skipcq: js-0113 - ], - }), - commonjs(), - ], -}; - -export const outputOptions: OutputOptions = { - format: 'umd', - globals: { - '@tensorflow/tfjs': 'tf', - '@tensorflow/tfjs-core': 'tf', - '@upscalerjs/default-model': getModelUmdName(), - }, -}; - diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/babelTransform.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/babelTransform.ts deleted file mode 100644 index c3367015f..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/babelTransform.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { transformAsync } from '@babel/core'; -import fs from 'fs'; -import { getAllFilesRecursively } from "./getAllFilesRecursively"; - -export const babelTransform = async (directory: string) => { - const files = getAllFilesRecursively(directory, file => file.endsWith('.js')); - - await Promise.all(files.map(async filePath => { - const contents = fs.readFileSync(filePath, 'utf-8'); - const transformedCode = await transformAsync(contents, { - plugins: [ - "@babel/plugin-transform-modules-commonjs", - "babel-plugin-add-module-exports", - "@babel/plugin-proposal-export-namespace-from", - ], - }); - fs.writeFileSync(filePath, transformedCode?.code ?? 
''); - })); -}; diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/browserStack.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/browserStack.ts deleted file mode 100644 index 0f7a06a6d..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/browserStack.ts +++ /dev/null @@ -1,213 +0,0 @@ -import path from 'path'; -import webdriver, { WebDriver, ThenableWebDriver, Builder, logging } from 'selenium-webdriver'; -import * as dotenv from 'dotenv'; -import { ROOT_DIR } from './constants'; -import { existsSync, readFileSync, writeFileSync } from 'fs-extra'; - -/**** - * Types - */ -export interface BrowserOption { - os?: string; - os_version: string; - browser?: string; - browser_version?: string; - device?: string; - real_mobile?: 'true'; - browserName?: string; - localhost?: string; -} - -export type FilterBrowserOption = (option: BrowserOption) => boolean; - -/**** - * Constants - */ - -const env = getEnv(); - -const browserOptions: Array = JSON.parse(readFileSync(path.resolve(__dirname, './browserStackOptions.json'), 'utf8')); -const mobileBrowserOptions: Array = JSON.parse(readFileSync(path.resolve(__dirname, './browserStackMobileOptions.json'), 'utf8')).map((option: BrowserOption[]) => ({ - "real_mobile": "true", - "localhost": "bs-local.com", - ...option, -})); - -const username = env.BROWSERSTACK_USERNAME; -const accessKey = env.BROWSERSTACK_ACCESS_KEY; - -const prefs = new logging.Preferences(); -prefs.setLevel(logging.Type.BROWSER, logging.Level.INFO); - -/**** - * Public Constants - */ -export const serverURL = `http://${username}:${accessKey}@hub-cloud.browserstack.com/wd/hub`; -export const DEFAULT_CAPABILITIES = { - 'build': env.BROWSERSTACK_BUILD_NAME, - 'project': env.BROWSERSTACK_PROJECT_NAME, - 'browserstack.local': true, - // 'browserstack.localIdentifier': process.env.BROWSERSTACK_LOCAL_IDENTIFIER, -} - -/**** - * Utility Functions - */ -function getEnv () { - const localEnvPath = path.resolve(ROOT_DIR, '.env') - if (existsSync(localEnvPath)) { - return { - ...process.env, - ...dotenv.parse(readFileSync(localEnvPath, 'utf-8')), - }; - } - - return process.env; -} - -function shouldPrintLogs (entry: logging.Entry, capabilities: BrowserOption) { - if (entry.message.includes('favicon')) { - return false; - } - - // if running in IE, it appears TFJS is already available? 
Ignore warnings - // about the TFJS backend already being registered - return entry.level.name !== 'WARNING' && capabilities?.browserName !== 'edge'; -} - - -/**** - * Public Functions - */ -export const getBrowserstackAccessKey = () => getEnv().BROWSERSTACK_ACCESS_KEY; - -export const getBrowserOptions = (filter?: FilterBrowserOption): Array => browserOptions.filter(filter || Boolean); - -export const getMobileBrowserOptions = (filter?: FilterBrowserOption): Array => mobileBrowserOptions.filter(filter || Boolean); - -type Capabilities = Parameters[0]; -export const getDriver = (capabilities: Capabilities, { verbose }: { verbose?: boolean } = {}): ThenableWebDriver => new webdriver.Builder() - .usingServer(serverURL) - .setLoggingPrefs(prefs) - .withCapabilities({ - ...DEFAULT_CAPABILITIES, - ...capabilities, - verbose, - }) - .build(); - -export const printLogs = async (driver: WebDriver, capabilities: BrowserOption, verbose = false) => { - if (capabilities?.browserName === 'firefox') { - if (capabilities?.os === 'windows') { - if (verbose) { - console.log('Not printing logs, because it is Windows Firefox') - } - // There is a bug with Firefox not supporting the get logs method on Windows - // https://stackoverflow.com/questions/59192232/selenium-trying-to-get-firefox-console-logs-results-in-webdrivererror-http-me - // console.log('** Firefox on Windows does not support logging') - return; - } - if (capabilities?.os === 'OS X') { - if (verbose) { - console.log('Not printing logs, because it is OS X Firefox') - } - // Firefox does not seem to support logging on OS X either - // https://github.com/mozilla/geckodriver/issues/1698 - // console.log('** Firefox on OS X does not support logging') - return; - } - } - - if (capabilities?.browserName === 'safari') { - if (verbose) { - console.log('Not printing logs, because it is Safari') - } - // It looks like Safari also does not support logging - // console.log('** Safari does not support logging') - return; - } - - const logs = await driver.manage().logs().get(logging.Type.BROWSER); - - if (verbose) { - console.log(`Got ${logs.length} logs`); - } - - for (const entry of logs) { - if (shouldPrintLogs(entry, capabilities)) { - console.log('LOG [%s] %s', entry.level.name, entry.message, capabilities); - } else if (verbose) { - console.log('Skipping log'); - } - } -} - -export const takeScreenshot = (driver: ThenableWebDriver, target: string) => new Promise((resolve) => { - driver.takeScreenshot().then(data => { - const base64Data = data.replace(/^data:image\/png;base64,/, ""); - writeFileSync(target, base64Data, 'base64'); - resolve(); - }); -}); - -export async function executeAsyncScript(driver: webdriver.WebDriver, fn: (args?: A) => T, args?: A, { - pollTime = 100, - timeout = 60 * 1000 * 5, -}: { - pollTime?: number; - timeout?: number; -} = {}): Promise { - const wait = (d: number) => new Promise(r => setTimeout(r, d)); - const localKey = `___result_${Math.random()}___`; - const errorKey = `___result_${Math.random()}___`; - const mainFn = new Function(` - const main = ${fn.toString()} - main(...arguments).then((result) => { - window['${localKey}'] = result; - }).catch(err => { - window['${errorKey}'] = err.message; - }); - `); - try { - driver.executeScript(mainFn, args); - } catch (err) { - if (err instanceof Error) { - throw new Error(`Error executing main script: ${err.message}`); - } else { - throw err; - } - } - let response: T | undefined; - let err: string | undefined; - const start = performance.now(); - while (!response && !err) { - 
-    if (performance.now() - start > timeout) {
-      throw new Error(`Failed to execute script after ${timeout} ms`);
-    }
-    try {
-      response = await driver.executeScript((localKey: string) => window[localKey], localKey);
-    } catch(err) {
-      console.error(`Error executing script (duration: ${performance.now() - start})`, err);
-    }
-    if (!response) {
-      err = await driver.executeScript((errorKey: string) => window[errorKey], errorKey);
-      if (err) {
-        console.log('An error was returned', err);
-        throw new Error(err);
-      }
-    }
-    await wait(pollTime);
-  }
-  if (!response) {
-    throw new Error('Bug with code');
-  }
-  return response;
-}
-
-// When checking for the errorKey or localKey variables on the window object above,
-// we need to declare that window can adopt any kind of variable
-declare global {
-  interface Window {
-    [index: string]: any; // skipcq: JS-0323
-  }
-}
diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/browserStackMobileOptions.json b/internals/upscaler-cli/src/lib/package-scripts/utils/browserStackMobileOptions.json
deleted file mode 100644
index 8d2cff916..000000000
--- a/internals/upscaler-cli/src/lib/package-scripts/utils/browserStackMobileOptions.json
+++ /dev/null
@@ -1,78 +0,0 @@
-[
-  {
-    "browserName": "Android",
-    "os_version": "12.0",
-    "device": "Samsung Galaxy S22"
-  },
-  {
-    "browserName": "Android",
-    "os_version": "12.0",
-    "device": "Samsung Galaxy S22 Ultra"
-  },
-  {
-    "browserName": "Android",
-    "os_version": "13.0",
-    "device": "Google Pixel 7"
-  },
-  {
-    "browserName": "Android",
-    "os_version": "12.0",
-    "device": "Google Pixel 6 Pro"
-  },
-  {
-    "browserName": "Android",
-    "os_version": "12.0",
-    "device": "Google Pixel 6"
-  },
-
-
-  {
-    "os_version": "16",
-    "device": "iPhone 14",
-    "browserName": "iPhone"
-  },
-  {
-    "os_version": "16",
-    "device": "iPhone 14 Pro Max",
-    "browserName": "iPhone"
-  },
-
-  {
-    "os_version": "15",
-    "device": "iPhone 13",
-    "browserName": "iPhone"
-  },
-  {
-    "os_version": "15",
-    "device": "iPhone 13 Pro Max",
-    "browserName": "iPhone"
-  },
-
-  {
-    "os_version": "16",
-    "device": "iPhone 12 Mini",
-    "browserName": "iPhone"
-  },
-  {
-    "os_version": "16",
-    "device": "iPhone 12 Pro Max",
-    "browserName": "iPhone"
-  },
-
-
-  {
-    "device": "iPad Pro 12.9 2021",
-    "os_version": "14",
-    "browserName": "iPad"
-  },
-  {
-    "device": "iPad Mini 2021",
-    "os_version": "15",
-    "browserName": "iPad"
-  },
-  {
-    "device": "iPad Air 4",
-    "os_version": "14",
-    "browserName": "iPad"
-  }
-]
diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/browserStackOptions.json b/internals/upscaler-cli/src/lib/package-scripts/utils/browserStackOptions.json
deleted file mode 100644
index b4bcc87ef..000000000
--- a/internals/upscaler-cli/src/lib/package-scripts/utils/browserStackOptions.json
+++ /dev/null
@@ -1,67 +0,0 @@
-[
-  {
-    "os": "windows",
-    "os_version": "11",
-    "browserName": "chrome",
-    "browser_version": "latest"
-  },
-  {
-    "os": "windows",
-    "os_version": "11",
-    "browserName": "firefox",
-    "browser_version": "latest"
-  },
-  {
-    "os": "windows",
-    "os_version": "11",
-    "browserName": "edge",
-    "browser_version": "latest"
-  },
-  {
-    "os": "OS X",
-    "os_version": "Big Sur",
-    "browserName": "chrome",
-    "browser_version": "latest"
-  },
-  {
-    "os": "OS X",
-    "os_version": "Big Sur",
-    "browserName": "firefox",
-    "browser_version": "latest"
-  },
-  {
-    "os": "OS X",
-    "os_version": "Big Sur",
-    "browserName": "safari",
-    "browser_version": "latest",
-    "localhost": "bs-local.com"
-  },
-  {
-    "browserName": "Android",
-    "os_version": "11.0",
-    "device": "Samsung Galaxy S21 Ultra",
-    "real_mobile": "true",
-    "localhost": "bs-local.com"
-  },
-  {
-    "browserName": "Android",
-    "os_version": "12.0",
-    "device": "Google Pixel 5",
-    "real_mobile": "true",
-    "localhost": "bs-local.com"
-  },
-  {
-    "os_version": "16",
-    "device": "iPhone 12 Pro Max",
-    "real_mobile": "true",
-    "browserName": "iPhone",
-    "localhost": "bs-local.com"
-  },
-  {
-    "os_version": "16",
-    "device": "iPhone 14 Pro Max",
-    "real_mobile": "true",
-    "browserName": "iPhone",
-    "localhost": "bs-local.com"
-  }
-]
diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/callExec.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/callExec.ts
deleted file mode 100644
index d06bd06ac..000000000
--- a/internals/upscaler-cli/src/lib/package-scripts/utils/callExec.ts
+++ /dev/null
@@ -1,38 +0,0 @@
-import { exec, ExecOptions } from 'child_process';
-
-export type StdOut = (chunk: string) => void;
-export type StdErr = (chunk: string) => void;
-
-export const callExec = (cmd: string, {
-  verbose = false,
-  ...options
-}: {
-  encoding?: 'buffer' | null;
-  verbose?: boolean;
-} & ExecOptions = {}, stdout?: StdOut | boolean, stderr?: StdErr | boolean): Promise<void> => new Promise((resolve, reject) => {
-  if (verbose) {
-    console.log(`Running command: ${cmd}`);
-  }
-  const spawnedProcess = exec(cmd, options, (error) => {
-    if (error) {
-      reject(error);
-    } else {
-      resolve();
-    }
-  });
-
-  if (stderr === true || verbose) {
-    spawnedProcess.stderr?.pipe(process.stderr);
-  } else if (stderr && typeof stderr !== 'boolean') {
-    spawnedProcess.stderr?.on('data', stderr);
-  }
-
-  if (stdout === undefined || stdout === true) {
-    spawnedProcess.stdout?.pipe(process.stdout);
-  } else if (stdout) {
-    spawnedProcess.stdout?.on('data', stdout);
-  }
-});
-
-export default callExec;
-
diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/compile.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/compile.ts
deleted file mode 100644
index d26c77f4e..000000000
--- a/internals/upscaler-cli/src/lib/package-scripts/utils/compile.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-import path from 'path';
-// import ts, { ProjectReference } from 'typescript';
-import callExec from './callExec.js';
-import { OutputFormat } from '../prompt/types.js';
-
-// export function _old_compile(rootNames: string[], options: ts.CompilerOptions, projectReferences?: Array<ProjectReference>) {
-//   let program = ts.createProgram({
-//     rootNames,
-//     options,
-//     projectReferences,
-//   });
-//   let emitResult = program.emit();
-
-//   let allDiagnostics = ts
-//     .getPreEmitDiagnostics(program)
-//     .concat(emitResult.diagnostics);
-
-//   allDiagnostics.forEach(diagnostic => {
-//     if (diagnostic.file) {
-//       let { line, character } = ts.getLineAndCharacterOfPosition(diagnostic.file, diagnostic.start!);
-//       let message = ts.flattenDiagnosticMessageText(diagnostic.messageText, "\n");
-//       console.log(`${diagnostic.file.fileName} (${line + 1},${character + 1}): ${message}`);
-//     } else {
-//       console.log(ts.flattenDiagnosticMessageText(diagnostic.messageText, "\n"));
-//     }
-//   });
-
-//   return emitResult;
-// };
-
-type CompileTypescriptOpts = {
-  outDir?: string;
-  verbose?: boolean;
-}
-type CompileTypescript = (modelFolder: string, outputFormat: OutputFormat, opts?: CompileTypescriptOpts) => Promise<void>;
-
-export const compileTypescript: CompileTypescript = (modelFolder: string, outputFormat: OutputFormat, {
-  outDir,
-  verbose,
-} = {}) => callExec([
-  'tsc',
-  '-p',
-  path.resolve(modelFolder,
`tsconfig.${outputFormat}.json`), - outDir ? `--outDir ${outDir}` : undefined, -].filter(Boolean).join(' '), { - verbose, - cwd: modelFolder, -}); diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/constants.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/constants.ts deleted file mode 100644 index 3214d8507..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/constants.ts +++ /dev/null @@ -1,21 +0,0 @@ -import path from 'path'; -import { fileURLToPath } from 'url'; - -const dirname = path.dirname(fileURLToPath(import.meta.url)); - -export const ROOT_DIR = path.resolve(dirname, '../../../../../../'); -export const MODELS_DIR = path.resolve(ROOT_DIR, 'models'); -export const EXAMPLES_DIR = path.resolve(ROOT_DIR, 'examples'); -export const TEST_DIR = path.resolve(ROOT_DIR, 'test'); -export const FIXTURES_DIR = path.resolve(TEST_DIR, '__fixtures__'); - -export const DOCS_DIR = path.resolve(ROOT_DIR, 'docs'); -export const ASSETS_DIR = path.resolve(DOCS_DIR, 'assets/assets'); - -export const TMP_DIR = path.resolve(ROOT_DIR, 'tmp'); -export const DEV_DIR = path.resolve(ROOT_DIR, 'dev'); -export const PACKAGES_DIR = path.resolve(ROOT_DIR, 'packages'); - -export const UPSCALER_DIR = path.resolve(PACKAGES_DIR, 'upscalerjs'); -export const SHARED_DIR = path.resolve(PACKAGES_DIR, 'shared'); -export const WRAPPER_DIR = path.resolve(PACKAGES_DIR, 'upscalerjs-wrapper'); diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/execute.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/execute.ts deleted file mode 100644 index 78f668e0d..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/execute.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { exec } from 'child_process'; -const execute = (cmd: string, { cwd, }: { cwd?: string } = {}): Promise => new Promise((resolve, reject) => { - const spawnedProcess = exec(cmd, { - cwd, - }, (error: Error | null) => { - if (error) { - reject(error.message); - } else { - resolve(); - } - }); - spawnedProcess.stderr?.pipe(process.stderr); - spawnedProcess.stdout?.pipe(process.stdout); -}) -export default execute; - diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/getAllAvailableModels.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/getAllAvailableModels.ts deleted file mode 100644 index 8204f08e2..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/getAllAvailableModels.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { readdirSync, lstatSync, readFileSync, existsSync } from 'fs-extra'; -import path from 'path'; -import { getPackageJSONExports, PackageJSONExport } from './getPackageJSONExports'; - -const ROOT = path.resolve(__dirname, '../../../'); -const MODELS_DIR = path.resolve(ROOT, 'models'); -const EXCLUDED = ['dist', 'types', 'node_modules', 'docs']; - -const jsonParse = (fileName: string) => JSON.parse(readFileSync(fileName, 'utf-8')) - -export const getAllAvailableModelPackages = (includeExperimental = false): Array => readdirSync(MODELS_DIR).filter(file => { - const modelDir = path.resolve(MODELS_DIR, file); - if (EXCLUDED.includes(file) || !lstatSync(modelDir).isDirectory()) { - return false; - } - - const packageJSONPath = path.resolve(modelDir, 'package.json'); - - if (!existsSync(packageJSONPath)) { - return false; - } - - if (includeExperimental === false) { - const packageJSON = JSON.parse(readFileSync(packageJSONPath, 'utf-8')); - const experimental = packageJSON['@upscalerjs']?.['model']?.['experimental']; - return experimental !== true; - } - 
- return true; -}); - -export interface AvailableModel { - export: string; - esm: string; - cjs: string; - umd: string; - pathName: string | PackageJSONExport; -} - -export const getAllAvailableModels = (packageName: string): AvailableModel[] => { - const modelPackageDir = path.resolve(MODELS_DIR, packageName); - const umdNames = jsonParse(path.resolve(modelPackageDir, 'umd-names.json')); - return getPackageJSONExports(modelPackageDir).map(([key, pathName]) => { - const umdName = umdNames[key]; - if (umdName === undefined) { - throw new Error(`No UMD name defined for ${packageName}/umd-names.json for ${key}`); - } - const availableModel: AvailableModel = { - export: key, - esm: key.substring(2), - cjs: key.substring(2), - umd: umdName, - pathName, - }; - return availableModel; - }); -}; - -export const getFilteredModels = ({ - specificModel, - specificPackage, - filter = (_packageName, _model) => true, - includeExperimental = false, -}: { - specificPackage?: string; - specificModel?: string; - filter?: (packageName: string, model: AvailableModel) => boolean; - includeExperimental?: boolean; -} = {}): [string, AvailableModel[]][] => { - const filteredPackagesAndModels = getAllAvailableModelPackages(includeExperimental).reduce((arr, packageName) => { - const models = getAllAvailableModels(packageName); - return arr.concat(models.map(model => { - return [packageName, model]; - })); - }, [] as ([string, AvailableModel])[]) - .filter(([packageName]) => { - if (specificPackage !== undefined) { - return packageName === specificPackage; - } - return true; - }) - .filter(([_, model]) => { - if (specificModel !== undefined) { - return model.esm === specificModel; - } - return true; - }) - .filter(([packageName, model]) => { - return filter(packageName, model); - }); - if (filteredPackagesAndModels.length === 0) { - const allPackages = getAllAvailableModelPackages().map(packageName => { - return [ - `- ${packageName}`, - ...getAllAvailableModels(packageName).map(m => ` - ${m.esm}`), - ].join('\n'); - }); - throw new Error([ - 'No models were found for filter', - 'Available models:', - ...allPackages, - ].join('\n')); - } - - const filteredPackagesAndModelsObj = filteredPackagesAndModels.reduce>((obj, [packageName, model]) => ({ - ...obj, - [packageName]: (obj[packageName] || []).concat([model]), - }), {}); - - return Object.entries(filteredPackagesAndModelsObj); -}; diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/getAllFilesRecursively.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/getAllFilesRecursively.ts deleted file mode 100644 index bbf69fad9..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/getAllFilesRecursively.ts +++ /dev/null @@ -1,17 +0,0 @@ -import path from 'path'; -import fs from 'fs'; - -type Filter = (file: string) => boolean; - -export const getAllFilesRecursively = (directory: string, filter: Filter = (_file) => true): string[] => { - return fs.readdirSync(directory).map(file => path.resolve(directory, file)).reduce((arr, file) => { - const stat = fs.statSync(file); - if (stat?.isDirectory()) { - return arr.concat(getAllFilesRecursively(file, filter)); - } - if (filter(file)) { - return arr.concat([file]); - } - return arr; - }, [] as string[]) -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/getHashedName.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/getHashedName.ts deleted file mode 100644 index 52cb11a3e..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/getHashedName.ts +++ 
/dev/null @@ -1,3 +0,0 @@ -import crypto from 'crypto'; - -export const getHashedName = (contents: string) => crypto.createHash('md5').update(contents).digest('hex'); diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/getPackageJSONExports.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/getPackageJSONExports.ts deleted file mode 100644 index 7dc031fe8..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/getPackageJSONExports.ts +++ /dev/null @@ -1,50 +0,0 @@ -import path from 'path'; -import fs from 'fs'; - -const shouldIncludeExportName = (_exportName: string) => { - throw new Error('Not implemented'); - /* - if (exportName === '.') { - return false; - } - - // TODO: Rethink whether we should deselect any node or node-gpu exports. - // It seems like the exports field is doing double duty. - if (exportName.endsWith('node') || exportName.endsWith('node-gpu')) { - return false; - } - - // return true; - */ -} - - -export type PackageJSONExport = string | { - require: string; - import: string; -}; - -const isPackageJSONExports = (exports: unknown): exports is { - [index: string]: PackageJSONExport; -} => { - if (typeof exports !== 'object' || exports === null) { - return false; - }; - return Object.entries(exports).reduce((isValid, [_exportName, exportValue]) => { - return isValid === false ? false : typeof exportValue === 'string' || (typeof exportValue === 'object' && 'require' in exportValue && 'import' in exportValue); - }, true); -} - -export const getPackageJSONExports = (modelFolder: string): Array<[string, PackageJSONExport]> => { - const packageJSONPath = path.resolve(modelFolder, 'package.json'); - const packageJSON = fs.readFileSync(packageJSONPath, 'utf8'); - const { exports } = JSON.parse(packageJSON); - if (isPackageJSONExports(exports)) { - const entries = Object.entries(exports); - if (entries.length === 1) { - return entries; - } - return entries.filter(([exportName]) => shouldIncludeExportName(exportName)); - } - throw new Error(`Invalid exports field in package json for ${modelFolder}}: ${JSON.stringify(exports)}`); -}; diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/isValidVersion.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/isValidVersion.ts deleted file mode 100644 index c58d50596..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/isValidVersion.ts +++ /dev/null @@ -1,23 +0,0 @@ -const splitParts = (version: string) => { - try { - const firstPart = version.split('-')[0]; - return firstPart.split("."); - } catch(err) { - console.error(`Could not split version ${version}`); - throw err; - } -} -export default (version: string) => { - const parts = splitParts(version); - if (parts.length !== 3) { - return false; - } - for (let i = 0; i < 3; i++) { - try { - parseInt(parts[i], 10); - } catch(err) { - return false; - } - } - return true; -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/packages.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/packages.ts deleted file mode 100644 index d2a83e2d0..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/packages.ts +++ /dev/null @@ -1,105 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import findAllPackages from '../find-all-packages.js'; -import { JSONSchemaForNPMPackageJsonFiles } from '@schemastore/package'; -import { DEV_DIR, DOCS_DIR, EXAMPLES_DIR, MODELS_DIR, PACKAGES_DIR, ROOT_DIR, TEST_DIR, UPSCALER_DIR, WRAPPER_DIR } from './constants.js'; - -interface FakeExports { - [index: 
string]: string | FakeExports; -} - -export type JSONSchema = JSONSchemaForNPMPackageJsonFiles & { - exports: FakeExports; -}; - -export type Package = 'UpscalerJS' | 'Models' | 'Test' | 'Examples' | 'Root' | 'Wrapper' | 'Dev' | 'Packages'; -export type TransformPackageJsonFn = (packageJSON: JSONSchema, dir: string) => JSONSchema; -export type PackageUpdaterLogger = (file: string) => (string | undefined); - -export const UPSCALER_JS = 'UpscalerJS'; -export const ROOT = 'Root'; -export const WRAPPER = 'Wrapper'; -export const EXAMPLES = 'Examples'; -export const MODELS = 'Models'; -export const TEST = 'Test'; -export const DEV = 'Development'; -export const DOCS = 'Docs'; -export const PACKAGES = 'Packages'; - -export const AVAILABLE_PACKAGES = [ ROOT, UPSCALER_JS, MODELS, EXAMPLES, TEST, WRAPPER, DEV, DOCS, PACKAGES ]; - -export const DIRECTORIES: Record = { - [ROOT]: { directory: ROOT_DIR }, - [UPSCALER_JS]: { directory: UPSCALER_DIR }, - [MODELS]: { directory: MODELS_DIR, multiple: true }, - [EXAMPLES]: { directory: EXAMPLES_DIR, multiple: true }, - [TEST]: { directory: TEST_DIR, multiple: true }, - [WRAPPER]: { directory: WRAPPER_DIR }, - [DEV]: { directory: DEV_DIR, multiple: true }, - [DOCS]: { directory: DOCS_DIR, multiple: true }, - [PACKAGES]: { directory: PACKAGES_DIR, multiple: true }, -} - -export const getPreparedFolderName = (file: string) => { - return file.split(`${ROOT_DIR}/`).pop(); -}; - -export const getPackageJSONPath = (file: string) => { - if (file.endsWith('package.json')) { - return file; - } - return path.resolve(file, 'package.json'); -} - -export const writePackageJSON = (file: string, contents: unknown) => { - const stringifiedContents = `${JSON.stringify(contents, null, 2)}\n`; - fs.writeFileSync(getPackageJSONPath(file), stringifiedContents); -}; - -export const getPackageJSON = (file: string): JSONSchema => JSON.parse(fs.readFileSync(getPackageJSONPath(file), 'utf-8')); - -const defaultTransform: TransformPackageJsonFn = (packageJSON) => packageJSON; - -const defaultLogger: PackageUpdaterLogger = (_file: string) => undefined; - -export const updateSinglePackage = (dir: string, transform: TransformPackageJsonFn = defaultTransform, logger: PackageUpdaterLogger = defaultLogger) => { - const packageJSON = getPackageJSON(dir); - writePackageJSON(dir, transform(packageJSON, dir)); - const message = logger(dir); - if (message) { - console.log(message); - } -}; - -export const updateMultiplePackages = (dir: string, transform: TransformPackageJsonFn = defaultTransform, logger: PackageUpdaterLogger = defaultLogger) => { - const packages = findAllPackages(dir, [path.resolve(DOCS_DIR, 'assets')]); - for (const pkgName of packages) { - const pkg = path.resolve(ROOT_DIR, pkgName); - updateSinglePackage(pkg, transform, logger); - } -}; - -export const getPackageJSONValue = (packageJSON: JSONSchema, depKey: string) => { - return depKey.split('.').reduce((json, key) => json?.[key], packageJSON); -} - -type Value = JSONSchema[keyof JSONSchema]; -export const updatePackageJSONForKey = (packageJSON: JSONSchema, key: string, val: Value): JSONSchema => { - return getObj(packageJSON, key.split('.'), val); -} - -function getObj>(obj: T, parts: string[], val: Value): T { - if (parts.length === 1) { - return { - ...obj, - [parts[0]]: { - ...obj[parts[0]], - ...val, - } - }; - } - return { - ...obj, - [parts[0]]: getObj(obj[parts[0]], parts.slice(1), val), - } -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/rollup.ts 
b/internals/upscaler-cli/src/lib/package-scripts/utils/rollup.ts deleted file mode 100644 index 5acfb1948..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/rollup.ts +++ /dev/null @@ -1,93 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { OutputOptions, rollup, RollupBuild, RollupOptions } from 'rollup'; - -export async function rollupBuild(inputOptions: RollupOptions, outputOptionsList: Array, dist: string) { - let bundle: RollupBuild | undefined; - let buildFailed = false; - try { - // create a bundle - bundle = await rollup({ - ...inputOptions, - onwarn: (warning) => { - if (warning.code === 'MIXED_EXPORTS') { - throw new Error(warning.message); - } else { - console.warn(warning); - throw new Error(warning.message); - } - } - }); - - // // an array of file names this bundle depends on - // console.log(bundle.watchFiles); - - await generateOutputs(bundle, outputOptionsList, dist); - } catch (error) { - buildFailed = true; - // do some error reporting - console.error(error); - } - if (bundle) { - // closes the bundle - await bundle.close(); - } - if (buildFailed) { - throw new Error('build failed'); - } -} - -async function generateOutputs(bundle: RollupBuild, outputOptionsList: Array, dist: string) { - for (const outputOptions of outputOptionsList) { - // generate output specific code in-memory - // you can call this function multiple times on the same bundle object - // replace bundle.generate with bundle.write to directly write to disk - const { output } = await bundle.generate(outputOptions); - - for (const chunkOrAsset of output) { - // console.log('chunk or asset', chunkOrAsset) - if (chunkOrAsset.type === 'asset') { - // For assets, this contains - // { - // fileName: string, // the asset file name - // source: string | Uint8Array // the asset source - // type: 'asset' // signifies that this is an asset - // } - // fs.writeFileSync(fileN) - console.log('Asset', chunkOrAsset); - } else { - // For chunks, this contains - // { - // code: string, // the generated JS code - // dynamicImports: string[], // external modules imported dynamically by the chunk - // exports: string[], // exported variable names - // facadeModuleId: string | null, // the id of a module that this chunk corresponds to - // fileName: string, // the chunk file name - // implicitlyLoadedBefore: string[]; // entries that should only be loaded after this chunk - // imports: string[], // external modules imported statically by the chunk - // importedBindings: {[imported: string]: string[]} // imported bindings per dependency - // isDynamicEntry: boolean, // is this chunk a dynamic entry point - // isEntry: boolean, // is this chunk a static entry point - // isImplicitEntry: boolean, // should this chunk only be loaded after other chunks - // map: string | null, // sourcemaps if present - // modules: { // information about the modules in this chunk - // [id: string]: { - // renderedExports: string[]; // exported variable names that were included - // removedExports: string[]; // exported variable names that were removed - // renderedLength: number; // the length of the remaining code in this module - // originalLength: number; // the original length of the code in this module - // code: string | null; // remaining code in this module - // }; - // }, - // name: string // the name of this chunk as used in naming patterns - // referencedFiles: string[] // files referenced via import.meta.ROLLUP_FILE_URL_ - // type: 'chunk', // signifies that this is a chunk - // } - // 
console.log(chunkOrAsset.fileName) - // console.log(chunkOrAsset.code) - fs.writeFileSync(path.resolve(dist, chunkOrAsset.fileName), chunkOrAsset.code, 'utf-8'); - // console.log('Chunk', chunkOrAsset.modules); - } - } - } -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/runDocker.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/runDocker.ts deleted file mode 100644 index 4c3e38d6d..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/runDocker.ts +++ /dev/null @@ -1,21 +0,0 @@ -import callExec from "../../../test/lib/utils/callExec"; - -interface Volume { - external: string; - internal: string; - -} -interface DockerOptions { - volumes?: Volume[]; -} -type RunDocker = (dockerImage: string, cmd: string, opts?: DockerOptions) => Promise; -export const runDocker: RunDocker = (dockerImage, cmd, { volumes = [] } = {}) => { - return callExec([ - "docker run --rm", - ...volumes.map(({ internal, external }) => { - return `-v "${external}:${internal}"`; - }), - dockerImage, - cmd, - ].join(' ')); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/uglify.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/uglify.ts deleted file mode 100644 index bac898224..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/uglify.ts +++ /dev/null @@ -1,14 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import Uglify from 'uglify-js'; -// const Uglify = require('uglify-js'); - -export const uglify = (folder: string, filename: string) => { - const filenameWithoutExt = filename.split('.').slice(0, -1).join('.'); - const { code, map } = Uglify.minify(fs.readFileSync(path.resolve(folder, `${filenameWithoutExt}.js`), 'utf-8'), { - sourceMap: true, - // comments: true, - }); - fs.writeFileSync(path.resolve(folder, `${filenameWithoutExt}.min.js`), code, 'utf-8'); - fs.writeFileSync(path.resolve(folder, `${filenameWithoutExt}.min.js.map`), map, 'utf-8'); -} diff --git a/internals/upscaler-cli/src/lib/package-scripts/utils/withTmpDir.ts b/internals/upscaler-cli/src/lib/package-scripts/utils/withTmpDir.ts deleted file mode 100644 index 0d773f4f3..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/utils/withTmpDir.ts +++ /dev/null @@ -1,40 +0,0 @@ -import path from 'path'; -import { sync as rimraf } from 'rimraf'; -import fsExtra from 'fs-extra'; -import { getHashedName } from './getHashedName.js'; -import { TMP_DIR } from './constants.js'; -const { existsSync, mkdirpSync } = fsExtra; - -export const makeTmpDir = (root = TMP_DIR): string => { - const hashedName = getHashedName(`${Math.random()}`); - const folder = path.resolve(root, hashedName); - mkdirpSync(folder); - if (!existsSync(folder)) { - throw new Error(`Tmp directory ${folder} was not created`); - } - return folder; -}; - -interface WithTmpDirOpts { - rootDir?: string; - removeTmpDir?: boolean; -} -type WithTmpDir = (callback: WithTmpDirFn, opts?: WithTmpDirOpts) => (Promise | void); -type WithTmpDirFn = (tmp: string) => Promise; -export const withTmpDir: WithTmpDir = async (callback, { rootDir, removeTmpDir = true } = {}) => { - const tmpDir = makeTmpDir(rootDir); - - try { - await callback(tmpDir); - } - finally { - try { - if (removeTmpDir) { - rimraf(tmpDir); - } - } - catch (e) { - console.error(`An error has occurred while removing the temp folder at ${tmpDir}. Please remove it manually. 
Error: ${e}`); - } - } -}; diff --git a/internals/upscaler-cli/src/lib/package-scripts/validate-build.ts b/internals/upscaler-cli/src/lib/package-scripts/validate-build.ts deleted file mode 100644 index 59ac4657c..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/validate-build.ts +++ /dev/null @@ -1,124 +0,0 @@ -import path from 'path'; -import yargs from 'yargs'; -import fs from 'fs'; -import { getPackageJSON, JSONSchema } from './utils/packages'; -import { sync } from 'glob'; - -const ROOT = path.resolve(__dirname, '../..'); - -/**** - * Utility methods - */ -const getKeysOfObj = (json: JSONSchema, keys: string[]): Partial => { - return keys.reduce((obj, jsonKey) => { - if (json[jsonKey]) { - return { - ...obj, - [jsonKey]: json[jsonKey], - } - }; - return obj; - }, {}); -}; -const getObjAsArray = (obj: Partial): string[] => { - return Object.values(obj).reduce((arr, file) => { - if (typeof file === 'string') { - return arr.concat(file); - } - return arr.concat(getObjAsArray(file)); - }, [] as string[]); -}; - -export const extractAllFilesFromPackageJSON = (packagePath: string): string[] => { - const packageJSON = getPackageJSON(packagePath); - return getObjAsArray(getKeysOfObj(packageJSON, [ - 'exports', - 'main', - 'module', - 'types', - 'umd:main', - ])); -}; - -/**** - * Main function - */ - -const validateBuild = (packageName: string, include: string[] = [], { - includeFilesFromPackageJSON = true, -}: { - includeFilesFromPackageJSON?: boolean; -} = { }): Set => { - const packagePath = path.resolve(ROOT, packageName); - const files = new Set([ - ...(includeFilesFromPackageJSON ? extractAllFilesFromPackageJSON(packagePath) : []), - ...include, - ].map(file => path.resolve(packagePath, file))); - const packageDistPath = path.resolve(packagePath, 'dist'); - files.forEach(file => { - if (!fs.existsSync(path.resolve(packageDistPath, file))) { - const existingFiles: string[] = sync(path.resolve(packageDistPath, '**/*')); - console.log('files that we checked', files); - throw new Error([ - `File ${file} was not built or does not exist.`, - `Existing files include: \n${existingFiles.map(f => ` - ${f}`).join('\n')}`, - `Files we are checking include: \n${Array.from(files).map(f => ` - ${f}`).join('\n')}`, - ].join('\n')); - } - }); - return files; -}; - -export default validateBuild; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Args { - src: string; - include?: string[]; -} - -const isValidStringArray = (arr: unknown): arr is string[] => Array.isArray(arr) && typeof arr[0] === 'string'; - -const getArgs = async (): Promise => { - const argv = await yargs.command('validate-build [platform]', 'validate a build', yargs => { - yargs.positional('src', { - describe: 'The package to validate', - }).options({ - include: { alias: 'c', type: 'string', demandOption: true }, - }).nargs('include', 0); - }) - .help() - .argv; - - const src = argv['_'][0]; - if (typeof src !== 'string') { - throw new Error('Invalid src'); - } - - const include = argv.c; - if (include !== undefined && !isValidStringArray(include)) { - throw new Error('Is not a valid array') - } - - return { - src, - include: include as string[] | undefined, - } -} - -if (require.main === module) { - (async () => { - const argv = await getArgs(); - const checkedFiles = Array.from(validateBuild(argv.src, argv.include)); - console.log([ - 'The following files are present: ', - ...checkedFiles.map(file => { - return ` - ${file}`; - }), - ].join('\n')) - })(); -} - diff --git 
a/internals/upscaler-cli/src/lib/package-scripts/write-model-docs.ts b/internals/upscaler-cli/src/lib/package-scripts/write-model-docs.ts deleted file mode 100644 index 22126fbb2..000000000 --- a/internals/upscaler-cli/src/lib/package-scripts/write-model-docs.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { existsSync, readdirSync, readFile, readFileSync, writeFile } from 'fs-extra'; -import path from 'path'; -import yargs from 'yargs'; -import { ifDefined as _ifDefined } from './prompt/ifDefined'; -import { AVAILABLE_MODELS, getModel } from './prompt/getModel'; -import { SHARED_DIR, MODELS_DIR } from './utils/constants'; -import { getPackageJSON, JSONSchema } from './utils/packages'; - -/*** - * Types - */ - -interface Opts { - verbose?: boolean; -} - -/**** - * Utility functions - */ - -const getModelFamily = (packageJSON: JSONSchema) => { - return packageJSON['@upscalerjs']?.['modelFamily']; -}; - -const getSharedDoc = async (modelFamily: string) => { - const sharedDoc = path.resolve(SHARED_DIR, 'src', modelFamily, 'DOC.mdx'); - if (!existsSync(sharedDoc)) { - throw new Error(`File does not exist: ${sharedDoc}`) - } - return await readFile(sharedDoc, 'utf-8'); -}; - -const getSnippets = (model: string): Record => { - const snippets: Record = {}; - const docSnippetsPath = path.resolve(MODELS_DIR, model, 'doc-snippets'); - if (!existsSync(docSnippetsPath)) { - throw new Error(`doc snippets folder does not exist at "${docSnippetsPath}"`) - } - const snippetPaths = readdirSync(docSnippetsPath); - - for (const snippetPath of snippetPaths) { - const snippet = readFileSync(path.resolve(docSnippetsPath, snippetPath), 'utf-8') ?? ''; - const snippetKey = snippetPath.split('.').slice(0, -1).join('.'); - if (typeof snippetKey !== 'string') { - throw new Error(`Bad snippet key: ${snippetKey}`) - } - snippets[`snippets/${snippetKey}`] = snippet.trim(); - } - return snippets; - -} - -const getPackageJSONArgs = (model: string, packageJSON: JSONSchema): Record => { - const name = packageJSON.name; - if (!name) { - throw new Error(`No name defined for packageJSON for model ${model}`); - } - return { - key: name.split("@upscalerjs/").pop() ?? '', - description: `Overview of @upscalerjs/${model} model`, - title: packageJSON['@upscalerjs']?.title, - ...getSnippets(model) - }; -}; - -const getKey = (match: string) => match.match(/<%(.*)%>/)?.[1].trim(); - -const getPreparedDoc = async (model: string, { verbose }: Opts) => { - const packageJSON = getPackageJSON(path.resolve(MODELS_DIR, model, 'package.json')); - const modelFamily = getModelFamily(packageJSON); - if (!modelFamily) { - throw new Error(`No explicit model family defined in package JSON: ${model}`) - } - - const sharedDoc = await getSharedDoc(modelFamily); - const args = getPackageJSONArgs(model, packageJSON); - const matches = sharedDoc.matchAll(/<%.+?%>/g); - const chunks: string[] = []; - let start = 0; - for (const match of matches) { - const key = getKey(match[0]); - if (key === undefined) { - throw new Error(`An undefined key was returned from the match "${match[0]}" for model ${model}`); - } else if (!(key in args)) { - throw new Error(`Key "${key}" for model family ${modelFamily} and model ${model} was not found in args. Did you mean to prepend it with 'snippets/'? Args is: ${JSON.stringify(args, null, 2)}}`); - } else if (typeof args[key] !== 'string') { - throw new Error(`Key "${key}" for model family ${modelFamily} and model ${model} is not a string, it is: ${typeof args[key]}`) - } else { - const matchStart = match?.index ?? 
0; - const matchEnd = matchStart + (match[0]?.length ?? 0); - - chunks.push(sharedDoc.slice(start, matchStart)); - chunks.push(args[key]) - start = matchEnd; - - if (verbose) { - console.log( - `Found ${match[0]} (${key}) start=${match?.index} end=${(match?.index ?? 0) + match[0]?.length - }.`, - ); - } - } - } - chunks.push(sharedDoc.slice(start)); - return chunks.join(''); -} - -/**** - * Main function - */ - -const writeModelDocs = async ( - models: Array = AVAILABLE_MODELS, - { - verbose = false, - }: Opts = {}, -) => { - if (models.length === 0) { - console.log('No models selected, nothing to do.') - return; - } - - await Promise.all(models.map(async model => { - const updatedDoc = await getPreparedDoc(model, { verbose }); - const targetPath = path.resolve(MODELS_DIR, model, 'DOC.mdx'); - - await readFile(targetPath, 'utf-8'); - - await writeFile(targetPath, updatedDoc); - })); -} - -export default writeModelDocs; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Answers extends Opts { - models: Array; -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('build models', 'build models', yargs => { - yargs.positional('model', { - describe: 'The model to build', - array: true, - }).option('v', { - alias: 'verbose', - type: 'boolean', - }); - }) - .help() - .argv; - - const models = await getModel(argv._, argv.model); - - function ifDefined(key: string, type: string) { return _ifDefined(argv, key, type) as T; } - - return { - models, - verbose: ifDefined('v', 'boolean'), - } -} - -if (require.main === module) { - (async () => { - const { models, ...opts } = await getArgs(); - await writeModelDocs(models, opts); - })(); -} diff --git a/internals/upscaler-cli/src/lib/utils/find-similar-files.test.ts b/internals/upscaler-cli/src/lib/utils/find-similar-files.test.ts deleted file mode 100644 index e5e9a7ef6..000000000 --- a/internals/upscaler-cli/src/lib/utils/find-similar-files.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { findSimilarFiles } from './find-similar-files'; - -describe("findSimilarFiles", () => { - it('finds similar files', () => { - expect(findSimilarFiles(['foo', 'bar', 'baz'], 'ba')).toEqual(['bar', 'baz', 'foo']); - }); - - it('can limit to an n', () => { - expect(findSimilarFiles(['foo', 'bar', 'baz'], 'ba', { n: 1 })).toEqual(['bar']); - }); - - it('can limit to a threshold distance', () => { - expect(findSimilarFiles(['foo', 'bar', 'baz', 'baaa'], 'ba', { n: 3, distance: 2, })).toEqual(['bar', 'baz']); - }); - - it('sorts the files correctly', () => { - expect(findSimilarFiles(['former', 'fogey', 'foo'], 'fo')).toEqual(['foo', 'fogey', 'former']); - }); -}); - diff --git a/internals/upscaler-cli/src/lib/utils/find-similar-files.ts b/internals/upscaler-cli/src/lib/utils/find-similar-files.ts deleted file mode 100644 index 9a95730a0..000000000 --- a/internals/upscaler-cli/src/lib/utils/find-similar-files.ts +++ /dev/null @@ -1,35 +0,0 @@ -import levenshtein from 'fast-levenshtein'; - -/** - * For a particular query (e.g., one that is misspelled), - * find most similar files from an array - */ -export const findSimilarFiles = ( - files: string[], - query: string, - { - n, - distance: threshold, - }: { - n?: number, - distance?: number, - } = {} -): string[] => { - const filesWithDistance = files.reduce<{ file: string; distance: number; }[]>((arr, file) => { - const distance = levenshtein.get(query, file); - if (threshold === undefined || distance < threshold) { - return arr.concat({ - file, - distance, - }); - } - 
- return arr; - }, []); - - const sortedFiles = filesWithDistance.sort((a, b) => { - return a.distance - b.distance; - }); - - return sortedFiles.slice(0, n).map(({ file }) => file); -}; diff --git a/internals/upscaler-cli/src/lib/utils/get-all-directories.test.ts b/internals/upscaler-cli/src/lib/utils/get-all-directories.test.ts deleted file mode 100644 index b2873b797..000000000 --- a/internals/upscaler-cli/src/lib/utils/get-all-directories.test.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { vi } from 'vitest'; -import fsExtra from "fs-extra"; -import { getAllDirectories } from './get-all-directories'; -const { readdir, stat } = fsExtra; - -vi.mock('fs-extra', () => { - return { - default: { - readdir: vi.fn(), - stat: vi.fn(), - }, - } -}); - -describe("getAllDirectories", () => { - afterEach(() => { - vi.resetAllMocks(); - }); - - it('returns only directories', async () => { - const mockReadDir = () => Promise.resolve([ - 'foo-dir', - 'bar-dir', - 'foo-non-dir', - 'bar-non-dir', - 'baz-dir', - 'baz-non-dir', - ]); - const mockStat = (name: string) => Promise.resolve({ - isDirectory: () => { - return name.endsWith('non-dir') === false; - }, - }); - - vi.mocked(readdir).mockImplementation(mockReadDir as unknown as typeof readdir); - vi.mocked(stat).mockImplementation(mockStat as unknown as typeof stat); - - expect(await getAllDirectories('foo')).toEqual([ - 'foo-dir', - 'bar-dir', - 'baz-dir', - ]); - }); -}); diff --git a/internals/upscaler-cli/src/lib/utils/get-all-directories.ts b/internals/upscaler-cli/src/lib/utils/get-all-directories.ts deleted file mode 100644 index caca2e6d8..000000000 --- a/internals/upscaler-cli/src/lib/utils/get-all-directories.ts +++ /dev/null @@ -1,15 +0,0 @@ -import path from 'path'; -import fsExtra from "fs-extra"; -const { readdir, stat } = fsExtra; - -export const getAllDirectories = async (rootDir: string) => { - const directories: string[] = []; - const files = await readdir(rootDir); - await Promise.all(files.map(async file => { - const stats = await stat(path.resolve(rootDir, file)); - if (stats.isDirectory()) { - directories.push(file); - } - })); - return directories; -}; diff --git a/internals/upscaler-cli/src/lib/utils/get-tfjs-library-target.test.ts b/internals/upscaler-cli/src/lib/utils/get-tfjs-library-target.test.ts deleted file mode 100644 index a6040b1a6..000000000 --- a/internals/upscaler-cli/src/lib/utils/get-tfjs-library-target.test.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { vi } from 'vitest'; -import fsExtra from "fs-extra"; -import { TFJS_LIBRARY_TARGET_ERROR, getTFJSLibraryTarget } from './get-tfjs-library-target'; -const { readFile } = fsExtra; - -vi.mock('fs-extra', () => { - return { - default: { - readFile: vi.fn(), - }, - } -}); - -describe('getTFJSLibraryTarget', () => { - afterEach(() => { - vi.resetAllMocks(); - }); - - const makeMock = (dependencies: Record) => { - vi.mocked(readFile).mockImplementation(() => Promise.resolve(Buffer.from(JSON.stringify({ - dependencies, - })))); - } - - it('loads the correct package json from the right directory', async () => { - makeMock({ - '@tensorflow/tfjs': '1.0.0', - }); - expect(readFile).toHaveBeenCalledTimes(0); - expect(await getTFJSLibraryTarget('foo')).toBe('browser'); - expect(readFile).toHaveBeenCalledWith(expect.stringContaining('foo/package.json'), expect.anything()); - }); - - it('returns browser for @tensorflow/tfjs', async () => { - makeMock({ - '@tensorflow/tfjs': '1.0.0', - }); - expect(await getTFJSLibraryTarget('foo')).toBe('browser'); - }); - - it('returns node for 
@tensorflow/tfjs-node', async () => { - makeMock({ - '@tensorflow/tfjs-node': '1.0.0', - }); - expect(await getTFJSLibraryTarget('foo')).toBe('node'); - }); - - it('returns node-gpu for @tensorflow/tfjs-node-gpu', async () => { - makeMock({ - '@tensorflow/tfjs-node-gpu': '1.0.0', - }); - expect(await getTFJSLibraryTarget('foo')).toBe('node-gpu'); - }); - - it('throws if no dependencies are found', async () => { - makeMock({ - }); - await expect(() => getTFJSLibraryTarget('foo')).rejects.toThrow(TFJS_LIBRARY_TARGET_ERROR('foo')); - }); -}); diff --git a/internals/upscaler-cli/src/lib/utils/get-tfjs-library-target.ts b/internals/upscaler-cli/src/lib/utils/get-tfjs-library-target.ts deleted file mode 100644 index a675d6730..000000000 --- a/internals/upscaler-cli/src/lib/utils/get-tfjs-library-target.ts +++ /dev/null @@ -1,22 +0,0 @@ -import path from 'path'; -import fsExtra from "fs-extra"; -const { readFile } = fsExtra; - -type TFJSLibraryTarget = 'browser' | 'node' | 'node-gpu'; - -export const TFJS_LIBRARY_TARGET_ERROR = (dir: string) => new Error(`Could not determine valid TFJS dependency in directory ${dir}`); - -export const getTFJSLibraryTarget = async (dir: string): Promise => { - const packageJSON = JSON.parse(await readFile(path.resolve(dir, 'package.json'), 'utf8')); - const deps = Object.keys(packageJSON.dependencies); - if (deps.includes('@tensorflow/tfjs')) { - return 'browser'; - } else if (deps.includes('@tensorflow/tfjs-node')) { - return 'node'; - } else if (deps.includes('@tensorflow/tfjs-node-gpu')) { - return 'node-gpu'; - } - - throw TFJS_LIBRARY_TARGET_ERROR(dir); -}; - diff --git a/internals/upscaler-cli/src/lib/utils/log.ts b/internals/upscaler-cli/src/lib/utils/log.ts deleted file mode 100644 index d9937c68b..000000000 --- a/internals/upscaler-cli/src/lib/utils/log.ts +++ /dev/null @@ -1,60 +0,0 @@ -import chalk, { ChalkInstance } from 'chalk'; - -export type LogType = 'info' | 'warn' | 'error' | 'verbose'; - -const logs = [ - 'error', - 'warn', - 'info', - 'verbose', -]; - -export const DEFAULT_LOG_LEVEL: LogType = 'info'; - -const isLogType = (logType: unknown): logType is LogType => typeof logType === 'string' && ['error', 'warn', 'info', 'verbose'].includes(logType); - -const types: Record = { - error: chalk.bold.red, - warn: chalk.hex('#FFA500'), // Orange color - info: chalk.bold.blue, - verbose: chalk.green, -}; - -const parseMessage = (...message: unknown[]): string => message.map(m => { - if (Array.isArray(m)) { - return parseMessage(...m); - } - if (typeof m === 'object' && m !== null) { - return JSON.stringify(m); - } - if (m === true || m === false || typeof m === 'number') { - return chalk.yellow(m.toString()); - } - return m; -}).filter(Boolean).join(' '); - -export const log = (type: LogType, ...message: unknown[]) => { - if (logs.indexOf(type) <= logs.indexOf(level.level)) { - const chalkType = types[type]; - if (type === 'error' || type === 'warn') { - process.stderr.write(chalkType(`${parseMessage(...message)}\n`)); - } else { - process.stdout.write(chalkType(`${parseMessage(...message)}\n`)); - } - } -}; - -const level: { level: LogType } = { - level: DEFAULT_LOG_LEVEL, -} -export const setLogLevel = (newLevel: LogType) => { - if (!isLogType(newLevel)) { - throw new Error(`Invalid log type provided: ${newLevel}`); - } - level.level = newLevel; -}; - -export const info = (...message: unknown[]) => log('info', ...message); -export const warn = (...message: unknown[]) => log('warn', ...message); -export const error = (...message: unknown[]) => 
log('error', ...message); -export const verbose = (...message: unknown[]) => log('verbose', ...message); diff --git a/internals/upscaler-cli/src/lib/utils/pluralize.test.ts b/internals/upscaler-cli/src/lib/utils/pluralize.test.ts deleted file mode 100644 index 42a53c5ce..000000000 --- a/internals/upscaler-cli/src/lib/utils/pluralize.test.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { DEFAULT_SEPARATOR, EMPTY_ITEMS_ERROR, pluralize } from "./pluralize"; - -describe("pluralize", () => { - it('throws if given no items', () => { - expect(() => pluralize([])).toThrow(EMPTY_ITEMS_ERROR); - }); - - it('returns a single item', () => { - expect(pluralize(['foo'])).toEqual('foo'); - }); - - it('returns two items with a separator', () => { - expect(pluralize(['foo', 'bar'])).toEqual(`foo ${DEFAULT_SEPARATOR} bar`); - }); - - it('returns three items with commas and a separator', () => { - expect(pluralize(['foo', 'bar', 'baz'])).toEqual(`foo, bar, ${DEFAULT_SEPARATOR} baz`); - }); - - it('accepts a custom separator', () => { - expect(pluralize(['foo', 'bar', 'baz'], 'and')).toEqual('foo, bar, and baz'); - }); -}); diff --git a/internals/upscaler-cli/src/lib/utils/pluralize.ts b/internals/upscaler-cli/src/lib/utils/pluralize.ts deleted file mode 100644 index eb5d03a57..000000000 --- a/internals/upscaler-cli/src/lib/utils/pluralize.ts +++ /dev/null @@ -1,13 +0,0 @@ -export const EMPTY_ITEMS_ERROR = new Error('Must provide at least one item to pluralize') -export const DEFAULT_SEPARATOR = 'or'; -export const pluralize = (items: string[], separator = DEFAULT_SEPARATOR): string => { - if (items.length === 0) { - throw EMPTY_ITEMS_ERROR; - } - if (items.length <= 2) { - return items.join(` ${separator} `); - } - - return `${items.slice(0, -1).join(', ')}, ${separator} ${items[items.length - 1]}`; -}; - diff --git a/internals/upscaler-cli/src/lib/utils/run-npm-command.test.ts b/internals/upscaler-cli/src/lib/utils/run-npm-command.test.ts deleted file mode 100644 index 02de67bf2..000000000 --- a/internals/upscaler-cli/src/lib/utils/run-npm-command.test.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { vi } from 'vitest'; -import { spawn } from 'child_process'; -import { runNPMCommand } from "./run-npm-command"; - -vi.mock('child_process', () => { - return { - spawn: vi.fn(), - } -}); - -describe('run-npm-command', () => { - afterEach(() => { - vi.resetAllMocks(); - }); - - it('if the command throws an error, it should reject', async () => { - const error = 'huzzah'; - vi.mocked(spawn).mockImplementation(() => ({ - on: (event: string, callback: (...args: unknown[]) => unknown) => { - if (event === 'error') { - callback(error); - } - }, - })); - await expect(() => runNPMCommand([], 'foo')).rejects.toThrow(error); - }); - - it('if the command exits with a non-0 exit code, it should reject with that code', async () => { - const code = 1; - vi.mocked(spawn).mockImplementation(() => ({ - on: (event: string, callback: (...args: unknown[]) => unknown) => { - if (event === 'close') { - callback(`${code}`); // skipcq: JS-0255 - } - }, - })); - await expect(() => runNPMCommand([], 'foo')).rejects.toThrow(`${code}`); - }); - - it('if the command exits with a 0 exit code, it should resolve', async () => { - vi.mocked(spawn).mockImplementation(() => ({ - on: (event: string, callback: (...args: unknown[]) => unknown) => { - if (event === 'close') { - callback(0); // skipcq: JS-0255 - } - }, - })); - await runNPMCommand([], 'foo'); - }); -}); diff --git a/internals/upscaler-cli/src/lib/utils/run-npm-command.ts 
b/internals/upscaler-cli/src/lib/utils/run-npm-command.ts deleted file mode 100644 index 0133da7b2..000000000 --- a/internals/upscaler-cli/src/lib/utils/run-npm-command.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { spawn } from 'child_process'; - -export const runNPMCommand = (command: string[], cwd: string) => new Promise((resolve, reject) => { - const child = spawn("npm", command, { - shell: true, - cwd, - stdio: "inherit" - }); - - child.on('error', reject); - - child.on('close', code => { - if (code === 0) { - resolve(); - } else { - reject(code); - } - }); -}); diff --git a/internals/upscaler-cli/tsconfig.json b/internals/upscaler-cli/tsconfig.json deleted file mode 100644 index da922fa86..000000000 --- a/internals/upscaler-cli/tsconfig.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "compilerOptions": { - "target": "ESNext", - "strict": true, - "esModuleInterop": true, - "module": "ESNext", - "moduleResolution": "node", - "skipLibCheck": true, - "resolveJsonModule": true, - "forceConsistentCasingInFileNames": true, - "types": ["vitest/globals"] - }, - "ts-node": { - "esm": true - } -} diff --git a/internals/upscaler-cli/vite.config.ts b/internals/upscaler-cli/vite.config.ts deleted file mode 100644 index b60110d2b..000000000 --- a/internals/upscaler-cli/vite.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -// vite.config.ts -import { defineConfig } from 'vitest/config' - -export default defineConfig({ - test: { - globals: true, - }, -}) diff --git a/package.json b/package.json index 87674dce8..3e8a18a53 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,6 @@ "@internals/http-server": "workspace:*", "@internals/scripts": "workspace:*", "@internals/test-runner": "workspace:*", - "@internals/upscaler-cli": "workspace:*", "@internals/webdriver": "workspace:*", "@rollup/plugin-commonjs": "25.0.4", "@rollup/plugin-node-resolve": "15.2.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 414f2cd29..a7755b71e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -32,9 +32,6 @@ importers: '@internals/test-runner': specifier: workspace:* version: link:internals/test-runner - '@internals/upscaler-cli': - specifier: workspace:* - version: link:internals/upscaler-cli '@internals/webdriver': specifier: workspace:* version: link:internals/webdriver @@ -695,46 +692,6 @@ importers: specifier: latest version: 0.14.0 - internals/upscaler-cli: - dependencies: - '@internals/common': - specifier: workspace:* - version: link:../common - '@types/fast-levenshtein': - specifier: ^0.0.2 - version: 0.0.2 - chalk: - specifier: ^5.3.0 - version: 5.3.0 - commander: - specifier: ^11.0.0 - version: 11.0.0 - fast-levenshtein: - specifier: ^3.0.0 - version: 3.0.0 - inquirer: - specifier: ^9.2.10 - version: 9.2.10 - ts-node: - specifier: ^10.9.1 - version: 10.9.1(@types/node@20.5.1)(typescript@5.2.2) - tsc-alias: - specifier: ^1.8.7 - version: 1.8.7 - devDependencies: - '@commander-js/extra-typings': - specifier: ^11.0.0 - version: 11.0.0(commander@11.0.0) - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - '@types/uglify-js': - specifier: ^3.17.1 - version: 3.17.1 - vitest: - specifier: ^0.34.2 - version: 0.34.2(jsdom@22.1.0) - internals/webdriver: dependencies: dotenv: @@ -2592,14 +2549,6 @@ packages: requiresBuild: true optional: true - /@commander-js/extra-typings@11.0.0(commander@11.0.0): - resolution: {integrity: sha512-06ol6Kn5gPjFY6v0vWOZ84nQwyqhZdaeZCHYH3vhwewjpOEjniF1KHZxh18887G3poWiJ8qyq5pb6ANuiddfPQ==} - peerDependencies: - commander: 11.0.x - dependencies: - commander: 11.0.0 - dev: true - 
/@cspotcode/source-map-support@0.8.1: resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} @@ -4537,6 +4486,7 @@ packages: /@ljharb/through@2.3.9: resolution: {integrity: sha512-yN599ZBuMPPK4tdoToLlvgJB4CLK8fGl7ntfy0Wn7U6ttNvHYurd81bfUiK/6sMkiIwm65R6ck4L6+Y3DfVbNQ==} engines: {node: '>= 0.4'} + dev: true /@mapbox/node-pre-gyp@1.0.9: resolution: {integrity: sha512-aDF3S3rK9Q2gey/WAttUlISduDItz5BU3306M9Eyv6/oS40aMprnopshtlKTykxRNIBEZuRMaZAnbrQ4QtKGyw==} @@ -5263,10 +5213,6 @@ packages: '@types/qs': 6.9.7 '@types/serve-static': 1.13.10 - /@types/fast-levenshtein@0.0.2: - resolution: {integrity: sha512-h9AGeNlFimLtFUlEZgk+hb3LUT4tNHu8y0jzCUeTdi1BM4e86sBQs/nQYgHk70ksNyNbuLwpymFAXkb0GAehmw==} - dev: false - /@types/fs-extra@11.0.1: resolution: {integrity: sha512-MxObHvNl4A69ofaTRU8DFqvgzzv8s9yRtaPPm5gud9HDNvpB3GPQFvNuTWAI59B9huVGV5jXYJwbCsmBsOGYWA==} dependencies: @@ -5612,12 +5558,6 @@ packages: resolution: {integrity: sha512-NfQ4gyz38SL8sDNrSixxU2Os1a5xcdFxipAFxYEuLUlvU2uDwS4NUpsImcf1//SlWItCVMMLiylsxbmNMToV/g==} dev: false - /@types/uglify-js@3.17.1: - resolution: {integrity: sha512-GkewRA4i5oXacU/n4MA9+bLgt5/L3F1mKrYvFGm7r2ouLXhRKjuWwo9XHNnbx6WF3vlGW21S3fCvgqxvxXXc5g==} - dependencies: - source-map: 0.6.1 - dev: true - /@types/unist@2.0.6: resolution: {integrity: sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==} @@ -6337,6 +6277,7 @@ packages: engines: {node: '>=8'} dependencies: type-fest: 0.21.3 + dev: true /ansi-html-community@0.0.8: resolution: {integrity: sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==} @@ -6796,6 +6737,7 @@ packages: buffer: 5.7.1 inherits: 2.0.4 readable-stream: 3.6.2 + dev: true /bl@5.1.0: resolution: {integrity: sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==} @@ -7289,6 +7231,7 @@ packages: /chalk@5.3.0: resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + dev: true /char-regex@1.0.2: resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} @@ -7310,6 +7253,7 @@ packages: /chardet@0.7.0: resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} + dev: true /chart.js@4.3.3: resolution: {integrity: sha512-aTk7pBw+x6sQYhon/NR3ikfUJuym/LdgpTlgZRe2PaEhjUMKBKyNaFCMVRAyTEWYFNO7qRu7iQVqOw/OqzxZxQ==} @@ -7437,6 +7381,7 @@ packages: engines: {node: '>=8'} dependencies: restore-cursor: 3.1.0 + dev: true /cli-cursor@4.0.0: resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} @@ -7448,6 +7393,7 @@ packages: /cli-spinners@2.6.1: resolution: {integrity: sha512-x/5fWmGMnbKQAaNwN+UZlV79qBLM9JFnJuJ03gIi5whrob0xV0ofNVHy9DhwGdsMJQc2OKv0oGmLzvaqvAVv+g==} engines: {node: '>=6'} + dev: true /cli-table3@0.6.2: resolution: {integrity: sha512-QyavHCaIC80cMivimWu4aWHilIpiDpfm3hGmqAmXVL1UsnbLuBSMd21hTX6VY4ZSDSM73ESLeF8TOYId3rBTbw==} @@ -7472,6 +7418,7 @@ packages: /cli-width@4.1.0: resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} engines: {node: '>= 12'} + dev: true /clipboardy@3.0.0: resolution: {integrity: 
sha512-Su+uU5sr1jkUy1sGRpLKjKrvEOVXgSgiSInwa/qeID6aJ07yh+5NWc3h2QfjHjBnfX4LhtFcuAWKUsJ3r+fjbg==} @@ -7521,6 +7468,7 @@ packages: /clone@1.0.4: resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} engines: {node: '>=0.8'} + dev: true /cloudflared@0.5.0: resolution: {integrity: sha512-ILt34H2KNd15cbGm3VfLvswUHCJJ5vhDXN5M0u9BfEul4DLMQ/Y3ecOP5YUPFN9ERcVabfsMEcLbC0hN3AydTw==} @@ -7620,6 +7568,7 @@ packages: /commander@11.0.0: resolution: {integrity: sha512-9HMlXtt/BNoYr8ooyjjNRdIilOTkVJXB+GhxMTtOKwk0R4j4lS4NpjuqmRxroBfnfTSHQIHQB7wryHhXarNjmQ==} engines: {node: '>=16'} + dev: false /commander@2.20.3: resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} @@ -8348,6 +8297,7 @@ packages: resolution: {integrity: sha512-s82itHOnYrN0Ib8r+z7laQz3sdE+4FP3d9Q7VLO7U+KRT+CR0GsWuyHxzdAY82I7cXv0G/twrqomTJLOssO5HA==} dependencies: clone: 1.0.4 + dev: true /defer-to-connect@1.1.3: resolution: {integrity: sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==} @@ -9024,6 +8974,7 @@ packages: /escape-string-regexp@5.0.0: resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} engines: {node: '>=12'} + dev: true /escodegen@2.1.0: resolution: {integrity: sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==} @@ -9320,6 +9271,7 @@ packages: chardet: 0.7.0 iconv-lite: 0.4.24 tmp: 0.0.33 + dev: true /extract-zip@2.0.1: resolution: {integrity: sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==} @@ -9364,22 +9316,11 @@ packages: /fast-levenshtein@2.0.6: resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} - /fast-levenshtein@3.0.0: - resolution: {integrity: sha512-hKKNajm46uNmTlhHSyZkmToAc56uZJwYq7yrciZjqOxnlfQwERDQJmHPUp7m1m9wx8vgOe8IaCKZ5Kv2k1DdCQ==} - dependencies: - fastest-levenshtein: 1.0.16 - dev: false - /fast-url-parser@1.1.3: resolution: {integrity: sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==} dependencies: punycode: 1.4.1 - /fastest-levenshtein@1.0.16: - resolution: {integrity: sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==} - engines: {node: '>= 4.9.1'} - dev: false - /fastq@1.15.0: resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} dependencies: @@ -9453,6 +9394,7 @@ packages: dependencies: escape-string-regexp: 5.0.0 is-unicode-supported: 1.2.0 + dev: true /file-entry-cache@6.0.1: resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} @@ -10683,6 +10625,7 @@ packages: string-width: 4.2.3 strip-ansi: 6.0.1 wrap-ansi: 6.2.0 + dev: true /internal-slot@1.0.5: resolution: {integrity: sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==} @@ -10851,6 +10794,7 @@ packages: /is-interactive@1.0.0: resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} engines: {node: '>=8'} + dev: true /is-interactive@2.0.0: resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} @@ -11015,10 +10959,12 @@ packages: 
/is-unicode-supported@0.1.0: resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} engines: {node: '>=10'} + dev: true /is-unicode-supported@1.2.0: resolution: {integrity: sha512-wH+U77omcRzevfIG8dDhTS0V9zZyweakfD01FULl97+0EHiJTTZtJqxPSkIIo/SDPv/i07k/C9jAPY+jwLLeUQ==} engines: {node: '>=12'} + dev: true /is-weakmap@2.0.1: resolution: {integrity: sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==} @@ -11923,6 +11869,7 @@ packages: dependencies: chalk: 4.1.2 is-unicode-supported: 0.1.0 + dev: true /log-symbols@5.1.0: resolution: {integrity: sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==} @@ -12745,6 +12692,7 @@ packages: /mute-stream@1.0.0: resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dev: true /mylas@2.1.13: resolution: {integrity: sha512-+MrqnJRtxdF+xngFfUUkIMQrUUL0KsxbADUkn23Z/4ibGg192Q+z+CQyiYwvWTsYjJygmMR8+w3ZDa98Zh6ESg==} @@ -13123,6 +13071,7 @@ packages: log-symbols: 4.1.0 strip-ansi: 6.0.1 wcwidth: 1.0.1 + dev: true /ora@6.3.1: resolution: {integrity: sha512-ERAyNnZOfqM+Ao3RAvIXkYh5joP220yf59gVe2X/cI6SiCxIdi4c9HZKZD8R6q/RDXEje1THBju6iExiSsgJaQ==} @@ -13142,6 +13091,7 @@ packages: /os-tmpdir@1.0.2: resolution: {integrity: sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} engines: {node: '>=0.10.0'} + dev: true /p-cancelable@1.1.0: resolution: {integrity: sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==} @@ -14861,6 +14811,7 @@ packages: dependencies: onetime: 5.1.2 signal-exit: 3.0.7 + dev: true /restore-cursor@4.0.0: resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} @@ -14965,6 +14916,7 @@ packages: /run-async@3.0.0: resolution: {integrity: sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==} engines: {node: '>=0.12.0'} + dev: true /run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} @@ -16165,6 +16117,7 @@ packages: engines: {node: '>=0.6.0'} dependencies: os-tmpdir: 1.0.2 + dev: true /tmp@0.2.1: resolution: {integrity: sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==} @@ -16459,6 +16412,7 @@ packages: /type-fest@0.21.3: resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} engines: {node: '>=10'} + dev: true /type-fest@0.8.1: resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} @@ -17353,6 +17307,7 @@ packages: resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} dependencies: defaults: 1.0.3 + dev: true /web-namespaces@1.1.4: resolution: {integrity: sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==} @@ -17687,6 +17642,7 @@ packages: ansi-styles: 4.3.0 string-width: 4.2.3 strip-ansi: 6.0.1 + dev: true /wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}