diff --git a/src/transform/fsContext.ts b/src/transform/fsContext.ts new file mode 100644 index 00000000..42cf7c24 --- /dev/null +++ b/src/transform/fsContext.ts @@ -0,0 +1,37 @@ +import {readFileSync, writeFileSync} from 'fs'; +import {readFile, writeFile} from 'fs/promises'; + +import {FsContext} from './typings'; +import {isFileExists, isFileExistsAsync} from './utilsFS'; + +export class DefaultFsContext implements FsContext { + exist(path: string): boolean { + return isFileExists(path); + } + + read(path: string): string { + return readFileSync(path, 'utf8'); + } + + write(path: string, content: string): void { + writeFileSync(path, content, { + encoding: 'utf8', + }); + } + + async existAsync(path: string): Promise<boolean> { + return await isFileExistsAsync(path); + } + + async readAsync(path: string): Promise<string> { + return readFile(path, 'utf8'); + } + + async writeAsync(path: string, content: string): Promise<void> { + await writeFile(path, content, { + encoding: 'utf8', + }); + } +} + +export const defaultFsContext = new DefaultFsContext(); diff --git a/src/transform/plugins/images/collect.ts b/src/transform/plugins/images/collect.ts index f67e4671..c5c0579b 100644 --- a/src/transform/plugins/images/collect.ts +++ b/src/transform/plugins/images/collect.ts @@ -12,21 +12,21 @@ type Options = MarkdownItPluginOpts & { singlePage: boolean; }; -const collect = (input: string, options: Options) => { +const collect = async (input: string, options: Options) => { const md = new MarkdownIt().use(imsize); - const {root, path, destPath = '', copyFile, singlePage} = options; + const {root, path, destPath = '', copyFile, singlePage, deps} = options; const tokens = md.parse(input, {}); let result = input; - tokens.forEach((token) => { + for (const token of tokens) { if (token.type !== 'inline') { continue; } const children = token.children || []; - children.forEach((childToken) => { + for (const childToken of children) { if (childToken.type !== 'image') { continue; } @@ -40,15 +40,16 @@ const
collect = (input: string, options: Options) => { const targetPath = resolveRelativePath(path, src); const targetDestPath = resolveRelativePath(destPath, src); - if (singlePage && !path.includes('_includes/')) { - const newSrc = relative(root, resolveRelativePath(path, src)); + deps?.markDep?.(path, targetPath, 'image'); + if (singlePage && !path.includes('_includes/')) { + const newSrc = relative(root, targetPath); result = result.replace(src, newSrc); } - copyFile(targetPath, targetDestPath); - }); - }); + await copyFile(targetPath, targetDestPath); + } + } if (singlePage) { return result; diff --git a/src/transform/plugins/images/index.ts b/src/transform/plugins/images/index.ts index 7684b2d9..7ed8a81c 100644 --- a/src/transform/plugins/images/index.ts +++ b/src/transform/plugins/images/index.ts @@ -2,12 +2,12 @@ import {join, sep} from 'path'; import {bold} from 'chalk'; import {optimize} from 'svgo'; import Token from 'markdown-it/lib/token'; -import {readFileSync} from 'fs'; -import {isFileExists, resolveRelativePath} from '../../utilsFS'; +import {resolveRelativePath} from '../../utilsFS'; import {isExternalHref, isLocalUrl} from '../../utils'; import {MarkdownItPluginCb, MarkdownItPluginOpts} from '../typings'; -import {StateCore} from '../../typings'; +import {FsContext, StateCore} from '../../typings'; +import {defaultFsContext} from '../../fsContext'; interface ImageOpts extends MarkdownItPluginOpts { assetsPublicPath: string; @@ -15,9 +15,10 @@ interface ImageOpts extends MarkdownItPluginOpts { } function replaceImageSrc( + fs: FsContext, token: Token, state: StateCore, - {assetsPublicPath = sep, root = '', path: optsPath, log}: ImageOpts, + {assetsPublicPath = sep, root = '', path: optsPath, log, deps}: ImageOpts, ) { const src = token.attrGet('src') || ''; const currentPath = state.env.path || optsPath; @@ -28,7 +29,9 @@ function replaceImageSrc( const path = resolveRelativePath(currentPath, src); - if (isFileExists(path)) { + 
deps?.markDep?.(currentPath, path, 'image'); + + if (fs.exist(path)) { state.md.assets?.push(path); } else { log.error(`Asset not found: ${bold(src)} in ${bold(currentPath)}`); @@ -51,15 +54,18 @@ function prefix() { } function convertSvg( + fs: FsContext, token: Token, state: StateCore, - {path: optsPath, log, notFoundCb, root}: SVGOpts, + {path: optsPath, log, notFoundCb, root, deps}: SVGOpts, ) { const currentPath = state.env.path || optsPath; const path = resolveRelativePath(currentPath, token.attrGet('src') || ''); try { - const raw = readFileSync(path).toString(); + deps?.markDep?.(currentPath, path, 'image'); + + const raw = fs.read(path).toString(); const result = optimize(raw, { plugins: [ { @@ -90,6 +96,8 @@ function convertSvg( type Opts = SVGOpts & ImageOpts; const index: MarkdownItPluginCb = (md, opts) => { + const fs = opts.fs ?? defaultFsContext; + md.assets = []; const plugin = (state: StateCore) => { @@ -117,9 +125,9 @@ const index: MarkdownItPluginCb = (md, opts) => { const shouldInlineSvg = opts.inlineSvg !== false && !isExternalHref(imgSrc); if (imgSrc.endsWith('.svg') && shouldInlineSvg) { - childrenTokens[j] = convertSvg(childrenTokens[j], state, opts); + childrenTokens[j] = convertSvg(fs, childrenTokens[j], state, opts); } else { - replaceImageSrc(childrenTokens[j], state, opts); + replaceImageSrc(fs, childrenTokens[j], state, opts); } childrenTokens[j].attrSet('yfm_patched', '1'); diff --git a/src/transform/plugins/includes/collect.ts b/src/transform/plugins/includes/collect.ts index d5522443..a7c5acdb 100644 --- a/src/transform/plugins/includes/collect.ts +++ b/src/transform/plugins/includes/collect.ts @@ -1,19 +1,24 @@ import {relative} from 'path'; import {bold} from 'chalk'; -import {readFileSync} from 'fs'; -import {getRelativePath, isFileExists, resolveRelativePath} from '../../utilsFS'; +import {getRelativePath, resolveRelativePath} from '../../utilsFS'; +import {defaultFsContext} from '../../fsContext'; import {IncludeCollectOpts} 
from './types'; -const includesPaths: string[] = []; - -function processRecursive( +async function processRecursive( includePath: string, targetDestPath: string, options: IncludeCollectOpts, ) { - const {path, log, copyFile, includedParentPath: includedParentPathNullable, included} = options; + const { + path, + log, + copyFile, + includedParentPath: includedParentPathNullable, + included, + fs = defaultFsContext, + } = options; const includedParentPath = includedParentPathNullable || path; const includeOptions = { @@ -23,11 +28,11 @@ }; try { - const contentProcessed = copyFile(includePath, targetDestPath, includeOptions); + const contentProcessed = await copyFile(includePath, targetDestPath, includeOptions); // To reduce file reading we can include the file content into the generated content if (included) { - const content = contentProcessed ?? readFileSync(targetDestPath, 'utf8'); + const content = contentProcessed ?? (await fs.readAsync(targetDestPath)); if (content) { const includedRelativePath = getRelativePath(includedParentPath, includePath); @@ -35,7 +40,7 @@ // The appendix is the map that protects from multiple include files if (!options.appendix?.has(includedRelativePath)) { // Recursive function to include the depth structure - const includeContent = collectRecursive(content, { + const includeContent = await collectRecursive(content, { ...options, path: includePath, includedParentPath, @@ -54,8 +59,8 @@ } } -function collectRecursive(result: string, options: IncludeCollectOpts) { - const {root, path, destPath = '', log, singlePage} = options; +async function collectRecursive(result: string, options: IncludeCollectOpts) { + const {root, path, destPath = '', log, singlePage, fs = defaultFsContext, deps} = options; const INCLUDE_REGEXP = /{%\s*include\s*(notitle)?\s*\[(.+?)]\((.+?)\)\s*%}/g; @@ -67,19 +72,24 @@ let
includePath = resolveRelativePath(path, relativePath); const hashIndex = relativePath.lastIndexOf('#'); - if (hashIndex > -1 && !isFileExists(includePath)) { + + deps?.markDep?.(path, includePath, 'include'); + + if (hashIndex > -1 && !(await fs.existAsync(includePath))) { includePath = includePath.slice(0, includePath.lastIndexOf('#')); relativePath = relativePath.slice(0, hashIndex); } const targetDestPath = resolveRelativePath(destPath, relativePath); - if (includesPaths.includes(includePath)) { - log.error(`Circular includes: ${bold(includesPaths.concat(path).join(' ▶ '))}`); + if (options.includesPaths?.includes(includePath)) { + log.error( + `Circular includes: ${bold(options.includesPaths?.concat(path).join(' ▶ '))}`, + ); break; } - if (singlePage && !includesPaths.length) { + if (singlePage && !options.includesPaths?.length) { const newRelativePath = relative(root, includePath); const newInclude = matchedInclude.replace(relativePath, newRelativePath); @@ -89,22 +99,23 @@ function collectRecursive(result: string, options: IncludeCollectOpts) { INCLUDE_REGEXP.lastIndex = INCLUDE_REGEXP.lastIndex - delta; } - includesPaths.push(includePath); + options.includesPaths?.push(includePath); - processRecursive(includePath, targetDestPath, options); + await processRecursive(includePath, targetDestPath, options); - includesPaths.pop(); + options.includesPaths?.pop(); } return result; } -function collect(input: string, options: IncludeCollectOpts) { +async function collect(input: string, options: IncludeCollectOpts) { const shouldWriteAppendix = !options.appendix; + options.includesPaths = options.includesPaths ?? []; options.appendix = options.appendix ?? 
new Map(); - input = collectRecursive(input, options); + input = await collectRecursive(input, options); if (shouldWriteAppendix) { // Appendix should be appended to the end of the file (it supports depth structure, so the included files will have included as well) diff --git a/src/transform/plugins/includes/index.ts b/src/transform/plugins/includes/index.ts index 85b30797..63fac960 100644 --- a/src/transform/plugins/includes/index.ts +++ b/src/transform/plugins/includes/index.ts @@ -1,15 +1,15 @@ import {bold} from 'chalk'; import Token from 'markdown-it/lib/token'; -import {StateCore} from '../../typings'; import { GetFileTokensOpts, getFileTokens, getFullIncludePath, - isFileExists, resolveRelativePath, } from '../../utilsFS'; import {findBlockTokens} from '../../utils'; +import {StateCore} from '../../typings'; +import {defaultFsContext} from '../../fsContext'; import {MarkdownItPluginCb, MarkdownItPluginOpts} from '../typings'; import {MarkdownItIncluded} from './types'; @@ -29,7 +29,7 @@ type Options = MarkdownItPluginOpts & }; function unfoldIncludes(md: MarkdownItIncluded, state: StateCore, path: string, options: Options) { - const {root, notFoundCb, log, noReplaceInclude = false} = options; + const {root, notFoundCb, log, noReplaceInclude = false, fs = defaultFsContext, deps} = options; const {tokens} = state; let i = 0; @@ -57,7 +57,11 @@ function unfoldIncludes(md: MarkdownItIncluded, state: StateCore, path: string, let pathname = fullIncludePath; let hash = ''; const hashIndex = fullIncludePath.lastIndexOf('#'); - if (hashIndex > -1 && !isFileExists(pathname)) { + const existed = fs.exist(pathname); + + deps?.markDep?.(path, pathname, 'include'); + + if (hashIndex > -1 && !existed) { pathname = fullIncludePath.slice(0, hashIndex); hash = fullIncludePath.slice(hashIndex + 1); } @@ -68,7 +72,7 @@ function unfoldIncludes(md: MarkdownItIncluded, state: StateCore, path: string, continue; } - const fileTokens = getFileTokens(pathname, state, { + const 
fileTokens = getFileTokens(fs, pathname, state, { ...options, content: included, // The content forces the function to use it instead of reading from the disk }); diff --git a/src/transform/plugins/includes/types.ts b/src/transform/plugins/includes/types.ts index 341af7d8..76ce047e 100644 --- a/src/transform/plugins/includes/types.ts +++ b/src/transform/plugins/includes/types.ts @@ -9,10 +9,15 @@ export interface MarkdownItIncluded extends MarkdownIt { export type IncludeCollectOpts = MarkdownItPluginOpts & { destPath: string; - copyFile(path: string, dest: string, opts: IncludeCollectOpts): string | null | undefined; + copyFile( + path: string, + dest: string, + opts: IncludeCollectOpts, + ): Promise<string | null | undefined>; singlePage: Boolean; included: Boolean; includedParentPath?: string; additionalIncludedList?: string[]; + includesPaths?: string[]; appendix?: Map<string, string>; }; diff --git a/src/transform/plugins/links/collect.ts b/src/transform/plugins/links/collect.ts index a7166b04..dae6fd03 100644 --- a/src/transform/plugins/links/collect.ts +++ b/src/transform/plugins/links/collect.ts @@ -4,6 +4,7 @@ import url from 'url'; import {PAGE_LINK_REGEXP, getHrefTokenAttr, isLocalUrl} from '../../utils'; import {getSinglePageAnchorId, resolveRelativePath} from '../../utilsFS'; +import {MarkdownItPluginOpts} from '../typings'; import index from './index'; @@ -12,9 +13,7 @@ const replaceLinkHref = (input: string, href: string, newHref: string) => { return input.replace(`](${href})`, `](${newHref})`); }; -type Options = { - root: string; - path: string; +type Options = MarkdownItPluginOpts & { singlePage: boolean; }; @@ -22,7 +21,7 @@ type Options = { * Example: replace [Text](../../path/to/file.md#anchor) with [Text](#_path_to_file_anchor) * */ const collect = (input: string, options: Options) => { - const {root, path: startPath, singlePage} = options; + const {root, path: startPath, singlePage, deps} = options; if (!singlePage) { return; @@ -66,6 +65,8 @@ const collect = (input: string, options:
Options) => { if (pathname) { const isPageFile = PAGE_LINK_REGEXP.test(pathname); if (isPageFile) { + deps?.markDep?.(startPath, pathname, 'link'); + const newHref = getSinglePageAnchorId({ root, currentPath: startPath, diff --git a/src/transform/plugins/links/index.ts b/src/transform/plugins/links/index.ts index 4e976c13..e5a4917a 100644 --- a/src/transform/plugins/links/index.ts +++ b/src/transform/plugins/links/index.ts @@ -1,9 +1,7 @@ import url from 'url'; import {bold} from 'chalk'; -import Token from 'markdown-it/lib/token'; import path, {isAbsolute, parse, relative, resolve} from 'path'; - -import {Logger} from 'src/transform/log'; +import Token from 'markdown-it/lib/token'; import { PAGE_LINK_REGEXP, @@ -13,8 +11,10 @@ import { headingInfo, isLocalUrl, } from '../../utils'; -import {getFileTokens, isFileExists} from '../../utilsFS'; -import {CacheContext, StateCore} from '../../typings'; +import {getFileTokens} from '../../utilsFS'; +import {Logger} from '../../log'; +import {CacheContext, FsContext, StateCore} from '../../typings'; +import {defaultFsContext} from '../../fsContext'; import {MarkdownItPluginCb, MarkdownItPluginOpts} from '../typings'; function getTitleFromTokens(tokens: Token[]) { @@ -49,12 +49,13 @@ type Options = { currentPath: string; log: Logger; cache?: CacheContext; + fs: FsContext; }; const getTitle = (id: string | null, options: Options) => { const {file, state, opts} = options; - const fileTokens = getFileTokens(file, state, { + const fileTokens = getFileTokens(options.fs, file, state, { ...opts, disableLint: true, disableTitleRefSubstitution: true, @@ -121,6 +122,8 @@ function processLink(state: StateCore, tokens: Token[], idx: number, opts: ProcO log, getPublicPath = getDefaultPublicPath, cache, + fs = defaultFsContext, + deps, } = opts; const currentPath = state.env.path || startPath; @@ -147,9 +150,13 @@ function processLink(state: StateCore, tokens: Token[], idx: number, opts: ProcO if (pathname) { file = 
resolve(path.parse(currentPath).dir, pathname); - fileExists = isFileExists(file); + fileExists = fs.exist(file); isPageFile = PAGE_LINK_REGEXP.test(pathname); + if (isPageFile) { + deps?.markDep?.(currentPath, file, 'link'); + } + if (isPageFile && !fileExists) { let needShowError = true; if (needSkipLinkFn) { @@ -193,6 +200,7 @@ currentPath, log, cache, + fs, }); } diff --git a/src/transform/typings.ts b/src/transform/typings.ts index 77f19743..df8758e2 100644 --- a/src/transform/typings.ts +++ b/src/transform/typings.ts @@ -29,6 +29,33 @@ export type Heading = { items?: Heading[]; }; +export interface FsContext { + read(path: string | null): string; + exist(path: string | null): boolean; + write(path: string | null, content: string): void; + readAsync(path: string | null): Promise<string>; + existAsync(path: string | null): Promise<boolean>; + writeAsync(path: string | null, content: string): Promise<void>; +} + +export interface DependencyContext { + resetDeps?(path: string): void; + markDep?(path: string, dependencyPath: string, type?: string): void; + unmarkDep?(path: string, dependencyPath: string, type?: string): void; +} + +export interface RevisionMeta { + files: { + [key: string]: { + modifiedDate: number; // modified_at + dependencies: { + [type: string]: string[]; + }; + changed: boolean; + }; + }; +} + export interface OptionsType { vars?: Record<string, string>; path?: string; @@ -58,6 +85,8 @@ getPublicPath?: (options: OptionsType, href?: string) => string; renderInline?: boolean; cache?: CacheContext; + fs?: FsContext; + deps?: DependencyContext; [x: string]: unknown; } @@ -84,6 +113,8 @@ export interface MarkdownItPluginOpts { rootPublicPath: string; isLintRun: boolean; cache?: CacheContext; + fs?: FsContext; + deps?: DependencyContext; conditionsInCode?: boolean; vars?: Record<string, string>; extractTitle?: boolean; diff --git a/src/transform/utilsFS.ts b/src/transform/utilsFS.ts index
f8accb8b..d349b2ce 100644 --- a/src/transform/utilsFS.ts +++ b/src/transform/utilsFS.ts @@ -1,25 +1,14 @@ import type {Dictionary} from 'lodash'; -import {readFileSync, statSync} from 'fs'; import escapeRegExp from 'lodash/escapeRegExp'; import {join, parse, relative, resolve, sep} from 'path'; +import {statSync} from 'fs'; +import {stat} from 'fs/promises'; import liquidSnippet from './liquid'; -import {StateCore} from './typings'; +import {FsContext, StateCore} from './typings'; import {defaultTransformLink} from './utils'; -const filesCache: Record = {}; - -export function isFileExists(file: string) { - try { - const stats = statSync(file); - - return stats.isFile(); - } catch (e) { - return false; - } -} - export function resolveRelativePath(fromPath: string, relativePath: string) { const {dir: fromDir} = parse(fromPath); @@ -39,7 +28,32 @@ export type GetFileTokensOpts = { content?: string; }; -export function getFileTokens(path: string, state: StateCore, options: GetFileTokensOpts) { +export function isFileExists(file: string) { + try { + const stats = statSync(file); + + return stats.isFile(); + } catch (e) { + return false; + } +} + +export async function isFileExistsAsync(file: string) { + try { + const stats = await stat(file); + + return stats.isFile(); + } catch (e) { + return false; + } +} + +export function getFileTokens( + fs: FsContext, + path: string, + state: StateCore, + options: GetFileTokensOpts, +) { const { getVarsPerFile, vars, @@ -57,12 +71,7 @@ export function getFileTokens(path: string, state: StateCore, options: GetFileTo // Read the content only if we dont have one in the args if (!content) { - if (filesCache[path]) { - content = filesCache[path]; - } else { - content = readFileSync(path, 'utf8'); - filesCache[path] = content; - } + content = fs.read(path); } let sourceMap; diff --git a/test/include-included.test.ts b/test/include-included.test.ts index ee7ccbe6..4f26c9c4 100644 --- a/test/include-included.test.ts +++ 
b/test/include-included.test.ts @@ -1,5 +1,4 @@ import {resolve} from 'path'; -import {readFileSync} from 'fs'; import {readFile} from 'node:fs/promises'; import transform from '../src/transform'; @@ -26,7 +25,7 @@ const collectIncluded = (text: string, path: string) => { included: true, path: path, root: resolve(path, '../'), - copyFile: (includePath) => readFileSync(includePath, 'utf-8'), + copyFile: (includePath) => readFile(includePath, 'utf-8'), singlePage: false, destPath: '', isLintRun: false, @@ -46,7 +45,7 @@ describe('Included to md', () => { const expectPath = resolve(__dirname, './mocks/include-included-3.expect.md'); const expectContent = await readFile(expectPath, 'utf8'); - const result = collectIncluded(input, inputPath); + const result = await collectIncluded(input, inputPath); expect(result).toBe(expectContent); }); @@ -70,7 +69,7 @@ describe('Included to md', () => { const expectPath = resolve(__dirname, './mocks/include-included-3-deep.expect.md'); const expectContent = await readFile(expectPath, 'utf8'); - const result = collectIncluded(input, inputPath); + const result = await collectIncluded(input, inputPath); expect(result).toBe(expectContent); });