diff --git a/package-lock.json b/package-lock.json index 1906bbe0c3..26d1c1a65e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -80,6 +80,7 @@ "@types/ini": "4.1.0", "@types/jest": "29.5.14", "@types/node": "20.14.8", + "@types/pako": "1.0.4", "@types/react": "18.3.1", "@types/react-dom": "18.3.0", "@types/react-modal": "3.16.3", @@ -15511,6 +15512,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/pako": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@types/pako/-/pako-1.0.4.tgz", + "integrity": "sha512-Z+5bJSm28EXBSUJEgx29ioWeEEHUh6TiMkZHDhLwjc9wVFH+ressbkmX6waUZc5R3Gobn4Qu5llGxaoflZ+yhA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/parse-json": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz", diff --git a/package.json b/package.json index e3e8dfa03c..0c1f2b86a5 100644 --- a/package.json +++ b/package.json @@ -133,6 +133,7 @@ "@types/ini": "4.1.0", "@types/jest": "29.5.14", "@types/node": "20.14.8", + "@types/pako": "1.0.4", "@types/react": "18.3.1", "@types/react-dom": "18.3.0", "@types/react-modal": "3.16.3", diff --git a/packages/docs/site/docs/blueprints/04-resources.md b/packages/docs/site/docs/blueprints/04-resources.md index f9ce69d8f2..4723139e63 100644 --- a/packages/docs/site/docs/blueprints/04-resources.md +++ b/packages/docs/site/docs/blueprints/04-resources.md @@ -59,6 +59,7 @@ type GitDirectoryReference = { url: string; // Repository URL (https://, ssh git@..., etc.) path?: string; // Optional subdirectory inside the repository ref?: string; // Optional branch, tag, or commit SHA + '.git'?: boolean; // Experimental: include a .git directory with fetched metadata }; ``` @@ -84,6 +85,7 @@ type GitDirectoryReference = { - Playground automatically detects providers like GitHub and GitLab. - It handles CORS-proxied fetches and sparse checkouts, so you can use URLs that point to specific subdirectories or branches. - This resource can be used with steps like [`installPlugin`](/blueprints/steps#InstallPluginStep) and [`installTheme`](/blueprints/steps#InstallThemeStep). +- Set `".git": true` to include a `.git` folder containing packfiles and refs so Git-aware tooling can detect the checkout. This currently mirrors a shallow clone of the selected ref. ### CoreThemeReference diff --git a/packages/playground/blueprints/public/blueprint-schema-validator.js b/packages/playground/blueprints/public/blueprint-schema-validator.js index 4a163be1a2..428f257869 100644 --- a/packages/playground/blueprints/public/blueprint-schema-validator.js +++ b/packages/playground/blueprints/public/blueprint-schema-validator.js @@ -4028,6 +4028,11 @@ const schema25 = { description: 'The path to the directory in the git repository. 
Defaults to the repo root.', }, + '.git': { + type: 'boolean', + description: + 'When true, include a .git directory in the cloned files', + }, }, required: ['resource', 'url', 'ref'], additionalProperties: false, @@ -4070,7 +4075,8 @@ function validate19( key0 === 'url' || key0 === 'ref' || key0 === 'refType' || - key0 === 'path' + key0 === 'path' || + key0 === '.git' ) ) { validate19.errors = [ @@ -4224,6 +4230,35 @@ function validate19( } else { var valid0 = true; } + if (valid0) { + if (data['.git'] !== undefined) { + const _errs13 = errors; + if ( + typeof data['.git'] !== + 'boolean' + ) { + validate19.errors = [ + { + instancePath: + instancePath + + '/.git', + schemaPath: + '#/properties/.git/type', + keyword: 'type', + params: { + type: 'boolean', + }, + message: + 'must be boolean', + }, + ]; + return false; + } + var valid0 = _errs13 === errors; + } else { + var valid0 = true; + } + } } } } diff --git a/packages/playground/blueprints/public/blueprint-schema.json b/packages/playground/blueprints/public/blueprint-schema.json index 5fa7a73c0a..039dcd14f0 100644 --- a/packages/playground/blueprints/public/blueprint-schema.json +++ b/packages/playground/blueprints/public/blueprint-schema.json @@ -1354,6 +1354,10 @@ "path": { "type": "string", "description": "The path to the directory in the git repository. Defaults to the repo root." + }, + ".git": { + "type": "boolean", + "description": "When true, include a .git directory in the cloned files" } }, "required": ["resource", "url", "ref"], diff --git a/packages/playground/blueprints/src/lib/v1/resources.spec.ts b/packages/playground/blueprints/src/lib/v1/resources.spec.ts index 19772fd3a8..2f0bf34b82 100644 --- a/packages/playground/blueprints/src/lib/v1/resources.spec.ts +++ b/packages/playground/blueprints/src/lib/v1/resources.spec.ts @@ -5,6 +5,10 @@ import { } from './resources'; import { expect, describe, it, vi, beforeEach } from 'vitest'; import { StreamedFile } from '@php-wasm/stream-compression'; +import { mkdtemp, rm, writeFile, mkdir } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; +import { execSync, type ExecSyncOptions } from 'child_process'; describe('UrlResource', () => { it('should create a new instance of UrlResource', () => { @@ -75,6 +79,112 @@ describe('GitDirectoryResource', () => { ); expect(files['dependabot.yml']).toBeInstanceOf(Uint8Array); }); + + it('includes a .git directory when requested', async () => { + const commit = '05138293dd39e25a9fa8e43a9cc775d6fb780e37'; + const resource = new GitDirectoryResource({ + resource: 'git:directory', + url: 'https://github.com/WordPress/wordpress-playground', + ref: commit, + refType: 'commit', + path: 'packages/docs/site/docs/blueprints/tutorial', + '.git': true, + }); + + const { files } = await resource.resolve(); + + // Create a temporary directory and write all files to disk + const tmpDir = await mkdtemp(join(tmpdir(), 'git-test-')); + try { + // Write all files to the temporary directory + for (const [path, content] of Object.entries(files)) { + const fullPath = join(tmpDir, path); + const dir = join(fullPath, '..'); + await mkdir(dir, { recursive: true }); + + if (typeof content === 'string') { + await writeFile(fullPath, content, 'utf8'); + } else { + await writeFile(fullPath, content); + } + } + + // Run git commands to verify the repository state + const gitEnv: ExecSyncOptions = { + cwd: tmpDir, + encoding: 'utf8', + maxBuffer: 10 * 1024 * 1024, // 10MB buffer to handle large output + stdio: ['pipe', 'pipe', 'ignore'], // 
Suppress stderr to avoid buffer overflow + }; + + // Verify we're on the expected commit + const currentCommit = execSync('git rev-parse HEAD', gitEnv) + .toString() + .trim(); + expect(currentCommit).toBe(commit); + + // Verify the remote is configured correctly + const remoteUrl = execSync('git remote get-url origin', gitEnv) + .toString() + .trim(); + expect(remoteUrl).toBe( + 'https://github.com/WordPress/wordpress-playground' + ); + + // Verify this is a shallow clone + const isShallow = execSync( + 'git rev-parse --is-shallow-repository', + gitEnv + ) + .toString() + .trim(); + expect(isShallow).toBe('true'); + + // Verify the shallow file contains the expected commit + const shallowCommit = execSync('cat .git/shallow', gitEnv) + .toString() + .trim(); + expect(shallowCommit).toBe(commit); + + // Verify the expected files exist in the git index + const lsFiles = execSync('git ls-files', gitEnv) + .toString() + .trim() + .split('\n') + .filter((f) => f.length > 0) + .sort(); + expect(lsFiles).toEqual([ + '01-what-are-blueprints-what-you-can-do-with-them.md', + '02-how-to-load-run-blueprints.md', + '03-build-your-first-blueprint.md', + 'index.md', + ]); + + // Verify we can run git log to see commit history + const logOutput = execSync('git log --oneline -n 1', gitEnv) + .toString() + .trim(); + expect(logOutput).toContain(commit.substring(0, 7)); + + // Update the git index to match the actual files on disk + execSync('git add -A', gitEnv); + + // Modify a file and verify git status detects the change + const fileToModify = join(tmpDir, 'index.md'); + await writeFile(fileToModify, 'modified content\n', 'utf8'); + const statusAfterModification = execSync( + 'git status --porcelain', + gitEnv + ) + .toString() + .trim(); + // Git status should show the file as modified (can be ' M' or 'M ') + expect(statusAfterModification).toMatch(/M.*index\.md/); + } finally { + // Clean up the temporary directory + await rm(tmpDir, { recursive: true, force: true }); + } + }); }); describe('name', () => { diff --git a/packages/playground/blueprints/src/lib/v1/resources.ts b/packages/playground/blueprints/src/lib/v1/resources.ts index c9ce9a1d66..675ca531f5 100644 --- a/packages/playground/blueprints/src/lib/v1/resources.ts +++ b/packages/playground/blueprints/src/lib/v1/resources.ts @@ -16,6 +16,7 @@ import { zipNameToHumanName } from '../utils/zip-name-to-human-name'; import { fetchWithCorsProxy } from '@php-wasm/web'; import { StreamedFile } from '@php-wasm/stream-compression'; import type { StreamBundledFile } from './types'; +import { createDotGitDirectory } from '@wp-playground/storage'; export type { FileTree }; export const ResourceTypes = [ @@ -74,6 +75,8 @@ export type GitDirectoryReference = { refType?: GitDirectoryRefType; /** The path to the directory in the git repository. Defaults to the repo root. */ path?: string; + /** When true, include a `.git` directory with Git metadata (experimental). */ + '.git'?: boolean; }; export interface Directory { files: FileTree; @@ -579,12 +582,35 @@ export class GitDirectoryResource extends Resource { const requestedPath = (this.reference.path ?? '').replace(/^\/+/, ''); const filesToClone = listDescendantFiles(allFiles, requestedPath); - let files = await sparseCheckout(repoUrl, commitHash, filesToClone); + const checkout = await sparseCheckout( + repoUrl, + commitHash, + filesToClone, + { + withObjects: this.reference['.git'], + } + ); + let files = checkout.files; // Remove the path prefix from the cloned file names. 
files = mapKeys(files, (name) => name.substring(requestedPath.length).replace(/^\/+/, '') ); + if (this.reference['.git']) { + const gitFiles = await createDotGitDirectory({ + repoUrl: this.reference.url, + commitHash, + ref: this.reference.ref, + refType: this.reference.refType, + objects: checkout.objects ?? [], + fileOids: checkout.fileOids ?? {}, + pathPrefix: requestedPath, + }); + files = { + ...gitFiles, + ...files, + }; + } return { name: this.filename, files, diff --git a/packages/playground/blueprints/tsconfig.lib.json b/packages/playground/blueprints/tsconfig.lib.json index 829b0bc14c..a623dd0b3c 100644 --- a/packages/playground/blueprints/tsconfig.lib.json +++ b/packages/playground/blueprints/tsconfig.lib.json @@ -5,6 +5,9 @@ "declaration": true, "types": ["node"] }, - "include": ["src/**/*.ts"], + "include": [ + "src/**/*.ts", + "../storage/src/lib/git-create-dotgit-directory.ts" + ], "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"] } diff --git a/packages/playground/components/src/demos/GitBrowserDemo.tsx b/packages/playground/components/src/demos/GitBrowserDemo.tsx index 5936ce5e75..fab392d9be 100644 --- a/packages/playground/components/src/demos/GitBrowserDemo.tsx +++ b/packages/playground/components/src/demos/GitBrowserDemo.tsx @@ -73,9 +73,9 @@ export default function GitBrowserDemo() { Object.keys(filesToCheckout) ); const checkedOutFiles: Record = {}; - for (const filename in result) { + for (const filename in result.files) { checkedOutFiles[filename] = new TextDecoder().decode( - result[filename] + result.files[filename] ); } setCheckedOutFiles(checkedOutFiles); diff --git a/packages/playground/storage/src/index.ts b/packages/playground/storage/src/index.ts index 776099b921..5587719e52 100644 --- a/packages/playground/storage/src/index.ts +++ b/packages/playground/storage/src/index.ts @@ -3,5 +3,6 @@ export * from './lib/changeset'; export * from './lib/playground'; export * from './lib/browser-fs'; export * from './lib/git-sparse-checkout'; +export * from './lib/git-create-dotgit-directory'; export * from './lib/paths'; export * from './lib/filesystems'; diff --git a/packages/playground/storage/src/lib/git-create-dotgit-directory.ts b/packages/playground/storage/src/lib/git-create-dotgit-directory.ts new file mode 100644 index 0000000000..21ae6d4b35 --- /dev/null +++ b/packages/playground/storage/src/lib/git-create-dotgit-directory.ts @@ -0,0 +1,228 @@ +import { GitIndex } from 'isomorphic-git/src/models/GitIndex.js'; +import type { SparseCheckoutObject } from './git-sparse-checkout'; +import pako from 'pako'; +const deflate = pako.deflate; + +type GitDirectoryRefType = 'branch' | 'tag' | 'commit' | 'refname'; + +type GitHeadInfo = { + headContent: string; + branchName?: string; + branchRef?: string; + tagName?: string; +}; + +const FULL_SHA_REGEX = /^[0-9a-f]{40}$/i; + +/** + * Creates loose Git object files from sparse checkout objects. + * Each object is compressed using deflate and stored in the Git objects directory. 
+ */ +async function createLooseGitObjectFiles( + objects: SparseCheckoutObject[] +): Promise> { + const files: Record = {}; + const encoder = new TextEncoder(); + + await Promise.all( + objects.map(async ({ oid, type, body }) => { + if (!oid || body.length === 0) { + return; + } + const header = encoder.encode(`${type} ${body.length}\0`); + const combined = new Uint8Array(header.length + body.length); + combined.set(header, 0); + combined.set(body, header.length); + const compressed = await deflate(combined); + const prefix = oid.slice(0, 2); + const suffix = oid.slice(2); + files[`.git/objects/${prefix}/${suffix}`] = compressed; + }) + ); + + return files; +} + +/** + * Resolves the HEAD reference information based on the ref type and value. + */ +function resolveHeadInfo( + ref: string, + refType: GitDirectoryRefType | undefined, + commitHash: string +): GitHeadInfo { + const trimmed = ref?.trim() ?? ''; + let fullRef: string | null = null; + + switch (refType) { + case 'branch': + if (trimmed) { + fullRef = `refs/heads/${trimmed}`; + } + break; + case 'refname': + fullRef = trimmed || null; + break; + case 'tag': + if (trimmed.startsWith('refs/')) { + fullRef = trimmed; + } else if (trimmed) { + fullRef = `refs/tags/${trimmed}`; + } + break; + case 'commit': + fullRef = null; + break; + default: + if (trimmed.startsWith('refs/')) { + fullRef = trimmed; + } else if (FULL_SHA_REGEX.test(trimmed)) { + fullRef = null; + } else if (trimmed && trimmed !== 'HEAD') { + fullRef = `refs/heads/${trimmed}`; + } + break; + } + + const headContent = fullRef ? `ref: ${fullRef}\n` : `${commitHash}\n`; + + const branchRef = + fullRef && fullRef.startsWith('refs/heads/') ? fullRef : undefined; + const branchName = branchRef?.slice('refs/heads/'.length); + + const tagRef = + fullRef && fullRef.startsWith('refs/tags/') ? fullRef : undefined; + const tagName = tagRef?.slice('refs/tags/'.length); + + return { + headContent, + branchName, + branchRef, + tagName, + }; +} + +/** + * Builds a Git config file content with remote and branch configuration. + */ +function buildGitConfig( + repoUrl: string, + { + branchName, + partialCloneFilter, + }: { branchName?: string; partialCloneFilter?: string } +): string { + const repositoryFormatVersion = partialCloneFilter ? 1 : 0; + const lines = [ + '[core]', + `\trepositoryformatversion = ${repositoryFormatVersion}`, + '\tfilemode = true', + '\tbare = false', + '\tlogallrefupdates = true', + '\tignorecase = true', + '\tprecomposeunicode = true', + '[remote "origin"]', + `\turl = ${repoUrl}`, + '\tfetch = +refs/heads/*:refs/remotes/origin/*', + '\tfetch = +refs/tags/*:refs/tags/*', + ]; + if (partialCloneFilter) { + lines.push('\tpromisor = true'); + lines.push(`\tpartialclonefilter = ${partialCloneFilter}`); + lines.push('[extensions]'); + lines.push('\tpartialclone = origin'); + } + if (branchName) { + lines.push( + `[branch "${branchName}"]`, + '\tremote = origin', + `\tmerge = refs/heads/${branchName}` + ); + } + return lines.join('\n') + '\n'; +} + +/** + * Creates a complete .git directory structure with all necessary files. + * This includes HEAD, config, refs, objects, and the Git index. 
+ */ +export async function createDotGitDirectory({ + repoUrl, + commitHash, + ref, + refType, + objects, + fileOids, + pathPrefix, +}: { + repoUrl: string; + commitHash: string; + ref: string; + refType?: GitDirectoryRefType; + objects: SparseCheckoutObject[]; + fileOids: Record; + pathPrefix: string; +}): Promise> { + const gitFiles: Record = {}; + const headInfo = resolveHeadInfo(ref, refType, commitHash); + + gitFiles['.git/HEAD'] = headInfo.headContent; + gitFiles['.git/config'] = buildGitConfig(repoUrl, { + branchName: headInfo.branchName, + }); + gitFiles['.git/description'] = 'WordPress Playground clone\n'; + gitFiles['.git/shallow'] = `${commitHash}\n`; + + // Create refs/ directory structure + gitFiles['.git/refs/heads/.gitkeep'] = ''; + gitFiles['.git/refs/tags/.gitkeep'] = ''; + gitFiles['.git/refs/remotes/.gitkeep'] = ''; + + if (headInfo.branchRef && headInfo.branchName) { + gitFiles['.git/logs/HEAD'] = `ref: ${headInfo.branchRef}\n`; + gitFiles[`.git/${headInfo.branchRef}`] = `${commitHash}\n`; + gitFiles[ + `.git/refs/remotes/origin/${headInfo.branchName}` + ] = `${commitHash}\n`; + gitFiles[ + '.git/refs/remotes/origin/HEAD' + ] = `ref: refs/remotes/origin/${headInfo.branchName}\n`; + } + + if (headInfo.tagName) { + gitFiles[`.git/refs/tags/${headInfo.tagName}`] = `${commitHash}\n`; + } + + // Use loose objects only, no packfiles + Object.assign(gitFiles, await createLooseGitObjectFiles(objects)); + + // Create the git index + const index = new GitIndex(); + for (const [path, oid] of Object.entries(fileOids)) { + // Remove the path prefix to get the working tree relative path + const workingTreePath = path + .substring(pathPrefix.length) + .replace(/^\/+/, ''); + index.insert({ + filepath: workingTreePath, + oid, + stats: { + ctimeSeconds: 0, + ctimeNanoseconds: 0, + mtimeSeconds: 0, + mtimeNanoseconds: 0, + dev: 0, + ino: 0, + mode: 0o100644, // Regular file + uid: 0, + gid: 0, + size: 0, + }, + }); + } + const indexBuffer = await index.toObject(); + // Convert Buffer to Uint8Array - copy the data to ensure it's a proper Uint8Array + gitFiles['.git/index'] = Uint8Array.from(indexBuffer); + + return gitFiles; +} diff --git a/packages/playground/storage/src/lib/git-sparse-checkout.spec.ts b/packages/playground/storage/src/lib/git-sparse-checkout.spec.ts index a908aff0f1..1026bdd5a4 100644 --- a/packages/playground/storage/src/lib/git-sparse-checkout.spec.ts +++ b/packages/playground/storage/src/lib/git-sparse-checkout.spec.ts @@ -97,7 +97,7 @@ describe('resolveCommitHash', () => { }); describe('sparseCheckout', () => { - it('should retrieve the requested files from a git repo', async () => { + it('should retrieve the requested files and objects from a git repo when withObjects is true', async () => { const commitHash = await resolveCommitHash( 'https://github.com/WordPress/wordpress-playground.git', { @@ -105,15 +105,47 @@ describe('sparseCheckout', () => { type: 'branch', } ); - const files = await sparseCheckout( + const result = await sparseCheckout( 'https://github.com/WordPress/wordpress-playground.git', commitHash, - ['README.md'] + ['README.md'], + { + withObjects: true, + } + ); + expect(result.files).toEqual({ + 'README.md': expect.any(Uint8Array), + }); + expect(result.files['README.md'].length).toBeGreaterThan(0); + expect(result.packfiles?.length).toBeGreaterThan(0); + expect(result.packfiles?.some((packfile) => packfile.promisor)).toBe( + true + ); + expect(result.objects?.length).toBeGreaterThan(0); + }); + + it('should retrieve only the requested files from a 
git repo when withObjects is false', async () => { + const commitHash = await resolveCommitHash( + 'https://github.com/WordPress/wordpress-playground.git', + { + value: 'trunk', + type: 'branch', + } + ); + const result = await sparseCheckout( + 'https://github.com/WordPress/wordpress-playground.git', + commitHash, + ['README.md'], + { + withObjects: false, + } ); - expect(files).toEqual({ + expect(result.files).toEqual({ 'README.md': expect.any(Uint8Array), }); - expect(files['README.md'].length).toBeGreaterThan(0); + expect(result.files['README.md'].length).toBeGreaterThan(0); + expect(result.packfiles).toBeUndefined(); + expect(result.objects).toBeUndefined(); }); }); diff --git a/packages/playground/storage/src/lib/git-sparse-checkout.ts b/packages/playground/storage/src/lib/git-sparse-checkout.ts index 24d39cdb21..a977fc33bf 100644 --- a/packages/playground/storage/src/lib/git-sparse-checkout.ts +++ b/packages/playground/storage/src/lib/git-sparse-checkout.ts @@ -39,31 +39,97 @@ if (typeof globalThis.Buffer === 'undefined') { * @param fullyQualifiedBranchName The full name of the branch to fetch from (e.g., 'refs/heads/main'). * @param filesPaths An array of all the file paths to fetch from the repository. Does **not** accept * patterns, wildcards, directory paths. All files must be explicitly listed. - * @returns A record where keys are file paths and values are the retrieved file contents. + * @returns The requested files and packfiles required to recreate the Git objects locally. */ +export type SparseCheckoutPackfile = { + name: string; + pack: Uint8Array; + index: Uint8Array; + promisor?: boolean; +}; + +export type SparseCheckoutObject = { + oid: string; + type: 'blob' | 'tree' | 'commit' | 'tag'; + body: Uint8Array; +}; + +export type SparseCheckoutResult = { + files: Record; + packfiles?: SparseCheckoutPackfile[]; + objects?: SparseCheckoutObject[]; + fileOids?: Record; +}; + export async function sparseCheckout( repoUrl: string, commitHash: string, - filesPaths: string[] -) { - const treesIdx = await fetchWithoutBlobs(repoUrl, commitHash); - const objects = await resolveObjects(treesIdx, commitHash, filesPaths); + filesPaths: string[], + options?: { + withObjects?: boolean; + } +): Promise { + const treesPack = await fetchWithoutBlobs(repoUrl, commitHash); + const objects = await resolveObjects(treesPack.idx, commitHash, filesPaths); - const blobsIdx = await fetchObjects( - repoUrl, - filesPaths.map((path) => objects[path].oid) - ); + const blobOids = filesPaths.map((path) => objects[path].oid); + const blobsPack = + blobOids.length > 0 ? await fetchObjects(repoUrl, blobOids) : null; const fetchedPaths: Record = {}; await Promise.all( filesPaths.map(async (path) => { + if (!blobsPack) { + return; + } fetchedPaths[path] = await extractGitObjectFromIdx( - blobsIdx, + blobsPack.idx, objects[path].oid ); }) ); - return fetchedPaths; + + /** + * Short-circuit if the consumer doesn't need additional details about + * the Git objects. 
+ */ + if (!options?.withObjects) { + return { files: fetchedPaths }; + } + + const packfiles: SparseCheckoutPackfile[] = []; + const treesIndex = await treesPack.idx.toBuffer(); + packfiles.push({ + name: `pack-${treesPack.idx.packfileSha}`, + pack: treesPack.packfile, + index: toUint8Array(treesIndex), + promisor: treesPack.promisor, + }); + + if (blobsPack) { + const blobsIndex = await blobsPack.idx.toBuffer(); + packfiles.push({ + name: `pack-${blobsPack.idx.packfileSha}`, + pack: blobsPack.packfile, + index: toUint8Array(blobsIndex), + promisor: blobsPack.promisor, + }); + } + + const fileOids: Record = {}; + for (const path of filesPaths) { + fileOids[path] = objects[path].oid; + } + + return { + files: fetchedPaths, + packfiles, + objects: [ + ...(await collectLooseObjects(treesPack)), + ...(await collectLooseObjects(blobsPack)), + ], + fileOids, + }; } export type GitFileTreeFile = { @@ -113,8 +179,8 @@ export async function listGitFiles( repoUrl: string, commitHash: string ): Promise { - const treesIdx = await fetchWithoutBlobs(repoUrl, commitHash); - const rootTree = await resolveAllObjects(treesIdx, commitHash); + const treesPack = await fetchWithoutBlobs(repoUrl, commitHash); + const rootTree = await resolveAllObjects(treesPack.idx, commitHash); if (!rootTree?.object) { return []; } @@ -176,7 +242,7 @@ export async function listGitRefs( fullyQualifiedBranchPrefix: string ) { const packbuffer = Buffer.from( - await collect([ + (await collect([ GitPktLine.encode(`command=ls-refs\n`), GitPktLine.encode(`agent=git/2.37.3\n`), GitPktLine.encode(`object-format=sha1\n`), @@ -184,7 +250,7 @@ export async function listGitRefs( GitPktLine.encode(`peel\n`), GitPktLine.encode(`ref-prefix ${fullyQualifiedBranchPrefix}\n`), GitPktLine.flush(), - ]) + ])) as any ); const response = await fetch(repoUrl + '/git-upload-pack', { @@ -195,7 +261,7 @@ export async function listGitRefs( 'Content-Length': `${packbuffer.length}`, 'Git-Protocol': 'version=2', }, - body: packbuffer, + body: packbuffer as any, }); const refs: Record = {}; @@ -324,7 +390,7 @@ async function fetchRefOid(repoUrl: string, refname: string) { async function fetchWithoutBlobs(repoUrl: string, commitHash: string) { const packbuffer = Buffer.from( - await collect([ + (await collect([ GitPktLine.encode( `want ${commitHash} multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.37.3 filter \n` ), @@ -334,7 +400,7 @@ async function fetchWithoutBlobs(repoUrl: string, commitHash: string) { GitPktLine.flush(), GitPktLine.encode(`done\n`), GitPktLine.encode(`done\n`), - ]) + ])) as any ); const response = await fetch(repoUrl + '/git-upload-pack', { @@ -344,12 +410,12 @@ async function fetchWithoutBlobs(repoUrl: string, commitHash: string) { 'content-type': 'application/x-git-upload-pack-request', 'Content-Length': `${packbuffer.length}`, }, - body: packbuffer, + body: packbuffer as any, }); const iterator = streamToIterator(response.body!); const parsed = await parseUploadPackResponse(iterator); - const packfile = Buffer.from(await collect(parsed.packfile)); + const packfile = Buffer.from((await collect(parsed.packfile)) as any); const idx = await GitPackIndex.fromPack({ pack: packfile, }); @@ -359,7 +425,11 @@ async function fetchWithoutBlobs(repoUrl: string, commitHash: string) { result.oid = oid; return result; }; - return idx; + return { + idx, + packfile: toUint8Array(packfile), + promisor: true, + }; } async function resolveAllObjects(idx: GitPackIndex, commitHash: string) { @@ -386,6 +456,43 @@ async function 
resolveAllObjects(idx: GitPackIndex, commitHash: string) { return rootItem; } +async function collectLooseObjects( + pack?: { + idx: GitPackIndex; + packfile: Uint8Array; + promisor?: boolean; + } | null +): Promise { + if (!pack) { + return []; + } + const results: SparseCheckoutObject[] = []; + const seen = new Set(); + for (const oid of pack.idx.hashes ?? []) { + if (seen.has(oid)) { + continue; + } + const offset = pack.idx.offsets.get(oid); + if (offset === undefined) { + continue; + } + const { type, object } = await pack.idx.readSlice({ start: offset }); + if (type === 'ofs_delta' || type === 'ref_delta') { + continue; + } + if (!object) { + continue; + } + seen.add(oid); + results.push({ + oid, + type: type as SparseCheckoutObject['type'], + body: toUint8Array(object as Uint8Array), + }); + } + return results; +} + async function resolveObjects( idx: GitPackIndex, commitHash: string, @@ -434,7 +541,7 @@ async function resolveObjects( // Request oid for each resolvedRef async function fetchObjects(url: string, objectHashes: string[]) { const packbuffer = Buffer.from( - await collect([ + (await collect([ ...objectHashes.map((objectHash) => GitPktLine.encode( `want ${objectHash} multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.37.3 \n` @@ -442,7 +549,7 @@ async function fetchObjects(url: string, objectHashes: string[]) { ), GitPktLine.flush(), GitPktLine.encode(`done\n`), - ]) + ])) as any ); const response = await fetch(url + '/git-upload-pack', { @@ -452,15 +559,30 @@ async function fetchObjects(url: string, objectHashes: string[]) { 'content-type': 'application/x-git-upload-pack-request', 'Content-Length': `${packbuffer.length}`, }, - body: packbuffer, + body: packbuffer as any, }); const iterator = streamToIterator(response.body!); const parsed = await parseUploadPackResponse(iterator); - const packfile = Buffer.from(await collect(parsed.packfile)); - return await GitPackIndex.fromPack({ + const packfile = Buffer.from((await collect(parsed.packfile)) as any); + if (packfile.byteLength === 0) { + const idx = await GitPackIndex.fromPack({ + pack: packfile, + }); + return { + idx, + packfile: new Uint8Array(), + promisor: false, + }; + } + const idx = await GitPackIndex.fromPack({ pack: packfile, }); + return { + idx, + packfile: toUint8Array(packfile), + promisor: false, + }; } async function extractGitObjectFromIdx(idx: GitPackIndex, objectHash: string) { @@ -545,3 +667,10 @@ function streamToIterator(stream: any) { }, }; } + +function toUint8Array(buffer: Uint8Array | Buffer) { + if (buffer instanceof Uint8Array) { + return Uint8Array.from(buffer); + } + return Uint8Array.from(buffer); +} diff --git a/packages/playground/storage/src/lib/isomorphic-git.d.ts b/packages/playground/storage/src/lib/isomorphic-git.d.ts index 7b95b48ecf..f8c0857a81 100644 --- a/packages/playground/storage/src/lib/isomorphic-git.d.ts +++ b/packages/playground/storage/src/lib/isomorphic-git.d.ts @@ -1,3 +1,26 @@ +declare module 'isomorphic-git/src/models/GitIndex.js' { + export class GitIndex { + constructor(entries?: Map, unmergedPaths?: Set); + insert(entry: { + filepath: string; + oid: string; + stats: { + ctimeSeconds: number; + ctimeNanoseconds: number; + mtimeSeconds: number; + mtimeNanoseconds: number; + dev: number; + ino: number; + mode: number; + uid: number; + gid: number; + size: number; + }; + }): void; + toObject(): Promise; + } +} + declare module 'isomorphic-git/src/models/GitPktLine.js' { export class GitPktLine { static encode(data: string): Buffer; @@ -72,6 +95,20 
@@ declare module 'isomorphic-git/src/models/GitPackIndex.js' { export class GitPackIndex { static fromPack({ pack }: { pack: Buffer }): Promise; read({ oid }: { oid: string }): Promise; + toBuffer(): Promise; + packfileSha: string; + hashes?: string[]; + offsets: Map; + readSlice({ start }: { start: number }): Promise<{ + type: + | 'blob' + | 'tree' + | 'commit' + | 'tag' + | 'ofs_delta' + | 'ref_delta'; + object?: Buffer | Uint8Array; + }>; } }
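
Usage sketch for the new `'.git'` flag documented in `04-resources.md` above. The import path is an assumption (the type itself is declared in `packages/playground/blueprints/src/lib/v1/resources.ts`), and the repository URL, ref, and path are placeholders borrowed from `resources.spec.ts`:

```ts
// Assumption: GitDirectoryReference is re-exported from the blueprints
// package entry point; it is declared in src/lib/v1/resources.ts.
import type { GitDirectoryReference } from '@wp-playground/blueprints';

// A git:directory reference opting into the experimental `.git` metadata.
// URL, ref, and path are placeholders taken from the spec above.
const tutorialDocs: GitDirectoryReference = {
	resource: 'git:directory',
	url: 'https://github.com/WordPress/wordpress-playground',
	ref: 'trunk',
	refType: 'branch',
	path: 'packages/docs/site/docs/blueprints/tutorial',
	'.git': true,
};
```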
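
A minimal sketch of how the new storage primitives compose, mirroring the `GitDirectoryResource.resolve()` change in this PR: `sparseCheckout()` is called with `withObjects: true`, and its `objects` / `fileOids` are fed into `createDotGitDirectory()`. It assumes `resolveCommitHash`, `sparseCheckout`, and `createDotGitDirectory` are all reachable from `@wp-playground/storage` via the re-export added to `packages/playground/storage/src/index.ts`:

```ts
import {
	resolveCommitHash,
	sparseCheckout,
	createDotGitDirectory,
} from '@wp-playground/storage';

/**
 * Fetches the requested files from a repository and reconstructs a
 * loose-object .git directory next to them, the same way
 * GitDirectoryResource.resolve() does in this PR.
 */
async function checkoutWithDotGit(
	repoUrl: string,
	branch: string,
	paths: string[]
) {
	// Resolve the branch tip to a commit hash first.
	const commitHash = await resolveCommitHash(repoUrl, {
		value: branch,
		type: 'branch',
	});

	// withObjects: true also returns the raw Git objects and per-file OIDs
	// needed to rebuild the .git directory locally.
	const checkout = await sparseCheckout(repoUrl, commitHash, paths, {
		withObjects: true,
	});

	// Builds HEAD, config, shallow, refs, loose objects, and the index.
	const gitFiles = await createDotGitDirectory({
		repoUrl,
		commitHash,
		ref: branch,
		refType: 'branch',
		objects: checkout.objects ?? [],
		fileOids: checkout.fileOids ?? {},
		pathPrefix: '', // checking out from the repository root here
	});

	// Checked-out files win over .git metadata on any name collision,
	// matching the spread order used in GitDirectoryResource.resolve().
	return { ...gitFiles, ...checkout.files };
}

// Hypothetical usage:
// const files = await checkoutWithDotGit(
// 	'https://github.com/WordPress/wordpress-playground.git',
// 	'trunk',
// 	['README.md']
// );
```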