From 5f52928b63054da67367d831a65987c9c77011db Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 12:39:36 -0500 Subject: [PATCH 01/21] feat: Add initial support for LFS Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 86 ++++- packages/keystatic/src/app/git-lfs.test.ts | 134 ++++++++ packages/keystatic/src/app/git-lfs.ts | 371 +++++++++++++++++++++ packages/keystatic/src/app/shell/data.tsx | 72 +++- packages/keystatic/src/app/updating.tsx | 21 ++ packages/keystatic/src/app/useItemData.ts | 36 +- packages/keystatic/src/config.tsx | 1 + 7 files changed, 699 insertions(+), 22 deletions(-) create mode 100644 packages/keystatic/src/app/git-lfs.test.ts create mode 100644 packages/keystatic/src/app/git-lfs.ts diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index f74d5e810..4234d092e 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -155,9 +155,11 @@ export function makeGenericAPIRouteHandler( if (joined === 'github/repo-not-found') { return githubRepoNotFound(req, config); } + if (joined === 'github/lfs') { + return githubLfsProxy(req); + } if (joined === 'github/logout') { - const cookies = cookie.parse(req.headers.get('cookie') ?? ''); - const access_token = cookies['keystatic-gh-access-token']; + const access_token = getAccessToken(req); if (access_token) { await fetch( `https://api.github.com/applications/${config.clientId}/token`, @@ -291,6 +293,11 @@ async function getTokenCookies( return headers; } +function getAccessToken(req: KeystaticRequest): string | undefined { + const cookies = cookie.parse(req.headers.get('cookie') ?? 
''); + return cookies['keystatic-gh-access-token'] || undefined; +} + async function getRefreshToken( req: KeystaticRequest, config: InnerAPIRouteConfig @@ -407,6 +414,81 @@ async function createdGithubApp( return handleGitHubAppCreation(req, slugEnvVarName); } +async function githubLfsProxy( + req: KeystaticRequest +): Promise { + const accessToken = getAccessToken(req); + if (!accessToken) { + return { status: 401, body: 'Unauthorized' }; + } + + let payload: { + url: string; + method: string; + headers: Record; + body?: string; + }; + try { + payload = await req.json(); + } catch { + return { status: 400, body: 'Invalid JSON body' }; + } + + const targetUrl = new URL(payload.url); + const allowedHosts = [ + 'github.com', + 'github-cloud.s3.amazonaws.com', + 'github-cloud.githubusercontent.com', + ]; + if ( + !allowedHosts.some( + host => + targetUrl.hostname === host || + targetUrl.hostname.endsWith('.' + host) + ) + ) { + return { + status: 403, + body: 'LFS proxy only allows requests to GitHub hosts', + }; + } + + const headers: Record = { ...payload.headers }; + if (targetUrl.hostname === 'github.com') { + headers['Authorization'] = `Bearer ${accessToken}`; + } + + const body = + payload.body != null + ? 
Uint8Array.from(atob(payload.body), c => c.charCodeAt(0)) + : undefined; + + const response = await fetch(payload.url, { + method: payload.method, + headers, + body, + }); + + const responseBytes = new Uint8Array(await response.arrayBuffer()); + const responseHeaders: Record = {}; + const skipHeaders = new Set([ + 'transfer-encoding', + 'content-encoding', + 'content-length', + ]); + for (const [key, value] of response.headers.entries()) { + if (!skipHeaders.has(key.toLowerCase())) { + responseHeaders[key] = value; + } + } + + return { + status: response.status, + headers: responseHeaders, + body: responseBytes, + }; +} + function immediatelyExpiringCookie(name: string) { return cookie.serialize(name, '', { secure: process.env.NODE_ENV === 'production', diff --git a/packages/keystatic/src/app/git-lfs.test.ts b/packages/keystatic/src/app/git-lfs.test.ts new file mode 100644 index 000000000..ace120905 --- /dev/null +++ b/packages/keystatic/src/app/git-lfs.test.ts @@ -0,0 +1,134 @@ +/** @jest-environment node */ +import { webcrypto } from 'node:crypto'; +import { expect, test, describe } from '@jest/globals'; +import { + parseGitAttributes, + isLfsTracked, + isLfsPointer, + parseLfsPointer, + createLfsPointer, +} from './git-lfs'; + +if (!globalThis.crypto) { + globalThis.crypto = webcrypto as any; +} + +const textEncoder = new TextEncoder(); + +describe('parseGitAttributes', () => { + test('extracts LFS patterns from standard .gitattributes', () => { + const content = [ + '*.png filter=lfs diff=lfs merge=lfs -text', + '*.jpg filter=lfs diff=lfs merge=lfs -text', + '*.md text', + ].join('\n'); + expect(parseGitAttributes(content)).toEqual(['*.png', '*.jpg']); + }); + + test('ignores lines without all three LFS attributes', () => { + const content = '*.png filter=lfs diff=lfs -text'; + expect(parseGitAttributes(content)).toEqual([]); + }); + + test('ignores comments and blank lines', () => { + const content = [ + '# This is a comment', + '', + '*.bin filter=lfs 
diff=lfs merge=lfs -text', + '*.md text # not lfs', + ].join('\n'); + expect(parseGitAttributes(content)).toEqual(['*.bin']); + }); + + test('handles inline comments after LFS attributes', () => { + const content = '*.psd filter=lfs diff=lfs merge=lfs -text # large files'; + expect(parseGitAttributes(content)).toEqual(['*.psd']); + }); + + test('returns empty array for empty content', () => { + expect(parseGitAttributes('')).toEqual([]); + }); +}); + +describe('isLfsTracked', () => { + const patterns = ['*.png', '*.jpg', 'assets/**/*.gif']; + + test('matches simple extension pattern', () => { + expect(isLfsTracked('images/photo.png', patterns)).toBe(true); + expect(isLfsTracked('deep/nested/file.jpg', patterns)).toBe(true); + }); + + test('does not match non-tracked extensions', () => { + expect(isLfsTracked('README.md', patterns)).toBe(false); + expect(isLfsTracked('src/app.ts', patterns)).toBe(false); + }); + + test('matches glob patterns with directories', () => { + expect(isLfsTracked('assets/icons/icon.gif', patterns)).toBe(true); + }); + + test('does not match outside glob scope', () => { + expect(isLfsTracked('other/icon.gif', patterns)).toBe(false); + }); + + test('returns false for empty patterns', () => { + expect(isLfsTracked('file.png', [])).toBe(false); + }); +}); + +describe('isLfsPointer / parseLfsPointer / createLfsPointer', () => { + const sampleOid = + 'abc123def456abc123def456abc123def456abc123def456abc123def456abcd1234'; + const sampleSize = 12345; + + test('createLfsPointer produces valid pointer', () => { + const pointer = createLfsPointer(sampleOid, sampleSize); + const text = new TextDecoder().decode(pointer); + expect(text).toBe( + `version https://git-lfs.github.com/spec/v1\noid sha256:${sampleOid}\nsize ${sampleSize}\n` + ); + }); + + test('isLfsPointer detects valid pointer', () => { + const pointer = createLfsPointer(sampleOid, sampleSize); + expect(isLfsPointer(pointer)).toBe(true); + }); + + test('isLfsPointer rejects non-pointer 
content', () => { + expect(isLfsPointer(textEncoder.encode('hello world'))).toBe(false); + expect(isLfsPointer(new Uint8Array(300))).toBe(false); + expect(isLfsPointer(new Uint8Array(10))).toBe(false); + }); + + test('parseLfsPointer extracts oid and size', () => { + const pointer = createLfsPointer(sampleOid, sampleSize); + const parsed = parseLfsPointer(pointer); + expect(parsed.oid).toBe(sampleOid); + expect(parsed.size).toBe(sampleSize); + }); + + test('parseLfsPointer throws for missing oid', () => { + const content = textEncoder.encode( + 'version https://git-lfs.github.com/spec/v1\nsize 100\n' + ); + expect(() => parseLfsPointer(content)).toThrow('missing or invalid oid'); + }); + + test('parseLfsPointer throws for missing size', () => { + const content = textEncoder.encode( + `version https://git-lfs.github.com/spec/v1\noid sha256:${sampleOid}\n` + ); + expect(() => parseLfsPointer(content)).toThrow('missing size'); + }); + + test('roundtrip: create then parse', () => { + const oid = + '0000000000000000000000000000000000000000000000000000000000000000'; + const size = 999999; + const pointer = createLfsPointer(oid, size); + expect(isLfsPointer(pointer)).toBe(true); + const parsed = parseLfsPointer(pointer); + expect(parsed.oid).toBe(oid); + expect(parsed.size).toBe(size); + }); +}); diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts new file mode 100644 index 000000000..9f6bee004 --- /dev/null +++ b/packages/keystatic/src/app/git-lfs.ts @@ -0,0 +1,371 @@ +import { minimatch } from 'minimatch'; + +const textEncoder = new TextEncoder(); +const textDecoder = new TextDecoder(); + +const LFS_POINTER_PREFIX = 'version https://git-lfs.github.com/spec/v1'; +const LFS_PROXY_PATH = '/api/keystatic/github/lfs'; + +// LFS proxy — all requests go through the server to avoid CORS +// ---------------------------------------------------------------------------- + +async function lfsProxyFetch( + url: string, + init: RequestInit +): 
Promise { + return fetch(LFS_PROXY_PATH, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + url, + method: init.method ?? 'GET', + headers: init.headers ?? {}, + body: init.body != null ? uint8ArrayToBase64(init.body) : undefined, + }), + }); +} + +function uint8ArrayToBase64(data: unknown): string { + const bytes = + data instanceof Uint8Array ? data : new Uint8Array(data as ArrayBuffer); + let binary = ''; + for (let i = 0; i < bytes.byteLength; i++) { + binary += String.fromCharCode(bytes[i]); + } + return btoa(binary); +} + +// .gitattributes parsing +// ---------------------------------------------------------------------------- + +export function parseGitAttributes(content: string): string[] { + return content + .split('\n') + .map(line => line.split('#')[0].trim()) + .filter(line => line.length > 0) + .flatMap(line => { + const [pattern, ...attributes] = line.split(/\s+/); + if (!pattern) return []; + const attrs = parseAttributes(attributes); + if ( + attrs.get('filter') === 'lfs' && + attrs.get('diff') === 'lfs' && + attrs.get('merge') === 'lfs' + ) { + return [pattern]; + } + return []; + }); +} + +function parseAttributes(parts: string[]): Map { + const attrs = new Map(); + for (const part of parts) { + if (part.includes('=')) { + const [key, value] = part.split('=', 2); + attrs.set(key, value); + } else if (part.startsWith('-')) { + attrs.set(part.slice(1), false); + } else { + attrs.set(part, true); + } + } + return attrs; +} + +// Pattern matching +// ---------------------------------------------------------------------------- + +export function isLfsTracked(path: string, patterns: string[]): boolean { + return patterns.some(pattern => minimatch(path, pattern, { matchBase: true })); +} + +// Pointer operations +// ---------------------------------------------------------------------------- + +export function isLfsPointer(content: Uint8Array): boolean { + if (content.byteLength > 200 || content.byteLength 
< 50) return false; + const text = textDecoder.decode(content.slice(0, LFS_POINTER_PREFIX.length)); + return text === LFS_POINTER_PREFIX; +} + +export function parseLfsPointer(content: Uint8Array): { + oid: string; + size: number; +} { + const text = textDecoder.decode(content); + const lines = text.split('\n').filter(l => l.trim().length > 0); + const pairs = new Map(); + for (const line of lines) { + const spaceIdx = line.indexOf(' '); + if (spaceIdx !== -1) { + pairs.set(line.slice(0, spaceIdx), line.slice(spaceIdx + 1)); + } + } + + const oidRaw = pairs.get('oid'); + if (!oidRaw?.startsWith('sha256:')) { + throw new Error(`Invalid LFS pointer: missing or invalid oid`); + } + const sizeRaw = pairs.get('size'); + if (!sizeRaw) { + throw new Error(`Invalid LFS pointer: missing size`); + } + + return { + oid: oidRaw.slice('sha256:'.length), + size: parseInt(sizeRaw, 10), + }; +} + +export function createLfsPointer(oid: string, size: number): Uint8Array { + const text = `version https://git-lfs.github.com/spec/v1\noid sha256:${oid}\nsize ${size}\n`; + return textEncoder.encode(text); +} + +// SHA-256 +// ---------------------------------------------------------------------------- + +async function computeSha256(content: Uint8Array): Promise { + const hashBuffer = await crypto.subtle.digest('SHA-256', content as unknown as ArrayBuffer); + return Array.from(new Uint8Array(hashBuffer)) + .map(b => b.toString(16).padStart(2, '0')) + .join(''); +} + +// LFS Batch API +// ---------------------------------------------------------------------------- + +type LfsBatchObject = { oid: string; size: number }; + +type LfsBatchResponseObject = { + oid: string; + size: number; + authenticated?: boolean; + actions?: { + upload?: { href: string; header?: Record }; + download?: { href: string; header?: Record }; + verify?: { href: string; header?: Record }; + }; + error?: { code: number; message: string }; +}; + +type LfsBatchResponse = { + transfer?: string; + objects: 
LfsBatchResponseObject[]; +}; + +async function lfsBatchRequest( + owner: string, + repo: string, + _token: string, + operation: 'upload' | 'download', + objects: LfsBatchObject[] +): Promise { + const url = `https://github.com/${owner}/${repo}.git/info/lfs/objects/batch`; + const response = await lfsProxyFetch(url, { + method: 'POST', + headers: { + Accept: 'application/vnd.git-lfs+json', + 'Content-Type': 'application/vnd.git-lfs+json', + }, + body: textEncoder.encode( + JSON.stringify({ + operation, + transfers: ['basic'], + objects, + }) + ), + }); + + if (!response.ok) { + const body = await response.text(); + throw new Error( + `LFS batch API error (${response.status}): ${body}` + ); + } + return response.json(); +} + +async function lfsUploadObjects( + batchResponse: LfsBatchResponse, + objectContents: Map +): Promise { + for (const obj of batchResponse.objects) { + if (obj.error) { + throw new Error( + `LFS server error for ${obj.oid}: ${obj.error.message} (${obj.error.code})` + ); + } + const uploadAction = obj.actions?.upload; + if (!uploadAction) continue; // server already has this object + + const content = objectContents.get(obj.oid); + if (!content) { + throw new Error(`Missing content for LFS object ${obj.oid}`); + } + + const uploadResponse = await lfsProxyFetch(uploadAction.href, { + method: 'PUT', + headers: uploadAction.header ?? {}, + body: content as unknown as BodyInit, + }); + + if (!uploadResponse.ok) { + const body = await uploadResponse.text(); + throw new Error( + `LFS upload failed for ${obj.oid} (${uploadResponse.status}): ${body}` + ); + } + + if (obj.actions?.verify) { + const verifyResponse = await lfsProxyFetch(obj.actions.verify.href, { + method: 'POST', + headers: { + 'Content-Type': 'application/vnd.git-lfs+json', + ...(obj.actions.verify.header ?? 
{}), + }, + body: textEncoder.encode( + JSON.stringify({ oid: obj.oid, size: obj.size }) + ), + }); + if (!verifyResponse.ok) { + const body = await verifyResponse.text(); + throw new Error( + `LFS verify failed for ${obj.oid} (${verifyResponse.status}): ${body}` + ); + } + } + } +} + +async function lfsDownloadObjects( + batchResponse: LfsBatchResponse +): Promise> { + const results = new Map(); + for (const obj of batchResponse.objects) { + if (obj.error) { + console.warn( + `LFS download error for ${obj.oid}: ${obj.error.message} (${obj.error.code})` + ); + continue; + } + const downloadAction = obj.actions?.download; + if (!downloadAction) { + console.warn(`No download action for LFS object ${obj.oid}`); + continue; + } + const response = await lfsProxyFetch(downloadAction.href, { + headers: downloadAction.header ?? {}, + }); + if (!response.ok) { + console.warn( + `LFS download failed for ${obj.oid} (${response.status})` + ); + continue; + } + const buffer = await response.arrayBuffer(); + results.set(obj.oid, new Uint8Array(buffer)); + } + return results; +} + +// High-level orchestrators +// ---------------------------------------------------------------------------- + +export async function processLfsAdditions( + additions: { path: string; contents: Uint8Array }[], + owner: string, + repo: string, + token: string, + patterns: string[] +): Promise<{ path: string; contents: Uint8Array }[]> { + const lfsAdditions: { + index: number; + oid: string; + size: number; + contents: Uint8Array; + }[] = []; + + const result = [...additions]; + + for (let i = 0; i < additions.length; i++) { + const addition = additions[i]; + if (isLfsTracked(addition.path, patterns)) { + const oid = await computeSha256(addition.contents); + lfsAdditions.push({ + index: i, + oid, + size: addition.contents.byteLength, + contents: addition.contents, + }); + } + } + + if (lfsAdditions.length === 0) return result; + + const objectContents = new Map( + lfsAdditions.map(a => [a.oid, 
a.contents]) + ); + + const batchResponse = await lfsBatchRequest( + owner, + repo, + token, + 'upload', + lfsAdditions.map(a => ({ oid: a.oid, size: a.size })) + ); + + await lfsUploadObjects(batchResponse, objectContents); + + for (const lfsAddition of lfsAdditions) { + result[lfsAddition.index] = { + path: additions[lfsAddition.index].path, + contents: createLfsPointer(lfsAddition.oid, lfsAddition.size), + }; + } + + return result; +} + +export async function resolveLfsPointers( + blobs: Map, + owner: string, + repo: string, + token: string +): Promise> { + const pointers: { path: string; oid: string; size: number }[] = []; + + for (const [path, content] of blobs) { + if (isLfsPointer(content)) { + try { + const { oid, size } = parseLfsPointer(content); + pointers.push({ path, oid, size }); + } catch { + // not a valid pointer — treat as a normal file + } + } + } + + if (pointers.length === 0) return blobs; + + const batchResponse = await lfsBatchRequest( + owner, + repo, + token, + 'download', + pointers.map(p => ({ oid: p.oid, size: p.size })) + ); + + const downloaded = await lfsDownloadObjects(batchResponse); + + const resolved = new Map(blobs); + for (const pointer of pointers) { + const content = downloaded.get(pointer.oid); + if (content) { + resolved.set(pointer.path, content); + } + } + + return resolved; +} diff --git a/packages/keystatic/src/app/shell/data.tsx b/packages/keystatic/src/app/shell/data.tsx index d19ac523e..fd79dea69 100644 --- a/packages/keystatic/src/app/shell/data.tsx +++ b/packages/keystatic/src/app/shell/data.tsx @@ -50,6 +50,8 @@ import { } from '../object-cache'; import { CollabProvider } from './collab'; import { EmptyRepo } from './empty-repo'; +import { parseGitAttributes } from '../git-lfs'; +import { fetchBlob } from '../useItemData'; export function fetchLocalTree(sha: string) { if (treeCache.has(sha)) { @@ -438,22 +440,64 @@ export function GitHubAppShellProvider(props: { repo?.owner.login, ]); + const baseCommitSha = + 
currentBranchRef?.target?.__typename === 'Commit' + ? currentBranchRef.target.oid + : ''; + + const lfsPatterns = useData( + useCallback(() => { + if ( + props.config.storage.kind !== 'github' || + !props.config.storage.lfs + ) { + return [] as string[]; + } + if (currentBranchTree.kind !== 'loaded' || !repoInfo) return LOADING; + const node = getTreeNodeAtPath( + currentBranchTree.data.tree, + '.gitattributes' + ); + if (!node) return [] as string[]; + const blob = fetchBlob( + props.config, + node.entry.sha, + '.gitattributes', + baseCommitSha, + repoInfo + ); + if (blob instanceof Uint8Array) { + return parseGitAttributes(new TextDecoder().decode(blob)); + } + return blob.then(b => parseGitAttributes(new TextDecoder().decode(b))); + }, [ + props.config, + currentBranchTree, + repoInfo, + baseCommitSha, + ]) + ); + return ( - - - {props.config.storage.kind === 'cloud' ? ( - - {props.children} - - ) : ( - props.children - )} - - + + + + {props.config.storage.kind === 'cloud' ? ( + + {props.children} + + ) : ( + props.children + )} + + + @@ -471,6 +515,12 @@ export function useCurrentBranch() { return useContext(CurrentBranchContext); } +const LfsPatternsContext = createContext([]); + +export function useLfsPatterns() { + return useContext(LfsPatternsContext); +} + type BranchInfo = { id: string; commitSha: string; diff --git a/packages/keystatic/src/app/updating.tsx b/packages/keystatic/src/app/updating.tsx index 4ca1ee7b9..4235e457c 100644 --- a/packages/keystatic/src/app/updating.tsx +++ b/packages/keystatic/src/app/updating.tsx @@ -29,6 +29,9 @@ import { createUrqlClient } from './provider'; import { serializeProps } from '../form/serialize-props'; import { scopeEntriesWithPathPrefix } from './shell/path-prefix'; import { base64Encode } from '#base64'; +import { getAuth } from './auth'; +import { processLfsAdditions } from './git-lfs'; +import { useLfsPatterns } from './shell/data'; const textEncoder = new TextEncoder(); @@ -133,6 +136,7 @@ export function 
useUpsertItem(args: { const repoInfo = useRepoInfo(); const appSlug = useContext(AppSlugContext); const unscopedTreeData = useCurrentUnscopedTree(); + const lfsPatterns = useLfsPatterns(); return [ state, @@ -210,6 +214,23 @@ export function useUpsertItem(args: { branchName: override?.branch ?? currentBranch, repositoryNameWithOwner: `${repoInfo.owner}/${repoInfo.name}`, }; + if ( + args.config.storage.kind === 'github' && + args.config.storage.lfs && + lfsPatterns.length > 0 + ) { + const auth = await getAuth(args.config); + if (auth) { + additions = await processLfsAdditions( + additions, + repoInfo.owner, + repoInfo.name, + auth.accessToken, + lfsPatterns + ); + } + } + const runMutation = (expectedHeadOid: string) => mutate({ input: { diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 48ba8d896..499ce8173 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -9,8 +9,9 @@ import { } from '../form/api'; import { parseProps } from '../form/parse-props'; import { getAuth } from './auth'; +import { resolveLfsPointers } from './git-lfs'; import { loadDataFile } from './required-files'; -import { useBaseCommit, useRepoInfo, useTree } from './shell/data'; +import { useBaseCommit, useLfsPatterns, useRepoInfo, useTree } from './shell/data'; import { getDirectoriesForTreeKey, getTreeKey } from './tree-key'; import { TreeNode, getTreeNodeAtPath, TreeEntry, blobSha } from './trees'; import { LOADING, useData } from './useData'; @@ -209,6 +210,7 @@ export function useItemData(args: UseItemDataArgs) { const { current: currentBranch } = useTree(); const baseCommit = useBaseCommit(); const repoInfo = useRepoInfo(); + const lfsPatterns = useLfsPatterns(); const rootTree = currentBranch.kind === 'loaded' ? 
currentBranch.data.tree : undefined; @@ -279,7 +281,29 @@ export function useItemData(args: UseItemDataArgs) { return blob.then(blob => [entry.path, blob] as const); }); + const lfsEnabled = + args.config.storage.kind === 'github' && + args.config.storage.lfs && + lfsPatterns.length > 0; + + const buildResult = async (blobMap: Map) => { + if (lfsEnabled && repoInfo) { + const auth = await getAuth(args.config); + if (auth) { + blobMap = await resolveLfsPointers( + blobMap, + repoInfo.owner, + repoInfo.name, + auth.accessToken + ); + } + } + const { initialState, initialFiles } = parseEntry(_args, blobMap); + return { initialState, initialFiles, localTreeKey }; + }; + if ( + !lfsEnabled && allBlobs.every((x): x is readonly [string, Uint8Array] => Array.isArray(x) ) @@ -296,14 +320,7 @@ export function useItemData(args: UseItemDataArgs) { }; } - return Promise.all(allBlobs).then(async data => { - const { initialState, initialFiles } = parseEntry(_args, new Map(data)); - return { - initialState, - initialFiles, - localTreeKey, - }; - }); + return Promise.all(allBlobs).then(data => buildResult(new Map(data))); }, [ hasLoaded, tree, @@ -316,6 +333,7 @@ export function useItemData(args: UseItemDataArgs) { baseCommit, repoInfo, localTreeKey, + lfsPatterns, ]) ); } diff --git a/packages/keystatic/src/config.tsx b/packages/keystatic/src/config.tsx index 4c95368f3..d72e65dc0 100644 --- a/packages/keystatic/src/config.tsx +++ b/packages/keystatic/src/config.tsx @@ -80,6 +80,7 @@ type Navigation = K[] | { [section: string]: K[] }; type GitHubStorageConfig = { kind: 'github'; repo: RepoConfig; + lfs?: boolean; } & CommonRemoteStorageConfig; export type GitHubConfig< From 820b42757ed1b6554cbde58fe67996283fac6758 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 12:53:02 -0500 Subject: [PATCH 02/21] refactor: Move LFS logic to backend Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 209 ++++++++++++++----- 
packages/keystatic/src/app/git-lfs.ts | 235 +++++----------------- packages/keystatic/src/app/updating.tsx | 15 +- packages/keystatic/src/app/useItemData.ts | 12 +- 4 files changed, 217 insertions(+), 254 deletions(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index 4234d092e..26aa19b0c 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -10,6 +10,7 @@ import { handleGitHubAppCreation, localModeApiHandler } from '#api-handler'; import { webcrypto } from '#webcrypto'; import { bytesToHex } from '../hex'; import { decryptValue, encryptValue } from './encryption'; +import { parseRepoConfig } from '../app/repo-config'; export type APIRouteConfig = { /** @default process.env.KEYSTATIC_GITHUB_CLIENT_ID */ @@ -156,7 +157,7 @@ export function makeGenericAPIRouteHandler( return githubRepoNotFound(req, config); } if (joined === 'github/lfs') { - return githubLfsProxy(req); + return githubLfsHandler(req, config.config); } if (joined === 'github/logout') { const access_token = getAccessToken(req); @@ -414,19 +415,24 @@ async function createdGithubApp( return handleGitHubAppCreation(req, slugEnvVarName); } -async function githubLfsProxy( - req: KeystaticRequest +async function githubLfsHandler( + req: KeystaticRequest, + config: Config ): Promise { + if (config.storage.kind !== 'github') { + return { status: 400, body: 'LFS is only supported with GitHub storage' }; + } + const accessToken = getAccessToken(req); if (!accessToken) { return { status: 401, body: 'Unauthorized' }; } + const { owner, name: repo } = parseRepoConfig(config.storage.repo); + let payload: { - url: string; - method: string; - headers: Record; - body?: string; + operation: 'upload' | 'download'; + objects: Array<{ oid: string; size: number; content?: string }>; }; try { payload = await req.json(); @@ -434,58 +440,167 @@ async function githubLfsProxy( return { status: 400, body: 'Invalid JSON body' }; } - const 
targetUrl = new URL(payload.url); - const allowedHosts = [ - 'github.com', - 'github-cloud.s3.amazonaws.com', - 'github-cloud.githubusercontent.com', - ]; - if ( - !allowedHosts.some( - host => - targetUrl.hostname === host || - targetUrl.hostname.endsWith('.' + host) - ) - ) { + if (payload.operation !== 'upload' && payload.operation !== 'download') { + return { status: 400, body: 'Invalid operation' }; + } + + const batchUrl = `https://github.com/${owner}/${repo}.git/info/lfs/objects/batch`; + const batchRes = await fetch(batchUrl, { + method: 'POST', + headers: { + Accept: 'application/vnd.git-lfs+json', + 'Content-Type': 'application/vnd.git-lfs+json', + Authorization: `Bearer ${accessToken}`, + }, + body: JSON.stringify({ + operation: payload.operation, + transfers: ['basic'], + objects: payload.objects.map(o => ({ oid: o.oid, size: o.size })), + }), + }); + + if (!batchRes.ok) { + const body = await batchRes.text(); return { - status: 403, - body: 'LFS proxy only allows requests to GitHub hosts', + status: batchRes.status, + body: `LFS batch API error: ${body}`, }; } - const headers: Record = { ...payload.headers }; - if (targetUrl.hostname === 'github.com') { - headers['Authorization'] = `Bearer ${accessToken}`; + type LfsBatchResponseObject = { + oid: string; + size: number; + actions?: { + upload?: { href: string; header?: Record }; + download?: { href: string; header?: Record }; + verify?: { href: string; header?: Record }; + }; + error?: { code: number; message: string }; + }; + const batch: { objects: LfsBatchResponseObject[] } = await batchRes.json(); + + if (payload.operation === 'upload') { + return lfsHandleUpload(batch.objects, payload.objects); } + return lfsHandleDownload(batch.objects); +} - const body = - payload.body != null - ? 
Uint8Array.from(atob(payload.body), c => c.charCodeAt(0)) - : undefined; +async function lfsHandleUpload( + batchObjects: Array<{ + oid: string; + size: number; + actions?: { + upload?: { href: string; header?: Record }; + verify?: { href: string; header?: Record }; + }; + error?: { code: number; message: string }; + }>, + clientObjects: Array<{ oid: string; content?: string }> +): Promise { + const contentMap = new Map(); + for (const obj of clientObjects) { + if (obj.content) { + const binary = atob(obj.content); + const bytes = new Uint8Array(binary.length); + for (let i = 0; i < binary.length; i++) { + bytes[i] = binary.charCodeAt(i); + } + contentMap.set(obj.oid, bytes); + } + } - const response = await fetch(payload.url, { - method: payload.method, - headers, - body, - }); + for (const obj of batchObjects) { + if (obj.error) { + return { + status: 502, + body: `LFS error for ${obj.oid}: ${obj.error.message} (${obj.error.code})`, + }; + } - const responseBytes = new Uint8Array(await response.arrayBuffer()); - const responseHeaders: Record = {}; - const skipHeaders = new Set([ - 'transfer-encoding', - 'content-encoding', - 'content-length', - ]); - for (const [key, value] of response.headers.entries()) { - if (!skipHeaders.has(key.toLowerCase())) { - responseHeaders[key] = value; + const uploadAction = obj.actions?.upload; + if (!uploadAction) continue; + + const content = contentMap.get(obj.oid); + if (!content) { + return { + status: 400, + body: `Missing content for LFS object ${obj.oid}`, + }; + } + + const uploadRes = await fetch(uploadAction.href, { + method: 'PUT', + headers: uploadAction.header ?? 
{}, + body: content as unknown as BodyInit, + }); + if (!uploadRes.ok) { + const body = await uploadRes.text(); + return { + status: 502, + body: `LFS upload failed for ${obj.oid} (${uploadRes.status}): ${body}`, + }; + } + + if (obj.actions?.verify) { + const verifyRes = await fetch(obj.actions.verify.href, { + method: 'POST', + headers: { + 'Content-Type': 'application/vnd.git-lfs+json', + ...(obj.actions.verify.header ?? {}), + }, + body: JSON.stringify({ oid: obj.oid, size: obj.size }), + }); + if (!verifyRes.ok) { + const body = await verifyRes.text(); + return { + status: 502, + body: `LFS verify failed for ${obj.oid} (${verifyRes.status}): ${body}`, + }; + } + } + } + + return { + status: 200, + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ ok: true }), + }; +} + +async function lfsHandleDownload( + batchObjects: Array<{ + oid: string; + actions?: { + download?: { href: string; header?: Record }; + }; + error?: { code: number; message: string }; + }> +): Promise { + const results: Array<{ oid: string; content: string }> = []; + + for (const obj of batchObjects) { + if (obj.error) continue; + const downloadAction = obj.actions?.download; + if (!downloadAction) continue; + + const res = await fetch(downloadAction.href, { + headers: downloadAction.header ?? 
{}, + }); + if (!res.ok) continue; + + const buffer = await res.arrayBuffer(); + const bytes = new Uint8Array(buffer); + let binary = ''; + for (let i = 0; i < bytes.byteLength; i++) { + binary += String.fromCharCode(bytes[i]); } + results.push({ oid: obj.oid, content: btoa(binary) }); } return { - status: response.status, - headers: responseHeaders, - body: responseBytes, + status: 200, + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ objects: results }), }; } diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index 9f6bee004..60bbb2896 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -4,36 +4,7 @@ const textEncoder = new TextEncoder(); const textDecoder = new TextDecoder(); const LFS_POINTER_PREFIX = 'version https://git-lfs.github.com/spec/v1'; -const LFS_PROXY_PATH = '/api/keystatic/github/lfs'; - -// LFS proxy — all requests go through the server to avoid CORS -// ---------------------------------------------------------------------------- - -async function lfsProxyFetch( - url: string, - init: RequestInit -): Promise { - return fetch(LFS_PROXY_PATH, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - url, - method: init.method ?? 'GET', - headers: init.headers ?? {}, - body: init.body != null ? uint8ArrayToBase64(init.body) : undefined, - }), - }); -} - -function uint8ArrayToBase64(data: unknown): string { - const bytes = - data instanceof Uint8Array ? 
data : new Uint8Array(data as ArrayBuffer); - let binary = ''; - for (let i = 0; i < bytes.byteLength; i++) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); -} +const LFS_ENDPOINT = '/api/keystatic/github/lfs'; // .gitattributes parsing // ---------------------------------------------------------------------------- @@ -133,151 +104,31 @@ async function computeSha256(content: Uint8Array): Promise { .join(''); } -// LFS Batch API +// Base64 utilities // ---------------------------------------------------------------------------- -type LfsBatchObject = { oid: string; size: number }; - -type LfsBatchResponseObject = { - oid: string; - size: number; - authenticated?: boolean; - actions?: { - upload?: { href: string; header?: Record }; - download?: { href: string; header?: Record }; - verify?: { href: string; header?: Record }; - }; - error?: { code: number; message: string }; -}; - -type LfsBatchResponse = { - transfer?: string; - objects: LfsBatchResponseObject[]; -}; - -async function lfsBatchRequest( - owner: string, - repo: string, - _token: string, - operation: 'upload' | 'download', - objects: LfsBatchObject[] -): Promise { - const url = `https://github.com/${owner}/${repo}.git/info/lfs/objects/batch`; - const response = await lfsProxyFetch(url, { - method: 'POST', - headers: { - Accept: 'application/vnd.git-lfs+json', - 'Content-Type': 'application/vnd.git-lfs+json', - }, - body: textEncoder.encode( - JSON.stringify({ - operation, - transfers: ['basic'], - objects, - }) - ), - }); - - if (!response.ok) { - const body = await response.text(); - throw new Error( - `LFS batch API error (${response.status}): ${body}` - ); - } - return response.json(); -} - -async function lfsUploadObjects( - batchResponse: LfsBatchResponse, - objectContents: Map -): Promise { - for (const obj of batchResponse.objects) { - if (obj.error) { - throw new Error( - `LFS server error for ${obj.oid}: ${obj.error.message} (${obj.error.code})` - ); - } - const 
uploadAction = obj.actions?.upload; - if (!uploadAction) continue; // server already has this object - - const content = objectContents.get(obj.oid); - if (!content) { - throw new Error(`Missing content for LFS object ${obj.oid}`); - } - - const uploadResponse = await lfsProxyFetch(uploadAction.href, { - method: 'PUT', - headers: uploadAction.header ?? {}, - body: content as unknown as BodyInit, - }); - - if (!uploadResponse.ok) { - const body = await uploadResponse.text(); - throw new Error( - `LFS upload failed for ${obj.oid} (${uploadResponse.status}): ${body}` - ); - } - - if (obj.actions?.verify) { - const verifyResponse = await lfsProxyFetch(obj.actions.verify.href, { - method: 'POST', - headers: { - 'Content-Type': 'application/vnd.git-lfs+json', - ...(obj.actions.verify.header ?? {}), - }, - body: textEncoder.encode( - JSON.stringify({ oid: obj.oid, size: obj.size }) - ), - }); - if (!verifyResponse.ok) { - const body = await verifyResponse.text(); - throw new Error( - `LFS verify failed for ${obj.oid} (${verifyResponse.status}): ${body}` - ); - } - } +function uint8ArrayToBase64(data: Uint8Array): string { + let binary = ''; + for (let i = 0; i < data.byteLength; i++) { + binary += String.fromCharCode(data[i]); } + return btoa(binary); } -async function lfsDownloadObjects( - batchResponse: LfsBatchResponse -): Promise> { - const results = new Map(); - for (const obj of batchResponse.objects) { - if (obj.error) { - console.warn( - `LFS download error for ${obj.oid}: ${obj.error.message} (${obj.error.code})` - ); - continue; - } - const downloadAction = obj.actions?.download; - if (!downloadAction) { - console.warn(`No download action for LFS object ${obj.oid}`); - continue; - } - const response = await lfsProxyFetch(downloadAction.href, { - headers: downloadAction.header ?? 
{}, - }); - if (!response.ok) { - console.warn( - `LFS download failed for ${obj.oid} (${response.status})` - ); - continue; - } - const buffer = await response.arrayBuffer(); - results.set(obj.oid, new Uint8Array(buffer)); +function base64ToUint8Array(base64: string): Uint8Array { + const binary = atob(base64); + const bytes = new Uint8Array(binary.length); + for (let i = 0; i < binary.length; i++) { + bytes[i] = binary.charCodeAt(i); } - return results; + return bytes; } -// High-level orchestrators +// Server-backed LFS operations // ---------------------------------------------------------------------------- export async function processLfsAdditions( additions: { path: string; contents: Uint8Array }[], - owner: string, - repo: string, - token: string, patterns: string[] ): Promise<{ path: string; contents: Uint8Array }[]> { const lfsAdditions: { @@ -304,19 +155,22 @@ export async function processLfsAdditions( if (lfsAdditions.length === 0) return result; - const objectContents = new Map( - lfsAdditions.map(a => [a.oid, a.contents]) - ); - - const batchResponse = await lfsBatchRequest( - owner, - repo, - token, - 'upload', - lfsAdditions.map(a => ({ oid: a.oid, size: a.size })) - ); + const response = await fetch(LFS_ENDPOINT, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + operation: 'upload', + objects: lfsAdditions.map(a => ({ + oid: a.oid, + size: a.size, + content: uint8ArrayToBase64(a.contents), + })), + }), + }); - await lfsUploadObjects(batchResponse, objectContents); + if (!response.ok) { + throw new Error(`LFS upload failed: ${await response.text()}`); + } for (const lfsAddition of lfsAdditions) { result[lfsAddition.index] = { @@ -329,10 +183,7 @@ export async function processLfsAdditions( } export async function resolveLfsPointers( - blobs: Map, - owner: string, - repo: string, - token: string + blobs: Map ): Promise> { const pointers: { path: string; oid: string; size: number }[] = []; @@ -349,15 
+200,25 @@ export async function resolveLfsPointers( if (pointers.length === 0) return blobs; - const batchResponse = await lfsBatchRequest( - owner, - repo, - token, - 'download', - pointers.map(p => ({ oid: p.oid, size: p.size })) - ); + const response = await fetch(LFS_ENDPOINT, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + operation: 'download', + objects: pointers.map(p => ({ oid: p.oid, size: p.size })), + }), + }); - const downloaded = await lfsDownloadObjects(batchResponse); + if (!response.ok) { + throw new Error(`LFS download failed: ${await response.text()}`); + } + + const data: { objects: Array<{ oid: string; content: string }> } = + await response.json(); + const downloaded = new Map(); + for (const obj of data.objects) { + downloaded.set(obj.oid, base64ToUint8Array(obj.content)); + } const resolved = new Map(blobs); for (const pointer of pointers) { diff --git a/packages/keystatic/src/app/updating.tsx b/packages/keystatic/src/app/updating.tsx index 4235e457c..95621ba05 100644 --- a/packages/keystatic/src/app/updating.tsx +++ b/packages/keystatic/src/app/updating.tsx @@ -29,7 +29,6 @@ import { createUrqlClient } from './provider'; import { serializeProps } from '../form/serialize-props'; import { scopeEntriesWithPathPrefix } from './shell/path-prefix'; import { base64Encode } from '#base64'; -import { getAuth } from './auth'; import { processLfsAdditions } from './git-lfs'; import { useLfsPatterns } from './shell/data'; @@ -219,16 +218,10 @@ export function useUpsertItem(args: { args.config.storage.lfs && lfsPatterns.length > 0 ) { - const auth = await getAuth(args.config); - if (auth) { - additions = await processLfsAdditions( - additions, - repoInfo.owner, - repoInfo.name, - auth.accessToken, - lfsPatterns - ); - } + additions = await processLfsAdditions( + additions, + lfsPatterns + ); } const runMutation = (expectedHeadOid: string) => diff --git a/packages/keystatic/src/app/useItemData.ts 
b/packages/keystatic/src/app/useItemData.ts index 499ce8173..164043d42 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -288,15 +288,9 @@ export function useItemData(args: UseItemDataArgs) { const buildResult = async (blobMap: Map) => { if (lfsEnabled && repoInfo) { - const auth = await getAuth(args.config); - if (auth) { - blobMap = await resolveLfsPointers( - blobMap, - repoInfo.owner, - repoInfo.name, - auth.accessToken - ); - } + blobMap = await resolveLfsPointers( + blobMap + ); } const { initialState, initialFiles } = parseEntry(_args, blobMap); return { initialState, initialFiles, localTreeKey }; From 669df0c75c6fd5ce0f0265fde391ee89c5ca6e9c Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 13:32:51 -0500 Subject: [PATCH 03/21] fix: Simplify API and split into separate endpoints Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 267 +++++++++++++-------- packages/keystatic/src/app/git-lfs.test.ts | 53 +--- packages/keystatic/src/app/git-lfs.ts | 123 +++------- 3 files changed, 210 insertions(+), 233 deletions(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index 26aa19b0c..b006a3426 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -156,8 +156,11 @@ export function makeGenericAPIRouteHandler( if (joined === 'github/repo-not-found') { return githubRepoNotFound(req, config); } - if (joined === 'github/lfs') { - return githubLfsHandler(req, config.config); + if (joined === 'github/lfs/upload') { + return githubLfsUpload(req, config.config); + } + if (joined === 'github/lfs/download') { + return githubLfsDownload(req, config.config); } if (joined === 'github/logout') { const access_token = getAccessToken(req); @@ -415,37 +418,34 @@ async function createdGithubApp( return handleGitHubAppCreation(req, slugEnvVarName); } -async function githubLfsHandler( +function 
getLfsConfig( req: KeystaticRequest, config: Config -): Promise { +): + | { error: KeystaticResponse } + | { owner: string; repo: string; accessToken: string } { if (config.storage.kind !== 'github') { - return { status: 400, body: 'LFS is only supported with GitHub storage' }; + return { + error: { status: 400, body: 'LFS is only supported with GitHub storage' }, + }; } - const accessToken = getAccessToken(req); if (!accessToken) { - return { status: 401, body: 'Unauthorized' }; + return { error: { status: 401, body: 'Unauthorized' } }; } - const { owner, name: repo } = parseRepoConfig(config.storage.repo); + return { owner, repo, accessToken }; +} - let payload: { - operation: 'upload' | 'download'; - objects: Array<{ oid: string; size: number; content?: string }>; - }; - try { - payload = await req.json(); - } catch { - return { status: 400, body: 'Invalid JSON body' }; - } - - if (payload.operation !== 'upload' && payload.operation !== 'download') { - return { status: 400, body: 'Invalid operation' }; - } - +async function lfsBatchRequest( + owner: string, + repo: string, + accessToken: string, + operation: 'upload' | 'download', + objects: Array<{ oid: string; size: number }> +) { const batchUrl = `https://github.com/${owner}/${repo}.git/info/lfs/objects/batch`; - const batchRes = await fetch(batchUrl, { + return fetch(batchUrl, { method: 'POST', headers: { Accept: 'application/vnd.git-lfs+json', @@ -453,63 +453,114 @@ async function githubLfsHandler( Authorization: `Bearer ${accessToken}`, }, body: JSON.stringify({ - operation: payload.operation, + operation, transfers: ['basic'], - objects: payload.objects.map(o => ({ oid: o.oid, size: o.size })), + objects, }), }); +} - if (!batchRes.ok) { - const body = await batchRes.text(); - return { - status: batchRes.status, - body: `LFS batch API error: ${body}`, - }; +function base64ToBytes(base64: string): Uint8Array { + const binary = atob(base64); + const bytes = new Uint8Array(binary.length); + for (let i = 0; i 
< binary.length; i++) { + bytes[i] = binary.charCodeAt(i); } + return bytes; +} - type LfsBatchResponseObject = { - oid: string; - size: number; - actions?: { - upload?: { href: string; header?: Record }; - download?: { href: string; header?: Record }; - verify?: { href: string; header?: Record }; - }; - error?: { code: number; message: string }; - }; - const batch: { objects: LfsBatchResponseObject[] } = await batchRes.json(); +function bytesToBase64(bytes: Uint8Array): string { + let binary = ''; + for (let i = 0; i < bytes.byteLength; i++) { + binary += String.fromCharCode(bytes[i]); + } + return btoa(binary); +} + +async function computeSha256(content: Uint8Array): Promise { + const hashBuffer = await crypto.subtle.digest( + 'SHA-256', + content as unknown as ArrayBuffer + ); + return Array.from(new Uint8Array(hashBuffer)) + .map(b => b.toString(16).padStart(2, '0')) + .join(''); +} + +function createLfsPointer(oid: string, size: number): string { + return `version https://git-lfs.github.com/spec/v1\noid sha256:${oid}\nsize ${size}\n`; +} - if (payload.operation === 'upload') { - return lfsHandleUpload(batch.objects, payload.objects); +function parseLfsPointer(text: string): { oid: string; size: number } { + const lines = text.split('\n').filter(l => l.trim().length > 0); + const pairs = new Map(); + for (const line of lines) { + const spaceIdx = line.indexOf(' '); + if (spaceIdx !== -1) { + pairs.set(line.slice(0, spaceIdx), line.slice(spaceIdx + 1)); + } + } + const oidRaw = pairs.get('oid'); + if (!oidRaw?.startsWith('sha256:')) { + throw new Error('Invalid LFS pointer: missing or invalid oid'); + } + const sizeRaw = pairs.get('size'); + if (!sizeRaw) { + throw new Error('Invalid LFS pointer: missing size'); } - return lfsHandleDownload(batch.objects); + return { oid: oidRaw.slice('sha256:'.length), size: parseInt(sizeRaw, 10) }; } -async function lfsHandleUpload( - batchObjects: Array<{ - oid: string; - size: number; - actions?: { - upload?: { href: string; 
header?: Record }; - verify?: { href: string; header?: Record }; - }; - error?: { code: number; message: string }; - }>, - clientObjects: Array<{ oid: string; content?: string }> +type LfsBatchResponseObject = { + oid: string; + size: number; + actions?: { + upload?: { href: string; header?: Record }; + download?: { href: string; header?: Record }; + verify?: { href: string; header?: Record }; + }; + error?: { code: number; message: string }; +}; + +async function githubLfsUpload( + req: KeystaticRequest, + config: Config ): Promise { - const contentMap = new Map(); - for (const obj of clientObjects) { - if (obj.content) { - const binary = atob(obj.content); - const bytes = new Uint8Array(binary.length); - for (let i = 0; i < binary.length; i++) { - bytes[i] = binary.charCodeAt(i); - } - contentMap.set(obj.oid, bytes); - } + const lfs = getLfsConfig(req, config); + if ('error' in lfs) return lfs.error; + + let payload: { objects: Array<{ content: string }> }; + try { + payload = await req.json(); + } catch { + return { status: 400, body: 'Invalid JSON body' }; + } + + const prepared = await Promise.all( + payload.objects.map(async obj => { + const bytes = base64ToBytes(obj.content); + const oid = await computeSha256(bytes); + return { oid, size: bytes.byteLength, bytes }; + }) + ); + + const batchRes = await lfsBatchRequest( + lfs.owner, + lfs.repo, + lfs.accessToken, + 'upload', + prepared.map(p => ({ oid: p.oid, size: p.size })) + ); + if (!batchRes.ok) { + return { + status: batchRes.status, + body: `LFS batch API error: ${await batchRes.text()}`, + }; } - for (const obj of batchObjects) { + const batch: { objects: LfsBatchResponseObject[] } = await batchRes.json(); + + for (const obj of batch.objects) { if (obj.error) { return { status: 502, @@ -520,24 +571,20 @@ async function lfsHandleUpload( const uploadAction = obj.actions?.upload; if (!uploadAction) continue; - const content = contentMap.get(obj.oid); - if (!content) { - return { - status: 400, - body: 
`Missing content for LFS object ${obj.oid}`, - }; + const item = prepared.find(p => p.oid === obj.oid); + if (!item) { + return { status: 500, body: `No content prepared for ${obj.oid}` }; } const uploadRes = await fetch(uploadAction.href, { method: 'PUT', headers: uploadAction.header ?? {}, - body: content as unknown as BodyInit, + body: item.bytes as unknown as BodyInit, }); if (!uploadRes.ok) { - const body = await uploadRes.text(); return { status: 502, - body: `LFS upload failed for ${obj.oid} (${uploadRes.status}): ${body}`, + body: `LFS upload failed for ${obj.oid} (${uploadRes.status}): ${await uploadRes.text()}`, }; } @@ -551,34 +598,64 @@ async function lfsHandleUpload( body: JSON.stringify({ oid: obj.oid, size: obj.size }), }); if (!verifyRes.ok) { - const body = await verifyRes.text(); return { status: 502, - body: `LFS verify failed for ${obj.oid} (${verifyRes.status}): ${body}`, + body: `LFS verify failed for ${obj.oid} (${verifyRes.status}): ${await verifyRes.text()}`, }; } } } + const pointers = prepared.map(p => + bytesToBase64( + new TextEncoder().encode(createLfsPointer(p.oid, p.size)) + ) + ); + return { status: 200, headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ ok: true }), + body: JSON.stringify({ objects: pointers.map(p => ({ pointer: p })) }), }; } -async function lfsHandleDownload( - batchObjects: Array<{ - oid: string; - actions?: { - download?: { href: string; header?: Record }; - }; - error?: { code: number; message: string }; - }> +async function githubLfsDownload( + req: KeystaticRequest, + config: Config ): Promise { - const results: Array<{ oid: string; content: string }> = []; + const lfs = getLfsConfig(req, config); + if ('error' in lfs) return lfs.error; - for (const obj of batchObjects) { + let payload: { pointers: Array<{ pointer: string }> }; + try { + payload = await req.json(); + } catch { + return { status: 400, body: 'Invalid JSON body' }; + } + + const parsed = payload.pointers.map(p => { + 
const text = new TextDecoder().decode(base64ToBytes(p.pointer)); + return parseLfsPointer(text); + }); + + const batchRes = await lfsBatchRequest( + lfs.owner, + lfs.repo, + lfs.accessToken, + 'download', + parsed.map(p => ({ oid: p.oid, size: p.size })) + ); + if (!batchRes.ok) { + return { + status: batchRes.status, + body: `LFS batch API error: ${await batchRes.text()}`, + }; + } + + const batch: { objects: LfsBatchResponseObject[] } = await batchRes.json(); + const downloaded = new Map(); + + for (const obj of batch.objects) { if (obj.error) continue; const downloadAction = obj.actions?.download; if (!downloadAction) continue; @@ -588,15 +665,13 @@ async function lfsHandleDownload( }); if (!res.ok) continue; - const buffer = await res.arrayBuffer(); - const bytes = new Uint8Array(buffer); - let binary = ''; - for (let i = 0; i < bytes.byteLength; i++) { - binary += String.fromCharCode(bytes[i]); - } - results.push({ oid: obj.oid, content: btoa(binary) }); + downloaded.set(obj.oid, bytesToBase64(new Uint8Array(await res.arrayBuffer()))); } + const results = parsed.map(p => ({ + content: downloaded.get(p.oid) ?? 
null, + })); + return { status: 200, headers: { 'Content-Type': 'application/json' }, diff --git a/packages/keystatic/src/app/git-lfs.test.ts b/packages/keystatic/src/app/git-lfs.test.ts index ace120905..c27177973 100644 --- a/packages/keystatic/src/app/git-lfs.test.ts +++ b/packages/keystatic/src/app/git-lfs.test.ts @@ -5,8 +5,6 @@ import { parseGitAttributes, isLfsTracked, isLfsPointer, - parseLfsPointer, - createLfsPointer, } from './git-lfs'; if (!globalThis.crypto) { @@ -15,6 +13,12 @@ if (!globalThis.crypto) { const textEncoder = new TextEncoder(); +function makeLfsPointer(oid: string, size: number): Uint8Array { + return textEncoder.encode( + `version https://git-lfs.github.com/spec/v1\noid sha256:${oid}\nsize ${size}\n` + ); +} + describe('parseGitAttributes', () => { test('extracts LFS patterns from standard .gitattributes', () => { const content = [ @@ -76,59 +80,26 @@ describe('isLfsTracked', () => { }); }); -describe('isLfsPointer / parseLfsPointer / createLfsPointer', () => { +describe('isLfsPointer', () => { const sampleOid = 'abc123def456abc123def456abc123def456abc123def456abc123def456abcd1234'; const sampleSize = 12345; - test('createLfsPointer produces valid pointer', () => { - const pointer = createLfsPointer(sampleOid, sampleSize); - const text = new TextDecoder().decode(pointer); - expect(text).toBe( - `version https://git-lfs.github.com/spec/v1\noid sha256:${sampleOid}\nsize ${sampleSize}\n` - ); - }); - - test('isLfsPointer detects valid pointer', () => { - const pointer = createLfsPointer(sampleOid, sampleSize); + test('detects valid pointer', () => { + const pointer = makeLfsPointer(sampleOid, sampleSize); expect(isLfsPointer(pointer)).toBe(true); }); - test('isLfsPointer rejects non-pointer content', () => { + test('rejects non-pointer content', () => { expect(isLfsPointer(textEncoder.encode('hello world'))).toBe(false); expect(isLfsPointer(new Uint8Array(300))).toBe(false); expect(isLfsPointer(new Uint8Array(10))).toBe(false); }); - 
test('parseLfsPointer extracts oid and size', () => { - const pointer = createLfsPointer(sampleOid, sampleSize); - const parsed = parseLfsPointer(pointer); - expect(parsed.oid).toBe(sampleOid); - expect(parsed.size).toBe(sampleSize); - }); - - test('parseLfsPointer throws for missing oid', () => { - const content = textEncoder.encode( - 'version https://git-lfs.github.com/spec/v1\nsize 100\n' - ); - expect(() => parseLfsPointer(content)).toThrow('missing or invalid oid'); - }); - - test('parseLfsPointer throws for missing size', () => { - const content = textEncoder.encode( - `version https://git-lfs.github.com/spec/v1\noid sha256:${sampleOid}\n` - ); - expect(() => parseLfsPointer(content)).toThrow('missing size'); - }); - - test('roundtrip: create then parse', () => { + test('detects pointer with varying oid and size', () => { const oid = '0000000000000000000000000000000000000000000000000000000000000000'; - const size = 999999; - const pointer = createLfsPointer(oid, size); + const pointer = makeLfsPointer(oid, 999999); expect(isLfsPointer(pointer)).toBe(true); - const parsed = parseLfsPointer(pointer); - expect(parsed.oid).toBe(oid); - expect(parsed.size).toBe(size); }); }); diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index 60bbb2896..bd8ab3a76 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -1,10 +1,8 @@ import { minimatch } from 'minimatch'; -const textEncoder = new TextEncoder(); const textDecoder = new TextDecoder(); const LFS_POINTER_PREFIX = 'version https://git-lfs.github.com/spec/v1'; -const LFS_ENDPOINT = '/api/keystatic/github/lfs'; // .gitattributes parsing // ---------------------------------------------------------------------------- @@ -51,7 +49,7 @@ export function isLfsTracked(path: string, patterns: string[]): boolean { return patterns.some(pattern => minimatch(path, pattern, { matchBase: true })); } -// Pointer operations +// Pointer detection // 
---------------------------------------------------------------------------- export function isLfsPointer(content: Uint8Array): boolean { @@ -60,50 +58,6 @@ export function isLfsPointer(content: Uint8Array): boolean { return text === LFS_POINTER_PREFIX; } -export function parseLfsPointer(content: Uint8Array): { - oid: string; - size: number; -} { - const text = textDecoder.decode(content); - const lines = text.split('\n').filter(l => l.trim().length > 0); - const pairs = new Map(); - for (const line of lines) { - const spaceIdx = line.indexOf(' '); - if (spaceIdx !== -1) { - pairs.set(line.slice(0, spaceIdx), line.slice(spaceIdx + 1)); - } - } - - const oidRaw = pairs.get('oid'); - if (!oidRaw?.startsWith('sha256:')) { - throw new Error(`Invalid LFS pointer: missing or invalid oid`); - } - const sizeRaw = pairs.get('size'); - if (!sizeRaw) { - throw new Error(`Invalid LFS pointer: missing size`); - } - - return { - oid: oidRaw.slice('sha256:'.length), - size: parseInt(sizeRaw, 10), - }; -} - -export function createLfsPointer(oid: string, size: number): Uint8Array { - const text = `version https://git-lfs.github.com/spec/v1\noid sha256:${oid}\nsize ${size}\n`; - return textEncoder.encode(text); -} - -// SHA-256 -// ---------------------------------------------------------------------------- - -async function computeSha256(content: Uint8Array): Promise { - const hashBuffer = await crypto.subtle.digest('SHA-256', content as unknown as ArrayBuffer); - return Array.from(new Uint8Array(hashBuffer)) - .map(b => b.toString(16).padStart(2, '0')) - .join(''); -} - // Base64 utilities // ---------------------------------------------------------------------------- @@ -131,39 +85,21 @@ export async function processLfsAdditions( additions: { path: string; contents: Uint8Array }[], patterns: string[] ): Promise<{ path: string; contents: Uint8Array }[]> { - const lfsAdditions: { - index: number; - oid: string; - size: number; - contents: Uint8Array; - }[] = []; - - const result = 
[...additions]; - + const lfsIndices: number[] = []; for (let i = 0; i < additions.length; i++) { - const addition = additions[i]; - if (isLfsTracked(addition.path, patterns)) { - const oid = await computeSha256(addition.contents); - lfsAdditions.push({ - index: i, - oid, - size: addition.contents.byteLength, - contents: addition.contents, - }); + if (isLfsTracked(additions[i].path, patterns)) { + lfsIndices.push(i); } } - if (lfsAdditions.length === 0) return result; + if (lfsIndices.length === 0) return additions; - const response = await fetch(LFS_ENDPOINT, { + const response = await fetch('/api/keystatic/github/lfs/upload', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ - operation: 'upload', - objects: lfsAdditions.map(a => ({ - oid: a.oid, - size: a.size, - content: uint8ArrayToBase64(a.contents), + objects: lfsIndices.map(i => ({ + content: uint8ArrayToBase64(additions[i].contents), })), }), }); @@ -172,10 +108,13 @@ export async function processLfsAdditions( throw new Error(`LFS upload failed: ${await response.text()}`); } - for (const lfsAddition of lfsAdditions) { - result[lfsAddition.index] = { - path: additions[lfsAddition.index].path, - contents: createLfsPointer(lfsAddition.oid, lfsAddition.size), + const data: { objects: Array<{ pointer: string }> } = await response.json(); + const result = [...additions]; + for (let i = 0; i < lfsIndices.length; i++) { + const idx = lfsIndices[i]; + result[idx] = { + path: additions[idx].path, + contents: base64ToUint8Array(data.objects[i].pointer), }; } @@ -185,27 +124,23 @@ export async function processLfsAdditions( export async function resolveLfsPointers( blobs: Map ): Promise> { - const pointers: { path: string; oid: string; size: number }[] = []; + const pointerEntries: { path: string; raw: Uint8Array }[] = []; for (const [path, content] of blobs) { if (isLfsPointer(content)) { - try { - const { oid, size } = parseLfsPointer(content); - pointers.push({ path, oid, size 
}); - } catch { - // not a valid pointer — treat as a normal file - } + pointerEntries.push({ path, raw: content }); } } - if (pointers.length === 0) return blobs; + if (pointerEntries.length === 0) return blobs; - const response = await fetch(LFS_ENDPOINT, { + const response = await fetch('/api/keystatic/github/lfs/download', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ - operation: 'download', - objects: pointers.map(p => ({ oid: p.oid, size: p.size })), + pointers: pointerEntries.map(p => ({ + pointer: uint8ArrayToBase64(p.raw), + })), }), }); @@ -213,18 +148,14 @@ export async function resolveLfsPointers( throw new Error(`LFS download failed: ${await response.text()}`); } - const data: { objects: Array<{ oid: string; content: string }> } = + const data: { objects: Array<{ content: string | null }> } = await response.json(); - const downloaded = new Map(); - for (const obj of data.objects) { - downloaded.set(obj.oid, base64ToUint8Array(obj.content)); - } - const resolved = new Map(blobs); - for (const pointer of pointers) { - const content = downloaded.get(pointer.oid); + + for (let i = 0; i < pointerEntries.length; i++) { + const content = data.objects[i].content; if (content) { - resolved.set(pointer.path, content); + resolved.set(pointerEntries[i].path, base64ToUint8Array(content)); } } From 11210c48bfd80d3191129fdf6d1b94aeb244b0f5 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 13:45:29 -0500 Subject: [PATCH 04/21] fix: Do downloads in parallel through individual frontend calls Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 57 +++++++++++++-------------- packages/keystatic/src/app/git-lfs.ts | 32 +++++---------- 2 files changed, 36 insertions(+), 53 deletions(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index b006a3426..aa7884e80 100644 --- a/packages/keystatic/src/api/generic.ts +++ 
b/packages/keystatic/src/api/generic.ts @@ -159,8 +159,8 @@ export function makeGenericAPIRouteHandler( if (joined === 'github/lfs/upload') { return githubLfsUpload(req, config.config); } - if (joined === 'github/lfs/download') { - return githubLfsDownload(req, config.config); + if (params.length === 4 && params[0] === 'github' && params[1] === 'lfs' && params[2] === 'download') { + return githubLfsDownload(params[3], req, config.config); } if (joined === 'github/logout') { const access_token = getAccessToken(req); @@ -620,30 +620,22 @@ async function githubLfsUpload( } async function githubLfsDownload( + pointer: string, req: KeystaticRequest, config: Config ): Promise { const lfs = getLfsConfig(req, config); if ('error' in lfs) return lfs.error; - let payload: { pointers: Array<{ pointer: string }> }; - try { - payload = await req.json(); - } catch { - return { status: 400, body: 'Invalid JSON body' }; - } - - const parsed = payload.pointers.map(p => { - const text = new TextDecoder().decode(base64ToBytes(p.pointer)); - return parseLfsPointer(text); - }); + const text = new TextDecoder().decode(base64ToBytes(pointer)); + const parsed = parseLfsPointer(text); const batchRes = await lfsBatchRequest( lfs.owner, lfs.repo, lfs.accessToken, 'download', - parsed.map(p => ({ oid: p.oid, size: p.size })) + [{ oid: parsed.oid, size: parsed.size }] ); if (!batchRes.ok) { return { @@ -653,29 +645,34 @@ async function githubLfsDownload( } const batch: { objects: LfsBatchResponseObject[] } = await batchRes.json(); - const downloaded = new Map(); - - for (const obj of batch.objects) { - if (obj.error) continue; - const downloadAction = obj.actions?.download; - if (!downloadAction) continue; + const obj = batch.objects[0]; - const res = await fetch(downloadAction.href, { - headers: downloadAction.header ?? 
{}, - }); - if (!res.ok) continue; + if (obj?.error) { + return { + status: 502, + body: `LFS error for ${obj.oid}: ${obj.error.message} (${obj.error.code})`, + }; + } - downloaded.set(obj.oid, bytesToBase64(new Uint8Array(await res.arrayBuffer()))); + const downloadAction = obj?.actions?.download; + if (!downloadAction) { + return { status: 404, body: 'LFS object not found' }; } - const results = parsed.map(p => ({ - content: downloaded.get(p.oid) ?? null, - })); + const res = await fetch(downloadAction.href, { + headers: downloadAction.header ?? {}, + }); + if (!res.ok) { + return { + status: 502, + body: `LFS download failed (${res.status}): ${await res.text()}`, + }; + } return { status: 200, - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ objects: results }), + headers: { 'Content-Type': 'application/octet-stream' }, + body: new Uint8Array(await res.arrayBuffer()), }; } diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index bd8ab3a76..8d4b73f0d 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -134,30 +134,16 @@ export async function resolveLfsPointers( if (pointerEntries.length === 0) return blobs; - const response = await fetch('/api/keystatic/github/lfs/download', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - pointers: pointerEntries.map(p => ({ - pointer: uint8ArrayToBase64(p.raw), - })), - }), - }); - - if (!response.ok) { - throw new Error(`LFS download failed: ${await response.text()}`); - } - - const data: { objects: Array<{ content: string | null }> } = - await response.json(); const resolved = new Map(blobs); + const downloads = pointerEntries.map(async entry => { + const encoded = encodeURIComponent(uint8ArrayToBase64(entry.raw)); + const response = await fetch( + `/api/keystatic/github/lfs/download/${encoded}` + ); + if (!response.ok) return; + resolved.set(entry.path, new 
Uint8Array(await response.arrayBuffer())); + }); - for (let i = 0; i < pointerEntries.length; i++) { - const content = data.objects[i].content; - if (content) { - resolved.set(pointerEntries[i].path, base64ToUint8Array(content)); - } - } - + await Promise.all(downloads); return resolved; } From 1fa9896d27e7388af9e12f0dc42e54496e1311ff Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 15:00:05 -0500 Subject: [PATCH 05/21] fix: Always resolve LFS references, even when option isn't enabled Signed-off-by: Trevor Taubitz --- packages/keystatic/src/app/useItemData.ts | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 164043d42..0b97d334b 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -11,7 +11,7 @@ import { parseProps } from '../form/parse-props'; import { getAuth } from './auth'; import { resolveLfsPointers } from './git-lfs'; import { loadDataFile } from './required-files'; -import { useBaseCommit, useLfsPatterns, useRepoInfo, useTree } from './shell/data'; +import { useBaseCommit, useRepoInfo, useTree } from './shell/data'; import { getDirectoriesForTreeKey, getTreeKey } from './tree-key'; import { TreeNode, getTreeNodeAtPath, TreeEntry, blobSha } from './trees'; import { LOADING, useData } from './useData'; @@ -210,7 +210,6 @@ export function useItemData(args: UseItemDataArgs) { const { current: currentBranch } = useTree(); const baseCommit = useBaseCommit(); const repoInfo = useRepoInfo(); - const lfsPatterns = useLfsPatterns(); const rootTree = currentBranch.kind === 'loaded' ? 
currentBranch.data.tree : undefined; @@ -281,23 +280,18 @@ export function useItemData(args: UseItemDataArgs) { return blob.then(blob => [entry.path, blob] as const); }); - const lfsEnabled = - args.config.storage.kind === 'github' && - args.config.storage.lfs && - lfsPatterns.length > 0; + const isGitHub = args.config.storage.kind === 'github'; const buildResult = async (blobMap: Map) => { - if (lfsEnabled && repoInfo) { - blobMap = await resolveLfsPointers( - blobMap - ); + if (isGitHub) { + blobMap = await resolveLfsPointers(blobMap); } const { initialState, initialFiles } = parseEntry(_args, blobMap); return { initialState, initialFiles, localTreeKey }; }; if ( - !lfsEnabled && + !isGitHub && allBlobs.every((x): x is readonly [string, Uint8Array] => Array.isArray(x) ) @@ -327,7 +321,6 @@ export function useItemData(args: UseItemDataArgs) { baseCommit, repoInfo, localTreeKey, - lfsPatterns, ]) ); } From 29a83d1c1bea36dec42728496e5d6a9ab79f21a7 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 16:46:05 -0500 Subject: [PATCH 06/21] fix: Simplify routing logic Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index aa7884e80..abe5c16b1 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -159,7 +159,7 @@ export function makeGenericAPIRouteHandler( if (joined === 'github/lfs/upload') { return githubLfsUpload(req, config.config); } - if (params.length === 4 && params[0] === 'github' && params[1] === 'lfs' && params[2] === 'download') { + if (joined.startsWith('github/lfs/download/')) { return githubLfsDownload(params[3], req, config.config); } if (joined === 'github/logout') { From 60fae811c5ca4cc8a656e42f86bf5a9348b44876 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 16:58:22 -0500 Subject: [PATCH 07/21] style: Remove 
unnecessary comments Signed-off-by: Trevor Taubitz --- packages/keystatic/src/app/git-lfs.ts | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index 8d4b73f0d..a6e07fa9b 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -4,9 +4,6 @@ const textDecoder = new TextDecoder(); const LFS_POINTER_PREFIX = 'version https://git-lfs.github.com/spec/v1'; -// .gitattributes parsing -// ---------------------------------------------------------------------------- - export function parseGitAttributes(content: string): string[] { return content .split('\n') @@ -42,25 +39,16 @@ function parseAttributes(parts: string[]): Map { return attrs; } -// Pattern matching -// ---------------------------------------------------------------------------- - export function isLfsTracked(path: string, patterns: string[]): boolean { return patterns.some(pattern => minimatch(path, pattern, { matchBase: true })); } -// Pointer detection -// ---------------------------------------------------------------------------- - export function isLfsPointer(content: Uint8Array): boolean { if (content.byteLength > 200 || content.byteLength < 50) return false; const text = textDecoder.decode(content.slice(0, LFS_POINTER_PREFIX.length)); return text === LFS_POINTER_PREFIX; } -// Base64 utilities -// ---------------------------------------------------------------------------- - function uint8ArrayToBase64(data: Uint8Array): string { let binary = ''; for (let i = 0; i < data.byteLength; i++) { @@ -78,9 +66,6 @@ function base64ToUint8Array(base64: string): Uint8Array { return bytes; } -// Server-backed LFS operations -// ---------------------------------------------------------------------------- - export async function processLfsAdditions( additions: { path: string; contents: Uint8Array }[], patterns: string[] From 37403b660a844f719d9087e94ec314cf11d54378 Mon Sep 17 00:00:00 
2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 17:06:48 -0500 Subject: [PATCH 08/21] fix: Refactor base64 functions Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 22 ++++------------------ packages/keystatic/src/app/git-lfs.ts | 22 ++++------------------ packages/keystatic/src/base64.ts | 15 +++++++++++---- 3 files changed, 19 insertions(+), 40 deletions(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index abe5c16b1..4f1ae73da 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -1,6 +1,7 @@ import * as cookie from 'cookie'; import * as s from 'superstruct'; import { Config } from '..'; +import { base64Decode, base64Encode } from '#base64'; import { KeystaticResponse, KeystaticRequest, @@ -460,22 +461,7 @@ async function lfsBatchRequest( }); } -function base64ToBytes(base64: string): Uint8Array { - const binary = atob(base64); - const bytes = new Uint8Array(binary.length); - for (let i = 0; i < binary.length; i++) { - bytes[i] = binary.charCodeAt(i); - } - return bytes; -} -function bytesToBase64(bytes: Uint8Array): string { - let binary = ''; - for (let i = 0; i < bytes.byteLength; i++) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); -} async function computeSha256(content: Uint8Array): Promise { const hashBuffer = await crypto.subtle.digest( @@ -538,7 +524,7 @@ async function githubLfsUpload( const prepared = await Promise.all( payload.objects.map(async obj => { - const bytes = base64ToBytes(obj.content); + const bytes = base64Decode(obj.content); const oid = await computeSha256(bytes); return { oid, size: bytes.byteLength, bytes }; }) @@ -607,7 +593,7 @@ async function githubLfsUpload( } const pointers = prepared.map(p => - bytesToBase64( + base64Encode( new TextEncoder().encode(createLfsPointer(p.oid, p.size)) ) ); @@ -627,7 +613,7 @@ async function githubLfsDownload( const lfs = getLfsConfig(req, config); if ('error' 
in lfs) return lfs.error; - const text = new TextDecoder().decode(base64ToBytes(pointer)); + const text = new TextDecoder().decode(base64Decode(pointer)); const parsed = parseLfsPointer(text); const batchRes = await lfsBatchRequest( diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index a6e07fa9b..d65e2cfe1 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -1,4 +1,5 @@ import { minimatch } from 'minimatch'; +import { base64Decode, base64Encode } from '#base64'; const textDecoder = new TextDecoder(); @@ -49,22 +50,7 @@ export function isLfsPointer(content: Uint8Array): boolean { return text === LFS_POINTER_PREFIX; } -function uint8ArrayToBase64(data: Uint8Array): string { - let binary = ''; - for (let i = 0; i < data.byteLength; i++) { - binary += String.fromCharCode(data[i]); - } - return btoa(binary); -} -function base64ToUint8Array(base64: string): Uint8Array { - const binary = atob(base64); - const bytes = new Uint8Array(binary.length); - for (let i = 0; i < binary.length; i++) { - bytes[i] = binary.charCodeAt(i); - } - return bytes; -} export async function processLfsAdditions( additions: { path: string; contents: Uint8Array }[], @@ -84,7 +70,7 @@ export async function processLfsAdditions( headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ objects: lfsIndices.map(i => ({ - content: uint8ArrayToBase64(additions[i].contents), + content: base64Encode(additions[i].contents), })), }), }); @@ -99,7 +85,7 @@ export async function processLfsAdditions( const idx = lfsIndices[i]; result[idx] = { path: additions[idx].path, - contents: base64ToUint8Array(data.objects[i].pointer), + contents: base64Decode(data.objects[i].pointer), }; } @@ -121,7 +107,7 @@ export async function resolveLfsPointers( const resolved = new Map(blobs); const downloads = pointerEntries.map(async entry => { - const encoded = encodeURIComponent(uint8ArrayToBase64(entry.raw)); + const encoded 
= encodeURIComponent(base64Encode(entry.raw)); const response = await fetch( `/api/keystatic/github/lfs/download/${encoded}` ); diff --git a/packages/keystatic/src/base64.ts b/packages/keystatic/src/base64.ts index 5004a4db1..0ddd9add8 100644 --- a/packages/keystatic/src/base64.ts +++ b/packages/keystatic/src/base64.ts @@ -1,8 +1,7 @@ export function base64UrlDecode(base64: string) { - const binString = atob(base64.replace(/-/g, '+').replace(/_/g, '/')); - return Uint8Array.from( - binString as Iterable, - m => (m as unknown as string).codePointAt(0)! + return base64Decode(base64 + .replace(/-/g, '+') + .replace(/_/g, '/') ); } @@ -13,6 +12,14 @@ export function base64UrlEncode(bytes: Uint8Array) { .replace(/=/g, ''); } +export function base64Decode(base64: string) { + const binString = atob(base64); + return Uint8Array.from( + binString as Iterable, + m => (m as unknown as string).codePointAt(0)! + ); +} + export function base64Encode(bytes: Uint8Array) { const binString = Array.from(bytes, byte => String.fromCodePoint(byte)).join( '' From 7cb816951dae2819077571ab4e48c75a295e23fb Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 20:52:50 -0500 Subject: [PATCH 09/21] fix: Remove unnecessary check Signed-off-by: Trevor Taubitz --- packages/keystatic/src/app/git-lfs.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index d65e2cfe1..030f4b702 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -45,13 +45,10 @@ export function isLfsTracked(path: string, patterns: string[]): boolean { } export function isLfsPointer(content: Uint8Array): boolean { - if (content.byteLength > 200 || content.byteLength < 50) return false; const text = textDecoder.decode(content.slice(0, LFS_POINTER_PREFIX.length)); return text === LFS_POINTER_PREFIX; } - - export async function processLfsAdditions( additions: { path: string; contents: Uint8Array 
}[], patterns: string[] From 6d108d8407490add745779866642955980c56e4a Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 21:17:36 -0500 Subject: [PATCH 10/21] fix: Use synchronous call for cached items Signed-off-by: Trevor Taubitz --- packages/keystatic/src/app/useItemData.ts | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 0b97d334b..38f088793 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -9,7 +9,7 @@ import { } from '../form/api'; import { parseProps } from '../form/parse-props'; import { getAuth } from './auth'; -import { resolveLfsPointers } from './git-lfs'; +import { isLfsPointer, resolveLfsPointers } from './git-lfs'; import { loadDataFile } from './required-files'; import { useBaseCommit, useRepoInfo, useTree } from './shell/data'; import { getDirectoriesForTreeKey, getTreeKey } from './tree-key'; @@ -291,21 +291,16 @@ export function useItemData(args: UseItemDataArgs) { }; if ( - !isGitHub && allBlobs.every((x): x is readonly [string, Uint8Array] => Array.isArray(x) ) ) { - const { initialFiles, initialState } = parseEntry( - _args, - new Map(allBlobs) - ); - - return { - initialState, - initialFiles, - localTreeKey, - }; + const blobMap = new Map(allBlobs); + if (isGitHub && [...blobMap.values()].some(isLfsPointer)) { + return buildResult(blobMap); + } + const { initialFiles, initialState } = parseEntry(_args, blobMap); + return { initialState, initialFiles, localTreeKey }; } return Promise.all(allBlobs).then(data => buildResult(new Map(data))); From 9adf8785d2ce04878bfc414d44c514b9d53287c2 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 21:40:53 -0500 Subject: [PATCH 11/21] fix: Simplify LFS resolution and merging logic Signed-off-by: Trevor Taubitz --- packages/keystatic/src/app/git-lfs.ts | 22 ++++++++++------- 
packages/keystatic/src/app/useItemData.ts | 30 ++++++++++++++--------- 2 files changed, 31 insertions(+), 21 deletions(-) diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index 030f4b702..ec2e8010f 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -89,21 +89,25 @@ export async function processLfsAdditions( return result; } -export async function resolveLfsPointers( - blobs: Map -): Promise> { - const pointerEntries: { path: string; raw: Uint8Array }[] = []; +export type LfsPointerEntry = { path: string; raw: Uint8Array }; +export function extractLfsPointers( + blobs: Map +): LfsPointerEntry[] { + const pointers: LfsPointerEntry[] = []; for (const [path, content] of blobs) { if (isLfsPointer(content)) { - pointerEntries.push({ path, raw: content }); + pointers.push({ path, raw: content }); } } + return pointers; +} - if (pointerEntries.length === 0) return blobs; - - const resolved = new Map(blobs); - const downloads = pointerEntries.map(async entry => { +export async function downloadLfsPointers( + pointers: LfsPointerEntry[] +): Promise> { + const resolved = new Map(); + const downloads = pointers.map(async entry => { const encoded = encodeURIComponent(base64Encode(entry.raw)); const response = await fetch( `/api/keystatic/github/lfs/download/${encoded}` diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 38f088793..2d348651d 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -9,7 +9,7 @@ import { } from '../form/api'; import { parseProps } from '../form/parse-props'; import { getAuth } from './auth'; -import { isLfsPointer, resolveLfsPointers } from './git-lfs'; +import { downloadLfsPointers, extractLfsPointers } from './git-lfs'; import { loadDataFile } from './required-files'; import { useBaseCommit, useRepoInfo, useTree } from './shell/data'; import { 
getDirectoriesForTreeKey, getTreeKey } from './tree-key'; @@ -282,28 +282,34 @@ export function useItemData(args: UseItemDataArgs) { const isGitHub = args.config.storage.kind === 'github'; - const buildResult = async (blobMap: Map) => { - if (isGitHub) { - blobMap = await resolveLfsPointers(blobMap); - } - const { initialState, initialFiles } = parseEntry(_args, blobMap); - return { initialState, initialFiles, localTreeKey }; - }; - if ( allBlobs.every((x): x is readonly [string, Uint8Array] => Array.isArray(x) ) ) { const blobMap = new Map(allBlobs); - if (isGitHub && [...blobMap.values()].some(isLfsPointer)) { - return buildResult(blobMap); + const lfsPointers = isGitHub ? extractLfsPointers(blobMap) : []; + if (lfsPointers.length > 0) { + return downloadLfsPointers(lfsPointers).then(resolved => { + const merged = new Map([...blobMap, ...resolved]); + const { initialFiles, initialState } = parseEntry(_args, merged); + return { initialState, initialFiles, localTreeKey }; + }); } const { initialFiles, initialState } = parseEntry(_args, blobMap); return { initialState, initialFiles, localTreeKey }; } - return Promise.all(allBlobs).then(data => buildResult(new Map(data))); + return Promise.all(allBlobs).then(async data => { + let blobMap = new Map(data); + const lfsPointers = isGitHub ? 
extractLfsPointers(blobMap) : []; + if (lfsPointers.length > 0) { + const resolved = await downloadLfsPointers(lfsPointers); + blobMap = new Map([...blobMap, ...resolved]); + } + const { initialState, initialFiles } = parseEntry(_args, blobMap); + return { initialState, initialFiles, localTreeKey }; + }); }, [ hasLoaded, tree, From b1f4d6769c643ca1335b217cb31b3b16854b7470 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 21:43:43 -0500 Subject: [PATCH 12/21] fix: Refactor LFS pointer creation function Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 5 +---- packages/keystatic/src/app/git-lfs.test.ts | 5 ++--- packages/keystatic/src/app/git-lfs.ts | 4 ++++ 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index 4f1ae73da..bcff045db 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -2,6 +2,7 @@ import * as cookie from 'cookie'; import * as s from 'superstruct'; import { Config } from '..'; import { base64Decode, base64Encode } from '#base64'; +import { createLfsPointer } from '../app/git-lfs'; import { KeystaticResponse, KeystaticRequest, @@ -473,10 +474,6 @@ async function computeSha256(content: Uint8Array): Promise { .join(''); } -function createLfsPointer(oid: string, size: number): string { - return `version https://git-lfs.github.com/spec/v1\noid sha256:${oid}\nsize ${size}\n`; -} - function parseLfsPointer(text: string): { oid: string; size: number } { const lines = text.split('\n').filter(l => l.trim().length > 0); const pairs = new Map(); diff --git a/packages/keystatic/src/app/git-lfs.test.ts b/packages/keystatic/src/app/git-lfs.test.ts index c27177973..1c7ac6216 100644 --- a/packages/keystatic/src/app/git-lfs.test.ts +++ b/packages/keystatic/src/app/git-lfs.test.ts @@ -5,6 +5,7 @@ import { parseGitAttributes, isLfsTracked, isLfsPointer, + createLfsPointer, } from './git-lfs'; if 
(!globalThis.crypto) { @@ -14,9 +15,7 @@ if (!globalThis.crypto) { const textEncoder = new TextEncoder(); function makeLfsPointer(oid: string, size: number): Uint8Array { - return textEncoder.encode( - `version https://git-lfs.github.com/spec/v1\noid sha256:${oid}\nsize ${size}\n` - ); + return textEncoder.encode(createLfsPointer(oid, size)); } describe('parseGitAttributes', () => { diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index ec2e8010f..1a32e6448 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -49,6 +49,10 @@ export function isLfsPointer(content: Uint8Array): boolean { return text === LFS_POINTER_PREFIX; } +export function createLfsPointer(oid: string, size: number): string { + return `${LFS_POINTER_PREFIX}\noid sha256:${oid}\nsize ${size}\n`; +} + export async function processLfsAdditions( additions: { path: string; contents: Uint8Array }[], patterns: string[] From 84f060b60642e0c010acac9dac44cbe3e3b79d82 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 22:12:06 -0500 Subject: [PATCH 13/21] fix: Add caching support to LFS blobs Signed-off-by: Trevor Taubitz --- packages/keystatic/src/app/useItemData.ts | 27 ++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 2d348651d..0172c587f 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -275,22 +275,37 @@ export function useItemData(args: UseItemDataArgs) { repoInfo ); if (blob instanceof Uint8Array) { - return [entry.path, blob] as const; + return [entry.path, entry.sha, blob] as const; } - return blob.then(blob => [entry.path, blob] as const); + return blob.then(blob => [entry.path, entry.sha, blob] as const); }); const isGitHub = args.config.storage.kind === 'github'; + const cacheLfsBlobs = ( + resolved: Map, + 
pathToSha: Map + ) => { + for (const [path, content] of resolved) { + const sha = pathToSha.get(path); + if (sha) { + blobCache.set(sha, content); + setBlobToPersistedCache(sha, content); + } + } + }; + if ( - allBlobs.every((x): x is readonly [string, Uint8Array] => + allBlobs.every((x): x is readonly [string, string, Uint8Array] => Array.isArray(x) ) ) { - const blobMap = new Map(allBlobs); + const blobMap = new Map(allBlobs.map(([path, , blob]) => [path, blob])); const lfsPointers = isGitHub ? extractLfsPointers(blobMap) : []; if (lfsPointers.length > 0) { + const pathToSha = new Map(allBlobs.map(([path, sha]) => [path, sha])); return downloadLfsPointers(lfsPointers).then(resolved => { + cacheLfsBlobs(resolved, pathToSha); const merged = new Map([...blobMap, ...resolved]); const { initialFiles, initialState } = parseEntry(_args, merged); return { initialState, initialFiles, localTreeKey }; @@ -301,10 +316,12 @@ export function useItemData(args: UseItemDataArgs) { } return Promise.all(allBlobs).then(async data => { - let blobMap = new Map(data); + let blobMap = new Map(data.map(([path, , blob]) => [path, blob])); const lfsPointers = isGitHub ? 
extractLfsPointers(blobMap) : []; if (lfsPointers.length > 0) { + const pathToSha = new Map(data.map(([path, sha]) => [path, sha])); const resolved = await downloadLfsPointers(lfsPointers); + cacheLfsBlobs(resolved, pathToSha); blobMap = new Map([...blobMap, ...resolved]); } const { initialState, initialFiles } = parseEntry(_args, blobMap); From fddcb075bcf904742e4e936321fe3de6aec8e265 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sat, 21 Feb 2026 23:15:44 -0500 Subject: [PATCH 14/21] fix: Refactor LFS resolution Signed-off-by: Trevor Taubitz --- packages/keystatic/src/app/git-lfs.ts | 29 --------- packages/keystatic/src/app/useItemData.ts | 78 +++++++++++------------ 2 files changed, 37 insertions(+), 70 deletions(-) diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index 1a32e6448..3d5b8d9a2 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -93,33 +93,4 @@ export async function processLfsAdditions( return result; } -export type LfsPointerEntry = { path: string; raw: Uint8Array }; -export function extractLfsPointers( - blobs: Map -): LfsPointerEntry[] { - const pointers: LfsPointerEntry[] = []; - for (const [path, content] of blobs) { - if (isLfsPointer(content)) { - pointers.push({ path, raw: content }); - } - } - return pointers; -} - -export async function downloadLfsPointers( - pointers: LfsPointerEntry[] -): Promise> { - const resolved = new Map(); - const downloads = pointers.map(async entry => { - const encoded = encodeURIComponent(base64Encode(entry.raw)); - const response = await fetch( - `/api/keystatic/github/lfs/download/${encoded}` - ); - if (!response.ok) return; - resolved.set(entry.path, new Uint8Array(await response.arrayBuffer())); - }); - - await Promise.all(downloads); - return resolved; -} diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 0172c587f..5baa4e6ae 100644 --- 
a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -9,7 +9,8 @@ import { } from '../form/api'; import { parseProps } from '../form/parse-props'; import { getAuth } from './auth'; -import { downloadLfsPointers, extractLfsPointers } from './git-lfs'; +import { isLfsPointer } from './git-lfs'; +import { base64Encode } from '../base64'; import { loadDataFile } from './required-files'; import { useBaseCommit, useRepoInfo, useTree } from './shell/data'; import { getDirectoriesForTreeKey, getTreeKey } from './tree-key'; @@ -258,6 +259,8 @@ export function useItemData(args: UseItemDataArgs) { schema: args.schema, slug: args.slug, }; + type BlobEntry = { path: string; sha: string; blob: Uint8Array }; + const allBlobs = locationsForTreeKey .flatMap(dir => { const node = getTreeNodeAtPath(tree, dir); @@ -275,58 +278,51 @@ export function useItemData(args: UseItemDataArgs) { repoInfo ); if (blob instanceof Uint8Array) { - return [entry.path, entry.sha, blob] as const; + return { path: entry.path, sha: entry.sha, blob }; } - return blob.then(blob => [entry.path, entry.sha, blob] as const); + return blob.then( + blob => ({ path: entry.path, sha: entry.sha, blob }) + ); }); const isGitHub = args.config.storage.kind === 'github'; - const cacheLfsBlobs = ( - resolved: Map, - pathToSha: Map - ) => { - for (const [path, content] of resolved) { - const sha = pathToSha.get(path); - if (sha) { - blobCache.set(sha, content); - setBlobToPersistedCache(sha, content); - } - } + const resolveLfsBlobs = async (entries: BlobEntry[]) => { + const lfsEntries = entries.filter(e => isLfsPointer(e.blob)); + if (lfsEntries.length === 0) return; + await Promise.all( + lfsEntries.map(async entry => { + const encoded = encodeURIComponent(base64Encode(entry.blob)); + const response = await fetch( + `/api/keystatic/github/lfs/download/${encoded}` + ); + if (!response.ok) return; + const blob = new Uint8Array(await response.arrayBuffer()); + 
blobCache.set(entry.sha, blob); + setBlobToPersistedCache(entry.sha, blob); + entry.blob = blob; + }) + ); + }; + + const toBlobMap = (entries: BlobEntry[]) => + new Map(entries.map(e => [e.path, e.blob])); + + const buildResult = async (entries: BlobEntry[]) => { + if (isGitHub) await resolveLfsBlobs(entries); + const { initialState, initialFiles } = parseEntry(_args, toBlobMap(entries)); + return { initialState, initialFiles, localTreeKey }; }; if ( - allBlobs.every((x): x is readonly [string, string, Uint8Array] => - Array.isArray(x) + allBlobs.every( + (x): x is BlobEntry => x instanceof Promise === false ) ) { - const blobMap = new Map(allBlobs.map(([path, , blob]) => [path, blob])); - const lfsPointers = isGitHub ? extractLfsPointers(blobMap) : []; - if (lfsPointers.length > 0) { - const pathToSha = new Map(allBlobs.map(([path, sha]) => [path, sha])); - return downloadLfsPointers(lfsPointers).then(resolved => { - cacheLfsBlobs(resolved, pathToSha); - const merged = new Map([...blobMap, ...resolved]); - const { initialFiles, initialState } = parseEntry(_args, merged); - return { initialState, initialFiles, localTreeKey }; - }); - } - const { initialFiles, initialState } = parseEntry(_args, blobMap); - return { initialState, initialFiles, localTreeKey }; + return buildResult(allBlobs); } - return Promise.all(allBlobs).then(async data => { - let blobMap = new Map(data.map(([path, , blob]) => [path, blob])); - const lfsPointers = isGitHub ? 
extractLfsPointers(blobMap) : []; - if (lfsPointers.length > 0) { - const pathToSha = new Map(data.map(([path, sha]) => [path, sha])); - const resolved = await downloadLfsPointers(lfsPointers); - cacheLfsBlobs(resolved, pathToSha); - blobMap = new Map([...blobMap, ...resolved]); - } - const { initialState, initialFiles } = parseEntry(_args, blobMap); - return { initialState, initialFiles, localTreeKey }; - }); + return Promise.all(allBlobs).then(buildResult); }, [ hasLoaded, tree, From 248eaa70dcf0663e207042eacc419b38431cee5b Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sun, 22 Feb 2026 00:06:26 -0500 Subject: [PATCH 15/21] fix: Refactor functionality to parse pointer on client side Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 35 ++++++---------------- packages/keystatic/src/app/git-lfs.test.ts | 27 +++++++++++++++++ packages/keystatic/src/app/git-lfs.ts | 20 +++++++++++++ packages/keystatic/src/app/useItemData.ts | 15 +++++++--- 4 files changed, 67 insertions(+), 30 deletions(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index bcff045db..9cb1302a1 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -161,8 +161,8 @@ export function makeGenericAPIRouteHandler( if (joined === 'github/lfs/upload') { return githubLfsUpload(req, config.config); } - if (joined.startsWith('github/lfs/download/')) { - return githubLfsDownload(params[3], req, config.config); + if (joined === 'github/lfs/download') { + return githubLfsDownload(req, config.config); } if (joined === 'github/logout') { const access_token = getAccessToken(req); @@ -474,26 +474,6 @@ async function computeSha256(content: Uint8Array): Promise { .join(''); } -function parseLfsPointer(text: string): { oid: string; size: number } { - const lines = text.split('\n').filter(l => l.trim().length > 0); - const pairs = new Map(); - for (const line of lines) { - const spaceIdx = line.indexOf(' '); 
- if (spaceIdx !== -1) { - pairs.set(line.slice(0, spaceIdx), line.slice(spaceIdx + 1)); - } - } - const oidRaw = pairs.get('oid'); - if (!oidRaw?.startsWith('sha256:')) { - throw new Error('Invalid LFS pointer: missing or invalid oid'); - } - const sizeRaw = pairs.get('size'); - if (!sizeRaw) { - throw new Error('Invalid LFS pointer: missing size'); - } - return { oid: oidRaw.slice('sha256:'.length), size: parseInt(sizeRaw, 10) }; -} - type LfsBatchResponseObject = { oid: string; size: number; @@ -603,22 +583,25 @@ async function githubLfsUpload( } async function githubLfsDownload( - pointer: string, req: KeystaticRequest, config: Config ): Promise { const lfs = getLfsConfig(req, config); if ('error' in lfs) return lfs.error; - const text = new TextDecoder().decode(base64Decode(pointer)); - const parsed = parseLfsPointer(text); + let payload: { oid: string; size: number }; + try { + payload = await req.json(); + } catch { + return { status: 400, body: 'Invalid JSON body' }; + } const batchRes = await lfsBatchRequest( lfs.owner, lfs.repo, lfs.accessToken, 'download', - [{ oid: parsed.oid, size: parsed.size }] + [{ oid: payload.oid, size: payload.size }] ); if (!batchRes.ok) { return { diff --git a/packages/keystatic/src/app/git-lfs.test.ts b/packages/keystatic/src/app/git-lfs.test.ts index 1c7ac6216..65d83bc1b 100644 --- a/packages/keystatic/src/app/git-lfs.test.ts +++ b/packages/keystatic/src/app/git-lfs.test.ts @@ -6,6 +6,7 @@ import { isLfsTracked, isLfsPointer, createLfsPointer, + parseLfsPointer, } from './git-lfs'; if (!globalThis.crypto) { @@ -102,3 +103,29 @@ describe('isLfsPointer', () => { expect(isLfsPointer(pointer)).toBe(true); }); }); + +describe('parseLfsPointer', () => { + test('parses a valid pointer', () => { + const oid = + 'abc123def456abc123def456abc123def456abc123def456abc123def456abcd1234'; + const size = 999999; + const text = createLfsPointer(oid, size); + expect(parseLfsPointer(text)).toEqual({ oid, size }); + }); + + test('throws on 
missing oid', () => { + const text = 'version https://git-lfs.github.com/spec/v1\nsize 100\n'; + expect(() => parseLfsPointer(text)).toThrow('missing or invalid oid'); + }); + + test('throws on invalid oid prefix', () => { + const text = + 'version https://git-lfs.github.com/spec/v1\noid md5:abc123\nsize 100\n'; + expect(() => parseLfsPointer(text)).toThrow('missing or invalid oid'); + }); + + test('throws on missing size', () => { + const text = `version https://git-lfs.github.com/spec/v1\noid sha256:abc123\n`; + expect(() => parseLfsPointer(text)).toThrow('missing size'); + }); +}); diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts index 3d5b8d9a2..96432c700 100644 --- a/packages/keystatic/src/app/git-lfs.ts +++ b/packages/keystatic/src/app/git-lfs.ts @@ -49,6 +49,26 @@ export function isLfsPointer(content: Uint8Array): boolean { return text === LFS_POINTER_PREFIX; } +export function parseLfsPointer(text: string): { oid: string; size: number } { + const lines = text.split('\n').filter(l => l.trim().length > 0); + const pairs = new Map(); + for (const line of lines) { + const spaceIdx = line.indexOf(' '); + if (spaceIdx !== -1) { + pairs.set(line.slice(0, spaceIdx), line.slice(spaceIdx + 1)); + } + } + const oidRaw = pairs.get('oid'); + if (!oidRaw?.startsWith('sha256:')) { + throw new Error('Invalid LFS pointer: missing or invalid oid'); + } + const sizeRaw = pairs.get('size'); + if (!sizeRaw) { + throw new Error('Invalid LFS pointer: missing size'); + } + return { oid: oidRaw.slice('sha256:'.length), size: parseInt(sizeRaw, 10) }; +} + export function createLfsPointer(oid: string, size: number): string { return `${LFS_POINTER_PREFIX}\noid sha256:${oid}\nsize ${size}\n`; } diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 5baa4e6ae..8bb67d469 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -9,8 +9,7 @@ import { 
} from '../form/api'; import { parseProps } from '../form/parse-props'; import { getAuth } from './auth'; -import { isLfsPointer } from './git-lfs'; -import { base64Encode } from '../base64'; +import { isLfsPointer, parseLfsPointer } from './git-lfs'; import { loadDataFile } from './required-files'; import { useBaseCommit, useRepoInfo, useTree } from './shell/data'; import { getDirectoriesForTreeKey, getTreeKey } from './tree-key'; @@ -290,11 +289,19 @@ export function useItemData(args: UseItemDataArgs) { const resolveLfsBlobs = async (entries: BlobEntry[]) => { const lfsEntries = entries.filter(e => isLfsPointer(e.blob)); if (lfsEntries.length === 0) return; + const textDecoder = new TextDecoder(); await Promise.all( lfsEntries.map(async entry => { - const encoded = encodeURIComponent(base64Encode(entry.blob)); + const { oid, size } = parseLfsPointer( + textDecoder.decode(entry.blob) + ); const response = await fetch( - `/api/keystatic/github/lfs/download/${encoded}` + '/api/keystatic/github/lfs/download', + { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ oid, size }), + } ); if (!response.ok) return; const blob = new Uint8Array(await response.arrayBuffer()); From 764ad88fa322c773f41952f57d30c0940b45d24e Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sun, 22 Feb 2026 00:07:15 -0500 Subject: [PATCH 16/21] fix: Use relative path Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index 9cb1302a1..489bd63cc 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -1,7 +1,7 @@ import * as cookie from 'cookie'; import * as s from 'superstruct'; import { Config } from '..'; -import { base64Decode, base64Encode } from '#base64'; +import { base64Decode, base64Encode } from '../base64'; import { createLfsPointer } from 
'../app/git-lfs'; import { KeystaticResponse, From 5f9e21bd39c9097099c6634429ff352dc7e6b034 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sun, 22 Feb 2026 00:30:15 -0500 Subject: [PATCH 17/21] fix: Use functional style Signed-off-by: Trevor Taubitz --- packages/keystatic/src/app/useItemData.ts | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 8bb67d469..b71b4daa3 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -286,12 +286,13 @@ export function useItemData(args: UseItemDataArgs) { const isGitHub = args.config.storage.kind === 'github'; - const resolveLfsBlobs = async (entries: BlobEntry[]) => { - const lfsEntries = entries.filter(e => isLfsPointer(e.blob)); - if (lfsEntries.length === 0) return; + const resolveLfsBlobs = async ( + entries: BlobEntry[] + ): Promise => { const textDecoder = new TextDecoder(); - await Promise.all( - lfsEntries.map(async entry => { + return Promise.all( + entries.map(async entry => { + if (!isLfsPointer(entry.blob)) return entry; const { oid, size } = parseLfsPointer( textDecoder.decode(entry.blob) ); @@ -303,11 +304,11 @@ export function useItemData(args: UseItemDataArgs) { body: JSON.stringify({ oid, size }), } ); - if (!response.ok) return; + if (!response.ok) return entry; const blob = new Uint8Array(await response.arrayBuffer()); blobCache.set(entry.sha, blob); setBlobToPersistedCache(entry.sha, blob); - entry.blob = blob; + return { ...entry, blob }; }) ); }; @@ -316,8 +317,8 @@ export function useItemData(args: UseItemDataArgs) { new Map(entries.map(e => [e.path, e.blob])); const buildResult = async (entries: BlobEntry[]) => { - if (isGitHub) await resolveLfsBlobs(entries); - const { initialState, initialFiles } = parseEntry(_args, toBlobMap(entries)); + const resolved = isGitHub ? 
await resolveLfsBlobs(entries) : entries; + const { initialState, initialFiles } = parseEntry(_args, toBlobMap(resolved)); return { initialState, initialFiles, localTreeKey }; }; From cf4d93d6b5ffdc16ecaf2c14e5b3fae2c34a3004 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sun, 22 Feb 2026 00:39:50 -0500 Subject: [PATCH 18/21] fix: Use correct crypto Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index 489bd63cc..a8ea49fb2 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -462,10 +462,8 @@ async function lfsBatchRequest( }); } - - async function computeSha256(content: Uint8Array): Promise { - const hashBuffer = await crypto.subtle.digest( + const hashBuffer = await webcrypto.subtle.digest( 'SHA-256', content as unknown as ArrayBuffer ); From 2c5fc3284d60d47a259a9d42eb52a07a43525772 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sun, 22 Feb 2026 11:01:13 -0500 Subject: [PATCH 19/21] fix: Add user-agent to all LFS calls Signed-off-by: Trevor Taubitz --- packages/keystatic/src/api/generic.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index a8ea49fb2..d426a4a4b 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -14,6 +14,8 @@ import { bytesToHex } from '../hex'; import { decryptValue, encryptValue } from './encryption'; import { parseRepoConfig } from '../app/repo-config'; +const USER_AGENT = 'keystatic'; + export type APIRouteConfig = { /** @default process.env.KEYSTATIC_GITHUB_CLIENT_ID */ clientId?: string; @@ -453,6 +455,7 @@ async function lfsBatchRequest( Accept: 'application/vnd.git-lfs+json', 'Content-Type': 'application/vnd.git-lfs+json', Authorization: `Bearer ${accessToken}`, + 
'User-Agent': USER_AGENT, }, body: JSON.stringify({ operation, @@ -472,6 +475,7 @@ async function computeSha256(content: Uint8Array): Promise { .join(''); } + type LfsBatchResponseObject = { oid: string; size: number; @@ -539,7 +543,7 @@ async function githubLfsUpload( const uploadRes = await fetch(uploadAction.href, { method: 'PUT', - headers: uploadAction.header ?? {}, + headers: { 'User-Agent': USER_AGENT, ...(uploadAction.header ?? {}) }, body: item.bytes as unknown as BodyInit, }); if (!uploadRes.ok) { @@ -554,6 +558,7 @@ async function githubLfsUpload( method: 'POST', headers: { 'Content-Type': 'application/vnd.git-lfs+json', + 'User-Agent': USER_AGENT, ...(obj.actions.verify.header ?? {}), }, body: JSON.stringify({ oid: obj.oid, size: obj.size }), @@ -624,7 +629,7 @@ async function githubLfsDownload( } const res = await fetch(downloadAction.href, { - headers: downloadAction.header ?? {}, + headers: { 'User-Agent': USER_AGENT, ...(downloadAction.header ?? {}) }, }); if (!res.ok) { return { From f48527bdc1e4ace2ca5c547c100fa6520cc0a2d1 Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Sun, 22 Feb 2026 11:30:17 -0500 Subject: [PATCH 20/21] chore: Add changeset Signed-off-by: Trevor Taubitz --- .changeset/green-lions-watch.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/green-lions-watch.md diff --git a/.changeset/green-lions-watch.md b/.changeset/green-lions-watch.md new file mode 100644 index 000000000..6b068cfda --- /dev/null +++ b/.changeset/green-lions-watch.md @@ -0,0 +1,5 @@ +--- +'@keystatic/core': minor +--- + +Add Git LFS support From 3c6810c3d97410ef89aebe9770c255de2f16388b Mon Sep 17 00:00:00 2001 From: Trevor Taubitz Date: Fri, 27 Feb 2026 11:29:11 -0500 Subject: [PATCH 21/21] doc: Add docs for optional Git LFS support Signed-off-by: Trevor Taubitz --- docs/src/content/pages/github-mode.mdoc | 29 +++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/docs/src/content/pages/github-mode.mdoc 
b/docs/src/content/pages/github-mode.mdoc
index 4e196a6fb..f420b06c3 100644
--- a/docs/src/content/pages/github-mode.mdoc
+++ b/docs/src/content/pages/github-mode.mdoc
@@ -149,6 +149,35 @@ The process of deploying Keystatic can vary based on where you're deploying, but
 
 ---
 
+## Using Git LFS
+
+Keystatic supports [Git LFS](https://git-lfs.com/). This allows you to manage large binaries in your repo without keeping them in Git history.
+
+To use Git LFS, start by installing the Git LFS client by following the [official instructions](https://git-lfs.com/).
+
+Next, add `lfs: true` to your GitHub storage configuration:
+
+```diff
+storage: {
+  kind: 'github',
+  repo: `${REPO_OWNER}/${REPO_NAME}`,
++ lfs: true
+}
+```
+
+Install Git LFS in the repo, and add any file patterns you want tracked via `git lfs track`:
+
+```bash
+git lfs install
+git lfs track "*.jpg" "*.jpeg" "*.png" "*.gif" "*.bmp" "*.tiff" "*.webp" "*.svg"
+```
+
+Commit your `.gitattributes` and push to GitHub. Make sure this file lives on the remote branch you want to commit images to.
+
+Now any images you upload will automatically be managed through Git LFS.
+
+---
+
 ## Screencast walk-through
 
 This segment of the [Keystatic Mini-Course on YouTube](https://www.youtube.com/playlist?list=PLHrxuCR-0CcSmkyLcmdV7Ruql8DTm644k) may help understand how to set up the `github` storage kind: