diff --git a/.changeset/green-lions-watch.md b/.changeset/green-lions-watch.md new file mode 100644 index 000000000..6b068cfda --- /dev/null +++ b/.changeset/green-lions-watch.md @@ -0,0 +1,5 @@ +--- +'@keystatic/core': minor +--- + +Add Git LFS support diff --git a/docs/src/content/pages/github-mode.mdoc b/docs/src/content/pages/github-mode.mdoc index 4e196a6fb..f420b06c3 100644 --- a/docs/src/content/pages/github-mode.mdoc +++ b/docs/src/content/pages/github-mode.mdoc @@ -149,6 +149,35 @@ The process of deploying Keystatic can vary based on where you're deploying, but --- +## Using Git LFS + +Keystatic supports [Git LFS](https://git-lfs.com/). This allows you to manage large binaries in your repo without keeping them in Git history. + +To use Git LFS, start by installing the Git LFS client by following the [official instructions](https://git-lfs.com/). + +Next, add `lfs: true` to your GitHub storage configuration: + +```diff +storage: { + kind: 'github', + repo: `${REPO_OWNER}/${REPO_NAME}`, ++ lfs: true, +} +``` + +Install LFS to the repo, and add any patterns you want through `git lfs track`. + +```bash +git lfs install +git lfs track "*.jpg" "*.jpeg" "*.png" "*.gif" "*.bmp" "*.tiff" "*.webp" "*.svg" +``` + +Commit your `.gitattributes` and push to GitHub. Make sure this file lives on the remote branch you want to commit images to. + +Now any images you upload will automatically be managed through Git LFS. 
+ +--- + ## Screencast walk-through This segment of the [Keystatic Mini-Course on YouTube](https://www.youtube.com/playlist?list=PLHrxuCR-0CcSmkyLcmdV7Ruql8DTm644k) may help understand how to set up the `github` storage kind: diff --git a/packages/keystatic/src/api/generic.ts b/packages/keystatic/src/api/generic.ts index f74d5e810..d426a4a4b 100644 --- a/packages/keystatic/src/api/generic.ts +++ b/packages/keystatic/src/api/generic.ts @@ -1,6 +1,8 @@ import * as cookie from 'cookie'; import * as s from 'superstruct'; import { Config } from '..'; +import { base64Decode, base64Encode } from '../base64'; +import { createLfsPointer } from '../app/git-lfs'; import { KeystaticResponse, KeystaticRequest, @@ -10,6 +12,9 @@ import { handleGitHubAppCreation, localModeApiHandler } from '#api-handler'; import { webcrypto } from '#webcrypto'; import { bytesToHex } from '../hex'; import { decryptValue, encryptValue } from './encryption'; +import { parseRepoConfig } from '../app/repo-config'; + +const USER_AGENT = 'keystatic'; export type APIRouteConfig = { /** @default process.env.KEYSTATIC_GITHUB_CLIENT_ID */ @@ -155,9 +160,14 @@ export function makeGenericAPIRouteHandler( if (joined === 'github/repo-not-found') { return githubRepoNotFound(req, config); } + if (joined === 'github/lfs/upload') { + return githubLfsUpload(req, config.config); + } + if (joined === 'github/lfs/download') { + return githubLfsDownload(req, config.config); + } if (joined === 'github/logout') { - const cookies = cookie.parse(req.headers.get('cookie') ?? ''); - const access_token = cookies['keystatic-gh-access-token']; + const access_token = getAccessToken(req); if (access_token) { await fetch( `https://api.github.com/applications/${config.clientId}/token`, @@ -291,6 +301,11 @@ async function getTokenCookies( return headers; } +function getAccessToken(req: KeystaticRequest): string | undefined { + const cookies = cookie.parse(req.headers.get('cookie') ?? 
''); + return cookies['keystatic-gh-access-token'] || undefined; +} + async function getRefreshToken( req: KeystaticRequest, config: InnerAPIRouteConfig @@ -407,6 +422,229 @@ async function createdGithubApp( return handleGitHubAppCreation(req, slugEnvVarName); } +function getLfsConfig( + req: KeystaticRequest, + config: Config +): + | { error: KeystaticResponse } + | { owner: string; repo: string; accessToken: string } { + if (config.storage.kind !== 'github') { + return { + error: { status: 400, body: 'LFS is only supported with GitHub storage' }, + }; + } + const accessToken = getAccessToken(req); + if (!accessToken) { + return { error: { status: 401, body: 'Unauthorized' } }; + } + const { owner, name: repo } = parseRepoConfig(config.storage.repo); + return { owner, repo, accessToken }; +} + +async function lfsBatchRequest( + owner: string, + repo: string, + accessToken: string, + operation: 'upload' | 'download', + objects: Array<{ oid: string; size: number }> +) { + const batchUrl = `https://github.com/${owner}/${repo}.git/info/lfs/objects/batch`; + return fetch(batchUrl, { + method: 'POST', + headers: { + Accept: 'application/vnd.git-lfs+json', + 'Content-Type': 'application/vnd.git-lfs+json', + Authorization: `Bearer ${accessToken}`, + 'User-Agent': USER_AGENT, + }, + body: JSON.stringify({ + operation, + transfers: ['basic'], + objects, + }), + }); +} + +async function computeSha256(content: Uint8Array): Promise<string> { + const hashBuffer = await webcrypto.subtle.digest( + 'SHA-256', + content as unknown as ArrayBuffer + ); + return Array.from(new Uint8Array(hashBuffer)) + .map(b => b.toString(16).padStart(2, '0')) + .join(''); +} + + +type LfsBatchResponseObject = { + oid: string; + size: number; + actions?: { + upload?: { href: string; header?: Record<string, string> }; + download?: { href: string; header?: Record<string, string> }; + verify?: { href: string; header?: Record<string, string> }; + }; + error?: { code: number; message: string }; +}; + +async function githubLfsUpload( + req: KeystaticRequest, + 
config: Config +): Promise<KeystaticResponse> { + const lfs = getLfsConfig(req, config); + if ('error' in lfs) return lfs.error; + + let payload: { objects: Array<{ content: string }> }; + try { + payload = await req.json(); + } catch { + return { status: 400, body: 'Invalid JSON body' }; + } + + const prepared = await Promise.all( + payload.objects.map(async obj => { + const bytes = base64Decode(obj.content); + const oid = await computeSha256(bytes); + return { oid, size: bytes.byteLength, bytes }; + }) + ); + + const batchRes = await lfsBatchRequest( + lfs.owner, + lfs.repo, + lfs.accessToken, + 'upload', + prepared.map(p => ({ oid: p.oid, size: p.size })) + ); + if (!batchRes.ok) { + return { + status: batchRes.status, + body: `LFS batch API error: ${await batchRes.text()}`, + }; + } + + const batch: { objects: LfsBatchResponseObject[] } = await batchRes.json(); + + for (const obj of batch.objects) { + if (obj.error) { + return { + status: 502, + body: `LFS error for ${obj.oid}: ${obj.error.message} (${obj.error.code})`, + }; + } + + const uploadAction = obj.actions?.upload; + if (!uploadAction) continue; + + const item = prepared.find(p => p.oid === obj.oid); + if (!item) { + return { status: 500, body: `No content prepared for ${obj.oid}` }; + } + + const uploadRes = await fetch(uploadAction.href, { + method: 'PUT', + headers: { 'User-Agent': USER_AGENT, ...(uploadAction.header ?? {}) }, + body: item.bytes as unknown as BodyInit, + }); + if (!uploadRes.ok) { + return { + status: 502, + body: `LFS upload failed for ${obj.oid} (${uploadRes.status}): ${await uploadRes.text()}`, + }; + } + + if (obj.actions?.verify) { + const verifyRes = await fetch(obj.actions.verify.href, { + method: 'POST', + headers: { + 'Content-Type': 'application/vnd.git-lfs+json', + 'User-Agent': USER_AGENT, + ...(obj.actions.verify.header ?? 
{}), + }, + body: JSON.stringify({ oid: obj.oid, size: obj.size }), + }); + if (!verifyRes.ok) { + return { + status: 502, + body: `LFS verify failed for ${obj.oid} (${verifyRes.status}): ${await verifyRes.text()}`, + }; + } + } + } + + const pointers = prepared.map(p => + base64Encode( + new TextEncoder().encode(createLfsPointer(p.oid, p.size)) + ) + ); + + return { + status: 200, + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ objects: pointers.map(p => ({ pointer: p })) }), + }; +} + +async function githubLfsDownload( + req: KeystaticRequest, + config: Config +): Promise<KeystaticResponse> { + const lfs = getLfsConfig(req, config); + if ('error' in lfs) return lfs.error; + + let payload: { oid: string; size: number }; + try { + payload = await req.json(); + } catch { + return { status: 400, body: 'Invalid JSON body' }; + } + + const batchRes = await lfsBatchRequest( + lfs.owner, + lfs.repo, + lfs.accessToken, + 'download', + [{ oid: payload.oid, size: payload.size }] + ); + if (!batchRes.ok) { + return { + status: batchRes.status, + body: `LFS batch API error: ${await batchRes.text()}`, + }; + } + + const batch: { objects: LfsBatchResponseObject[] } = await batchRes.json(); + const obj = batch.objects[0]; + + if (obj?.error) { + return { + status: 502, + body: `LFS error for ${obj.oid}: ${obj.error.message} (${obj.error.code})`, + }; + } + + const downloadAction = obj?.actions?.download; + if (!downloadAction) { + return { status: 404, body: 'LFS object not found' }; + } + + const res = await fetch(downloadAction.href, { + headers: { 'User-Agent': USER_AGENT, ...(downloadAction.header ?? 
{}) }, + }); + if (!res.ok) { + return { + status: 502, + body: `LFS download failed (${res.status}): ${await res.text()}`, + }; + } + + return { + status: 200, + headers: { 'Content-Type': 'application/octet-stream' }, + body: new Uint8Array(await res.arrayBuffer()), + }; +} + function immediatelyExpiringCookie(name: string) { return cookie.serialize(name, '', { secure: process.env.NODE_ENV === 'production', diff --git a/packages/keystatic/src/app/git-lfs.test.ts b/packages/keystatic/src/app/git-lfs.test.ts new file mode 100644 index 000000000..65d83bc1b --- /dev/null +++ b/packages/keystatic/src/app/git-lfs.test.ts @@ -0,0 +1,131 @@ +/** @jest-environment node */ +import { webcrypto } from 'node:crypto'; +import { expect, test, describe } from '@jest/globals'; +import { + parseGitAttributes, + isLfsTracked, + isLfsPointer, + createLfsPointer, + parseLfsPointer, +} from './git-lfs'; + +if (!globalThis.crypto) { + globalThis.crypto = webcrypto as any; +} + +const textEncoder = new TextEncoder(); + +function makeLfsPointer(oid: string, size: number): Uint8Array { + return textEncoder.encode(createLfsPointer(oid, size)); +} + +describe('parseGitAttributes', () => { + test('extracts LFS patterns from standard .gitattributes', () => { + const content = [ + '*.png filter=lfs diff=lfs merge=lfs -text', + '*.jpg filter=lfs diff=lfs merge=lfs -text', + '*.md text', + ].join('\n'); + expect(parseGitAttributes(content)).toEqual(['*.png', '*.jpg']); + }); + + test('ignores lines without all three LFS attributes', () => { + const content = '*.png filter=lfs diff=lfs -text'; + expect(parseGitAttributes(content)).toEqual([]); + }); + + test('ignores comments and blank lines', () => { + const content = [ + '# This is a comment', + '', + '*.bin filter=lfs diff=lfs merge=lfs -text', + '*.md text # not lfs', + ].join('\n'); + expect(parseGitAttributes(content)).toEqual(['*.bin']); + }); + + test('handles inline comments after LFS attributes', () => { + const content = '*.psd 
filter=lfs diff=lfs merge=lfs -text # large files'; + expect(parseGitAttributes(content)).toEqual(['*.psd']); + }); + + test('returns empty array for empty content', () => { + expect(parseGitAttributes('')).toEqual([]); + }); +}); + +describe('isLfsTracked', () => { + const patterns = ['*.png', '*.jpg', 'assets/**/*.gif']; + + test('matches simple extension pattern', () => { + expect(isLfsTracked('images/photo.png', patterns)).toBe(true); + expect(isLfsTracked('deep/nested/file.jpg', patterns)).toBe(true); + }); + + test('does not match non-tracked extensions', () => { + expect(isLfsTracked('README.md', patterns)).toBe(false); + expect(isLfsTracked('src/app.ts', patterns)).toBe(false); + }); + + test('matches glob patterns with directories', () => { + expect(isLfsTracked('assets/icons/icon.gif', patterns)).toBe(true); + }); + + test('does not match outside glob scope', () => { + expect(isLfsTracked('other/icon.gif', patterns)).toBe(false); + }); + + test('returns false for empty patterns', () => { + expect(isLfsTracked('file.png', [])).toBe(false); + }); +}); + +describe('isLfsPointer', () => { + const sampleOid = + 'abc123def456abc123def456abc123def456abc123def456abc123def456abcd1234'; + const sampleSize = 12345; + + test('detects valid pointer', () => { + const pointer = makeLfsPointer(sampleOid, sampleSize); + expect(isLfsPointer(pointer)).toBe(true); + }); + + test('rejects non-pointer content', () => { + expect(isLfsPointer(textEncoder.encode('hello world'))).toBe(false); + expect(isLfsPointer(new Uint8Array(300))).toBe(false); + expect(isLfsPointer(new Uint8Array(10))).toBe(false); + }); + + test('detects pointer with varying oid and size', () => { + const oid = + '0000000000000000000000000000000000000000000000000000000000000000'; + const pointer = makeLfsPointer(oid, 999999); + expect(isLfsPointer(pointer)).toBe(true); + }); +}); + +describe('parseLfsPointer', () => { + test('parses a valid pointer', () => { + const oid = + 
'abc123def456abc123def456abc123def456abc123def456abc123def456abcd1234'; + const size = 999999; + const text = createLfsPointer(oid, size); + expect(parseLfsPointer(text)).toEqual({ oid, size }); + }); + + test('throws on missing oid', () => { + const text = 'version https://git-lfs.github.com/spec/v1\nsize 100\n'; + expect(() => parseLfsPointer(text)).toThrow('missing or invalid oid'); + }); + + test('throws on invalid oid prefix', () => { + const text = + 'version https://git-lfs.github.com/spec/v1\noid md5:abc123\nsize 100\n'; + expect(() => parseLfsPointer(text)).toThrow('missing or invalid oid'); + }); + + test('throws on missing size', () => { + const text = `version https://git-lfs.github.com/spec/v1\noid sha256:abc123\n`; + expect(() => parseLfsPointer(text)).toThrow('missing size'); + }); +}); diff --git a/packages/keystatic/src/app/git-lfs.ts b/packages/keystatic/src/app/git-lfs.ts new file mode 100644 index 000000000..96432c700 --- /dev/null +++ b/packages/keystatic/src/app/git-lfs.ts @@ -0,0 +1,116 @@ +import { minimatch } from 'minimatch'; +import { base64Decode, base64Encode } from '#base64'; + +const textDecoder = new TextDecoder(); + +const LFS_POINTER_PREFIX = 'version https://git-lfs.github.com/spec/v1'; + +export function parseGitAttributes(content: string): string[] { + return content + .split('\n') + .map(line => line.split('#')[0].trim()) + .filter(line => line.length > 0) + .flatMap(line => { + const [pattern, ...attributes] = line.split(/\s+/); + if (!pattern) return []; + const attrs = parseAttributes(attributes); + if ( + attrs.get('filter') === 'lfs' && + attrs.get('diff') === 'lfs' && + attrs.get('merge') === 'lfs' + ) { + return [pattern]; + } + return []; + }); +} + +function parseAttributes(parts: string[]): Map { + const attrs = new Map(); + for (const part of parts) { + if (part.includes('=')) { + const [key, value] = part.split('=', 2); + attrs.set(key, value); + } else if (part.startsWith('-')) { + attrs.set(part.slice(1), false); 
+ } else { + attrs.set(part, true); + } + } + return attrs; +} + +export function isLfsTracked(path: string, patterns: string[]): boolean { + return patterns.some(pattern => minimatch(path, pattern, { matchBase: true })); +} + +export function isLfsPointer(content: Uint8Array): boolean { + const text = textDecoder.decode(content.slice(0, LFS_POINTER_PREFIX.length)); + return text === LFS_POINTER_PREFIX; +} + +export function parseLfsPointer(text: string): { oid: string; size: number } { + const lines = text.split('\n').filter(l => l.trim().length > 0); + const pairs = new Map(); + for (const line of lines) { + const spaceIdx = line.indexOf(' '); + if (spaceIdx !== -1) { + pairs.set(line.slice(0, spaceIdx), line.slice(spaceIdx + 1)); + } + } + const oidRaw = pairs.get('oid'); + if (!oidRaw?.startsWith('sha256:')) { + throw new Error('Invalid LFS pointer: missing or invalid oid'); + } + const sizeRaw = pairs.get('size'); + if (!sizeRaw) { + throw new Error('Invalid LFS pointer: missing size'); + } + return { oid: oidRaw.slice('sha256:'.length), size: parseInt(sizeRaw, 10) }; +} + +export function createLfsPointer(oid: string, size: number): string { + return `${LFS_POINTER_PREFIX}\noid sha256:${oid}\nsize ${size}\n`; +} + +export async function processLfsAdditions( + additions: { path: string; contents: Uint8Array }[], + patterns: string[] +): Promise<{ path: string; contents: Uint8Array }[]> { + const lfsIndices: number[] = []; + for (let i = 0; i < additions.length; i++) { + if (isLfsTracked(additions[i].path, patterns)) { + lfsIndices.push(i); + } + } + + if (lfsIndices.length === 0) return additions; + + const response = await fetch('/api/keystatic/github/lfs/upload', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + objects: lfsIndices.map(i => ({ + content: base64Encode(additions[i].contents), + })), + }), + }); + + if (!response.ok) { + throw new Error(`LFS upload failed: ${await response.text()}`); + } + + const 
data: { objects: Array<{ pointer: string }> } = await response.json(); + const result = [...additions]; + for (let i = 0; i < lfsIndices.length; i++) { + const idx = lfsIndices[i]; + result[idx] = { + path: additions[idx].path, + contents: base64Decode(data.objects[i].pointer), + }; + } + + return result; +} + + diff --git a/packages/keystatic/src/app/shell/data.tsx b/packages/keystatic/src/app/shell/data.tsx index d19ac523e..fd79dea69 100644 --- a/packages/keystatic/src/app/shell/data.tsx +++ b/packages/keystatic/src/app/shell/data.tsx @@ -50,6 +50,8 @@ import { } from '../object-cache'; import { CollabProvider } from './collab'; import { EmptyRepo } from './empty-repo'; +import { parseGitAttributes } from '../git-lfs'; +import { fetchBlob } from '../useItemData'; export function fetchLocalTree(sha: string) { if (treeCache.has(sha)) { @@ -438,22 +440,64 @@ export function GitHubAppShellProvider(props: { repo?.owner.login, ]); + const baseCommitSha = + currentBranchRef?.target?.__typename === 'Commit' + ? currentBranchRef.target.oid + : ''; + + const lfsPatterns = useData( + useCallback(() => { + if ( + props.config.storage.kind !== 'github' || + !props.config.storage.lfs + ) { + return [] as string[]; + } + if (currentBranchTree.kind !== 'loaded' || !repoInfo) return LOADING; + const node = getTreeNodeAtPath( + currentBranchTree.data.tree, + '.gitattributes' + ); + if (!node) return [] as string[]; + const blob = fetchBlob( + props.config, + node.entry.sha, + '.gitattributes', + baseCommitSha, + repoInfo + ); + if (blob instanceof Uint8Array) { + return parseGitAttributes(new TextDecoder().decode(blob)); + } + return blob.then(b => parseGitAttributes(new TextDecoder().decode(b))); + }, [ + props.config, + currentBranchTree, + repoInfo, + baseCommitSha, + ]) + ); + return ( - - - {props.config.storage.kind === 'cloud' ? ( - - {props.children} - - ) : ( - props.children - )} - - + + + + {props.config.storage.kind === 'cloud' ? 
( + + {props.children} + + ) : ( + props.children + )} + + + @@ -471,6 +515,12 @@ export function useCurrentBranch() { return useContext(CurrentBranchContext); } +const LfsPatternsContext = createContext<string[]>([]); + +export function useLfsPatterns() { + return useContext(LfsPatternsContext); +} + type BranchInfo = { id: string; commitSha: string; diff --git a/packages/keystatic/src/app/updating.tsx b/packages/keystatic/src/app/updating.tsx index 4ca1ee7b9..95621ba05 100644 --- a/packages/keystatic/src/app/updating.tsx +++ b/packages/keystatic/src/app/updating.tsx @@ -29,6 +29,8 @@ import { createUrqlClient } from './provider'; import { serializeProps } from '../form/serialize-props'; import { scopeEntriesWithPathPrefix } from './shell/path-prefix'; import { base64Encode } from '#base64'; +import { processLfsAdditions } from './git-lfs'; +import { useLfsPatterns } from './shell/data'; const textEncoder = new TextEncoder(); @@ -133,6 +135,7 @@ export function useUpsertItem(args: { const repoInfo = useRepoInfo(); const appSlug = useContext(AppSlugContext); const unscopedTreeData = useCurrentUnscopedTree(); + const lfsPatterns = useLfsPatterns(); return [ state, @@ -210,6 +213,17 @@ branchName: override?.branch ?? 
currentBranch, repositoryNameWithOwner: `${repoInfo.owner}/${repoInfo.name}`, }; + if ( + args.config.storage.kind === 'github' && + args.config.storage.lfs && + lfsPatterns.length > 0 + ) { + additions = await processLfsAdditions( + additions, + lfsPatterns + ); + } + const runMutation = (expectedHeadOid: string) => mutate({ input: { diff --git a/packages/keystatic/src/app/useItemData.ts b/packages/keystatic/src/app/useItemData.ts index 48ba8d896..b71b4daa3 100644 --- a/packages/keystatic/src/app/useItemData.ts +++ b/packages/keystatic/src/app/useItemData.ts @@ -9,6 +9,7 @@ import { } from '../form/api'; import { parseProps } from '../form/parse-props'; import { getAuth } from './auth'; +import { isLfsPointer, parseLfsPointer } from './git-lfs'; import { loadDataFile } from './required-files'; import { useBaseCommit, useRepoInfo, useTree } from './shell/data'; import { getDirectoriesForTreeKey, getTreeKey } from './tree-key'; @@ -257,6 +258,8 @@ export function useItemData(args: UseItemDataArgs) { schema: args.schema, slug: args.slug, }; + type BlobEntry = { path: string; sha: string; blob: Uint8Array }; + const allBlobs = locationsForTreeKey .flatMap(dir => { const node = getTreeNodeAtPath(tree, dir); @@ -274,36 +277,60 @@ export function useItemData(args: UseItemDataArgs) { repoInfo ); if (blob instanceof Uint8Array) { - return [entry.path, blob] as const; + return { path: entry.path, sha: entry.sha, blob }; } - return blob.then(blob => [entry.path, blob] as const); + return blob.then( + blob => ({ path: entry.path, sha: entry.sha, blob }) + ); }); + const isGitHub = args.config.storage.kind === 'github'; + + const resolveLfsBlobs = async ( + entries: BlobEntry[] + ): Promise => { + const textDecoder = new TextDecoder(); + return Promise.all( + entries.map(async entry => { + if (!isLfsPointer(entry.blob)) return entry; + const { oid, size } = parseLfsPointer( + textDecoder.decode(entry.blob) + ); + const response = await fetch( + 
'/api/keystatic/github/lfs/download', + { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ oid, size }), + } + ); + if (!response.ok) return entry; + const blob = new Uint8Array(await response.arrayBuffer()); + blobCache.set(entry.sha, blob); + setBlobToPersistedCache(entry.sha, blob); + return { ...entry, blob }; + }) + ); + }; + + const toBlobMap = (entries: BlobEntry[]) => + new Map(entries.map(e => [e.path, e.blob])); + + const buildResult = async (entries: BlobEntry[]) => { + const resolved = isGitHub ? await resolveLfsBlobs(entries) : entries; + const { initialState, initialFiles } = parseEntry(_args, toBlobMap(resolved)); + return { initialState, initialFiles, localTreeKey }; + }; + if ( - allBlobs.every((x): x is readonly [string, Uint8Array] => - Array.isArray(x) + allBlobs.every( + (x): x is BlobEntry => x instanceof Promise === false ) ) { - const { initialFiles, initialState } = parseEntry( - _args, - new Map(allBlobs) - ); - - return { - initialState, - initialFiles, - localTreeKey, - }; + return buildResult(allBlobs); } - return Promise.all(allBlobs).then(async data => { - const { initialState, initialFiles } = parseEntry(_args, new Map(data)); - return { - initialState, - initialFiles, - localTreeKey, - }; - }); + return Promise.all(allBlobs).then(buildResult); }, [ hasLoaded, tree, diff --git a/packages/keystatic/src/base64.ts b/packages/keystatic/src/base64.ts index 5004a4db1..0ddd9add8 100644 --- a/packages/keystatic/src/base64.ts +++ b/packages/keystatic/src/base64.ts @@ -1,8 +1,7 @@ export function base64UrlDecode(base64: string) { - const binString = atob(base64.replace(/-/g, '+').replace(/_/g, '/')); - return Uint8Array.from( - binString as Iterable, - m => (m as unknown as string).codePointAt(0)! 
+ return base64Decode(base64 + .replace(/-/g, '+') + .replace(/_/g, '/') + ); } @@ -13,6 +12,14 @@ .replace(/=/g, ''); } +export function base64Decode(base64: string) { + const binString = atob(base64); + return Uint8Array.from( + binString as Iterable<number>, + m => (m as unknown as string).codePointAt(0)! + ); +} + export function base64Encode(bytes: Uint8Array) { const binString = Array.from(bytes, byte => String.fromCodePoint(byte)).join( '' diff --git a/packages/keystatic/src/config.tsx b/packages/keystatic/src/config.tsx index 4c95368f3..d72e65dc0 100644 --- a/packages/keystatic/src/config.tsx +++ b/packages/keystatic/src/config.tsx @@ -80,6 +80,7 @@ type Navigation<K> = K[] | { [section: string]: K[] }; type GitHubStorageConfig = { kind: 'github'; repo: RepoConfig; + lfs?: boolean; } & CommonRemoteStorageConfig; export type GitHubConfig<