From 868e9587caaecec4c112759e908f35a7051ae228 Mon Sep 17 00:00:00 2001 From: Gregor Mitscha-Baude Date: Mon, 20 Oct 2025 10:02:18 +0200 Subject: [PATCH 1/6] two new tests --- src/sync/git-sync.test.ts | 32 +++++++++ src/test/mock-remote.ts | 140 ++++++++++++++++++++++++++------------ 2 files changed, 129 insertions(+), 43 deletions(-) diff --git a/src/sync/git-sync.test.ts b/src/sync/git-sync.test.ts index 24f5f42..32c62eb 100644 --- a/src/sync/git-sync.test.ts +++ b/src/sync/git-sync.test.ts @@ -110,6 +110,38 @@ describe('syncBidirectional', () => { expectParity(store, remote); }); + test('rename followed by local edit pushes updated content under new path', async () => { + store.createFile('Draft.md', 'initial body'); + await syncBidirectional(store, 'user/repo'); + expect([...remote.snapshot().keys()]).toEqual(['Draft.md']); + + const nextPath = store.renameFile('Draft.md', 'Ready'); + expect(nextPath).toBe('Ready.md'); + store.saveFile('Ready.md', 'edited after rename'); + + await syncBidirectional(store, 'user/repo'); + + const remoteFiles = [...remote.snapshot().entries()]; + expect(remoteFiles).toEqual([['Ready.md', 'edited after rename']]); + const readyMeta = store.listFiles().find((file) => file.path === 'Ready.md'); + const readyDoc = readyMeta ? store.loadFileById(readyMeta.id) : null; + expect(readyDoc?.content).toBe('edited after rename'); + expect(listTombstones(store.slug)).toHaveLength(0); + }); + + test('surface 422 when branch head advances during push', async () => { + store.createFile('Lonely.md', 'seed text'); + await syncBidirectional(store, 'user/repo'); + + store.saveFile('Lonely.md', 'edited locally'); + remote.advanceHeadOnNextUpdate(); + + await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({ + status: 422, + path: expect.stringContaining('/git/refs/heads/'), + }); + }); + test('pulls new remote notes', async () => { remote.setFile('Remote.md', '# remote'); await syncBidirectional(store, 'user/repo'); diff --git a/src/test/mock-remote.ts b/src/test/mock-remote.ts index a6df817..1276229 100644 --- a/src/test/mock-remote.ts +++ b/src/test/mock-remote.ts @@ -33,6 +33,7 @@ class MockRemoteRepo { private treeRecords = new Map(); private commitRecords = new Map(); private installations = new Map(); + private pendingHeadAdvance = new Set(); configure(owner: string, repo: string) { this.owner = owner; @@ -107,9 +108,26 @@ class MockRemoteRepo { const body = (await this.parseBody(request)) ?? {}; const sha = typeof body?.sha === 'string' ? String(body.sha) : ''; const branch = decodeURIComponent(refPatchMatch[3] ?? ''); + const force = body?.force === true; if (!sha) { return this.makeResponse(422, { message: 'missing sha' }); } + if (this.pendingHeadAdvance.has(branch) && force !== true) { + this.pendingHeadAdvance.delete(branch); + this.createSyntheticCommit(branch); + } + const commit = this.commitRecords.get(sha); + if (!commit) { + return this.makeResponse(422, { message: 'unknown commit' }); + } + const currentHead = this.headByBranch.get(branch); + if (currentHead && force !== true && !this.isDescendant(sha, currentHead)) { + return this.makeResponse(422, { + message: 'fast-forward required', + currentHead, + attempted: sha, + }); + } this.setHead(branch, sha); return this.makeResponse(200, { ref: `refs/heads/${branch}`, @@ -189,10 +207,14 @@ class MockRemoteRepo { const body = (await this.parseBody(request)) ?? {}; const ref = typeof body.ref === 'string' ? body.ref : ''; const sha = typeof body.sha === 'string' ? 
body.sha : ''; - if (!ref.startsWith('refs/heads/') || !this.commitRecords.has(sha)) { + const branch = ref.replace('refs/heads/', ''); + if ( + !ref.startsWith('refs/heads/') || + !this.commitRecords.has(sha) || + this.headByBranch.has(branch) + ) { return this.makeResponse(422, { message: 'invalid ref' }); } - const branch = ref.replace('refs/heads/', ''); this.setHead(branch, sha); return this.makeResponse(201, { ref, object: { sha, type: 'commit' } }); } @@ -200,40 +222,45 @@ class MockRemoteRepo { const createTreeMatch = url.pathname.match(/^\/repos\/([^/]+)\/([^/]+)\/git\/trees$/); if (createTreeMatch && method === 'POST') { const body = (await this.parseBody(request)) ?? {}; - const entries: Array<{ path?: string; mode?: string; type?: string; content?: string; sha?: string | null }> = - Array.isArray(body.tree) ? body.tree : []; - const nextTree = new Map(); - const deleted = new Set(); - let base = this.files; - if (typeof body.base_tree === 'string') { - const baseTree = this.treeRecords.get(body.base_tree); - if (baseTree) base = this.cloneFiles(baseTree); - } - for (const entry of entries) { - if (!entry.path) continue; - if (entry.sha === null) { - deleted.add(entry.path); - continue; + const entries: Array<{ + path?: string; + mode?: string; + type?: string; + content?: string; + sha?: string | null; + }> = Array.isArray(body.tree) ? body.tree : []; + const nextTree = new Map(); + const deleted = new Set(); + let base = this.files; + if (typeof body.base_tree === 'string') { + const baseTree = this.treeRecords.get(body.base_tree); + if (baseTree) base = this.cloneFiles(baseTree); } - if (entry.type === 'blob' && typeof entry.content === 'string') { - const text = entry.content; - const sha = this.computeSha(text); - nextTree.set(entry.path, { text, sha }); - this.blobs.set(sha, text); - } else if (entry.sha) { - const blob = this.blobs.get(entry.sha); - if (blob !== undefined) { - nextTree.set(entry.path, { text: blob, sha: entry.sha }); + for (const entry of entries) { + if (!entry.path) continue; + if (entry.sha === null) { + deleted.add(entry.path); + continue; + } + if (entry.type === 'blob' && typeof entry.content === 'string') { + const text = entry.content; + const sha = this.computeSha(text); + nextTree.set(entry.path, { text, sha }); + this.blobs.set(sha, text); + } else if (entry.sha) { + const blob = this.blobs.get(entry.sha); + if (blob !== undefined) { + nextTree.set(entry.path, { text: blob, sha: entry.sha }); + } } } - } - const combined = this.cloneFiles(base); - for (const [path, file] of nextTree.entries()) { - combined.set(path, file); - } - for (const path of deleted) { - combined.delete(path); - } + const combined = this.cloneFiles(base); + for (const [path, file] of nextTree.entries()) { + combined.set(path, file); + } + for (const path of deleted) { + combined.delete(path); + } const treeSha = this.nextTree(); this.treeRecords.set(treeSha, combined); return this.makeResponse(201, { sha: treeSha, tree: this.formatTree(combined, true) }); @@ -255,16 +282,6 @@ class MockRemoteRepo { return this.makeResponse(201, { sha: commitSha }); } - const updateRefMatch = url.pathname.match(/^\/repos\/([^/]+)\/([^/]+)\/git\/refs\/heads\/([^/]+)$/); - if (updateRefMatch && method === 'PATCH') { - const branch = decodeURIComponent(updateRefMatch[3] ?? ''); - const body = (await this.parseBody(request)) ?? {}; - const sha = typeof body.sha === 'string' ? 
body.sha : '';
-      if (!sha) return this.makeResponse(422, { message: 'missing sha' });
-      this.setHead(branch, sha);
-      return this.makeResponse(200, { ref: `refs/heads/${branch}`, object: { sha, type: 'commit' } });
-    }
-
     const contentGetMatch = url.pathname.match(/^\/repos\/([^/]+)\/([^/]+)\/contents\/(.+)$/);
     if (contentGetMatch && method === 'GET') {
       const path = decodeURIComponent(contentGetMatch[3] ?? '');
@@ -301,6 +318,43 @@
     return this.makeResponse(404, { message: 'not found' });
   }

+  // Schedule a synthetic commit before the next ref update to simulate an external writer.
+  advanceHeadOnNextUpdate(branch: string = this.defaultBranch) {
+    this.pendingHeadAdvance.add(branch);
+  }
+
+  private createSyntheticCommit(branch: string) {
+    const snapshot = this.cloneFiles(this.files);
+    const treeSha = this.nextTree();
+    this.treeRecords.set(treeSha, this.cloneFiles(snapshot));
+    const parent = this.headByBranch.get(branch);
+    const commitSha = this.nextCommit();
+    this.commitRecords.set(commitSha, {
+      treeSha,
+      files: this.cloneFiles(snapshot),
+      parents: parent ? [parent] : [],
+    });
+    this.setHead(branch, commitSha);
+  }
+
+  private isDescendant(descendant: string, ancestor: string): boolean {
+    if (descendant === ancestor) return true;
+    const visited = new Set<string>();
+    const queue: string[] = [descendant];
+    while (queue.length > 0) {
+      const current = queue.shift();
+      if (!current || visited.has(current)) continue;
+      visited.add(current);
+      const commit = this.commitRecords.get(current);
+      if (!commit) continue;
+      for (const parent of commit.parents) {
+        if (parent === ancestor) return true;
+        queue.push(parent);
+      }
+    }
+    return false;
+  }
+
   private ensureRequest(input: RequestInfo | URL, init?: RequestInit): Request {
     if (input instanceof Request) return input;
     if (typeof input === 'string' || input instanceof URL) {

From 7abecc7661acefae447269ff50b5ed4186ee6427 Mon Sep 17 00:00:00 2001
From: Gregor Mitscha-Baude
Date: Mon, 20 Oct 2025 10:12:49 +0200
Subject: [PATCH 2/6] simulate stale reads, add breaking rename tests

---
 src/sync/git-sync-stale.test.ts |  54 +++++
 src/sync/git-sync2.test.ts      | 370 ++++++++++++++++++++++++++++++++
 src/test/mock-remote.ts         |  18 +-
 3 files changed, 441 insertions(+), 1 deletion(-)
 create mode 100644 src/sync/git-sync-stale.test.ts
 create mode 100644 src/sync/git-sync2.test.ts

diff --git a/src/sync/git-sync-stale.test.ts b/src/sync/git-sync-stale.test.ts
new file mode 100644
index 0000000..5618be3
--- /dev/null
+++ b/src/sync/git-sync-stale.test.ts
@@ -0,0 +1,54 @@
+import { beforeEach, describe, expect, test, vi } from 'vitest';
+import { LocalStore, listTombstones } from '../storage/local';
+import { MockRemoteRepo } from '../test/mock-remote';
+
+const authModule = vi.hoisted(() => ({
+  ensureFreshAccessToken: vi.fn().mockResolvedValue('test-token'),
+}));
+
+vi.mock('../auth/app-auth', () => authModule);
+
+const globalAny = globalThis as { fetch?: typeof fetch };
+
+describe('syncBidirectional with stale ref reads enabled', () => {
+  let store: LocalStore;
+  let remote: MockRemoteRepo;
+  let syncBidirectional: typeof import('./git-sync').syncBidirectional;
+
+  beforeEach(async () => {
+    authModule.ensureFreshAccessToken.mockReset();
+    authModule.ensureFreshAccessToken.mockResolvedValue('test-token');
+    remote = new MockRemoteRepo();
+    remote.configure('user', 'repo');
+    remote.allowToken('test-token');
+    remote.enableStaleReads({ enabled: true, windowMs: 5_000 });
+    const fetchMock = vi.fn((input: RequestInfo | URL, init?:
RequestInit) => remote.handleFetch(input, init)); + globalAny.fetch = fetchMock as unknown as typeof fetch; + const mod = await import('./git-sync'); + syncBidirectional = mod.syncBidirectional; + store = new LocalStore('user/repo'); + }); + + test('second consecutive edit hits 422 due to stale head', async () => { + store.createFile('Note.md', 'v1'); + await syncBidirectional(store, 'user/repo'); + + store.saveFile('Note.md', 'v2'); + await syncBidirectional(store, 'user/repo'); + + store.saveFile('Note.md', 'v3'); + await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({ status: 422 }); + }); + + test('rename then edit fails under stale reads', async () => { + store.createFile('Draft.md', 'first'); + await syncBidirectional(store, 'user/repo'); + + store.renameFile('Draft.md', 'Draft v2'); + await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({ status: 422 }); + + store.saveFile('Draft v2.md', 'second'); + await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({ status: 422 }); + expect(listTombstones(store.slug)).not.toHaveLength(0); + }); +}); diff --git a/src/sync/git-sync2.test.ts b/src/sync/git-sync2.test.ts new file mode 100644 index 0000000..6661165 --- /dev/null +++ b/src/sync/git-sync2.test.ts @@ -0,0 +1,370 @@ +import { Buffer } from 'node:buffer'; +import { beforeEach, describe, expect, test, vi } from 'vitest'; +import { LocalStore, listTombstones, findBySyncedHash } from '../storage/local'; +import { MockRemoteRepo } from '../test/mock-remote'; + +const authModule = vi.hoisted(() => ({ + ensureFreshAccessToken: vi.fn().mockResolvedValue('test-token'), +})); + +vi.mock('../auth/app-auth', () => authModule); + +const globalAny = globalThis as { + fetch?: typeof fetch; +}; + +describe('syncBidirectional', () => { + let store: LocalStore; + let remote: MockRemoteRepo; + let syncBidirectional: typeof import('./git-sync').syncBidirectional; + + beforeEach(async () => { + authModule.ensureFreshAccessToken.mockReset(); + authModule.ensureFreshAccessToken.mockResolvedValue('test-token'); + remote = new MockRemoteRepo(); + remote.configure('user', 'repo'); + remote.allowToken('test-token'); + remote.enableStaleReads({ enabled: true, windowMs: 5_000 }); + const fetchMock = vi.fn((input: RequestInfo | URL, init?: RequestInit) => + remote.handleFetch(input, init) + ); + globalAny.fetch = fetchMock as unknown as typeof fetch; + const mod = await import('./git-sync'); + syncBidirectional = mod.syncBidirectional; + store = new LocalStore('user/repo'); + }); + + test('pushes new notes and remains stable', async () => { + const firstId = store.createFile('First.md', 'first note'); + const secondId = store.createFile('Second.md', 'second note'); + await syncBidirectional(store, 'user/repo'); + await syncBidirectional(store, 'user/repo'); + expectParity(store, remote); + expect(listTombstones(store.slug)).toHaveLength(0); + const firstDoc = store.loadFileById(firstId); + const secondDoc = store.loadFileById(secondId); + expect(firstDoc?.path).toBe('First.md'); + expect(secondDoc?.path).toBe('Second.md'); + }); + + test('applies local deletions to remote without resurrection', async () => { + store.createFile('Ghost.md', 'haunt me'); + await syncBidirectional(store, 'user/repo'); + store.deleteFile('Ghost.md'); + await syncBidirectional(store, 'user/repo'); + expectParity(store, remote); + expect(store.listFiles()).toHaveLength(0); + expect(listTombstones(store.slug)).toHaveLength(0); + }); + + test('renames move files remotely', async 
() => { + store.createFile('Original.md', 'rename me'); + await syncBidirectional(store, 'user/repo'); + store.renameFile('Original.md', 'Renamed'); + await syncBidirectional(store, 'user/repo'); + expectParity(store, remote); + const notes = store.listFiles(); + expect(notes).toHaveLength(1); + expect(notes[0]?.path).toBe('Renamed.md'); + expect([...remote.snapshot().keys()]).toEqual(['Renamed.md']); + }); + + test('rename removes old remote path after prior sync', async () => { + store.createFile('test.md', 'body'); + await syncBidirectional(store, 'user/repo'); + expect([...remote.snapshot().keys()]).toEqual(['test.md']); + store.renameFile('test.md', 'test2'); + await syncBidirectional(store, 'user/repo'); + const remoteFiles = [...remote.snapshot().keys()].sort(); + expect(remoteFiles).toEqual(['test2.md']); + expectParity(store, remote); + }); + + test('rename with remote edits keeps both copies in sync', async () => { + store.createFile('draft.md', 'original body'); + await syncBidirectional(store, 'user/repo'); + remote.setFile('draft.md', 'remote update'); + store.renameFile('draft.md', 'draft-renamed'); + await syncBidirectional(store, 'user/repo'); + const paths = [...remote.snapshot().keys()].sort(); + expect(paths).toEqual(['draft-renamed.md', 'draft.md']); + expectParity(store, remote); + const localPaths = store + .listFiles() + .map((n) => n.path) + .sort(); + expect(localPaths).toEqual(['draft-renamed.md', 'draft.md']); + }); + + test('rename revert does not push redundant commits', async () => { + store.createFile('first-name.md', 'body'); + await syncBidirectional(store, 'user/repo'); + const headBeforeRename = await getRemoteHeadSha(remote); + + store.renameFile('first-name.md', 'second-name'); + store.renameFile('second-name.md', 'first-name'); + + await syncBidirectional(store, 'user/repo'); + + const headAfterSync = await getRemoteHeadSha(remote); + expect(headAfterSync).toBe(headBeforeRename); + expectParity(store, remote); + }); + + test('rename followed by local edit pushes updated content under new path', async () => { + store.createFile('Draft.md', 'initial body'); + await syncBidirectional(store, 'user/repo'); + expect([...remote.snapshot().keys()]).toEqual(['Draft.md']); + + const nextPath = store.renameFile('Draft.md', 'Ready'); + expect(nextPath).toBe('Ready.md'); + store.saveFile('Ready.md', 'edited after rename'); + + await syncBidirectional(store, 'user/repo'); + + const remoteFiles = [...remote.snapshot().entries()]; + expect(remoteFiles).toEqual([['Ready.md', 'edited after rename']]); + const readyMeta = store.listFiles().find((file) => file.path === 'Ready.md'); + const readyDoc = readyMeta ? store.loadFileById(readyMeta.id) : null; + expect(readyDoc?.content).toBe('edited after rename'); + expect(listTombstones(store.slug)).toHaveLength(0); + }); + + test('surface 422 when branch head advances during push', async () => { + store.createFile('Lonely.md', 'seed text'); + await syncBidirectional(store, 'user/repo'); + + store.saveFile('Lonely.md', 'edited locally'); + remote.advanceHeadOnNextUpdate(); + + await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({ + status: 422, + path: expect.stringContaining('/git/refs/heads/'), + }); + }); + + test('pulls new remote notes', async () => { + remote.setFile('Remote.md', '# remote'); + await syncBidirectional(store, 'user/repo'); + expectParity(store, remote); + const notes = store.listFiles(); + expect(notes).toHaveLength(1); + const doc = store.loadFileById(notes[0]?.id ?? 
''); + expect(doc?.content).toBe('# remote'); + }); + + test('removes notes when deleted remotely', async () => { + store.createFile('Shared.md', 'shared text'); + await syncBidirectional(store, 'user/repo'); + remote.deleteDirect('Shared.md'); + await syncBidirectional(store, 'user/repo'); + expectParity(store, remote); + expect(store.listFiles()).toHaveLength(0); + }); + + test('syncs tracked image files while ignoring unrelated blobs', async () => { + remote.setFile('data.json', '{"keep":true}'); + remote.setFile('image.png', 'asset'); + store.createFile('OnlyNote.md', '# hello'); + await syncBidirectional(store, 'user/repo'); + const snapshot = remote.snapshot(); + expect(snapshot.get('data.json')).toBe('{"keep":true}'); + expect(snapshot.get('image.png')).toBe('asset'); + expect(snapshot.get('OnlyNote.md')).toBe('# hello'); + const files = store.listFiles(); + const imageMeta = files.find((f) => f.path === 'image.png'); + expect(imageMeta).toBeDefined(); + if (imageMeta) { + const imageDoc = store.loadFileById(imageMeta.id); + expect(imageDoc?.kind).toBe('asset-url'); + expect(imageDoc?.content).toMatch(/^gh-blob:/); + } + expectParity(store, remote); + }); + + test('pulls nested Markdown files', async () => { + remote.setFile('nested/Nested.md', '# nested'); + await syncBidirectional(store, 'user/repo'); + const notes = store.listFiles(); + expect(notes).toHaveLength(1); + const doc = store.loadFileById(notes[0]?.id ?? ''); + expect(doc?.path).toBe('nested/Nested.md'); + expect(doc?.content).toBe('# nested'); + }); + + test('pulls binary image assets from remote', async () => { + remote.setFile('assets/logo.png', 'image-data'); + await syncBidirectional(store, 'user/repo'); + const files = store.listFiles(); + const asset = files.find((f) => f.path === 'assets/logo.png'); + expect(asset).toBeDefined(); + if (!asset) return; + const doc = store.loadFileById(asset.id); + expect(doc?.kind).toBe('asset-url'); + expect(doc?.content).toMatch(/^gh-blob:/); + expectParity(store, remote); + }); + + test('pulls binary assets via blob fallback when contents payload is empty', async () => { + const payload = 'high-res-image'; + const expectedBase64 = Buffer.from(payload, 'utf8').toString('base64'); + remote.setFile('assets/large.png', payload); + const originalFetch = globalAny.fetch!; + let capturedSha: string | null = null; + const interceptFetch = vi.fn(async (input: RequestInfo | URL, init?: RequestInit) => { + const request = input instanceof Request ? input : new Request(input, init); + const url = new URL(request.url); + if ( + request.method.toUpperCase() === 'GET' && + url.pathname === '/repos/user/repo/contents/assets/large.png' + ) { + const upstream = await originalFetch(input, init); + const json = await upstream.json(); + capturedSha = typeof json?.sha === 'string' ? 
json.sha : null;
+        return new Response(JSON.stringify({ ...json, content: '' }), {
+          status: 200,
+          headers: { 'Content-Type': 'application/json' },
+        });
+      }
+      if (
+        request.method.toUpperCase() === 'GET' &&
+        capturedSha &&
+        url.pathname === `/repos/user/repo/git/blobs/${capturedSha}`
+      ) {
+        return new Response(
+          JSON.stringify({ sha: capturedSha, content: expectedBase64, encoding: 'base64' }),
+          { status: 200, headers: { 'Content-Type': 'application/json' } }
+        );
+      }
+      return originalFetch(input, init);
+    });
+
+    globalAny.fetch = interceptFetch as unknown as typeof fetch;
+    try {
+      await syncBidirectional(store, 'user/repo');
+    } finally {
+      globalAny.fetch = originalFetch;
+    }
+    const files = store.listFiles();
+    const asset = files.find((f) => f.path === 'assets/large.png');
+    expect(asset).toBeDefined();
+    if (!asset) return;
+    const doc = store.loadFileById(asset.id);
+    expect(doc?.kind).toBe('asset-url');
+    expect(doc?.content).toMatch(/^gh-blob:/);
+    expect(capturedSha).toBeTruthy();
+    expectParity(store, remote);
+  });
+
+  test('tracks remote binary renames by sha/hash', async () => {
+    const payload = Buffer.from('asset', 'utf8').toString('base64');
+    const id = store.createFile('logo.png', payload);
+    await syncBidirectional(store, 'user/repo');
+    const before = store.loadFileById(id);
+    expect(before?.lastSyncedHash).toBeDefined();
+    remote.deleteDirect('logo.png');
+    remote.setFile('assets/logo.png', payload);
+    if (before?.lastSyncedHash) {
+      const lookup = findBySyncedHash(store.slug, before.lastSyncedHash);
+      expect(lookup?.id).toBe(id);
+    }
+
+    await syncBidirectional(store, 'user/repo');
+
+    const paths = store
+      .listFiles()
+      .map((f) => f.path)
+      .sort();
+    expect(paths).toContain('assets/logo.png');
+    expect(paths).not.toContain('logo.png');
+    const renamedFile = store.listFiles().find((f) => f.path === 'assets/logo.png');
+    expect(renamedFile).toBeDefined();
+    expectParity(store, remote);
+  });
+
+  test('listRepoFiles includes nested markdown', async () => {
+    const mod = await import('./git-sync');
+    remote.setFile('nested/Nested.md', '# nested');
+    let cfg = mod.buildRemoteConfig('user/repo');
+    let entries = await mod.listRepoFiles(cfg);
+    const paths = entries.map((e) => e.path).sort();
+    expect(paths).toEqual(['nested/Nested.md']);
+  });
+
+  test('listRepoFiles returns markdown and image entries', async () => {
+    const mod = await import('./git-sync');
+    remote.setFile('docs/Doc.md', '# hi');
+    remote.setFile('assets/logo.png', 'img');
+    let cfg = mod.buildRemoteConfig('user/repo');
+    let entries = await mod.listRepoFiles(cfg);
+    const byPath = new Map(entries.map((entry) => [entry.path, entry.kind]));
+    expect(byPath.get('docs/Doc.md')).toBe('markdown');
+    expect(byPath.get('assets/logo.png')).toBe('binary');
+  });
+
+  test('includes README.md files from the repository', async () => {
+    remote.setFile('README.md', 'root readme');
+    remote.setFile('sub/README.md', 'sub readme');
+    await syncBidirectional(store, 'user/repo');
+    const paths = store
+      .listFiles()
+      .map((n) => n.path)
+      .sort();
+    expect(paths).toEqual(['README.md', 'sub/README.md']);
+  });
+});
+
+type RemoteHeadPayload = {
+  object?: { sha?: string };
+};
+
+async function getRemoteHeadSha(remote: MockRemoteRepo, branch = 'main'): Promise<string> {
+  const response = await remote.handleFetch(
+    `https://api.github.com/repos/user/repo/git/ref/heads/${branch}`,
+    { method: 'GET' }
+  );
+  const payload = (await response.json()) as RemoteHeadPayload;
+  if (!response.ok) {
+    throw new Error(`remote head lookup failed with status ${response.status}`);
+  }
+  const sha = typeof payload.object?.sha === 'string' ? payload.object.sha : '';
+  expect(sha).not.toBe('');
+  return sha;
+}
+
+function expectParity(store: LocalStore, remote: MockRemoteRepo) {
+  const localDocs = new Map<string, ReturnType<LocalStore['loadFileById']>>();
+  for (const meta of store.listFiles()) {
+    const doc = store.loadFileById(meta.id);
+    if (!doc) continue;
+    localDocs.set(meta.path, doc);
+  }
+  const remoteMap = remote.snapshot();
+  const trackedRemoteKeys = [...remoteMap.keys()].filter(isTrackedPath).sort();
+  expect(trackedRemoteKeys).toEqual([...localDocs.keys()].sort());
+  for (const [path, doc] of localDocs.entries()) {
+    const remoteContent = remoteMap.get(path);
+    if (doc?.kind === 'markdown') {
+      expect(remoteContent).toBe(doc.content);
+    } else if (doc?.kind === 'binary') {
+      const decoded = Buffer.from(doc.content, 'base64').toString('utf8');
+      expect(remoteContent).toBe(decoded);
+    } else if (doc?.kind === 'asset-url') {
+      expect(remoteContent).toBeDefined();
+    }
+  }
+}
+
+function isTrackedPath(path: string): boolean {
+  const lower = path.toLowerCase();
+  return (
+    lower.endsWith('.md') ||
+    lower.endsWith('.png') ||
+    lower.endsWith('.jpg') ||
+    lower.endsWith('.jpeg') ||
+    lower.endsWith('.gif') ||
+    lower.endsWith('.webp') ||
+    lower.endsWith('.svg') ||
+    lower.endsWith('.avif')
+  );
+}
diff --git a/src/test/mock-remote.ts b/src/test/mock-remote.ts
index 1276229..7175a91 100644
--- a/src/test/mock-remote.ts
+++ b/src/test/mock-remote.ts
@@ -34,6 +34,9 @@ class MockRemoteRepo {
   private commitRecords = new Map();
   private installations = new Map();
   private pendingHeadAdvance = new Set();
+  private simulateStale = false;
+  private staleWindowMs = 0;
+  private staleRefByBranch = new Map();

   configure(owner: string, repo: string) {
     this.owner = owner;
@@ -44,6 +47,12 @@
     this.installations.set(token, {});
   }

+  enableStaleReads(options: { enabled: boolean; windowMs?: number } = { enabled: true }) {
+    this.simulateStale = options.enabled;
+    this.staleWindowMs = options.windowMs ?? 200;
+    this.staleRefByBranch.clear();
+  }
+
   snapshot(): Map<string, string> {
     const result = new Map();
     for (const [path, file] of this.files.entries()) {
@@ -97,9 +106,13 @@
     if (!head) {
       return this.makeResponse(404, { message: 'not found' });
     }
+    const stale = this.simulateStale ? this.staleRefByBranch.get(branch) : undefined;
+    const now = Date.now();
+    const shaToServe =
+      stale && stale.until > now && this.commitRecords.has(stale.commit) ? stale.commit : head;
     return this.makeResponse(200, {
       ref: `refs/heads/${branch}`,
-      object: { sha: head, type: 'commit' },
+      object: { sha: shaToServe, type: 'commit' },
     });
   }

@@ -129,6 +142,9 @@
     }
     this.setHead(branch, sha);
+    if (this.simulateStale) {
+      this.staleRefByBranch.set(branch, { commit: currentHead ?? sha, until: Date.now() + this.staleWindowMs });
+    }
     return this.makeResponse(200, {
       ref: `refs/heads/${branch}`,
       object: { sha, type: 'commit' },

From 7e0eb0f907c9e130d390e0d03a16a3cea7f549e9 Mon Sep 17 00:00:00 2001
From: Gregor Mitscha-Baude
Date: Mon, 20 Oct 2025 10:37:54 +0200
Subject: [PATCH 3/6] update status bar with 422 info on affected file.
TODO remove data.ts bloat --- src/data.ts | 111 +++++++++++++++++++++++++++++++++++++++++- src/data/data.test.ts | 39 +++++++++++++++ src/sync/git-sync.ts | 62 ++++++++++++++++++----- 3 files changed, 200 insertions(+), 12 deletions(-) diff --git a/src/data.ts b/src/data.ts index 80fc9db..728b866 100644 --- a/src/data.ts +++ b/src/data.ts @@ -450,7 +450,7 @@ function useRepoData({ slug, route, recordRecent, setActivePath }: RepoDataInput setStatusMessage(parts.length ? `Synced: ${parts.join(', ')}` : 'Up to date'); } catch (error) { logError(error); - setStatusMessage('Sync failed'); + setStatusMessage(formatSyncFailure(error)); } }; @@ -936,6 +936,115 @@ function formatError(error: unknown): string { return String(error); } +type GitHubRequestError = Error & { + status?: number; + path?: string; + body?: unknown; + text?: string | null; + syncContexts?: SyncRequestContext[]; +}; + +function formatSyncFailure(error: unknown): string { + const fallback = 'Sync failed'; + if (!error || typeof error !== 'object') return fallback; + const err = error as GitHubRequestError; + if (err.status === 422) { + const action = describeGitHubRequest(err.path); + const affected = describeAffectedPaths(err); + const reason = extractGitHubReason(err); + let message = `Sync failed: GitHub returned 422 while ${action}`; + if (affected) message += ` for ${affected}`; + if (reason) message += ` (${reason})`; + return `${message}. Please report this bug.`; + } + return fallback; +} + +type SyncRequestContext = { + operation: 'put' | 'delete' | 'batch'; + paths: string[]; +}; + +function describeGitHubRequest(path: string | undefined): string { + if (!path) return 'processing the GitHub request'; + let cleaned = stripRepoPrefix(path); + if (cleaned.startsWith('git/refs/heads/')) { + let branch = cleaned.slice('git/refs/heads/'.length); + branch = decodeGitPath(branch); + return `updating refs/heads/${branch}`; + } + if (cleaned.startsWith('git/commits')) return 'creating a commit on GitHub'; + if (cleaned.startsWith('git/trees')) return 'creating a tree on GitHub'; + if (cleaned.startsWith('contents/')) { + const resource = decodeGitPath(cleaned.slice('contents/'.length)); + return `updating repository contents at ${resource}`; + } + return `calling ${cleaned}`; +} + +function stripRepoPrefix(path: string): string { + const trimmed = path.replace(/^\/+/, ''); + const repoPattern = /^repos\/[^/]+\/[^/]+\/(.+)$/; + const match = trimmed.match(repoPattern); + if (match && typeof match[1] === 'string') { + return match[1]; + } + return trimmed; +} + +function decodeGitPath(path: string): string { + return path + .split('/') + .map((segment) => { + try { + return decodeURIComponent(segment); + } catch { + return segment; + } + }) + .join('/'); +} + +function extractGitHubReason(err: GitHubRequestError): string | undefined { + const body = err.body as { message?: unknown; errors?: unknown } | undefined; + if (body) { + if (typeof body.message === 'string' && body.message.trim() !== '') { + return body.message.trim(); + } + if (Array.isArray(body.errors)) { + const messages = body.errors + .map((entry: unknown) => { + if (typeof entry === 'string') return entry; + if (entry && typeof entry === 'object') { + const msg = (entry as { message?: unknown }).message; + if (typeof msg === 'string' && msg.trim() !== '') return msg.trim(); + } + return undefined; + }) + .filter((msg): msg is string => msg !== undefined && msg !== ''); + if (messages.length > 0) return messages.join('; '); + } + } + if (typeof err.text === 
'string' && err.text.trim() !== '') {
+    return err.text.trim();
+  }
+  return undefined;
+}
+
+function describeAffectedPaths(err: GitHubRequestError): string | undefined {
+  const contexts = Array.isArray(err.syncContexts) ? err.syncContexts : undefined;
+  if (!contexts || contexts.length === 0) return undefined;
+  const latest = contexts[contexts.length - 1];
+  if (!latest || !Array.isArray(latest.paths) || latest.paths.length === 0) return undefined;
+  const display = latest.paths.slice(0, 3);
+  const formatted = display.map((path) => path || '(unknown path)');
+  let suffix = '';
+  if (latest.paths.length > display.length) {
+    suffix = ` and ${latest.paths.length - display.length} more`;
+  }
+  return formatted.join(', ') + suffix;
+}
+
 function findByPath<T extends { path: string }>(notes: T[], targetPath: string): T | undefined {
   let normalized = normalizePath(targetPath);
   return notes.find((note) => normalizePath(note.path) === normalized);
diff --git a/src/data/data.test.ts b/src/data/data.test.ts
index 2807c28..9f7a19c 100644
--- a/src/data/data.test.ts
+++ b/src/data/data.test.ts
@@ -413,6 +413,45 @@ describe('useRepoData', () => {
     expect(result.current.state.user).toEqual(expect.objectContaining({ login: 'hubot' }));
   });

+  test('sync surfaces detailed message when GitHub returns 422', async () => {
+    const slug = 'acme/docs';
+    const recordRecent = vi.fn();
+
+    mockGetSessionToken.mockReturnValue('session-token');
+    mockGetSessionUser.mockReturnValue({
+      login: 'hubot',
+      name: null,
+      avatarUrl: 'https://example.com/hubot.png',
+    });
+    markRepoLinked(slug);
+
+    const ghError = Object.assign(new Error('GitHub request failed (422)'), {
+      status: 422,
+      path: '/repos/acme/docs/git/refs/heads/main',
+      body: { message: 'Update is not a fast-forward' },
+      syncContexts: [{ operation: 'delete', paths: ['Ready.md'] }],
+    });
+    mockSyncBidirectional.mockRejectedValue(ghError);
+
+    const { result } = renderRepoData({
+      slug,
+      route: { kind: 'repo', owner: 'acme', repo: 'docs' },
+      recordRecent,
+    });
+
+    await waitFor(() => expect(result.current.state.repoQueryStatus).toBe('ready'));
+    await waitFor(() => expect(result.current.state.canSync).toBe(true));
+
+    await act(async () => {
+      await result.current.actions.syncNow();
+    });
+
+    expect(mockSyncBidirectional).toHaveBeenCalledWith(expect.any(LocalStore), slug);
+    expect(result.current.state.statusMessage).toBe(
+      'Sync failed: GitHub returned 422 while updating refs/heads/main for Ready.md (Update is not a fast-forward). Please report this bug.'
+    );
+  });
+
   // Read-only repos should list remote notes and refresh on selection.
   test('read-only repos surface notes and refresh on selection', async () => {
     const slug = 'octo/wiki';
diff --git a/src/sync/git-sync.ts b/src/sync/git-sync.ts
index a58489a..720c52c 100644
--- a/src/sync/git-sync.ts
+++ b/src/sync/git-sync.ts
@@ -128,6 +128,31 @@ type PutFilePayload = {
   blobSha?: string;
 };

+type SyncRequestContext = {
+  operation: 'put' | 'delete' | 'batch';
+  paths: string[];
+};
+
+type ErrorWithSyncContext = Error & { syncContexts?: SyncRequestContext[] };
+
+async function withGitHubContext<T>(context: SyncRequestContext, task: () => Promise<T>): Promise<T> {
+  try {
+    return await task();
+  } catch (error) {
+    attachSyncContext(error, context);
+    throw error;
+  }
+}
+
+function attachSyncContext(error: unknown, context: SyncRequestContext): void {
+  if (!error || typeof error !== 'object') return;
+  const err = error as ErrorWithSyncContext;
+  if (!Array.isArray(err.syncContexts)) {
+    err.syncContexts = [];
+  }
+  err.syncContexts.push(context);
+}
+
 function serializeContent(file: PutFilePayload) {
   if (file.blobSha) {
     return { path: file.path, blobSha: file.blobSha };
@@ -186,8 +211,13 @@
 // Upsert a single file and return its new content sha
 export async function putFile(config: RemoteConfig, file: PutFilePayload, message: string): Promise<string> {
-  let res = await commitChanges(config, message, [serializeContent(file)]);
-  return extractBlobSha(res, file.path) ?? file.blobSha ?? res.commitSha;
+  return await withGitHubContext(
+    { operation: 'put', paths: [file.path] },
+    async () => {
+      let res = await commitChanges(config, message, [serializeContent(file)]);
+      return extractBlobSha(res, file.path) ?? file.blobSha ?? res.commitSha;
+    }
+  );
 }

 export async function commitBatch(
@@ -196,10 +226,15 @@
   message: string
 ): Promise<string | null> {
   if (files.length === 0) return null;
-  let res = await commitChanges(config, message, files.map(serializeContent));
-  // Return the first blob sha if available to align with caller expectations
-  const firstPath = files[0]?.path;
-  return firstPath ? extractBlobSha(res, firstPath) ?? files[0]?.blobSha ?? res.commitSha : res.commitSha;
+  return await withGitHubContext(
+    { operation: 'batch', paths: files.map((file) => file.path) },
+    async () => {
+      let res = await commitChanges(config, message, files.map(serializeContent));
+      // Return the first blob sha if available to align with caller expectations
+      const firstPath = files[0]?.path;
+      return firstPath ? extractBlobSha(res, firstPath) ?? files[0]?.blobSha ?? res.commitSha : res.commitSha;
+    }
+  );
 }

 export async function listRepoFiles(config: RemoteConfig): Promise<RemoteFile[]> {
@@ -253,12 +288,17 @@
   message: string
 ): Promise<string | null> {
   if (files.length === 0) return null;
-  let res = await commitChanges(
-    config,
-    message,
-    files.map((f) => ({ path: f.path, delete: true }))
+  return await withGitHubContext(
+    { operation: 'delete', paths: files.map((file) => file.path) },
+    async () => {
+      let res = await commitChanges(
+        config,
+        message,
+        files.map((f) => ({ path: f.path, delete: true }))
+      );
+      return res.commitSha || null;
+    }
   );
-  return res.commitSha || null;
 }

 function fromBase64(b64: string): string {

From 37dd86f02bb694888d2d897bd38addabec9daf86 Mon Sep 17 00:00:00 2001
From: Gregor Mitscha-Baude
Date: Mon, 20 Oct 2025 11:21:09 +0200
Subject: [PATCH 4/6] fix git sync by batch committing

---
 src/storage/local.ts            |   1 +
 src/sync/git-sync-stale.test.ts |  35 +++++-
 src/sync/git-sync.ts            | 189 ++++++++++++++++++++++++++------
 src/test/mock-remote.ts         |   6 +-
 4 files changed, 189 insertions(+), 42 deletions(-)

diff --git a/src/storage/local.ts b/src/storage/local.ts
index ff0793d..7778f1e 100644
--- a/src/storage/local.ts
+++ b/src/storage/local.ts
@@ -791,6 +791,7 @@ export function moveFilePath(slug: string, id: string, toPath: string) {
     };
   });
   rebuildFolderIndex(slug);
+  emitRepoChange(slug);
 }

 function loadIndexForSlug(slug: string): FileMeta[] {
diff --git a/src/sync/git-sync-stale.test.ts b/src/sync/git-sync-stale.test.ts
index 5618be3..59bf6fc 100644
--- a/src/sync/git-sync-stale.test.ts
+++ b/src/sync/git-sync-stale.test.ts
@@ -29,7 +29,7 @@ describe('syncBidirectional with stale ref reads enabled', () => {
     store = new LocalStore('user/repo');
   });

-  test('second consecutive edit hits 422 due to stale head', async () => {
+  test('second consecutive edit survives stale ref reads', async () => {
     store.createFile('Note.md', 'v1');
     await syncBidirectional(store, 'user/repo');

@@ -37,18 +37,41 @@
     store.saveFile('Note.md', 'v2');
     await syncBidirectional(store, 'user/repo');

     store.saveFile('Note.md', 'v3');
-    await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({ status: 422 });
+    const summary = await syncBidirectional(store, 'user/repo');
+    expect(summary).toEqual({
+      pulled: 0,
+      pushed: 1,
+      deletedRemote: 0,
+      deletedLocal: 0,
+      merged: 0,
+    });
   });

-  test('rename then edit fails under stale reads', async () => {
+  test('rename then edit succeeds despite stale reads', async () => {
     store.createFile('Draft.md', 'first');
     await syncBidirectional(store, 'user/repo');

     store.renameFile('Draft.md', 'Draft v2');
-    await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({ status: 422 });
+    const renameSummary = await syncBidirectional(store, 'user/repo');
+    expect(renameSummary).toEqual({
+      pulled: 0,
+      pushed: 1,
+      deletedRemote: 1,
+      deletedLocal: 0,
+      merged: 0,
+    });
+    expect([...remote.snapshot().keys()]).toEqual(['Draft v2.md']);

     store.saveFile('Draft v2.md', 'second');
-    await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({ status: 422 });
-    expect(listTombstones(store.slug)).not.toHaveLength(0);
+    const editSummary = await syncBidirectional(store, 'user/repo');
+    expect(editSummary).toEqual({
+      pulled: 0,
+      pushed: 1,
+      deletedRemote: 0,
+      deletedLocal: 0,
+      merged: 0,
+    });
+    expect([...remote.snapshot().keys()]).toEqual(['Draft v2.md']);
+    expect(listTombstones(store.slug)).toHaveLength(0);
   });
 });
diff --git
a/src/sync/git-sync.ts b/src/sync/git-sync.ts index 720c52c..6e627a5 100644 --- a/src/sync/git-sync.ts +++ b/src/sync/git-sync.ts @@ -378,7 +378,8 @@ async function commitChanges( let repoEncoded = encodeURIComponent(config.repo); let branch = config.branch ? config.branch.trim() : ''; if (!branch) branch = 'main'; - let refPath = `/repos/${ownerEncoded}/${repoEncoded}/git/ref/heads/${encodeURIComponent(branch)}`; + let refPathBase = `/repos/${ownerEncoded}/${repoEncoded}/git/ref/heads/${encodeURIComponent(branch)}`; + let refPath = `${refPathBase}?cache_bust=${Date.now()}`; let headSha: string | null = null; let baseTreeSha: string | null = null; @@ -408,7 +409,7 @@ async function commitChanges( } else if (refRes.status === 404) { isInitialCommit = true; } else { - await throwGitHubError(refRes, refPath); + await throwGitHubError(refRes, refPathBase); } let treeItems: Array<{ @@ -705,6 +706,66 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis // TODO why does this not use a default branch?? const config = buildRemoteConfig(slug); + + type PendingUpload = { + payload: PutFilePayload; + onApplied: (newSha: string) => void; + }; + type PendingDelete = { + path: string; + onApplied: () => void; + }; + + const pendingUploads: PendingUpload[] = []; + const pendingDeletes: PendingDelete[] = []; + + const queueUpload = ( + payload: PutFilePayload, + options: { onPending?: () => void; onApplied: (newSha: string) => void } + ) => { + options.onPending?.(); + pendingUploads.push({ payload, onApplied: options.onApplied }); + }; + + const queueDelete = ( + path: string, + options: { onPending?: () => void; onApplied: () => void } + ) => { + options.onPending?.(); + pendingDeletes.push({ path, onApplied: options.onApplied }); + }; + + const flushPendingChanges = async () => { + if (pendingUploads.length === 0 && pendingDeletes.length === 0) return; + const serializedDeletes = pendingDeletes.map((entry) => ({ path: entry.path, delete: true })); + const serializedUploads = pendingUploads.map((entry) => serializeContent(entry.payload)); + const pathsForContext = [ + ...pendingUploads.map((entry) => entry.payload.path), + ...pendingDeletes.map((entry) => entry.path), + ]; + const message = 'vibenote: sync changes'; + let res: CommitResponse | null = null; + res = await withGitHubContext({ operation: 'batch', paths: pathsForContext }, async () => { + return await commitChanges(config, message, [ + ...serializedDeletes, + ...serializedUploads, + ]); + }); + const commitResult = res ?? { commitSha: '', blobShas: {} }; + for (const entry of pendingUploads) { + const newSha = + extractBlobSha(commitResult, entry.payload.path) ?? + entry.payload.blobSha ?? + commitResult.commitSha; + entry.onApplied(newSha); + } + for (const entry of pendingDeletes) { + entry.onApplied(); + } + pendingUploads.length = 0; + pendingDeletes.length = 0; + }; + const storeSlug = store.slug; const entries = await listRepoFiles(config); const remoteMap = new Map(entries.map((e) => [e.path, e.sha] as const)); @@ -759,9 +820,15 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis debugLog(slug, 'sync:push:skip-missing-content', { path: doc.path }); continue; } - const newSha = await putFile(config, payload, 'vibenote: update notes'); - markSynced(storeSlug, id, { remoteSha: newSha, syncedHash: syncedHashForDoc(doc, newSha) }); - remoteMap.set(doc.path, newSha); + queueUpload(payload, { + onPending: () => { + remoteMap.set(doc.path, doc.lastRemoteSha ?? 
'pending'); + }, + onApplied: (newSha) => { + markSynced(storeSlug, id, { remoteSha: newSha, syncedHash: syncedHashForDoc(doc, newSha) }); + remoteMap.set(doc.path, newSha); + }, + }); pushed++; debugLog(slug, 'sync:push:unchanged-remote', { path: doc.path }); } @@ -795,9 +862,15 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis debugLog(slug, 'sync:push:skip-missing-content', { path: doc.path }); continue; } - const newSha = await putFile(config, payload, 'vibenote: update notes'); - markSynced(storeSlug, id, { remoteSha: newSha, syncedHash: syncedHashForDoc(doc, newSha) }); - remoteMap.set(doc.path, newSha); + queueUpload(payload, { + onPending: () => { + remoteMap.set(doc.path, doc.lastRemoteSha ?? 'pending'); + }, + onApplied: (newSha) => { + markSynced(storeSlug, id, { remoteSha: newSha, syncedHash: syncedHashForDoc(doc, newSha) }); + remoteMap.set(doc.path, newSha); + }, + }); pushed++; debugLog(slug, 'sync:push:remote-rename-only', { path: doc.path }); continue; @@ -810,13 +883,18 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis if (mergedText !== doc.content) { updateFile(storeSlug, id, mergedText, 'markdown'); } - const newSha = await putFile( - config, + queueUpload( { path: doc.path, content: mergedText, baseSha: rf.sha, kind: 'markdown' }, - 'vibenote: merge notes' + { + onPending: () => { + remoteMap.set(doc.path, doc.lastRemoteSha ?? 'pending'); + }, + onApplied: (newSha) => { + markSynced(storeSlug, id, { remoteSha: newSha, syncedHash: hashText(mergedText) }); + remoteMap.set(doc.path, newSha); + }, + } ); - markSynced(storeSlug, id, { remoteSha: newSha, syncedHash: hashText(mergedText) }); - remoteMap.set(doc.path, newSha); merged++; pushed++; debugLog(slug, 'sync:merge', { path: doc.path }); @@ -858,9 +936,15 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis debugLog(slug, 'sync:restore-skip-missing-content', { path: doc.path }); continue; } - const newSha = await putFile(config, payload, 'vibenote: restore note'); - markSynced(storeSlug, id, { remoteSha: newSha, syncedHash: syncedHashForDoc(doc, newSha) }); - remoteMap.set(doc.path, newSha); + queueUpload(payload, { + onPending: () => { + remoteMap.set(doc.path, doc.lastRemoteSha ?? 
'pending'); + }, + onApplied: (newSha) => { + markSynced(storeSlug, id, { remoteSha: newSha, syncedHash: syncedHashForDoc(doc, newSha) }); + remoteMap.set(doc.path, newSha); + }, + }); pushed++; debugLog(slug, 'sync:restore-remote-missing', { path: doc.path }); } else { @@ -888,12 +972,16 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis } if (!t.lastRemoteSha || t.lastRemoteSha === sha) { // safe to delete remotely - await deleteFiles(config, [{ path: t.path, sha }], 'vibenote: delete removed notes'); + queueDelete(t.path, { + onApplied: () => { + remoteMap.delete(t.path); + removeTombstones( + storeSlug, + (x) => x.type === 'delete' && x.path === t.path && x.deletedAt === t.deletedAt + ); + }, + }); deletedRemote++; - removeTombstones( - storeSlug, - (x) => x.type === 'delete' && x.path === t.path && x.deletedAt === t.deletedAt - ); debugLog(slug, 'sync:tombstone:delete:remote-deleted', { path: t.path }); } else { // remote changed since we deleted locally → keep remote (no action), clear tombstone @@ -908,11 +996,41 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis const remoteTargetSha = remoteMap.get(t.to); if (targetLocal && !remoteTargetSha) { const { id, doc } = targetLocal; - const payload = await buildUploadPayload(config, doc); + let payload: PutFilePayload | null = null; + if (t.lastRemoteSha) { + try { + const base = await fetchBlob(config, t.lastRemoteSha); + if (base) { + if (doc.kind === 'markdown') { + const remoteText = fromBase64(base); + if (remoteText === doc.content) { + payload = { path: doc.path, kind: 'markdown', blobSha: t.lastRemoteSha }; + } + } else if (doc.kind === 'binary') { + if (base === doc.content) { + payload = { path: doc.path, kind: 'binary', blobSha: t.lastRemoteSha }; + } + } else if (doc.kind === 'asset-url') { + payload = { path: doc.path, kind: 'binary', blobSha: t.lastRemoteSha }; + } + } + } catch { + // fall back to rebuilding payload below + } + } + if (!payload) { + payload = await buildUploadPayload(config, doc); + } if (payload) { - const nextSha = await putFile(config, payload, 'vibenote: update notes'); - markSynced(storeSlug, id, { remoteSha: nextSha, syncedHash: syncedHashForDoc(doc, nextSha) }); - remoteMap.set(t.to, nextSha); + queueUpload(payload, { + onPending: () => { + remoteMap.set(t.to, doc.lastRemoteSha ?? 
'pending'); + }, + onApplied: (nextSha) => { + markSynced(storeSlug, id, { remoteSha: nextSha, syncedHash: syncedHashForDoc(doc, nextSha) }); + remoteMap.set(t.to, nextSha); + }, + }); pushed++; debugLog(slug, 'sync:tombstone:rename:ensure-target', { to: t.to }); } else { @@ -944,17 +1062,16 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis remoteMap.set(t.from, shaToDelete); } if (!t.lastRemoteSha || t.lastRemoteSha === shaToDelete) { - await deleteFiles( - config, - [{ path: t.from, sha: shaToDelete }], - 'vibenote: delete old path after rename' - ); + queueDelete(t.from, { + onApplied: () => { + remoteMap.delete(t.from); + removeTombstones( + storeSlug, + (x) => x.type === 'rename' && x.from === t.from && x.to === t.to && x.renamedAt === t.renamedAt + ); + }, + }); deletedRemote++; - remoteMap.delete(t.from); - removeTombstones( - storeSlug, - (x) => x.type === 'rename' && x.from === t.from && x.to === t.to && x.renamedAt === t.renamedAt - ); debugLog(slug, 'sync:tombstone:rename:remote-deleted', { from: t.from, to: t.to }); continue; } @@ -988,6 +1105,8 @@ export async function syncBidirectional(store: LocalStore, slug: string): Promis } } + await flushPendingChanges(); + const summary = { pulled, pushed, deletedRemote, deletedLocal, merged }; return summary; } diff --git a/src/test/mock-remote.ts b/src/test/mock-remote.ts index 7175a91..ffa225d 100644 --- a/src/test/mock-remote.ts +++ b/src/test/mock-remote.ts @@ -106,7 +106,11 @@ class MockRemoteRepo { if (!head) { return this.makeResponse(404, { message: 'not found' }); } - const stale = this.simulateStale ? this.staleRefByBranch.get(branch) : undefined; + const bypassCache = this.simulateStale && url.searchParams.has('cache_bust'); + if (bypassCache) { + this.staleRefByBranch.delete(branch); + } + const stale = this.simulateStale && !bypassCache ? this.staleRefByBranch.get(branch) : undefined; const now = Date.now(); const shaToServe = stale && stale.until > now && this.commitRecords.has(stale.commit) ? 
stale.commit : head; From cce630728ac11c325c5a6a934e8f3b3feabbb2ee Mon Sep 17 00:00:00 2001 From: Gregor Mitscha-Baude Date: Mon, 20 Oct 2025 16:13:35 +0200 Subject: [PATCH 5/6] move stuff --- src/data.ts | 111 +---------------------------- src/sync/git-sync.ts | 166 +++++++++++++++++++++++++++++++++---------- 2 files changed, 129 insertions(+), 148 deletions(-) diff --git a/src/data.ts b/src/data.ts index 728b866..c13dac7 100644 --- a/src/data.ts +++ b/src/data.ts @@ -41,6 +41,7 @@ import { listRepoFiles, pullRepoFile, type RemoteFile, + formatSyncFailure, } from './sync/git-sync'; import { logError } from './lib/logging'; import { useReadOnlyFiles } from './data/useReadOnlyFiles'; @@ -453,7 +454,6 @@ function useRepoData({ slug, route, recordRecent, setActivePath }: RepoDataInput setStatusMessage(formatSyncFailure(error)); } }; - // click on a file in the sidebar const selectFile = async (path: string | undefined) => { await selectReadOnlyFile(path); @@ -936,115 +936,6 @@ function formatError(error: unknown): string { return String(error); } -type GitHubRequestError = Error & { - status?: number; - path?: string; - body?: unknown; - text?: string | null; - syncContexts?: SyncRequestContext[]; -}; - -function formatSyncFailure(error: unknown): string { - const fallback = 'Sync failed'; - if (!error || typeof error !== 'object') return fallback; - const err = error as GitHubRequestError; - if (err.status === 422) { - const action = describeGitHubRequest(err.path); - const affected = describeAffectedPaths(err); - const reason = extractGitHubReason(err); - let message = `Sync failed: GitHub returned 422 while ${action}`; - if (affected) message += ` for ${affected}`; - if (reason) message += ` (${reason})`; - return `${message}. Please report this bug.`; - } - return fallback; -} - -type SyncRequestContext = { - operation: 'put' | 'delete' | 'batch'; - paths: string[]; -}; - -function describeGitHubRequest(path: string | undefined): string { - if (!path) return 'processing the GitHub request'; - let cleaned = stripRepoPrefix(path); - if (cleaned.startsWith('git/refs/heads/')) { - let branch = cleaned.slice('git/refs/heads/'.length); - branch = decodeGitPath(branch); - return `updating refs/heads/${branch}`; - } - if (cleaned.startsWith('git/commits')) return 'creating a commit on GitHub'; - if (cleaned.startsWith('git/trees')) return 'creating a tree on GitHub'; - if (cleaned.startsWith('contents/')) { - const resource = decodeGitPath(cleaned.slice('contents/'.length)); - return `updating repository contents at ${resource}`; - } - return `calling ${cleaned}`; -} - -function stripRepoPrefix(path: string): string { - const trimmed = path.replace(/^\/+/, ''); - const repoPattern = /^repos\/[^/]+\/[^/]+\/(.+)$/; - const match = trimmed.match(repoPattern); - if (match && typeof match[1] === 'string') { - return match[1]; - } - return trimmed; -} - -function decodeGitPath(path: string): string { - return path - .split('/') - .map((segment) => { - try { - return decodeURIComponent(segment); - } catch { - return segment; - } - }) - .join('/'); -} - -function extractGitHubReason(err: GitHubRequestError): string | undefined { - const body = err.body as { message?: unknown; errors?: unknown } | undefined; - if (body) { - if (typeof body.message === 'string' && body.message.trim() !== '') { - return body.message.trim(); - } - if (Array.isArray(body.errors)) { - const messages = body.errors - .map((entry: unknown) => { - if (typeof entry === 'string') return entry; - if (entry && typeof entry === 
'object') {
-            const msg = (entry as { message?: unknown }).message;
-            if (typeof msg === 'string' && msg.trim() !== '') return msg.trim();
-          }
-          return undefined;
-        })
-        .filter((msg): msg is string => msg !== undefined && msg !== '');
-      if (messages.length > 0) return messages.join('; ');
-    }
-  }
-  if (typeof err.text === 'string' && err.text.trim() !== '') {
-    return err.text.trim();
-  }
-  return undefined;
-}
-
-function describeAffectedPaths(err: GitHubRequestError): string | undefined {
-  const contexts = Array.isArray(err.syncContexts) ? err.syncContexts : undefined;
-  if (!contexts || contexts.length === 0) return undefined;
-  const latest = contexts[contexts.length - 1];
-  if (!latest || !Array.isArray(latest.paths) || latest.paths.length === 0) return undefined;
-  const display = latest.paths.slice(0, 3);
-  const formatted = display.map((path) => path || '(unknown path)');
-  let suffix = '';
-  if (latest.paths.length > display.length) {
-    suffix = ` and ${latest.paths.length - display.length} more`;
-  }
-  return formatted.join(', ') + suffix;
-}
-
 function findByPath<T extends { path: string }>(notes: T[], targetPath: string): T | undefined {
   let normalized = normalizePath(targetPath);
   return notes.find((note) => normalizePath(note.path) === normalized);
diff --git a/src/sync/git-sync.ts b/src/sync/git-sync.ts
index 720c52c..7c26da1 100644
--- a/src/sync/git-sync.ts
+++ b/src/sync/git-sync.ts
@@ -26,6 +26,7 @@ import { mergeMarkdown } from '../merge/merge';
 import { readCachedBlob, writeCachedBlob } from '../storage/blob-cache';

 export type { RemoteConfig, RemoteFile };
+export { formatSyncFailure };

 type RemoteConfig = { owner: string; repo: string; branch: string };

@@ -211,13 +212,10 @@
 // Upsert a single file and return its new content sha
 export async function putFile(config: RemoteConfig, file: PutFilePayload, message: string): Promise<string> {
-  return await withGitHubContext(
-    { operation: 'put', paths: [file.path] },
-    async () => {
-      let res = await commitChanges(config, message, [serializeContent(file)]);
-      return extractBlobSha(res, file.path) ?? file.blobSha ?? res.commitSha;
-    }
-  );
+  return await withGitHubContext({ operation: 'put', paths: [file.path] }, async () => {
+    let res = await commitChanges(config, message, [serializeContent(file)]);
+    return extractBlobSha(res, file.path) ?? file.blobSha ?? res.commitSha;
+  });
 }

 export async function commitBatch(
@@ -226,15 +224,12 @@
   message: string
 ): Promise<string | null> {
   if (files.length === 0) return null;
-  return await withGitHubContext(
-    { operation: 'batch', paths: files.map((file) => file.path) },
-    async () => {
-      let res = await commitChanges(config, message, files.map(serializeContent));
-      // Return the first blob sha if available to align with caller expectations
-      const firstPath = files[0]?.path;
-      return firstPath ? extractBlobSha(res, firstPath) ?? files[0]?.blobSha ?? res.commitSha : res.commitSha;
-    }
-  );
+  return await withGitHubContext({ operation: 'batch', paths: files.map((file) => file.path) }, async () => {
+    let res = await commitChanges(config, message, files.map(serializeContent));
+    // Return the first blob sha if available to align with caller expectations
+    const firstPath = files[0]?.path;
+    return firstPath ? extractBlobSha(res, firstPath) ?? files[0]?.blobSha ?? res.commitSha : res.commitSha;
+  });
 }

 export async function listRepoFiles(config: RemoteConfig): Promise<RemoteFile[]> {
@@ -288,17 +283,14 @@
   message: string
 ): Promise<string | null> {
   if (files.length === 0) return null;
-  return await withGitHubContext(
-    { operation: 'delete', paths: files.map((file) => file.path) },
-    async () => {
-      let res = await commitChanges(
-        config,
-        message,
-        files.map((f) => ({ path: f.path, delete: true }))
-      );
-      return res.commitSha || null;
-    }
-  );
+  return await withGitHubContext({ operation: 'delete', paths: files.map((file) => file.path) }, async () => {
+    let res = await commitChanges(
+      config,
+      message,
+      files.map((f) => ({ path: f.path, delete: true }))
+    );
+    return res.commitSha || null;
+  });
 }

 function fromBase64(b64: string): string {
@@ -727,10 +719,7 @@
     pendingUploads.push({ payload, onApplied: options.onApplied });
   };

-  const queueDelete = (
-    path: string,
-    options: { onPending?: () => void; onApplied: () => void }
-  ) => {
+  const queueDelete = (path: string, options: { onPending?: () => void; onApplied: () => void }) => {
     options.onPending?.();
     pendingDeletes.push({ path, onApplied: options.onApplied });
   };
@@ -746,17 +735,12 @@
     const message = 'vibenote: sync changes';
     let res: CommitResponse | null = null;
     res = await withGitHubContext({ operation: 'batch', paths: pathsForContext }, async () => {
-      return await commitChanges(config, message, [
-        ...serializedDeletes,
-        ...serializedUploads,
-      ]);
+      return await commitChanges(config, message, [...serializedDeletes, ...serializedUploads]);
     });
     const commitResult = res ?? { commitSha: '', blobShas: {} };
     for (const entry of pendingUploads) {
       const newSha =
-        extractBlobSha(commitResult, entry.payload.path) ??
-        entry.payload.blobSha ??
-        commitResult.commitSha;
+        extractBlobSha(commitResult, entry.payload.path) ?? entry.payload.blobSha ?? commitResult.commitSha;
       entry.onApplied(newSha);
     }
     for (const entry of pendingDeletes) {
@@ -1110,3 +1094,109 @@
   const summary = { pulled, pushed, deletedRemote, deletedLocal, merged };
   return summary;
 }
+
+// helpers to surface errors
+
+type GitHubRequestError = Error & {
+  status?: number;
+  path?: string;
+  body?: unknown;
+  text?: string | null;
+  syncContexts?: SyncRequestContext[];
+};
+
+function formatSyncFailure(error: unknown): string {
+  const fallback = 'Sync failed';
+  if (!error || typeof error !== 'object') return fallback;
+  const err = error as GitHubRequestError;
+  if (err.status === 422) {
+    const action = describeGitHubRequest(err.path);
+    const affected = describeAffectedPaths(err);
+    const reason = extractGitHubReason(err);
+    let message = `Sync failed: GitHub returned 422 while ${action}`;
+    if (affected) message += ` for ${affected}`;
+    if (reason) message += ` (${reason})`;
+    return `${message}.
Please report this bug.`; + } + return fallback; +} + +function describeGitHubRequest(path: string | undefined): string { + if (!path) return 'processing the GitHub request'; + let cleaned = stripRepoPrefix(path); + if (cleaned.startsWith('git/refs/heads/')) { + let branch = cleaned.slice('git/refs/heads/'.length); + branch = decodeGitPath(branch); + return `updating refs/heads/${branch}`; + } + if (cleaned.startsWith('git/commits')) return 'creating a commit on GitHub'; + if (cleaned.startsWith('git/trees')) return 'creating a tree on GitHub'; + if (cleaned.startsWith('contents/')) { + const resource = decodeGitPath(cleaned.slice('contents/'.length)); + return `updating repository contents at ${resource}`; + } + return `calling ${cleaned}`; +} + +function stripRepoPrefix(path: string): string { + const trimmed = path.replace(/^\/+/, ''); + const repoPattern = /^repos\/[^/]+\/[^/]+\/(.+)$/; + const match = trimmed.match(repoPattern); + if (match && typeof match[1] === 'string') { + return match[1]; + } + return trimmed; +} + +function decodeGitPath(path: string): string { + return path + .split('/') + .map((segment) => { + try { + return decodeURIComponent(segment); + } catch { + return segment; + } + }) + .join('/'); +} + +function extractGitHubReason(err: GitHubRequestError): string | undefined { + const body = err.body as { message?: unknown; errors?: unknown } | undefined; + if (body) { + if (typeof body.message === 'string' && body.message.trim() !== '') { + return body.message.trim(); + } + if (Array.isArray(body.errors)) { + const messages = body.errors + .map((entry: unknown) => { + if (typeof entry === 'string') return entry; + if (entry && typeof entry === 'object') { + const msg = (entry as { message?: unknown }).message; + if (typeof msg === 'string' && msg.trim() !== '') return msg.trim(); + } + return undefined; + }) + .filter((msg): msg is string => msg !== undefined && msg !== ''); + if (messages.length > 0) return messages.join('; '); + } + } + if (typeof err.text === 'string' && err.text.trim() !== '') { + return err.text.trim(); + } + return undefined; +} + +function describeAffectedPaths(err: GitHubRequestError): string | undefined { + const contexts = Array.isArray(err.syncContexts) ? 
err.syncContexts : undefined; + if (!contexts || contexts.length === 0) return undefined; + const latest = contexts[contexts.length - 1]; + if (!latest || !Array.isArray(latest.paths) || latest.paths.length === 0) return undefined; + const display = latest.paths.slice(0, 3); + const formatted = display.map((path) => path || '(unknown path)'); + let suffix = ''; + if (latest.paths.length > display.length) { + suffix = ` and ${latest.paths.length - display.length} more`; + } + return formatted.join(', ') + suffix; +} From 35f225480747d4d2adef8bc3c0fa97bd3ce0dbff Mon Sep 17 00:00:00 2001 From: Gregor Mitscha-Baude Date: Tue, 21 Oct 2025 12:32:29 +0200 Subject: [PATCH 6/6] fix/consolidate tests --- src/data/data.test.ts | 29 ++- src/sync/git-sync.test.ts | 22 ++- src/sync/git-sync2.test.ts | 370 ------------------------------------- 3 files changed, 45 insertions(+), 376 deletions(-) delete mode 100644 src/sync/git-sync2.test.ts diff --git a/src/data/data.test.ts b/src/data/data.test.ts index 9f7a19c..d830abf 100644 --- a/src/data/data.test.ts +++ b/src/data/data.test.ts @@ -31,6 +31,10 @@ type SyncMocks = { syncBidirectional: ReturnType; }; +type LoggingMocks = { + logError: ReturnType; +}; + const authModule = vi.hoisted(() => ({ signInWithGitHubApp: vi.fn(), getSessionToken: vi.fn(), @@ -57,6 +61,10 @@ const syncModule = vi.hoisted(() => ({ syncBidirectional: vi.fn(), })); +const loggingModule = vi.hoisted(() => ({ + logError: vi.fn(), +})); + vi.mock('../auth/app-auth', () => ({ signInWithGitHubApp: authModule.signInWithGitHubApp, getSessionToken: authModule.getSessionToken, @@ -73,13 +81,21 @@ vi.mock('../lib/backend', () => ({ revokeShareLink: backendModule.revokeShareLink, })); -vi.mock('../sync/git-sync', () => ({ - buildRemoteConfig: syncModule.buildRemoteConfig, - listRepoFiles: syncModule.listRepoFiles, - pullRepoFile: syncModule.pullRepoFile, - syncBidirectional: syncModule.syncBidirectional, +vi.mock('../lib/logging', () => ({ + logError: loggingModule.logError, })); +vi.mock('../sync/git-sync', async () => { + const actual = await vi.importActual('../sync/git-sync'); + return { + ...actual, + buildRemoteConfig: syncModule.buildRemoteConfig, + listRepoFiles: syncModule.listRepoFiles, + pullRepoFile: syncModule.pullRepoFile, + syncBidirectional: syncModule.syncBidirectional, + }; +}); + let useRepoData: typeof import('../data').useRepoData; beforeAll(async () => { @@ -101,6 +117,7 @@ const mockBuildRemoteConfig = syncModule.buildRemoteConfig; const mockListRepoFiles = syncModule.listRepoFiles; const mockPullRepoFile = syncModule.pullRepoFile; const mockSyncBidirectional = syncModule.syncBidirectional; +const mockLogError = loggingModule.logError; const writableMeta: RepoMetadata = { isPrivate: true, @@ -189,6 +206,7 @@ describe('useRepoData', () => { mockListRepoFiles.mockReset(); mockPullRepoFile.mockReset(); mockSyncBidirectional.mockReset(); + mockLogError.mockReset(); mockGetSessionToken.mockReturnValue(null); mockGetSessionUser.mockReturnValue(null); @@ -450,6 +468,7 @@ describe('useRepoData', () => { expect(result.current.state.statusMessage).toBe( 'Sync failed: GitHub returned 422 while updating refs/heads/main for Ready.md (Update is not a fast-forward). Please report this bug.' ); + expect(mockLogError).toHaveBeenCalledWith(ghError); }); // Read-only repos should list remote notes and refresh on selection. 
diff --git a/src/sync/git-sync.test.ts b/src/sync/git-sync.test.ts
index 32c62eb..f6563bd 100644
--- a/src/sync/git-sync.test.ts
+++ b/src/sync/git-sync.test.ts
@@ -13,7 +13,26 @@ const globalAny = globalThis as {
   fetch?: typeof fetch;
 };
 
-describe('syncBidirectional', () => {
+const remoteScenarios: Array<{
+  label: string;
+  configure(remote: MockRemoteRepo): void;
+}> = [
+  {
+    label: 'fresh remote responses',
+    configure(remote) {
+      remote.enableStaleReads({ enabled: false });
+    },
+  },
+  {
+    label: 'stale remote responses with random delay',
+    configure(remote) {
+      const windowMs = Math.floor(Math.random() * 901) + 100;
+      remote.enableStaleReads({ enabled: true, windowMs });
+    },
+  },
+];
+
+describe.each(remoteScenarios)('syncBidirectional: $label', ({ configure }) => {
   let store: LocalStore;
   let remote: MockRemoteRepo;
   let syncBidirectional: typeof import('./git-sync').syncBidirectional;
@@ -24,6 +43,7 @@ describe('syncBidirectional', () => {
     remote = new MockRemoteRepo();
     remote.configure('user', 'repo');
     remote.allowToken('test-token');
+    configure(remote);
     const fetchMock = vi.fn((input: RequestInfo | URL, init?: RequestInit) =>
       remote.handleFetch(input, init)
     );
diff --git a/src/sync/git-sync2.test.ts b/src/sync/git-sync2.test.ts
deleted file mode 100644
index 6661165..0000000
--- a/src/sync/git-sync2.test.ts
+++ /dev/null
@@ -1,370 +0,0 @@
-import { Buffer } from 'node:buffer';
-import { beforeEach, describe, expect, test, vi } from 'vitest';
-import { LocalStore, listTombstones, findBySyncedHash } from '../storage/local';
-import { MockRemoteRepo } from '../test/mock-remote';
-
-const authModule = vi.hoisted(() => ({
-  ensureFreshAccessToken: vi.fn().mockResolvedValue('test-token'),
-}));
-
-vi.mock('../auth/app-auth', () => authModule);
-
-const globalAny = globalThis as {
-  fetch?: typeof fetch;
-};
-
-describe('syncBidirectional', () => {
-  let store: LocalStore;
-  let remote: MockRemoteRepo;
-  let syncBidirectional: typeof import('./git-sync').syncBidirectional;
-
-  beforeEach(async () => {
-    authModule.ensureFreshAccessToken.mockReset();
-    authModule.ensureFreshAccessToken.mockResolvedValue('test-token');
-    remote = new MockRemoteRepo();
-    remote.configure('user', 'repo');
-    remote.allowToken('test-token');
-    remote.enableStaleReads({ enabled: true, windowMs: 5_000 });
-    const fetchMock = vi.fn((input: RequestInfo | URL, init?: RequestInit) =>
-      remote.handleFetch(input, init)
-    );
-    globalAny.fetch = fetchMock as unknown as typeof fetch;
-    const mod = await import('./git-sync');
-    syncBidirectional = mod.syncBidirectional;
-    store = new LocalStore('user/repo');
-  });
-
-  test('pushes new notes and remains stable', async () => {
-    const firstId = store.createFile('First.md', 'first note');
-    const secondId = store.createFile('Second.md', 'second note');
-    await syncBidirectional(store, 'user/repo');
-    await syncBidirectional(store, 'user/repo');
-    expectParity(store, remote);
-    expect(listTombstones(store.slug)).toHaveLength(0);
-    const firstDoc = store.loadFileById(firstId);
-    const secondDoc = store.loadFileById(secondId);
-    expect(firstDoc?.path).toBe('First.md');
-    expect(secondDoc?.path).toBe('Second.md');
-  });
-
-  test('applies local deletions to remote without resurrection', async () => {
-    store.createFile('Ghost.md', 'haunt me');
-    await syncBidirectional(store, 'user/repo');
-    store.deleteFile('Ghost.md');
-    await syncBidirectional(store, 'user/repo');
-    expectParity(store, remote);
-    expect(store.listFiles()).toHaveLength(0);
-    expect(listTombstones(store.slug)).toHaveLength(0);
-  });
-
-  test('renames move files remotely', async () => {
-    store.createFile('Original.md', 'rename me');
-    await syncBidirectional(store, 'user/repo');
-    store.renameFile('Original.md', 'Renamed');
-    await syncBidirectional(store, 'user/repo');
-    expectParity(store, remote);
-    const notes = store.listFiles();
-    expect(notes).toHaveLength(1);
-    expect(notes[0]?.path).toBe('Renamed.md');
-    expect([...remote.snapshot().keys()]).toEqual(['Renamed.md']);
-  });
-
-  test('rename removes old remote path after prior sync', async () => {
-    store.createFile('test.md', 'body');
-    await syncBidirectional(store, 'user/repo');
-    expect([...remote.snapshot().keys()]).toEqual(['test.md']);
-    store.renameFile('test.md', 'test2');
-    await syncBidirectional(store, 'user/repo');
-    const remoteFiles = [...remote.snapshot().keys()].sort();
-    expect(remoteFiles).toEqual(['test2.md']);
-    expectParity(store, remote);
-  });
-
-  test('rename with remote edits keeps both copies in sync', async () => {
-    store.createFile('draft.md', 'original body');
-    await syncBidirectional(store, 'user/repo');
-    remote.setFile('draft.md', 'remote update');
-    store.renameFile('draft.md', 'draft-renamed');
-    await syncBidirectional(store, 'user/repo');
-    const paths = [...remote.snapshot().keys()].sort();
-    expect(paths).toEqual(['draft-renamed.md', 'draft.md']);
-    expectParity(store, remote);
-    const localPaths = store
-      .listFiles()
-      .map((n) => n.path)
-      .sort();
-    expect(localPaths).toEqual(['draft-renamed.md', 'draft.md']);
-  });
-
-  test('rename revert does not push redundant commits', async () => {
-    store.createFile('first-name.md', 'body');
-    await syncBidirectional(store, 'user/repo');
-    const headBeforeRename = await getRemoteHeadSha(remote);
-
-    store.renameFile('first-name.md', 'second-name');
-    store.renameFile('second-name.md', 'first-name');
-
-    await syncBidirectional(store, 'user/repo');
-
-    const headAfterSync = await getRemoteHeadSha(remote);
-    expect(headAfterSync).toBe(headBeforeRename);
-    expectParity(store, remote);
-  });
-
-  test('rename followed by local edit pushes updated content under new path', async () => {
-    store.createFile('Draft.md', 'initial body');
-    await syncBidirectional(store, 'user/repo');
-    expect([...remote.snapshot().keys()]).toEqual(['Draft.md']);
-
-    const nextPath = store.renameFile('Draft.md', 'Ready');
-    expect(nextPath).toBe('Ready.md');
-    store.saveFile('Ready.md', 'edited after rename');
-
-    await syncBidirectional(store, 'user/repo');
-
-    const remoteFiles = [...remote.snapshot().entries()];
-    expect(remoteFiles).toEqual([['Ready.md', 'edited after rename']]);
-    const readyMeta = store.listFiles().find((file) => file.path === 'Ready.md');
-    const readyDoc = readyMeta ? store.loadFileById(readyMeta.id) : null;
-    expect(readyDoc?.content).toBe('edited after rename');
-    expect(listTombstones(store.slug)).toHaveLength(0);
-  });
-
-  test('surface 422 when branch head advances during push', async () => {
-    store.createFile('Lonely.md', 'seed text');
-    await syncBidirectional(store, 'user/repo');
-
-    store.saveFile('Lonely.md', 'edited locally');
-    remote.advanceHeadOnNextUpdate();
-
-    await expect(syncBidirectional(store, 'user/repo')).rejects.toMatchObject({
-      status: 422,
-      path: expect.stringContaining('/git/refs/heads/'),
-    });
-  });
-
-  test('pulls new remote notes', async () => {
-    remote.setFile('Remote.md', '# remote');
-    await syncBidirectional(store, 'user/repo');
-    expectParity(store, remote);
-    const notes = store.listFiles();
-    expect(notes).toHaveLength(1);
-    const doc = store.loadFileById(notes[0]?.id ?? '');
-    expect(doc?.content).toBe('# remote');
-  });
-
-  test('removes notes when deleted remotely', async () => {
-    store.createFile('Shared.md', 'shared text');
-    await syncBidirectional(store, 'user/repo');
-    remote.deleteDirect('Shared.md');
-    await syncBidirectional(store, 'user/repo');
-    expectParity(store, remote);
-    expect(store.listFiles()).toHaveLength(0);
-  });
-
-  test('syncs tracked image files while ignoring unrelated blobs', async () => {
-    remote.setFile('data.json', '{"keep":true}');
-    remote.setFile('image.png', 'asset');
-    store.createFile('OnlyNote.md', '# hello');
-    await syncBidirectional(store, 'user/repo');
-    const snapshot = remote.snapshot();
-    expect(snapshot.get('data.json')).toBe('{"keep":true}');
-    expect(snapshot.get('image.png')).toBe('asset');
-    expect(snapshot.get('OnlyNote.md')).toBe('# hello');
-    const files = store.listFiles();
-    const imageMeta = files.find((f) => f.path === 'image.png');
-    expect(imageMeta).toBeDefined();
-    if (imageMeta) {
-      const imageDoc = store.loadFileById(imageMeta.id);
-      expect(imageDoc?.kind).toBe('asset-url');
-      expect(imageDoc?.content).toMatch(/^gh-blob:/);
-    }
-    expectParity(store, remote);
-  });
-
-  test('pulls nested Markdown files', async () => {
-    remote.setFile('nested/Nested.md', '# nested');
-    await syncBidirectional(store, 'user/repo');
-    const notes = store.listFiles();
-    expect(notes).toHaveLength(1);
-    const doc = store.loadFileById(notes[0]?.id ?? '');
-    expect(doc?.path).toBe('nested/Nested.md');
-    expect(doc?.content).toBe('# nested');
-  });
-
-  test('pulls binary image assets from remote', async () => {
-    remote.setFile('assets/logo.png', 'image-data');
-    await syncBidirectional(store, 'user/repo');
-    const files = store.listFiles();
-    const asset = files.find((f) => f.path === 'assets/logo.png');
-    expect(asset).toBeDefined();
-    if (!asset) return;
-    const doc = store.loadFileById(asset.id);
-    expect(doc?.kind).toBe('asset-url');
-    expect(doc?.content).toMatch(/^gh-blob:/);
-    expectParity(store, remote);
-  });
-
-  test('pulls binary assets via blob fallback when contents payload is empty', async () => {
-    const payload = 'high-res-image';
-    const expectedBase64 = Buffer.from(payload, 'utf8').toString('base64');
-    remote.setFile('assets/large.png', payload);
-    const originalFetch = globalAny.fetch!;
-    let capturedSha: string | null = null;
-    const interceptFetch = vi.fn(async (input: RequestInfo | URL, init?: RequestInit) => {
-      const request = input instanceof Request ? input : new Request(input, init);
-      const url = new URL(request.url);
-      if (
-        request.method.toUpperCase() === 'GET' &&
-        url.pathname === '/repos/user/repo/contents/assets/large.png'
-      ) {
-        const upstream = await originalFetch(input, init);
-        const json = await upstream.json();
-        capturedSha = typeof json?.sha === 'string' ? json.sha : null;
-        return new Response(JSON.stringify({ ...json, content: '' }), {
-          status: 200,
-          headers: { 'Content-Type': 'application/json' },
-        });
-      }
-      if (
-        request.method.toUpperCase() === 'GET' &&
-        capturedSha &&
-        url.pathname === `/repos/user/repo/git/blobs/${capturedSha}`
-      ) {
-        return new Response(
-          JSON.stringify({ sha: capturedSha, content: expectedBase64, encoding: 'base64' }),
-          { status: 200, headers: { 'Content-Type': 'application/json' } }
-        );
-      }
-      return originalFetch(input, init);
-    });
-
-    globalAny.fetch = interceptFetch as unknown as typeof fetch;
-    try {
-      await syncBidirectional(store, 'user/repo');
-    } finally {
-      globalAny.fetch = originalFetch;
-    }
-    const files = store.listFiles();
-    const asset = files.find((f) => f.path === 'assets/large.png');
-    expect(asset).toBeDefined();
-    if (!asset) return;
-    const doc = store.loadFileById(asset.id);
-    expect(doc?.kind).toBe('asset-url');
-    expect(doc?.content).toMatch(/^gh-blob:/);
-    expect(capturedSha).toBeTruthy();
-    expectParity(store, remote);
-  });
-
-  test('tracks remote binary renames by sha/hash', async () => {
-    const payload = Buffer.from('asset', 'utf8').toString('base64');
-    const id = store.createFile('logo.png', payload);
-    await syncBidirectional(store, 'user/repo');
-    const before = store.loadFileById(id);
-    expect(before?.lastSyncedHash).toBeDefined();
-    remote.deleteDirect('logo.png');
-    remote.setFile('assets/logo.png', payload);
-    if (before?.lastSyncedHash) {
-      const lookup = findBySyncedHash(store.slug, before.lastSyncedHash);
-      expect(lookup?.id).toBe(id);
-    }
-
-    await syncBidirectional(store, 'user/repo');
-
-    const paths = store
-      .listFiles()
-      .map((f) => f.path)
-      .sort();
-    expect(paths).toContain('assets/logo.png');
-    expect(paths).not.toContain('logo.png');
-    const renamedFile = store.listFiles().find((f) => f.path === 'assets/logo.png');
-    expect(renamedFile).toBeDefined();
-    expectParity(store, remote);
-  });
-
-  test('listRepoFiles includes nested markdown', async () => {
-    const mod = await import('./git-sync');
-    remote.setFile('nested/Nested.md', '# nested');
-    let cfg = mod.buildRemoteConfig('user/repo');
-    let entries = await mod.listRepoFiles(cfg);
-    const paths = entries.map((e) => e.path).sort();
-    expect(paths).toEqual(['nested/Nested.md']);
-  });
-
-  test('listRepoFiles returns markdown and image entries', async () => {
-    const mod = await import('./git-sync');
-    remote.setFile('docs/Doc.md', '# hi');
-    remote.setFile('assets/logo.png', 'img');
-    let cfg = mod.buildRemoteConfig('user/repo');
-    let entries = await mod.listRepoFiles(cfg);
-    const byPath = new Map(entries.map((entry) => [entry.path, entry.kind]));
-    expect(byPath.get('docs/Doc.md')).toBe('markdown');
-    expect(byPath.get('assets/logo.png')).toBe('binary');
-  });
-
-  test('includes README.md files from the repository', async () => {
-    remote.setFile('README.md', 'root readme');
-    remote.setFile('sub/README.md', 'sub readme');
-    await syncBidirectional(store, 'user/repo');
-    const paths = store
-      .listFiles()
-      .map((n) => n.path)
-      .sort();
-    expect(paths).toEqual(['README.md', 'sub/README.md']);
-  });
-});
-
-type RemoteHeadPayload = {
-  object?: { sha?: string };
-};
-
-async function getRemoteHeadSha(remote: MockRemoteRepo, branch = 'main'): Promise<string> {
-  const response = await remote.handleFetch(
-    `https://api.github.com/repos/user/repo/git/ref/heads/${branch}`,
-    { method: 'GET' }
-  );
-  const payload = (await response.json()) as RemoteHeadPayload;
-  if (!response.ok) {
-    throw new Error(`remote head lookup failed with status ${response.status}`);
-  }
-  const sha = typeof payload.object?.sha === 'string' ? payload.object.sha : '';
-  expect(sha).not.toBe('');
-  return sha;
-}
-
-function expectParity(store: LocalStore, remote: MockRemoteRepo) {
-  const localDocs = new Map();
-  for (const meta of store.listFiles()) {
-    const doc = store.loadFileById(meta.id);
-    if (!doc) continue;
-    localDocs.set(meta.path, doc);
-  }
-  const remoteMap = remote.snapshot();
-  const trackedRemoteKeys = [...remoteMap.keys()].filter(isTrackedPath).sort();
-  expect(trackedRemoteKeys).toEqual([...localDocs.keys()].sort());
-  for (const [path, doc] of localDocs.entries()) {
-    const remoteContent = remoteMap.get(path);
-    if (doc?.kind === 'markdown') {
-      expect(remoteContent).toBe(doc.content);
-    } else if (doc?.kind === 'binary') {
-      const decoded = Buffer.from(doc.content, 'base64').toString('utf8');
-      expect(remoteContent).toBe(decoded);
-    } else if (doc?.kind === 'asset-url') {
-      expect(remoteContent).toBeDefined();
-    }
-  }
-}
-
-function isTrackedPath(path: string): boolean {
-  const lower = path.toLowerCase();
-  return (
-    lower.endsWith('.md') ||
-    lower.endsWith('.png') ||
-    lower.endsWith('.jpg') ||
-    lower.endsWith('.jpeg') ||
-    lower.endsWith('.gif') ||
-    lower.endsWith('.webp') ||
-    lower.endsWith('.svg') ||
-    lower.endsWith('.avif') 
-  );
-}
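
The data.test.ts expectations above pin down how the helpers from [PATCH 5/6] are meant to compose: on a sync failure the repo-data layer logs the raw error via logError and surfaces the message built by formatSyncFailure (now exported from git-sync.ts). A minimal sketch of such a call site follows; the syncWithStatus wrapper and the setStatusMessage callback are hypothetical illustrations, not part of these patches, and the import paths assume a module sitting at the root of src/.

import { LocalStore } from './storage/local';
import { syncBidirectional, formatSyncFailure } from './sync/git-sync';
import { logError } from './lib/logging';

// Hypothetical wrapper: log the raw error for debugging, then show the
// user-facing message that formatSyncFailure derives from the 422 context.
async function syncWithStatus(
  store: LocalStore,
  slug: string,
  setStatusMessage: (message: string) => void
): Promise<void> {
  try {
    await syncBidirectional(store, slug);
  } catch (error) {
    logError(error);
    // e.g. "Sync failed: GitHub returned 422 while updating refs/heads/main
    // for Ready.md (Update is not a fast-forward). Please report this bug."
    setStatusMessage(formatSyncFailure(error));
  }
}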