Compare commits

...

2 Commits

Author SHA1 Message Date
bymyself
a04ec6b0fb fix: handle pagination re-enable and concurrent drift in assetsStore
Address code review feedback:
- Re-enable pagination (hasMoreHistory=true) when history grows past one
  page after previously being exhausted. Skip drift accumulation while
  pagination is inactive.
- Capture drift at request time in fetchHistoryNextPage and subtract only
  the captured amount on success, preserving concurrent updateHistory()
  increments during in-flight fetches.

Amp-Thread-ID: https://ampcode.com/threads/T-019c7a21-9e2b-73dc-81bf-d7c64574345c
2026-03-12 02:01:14 -07:00
bymyself
43747e308b fix: preserve paginated assets during incremental history updates
updateHistory() reset all pagination state (offset=0, cleared items) on
every job event, causing assets loaded beyond the first batch to vanish
during infinite scroll.

Refactor to incremental prepend approach: subsequent updateHistory()
calls fetch page 1 and merge only new items. Track prepended count
between pagination calls so loadMoreHistory() adjusts offset to avoid
duplicates from shifted data.

Amp-Thread-ID: https://ampcode.com/threads/T-019c7a21-9e2b-73dc-81bf-d7c64574345c
2026-03-12 02:01:14 -07:00
2 changed files with 222 additions and 61 deletions

View File

@@ -515,6 +515,142 @@ describe('assetsStore - Refactored (Option A)', () => {
})
})
// Regression suite for the incremental updateHistory() refactor:
// - paginated items must survive subsequent updateHistory() refreshes,
// - overlapping fetches must not create duplicates,
// - a "prepended since last load" drift counter must keep loadMoreHistory()
//   offsets aligned with server-side data that shifted under us.
// NOTE(review): relies on `store`, `api`, and `createMockJobItem` from the
// surrounding test file (not visible here); mock responses are queued with
// mockResolvedValueOnce, so call order is significant.
describe('Incremental Update', () => {
it('should preserve loaded items when updateHistory is called after pagination', async () => {
// Use indices 100-299 for page1 so there's room for a "newer" item
const page1 = Array.from({ length: 200 }, (_, i) =>
createMockJobItem(100 + i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(page1)
await store.updateHistory()
// Load a second page via infinite scroll (items 300-499).
const page2 = Array.from({ length: 200 }, (_, i) =>
createMockJobItem(300 + i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(page2)
await store.loadMoreHistory()
expect(store.historyAssets).toHaveLength(400)
// Index 0 → Date.now() - 0ms, which is newer than index 100+
const newJob = createMockJobItem(0)
// Server's first page now contains the new job plus 199 already-seen items.
const refreshedPage1 = [newJob, ...page1.slice(0, 199)]
vi.mocked(api.getHistory).mockResolvedValueOnce(refreshedPage1)
await store.updateHistory()
// Old bug: this refresh wiped page2; now only the new item is merged in.
expect(store.historyAssets).toHaveLength(401)
expect(store.historyAssets[0].id).toBe('prompt_0')
})
it('should not duplicate items when updateHistory fetches overlapping data', async () => {
const page1 = Array.from({ length: 200 }, (_, i) => createMockJobItem(i))
vi.mocked(api.getHistory).mockResolvedValueOnce(page1)
await store.updateHistory()
// Second refresh returns the exact same items — count must not grow.
vi.mocked(api.getHistory).mockResolvedValueOnce([...page1])
await store.updateHistory()
expect(store.historyAssets).toHaveLength(200)
})
it('should adjust offset for loadMore after new items are prepended', async () => {
const page1 = Array.from({ length: 200 }, (_, i) => createMockJobItem(i))
vi.mocked(api.getHistory).mockResolvedValueOnce(page1)
await store.updateHistory()
// 5 new jobs arrive; the refreshed first page pushes 5 old items off it.
const newJobs = Array.from({ length: 5 }, (_, i) =>
createMockJobItem(5000 + i)
)
const refreshedPage1 = [...newJobs, ...page1.slice(0, 195)]
vi.mocked(api.getHistory).mockResolvedValueOnce(refreshedPage1)
await store.updateHistory()
const page2 = Array.from({ length: 200 }, (_, i) =>
createMockJobItem(200 + i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(page2)
await store.loadMoreHistory()
// Next page request must shift by the 5 prepended items: 200 + 5 = 205.
expect(api.getHistory).toHaveBeenLastCalledWith(200, { offset: 205 })
})
it('should preserve drift counter when loadMore fails', async () => {
const page1 = Array.from({ length: 200 }, (_, i) => createMockJobItem(i))
vi.mocked(api.getHistory).mockResolvedValueOnce(page1)
await store.updateHistory()
const newJobs = Array.from({ length: 3 }, (_, i) =>
createMockJobItem(6000 + i)
)
const refreshedPage1 = [...newJobs, ...page1.slice(0, 197)]
vi.mocked(api.getHistory).mockResolvedValueOnce(refreshedPage1)
await store.updateHistory()
// loadMore fails — drift counter should be preserved
vi.mocked(api.getHistory).mockRejectedValueOnce(new Error('fail'))
await store.loadMoreHistory()
// Retry loadMore — should still include the drift adjustment
const page2 = Array.from({ length: 200 }, (_, i) =>
createMockJobItem(200 + i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(page2)
await store.loadMoreHistory()
// Drift of 3 must survive the failed attempt: offset 200 + 3 = 203.
expect(api.getHistory).toHaveBeenLastCalledWith(200, { offset: 203 })
})
it('should re-enable pagination when history grows past one page', async () => {
// Initial load with fewer than BATCH_SIZE items — pagination exhausted
const smallPage = Array.from({ length: 50 }, (_, i) =>
createMockJobItem(i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(smallPage)
await store.updateHistory()
expect(store.hasMoreHistory).toBe(false)
// Many new jobs arrive — first page is now full (BATCH_SIZE items)
const fullPage = Array.from({ length: 200 }, (_, i) =>
createMockJobItem(7000 + i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(fullPage)
await store.updateHistory()
expect(store.hasMoreHistory).toBe(true)
})
it('should not accumulate drift when pagination is exhausted', async () => {
const smallPage = Array.from({ length: 50 }, (_, i) =>
createMockJobItem(i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(smallPage)
await store.updateHistory()
// Subsequent update with a few new items, still under BATCH_SIZE
const newJobs = Array.from({ length: 3 }, (_, i) =>
createMockJobItem(8000 + i)
)
const refreshed = [...newJobs, ...smallPage.slice(0, 47)]
vi.mocked(api.getHistory).mockResolvedValueOnce(refreshed)
await store.updateHistory()
// Re-enable pagination with a full page
const fullPage = Array.from({ length: 200 }, (_, i) =>
createMockJobItem(9000 + i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(fullPage)
await store.updateHistory()
// loadMore should use offset 200 (no drift accumulated while exhausted)
const page2 = Array.from({ length: 200 }, (_, i) =>
createMockJobItem(200 + i)
)
vi.mocked(api.getHistory).mockResolvedValueOnce(page2)
await store.loadMoreHistory()
expect(api.getHistory).toHaveBeenLastCalledWith(200, { offset: 200 })
})
})
describe('jobDetailView Support', () => {
it('should include outputCount and allOutputs in user_metadata', async () => {
const mockHistory = Array.from({ length: 5 }, (_, i) =>

View File

@@ -94,7 +94,7 @@ export const useAssetsStore = defineStore('assets', () => {
// Track assets currently being deleted (for loading overlay)
const deletingAssetIds = shallowReactive(new Set<string>())
const setAssetDeleting = (assetId: string, isDeleting: boolean) => {
function setAssetDeleting(assetId: string, isDeleting: boolean) {
if (isDeleting) {
deletingAssetIds.add(assetId)
} else {
@@ -102,7 +102,7 @@ export const useAssetsStore = defineStore('assets', () => {
}
}
const isAssetDeleting = (assetId: string): boolean => {
function isAssetDeleting(assetId: string): boolean {
return deletingAssetIds.has(assetId)
}
@@ -110,6 +110,7 @@ export const useAssetsStore = defineStore('assets', () => {
const historyOffset = ref(0)
const hasMoreHistory = ref(true)
const isLoadingMore = ref(false)
const prependedSinceLastLoad = ref(0)
const allHistoryItems = ref<AssetItem[]>([])
@@ -133,67 +134,92 @@ export const useAssetsStore = defineStore('assets', () => {
})
/**
* Fetch history assets with pagination support
* @param loadMore - true for pagination (append), false for initial load (replace)
* Insert an asset into the sorted list at the correct position (newest first).
* Skips duplicates already tracked in loadedIds.
* @returns true if the asset was inserted, false if it was a duplicate.
*/
const fetchHistoryAssets = async (loadMore = false): Promise<AssetItem[]> => {
// Reset state for initial load
if (!loadMore) {
historyOffset.value = 0
hasMoreHistory.value = true
allHistoryItems.value = []
loadedIds.clear()
}
function insertAssetSorted(asset: AssetItem): boolean {
if (loadedIds.has(asset.id)) return false
loadedIds.add(asset.id)
// Fetch from server with offset
const history = await api.getHistory(BATCH_SIZE, {
offset: historyOffset.value
})
const assetTime = new Date(asset.created_at ?? 0).getTime()
const insertIndex = allHistoryItems.value.findIndex(
(item) => new Date(item.created_at ?? 0).getTime() < assetTime
)
// Convert JobListItems to AssetItems
const newAssets = mapHistoryToAssets(history)
if (loadMore) {
// Filter out duplicates and insert in sorted order
for (const asset of newAssets) {
if (loadedIds.has(asset.id)) {
continue // Skip duplicates
}
loadedIds.add(asset.id)
// Find insertion index to maintain sorted order (newest first)
const assetTime = new Date(asset.created_at ?? 0).getTime()
const insertIndex = allHistoryItems.value.findIndex(
(item) => new Date(item.created_at ?? 0).getTime() < assetTime
)
if (insertIndex === -1) {
// Asset is oldest, append to end
allHistoryItems.value.push(asset)
} else {
// Insert at the correct position
allHistoryItems.value.splice(insertIndex, 0, asset)
}
}
if (insertIndex === -1) {
allHistoryItems.value.push(asset)
} else {
// Initial load: replace all
allHistoryItems.value = newAssets
newAssets.forEach((asset) => loadedIds.add(asset.id))
allHistoryItems.value.splice(insertIndex, 0, asset)
}
return true
}
// Update pagination state
historyOffset.value += BATCH_SIZE
hasMoreHistory.value = history.length === BATCH_SIZE
function trimToMaxItems() {
if (allHistoryItems.value.length > MAX_HISTORY_ITEMS) {
const removed = allHistoryItems.value.slice(MAX_HISTORY_ITEMS)
allHistoryItems.value = allHistoryItems.value.slice(0, MAX_HISTORY_ITEMS)
// Clean up Set
const removed = allHistoryItems.value.splice(MAX_HISTORY_ITEMS)
removed.forEach((item) => loadedIds.delete(item.id))
}
}
return allHistoryItems.value
/**
 * Fetch the first page of history. On initial load (empty state), replaces
 * all items. On subsequent calls, incrementally merges new items without
 * clearing existing pagination state — fixing the "disappearing assets" bug.
 *
 * Side effects: mutates allHistoryItems, loadedIds, historyOffset,
 * hasMoreHistory, and prependedSinceLastLoad (all from the enclosing store).
 * @throws propagates any rejection from api.getHistory (caller handles it).
 */
async function fetchHistoryIncremental(): Promise<void> {
const history = await api.getHistory(BATCH_SIZE, { offset: 0 })
const newAssets = mapHistoryToAssets(history)
if (allHistoryItems.value.length === 0) {
// Initial load: take the page wholesale and seed the dedup set.
allHistoryItems.value = newAssets
newAssets.forEach((asset) => loadedIds.add(asset.id))
historyOffset.value = BATCH_SIZE
// A full page implies the server may have more beyond it.
hasMoreHistory.value = history.length === BATCH_SIZE
} else {
// Refresh: merge only unseen items into the sorted list; count how many
// were actually inserted so pagination offsets can compensate.
let newCount = 0
for (const asset of newAssets) {
if (insertAssetSorted(asset)) newCount++
}
if (hasMoreHistory.value) {
// Pagination active: every prepended item shifts server-side pages.
prependedSinceLastLoad.value += newCount
} else if (history.length === BATCH_SIZE) {
// Previously exhausted, but the first page is full again — re-enable
// pagination and start drift tracking from a clean slate.
hasMoreHistory.value = true
prependedSinceLastLoad.value = 0
}
// NOTE(review): while pagination stays exhausted, drift is deliberately
// not accumulated (nothing to paginate against) — per commit message.
}
trimToMaxItems()
}
/**
 * Fetch the next page of history for infinite scroll pagination.
 * Adjusts offset to account for items prepended since the last page load.
 *
 * Side effects: mutates allHistoryItems, loadedIds, historyOffset,
 * hasMoreHistory, and prependedSinceLastLoad (all from the enclosing store).
 * On failure the drift counter is untouched (the throw happens before any
 * state mutation), so a retry still applies the same adjustment.
 * @throws propagates any rejection from api.getHistory (caller handles it).
 */
async function fetchHistoryNextPage(): Promise<void> {
// Snapshot the drift BEFORE awaiting: updateHistory() may prepend more
// items while this request is in flight.
const driftAtRequest = prependedSinceLastLoad.value
const adjustedOffset = historyOffset.value + driftAtRequest
const history = await api.getHistory(BATCH_SIZE, {
offset: adjustedOffset
})
const newAssets = mapHistoryToAssets(history)
// Subtract only the drift captured at request time so concurrent
// updateHistory() increments during the in-flight fetch are preserved.
prependedSinceLastLoad.value = Math.max(
0,
prependedSinceLastLoad.value - driftAtRequest
)
// insertAssetSorted() skips duplicates, so overlap with already-loaded
// items is harmless.
for (const asset of newAssets) {
insertAssetSorted(asset)
}
// Advance past the page we just consumed; consumed drift is now baked in.
historyOffset.value = adjustedOffset + BATCH_SIZE
// A short page means the server has no more items.
hasMoreHistory.value = history.length === BATCH_SIZE
trimToMaxItems()
}
const historyAssets = ref<AssetItem[]>([])
@@ -201,18 +227,19 @@ export const useAssetsStore = defineStore('assets', () => {
const historyError = ref<unknown>(null)
/**
* Initial load of history assets
* Load or refresh history assets. On first call, performs a full load.
* On subsequent calls (e.g. after job completion), incrementally merges
* new items without clearing existing pagination state.
*/
const updateHistory = async () => {
async function updateHistory() {
historyLoading.value = true
historyError.value = null
try {
await fetchHistoryAssets(false)
await fetchHistoryIncremental()
historyAssets.value = allHistoryItems.value
} catch (err) {
console.error('Error fetching history assets:', err)
historyError.value = err
// Keep existing data when error occurs
if (!historyAssets.value.length) {
historyAssets.value = []
}
@@ -224,20 +251,18 @@ export const useAssetsStore = defineStore('assets', () => {
/**
* Load more history items (infinite scroll)
*/
const loadMoreHistory = async () => {
// Guard: prevent concurrent loads and check if more items available
async function loadMoreHistory() {
if (!hasMoreHistory.value || isLoadingMore.value) return
isLoadingMore.value = true
historyError.value = null
try {
await fetchHistoryAssets(true)
await fetchHistoryNextPage()
historyAssets.value = allHistoryItems.value
} catch (err) {
console.error('Error loading more history:', err)
historyError.value = err
// Keep existing data when error occurs (consistent with updateHistory)
if (!historyAssets.value.length) {
historyAssets.value = []
}