fix: Fix favorites, fix #850
This commit fixes the invalid assumption that all timelines are sorted by status id. Some, like favorites or bookmarks, are sorted by a private server-side id instead, so to paginate them correctly we must follow the Link header. As a workaround, offline support for favorites is effectively disabled: statuses are still inserted into the database, but we can't reproduce the correct timeline order from it.
parent 8bbe372fda
commit 5e7c8003db
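For context, the pagination scheme this commit switches to works like this: request a page, read the response's Link header, take its rel="next" entry, and use that URI's max_id query parameter as the cursor for the next request. A minimal sketch using the http-link-header dependency added here; the getNextMaxId helper and the example header value are illustrative only, not part of this commit:

// Sketch of Link-header pagination (illustrative; getNextMaxId and the
// example header are made up, LinkHeader comes from http-link-header).
import LinkHeader from 'http-link-header'

function getNextMaxId (linkHeaderValue) {
  if (!linkHeaderValue) {
    return null
  }
  // rel('next') returns the refs whose rel is "next"; Mastodon encodes the
  // pagination cursor as the max_id query parameter of that URI.
  const next = LinkHeader.parse(linkHeaderValue).rel('next')[0]
  return (next && next.uri && new URL(next.uri).searchParams.get('max_id')) || null
}

// Hypothetical header value, as a server might return it:
const header = '<https://example.social/api/v1/favourites?max_id=123456>; rel="next"'
console.log(getNextMaxId(header)) // => '123456'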
package-lock.json (generated) — 12248 lines
File diff suppressed because it is too large
@@ -116,6 +116,7 @@
     "assert": "^2.0.0",
     "eslint-plugin-html": "^6.0.0",
     "fake-indexeddb": "^3.0.0",
+    "http-link-header": "^1.0.2",
     "mocha": "^7.1.0",
     "now": "^18.0.0",
     "standard": "^14.3.1",
@@ -14,6 +14,7 @@ import uniqBy from 'lodash-es/uniqBy'
 import { addStatusesOrNotifications } from './addStatusOrNotification'
 import { scheduleIdleTask } from '../_utils/scheduleIdleTask'
 import { sortItemSummariesForThread } from '../_utils/sortItemSummariesForThread'
+import LinkHeader from 'http-link-header'

 const byId = _ => _.id

@@ -90,12 +91,44 @@ async function fetchTimelineItemsFromNetwork (instanceName, accessToken, timelin
   if (timelineName.startsWith('status/')) { // special case - this is a list of descendents and ancestors
     return fetchThreadFromNetwork(instanceName, accessToken, timelineName)
   } else { // normal timeline
-    return getTimeline(instanceName, accessToken, timelineName, lastTimelineItemId, null, TIMELINE_BATCH_SIZE)
+    const { items } = await getTimeline(instanceName, accessToken, timelineName, lastTimelineItemId, null, TIMELINE_BATCH_SIZE)
+    return items
   }
 }

+async function addPagedTimelineItems (instanceName, timelineName, items) {
+  console.log('addPagedTimelineItems, length:', items.length)
+  mark('addPagedTimelineItemSummaries')
+  const newSummaries = items.map(timelineItemToSummary)
+  addPagedTimelineItemSummaries(instanceName, timelineName, newSummaries)
+  stop('addPagedTimelineItemSummaries')
+}
+
+export async function addPagedTimelineItemSummaries (instanceName, timelineName, newSummaries) {
+  const oldSummaries = store.getForTimeline(instanceName, timelineName, 'timelineItemSummaries') || []
+
+  const mergedSummaries = uniqBy(concat(oldSummaries, newSummaries), byId)
+
+  if (!isEqual(oldSummaries, mergedSummaries)) {
+    store.setForTimeline(instanceName, timelineName, { timelineItemSummaries: mergedSummaries })
+  }
+}
+
-async function fetchTimelineItems (instanceName, accessToken, timelineName, lastTimelineItemId, online) {
+async function fetchPagedItems (instanceName, accessToken, timelineName) {
+  const { timelineNextPageId } = store.get()
+  console.log('saved timelineNextPageId', timelineNextPageId)
+  const { items, headers } = await getTimeline(instanceName, accessToken, timelineName, timelineNextPageId, null, TIMELINE_BATCH_SIZE)
+  const linkHeader = headers.get('Link')
+  const next = LinkHeader.parse(linkHeader).rel('next')[0]
+  const nextId = next && next.uri && (new URL(next.uri)).searchParams.get('max_id')
+  console.log('new timelineNextPageId', nextId)
+  store.setForTimeline(instanceName, timelineName, { timelineNextPageId: nextId })
+  await storeFreshTimelineItemsInDatabase(instanceName, timelineName, items)
+  await addPagedTimelineItems(instanceName, timelineName, items)
+}
+
+async function fetchTimelineItems (instanceName, accessToken, timelineName, online) {
   mark('fetchTimelineItems')
+  const { lastTimelineItemId } = store.get()
   let items
   let stale = false
   if (!online) {
@@ -146,12 +179,15 @@ async function fetchTimelineItemsAndPossiblyFallBack () {
     currentTimeline,
     currentInstance,
     accessToken,
-    lastTimelineItemId,
     online
   } = store.get()

-  const { items, stale } = await fetchTimelineItems(currentInstance, accessToken, currentTimeline, lastTimelineItemId, online)
-  addTimelineItems(currentInstance, currentTimeline, items, stale)
+  if (currentTimeline === 'favorites') {
+    await fetchPagedItems(currentInstance, accessToken, currentTimeline)
+  } else {
+    const { items, stale } = await fetchTimelineItems(currentInstance, accessToken, currentTimeline, online)
+    addTimelineItems(currentInstance, currentTimeline, items, stale)
+  }
   stop('fetchTimelineItemsAndPossiblyFallBack')
 }

@@ -1,4 +1,4 @@
-import { get, paramsString, DEFAULT_TIMEOUT } from '../_utils/ajax'
+import { getWithHeaders, paramsString, DEFAULT_TIMEOUT } from '../_utils/ajax'
 import { auth, basename } from './utils'

 function getTimelineUrlPath (timeline) {
@@ -69,10 +69,10 @@ export async function getTimeline (instanceName, accessToken, timeline, maxId, s
   url += '?' + paramsString(params)

   console.log('fetching url', url)
-  const items = await get(url, auth(accessToken), { timeout: DEFAULT_TIMEOUT })
+  const { json: items, headers } = await getWithHeaders(url, auth(accessToken), { timeout: DEFAULT_TIMEOUT })

   if (timeline === 'direct') {
     return items.map(item => item.last_status)
   }
-  return items
+  return { items, headers }
 }
@@ -56,6 +56,7 @@ export function timelineComputations (store) {
   computeForTimeline(store, 'showHeader', false)
   computeForTimeline(store, 'shouldShowHeader', false)
   computeForTimeline(store, 'timelineItemSummariesAreStale', false)
+  computeForTimeline(store, 'timelineNextPageId', null)

   store.compute('currentTimelineType', ['currentTimeline'], currentTimeline => (
     currentTimeline && currentTimeline.split('/')[0])
@@ -30,7 +30,7 @@ async function throwErrorIfInvalidResponse (response) {
   }
   const json = await response.json()
   if (response.status >= 200 && response.status < 300) {
-    return json
+    return { json, headers: response.headers }
   }
   if (json && json.error) {
     throw new Error(response.status + ': ' + json.error)
@@ -74,6 +74,12 @@ export async function patch (url, body, headers, options) {
 }

 export async function get (url, headers, options) {
+  const { json } = await _fetch(url, makeFetchOptions('GET', headers, options), options)
+  return json
+}
+
+/** @returns {json, headers} */
+export async function getWithHeaders (url, headers, options) {
   return _fetch(url, makeFetchOptions('GET', headers, options), options)
 }
