ssb: If things time out because we're following a million accounts...recover ungracefully.

Cory McWilliams 2025-04-09 20:02:16 -04:00
parent 38d746b310
commit f72395756a
3 changed files with 65 additions and 57 deletions
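Both JavaScript hunks below make the same kind of change: a potentially slow database call is wrapped in try/catch so that a timeout (for instance, when the list of followed accounts is very large) is logged instead of aborting the surrounding work. A minimal sketch of that pattern, with illustrative names that are not taken from the repository:

// Sketch only: run a possibly slow async operation and fall back to a default
// value if it rejects (for example, on a query timeout).
async function queryWithFallback(runQuery, fallback) {
	let result = fallback;
	try {
		// runQuery is assumed to return a promise that may reject on timeout.
		result = await runQuery();
	} catch (e) {
		// Recover "ungracefully": log the error and carry on with the fallback.
		console.log(e);
	}
	return result;
}

// Example use, mirroring the news feed change below: an empty array stands in
// for the messages that could not be fetched in time.
// let more = await queryWithFallback(() => this.fetch_messages(null, last_start_time), []);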

View File

@@ -1,5 +1,5 @@
 {
 	"type": "tildefriends-app",
 	"emoji": "🦀",
-	"previous": "&YZCzXrfB6j+y0sXF4KspAibwjLsSCaMoB5rdO3mQl+Q=.sha256"
+	"previous": "&VE+OD1O5xdeiqipvbxXpmatUHu7dTbliuQpFnuZgsGQ=.sha256"
 }

View File

@@ -175,63 +175,67 @@ class TfElement extends LitElement {
 			}
 		}
-		let abouts = await tfrpc.rpc.query(
-			`
-				SELECT
-					messages.author, json(messages.content) AS content, messages.sequence
-				FROM
-					messages,
-					json_each(?1) AS following
-				WHERE
-					messages.author = following.value AND
-					messages.content ->> 'type' = 'about' AND
-					messages.rowid > ?3 AND
-					messages.rowid <= ?4
-				UNION
-				SELECT
-					messages.author, json(messages.content) AS content, messages.sequence
-				FROM
-					messages,
-					json_each(?2) AS following
-				WHERE
-					messages.author = following.value AND
-					messages.content ->> 'type' = 'about' AND
-					messages.rowid <= ?4
-				ORDER BY messages.author, messages.sequence
-			`,
-			[
-				JSON.stringify(ids.filter((id) => cache.about[id])),
-				JSON.stringify(ids.filter((id) => !cache.about[id])),
-				cache.last_row_id,
-				max_row_id,
-			]
-		);
-		for (let about of abouts) {
-			let content = JSON.parse(about.content);
-			if (content.about === about.author) {
-				delete content.type;
-				delete content.about;
-				cache.about[about.author] = Object.assign(
-					cache.about[about.author] || {},
-					content
-				);
-			}
-		}
-		cache.last_row_id = max_row_id;
-		let new_cache = JSON.stringify(cache);
-		if (new_cache !== original_cache) {
-			let start_time = new Date();
-			tfrpc.rpc.databaseSet('about', new_cache).then(function () {
-				console.log('saving about took', (new Date() - start_time) / 1000);
-			});
-		}
-		users = users || {};
-		for (let id of Object.keys(cache.about)) {
-			users[id] = Object.assign(
-				{follow_depth: following[id]?.d},
-				users[id] || {},
-				cache.about[id]
-			);
-		}
+		try {
+			let abouts = await tfrpc.rpc.query(
+				`
+					SELECT
+						messages.author, json(messages.content) AS content, messages.sequence
+					FROM
+						messages,
+						json_each(?1) AS following
+					WHERE
+						messages.author = following.value AND
+						messages.content ->> 'type' = 'about' AND
+						messages.rowid > ?3 AND
+						messages.rowid <= ?4
+					UNION
+					SELECT
+						messages.author, json(messages.content) AS content, messages.sequence
+					FROM
+						messages,
+						json_each(?2) AS following
+					WHERE
+						messages.author = following.value AND
+						messages.content ->> 'type' = 'about' AND
+						messages.rowid <= ?4
+					ORDER BY messages.author, messages.sequence
+				`,
+				[
+					JSON.stringify(ids.filter((id) => cache.about[id])),
+					JSON.stringify(ids.filter((id) => !cache.about[id])),
+					cache.last_row_id,
+					max_row_id,
+				]
+			);
+			for (let about of abouts) {
+				let content = JSON.parse(about.content);
+				if (content.about === about.author) {
+					delete content.type;
+					delete content.about;
+					cache.about[about.author] = Object.assign(
+						cache.about[about.author] || {},
+						content
+					);
+				}
+			}
+			cache.last_row_id = max_row_id;
+			let new_cache = JSON.stringify(cache);
+			if (new_cache !== original_cache) {
+				let start_time = new Date();
+				tfrpc.rpc.databaseSet('about', new_cache).then(function () {
+					console.log('saving about took', (new Date() - start_time) / 1000);
+				});
+			}
+			users = users || {};
+			for (let id of Object.keys(cache.about)) {
+				users[id] = Object.assign(
+					{follow_depth: following[id]?.d},
+					users[id] || {},
+					cache.about[id]
+				);
+			}
+		} catch (e) {
+			console.log(e);
+		}
 		return Object.assign({}, users);
 	}
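One consequence of the hunk above: `cache.last_row_id = max_row_id;` now runs inside the try block, so if the 'about' query times out the cache's high-water mark is not advanced and the function still falls through to return the already-cached profile data; the missed row range is retried on the next refresh. A reduced sketch of that control flow, with illustrative names that are not from the repository:

// Sketch: advance the cache high-water mark only when the query succeeds; on
// a timeout the cache is left untouched and the same rows are queried again
// on the next call.
async function refreshAboutCache(cache, queryAbouts, max_row_id) {
	try {
		let abouts = await queryAbouts(cache.last_row_id, max_row_id);
		for (let about of abouts) {
			// Simplified merge; the real code also checks content.about ===
			// about.author and strips the 'type' and 'about' keys first.
			cache.about[about.author] = Object.assign(
				cache.about[about.author] || {},
				JSON.parse(about.content)
			);
		}
		cache.last_row_id = max_row_id; // only reached on success
	} catch (e) {
		console.log(e); // query timed out or failed: keep the stale cache
	}
	return cache;
}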

View File

@@ -253,7 +253,11 @@ class TfTabNewsFeedElement extends LitElement {
 		try {
 			let more = [];
 			let last_start_time = this.time_range[0];
-			more = await this.fetch_messages(null, last_start_time);
+			try {
+				more = await this.fetch_messages(null, last_start_time);
+			} catch (e) {
+				console.log(e);
+			}
 			this.update_time_range_from_messages(
 				more.filter((x) => x.timestamp < last_start_time)
 			);