ssb: Switch to a more brute-force about-gathering approach. I think if I start with this and avoid querying all known accounts up-front, we will be plenty fast.
Some checks failed: Build Tilde Friends / Build-All (push) failing after 5m12s

Cory McWilliams 2025-05-08 12:39:26 -04:00
parent c93b8fc045
commit d367d47c4d
3 changed files with 31 additions and 130 deletions


@@ -1,5 +1,5 @@
 {
 	"type": "tildefriends-app",
 	"emoji": "🦀",
-	"previous": "&gO8ganUDJtyGtfKUPRTbK2d34GJXhHKvHIZheF/lXI4=.sha256"
+	"previous": "&2TUIbhXFJWMc6NTZt2Q0RB004ufTD+6CuaC5i2DdpsU=.sha256"
 }


@@ -152,137 +152,36 @@ class TfElement extends LitElement {
 	async fetch_about(following, users) {
 		let ids = Object.keys(following).sort();
-		const k_cache_version = 1;
-		let cache = await tfrpc.rpc.databaseGet('about');
-		let original_cache = cache;
-		cache = cache ? JSON.parse(cache) : {};
-		if (cache.version !== k_cache_version) {
-			cache = {
-				version: k_cache_version,
-				about: {},
-				last_row_id: 0,
-			};
-		}
-		let max_row_id = (
-			await tfrpc.rpc.query(
-				`
-					SELECT MAX(rowid) AS max_row_id FROM messages
-				`,
-				[]
-			)
-		)[0].max_row_id;
-		for (let id of Object.keys(cache.about)) {
-			if (ids.indexOf(id) == -1) {
-				delete cache.about[id];
-			}
-		}
-		const k_account_chunk_size = 4096;
-		const k_chunk_size = 1024;
-		let min_row_id = 0;
-		console.log(
-			'loading about for',
-			ids.length,
-			'accounts',
-			cache.last_row_id,
-			'=>',
-			max_row_id
-		);
+		console.log('loading about for', ids.length, 'accounts');
 		try {
-			while (true) {
-				let max_seen;
-				for (
-					let account_chunk = 0;
-					account_chunk < ids.length;
-					account_chunk += k_account_chunk_size
-				) {
-					let ids_chunk = ids.slice(
-						account_chunk,
-						account_chunk + k_account_chunk_size
-					);
-					let abouts = await tfrpc.rpc.query(
-						`
-							WITH
-							past AS (
-								SELECT
-									messages.rowid AS rowid, messages.author, json(messages.content) AS content, messages.sequence
-								FROM
-									messages
-								WHERE
-									messages.rowid > ?3 AND
-									messages.rowid <= ?4 AND
-									messages.content ->> 'type' = 'about'
-							),
-							current AS (
-								SELECT
-									messages.rowid AS rowid, messages.author, json(messages.content) AS content, messages.sequence
-								FROM
-									messages
-								WHERE
-									messages.rowid > ?6 AND
-									messages.rowid <= ?4 AND
-									messages.content ->> 'type' = 'about'
-							)
-							SELECT * FROM past
-							JOIN json_each(?1) AS following
-							ON past.author = following.value
-							UNION SELECT * FROM current
-							JOIN json_each(?2) AS following
-							ON current.author = following.value
-							ORDER BY rowid LIMIT ?5
-						`,
-						[
-							JSON.stringify(ids_chunk.filter((id) => cache.about[id])),
-							JSON.stringify(ids_chunk.filter((id) => !cache.about[id])),
-							cache.last_row_id,
-							max_row_id,
-							k_chunk_size,
-							min_row_id,
-						]
-					);
-					for (let about of abouts) {
-						let content = JSON.parse(about.content);
-						if (content.about === about.author) {
-							delete content.type;
-							delete content.about;
-							cache.about[about.author] = Object.assign(
-								cache.about[about.author] || {},
-								content
-							);
-						}
-						max_seen = about.rowid;
-					}
-					console.log(account_chunk, '/', ids.length, 'accounts');
-				}
-				console.log(
-					'cache =',
-					cache.last_row_id,
-					'seen =',
-					max_seen,
-					'max =',
-					max_row_id
-				);
-				cache.last_row_id = Math.max(cache.last_row_id, max_seen ?? max_row_id);
-				min_row_id = Math.max(min_row_id, max_seen ?? max_row_id);
-				let new_cache = JSON.stringify(cache);
-				if (new_cache !== original_cache) {
-					let start_time = new Date();
-					tfrpc.rpc.databaseSet('about', new_cache).then(function () {
-						console.log('saving about took', (new Date() - start_time) / 1000);
-					});
-				}
-				users = users || {};
-				for (let id of Object.keys(cache.about)) {
-					users[id] = Object.assign(
-						{follow_depth: following[id]?.d},
-						users[id] || {},
-						cache.about[id]
-					);
-				}
-				if (cache.last_row_id >= max_row_id) {
-					break;
-				}
-			}
+			let rows = await tfrpc.rpc.query(
+				`
+				SELECT all_abouts.author, json(json_group_object(all_abouts.key, all_abouts.value)) AS about
+				FROM (
+					SELECT
+						messages.author,
+						fields.key,
+						RANK() OVER (PARTITION BY messages.author, fields.key ORDER BY messages.sequence DESC) AS rank,
+						fields.value
+					FROM messages JOIN json_each(messages.content) AS fields
+					WHERE
+						messages.content ->> '$.type' = 'about' AND
+						messages.content ->> '$.about' = messages.author AND
+						NOT fields.key IN ('about', 'type')) all_abouts
+				WHERE rank = 1
+				GROUP BY all_abouts.author
+				`,
+				[JSON.stringify(ids)]
+			);
+			users = users || {};
+			for (let row of rows) {
+				users[row.author] = Object.assign(
+					{follow_depth: following[row.author]?.d},
+					users[row.author] || {},
+					JSON.parse(row.about)
+				);
+			}
+			console.log('updated users');
 		} catch (e) {
 			console.log(e);
 		}
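The heart of the change is that single query: for each author it ranks every self-assigned 'about' field by message sequence, keeps only the newest value of each field (rank = 1), and folds the survivors back into one JSON object per author with json_group_object. The standalone sketch below shows the same technique against a made-up messages table; the schema and sample rows are illustrative only (just the author, sequence, and content columns the query touches), and it assumes a reasonably recent SQLite (3.38+ for the ->> operator), so it can be pasted into a scratch sqlite3 shell.

-- Hypothetical scratch schema: only the columns the query relies on.
CREATE TABLE messages (author TEXT, sequence INTEGER, content TEXT);
INSERT INTO messages VALUES
	('@alice', 1, '{"type":"about","about":"@alice","name":"alice","description":"old"}'),
	('@alice', 7, '{"type":"about","about":"@alice","description":"new"}'),
	('@bob', 3, '{"type":"about","about":"@bob","name":"bob"}'),
	('@bob', 4, '{"type":"about","about":"@carol","name":"ignored"}'); -- not self-assigned, filtered out

-- Same shape as the query in the diff: rank each (author, key) pair by sequence,
-- keep rank = 1 (the latest value), and regroup the surviving fields per author.
SELECT all_abouts.author,
	json(json_group_object(all_abouts.key, all_abouts.value)) AS about
FROM (
	SELECT
		messages.author,
		fields.key,
		RANK() OVER (PARTITION BY messages.author, fields.key ORDER BY messages.sequence DESC) AS rank,
		fields.value
	FROM messages JOIN json_each(messages.content) AS fields
	WHERE
		messages.content ->> '$.type' = 'about' AND
		messages.content ->> '$.about' = messages.author AND
		NOT fields.key IN ('about', 'type')) all_abouts
WHERE rank = 1
GROUP BY all_abouts.author;

-- Expected: @alice ends up with name 'alice' (sequence 1) and description 'new' (sequence 7);
-- @bob keeps name 'bob'; the non-self-assigned row about '@carol' is ignored.

On the app side, the loop in the diff then merges JSON.parse(row.about) into users[row.author], so callers see the same merged profile data the old incremental cache produced, just recomputed on each load instead of persisted between runs.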


@@ -98,7 +98,9 @@ class TfTabNewsElement extends LitElement {
 	unread_status(channel) {
 		if (channel === undefined) {
-			if (Object.keys(this.channels_unread).some(x => this.unread_status(x))) {
+			if (
+				Object.keys(this.channels_unread).some((x) => this.unread_status(x))
+			) {
 				return '✉️ ';
 			}
 		} else if (