Possible performance improvement for random item selection (replace COUNT + OFFSET with an ID-seek strategy)

This commit is contained in:
2026-01-26 09:47:05 +01:00
parent dbcf39c3ba
commit a5acc22c4a
2 changed files with 22 additions and 15 deletions

View File

@@ -0,0 +1 @@
-- Index on tags_assign.tag_id — presumably to speed up tag-based joins/filters
-- against tags_assign. NOTE(review): the random-item route visible in this commit
-- filters tags_assign by item_id, not tag_id, so confirm which queries this index
-- actually serves (check query plans). CONCURRENTLY builds the index without
-- blocking writes, but cannot run inside a transaction block — run it standalone.
CREATE INDEX CONCURRENTLY IF NOT EXISTS tags_assign_tag_id_idx ON public.tags_assign (tag_id);

View File

@@ -25,7 +25,15 @@ export default router => {
const hasSession = !!req.session;
const modequery = mime.startsWith("audio") ? lib.getMode(0) : lib.getMode(req.session?.mode ?? 0);
// ID Seek Strategy
const maxIdResult = await db`select max(id) as id from "items"`;
const maxId = maxIdResult[0].id || 0;
const randomId = Math.floor(Math.random() * maxId);
// Reusable query parts
const baseQuery = db`
select "items".*
from "items"
${isFav
? db`join "favorites" on "favorites".item_id = "items".id join "user" as fu on fu.id = "favorites".user_id`
@@ -42,26 +50,24 @@ export default router => {
${!hasSession && globalfilter ? db`and not exists (select 1 from tags_assign where item_id = items.id and (${db.unsafe(globalfilter)}))` : db``}
`;
const count = await db`
select count(*) as total
// Try seeking forward from random ID
let rows = await db`
${baseQuery}
and "items".id >= ${randomId}
order by "items".id asc
limit 1
`;
if (count[0].total == 0) {
return res.json({
success: false,
items: []
});
// Fallback: wrap around if nothing found
if (rows.length === 0) {
rows = await db`
${baseQuery}
and "items".id >= 0
order by "items".id asc
limit 1
`;
}
const offset = Math.floor(Math.random() * count[0].total);
const rows = await db`
select "items".*
${baseQuery}
limit 1 offset ${offset}
`;
return res.json({
success: rows.length > 0,
items: rows.length > 0 ? rows[0] : []