possible query optimization for random large datasets
@@ -25,8 +25,7 @@ export default router => {
   const hasSession = !!req.session;
   const modequery = mime.startsWith("audio") ? lib.getMode(0) : lib.getMode(req.session?.mode ?? 0);
 
-  const rows = await db`
-    select "items".*
+  const baseQuery = db`
     from "items"
     ${isFav
       ? db`join "favorites" on "favorites".item_id = "items".id join "user" as fu on fu.id = "favorites".user_id`
@@ -41,8 +40,26 @@ export default router => {
     ${isFav ? db`and fu."user" = ${user}` : db`and items.username ilike ${user}`}
     ${tag ? db`and tags.normalized ilike '%' || slugify(${tag}) || '%'` : db``}
     ${!hasSession && globalfilter ? db`and not exists (select 1 from tags_assign where item_id = items.id and (${db.unsafe(globalfilter)}))` : db``}
-    order by random()
-    limit 1
   `;
 
+  const count = await db`
+    select count(*) as total
+    ${baseQuery}
+  `;
+
+  if (count[0].total == 0) {
+    return res.json({
+      success: false,
+      items: []
+    });
+  }
+
+  const offset = Math.floor(Math.random() * count[0].total);
+
+  const rows = await db`
+    select "items".*
+    ${baseQuery}
+    limit 1 offset ${offset}
+  `;
+
   return res.json({
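The change keeps the shared filter fragment in a reusable `baseQuery` and swaps the full-set `order by random() limit 1` for one `count(*)` pass plus a single-row fetch at a random `offset`. As a standalone illustration of that pattern, here is a minimal sketch assuming the `postgres` npm client (which supports interpolating query fragments like `${baseQuery}`, exactly as the route code does); the `randomItem` helper, the connection string, and the `mime` filter are illustrative, not part of the commit.

```js
// Standalone sketch of the count + random-offset pattern used above.
// Assumes the "postgres" npm client; table and connection details are
// hypothetical.
import postgres from "postgres";

const db = postgres("postgres://localhost/example");

async function randomItem() {
  // Reusable fragment: everything after the select list, so the same
  // filters feed both the count and the row fetch.
  const baseQuery = db`from "items" where "items".mime ilike ${"image/%"}`;

  // Single aggregate pass; nothing gets sorted.
  const count = await db`select count(*) as total ${baseQuery}`;
  if (count[0].total == 0) return null; // count(*) arrives as a string

  // Uniformly random row via offset: Postgres only has to skip past
  // `offset` rows instead of computing and sorting random() for every
  // matching row.
  const offset = Math.floor(Math.random() * Number(count[0].total));
  const rows = await db`select "items".* ${baseQuery} limit 1 offset ${offset}`;
  return rows[0] ?? null;
}
```

The trade-off: two round trips and a `count(*)` scan instead of one query, and `offset` itself still walks the skipped rows. For large filtered sets that is usually still much cheaper than `order by random()`, which must evaluate and sort a random value for every matching row, which fits the commit's hedged "possible query optimization" framing. There is also a small race if matching rows disappear between the two queries, which the `rows[0] ?? null` guard in the sketch papers over.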