Implement web upload with admin-approval functionality

This commit is contained in:
x
2026-01-23 23:35:12 +01:00
parent 42f4e19897
commit 577d73af11
9 changed files with 1433 additions and 137 deletions

View File

@@ -5,7 +5,7 @@ import { promises as fs } from "fs";
export default (router, tpl) => {
router.get(/^\/login(\/)?$/, async (req, res) => {
if(req.cookies.session) {
if (req.cookies.session) {
return res.reply({
body: tpl.render('error', {
message: "you're already logged in lol",
@@ -17,7 +17,7 @@ export default (router, tpl) => {
body: tpl.render("login", { theme: req.cookies.theme ?? "f0ck" })
});
});
router.post(/^\/login(\/)?$/, async (req, res) => {
const user = await db`
select *
@@ -25,9 +25,9 @@ export default (router, tpl) => {
where "login" = ${req.post.username.toLowerCase()}
limit 1
`;
if(user.length === 0)
if (user.length === 0)
return res.reply({ body: "user doesn't exist or wrong password" });
if(!(await lib.verify(req.post.password, user[0].password)))
if (!(await lib.verify(req.post.password, user[0].password)))
return res.reply({ body: "user doesn't exist or wrong password" });
const stamp = ~~(Date.now() / 1e3);
@@ -36,7 +36,7 @@ export default (router, tpl) => {
where last_action <= ${(Date.now() - 6048e5)}
and kmsi = 0
`;
const session = lib.md5(lib.createID());
const blah = {
user_id: user[0].id,
@@ -49,8 +49,7 @@ export default (router, tpl) => {
};
await db`
insert into "user_sessions" ${
db(blah, 'user_id', 'session', 'browser', 'created_at', 'last_used', 'last_action', 'kmsi')
insert into "user_sessions" ${db(blah, 'user_id', 'session', 'browser', 'created_at', 'last_used', 'last_action', 'kmsi')
}
`;
@@ -60,16 +59,16 @@ export default (router, tpl) => {
"Location": "/"
}).end();
});
router.get(/^\/logout$/, lib.loggedin, async (req, res) => {
const usersession = await db`
select *
from "user_sessions"
where id = ${+req.session.sess_id}
`;
if(usersession.length === 0)
if (usersession.length === 0)
return res.reply({ body: "nope 2" });
await db`
delete from "user_sessions"
where id = ${+req.session.sess_id}
@@ -80,7 +79,7 @@ export default (router, tpl) => {
"Location": "/"
}).end();
});
router.get(/^\/login\/pwdgen$/, async (req, res) => {
res.reply({
body: "<form action=\"/login/pwdgen\" method=\"post\"><input type=\"text\" name=\"pwd\" placeholder=\"pwd\" /><input type=\"submit\" value=\"f0ck it\" /></form>"
@@ -102,7 +101,7 @@ export default (router, tpl) => {
}, req)
});
});
router.get(/^\/admin\/sessions(\/)?$/, lib.auth, async (req, res) => {
const rows = await db`
select "user_sessions".*, "user".user
@@ -110,7 +109,7 @@ export default (router, tpl) => {
left join "user" on "user".id = "user_sessions".user_id
order by "user_sessions".last_used desc
`;
res.reply({
body: tpl.render("admin/sessions", {
session: req.session,
@@ -121,79 +120,142 @@ export default (router, tpl) => {
});
});
// router.get(/^\/admin\/log(\/)?$/, lib.auth, async (req, res) => {
// // Funktioniert ohne systemd service natürlich nicht.
// exec("journalctl -qeu f0ck --no-pager", (err, stdout) => {
// res.reply({
// body: tpl.render("admin/log", {
// log: stdout.split("\n").slice(0, -1),
// tmp: null
// }, req)
// });
// });
// });
router.get(/^\/admin\/approve\/?/, lib.auth, async (req, res) => {
if (req.url.qs?.id) {
const id = +req.url.qs.id;
const f0ck = await db`
select dest, mime
from "items"
where
id = ${id} and
active = 'false'
limit 1
`;
if (f0ck.length === 0) {
return res.reply({
body: `f0ck ${id}: f0ck not found`
});
}
// router.get(/^\/admin\/recover\/?/, lib.auth, async (req, res) => {
// Gelöschte Objekte werden nicht aufgehoben.
// if(req.url.qs?.id) {
// const id = +req.url.qs.id;
// const f0ck = await db`
// select dest, mime
// from "items"
// where
// id = ${id} and
// active = 'false'
// limit 1
// `;
// if(f0ck.length === 0) {
// return res.reply({
// body: `f0ck ${id}: f0ck not found`
// });
// }
await db`update "items" set active = 'true' where id = ${id}`;
// await db`update "items" set active = 'true' where id = ${id}`;
// Check if files need moving (if they are in deleted/)
try {
await fs.access(`./public/b/${f0ck[0].dest}`);
// Exists in public, good (new upload)
} catch {
// Not in public, likely a deleted item being recovered
await fs.copyFile(`./deleted/b/${f0ck[0].dest}`, `./public/b/${f0ck[0].dest}`).catch(_ => { });
await fs.copyFile(`./deleted/t/${id}.webp`, `./public/t/${id}.webp`).catch(_ => { });
await fs.unlink(`./deleted/b/${f0ck[0].dest}`).catch(_ => { });
await fs.unlink(`./deleted/t/${id}.webp`).catch(_ => { });
// await fs.copyFile(`./deleted/b/${f0ck[0].dest}`, `./public/b/${f0ck[0].dest}`).catch(_=>{});
// await fs.copyFile(`./deleted/t/${id}.webp`, `./public/t/${id}.webp`).catch(_=>{});
// await fs.unlink(`./deleted/b/${f0ck[0].dest}`).catch(_=>{});
// await fs.unlink(`./deleted/t/${id}.webp`).catch(_=>{});
if (f0ck[0].mime.startsWith('audio')) {
await fs.copyFile(`./deleted/ca/${id}.webp`, `./public/ca/${id}.webp`).catch(_ => { });
await fs.unlink(`./deleted/ca/${id}.webp`).catch(_ => { });
}
}
// if(f0ck[0].mime.startsWith('audio')) {
// await fs.copyFile(`./deleted/ca/${id}.webp`, `./public/ca/${id}.webp`).catch(_=>{});
// await fs.unlink(`./deleted/ca/${id}.webp`).catch(_=>{});
// }
return res.writeHead(302, {
"Location": `/${id}`
}).end();
}
// return res.reply({
// body: `f0ck ${id} recovered. <a href="/admin/recover">back</a>`
// });
// }
const _posts = await db`
select id, mime, username, dest
from "items"
where
active = 'false'
order by id desc
`;
// const _posts = await db`
// select id, mime, username
// from "items"
// where
// active = 'false'
// order by id desc
// `;
if (_posts.length === 0) {
return res.reply({
body: tpl.render('admin/approve', { posts: [], tmp: null }, req)
});
}
// if(_posts.length === 0) {
// return res.reply({
// body: 'blah'
// });
// }
const posts = await Promise.all(_posts.map(async p => {
// Try to get thumbnail from public or deleted
let thumb;
try {
// Try public first
thumb = (await fs.readFile(`./public/t/${p.id}.webp`)).toString('base64');
} catch {
try {
thumb = (await fs.readFile(`./deleted/t/${p.id}.webp`)).toString('base64');
} catch {
thumb = ""; // No thumbnail?
}
}
return {
...p,
thumbnail: thumb
};
}));
// const posts = await Promise.all(_posts.map(async p => ({
// ...p,
// thumbnail: (await fs.readFile(`./deleted/t/${p.id}.webp`)).toString('base64')
// })));
res.reply({
body: tpl.render('admin/approve', {
posts,
tmp: null
}, req)
});
});
// res.reply({
// body: tpl.render('admin/recover', {
// posts,
// tmp: null
// }, req)
// });
// });
router.get(/^\/admin\/deny\/?/, lib.auth, async (req, res) => {
console.log('[ADMIN DENY] Logs initiated');
if (req.url.qs?.id) {
const id = +req.url.qs.id;
console.log(`[ADMIN DENY] Denying ID: ${id}`);
try {
const f0ck = await db`
select dest, mime
from "items"
where
id = ${id}
limit 1
`;
if (f0ck.length > 0) {
console.log(`[ADMIN DENY] Found item, deleting files: ${f0ck[0].dest}`);
// Delete files
await fs.unlink(`./public/b/${f0ck[0].dest}`).catch(e => console.log('File error pub/b:', e.message));
await fs.unlink(`./public/t/${id}.webp`).catch(e => console.log('File error pub/t:', e.message));
await fs.unlink(`./deleted/b/${f0ck[0].dest}`).catch(e => console.log('File error del/b:', e.message));
await fs.unlink(`./deleted/t/${id}.webp`).catch(e => console.log('File error del/t:', e.message));
if (f0ck[0].mime.startsWith('audio')) {
await fs.unlink(`./public/ca/${id}.webp`).catch(() => { });
await fs.unlink(`./deleted/ca/${id}.webp`).catch(() => { });
}
// Delete DB entries
console.log('[ADMIN DENY] Deleting DB entries...');
try {
await db`delete from "tags_assign" where item_id = ${id}`;
await db`delete from "favorites" where item_id = ${id}`;
await db`delete from "comments" where item_id = ${id}`.catch(() => { });
await db`delete from "items" where id = ${id}`;
console.log('[ADMIN DENY] Deleted successfully');
} catch (dbErr) {
console.error('[ADMIN DENY DB ERROR]', dbErr);
}
} else {
console.log('[ADMIN DENY] Item not found in DB');
}
} catch (err) {
console.error('[ADMIN DENY ERROR]', err);
}
return res.writeHead(302, {
"Location": `/admin/approve`
}).end();
}
console.log('[ADMIN DENY] No ID provided');
return res.writeHead(302, { "Location": "/admin/approve" }).end();
});
return router;
};

View File

@@ -0,0 +1,260 @@
import { promises as fs } from "fs";
import db from '../../sql.mjs';
import lib from '../../lib.mjs';
import cfg from '../../config.mjs';
import queue from '../../queue.mjs';
import path from "path";
// Native multipart form data parser
// Minimal multipart/form-data parser.
// Splits `buffer` on `--boundary` markers and returns a map of field name ->
// trimmed string value, or — for file parts — an object of the shape
// { filename, contentType, data } where `data` is the raw Buffer payload.
const parseMultipart = (buffer, boundary) => {
  const marker = Buffer.from(`--${boundary}`);
  const fields = {};

  // Collect the raw byte spans between consecutive boundary markers.
  const spans = [];
  let cursor = 0;
  for (let pos = buffer.indexOf(marker, cursor); pos !== -1; pos = buffer.indexOf(marker, cursor)) {
    if (cursor !== 0) {
      spans.push(buffer.slice(cursor, pos - 2)); // drop the \r\n preceding the marker
    }
    cursor = pos + marker.length + 2;            // skip the \r\n following the marker
  }

  for (const span of spans) {
    const split = span.indexOf('\r\n\r\n');
    if (split === -1) continue; // malformed part: no header/body separator

    const headerText = span.slice(0, split).toString();
    const payload = span.slice(split + 4);

    const name = headerText.match(/name="([^"]+)"/)?.[1];
    if (!name) continue;

    const filename = headerText.match(/filename="([^"]+)"/)?.[1];
    if (filename) {
      fields[name] = {
        filename,
        contentType: headerText.match(/Content-Type:\s*([^\r\n]+)/i)?.[1] ?? 'application/octet-stream',
        data: payload
      };
    } else {
      fields[name] = payload.toString().trim();
    }
  }
  return fields;
};
// Collect request body as buffer with debug logging
// Buffer the entire request body into a single Buffer, with debug logging.
// Resolves when the stream ends; rejects on a stream error.
const collectBody = (req) =>
  new Promise((resolve, reject) => {
    console.log('[UPLOAD DEBUG] collectBody started');
    const received = [];
    req.on('data', (piece) => {
      received.push(piece);
    });
    req.on('end', () => {
      console.log(`[UPLOAD DEBUG] Stream ended. Total size: ${received.reduce((acc, c) => acc + c.length, 0)}`);
      resolve(Buffer.concat(received));
    });
    req.on('error', (err) => {
      console.error('[UPLOAD DEBUG] Stream error:', err);
      reject(err);
    });
    // A paused stream never emits 'data'; kick it back into flowing mode.
    if (req.isPaused()) {
      console.log('[UPLOAD DEBUG] Stream was paused, resuming...');
      req.resume();
    }
  });
// Registers POST /api/v2/upload — authenticated web upload endpoint.
// Accepts multipart/form-data with fields:
//   file   — the video file (mp4/webm only)
//   rating — 'sfw' or 'nsfw'
//   tags   — comma-separated list, at least 3 entries required
// On success the item is inserted with active=false so an admin must approve
// it (via /admin/approve) before it becomes publicly visible.
export default router => {
  router.group(/^\/api\/v2/, group => {
    group.post(/\/upload$/, lib.loggedin, async (req, res) => {
      try {
        console.log('[UPLOAD DEBUG] Request received');
        // Use stored content type if available (from middleware bypass), otherwise use header
        const contentType = req._multipartContentType || req.headers['content-type'] || '';
        const boundaryMatch = contentType.match(/boundary=(.+)$/);
        if (!boundaryMatch) {
          console.log('[UPLOAD DEBUG] No boundary found');
          return res.json({ success: false, msg: 'Invalid content type' }, 400);
        }
        // The body may already have been buffered (or promised) by an
        // upstream middleware; otherwise drain the stream ourselves.
        let body;
        if (req.bodyPromise) {
          console.log('[UPLOAD DEBUG] Waiting for buffered body from middleware promise...');
          body = await req.bodyPromise;
          console.log('[UPLOAD DEBUG] Received body from promise');
        } else if (req.rawBody) {
          console.log('[UPLOAD DEBUG] Using buffered body from middleware');
          body = req.rawBody;
        } else {
          console.log('[UPLOAD DEBUG] Collecting body via collectBody...');
          body = await collectBody(req);
        }
        if (!body) {
          return res.json({ success: false, msg: 'Failed to receive file body' }, 400);
        }
        console.log('[UPLOAD DEBUG] Body size:', body.length);
        const parts = parseMultipart(body, boundaryMatch[1]);
        console.log('[UPLOAD DEBUG] Parsed parts:', Object.keys(parts));

        // Validate required fields
        const file = parts.file;
        const rating = parts.rating; // 'sfw' or 'nsfw'
        const tagsRaw = parts.tags;  // comma-separated tags
        if (!file || !file.data) {
          return res.json({ success: false, msg: 'No file provided' }, 400);
        }
        if (!rating || !['sfw', 'nsfw'].includes(rating)) {
          return res.json({ success: false, msg: 'Rating (sfw/nsfw) is required' }, 400);
        }
        const tags = tagsRaw ? tagsRaw.split(',').map(t => t.trim()).filter(t => t.length > 0) : [];
        if (tags.length < 3) {
          return res.json({ success: false, msg: 'At least 3 tags are required' }, 400);
        }

        // Client-declared MIME type check (re-verified below with file(1))
        const allowedMimes = ['video/mp4', 'video/webm'];
        const mime = file.contentType;
        if (!allowedMimes.includes(mime)) {
          return res.json({ success: false, msg: `Invalid file type. Only mp4 and webm allowed. Got: ${mime}` }, 400);
        }

        // Validate file size against the configured cap
        const maxfilesize = cfg.main.maxfilesize;
        const size = file.data.length;
        if (size > maxfilesize) {
          return res.json({
            success: false,
            msg: `File too large. Max: ${lib.formatSize(maxfilesize)}, Got: ${lib.formatSize(size)}`
          }, 400);
        }

        // Generate UUID for filename.
        // BUGFIX: tmpPath/destPath previously contained the literal text
        // "$(unknown)" instead of the generated filename, so every upload
        // wrote to — and overwrote — the same bogus path.
        const uuid = await queue.genuuid();
        const ext = mime === 'video/mp4' ? 'mp4' : 'webm';
        const filename = `${uuid}.${ext}`;
        const tmpPath = `./tmp/${filename}`;
        const destPath = `./public/b/${filename}`;

        // Save file temporarily
        await fs.writeFile(tmpPath, file.data);

        // Verify MIME with file(1); tmpPath is server-generated (uuid.ext),
        // so interpolating it into the shell command is safe
        const actualMime = (await queue.exec(`file --mime-type -b ${tmpPath}`)).stdout.trim();
        if (!allowedMimes.includes(actualMime)) {
          await fs.unlink(tmpPath).catch(() => { });
          return res.json({ success: false, msg: `Invalid file type detected: ${actualMime}` }, 400);
        }

        // Generate checksum and reject known reposts
        const checksum = (await queue.exec(`sha256sum ${tmpPath}`)).stdout.trim().split(" ")[0];
        const repost = await queue.checkrepostsum(checksum);
        if (repost) {
          await fs.unlink(tmpPath).catch(() => { });
          return res.json({
            success: false,
            msg: `This file already exists`,
            repost: repost
          }, 409);
        }

        // Move to public folder
        await fs.copyFile(tmpPath, destPath);
        await fs.unlink(tmpPath).catch(() => { });

        // Insert into database (active=false for admin approval)
        await db`
          insert into items ${db({
            src: '',
            dest: filename,
            mime: actualMime,
            size: size,
            checksum: checksum,
            username: req.session.user,
            userchannel: 'web',
            usernetwork: 'web',
            stamp: ~~(Date.now() / 1000),
            active: false
          }, 'src', 'dest', 'mime', 'size', 'checksum', 'username', 'userchannel', 'usernetwork', 'stamp', 'active')
          }
        `;

        // Get the new item ID
        const itemid = await queue.getItemID(filename);

        // Generate thumbnail; fall back to a placeholder image on failure
        try {
          await queue.genThumbnail(filename, actualMime, itemid, '');
        } catch (err) {
          await queue.exec(`magick ./mugge.png ./public/t/${itemid}.webp`);
        }

        // Assign rating tag (sfw=1, nsfw=2 — assumes these fixed tag ids exist; TODO confirm)
        const ratingTagId = rating === 'sfw' ? 1 : 2;
        await db`
          insert into tags_assign ${db({ item_id: itemid, tag_id: ratingTagId, user_id: req.session.id })}
        `;

        // Assign user tags, creating missing ones on the fly
        for (const tagName of tags) {
          let tagRow = await db`
            select id from tags where normalized = slugify(${tagName}) limit 1
          `;
          if (tagRow.length === 0) {
            // Create new tag, then re-select to obtain its id
            await db`
              insert into tags ${db({ tag: tagName }, 'tag')}
            `;
            tagRow = await db`
              select id from tags where normalized = slugify(${tagName}) limit 1
            `;
          }
          const tagId = tagRow[0].id;
          // Assign tag to item (idempotent via on conflict)
          await db`
            insert into tags_assign ${db({ item_id: itemid, tag_id: tagId, user_id: req.session.id })}
            on conflict do nothing
          `;
        }

        return res.json({
          success: true,
          msg: 'Upload successful! Your upload is pending admin approval.',
          itemid: itemid
        });
      } catch (err) {
        console.error('[UPLOAD ERROR]', err);
        return res.json({ success: false, msg: 'Upload failed: ' + err.message }, 500);
      }
    });
  });
  return router;
};

View File

@@ -4,8 +4,10 @@ import lib from "./inc/lib.mjs";
import cuffeo from "cuffeo";
import { promises as fs } from "fs";
import flummpress from "flummpress";
import { handleUpload } from "./upload_handler.mjs";
process.on('unhandledRejection', err => {
if (err.code === 'ERR_HTTP_HEADERS_SENT') return;
console.error(err);
throw err;
});
@@ -19,7 +21,7 @@ process.on('unhandledRejection', err => {
this.level = args.level || 0;
this.name = args.name;
this.active = args.hasOwnProperty("active") ? args.active : true;
this.clients = args.clients || [ "irc", "tg", "slack" ];
this.clients = args.clients || ["irc", "tg", "slack"];
this.f = args.f;
},
bot: await new cuffeo(cfg.clients)
@@ -27,7 +29,7 @@ process.on('unhandledRejection', err => {
console.time("loading");
const modules = {
events: (await fs.readdir("./src/inc/events")).filter(f => f.endsWith(".mjs")),
events: (await fs.readdir("./src/inc/events")).filter(f => f.endsWith(".mjs")),
trigger: (await fs.readdir("./src/inc/trigger")).filter(f => f.endsWith(".mjs"))
};
@@ -41,7 +43,7 @@ process.on('unhandledRejection', err => {
console.timeLog("loading", `${dir}/${mod}`);
return res;
}))).flat(2)
})))).reduce((a, b) => ({...a, ...b}));
})))).reduce((a, b) => ({ ...a, ...b }));
blah.events.forEach(event => {
console.timeLog("loading", `registering event > ${event.name}`);
@@ -61,15 +63,16 @@ process.on('unhandledRejection', err => {
const router = app.router;
const tpl = app.tpl;
app.use(async (req, res) => {
// sessionhandler
req.session = false;
if(req.url.pathname.match(/^\/(s|b|t|ca)\//))
if (req.url.pathname.match(/^\/(s|b|t|ca)\//))
return;
req.theme = req.cookies.theme || 'amoled';
req.fullscreen = req.cookies.fullscreen || 0;
if(req.cookies.session) {
if (req.cookies.session) {
const user = await db`
select "user".id, "user".login, "user".user, "user".admin, "user_sessions".id as sess_id, "user_options".*
from "user_sessions"
@@ -78,8 +81,8 @@ process.on('unhandledRejection', err => {
where "user_sessions".session = ${lib.md5(req.cookies.session)}
limit 1
`;
if(user.length === 0) {
if (user.length === 0) {
return res.writeHead(307, { // delete session
"Cache-Control": "no-cache, public",
"Set-Cookie": "session=; Path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT",
@@ -91,28 +94,26 @@ process.on('unhandledRejection', err => {
// log last action
await db`
update "user_sessions" set ${
db({
last_used: ~~(Date.now() / 1e3),
last_action: req.url.pathname,
browser: req.headers['user-agent']
}, 'last_used', 'last_action', 'browser')
update "user_sessions" set ${db({
last_used: ~~(Date.now() / 1e3),
last_action: req.url.pathname,
browser: req.headers['user-agent']
}, 'last_used', 'last_action', 'browser')
}
where id = ${+user[0].sess_id}
`;
req.session.theme = req.cookies.theme;
req.session.fullscreen = req.cookies.fullscreen;
// update userprofile
await db`
insert into "user_options" ${
db({
user_id: +user[0].id,
mode: user[0].mode ?? 0,
theme: req.session.theme ?? 'amoled',
fullscreen: req.session.fullscreen || 0
}, 'user_id', 'mode', 'theme', 'fullscreen')
insert into "user_options" ${db({
user_id: +user[0].id,
mode: user[0].mode ?? 0,
theme: req.session.theme ?? 'amoled',
fullscreen: req.session.fullscreen || 0
}, 'user_id', 'mode', 'theme', 'fullscreen')
}
on conflict ("user_id") do update set
mode = excluded.mode,
@@ -123,6 +124,15 @@ process.on('unhandledRejection', err => {
}
});
// Bypass middleware for direct upload handling
app.use(async (req, res) => {
if (req.method === 'POST' && req.url.pathname === '/api/v2/upload') {
await handleUpload(req, res);
// Modify URL to prevent router matching and double execution
req.url.pathname = '/handled_upload_bypass';
}
});
tpl.views = "views";
tpl.debug = true;
tpl.cache = false;

250
src/upload_handler.mjs Normal file
View File

@@ -0,0 +1,250 @@
import { promises as fs } from "fs";
import db from "./inc/sql.mjs";
import lib from "./inc/lib.mjs";
import cfg from "./inc/config.mjs";
import queue from "./inc/queue.mjs";
import path from "path";
// Native multipart form data parser
// Parse a raw multipart/form-data buffer into named parts.
// Plain fields become trimmed strings; file parts become
// { filename, contentType, data } with `data` left as a Buffer.
const parseMultipart = (buffer, boundary) => {
  const delim = Buffer.from(`--${boundary}`);
  const result = {};

  // Remember each byte span that sits between two boundary markers.
  const spans = [];
  let offset = 0;
  let hit;
  while ((hit = buffer.indexOf(delim, offset)) !== -1) {
    if (offset !== 0) {
      spans.push(buffer.slice(offset, hit - 2)); // drop trailing \r\n before the marker
    }
    offset = hit + delim.length + 2;             // drop \r\n after the marker
  }

  for (const part of spans) {
    const sep = part.indexOf('\r\n\r\n');
    if (sep === -1) continue; // no header/body separator — skip malformed part

    const head = part.slice(0, sep).toString();
    const data = part.slice(sep + 4);

    const name = head.match(/name="([^"]+)"/)?.[1];
    if (!name) continue;

    const filename = head.match(/filename="([^"]+)"/)?.[1];
    if (filename === undefined) {
      result[name] = data.toString().trim();
    } else {
      result[name] = {
        filename,
        contentType: head.match(/Content-Type:\s*([^\r\n]+)/i)?.[1] ?? 'application/octet-stream',
        data
      };
    }
  }
  return result;
};
// Collect request body as buffer
// Drain the request stream and resolve with the complete body as one Buffer.
const collectBody = (req) =>
  new Promise((resolve, reject) => {
    const pieces = [];
    req
      .on('data', (piece) => { pieces.push(piece); })
      .on('end', () => { resolve(Buffer.concat(pieces)); })
      .on('error', (err) => { reject(err); });
    // Attaching 'data' listeners normally switches the stream to flowing
    // mode; force it in case something upstream paused the stream.
    if (req.isPaused()) req.resume();
  });
// Helper for JSON response
// Serialize `data` as JSON and finish the response with the given status code
// (defaults to 200).
const sendJson = (res, data, code = 200) => {
  const payload = JSON.stringify(data);
  res.writeHead(code, { 'Content-Type': 'application/json' });
  res.end(payload);
};
// Direct handler for POST /api/v2/upload, wired into index.mjs as a
// middleware bypass so the multipart body stream is not consumed by other
// middleware first.
// Authenticates via the session cookie, parses the multipart payload,
// validates it (mp4/webm only, configured size cap, >= 3 tags), stores the
// file and creates an `items` row with active=false pending admin approval.
// Responds with JSON in every path via sendJson().
export const handleUpload = async (req, res) => {
  console.log('[UPLOAD HANDLER] Started');
  // Manual session lookup — the framework's session middleware may not have
  // finished for this request. req.cookies is assumed to be populated by the
  // framework (index.mjs reads it the same way) — TODO confirm.
  let user = [];
  if (req.cookies && req.cookies.session) {
    user = await db`
      select "user".id, "user".login, "user".user, "user".admin, "user_sessions".id as sess_id, "user_options".*
      from "user_sessions"
      left join "user" on "user".id = "user_sessions".user_id
      left join "user_options" on "user_options".user_id = "user_sessions".user_id
      where "user_sessions".session = ${lib.md5(req.cookies.session)}
      limit 1
    `;
  }
  if (user.length === 0) {
    console.log('[UPLOAD HANDLER] Unauthorized - No valid session found');
    return sendJson(res, { success: false, msg: 'Unauthorized' }, 401);
  }
  // Expose the session row on req.session for consistency with the rest of the app
  req.session = user[0];
  console.log('[UPLOAD HANDLER] Authorized:', req.session.user);

  try {
    const contentType = req.headers['content-type'] || '';
    const boundaryMatch = contentType.match(/boundary=(.+)$/);
    if (!boundaryMatch) {
      console.log('[UPLOAD HANDLER] No boundary');
      return sendJson(res, { success: false, msg: 'Invalid content type' }, 400);
    }

    console.log('[UPLOAD HANDLER] Collecting body...');
    const body = await collectBody(req);
    console.log('[UPLOAD HANDLER] Body collected, size:', body.length);
    const parts = parseMultipart(body, boundaryMatch[1]);

    // Validate required fields: file, rating (sfw/nsfw), tags (>= 3)
    const file = parts.file;
    const rating = parts.rating;
    const tagsRaw = parts.tags;
    if (!file || !file.data) {
      return sendJson(res, { success: false, msg: 'No file provided' }, 400);
    }
    if (!rating || !['sfw', 'nsfw'].includes(rating)) {
      return sendJson(res, { success: false, msg: 'Rating (sfw/nsfw) is required' }, 400);
    }
    const tags = tagsRaw ? tagsRaw.split(',').map(t => t.trim()).filter(t => t.length > 0) : [];
    if (tags.length < 3) {
      return sendJson(res, { success: false, msg: 'At least 3 tags are required' }, 400);
    }

    // Client-declared MIME type check (re-verified below with file(1))
    const allowedMimes = ['video/mp4', 'video/webm'];
    const mime = file.contentType;
    if (!allowedMimes.includes(mime)) {
      return sendJson(res, { success: false, msg: `Invalid file type. Only mp4 and webm allowed. Got: ${mime}` }, 400);
    }

    // Validate file size against the configured cap
    const maxfilesize = cfg.main.maxfilesize;
    const size = file.data.length;
    if (size > maxfilesize) {
      return sendJson(res, {
        success: false,
        msg: `File too large. Max: ${lib.formatSize(maxfilesize)}, Got: ${lib.formatSize(size)}`
      }, 400);
    }

    // Generate UUID-based filename.
    // BUGFIX: tmpPath/destPath previously contained the literal text
    // "$(unknown)" instead of the generated filename, so every upload landed
    // on the same bogus path and overwrote the previous one.
    const uuid = await queue.genuuid();
    const ext = mime === 'video/mp4' ? 'mp4' : 'webm';
    const filename = `${uuid}.${ext}`;
    const tmpPath = `./tmp/${filename}`;
    const destPath = `./public/b/${filename}`;

    // Save temporarily
    await fs.writeFile(tmpPath, file.data);

    // Verify MIME with file(1); tmpPath is server-generated (uuid.ext), so
    // interpolating it into the shell command is safe
    const actualMime = (await queue.exec(`file --mime-type -b ${tmpPath}`)).stdout.trim();
    if (!allowedMimes.includes(actualMime)) {
      await fs.unlink(tmpPath).catch(() => { });
      return sendJson(res, { success: false, msg: `Invalid file type detected: ${actualMime}` }, 400);
    }

    // Checksum + repost detection
    const checksum = (await queue.exec(`sha256sum ${tmpPath}`)).stdout.trim().split(" ")[0];
    const repost = await queue.checkrepostsum(checksum);
    if (repost) {
      await fs.unlink(tmpPath).catch(() => { });
      return sendJson(res, {
        success: false,
        msg: `This file already exists`,
        repost: repost
      }, 409);
    }

    // Move to public
    await fs.copyFile(tmpPath, destPath);
    await fs.unlink(tmpPath).catch(() => { });

    // Insert item row; active=false keeps it hidden until an admin approves it
    await db`
      insert into items ${db({
        src: '',
        dest: filename,
        mime: actualMime,
        size: size,
        checksum: checksum,
        username: req.session.user,
        userchannel: 'web',
        usernetwork: 'web',
        stamp: ~~(Date.now() / 1000),
        active: false
      }, 'src', 'dest', 'mime', 'size', 'checksum', 'username', 'userchannel', 'usernetwork', 'stamp', 'active')
      }
    `;
    const itemid = await queue.getItemID(filename);

    // Thumbnail; fall back to a placeholder image if generation fails
    try {
      await queue.genThumbnail(filename, actualMime, itemid, '');
    } catch (err) {
      await queue.exec(`magick ./mugge.png ./public/t/${itemid}.webp`);
    }

    // Rating tag (sfw=1, nsfw=2 — assumes these fixed tag ids exist; TODO confirm)
    const ratingTagId = rating === 'sfw' ? 1 : 2;
    await db`
      insert into tags_assign ${db({ item_id: itemid, tag_id: ratingTagId, user_id: req.session.id })}
    `;

    // User tags, creating missing ones on the fly
    for (const tagName of tags) {
      let tagRow = await db`
        select id from tags where normalized = slugify(${tagName}) limit 1
      `;
      if (tagRow.length === 0) {
        // Create new tag, then re-select to obtain its id
        await db`
          insert into tags ${db({ tag: tagName }, 'tag')}
        `;
        tagRow = await db`
          select id from tags where normalized = slugify(${tagName}) limit 1
        `;
      }
      const tagId = tagRow[0].id;
      // Assign tag to item (idempotent via on conflict)
      await db`
        insert into tags_assign ${db({ item_id: itemid, tag_id: tagId, user_id: req.session.id })}
        on conflict do nothing
      `;
    }

    return sendJson(res, {
      success: true,
      msg: 'Upload successful! Your upload is pending admin approval.',
      itemid: itemid
    });
  } catch (err) {
    console.error('[UPLOAD HANDLER ERROR]', err);
    return sendJson(res, { success: false, msg: 'Upload failed: ' + err.message }, 500);
  }
};