import { promises as fs } from "fs";
import db from "./inc/sql.mjs";
import lib from "./inc/lib.mjs";
import cfg from "./inc/config.mjs";
import queue from "./inc/queue.mjs";
import path from "path";

/**
 * Minimal native multipart/form-data parser (no third-party dependencies).
 *
 * @param {Buffer} buffer - raw request body bytes
 * @param {string} boundary - boundary token from the Content-Type header
 *   (without the leading "--", without surrounding quotes)
 * @returns {Object.<string, (string|{filename: string, contentType: string, data: Buffer})>}
 *   map of field name -> trimmed string value for plain fields, or a
 *   { filename, contentType, data } descriptor for file fields
 */
const parseMultipart = (buffer, boundary) => {
    const parts = {};
    const boundaryBuffer = Buffer.from(`--${boundary}`);
    const segments = [];
    let start = 0;
    let idx;
    // Split the body into segments delimited by the boundary marker.
    while ((idx = buffer.indexOf(boundaryBuffer, start)) !== -1) {
        if (start !== 0) {
            segments.push(buffer.slice(start, idx - 2)); // -2 strips the \r\n before the boundary
        }
        start = idx + boundaryBuffer.length + 2; // +2 skips the \r\n after the boundary
    }
    for (const segment of segments) {
        const headerEnd = segment.indexOf('\r\n\r\n');
        if (headerEnd === -1) continue; // malformed part: no header/body separator
        const headers = segment.slice(0, headerEnd).toString();
        const body = segment.slice(headerEnd + 4);
        const nameMatch = headers.match(/name="([^"]+)"/);
        const filenameMatch = headers.match(/filename="([^"]+)"/);
        const contentTypeMatch = headers.match(/Content-Type:\s*([^\r\n]+)/i);
        if (nameMatch) {
            const name = nameMatch[1];
            if (filenameMatch) {
                // File part: keep raw bytes; default MIME per RFC 2046 when the
                // part carries no Content-Type header.
                parts[name] = {
                    filename: filenameMatch[1],
                    contentType: contentTypeMatch ? contentTypeMatch[1] : 'application/octet-stream',
                    data: body
                };
            } else {
                // Plain form field: decode as text.
                parts[name] = body.toString().trim();
            }
        }
    }
    return parts;
};

/**
 * Buffer the entire incoming request body into memory.
 *
 * @param {import('http').IncomingMessage} req
 * @returns {Promise<Buffer>} concatenated body bytes; rejects on stream error
 */
const collectBody = (req) => {
    return new Promise((resolve, reject) => {
        const chunks = [];
        req.on('data', chunk => chunks.push(chunk));
        req.on('end', () => resolve(Buffer.concat(chunks)));
        req.on('error', reject);
        // The stream may have been paused by earlier middleware; make sure it flows.
        if (req.isPaused()) req.resume();
    });
};

/**
 * Write a JSON response and end the request.
 *
 * @param {import('http').ServerResponse} res
 * @param {*} data - payload, serialized with JSON.stringify
 * @param {number} [code=200] - HTTP status code
 */
const sendJson = (res, data, code = 200) => {
    res.writeHead(code, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify(data));
};

/**
 * HTTP handler for browser uploads (multipart/form-data).
 *
 * Expected form fields:
 *   - file:   the video file (video/mp4 or video/webm)
 *   - rating: 'sfw' or 'nsfw'
 *   - tags:   comma-separated list; at least 3 non-empty tags required
 *
 * Performs its own session lookup from the session cookie because the
 * framework's middleware may not have run yet. On success the file is stored
 * under ./public/b/, an `items` row is inserted (inactive, pending admin
 * approval), a thumbnail is generated and tags are assigned.
 *
 * @param {import('http').IncomingMessage} req - must expose req.cookies
 * @param {import('http').ServerResponse} res
 * @returns {Promise<void>} always responds via sendJson (401/400/409/500/200)
 */
export const handleUpload = async (req, res) => {
    console.log('[UPLOAD HANDLER] Started');

    // Manual session lookup (framework middleware might not have finished).
    let user = [];
    if (req.cookies && req.cookies.session) {
        user = await db`
            select
                "user".id, "user".login, "user".user, "user".admin,
                "user_sessions".id as sess_id,
                "user_options".*
            from "user_sessions"
            left join "user" on "user".id = "user_sessions".user_id
            left join "user_options" on "user_options".user_id = "user_sessions".user_id
            where "user_sessions".session = ${lib.md5(req.cookies.session)}
            limit 1
        `;
    }
    if (user.length === 0) {
        console.log('[UPLOAD HANDLER] Unauthorized - No valid session found');
        return sendJson(res, { success: false, msg: 'Unauthorized' }, 401);
    }

    // Mirror the framework's req.session shape for the rest of this handler.
    req.session = user[0];
    console.log('[UPLOAD HANDLER] Authorized:', req.session.user);

    try {
        const contentType = req.headers['content-type'] || '';
        const boundaryMatch = contentType.match(/boundary=(.+)$/);
        if (!boundaryMatch) {
            console.log('[UPLOAD HANDLER] No boundary');
            return sendJson(res, { success: false, msg: 'Invalid content type' }, 400);
        }
        // RFC 2046 permits a quoted boundary parameter; strip surrounding quotes
        // so clients that send boundary="..." still parse correctly.
        let boundary = boundaryMatch[1].trim();
        if (boundary.startsWith('"') && boundary.endsWith('"')) {
            boundary = boundary.slice(1, -1);
        }

        console.log('[UPLOAD HANDLER] Collecting body...');
        const body = await collectBody(req);
        console.log('[UPLOAD HANDLER] Body collected, size:', body.length);
        const parts = parseMultipart(body, boundary);

        // --- Validate required fields ---
        const file = parts.file;
        const rating = parts.rating;
        const tagsRaw = parts.tags;

        if (!file || !file.data) {
            return sendJson(res, { success: false, msg: 'No file provided' }, 400);
        }
        if (!rating || !['sfw', 'nsfw'].includes(rating)) {
            return sendJson(res, { success: false, msg: 'Rating (sfw/nsfw) is required' }, 400);
        }
        const tags = tagsRaw
            ? tagsRaw.split(',').map(t => t.trim()).filter(t => t.length > 0)
            : [];
        if (tags.length < 3) {
            return sendJson(res, { success: false, msg: 'At least 3 tags are required' }, 400);
        }

        // --- Validate MIME type as declared by the client (re-verified on disk below) ---
        const allowedMimes = ['video/mp4', 'video/webm'];
        let mime = file.contentType;
        if (!allowedMimes.includes(mime)) {
            return sendJson(res, { success: false, msg: `Invalid file type. Only mp4 and webm allowed. Got: ${mime}` }, 400);
        }

        // --- Validate file size ---
        const maxfilesize = cfg.main.maxfilesize;
        const size = file.data.length;
        if (size > maxfilesize) {
            return sendJson(res, { success: false, msg: `File too large. Max: ${lib.formatSize(maxfilesize)}, Got: ${lib.formatSize(size)}` }, 400);
        }

        // --- Persist to a temp location ---
        const uuid = await queue.genuuid();
        const ext = mime === 'video/mp4' ? 'mp4' : 'webm';
        const filename = `${uuid}.${ext}`;
        // FIX: these two paths previously contained the literal text "$(unknown)"
        // ($() is not interpolation in JS template literals), so every upload
        // collided on the same bogus path and destPath never matched the
        // `dest: filename` stored in the items row. Interpolate the filename.
        const tmpPath = `./tmp/${filename}`;
        const destPath = `./public/b/${filename}`;

        // Ensure directories exist
        await fs.mkdir('./tmp', { recursive: true });
        await fs.mkdir('./public/b', { recursive: true });

        // Save temporarily
        await fs.writeFile(tmpPath, file.data);

        // Verify the real MIME type on disk; do not trust the client header.
        // NOTE(review): tmpPath is server-generated (uuid.ext) so it is shell-safe
        // here, but confirm queue.exec never receives user-controlled paths.
        const actualMime = (await queue.exec(`file --mime-type -b ${tmpPath}`)).stdout.trim();
        if (!allowedMimes.includes(actualMime)) {
            await fs.unlink(tmpPath).catch(() => { });
            return sendJson(res, { success: false, msg: `Invalid file type detected: ${actualMime}` }, 400);
        }

        // Repost check via content checksum.
        const checksum = (await queue.exec(`sha256sum ${tmpPath}`)).stdout.trim().split(" ")[0];
        const repost = await queue.checkrepostsum(checksum);
        if (repost) {
            await fs.unlink(tmpPath).catch(() => { });
            return sendJson(res, { success: false, msg: `This file already exists`, repost: repost }, 409);
        }

        // Move into the public directory.
        await fs.copyFile(tmpPath, destPath);
        await fs.unlink(tmpPath).catch(() => { });

        // Create the item row (inactive until approved by an admin).
        await db`
            insert into items ${db({
                src: '',
                dest: filename,
                mime: actualMime,
                size: size,
                checksum: checksum,
                username: req.session.user,
                userchannel: 'web',
                usernetwork: 'web',
                stamp: ~~(Date.now() / 1000), // unix seconds
                active: false
            }, 'src', 'dest', 'mime', 'size', 'checksum', 'username', 'userchannel', 'usernetwork', 'stamp', 'active')}
        `;
        const itemid = await queue.getItemID(filename);

        // Thumbnail; fall back to a placeholder image if generation fails.
        try {
            await queue.genThumbnail(filename, actualMime, itemid, '');
        } catch (err) {
            await queue.exec(`magick ./mugge.png ./public/t/${itemid}.webp`);
        }

        // Rating tag (assumes tag id 1 = sfw, 2 = nsfw — TODO confirm against tags table).
        const ratingTagId = rating === 'sfw' ? 1 : 2;
        await db`
            insert into tags_assign ${db({ item_id: itemid, tag_id: ratingTagId, user_id: req.session.id })}
        `;

        // User-supplied tags: create missing tags, then assign.
        for (const tagName of tags) {
            let tagRow = await db`
                select id from tags where normalized = slugify(${tagName}) limit 1
            `;
            if (tagRow.length === 0) {
                await db`
                    insert into tags ${db({ tag: tagName }, 'tag')}
                `;
                // Re-select to pick up the id (normalized is computed DB-side).
                tagRow = await db`
                    select id from tags where normalized = slugify(${tagName}) limit 1
                `;
            }
            const tagId = tagRow[0].id;
            await db`
                insert into tags_assign ${db({ item_id: itemid, tag_id: tagId, user_id: req.session.id })}
                on conflict do nothing
            `;
        }

        return sendJson(res, {
            success: true,
            msg: 'Upload successful! Your upload is pending admin approval.',
            itemid: itemid
        });
    } catch (err) {
        console.error('[UPLOAD HANDLER ERROR]', err);
        return sendJson(res, { success: false, msg: 'Upload failed: ' + err.message }, 500);
    }
};