Merge pull request 'fixing imgur' (#68) from f0ck_local into master

Reviewed-on: #68
Kibi Kelburton committed 2024-02-20 00:14:09 +00:00
commit 9f1052320e


@@ -11,7 +11,7 @@ import path from "path";
 const regex = {
   all: /https?:\/\/([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?/gi,
   yt: /(?:youtube\.com\/\S*(?:(?:\/e(?:mbed))?\/|watch\/?\?(?:\S*?&?v\=))|youtu\.be\/)([a-zA-Z0-9_-]{6,11})/gi,
-  imgur: /^https?:\/\/(\w+\.)?imgur.com\/(\w*\d\w*)+(\.[a-zA-Z]{3,4})?/gi,
+  imgur: /(?:https?:)?\/\/(\w+\.)?imgur\.com\/(\S*)(\.[a-zA-Z]{3})/gm,
   instagram: /(?:https?:\/\/www\.)?instagram\.com\S*?\/(?:p|reel)\/(\w{11})\/?/im
 };
 const mediagroupids = new Set();
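A quick sanity check on the new pattern (not part of the commit; the URLs below are made-up examples): dropping the ^ anchor and the optional extension means the regex now also accepts protocol-relative //imgur.com links but only fires on direct links that end in a three-letter extension.

// standalone sketch; hypothetical example URLs
const imgur = /(?:https?:)?\/\/(\w+\.)?imgur\.com\/(\S*)(\.[a-zA-Z]{3})/gm;
console.log("https://i.imgur.com/AbC1234.jpg".match(imgur));   // -> ["https://i.imgur.com/AbC1234.jpg"]
console.log("https://imgur.com/gallery/AbC1234".match(imgur)); // -> null (no extension)

Because the g flag is kept, repeated regex.imgur.test(...) calls on the same RegExp object are stateful through lastIndex, which is worth keeping in mind for the per-link checks further down.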
@@ -52,10 +52,10 @@ export default async bot => {
     links.forEach(async link => {
       if(regex.imgur.test(link))
-        return await e.reply(`fuck imgur... seriously`);
+        await e.reply(`imgur schmimigur`);
       if(regex.instagram.test(link))
-        return await e.reply(`fuck instagram... seriously`);
+        await e.reply(`insta schminsta`);
       // check repost (link)
       repost = await queue.checkrepostlink(link);
@@ -69,16 +69,16 @@ export default async bot => {
       // read metadata
       let ext;
-      if(regex.instagram.test(link))
-        // is instagram
+      if(regex.imgur.test(link)) {
+        // is imgur
         try {
-          // @flummi -> is there a variable for the actual work directory so it doesn't have to be hardcoded?
-          const meta = JSON.parse((await queue.exec(`yt-dlp -f 'bv*[height<=720]+ba/b[height<=720] / wv*+ba/w' --skip-download --dump-json "${link}"`)).stdout);
-          ext = meta.ext;
+          // get the extension from the url
+          ext = link.split(".").slice(-1).join(".");
         } catch(err) {
           const tmphead = (await fetch(link, { method: "HEAD" })).headers["content-type"];
           // this can be undefined for unsupported mime types, but will be caught in the general mime check below
           ext = cfg.mimes[tmphead];
         }
+      }
       else {
         // is not instagram
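Side note, not part of the commit: link.split(".").slice(-1).join(".") takes everything after the last dot, so a query string or fragment on the link would leak into ext. A hedged alternative sketch using the path import already at the top of this file (the URL is a made-up example):

// sketch only; hypothetical example link
const demoLink = "https://i.imgur.com/AbC1234.jpg?x=1";
const demoExt = path.extname(new URL(demoLink).pathname).slice(1); // "jpg", query string ignored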
@@ -114,7 +114,18 @@ export default async bot => {
           return await e.editMessageText(msg.result.chat.id, msg.result.message_id, "instagram dl error");
         return await e.reply("instagram dl error", err);
       }
-      else {
+      else if(regex.imgur.test(link)) {
+        console.log("penis123");
+        try {
+          await queue.exec(`torsocks wget "${link}" -O "./tmp/${uuid}.${ext}"`);
+          source = "/home/f0ck/f0ckv2/tmp/"+uuid+"."+ext;
+          console.log(source);
+        } catch(err) {
+          console.log(err);
+        }
+      }
+      else
+      {
         try {
           source = (await queue.exec(`yt-dlp -f 'bv*[height<=720]+ba/b[height<=720] / wv*+ba/w' "${link}" --max-filesize ${maxfilesize / 1024}k --postprocessor-args "ffmpeg:-bitexact" -o "./tmp/${uuid}.%(ext)s" --print after_move:filepath --merge-output-format "mp4"`)).stdout.trim();
         } catch(err) {
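On the hardcoded /home/f0ck/f0ckv2/tmp/ path in the new imgur branch, which is essentially the question the removed @flummi comment was asking: a hedged sketch of deriving the same absolute path from the path import instead, assuming the bot is started from the project root:

// sketch only; assumes process.cwd() is the project root (/home/f0ck/f0ckv2)
source = path.resolve("./tmp", `${uuid}.${ext}`);
console.log(source); // e.g. /home/f0ck/f0ckv2/tmp/<uuid>.<ext>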