// Load environment variables first — every module below reads process.env.
require('dotenv').config()

// Node built-ins and third-party packages.
const fs = require('node:fs')
const BQueue = require('bee-queue')
const { PrismaClient } = require('@prisma/client')

// Project-local helpers.
const upload = require('./utils/upload.js')
const ytdlp = require('./utils/ytdlp.js')
const redis = require('./utils/redis.js')
const metadata = require('./utils/metadata.js')
const database = require('./utils/database.js')
const logger = require("./utils/logger.js")

// Single shared Prisma client for the whole process.
const prisma = new PrismaClient()
/**
 * Builds the bee-queue options shared by both queues.
 * The redis connection settings and the transient-job flags were previously
 * duplicated verbatim for each queue; keeping them in one place means they
 * can't drift apart.
 *
 * @param {string} prefix - Redis key prefix for this queue.
 * @returns {object} bee-queue settings object.
 */
function queueOptions(prefix) {
  return {
    prefix,
    redis: {
      host: process.env.REDIS_HOST,
      port: process.env.REDIS_PORT,
      password: process.env.REDIS_PASS,
    },
    // Jobs are transient: remove them from Redis once settled and don't
    // persist job data beyond processing.
    removeOnFailure: true,
    removeOnSuccess: true,
    storeJobs: false,
  }
}

// Per-video download jobs (processed one at a time; see queue.process below).
const queue = new BQueue('download', queueOptions('download'))

// Per-channel scan jobs (processed with concurrency 10; see channelQueue.process).
const channelQueue = new BQueue('channel', queueOptions('channel'))
/**
 * Scans the autodownload table and enqueues a channel-scan job for every
 * channel that is not already being processed.
 *
 * A Redis key (the channel id) acts as a cross-worker lock so the same
 * channel is never scanned twice concurrently; the lock is released by the
 * channelQueue processor when the scan finishes.
 */
async function check() {
  const channels = await prisma.autodownload.findMany()

  // forEach(async ...) would discard the per-channel promises; Promise.all
  // keeps the same concurrency while letting callers await completion and
  // surfacing any rejection instead of leaving it unhandled.
  await Promise.all(channels.map(async (c) => {
    if (await redis.get(c.channel)) {
      logger.info({ message: `${c.channel} is already being downloaded` })
      return
    }

    // Claim the channel before enqueuing so a concurrent check() skips it.
    await redis.set(c.channel, 'downloading')
    channelQueue.createJob(c).save()
  }))
}
/**
 * Fetches the latest uploads for a channel and enqueues a download job for
 * every video that is new, unclaimed, not blacklisted, and at most 2h long.
 *
 * @param {string} channelId - YouTube channel id to scan.
 */
async function checkChannel(channelId) {
  logger.info({ message: `Checking ${channelId} for new videos...` })

  const videos = await metadata.getChannelVideos(channelId)
  if (!videos) return logger.info({ message: `Failed requesting Youtube for ${channelId}` })

  // Promise.all instead of forEach(async ...) so the per-video work is
  // actually awaited and rejections propagate to the caller.
  await Promise.all(videos.map(async (video) => {
    // Extract the video id from the watch URL. Skip entries whose URL has
    // no v= parameter — indexing match[1] unconditionally would throw.
    const match = video.url.match(/[?&]v=([^&]+)/)
    if (!match) return
    const id = match[1]

    // Already archived in the database?
    const already = await prisma.videos.findFirst({
      where: {
        id: id
      }
    })
    if (already) return

    // Claimed by another worker?
    if (await redis.get(id)) {
      logger.info({ message: `Someone is already downloading ${video.title}, ${id}` })
      return
    }

    // Permanently skipped (e.g. a previous download produced no file)?
    if (await redis.get(`blacklist:${id}`)) {
      logger.info({ message: `${video.title} is blacklisted from downloading, ${id}` })
      return
    }

    // Hard cap on video length: 7200 s = 2 hours.
    if (video.duration > 7200) {
      logger.info({ message: `${video.title} is longer than 2h, ${id}` })
      return
    }

    // Claim the id, then enqueue the actual download.
    await redis.set(id, 'downloading')
    logger.info({ message: `Added ${video.title} to the queue, ${id}` })
    queue.createJob({ video, id }).save()
  }))
}
// Download worker: one job at a time (yt-dlp + upload are heavy).
queue.process(1, async function (job, done) {
  const { video, id } = job.data

  logger.info({ message: `Starting to download ${video.title}, ${id}` })

  try {
    const download = await ytdlp.downloadVideo('https://www.youtube.com' + video.url)

    if (download.fail) {
      logger.info({ message: `Failed downloading ${video.title}, ${id} -> ${download.message}` })
      // Release the "downloading" lock so checkChannel can retry later.
      await redis.del(id)
      return done()
    }

    // yt-dlp writes the file with its own naming; locate it by video id.
    const file = fs.readdirSync("./videos").find(f => f.includes(id))

    if (!file) {
      // Download "succeeded" but left no file on disk — blacklist the id so
      // we don't retry it forever.
      await redis.set(`blacklist:${id}`, 'error')
      logger.info({ message: `Couldn't find file for ${video.title}, ${id}` })
      return done()
    }

    // Normalize the filename, upload, then remove the local copy.
    fs.renameSync(`./videos/${file}`, `./videos/${id}.webm`)
    logger.info({ message: `Downloaded ${video.title}, ${id}` })

    const videoUrl = await upload.uploadVideo(`./videos/${id}.webm`)
    logger.info({ message: `Uploaded ${video.title}, ${id}` })
    fs.unlinkSync(`./videos/${id}.webm`)

    await database.createDatabaseVideo(id, videoUrl)
    await redis.del(id)
    return done()
  } catch (err) {
    // Previously an exception here (rename/upload/database) left the redis
    // "downloading" lock set forever and never called done(), hanging the
    // job. Release the lock and fail the job explicitly.
    logger.info({ message: `Unexpected error processing ${id}: ${err.message}` })
    await redis.del(id)
    return done(err)
  }
})
// Channel-scan worker: up to 10 channels scanned concurrently.
channelQueue.process(10, async function (job, done) {
  const c = job.data

  try {
    await checkChannel(c.channel)
  } catch (err) {
    // Don't let a scan failure take down the worker or hang the job.
    logger.info({ message: `Failed checking ${c.channel}: ${err.message}` })
  } finally {
    // Always release the per-channel lock taken by check() — previously an
    // exception left it set forever, so the channel was never scanned again.
    await redis.del(c.channel)
    done()
  }
})
// Run an initial scan immediately, then repeat every 5 minutes (300 000 ms).
check()

setInterval(check, 300000)