just a quick little rewrite, no big deal
parent 96f8ec7b00
commit cdb8da68c9
@@ -7,13 +7,17 @@ jobs:
   docker:
     if: ${{ github.ref == 'refs/heads/main' }}
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+
     steps:
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v1
         with:
           registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v1
@@ -1,78 +1,105 @@
+# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore

 # Logs

 logs
 *.log
 npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
 lerna-debug.log*
 .pnpm-debug.log*

+# Caches

+.cache

 # Diagnostic reports (https://nodejs.org/api/report.html)
 report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

 # Runtime data

 pids
 *.pid
 *.seed
 *.pid.lock

 # Directory for instrumented libs generated by jscoverage/JSCover

 lib-cov

 # Coverage directory used by tools like istanbul

 coverage
 *.lcov

 # nyc test coverage

 .nyc_output

 # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)

 .grunt

 # Bower dependency directory (https://bower.io/)

 bower_components

 # node-waf configuration

 .lock-wscript

 # Compiled binary addons (https://nodejs.org/api/addons.html)

 build/Release

 # Dependency directories

 node_modules/
 jspm_packages/

 # Snowpack dependency directory (https://snowpack.dev/)

 web_modules/

 # TypeScript cache

 *.tsbuildinfo

 # Optional npm cache directory

 .npm

 # Optional eslint cache

 .eslintcache

 # Optional stylelint cache

 .stylelintcache

 # Microbundle cache

 .rpt2_cache/
 .rts2_cache_cjs/
 .rts2_cache_es/
 .rts2_cache_umd/

 # Optional REPL history

 .node_repl_history

 # Output of 'npm pack'

 *.tgz

 # Yarn Integrity file

 .yarn-integrity

 # dotenv environment variable files

 .env
 .env.development.local
 .env.test.local
@@ -80,55 +107,71 @@ web_modules/
 .env.local

 # parcel-bundler cache (https://parceljs.org/)
-.cache
 .parcel-cache

 # Next.js build output

 .next
 out

 # Nuxt.js build / generate output

 .nuxt
 dist

 # Gatsby files
-.cache/
 # Comment in the public line in if your project uses Gatsby and not Next.js

 # https://nextjs.org/blog/next-9-1#public-directory-support

 # public

 # vuepress build output

 .vuepress/dist

 # vuepress v2.x temp and cache directory

 .temp
-.cache

 # Docusaurus cache and generated files

 .docusaurus

 # Serverless directories

 .serverless/

 # FuseBox cache

 .fusebox/

 # DynamoDB Local files

 .dynamodb/

 # TernJS port file

 .tern-port

 # Stores VSCode versions used for testing VSCode extensions

 .vscode-test

 # yarn v2

 .yarn/cache
 .yarn/unplugged
 .yarn/build-state.yml
 .yarn/install-state.gz
 .pnp.*

-s3.json
-yt-dlp
-.DS_Store
+# IntelliJ based IDEs
+.idea
+
+# Finder (MacOS) folder config
+.DS_Store
+
+s3.json
@@ -1,9 +1,9 @@
-FROM node:alpine
+FROM oven/bun:1 AS base

 RUN mkdir -p /usr/src/preservetube/backend
 WORKDIR /usr/src/preservetube/backend

 COPY . /usr/src/preservetube/backend
-RUN yarn
+RUN bun install

-CMD ["node", "index.js"]
+CMD ["bun", "run", "src/index.ts"]
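For reference, this is how the Dockerfile reads once the hunk above is applied; it is assembled purely from the diff (the unchanged lines plus the new Bun-based lines), so the paths and commands are exactly those shown in the hunk rather than assumptions. The base-image switch to oven/bun:1 is what allows `bun install` and the `bun run src/index.ts` entrypoint to replace `yarn` and `node index.js`.

FROM oven/bun:1 AS base

RUN mkdir -p /usr/src/preservetube/backend
WORKDIR /usr/src/preservetube/backend

COPY . /usr/src/preservetube/backend
RUN bun install

CMD ["bun", "run", "src/index.ts"]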
LICENSE
@@ -1,661 +0,0 @@
|
||||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
|
||||||
Version 3, 19 November 2007
|
|
||||||
|
|
||||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
|
||||||
Everyone is permitted to copy and distribute verbatim copies
|
|
||||||
of this license document, but changing it is not allowed.
|
|
||||||
|
|
||||||
Preamble
|
|
||||||
|
|
||||||
The GNU Affero General Public License is a free, copyleft license for
|
|
||||||
software and other kinds of works, specifically designed to ensure
|
|
||||||
cooperation with the community in the case of network server software.
|
|
||||||
|
|
||||||
The licenses for most software and other practical works are designed
|
|
||||||
to take away your freedom to share and change the works. By contrast,
|
|
||||||
our General Public Licenses are intended to guarantee your freedom to
|
|
||||||
share and change all versions of a program--to make sure it remains free
|
|
||||||
software for all its users.
|
|
||||||
|
|
||||||
When we speak of free software, we are referring to freedom, not
|
|
||||||
price. Our General Public Licenses are designed to make sure that you
|
|
||||||
have the freedom to distribute copies of free software (and charge for
|
|
||||||
them if you wish), that you receive source code or can get it if you
|
|
||||||
want it, that you can change the software or use pieces of it in new
|
|
||||||
free programs, and that you know you can do these things.
|
|
||||||
|
|
||||||
Developers that use our General Public Licenses protect your rights
|
|
||||||
with two steps: (1) assert copyright on the software, and (2) offer
|
|
||||||
you this License which gives you legal permission to copy, distribute
|
|
||||||
and/or modify the software.
|
|
||||||
|
|
||||||
A secondary benefit of defending all users' freedom is that
|
|
||||||
improvements made in alternate versions of the program, if they
|
|
||||||
receive widespread use, become available for other developers to
|
|
||||||
incorporate. Many developers of free software are heartened and
|
|
||||||
encouraged by the resulting cooperation. However, in the case of
|
|
||||||
software used on network servers, this result may fail to come about.
|
|
||||||
The GNU General Public License permits making a modified version and
|
|
||||||
letting the public access it on a server without ever releasing its
|
|
||||||
source code to the public.
|
|
||||||
|
|
||||||
The GNU Affero General Public License is designed specifically to
|
|
||||||
ensure that, in such cases, the modified source code becomes available
|
|
||||||
to the community. It requires the operator of a network server to
|
|
||||||
provide the source code of the modified version running there to the
|
|
||||||
users of that server. Therefore, public use of a modified version, on
|
|
||||||
a publicly accessible server, gives the public access to the source
|
|
||||||
code of the modified version.
|
|
||||||
|
|
||||||
An older license, called the Affero General Public License and
|
|
||||||
published by Affero, was designed to accomplish similar goals. This is
|
|
||||||
a different license, not a version of the Affero GPL, but Affero has
|
|
||||||
released a new version of the Affero GPL which permits relicensing under
|
|
||||||
this license.
|
|
||||||
|
|
||||||
The precise terms and conditions for copying, distribution and
|
|
||||||
modification follow.
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
0. Definitions.
|
|
||||||
|
|
||||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
|
||||||
|
|
||||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
|
||||||
works, such as semiconductor masks.
|
|
||||||
|
|
||||||
"The Program" refers to any copyrightable work licensed under this
|
|
||||||
License. Each licensee is addressed as "you". "Licensees" and
|
|
||||||
"recipients" may be individuals or organizations.
|
|
||||||
|
|
||||||
To "modify" a work means to copy from or adapt all or part of the work
|
|
||||||
in a fashion requiring copyright permission, other than the making of an
|
|
||||||
exact copy. The resulting work is called a "modified version" of the
|
|
||||||
earlier work or a work "based on" the earlier work.
|
|
||||||
|
|
||||||
A "covered work" means either the unmodified Program or a work based
|
|
||||||
on the Program.
|
|
||||||
|
|
||||||
To "propagate" a work means to do anything with it that, without
|
|
||||||
permission, would make you directly or secondarily liable for
|
|
||||||
infringement under applicable copyright law, except executing it on a
|
|
||||||
computer or modifying a private copy. Propagation includes copying,
|
|
||||||
distribution (with or without modification), making available to the
|
|
||||||
public, and in some countries other activities as well.
|
|
||||||
|
|
||||||
To "convey" a work means any kind of propagation that enables other
|
|
||||||
parties to make or receive copies. Mere interaction with a user through
|
|
||||||
a computer network, with no transfer of a copy, is not conveying.
|
|
||||||
|
|
||||||
An interactive user interface displays "Appropriate Legal Notices"
|
|
||||||
to the extent that it includes a convenient and prominently visible
|
|
||||||
feature that (1) displays an appropriate copyright notice, and (2)
|
|
||||||
tells the user that there is no warranty for the work (except to the
|
|
||||||
extent that warranties are provided), that licensees may convey the
|
|
||||||
work under this License, and how to view a copy of this License. If
|
|
||||||
the interface presents a list of user commands or options, such as a
|
|
||||||
menu, a prominent item in the list meets this criterion.
|
|
||||||
|
|
||||||
1. Source Code.
|
|
||||||
|
|
||||||
The "source code" for a work means the preferred form of the work
|
|
||||||
for making modifications to it. "Object code" means any non-source
|
|
||||||
form of a work.
|
|
||||||
|
|
||||||
A "Standard Interface" means an interface that either is an official
|
|
||||||
standard defined by a recognized standards body, or, in the case of
|
|
||||||
interfaces specified for a particular programming language, one that
|
|
||||||
is widely used among developers working in that language.
|
|
||||||
|
|
||||||
The "System Libraries" of an executable work include anything, other
|
|
||||||
than the work as a whole, that (a) is included in the normal form of
|
|
||||||
packaging a Major Component, but which is not part of that Major
|
|
||||||
Component, and (b) serves only to enable use of the work with that
|
|
||||||
Major Component, or to implement a Standard Interface for which an
|
|
||||||
implementation is available to the public in source code form. A
|
|
||||||
"Major Component", in this context, means a major essential component
|
|
||||||
(kernel, window system, and so on) of the specific operating system
|
|
||||||
(if any) on which the executable work runs, or a compiler used to
|
|
||||||
produce the work, or an object code interpreter used to run it.
|
|
||||||
|
|
||||||
The "Corresponding Source" for a work in object code form means all
|
|
||||||
the source code needed to generate, install, and (for an executable
|
|
||||||
work) run the object code and to modify the work, including scripts to
|
|
||||||
control those activities. However, it does not include the work's
|
|
||||||
System Libraries, or general-purpose tools or generally available free
|
|
||||||
programs which are used unmodified in performing those activities but
|
|
||||||
which are not part of the work. For example, Corresponding Source
|
|
||||||
includes interface definition files associated with source files for
|
|
||||||
the work, and the source code for shared libraries and dynamically
|
|
||||||
linked subprograms that the work is specifically designed to require,
|
|
||||||
such as by intimate data communication or control flow between those
|
|
||||||
subprograms and other parts of the work.
|
|
||||||
|
|
||||||
The Corresponding Source need not include anything that users
|
|
||||||
can regenerate automatically from other parts of the Corresponding
|
|
||||||
Source.
|
|
||||||
|
|
||||||
The Corresponding Source for a work in source code form is that
|
|
||||||
same work.
|
|
||||||
|
|
||||||
2. Basic Permissions.
|
|
||||||
|
|
||||||
All rights granted under this License are granted for the term of
|
|
||||||
copyright on the Program, and are irrevocable provided the stated
|
|
||||||
conditions are met. This License explicitly affirms your unlimited
|
|
||||||
permission to run the unmodified Program. The output from running a
|
|
||||||
covered work is covered by this License only if the output, given its
|
|
||||||
content, constitutes a covered work. This License acknowledges your
|
|
||||||
rights of fair use or other equivalent, as provided by copyright law.
|
|
||||||
|
|
||||||
You may make, run and propagate covered works that you do not
|
|
||||||
convey, without conditions so long as your license otherwise remains
|
|
||||||
in force. You may convey covered works to others for the sole purpose
|
|
||||||
of having them make modifications exclusively for you, or provide you
|
|
||||||
with facilities for running those works, provided that you comply with
|
|
||||||
the terms of this License in conveying all material for which you do
|
|
||||||
not control copyright. Those thus making or running the covered works
|
|
||||||
for you must do so exclusively on your behalf, under your direction
|
|
||||||
and control, on terms that prohibit them from making any copies of
|
|
||||||
your copyrighted material outside their relationship with you.
|
|
||||||
|
|
||||||
Conveying under any other circumstances is permitted solely under
|
|
||||||
the conditions stated below. Sublicensing is not allowed; section 10
|
|
||||||
makes it unnecessary.
|
|
||||||
|
|
||||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
|
||||||
|
|
||||||
No covered work shall be deemed part of an effective technological
|
|
||||||
measure under any applicable law fulfilling obligations under article
|
|
||||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
|
||||||
similar laws prohibiting or restricting circumvention of such
|
|
||||||
measures.
|
|
||||||
|
|
||||||
When you convey a covered work, you waive any legal power to forbid
|
|
||||||
circumvention of technological measures to the extent such circumvention
|
|
||||||
is effected by exercising rights under this License with respect to
|
|
||||||
the covered work, and you disclaim any intention to limit operation or
|
|
||||||
modification of the work as a means of enforcing, against the work's
|
|
||||||
users, your or third parties' legal rights to forbid circumvention of
|
|
||||||
technological measures.
|
|
||||||
|
|
||||||
4. Conveying Verbatim Copies.
|
|
||||||
|
|
||||||
You may convey verbatim copies of the Program's source code as you
|
|
||||||
receive it, in any medium, provided that you conspicuously and
|
|
||||||
appropriately publish on each copy an appropriate copyright notice;
|
|
||||||
keep intact all notices stating that this License and any
|
|
||||||
non-permissive terms added in accord with section 7 apply to the code;
|
|
||||||
keep intact all notices of the absence of any warranty; and give all
|
|
||||||
recipients a copy of this License along with the Program.
|
|
||||||
|
|
||||||
You may charge any price or no price for each copy that you convey,
|
|
||||||
and you may offer support or warranty protection for a fee.
|
|
||||||
|
|
||||||
5. Conveying Modified Source Versions.
|
|
||||||
|
|
||||||
You may convey a work based on the Program, or the modifications to
|
|
||||||
produce it from the Program, in the form of source code under the
|
|
||||||
terms of section 4, provided that you also meet all of these conditions:
|
|
||||||
|
|
||||||
a) The work must carry prominent notices stating that you modified
|
|
||||||
it, and giving a relevant date.
|
|
||||||
|
|
||||||
b) The work must carry prominent notices stating that it is
|
|
||||||
released under this License and any conditions added under section
|
|
||||||
7. This requirement modifies the requirement in section 4 to
|
|
||||||
"keep intact all notices".
|
|
||||||
|
|
||||||
c) You must license the entire work, as a whole, under this
|
|
||||||
License to anyone who comes into possession of a copy. This
|
|
||||||
License will therefore apply, along with any applicable section 7
|
|
||||||
additional terms, to the whole of the work, and all its parts,
|
|
||||||
regardless of how they are packaged. This License gives no
|
|
||||||
permission to license the work in any other way, but it does not
|
|
||||||
invalidate such permission if you have separately received it.
|
|
||||||
|
|
||||||
d) If the work has interactive user interfaces, each must display
|
|
||||||
Appropriate Legal Notices; however, if the Program has interactive
|
|
||||||
interfaces that do not display Appropriate Legal Notices, your
|
|
||||||
work need not make them do so.
|
|
||||||
|
|
||||||
A compilation of a covered work with other separate and independent
|
|
||||||
works, which are not by their nature extensions of the covered work,
|
|
||||||
and which are not combined with it such as to form a larger program,
|
|
||||||
in or on a volume of a storage or distribution medium, is called an
|
|
||||||
"aggregate" if the compilation and its resulting copyright are not
|
|
||||||
used to limit the access or legal rights of the compilation's users
|
|
||||||
beyond what the individual works permit. Inclusion of a covered work
|
|
||||||
in an aggregate does not cause this License to apply to the other
|
|
||||||
parts of the aggregate.
|
|
||||||
|
|
||||||
6. Conveying Non-Source Forms.
|
|
||||||
|
|
||||||
You may convey a covered work in object code form under the terms
|
|
||||||
of sections 4 and 5, provided that you also convey the
|
|
||||||
machine-readable Corresponding Source under the terms of this License,
|
|
||||||
in one of these ways:
|
|
||||||
|
|
||||||
a) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by the
|
|
||||||
Corresponding Source fixed on a durable physical medium
|
|
||||||
customarily used for software interchange.
|
|
||||||
|
|
||||||
b) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by a
|
|
||||||
written offer, valid for at least three years and valid for as
|
|
||||||
long as you offer spare parts or customer support for that product
|
|
||||||
model, to give anyone who possesses the object code either (1) a
|
|
||||||
copy of the Corresponding Source for all the software in the
|
|
||||||
product that is covered by this License, on a durable physical
|
|
||||||
medium customarily used for software interchange, for a price no
|
|
||||||
more than your reasonable cost of physically performing this
|
|
||||||
conveying of source, or (2) access to copy the
|
|
||||||
Corresponding Source from a network server at no charge.
|
|
||||||
|
|
||||||
c) Convey individual copies of the object code with a copy of the
|
|
||||||
written offer to provide the Corresponding Source. This
|
|
||||||
alternative is allowed only occasionally and noncommercially, and
|
|
||||||
only if you received the object code with such an offer, in accord
|
|
||||||
with subsection 6b.
|
|
||||||
|
|
||||||
d) Convey the object code by offering access from a designated
|
|
||||||
place (gratis or for a charge), and offer equivalent access to the
|
|
||||||
Corresponding Source in the same way through the same place at no
|
|
||||||
further charge. You need not require recipients to copy the
|
|
||||||
Corresponding Source along with the object code. If the place to
|
|
||||||
copy the object code is a network server, the Corresponding Source
|
|
||||||
may be on a different server (operated by you or a third party)
|
|
||||||
that supports equivalent copying facilities, provided you maintain
|
|
||||||
clear directions next to the object code saying where to find the
|
|
||||||
Corresponding Source. Regardless of what server hosts the
|
|
||||||
Corresponding Source, you remain obligated to ensure that it is
|
|
||||||
available for as long as needed to satisfy these requirements.
|
|
||||||
|
|
||||||
e) Convey the object code using peer-to-peer transmission, provided
|
|
||||||
you inform other peers where the object code and Corresponding
|
|
||||||
Source of the work are being offered to the general public at no
|
|
||||||
charge under subsection 6d.
|
|
||||||
|
|
||||||
A separable portion of the object code, whose source code is excluded
|
|
||||||
from the Corresponding Source as a System Library, need not be
|
|
||||||
included in conveying the object code work.
|
|
||||||
|
|
||||||
A "User Product" is either (1) a "consumer product", which means any
|
|
||||||
tangible personal property which is normally used for personal, family,
|
|
||||||
or household purposes, or (2) anything designed or sold for incorporation
|
|
||||||
into a dwelling. In determining whether a product is a consumer product,
|
|
||||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
|
||||||
product received by a particular user, "normally used" refers to a
|
|
||||||
typical or common use of that class of product, regardless of the status
|
|
||||||
of the particular user or of the way in which the particular user
|
|
||||||
actually uses, or expects or is expected to use, the product. A product
|
|
||||||
is a consumer product regardless of whether the product has substantial
|
|
||||||
commercial, industrial or non-consumer uses, unless such uses represent
|
|
||||||
the only significant mode of use of the product.
|
|
||||||
|
|
||||||
"Installation Information" for a User Product means any methods,
|
|
||||||
procedures, authorization keys, or other information required to install
|
|
||||||
and execute modified versions of a covered work in that User Product from
|
|
||||||
a modified version of its Corresponding Source. The information must
|
|
||||||
suffice to ensure that the continued functioning of the modified object
|
|
||||||
code is in no case prevented or interfered with solely because
|
|
||||||
modification has been made.
|
|
||||||
|
|
||||||
If you convey an object code work under this section in, or with, or
|
|
||||||
specifically for use in, a User Product, and the conveying occurs as
|
|
||||||
part of a transaction in which the right of possession and use of the
|
|
||||||
User Product is transferred to the recipient in perpetuity or for a
|
|
||||||
fixed term (regardless of how the transaction is characterized), the
|
|
||||||
Corresponding Source conveyed under this section must be accompanied
|
|
||||||
by the Installation Information. But this requirement does not apply
|
|
||||||
if neither you nor any third party retains the ability to install
|
|
||||||
modified object code on the User Product (for example, the work has
|
|
||||||
been installed in ROM).
|
|
||||||
|
|
||||||
The requirement to provide Installation Information does not include a
|
|
||||||
requirement to continue to provide support service, warranty, or updates
|
|
||||||
for a work that has been modified or installed by the recipient, or for
|
|
||||||
the User Product in which it has been modified or installed. Access to a
|
|
||||||
network may be denied when the modification itself materially and
|
|
||||||
adversely affects the operation of the network or violates the rules and
|
|
||||||
protocols for communication across the network.
|
|
||||||
|
|
||||||
Corresponding Source conveyed, and Installation Information provided,
|
|
||||||
in accord with this section must be in a format that is publicly
|
|
||||||
documented (and with an implementation available to the public in
|
|
||||||
source code form), and must require no special password or key for
|
|
||||||
unpacking, reading or copying.
|
|
||||||
|
|
||||||
7. Additional Terms.
|
|
||||||
|
|
||||||
"Additional permissions" are terms that supplement the terms of this
|
|
||||||
License by making exceptions from one or more of its conditions.
|
|
||||||
Additional permissions that are applicable to the entire Program shall
|
|
||||||
be treated as though they were included in this License, to the extent
|
|
||||||
that they are valid under applicable law. If additional permissions
|
|
||||||
apply only to part of the Program, that part may be used separately
|
|
||||||
under those permissions, but the entire Program remains governed by
|
|
||||||
this License without regard to the additional permissions.
|
|
||||||
|
|
||||||
When you convey a copy of a covered work, you may at your option
|
|
||||||
remove any additional permissions from that copy, or from any part of
|
|
||||||
it. (Additional permissions may be written to require their own
|
|
||||||
removal in certain cases when you modify the work.) You may place
|
|
||||||
additional permissions on material, added by you to a covered work,
|
|
||||||
for which you have or can give appropriate copyright permission.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, for material you
|
|
||||||
add to a covered work, you may (if authorized by the copyright holders of
|
|
||||||
that material) supplement the terms of this License with terms:
|
|
||||||
|
|
||||||
a) Disclaiming warranty or limiting liability differently from the
|
|
||||||
terms of sections 15 and 16 of this License; or
|
|
||||||
|
|
||||||
b) Requiring preservation of specified reasonable legal notices or
|
|
||||||
author attributions in that material or in the Appropriate Legal
|
|
||||||
Notices displayed by works containing it; or
|
|
||||||
|
|
||||||
c) Prohibiting misrepresentation of the origin of that material, or
|
|
||||||
requiring that modified versions of such material be marked in
|
|
||||||
reasonable ways as different from the original version; or
|
|
||||||
|
|
||||||
d) Limiting the use for publicity purposes of names of licensors or
|
|
||||||
authors of the material; or
|
|
||||||
|
|
||||||
e) Declining to grant rights under trademark law for use of some
|
|
||||||
trade names, trademarks, or service marks; or
|
|
||||||
|
|
||||||
f) Requiring indemnification of licensors and authors of that
|
|
||||||
material by anyone who conveys the material (or modified versions of
|
|
||||||
it) with contractual assumptions of liability to the recipient, for
|
|
||||||
any liability that these contractual assumptions directly impose on
|
|
||||||
those licensors and authors.
|
|
||||||
|
|
||||||
All other non-permissive additional terms are considered "further
|
|
||||||
restrictions" within the meaning of section 10. If the Program as you
|
|
||||||
received it, or any part of it, contains a notice stating that it is
|
|
||||||
governed by this License along with a term that is a further
|
|
||||||
restriction, you may remove that term. If a license document contains
|
|
||||||
a further restriction but permits relicensing or conveying under this
|
|
||||||
License, you may add to a covered work material governed by the terms
|
|
||||||
of that license document, provided that the further restriction does
|
|
||||||
not survive such relicensing or conveying.
|
|
||||||
|
|
||||||
If you add terms to a covered work in accord with this section, you
|
|
||||||
must place, in the relevant source files, a statement of the
|
|
||||||
additional terms that apply to those files, or a notice indicating
|
|
||||||
where to find the applicable terms.
|
|
||||||
|
|
||||||
Additional terms, permissive or non-permissive, may be stated in the
|
|
||||||
form of a separately written license, or stated as exceptions;
|
|
||||||
the above requirements apply either way.
|
|
||||||
|
|
||||||
8. Termination.
|
|
||||||
|
|
||||||
You may not propagate or modify a covered work except as expressly
|
|
||||||
provided under this License. Any attempt otherwise to propagate or
|
|
||||||
modify it is void, and will automatically terminate your rights under
|
|
||||||
this License (including any patent licenses granted under the third
|
|
||||||
paragraph of section 11).
|
|
||||||
|
|
||||||
However, if you cease all violation of this License, then your
|
|
||||||
license from a particular copyright holder is reinstated (a)
|
|
||||||
provisionally, unless and until the copyright holder explicitly and
|
|
||||||
finally terminates your license, and (b) permanently, if the copyright
|
|
||||||
holder fails to notify you of the violation by some reasonable means
|
|
||||||
prior to 60 days after the cessation.
|
|
||||||
|
|
||||||
Moreover, your license from a particular copyright holder is
|
|
||||||
reinstated permanently if the copyright holder notifies you of the
|
|
||||||
violation by some reasonable means, this is the first time you have
|
|
||||||
received notice of violation of this License (for any work) from that
|
|
||||||
copyright holder, and you cure the violation prior to 30 days after
|
|
||||||
your receipt of the notice.
|
|
||||||
|
|
||||||
Termination of your rights under this section does not terminate the
|
|
||||||
licenses of parties who have received copies or rights from you under
|
|
||||||
this License. If your rights have been terminated and not permanently
|
|
||||||
reinstated, you do not qualify to receive new licenses for the same
|
|
||||||
material under section 10.
|
|
||||||
|
|
||||||
9. Acceptance Not Required for Having Copies.
|
|
||||||
|
|
||||||
You are not required to accept this License in order to receive or
|
|
||||||
run a copy of the Program. Ancillary propagation of a covered work
|
|
||||||
occurring solely as a consequence of using peer-to-peer transmission
|
|
||||||
to receive a copy likewise does not require acceptance. However,
|
|
||||||
nothing other than this License grants you permission to propagate or
|
|
||||||
modify any covered work. These actions infringe copyright if you do
|
|
||||||
not accept this License. Therefore, by modifying or propagating a
|
|
||||||
covered work, you indicate your acceptance of this License to do so.
|
|
||||||
|
|
||||||
10. Automatic Licensing of Downstream Recipients.
|
|
||||||
|
|
||||||
Each time you convey a covered work, the recipient automatically
|
|
||||||
receives a license from the original licensors, to run, modify and
|
|
||||||
propagate that work, subject to this License. You are not responsible
|
|
||||||
for enforcing compliance by third parties with this License.
|
|
||||||
|
|
||||||
An "entity transaction" is a transaction transferring control of an
|
|
||||||
organization, or substantially all assets of one, or subdividing an
|
|
||||||
organization, or merging organizations. If propagation of a covered
|
|
||||||
work results from an entity transaction, each party to that
|
|
||||||
transaction who receives a copy of the work also receives whatever
|
|
||||||
licenses to the work the party's predecessor in interest had or could
|
|
||||||
give under the previous paragraph, plus a right to possession of the
|
|
||||||
Corresponding Source of the work from the predecessor in interest, if
|
|
||||||
the predecessor has it or can get it with reasonable efforts.
|
|
||||||
|
|
||||||
You may not impose any further restrictions on the exercise of the
|
|
||||||
rights granted or affirmed under this License. For example, you may
|
|
||||||
not impose a license fee, royalty, or other charge for exercise of
|
|
||||||
rights granted under this License, and you may not initiate litigation
|
|
||||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
|
||||||
any patent claim is infringed by making, using, selling, offering for
|
|
||||||
sale, or importing the Program or any portion of it.
|
|
||||||
|
|
||||||
11. Patents.
|
|
||||||
|
|
||||||
A "contributor" is a copyright holder who authorizes use under this
|
|
||||||
License of the Program or a work on which the Program is based. The
|
|
||||||
work thus licensed is called the contributor's "contributor version".
|
|
||||||
|
|
||||||
A contributor's "essential patent claims" are all patent claims
|
|
||||||
owned or controlled by the contributor, whether already acquired or
|
|
||||||
hereafter acquired, that would be infringed by some manner, permitted
|
|
||||||
by this License, of making, using, or selling its contributor version,
|
|
||||||
but do not include claims that would be infringed only as a
|
|
||||||
consequence of further modification of the contributor version. For
|
|
||||||
purposes of this definition, "control" includes the right to grant
|
|
||||||
patent sublicenses in a manner consistent with the requirements of
|
|
||||||
this License.
|
|
||||||
|
|
||||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
|
||||||
patent license under the contributor's essential patent claims, to
|
|
||||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
|
||||||
propagate the contents of its contributor version.
|
|
||||||
|
|
||||||
In the following three paragraphs, a "patent license" is any express
|
|
||||||
agreement or commitment, however denominated, not to enforce a patent
|
|
||||||
(such as an express permission to practice a patent or covenant not to
|
|
||||||
sue for patent infringement). To "grant" such a patent license to a
|
|
||||||
party means to make such an agreement or commitment not to enforce a
|
|
||||||
patent against the party.
|
|
||||||
|
|
||||||
If you convey a covered work, knowingly relying on a patent license,
|
|
||||||
and the Corresponding Source of the work is not available for anyone
|
|
||||||
to copy, free of charge and under the terms of this License, through a
|
|
||||||
publicly available network server or other readily accessible means,
|
|
||||||
then you must either (1) cause the Corresponding Source to be so
|
|
||||||
available, or (2) arrange to deprive yourself of the benefit of the
|
|
||||||
patent license for this particular work, or (3) arrange, in a manner
|
|
||||||
consistent with the requirements of this License, to extend the patent
|
|
||||||
license to downstream recipients. "Knowingly relying" means you have
|
|
||||||
actual knowledge that, but for the patent license, your conveying the
|
|
||||||
covered work in a country, or your recipient's use of the covered work
|
|
||||||
in a country, would infringe one or more identifiable patents in that
|
|
||||||
country that you have reason to believe are valid.
|
|
||||||
|
|
||||||
If, pursuant to or in connection with a single transaction or
|
|
||||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
|
||||||
covered work, and grant a patent license to some of the parties
|
|
||||||
receiving the covered work authorizing them to use, propagate, modify
|
|
||||||
or convey a specific copy of the covered work, then the patent license
|
|
||||||
you grant is automatically extended to all recipients of the covered
|
|
||||||
work and works based on it.
|
|
||||||
|
|
||||||
A patent license is "discriminatory" if it does not include within
|
|
||||||
the scope of its coverage, prohibits the exercise of, or is
|
|
||||||
conditioned on the non-exercise of one or more of the rights that are
|
|
||||||
specifically granted under this License. You may not convey a covered
|
|
||||||
work if you are a party to an arrangement with a third party that is
|
|
||||||
in the business of distributing software, under which you make payment
|
|
||||||
to the third party based on the extent of your activity of conveying
|
|
||||||
the work, and under which the third party grants, to any of the
|
|
||||||
parties who would receive the covered work from you, a discriminatory
|
|
||||||
patent license (a) in connection with copies of the covered work
|
|
||||||
conveyed by you (or copies made from those copies), or (b) primarily
|
|
||||||
for and in connection with specific products or compilations that
|
|
||||||
contain the covered work, unless you entered into that arrangement,
|
|
||||||
or that patent license was granted, prior to 28 March 2007.
|
|
||||||
|
|
||||||
Nothing in this License shall be construed as excluding or limiting
|
|
||||||
any implied license or other defenses to infringement that may
|
|
||||||
otherwise be available to you under applicable patent law.
|
|
||||||
|
|
||||||
12. No Surrender of Others' Freedom.
|
|
||||||
|
|
||||||
If conditions are imposed on you (whether by court order, agreement or
|
|
||||||
otherwise) that contradict the conditions of this License, they do not
|
|
||||||
excuse you from the conditions of this License. If you cannot convey a
|
|
||||||
covered work so as to satisfy simultaneously your obligations under this
|
|
||||||
License and any other pertinent obligations, then as a consequence you may
|
|
||||||
not convey it at all. For example, if you agree to terms that obligate you
|
|
||||||
to collect a royalty for further conveying from those to whom you convey
|
|
||||||
the Program, the only way you could satisfy both those terms and this
|
|
||||||
License would be to refrain entirely from conveying the Program.
|
|
||||||
|
|
||||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, if you modify the
|
|
||||||
Program, your modified version must prominently offer all users
|
|
||||||
interacting with it remotely through a computer network (if your version
|
|
||||||
supports such interaction) an opportunity to receive the Corresponding
|
|
||||||
Source of your version by providing access to the Corresponding Source
|
|
||||||
from a network server at no charge, through some standard or customary
|
|
||||||
means of facilitating copying of software. This Corresponding Source
|
|
||||||
shall include the Corresponding Source for any work covered by version 3
|
|
||||||
of the GNU General Public License that is incorporated pursuant to the
|
|
||||||
following paragraph.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, you have
|
|
||||||
permission to link or combine any covered work with a work licensed
|
|
||||||
under version 3 of the GNU General Public License into a single
|
|
||||||
combined work, and to convey the resulting work. The terms of this
|
|
||||||
License will continue to apply to the part which is the covered work,
|
|
||||||
but the work with which it is combined will remain governed by version
|
|
||||||
3 of the GNU General Public License.
|
|
||||||
|
|
||||||
14. Revised Versions of this License.
|
|
||||||
|
|
||||||
The Free Software Foundation may publish revised and/or new versions of
|
|
||||||
the GNU Affero General Public License from time to time. Such new versions
|
|
||||||
will be similar in spirit to the present version, but may differ in detail to
|
|
||||||
address new problems or concerns.
|
|
||||||
|
|
||||||
Each version is given a distinguishing version number. If the
|
|
||||||
Program specifies that a certain numbered version of the GNU Affero General
|
|
||||||
Public License "or any later version" applies to it, you have the
|
|
||||||
option of following the terms and conditions either of that numbered
|
|
||||||
version or of any later version published by the Free Software
|
|
||||||
Foundation. If the Program does not specify a version number of the
|
|
||||||
GNU Affero General Public License, you may choose any version ever published
|
|
||||||
by the Free Software Foundation.
|
|
||||||
|
|
||||||
If the Program specifies that a proxy can decide which future
|
|
||||||
versions of the GNU Affero General Public License can be used, that proxy's
|
|
||||||
public statement of acceptance of a version permanently authorizes you
|
|
||||||
to choose that version for the Program.
|
|
||||||
|
|
||||||
Later license versions may give you additional or different
|
|
||||||
permissions. However, no additional obligations are imposed on any
|
|
||||||
author or copyright holder as a result of your choosing to follow a
|
|
||||||
later version.
|
|
||||||
|
|
||||||
15. Disclaimer of Warranty.
|
|
||||||
|
|
||||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
|
||||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
|
||||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
|
||||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
|
||||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
|
||||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
|
||||||
|
|
||||||
16. Limitation of Liability.
|
|
||||||
|
|
||||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
|
||||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
|
||||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
|
||||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
|
||||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
|
||||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
|
||||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
|
||||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
|
||||||
SUCH DAMAGES.
|
|
||||||
|
|
||||||
17. Interpretation of Sections 15 and 16.
|
|
||||||
|
|
||||||
If the disclaimer of warranty and limitation of liability provided
|
|
||||||
above cannot be given local legal effect according to their terms,
|
|
||||||
reviewing courts shall apply local law that most closely approximates
|
|
||||||
an absolute waiver of all civil liability in connection with the
|
|
||||||
Program, unless a warranty or assumption of liability accompanies a
|
|
||||||
copy of the Program in return for a fee.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
How to Apply These Terms to Your New Programs
|
|
||||||
|
|
||||||
If you develop a new program, and you want it to be of the greatest
|
|
||||||
possible use to the public, the best way to achieve this is to make it
|
|
||||||
free software which everyone can redistribute and change under these terms.
|
|
||||||
|
|
||||||
To do so, attach the following notices to the program. It is safest
|
|
||||||
to attach them to the start of each source file to most effectively
|
|
||||||
state the exclusion of warranty; and each file should have at least
|
|
||||||
the "copyright" line and a pointer to where the full notice is found.
|
|
||||||
|
|
||||||
<one line to give the program's name and a brief idea of what it does.>
|
|
||||||
Copyright (C) <year> <name of author>
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
|
||||||
it under the terms of the GNU Affero General Public License as published
|
|
||||||
by the Free Software Foundation, either version 3 of the License, or
|
|
||||||
(at your option) any later version.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU Affero General Public License
|
|
||||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
Also add information on how to contact you by electronic and paper mail.
|
|
||||||
|
|
||||||
If your software can interact with users remotely through a computer
|
|
||||||
network, you should also make sure that it provides a way for users to
|
|
||||||
get its source. For example, if your program is a web application, its
|
|
||||||
interface could display a "Source" link that leads users to an archive
|
|
||||||
of the code. There are many ways you could offer source, and different
|
|
||||||
solutions will be better for different programs; see section 13 for the
|
|
||||||
specific requirements.
|
|
||||||
|
|
||||||
You should also get your employer (if you work as a programmer) or school,
|
|
||||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
|
||||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
|
||||||
<https://www.gnu.org/licenses/>.
|
|
|
@@ -1,102 +0,0 @@
|
||||||
const { PrismaClient } = require('@prisma/client')
|
|
||||||
const redis = require('../utils/redis.js')
|
|
||||||
const prisma = new PrismaClient()
|
|
||||||
|
|
||||||
function createSitemapXML(urls) {
|
|
||||||
const xml = urls.map(url => `
|
|
||||||
<url>
|
|
||||||
<loc>${url}</loc>
|
|
||||||
<changefreq>never</changefreq>
|
|
||||||
<priority>0.7</priority>
|
|
||||||
</url>
|
|
||||||
`).join('');
|
|
||||||
|
|
||||||
return `<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
|
|
||||||
${xml}
|
|
||||||
</urlset>`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function createSitemapIndexXML(sitemaps) {
|
|
||||||
const xml = sitemaps.map((sitemap, index) => `
|
|
||||||
<sitemap>
|
|
||||||
<loc>https://api.preservetube.com/${sitemap}</loc>
|
|
||||||
<lastmod>${new Date().toISOString()}</lastmod>
|
|
||||||
</sitemap>
|
|
||||||
`).join('');
|
|
||||||
|
|
||||||
return `<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
|
|
||||||
${xml}
|
|
||||||
</sitemapindex>`;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getLatest = async (req, res) => {
|
|
||||||
let json
|
|
||||||
const cached = await redis.get('latest')
|
|
||||||
|
|
||||||
if (cached) {
|
|
||||||
json = JSON.parse(cached)
|
|
||||||
} else {
|
|
||||||
json = await prisma.videos.findMany({
|
|
||||||
take: 90,
|
|
||||||
orderBy: [
|
|
||||||
{
|
|
||||||
archived: 'desc'
|
|
||||||
}
|
|
||||||
],
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
title: true,
|
|
||||||
thumbnail: true,
|
|
||||||
published: true,
|
|
||||||
archived: true,
|
|
||||||
channel: true,
|
|
||||||
channelId: true,
|
|
||||||
channelAvatar: true,
|
|
||||||
channelVerified: true
|
|
||||||
}
|
|
||||||
})
|
|
||||||
await redis.set('latest', JSON.stringify(json), 'EX', 3600)
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json(json)
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getSitemap = async (req, res) => {
|
|
||||||
const cachedSitemapIndex = await redis.get('sitemap-index');
|
|
||||||
if (cachedSitemapIndex) {
|
|
||||||
res.header('Content-Type', 'application/xml');
|
|
||||||
return res.send(cachedSitemapIndex);
|
|
||||||
}
|
|
||||||
|
|
||||||
const dbVideos = await prisma.videos.findMany({
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const urls = dbVideos.map((video) => `https://preservetube.com/watch?v=${video.id}`);
|
|
||||||
const sitemaps = [];
|
|
||||||
for (let i = 0; i < urls.length; i += 50000) {
|
|
||||||
const batch = urls.slice(i, i + 50000);
|
|
||||||
await redis.set(`sitemap-${sitemaps.length}`, createSitemapXML(batch), 'EX', 86400);
|
|
||||||
sitemaps.push(`sitemap-${sitemaps.length}.xml`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const sitemapIndexXML = createSitemapIndexXML(sitemaps);
|
|
||||||
await redis.set('sitemap-index', sitemapIndexXML, 'EX', 86400);
|
|
||||||
|
|
||||||
res.header('Content-Type', 'application/xml');
|
|
||||||
res.send(sitemapIndexXML);
|
|
||||||
};
|
|
||||||
|
|
||||||
exports.getSubSitemap = async (req, res) => {
|
|
||||||
const cachedSitemap = await redis.get(`sitemap-${req.params.index}`);
|
|
||||||
if (cachedSitemap) {
|
|
||||||
res.header('Content-Type', 'application/xml');
|
|
||||||
return res.send(cachedSitemap);
|
|
||||||
}
|
|
||||||
|
|
||||||
res.status(404).send('');
|
|
||||||
};
|
|
|
@@ -1,52 +0,0 @@
|
||||||
const crypto = require('node:crypto')
|
|
||||||
const validate = require('../utils/validate.js')
|
|
||||||
const redis = require('../utils/redis.js')
|
|
||||||
const { RedisRateLimiter } = require('rolling-rate-limiter')
|
|
||||||
const { PrismaClient } = require('@prisma/client')
|
|
||||||
const prisma = new PrismaClient()
|
|
||||||
|
|
||||||
const limiter = new RedisRateLimiter({
|
|
||||||
client: redis,
|
|
||||||
namespace: 'search:',
|
|
||||||
interval: 5 * 60 * 1000,
|
|
||||||
maxInInterval: 5
|
|
||||||
})
|
|
||||||
|
|
||||||
exports.searchVideo = async (req, res) => {
|
|
||||||
const ipHash = crypto.createHash('sha256').update(req.headers['x-userip'] || '0.0.0.0').digest('hex')
|
|
||||||
const isLimited = await limiter.limit(ipHash)
|
|
||||||
if (isLimited) return res.status(429).send('error-You have been ratelimited.')
|
|
||||||
|
|
||||||
const id = await validate.validateVideoInput(req.query.search)
|
|
||||||
if (id.fail) {
|
|
||||||
const videos = await prisma.videos.findMany({
|
|
||||||
where: {
|
|
||||||
title: {
|
|
||||||
contains: req.query.search,
|
|
||||||
mode: 'insensitive'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
res.json(videos)
|
|
||||||
} else {
|
|
||||||
res.send(`redirect-${process.env.FRONTEND}/watch?v=${id}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.searchPlaylist = async (req, res) => {
|
|
||||||
const id = await validate.validatePlaylistInput(req.query.url)
|
|
||||||
if (id.fail) {
|
|
||||||
res.status(500).send(id.message)
|
|
||||||
} else {
|
|
||||||
res.redirect(`${process.env.FRONTEND}/playlist?list=${id}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.searchChannel = async (req, res) => {
|
|
||||||
const id = await validate.validateChannelInput(req.query.url)
|
|
||||||
if (id.fail) {
|
|
||||||
res.status(500).send(id.message)
|
|
||||||
} else {
|
|
||||||
res.redirect(`${process.env.FRONTEND}/channel/${id}`)
|
|
||||||
}
|
|
||||||
}
|
|
|
@@ -1,32 +0,0 @@
|
||||||
const { PrismaClient } = require('@prisma/client')
|
|
||||||
const redis = require('../utils/redis.js')
|
|
||||||
const prisma = new PrismaClient()
|
|
||||||
|
|
||||||
exports.getReports = async (req, res) => {
|
|
||||||
let json
|
|
||||||
const cached = await redis.get('transparency')
|
|
||||||
|
|
||||||
if (cached) {
|
|
||||||
json = JSON.parse(cached)
|
|
||||||
} else {
|
|
||||||
json = (await prisma.reports.findMany()).map(r => {
|
|
||||||
return {
|
|
||||||
...r,
|
|
||||||
details: (r.details).split('<').join('&lt;').split('>').join('&gt;'),
|
|
||||||
date: (r.date).toISOString().slice(0,10)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
await redis.set('transparency', JSON.stringify(json), 'EX', 3600)
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json(json)
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getReports = async (req, res) => {
|
|
||||||
const reports = await prisma.reports.findMany({
|
|
||||||
where: {
|
|
||||||
target: req.params.id
|
|
||||||
}
|
|
||||||
})
|
|
||||||
res.json(reports)
|
|
||||||
}
|
|
|
@@ -1,200 +0,0 @@
|
||||||
const { PrismaClient } = require('@prisma/client')
|
|
||||||
const prisma = new PrismaClient()
|
|
||||||
|
|
||||||
const DOMPurify = require('isomorphic-dompurify')
|
|
||||||
const rtm = require('readable-to-ms')
|
|
||||||
const metadata = require('../utils/metadata.js')
|
|
||||||
const redis = require('../utils/redis.js')
|
|
||||||
|
|
||||||
exports.getVideo = async (req, res) => {
|
|
||||||
let info
|
|
||||||
const cached = await redis.get(`video:${req.params.id}`)
|
|
||||||
|
|
||||||
if (cached) {
|
|
||||||
info = JSON.parse(cached)
|
|
||||||
} else {
|
|
||||||
info = await prisma.videos.findFirst({
|
|
||||||
where: {
|
|
||||||
id: req.params.id
|
|
||||||
},
|
|
||||||
select: {
|
|
||||||
title: true,
|
|
||||||
description: true,
|
|
||||||
thumbnail: true,
|
|
||||||
source: true,
|
|
||||||
published: true,
|
|
||||||
archived: true,
|
|
||||||
channel: true,
|
|
||||||
channelId: true,
|
|
||||||
channelAvatar: true,
|
|
||||||
channelVerified: true,
|
|
||||||
disabled: true,
|
|
||||||
hasBeenReported: true
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!info) return res.json({ error: '404' })
|
|
||||||
await redis.set(`video:${req.params.id}`, JSON.stringify(info), 'EX', 3600)
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
...info,
|
|
||||||
description: DOMPurify.sanitize(info.description),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getChannel = async (req, res) => {
|
|
||||||
const cached = await redis.get(`channel:${req.params.id}`)
|
|
||||||
if (cached) return res.json(JSON.parse(cached))
|
|
||||||
|
|
||||||
const [videos, channel] = await Promise.all([
|
|
||||||
metadata.getChannelVideos(req.params.id),
|
|
||||||
metadata.getChannel(req.params.id)
|
|
||||||
])
|
|
||||||
|
|
||||||
if (!videos || !channel || videos.error || channel.error) {
|
|
||||||
return res.json({ error: '404' });
|
|
||||||
}
|
|
||||||
|
|
||||||
const archived = await prisma.videos.findMany({
|
|
||||||
where: {
|
|
||||||
channelId: req.params.id
|
|
||||||
},
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
title: true,
|
|
||||||
thumbnail: true,
|
|
||||||
published: true,
|
|
||||||
archived: true
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
const processedVideos = videos.map(video => {
|
|
||||||
const date = !isNaN(new Date(video.published.text).getTime()) ? new Date(video.published.text) : new Date((new Date()).getTime() - rtm(video.published.text).ms); // life is great.
|
|
||||||
return {
|
|
||||||
id: video.id,
|
|
||||||
title: video.title.text,
|
|
||||||
thumbnail: video.thumbnails[0].url,
|
|
||||||
published: new Date(date).toISOString().slice(0, 10)
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
archived.forEach(v => {
|
|
||||||
const existingVideoIndex = processedVideos.findIndex(video => video.id === v.id);
|
|
||||||
if (existingVideoIndex !== -1) {
|
|
||||||
processedVideos[existingVideoIndex] = v;
|
|
||||||
} else {
|
|
||||||
processedVideos.push({ ...v, deleted: undefined });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
processedVideos.sort((a, b) => new Date(b.published) - new Date(a.published));
|
|
||||||
|
|
||||||
const json = {
|
|
||||||
name: channel.metadata.title,
|
|
||||||
avatar: channel.metadata.avatar[0].url,
|
|
||||||
verified: channel.header.author?.is_verified,
|
|
||||||
videos: processedVideos
|
|
||||||
}
|
|
||||||
await redis.set(`channel:${req.params.id}`, JSON.stringify(json), 'EX', 3600)
|
|
||||||
res.json(json)
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getOnlyChannelVideos = async (req, res) => {
|
|
||||||
const cached = await redis.get(`channelVideos:${req.params.id}`)
|
|
||||||
if (cached) return res.json(JSON.parse(cached))
|
|
||||||
|
|
||||||
const archived = await prisma.videos.findMany({
|
|
||||||
where: {
|
|
||||||
channelId: req.params.id
|
|
||||||
},
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
title: true,
|
|
||||||
thumbnail: true,
|
|
||||||
published: true,
|
|
||||||
archived: true
|
|
||||||
},
|
|
||||||
orderBy: {
|
|
||||||
published: 'desc'
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
const json = {
|
|
||||||
videos: archived
|
|
||||||
}
|
|
||||||
await redis.set(`channelVideos:${req.params.id}`, JSON.stringify(json), 'EX', 3600)
|
|
||||||
res.json(json)
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getPlaylist = async (req, res) => {
|
|
||||||
const cached = await redis.get(`playlist:${req.params.id}`)
|
|
||||||
if (cached) return res.json(JSON.parse(cached))
|
|
||||||
|
|
||||||
const playlist = await metadata.getPlaylistVideos(req.params.id)
|
|
||||||
if (!playlist || playlist.error) return res.json({ error: '404' })
|
|
||||||
|
|
||||||
const playlistArchived = await prisma.videos.findMany({
|
|
||||||
where: {
|
|
||||||
playlist: req.params.id
|
|
||||||
},
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
title: true,
|
|
||||||
thumbnail: true,
|
|
||||||
published: true,
|
|
||||||
archived: true
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
const allVideos = playlist.relatedStreams.map(video => ({
|
|
||||||
id: video.url.replace('/watch?v=', ''),
|
|
||||||
published: new Date(video.uploaded).toISOString().slice(0, 10),
|
|
||||||
...video
|
|
||||||
}));
|
|
||||||
|
|
||||||
await Promise.all(playlistArchived.map(async (v) => {
|
|
||||||
const allVideo = allVideos.find(o => o.id == v.id);
|
|
||||||
if (allVideo) {
|
|
||||||
const index = allVideos.findIndex(o => o.id == v.id);
|
|
||||||
allVideos[index] = v;
|
|
||||||
} else {
|
|
||||||
const live = await metadata.getVideoMetadata(v.id);
|
|
||||||
allVideos.push({
|
|
||||||
...v,
|
|
||||||
deleted: live.error ? true : false
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}));
|
|
||||||
|
|
||||||
await Promise.all(allVideos.filter(v => !v.archived).map(async (v) => {
|
|
||||||
const video = await prisma.videos.findFirst({
|
|
||||||
where: {
|
|
||||||
id: v.id
|
|
||||||
},
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
title: true,
|
|
||||||
thumbnail: true,
|
|
||||||
published: true,
|
|
||||||
archived: true
|
|
||||||
}
|
|
||||||
});
|
|
||||||
if (video) {
|
|
||||||
const index = allVideos.findIndex(o => o.id == v.id);
|
|
||||||
allVideos[index] = video;
|
|
||||||
}
|
|
||||||
}));
|
|
||||||
|
|
||||||
allVideos.sort((a, b) => new Date(b.published) - new Date(a.published));
|
|
||||||
|
|
||||||
const json = {
|
|
||||||
name: playlist.name,
|
|
||||||
channel: playlist.uploader,
|
|
||||||
url: playlist.uploaderUrl,
|
|
||||||
avatar: playlist.uploaderAvatar,
|
|
||||||
videos: allVideos
|
|
||||||
}
|
|
||||||
await redis.set(`playlist:${req.params.id}`, JSON.stringify(json), 'EX', 3600)
|
|
||||||
res.json(json)
|
|
||||||
}
|
|
|
@ -1,335 +0,0 @@
|
||||||
const fs = require('node:fs')
|
|
||||||
const crypto = require('node:crypto')
|
|
||||||
const { RedisRateLimiter } = require('rolling-rate-limiter')
|
|
||||||
const rtm = require('readable-to-ms')
|
|
||||||
|
|
||||||
const upload = require('../utils/upload.js')
|
|
||||||
const ytdlp = require('../utils/ytdlp.js')
|
|
||||||
const redis = require('../utils/redis.js')
|
|
||||||
|
|
||||||
const validate = require('../utils/validate.js')
|
|
||||||
const metadata = require('../utils/metadata.js')
|
|
||||||
const websocket = require('../utils/websocket.js')
|
|
||||||
const captcha = require("../utils/captcha.js")
|
|
||||||
const logger = require("../utils/logger.js")
|
|
||||||
|
|
||||||
const { PrismaClient } = require('@prisma/client')
|
|
||||||
const prisma = new PrismaClient()
|
|
||||||
|
|
||||||
const limiter = new RedisRateLimiter({
|
|
||||||
client: redis,
|
|
||||||
namespace: 'autodownload:',
|
|
||||||
interval: 24 * 60 * 60 * 1000,
|
|
||||||
maxInInterval: 5
|
|
||||||
})
|
|
||||||
|
|
||||||
exports.save = async (ws, req) => {
|
|
||||||
logger.info({ message: `${req.path} ${JSON.stringify(req.query)}` })
|
|
||||||
|
|
||||||
const id = await validate.validateVideoInput(req.query.url)
|
|
||||||
if (id.fail) {
|
|
||||||
ws.send(`ERROR - ${id.message}`)
|
|
||||||
return ws.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
if (await redis.get(id)) {
|
|
||||||
ws.send('DATA - Someone is already downloading this video...')
|
|
||||||
return ws.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
if (await redis.get(`blacklist:${id}`)) {
|
|
||||||
ws.send('DATA - You can\'t download that. The video is blacklisted.')
|
|
||||||
return ws.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
const already = await prisma.videos.findFirst({
|
|
||||||
where: {
|
|
||||||
id: id
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (already) return ws.send(`DONE - ${process.env.FRONTEND}/watch?v=${id}`)
|
|
||||||
|
|
||||||
ws.send('DATA - This process is automatic. Your video will start archiving shortly.')
|
|
||||||
ws.send('CAPTCHA - Solving a cryptographic challenge before downloading.')
|
|
||||||
|
|
||||||
ws.on('message', async function(msg) {
|
|
||||||
if (msg == 'alive') return
|
|
||||||
|
|
||||||
if (await redis.get(id) != 'downloading') {
|
|
||||||
await redis.set(id, 'downloading', 'EX', 300)
|
|
||||||
const confirm = await captcha.checkCaptcha(msg)
|
|
||||||
|
|
||||||
if (confirm) startDownloading()
|
|
||||||
else {
|
|
||||||
await redis.del(id)
|
|
||||||
ws.send('DATA - You little goofy goober tried to mess with the captcha...')
|
|
||||||
ws.close()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ws.send('DATA - You already sent captcha reply...')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
async function startDownloading() {
|
|
||||||
const download = await ytdlp.downloadVideo(`https://www.youtube.com/watch?v=${id}`, ws, id)
|
|
||||||
if (download.fail) {
|
|
||||||
await redis.del(id)
|
|
||||||
ws.send(`DATA - ${download.message}`)
|
|
||||||
ws.close()
|
|
||||||
} else {
|
|
||||||
const file = fs.readdirSync("videos").find(f => f.includes(id))
|
|
||||||
if (file) {
|
|
||||||
ws.send('DATA - Uploading file...')
|
|
||||||
const videoUrl = await upload.uploadVideo(`./videos/${id}.mp4`)
|
|
||||||
fs.unlinkSync(`./videos/${id}.mp4`)
|
|
||||||
|
|
||||||
const uploaded = await websocket.createDatabaseVideo(id, videoUrl)
|
|
||||||
if (uploaded != 'success') {
|
|
||||||
ws.send(`DATA - Error while uploading - ${JSON.stringify(uploaded)}`)
|
|
||||||
} else {
|
|
||||||
ws.send(`DONE - ${process.env.FRONTEND}/watch?v=${id}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await redis.del(id)
|
|
||||||
ws.close();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.playlist = async (ws, req) => {
|
|
||||||
logger.info({ message: `${req.path} ${JSON.stringify(req.query)}` })
|
|
||||||
|
|
||||||
const playlistId = await validate.validatePlaylistInput(req.query.url)
|
|
||||||
if (playlistId.fail) {
|
|
||||||
ws.send(`ERROR - ${playlistId.message}`)
|
|
||||||
return ws.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
let status = 'captcha'
|
|
||||||
ws.send('DATA - This process is automatic. Your video will start archiving shortly.')
|
|
||||||
ws.send('CAPTCHA - Solving a cryptographic challenge before downloading.')
|
|
||||||
|
|
||||||
ws.on('message', async function(msg) {
|
|
||||||
if (msg == 'alive') return
|
|
||||||
|
|
||||||
if (status == 'captcha') {
|
|
||||||
status = 'downloading'
|
|
||||||
const confirm = await captcha.checkCaptcha(msg)
|
|
||||||
|
|
||||||
if (confirm) startDownloading()
|
|
||||||
else {
|
|
||||||
await redis.del(id)
|
|
||||||
ws.send('DATA - You little goofy goober tried to mess with the captcha...')
|
|
||||||
ws.close()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ws.send('DATA - You already sent captcha reply...')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
async function startDownloading() {
|
|
||||||
const playlist = await metadata.getPlaylistVideos(playlistId)
|
|
||||||
for (const video of playlist.relatedStreams.slice(0, 5)) {
|
|
||||||
if (ws.readyState !== ws.OPEN) {
|
|
||||||
return logger.info({ message: `Stopped downloading ${playlistId}, websocket is closed` })
|
|
||||||
}
|
|
||||||
|
|
||||||
const id = video.url.match(/[?&]v=([^&]+)/)[1]
|
|
||||||
|
|
||||||
const already = await prisma.videos.findFirst({
|
|
||||||
where: {
|
|
||||||
id: id
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (already) {
|
|
||||||
ws.send(`DATA - Already downloaded ${video.title}`)
|
|
||||||
await prisma.videos.updateMany({
|
|
||||||
where: {
|
|
||||||
id: id
|
|
||||||
},
|
|
||||||
data: {
|
|
||||||
playlist: playlistId
|
|
||||||
}
|
|
||||||
})
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (await redis.get(id)) {
|
|
||||||
ws.send(`DATA - Someone is already downloading ${video.title}, skipping.`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (await redis.get(`blacklist:${id}`)) {
|
|
||||||
ws.send(`DATA - ${video.title} is blacklisted from downloading, skipping`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
ws.send(`INFO - Downloading ${video.title}<br><br>`)
|
|
||||||
await redis.set(id, 'downloading', 'EX', 300)
|
|
||||||
|
|
||||||
const download = await ytdlp.downloadVideo('https://www.youtube.com' + video.url, ws, id)
|
|
||||||
if (download.fail) {
|
|
||||||
ws.send(`DATA - ${download.message}`)
|
|
||||||
await redis.del(id)
|
|
||||||
continue
|
|
||||||
} else {
|
|
||||||
const file = fs.readdirSync("./videos").find(f => f.includes(id))
|
|
||||||
if (file) {
|
|
||||||
try {
|
|
||||||
ws.send(`DATA - Downloaded ${video.title}`)
|
|
||||||
ws.send(`DATA - Uploading ${video.title}`)
|
|
||||||
|
|
||||||
const videoUrl = await upload.uploadVideo(`./videos/${id}.mp4`)
|
|
||||||
ws.send(`DATA - Uploaded ${video.title}`)
|
|
||||||
fs.unlinkSync(`./videos/${id}.mp4`)
|
|
||||||
|
|
||||||
await websocket.createDatabaseVideo(id, videoUrl, playlistId)
|
|
||||||
ws.send(`DATA - Created video page for ${video.title}`)
|
|
||||||
} catch (e) {
|
|
||||||
ws.send(`DATA - Failed downloading video ${video.title}. Going to next video`)
|
|
||||||
logger.error(e)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ws.send(`DATA - Failed to find file for ${video.title}. Going to next video in the playlist`)
|
|
||||||
}
|
|
||||||
|
|
||||||
await redis.del(id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ws.send(`DONE - ${process.env.FRONTEND}/playlist?list=${playlistId}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.channel = async (ws, req) => {
|
|
||||||
logger.info({ message: `${req.path} ${JSON.stringify(req.query)}` })
|
|
||||||
|
|
||||||
const channelId = await validate.validateChannelInput(req.query.url)
|
|
||||||
if (channelId.fail) {
|
|
||||||
ws.send(`ERROR - ${channelId.message}`)
|
|
||||||
return ws.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
let status = 'captcha'
|
|
||||||
ws.send('DATA - This process is automatic. Your video will start archiving shortly.')
|
|
||||||
ws.send('CAPTCHA - Solving a cryptographic challenge before downloading.')
|
|
||||||
|
|
||||||
ws.on('message', async function(msg) {
|
|
||||||
if (msg == 'alive') return
|
|
||||||
|
|
||||||
if (status == 'captcha') {
|
|
||||||
status = 'downloading'
|
|
||||||
const confirm = await captcha.checkCaptcha(msg)
|
|
||||||
|
|
||||||
if (confirm) startDownloading()
|
|
||||||
else {
|
|
||||||
ws.send('DATA - You little goofy goober tried to mess with the captcha...')
|
|
||||||
ws.close()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ws.send('DATA - You already sent captcha reply...')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
async function startDownloading() {
|
|
||||||
const videos = await metadata.getChannelVideos(channelId)
|
|
||||||
|
|
||||||
for (const video of videos.slice(0, 5)) {
|
|
||||||
if (ws.readyState !== ws.OPEN) {
|
|
||||||
return logger.info({ message: `Stopped downloading ${channelId}, websocket is closed` })
|
|
||||||
}
|
|
||||||
|
|
||||||
const already = await prisma.videos.findFirst({
|
|
||||||
where: {
|
|
||||||
id: video.id
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (already) {
|
|
||||||
ws.send(`DATA - Already downloaded ${video.title.text}`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (await redis.get(video.id)) {
|
|
||||||
ws.send(`DATA - Someone is already downloading ${video.title.text}, skipping.`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (await redis.get(`blacklist:${video.id}`)) {
|
|
||||||
ws.send(`DATA - ${video.title.text} is blacklisted from downloading, skipping`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
ws.send(`INFO - Downloading ${video.title.text}<br><br>`)
|
|
||||||
await redis.set(video.id, 'downloading', 'EX', 300)
|
|
||||||
|
|
||||||
const download = await ytdlp.downloadVideo(`https://www.youtube.com/watch?v=${video.id}`, ws, video.id)
|
|
||||||
if (download.fail) {
|
|
||||||
ws.send(`DATA - ${download.message}`)
|
|
||||||
await redis.del(video.id)
|
|
||||||
continue
|
|
||||||
} else {
|
|
||||||
const file = fs.readdirSync("./videos").find(f => f.includes(video.id))
|
|
||||||
if (file) {
|
|
||||||
try {
|
|
||||||
ws.send(`DATA - Downloaded ${video.title.text}`)
|
|
||||||
ws.send(`DATA - Uploading ${video.title.text}`)
|
|
||||||
|
|
||||||
const videoUrl = await upload.uploadVideo(`./videos/${video.id}.mp4`)
|
|
||||||
ws.send(`DATA - Uploaded ${video.title.text}`)
|
|
||||||
fs.unlinkSync(`./videos/${video.id}.mp4`)
|
|
||||||
|
|
||||||
await websocket.createDatabaseVideo(video.id, videoUrl)
|
|
||||||
ws.send(`DATA - Created video page for ${video.title.text}`)
|
|
||||||
} catch (e) {
|
|
||||||
ws.send(`DATA - Failed downloading video ${video.title.text}. Going to next video`)
|
|
||||||
logger.error(e)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ws.send(`DATA - Failed to find file for ${video.title.text}. Going to next video`)
|
|
||||||
}
|
|
||||||
|
|
||||||
await redis.del(video.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ws.send(`DONE - ${process.env.FRONTEND}/channel/${channelId}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.addAutodownload = async (req, res) => {
|
|
||||||
const confirm = await captcha.checkCaptcha(req.query.captcha)
|
|
||||||
if (!confirm) return res.status(500).send('You little goofy goober tried to mess with the captcha...')
|
|
||||||
|
|
||||||
const channelId = await validate.validateChannelInput(req.query.url)
|
|
||||||
if (channelId.fail) {
|
|
||||||
return res.status(500).send(channelId.message)
|
|
||||||
}
|
|
||||||
|
|
||||||
const already = await prisma.autodownload.findFirst({
|
|
||||||
where: {
|
|
||||||
channel: channelId
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (already) {
|
|
||||||
res.status(500).send(`This channel is already being automatically downloaded...`)
|
|
||||||
} else {
|
|
||||||
const ipHash = crypto.createHash('sha256').update(req.headers['x-forwarded-for'] || req.connection.remoteAddress).digest('hex')
|
|
||||||
const isLimited = await limiter.limit(ipHash)
|
|
||||||
|
|
||||||
if (isLimited) return res.status(420).send(`Hey! You have reached the limit of 5 queued auto-download channels per day. Sadly, hard drives don't grow on trees, so rate limits are necessary. The "Save Channel" feature has no limits, so feel free to use that.<br><br>
|
|
||||||
|
|
||||||
Are you planning something awesome? Feel free to email me at admin[@]preservetube.com.`)
|
|
||||||
|
|
||||||
await prisma.autodownload.create({
|
|
||||||
data: {
|
|
||||||
channel: channelId
|
|
||||||
}
|
|
||||||
})
|
|
||||||
res.send('Perfect! Each time this channel uploads their videos will be downloaded')
|
|
||||||
}
|
|
||||||
}
|
|
|
@@ -6,17 +6,13 @@ services:
     image: ghcr.io/preservetube/backend
     networks:
       - public
-      - db
     restart: on-failure
     volumes:
-      - ./logs:/usr/src/preservetube/backend/logs
+      - /mnt/hdd/preservetube-videos:/usr/src/preservetube/backend/videos
       - ./.env:/usr/src/preservetube/backend/.env
       - ./s3.json:/usr/src/preservetube/backend/s3.json

 networks:
   public:
     external: true
     name: public
-  db:
-    external: true
-    name: db
index.js
@@ -1,47 +0,0 @@
require('dotenv').config()

const express = require('express')
const cors = require('cors')

const logger = require('./utils/logger.js')

const latestController = require('./controller/latest.js')
const videoController = require('./controller/video.js')
const searchController = require('./controller/search.js')
const websocketController = require('./controller/websocket.js')
const transparencyController = require('./controller/transparency.js')

const app = express()

require('express-ws')(app)
app.use(cors())

app.get('/latest', latestController.getLatest)
app.get('/sitemap-index.xml', latestController.getSitemap)
app.get('/sitemap-:index.xml', latestController.getSubSitemap)

app.get('/video/:id', videoController.getVideo)
app.get('/channel/:id', videoController.getChannel)
app.get('/channel/:id/videos', videoController.getOnlyChannelVideos)
app.get('/playlist/:id', videoController.getPlaylist)

app.get('/search/video', searchController.searchVideo)
app.get('/search/playlist', searchController.searchPlaylist)
app.get('/search/channel', searchController.searchChannel)

app.get('/transparency/list', transparencyController.getReports)
app.get('/transparency/:id', transparencyController.getReports)

app.ws('/save', websocketController.save)
app.ws('/saveplaylist', websocketController.playlist)
app.ws('/savechannel', websocketController.channel)
app.get('/autodownload', websocketController.addAutodownload)

process.on('uncaughtException', err => {
    logger.error(err)
    console.log(err)
})

app.listen(1337, () => {
    logger.info({ message: 'Server listening on port 1337!' })
})
package.json
@@ -1,26 +1,27 @@
 {
-  "name": "preservetube",
-  "version": "1.0.0",
-  "main": "index.js",
-  "license": "AGPL-3.0",
-  "dependencies": {
-    "@logtail/node": "^0.4.0",
-    "@logtail/winston": "^0.4.1",
-    "@prisma/client": "4.9.0",
-    "aws-sdk": "2.1128.0",
-    "cors": "^2.8.5",
-    "dotenv": "^16.0.3",
-    "express": "^4.18.2",
-    "express-ws": "^5.0.2",
-    "ioredis": "^5.3.1",
-    "isomorphic-dompurify": "^1.0.0",
-    "node-fetch": "2",
-    "readable-to-ms": "^1.0.3",
-    "rolling-rate-limiter": "^0.4.2",
-    "winston": "^3.8.2",
-    "ws": "^8.17.1"
+  "name": "preservetube-backend",
+  "module": "src/index.ts",
+  "type": "module",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1",
+    "dev": "bun run --watch src/index.ts"
   },
   "devDependencies": {
-    "prisma": "4.9.0"
+    "@types/bun": "latest"
+  },
+  "peerDependencies": {
+    "typescript": "^5.0.0"
+  },
+  "dependencies": {
+    "@types/pg": "^8.11.10",
+    "date-fns": "^4.1.0",
+    "elysia": "^1.1.25",
+    "ioredis": "^5.4.1",
+    "isomorphic-dompurify": "^2.18.0",
+    "kysely": "^0.27.4",
+    "pg": "^8.13.1",
+    "readable-to-ms": "^1.0.3",
+    "rolling-rate-limiter": "^0.4.2",
+    "ultralight-s3": "^0.0.7"
   }
 }
@@ -1,44 +0,0 @@
// This is your Prisma schema file,
// learn more about it in the docs: https://pris.ly/d/prisma-schema

generator client {
  provider = "prisma-client-js"
}

datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

model videos {
  uuid String @id @default(uuid())
  id String @unique
  title String
  description String
  thumbnail String
  source String
  published String
  archived String
  channel String
  channelId String
  channelVerified Boolean
  channelAvatar String
  playlist String?
  disabled Boolean @default(false)
  hasBeenReported Boolean @default(false)

  @@index([title], name: "idx_title")
}

model reports {
  uuid String @id @default(uuid())
  target String
  title String
  details String
  date DateTime @default(now())
}

model autodownload {
  uuid String @id @default(uuid())
  channel String
}
@@ -0,0 +1,23 @@
import { Elysia } from 'elysia';

import latest from '@/router/latest'
import search from '@/router/search'
import transparency from '@/router/transparency'
import video from '@/router/video'
import websocket from '@/router/websocket'

const app = new Elysia()
app.use(latest)
app.use(search)
app.use(transparency)
app.use(video)
app.use(websocket)

process.on('uncaughtException', err => {
    console.log(err)
})

app.listen(1337);
console.log(
    `api is running at ${app.server?.hostname}:${app.server?.port}`
);
@@ -0,0 +1,66 @@
import { Elysia } from 'elysia';
import { Redis } from 'ioredis'

import { db } from '@/utils/database'
import { createSitemapXML, createSitemapIndexXML } from '@/utils/sitemap'

const app = new Elysia()
const redis = new Redis({
    host: process.env.REDIS_HOST,
    password: process.env.REDIS_PASS,
});

app.get('/latest', async () => {
    const cached = await redis.get('latest')
    if (cached) return JSON.parse(cached)

    const json = await db.selectFrom('videos')
        .select(['id', 'title', 'thumbnail', 'published', 'archived', 'channel', 'channelId', 'channelAvatar', 'channelVerified'])
        .orderBy('archived desc')
        .limit(50)
        .execute()

    await redis.set('latest', JSON.stringify(json), 'EX', 3600)

    return json
})

app.get('/sitemap-index.xml', async ({ set }) => {
    const cachedSitemapIndex = await redis.get('sitemap-index');
    if (cachedSitemapIndex) {
        set.headers['Content-Type'] = 'application/xml'
        return cachedSitemapIndex
    }

    const videos = await db.selectFrom('videos')
        .select('id')
        .execute()

    const urls = videos.map((video) => `https://preservetube.com/watch?v=${video.id}`);
    const sitemaps = [];

    for (let i = 0; i < urls.length; i += 50000) {
        const batch = urls.slice(i, i + 50000);
        await redis.set(`sitemap-${sitemaps.length}`, createSitemapXML(batch), 'EX', 86400);
        sitemaps.push(`sitemap-${sitemaps.length}.xml`);
    }

    const sitemapIndexXML = createSitemapIndexXML(sitemaps);
    await redis.set('sitemap-index', sitemapIndexXML, 'EX', 86400);

    set.headers['Content-Type'] = 'application/xml'
    return sitemapIndexXML
})

app.get('/sitemap-:index.xml', async ({ set, params: { index }, error, path }) => {
    const indexNum = path.replace('/sitemap-', '').replace('.xml', '')
    const cachedSitemap = await redis.get(`sitemap-${indexNum}`);
    if (cachedSitemap) {
        set.headers['Content-Type'] = 'application/xml'
        return cachedSitemap
    }

    return error(404)
})

export default app
@@ -0,0 +1,52 @@
import { Elysia, t } from 'elysia';
import { Redis } from 'ioredis'
import { RedisRateLimiter } from 'rolling-rate-limiter'

import { db } from '@/utils/database'
import { validateVideo, validatePlaylist, validateChannel } from '@/utils/regex'

const app = new Elysia()
const redis = new Redis({
    host: process.env.REDIS_HOST,
    password: process.env.REDIS_PASS,
});

const limiter = new RedisRateLimiter({
    client: redis,
    namespace: 'search:',
    interval: 5 * 60 * 1000,
    maxInInterval: 15
})

app.get('/search/video', async ({ headers, query: { search }, error }) => {
    const hash = Bun.hash(headers['x-userip'] || headers['cf-connecting-ip'] || '0.0.0.0')
    const isLimited = await limiter.limit(hash.toString())
    if (isLimited) return error(429, 'error-You have been ratelimited.')

    const videoId = validateVideo(search)
    if (videoId) return `redirect-${process.env.FRONTEND}/watch?v=${videoId}`

    const videos = await db.selectFrom('videos')
        .selectAll()
        .where('title', 'ilike', `%${search}%`)
        .execute()

    return videos
}, {
    query: t.Object({
        search: t.String()
    })
})

app.get('/search/channel', async ({ query: { url }, error, redirect }) => {
    const channelId = await validateChannel(url)
    if (!channelId) return error(400, 'Whoops! What is that? That is not a Youtube Channel.')

    return redirect(`${process.env.FRONTEND}/channel/${channelId}`)
}, {
    query: t.Object({
        url: t.String()
    })
})

export default app
@@ -0,0 +1,45 @@
import { Elysia } from 'elysia';
import { Redis } from 'ioredis'

import { db } from '@/utils/database'

const app = new Elysia()
const redis = new Redis({
    host: process.env.REDIS_HOST,
    password: process.env.REDIS_PASS,
});

app.get('/transparency/list', async () => {
    const cached = await redis.get('transparency')
    if (cached) return JSON.parse(cached)

    const reports = await db.selectFrom('reports')
        .selectAll()
        .execute()

    const json = reports.map(r => {
        return {
            ...r,
            details: (r.details).split('<').join('&lt;').split('>').join('&gt;'),
            date: (r.date).toISOString().slice(0, 10)
        }
    })

    await redis.set('transparency', JSON.stringify(json), 'EX', 3600)
    return json
})

app.get('/transparency/:id', async ({ params: { id } }) => {
    const cached = await redis.get(`transparency:${id}`)
    if (cached) return JSON.parse(cached)

    const json = await db.selectFrom('reports')
        .selectAll()
        .where('target', '=', id)
        .execute()

    await redis.set(`transparency:${id}`, JSON.stringify(json), 'EX', 3600)
    return json
})

export default app
@ -0,0 +1,103 @@
|
||||||
|
import { Elysia } from 'elysia';
|
||||||
|
import { Redis } from 'ioredis'
|
||||||
|
import DOMPurify from 'isomorphic-dompurify'
|
||||||
|
|
||||||
|
import { db } from '@/utils/database'
|
||||||
|
import { getChannel, getChannelVideos } from '@/utils/metadata';
|
||||||
|
import { convertRelativeToDate } from '@/utils/common';
|
||||||
|
|
||||||
|
const app = new Elysia()
|
||||||
|
const redis = new Redis({
|
||||||
|
host: process.env.REDIS_HOST,
|
||||||
|
password: process.env.REDIS_PASS,
|
||||||
|
});
|
||||||
|
|
||||||
|
interface processedVideo {
|
||||||
|
id: string;
|
||||||
|
title: string;
|
||||||
|
thumbnail: string;
|
||||||
|
published: string;
|
||||||
|
deleted?: undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
app.get('/video/:id', async ({ params: { id }, error }) => {
|
||||||
|
const cached = await redis.get(`video:${id}`)
|
||||||
|
if (cached) return JSON.parse(cached)
|
||||||
|
|
||||||
|
const json = await db.selectFrom('videos')
|
||||||
|
.selectAll()
|
||||||
|
.where('id', '=', id)
|
||||||
|
.executeTakeFirst()
|
||||||
|
|
||||||
|
if (!json) return error(404, { error: '404' })
|
||||||
|
await redis.set(`video:${id}`, JSON.stringify(json), 'EX', 3600)
|
||||||
|
|
||||||
|
return {
|
||||||
|
...json,
|
||||||
|
description: DOMPurify.sanitize(json.description),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
app.get('/channel/:id', async ({ params: { id }, error }) => {
|
||||||
|
const cached = await redis.get(`channel:${id}`)
|
||||||
|
if (cached) return JSON.parse(cached)
|
||||||
|
|
||||||
|
const [videos, channel] = await Promise.all([
|
||||||
|
getChannelVideos(id),
|
||||||
|
getChannel(id)
|
||||||
|
])
|
||||||
|
|
||||||
|
if (!videos || !channel || videos.error || channel.error) return error(404, { error: '404' })
|
||||||
|
|
||||||
|
const archived = await db.selectFrom('videos')
|
||||||
|
.select(['id', 'title', 'thumbnail', 'published', 'archived'])
|
||||||
|
.where('channelId', '=', id)
|
||||||
|
.execute()
|
||||||
|
|
||||||
|
const processedVideos: processedVideo[] = videos.map((video: any) => ({ // it would be impossible to set types for youtube output... they change it every day.
|
||||||
|
id: video.id,
|
||||||
|
title: video.title.text,
|
||||||
|
thumbnail: video.thumbnails[0].url,
|
||||||
|
published: (video.published.text.endsWith('ago') ? convertRelativeToDate(video.published.text) : new Date(video.published.text)).toISOString().slice(0, 10)
|
||||||
|
}))
|
||||||
|
|
||||||
|
archived.forEach(v => {
|
||||||
|
const existingVideoIndex = processedVideos.findIndex(video => video.id === v.id);
|
||||||
|
if (existingVideoIndex !== -1) {
|
||||||
|
processedVideos[existingVideoIndex] = v;
|
||||||
|
} else {
|
||||||
|
processedVideos.push({ ...v, deleted: undefined });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
processedVideos.sort((a: any, b: any) => new Date(b.published).getTime() - new Date(a.published).getTime());
|
||||||
|
|
||||||
|
const json = {
|
||||||
|
name: channel.metadata.title,
|
||||||
|
avatar: channel.metadata.avatar[0].url,
|
||||||
|
verified: channel.header.author?.is_verified,
|
||||||
|
videos: processedVideos
|
||||||
|
}
|
||||||
|
|
||||||
|
await redis.set(`channel:${id}`, JSON.stringify(json), 'EX', 3600)
|
||||||
|
return json
|
||||||
|
})
|
||||||
|
|
||||||
|
app.get('/channel/:id/videos', async ({ params: { id } }) => {
|
||||||
|
const cached = await redis.get(`channelVideos:${id}`)
|
||||||
|
if (cached) return JSON.parse(cached)
|
||||||
|
|
||||||
|
const archived = await db.selectFrom('videos')
|
||||||
|
.select(['id', 'title', 'thumbnail', 'published', 'archived'])
|
||||||
|
.where('channelId', '=', id)
|
||||||
|
.orderBy('published desc')
|
||||||
|
.execute()
|
||||||
|
|
||||||
|
const json = {
|
||||||
|
videos: archived
|
||||||
|
}
|
||||||
|
await redis.set(`channelVideos:${id}`, JSON.stringify(json), 'EX', 3600)
|
||||||
|
return json
|
||||||
|
})
|
||||||
|
|
||||||
|
export default app
|
|
@ -0,0 +1,179 @@
|
||||||
|
import { Elysia, t } from 'elysia';
|
||||||
|
import { Redis } from 'ioredis'
|
||||||
|
import * as fs from 'node:fs'
|
||||||
|
|
||||||
|
import { db } from '@/utils/database'
|
||||||
|
import { validateVideo, validateChannel } from '@/utils/regex'
|
||||||
|
import { checkCaptcha, createDatabaseVideo } from '@/utils/common';
|
||||||
|
import { downloadVideo } from '@/utils/download';
|
||||||
|
import { uploadVideo } from '@/utils/upload';
|
||||||
|
import { getChannelVideos } from '@/utils/metadata';
|
||||||
|
|
||||||
|
const app = new Elysia()
|
||||||
|
const redis = new Redis({
|
||||||
|
host: process.env.REDIS_HOST,
|
||||||
|
password: process.env.REDIS_PASS,
|
||||||
|
});
|
||||||
|
const videoIds: Record<string, string> = {}
|
||||||
|
|
||||||
|
const sendError = (ws: any, message: string) => {
|
||||||
|
ws.send(`ERROR - ${message}`);
|
||||||
|
ws.close();
|
||||||
|
};
|
||||||
|
|
||||||
|
const cleanup = async (ws: any, videoId: string) => {
|
||||||
|
delete videoIds[ws.id];
|
||||||
|
if (videoId) await redis.del(videoId);
|
||||||
|
await redis.del(ws.id);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleUpload = async (ws: any, videoId: string, isChannel: boolean = false) => {
|
||||||
|
const filePath = `./videos/${videoId}.mp4`;
|
||||||
|
if (!fs.existsSync(filePath)) {
|
||||||
|
ws.send(`DATA - Video file for ${videoId} not found. Skipping.`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
ws.send('DATA - Uploading file...');
|
||||||
|
const videoUrl = await uploadVideo(filePath);
|
||||||
|
fs.unlinkSync(filePath);
|
||||||
|
|
||||||
|
const uploaded = await createDatabaseVideo(videoId, videoUrl);
|
||||||
|
if (uploaded !== 'success') {
|
||||||
|
ws.send(`DATA - Error while uploading - ${JSON.stringify(uploaded)}`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isChannel) ws.send(`DONE - ${process.env.FRONTEND}/watch?v=${videoId}`);
|
||||||
|
return true;
|
||||||
|
} catch (error: any) {
|
||||||
|
ws.send(`ERROR - Upload failed for ${videoId}: ${error.message}`);
|
||||||
|
fs.unlinkSync(filePath);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
app.ws('/save', {
|
||||||
|
query: t.Object({
|
||||||
|
url: t.String()
|
||||||
|
}),
|
||||||
|
body: t.String(),
|
||||||
|
open: async (ws) => {
|
||||||
|
console.log(`${ws.id} - ${ws.data.path} - ${JSON.stringify(ws.data.query)}`)
|
||||||
|
|
||||||
|
const videoId = validateVideo(ws.data.query.url)
|
||||||
|
if (!videoId) return sendError(ws, 'Invalid video URL.');
|
||||||
|
if (await redis.get(videoId)) return sendError(ws, 'Someone is already downloading this video...');
|
||||||
|
if (await redis.get(`blacklist:${videoId}`)) return sendError(ws, 'This video is blacklisted.');
|
||||||
|
|
||||||
|
const already = await db.selectFrom('videos')
|
||||||
|
.select('id')
|
||||||
|
.where('id', '=', videoId)
|
||||||
|
.executeTakeFirst()
|
||||||
|
|
||||||
|
if (already) {
|
||||||
|
ws.send(`DONE - ${process.env.FRONTEND}/watch?v=${videoId}`)
|
||||||
|
ws.close()
|
||||||
|
} else {
|
||||||
|
ws.send('DATA - This process is automatic. Your video will start archiving shortly.')
|
||||||
|
ws.send('CAPTCHA - Solving a cryptographic challenge before downloading.')
|
||||||
|
videoIds[ws.id] = videoId
|
||||||
|
}
|
||||||
|
},
|
||||||
|
message: async (ws, message) => {
|
||||||
|
if (message == 'alive') return
|
||||||
|
|
||||||
|
const videoId = videoIds[ws.id];
|
||||||
|
if (!videoId) return sendError(ws, 'No video ID associated with this session.');
|
||||||
|
|
||||||
|
if (await redis.get(videoId) !== 'downloading') {
|
||||||
|
await redis.set(videoId, 'downloading', 'EX', 300)
|
||||||
|
|
||||||
|
if (!(await checkCaptcha(message))) {
|
||||||
|
await cleanup(ws, videoId);
|
||||||
|
return sendError(ws, 'Captcha validation failed.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const downloadResult = await downloadVideo(ws, videoId);
|
||||||
|
if (downloadResult.fail) {
|
||||||
|
await cleanup(ws, videoId);
|
||||||
|
return sendError(ws, downloadResult.message);
|
||||||
|
}
|
||||||
|
|
||||||
|
const uploadSuccess = await handleUpload(ws, videoId);
|
||||||
|
if (!uploadSuccess) await redis.del(videoId);
|
||||||
|
|
||||||
|
await cleanup(ws, videoId);
|
||||||
|
ws.close();
|
||||||
|
} else {
|
||||||
|
ws.send('DATA - Captcha already submitted.');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
close: async (ws) => {
|
||||||
|
await cleanup(ws, videoIds[ws.id]);
|
||||||
|
console.log(`closed - ${ws.data.path} - ${JSON.stringify(ws.data.query)}`)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
app.ws('/savechannel', {
|
||||||
|
query: t.Object({
|
||||||
|
url: t.String()
|
||||||
|
}),
|
||||||
|
body: t.String(),
|
||||||
|
open: async (ws) => {
|
||||||
|
console.log(`${ws.id} - ${ws.data.path} - ${JSON.stringify(ws.data.query)}`)
|
||||||
|
|
||||||
|
const channelId = await validateChannel(ws.data.query.url);
|
||||||
|
if (!channelId) return sendError(ws, 'Invalid channel URL.');
|
||||||
|
|
||||||
|
ws.send('DATA - This process is automatic. Your video will start archiving shortly.')
|
||||||
|
ws.send('CAPTCHA - Solving a cryptographic challenge before downloading.')
|
||||||
|
videoIds[ws.id] = `captcha-${channelId}`;
|
||||||
|
},
|
||||||
|
message: async (ws, message) => {
|
||||||
|
if (message == 'alive') return
|
||||||
|
|
||||||
|
const status = videoIds[ws.id];
|
||||||
|
if (!status || !status.startsWith('captcha-')) return sendError(ws, 'No channel associated with this session.');
|
||||||
|
|
||||||
|
const channelId = status.replace('captcha-', '');
|
||||||
|
if (!(await checkCaptcha(message))) {
|
||||||
|
await cleanup(ws, channelId);
|
||||||
|
return sendError(ws, 'Captcha validation failed.');
|
||||||
|
}
|
||||||
|
|
||||||
|
videoIds[ws.id] = `downloading-${channelId}`;
|
||||||
|
const videos = await getChannelVideos(channelId);
|
||||||
|
|
||||||
|
for (const video of videos.slice(0, 5)) {
|
||||||
|
if (!video || (await redis.get(video.id)) || (await redis.get(`blacklist:${video.id}`))) continue;
|
||||||
|
|
||||||
|
const already = await db.selectFrom('videos')
|
||||||
|
.select('id')
|
||||||
|
.where('id', '=', video.id)
|
||||||
|
.executeTakeFirst()
|
||||||
|
if (already) continue
|
||||||
|
|
||||||
|
ws.send(`DATA - Processing video: ${video.title.text}`);
|
||||||
|
await redis.set(video.id, 'downloading', 'EX', 300);
|
||||||
|
|
||||||
|
const downloadResult = await downloadVideo(ws, video.id);
|
||||||
|
if (!downloadResult.fail) await handleUpload(ws, video.id, true);
|
||||||
|
|
||||||
|
await redis.del(video.id);
|
||||||
|
ws.send(`DATA - Created video page for ${video.title.text}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
await cleanup(ws, channelId);
|
||||||
|
ws.send(`DONE - ${process.env.FRONTEND}/channel/${channelId}`)
|
||||||
|
ws.close();
|
||||||
|
},
|
||||||
|
close: async (ws) => {
|
||||||
|
await cleanup(ws, videoIds[ws.id]);
|
||||||
|
console.log(`closed - ${ws.data.path} - ${JSON.stringify(ws.data.query)}`)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
export default app
|
|
@@ -0,0 +1,45 @@
import type {
    Generated,
    Insertable,
    Selectable,
    Updateable,
} from 'kysely'

export interface Database {
    videos: VideosTable
    reports: ReportsTable
}

export interface VideosTable {
    uuid: Generated<string>
    id: string
    title: string
    description: string
    thumbnail: string
    source: string
    published: string
    archived: string
    channel: string
    channelId: string
    channelVerified: boolean
    channelAvatar: string
    playlist?: string | null
    disabled: boolean
    hasBeenReported: boolean
}

export type Video = Selectable<VideosTable>
export type NewVideo = Insertable<VideosTable>
export type UpdateVideo = Updateable<VideosTable>

export interface ReportsTable {
    uuid: Generated<string>
    target: string
    title: string
    details: string
    date: Date
}

export type Report = Selectable<ReportsTable>
export type NewReport = Insertable<ReportsTable>
export type UpdateReport = Updateable<ReportsTable>
@ -0,0 +1,86 @@
|
||||||
|
import { getVideo, getChannel } from "@/utils/metadata";
|
||||||
|
import { uploadImage } from "@/utils/upload";
|
||||||
|
import { db } from '@/utils/database'
|
||||||
|
import crypto from 'node:crypto';
|
||||||
|
|
||||||
|
function convertRelativeToDate(relativeTime: string) {
|
||||||
|
const parts = relativeTime.split(' ');
|
||||||
|
const amount = parseInt(parts[0]);
|
||||||
|
const unit = parts[1];
|
||||||
|
|
||||||
|
const currentDate = new Date();
|
||||||
|
|
||||||
|
switch (unit) {
|
||||||
|
case 'hour':
|
||||||
|
case 'hours':
|
||||||
|
currentDate.setHours(currentDate.getHours() - amount);
|
||||||
|
break;
|
||||||
|
case 'minute':
|
||||||
|
case 'minutes':
|
||||||
|
currentDate.setMinutes(currentDate.getMinutes() - amount);
|
||||||
|
break;
|
||||||
|
case 'day':
|
||||||
|
case 'days':
|
||||||
|
currentDate.setDate(currentDate.getDate() - amount);
|
||||||
|
break;
|
||||||
|
case 'month':
|
||||||
|
case 'months':
|
||||||
|
currentDate.setMonth(currentDate.getMonth() - amount);
|
||||||
|
break;
|
||||||
|
case 'year':
|
||||||
|
case 'years':
|
||||||
|
currentDate.setFullYear(currentDate.getFullYear() - amount);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return currentDate;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function checkCaptcha(response: string) {
|
||||||
|
const confirm = await (await fetch('https://challenges.cloudflare.com/turnstile/v0/siteverify', {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify({
|
||||||
|
'response': response,
|
||||||
|
'secret': process.env.CAPTCHA_SECRET
|
||||||
|
}),
|
||||||
|
headers: {
|
||||||
|
'content-type': 'application/json'
|
||||||
|
}
|
||||||
|
})).json()
|
||||||
|
|
||||||
|
return confirm.success
|
||||||
|
}
|
||||||
|
|
||||||
|
async function createDatabaseVideo(id: string, videoUrl: string) {
|
||||||
|
const data = await getVideo(id)
|
||||||
|
const channelData = await getChannel(data.basic_info.channel_id)
|
||||||
|
|
||||||
|
if (data.error) return data
|
||||||
|
if (channelData.error) return channelData
|
||||||
|
|
||||||
|
const uploaderAvatar = await uploadImage(data.basic_info.channel_id, channelData.metadata.thumbnail[0].url)
|
||||||
|
const thumbnailUrl = await uploadImage(id, data.basic_info.thumbnail[0].url)
|
||||||
|
|
||||||
|
await db.insertInto('videos')
|
||||||
|
.values({
|
||||||
|
uuid: crypto.randomUUID(),
|
||||||
|
id: id,
|
||||||
|
title: data.basic_info.title,
|
||||||
|
description: (data.basic_info.short_description).replaceAll('\n', '<br>'),
|
||||||
|
thumbnail: thumbnailUrl,
|
||||||
|
source: videoUrl,
|
||||||
|
published: (data.primary_info.published.text.endsWith('ago') ? convertRelativeToDate(data.primary_info.published.text) : new Date(data.primary_info.published.text)).toISOString().slice(0, 10),
|
||||||
|
archived: (new Date()).toISOString().slice(0, 10),
|
||||||
|
channel: channelData.metadata.title,
|
||||||
|
channelId: channelData.metadata.external_id,
|
||||||
|
channelVerified: channelData.header.author?.is_verified || false,
|
||||||
|
channelAvatar: uploaderAvatar,
|
||||||
|
disabled: false,
|
||||||
|
hasBeenReported: false
|
||||||
|
})
|
||||||
|
.execute()
|
||||||
|
|
||||||
|
return 'success'
|
||||||
|
}
|
||||||
|
|
||||||
|
export { convertRelativeToDate, checkCaptcha, createDatabaseVideo }
|
|
@@ -0,0 +1,14 @@
import type { Database } from '@/types.ts'
import { Pool } from 'pg'
import { Kysely, PostgresDialect } from 'kysely'

const dialect = new PostgresDialect({
    pool: new Pool({
        connectionString: process.env.DATABASE_URL,
        max: 10,
    })
})

export const db = new Kysely<Database>({
    dialect,
})
@ -0,0 +1,62 @@
|
||||||
|
import WebSocket from 'ws';
|
||||||
|
import { getVideo } from '@/utils/metadata';
|
||||||
|
|
||||||
|
async function downloadVideo(ws: any, id: string): Promise<{ fail: boolean, message: string }> {
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
let quality = '480p'
|
||||||
|
const video = await getVideo(id)
|
||||||
|
if (video.error) {
|
||||||
|
return resolve({
|
||||||
|
message: `Failed to request Youtube with error ${video.error}. Please retry...`,
|
||||||
|
fail: true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (video.basic_info.duration >= 900) quality = '360p' // 15 minutes
|
||||||
|
|
||||||
|
quality = await getVideoQuality(video, quality)
|
||||||
|
|
||||||
|
let isDownloading = true
|
||||||
|
const downloader = new WebSocket(`ws://${(process.env.METADATA!).replace('http://', '')}/download/${id}/${quality}`)
|
||||||
|
|
||||||
|
downloader.on('message', async function message(data: any) {
|
||||||
|
const text = data.toString()
|
||||||
|
if (text == 'done') {
|
||||||
|
isDownloading = false
|
||||||
|
return resolve({
|
||||||
|
fail: false,
|
||||||
|
message: ''
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
ws.send(`DATA - ${text}`)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
downloader.on('close', function close() {
|
||||||
|
if (!isDownloading) return
|
||||||
|
|
||||||
|
return resolve({
|
||||||
|
fail: true,
|
||||||
|
message: 'The metadata server unexpectedly closed the websocket. Please try again.'
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getVideoQuality(json: any, quality: string) {
|
||||||
|
const adaptiveFormats = json['streaming_data']['adaptive_formats'];
|
||||||
|
let video = adaptiveFormats.find((f: any) => f.quality_label === quality && !f.has_audio);
|
||||||
|
|
||||||
|
// If the specified quality isn't available, find the lowest quality video
|
||||||
|
if (!video) { // @ts-ignore
|
||||||
|
video = adaptiveFormats.filter((f: any) => !f.has_audio).reduce((prev, current) => {
|
||||||
|
if (!prev || parseInt(current.quality_label) < parseInt(prev.quality_label)) {
|
||||||
|
return current;
|
||||||
|
}
|
||||||
|
return prev;
|
||||||
|
}, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
return video ? video.quality_label : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export { downloadVideo }
|
|
@@ -0,0 +1,15 @@
// metadata either returns innertube or { error: string }

async function getVideo(id: string) {
    return await (await fetch(`${process.env.METADATA}/video/${id}`)).json()
}

async function getChannel(id: string) {
    return await (await fetch(`${process.env.METADATA}/channel/${id}`)).json()
}

async function getChannelVideos(id: string) {
    return await (await fetch(`${process.env.METADATA}/videos/${id}`)).json()
}

export { getVideo, getChannel, getChannelVideos }
@ -0,0 +1,92 @@
|
||||||
|
function validateVideo(input: string): string | false {
|
||||||
|
try {
|
||||||
|
const url = new URL(input);
|
||||||
|
const hostnames = [
|
||||||
|
'youtube.com',
|
||||||
|
'www.youtube.com',
|
||||||
|
'm.youtube.com'
|
||||||
|
];
|
||||||
|
|
||||||
|
// basic hostname check
|
||||||
|
if (hostnames.includes(url.hostname)) {
|
||||||
|
// basic url
|
||||||
|
if (url.pathname === '/watch') {
|
||||||
|
const videoId = url.searchParams.get('v');
|
||||||
|
return videoId || false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// embed url
|
||||||
|
const embedMatch = url.pathname.match(/^\/embed\/([a-zA-Z0-9_-]+)/);
|
||||||
|
if (embedMatch) {
|
||||||
|
return embedMatch[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// short urls
|
||||||
|
if (url.hostname === 'youtu.be') {
|
||||||
|
const videoId = url.pathname.replace(/^\//, '');
|
||||||
|
return videoId || false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function validatePlaylist(input: string): string | false {
|
||||||
|
try {
|
||||||
|
const url = new URL(input);
|
||||||
|
const hostnames = [
|
||||||
|
'youtube.com',
|
||||||
|
'www.youtube.com',
|
||||||
|
'm.youtube.com'
|
||||||
|
];
|
||||||
|
|
||||||
|
if (hostnames.includes(url.hostname)) {
|
||||||
|
// all urls are the same, thank god
|
||||||
|
if (url.pathname === '/playlist' || url.pathname === '/watch') {
|
||||||
|
const playlistId = url.searchParams.get('list');
|
||||||
|
return playlistId || false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function validateChannel(input: string): Promise<string | false> {
|
||||||
|
try {
|
||||||
|
const url = new URL(input);
|
||||||
|
const hostnames = [
|
||||||
|
'youtube.com',
|
||||||
|
'www.youtube.com',
|
||||||
|
'm.youtube.com'
|
||||||
|
];
|
||||||
|
|
||||||
|
if (hostnames.includes(url.hostname)) {
|
||||||
|
// @ urls
|
||||||
|
const atMatch = url.pathname.match(/^\/@([a-zA-Z0-9.-]+)/);
|
||||||
|
if (atMatch) {
|
||||||
|
const channelId = await (await fetch(`https://yt.jaybee.digital/api/channels?part=channels&handle=${atMatch[1]}`)).json()
|
||||||
|
return channelId['items'][0]['id']
|
||||||
|
}
|
||||||
|
|
||||||
|
// /channel/ and /c/
|
||||||
|
const channelMatch = url.pathname.match(/^\/(channel|c)\/([a-zA-Z0-9_-]+)/);
|
||||||
|
if (channelMatch) {
|
||||||
|
return channelMatch[2];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { validateVideo, validatePlaylist, validateChannel }
|
|
@@ -0,0 +1,32 @@
import { format } from 'date-fns';

function createSitemapXML(urls: string[]) {
    const xml = urls.map(url => `
        <url>
            <loc>${encodeURI(url)}</loc>
            <changefreq>never</changefreq>
            <priority>0.7</priority>
        </url>
    `).join('');

    return `<?xml version="1.0" encoding="UTF-8"?>
    <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
        ${xml}
    </urlset>`;
}

function createSitemapIndexXML(sitemaps: string[]) {
    const xml = sitemaps.map(sitemap => `
        <sitemap>
            <loc>${encodeURI(sitemap)}</loc>
            <lastmod>${format(new Date(), 'yyyy-MM-dd')}</lastmod>
        </sitemap>
    `).join('');

    return `<?xml version="1.0" encoding="UTF-8"?>
    <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
        ${xml}
    </sitemapindex>`;
}

export { createSitemapXML, createSitemapIndexXML }
@@ -0,0 +1,38 @@
import { S3 } from 'ultralight-s3';
import * as fs from 'node:fs'

const keys = JSON.parse(fs.readFileSync('s3.json', 'utf-8'))
const videos3 = new S3({
    endpoint: keys.endpoint,
    accessKeyId: keys.videos[0].access,
    secretAccessKey: keys.videos[0].secret,
    bucketName: keys.videos[0].bucket,
    region: 'auto'
})

const images3 = new S3({
    endpoint: keys.endpoint,
    accessKeyId: keys.images[0].access,
    secretAccessKey: keys.images[0].secret,
    bucketName: keys.images[0].bucket,
    region: 'auto'
});

async function uploadVideo(video: string) {
    const videoFile = fs.readFileSync(video)
    const uploaded = await videos3.put(video.split('/')[2], videoFile)
    return uploaded.url.replace(keys.endpoint, 'https://s2.archive.party')
}

async function uploadImage(id: string, url: string) {
    const exists = await images3.fileExists(`${id}.webp`)
    if (exists) return `${keys.images[0].url}${id}.webp`

    const response = await fetch(url)
    const buffer = Buffer.from(await response.arrayBuffer())

    const uploaded = await images3.put(`${id}.webp`, buffer)
    return uploaded.url.replace(keys.endpoint, 'https://s2.archive.party')
}

export { uploadVideo, uploadImage }
@@ -0,0 +1,31 @@
{
    "compilerOptions": {
        // Enable latest features
        "lib": ["ESNext", "DOM"],
        "target": "ESNext",
        "module": "ESNext",
        "moduleDetection": "force",
        "jsx": "react-jsx",
        "allowJs": true,

        // Bundler mode
        "moduleResolution": "bundler",
        "allowImportingTsExtensions": true,
        "verbatimModuleSyntax": true,
        "noEmit": true,

        // Best practices
        "strict": true,
        "skipLibCheck": true,
        "noFallthroughCasesInSwitch": true,

        // Some stricter flags (disabled by default)
        "noUnusedLocals": false,
        "noUnusedParameters": false,
        "noPropertyAccessFromIndexSignature": false,

        "paths": {
            "@/*": ["./src/*"]
        }
    }
}
@@ -1,18 +0,0 @@
const fetch = require('node-fetch')

async function checkCaptcha(response) {
    const confirm = await (await fetch('https://challenges.cloudflare.com/turnstile/v0/siteverify', {
        method: 'POST',
        body: JSON.stringify({
            'response': response,
            'secret': process.env.CAPTCHA_SECRET
        }),
        headers: {
            'content-type': 'application/json'
        }
    })).json()

    return confirm.success
}

module.exports = { checkCaptcha }
@ -1,27 +0,0 @@
const winston = require('winston')
const { Logtail } = require("@logtail/node")
const { LogtailTransport } = require("@logtail/winston")

const logtail = new Logtail(process.env.LOGTAIL)

const logger = winston.createLogger({
    format: winston.format.json(),
    transports: [
        new winston.transports.Console({
            format: winston.format.combine(
                winston.format.timestamp({ format: 'MMM-DD-YYYY HH:mm:ss' }),
                winston.format.printf(info => `${[info.timestamp]}: ${info.message}`),
            )
        }),
        new winston.transports.File({
            filename: 'logs/client.log',
            format: winston.format.combine(
                winston.format.timestamp({ format: 'MMM-DD-YYYY HH:mm:ss' }),
                winston.format.printf(info => `${[info.timestamp]}: ${info.message}`),
            )
        }),
        new LogtailTransport(logtail, {
            level: 'error'
        })
    ],
});

module.exports = logger
@ -1,57 +0,0 @@
const fetch = require('node-fetch')

async function getPipedInstance() {
    const instances = await (await fetch('https://piped-instances.kavin.rocks/', {
        headers: {
            'User-Agent': 'Mozilla/5.0 (compatible; PreserveTube/0.0; +https://preservetube.com)'
        }
    })).json()
    return (instances[Math.floor(Math.random() * instances.length)]).api_url
}

async function getVideoMetadata(id) {
    return await (await fetch(`${process.env.METADATA}/video/${id}`)).json()
}

async function getChannel(id) {
    return await (await fetch(`${process.env.METADATA}/channel/${id}`)).json()
}

async function getChannelVideos(id) {
    return await (await fetch(`${process.env.METADATA}/videos/${id}`)).json()
}

async function getPlaylistVideos(id) {
    const instance = await getPipedInstance()
    const json = await (await fetch(`${instance}/playlists/${id}`, {
        headers: {
            'User-Agent': 'Mozilla/5.0 (compatible; PreserveTube/0.0; +https://preservetube.com)'
        }
    })).json()
    return json
}

async function getVideoDownload(json, quality) {
    const adaptiveFormats = json['streaming_data']['adaptive_formats'];
    let video = adaptiveFormats.find(f => f.quality_label === `${quality}p` && !f.has_audio);
    if (!video) { // fall back to the lowest available video-only quality when the requested one isn't offered
        video = adaptiveFormats.filter(f => !f.has_audio).reduce((prev, current) => {
            if (!prev || parseInt(current.quality_label) < parseInt(prev.quality_label)) {
                return current;
            }
            return prev;
        }, null);
    }

    const audio = adaptiveFormats.find(f => f.has_audio);

    return {
        url: [
            video.url,
            audio.url
        ]
    };
}

module.exports = { getVideoMetadata, getChannel, getChannelVideos, getPlaylistVideos, getVideoDownload }
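For illustration, the fallback in getVideoDownload reduces the video-only formats to the one with the smallest numeric quality_label when the requested quality is missing. A standalone TypeScript sketch with made-up format data:

type Format = { quality_label: string; has_audio: boolean; url: string }

// made-up formats: the requested 480p video-only stream is absent
const adaptiveFormats: Format[] = [
    { quality_label: '1080p', has_audio: false, url: 'https://example.invalid/v1080' },
    { quality_label: '720p', has_audio: false, url: 'https://example.invalid/v720' },
    { quality_label: '360p', has_audio: true, url: 'https://example.invalid/av360' },
]

let video = adaptiveFormats.find(f => f.quality_label === '480p' && !f.has_audio)
if (!video) {
    // same reduce as above: keep whichever video-only format has the smallest numeric label
    video = adaptiveFormats.filter(f => !f.has_audio).reduce<Format | null>((prev, current) => {
        if (!prev || parseInt(current.quality_label) < parseInt(prev.quality_label)) return current
        return prev
    }, null) ?? undefined
}

console.log(video?.quality_label) // '720p'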
@ -1,33 +0,0 @@
const Redis = require('ioredis')
const fs = require('node:fs')

const logger = require("./logger.js")

const redis = new Redis({
    host: process.env.REDIS_HOST,
    port: process.env.REDIS_PORT,
    password: process.env.REDIS_PASS,
});

redis.on('ready', async function () {
    logger.info({ message: 'Connected to redis!' })

    // clear everything except blacklist entries on startup
    const keys = await redis.keys('*')
    const filteredKeys = keys.filter(key => !key.startsWith('blacklist:'))
    if (filteredKeys.length) await redis.del(filteredKeys)

    // every 5 minutes, delete downloaded .mp4 files that no longer have an active download entry
    setInterval(async () => {
        const files = fs.readdirSync('videos')
        const mp4Files = files.filter((file) => file.endsWith('.mp4'))
        mp4Files.forEach(async (f) => {
            const videoId = f.replace('.mp4', '')
            const isActive = await redis.get(videoId)
            if (!isActive) {
                fs.unlinkSync(`./videos/${f}`)
                logger.info({ message: `deleted file ${f} because there is no active download of it` })
            }
        })
    }, 5 * 60000)
})

module.exports = redis
@ -1,68 +0,0 @@
const AWS = require('aws-sdk')
const fs = require('node:fs')

const keys = require('../s3.json')

async function uploadVideo(video) {
    const key = keys.videos[0]

    const s3 = new AWS.S3({
        accessKeyId: key.access,
        secretAccessKey: key.secret,
        endpoint: keys.endpoint,
        s3ForcePathStyle: true
    })

    const videoFile = fs.createReadStream(video)
    const uploaded = await s3.upload({
        Bucket: key.bucket,
        Key: video.split('/')[2],
        Body: videoFile,
        ContentType: 'video/mp4',
    }).promise()

    return (uploaded.Location).replace(keys.endpoint, 'https://s2.archive.party')
}

async function uploadImage(id, url) {
    const key = keys.images[0]

    const s3 = new AWS.S3({
        accessKeyId: key.access,
        secretAccessKey: key.secret,
        endpoint: keys.endpoint,
        s3ForcePathStyle: true
    })

    const exists = await checkIfFileExists({
        Bucket: key.bucket,
        Key: `${id}.webp`
    }, s3)

    if (exists) {
        return `${key.url}${id}.webp`
    } else {
        const response = await fetch(url)
        const buffer = Buffer.from(await response.arrayBuffer())

        const uploaded = await s3.upload({
            Bucket: key.bucket,
            Key: `${id}.webp`,
            Body: buffer,
            ContentType: 'image/webp',
        }).promise()

        return (uploaded.Location).replace(keys.endpoint, 'https://s2.archive.party')
    }
}

async function checkIfFileExists(params, s3) {
    try {
        await s3.headObject(params).promise()
        return true
    } catch (err) {
        return false
    }
}

module.exports = { uploadVideo, uploadImage }
@ -1,53 +0,0 @@
const fetch = require('node-fetch')

async function validateVideoInput(input) {
    if (!input) return {
        fail: true,
        message: 'Missing URL'
    }

    const id = (input.trim()).match(/^https:\/\/(?:(?:www\.)?youtube\.com\/(?:watch\?v=|shorts\/)|youtu\.be\/)([A-Za-z0-9_-]{11})$/m)?.[1]
    if (!id) return {
        fail: true,
        message: 'Whoops! What is that? That is not a Youtube url.'
    }

    return id
}

async function validatePlaylistInput(input) {
    if (!input) return {
        fail: true,
        message: 'Missing URL'
    }

    const id = (input.trim()).match(/^(?:https?:\/\/)?(?:www\.)?youtu(?:(?:\.be)|(?:be\.com))\/playlist\?list=([\w_-]{34})$/m)?.[1]
    if (!id) return {
        fail: true,
        message: 'Whoops! What is that? That is not a Youtube Playlist.'
    }

    return id
}

async function validateChannelInput(input) {
    if (!input) return {
        fail: true,
        message: 'Missing URL'
    }

    const id = input.match(/^(?:https?:\/\/)?(?:www\.)?youtu(?:(?:\.be)|(?:be\.com))\/(?:channel\/|@)([\w-]+)/m)?.[1]
    if (!id) return {
        fail: true,
        message: 'Whoops! What is that? That is not a Youtube Channel.'
    }

    if (input.includes('@')) {
        const channelId = await (await fetch(`https://yt.jaybee.digital/api/channels?part=channels&handle=${id}`)).json()
        return channelId['items'][0]['id']
    } else {
        return id
    }
}

module.exports = { validateVideoInput, validatePlaylistInput, validateChannelInput }
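For illustration, the video pattern in validateVideoInput accepts watch, shorts and youtu.be links and captures the 11-character id. A small standalone TypeScript check; the sample URLs are just examples:

const videoIdPattern = /^https:\/\/(?:(?:www\.)?youtube\.com\/(?:watch\?v=|shorts\/)|youtu\.be\/)([A-Za-z0-9_-]{11})$/m

const samples = [
    'https://www.youtube.com/watch?v=dQw4w9WgXcQ', // matches, id captured
    'https://youtu.be/dQw4w9WgXcQ',                // matches
    'https://example.com/watch?v=dQw4w9WgXcQ',     // no match, so the capture is undefined
]

for (const url of samples) {
    console.log(url, '->', url.trim().match(videoIdPattern)?.[1])
}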
@ -1,62 +0,0 @@
const { PrismaClient } = require('@prisma/client')
const prisma = new PrismaClient()

const metadata = require('./metadata.js')
const upload = require('./upload.js')

function convertRelativeToDate(relativeTime) {
    const parts = relativeTime.split(' ');
    const amount = parseInt(parts[0]);
    const unit = parts[1];

    const currentDate = new Date();

    switch (unit) {
        case 'hour':
        case 'hours':
            currentDate.setHours(currentDate.getHours() - amount);
            break;
        case 'minute':
        case 'minutes':
            currentDate.setMinutes(currentDate.getMinutes() - amount);
            break;
        case 'day':
        case 'days':
            currentDate.setDate(currentDate.getDate() - amount);
            break;
    }

    return currentDate;
}

async function createDatabaseVideo(id, videoUrl, playlistId) {
    const data = await metadata.getVideoMetadata(id)
    if (data.error) return data

    const channelData = await metadata.getChannel(data.basic_info.channel_id)
    if (channelData.error) return channelData

    const uploaderAvatar = await upload.uploadImage(data.basic_info.channel_id, channelData.metadata.thumbnail[0].url)
    const thumbnailUrl = await upload.uploadImage(id, data.basic_info.thumbnail[0].url)

    await prisma.videos.create({
        data: {
            id: id,
            title: data.basic_info.title,
            description: (data.basic_info.short_description).replaceAll('\n', '<br>'),
            thumbnail: thumbnailUrl,
            source: videoUrl,
            published: (data.primary_info.published.text.endsWith('ago') ? convertRelativeToDate(data.primary_info.published.text) : new Date(data.primary_info.published.text)).toISOString().slice(0, 10),
            archived: (new Date()).toISOString().slice(0, 10),
            channel: channelData.metadata.title,
            channelId: channelData.metadata.external_id,
            channelAvatar: uploaderAvatar,
            channelVerified: channelData.header.author?.is_verified || false,
            playlist: playlistId
        }
    })

    return 'success'
}

module.exports = { createDatabaseVideo }
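For illustration, the published field above either rewinds the current date for an '<n> <unit> ago' string or parses an absolute date, then keeps only the yyyy-MM-dd part. A standalone TypeScript sketch of that branch; the inputs are made up and the helper name is hypothetical:

// mirrors the hours/minutes/days switch in convertRelativeToDate above
function convertRelativeToDateSketch(relativeTime: string): Date {
    const [amountText, unit] = relativeTime.split(' ')
    const amount = parseInt(amountText)
    const date = new Date()
    if (unit.startsWith('hour')) date.setHours(date.getHours() - amount)
    else if (unit.startsWith('minute')) date.setMinutes(date.getMinutes() - amount)
    else if (unit.startsWith('day')) date.setDate(date.getDate() - amount)
    return date
}

const published = (text: string) =>
    (text.endsWith('ago') ? convertRelativeToDateSketch(text) : new Date(text))
        .toISOString()
        .slice(0, 10)

console.log(published('3 days ago'))  // three days before today, as yyyy-MM-dd
console.log(published('Jan 5, 2023')) // '2023-01-05' (UTC conversion can shift it by a day, as in the original)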
@ -1,60 +0,0 @@
const WebSocket = require('ws')
const metadata = require('./metadata.js')

async function downloadVideo(url, ws, id) {
    return new Promise(async (resolve, reject) => {
        let quality = '480p'
        const video = await metadata.getVideoMetadata(id)
        if (video.error) {
            return resolve({
                message: `Failed to request Youtube with error ${video.error}. Please retry...`,
                fail: true
            })
        }
        if (video.basic_info.duration >= 900) quality = '360p' // 15 minutes

        quality = await getVideoQuality(video, quality)

        let isDownloading = true
        const downloader = new WebSocket(`ws://${process.env.METADATA.replace('http://', '')}/download/${id}/${quality}`)
        downloader.on('message', async function message(data) {
            const text = data.toString()
            if (text == 'done') {
                isDownloading = false
                return resolve({
                    fail: false
                })
            } else {
                ws.send(`DATA - ${text}`)
            }
        })

        downloader.on('close', function close(code, reason) {
            if (!isDownloading) return

            return resolve({
                fail: true,
                message: 'The metadata server unexpectedly closed the websocket. Please try again.'
            })
        })
    })
}

async function getVideoQuality(json, quality) {
    const adaptiveFormats = json['streaming_data']['adaptive_formats'];
    let video = adaptiveFormats.find(f => f.quality_label === quality && !f.has_audio);

    // If the specified quality isn't available, find the lowest quality video
    if (!video) {
        video = adaptiveFormats.filter(f => !f.has_audio).reduce((prev, current) => {
            if (!prev || parseInt(current.quality_label) < parseInt(prev.quality_label)) {
                return current;
            }
            return prev;
        }, null);
    }

    return video ? video.quality_label : null;
}

module.exports = { downloadVideo }