This commit is contained in:
pegasko 2025-02-25 14:45:14 +03:00
commit 7024d24316
4 changed files with 200 additions and 0 deletions

24
README.md Normal file
View file

@ -0,0 +1,24 @@
# yt-dlp service
Simple wrapper service used primarily for queueing downloads while browsing.
# Why
Have you ever scrolled over yt music/soundcloud/whatever and wanted to download something, but you want to download many tracks and you don't want to keep track of already downloaded ones with that old script you made, but still have to do it so you don't download everything twice, etc.? That's it.
# How to use
API is simple as my paws: just `POST` plaintext `url` to `/add` endpoint and it will schedule.
# LICENSE
No rights granted. You have the right to use the project only. You are not permitted to remove license notices and/or copyrights, distribute, sell, fork (except for contribution), or perform commercial usage.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
You are forbidden to use this software to bypass copyright protections and online services' agreements. The authors are not responsible for any kind of damage and/or service usage policy violation. The software is provided for demonstration purposes only and not for real use.

1
submit-clipboard.sh Normal file
View file

@ -0,0 +1 @@
# Send the current Wayland clipboard contents (expected to be a single URL)
# to the yt-dlp queueing service's /add endpoint for scheduling.
# NOTE(review): the target host is hard-coded to a LAN address — adjust as needed.
wl-paste | curl -X POST http://192.168.1.70:9182/add --data-binary @-

7
submit-list.sh Normal file
View file

@ -0,0 +1,7 @@
#!/bin/sh
# Submit every non-empty, non-comment line of list.txt to the yt-dlp
# queueing service's /add endpoint.
#
# Reads line-by-line with `IFS= read -r` so each URL survives intact:
# the previous `for line in $(cat list.txt)` form word-split on any
# whitespace and glob-expanded each word, and `[ -z $line ]` was an
# unquoted (fragile) test.
while IFS= read -r line ; do
	# Skip blank lines and lines starting with '#'.
	case $line in
		''|'#'*) continue ;;
	esac
	echo "Submit $line"
	echo "$line" | curl -X POST http://127.0.0.1:9182/add --data-binary @-
	echo ""
done < list.txt

168
ytserv.py Normal file
View file

@ -0,0 +1,168 @@
#!/usr/bin/env python3
# NOTE: the shebang previously read `#!/bin/bash`, which is wrong for a
# Python script — executing the file directly would have failed.

# Service configuration constants.
NUM_PROCESSES = 1   # number of concurrent yt-dlp worker threads
OUT_DIR = 'output'  # directory for downloads, done.txt and history.txt
# COOKIE_DATA = "firefox:99887766.default-release"
# PROXY = '--proxy socks5://127.0.0.1:25344'
PROXY = ''          # extra yt-dlp proxy flags; empty string disables proxying
import asyncio
import json
import logging
import multiprocessing.pool
import os
import shlex
import threading

import aiohttp
import aiohttp.web
def mkcmd(url: str) -> str:
    """Build the yt-dlp shell command line for downloading *url* as audio.

    The URL arrives from untrusted HTTP clients and the command is run
    via os.system(), so it is shell-quoted with shlex.quote() — the
    previous plain `"{url}"` interpolation allowed shell command
    injection (e.g. a "URL" containing `"; rm -rf ~"`).

    Returns the full command string.
    """
    cmd = f'yt-dlp { PROXY } '
    # cmd += f'--cookies-from-browser { COOKIE_DATA }'
    cmd += '--restrict-filenames '
    cmd += '--xattrs '
    cmd += '--add-metadata '
    cmd += '--embed-metadata '
    cmd += '--embed-thumbnail '
    cmd += '--extract-audio '
    cmd += '--audio-format best '
    cmd += '--audio-quality 0 '
    cmd += '--format bestaudio '
    cmd += f'-o "{ OUT_DIR }/%(title).200s-%(id)s.%(ext)s" '
    # '--' stops option parsing so a URL beginning with '-' cannot be
    # mistaken for a yt-dlp flag; shlex.quote() guards the shell itself.
    cmd += '-- '
    cmd += shlex.quote(url)
    return cmd
def main():
    """Run the yt-dlp queueing web service.

    Configures logging, restores the set of already-processed URLs from
    OUT_DIR/done.txt, then serves POST /add on 0.0.0.0:9182. Each newly
    submitted URL is appended to history.txt and handed to a background
    thread pool of NUM_PROCESSES workers for download.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s] %(levelname)s: %(message)s',
    )
    # Prepare
    os.makedirs(OUT_DIR, exist_ok=True)
    # State shared between the aiohttp handler and the worker threads.
    # `lock` guards both the `links` set and the append-writes to done.txt.
    lock = threading.Lock()
    links = set()  # every URL ever seen (queued or already finished)
    pool = multiprocessing.pool.ThreadPool(
        processes=NUM_PROCESSES,
    )
    # Restore state: done.txt holds one finished URL per line, so a
    # restart will not re-download completed items.
    if os.path.exists(os.path.join(OUT_DIR, 'done.txt')):
        with open(os.path.join(OUT_DIR, 'done.txt'), 'r') as f:
            lines = f.readlines()
            lines = [ line.strip() for line in lines if line.strip() ]
            links.update(lines)
    # Worker: download one link and persist success to done.txt.
    def process_link(link: str) -> None:
        """Run yt-dlp for *link* on a pool thread; record success in done.txt."""
        logging.info(f'Process { link !r}')
        cmd = mkcmd(link)
        logging.info(f'cmd { cmd !r}')
        try:
            code = os.system(cmd)
            if code != 0:
                # Non-zero exit: do not record in done.txt, so a restart
                # can retry (the link stays deduplicated in `links`
                # only for this process's lifetime).
                logging.info(f'Failed { link !r}')
                logging.info(f' { code = }')
                return
            logging.info(f'Done { link !r}')
            # Append to done
            with lock:
                with open(os.path.join(OUT_DIR, 'done.txt'), 'a') as f:
                    f.write(link)
                    f.write('\n')
        except KeyboardInterrupt:
            logging.warning('Exiting')
            exit(0)
        except Exception as e:
            logging.info(f'Failed { link !r}')
            logging.exception(e)
    # Prepare API
    routes = aiohttp.web.RouteTableDef()
    @routes.post('/add')
    async def post__add(request: aiohttp.web.Request):
        """Accept a plaintext URL in the request body and schedule it.

        Responds "Exists" (200) when the URL was seen before (queued or
        already done); otherwise appends it to history.txt, queues the
        download, and responds "Created" (200). Any failure yields 500
        with the exception text.
        """
        try:
            if request.body_exists:
                content = await request.read()
                content = content.decode('utf-8')
                content = content.strip()
                # NOTE(review): this threading.Lock briefly blocks the
                # event loop; acceptable for a single-user service.
                with lock:
                    if content in links:
                        return aiohttp.web.Response(
                            text="Exists",
                            status=200,
                        )
                    else:
                        links.add(content)
                        # Append to history (every URL ever submitted).
                        with open(os.path.join(OUT_DIR, 'history.txt'), 'a') as f:
                            f.write(content)
                            f.write('\n')
                        logging.info(f'Schedule link { content !r}')
                        pool.apply_async(process_link, (content,))
            return aiohttp.web.Response(
                text="Created",
                status=200,
            )
        except Exception as e:
            logging.exception(e)
            return aiohttp.web.Response(
                text=str(e),
                status=500,
            )
    app = aiohttp.web.Application()
    app.add_routes(routes)
    # Run it (blocks until interrupted).
    logging.info('meow :3')
    try:
        aiohttp.web.run_app(
            app=app,
            host='0.0.0.0',
            port=9182,
        )
    except KeyboardInterrupt:
        pass
    # Drain outstanding downloads before exiting.
    pool.close()
    pool.join()
# Script entry point: start the service only when executed directly,
# not when imported as a module.
if __name__ == '__main__':
    main()