# ytdl-service/ytserv.py
# Pasted source-viewer metadata, preserved as comments so the file parses:
#   2025-02-25 14:45:14 +03:00
#   168 lines, 3.8 KiB, Python
#!/usr/bin/env python3
# NOTE(review): the original shebang was `#!/bin/bash`, which would hand this
# Python file to bash; fixed to the Python interpreter.

# Number of concurrent yt-dlp worker threads in the ThreadPool.
NUM_PROCESSES = 1
# Directory where downloads and the state files (done.txt / history.txt) go.
OUT_DIR = 'output'
# Optional extras, disabled by default:
# COOKIE_DATA = "firefox:99887766.default-release"
# PROXY = '--proxy socks5://127.0.0.1:25344'
PROXY = ''
import asyncio
import json
import logging
import multiprocessing.pool
import os
import shlex
import sys
import threading

import aiohttp
import aiohttp.web
def mkcmd(url: str) -> str:
    """Build the yt-dlp shell command line that downloads *url* as audio.

    The URL arrives from an untrusted HTTP client and the command is run via
    ``os.system`` (a shell), so the URL is escaped with ``shlex.quote`` to
    prevent shell command injection — the original wrapped it in plain double
    quotes, which `"$(...)"` or an embedded `"` could break out of.
    """
    cmd = f'yt-dlp { PROXY } '
    # cmd += f'--cookies-from-browser { COOKIE_DATA }'
    cmd += '--restrict-filenames '
    cmd += '--xattrs '
    cmd += '--add-metadata '
    cmd += '--embed-metadata '
    cmd += '--embed-thumbnail '
    cmd += '--extract-audio '
    cmd += '--audio-format best '
    cmd += '--audio-quality 0 '
    cmd += '--format bestaudio '
    # Title is truncated to 200 chars to keep filenames filesystem-safe.
    cmd += f'-o "{ OUT_DIR }/%(title).200s-%(id)s.%(ext)s" '
    cmd += '-- '  # end-of-options marker: the URL can never be parsed as a flag
    cmd += shlex.quote(url)
    return cmd
def main() -> None:
    """Run the download service: restore state, then serve the HTTP API.

    Endpoint: ``POST /add`` with the raw URL as the request body. New URLs are
    appended to ``history.txt`` and scheduled on a thread pool; each worker
    shells out to yt-dlp and appends the URL to ``done.txt`` on success.
    On restart only ``done.txt`` is reloaded, so failed/interrupted downloads
    are retried when re-submitted.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s] %(levelname)s: %(message)s',
    )
    os.makedirs(OUT_DIR, exist_ok=True)

    # Shared state: `links` is every URL already done or currently scheduled;
    # `lock` guards the set and the append-only state files.
    lock = threading.Lock()
    links = set()
    pool = multiprocessing.pool.ThreadPool(processes=NUM_PROCESSES)

    # Restore completed downloads so they are not re-downloaded.
    done_path = os.path.join(OUT_DIR, 'done.txt')
    if os.path.exists(done_path):
        with open(done_path, 'r') as f:
            links.update(line.strip() for line in f if line.strip())

    def process_link(link: str) -> None:
        """Worker: run yt-dlp for one URL; record success in done.txt."""
        logging.info(f'Process { link !r}')
        cmd = mkcmd(link)
        logging.info(f'cmd { cmd !r}')
        try:
            code = os.system(cmd)
            if code != 0:
                # Non-zero shell status: leave the link out of done.txt so a
                # re-submission retries it.
                logging.info(f'Failed { link !r}')
                logging.info(f' { code = }')
                return
            logging.info(f'Done { link !r}')
            with lock:
                with open(done_path, 'a') as f:
                    f.write(link)
                    f.write('\n')
        except KeyboardInterrupt:
            logging.warning('Exiting')
            sys.exit(0)
        except Exception as e:
            logging.info(f'Failed { link !r}')
            logging.exception(e)

    routes = aiohttp.web.RouteTableDef()

    @routes.post('/add')
    async def post__add(request: aiohttp.web.Request):
        try:
            if not request.body_exists:
                # The original implicitly returned None here, which aiohttp
                # turns into a 500; answer with an explicit client error.
                return aiohttp.web.Response(text="Empty body", status=400)
            content = (await request.read()).decode('utf-8').strip()
            with lock:
                if content in links:
                    return aiohttp.web.Response(
                        text="Exists",
                        status=200,
                    )
                links.add(content)
                # Append every accepted submission to the history log.
                with open(os.path.join(OUT_DIR, 'history.txt'), 'a') as f:
                    f.write(content)
                    f.write('\n')
            logging.info(f'Schedule link { content !r}')
            pool.apply_async(process_link, (content,))
            return aiohttp.web.Response(
                text="Created",
                status=200,
            )
        except Exception as e:
            logging.exception(e)
            return aiohttp.web.Response(
                text=str(e),
                status=500,
            )

    app = aiohttp.web.Application()
    app.add_routes(routes)

    logging.info('meow :3')
    try:
        aiohttp.web.run_app(
            app=app,
            host='0.0.0.0',
            port=9182,
        )
    except KeyboardInterrupt:
        pass
    # Drain outstanding downloads before exiting.
    pool.close()
    pool.join()


if __name__ == '__main__':
    main()