From 453d3913a75b06453222be0de38496ad3674bce3 Mon Sep 17 00:00:00 2001 From: Ayush Sehrawat Date: Sun, 17 Dec 2023 18:34:52 +0000 Subject: [PATCH] feat: formatted using black & prettier (in frontend) and moved to crlf --- backend/controllers/default.py | 44 +- backend/controllers/items.py | 91 +-- backend/controllers/settings.py | 108 +-- backend/main.py | 86 +-- backend/program/content/mdblist.py | 155 ++-- backend/program/content/overseerr.py | 186 ++--- backend/program/content/plex_watchlist.py | 119 +-- backend/program/debrid/realdebrid.py | 490 ++++++------ backend/program/libraries/plex.py | 440 +++++------ backend/program/media.py | 700 +++++++++--------- backend/program/program.py | 14 +- backend/program/scrapers/torrentio.py | 289 ++++---- backend/program/symlink.py | 47 +- backend/program/updaters/trakt.py | 7 +- backend/utils/default_settings.json | 66 +- backend/utils/logger.py | 9 +- backend/utils/request.py | 398 +++++----- backend/utils/settings.py | 108 +-- backend/utils/thread.py | 2 +- backend/utils/ui_helpers.py | 4 +- frontend/src/app.html | 42 +- .../src/lib/components/header-item.svelte | 48 +- frontend/src/lib/components/header.svelte | 214 +++--- .../lib/components/status-media-card.svelte | 116 +-- .../src/lib/components/theme-switcher.svelte | 88 +-- frontend/src/lib/helpers.ts | 142 ++-- frontend/src/lib/types.ts | 76 +- frontend/src/lib/utils.ts | 122 +-- frontend/src/routes/+error.svelte | 18 +- frontend/src/routes/+layout.svelte | 29 +- frontend/src/routes/+page.server.ts | 44 +- frontend/src/routes/+page.svelte | 44 +- frontend/src/routes/settings/+page.svelte | 20 +- frontend/src/routes/status/+page.server.ts | 70 +- frontend/src/routes/status/+page.svelte | 338 ++++----- 35 files changed, 2425 insertions(+), 2349 deletions(-) diff --git a/backend/controllers/default.py b/backend/controllers/default.py index c8844da8..15fa1ba4 100644 --- a/backend/controllers/default.py +++ b/backend/controllers/default.py @@ -1,22 +1,22 @@ -from fastapi import APIRouter, Request -from utils.settings import settings_manager -import requests -from program.debrid.realdebrid import get_user - - -router = APIRouter( - responses={404: {"description": "Not found"}}, -) - - -@router.get("/") -async def root(request: Request): - return { - "success": True, - "message": "Iceburg is running!", - } - - -@router.get("/user") -async def get_rd_user(): - return get_user() +from fastapi import APIRouter, Request +from utils.settings import settings_manager +import requests +from program.debrid.realdebrid import get_user + + +router = APIRouter( + responses={404: {"description": "Not found"}}, +) + + +@router.get("/") +async def root(request: Request): + return { + "success": True, + "message": "Iceburg is running!", + } + + +@router.get("/user") +async def get_rd_user(): + return get_user() diff --git a/backend/controllers/items.py b/backend/controllers/items.py index d3abc9f8..4894ec38 100644 --- a/backend/controllers/items.py +++ b/backend/controllers/items.py @@ -1,45 +1,46 @@ -from fastapi import APIRouter, HTTPException, Request -from program.media import MediaItemState -from utils.logger import logger - - -router = APIRouter( - prefix="/items", - tags=["items"], - responses={404: {"description": "Not found"}}, -) - -@router.get("/states") -async def get_states(request: Request): - return { - "success": True, - "states": [state.name for state in MediaItemState], - } - - -@router.get("/") -async def get_items(request: Request): - return { - "success": True, - "items": [item.to_dict() 
for item in request.app.program.media_items.items], - } - - -@router.get("/{state}") -async def get_item(request: Request, state: str): - state = MediaItemState[state] - items = request.app.program.media_items.get_items_with_state(state).items - - return { - "success": True, - "items": [item.to_dict() for item in items], - } - - -@router.delete("/remove/{item}") -async def remove_item(request: Request, item: str): - request.app.program.media_items.remove(item) - return { - "success": True, - "message": f"Removed {item}", - } +from fastapi import APIRouter, HTTPException, Request +from program.media import MediaItemState +from utils.logger import logger + + +router = APIRouter( + prefix="/items", + tags=["items"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/states") +async def get_states(request: Request): + return { + "success": True, + "states": [state.name for state in MediaItemState], + } + + +@router.get("/") +async def get_items(request: Request): + return { + "success": True, + "items": [item.to_dict() for item in request.app.program.media_items.items], + } + + +@router.get("/{state}") +async def get_item(request: Request, state: str): + state = MediaItemState[state] + items = request.app.program.media_items.get_items_with_state(state).items + + return { + "success": True, + "items": [item.to_dict() for item in items], + } + + +@router.delete("/remove/{item}") +async def remove_item(request: Request, item: str): + request.app.program.media_items.remove(item) + return { + "success": True, + "message": f"Removed {item}", + } diff --git a/backend/controllers/settings.py b/backend/controllers/settings.py index ff82e615..0c9e1016 100644 --- a/backend/controllers/settings.py +++ b/backend/controllers/settings.py @@ -1,54 +1,54 @@ -from copy import copy -from fastapi import APIRouter -from utils.settings import settings_manager -from pydantic import BaseModel - - -class SetSettings(BaseModel): - key: str - value: str - - -router = APIRouter( - prefix="/settings", - tags=["settings"], - responses={404: {"description": "Not found"}}, -) - - -@router.get("/load") -async def load_settings(): - settings_manager.load() - return { - "success": True, - "message": "Settings loaded!", - } - - -@router.post("/save") -async def save_settings(): - settings_manager.save() - return { - "success": True, - "message": "Settings saved!", - } - - -@router.get("/get/{key}") -async def get_settings(key: str): - return { - "success": True, - "data": settings_manager.get(key), - } - - -@router.post("/set") -async def set_settings(settings: SetSettings): - settings_manager.set( - settings.key, - settings.value, - ) - return { - "success": True, - "message": "Settings saved!", - } +from copy import copy +from fastapi import APIRouter +from utils.settings import settings_manager +from pydantic import BaseModel + + +class SetSettings(BaseModel): + key: str + value: str + + +router = APIRouter( + prefix="/settings", + tags=["settings"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/load") +async def load_settings(): + settings_manager.load() + return { + "success": True, + "message": "Settings loaded!", + } + + +@router.post("/save") +async def save_settings(): + settings_manager.save() + return { + "success": True, + "message": "Settings saved!", + } + + +@router.get("/get/{key}") +async def get_settings(key: str): + return { + "success": True, + "data": settings_manager.get(key), + } + + +@router.post("/set") +async def set_settings(settings: SetSettings): + 
settings_manager.set( + settings.key, + settings.value, + ) + return { + "success": True, + "message": "Settings saved!", + } diff --git a/backend/main.py b/backend/main.py index 84aebc53..6cc6c181 100644 --- a/backend/main.py +++ b/backend/main.py @@ -1,42 +1,44 @@ -import sys -import os -import uvicorn -from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware -from program.program import Program -from utils.thread import ThreadRunner -from controllers.settings import router as settings_router -from controllers.items import router as items_router -from controllers.default import router as default_router - - -sys.path.append(os.getcwd()) -program = Program() -runner = ThreadRunner(program.run, 5) - -def lifespan(app: FastAPI): - runner.start() - yield - runner.stop() - -app = FastAPI(lifespan=lifespan) -app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) - -app.program = program - -app.include_router(default_router) -app.include_router(settings_router) -app.include_router(items_router) - -if __name__ == "__main__": - try: - uvicorn.run("main:app", host="0.0.0.0", port=8080, reload=False) - except KeyboardInterrupt: - print("Exiting...") - sys.exit(0) +import sys +import os +import uvicorn +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from program.program import Program +from utils.thread import ThreadRunner +from controllers.settings import router as settings_router +from controllers.items import router as items_router +from controllers.default import router as default_router + + +sys.path.append(os.getcwd()) +program = Program() +runner = ThreadRunner(program.run, 5) + + +def lifespan(app: FastAPI): + runner.start() + yield + runner.stop() + + +app = FastAPI(lifespan=lifespan) +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.program = program + +app.include_router(default_router) +app.include_router(settings_router) +app.include_router(items_router) + +if __name__ == "__main__": + try: + uvicorn.run("main:app", host="0.0.0.0", port=8080, reload=False) + except KeyboardInterrupt: + print("Exiting...") + sys.exit(0) diff --git a/backend/program/content/mdblist.py b/backend/program/content/mdblist.py index 314082a4..16ee0a64 100644 --- a/backend/program/content/mdblist.py +++ b/backend/program/content/mdblist.py @@ -1,78 +1,77 @@ -"""Mdblist content module""" -import json -from utils.settings import settings_manager -from utils.logger import logger -from utils.request import RateLimitExceeded, RateLimiter, get, ping -from program.media import MediaItemContainer -from program.updaters.trakt import Updater as Trakt - - -class Content: - """Content class for mdblist""" - - def __init__( - self, - ): - self.initialized = False - self.settings = settings_manager.get("mdblist") - if not self._validate_settings(): - logger.info("mdblist is not configured and will not be used.") - return - self.updater = Trakt() - self.requests_per_2_minutes = self._calculate_request_time() - self.rate_limiter = RateLimiter(self.requests_per_2_minutes, 120, True) - self.initialized = True - - def _validate_settings(self): - response = ping( - f"https://mdblist.com/api/user?apikey={self.settings['api_key']}" - - ) - return not "Invalid API key!" 
in response.text - - def update_items(self, media_items: MediaItemContainer): - """Fetch media from mdblist and add them to media_items attribute - if they are not already there""" - try: - with self.rate_limiter: - logger.debug("Getting items...") - - items = [] - for list_id in self.settings["lists"]: - if list_id: - items += self._get_items_from_list( - list_id, self.settings["api_key"] - ) - - new_items = [item for item in items if item not in media_items] - container = self.updater.create_items(new_items) - added_items = media_items.extend(container) - if len(added_items) > 0: - logger.info("Added %s items", len(added_items)) - logger.debug("Done!") - except RateLimitExceeded: - pass - - def _get_items_from_list(self, list_id: str, api_key: str) -> MediaItemContainer: - return [item.imdb_id for item in list_items(list_id, api_key)] - - def _calculate_request_time(self): - limits = my_limits(self.settings["api_key"]).limits - daily_requests = limits.api_requests - requests_per_2_minutes = daily_requests / 24 / 60 * 2 - return requests_per_2_minutes - - -# API METHODS - - -def my_limits(api_key: str): - """Wrapper for mdblist api method 'My limits'""" - response = get(f"http://www.mdblist.com/api/user?apikey={api_key}") - return response.data - - -def list_items(list_id: str, api_key: str): - """Wrapper for mdblist api method 'List items'""" - response = get(f"http://www.mdblist.com/api/lists/{list_id}/items?apikey={api_key}") - return response.data +"""Mdblist content module""" +import json +from utils.settings import settings_manager +from utils.logger import logger +from utils.request import RateLimitExceeded, RateLimiter, get, ping +from program.media import MediaItemContainer +from program.updaters.trakt import Updater as Trakt + + +class Content: + """Content class for mdblist""" + + def __init__( + self, + ): + self.initialized = False + self.settings = settings_manager.get("mdblist") + if not self._validate_settings(): + logger.info("mdblist is not configured and will not be used.") + return + self.updater = Trakt() + self.requests_per_2_minutes = self._calculate_request_time() + self.rate_limiter = RateLimiter(self.requests_per_2_minutes, 120, True) + self.initialized = True + + def _validate_settings(self): + response = ping( + f"https://mdblist.com/api/user?apikey={self.settings['api_key']}" + ) + return not "Invalid API key!" 
in response.text + + def update_items(self, media_items: MediaItemContainer): + """Fetch media from mdblist and add them to media_items attribute + if they are not already there""" + try: + with self.rate_limiter: + logger.debug("Getting items...") + + items = [] + for list_id in self.settings["lists"]: + if list_id: + items += self._get_items_from_list( + list_id, self.settings["api_key"] + ) + + new_items = [item for item in items if item not in media_items] + container = self.updater.create_items(new_items) + added_items = media_items.extend(container) + if len(added_items) > 0: + logger.info("Added %s items", len(added_items)) + logger.debug("Done!") + except RateLimitExceeded: + pass + + def _get_items_from_list(self, list_id: str, api_key: str) -> MediaItemContainer: + return [item.imdb_id for item in list_items(list_id, api_key)] + + def _calculate_request_time(self): + limits = my_limits(self.settings["api_key"]).limits + daily_requests = limits.api_requests + requests_per_2_minutes = daily_requests / 24 / 60 * 2 + return requests_per_2_minutes + + +# API METHODS + + +def my_limits(api_key: str): + """Wrapper for mdblist api method 'My limits'""" + response = get(f"http://www.mdblist.com/api/user?apikey={api_key}") + return response.data + + +def list_items(list_id: str, api_key: str): + """Wrapper for mdblist api method 'List items'""" + response = get(f"http://www.mdblist.com/api/lists/{list_id}/items?apikey={api_key}") + return response.data diff --git a/backend/program/content/overseerr.py b/backend/program/content/overseerr.py index 79969fd7..b48bfa9d 100644 --- a/backend/program/content/overseerr.py +++ b/backend/program/content/overseerr.py @@ -1,93 +1,93 @@ -"""Mdblist content module""" -from requests import ConnectTimeout -from utils.settings import settings_manager -from utils.logger import logger -from utils.request import get, ping -from program.media import MediaItemContainer -from program.updaters.trakt import Updater as Trakt - - -class Content: - """Content class for overseerr""" - - def __init__( - self, - ): - self.initialized = False - self.settings = settings_manager.get("overseerr") - if self.settings.get("api_key") == "" or not self._validate_settings(): - logger.info("Overseerr is not configured and will not be used.") - return - self.updater = Trakt() - self.not_found_ids = [] - self.initialized = True - - def _validate_settings(self): - try: - response = ping( - self.settings.get("url") + "/api/v1/auth/me", - additional_headers={"X-Api-Key": self.settings.get("api_key")}, - timeout=1 - ) - return response.ok - except ConnectTimeout: - return False - - def update_items(self, media_items: MediaItemContainer): - """Fetch media from overseerr and add them to media_items attribute - if they are not already there""" - logger.debug("Getting items...") - items = self._get_items_from_overseerr(10000) - new_items = [item for item in items if item not in media_items] - container = self.updater.create_items(new_items) - added_items = media_items.extend(container) - if len(added_items) > 0: - logger.info("Added %s items", len(added_items)) - logger.debug("Done!") - - def _get_items_from_overseerr(self, amount: int): - """Fetch media from overseerr""" - - response = get( - self.settings.get("url") + f"/api/v1/request?take={amount}", - additional_headers={"X-Api-Key": self.settings.get("api_key")}, - ) - ids = [] - if response.is_ok: - for item in response.data.results: - if not item.media.imdbId: - imdb_id = self.get_imdb_id(item.media) - if imdb_id: - 
ids.append(imdb_id) - else: - ids.append(item.media.imdbId) - - return ids - - def get_imdb_id(self, overseerr_item): - """Get imdbId for item from overseerr""" - if overseerr_item.mediaType == "show": - external_id = overseerr_item.tvdbId - overseerr_item.mediaType = "tv" - id_extension = "tvdb-" - else: - external_id = overseerr_item.tmdbId - id_extension = "tmdb-" - - if f"{id_extension}{external_id}" in self.not_found_ids: - return None - response = get( - self.settings.get("url") - + f"/api/v1/{overseerr_item.mediaType}/{external_id}?language=en", - additional_headers={"X-Api-Key": self.settings.get("api_key")}, - ) - if response.is_ok: - imdb_id = response.data.externalIds.imdbId - if imdb_id: - return imdb_id - self.not_found_ids.append(f"{id_extension}{external_id}") - title = getattr(response.data, "title", None) or getattr( - response.data, "originalName", None - ) - logger.debug("Could not get imdbId for %s", title) - return None +"""Mdblist content module""" +from requests import ConnectTimeout +from utils.settings import settings_manager +from utils.logger import logger +from utils.request import get, ping +from program.media import MediaItemContainer +from program.updaters.trakt import Updater as Trakt + + +class Content: + """Content class for overseerr""" + + def __init__( + self, + ): + self.initialized = False + self.settings = settings_manager.get("overseerr") + if self.settings.get("api_key") == "" or not self._validate_settings(): + logger.info("Overseerr is not configured and will not be used.") + return + self.updater = Trakt() + self.not_found_ids = [] + self.initialized = True + + def _validate_settings(self): + try: + response = ping( + self.settings.get("url") + "/api/v1/auth/me", + additional_headers={"X-Api-Key": self.settings.get("api_key")}, + timeout=1, + ) + return response.ok + except ConnectTimeout: + return False + + def update_items(self, media_items: MediaItemContainer): + """Fetch media from overseerr and add them to media_items attribute + if they are not already there""" + logger.debug("Getting items...") + items = self._get_items_from_overseerr(10000) + new_items = [item for item in items if item not in media_items] + container = self.updater.create_items(new_items) + added_items = media_items.extend(container) + if len(added_items) > 0: + logger.info("Added %s items", len(added_items)) + logger.debug("Done!") + + def _get_items_from_overseerr(self, amount: int): + """Fetch media from overseerr""" + + response = get( + self.settings.get("url") + f"/api/v1/request?take={amount}", + additional_headers={"X-Api-Key": self.settings.get("api_key")}, + ) + ids = [] + if response.is_ok: + for item in response.data.results: + if not item.media.imdbId: + imdb_id = self.get_imdb_id(item.media) + if imdb_id: + ids.append(imdb_id) + else: + ids.append(item.media.imdbId) + + return ids + + def get_imdb_id(self, overseerr_item): + """Get imdbId for item from overseerr""" + if overseerr_item.mediaType == "show": + external_id = overseerr_item.tvdbId + overseerr_item.mediaType = "tv" + id_extension = "tvdb-" + else: + external_id = overseerr_item.tmdbId + id_extension = "tmdb-" + + if f"{id_extension}{external_id}" in self.not_found_ids: + return None + response = get( + self.settings.get("url") + + f"/api/v1/{overseerr_item.mediaType}/{external_id}?language=en", + additional_headers={"X-Api-Key": self.settings.get("api_key")}, + ) + if response.is_ok: + imdb_id = response.data.externalIds.imdbId + if imdb_id: + return imdb_id + 
self.not_found_ids.append(f"{id_extension}{external_id}") + title = getattr(response.data, "title", None) or getattr( + response.data, "originalName", None + ) + logger.debug("Could not get imdbId for %s", title) + return None diff --git a/backend/program/content/plex_watchlist.py b/backend/program/content/plex_watchlist.py index 9a5b1427..41c5e117 100644 --- a/backend/program/content/plex_watchlist.py +++ b/backend/program/content/plex_watchlist.py @@ -1,55 +1,64 @@ -"""Plex Watchlist Module""" -from requests import ConnectTimeout -from utils.request import get, ping -from utils.logger import logger -from utils.settings import settings_manager as settings -from program.media import MediaItemContainer -from program.updaters.trakt import Updater as Trakt -import json - - -class Content: - """Class for managing Plex watchlist""" - - def __init__(self): - self.initialized = False - self.watchlist_url = settings.get("plex")["watchlist"] - if not self.watchlist_url or not self._validate_settings(): - logger.info("Plex watchlist RSS URL is not configured and will not be used.") - return - self.updater = Trakt() - self.initialized = True - - def _validate_settings(self): - try: - response = ping( - self.watchlist_url, - timeout=5, - ) - return response.ok - except ConnectTimeout: - return False - - def update_items(self, media_items: MediaItemContainer): - """Fetch media from Plex watchlist and add them to media_items attribute - if they are not already there""" - logger.debug("Getting items...") - items = self._get_items_from_plex_watchlist() - new_items = [item for item in items if item not in media_items] - container = self.updater.create_items(new_items) - added_items = media_items.extend(container) - if len(added_items) > 0: - logger.info("Added %s items", len(added_items)) - logger.debug("Done!") - - def _get_items_from_plex_watchlist(self) -> list: - """Fetch media from Plex watchlist""" - response_obj = get(self.watchlist_url, timeout=5) - watchlist_data = json.loads(response_obj.response.content) - items = watchlist_data.get('items', []) - ids = [] - for item in items: - imdb_id = next((guid.split('//')[-1] for guid in item.get('guids') if "imdb://" in guid), None) - ids.append(imdb_id) - logger.debug("Found %s items", len(ids)) - return ids +"""Plex Watchlist Module""" +from requests import ConnectTimeout +from utils.request import get, ping +from utils.logger import logger +from utils.settings import settings_manager as settings +from program.media import MediaItemContainer +from program.updaters.trakt import Updater as Trakt +import json + + +class Content: + """Class for managing Plex watchlist""" + + def __init__(self): + self.initialized = False + self.watchlist_url = settings.get("plex")["watchlist"] + if not self.watchlist_url or not self._validate_settings(): + logger.info( + "Plex watchlist RSS URL is not configured and will not be used." 
+ ) + return + self.updater = Trakt() + self.initialized = True + + def _validate_settings(self): + try: + response = ping( + self.watchlist_url, + timeout=5, + ) + return response.ok + except ConnectTimeout: + return False + + def update_items(self, media_items: MediaItemContainer): + """Fetch media from Plex watchlist and add them to media_items attribute + if they are not already there""" + logger.debug("Getting items...") + items = self._get_items_from_plex_watchlist() + new_items = [item for item in items if item not in media_items] + container = self.updater.create_items(new_items) + added_items = media_items.extend(container) + if len(added_items) > 0: + logger.info("Added %s items", len(added_items)) + logger.debug("Done!") + + def _get_items_from_plex_watchlist(self) -> list: + """Fetch media from Plex watchlist""" + response_obj = get(self.watchlist_url, timeout=5) + watchlist_data = json.loads(response_obj.response.content) + items = watchlist_data.get("items", []) + ids = [] + for item in items: + imdb_id = next( + ( + guid.split("//")[-1] + for guid in item.get("guids") + if "imdb://" in guid + ), + None, + ) + ids.append(imdb_id) + logger.debug("Found %s items", len(ids)) + return ids diff --git a/backend/program/debrid/realdebrid.py b/backend/program/debrid/realdebrid.py index b5f9c754..05b1d4b5 100644 --- a/backend/program/debrid/realdebrid.py +++ b/backend/program/debrid/realdebrid.py @@ -1,243 +1,247 @@ -"""Realdebrid module""" -import os -import re -import time - -import requests -from requests import ConnectTimeout -from utils.logger import logger -from utils.request import get, post, ping -from utils.settings import settings_manager -from program.media import MediaItem, MediaItemContainer, MediaItemState - - -WANTED_FORMATS = [".mkv", ".mp4", ".avi"] -RD_BASE_URL = "https://api.real-debrid.com/rest/1.0" - - -def get_user(): - api_key = settings_manager.get("realdebrid")["api_key"] - headers = {"Authorization": f"Bearer {api_key}"} - response = requests.get( - "https://api.real-debrid.com/rest/1.0/user", headers=headers - ) - return response.json() - -class Debrid: # TODO CHECK TORRENTS LIST BEFORE DOWNLOAD, IF DOWNLOADED AND NOT IN LIBRARY CHOOSE ANOTHER TORRENT - """Real-Debrid API Wrapper""" - - def __init__(self): - # Realdebrid class library is a necessity - while True: - self.settings = settings_manager.get("realdebrid") - self.auth_headers = {"Authorization": f'Bearer {self.settings["api_key"]}'} - if self._validate_settings(): - self._torrents = {} - break - logger.error("Realdebrid settings incorrect, retrying in 2...") - time.sleep(2) - - def _validate_settings(self): - try: - response = ping( - "https://api.real-debrid.com/rest/1.0/user", - additional_headers=self.auth_headers - ) - return response.ok - except ConnectTimeout: - return False - - def download(self, media_items: MediaItemContainer): - """Download given media items from real-debrid.com""" - added_files = 0 - - items = [] - for item in media_items: - if item.state is not MediaItemState.LIBRARY: - if item.type == "movie" and item.state is MediaItemState.SCRAPE: - items.append(item) - if item.type == "show": - for season in item.seasons: - if season.state is MediaItemState.SCRAPE: - items.append(season) - else: - for episode in season.episodes: - if episode.state is MediaItemState.SCRAPE: - items.append(episode) - - for item in items: - added_files += self._download(item) - - if added_files > 0: - logger.info("Downloaded %s cached releases", added_files) - - def _download(self, item): - """Download 
movie from real-debrid.com""" - self.check_stream_availability(item) - self._determine_best_stream(item) - self._download_item(item) - # item.change_state(MediaItemState.DOWNLOAD) - return 1 - - def _download_item(self, item): - if not item.get("active_stream", None): - return 0 - request_id = self.add_magnet(item) - - time.sleep(0.3) - self.select_files(request_id, item) - - if item.type == "movie": - log_string = item.title - if item.type == "season": - log_string = f"{item.parent.title} season {item.number}" - if item.type == "episode": - log_string = f"{item.parent.parent.title} season {item.parent.number} episode {item.number}" - - logger.debug("Downloaded %s", log_string) - return 1 - - def _get_torrent_info(self, request_id): - data = self.get_torrent_info(request_id) - if not data["id"] in self._torrents.keys(): - self._torrents[data["id"]] = data - - def _determine_best_stream(self, item) -> bool: - """Returns true if season stream found for episode""" - cached = [ - stream_hash - for stream_hash, stream_value in item.streams.items() - if stream_value.get("cached") - ] - for stream_hash, stream in item.streams.items(): - if item.type == "episode": - if stream.get("files") and self._real_episode_count(stream["files"]) >= len( - item.parent.episodes - ): - item.parent.set("active_stream", stream) - logger.debug( - "Found cached release for %s %s", - item.parent.parent.title, - item.parent.number, - ) - return True - if stream.get("files") and self._real_episode_count(stream["files"]) == 0: - continue - if stream_hash in cached: - stream["hash"] = stream_hash - item.set("active_stream", stream) - break - match (item.type): - case "movie": - log_string = item.title - case "season": - log_string = f"{item.parent.title} season {item.number}" - case "episode": - log_string = f"{item.parent.parent.title} season {item.parent.number} episode {item.number}" - case _: - log_string = "" - - if item.get("active_stream", None): - logger.debug("Found cached release for %s", log_string) - else: - logger.debug("No cached release found for %s", log_string) - item.streams = {} - return False - - def check_stream_availability(self, item: MediaItem): - if len(item.streams) == 0: - return - streams = "/".join( - list(item.streams) - ) # THIS IT TO SLOW, LETS CHECK ONE STREAM AT A TIME - response = get( - f"https://api.real-debrid.com/rest/1.0/torrents/instantAvailability/{streams}/", - additional_headers=self.auth_headers, - response_type=dict, - ) - cached = False - for stream_hash, provider_list in response.data.items(): - if len(provider_list) == 0: - continue - for containers in provider_list.values(): - for container in containers: - wanted_files = { - file_id: file - for file_id, file in container.items() - if os.path.splitext(file["filename"])[1] in WANTED_FORMATS - and file["filesize"] > 50000000 - } - if wanted_files: - cached = False - if item.type == "season": - if self._real_episode_count(wanted_files) >= len( - item.episodes - ): - cached = True - if item.type == "movie": - if len(wanted_files) == 1: - cached = True - if item.type == "episode": - if len(wanted_files) >= 1: - cached = True - item.streams[stream_hash]["files"] = wanted_files - item.streams[stream_hash]["cached"] = cached - if cached: - return - - def _real_episode_count(self, files): - def count_episodes(episode_numbers): - count = 0 - for episode in episode_numbers: - if "-" in episode: - start, end = map(int, episode.split("-")) - count += end - start + 1 - else: - count += 1 - return count - - total_count = 0 - for file in 
files.values(): - episode_numbers = re.findall( - r"E(\d{1,2}(?:-\d{1,2})?)", - file["filename"], - re.IGNORECASE, - ) - total_count += count_episodes(episode_numbers) - return total_count - - def add_magnet(self, item: MediaItem) -> str: - """Add magnet link to real-debrid.com""" - if not item.active_stream.get("hash"): - return None - response = post( - "https://api.real-debrid.com/rest/1.0/torrents/addMagnet", - { - "magnet": "magnet:?xt=urn:btih:" - + item.active_stream["hash"] - + "&dn=&tr=" - }, - additional_headers=self.auth_headers, - ) - if response.is_ok: - return response.data.id - return None - - def select_files(self, request_id, item) -> bool: - """Select files from real-debrid.com""" - files = item.active_stream.get("files") - response = post( - f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{request_id}", - {"files": ",".join(files.keys())}, - additional_headers=self.auth_headers, - ) - return response.is_ok - - def get_torrent_info(self, request_id): - """Get torrent info from real-debrid.com""" - response = get( - f"https://api.real-debrid.com/rest/1.0/torrents/info/{request_id}", - additional_headers=self.auth_headers, - ) - if response.is_ok: - return response.data +"""Realdebrid module""" +import os +import re +import time + +import requests +from requests import ConnectTimeout +from utils.logger import logger +from utils.request import get, post, ping +from utils.settings import settings_manager +from program.media import MediaItem, MediaItemContainer, MediaItemState + + +WANTED_FORMATS = [".mkv", ".mp4", ".avi"] +RD_BASE_URL = "https://api.real-debrid.com/rest/1.0" + + +def get_user(): + api_key = settings_manager.get("realdebrid")["api_key"] + headers = {"Authorization": f"Bearer {api_key}"} + response = requests.get( + "https://api.real-debrid.com/rest/1.0/user", headers=headers + ) + return response.json() + + +class Debrid: # TODO CHECK TORRENTS LIST BEFORE DOWNLOAD, IF DOWNLOADED AND NOT IN LIBRARY CHOOSE ANOTHER TORRENT + """Real-Debrid API Wrapper""" + + def __init__(self): + # Realdebrid class library is a necessity + while True: + self.settings = settings_manager.get("realdebrid") + self.auth_headers = {"Authorization": f'Bearer {self.settings["api_key"]}'} + if self._validate_settings(): + self._torrents = {} + break + logger.error("Realdebrid settings incorrect, retrying in 2...") + time.sleep(2) + + def _validate_settings(self): + try: + response = ping( + "https://api.real-debrid.com/rest/1.0/user", + additional_headers=self.auth_headers, + ) + return response.ok + except ConnectTimeout: + return False + + def download(self, media_items: MediaItemContainer): + """Download given media items from real-debrid.com""" + added_files = 0 + + items = [] + for item in media_items: + if item.state is not MediaItemState.LIBRARY: + if item.type == "movie" and item.state is MediaItemState.SCRAPE: + items.append(item) + if item.type == "show": + for season in item.seasons: + if season.state is MediaItemState.SCRAPE: + items.append(season) + else: + for episode in season.episodes: + if episode.state is MediaItemState.SCRAPE: + items.append(episode) + + for item in items: + added_files += self._download(item) + + if added_files > 0: + logger.info("Downloaded %s cached releases", added_files) + + def _download(self, item): + """Download movie from real-debrid.com""" + self.check_stream_availability(item) + self._determine_best_stream(item) + self._download_item(item) + # item.change_state(MediaItemState.DOWNLOAD) + return 1 + + def _download_item(self, 
item): + if not item.get("active_stream", None): + return 0 + request_id = self.add_magnet(item) + + time.sleep(0.3) + self.select_files(request_id, item) + + if item.type == "movie": + log_string = item.title + if item.type == "season": + log_string = f"{item.parent.title} season {item.number}" + if item.type == "episode": + log_string = f"{item.parent.parent.title} season {item.parent.number} episode {item.number}" + + logger.debug("Downloaded %s", log_string) + return 1 + + def _get_torrent_info(self, request_id): + data = self.get_torrent_info(request_id) + if not data["id"] in self._torrents.keys(): + self._torrents[data["id"]] = data + + def _determine_best_stream(self, item) -> bool: + """Returns true if season stream found for episode""" + cached = [ + stream_hash + for stream_hash, stream_value in item.streams.items() + if stream_value.get("cached") + ] + for stream_hash, stream in item.streams.items(): + if item.type == "episode": + if stream.get("files") and self._real_episode_count( + stream["files"] + ) >= len(item.parent.episodes): + item.parent.set("active_stream", stream) + logger.debug( + "Found cached release for %s %s", + item.parent.parent.title, + item.parent.number, + ) + return True + if ( + stream.get("files") + and self._real_episode_count(stream["files"]) == 0 + ): + continue + if stream_hash in cached: + stream["hash"] = stream_hash + item.set("active_stream", stream) + break + match (item.type): + case "movie": + log_string = item.title + case "season": + log_string = f"{item.parent.title} season {item.number}" + case "episode": + log_string = f"{item.parent.parent.title} season {item.parent.number} episode {item.number}" + case _: + log_string = "" + + if item.get("active_stream", None): + logger.debug("Found cached release for %s", log_string) + else: + logger.debug("No cached release found for %s", log_string) + item.streams = {} + return False + + def check_stream_availability(self, item: MediaItem): + if len(item.streams) == 0: + return + streams = "/".join( + list(item.streams) + ) # THIS IT TO SLOW, LETS CHECK ONE STREAM AT A TIME + response = get( + f"https://api.real-debrid.com/rest/1.0/torrents/instantAvailability/{streams}/", + additional_headers=self.auth_headers, + response_type=dict, + ) + cached = False + for stream_hash, provider_list in response.data.items(): + if len(provider_list) == 0: + continue + for containers in provider_list.values(): + for container in containers: + wanted_files = { + file_id: file + for file_id, file in container.items() + if os.path.splitext(file["filename"])[1] in WANTED_FORMATS + and file["filesize"] > 50000000 + } + if wanted_files: + cached = False + if item.type == "season": + if self._real_episode_count(wanted_files) >= len( + item.episodes + ): + cached = True + if item.type == "movie": + if len(wanted_files) == 1: + cached = True + if item.type == "episode": + if len(wanted_files) >= 1: + cached = True + item.streams[stream_hash]["files"] = wanted_files + item.streams[stream_hash]["cached"] = cached + if cached: + return + + def _real_episode_count(self, files): + def count_episodes(episode_numbers): + count = 0 + for episode in episode_numbers: + if "-" in episode: + start, end = map(int, episode.split("-")) + count += end - start + 1 + else: + count += 1 + return count + + total_count = 0 + for file in files.values(): + episode_numbers = re.findall( + r"E(\d{1,2}(?:-\d{1,2})?)", + file["filename"], + re.IGNORECASE, + ) + total_count += count_episodes(episode_numbers) + return total_count + + def 
add_magnet(self, item: MediaItem) -> str: + """Add magnet link to real-debrid.com""" + if not item.active_stream.get("hash"): + return None + response = post( + "https://api.real-debrid.com/rest/1.0/torrents/addMagnet", + { + "magnet": "magnet:?xt=urn:btih:" + + item.active_stream["hash"] + + "&dn=&tr=" + }, + additional_headers=self.auth_headers, + ) + if response.is_ok: + return response.data.id + return None + + def select_files(self, request_id, item) -> bool: + """Select files from real-debrid.com""" + files = item.active_stream.get("files") + response = post( + f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{request_id}", + {"files": ",".join(files.keys())}, + additional_headers=self.auth_headers, + ) + return response.is_ok + + def get_torrent_info(self, request_id): + """Get torrent info from real-debrid.com""" + response = get( + f"https://api.real-debrid.com/rest/1.0/torrents/info/{request_id}", + additional_headers=self.auth_headers, + ) + if response.is_ok: + return response.data diff --git a/backend/program/libraries/plex.py b/backend/program/libraries/plex.py index 801b5b7d..3c585e7b 100644 --- a/backend/program/libraries/plex.py +++ b/backend/program/libraries/plex.py @@ -1,207 +1,233 @@ -"""Plex library module""" -import os -import time -from typing import List, Optional -from plexapi import exceptions -from plexapi.server import PlexServer -import requests -from requests.exceptions import ReadTimeout, ConnectionError -from pydantic import BaseModel, HttpUrl -from utils.logger import logger -from utils.settings import settings_manager as settings -from program.media import MediaItemState, MediaItem, Movie, Show, Season, Episode - - -class PlexSettings(BaseModel): - user: str - token: str - url: HttpUrl - user_watchlist_rss: Optional[str] = None - -class Library: - """Plex library class""" - - def __init__(self): - # Plex class library is a necessity - while True: - try: - temp_settings = settings.get("plex") - self.plex = PlexServer(temp_settings["url"], temp_settings["token"], timeout=15) - self.settings = PlexSettings(**temp_settings) - break - except exceptions.Unauthorized: - logger.error("Wrong plex token, retrying in 2...") - except ConnectionError: - logger.error("Couldnt connect to plex, retrying in 2...") - time.sleep(2) - - def update_items(self, media_items: List[MediaItem]): - logger.debug("Getting items...") - items = [] - sections = self.plex.library.sections() - processed_sections = set() - - for section in sections: - if section.key in processed_sections: - continue - - try: - if not section.refreshing: - for item in section.all(): - media_item = self._create_item(item) - if media_item: - items.append(media_item) - except requests.exceptions.ReadTimeout: - logger.error(f"Timeout occurred when accessing section: {section.title}") - continue # Skip to the next section - - processed_sections.add(section.key) - - # Add items that arent in in media_items - media_items.extend(items) - - matched_items = self.match_items(items, media_items) - if matched_items > 0: - logger.info(f"Found {matched_items} new items") - logger.debug("Done!") - - def update_sections(self, media_items: List[MediaItem]): - """Update plex library section""" - for section in self.plex.library.sections(): - for item in media_items: - if item.type == section.type and item.state in [MediaItemState.SYMLINK, MediaItemState.LIBRARY_PARTIAL]: - if ( - (item.type == "movie" and item.state is MediaItemState.SYMLINK) - or - (item.type == "show" and - any(season for season in item.seasons 
if season.state is MediaItemState.SYMLINK)) - or any(episode for season in item.seasons for episode in season.episodes if episode.state is MediaItemState.SYMLINK) - ): - if not section.refreshing: - section.update() - logger.debug("Updated section %s", section.title) - break - - def _create_item(self, item): - new_item = _map_item_from_data(item, item.type) - if new_item and item.type == "show": - for season in item.seasons(): - if season.seasonNumber != 0: - new_season = _map_item_from_data(season, "season") - if new_season: - new_season_episodes = [] - for episode in season.episodes(): - new_episode = _map_item_from_data(episode, "episode") - if new_episode: - new_season_episodes.append(new_episode) - new_season.episodes = new_season_episodes - new_item.seasons.append(new_season) - return new_item - - def match_items(self, found_items: List[MediaItem], media_items: List[MediaItem]): - """Matches items in given mediacontainer that are not in library - to items that are in library""" - logger.debug("Matching items...") - - items_to_update = 0 - - for item in media_items: - if item.state != MediaItemState.LIBRARY: - if item.type == "movie": - for found_item in found_items: - if found_item.type == "movie" and found_item.imdb_id == item.imdb_id: - self._update_item(item, found_item) - items_to_update += 1 - if item.type == "show": - for found_item in found_items: - if found_item.type == "show": - for found_season in found_item.seasons: - for found_episode in found_season.episodes: - for season in item.seasons: - if season.state is not MediaItemState.LIBRARY: - for episode in season.episodes: - if episode.imdb_id == found_episode.imdb_id: - self._update_item(episode, found_episode) - items_to_update += 1 - - return items_to_update - - def _update_item(self, item: MediaItem, library_item: MediaItem): - """Internal method to use with match_items - It does some magic to update media items according to library - items found""" - item.set("guid", library_item.guid) - item.set("key", library_item.key) - - def _fix_match(self, library_item: MediaItem, item: MediaItem): - """Internal method to use in match_items method. - It gets plex guid and checks if it matches with plex metadata - for given imdb_id. 
If it does, it will update the metadata of the plex item.""" - section = next( - section - for section in self.plex.library.sections() - if section.type == item.type - ) - dummy = section.search(maxresults=1)[0] - - if dummy and not section.refreshing: - if item.imdb_id: - try: - match = dummy.matches(agent=section.agent, title=item.imdb_id)[0] - except ReadTimeout: - return False - except IndexError: - return False - if library_item.guid != match.guid: - item_to_update = self.plex.fetchItem(library_item.key) - item_to_update.fixMatch(match) - return True - return False - - - - -def _map_item_from_data(item, item_type): - """Map Plex API data to MediaItemContainer.""" - guid = getattr(item, "guid", None) - file = None - if item_type in ["movie", "episode"]: - file = getattr(item, "locations", [None])[0].split("/")[-1] - genres = [genre.tag for genre in getattr(item, "genres", [])] - available_at = getattr(item, "originallyAvailableAt", None) - title = getattr(item, "title", None) - year = getattr(item, "year", None) - guids = getattr(item, "guids", []) - key = getattr(item, "key", None) - season_number = getattr(item, "seasonNumber", None) - episode_number = getattr(item, "episodeNumber", None) - art_url = getattr(item, "artUrl", None) - - imdb_id = next((guid.id.split("://")[-1] for guid in guids if "imdb" in guid.id), None) - aired_at = available_at or None - - media_item_data = { - "title": title, - "imdb_id": imdb_id, - "aired_at": aired_at, - "genres": genres, - "key": key, - "guid": guid, - "art_url": art_url, - "file": file, - } - - # Instantiate the appropriate subclass based on 'item_type' - if item_type == "movie": - return Movie(media_item_data) - elif item_type == "show": - return Show(media_item_data) - elif item_type == "season": - media_item_data["number"] = season_number - return Season(media_item_data) - elif item_type == "episode": - media_item_data["number"] = episode_number - media_item_data["season_number"] = season_number - return Episode(media_item_data) - else: - return None +"""Plex library module""" +import os +import time +from typing import List, Optional +from plexapi import exceptions +from plexapi.server import PlexServer +import requests +from requests.exceptions import ReadTimeout, ConnectionError +from pydantic import BaseModel, HttpUrl +from utils.logger import logger +from utils.settings import settings_manager as settings +from program.media import MediaItemState, MediaItem, Movie, Show, Season, Episode + + +class PlexSettings(BaseModel): + user: str + token: str + url: HttpUrl + user_watchlist_rss: Optional[str] = None + + +class Library: + """Plex library class""" + + def __init__(self): + # Plex class library is a necessity + while True: + try: + temp_settings = settings.get("plex") + self.plex = PlexServer( + temp_settings["url"], temp_settings["token"], timeout=15 + ) + self.settings = PlexSettings(**temp_settings) + break + except exceptions.Unauthorized: + logger.error("Wrong plex token, retrying in 2...") + except ConnectionError: + logger.error("Couldnt connect to plex, retrying in 2...") + time.sleep(2) + + def update_items(self, media_items: List[MediaItem]): + logger.debug("Getting items...") + items = [] + sections = self.plex.library.sections() + processed_sections = set() + + for section in sections: + if section.key in processed_sections: + continue + + try: + if not section.refreshing: + for item in section.all(): + media_item = self._create_item(item) + if media_item: + items.append(media_item) + except requests.exceptions.ReadTimeout: 
+ logger.error( + f"Timeout occurred when accessing section: {section.title}" + ) + continue # Skip to the next section + + processed_sections.add(section.key) + + # Add items that arent in in media_items + media_items.extend(items) + + matched_items = self.match_items(items, media_items) + if matched_items > 0: + logger.info(f"Found {matched_items} new items") + logger.debug("Done!") + + def update_sections(self, media_items: List[MediaItem]): + """Update plex library section""" + for section in self.plex.library.sections(): + for item in media_items: + if item.type == section.type and item.state in [ + MediaItemState.SYMLINK, + MediaItemState.LIBRARY_PARTIAL, + ]: + if ( + (item.type == "movie" and item.state is MediaItemState.SYMLINK) + or ( + item.type == "show" + and any( + season + for season in item.seasons + if season.state is MediaItemState.SYMLINK + ) + ) + or any( + episode + for season in item.seasons + for episode in season.episodes + if episode.state is MediaItemState.SYMLINK + ) + ): + if not section.refreshing: + section.update() + logger.debug("Updated section %s", section.title) + break + + def _create_item(self, item): + new_item = _map_item_from_data(item, item.type) + if new_item and item.type == "show": + for season in item.seasons(): + if season.seasonNumber != 0: + new_season = _map_item_from_data(season, "season") + if new_season: + new_season_episodes = [] + for episode in season.episodes(): + new_episode = _map_item_from_data(episode, "episode") + if new_episode: + new_season_episodes.append(new_episode) + new_season.episodes = new_season_episodes + new_item.seasons.append(new_season) + return new_item + + def match_items(self, found_items: List[MediaItem], media_items: List[MediaItem]): + """Matches items in given mediacontainer that are not in library + to items that are in library""" + logger.debug("Matching items...") + + items_to_update = 0 + + for item in media_items: + if item.state != MediaItemState.LIBRARY: + if item.type == "movie": + for found_item in found_items: + if ( + found_item.type == "movie" + and found_item.imdb_id == item.imdb_id + ): + self._update_item(item, found_item) + items_to_update += 1 + if item.type == "show": + for found_item in found_items: + if found_item.type == "show": + for found_season in found_item.seasons: + for found_episode in found_season.episodes: + for season in item.seasons: + if season.state is not MediaItemState.LIBRARY: + for episode in season.episodes: + if ( + episode.imdb_id + == found_episode.imdb_id + ): + self._update_item( + episode, found_episode + ) + items_to_update += 1 + + return items_to_update + + def _update_item(self, item: MediaItem, library_item: MediaItem): + """Internal method to use with match_items + It does some magic to update media items according to library + items found""" + item.set("guid", library_item.guid) + item.set("key", library_item.key) + + def _fix_match(self, library_item: MediaItem, item: MediaItem): + """Internal method to use in match_items method. + It gets plex guid and checks if it matches with plex metadata + for given imdb_id. 
If it does, it will update the metadata of the plex item.""" + section = next( + section + for section in self.plex.library.sections() + if section.type == item.type + ) + dummy = section.search(maxresults=1)[0] + + if dummy and not section.refreshing: + if item.imdb_id: + try: + match = dummy.matches(agent=section.agent, title=item.imdb_id)[0] + except ReadTimeout: + return False + except IndexError: + return False + if library_item.guid != match.guid: + item_to_update = self.plex.fetchItem(library_item.key) + item_to_update.fixMatch(match) + return True + return False + + +def _map_item_from_data(item, item_type): + """Map Plex API data to MediaItemContainer.""" + guid = getattr(item, "guid", None) + file = None + if item_type in ["movie", "episode"]: + file = getattr(item, "locations", [None])[0].split("/")[-1] + genres = [genre.tag for genre in getattr(item, "genres", [])] + available_at = getattr(item, "originallyAvailableAt", None) + title = getattr(item, "title", None) + year = getattr(item, "year", None) + guids = getattr(item, "guids", []) + key = getattr(item, "key", None) + season_number = getattr(item, "seasonNumber", None) + episode_number = getattr(item, "episodeNumber", None) + art_url = getattr(item, "artUrl", None) + + imdb_id = next( + (guid.id.split("://")[-1] for guid in guids if "imdb" in guid.id), None + ) + aired_at = available_at or None + + media_item_data = { + "title": title, + "imdb_id": imdb_id, + "aired_at": aired_at, + "genres": genres, + "key": key, + "guid": guid, + "art_url": art_url, + "file": file, + } + + # Instantiate the appropriate subclass based on 'item_type' + if item_type == "movie": + return Movie(media_item_data) + elif item_type == "show": + return Show(media_item_data) + elif item_type == "season": + media_item_data["number"] = season_number + return Season(media_item_data) + elif item_type == "episode": + media_item_data["number"] = episode_number + media_item_data["season_number"] = season_number + return Episode(media_item_data) + else: + return None diff --git a/backend/program/media.py b/backend/program/media.py index 77dc1eeb..068f6913 100644 --- a/backend/program/media.py +++ b/backend/program/media.py @@ -1,345 +1,355 @@ -"""MediaItem module""" - -from enum import IntEnum -from typing import List, Optional -from datetime import datetime -import datetime -import threading -import dill -import PTN - - -class MediaItemState(IntEnum): - """MediaItem states""" - - UNKNOWN = 0 - CONTENT = 1 - SCRAPE = 2 - DOWNLOAD = 3 - SYMLINK = 4 - LIBRARY = 5 - LIBRARY_PARTIAL = 6 - -class MediaItem: - """MediaItem class""" - - def __init__(self, item): - self._lock = threading.Lock() - self.scraped_at = 0 - self.active_stream = item.get("active_stream", None) - self.streams = {} - self.symlinked = False - self.requested_at = item.get("requested_at", None) or datetime.datetime.now() - - # Media related - self.title = item.get("title", None) - self.imdb_id = item.get("imdb_id", None) - if self.imdb_id: - self.imdb_link = f"https://www.imdb.com/title/{self.imdb_id}/" - self.aired_at = item.get("aired_at", None) - self.genres = item.get("genres", []) - - # Plex related - self.key = item.get("key", None) - self.guid = item.get("guid", None) - - @property - def state(self): - if self.key: - return MediaItemState.LIBRARY - if self.symlinked: - return MediaItemState.SYMLINK - if self.is_cached() or self.file: - return MediaItemState.DOWNLOAD - if len(self.streams) > 0: - return MediaItemState.SCRAPE - if self.title: - return MediaItemState.CONTENT - return 
MediaItemState.UNKNOWN - - def is_cached(self): - if self.streams: - return any(stream.get("cached", None) for stream in self.streams.values()) - return False - - def is_scraped(self): - return len(self.streams) > 0 - - def is_checked_for_availability(self): - if self.streams: - return all( - stream.get("cached", None) is not None - for stream in self.streams.values() - ) - return False - - def to_dict(self): - return { - "title": self.title, - "imdb_id": self.imdb_id, - "state": self.state.name, - "imdb_link": self.imdb_link if hasattr(self, 'imdb_link') else None, - "aired_at": self.aired_at, - "genres": self.genres, - "guid": self.guid, - "requested_at": self.requested_at - } - - def is_not_cached(self): - return not self.is_cached() - - def __iter__(self): - with self._lock: - for attr, _ in vars(self).items(): - yield attr - - def __eq__(self, other): - with self._lock: - if isinstance(other, type(self)): - return self.imdb_id == other.imdb_id - return False - - def get(self, key, default=None): - """Get item attribute""" - with self._lock: - return getattr(self, key, default) - - def set(self, key, value): - """Set item attribute""" - with self._lock: - _set_nested_attr(self, key, value) - - -class Movie(MediaItem): - """Movie class""" - - def __init__(self, item): - super().__init__(item) - self.type = "movie" - self.file = item.get("file", None) - - def __repr__(self): - return f"Movie:{self.title}:{self.state.name}" - - -class Show(MediaItem): - """Show class""" - - def __init__(self, item): - super().__init__(item) - self.locations = item.get("locations", []) - self.seasons = item.get("seasons", []) - self.type = "show" - - @property - def state(self): - if all(season.state is MediaItemState.LIBRARY for season in self.seasons): - return MediaItemState.LIBRARY - if any(season.state in [MediaItemState.LIBRARY, MediaItemState.LIBRARY_PARTIAL] for season in self.seasons): - return MediaItemState.LIBRARY_PARTIAL - if any(season.state == MediaItemState.CONTENT for season in self.seasons): - return MediaItemState.CONTENT - return MediaItemState.UNKNOWN - - def __repr__(self): - return f"Show:{self.title}:{self.state.name}" - - def add_season(self, season): - """Add season to show""" - with self._lock: - self.seasons.append(season) - season.parent = self - - -class Season(MediaItem): - """Season class""" - - def __init__(self, item): - super().__init__(item) - self.type = "season" - self.parent = None - self.number = item.get("number", None) - self.episodes = item.get("episodes", []) - - @property - def state(self): - if len(self.episodes) > 0: - if all(episode.state == MediaItemState.LIBRARY for episode in self.episodes): - return MediaItemState.LIBRARY - if any(episode.state == MediaItemState.LIBRARY for episode in self.episodes): - return MediaItemState.LIBRARY_PARTIAL - if self.is_cached(): - return MediaItemState.DOWNLOAD - if self.is_scraped(): - return MediaItemState.SCRAPE - if any(episode.state == MediaItemState.CONTENT for episode in self.episodes): - return MediaItemState.CONTENT - return MediaItemState.UNKNOWN - - def __eq__(self, other): - return self.number == other.number - - def __repr__(self): - return f"Season:{self.number}:{self.state.name}" - - def add_episode(self, episode): - """Add episode to season""" - with self._lock: - self.episodes.append(episode) - episode.parent = self - - -class Episode(MediaItem): - """Episode class""" - - def __init__(self, item): - super().__init__(item) - self.type = "episode" - self.parent = None - self.number = item.get("number", 
None) - self.file = item.get("file", None) - - @property - def state(self): - return super().state - - def __eq__(self, other): - return self.number == other.number - - def __repr__(self): - return f"Episode:{self.number}:{self.state.name}" - - def get_file_episodes(self): - parse = PTN.parse(self.file) - episode_number = parse.get("episode") - if type(episode_number) == int: - episode_number = [episode_number] - if parse.get("excess"): - excess_episodes = None - if type(parse["excess"]) == list: - for excess in parse["excess"]: - excess_parse = PTN.parse(excess) - if excess_parse.get("episode") is not None: - excess_episodes = excess_parse["episode"] - break - if type(parse["excess"]) == str: - excess_parse = PTN.parse(parse["excess"]) - if excess_parse.get("episode") is not None: - excess_episodes = excess_parse["episode"] - if excess_episodes: - episode_number = episode_number + excess_episodes - return episode_number - - -class MediaItemContainer: - """MediaItemContainer class""" - - def __init__(self, items: Optional[List[MediaItem]] = None): - self.items = items if items is not None else [] - self.updated_at = None - - def __iter__(self): - for item in self.items: - yield item - - def __iadd__(self, other): - if not isinstance(other, MediaItem) and other is not None: - raise TypeError("Cannot append non-MediaItem to MediaItemContainer") - if other not in self.items: - self.items.append(other) - self._set_updated_at() - return self - - def sort(self, by): - self.items.sort(key=lambda item: item.get(by), reverse=True) - - def __len__(self): - """Get length of container""" - return len(self.items) - - def append(self, item) -> bool: - """Append item to container""" - self.items.append(item) - self._set_updated_at() - - def get(self, item) -> MediaItem: - """Get item matching given item from container""" - for my_item in self.items: - if my_item == item: - return my_item - return None - - def get_item(self, attr, value) -> "MediaItemContainer": - """Get items that match given items""" - return next((item for item in self.items if getattr(item, attr) == value), None) - - def extend(self, items) -> "MediaItemContainer": - """Extend container with items""" - added_items = MediaItemContainer() - for media_item in items: - if media_item not in self.items: - self.items.append(media_item) - added_items.append(media_item) - return added_items - - def _set_updated_at(self): - self.updated_at = { - "length": len(self.items), - "time": datetime.datetime.now().timestamp(), - } - - def remove(self, item): - """Remove item from container""" - if item in self.items: - self.items.remove(item) - self._set_updated_at() - - def count(self, state) -> int: - """Count items with given state in container""" - return len(self.get_items_with_state(state)) - - def get_items_with_state(self, state): - """Get items that need to be updated""" - return MediaItemContainer([item for item in self.items if item.state == state]) - - def save(self, filename): - """Save container to file""" - with open(filename, "wb") as file: - dill.dump(self.items, file) - - def load(self, filename): - """Load container from file""" - try: - with open(filename, "rb") as file: - self.items = dill.load(file) - except FileNotFoundError: - self.items = [] - - -def _set_nested_attr(obj, key, value): - if "." 
in key: - parts = key.split(".", 1) - current_key, rest_of_keys = parts[0], parts[1] - - if not hasattr(obj, current_key): - raise AttributeError(f"Object does not have the attribute '{current_key}'.") - - current_obj = getattr(obj, current_key) - _set_nested_attr(current_obj, rest_of_keys, value) - else: - if isinstance(obj, dict): - if key in obj: - obj[key] = value - else: - setattr(obj, key, value) - - -def count_episodes(episode_nums): - count = 0 - for ep in episode_nums: - if "-" in ep: # Range of episodes - start, end = map(int, ep.split("-")) - count += end - start + 1 - else: # Individual episodes - count += 1 - return count +"""MediaItem module""" + +from enum import IntEnum +from typing import List, Optional +from datetime import datetime +import datetime +import threading +import dill +import PTN + + +class MediaItemState(IntEnum): + """MediaItem states""" + + UNKNOWN = 0 + CONTENT = 1 + SCRAPE = 2 + DOWNLOAD = 3 + SYMLINK = 4 + LIBRARY = 5 + LIBRARY_PARTIAL = 6 + + +class MediaItem: + """MediaItem class""" + + def __init__(self, item): + self._lock = threading.Lock() + self.scraped_at = 0 + self.active_stream = item.get("active_stream", None) + self.streams = {} + self.symlinked = False + self.requested_at = item.get("requested_at", None) or datetime.datetime.now() + + # Media related + self.title = item.get("title", None) + self.imdb_id = item.get("imdb_id", None) + if self.imdb_id: + self.imdb_link = f"https://www.imdb.com/title/{self.imdb_id}/" + self.aired_at = item.get("aired_at", None) + self.genres = item.get("genres", []) + + # Plex related + self.key = item.get("key", None) + self.guid = item.get("guid", None) + + @property + def state(self): + if self.key: + return MediaItemState.LIBRARY + if self.symlinked: + return MediaItemState.SYMLINK + if self.is_cached() or self.file: + return MediaItemState.DOWNLOAD + if len(self.streams) > 0: + return MediaItemState.SCRAPE + if self.title: + return MediaItemState.CONTENT + return MediaItemState.UNKNOWN + + def is_cached(self): + if self.streams: + return any(stream.get("cached", None) for stream in self.streams.values()) + return False + + def is_scraped(self): + return len(self.streams) > 0 + + def is_checked_for_availability(self): + if self.streams: + return all( + stream.get("cached", None) is not None + for stream in self.streams.values() + ) + return False + + def to_dict(self): + return { + "title": self.title, + "imdb_id": self.imdb_id, + "state": self.state.name, + "imdb_link": self.imdb_link if hasattr(self, "imdb_link") else None, + "aired_at": self.aired_at, + "genres": self.genres, + "guid": self.guid, + "requested_at": self.requested_at, + } + + def is_not_cached(self): + return not self.is_cached() + + def __iter__(self): + with self._lock: + for attr, _ in vars(self).items(): + yield attr + + def __eq__(self, other): + with self._lock: + if isinstance(other, type(self)): + return self.imdb_id == other.imdb_id + return False + + def get(self, key, default=None): + """Get item attribute""" + with self._lock: + return getattr(self, key, default) + + def set(self, key, value): + """Set item attribute""" + with self._lock: + _set_nested_attr(self, key, value) + + +class Movie(MediaItem): + """Movie class""" + + def __init__(self, item): + super().__init__(item) + self.type = "movie" + self.file = item.get("file", None) + + def __repr__(self): + return f"Movie:{self.title}:{self.state.name}" + + +class Show(MediaItem): + """Show class""" + + def __init__(self, item): + super().__init__(item) + self.locations = 
item.get("locations", []) + self.seasons = item.get("seasons", []) + self.type = "show" + + @property + def state(self): + if all(season.state is MediaItemState.LIBRARY for season in self.seasons): + return MediaItemState.LIBRARY + if any( + season.state in [MediaItemState.LIBRARY, MediaItemState.LIBRARY_PARTIAL] + for season in self.seasons + ): + return MediaItemState.LIBRARY_PARTIAL + if any(season.state == MediaItemState.CONTENT for season in self.seasons): + return MediaItemState.CONTENT + return MediaItemState.UNKNOWN + + def __repr__(self): + return f"Show:{self.title}:{self.state.name}" + + def add_season(self, season): + """Add season to show""" + with self._lock: + self.seasons.append(season) + season.parent = self + + +class Season(MediaItem): + """Season class""" + + def __init__(self, item): + super().__init__(item) + self.type = "season" + self.parent = None + self.number = item.get("number", None) + self.episodes = item.get("episodes", []) + + @property + def state(self): + if len(self.episodes) > 0: + if all( + episode.state == MediaItemState.LIBRARY for episode in self.episodes + ): + return MediaItemState.LIBRARY + if any( + episode.state == MediaItemState.LIBRARY for episode in self.episodes + ): + return MediaItemState.LIBRARY_PARTIAL + if self.is_cached(): + return MediaItemState.DOWNLOAD + if self.is_scraped(): + return MediaItemState.SCRAPE + if any( + episode.state == MediaItemState.CONTENT for episode in self.episodes + ): + return MediaItemState.CONTENT + return MediaItemState.UNKNOWN + + def __eq__(self, other): + return self.number == other.number + + def __repr__(self): + return f"Season:{self.number}:{self.state.name}" + + def add_episode(self, episode): + """Add episode to season""" + with self._lock: + self.episodes.append(episode) + episode.parent = self + + +class Episode(MediaItem): + """Episode class""" + + def __init__(self, item): + super().__init__(item) + self.type = "episode" + self.parent = None + self.number = item.get("number", None) + self.file = item.get("file", None) + + @property + def state(self): + return super().state + + def __eq__(self, other): + return self.number == other.number + + def __repr__(self): + return f"Episode:{self.number}:{self.state.name}" + + def get_file_episodes(self): + parse = PTN.parse(self.file) + episode_number = parse.get("episode") + if type(episode_number) == int: + episode_number = [episode_number] + if parse.get("excess"): + excess_episodes = None + if type(parse["excess"]) == list: + for excess in parse["excess"]: + excess_parse = PTN.parse(excess) + if excess_parse.get("episode") is not None: + excess_episodes = excess_parse["episode"] + break + if type(parse["excess"]) == str: + excess_parse = PTN.parse(parse["excess"]) + if excess_parse.get("episode") is not None: + excess_episodes = excess_parse["episode"] + if excess_episodes: + episode_number = episode_number + excess_episodes + return episode_number + + +class MediaItemContainer: + """MediaItemContainer class""" + + def __init__(self, items: Optional[List[MediaItem]] = None): + self.items = items if items is not None else [] + self.updated_at = None + + def __iter__(self): + for item in self.items: + yield item + + def __iadd__(self, other): + if not isinstance(other, MediaItem) and other is not None: + raise TypeError("Cannot append non-MediaItem to MediaItemContainer") + if other not in self.items: + self.items.append(other) + self._set_updated_at() + return self + + def sort(self, by): + self.items.sort(key=lambda item: item.get(by), reverse=True) 
+ + def __len__(self): + """Get length of container""" + return len(self.items) + + def append(self, item) -> bool: + """Append item to container""" + self.items.append(item) + self._set_updated_at() + + def get(self, item) -> MediaItem: + """Get item matching given item from container""" + for my_item in self.items: + if my_item == item: + return my_item + return None + + def get_item(self, attr, value) -> "MediaItemContainer": + """Get items that match given items""" + return next((item for item in self.items if getattr(item, attr) == value), None) + + def extend(self, items) -> "MediaItemContainer": + """Extend container with items""" + added_items = MediaItemContainer() + for media_item in items: + if media_item not in self.items: + self.items.append(media_item) + added_items.append(media_item) + return added_items + + def _set_updated_at(self): + self.updated_at = { + "length": len(self.items), + "time": datetime.datetime.now().timestamp(), + } + + def remove(self, item): + """Remove item from container""" + if item in self.items: + self.items.remove(item) + self._set_updated_at() + + def count(self, state) -> int: + """Count items with given state in container""" + return len(self.get_items_with_state(state)) + + def get_items_with_state(self, state): + """Get items that need to be updated""" + return MediaItemContainer([item for item in self.items if item.state == state]) + + def save(self, filename): + """Save container to file""" + with open(filename, "wb") as file: + dill.dump(self.items, file) + + def load(self, filename): + """Load container from file""" + try: + with open(filename, "rb") as file: + self.items = dill.load(file) + except FileNotFoundError: + self.items = [] + + +def _set_nested_attr(obj, key, value): + if "." in key: + parts = key.split(".", 1) + current_key, rest_of_keys = parts[0], parts[1] + + if not hasattr(obj, current_key): + raise AttributeError(f"Object does not have the attribute '{current_key}'.") + + current_obj = getattr(obj, current_key) + _set_nested_attr(current_obj, rest_of_keys, value) + else: + if isinstance(obj, dict): + if key in obj: + obj[key] = value + else: + setattr(obj, key, value) + + +def count_episodes(episode_nums): + count = 0 + for ep in episode_nums: + if "-" in ep: # Range of episodes + start, end = map(int, ep.split("-")) + count += end - start + 1 + else: # Individual episodes + count += 1 + return count diff --git a/backend/program/program.py b/backend/program/program.py index c2b94729..9897bf76 100644 --- a/backend/program/program.py +++ b/backend/program/program.py @@ -12,6 +12,7 @@ from program.libraries.plex import Library as Plex from program.debrid.realdebrid import Debrid as RealDebrid + # Pydantic models for configuration class PlexConfig(BaseModel): user: str @@ -19,21 +20,26 @@ class PlexConfig(BaseModel): address: HttpUrl watchlist: Optional[HttpUrl] = None + class MdblistConfig(BaseModel): lists: list[str] = Field(default_factory=list) api_key: str update_interval: int = 80 + class OverseerrConfig(BaseModel): url: HttpUrl api_key: str + class RealDebridConfig(BaseModel): api_key: str + class TorrentioConfig(BaseModel): filter: str + class Settings(BaseModel): version: str debug: bool @@ -49,6 +55,7 @@ class Settings(BaseModel): class Program: """Program class""" + def __init__(self): self.settings = settings_manager.get_all() self.plex = Plex() @@ -58,9 +65,8 @@ def __init__(self): self.content_services = self.__import_modules("backend/program/content") self.scraping_services = 
self.__import_modules("backend/program/scrapers") self.data_path = get_data_path() - - logger.info("Iceberg initialized") + logger.info("Iceberg initialized") def run(self): """Run the program""" @@ -97,8 +103,8 @@ def _validate_modules(self): return False def __import_modules(self, folder_path: str) -> list[object]: - if os.path.exists('/iceberg'): - folder_path = os.path.join('/iceberg', folder_path) + if os.path.exists("/iceberg"): + folder_path = os.path.join("/iceberg", folder_path) else: folder_path = folder_path file_list = [ diff --git a/backend/program/scrapers/torrentio.py b/backend/program/scrapers/torrentio.py index f88965ad..210e9d7d 100644 --- a/backend/program/scrapers/torrentio.py +++ b/backend/program/scrapers/torrentio.py @@ -1,146 +1,143 @@ -""" Torrentio scraper module """ -from datetime import datetime -import re -from requests.exceptions import RequestException -from utils.logger import logger -from utils.request import RateLimitExceeded, get, RateLimiter -from utils.settings import settings_manager -from program.media import ( - MediaItem, - MediaItemContainer, - MediaItemState, -) - - -class Scraper: - """Scraper for torrentio""" - - def __init__(self): - self.settings = "torrentio" - self.class_settings = settings_manager.get(self.settings) - self.last_scrape = 0 - self.filters = self.class_settings["filter"] - self.minute_limiter = RateLimiter( - max_calls=140, period=60 * 5, raise_on_limit=True - ) - self.second_limiter = RateLimiter(max_calls=1, period=1) - self.initialized = True - - def scrape(self, media_items: MediaItemContainer): - """Scrape the torrentio site for the given media items - and update the object with scraped streams""" - logger.info("Scraping...") - scraped_amount = 0 - items = [item for item in media_items if self._can_we_scrape(item)] - for item in items: - try: - if item.type == "movie": - scraped_amount += self._scrape_items([item]) - else: - scraped_amount += self._scrape_show(item) - except RequestException as exception: - logger.error("%s, trying again next cycle", exception) - break - except RateLimitExceeded as exception: - logger.error("%s, trying again next cycle", exception) - break - if scraped_amount > 0: - logger.info("Scraped %s streams", scraped_amount) - logger.info("Done!") - - def _scrape_show(self, item: MediaItem): - scraped_amount = 0 - seasons = [season for season in item.seasons if self._can_we_scrape(season)] - scraped_amount += self._scrape_items(seasons) - episodes = [ - episode - for season in item.seasons - for episode in season.episodes - if not season.is_scraped() and self._can_we_scrape(episode) - ] - scraped_amount += self._scrape_items(episodes) - return scraped_amount - - def _scrape_items(self, items: list): - amount_scraped = 0 - for item in items: - data = self.api_scrape(item) - log_string = item.title - if item.type == "season": - log_string = f"{item.parent.title} season {item.number}" - if item.type == "episode": - log_string = f"{item.parent.parent.title} season {item.parent.number} episode {item.number}" - if len(data) > 0: - item.set("streams", data) - logger.debug("Found %s streams for %s", len(data), log_string) - amount_scraped += 1 - continue - logger.debug("Could not find streams for %s", log_string) - return amount_scraped - - def _can_we_scrape(self, item: MediaItem) -> bool: - def is_released(): - return ( - item.aired_at is not None - and item.aired_at < datetime.now() - ) - - def needs_new_scrape(): - return ( - datetime.now().timestamp() - item.scraped_at > 60 * 30 - or item.scraped_at 
== 0 - ) - - if item.type == "show" and item.state in [ - MediaItemState.CONTENT, - MediaItemState.LIBRARY_PARTIAL, - ]: - return True - - if item.type in ["movie", "season", "episode"] and is_released(): - valid_states = { - "movie": [MediaItemState.CONTENT], - "season": [MediaItemState.CONTENT], - "episode": [MediaItemState.CONTENT], - } - if (item.state in valid_states[item.type]): - return needs_new_scrape() - - return False - - def api_scrape(self, item): - """Wrapper for torrentio scrape method""" - with self.minute_limiter: - if item.type == "season": - identifier = f":{item.number}:1" - scrape_type = "show" - imdb_id = item.parent.imdb_id - elif item.type == "episode": - identifier = f":{item.parent.number}:{item.number}" - scrape_type = "show" - imdb_id = item.parent.parent.imdb_id - else: - identifier = None - scrape_type = "movie" - imdb_id = item.imdb_id - - url = ( - f"https://torrentio.strem.fun/{self.filters}" - + f"/stream/{scrape_type}/{imdb_id}" - ) - if identifier: - url += f"{identifier}" - with self.second_limiter: - response = get(f"{url}.json", retry_if_failed=False) - item.set("scraped_at", datetime.now().timestamp()) - if response.is_ok: - data = {} - for stream in response.data.streams: - if len(data) >= 20: - break - data[stream.infoHash] = { - "name": stream.title.split("\n👤")[0], - } - if len(data) > 0: - return data - return {} +""" Torrentio scraper module """ +from datetime import datetime +import re +from requests.exceptions import RequestException +from utils.logger import logger +from utils.request import RateLimitExceeded, get, RateLimiter +from utils.settings import settings_manager +from program.media import ( + MediaItem, + MediaItemContainer, + MediaItemState, +) + + +class Scraper: + """Scraper for torrentio""" + + def __init__(self): + self.settings = "torrentio" + self.class_settings = settings_manager.get(self.settings) + self.last_scrape = 0 + self.filters = self.class_settings["filter"] + self.minute_limiter = RateLimiter( + max_calls=140, period=60 * 5, raise_on_limit=True + ) + self.second_limiter = RateLimiter(max_calls=1, period=1) + self.initialized = True + + def scrape(self, media_items: MediaItemContainer): + """Scrape the torrentio site for the given media items + and update the object with scraped streams""" + logger.info("Scraping...") + scraped_amount = 0 + items = [item for item in media_items if self._can_we_scrape(item)] + for item in items: + try: + if item.type == "movie": + scraped_amount += self._scrape_items([item]) + else: + scraped_amount += self._scrape_show(item) + except RequestException as exception: + logger.error("%s, trying again next cycle", exception) + break + except RateLimitExceeded as exception: + logger.error("%s, trying again next cycle", exception) + break + if scraped_amount > 0: + logger.info("Scraped %s streams", scraped_amount) + logger.info("Done!") + + def _scrape_show(self, item: MediaItem): + scraped_amount = 0 + seasons = [season for season in item.seasons if self._can_we_scrape(season)] + scraped_amount += self._scrape_items(seasons) + episodes = [ + episode + for season in item.seasons + for episode in season.episodes + if not season.is_scraped() and self._can_we_scrape(episode) + ] + scraped_amount += self._scrape_items(episodes) + return scraped_amount + + def _scrape_items(self, items: list): + amount_scraped = 0 + for item in items: + data = self.api_scrape(item) + log_string = item.title + if item.type == "season": + log_string = f"{item.parent.title} season {item.number}" + if item.type == 
"episode": + log_string = f"{item.parent.parent.title} season {item.parent.number} episode {item.number}" + if len(data) > 0: + item.set("streams", data) + logger.debug("Found %s streams for %s", len(data), log_string) + amount_scraped += 1 + continue + logger.debug("Could not find streams for %s", log_string) + return amount_scraped + + def _can_we_scrape(self, item: MediaItem) -> bool: + def is_released(): + return item.aired_at is not None and item.aired_at < datetime.now() + + def needs_new_scrape(): + return ( + datetime.now().timestamp() - item.scraped_at > 60 * 30 + or item.scraped_at == 0 + ) + + if item.type == "show" and item.state in [ + MediaItemState.CONTENT, + MediaItemState.LIBRARY_PARTIAL, + ]: + return True + + if item.type in ["movie", "season", "episode"] and is_released(): + valid_states = { + "movie": [MediaItemState.CONTENT], + "season": [MediaItemState.CONTENT], + "episode": [MediaItemState.CONTENT], + } + if item.state in valid_states[item.type]: + return needs_new_scrape() + + return False + + def api_scrape(self, item): + """Wrapper for torrentio scrape method""" + with self.minute_limiter: + if item.type == "season": + identifier = f":{item.number}:1" + scrape_type = "show" + imdb_id = item.parent.imdb_id + elif item.type == "episode": + identifier = f":{item.parent.number}:{item.number}" + scrape_type = "show" + imdb_id = item.parent.parent.imdb_id + else: + identifier = None + scrape_type = "movie" + imdb_id = item.imdb_id + + url = ( + f"https://torrentio.strem.fun/{self.filters}" + + f"/stream/{scrape_type}/{imdb_id}" + ) + if identifier: + url += f"{identifier}" + with self.second_limiter: + response = get(f"{url}.json", retry_if_failed=False) + item.set("scraped_at", datetime.now().timestamp()) + if response.is_ok: + data = {} + for stream in response.data.streams: + if len(data) >= 20: + break + data[stream.infoHash] = { + "name": stream.title.split("\n👤")[0], + } + if len(data) > 0: + return data + return {} diff --git a/backend/program/symlink.py b/backend/program/symlink.py index 3f8fd1be..4ff74edc 100644 --- a/backend/program/symlink.py +++ b/backend/program/symlink.py @@ -37,7 +37,9 @@ def __init__( def _determine_file_name(self, item): filename = None if item.type == "movie": - filename = f"{item.title} ({item.aired_at.year}) " + "{imdb-" + item.imdb_id + "}" + filename = ( + f"{item.title} ({item.aired_at.year}) " + "{imdb-" + item.imdb_id + "}" + ) if item.type == "episode": episode_string = "" episode_number = item.get_file_episodes() @@ -49,7 +51,7 @@ def _determine_file_name(self, item): if episode_string != "": showname = item.parent.parent.title showyear = item.parent.parent.aired_at.year - filename = f"{showname} ({showyear}) - s{str(item.parent.number).zfill(2)}{episode_string} - {item.title}" + filename = f"{showname} ({showyear}) - s{str(item.parent.number).zfill(2)}{episode_string} - {item.title}" return filename def run(self, media_items): @@ -57,12 +59,18 @@ def run(self, media_items): items = [] for item in media_items: if item.type == "movie" and item.state is MediaItemState.DOWNLOAD: - item.file = next(iter(item.active_stream["files"].values())).get("filename") + item.file = next(iter(item.active_stream["files"].values())).get( + "filename" + ) file = self._find_file(item.file) if file: item.folder = os.path.dirname(file).split("/")[-1] items.append(item) - if item.type == "show" and item.state in [MediaItemState.LIBRARY_PARTIAL, MediaItemState.SYMLINK, MediaItemState.DOWNLOAD]: + if item.type == "show" and item.state in [ + 
MediaItemState.LIBRARY_PARTIAL, + MediaItemState.SYMLINK, + MediaItemState.DOWNLOAD, + ]: for season in item.seasons: if season.state is MediaItemState.DOWNLOAD: stream = season.get("active_stream") @@ -74,16 +82,22 @@ def run(self, media_items): episode = obj["episode"] if type(episode) == list: for sub_episode in episode: - season.episodes[sub_episode - 1].file = file["filename"] + season.episodes[sub_episode - 1].file = file[ + "filename" + ] else: index = obj["episode"] - 1 if index in range(len(season.episodes)): - season.episodes[obj["episode"] - 1].file = file["filename"] + season.episodes[obj["episode"] - 1].file = file[ + "filename" + ] for episode in season.episodes: if episode.state is MediaItemState.DOWNLOAD: file = self._find_file(episode.file) if file: - episode.folder = os.path.dirname(file).split("/")[-1] + episode.folder = os.path.dirname(file).split("/")[ + -1 + ] items.append(episode) for item in items: @@ -91,14 +105,21 @@ def run(self, media_items): symlink_filename = f"{self._determine_file_name(item)}.{extension}" if item.type == "movie": - movie_folder = f"{item.title} ({item.aired_at.year}) " + "{imdb-" + item.imdb_id + "}" + movie_folder = ( + f"{item.title} ({item.aired_at.year}) " + + "{imdb-" + + item.imdb_id + + "}" + ) folder_path = os.path.join(self.symlink_path, "movies", movie_folder) symlink_path = os.path.join(folder_path, symlink_filename) if not os.path.exists(folder_path): os.mkdir(folder_path) if item.type == "episode": show = item.parent.parent - show_folder = f"{show.title} ({show.aired_at.year})" + " {" + show.imdb_id + "}" + show_folder = ( + f"{show.title} ({show.aired_at.year})" + " {" + show.imdb_id + "}" + ) show_path = os.path.join(self.symlink_path, "shows", show_folder) if not os.path.exists(show_path): os.mkdir(show_path) @@ -108,7 +129,7 @@ def run(self, media_items): if not os.path.exists(season_path): os.mkdir(season_path) symlink_path = os.path.join(season_path, symlink_filename) - + if symlink_path: try: os.remove(symlink_path) @@ -116,7 +137,7 @@ def run(self, media_items): pass os.symlink( os.path.join(self.mount_path, "torrents", item.folder, item.file), - symlink_path + symlink_path, ) logger.debug("Created symlink for %s", item.__repr__) item.symlinked = True @@ -124,8 +145,8 @@ def run(self, media_items): def _find_file(self, filename): return self.cache.get(filename, None) - + def update_cache(self): for root, _, files in os.walk(os.path.join(self.host_path, "torrents")): for file in files: - self.cache[file] = os.path.join(root, file) \ No newline at end of file + self.cache[file] = os.path.join(root, file) diff --git a/backend/program/updaters/trakt.py b/backend/program/updaters/trakt.py index 0734dcb5..fd07e988 100644 --- a/backend/program/updaters/trakt.py +++ b/backend/program/updaters/trakt.py @@ -14,6 +14,7 @@ CLIENT_ID = "0183a05ad97098d87287fe46da4ae286f434f32e8e951caad4cc147c947d79a3" + class Updater: """Trakt updater class""" @@ -63,9 +64,7 @@ def _map_item_from_data(data, item_type): formatted_aired_at = None if getattr(data, "first_aired", None): aired_at = data.first_aired - formatted_aired_at = datetime.strptime( - aired_at, "%Y-%m-%dT%H:%M:%S.%fZ" - ) + formatted_aired_at = datetime.strptime(aired_at, "%Y-%m-%dT%H:%M:%S.%fZ") if getattr(data, "released", None): released_at = data.released formatted_aired_at = datetime.strptime(released_at, "%Y-%m-%d") @@ -126,4 +125,4 @@ def create_item_from_imdb_id(imdb_id: str): data = response.data[0].show if data: return _map_item_from_data(data, media_type) - return None \ 
No newline at end of file + return None diff --git a/backend/utils/default_settings.json b/backend/utils/default_settings.json index 54b8a628..ac59de81 100644 --- a/backend/utils/default_settings.json +++ b/backend/utils/default_settings.json @@ -1,34 +1,32 @@ -{ - "version": "0.2.1", - "debug": true, - "service_mode": false, - "log": true, - "menu_on_startup": true, - "plex": { - "user": "", - "token" : "", - "url": "http://localhost:32400", - "watchlist": "" - }, - "symlink": { - "host_mount": "", - "mount": "" - }, - "mdblist": { - "lists": [ - "" - ], - "api_key" : "", - "update_interval": 80 - }, - "overseerr": { - "url" : "http://localhost:5055", - "api_key" : "" - }, - "torrentio" : { - "filter" : "sort=qualitysize%7Cqualityfilter=480p,other,scr,cam,unknown" - }, - "realdebrid" : { - "api_key" : "" - } -} +{ + "version": "0.2.1", + "debug": true, + "service_mode": false, + "log": true, + "menu_on_startup": true, + "plex": { + "user": "", + "token": "", + "url": "http://localhost:32400", + "watchlist": "" + }, + "symlink": { + "host_mount": "", + "mount": "" + }, + "mdblist": { + "lists": [""], + "api_key": "", + "update_interval": 80 + }, + "overseerr": { + "url": "http://localhost:5055", + "api_key": "" + }, + "torrentio": { + "filter": "sort=qualitysize%7Cqualityfilter=480p,other,scr,cam,unknown" + }, + "realdebrid": { + "api_key": "" + } +} diff --git a/backend/utils/logger.py b/backend/utils/logger.py index bea406e9..7ea31bd5 100644 --- a/backend/utils/logger.py +++ b/backend/utils/logger.py @@ -5,12 +5,11 @@ import re import sys + def get_data_path(): main_dir = os.path.dirname(os.path.abspath(sys.modules["__main__"].__file__)) - return os.path.join( - main_dir, - os.pardir, - "data") + return os.path.join(main_dir, os.pardir, "data") + class RedactSensitiveInfo(logging.Filter): """logging filter to redact sensitive info""" @@ -91,5 +90,5 @@ def __init__(self): self.addHandler(file_handler) self.addHandler(console_handler) -logger = Logger() +logger = Logger() diff --git a/backend/utils/request.py b/backend/utils/request.py index 9a1832de..b5e667da 100644 --- a/backend/utils/request.py +++ b/backend/utils/request.py @@ -1,200 +1,198 @@ -"""Requests wrapper""" -import json -import logging -from multiprocessing import Lock -import time -from types import SimpleNamespace -import requests -from lxml import etree -from urllib3.util.retry import Retry -from requests.adapters import HTTPAdapter -import xmltodict - -logger = logging.getLogger(__name__) - -_retry_strategy = Retry( - total=5, - status_forcelist=[500, 502, 503, 504], -) -_adapter = HTTPAdapter(max_retries=_retry_strategy) - - -class ResponseObject: - """Response object""" - - def __init__(self, response: requests.Response, response_type=SimpleNamespace): - self.response = response - self.is_ok = response.ok - self.status_code = response.status_code - self.response_type = response_type - self.data = self.handle_response(response) - - def handle_response(self, response: requests.Response): - """Handle different types of responses""" - if not self.is_ok: - logger.warning("Error: %s %s", response.status_code, response.content) - if self.status_code not in [200, 201, 204]: - if self.status_code == 429: - raise requests.exceptions.RequestException(response.content) - return {} - if len(response.content) > 0: - if "handler error" not in response.text: - content_type = response.headers.get("Content-Type") - if "text/xml" in content_type: - if self.response_type == dict: - return xmltodict.parse(response.content) - return 
_xml_to_simplenamespace(response.content) - if "application/json" in content_type: - if self.response_type == dict: - return json.loads(response.content) - return json.loads( - response.content, - object_hook=lambda item: SimpleNamespace(**item), - ) - return {} - - -def _handle_request_exception() -> SimpleNamespace: - """Handle exceptions during requests and return a namespace object.""" - logger.error("Request failed", exc_info=True) - return SimpleNamespace(ok=False, data={}, content={}, status_code=500) - - -def _make_request( - method: str, - url: str, - data: dict = None, - timeout=5, - additional_headers=None, - retry_if_failed=True, - response_type=SimpleNamespace, -) -> ResponseObject: - session = requests.Session() - if retry_if_failed: - session.mount("http://", _adapter) - session.mount("https://", _adapter) - headers = {"Content-Type": "application/json", "Accept": "application/json"} - if additional_headers: - headers.update(additional_headers) - - try: - response = session.request( - method, url, headers=headers, data=data, timeout=timeout - ) - except requests.RequestException: - response = _handle_request_exception() - - session.close() - return ResponseObject(response, response_type) - -def ping( - url: str, - timeout=10, - additional_headers=None -): - return requests.Session().get(url, headers=additional_headers, timeout=timeout) - -def get( - url: str, - timeout=10, - additional_headers=None, - retry_if_failed=True, - response_type=SimpleNamespace, -) -> ResponseObject: - """Requests get wrapper""" - return _make_request( - "GET", - url, - timeout=timeout, - additional_headers=additional_headers, - retry_if_failed=retry_if_failed, - response_type=response_type, - ) - - -def post( - url: str, data: dict, timeout=10, additional_headers=None, retry_if_failed=False -) -> ResponseObject: - """Requests post wrapper""" - return _make_request( - "POST", - url, - data=data, - timeout=timeout, - additional_headers=additional_headers, - retry_if_failed=retry_if_failed, - ) - - -def put( - url: str, - data: dict = None, - timeout=10, - additional_headers=None, - retry_if_failed=False, -) -> ResponseObject: - """Requests put wrapper""" - return _make_request( - "PUT", - url, - data=data, - timeout=timeout, - additional_headers=additional_headers, - retry_if_failed=retry_if_failed, - ) - - -def _xml_to_simplenamespace(xml_string): - root = etree.fromstring(xml_string) - - def element_to_simplenamespace(element): - children_as_ns = { - child.tag: element_to_simplenamespace(child) for child in element - } - attributes = {key: value for key, value in element.attrib.items()} - attributes.update(children_as_ns) - return SimpleNamespace(**attributes, text=element.text) - - return element_to_simplenamespace(root) - - -class RateLimitExceeded(Exception): - pass - - -class RateLimiter: - def __init__(self, max_calls, period, raise_on_limit=False): - self.max_calls = max_calls - self.period = period - self.tokens = max_calls - self.last_call = ( - time.time() - period - ) # Initialize as if the last call was a "period" ago - self.lock = Lock() - self.raise_on_limit = raise_on_limit - - def __enter__(self): - with self.lock: - current_time = time.time() - time_since_last_call = current_time - self.last_call - - # Refill tokens only if the entire period has elapsed - if time_since_last_call >= self.period: - self.tokens = self.max_calls - - if self.tokens < 1: - if self.raise_on_limit: - raise RateLimitExceeded("Rate limit exceeded!") - time_to_sleep = self.period - time_since_last_call - 
time.sleep(time_to_sleep) - # After sleeping, update the last_call to account for the time we just waited - self.last_call = current_time + time_to_sleep - else: - # If we had enough tokens, just consume one and update the last call - self.tokens -= 1 - self.last_call = current_time - - return self - - def __exit__(self, exc_type, exc_value, traceback): - pass +"""Requests wrapper""" +import json +import logging +from multiprocessing import Lock +import time +from types import SimpleNamespace +import requests +from lxml import etree +from urllib3.util.retry import Retry +from requests.adapters import HTTPAdapter +import xmltodict + +logger = logging.getLogger(__name__) + +_retry_strategy = Retry( + total=5, + status_forcelist=[500, 502, 503, 504], +) +_adapter = HTTPAdapter(max_retries=_retry_strategy) + + +class ResponseObject: + """Response object""" + + def __init__(self, response: requests.Response, response_type=SimpleNamespace): + self.response = response + self.is_ok = response.ok + self.status_code = response.status_code + self.response_type = response_type + self.data = self.handle_response(response) + + def handle_response(self, response: requests.Response): + """Handle different types of responses""" + if not self.is_ok: + logger.warning("Error: %s %s", response.status_code, response.content) + if self.status_code not in [200, 201, 204]: + if self.status_code == 429: + raise requests.exceptions.RequestException(response.content) + return {} + if len(response.content) > 0: + if "handler error" not in response.text: + content_type = response.headers.get("Content-Type") + if "text/xml" in content_type: + if self.response_type == dict: + return xmltodict.parse(response.content) + return _xml_to_simplenamespace(response.content) + if "application/json" in content_type: + if self.response_type == dict: + return json.loads(response.content) + return json.loads( + response.content, + object_hook=lambda item: SimpleNamespace(**item), + ) + return {} + + +def _handle_request_exception() -> SimpleNamespace: + """Handle exceptions during requests and return a namespace object.""" + logger.error("Request failed", exc_info=True) + return SimpleNamespace(ok=False, data={}, content={}, status_code=500) + + +def _make_request( + method: str, + url: str, + data: dict = None, + timeout=5, + additional_headers=None, + retry_if_failed=True, + response_type=SimpleNamespace, +) -> ResponseObject: + session = requests.Session() + if retry_if_failed: + session.mount("http://", _adapter) + session.mount("https://", _adapter) + headers = {"Content-Type": "application/json", "Accept": "application/json"} + if additional_headers: + headers.update(additional_headers) + + try: + response = session.request( + method, url, headers=headers, data=data, timeout=timeout + ) + except requests.RequestException: + response = _handle_request_exception() + + session.close() + return ResponseObject(response, response_type) + + +def ping(url: str, timeout=10, additional_headers=None): + return requests.Session().get(url, headers=additional_headers, timeout=timeout) + + +def get( + url: str, + timeout=10, + additional_headers=None, + retry_if_failed=True, + response_type=SimpleNamespace, +) -> ResponseObject: + """Requests get wrapper""" + return _make_request( + "GET", + url, + timeout=timeout, + additional_headers=additional_headers, + retry_if_failed=retry_if_failed, + response_type=response_type, + ) + + +def post( + url: str, data: dict, timeout=10, additional_headers=None, retry_if_failed=False +) -> ResponseObject: 
+ """Requests post wrapper""" + return _make_request( + "POST", + url, + data=data, + timeout=timeout, + additional_headers=additional_headers, + retry_if_failed=retry_if_failed, + ) + + +def put( + url: str, + data: dict = None, + timeout=10, + additional_headers=None, + retry_if_failed=False, +) -> ResponseObject: + """Requests put wrapper""" + return _make_request( + "PUT", + url, + data=data, + timeout=timeout, + additional_headers=additional_headers, + retry_if_failed=retry_if_failed, + ) + + +def _xml_to_simplenamespace(xml_string): + root = etree.fromstring(xml_string) + + def element_to_simplenamespace(element): + children_as_ns = { + child.tag: element_to_simplenamespace(child) for child in element + } + attributes = {key: value for key, value in element.attrib.items()} + attributes.update(children_as_ns) + return SimpleNamespace(**attributes, text=element.text) + + return element_to_simplenamespace(root) + + +class RateLimitExceeded(Exception): + pass + + +class RateLimiter: + def __init__(self, max_calls, period, raise_on_limit=False): + self.max_calls = max_calls + self.period = period + self.tokens = max_calls + self.last_call = ( + time.time() - period + ) # Initialize as if the last call was a "period" ago + self.lock = Lock() + self.raise_on_limit = raise_on_limit + + def __enter__(self): + with self.lock: + current_time = time.time() + time_since_last_call = current_time - self.last_call + + # Refill tokens only if the entire period has elapsed + if time_since_last_call >= self.period: + self.tokens = self.max_calls + + if self.tokens < 1: + if self.raise_on_limit: + raise RateLimitExceeded("Rate limit exceeded!") + time_to_sleep = self.period - time_since_last_call + time.sleep(time_to_sleep) + # After sleeping, update the last_call to account for the time we just waited + self.last_call = current_time + time_to_sleep + else: + # If we had enough tokens, just consume one and update the last call + self.tokens -= 1 + self.last_call = current_time + + return self + + def __exit__(self, exc_type, exc_value, traceback): + pass diff --git a/backend/utils/settings.py b/backend/utils/settings.py index dd4f37e9..4a18f7f3 100644 --- a/backend/utils/settings.py +++ b/backend/utils/settings.py @@ -1,51 +1,57 @@ -"""Settings manager""" -from utils.logger import logger -import json -import os -import shutil - -class SettingsManager: - """Class that handles settings""" - - def __init__(self): - self.filename = "data/settings.json" - self.config_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) - self.settings_file = os.path.join(self.config_dir, self.filename) - self.settings = {} - self.load() - - def load(self): - """Load settings from file""" - if not os.path.exists(self.settings_file): - default_settings_path = os.path.join(os.path.dirname(__file__), "default_settings.json") - shutil.copy(default_settings_path, self.settings_file) - logger.debug("Settings file not found, using default settings") - with open(self.settings_file, "r", encoding="utf-8") as file: - self.settings = json.loads(file.read()) - logger.debug("Settings loaded from %s", self.settings_file) - - def save(self): - """Save settings to file""" - with open(self.settings_file, "w", encoding="utf-8") as file: - json.dump(self.settings, file, indent=4) - logger.debug("Settings saved to %s", self.settings_file) - - def get(self, key): - """Get setting with key""" - if key in self.settings: - value = self.settings[key] - logger.debug("Get (%s) returned: %s", key, value) - return value - 
return None - - def set(self, key, value): - """Set setting value with key""" - if key in self.settings: - logger.debug("Setting (%s) to (%s)", key, value) - self.settings[key] = value - - def get_all(self): - """Return all settings""" - return self.settings - -settings_manager = SettingsManager() +"""Settings manager""" +from utils.logger import logger +import json +import os +import shutil + + +class SettingsManager: + """Class that handles settings""" + + def __init__(self): + self.filename = "data/settings.json" + self.config_dir = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) + ) + self.settings_file = os.path.join(self.config_dir, self.filename) + self.settings = {} + self.load() + + def load(self): + """Load settings from file""" + if not os.path.exists(self.settings_file): + default_settings_path = os.path.join( + os.path.dirname(__file__), "default_settings.json" + ) + shutil.copy(default_settings_path, self.settings_file) + logger.debug("Settings file not found, using default settings") + with open(self.settings_file, "r", encoding="utf-8") as file: + self.settings = json.loads(file.read()) + logger.debug("Settings loaded from %s", self.settings_file) + + def save(self): + """Save settings to file""" + with open(self.settings_file, "w", encoding="utf-8") as file: + json.dump(self.settings, file, indent=4) + logger.debug("Settings saved to %s", self.settings_file) + + def get(self, key): + """Get setting with key""" + if key in self.settings: + value = self.settings[key] + logger.debug("Get (%s) returned: %s", key, value) + return value + return None + + def set(self, key, value): + """Set setting value with key""" + if key in self.settings: + logger.debug("Setting (%s) to (%s)", key, value) + self.settings[key] = value + + def get_all(self): + """Return all settings""" + return self.settings + + +settings_manager = SettingsManager() diff --git a/backend/utils/thread.py b/backend/utils/thread.py index 7d21f2a7..4ff56109 100644 --- a/backend/utils/thread.py +++ b/backend/utils/thread.py @@ -45,7 +45,7 @@ def split_and_execute_in_parallel(lst, method): MIN_ITEMS = 20 # Split list into sublists with at least MIN_ITEMS - sublists = [lst[i: i + MIN_ITEMS] for i in range(0, len(lst), MIN_ITEMS)] + sublists = [lst[i : i + MIN_ITEMS] for i in range(0, len(lst), MIN_ITEMS)] # Use a Pool of processes pool = Pool() diff --git a/backend/utils/ui_helpers.py b/backend/utils/ui_helpers.py index b991fc45..1868e783 100644 --- a/backend/utils/ui_helpers.py +++ b/backend/utils/ui_helpers.py @@ -9,7 +9,9 @@ class CustomJSONEncoder(JSONEncoder): def default(self, o): if isinstance(o, MediaItem): - attributes = {k: v for k, v in o.__dict__.items() if k not in ["_lock", "parent"]} + attributes = { + k: v for k, v in o.__dict__.items() if k not in ["_lock", "parent"] + } return attributes if isinstance(o, MediaItemState): return o.name diff --git a/frontend/src/app.html b/frontend/src/app.html index 5465e1f7..3d5590b2 100644 --- a/frontend/src/app.html +++ b/frontend/src/app.html @@ -1,21 +1,21 @@ - - - - - - - - - - - - - %sveltekit.head% - - -
[app.html hunk: HTML markup stripped during extraction; only the %sveltekit.head% and %sveltekit.body% placeholders are recoverable, and the same fragments appear on the removed and added sides]
diff --git a/frontend/src/lib/components/header-item.svelte b/frontend/src/lib/components/header-item.svelte
index 80a2c3f7..b435c1b5 100644
--- a/frontend/src/lib/components/header-item.svelte
+++ b/frontend/src/lib/components/header-item.svelte
@@ -1,24 +1,24 @@
[header-item.svelte hunk: Svelte markup stripped during extraction; recoverable text, with the same fragments on both sides: an {#if $page.url.pathname === navItem.path}/{:else}/{/if} block rendering {navItem.name}]
diff --git a/frontend/src/lib/components/header.svelte b/frontend/src/lib/components/header.svelte
index a494d324..37a9a8dc 100644
--- a/frontend/src/lib/components/header.svelte
+++ b/frontend/src/lib/components/header.svelte
@@ -1,107 +1,107 @@
[header.svelte hunk: Svelte markup and script content stripped during extraction; recoverable text, with the same fragments on both sides: the "Iceberg" brand label and an {#if $navigating} block]
diff --git a/frontend/src/lib/components/status-media-card.svelte b/frontend/src/lib/components/status-media-card.svelte
index a102078f..5f3fe3a2 100644
--- a/frontend/src/lib/components/status-media-card.svelte
+++ b/frontend/src/lib/components/status-media-card.svelte
@@ -1,58 +1,58 @@
[status-media-card.svelte hunk: Svelte markup stripped during extraction; recoverable text, with the same fragments on both sides: a poster image with fallback handling, {plexDebridItem.title}, "Aired {formatDate(plexDebridItem.aired_at, 'short')}", an {#each plexDebridItem.genres as genre} loop rendering {formatWords(genre)}, a "Status" field showing {itemState.text ?? formatWords(plexDebridItem.state)}, and a "Requested" field showing {formatDate(plexDebridItem.requested_at, 'long', true)}]
diff --git a/frontend/src/lib/components/theme-switcher.svelte b/frontend/src/lib/components/theme-switcher.svelte index 85ba7d2a..0fd30c4f 100644 --- a/frontend/src/lib/components/theme-switcher.svelte +++ b/frontend/src/lib/components/theme-switcher.svelte @@ -1,44 +1,44 @@ - - -{#if $mode === 'light'} - - - - - -

[theme-switcher.svelte hunk: Svelte markup stripped during extraction; recoverable text, with the same fragments on both sides: an {#if $mode === 'light'}/{:else} toggle with "Dark mode" and "Light mode" labels]
+{/if} diff --git a/frontend/src/lib/helpers.ts b/frontend/src/lib/helpers.ts index 48069674..7b1bb4db 100644 --- a/frontend/src/lib/helpers.ts +++ b/frontend/src/lib/helpers.ts @@ -1,71 +1,71 @@ -import { DateTime } from 'luxon'; -import type { PlexDebridItem } from '$lib/types'; - -// only works with real-debrid dates because of CET format provided by RD -export function formatRDDate(inputDate: string, format: string = 'long'): string { - let cetDate = DateTime.fromISO(inputDate, { zone: 'Europe/Paris' }); - cetDate = cetDate.setZone('utc'); - - const userTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone; - cetDate = cetDate.setZone(userTimeZone); - - let formattedDate; - if (format === 'short') { - formattedDate = cetDate.toLocaleString({ - year: 'numeric', - month: 'short', - day: 'numeric' - }); - } else { - formattedDate = cetDate.toLocaleString(DateTime.DATETIME_FULL); - } - - return formattedDate; -} - -export function formatDate( - inputDate: string, - format: string = 'long', - relative: boolean = false -): string { - let date = DateTime.fromISO(inputDate, { zone: 'utc' }); - date = date.setZone('local'); - - let formattedDate; - - if (relative) { - formattedDate = date.toRelative() || ''; - } else { - if (format === 'short') { - formattedDate = date.toLocaleString({ - year: 'numeric', - month: 'short', - day: 'numeric' - }); - } else { - formattedDate = date.toLocaleString(DateTime.DATETIME_FULL); - } - } - - return formattedDate; -} - -export function formatWords(words: string) { - return words - .split('_') - .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()) - .join(' '); -} - -export function convertPlexDebridItemsToObject(items: PlexDebridItem[]) { - const result: { [key: string]: PlexDebridItem[] } = {}; - - for (const item of items) { - if (!result[item.state]) { - result[item.state] = []; - } - result[item.state].push(item); - } - - return result; -} +import { DateTime } from 'luxon'; +import type { PlexDebridItem } from '$lib/types'; + +// only works with real-debrid dates because of CET format provided by RD +export function formatRDDate(inputDate: string, format: string = 'long'): string { + let cetDate = DateTime.fromISO(inputDate, { zone: 'Europe/Paris' }); + cetDate = cetDate.setZone('utc'); + + const userTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone; + cetDate = cetDate.setZone(userTimeZone); + + let formattedDate; + if (format === 'short') { + formattedDate = cetDate.toLocaleString({ + year: 'numeric', + month: 'short', + day: 'numeric' + }); + } else { + formattedDate = cetDate.toLocaleString(DateTime.DATETIME_FULL); + } + + return formattedDate; +} + +export function formatDate( + inputDate: string, + format: string = 'long', + relative: boolean = false +): string { + let date = DateTime.fromISO(inputDate, { zone: 'utc' }); + date = date.setZone('local'); + + let formattedDate; + + if (relative) { + formattedDate = date.toRelative() || ''; + } else { + if (format === 'short') { + formattedDate = date.toLocaleString({ + year: 'numeric', + month: 'short', + day: 'numeric' + }); + } else { + formattedDate = date.toLocaleString(DateTime.DATETIME_FULL); + } + } + + return formattedDate; +} + +export function formatWords(words: string) { + return words + .split('_') + .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()) + .join(' '); +} + +export function convertPlexDebridItemsToObject(items: PlexDebridItem[]) { + const result: { [key: string]: PlexDebridItem[] } = {}; + + for (const item of items) 
{ + if (!result[item.state]) { + result[item.state] = []; + } + result[item.state].push(item); + } + + return result; +} diff --git a/frontend/src/lib/types.ts b/frontend/src/lib/types.ts index 7c97d5e2..dbb249a8 100644 --- a/frontend/src/lib/types.ts +++ b/frontend/src/lib/types.ts @@ -1,38 +1,38 @@ -export interface NavItem { - name: string; - path: string; -} - -export interface UserResponse { - id: number; - username: string; - email: string; - points: number; - locale: string; - avatar: string; - type: string; - premium: number; - expiration: string; -} - -export interface PlexDebridItem { - title: string; - imdb_id: string; - state: string; - imdb_link: string; - aired_at: string; - genres: string[]; - guid: string; - requested_at: string; -} - -export interface StatusInterface { - text?: string; - color: string; - bg: string; - description: string; -} - -export interface StatusInfo { - [key: string]: StatusInterface; -} +export interface NavItem { + name: string; + path: string; +} + +export interface UserResponse { + id: number; + username: string; + email: string; + points: number; + locale: string; + avatar: string; + type: string; + premium: number; + expiration: string; +} + +export interface PlexDebridItem { + title: string; + imdb_id: string; + state: string; + imdb_link: string; + aired_at: string; + genres: string[]; + guid: string; + requested_at: string; +} + +export interface StatusInterface { + text?: string; + color: string; + bg: string; + description: string; +} + +export interface StatusInfo { + [key: string]: StatusInterface; +} diff --git a/frontend/src/lib/utils.ts b/frontend/src/lib/utils.ts index 230a1fbd..28ed1c4f 100644 --- a/frontend/src/lib/utils.ts +++ b/frontend/src/lib/utils.ts @@ -1,62 +1,62 @@ -import { type ClassValue, clsx } from "clsx"; -import { twMerge } from "tailwind-merge"; -import { cubicOut } from "svelte/easing"; -import type { TransitionConfig } from "svelte/transition"; - -export function cn(...inputs: ClassValue[]) { - return twMerge(clsx(inputs)); -} - -type FlyAndScaleParams = { - y?: number; - x?: number; - start?: number; - duration?: number; -}; - -export const flyAndScale = ( - node: Element, - params: FlyAndScaleParams = { y: -8, x: 0, start: 0.95, duration: 150 } -): TransitionConfig => { - const style = getComputedStyle(node); - const transform = style.transform === "none" ? "" : style.transform; - - const scaleConversion = ( - valueA: number, - scaleA: [number, number], - scaleB: [number, number] - ) => { - const [minA, maxA] = scaleA; - const [minB, maxB] = scaleB; - - const percentage = (valueA - minA) / (maxA - minA); - const valueB = percentage * (maxB - minB) + minB; - - return valueB; - }; - - const styleToString = ( - style: Record - ): string => { - return Object.keys(style).reduce((str, key) => { - if (style[key] === undefined) return str; - return str + `${key}:${style[key]};`; - }, ""); - }; - - return { - duration: params.duration ?? 200, - delay: 0, - css: (t) => { - const y = scaleConversion(t, [0, 1], [params.y ?? 5, 0]); - const x = scaleConversion(t, [0, 1], [params.x ?? 0, 0]); - const scale = scaleConversion(t, [0, 1], [params.start ?? 
0.95, 1]); - - return styleToString({ - transform: `${transform} translate3d(${x}px, ${y}px, 0) scale(${scale})`, - opacity: t - }); - }, - easing: cubicOut - }; +import { type ClassValue, clsx } from "clsx"; +import { twMerge } from "tailwind-merge"; +import { cubicOut } from "svelte/easing"; +import type { TransitionConfig } from "svelte/transition"; + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)); +} + +type FlyAndScaleParams = { + y?: number; + x?: number; + start?: number; + duration?: number; +}; + +export const flyAndScale = ( + node: Element, + params: FlyAndScaleParams = { y: -8, x: 0, start: 0.95, duration: 150 } +): TransitionConfig => { + const style = getComputedStyle(node); + const transform = style.transform === "none" ? "" : style.transform; + + const scaleConversion = ( + valueA: number, + scaleA: [number, number], + scaleB: [number, number] + ) => { + const [minA, maxA] = scaleA; + const [minB, maxB] = scaleB; + + const percentage = (valueA - minA) / (maxA - minA); + const valueB = percentage * (maxB - minB) + minB; + + return valueB; + }; + + const styleToString = ( + style: Record + ): string => { + return Object.keys(style).reduce((str, key) => { + if (style[key] === undefined) return str; + return str + `${key}:${style[key]};`; + }, ""); + }; + + return { + duration: params.duration ?? 200, + delay: 0, + css: (t) => { + const y = scaleConversion(t, [0, 1], [params.y ?? 5, 0]); + const x = scaleConversion(t, [0, 1], [params.x ?? 0, 0]); + const scale = scaleConversion(t, [0, 1], [params.start ?? 0.95, 1]); + + return styleToString({ + transform: `${transform} translate3d(${x}px, ${y}px, 0) scale(${scale})`, + opacity: t + }); + }, + easing: cubicOut + }; }; \ No newline at end of file diff --git a/frontend/src/routes/+error.svelte b/frontend/src/routes/+error.svelte index b225a59b..33da0508 100644 --- a/frontend/src/routes/+error.svelte +++ b/frontend/src/routes/+error.svelte @@ -1,9 +1,9 @@ - - -
[routes/+error.svelte hunk: markup stripped during extraction; recoverable text, with the same fragments on both sides: "Something went wrong", "Error code: {$page.status}", and "Error message: {$page.error?.message}"]
diff --git a/frontend/src/routes/+layout.svelte b/frontend/src/routes/+layout.svelte index 39935fa3..70d40124 100644 --- a/frontend/src/routes/+layout.svelte +++ b/frontend/src/routes/+layout.svelte @@ -1,15 +1,14 @@ - - - - - -
[routes/+layout.svelte hunk: markup stripped during extraction; no template text is recoverable beyond bare +/- markers]
diff --git a/frontend/src/routes/+page.server.ts b/frontend/src/routes/+page.server.ts index 36c74ed6..cdc2f3a8 100644 --- a/frontend/src/routes/+page.server.ts +++ b/frontend/src/routes/+page.server.ts @@ -1,22 +1,22 @@ -import type { PageServerLoad } from './$types'; -import type { UserResponse } from '$lib/types'; -import { error } from '@sveltejs/kit'; - -export const load: PageServerLoad = async ({ fetch }) => { - async function getUserData() { - try { - const res = await fetch('http://127.0.0.1:8080/user'); - if (res.ok) { - return (await res.json()) as UserResponse; - } - error(400, `Unable to fetch user data: ${res.status} ${res.statusText}`); - } catch (e) { - console.error(e); - error(500, 'Unable to fetch user data. API is down.'); - } - } - - return { - user: await getUserData() - }; -}; +import type { PageServerLoad } from './$types'; +import type { UserResponse } from '$lib/types'; +import { error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ fetch }) => { + async function getUserData() { + try { + const res = await fetch('http://127.0.0.1:8080/user'); + if (res.ok) { + return (await res.json()) as UserResponse; + } + error(400, `Unable to fetch user data: ${res.status} ${res.statusText}`); + } catch (e) { + console.error(e); + error(500, 'Unable to fetch user data. API is down.'); + } + } + + return { + user: await getUserData() + }; +}; diff --git a/frontend/src/routes/+page.svelte b/frontend/src/routes/+page.svelte index d6d0aaf1..d517c6b5 100644 --- a/frontend/src/routes/+page.svelte +++ b/frontend/src/routes/+page.svelte @@ -1,22 +1,22 @@ - - - - Iceberg | Home - - -
[routes/+page.svelte hunk: markup stripped during extraction; recoverable text, with the same fragments on both sides: the "Iceberg | Home" title, "Welcome {data.user?.username}", "{data.user?.email}", and "Premium expires on {formatRDDate(data.user?.expiration, 'short')}"]
diff --git a/frontend/src/routes/settings/+page.svelte b/frontend/src/routes/settings/+page.svelte index cac21117..e48a4ac3 100644 --- a/frontend/src/routes/settings/+page.svelte +++ b/frontend/src/routes/settings/+page.svelte @@ -1,10 +1,10 @@ - - -
[routes/settings/+page.svelte hunk: markup stripped during extraction; recoverable text, with the same fragments on both sides: "This page is under construction"]
diff --git a/frontend/src/routes/status/+page.server.ts b/frontend/src/routes/status/+page.server.ts index 52c2d1a9..c8d71a26 100644 --- a/frontend/src/routes/status/+page.server.ts +++ b/frontend/src/routes/status/+page.server.ts @@ -1,35 +1,35 @@ -import type { PageServerLoad } from './$types'; -import { error } from '@sveltejs/kit'; - -export const load: PageServerLoad = async ({ fetch }) => { - async function getStates() { - try { - const res = await fetch('http://127.0.0.1:8080/items/states'); - if (res.ok) { - return await res.json(); - } - error(400, `Unable to fetch states data: ${res.status} ${res.statusText}`); - } catch (e) { - console.error(e); - error(500, 'Unable to fetch states data. API is down.'); - } - } - - async function getItems() { - try { - const res = await fetch('http://127.0.0.1:8080/items/'); - if (res.ok) { - return await res.json(); - } - error(400, `Unable to fetch items data: ${res.status} ${res.statusText}`); - } catch (e) { - console.error(e); - error(500, 'Unable to fetch items data. API is down.'); - } - } - - return { - items: getItems(), - states: await getStates() - }; -}; +import type { PageServerLoad } from './$types'; +import { error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ fetch }) => { + async function getStates() { + try { + const res = await fetch('http://127.0.0.1:8080/items/states'); + if (res.ok) { + return await res.json(); + } + error(400, `Unable to fetch states data: ${res.status} ${res.statusText}`); + } catch (e) { + console.error(e); + error(500, 'Unable to fetch states data. API is down.'); + } + } + + async function getItems() { + try { + const res = await fetch('http://127.0.0.1:8080/items/'); + if (res.ok) { + return await res.json(); + } + error(400, `Unable to fetch items data: ${res.status} ${res.statusText}`); + } catch (e) { + console.error(e); + error(500, 'Unable to fetch items data. API is down.'); + } + } + + return { + items: getItems(), + states: await getStates() + }; +}; diff --git a/frontend/src/routes/status/+page.svelte b/frontend/src/routes/status/+page.svelte index 9b040f74..60121752 100644 --- a/frontend/src/routes/status/+page.svelte +++ b/frontend/src/routes/status/+page.svelte @@ -1,169 +1,169 @@ - - - - Iceberg | Status - - -
[routes/status/+page.svelte hunk: markup stripped during extraction; recoverable text, with the same fragments on both sides: the "Iceberg | Status" title; an {#await data.items} block showing "Loading library items..."; a "Status ({items.items.length})" heading with "This page shows the status of your library items."; "Reload data" and "Open Plex" actions; a "Learn more about status badges" panel listing {statusInfo[key].text ?? formatWords(key)} with {statusInfo[key].description}; a section that derives {@const plexDebridItems = convertPlexDebridItemsToObject(items.items)} and iterates {#each plexDebridItems[key] as item}; and a {:catch error} branch showing "Something went wrong" and "Error message: {error.message}"]