Symlink fixes #76

Merged 8 commits on Dec 21, 2023
2 changes: 1 addition & 1 deletion Dockerfile
@@ -34,4 +34,4 @@ COPY entrypoint.sh ./entrypoint.sh
RUN chmod +x ./entrypoint.sh
ENTRYPOINT ["/iceberg/entrypoint.sh"]

EXPOSE 3000 8080
EXPOSE 3000 8080
6 changes: 4 additions & 2 deletions README.md
@@ -85,10 +85,12 @@ You can view the readme in `make` to get started!
make
```

To get started you can simply
To get started, you can simply run the command below. It stops any previous Iceberg container, removes the previous image,
and rebuilds the image using cached layers. If you're a developer, any files you have changed in the code are not cached
and are rebuilt into the image.

```sh
make start
```

You can restart with `make restart` **or** `make restart-nocache` to build the image without caching layers.
You can also restart the container with `make restart`, or view the logs with `make logs`.
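For illustration, a typical development loop with these targets might look like the following (a sketch based on the targets defined in this PR's makefile; container names and ports may differ on your setup):

```sh
# Build the image and start the container (start also stops/removes any previous one)
make start

# After editing backend code, restart the container without rebuilding the image
make restart

# Follow the container logs
make logs

# Tear everything down: stop and remove the container and the image
make stop
```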
2 changes: 2 additions & 0 deletions backend/program/content/mdblist.py
@@ -42,6 +42,8 @@ def run(self):

new_items = [item for item in items if item not in self.media_items]
container = self.updater.create_items(new_items)
for item in container:
item.set("requested_by", "Mdblist")
added_items = self.media_items.extend(container)
if len(added_items) > 0:
logger.info("Added %s items", len(added_items))
2 changes: 2 additions & 0 deletions backend/program/content/overseerr.py
@@ -38,6 +38,8 @@ def run(self):
items = self._get_items_from_overseerr(10000)
new_items = [item for item in items if item not in self.media_items]
container = self.updater.create_items(new_items)
for item in container:
item.set("requested_by", "Overseerr")
added_items = self.media_items.extend(container)
if len(added_items) > 0:
logger.info("Added %s items", len(added_items))
34 changes: 21 additions & 13 deletions backend/program/content/plex_watchlist.py
@@ -15,6 +15,7 @@ def __init__(self, media_items: MediaItemContainer):
self.initialized = False
self.media_items = media_items
self.watchlist_url = settings.get("plex")["watchlist"]
self.previous_added_items_count = 0
if not self.watchlist_url or not self._validate_settings():
logger.info(
"Plex watchlist RSS URL is not configured and will not be used."
@@ -39,25 +40,32 @@ def run(self):
items = self._get_items_from_plex_watchlist()
new_items = [item for item in items if item not in self.media_items]
container = self.updater.create_items(new_items)
for item in container:
item.set("requested_by", "Plex Watchlist")
previous_count = len(self.media_items)
added_items = self.media_items.extend(container)
if len(added_items) > 0:
logger.info("Added %s items", len(added_items))
added_items_count = len(self.media_items) - previous_count
if added_items_count != self.previous_added_items_count and added_items_count > 0:
logger.info("Added %s items", added_items_count)
self.previous_added_items_count = added_items_count
if added_items_count > 0:
for added_item in added_items:
logger.debug("Added %s", added_item.title)

def _get_items_from_plex_watchlist(self) -> list:
"""Fetch media from Plex watchlist"""
response_obj = get(self.watchlist_url, timeout=5)
response_obj = get(self.watchlist_url, timeout=30)
watchlist_data = json.loads(response_obj.response.content)
items = watchlist_data.get("items", [])
ids = []
for item in items:
imdb_id = next(
(
guid.split("//")[-1]
for guid in item.get("guids")
if "imdb://" in guid
),
None,
)
ids.append(imdb_id)
logger.debug("Found %s items", len(ids))
imdb_id = next((guid.split("//")[-1] for guid in item.get("guids") if "imdb://" in guid), None)
if imdb_id:
tvdb_id = next((guid.split("//")[-1] for guid in item.get("guids") if "tvdb://" in guid), None)
if tvdb_id:
# TODO: Convert tvdb to imdb. Will work on this later.
logger.debug("Found imdb id %s and tvdb id %s for %s", imdb_id, tvdb_id, item.get("title"))
else:
# TODO: Needs testing to determine if streams only return tvdb/imdb or if there are other options.
logger.debug("No imdb id or tvdb id found for %s", item.get("title"))
return ids
4 changes: 2 additions & 2 deletions backend/program/debrid/realdebrid.py
@@ -123,9 +123,9 @@ def _download_item(self, item):
if item.type == "movie":
log_string = item.title
if item.type == "season":
log_string = f"{item.parent.title} season {item.number}"
log_string = f"{item.parent.title} S{item.number}"
if item.type == "episode":
log_string = f"{item.parent.parent.title} season {item.parent.number} episode {item.number}"
log_string = f"{item.parent.parent.title} S{item.parent.number}E{item.number}"

logger.debug("Downloaded %s", log_string)
return 1
6 changes: 4 additions & 2 deletions backend/program/media.py
@@ -32,6 +32,7 @@ def __init__(self, item):
self.streams = {}
self.symlinked = False
self.requested_at = item.get("requested_at", None) or datetime.datetime.now()
self.requested_by = item.get("requested_by", None)

# Media related
self.title = item.get("title", None)
@@ -87,7 +88,8 @@ def to_dict(self):
"aired_at": self.aired_at,
"genres": self.genres,
"guid": self.guid,
"requested_at": self.requested_at
"requested_at": self.requested_at,
"requested_by": self.requested_by
}

def to_extended_dict(self):
@@ -97,7 +99,7 @@ def to_extended_dict(self):
if self.type == "season":
dict["episodes"] = [episode.to_extended_dict() for episode in self.episodes]
return dict

def is_not_cached(self):
return not self.is_cached()

6 changes: 2 additions & 4 deletions backend/program/scrapers/torrentio.py
@@ -64,16 +64,14 @@ def _scrape_items(self, items: list):
amount_scraped = 0
for item in items:
data = self.api_scrape(item)
log_string = item.title
if item.type == "season":
log_string = f"{item.parent.title} season {item.number}"
log_string = f"{item.parent.title} S{item.number}"
if item.type == "episode":
log_string = f"{item.parent.parent.title} season {item.parent.number} episode {item.number}"
log_string = f"{item.parent.parent.title} S{item.parent.number}E{item.number}"
if len(data) > 0:
item.set("streams", data)
logger.debug("Found %s streams for %s", len(data), log_string)
amount_scraped += 1
continue
logger.debug("Could not find streams for %s", log_string)
return amount_scraped

20 changes: 16 additions & 4 deletions backend/program/symlink.py
@@ -10,7 +10,18 @@


class Symlinker(threading.Thread):
"""Content class for mdblist"""
"""
A class that represents a symlinker thread.

Attributes:
media_items (MediaItemContainer): The container of media items.
running (bool): Flag indicating if the thread is running.
cache (dict): A dictionary to cache file paths.
mount_path (str): The absolute path of the container mount.
host_path (str): The absolute path of the host mount.
symlink_path (str): The path where the symlinks will be created.
cache_thread (ThreadRunner): The thread runner for updating the cache.
"""

def __init__(self, media_items: MediaItemContainer):
# Symlinking is required
@@ -95,9 +106,10 @@ def _run(self):
episode = obj["episode"]
if type(episode) == list:
for sub_episode in episode:
season.episodes[sub_episode - 1].set(
"file", file["filename"]
)
if sub_episode - 1 in range(len(season.episodes)):
season.episodes[sub_episode - 1].set(
"file", file["filename"]
)
else:
index = obj["episode"] - 1
if index in range(len(season.episodes)):
15 changes: 13 additions & 2 deletions backend/program/updaters/trakt.py
@@ -1,8 +1,8 @@
"""Trakt updater module"""
from datetime import datetime
from os import path
from utils.logger import get_data_path, logger
from utils.request import get
from backend.utils.logger import get_data_path, logger
from backend.utils.request import get
from program.media import (
Episode,
MediaItemContainer,
@@ -130,3 +130,14 @@ def create_item_from_imdb_id(imdb_id: str):
if data:
return _map_item_from_data(data, media_type)
return None

def get_imdb_id_from_tvdb(tvdb_id: str) -> str:
"""Get IMDb ID from TVDB ID in Trakt"""
url = f"https://api.trakt.tv/search/tvdb/{tvdb_id}?extended=full"
response = get(
url,
additional_headers={"trakt-api-version": "2", "trakt-api-key": CLIENT_ID},
)
if response.is_ok and len(response.data) > 0:
return response.data[0].show.ids.imdb
return None
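As a rough sketch of how the TODO in plex_watchlist.py could eventually use this new helper (not part of this PR; the wrapper function `resolve_imdb_id` is hypothetical):

```python
# Hypothetical follow-up: fall back to a Trakt lookup when a watchlist item
# only exposes a TVDB guid. Not part of this PR.
from program.updaters.trakt import get_imdb_id_from_tvdb

def resolve_imdb_id(imdb_id: str | None, tvdb_id: str | None) -> str | None:
    """Prefer the IMDb id parsed from the item's guids; otherwise ask Trakt."""
    if imdb_id:
        return imdb_id
    if tvdb_id:
        return get_imdb_id_from_tvdb(tvdb_id)
    return None
```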
4 changes: 2 additions & 2 deletions backend/utils/default_settings.json
@@ -1,6 +1,6 @@
{
"version": "0.2.1",
"debug": true,
"version": "0.3.0",
"debug": false,
"log": true,
"host_mount": "",
"container_mount": "",
1 change: 1 addition & 0 deletions backend/utils/logger.py
@@ -20,6 +20,7 @@ def __init__(self):
"api_key": re.compile(r"(\'api_key\'\s*:\s*\')[^\']*\'", re.IGNORECASE),
"token": re.compile(r"(\'token\'\s*:\s*\')[^\']*\'", re.IGNORECASE),
"user": re.compile(r"(\'user\'\s*:\s*\')[^\']*\'", re.IGNORECASE),
"watchlist": re.compile(r"(\'watchlist\'\s*:\s*\')[^\']*\'", re.IGNORECASE),
}

def _redact_string(self, data):
2 changes: 1 addition & 1 deletion entrypoint.sh
@@ -31,4 +31,4 @@ fi
chown -R ${USERNAME}:${GROUPNAME} /iceberg

echo "Container Initialization complete."
exec su -m $USERNAME -c 'cd backend && source /venv/bin/activate && exec python /iceberg/backend/main.py & node /iceberg/frontend/build'
exec su -m $USERNAME -c 'cd backend && source /venv/bin/activate && exec python /iceberg/backend/main.py & node /iceberg/frontend/build'
20 changes: 5 additions & 15 deletions makefile
@@ -1,4 +1,4 @@
.PHONY: help start reset stop restart rebuild logs exec sc ec update
.PHONY: help start stop restart logs exec sc ec update

# Detect operating system
ifeq ($(OS),Windows_NT)
@@ -13,7 +13,6 @@ help:
@echo Iceberg Local Development Environment
@echo -------------------------------------------------------------------------
@echo start : Build and run the Iceberg container
@echo reset : Build and run the Iceberg container without caching image
@echo stop : Stop and remove the Iceberg container and image
@echo restart : Restart the Iceberg container (without rebuilding image)
@echo rebuild : Rebuild the Iceberg container (with rebuilding image)
@@ -24,33 +23,24 @@ help:
@echo update : Update this repository from GitHub and rebuild image
@echo -------------------------------------------------------------------------

start:
start: stop
@docker build -t iceberg:latest -f Dockerfile .
@docker run -d --name iceberg --hostname iceberg -p 3000:3000 -p 8080:8080 -e PUID=1000 -e PGID=1000 -v $(DATA_PATH):/iceberg/data iceberg:latest
@echo Iceberg Frontend is running on http://localhost:3000/status/
@echo Iceberg Backend is running on http://localhost:8080/items/
@docker logs iceberg -f

reset:
@docker build --no-cache -t iceberg:latest -f Dockerfile .
@docker run -d --name iceberg --hostname iceberg -p 3000:3000 -p 8080:8080 -e PUID=1000 -e PGID=1000 -v $(DATA_PATH):/iceberg/data iceberg:latest
@echo Iceberg Frontend is running on http://localhost:3000/status/
@echo Iceberg Backend is running on http://localhost:8080/items/
@docker logs iceberg -f

stop:
@-docker stop iceberg
@-docker rm iceberg
@-docker rmi iceberg:latest
@-docker stop iceberg --time 0
@-docker rm iceberg --force
@-docker rmi iceberg:latest --force

restart:
@-docker restart iceberg
@echo Iceberg Frontend is running on http://localhost:3000/status/
@echo Iceberg Backend is running on http://localhost:8080/items/
@docker logs iceberg -f

rebuild: stop reset

logs:
@docker logs iceberg -f
