Skip to content

Commit

Permalink
find_media_errors.py: run daily
Browse files Browse the repository at this point in the history
  • Loading branch information
double16 committed Aug 13, 2024
1 parent 34161ba commit 61ef3ce
Show file tree
Hide file tree
Showing 5 changed files with 27 additions and 8 deletions.
1 change: 1 addition & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ COPY tvshow-summary.sh /etc/cron.daily/tvshow-summary
#COPY comchap-apply.sh /etc/cron.daily/comchap-apply
COPY comtune-apply.sh /etc/cron.daily/comtune-apply
COPY transcode-apply.sh /etc/cron.hourly/transcode-apply
COPY media-errors.sh /etc/cron.daily/media-errors
COPY profanity-filter-apply.sh /etc/cron.daily/profanity-filter-apply
COPY logrotate.conf /etc/logrotate.d/dvr
COPY sendmail-log.sh /usr/sbin/sendmail
Expand Down
2 changes: 1 addition & 1 deletion dvrprocess/common/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ def get_file_config(path: str) -> ConfigParser:
config = ConfigParser()
if os.path.exists(config_path):
config.read(config_path)
if config.get('general', 'fingerprint') != _mkv_fingerprint(path):
if config.get('general', 'fingerprint', fallback=None) != _mkv_fingerprint(path):
config = ConfigParser()
return config

Expand Down
18 changes: 11 additions & 7 deletions dvrprocess/find_media_errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ def usage():
Directory containing media. Defaults to {common.get_media_roots()}
--nagios
Output for Nagios monitoring. Also human readable with statistics and estimates of transcode time.
--cache-only
Only report cached results, do not look for new media errors.
--time-limit={config.get_global_config_option('background_limits', 'time_limit')}
Limit runtime. Set to 0 for no limit.
--ignore-compute
Expand All @@ -44,10 +46,11 @@ def find_media_errors_cli(argv):
nagios_output = False
time_limit = config.get_global_config_time_seconds('background_limits', 'time_limit')
check_compute = True
cache_only = False

try:
opts, args = getopt.getopt(argv, "t:d:",
["terminator=", "dir=", "nagios", "time-limit=", "ignore-compute"])
["terminator=", "dir=", "nagios", "time-limit=", "ignore-compute", "cache-only"])
except getopt.GetoptError:
usage()
return 2
Expand All @@ -70,6 +73,8 @@ def find_media_errors_cli(argv):
time_limit = config.parse_seconds(arg)
elif opt == '--ignore-compute':
check_compute = False
elif opt == '--cache-only':
cache_only = True

if not roots:
roots = common.get_media_roots()
Expand All @@ -84,7 +89,7 @@ def find_media_errors_cli(argv):
return 0

generator = media_errors_generator(media_paths=media_paths, media_roots=roots,
time_limit=time_limit, check_compute=check_compute)
time_limit=time_limit, check_compute=check_compute, cache_only=cache_only)

if nagios_output:
corrupt_files = list(generator)
Expand Down Expand Up @@ -122,9 +127,8 @@ def __init__(self, file_name: str, host_file_path: str, size: float, error_count

def media_errors_generator(media_paths: list[str], media_roots: list[str],
time_limit=config.get_global_config_time_seconds('background_limits', 'time_limit'),
check_compute=True) -> Iterable[MediaErrorFileInfo]:
check_compute=True, cache_only=False) -> Iterable[MediaErrorFileInfo]:
time_start = time.time()
only_cached = False

for media_path in media_paths:
for root, dirs, files in os.walk(media_path, topdown=True):
Expand All @@ -133,19 +137,19 @@ def media_errors_generator(media_paths: list[str], media_roots: list[str],
cached_error_count = config.get_file_config_option(filepath, 'error', 'count')
if cached_error_count:
error_count = int(cached_error_count)
elif only_cached:
elif cache_only:
continue
else:
duration = time.time() - time_start
if 0 < time_limit < duration:
logger.debug(
f"Time limit expired after processing {common.s_to_ts(int(duration))}, limit of {common.s_to_ts(time_limit)} reached, only using cached data")
only_cached = True
cache_only = True
continue
if check_compute and common.should_stop_processing():
# when compute limit is reached, use cached data
logger.debug("not enough compute available, only using cached data")
only_cached = True
cache_only = True
continue

error_count = len(tools.ffmpeg.check_output(
Expand Down
8 changes: 8 additions & 0 deletions logrotate.conf
Original file line number Diff line number Diff line change
Expand Up @@ -69,3 +69,11 @@
missingok
notifempty
}

/var/log/find_media_errors.log {
rotate 2
daily
compress
missingok
notifempty
}
6 changes: 6 additions & 0 deletions media-errors.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
#!/usr/bin/env bash

# Stagger startup by a random 5-34 second delay so cron jobs scheduled for
# the same minute do not all begin heavy work at the exact same moment.
delay=$((RANDOM % 30 + 5))
sleep "${delay}"

# Replace this shell with the scanner at idle I/O priority (ionice class 3)
# and low CPU priority (nice 15), appending all output to the rotated log.
exec ionice -c 3 nice -n 15 /usr/local/bin/find_media_errors.py "$@" >>/var/log/find_media_errors.log 2>&1

0 comments on commit 61ef3ce

Please sign in to comment.