diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c1dd9205..6068cab1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -28,7 +28,9 @@ jobs: pip install pipenv pipenv install --system --skip-lock - name: Set up Django environment - run: cp tubesync/tubesync/local_settings.py.example tubesync/tubesync/local_settings.py + run: | + cp -v -p tubesync/tubesync/local_settings.py.example tubesync/tubesync/local_settings.py + cp -v -a -t "${Python3_ROOT_DIR}"/lib/python3.*/site-packages/yt_dlp/ patches/yt_dlp/* - name: Run Django tests run: cd tubesync && python3 manage.py test --verbosity=2 containerise: diff --git a/Dockerfile b/Dockerfile index 95b909ce..96c10a9f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -275,10 +275,14 @@ RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ pipenv \ pkgconf \ python3 \ + python3-libsass \ + python3-socks \ python3-wheel \ curl \ less \ && \ + # Link to the current python3 version + ln -v -s -f -T "$(find /usr/local/lib -name 'python3.[0-9]*' -type d -printf '%P\n' | sort -r -V | head -n 1)" /usr/local/lib/python3 && \ # Clean up apt-get -y autopurge && \ apt-get -y autoclean && \ @@ -346,12 +350,18 @@ RUN --mount=type=tmpfs,target=/cache \ COPY tubesync /app COPY tubesync/tubesync/local_settings.py.container /app/tubesync/local_settings.py +# patch background_task +COPY patches/background_task/ \ + /usr/local/lib/python3/dist-packages/background_task/ + +# patch yt_dlp +COPY patches/yt_dlp/ \ + /usr/local/lib/python3/dist-packages/yt_dlp/ + # Build app RUN set -x && \ # Make absolutely sure we didn't accidentally bundle a SQLite dev database rm -rf /app/db.sqlite3 && \ - # Check nginx configuration - nginx -t && \ # Run any required app commands /usr/bin/python3 -B /app/manage.py compilescss && \ /usr/bin/python3 -B /app/manage.py collectstatic --no-input --link && \ @@ -361,8 +371,6 @@ RUN set -x && \ mkdir -v -p /config/cache/pycache && \ mkdir -v -p 
/downloads/audio && \ mkdir -v -p /downloads/video && \ - # Link to the current python3 version - ln -v -s -f -T "$(find /usr/local/lib -name 'python3.[0-9]*' -type d -printf '%P\n' | sort -r -V | head -n 1)" /usr/local/lib/python3 && \ # Append software versions ffmpeg_version=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \ test -n "${ffmpeg_version}" && \ @@ -371,13 +379,8 @@ RUN set -x && \ # Copy root COPY config/root / -# patch background_task -COPY patches/background_task/ \ - /usr/local/lib/python3/dist-packages/background_task/ - -# patch yt_dlp -COPY patches/yt_dlp/ \ - /usr/local/lib/python3/dist-packages/yt_dlp/ +# Check nginx configuration copied from config/root/etc +RUN set -x && nginx -t # Create a healthcheck HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"] diff --git a/Pipfile b/Pipfile index b0aad1e4..3c29cafb 100644 --- a/Pipfile +++ b/Pipfile @@ -8,17 +8,19 @@ autopep8 = "*" [packages] django = "*" -django-sass-processor = "*" -libsass = "*" +django-sass-processor = {extras = ["management-command"], version = "*"} pillow = "*" whitenoise = "*" gunicorn = "*" -django-compressor = "*" httptools = "*" django-background-tasks = ">=1.2.8" django-basicauth = "*" psycopg2-binary = "*" mysqlclient = "*" -yt-dlp = "*" +PySocks = "*" +urllib3 = {extras = ["socks"], version = "*"} requests = {extras = ["socks"], version = "*"} +yt-dlp = "*" emoji = "*" +brotli = "*" +html5lib = "*" diff --git a/README.md b/README.md index af3cd910..17367a4a 100644 --- a/README.md +++ b/README.md @@ -70,7 +70,7 @@ currently just Plex, to complete the PVR experience. TubeSync is designed to be run in a container, such as via Docker or Podman. It also works in a Docker Compose stack. `amd64` (most desktop PCs and servers) and `arm64` -(modern ARM computers, such as the Rasperry Pi 3 or later) are supported. 
from yt_dlp import YoutubeDL
# Fix: HEADRequest and network_exceptions were referenced below but never
# imported, causing a NameError as soon as a thumbnail was actually probed.
from yt_dlp.networking import HEADRequest
from yt_dlp.networking.exceptions import network_exceptions
from yt_dlp.utils import sanitize_url, LazyList


class PatchedYoutubeDL(YoutubeDL):
    """
    Patched YoutubeDL that makes thumbnail network probing opt-in.

    The stock ``_sanitize_thumbnails`` probes every thumbnail URL whenever
    ``check_formats`` is requested; this version only performs the HEAD
    requests when the ``check_thumbnails`` option is explicitly True.
    """

    def _sanitize_thumbnails(self, info_dict):
        # Promote a lone 'thumbnail' key into a one-element 'thumbnails' list.
        thumbnails = info_dict.get('thumbnails')
        if thumbnails is None:
            thumbnail = info_dict.get('thumbnail')
            if thumbnail:
                info_dict['thumbnails'] = thumbnails = [{'url': thumbnail}]
        if not thumbnails:
            return

        def check_thumbnails(thumbnails):
            # Lazily yield only thumbnails whose URL answers a HEAD request.
            for t in thumbnails:
                self.to_screen(f'[info] Testing thumbnail {t["id"]}: {t["url"]!r}')
                try:
                    self.urlopen(HEADRequest(t['url']))
                except network_exceptions as err:
                    self.to_screen(f'[info] Unable to connect to thumbnail {t["id"]} URL {t["url"]!r} - {err}. Skipping...')
                    continue
                yield t

        self._sort_thumbnails(thumbnails)
        for i, t in enumerate(thumbnails):
            if t.get('id') is None:
                t['id'] = str(i)
            if t.get('width') and t.get('height'):
                t['resolution'] = '%dx%d' % (t['width'], t['height'])
            t['url'] = sanitize_url(t['url'])

        if self.params.get('check_thumbnails') is True:
            # Reverse before wrapping so LazyList(reverse=True) preserves the
            # original preference order while validating lazily.
            info_dict['thumbnails'] = LazyList(check_thumbnails(thumbnails[::-1]), reverse=True)
        else:
            info_dict['thumbnails'] = thumbnails


# Monkeypatch the method onto YoutubeDL, keeping the original reachable
# for debugging / potential rollback.
YoutubeDL.__unpatched___sanitize_thumbnails = YoutubeDL._sanitize_thumbnails
YoutubeDL._sanitize_thumbnails = PatchedYoutubeDL._sanitize_thumbnails
def getenv(key, default=None, /, *, integer=False, string=True):
    '''
    Wrap `os.getenv` and guarantee the type of the returned value.

    By default a `str` is always returned; pass `integer=True` to get an
    `int` instead. A missing variable yields the type's zero value
    (`''` or `0`) rather than `None`.

    Positional-only parameters:
        key     -- environment variable name (must be `str`)
        default -- fallback value; `bool`, `float`, `int`, `str` or `None`
                   (non-`None` values are stringified before lookup)

    Keyword-only parameters:
        integer -- return `int(float(value))` instead of the raw string
        string  -- return `str()` for a missing variable (default True)

    Raises `AssertionError` for unsupported argument types, and
    `ValueError` when `integer=True` but the value is not numeric.
    '''
    # Explicit per-argument type specs. (A zip() over args.keys() here
    # previously paired 4 keys with only 3 specs, so `string` was never
    # actually validated.)
    supported_types = {
        'key': (str,),
        'default': (bool, float, int, str, type(None)),
        'integer': (bool,),
        'string': (bool,),
    }
    args = dict(key=key, default=default, integer=integer, string=string)
    unsupported_type_msg = 'Unsupported type for positional argument, "{}": {}'
    for k, t in supported_types.items():
        v = args[k]
        assert isinstance(v, t), unsupported_type_msg.format(k, type(v))

    # os.getenv only accepts a string (or None) default.
    d = str(default) if default is not None else None

    r = os.getenv(key, d)
    if r is None:
        # Variable missing and no default: return the requested zero value.
        if string: r = str()
        if integer: r = int()
    elif integer:
        # Go through float() first so values like '1.5' or '2e3' parse.
        r = int(float(r))
    return r
metadata__isnull=False, + published__year=self_year, + ).order_by( + 'published', + 'created', + 'key', + ) + for counter, media in enumerate(sorted_media, start=1): if media == self: - return position_counter - position_counter += 1 + return counter def get_episode_str(self, use_padding=False): episode_number = self.calculate_episode_number() diff --git a/tubesync/sync/signals.py b/tubesync/sync/signals.py index c03a4f72..8bea1ce2 100644 --- a/tubesync/sync/signals.py +++ b/tubesync/sync/signals.py @@ -43,6 +43,8 @@ def source_pre_save(sender, instance, **kwargs): work_directory = existing_dirpath for _count in range(parents_count, 0, -1): work_directory = work_directory.parent + if not Path(work_directory).resolve(strict=True).is_relative_to(Path(settings.DOWNLOAD_ROOT)): + work_directory = Path(settings.DOWNLOAD_ROOT) with TemporaryDirectory(suffix=('.'+new_dirpath.name), prefix='.tmp.', dir=work_directory) as tmp_dir: tmp_dirpath = Path(tmp_dir) existed = None @@ -129,7 +131,7 @@ def source_post_save(sender, instance, created, **kwargs): verbose_name = _('Checking all media for source "{}"') save_all_media_for_source( str(instance.pk), - priority=9, + priority=25, verbose_name=verbose_name.format(instance.name), remove_existing_tasks=True ) @@ -167,6 +169,7 @@ def task_task_failed(sender, task_id, completed_task, **kwargs): @receiver(post_save, sender=Media) def media_post_save(sender, instance, created, **kwargs): + media = instance # If the media is skipped manually, bail. 
if instance.manual_skip: return @@ -176,12 +179,27 @@ def media_post_save(sender, instance, created, **kwargs): # Reset the skip flag if the download cap has changed if the media has not # already been downloaded downloaded = instance.downloaded + existing_media_metadata_task = get_media_metadata_task(str(instance.pk)) + existing_media_download_task = get_media_download_task(str(instance.pk)) if not downloaded: - skip_changed = filter_media(instance) + # the decision to download was already made if a download task exists + if not existing_media_download_task: + # Recalculate the "can_download" flag, this may + # need to change if the source specifications have been changed + if instance.metadata: + if instance.get_format_str(): + if not instance.can_download: + instance.can_download = True + can_download_changed = True + else: + if instance.can_download: + instance.can_download = False + can_download_changed = True + # Recalculate the "skip_changed" flag + skip_changed = filter_media(instance) else: # Downloaded media might need to be renamed # Check settings before any rename tasks are scheduled - media = instance rename_sources_setting = settings.RENAME_SOURCES or list() create_rename_task = ( ( @@ -195,23 +213,11 @@ def media_post_save(sender, instance, created, **kwargs): rename_media( str(media.pk), queue=str(media.pk), - priority=16, + priority=20, verbose_name=verbose_name.format(media.key, media.name), remove_existing_tasks=True ) - # Recalculate the "can_download" flag, this may - # need to change if the source specifications have been changed - if instance.metadata: - if instance.get_format_str(): - if not instance.can_download: - instance.can_download = True - can_download_changed = True - else: - if instance.can_download: - instance.can_download = False - can_download_changed = True - existing_media_metadata_task = get_media_metadata_task(str(instance.pk)) # If the media is missing metadata schedule it to be downloaded if not (instance.skip or 
instance.metadata or existing_media_metadata_task): log.info(f'Scheduling task to download metadata for: {instance.url}') @@ -239,7 +245,6 @@ def media_post_save(sender, instance, created, **kwargs): verbose_name=verbose_name.format(instance.name), remove_existing_tasks=True ) - existing_media_download_task = get_media_download_task(str(instance.pk)) # If the media has not yet been downloaded schedule it to be downloaded if not (instance.media_file_exists or instance.filepath.exists() or existing_media_download_task): # The file was deleted after it was downloaded, skip this media. diff --git a/tubesync/sync/tasks.py b/tubesync/sync/tasks.py index fdc954a3..498d73fe 100644 --- a/tubesync/sync/tasks.py +++ b/tubesync/sync/tasks.py @@ -17,6 +17,7 @@ from django.conf import settings from django.core.files.base import ContentFile from django.core.files.uploadedfile import SimpleUploadedFile from django.utils import timezone +from django.db.transaction import atomic from django.db.utils import IntegrityError from django.utils.translation import gettext_lazy as _ from background_task import background @@ -179,6 +180,7 @@ def cleanup_removed_media(source, videos): @background(schedule=300, remove_existing_tasks=True) +@atomic(durable=True) def index_source_task(source_id): ''' Indexes media available from a Source object. 
@@ -221,7 +223,8 @@ def index_source_task(source_id): if published_dt is not None: media.published = published_dt try: - media.save() + with atomic(): + media.save() log.debug(f'Indexed media: {source} / {media}') # log the new media instances new_media_instance = ( @@ -231,6 +234,13 @@ def index_source_task(source_id): ) if new_media_instance: log.info(f'Indexed new media: {source} / {media}') + log.info(f'Scheduling task to download metadata for: {media.url}') + verbose_name = _('Downloading metadata for "{}"') + download_media_metadata( + str(media.pk), + priority=9, + verbose_name=verbose_name.format(media.pk), + ) except IntegrityError as e: log.error(f'Index media failed: {source} / {media} with "{e}"') # Tack on a cleanup of old completed tasks @@ -611,9 +621,10 @@ def save_all_media_for_source(source_id): # Trigger the post_save signal for each media item linked to this source as various # flags may need to be recalculated - for media in mqs: - if media.uuid not in already_saved: - media.save() + with atomic(): + for media in mqs: + if media.uuid not in already_saved: + media.save() @background(schedule=60, remove_existing_tasks=True) @@ -626,6 +637,7 @@ def rename_media(media_id): @background(schedule=300, remove_existing_tasks=True) +@atomic(durable=True) def rename_all_media_for_source(source_id): try: source = Source.objects.get(pk=source_id) @@ -653,7 +665,8 @@ def rename_all_media_for_source(source_id): downloaded=True, ) for media in mqs: - media.rename_files() + with atomic(): + media.rename_files() @background(schedule=60, remove_existing_tasks=True) diff --git a/tubesync/sync/youtube.py b/tubesync/sync/youtube.py index 95eebb8a..d6419da0 100644 --- a/tubesync/sync/youtube.py +++ b/tubesync/sync/youtube.py @@ -17,6 +17,7 @@ from django.conf import settings from .hooks import postprocessor_hook, progress_hook from .utils import mkdir_p import yt_dlp +import yt_dlp.patch.check_thumbnails from yt_dlp.utils import remove_end @@ -146,6 +147,14 @@ def 
get_media_info(url, days=None): f'yesterday-{days!s}days' if days else None ) opts = get_yt_opts() + paths = opts.get('paths', dict()) + if 'temp' in paths: + temp_dir_obj = TemporaryDirectory(prefix='.yt_dlp-', dir=paths['temp']) + temp_dir_path = Path(temp_dir_obj.name) + (temp_dir_path / '.ignore').touch(exist_ok=True) + paths.update({ + 'temp': str(temp_dir_path), + }) opts.update({ 'ignoreerrors': False, # explicitly set this to catch exceptions 'ignore_no_formats_error': False, # we must fail first to try again with this enabled @@ -154,12 +163,17 @@ def get_media_info(url, days=None): 'logger': log, 'extract_flat': True, 'check_formats': True, + 'check_thumbnails': False, 'daterange': yt_dlp.utils.DateRange(start=start), 'extractor_args': { - 'youtube': {'formats': ['missing_pot']}, 'youtubetab': {'approximate_date': ['true']}, }, + 'paths': paths, + 'sleep_interval_requests': 2, + 'verbose': True if settings.DEBUG else False, }) + if start: + log.debug(f'get_media_info: used date range: {opts["daterange"]} for URL: {url}') response = {} with yt_dlp.YoutubeDL(opts) as y: try: diff --git a/tubesync/tubesync/local_settings.py.container b/tubesync/tubesync/local_settings.py.container index 4b73b7d7..cc20f73b 100644 --- a/tubesync/tubesync/local_settings.py.container +++ b/tubesync/tubesync/local_settings.py.container @@ -1,40 +1,41 @@ -import os import sys from pathlib import Path from urllib.parse import urljoin -from common.utils import parse_database_connection_string +from common.utils import getenv, parse_database_connection_string BASE_DIR = Path(__file__).resolve().parent.parent ROOT_DIR = Path('/') CONFIG_BASE_DIR = ROOT_DIR / 'config' DOWNLOADS_BASE_DIR = ROOT_DIR / 'downloads' -DJANGO_URL_PREFIX = os.getenv('DJANGO_URL_PREFIX', None) -STATIC_URL = str(os.getenv('DJANGO_STATIC_URL', '/static/')) +DJANGO_URL_PREFIX = getenv('DJANGO_URL_PREFIX').strip() +STATIC_URL = getenv('DJANGO_STATIC_URL', '/static/').strip() if DJANGO_URL_PREFIX and STATIC_URL: 
STATIC_URL = urljoin(DJANGO_URL_PREFIX, STATIC_URL[1:]) # This is not ever meant to be a public web interface so this isn't too critical -SECRET_KEY = str(os.getenv('DJANGO_SECRET_KEY', 'tubesync-django-secret')) +SECRET_KEY = getenv('DJANGO_SECRET_KEY', 'tubesync-django-secret') -ALLOWED_HOSTS_STR = str(os.getenv('TUBESYNC_HOSTS', '*')) +ALLOWED_HOSTS_STR = getenv('TUBESYNC_HOSTS', '*') ALLOWED_HOSTS = ALLOWED_HOSTS_STR.split(',') -DEBUG = True if os.getenv('TUBESYNC_DEBUG', False) else False -FORCE_SCRIPT_NAME = os.getenv('DJANGO_FORCE_SCRIPT_NAME', DJANGO_URL_PREFIX) +DEBUG_STR = getenv('TUBESYNC_DEBUG', False) +DEBUG = True if 'true' == DEBUG_STR.strip().lower() else False +FORCE_SCRIPT_NAME = getenv('DJANGO_FORCE_SCRIPT_NAME', DJANGO_URL_PREFIX) database_dict = {} -database_connection_env = os.getenv('DATABASE_CONNECTION', '') +database_connection_env = getenv('DATABASE_CONNECTION') if database_connection_env: database_dict = parse_database_connection_string(database_connection_env) if database_dict: - print(f'Using database connection: {database_dict["ENGINE"]}://' + print(f'Using database connection: {database_dict["DRIVER"]}://' f'{database_dict["USER"]}:[hidden]@{database_dict["HOST"]}:' - f'{database_dict["PORT"]}/{database_dict["NAME"]}', file=sys.stdout) + f'{database_dict["PORT"]}/{database_dict["NAME"]}', + file=sys.stdout, flush=True) DATABASES = { 'default': database_dict, } @@ -60,7 +61,7 @@ else: DEFAULT_THREADS = 1 -BACKGROUND_TASK_ASYNC_THREADS = int(os.getenv('TUBESYNC_WORKERS', DEFAULT_THREADS)) +BACKGROUND_TASK_ASYNC_THREADS = getenv('TUBESYNC_WORKERS', DEFAULT_THREADS, integer=True) MEDIA_ROOT = CONFIG_BASE_DIR / 'media' @@ -70,14 +71,14 @@ YOUTUBE_DL_TEMPDIR = DOWNLOAD_ROOT / 'cache' COOKIES_FILE = CONFIG_BASE_DIR / 'cookies.txt' -HEALTHCHECK_FIREWALL_STR = str(os.getenv('TUBESYNC_HEALTHCHECK_FIREWALL', 'True')).strip().lower() -HEALTHCHECK_FIREWALL = True if HEALTHCHECK_FIREWALL_STR == 'true' else False -HEALTHCHECK_ALLOWED_IPS_STR = 
str(os.getenv('TUBESYNC_HEALTHCHECK_ALLOWED_IPS', '127.0.0.1')) +HEALTHCHECK_FIREWALL_STR = getenv('TUBESYNC_HEALTHCHECK_FIREWALL', True) +HEALTHCHECK_FIREWALL = ( 'true' == HEALTHCHECK_FIREWALL_STR.strip().lower() ) +HEALTHCHECK_ALLOWED_IPS_STR = getenv('TUBESYNC_HEALTHCHECK_ALLOWED_IPS', '127.0.0.1') HEALTHCHECK_ALLOWED_IPS = HEALTHCHECK_ALLOWED_IPS_STR.split(',') -BASICAUTH_USERNAME = os.getenv('HTTP_USER', '').strip() -BASICAUTH_PASSWORD = os.getenv('HTTP_PASS', '').strip() +BASICAUTH_USERNAME = getenv('HTTP_USER').strip() +BASICAUTH_PASSWORD = getenv('HTTP_PASS').strip() if BASICAUTH_USERNAME and BASICAUTH_PASSWORD: BASICAUTH_DISABLE = False BASICAUTH_USERS = { @@ -88,25 +89,25 @@ else: BASICAUTH_USERS = {} -SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR = os.getenv('TUBESYNC_DIRECTORY_PREFIX', 'True').strip().lower() -SOURCE_DOWNLOAD_DIRECTORY_PREFIX = True if SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR == 'true' else False +SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR = getenv('TUBESYNC_DIRECTORY_PREFIX', True) +SOURCE_DOWNLOAD_DIRECTORY_PREFIX = ( 'true' == SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR.strip().lower() ) -SHRINK_NEW_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_NEW', 'false').strip().lower() -SHRINK_NEW_MEDIA_METADATA = ( 'true' == SHRINK_NEW_MEDIA_METADATA_STR ) -SHRINK_OLD_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_OLD', 'false').strip().lower() -SHRINK_OLD_MEDIA_METADATA = ( 'true' == SHRINK_OLD_MEDIA_METADATA_STR ) +SHRINK_NEW_MEDIA_METADATA_STR = getenv('TUBESYNC_SHRINK_NEW', False) +SHRINK_NEW_MEDIA_METADATA = ( 'true' == SHRINK_NEW_MEDIA_METADATA_STR.strip().lower() ) +SHRINK_OLD_MEDIA_METADATA_STR = getenv('TUBESYNC_SHRINK_OLD', False) +SHRINK_OLD_MEDIA_METADATA = ( 'true' == SHRINK_OLD_MEDIA_METADATA_STR.strip().lower() ) # TUBESYNC_RENAME_ALL_SOURCES: True or False -RENAME_ALL_SOURCES_STR = os.getenv('TUBESYNC_RENAME_ALL_SOURCES', 'False').strip().lower() -RENAME_ALL_SOURCES = ( 'true' == RENAME_ALL_SOURCES_STR ) +RENAME_ALL_SOURCES_STR = 
getenv('TUBESYNC_RENAME_ALL_SOURCES', False) +RENAME_ALL_SOURCES = ( 'true' == RENAME_ALL_SOURCES_STR.strip().lower() ) # TUBESYNC_RENAME_SOURCES: A comma-separated list of Source directories -RENAME_SOURCES_STR = os.getenv('TUBESYNC_RENAME_SOURCES', '') +RENAME_SOURCES_STR = getenv('TUBESYNC_RENAME_SOURCES') RENAME_SOURCES = RENAME_SOURCES_STR.split(',') if RENAME_SOURCES_STR else None -VIDEO_HEIGHT_CUTOFF = int(os.getenv("TUBESYNC_VIDEO_HEIGHT_CUTOFF", "240")) +VIDEO_HEIGHT_CUTOFF = getenv("TUBESYNC_VIDEO_HEIGHT_CUTOFF", 240, integer=True) # ensure that the current directory exists @@ -117,4 +118,11 @@ old_youtube_cache_dirs = list(YOUTUBE_DL_CACHEDIR.parent.glob('youtube-*')) old_youtube_cache_dirs.extend(list(YOUTUBE_DL_CACHEDIR.parent.glob('youtube/youtube-*'))) for cache_dir in old_youtube_cache_dirs: cache_dir.rename(YOUTUBE_DL_CACHEDIR / cache_dir.name) +# try to remove the old, hopefully empty, directory +empty_old_youtube_dir = YOUTUBE_DL_CACHEDIR.parent / 'youtube' +if empty_old_youtube_dir.is_dir(): + try: + empty_old_youtube_dir.rmdir() + except: + pass diff --git a/tubesync/tubesync/settings.py b/tubesync/tubesync/settings.py index a9f4061c..ff88a669 100644 --- a/tubesync/tubesync/settings.py +++ b/tubesync/tubesync/settings.py @@ -1,5 +1,5 @@ -import os from pathlib import Path +from common.utils import getenv BASE_DIR = Path(__file__).resolve().parent.parent @@ -97,7 +97,7 @@ AUTH_PASSWORD_VALIDATORS = [ LANGUAGE_CODE = 'en-us' -TIME_ZONE = os.getenv('TZ', 'UTC') +TIME_ZONE = getenv('TZ', 'UTC') USE_I18N = True USE_L10N = True USE_TZ = True