mirror of https://github.com/meeb/tubesync.git
synced 2025-06-23 13:36:35 +00:00

commit 30b3f08b17: Merge branch 'main' into patch-6
@@ -275,6 +275,8 @@ RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \
         pipenv \
         pkgconf \
         python3 \
+        python3-libsass \
+        python3-socks \
         python3-wheel \
         curl \
         less \
@@ -360,8 +362,6 @@ COPY patches/yt_dlp/ \
 RUN set -x && \
   # Make absolutely sure we didn't accidentally bundle a SQLite dev database
   rm -rf /app/db.sqlite3 && \
-  # Check nginx configuration
-  nginx -t && \
   # Run any required app commands
   /usr/bin/python3 -B /app/manage.py compilescss && \
   /usr/bin/python3 -B /app/manage.py collectstatic --no-input --link && \
@@ -379,6 +379,9 @@ RUN set -x && \
 # Copy root
 COPY config/root /
 
+# Check nginx configuration copied from config/root/etc
+RUN set -x && nginx -t
+
 # Create a healthcheck
 HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"]
 
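The `HEALTHCHECK` instruction above delegates to a small script that polls the app over HTTP and reports health through its exit code. A minimal sketch of such a script follows; the actual `/app/healthcheck.py` shipped in the repository may differ in detail.

```python
#!/usr/bin/env python3
# Hypothetical sketch of a container healthcheck: exit 0 when the app
# answers at the given URL, non-zero otherwise (Docker marks the
# container unhealthy on a non-zero exit).
import sys
import urllib.request


def main(url):
    try:
        with urllib.request.urlopen(url, timeout=5) as resp:
            return 0 if 200 <= resp.status < 300 else 1
    except Exception:
        return 1


if __name__ == '__main__':
    sys.exit(main(sys.argv[1]))
```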
Pipfile (10 lines changed)
@@ -8,17 +8,19 @@ autopep8 = "*"
 
 [packages]
 django = "*"
-django-sass-processor = "*"
-libsass = "*"
+django-sass-processor = {extras = ["management-command"], version = "*"}
 pillow = "*"
 whitenoise = "*"
 gunicorn = "*"
 django-compressor = "*"
 httptools = "*"
 django-background-tasks = ">=1.2.8"
 django-basicauth = "*"
 psycopg2-binary = "*"
 mysqlclient = "*"
-yt-dlp = "*"
+PySocks = "*"
+urllib3 = {extras = ["socks"], version = "*"}
+requests = {extras = ["socks"], version = "*"}
+yt-dlp = "*"
 emoji = "*"
 brotli = "*"
 html5lib = "*"
@@ -70,7 +70,7 @@ currently just Plex, to complete the PVR experience.
 
 TubeSync is designed to be run in a container, such as via Docker or Podman. It also
 works in a Docker Compose stack. `amd64` (most desktop PCs and servers) and `arm64`
-(modern ARM computers, such as the Rasperry Pi 3 or later) are supported.
+(modern ARM computers, such as the Raspberry Pi 3 or later) are supported.
 
 Example (with Docker on *nix):
 
@@ -356,7 +356,7 @@ etc.). Configuration of this is beyond the scope of this README.
 Only two are supported, for the moment:
 - `amd64` (most desktop PCs and servers)
 - `arm64`
-  (modern ARM computers, such as the Rasperry Pi 3 or later)
+  (modern ARM computers, such as the Raspberry Pi 3 or later)
 
 Others may be made available, if there is demand.
@@ -1,3 +1,4 @@
+import os
 import string
 from datetime import datetime
 from urllib.parse import urlunsplit, urlencode, urlparse
@@ -6,6 +7,41 @@ from yt_dlp.utils import LazyList
 from .errors import DatabaseConnectionError
 
 
+def getenv(key, default=None, /, *, integer=False, string=True):
+    '''
+        Guarantees a returned type from calling `os.getenv`
+        The caller can request the integer type,
+        or use the default string type.
+    '''
+
+    args = dict(key=key, default=default, integer=integer, string=string)
+    supported_types = dict(zip(args.keys(), (
+        (str,), # key
+        (
+            bool,
+            float,
+            int,
+            str,
+            None.__class__,
+        ), # default
+        (bool,) * (len(args.keys()) - 2),
+    )))
+    unsupported_type_msg = 'Unsupported type for positional argument, "{}": {}'
+    for k, t in supported_types.items():
+        v = args[k]
+        assert isinstance(v, t), unsupported_type_msg.format(k, type(v))
+
+    d = str(default) if default is not None else None
+
+    r = os.getenv(key, d)
+    if r is None:
+        if string: r = str()
+        if integer: r = int()
+    elif integer:
+        r = int(float(r))
+    return r
+
+
 def parse_database_connection_string(database_connection_string):
     '''
         Parses a connection string in a URL style format, such as:
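The new `getenv` helper pins down the type of what `os.getenv` returns, so callers never have to handle `None`. A short usage sketch, inferred only from the definition above:

```python
# Behaviour implied by the getenv() definition above (illustrative).
import os

os.environ['TUBESYNC_WORKERS'] = '4'
getenv('TUBESYNC_WORKERS', 1, integer=True)  # -> 4 (int parsed from the env)
getenv('NOT_SET', 1, integer=True)           # -> 1 (int from the default)
getenv('NOT_SET', '/static/')                # -> '/static/' (str passthrough)
getenv('NOT_SET')                            # -> '' (empty str, never None)
```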
@@ -11,7 +11,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='source',
             name='source_resolution',
-            field=models.CharField(choices=[('audio', 'Audio only'), ('360p', '360p (SD)'), ('480p', '480p (SD)'), ('720p', '720p (HD)'), ('1080p', '1080p (Full HD)'), ('1440p', '1440p (2K)'), ('2160p', '4320p (8K)'), ('4320p', '4320p (8K)')], db_index=True, default='1080p', help_text='Source resolution, desired video resolution to download', max_length=8, verbose_name='source resolution'),
+            field=models.CharField(choices=[('audio', 'Audio only'), ('360p', '360p (SD)'), ('480p', '480p (SD)'), ('720p', '720p (HD)'), ('1080p', '1080p (Full HD)'), ('1440p', '1440p (2K)'), ('2160p', '2160p (4K)'), ('4320p', '4320p (8K)')], db_index=True, default='1080p', help_text='Source resolution, desired video resolution to download', max_length=8, verbose_name='source resolution'),
         ),
     ]
@@ -1507,17 +1507,35 @@ class Media(models.Model):
 
     def calculate_episode_number(self):
         if self.source.is_playlist:
-            sorted_media = Media.objects.filter(source=self.source)
+            sorted_media = Media.objects.filter(
+                source=self.source,
+                metadata__isnull=False,
+            ).order_by(
+                'published',
+                'created',
+                'key',
+            )
         else:
-            self_year = self.upload_date.year if self.upload_date else self.created.year
-            filtered_media = Media.objects.filter(source=self.source, published__year=self_year)
-            filtered_media = [m for m in filtered_media if m.upload_date is not None]
-            sorted_media = sorted(filtered_media, key=lambda x: (x.upload_date, x.key))
-        position_counter = 1
-        for media in sorted_media:
+            self_year = self.created.year # unlikely to be accurate
+            if self.published:
+                self_year = self.published.year
+            elif self.has_metadata and self.upload_date:
+                self_year = self.upload_date.year
+            elif self.download_date:
+                # also, unlikely to be accurate
+                self_year = self.download_date.year
+            sorted_media = Media.objects.filter(
+                source=self.source,
+                metadata__isnull=False,
+                published__year=self_year,
+            ).order_by(
+                'published',
+                'created',
+                'key',
+            )
+        for counter, media in enumerate(sorted_media, start=1):
             if media == self:
-                return position_counter
-            position_counter += 1
+                return counter
 
     def get_episode_str(self, use_padding=False):
         episode_number = self.calculate_episode_number()
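The rewrite keys the episode number off a stable `(published, created, key)` ordering supplied by the database, plus `enumerate`, instead of a Python-side sort over upload dates. Reduced to plain Python, the counting pattern is:

```python
# Illustrative reduction: the episode number is the 1-based position of an
# item within a stable, fully ordered sequence.
ordered_keys = ['aaa', 'bbb', 'ccc']  # stands in for the ordered queryset

def episode_number(target):
    for counter, key in enumerate(ordered_keys, start=1):
        if key == target:
            return counter

assert episode_number('ccc') == 3
```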
@@ -43,6 +43,8 @@ def source_pre_save(sender, instance, **kwargs):
     work_directory = existing_dirpath
     for _count in range(parents_count, 0, -1):
         work_directory = work_directory.parent
+    if not Path(work_directory).resolve(strict=True).is_relative_to(Path(settings.DOWNLOAD_ROOT)):
+        work_directory = Path(settings.DOWNLOAD_ROOT)
     with TemporaryDirectory(suffix=('.'+new_dirpath.name), prefix='.tmp.', dir=work_directory) as tmp_dir:
         tmp_dirpath = Path(tmp_dir)
         existed = None
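The added guard uses `Path.is_relative_to` (Python 3.9+) to stop the parent walk from escaping the download root before the temporary directory is created. Roughly, assuming a `/downloads` root:

```python
# Illustrative: is_relative_to() is a pure path-containment test.
from pathlib import Path

root = Path('/downloads')
assert (root / 'tv' / 'show').is_relative_to(root)  # still inside the root
assert not Path('/etc').is_relative_to(root)        # escaped: fall back to root
```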
@@ -129,7 +131,7 @@ def source_post_save(sender, instance, created, **kwargs):
         verbose_name = _('Checking all media for source "{}"')
         save_all_media_for_source(
             str(instance.pk),
-            priority=9,
+            priority=25,
             verbose_name=verbose_name.format(instance.name),
             remove_existing_tasks=True
         )
@@ -167,6 +169,7 @@ def task_task_failed(sender, task_id, completed_task, **kwargs):
 
 @receiver(post_save, sender=Media)
 def media_post_save(sender, instance, created, **kwargs):
+    media = instance
     # If the media is skipped manually, bail.
     if instance.manual_skip:
         return
@@ -176,12 +179,27 @@ def media_post_save(sender, instance, created, **kwargs):
     # Reset the skip flag if the download cap has changed if the media has not
     # already been downloaded
     downloaded = instance.downloaded
+    existing_media_metadata_task = get_media_metadata_task(str(instance.pk))
+    existing_media_download_task = get_media_download_task(str(instance.pk))
     if not downloaded:
-        skip_changed = filter_media(instance)
+        # the decision to download was already made if a download task exists
+        if not existing_media_download_task:
+            # Recalculate the "can_download" flag, this may
+            # need to change if the source specifications have been changed
+            if instance.metadata:
+                if instance.get_format_str():
+                    if not instance.can_download:
+                        instance.can_download = True
+                        can_download_changed = True
+                else:
+                    if instance.can_download:
+                        instance.can_download = False
+                        can_download_changed = True
+            # Recalculate the "skip_changed" flag
+            skip_changed = filter_media(instance)
     else:
         # Downloaded media might need to be renamed
         # Check settings before any rename tasks are scheduled
-        media = instance
         rename_sources_setting = settings.RENAME_SOURCES or list()
         create_rename_task = (
             (
@@ -195,23 +213,11 @@ def media_post_save(sender, instance, created, **kwargs):
             rename_media(
                 str(media.pk),
                 queue=str(media.pk),
-                priority=16,
+                priority=20,
                 verbose_name=verbose_name.format(media.key, media.name),
                 remove_existing_tasks=True
             )
 
-    # Recalculate the "can_download" flag, this may
-    # need to change if the source specifications have been changed
-    if instance.metadata:
-        if instance.get_format_str():
-            if not instance.can_download:
-                instance.can_download = True
-                can_download_changed = True
-        else:
-            if instance.can_download:
-                instance.can_download = False
-                can_download_changed = True
-    existing_media_metadata_task = get_media_metadata_task(str(instance.pk))
     # If the media is missing metadata schedule it to be downloaded
     if not (instance.skip or instance.metadata or existing_media_metadata_task):
         log.info(f'Scheduling task to download metadata for: {instance.url}')
@@ -239,7 +245,6 @@ def media_post_save(sender, instance, created, **kwargs):
             verbose_name=verbose_name.format(instance.name),
             remove_existing_tasks=True
         )
-    existing_media_download_task = get_media_download_task(str(instance.pk))
     # If the media has not yet been downloaded schedule it to be downloaded
     if not (instance.media_file_exists or instance.filepath.exists() or existing_media_download_task):
         # The file was deleted after it was downloaded, skip this media.
@@ -17,6 +17,7 @@ from django.conf import settings
 from django.core.files.base import ContentFile
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.utils import timezone
+from django.db.transaction import atomic
 from django.db.utils import IntegrityError
 from django.utils.translation import gettext_lazy as _
 from background_task import background
@@ -179,6 +180,7 @@ def cleanup_removed_media(source, videos):
 
 
 @background(schedule=300, remove_existing_tasks=True)
+@atomic(durable=True)
 def index_source_task(source_id):
     '''
         Indexes media available from a Source object.
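`index_source_task` (and, further down, `rename_all_media_for_source`) now runs under `django.db.transaction.atomic(durable=True)`: `durable=True` asserts the decorated function opens the outermost transaction, while the inner `atomic()` blocks added around each save become savepoints. A schematic sketch of the nesting, not the task itself:

```python
# Schematic only: how the new decorator and inner blocks nest.
from django.db.transaction import atomic

@atomic(durable=True)        # asserts this is the outermost transaction
def process(queryset):       # stand-in for the task body
    for obj in queryset:
        with atomic():       # savepoint: a failed save rolls back alone
            obj.save()
```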
@@ -221,7 +223,8 @@ def index_source_task(source_id):
         if published_dt is not None:
             media.published = published_dt
         try:
-            media.save()
+            with atomic():
+                media.save()
             log.debug(f'Indexed media: {source} / {media}')
             # log the new media instances
             new_media_instance = (
@@ -231,6 +234,13 @@ def index_source_task(source_id):
             )
             if new_media_instance:
                 log.info(f'Indexed new media: {source} / {media}')
+                log.info(f'Scheduling task to download metadata for: {media.url}')
+                verbose_name = _('Downloading metadata for "{}"')
+                download_media_metadata(
+                    str(media.pk),
+                    priority=9,
+                    verbose_name=verbose_name.format(media.pk),
+                )
         except IntegrityError as e:
             log.error(f'Index media failed: {source} / {media} with "{e}"')
     # Tack on a cleanup of old completed tasks
@@ -597,6 +607,7 @@ def save_all_media_for_source(source_id):
         skip=False,
         manual_skip=False,
         downloaded=False,
+        metadata__isnull=False,
     )
     for media in refresh_qs:
         try:
@@ -610,9 +621,10 @@ def save_all_media_for_source(source_id):
 
     # Trigger the post_save signal for each media item linked to this source as various
     # flags may need to be recalculated
-    for media in mqs:
-        if media.uuid not in already_saved:
-            media.save()
+    with atomic():
+        for media in mqs:
+            if media.uuid not in already_saved:
+                media.save()
 
 
 @background(schedule=60, remove_existing_tasks=True)
@@ -625,6 +637,7 @@ def rename_media(media_id):
 
 
 @background(schedule=300, remove_existing_tasks=True)
+@atomic(durable=True)
 def rename_all_media_for_source(source_id):
     try:
         source = Source.objects.get(pk=source_id)
@@ -652,7 +665,8 @@ def rename_all_media_for_source(source_id):
         downloaded=True,
     )
     for media in mqs:
-        media.rename_files()
+        with atomic():
+            media.rename_files()
 
 
 @background(schedule=60, remove_existing_tasks=True)
@@ -147,6 +147,14 @@ def get_media_info(url, days=None):
         f'yesterday-{days!s}days' if days else None
     )
     opts = get_yt_opts()
+    paths = opts.get('paths', dict())
+    if 'temp' in paths:
+        temp_dir_obj = TemporaryDirectory(prefix='.yt_dlp-', dir=paths['temp'])
+        temp_dir_path = Path(temp_dir_obj.name)
+        (temp_dir_path / '.ignore').touch(exist_ok=True)
+        paths.update({
+            'temp': str(temp_dir_path),
+        })
     opts.update({
         'ignoreerrors': False, # explicitly set this to catch exceptions
         'ignore_no_formats_error': False, # we must fail first to try again with this enabled
@@ -158,10 +166,14 @@ def get_media_info(url, days=None):
         'check_thumbnails': False,
         'daterange': yt_dlp.utils.DateRange(start=start),
+        'extractor_args': {
+            'youtube': {'formats': ['missing_pot']},
+            'youtubetab': {'approximate_date': ['true']},
+        },
+        'paths': paths,
         'sleep_interval_requests': 2,
         'verbose': True if settings.DEBUG else False,
     })
     if start:
         log.debug(f'get_media_info: used date range: {opts["daterange"]} for URL: {url}')
     response = {}
     with yt_dlp.YoutubeDL(opts) as y:
         try:
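The new block gives each `get_media_info` call a private scratch directory under the configured yt-dlp `temp` path. The mechanics in isolation, with a stand-in `paths` dict:

```python
# Illustrative: a private per-call temp dir under the configured 'temp' path.
from pathlib import Path
from tempfile import TemporaryDirectory

paths = {'temp': '/tmp'}                       # stand-in for opts.get('paths')
tmp = TemporaryDirectory(prefix='.yt_dlp-', dir=paths['temp'])
tmp_path = Path(tmp.name)
(tmp_path / '.ignore').touch(exist_ok=True)    # marker file in the scratch dir
paths['temp'] = str(tmp_path)                  # yt-dlp now writes partials here
# The directory, and anything left in it, is removed when 'tmp' is
# cleaned up or garbage-collected.
```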
@@ -1,40 +1,41 @@
-import os
 import sys
 from pathlib import Path
 from urllib.parse import urljoin
-from common.utils import parse_database_connection_string
+from common.utils import getenv, parse_database_connection_string
 
 
 BASE_DIR = Path(__file__).resolve().parent.parent
 ROOT_DIR = Path('/')
 CONFIG_BASE_DIR = ROOT_DIR / 'config'
 DOWNLOADS_BASE_DIR = ROOT_DIR / 'downloads'
-DJANGO_URL_PREFIX = os.getenv('DJANGO_URL_PREFIX', None)
-STATIC_URL = str(os.getenv('DJANGO_STATIC_URL', '/static/'))
+DJANGO_URL_PREFIX = getenv('DJANGO_URL_PREFIX').strip()
+STATIC_URL = getenv('DJANGO_STATIC_URL', '/static/').strip()
 if DJANGO_URL_PREFIX and STATIC_URL:
     STATIC_URL = urljoin(DJANGO_URL_PREFIX, STATIC_URL[1:])
 
 
 # This is not ever meant to be a public web interface so this isn't too critical
-SECRET_KEY = str(os.getenv('DJANGO_SECRET_KEY', 'tubesync-django-secret'))
+SECRET_KEY = getenv('DJANGO_SECRET_KEY', 'tubesync-django-secret')
 
 
-ALLOWED_HOSTS_STR = str(os.getenv('TUBESYNC_HOSTS', '*'))
+ALLOWED_HOSTS_STR = getenv('TUBESYNC_HOSTS', '*')
 ALLOWED_HOSTS = ALLOWED_HOSTS_STR.split(',')
-DEBUG = True if os.getenv('TUBESYNC_DEBUG', False) else False
-FORCE_SCRIPT_NAME = os.getenv('DJANGO_FORCE_SCRIPT_NAME', DJANGO_URL_PREFIX)
+DEBUG_STR = getenv('TUBESYNC_DEBUG', False)
+DEBUG = True if 'true' == DEBUG_STR.strip().lower() else False
+FORCE_SCRIPT_NAME = getenv('DJANGO_FORCE_SCRIPT_NAME', DJANGO_URL_PREFIX)
 
 
 database_dict = {}
-database_connection_env = os.getenv('DATABASE_CONNECTION', '')
+database_connection_env = getenv('DATABASE_CONNECTION')
 if database_connection_env:
     database_dict = parse_database_connection_string(database_connection_env)
 
 
 if database_dict:
-    print(f'Using database connection: {database_dict["ENGINE"]}://'
+    print(f'Using database connection: {database_dict["DRIVER"]}://'
           f'{database_dict["USER"]}:[hidden]@{database_dict["HOST"]}:'
-          f'{database_dict["PORT"]}/{database_dict["NAME"]}', file=sys.stdout)
+          f'{database_dict["PORT"]}/{database_dict["NAME"]}',
+          file=sys.stdout, flush=True)
     DATABASES = {
         'default': database_dict,
     }
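Note the behavioural fix in the `DEBUG` lines above: `os.getenv('TUBESYNC_DEBUG', False)` returns the raw string when the variable is set, and any non-empty string is truthy, so `TUBESYNC_DEBUG=false` used to switch DEBUG on. The new comparison only accepts a literal `true`:

```python
# Why the DEBUG parsing changed (illustrative):
value = 'false'                                # e.g. TUBESYNC_DEBUG=false
old_debug = True if value else False           # True -- any non-empty string
new_debug = ('true' == value.strip().lower())  # False, as the user intended
```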
@@ -60,7 +61,7 @@ else:
 
 
 DEFAULT_THREADS = 1
-BACKGROUND_TASK_ASYNC_THREADS = int(os.getenv('TUBESYNC_WORKERS', DEFAULT_THREADS))
+BACKGROUND_TASK_ASYNC_THREADS = getenv('TUBESYNC_WORKERS', DEFAULT_THREADS, integer=True)
 
 
 MEDIA_ROOT = CONFIG_BASE_DIR / 'media'
@@ -70,14 +71,14 @@ YOUTUBE_DL_TEMPDIR = DOWNLOAD_ROOT / 'cache'
 COOKIES_FILE = CONFIG_BASE_DIR / 'cookies.txt'
 
 
-HEALTHCHECK_FIREWALL_STR = str(os.getenv('TUBESYNC_HEALTHCHECK_FIREWALL', 'True')).strip().lower()
-HEALTHCHECK_FIREWALL = True if HEALTHCHECK_FIREWALL_STR == 'true' else False
-HEALTHCHECK_ALLOWED_IPS_STR = str(os.getenv('TUBESYNC_HEALTHCHECK_ALLOWED_IPS', '127.0.0.1'))
+HEALTHCHECK_FIREWALL_STR = getenv('TUBESYNC_HEALTHCHECK_FIREWALL', True)
+HEALTHCHECK_FIREWALL = ( 'true' == HEALTHCHECK_FIREWALL_STR.strip().lower() )
+HEALTHCHECK_ALLOWED_IPS_STR = getenv('TUBESYNC_HEALTHCHECK_ALLOWED_IPS', '127.0.0.1')
 HEALTHCHECK_ALLOWED_IPS = HEALTHCHECK_ALLOWED_IPS_STR.split(',')
 
 
-BASICAUTH_USERNAME = os.getenv('HTTP_USER', '').strip()
-BASICAUTH_PASSWORD = os.getenv('HTTP_PASS', '').strip()
+BASICAUTH_USERNAME = getenv('HTTP_USER').strip()
+BASICAUTH_PASSWORD = getenv('HTTP_PASS').strip()
 if BASICAUTH_USERNAME and BASICAUTH_PASSWORD:
     BASICAUTH_DISABLE = False
     BASICAUTH_USERS = {
@@ -88,25 +89,25 @@ else:
     BASICAUTH_USERS = {}
 
 
-SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR = os.getenv('TUBESYNC_DIRECTORY_PREFIX', 'True').strip().lower()
-SOURCE_DOWNLOAD_DIRECTORY_PREFIX = True if SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR == 'true' else False
+SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR = getenv('TUBESYNC_DIRECTORY_PREFIX', True)
+SOURCE_DOWNLOAD_DIRECTORY_PREFIX = ( 'true' == SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR.strip().lower() )
 
 
-SHRINK_NEW_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_NEW', 'false').strip().lower()
-SHRINK_NEW_MEDIA_METADATA = ( 'true' == SHRINK_NEW_MEDIA_METADATA_STR )
-SHRINK_OLD_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_OLD', 'false').strip().lower()
-SHRINK_OLD_MEDIA_METADATA = ( 'true' == SHRINK_OLD_MEDIA_METADATA_STR )
+SHRINK_NEW_MEDIA_METADATA_STR = getenv('TUBESYNC_SHRINK_NEW', False)
+SHRINK_NEW_MEDIA_METADATA = ( 'true' == SHRINK_NEW_MEDIA_METADATA_STR.strip().lower() )
+SHRINK_OLD_MEDIA_METADATA_STR = getenv('TUBESYNC_SHRINK_OLD', False)
+SHRINK_OLD_MEDIA_METADATA = ( 'true' == SHRINK_OLD_MEDIA_METADATA_STR.strip().lower() )
 
 
 # TUBESYNC_RENAME_ALL_SOURCES: True or False
-RENAME_ALL_SOURCES_STR = os.getenv('TUBESYNC_RENAME_ALL_SOURCES', 'False').strip().lower()
-RENAME_ALL_SOURCES = ( 'true' == RENAME_ALL_SOURCES_STR )
+RENAME_ALL_SOURCES_STR = getenv('TUBESYNC_RENAME_ALL_SOURCES', False)
+RENAME_ALL_SOURCES = ( 'true' == RENAME_ALL_SOURCES_STR.strip().lower() )
 # TUBESYNC_RENAME_SOURCES: A comma-separated list of Source directories
-RENAME_SOURCES_STR = os.getenv('TUBESYNC_RENAME_SOURCES', '')
+RENAME_SOURCES_STR = getenv('TUBESYNC_RENAME_SOURCES')
 RENAME_SOURCES = RENAME_SOURCES_STR.split(',') if RENAME_SOURCES_STR else None
 
 
-VIDEO_HEIGHT_CUTOFF = int(os.getenv("TUBESYNC_VIDEO_HEIGHT_CUTOFF", "240"))
+VIDEO_HEIGHT_CUTOFF = getenv("TUBESYNC_VIDEO_HEIGHT_CUTOFF", 240, integer=True)
 
 
 # ensure that the current directory exists
@@ -117,4 +118,11 @@ old_youtube_cache_dirs = list(YOUTUBE_DL_CACHEDIR.parent.glob('youtube-*'))
 old_youtube_cache_dirs.extend(list(YOUTUBE_DL_CACHEDIR.parent.glob('youtube/youtube-*')))
 for cache_dir in old_youtube_cache_dirs:
     cache_dir.rename(YOUTUBE_DL_CACHEDIR / cache_dir.name)
+# try to remove the old, hopefully empty, directory
+empty_old_youtube_dir = YOUTUBE_DL_CACHEDIR.parent / 'youtube'
+if empty_old_youtube_dir.is_dir():
+    try:
+        empty_old_youtube_dir.rmdir()
+    except:
+        pass
 
@@ -1,5 +1,5 @@
-import os
 from pathlib import Path
+from common.utils import getenv
 
 
 BASE_DIR = Path(__file__).resolve().parent.parent
@@ -97,7 +97,7 @@ AUTH_PASSWORD_VALIDATORS = [
 
 
 LANGUAGE_CODE = 'en-us'
-TIME_ZONE = os.getenv('TZ', 'UTC')
+TIME_ZONE = getenv('TZ', 'UTC')
 USE_I18N = True
 USE_L10N = True
 USE_TZ = True