From 3e5711f0f247eb77ecddd19e5131c6010cca7e9e Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 11 Dec 2024 08:19:34 -0500 Subject: [PATCH 01/79] Report db.sqlite3 size on dashboard --- tubesync/sync/views.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index 94e91432..e998d559 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -85,6 +85,12 @@ class DashboardView(TemplateView): data['config_dir'] = str(settings.CONFIG_BASE_DIR) data['downloads_dir'] = str(settings.DOWNLOAD_ROOT) data['database_connection'] = settings.DATABASE_CONNECTION_STR + # Add the database filesize when using db.sqlite3 + db_name = str(settings.DATABASES["default"]["NAME"]) + db_path = pathlib.Path(db_name) if '/' == db_name[0] else None + if db_path and settings.DATABASE_CONNECTION_STR.startswith('sqlite at '): + db_size = db_path.stat().st_size + data['database_connection'] += f' ({db_size:,} bytes)' return data From 730275746d78f7ab3e75a65fb7f020a01f263280 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 15:19:54 -0500 Subject: [PATCH 02/79] Use django.db.connection as suggested --- tubesync/sync/views.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index e998d559..3fb23044 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -14,7 +14,7 @@ from django.views.generic.detail import SingleObjectMixin from django.core.exceptions import SuspiciousFileOperation from django.http import HttpResponse from django.urls import reverse_lazy -from django.db import IntegrityError +from django.db import connection, IntegrityError from django.db.models import Q, Count, Sum, When, Case from django.forms import Form, ValidationError from django.utils.text import slugify @@ -86,9 +86,9 @@ class DashboardView(TemplateView): data['downloads_dir'] = str(settings.DOWNLOAD_ROOT) data['database_connection'] = settings.DATABASE_CONNECTION_STR # 
Add the database filesize when using db.sqlite3 - db_name = str(settings.DATABASES["default"]["NAME"]) + db_name = str(connection.get_connection_params()['database']) db_path = pathlib.Path(db_name) if '/' == db_name[0] else None - if db_path and settings.DATABASE_CONNECTION_STR.startswith('sqlite at '): + if db_path and 'sqlite' == connection.vendor: db_size = db_path.stat().st_size data['database_connection'] += f' ({db_size:,} bytes)' return data From 59865a885bf411bfeeed775a32c20113349eeb1b Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 2 Jan 2025 02:39:19 -0500 Subject: [PATCH 03/79] Match another variation of the message ``` {key}: This video is available to this channel's members on level: Level 2: Contributor (or any higher level). Join this YouTube channel from your computer or Android app. ``` --- tubesync/sync/youtube.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tubesync/sync/youtube.py b/tubesync/sync/youtube.py index 1eac4c7f..5fdef3cb 100644 --- a/tubesync/sync/youtube.py +++ b/tubesync/sync/youtube.py @@ -81,6 +81,8 @@ def _subscriber_only(msg='', response=None): return True if ': Join this channel' in msg: return True + if 'Join this YouTube channel' in msg: + return True else: # ignore msg entirely if not isinstance(response, dict): From e64f71a9704b8d72d2b6b0f693605c6bc7988977 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 6 Jan 2025 10:23:17 -0500 Subject: [PATCH 04/79] Don't chmod a+r when it already has those permissions --- tubesync/sync/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 3e29fe3f..73c8f394 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -123,7 +123,8 @@ def write_text_file(filepath, filedata): bytes_written = f.write(filedata) # chmod a+r temp_file old_mode = new_filepath.stat().st_mode - new_filepath.chmod(0o444 | old_mode) + if 0o444 != (0o444 & old_mode): + new_filepath.chmod(0o444 | old_mode) if not 
file_is_editable(new_filepath): new_filepath.unlink() raise ValueError(f'File cannot be edited or removed: {filepath}') From 215aa64f2d6b3e5ead44f8a37ff382358eee1ace Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 6 Jan 2025 10:35:37 -0500 Subject: [PATCH 05/79] Change to a logged warning for NFO permission problems --- tubesync/sync/tasks.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/tasks.py b/tubesync/sync/tasks.py index 3df651ba..b36b5d49 100644 --- a/tubesync/sync/tasks.py +++ b/tubesync/sync/tasks.py @@ -446,7 +446,11 @@ def download_media(media_id): # If selected, write an NFO file if media.source.write_nfo: log.info(f'Writing media NFO file to: {media.nfopath}') - write_text_file(media.nfopath, media.nfoxml) + try: + write_text_file(media.nfopath, media.nfoxml) + except PermissionError as e: + log.warn(f'A permissions problem occured when writing the new media NFO file: {e.msg}') + pass # Schedule a task to update media servers for mediaserver in MediaServer.objects.all(): log.info(f'Scheduling media server updates') From 8c22b6c99efb464dfb450707440c6162f04b7b46 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 00:05:57 -0500 Subject: [PATCH 06/79] Add response filtering These functions aren't being used yet, they will be tested against my database before that happens. 
--- tubesync/sync/utils.py | 62 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 3e29fe3f..e44cef1f 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -170,6 +170,68 @@ def normalize_codec(codec_str): return result +def _url_keys(arg_dict, filter_func): + result = {} + for key in arg_dict.keys(): + if 'url' in key: + result.update( + {key: (key, filter_func(key=key, url=arg_dict[key]),)} + ) + return result + + +def _drop_url_keys(arg_dict, key, filter_func): + if key in arg_dict.keys(): + for val_dict in arg_dict[key]: + for url_key in _url_keys(val_dict, filter_func): + if url_key[1] is True: + del val_dict[url_key[0]] + + +def filter_response(response_dict): + ''' + Clean up the response so as to not store useless metadata in the database. + ''' + # raise an exception for an unexpected argument type + if not isinstance(filedata, dict): + raise TypeError(f'filedata must be a dict, got "{type(filedata)}"') + # optimize the empty case + if not response_dict: + return response_dict + + # beginning of formats cleanup {{{ + # drop urls that expire, or restrict IPs + def drop_format_url(**kwargs): + url = kwargs['url'] + return ( + url + and '://' in url + and ( + '/ip/' in url + or '/expire/' in url + ) + ) + + _drop_url_keys(response_dict, 'formats', drop_format_url) + _drop_url_keys(response_dict, 'requested_formats', drop_format_url) + # end of formats cleanup }}} + + # beginning of automatic_captions cleanup {{{ + # drop urls that expire, or restrict IPs + def drop_auto_caption_url(**kwargs): + url = kwargs['url'] + return ( + url + and '://' in url + and '&expire=' in url + ) + + _drop_url_keys(response_dict, 'automatic_captions', drop_auto_caption_url) + # end of automatic_captions cleanup }}} + + return response_dict + + def parse_media_format(format_dict): ''' This parser primarily adapts the format dict returned by youtube-dl into a From 
63fa97cc5842af7805c3efb1c8d58971b096893d Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 00:43:59 -0500 Subject: [PATCH 07/79] More compact JSON The software doesn't need an extra space per key. --- tubesync/sync/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/tasks.py b/tubesync/sync/tasks.py index 3df651ba..080dff6d 100644 --- a/tubesync/sync/tasks.py +++ b/tubesync/sync/tasks.py @@ -304,7 +304,7 @@ def download_media_metadata(media_id): return source = media.source metadata = media.index_metadata() - media.metadata = json.dumps(metadata, default=json_serial) + media.metadata = json.dumps(metadata, separators=(',', ':'), default=json_serial) upload_date = media.upload_date # Media must have a valid upload date if upload_date: From 8c31720bf707b0b12713af0e8a5a356f3bc6255d Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 01:33:06 -0500 Subject: [PATCH 08/79] Log the reduction of metadata length --- tubesync/sync/models.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 2037492d..7ae68729 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -19,7 +19,7 @@ from common.utils import clean_filename, clean_emoji from .youtube import (get_media_info as get_youtube_media_info, download_media as download_youtube_media, get_channel_image_info as get_youtube_channel_image_info) -from .utils import seconds_to_timestr, parse_media_format +from .utils import seconds_to_timestr, parse_media_format, filter_response from .matching import (get_best_combined_format, get_best_audio_format, get_best_video_format) from .mediaservers import PlexMediaServer @@ -1143,12 +1143,27 @@ class Media(models.Model): def has_metadata(self): return self.metadata is not None + + def reduce_data(self, data): + from common.logger import log + from common.utils import json_serial + # log the results of filtering / compacting on 
metadata size + filtered_data = filter_response(data) + compact_metadata = json.dumps(filtered_data, separators=(',', ':'), default=json_serial) + old_mdl = len(self.metadata) + new_mdl = len(compact_metadata) + if old_mdl > new_mdl: + delta = old_mdl - new_mdl + log.info(f'{self.key}: metadata reduced by {delta,} characters ({old_mdl,} -> {new_mdl,})') + + @property def loaded_metadata(self): try: data = json.loads(self.metadata) if not isinstance(data, dict): return {} + self.reduce_data(data) return data except Exception as e: return {} From 25d2ff680270aa9e4188233cba3770cd9dc5275e Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 02:12:22 -0500 Subject: [PATCH 09/79] Don't reduce the actual data yet --- tubesync/sync/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 7ae68729..44f24dfb 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1163,7 +1163,7 @@ class Media(models.Model): data = json.loads(self.metadata) if not isinstance(data, dict): return {} - self.reduce_data(data) + self.reduce_data(json.loads(self.metadata)) return data except Exception as e: return {} From 2f34fff7133754c05d348d50e43442a481c8adfc Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 02:55:05 -0500 Subject: [PATCH 10/79] Fixes from testing The `automatic_captions` has a layer for language codes that I didn't account for. The type checking was copied and I didn't adjust for the arguments in this function. --- tubesync/sync/utils.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 162146eb..b85abaab 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -194,8 +194,8 @@ def filter_response(response_dict): Clean up the response so as to not store useless metadata in the database. 
''' # raise an exception for an unexpected argument type - if not isinstance(filedata, dict): - raise TypeError(f'filedata must be a dict, got "{type(filedata)}"') + if not isinstance(response_dict, dict): + raise TypeError(f'response_dict must be a dict, got "{type(response_dict)}"') # optimize the empty case if not response_dict: return response_dict @@ -227,7 +227,11 @@ def filter_response(response_dict): and '&expire=' in url ) - _drop_url_keys(response_dict, 'automatic_captions', drop_auto_caption_url) + ac_key = 'automatic_captions' + if ac_key in response_dict.keys(): + ac_dict = response_dict[ac_key] + for lang_code in ac_dict: + _drop_url_keys(ac_dict, lang_code, drop_auto_caption_url) # end of automatic_captions cleanup }}} return response_dict From 9a4101a0a147f3fe0ee91c13197a077f1f27cd3e Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 03:18:39 -0500 Subject: [PATCH 11/79] Fix formatting --- tubesync/sync/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 44f24dfb..077a8283 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1154,7 +1154,7 @@ class Media(models.Model): new_mdl = len(compact_metadata) if old_mdl > new_mdl: delta = old_mdl - new_mdl - log.info(f'{self.key}: metadata reduced by {delta,} characters ({old_mdl,} -> {new_mdl,})') + log.info(f'{self.key}: metadata reduced by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})') @property From db25fa80294e035b1742fac2e044d2ff7de27464 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 03:35:58 -0500 Subject: [PATCH 12/79] Adjusted comment --- tubesync/sync/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index b85abaab..108cd757 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -218,7 +218,7 @@ def filter_response(response_dict): # end of formats cleanup }}} # beginning of automatic_captions 
cleanup {{{ - # drop urls that expire, or restrict IPs + # drop urls that expire def drop_auto_caption_url(**kwargs): url = kwargs['url'] return ( From 431de2e0dfa606d5a725a475159afe5fe370a251 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 04:11:14 -0500 Subject: [PATCH 13/79] Loop over a set of keys for each URL type --- tubesync/sync/utils.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 108cd757..f66348b4 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -213,13 +213,13 @@ def filter_response(response_dict): ) ) - _drop_url_keys(response_dict, 'formats', drop_format_url) - _drop_url_keys(response_dict, 'requested_formats', drop_format_url) + for key in frozenset(('formats', 'requested_formats',)): + _drop_url_keys(response_dict, key, drop_format_url) # end of formats cleanup }}} - # beginning of automatic_captions cleanup {{{ + # beginning of subtitles cleanup {{{ # drop urls that expire - def drop_auto_caption_url(**kwargs): + def drop_subtitles_url(**kwargs): url = kwargs['url'] return ( url @@ -227,12 +227,13 @@ def filter_response(response_dict): and '&expire=' in url ) - ac_key = 'automatic_captions' - if ac_key in response_dict.keys(): - ac_dict = response_dict[ac_key] - for lang_code in ac_dict: - _drop_url_keys(ac_dict, lang_code, drop_auto_caption_url) - # end of automatic_captions cleanup }}} + # beginning of automatic_captions cleanup {{{ + for key in frozenset(('subtitles', 'automatic_captions',)): + if key in response_dict.keys(): + key_dict = response_dict[key] + for lang_code in key_dict: + _drop_url_keys(key_dict, lang_code, drop_subtitles_url) + # end of subtitles cleanup }}} return response_dict From 7b8d11791d9725191146304f612ae7e2f7d3d0ec Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 05:39:50 -0500 Subject: [PATCH 14/79] Drop keys from formats that cannot be useful --- tubesync/sync/utils.py | 13 
++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index f66348b4..8e98857e 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -213,8 +213,20 @@ def filter_response(response_dict): ) ) + # these format keys are not useful to us + drop_keys = frozenset(( + 'downloader_options', + 'fragments', + 'http_headers', + '__needs_testing', + '__working', + )) for key in frozenset(('formats', 'requested_formats',)): _drop_url_keys(response_dict, key, drop_format_url) + if key in response_dict.keys(): + for format in response_dict[key]: + for drop_key in drop_keys: + del format[drop_key] # end of formats cleanup }}} # beginning of subtitles cleanup {{{ @@ -227,7 +239,6 @@ def filter_response(response_dict): and '&expire=' in url ) - # beginning of automatic_captions cleanup {{{ for key in frozenset(('subtitles', 'automatic_captions',)): if key in response_dict.keys(): key_dict = response_dict[key] From c7457e94ac1f27c04f912a086b9cc766f4ab5882 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 05:58:50 -0500 Subject: [PATCH 15/79] Check that the drop_key exists --- tubesync/sync/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 8e98857e..f73e243b 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -226,7 +226,8 @@ def filter_response(response_dict): if key in response_dict.keys(): for format in response_dict[key]: for drop_key in drop_keys: - del format[drop_key] + if drop_key in format.keys(): + del format[drop_key] # end of formats cleanup }}} # beginning of subtitles cleanup {{{ From 2d85bcbe14c0701782d5c76b0cb36116be193d08 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 06:20:01 -0500 Subject: [PATCH 16/79] Use a distinct try to log errors --- tubesync/sync/models.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/models.py 
b/tubesync/sync/models.py index 077a8283..6bcac984 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1159,11 +1159,17 @@ class Media(models.Model): @property def loaded_metadata(self): + from common.logger import log + try: + self.reduce_data(json.loads(self.metadata)) + except Exception as e: + log.error(f'reduce_data: {e.msg}') + pass + try: data = json.loads(self.metadata) if not isinstance(data, dict): return {} - self.reduce_data(json.loads(self.metadata)) return data except Exception as e: return {} From 8ac5b36eee9a504d0f0b5a9092c5120fa7f8ecbf Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 06:38:56 -0500 Subject: [PATCH 17/79] Use the exception function for traceback --- tubesync/sync/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 6bcac984..54fcdaa6 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1163,7 +1163,7 @@ class Media(models.Model): try: self.reduce_data(json.loads(self.metadata)) except Exception as e: - log.error(f'reduce_data: {e.msg}') + log.exception('reduce_data: %s', e) pass try: From 779370122847bb24484181834a299f7e3f41ed1f Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 13:01:06 -0500 Subject: [PATCH 18/79] Simplify results from _url_keys Also, name the tuple values when using the results. 
--- tubesync/sync/utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index f73e243b..170b2a51 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -176,7 +176,7 @@ def _url_keys(arg_dict, filter_func): for key in arg_dict.keys(): if 'url' in key: result.update( - {key: (key, filter_func(key=key, url=arg_dict[key]),)} + {key: (filter_func(key=key, url=arg_dict[key]),)} ) return result @@ -184,9 +184,9 @@ def _url_keys(arg_dict, filter_func): def _drop_url_keys(arg_dict, key, filter_func): if key in arg_dict.keys(): for val_dict in arg_dict[key]: - for url_key in _url_keys(val_dict, filter_func): - if url_key[1] is True: - del val_dict[url_key[0]] + for url_key, remove in _url_keys(val_dict, filter_func).items(): + if remove is True: + del val_dict[url_key] def filter_response(response_dict): From 1c432ccce127439bc722e4d0727d545794d51e4e Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 13:49:58 -0500 Subject: [PATCH 19/79] Some formats are using a different URL --- tubesync/sync/utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 170b2a51..14e7505f 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -176,7 +176,7 @@ def _url_keys(arg_dict, filter_func): for key in arg_dict.keys(): if 'url' in key: result.update( - {key: (filter_func(key=key, url=arg_dict[key]),)} + {key: filter_func(key=key, url=arg_dict[key])} ) return result @@ -209,7 +209,9 @@ def filter_response(response_dict): and '://' in url and ( '/ip/' in url + or 'ip=' in url or '/expire/' in url + or 'expire=' in url ) ) From 6e116899a72dfe9cf8f9b94442274b276a113715 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 7 Jan 2025 23:45:25 -0500 Subject: [PATCH 20/79] We don't need to keep bash running --- config/root/etc/s6-overlay/s6-rc.d/nginx/run | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/config/root/etc/s6-overlay/s6-rc.d/nginx/run b/config/root/etc/s6-overlay/s6-rc.d/nginx/run index 6981f2e9..87769e62 100755 --- a/config/root/etc/s6-overlay/s6-rc.d/nginx/run +++ b/config/root/etc/s6-overlay/s6-rc.d/nginx/run @@ -2,4 +2,4 @@ cd / -/usr/sbin/nginx +exec /usr/sbin/nginx From ab5e63f6433898d6545b42cb7f80af92734e3e07 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 00:12:21 -0500 Subject: [PATCH 21/79] Make better use of CPU cache with nginx --- config/root/etc/nginx/nginx.conf | 1 + 1 file changed, 1 insertion(+) diff --git a/config/root/etc/nginx/nginx.conf b/config/root/etc/nginx/nginx.conf index 14c5aea9..f09c02e1 100644 --- a/config/root/etc/nginx/nginx.conf +++ b/config/root/etc/nginx/nginx.conf @@ -2,6 +2,7 @@ daemon off; user app; worker_processes auto; +worker_cpu_affinity auto; pid /run/nginx.pid; events { From f0b7d31949dbdf0203efb716d0b6a72999582c7b Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 00:31:30 -0500 Subject: [PATCH 22/79] Test and log ffmpeg version output earlier Running the ffmpeg in an earlier (hopefully cached) layer should clean up the logs a bit. On a related note, shadowing the environment variable was causing some confusing log output, so stop doing that as well. 
--- Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 76bb21b2..c552f9d5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -109,6 +109,7 @@ RUN decide_arch() { \ _file="/tmp/ffmpeg-${ARCH}.tar.xz" && \ download_expected_file ffmpeg "${TARGETARCH}" "${_file}" && \ tar -xvvpf "${_file}" --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" "ffprobe" && rm -f "${_file}" && \ + /usr/local/bin/ffmpeg -version && \ file /usr/local/bin/ff* && \ # Clean up apt-get -y autoremove --purge curl file binutils xz-utils && \ @@ -217,10 +218,9 @@ RUN set -x && \ # Append software versions RUN set -x && \ - /usr/local/bin/ffmpeg -version && \ - FFMPEG_VERSION=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \ - test -n "${FFMPEG_VERSION}" && \ - printf -- "ffmpeg_version = '%s'\n" "${FFMPEG_VERSION}" >> /app/common/third_party_versions.py + ffmpeg_version=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \ + test -n "${ffmpeg_version}" && \ + printf -- "ffmpeg_version = '%s'\n" "${ffmpeg_version}" >> /app/common/third_party_versions.py # Copy root COPY config/root / From 7b42213bbb50c172ffe23dfbb6728913c3b1ebf4 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 00:36:37 -0500 Subject: [PATCH 23/79] Keep curl and add less These are very useful when using the shell inside the container and don't use much space. 
--- Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index c552f9d5..880dd677 100644 --- a/Dockerfile +++ b/Dockerfile @@ -112,7 +112,7 @@ RUN decide_arch() { \ /usr/local/bin/ffmpeg -version && \ file /usr/local/bin/ff* && \ # Clean up - apt-get -y autoremove --purge curl file binutils xz-utils && \ + apt-get -y autoremove --purge file binutils xz-utils && \ rm -rf /var/lib/apt/lists/* && \ rm -rf /var/cache/apt/* && \ rm -rf /tmp/* @@ -132,6 +132,8 @@ RUN set -x && \ python3 \ python3-wheel \ redis-server \ + curl \ + less \ && apt-get -y autoclean && \ rm -rf /var/lib/apt/lists/* && \ rm -rf /var/cache/apt/* && \ From d35f52f8acb07c30f81c855a855b63d284dbaedf Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 11:31:23 -0500 Subject: [PATCH 24/79] Drop /expire/ URLs from automatic_captions too --- tubesync/sync/utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 14e7505f..b424528b 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -239,7 +239,10 @@ def filter_response(response_dict): return ( url and '://' in url - and '&expire=' in url + and ( + '/expire/' in url + or '&expire=' in url + ) ) for key in frozenset(('subtitles', 'automatic_captions',)): From ad10bcfa61af480fd9be9b3f7a97baeba18e033d Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 22:48:23 -0500 Subject: [PATCH 25/79] Log both compacted and reduced sizes --- tubesync/sync/models.py | 43 ++++++++++++++++++++++++----------------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 54fcdaa6..76dea0b1 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1144,28 +1144,35 @@ class Media(models.Model): return self.metadata is not None - def reduce_data(self, data): - from common.logger import log - from common.utils import json_serial - # log the results 
of filtering / compacting on metadata size - filtered_data = filter_response(data) - compact_metadata = json.dumps(filtered_data, separators=(',', ':'), default=json_serial) - old_mdl = len(self.metadata) - new_mdl = len(compact_metadata) - if old_mdl > new_mdl: - delta = old_mdl - new_mdl - log.info(f'{self.key}: metadata reduced by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})') + @property + def reduce_data(self): + try: + from common.logger import log + from common.utils import json_serial + + old_mdl = len(self.metadata or "") + data = json.loads(self.metadata or "") + compact_data = json.dumps(data, separators=(',', ':'), default=json_serial) + + filtered_data = filter_response(data) + filtered_json = json.dumps(filtered_data, separators=(',', ':'), default=json_serial) + except Exception as e: + log.exception('reduce_data: %s', e) + else: + # log the results of filtering / compacting on metadata size + new_mdl = len(compact_data) + if old_mdl > new_mdl: + delta = old_mdl - new_mdl + log.info(f'{self.key}: metadata compacted by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})') + new_mdl = len(filtered_json) + if old_mdl > new_mdl: + delta = old_mdl - new_mdl + log.info(f'{self.key}: metadata reduced by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})') @property def loaded_metadata(self): - from common.logger import log - try: - self.reduce_data(json.loads(self.metadata)) - except Exception as e: - log.exception('reduce_data: %s', e) - pass - + self.reduce_data try: data = json.loads(self.metadata) if not isinstance(data, dict): From 100382f66fea8b8dd27532932f23f4160d354401 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 09:28:58 -0500 Subject: [PATCH 26/79] Rename compact_data to compact_json This was misleading because the data dict becomes a JSON string. 
--- tubesync/sync/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 76dea0b1..67453f03 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1152,7 +1152,7 @@ class Media(models.Model): old_mdl = len(self.metadata or "") data = json.loads(self.metadata or "") - compact_data = json.dumps(data, separators=(',', ':'), default=json_serial) + compact_json = json.dumps(data, separators=(',', ':'), default=json_serial) filtered_data = filter_response(data) filtered_json = json.dumps(filtered_data, separators=(',', ':'), default=json_serial) @@ -1160,7 +1160,7 @@ class Media(models.Model): log.exception('reduce_data: %s', e) else: # log the results of filtering / compacting on metadata size - new_mdl = len(compact_data) + new_mdl = len(compact_json) if old_mdl > new_mdl: delta = old_mdl - new_mdl log.info(f'{self.key}: metadata compacted by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})') From 682a53da34d18d777e58e6080df4390f44519686 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 10:17:37 -0500 Subject: [PATCH 27/79] Add a filter_response test First, only check that changes did happen. 
--- tubesync/sync/tests.py | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/tubesync/sync/tests.py b/tubesync/sync/tests.py index 8f0de6ef..935ad569 100644 --- a/tubesync/sync/tests.py +++ b/tubesync/sync/tests.py @@ -18,6 +18,7 @@ from background_task.models import Task from .models import Source, Media from .tasks import cleanup_old_media from .filtering import filter_media +from .utils import filter_response class FrontEndTestCase(TestCase): @@ -1709,6 +1710,43 @@ class FormatMatchingTestCase(TestCase): f'expected {expected_match_result}') +class ResponseFilteringTestCase(TestCase): + + def setUp(self): + # Disable general logging for test case + logging.disable(logging.CRITICAL) + # Add a test source + self.source = Source.objects.create( + source_type=Source.SOURCE_TYPE_YOUTUBE_CHANNEL, + key='testkey', + name='testname', + directory='testdirectory', + index_schedule=3600, + delete_old_media=False, + days_to_keep=14, + source_resolution=Source.SOURCE_RESOLUTION_1080P, + source_vcodec=Source.SOURCE_VCODEC_VP9, + source_acodec=Source.SOURCE_ACODEC_OPUS, + prefer_60fps=False, + prefer_hdr=False, + fallback=Source.FALLBACK_FAIL + ) + # Add some media + self.media = Media.objects.create( + key='mediakey', + source=self.source, + metadata='{}' + ) + + def test_metadata_20230629(self): + self.media.metadata = all_test_metadata['20230629'] + self.media.save() + + unfiltered = self.media.loaded_metadata + filtered = filter_response(self.media.loaded_metadata) + self.assertNotEqual(len(str(unfiltered)), len(str(filtered))) + + class TasksTestCase(TestCase): def setUp(self): From 4c9fa40bb0e47871caffaf9a3212932727ffc1cb Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 11:47:10 -0500 Subject: [PATCH 28/79] More filter_response asserts --- tubesync/sync/tests.py | 36 +++++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/tests.py b/tubesync/sync/tests.py index 
935ad569..bc199282 100644 --- a/tubesync/sync/tests.py +++ b/tubesync/sync/tests.py @@ -1744,7 +1744,41 @@ class ResponseFilteringTestCase(TestCase): unfiltered = self.media.loaded_metadata filtered = filter_response(self.media.loaded_metadata) - self.assertNotEqual(len(str(unfiltered)), len(str(filtered))) + self.assertIn('formats', unfiltered.keys()) + self.assertIn('formats', filtered.keys()) + # filtered 'http_headers' + self.assertIn('http_headers', unfiltered['formats'][0].keys()) + self.assertNotIn('http_headers', filtered['formats'][0].keys()) + # did not lose any formats + self.assertEqual(48, len(unfiltered['formats'])) + self.assertEqual(48, len(filtered['formats'])) + self.assertEqual(len(unfiltered['formats']), len(filtered['formats'])) + # did reduce the size of the metadata + self.assertTrue(len(str(filtered)) < len(str(unfiltered))) + + url_keys = [] + for format in unfiltered['formats']: + for key in format.keys(): + if 'url' in key: + url_keys.append((format['format_id'], key, format[key],)) + unfiltered_url_keys = url_keys + self.assertEqual(63, len(unfiltered_url_keys), msg=str(unfiltered_url_keys)) + + url_keys = [] + for format in filtered['formats']: + for key in format.keys(): + if 'url' in key: + url_keys.append((format['format_id'], key, format[key],)) + filtered_url_keys = url_keys + self.assertEqual(3, len(filtered_url_keys), msg=str(filtered_url_keys)) + + url_keys = [] + for lang_code, captions in filtered['automatic_captions'].items(): + for caption in captions: + for key in caption.keys(): + if 'url' in key: + url_keys.append((lang_code, caption['ext'], caption[key],)) + self.assertEqual(0, len(url_keys), msg=str(url_keys)) class TasksTestCase(TestCase): From 3e3f80d287c637c34f5c5094aa313531dfbe7b77 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 12:04:01 -0500 Subject: [PATCH 29/79] More filter_response asserts --- tubesync/sync/tests.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tubesync/sync/tests.py 
b/tubesync/sync/tests.py index bc199282..2704058f 100644 --- a/tubesync/sync/tests.py +++ b/tubesync/sync/tests.py @@ -1746,6 +1746,9 @@ class ResponseFilteringTestCase(TestCase): filtered = filter_response(self.media.loaded_metadata) self.assertIn('formats', unfiltered.keys()) self.assertIn('formats', filtered.keys()) + # filtered 'downloader_options' + self.assertIn('downloader_options', unfiltered['formats'][10].keys()) + self.assertNotIn('downloader_options', filtered['formats'][10].keys()) # filtered 'http_headers' self.assertIn('http_headers', unfiltered['formats'][0].keys()) self.assertNotIn('http_headers', filtered['formats'][0].keys()) @@ -1753,6 +1756,10 @@ class ResponseFilteringTestCase(TestCase): self.assertEqual(48, len(unfiltered['formats'])) self.assertEqual(48, len(filtered['formats'])) self.assertEqual(len(unfiltered['formats']), len(filtered['formats'])) + # did not remove everything with url + self.assertIn('original_url', unfiltered.keys()) + self.assertIn('original_url', filtered.keys()) + self.assertEqual(unfiltered['original_url'], filtered['original_url']) # did reduce the size of the metadata self.assertTrue(len(str(filtered)) < len(str(unfiltered))) From 29c39aab1f7096a7267c351cc3ebf0d786c98723 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 13:20:22 -0500 Subject: [PATCH 30/79] Add SHRINK_NEW_MEDIA_METADATA setting --- tubesync/sync/tasks.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/tasks.py b/tubesync/sync/tasks.py index 30f8c827..644918b7 100644 --- a/tubesync/sync/tasks.py +++ b/tubesync/sync/tasks.py @@ -8,6 +8,7 @@ import os import json import math import uuid +from copy import deepcopy from io import BytesIO from hashlib import sha1 from datetime import timedelta, datetime @@ -26,7 +27,7 @@ from common.errors import NoMediaException, DownloadFailedException from common.utils import json_serial from .models import Source, Media, MediaServer from .utils import 
(get_remote_image, resize_image_to_height, delete_file, - write_text_file) + write_text_file, filter_response) from .filtering import filter_media @@ -304,7 +305,11 @@ def download_media_metadata(media_id): return source = media.source metadata = media.index_metadata() - media.metadata = json.dumps(metadata, separators=(',', ':'), default=json_serial) + if getattr(settings, 'SHRINK_NEW_MEDIA_METADATA', False): + response = filter_response(deepcopy(metadata)) + else: + response = metadata + media.metadata = json.dumps(response, separators=(',', ':'), default=json_serial) upload_date = media.upload_date # Media must have a valid upload date if upload_date: From 0f986949e5ad18195de2265eae83f5360f6c5277 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 13:36:43 -0500 Subject: [PATCH 31/79] Have filter_response return a copy, if requested --- tubesync/sync/utils.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index b424528b..1d67af38 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -1,6 +1,7 @@ import os import re import math +from copy import deepcopy from operator import itemgetter from pathlib import Path from tempfile import NamedTemporaryFile @@ -189,13 +190,18 @@ def _drop_url_keys(arg_dict, key, filter_func): del val_dict[url_key] -def filter_response(response_dict): +def filter_response(arg_dict, copy_arg=False): ''' Clean up the response so as to not store useless metadata in the database. 
''' + response_dict = arg_dict # raise an exception for an unexpected argument type if not isinstance(response_dict, dict): raise TypeError(f'response_dict must be a dict, got "{type(response_dict)}"') + + if copy_arg: + response_dict = deepcopy(arg_dict) + # optimize the empty case if not response_dict: return response_dict From 274f19fa15547c1a9d76c967e4134ffafa822aa1 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 13:41:23 -0500 Subject: [PATCH 32/79] Use the new copy argument to filter_response --- tubesync/sync/tasks.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tubesync/sync/tasks.py b/tubesync/sync/tasks.py index 644918b7..ab92e2c8 100644 --- a/tubesync/sync/tasks.py +++ b/tubesync/sync/tasks.py @@ -8,7 +8,6 @@ import os import json import math import uuid -from copy import deepcopy from io import BytesIO from hashlib import sha1 from datetime import timedelta, datetime @@ -305,10 +304,9 @@ def download_media_metadata(media_id): return source = media.source metadata = media.index_metadata() + response = metadata if getattr(settings, 'SHRINK_NEW_MEDIA_METADATA', False): - response = filter_response(deepcopy(metadata)) - else: - response = metadata + response = filter_response(metadata, True) media.metadata = json.dumps(response, separators=(',', ':'), default=json_serial) upload_date = media.upload_date # Media must have a valid upload date From 1ff8dfda9897dd8c409feba2649b5ce15f5f7e32 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 13:53:12 -0500 Subject: [PATCH 33/79] Use the new copy argument to filter_response --- tubesync/sync/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 67453f03..10fbbdbd 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1154,7 +1154,7 @@ class Media(models.Model): data = json.loads(self.metadata or "") compact_json = json.dumps(data, separators=(',', ':'), default=json_serial) - 
filtered_data = filter_response(data) + filtered_data = filter_response(data, True) filtered_json = json.dumps(filtered_data, separators=(',', ':'), default=json_serial) except Exception as e: log.exception('reduce_data: %s', e) From 6292a9a59dc5d05db79241b9bd2d58f51be3cc6a Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 14:22:37 -0500 Subject: [PATCH 34/79] Add SHRINK_OLD_MEDIA_METADATA setting --- tubesync/sync/models.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 10fbbdbd..bb850af3 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1168,6 +1168,8 @@ class Media(models.Model): if old_mdl > new_mdl: delta = old_mdl - new_mdl log.info(f'{self.key}: metadata reduced by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})') + if getattr(settings, 'SHRINK_OLD_MEDIA_METADATA', False): + self.metadata = filtered_json @property From 81edd08c7d8ce8d0844b82751b730d2dc91ff4ac Mon Sep 17 00:00:00 2001 From: Makhuta Date: Sat, 11 Jan 2025 14:38:31 +0100 Subject: [PATCH 35/79] Update - added video order to Media Format --- tubesync/sync/models.py | 9 +++++++-- tubesync/sync/templates/sync/_mediaformatvars.html | 5 +++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 2037492d..8e37bdbe 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -589,6 +589,7 @@ class Source(models.Model): 'key': 'SoMeUnIqUiD', 'format': '-'.join(fmt), 'playlist_title': 'Some Playlist Title', + 'video_order': '1', 'ext': self.extension, 'resolution': self.source_resolution if self.source_resolution else '', 'height': '720' if self.source_resolution else '', @@ -1128,6 +1129,7 @@ class Media(models.Model): 'key': self.key, 'format': '-'.join(display_format['format']), 'playlist_title': self.playlist_title, + 'video_order': self.get_episode_str(), 'ext': self.source.extension, 'resolution': display_format['resolution'], 
'height': display_format['height'], @@ -1373,8 +1375,7 @@ class Media(models.Model): nfo.append(season) # episode = number of video in the year episode = nfo.makeelement('episode', {}) - episode_number = self.calculate_episode_number() - episode.text = str(episode_number) if episode_number else '' + episode.text = self.get_episode_str() episode.tail = '\n ' nfo.append(episode) # ratings = media metadata youtube rating @@ -1524,6 +1525,10 @@ class Media(models.Model): return position_counter position_counter += 1 + def get_episode_str(self): + episode_number = self.calculate_episode_number() + return f'{episode_number:02}' if episode_number else '' + class MediaServer(models.Model): ''' diff --git a/tubesync/sync/templates/sync/_mediaformatvars.html b/tubesync/sync/templates/sync/_mediaformatvars.html index 438b200a..06068f90 100644 --- a/tubesync/sync/templates/sync/_mediaformatvars.html +++ b/tubesync/sync/templates/sync/_mediaformatvars.html @@ -73,6 +73,11 @@ Playlist title of media, if it's in a playlist Some Playlist + + {video_order} + Episode order in playlist, if in playlist (can cause issues if playlist is changed after adding) + 01 + {ext} File extension From 8dda325dbd841535708ca8f8d58602d26080b019 Mon Sep 17 00:00:00 2001 From: Makhuta Date: Sat, 11 Jan 2025 15:53:36 +0100 Subject: [PATCH 36/79] Update models.py --- tubesync/sync/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 8e37bdbe..a5b7adbd 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -589,7 +589,7 @@ class Source(models.Model): 'key': 'SoMeUnIqUiD', 'format': '-'.join(fmt), 'playlist_title': 'Some Playlist Title', - 'video_order': '1', + 'video_order': '01', 'ext': self.extension, 'resolution': self.source_resolution if self.source_resolution else '', 'height': '720' if self.source_resolution else '', From 4364ebbff3cd2f8147206ce05c63745cda88406c Mon Sep 17 00:00:00 2001 From: tcely Date: 
Sat, 11 Jan 2025 11:53:10 -0500 Subject: [PATCH 37/79] Multi-stage docker build for ffmpeg & s6-overlay * Create a s6-overlay-extracted stage to copy from This was largely inspired by: @socheatsok78 Our downloaded files are checked where that version doesn't do any verification of the downloads. * Update ffmpeg to the first build with checksums.sha256 * Create a ffmpeg-extracted stage to copy from * Don't preserve ownership from the builder I was sick of the extra work with ffmpeg builds. So, I managed to get sums generated for those builds and now we don't need to manually fill out SHA256 hashes anymore. Now to bump ffmpeg, we can just change the date. --- Dockerfile | 286 +++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 202 insertions(+), 84 deletions(-) diff --git a/Dockerfile b/Dockerfile index 880dd677..a69609c5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,17 +1,202 @@ -FROM debian:bookworm-slim - -ARG TARGETARCH -ARG TARGETPLATFORM +ARG FFMPEG_DATE="2025-01-10-19-43" +ARG FFMPEG_VERSION="N-118280-g5cd49e1bfd" ARG S6_VERSION="3.2.0.2" + ARG SHA256_S6_AMD64="59289456ab1761e277bd456a95e737c06b03ede99158beb24f12b165a904f478" ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f66f785" ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae" -ARG FFMPEG_DATE="autobuild-2024-12-24-14-15" -ARG FFMPEG_VERSION="N-118163-g954d55c2a4" -ARG SHA256_FFMPEG_AMD64="798a7e5a0724139e6bb70df8921522b23be27028f9f551dfa83c305ec4ffaf3a" -ARG SHA256_FFMPEG_ARM64="c3e6cc0fec42cc7e3804014fbb02c1384a1a31ef13f6f9a36121f2e1216240c0" +ARG ALPINE_VERSION="latest" +ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION%%-*}" +ARG FFMPEG_SUFFIX_FILE=".tar.xz" + +FROM alpine:${ALPINE_VERSION} AS ffmpeg-download +ARG FFMPEG_DATE +ARG FFMPEG_VERSION +ARG FFMPEG_PREFIX_FILE +ARG FFMPEG_SUFFIX_FILE +ARG SHA256_FFMPEG_AMD64 +ARG SHA256_FFMPEG_ARM64 +ARG CHECKSUM_ALGORITHM="sha256" +ARG 
FFMPEG_CHECKSUM_AMD64="${SHA256_FFMPEG_AMD64}" +ARG FFMPEG_CHECKSUM_ARM64="${SHA256_FFMPEG_ARM64}" + +ARG FFMPEG_FILE_SUMS="checksums.${CHECKSUM_ALGORITHM}" +ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-${FFMPEG_DATE}" + +ARG DESTDIR="/downloaded" +ARG TARGETARCH +ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" +RUN set -eu ; \ + apk --no-cache --no-progress add cmd:aria2c cmd:awk ; \ +\ + aria2c_options() { \ + algorithm="${CHECKSUM_ALGORITHM%[0-9]??}" ; \ + bytes="${CHECKSUM_ALGORITHM#${algorithm}}" ; \ + hash="$( awk -v fn="${1##*/}" '$0 ~ fn"$" { print $1; exit; }' "${DESTDIR}/${FFMPEG_FILE_SUMS}" )" ; \ +\ + printf -- '\t%s\n' \ + 'allow-overwrite=true' \ + 'always-resume=false' \ + 'check-integrity=true' \ + "checksum=${algorithm}-${bytes}=${hash}" \ + 'max-connection-per-server=2' \ +; \ + printf -- '\n' ; \ + } ; \ +\ + decide_arch() { \ + case "${TARGETARCH}" in \ + (amd64) printf -- 'linux64' ;; \ + (arm64) printf -- 'linuxarm64' ;; \ + esac ; \ + } ; \ +\ + FFMPEG_ARCH="$(decide_arch)" ; \ + for url in $(awk ' \ + $2 ~ /^[*]?'"${FFMPEG_PREFIX_FILE}"'/ && /-'"${FFMPEG_ARCH}"'-/ { $1=""; print; } \ + ' "${DESTDIR}/${FFMPEG_FILE_SUMS}") ; \ + do \ + url="${FFMPEG_URL}/${url# }" ; \ + printf -- '%s\n' "${url}" ; \ + aria2c_options "${url}" ; \ + printf -- '\n' ; \ + done > /tmp/downloads ; \ + unset -v url ; \ +\ + aria2c --no-conf=true \ + --dir /downloaded \ + --lowest-speed-limit='16K' \ + --show-console-readout=false \ + --summary-interval=0 \ + --input-file /tmp/downloads ; \ +\ + apk --no-cache --no-progress add cmd:awk "cmd:${CHECKSUM_ALGORITHM}sum" ; \ +\ + decide_expected() { \ + case "${TARGETARCH}" in \ + (amd64) printf -- '%s' "${FFMPEG_CHECKSUM_AMD64}" ;; \ + (arm64) printf -- '%s' "${FFMPEG_CHECKSUM_ARM64}" ;; \ + esac ; \ + } ; \ +\ + FFMPEG_HASH="$(decide_expected)" ; \ +\ + cd "${DESTDIR}" ; \ + if [ -n "${FFMPEG_HASH}" ] ; \ + then \ + printf -- '%s *%s\n' "${FFMPEG_HASH}" 
"${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" >> /tmp/SUMS ; \ + "${CHECKSUM_ALGORITHM}sum" --check --strict /tmp/SUMS || exit ; \ + fi ; \ + "${CHECKSUM_ALGORITHM}sum" --check --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" ; \ +\ + mkdir -v -p "/verified/${TARGETARCH}" ; \ + ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" ; \ + rm -rf "${DESTDIR}" ; + +FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted +COPY --link --from=ffmpeg-download /verified /verified + +ARG FFMPEG_PREFIX_FILE +ARG FFMPEG_SUFFIX_FILE +ARG TARGETARCH +RUN set -eu ; \ + apk --no-cache --no-progress add cmd:tar cmd:xz ; \ +\ + mkdir -v /extracted ; \ + cd /extracted ; \ + set -x ; \ + tar -xp \ + --strip-components=2 \ + --no-anchored \ + --no-same-owner \ + -f "/verified/${TARGETARCH}"/"${FFMPEG_PREFIX_FILE}"*"${FFMPEG_SUFFIX_FILE}" \ + 'ffmpeg' 'ffprobe' ; \ +\ + ls -AlR /extracted ; + +FROM scratch AS s6-overlay-download +ARG S6_VERSION +ARG SHA256_S6_AMD64 +ARG SHA256_S6_ARM64 +ARG SHA256_S6_NOARCH + +ARG DESTDIR="/downloaded" +ARG CHECKSUM_ALGORITHM="sha256" + +ARG S6_CHECKSUM_AMD64="${CHECKSUM_ALGORITHM}:${SHA256_S6_AMD64}" +ARG S6_CHECKSUM_ARM64="${CHECKSUM_ALGORITHM}:${SHA256_S6_ARM64}" +ARG S6_CHECKSUM_NOARCH="${CHECKSUM_ALGORITHM}:${SHA256_S6_NOARCH}" + +ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}" +ARG S6_PREFIX_FILE="s6-overlay-" +ARG S6_SUFFIX_FILE=".tar.xz" + +ARG S6_FILE_AMD64="${S6_PREFIX_FILE}x86_64${S6_SUFFIX_FILE}" +ARG S6_FILE_ARM64="${S6_PREFIX_FILE}aarch64${S6_SUFFIX_FILE}" +ARG S6_FILE_NOARCH="${S6_PREFIX_FILE}noarch${S6_SUFFIX_FILE}" + +ADD "${S6_OVERLAY_URL}/${S6_FILE_AMD64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" +ADD "${S6_OVERLAY_URL}/${S6_FILE_ARM64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" +ADD "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" + +ADD --checksum="${S6_CHECKSUM_AMD64}" 
"${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/" +ADD --checksum="${S6_CHECKSUM_ARM64}" "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/" +ADD --checksum="${S6_CHECKSUM_NOARCH}" "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/" + +FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted +COPY --link --from=s6-overlay-download /downloaded /downloaded + +ARG TARGETARCH + +RUN set -eu ; \ +\ + decide_arch() { \ + local arg1 ; \ + arg1="${1:-$(uname -m)}" ; \ +\ + case "${arg1}" in \ + (amd64) printf -- 'x86_64' ;; \ + (arm64) printf -- 'aarch64' ;; \ + (armv7l) printf -- 'arm' ;; \ + (*) printf -- '%s' "${arg1}" ;; \ + esac ; \ + unset -v arg1 ; \ + } ; \ +\ + mkdir -v /verified ; \ + cd /downloaded ; \ + for f in *.sha256 ; \ + do \ + sha256sum -c < "${f}" || exit ; \ + ln -v "${f%.sha256}" /verified/ || exit ; \ + done ; \ + unset -v f ; \ +\ + S6_ARCH="$(decide_arch "${TARGETARCH}")" ; \ + set -x ; \ + mkdir -v /s6-overlay-rootfs ; \ + cd /s6-overlay-rootfs ; \ + for f in /verified/*.tar* ; \ + do \ + case "${f}" in \ + (*-noarch.tar*|*-"${S6_ARCH}".tar*) \ + tar -xpf "${f}" || exit ;; \ + esac ; \ + done ; \ + set +x ; \ + unset -v f ; + +FROM debian:bookworm-slim AS tubesync + +ARG TARGETARCH +ARG TARGETPLATFORM + +ARG S6_VERSION + +ARG FFMPEG_DATE +ARG FFMPEG_VERSION ENV S6_VERSION="${S6_VERSION}" \ FFMPEG_DATE="${FFMPEG_DATE}" \ @@ -26,89 +211,20 @@ ENV DEBIAN_FRONTEND="noninteractive" \ S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0" # Install third party software +COPY --link --from=s6-overlay-extracted /s6-overlay-rootfs / +COPY --link --from=ffmpeg-extracted /extracted /usr/local/bin/ + # Reminder: the SHELL handles all variables -RUN decide_arch() { \ - case "${TARGETARCH:=amd64}" in \ - (arm64) printf -- 'aarch64' ;; \ - (*) printf -- '%s' "${TARGETARCH}" ;; \ - esac ; \ - } && \ - decide_expected() { \ - case "${1}" in \ - (ffmpeg) case "${2}" in \ - (amd64) printf -- '%s' "${SHA256_FFMPEG_AMD64}" ;; \ - (arm64) printf -- '%s' "${SHA256_FFMPEG_ARM64}" ;; \ - esac 
;; \ - (s6) case "${2}" in \ - (amd64) printf -- '%s' "${SHA256_S6_AMD64}" ;; \ - (arm64) printf -- '%s' "${SHA256_S6_ARM64}" ;; \ - (noarch) printf -- '%s' "${SHA256_S6_NOARCH}" ;; \ - esac ;; \ - esac ; \ - } && \ - decide_url() { \ - case "${1}" in \ - (ffmpeg) printf -- \ - 'https://github.com/yt-dlp/FFmpeg-Builds/releases/download/%s/ffmpeg-%s-linux%s-gpl%s.tar.xz' \ - "${FFMPEG_DATE}" \ - "${FFMPEG_VERSION}" \ - "$(case "${2}" in \ - (amd64) printf -- '64' ;; \ - (*) printf -- '%s' "${2}" ;; \ - esac)" \ - "$(case "${FFMPEG_VERSION%%-*}" in \ - (n*) printf -- '-%s\n' "${FFMPEG_VERSION#n}" | cut -d '-' -f 1,2 ;; \ - (*) printf -- '' ;; \ - esac)" ;; \ - (s6) printf -- \ - 'https://github.com/just-containers/s6-overlay/releases/download/v%s/s6-overlay-%s.tar.xz' \ - "${S6_VERSION}" \ - "$(case "${2}" in \ - (amd64) printf -- 'x86_64' ;; \ - (arm64) printf -- 'aarch64' ;; \ - (*) printf -- '%s' "${2}" ;; \ - esac)" ;; \ - esac ; \ - } && \ - verify_download() { \ - while [ $# -ge 2 ] ; do \ - sha256sum "${2}" ; \ - printf -- '%s %s\n' "${1}" "${2}" | sha256sum -c || return ; \ - shift ; shift ; \ - done ; \ - } && \ - download_expected_file() { \ - local arg1 expected file url ; \ - arg1="$(printf -- '%s\n' "${1}" | awk '{print toupper($0);}')" ; \ - expected="$(decide_expected "${1}" "${2}")" ; \ - file="${3}" ; \ - url="$(decide_url "${1}" "${2}")" ; \ - printf -- '%s\n' \ - "Building for arch: ${2}|${ARCH}, downloading ${arg1} from: ${url}, expecting ${arg1} SHA256: ${expected}" && \ - rm -rf "${file}" && \ - curl --disable --output "${file}" --clobber --location --no-progress-meter --url "${url}" && \ - verify_download "${expected}" "${file}" ; \ - } && \ - export ARCH="$(decide_arch)" && \ - set -x && \ +RUN set -x && \ apt-get update && \ apt-get -y --no-install-recommends install locales && \ printf -- "en_US.UTF-8 UTF-8\n" > /etc/locale.gen && \ locale-gen en_US.UTF-8 && \ # Install required distro packages apt-get -y --no-install-recommends install curl 
ca-certificates file binutils xz-utils && \ - # Install s6 - _file="/tmp/s6-overlay-noarch.tar.xz" && \ - download_expected_file s6 noarch "${_file}" && \ - tar -C / -xpf "${_file}" && rm -f "${_file}" && \ - _file="/tmp/s6-overlay-${ARCH}.tar.xz" && \ - download_expected_file s6 "${TARGETARCH}" "${_file}" && \ - tar -C / -xpf "${_file}" && rm -f "${_file}" && \ + # Installed s6 (using COPY earlier) file -L /command/s6-overlay-suexec && \ - # Install ffmpeg - _file="/tmp/ffmpeg-${ARCH}.tar.xz" && \ - download_expected_file ffmpeg "${TARGETARCH}" "${_file}" && \ - tar -xvvpf "${_file}" --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" "ffprobe" && rm -f "${_file}" && \ + # Installed ffmpeg (using COPY earlier) /usr/local/bin/ffmpeg -version && \ file /usr/local/bin/ff* && \ # Clean up @@ -154,7 +270,9 @@ ENV PIP_NO_COMPILE=1 \ WORKDIR /app # Set up the app -RUN set -x && \ +#BuildKit#RUN --mount=type=bind,source=Pipfile,target=/app/Pipfile \ +RUN \ + set -x && \ apt-get update && \ # Install required build packages apt-get -y --no-install-recommends install \ From f464acaa6331913abd5bb341344568f6d9eb73fc Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 15:38:45 -0500 Subject: [PATCH 38/79] Simplify directory_path for Media --- tubesync/sync/models.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 2037492d..ad17258c 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1263,8 +1263,7 @@ class Media(models.Model): @property def directory_path(self): - dirname = self.source.directory_path / self.filename - return dirname.parent + return self.filepath.parent @property def filepath(self): From 3ea7e6c8ee0ab7631507938734c255ec9116c2bb Mon Sep 17 00:00:00 2001 From: Makhuta Date: Sat, 11 Jan 2025 22:07:36 +0100 Subject: [PATCH 39/79] Change - changed the episode_str to be togglable and use the old format by default --- tubesync/sync/models.py | 9 ++++++--- 
1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index a5b7adbd..d22cdb57 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1129,7 +1129,7 @@ class Media(models.Model): 'key': self.key, 'format': '-'.join(display_format['format']), 'playlist_title': self.playlist_title, - 'video_order': self.get_episode_str(), + 'video_order': self.get_episode_str(True), 'ext': self.source.extension, 'resolution': display_format['resolution'], 'height': display_format['height'], @@ -1525,9 +1525,12 @@ class Media(models.Model): return position_counter position_counter += 1 - def get_episode_str(self): + def get_episode_str(self, use_padding=False): episode_number = self.calculate_episode_number() - return f'{episode_number:02}' if episode_number else '' + if use_padding: + return f'{episode_number:02}' if episode_number else '' + + return str(episode_number) if episode_number else '' class MediaServer(models.Model): From df4b824672bcc00442064ced9de6b86e05a505ea Mon Sep 17 00:00:00 2001 From: Makhuta Date: Sat, 11 Jan 2025 22:17:10 +0100 Subject: [PATCH 40/79] Change - simplified the returns --- tubesync/sync/models.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index d22cdb57..66bb0481 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1527,10 +1527,13 @@ class Media(models.Model): def get_episode_str(self, use_padding=False): episode_number = self.calculate_episode_number() + if not episode_number: + return '' + if use_padding: - return f'{episode_number:02}' if episode_number else '' + return f'{episode_number:02}' - return str(episode_number) if episode_number else '' + return str(episode_number) class MediaServer(models.Model): From ef4181c2c42239512c811bdb6fb456bdcb0289cd Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 12 Jan 2025 00:37:30 -0500 Subject: [PATCH 41/79] Dockerfile syntax and checks - 
Specify the syntax be the latest stable version and that failed checks should stop the build. ``` By default, builds with failing build checks exit with a zero status code despite warnings. To make the build fail on warnings, set #check=error=true. ``` - Use the form of health checking that doesn't involve an extra shell on every check. --- Dockerfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 880dd677..c63e24d2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,6 @@ +# syntax=docker/dockerfile:1 +# check=error=true + FROM debian:bookworm-slim ARG TARGETARCH @@ -228,7 +231,7 @@ RUN set -x && \ COPY config/root / # Create a healthcheck -HEALTHCHECK --interval=1m --timeout=10s CMD /app/healthcheck.py http://127.0.0.1:8080/healthcheck +HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"] # ENVS and ports ENV PYTHONPATH="/app" PYTHONPYCACHEPREFIX="/config/cache/pycache" From 5e5d011b640be82d7c5d7d749f1801be787c46bf Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 12 Jan 2025 00:49:19 -0500 Subject: [PATCH 42/79] Add parser directives This hopefully helps anyone building on an older docker, such as Debian / Ubuntu packaged versions. --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index a69609c5..f7a26bb3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,6 @@ +# syntax=docker/dockerfile:1 +# check=error=true + ARG FFMPEG_DATE="2025-01-10-19-43" ARG FFMPEG_VERSION="N-118280-g5cd49e1bfd" From 2860147212fb21087d699761d959727aae1c707a Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 12 Jan 2025 04:46:32 -0500 Subject: [PATCH 43/79] Build on older docker also * Do without --link for COPY or ADD * Do without --checksum for ADD * Trim the FFMPEG_VERSION variable with cut instead I've built successfully on old Debian systems using these changes. Everything else I use has a newer docker on it. 
--- Dockerfile | 74 ++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 50 insertions(+), 24 deletions(-) diff --git a/Dockerfile b/Dockerfile index f7a26bb3..d0107385 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,9 +11,12 @@ ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f6 ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae" ARG ALPINE_VERSION="latest" -ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION%%-*}" +ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION}" ARG FFMPEG_SUFFIX_FILE=".tar.xz" +ARG FFMPEG_CHECKSUM_ALGORITHM="sha256" +ARG S6_CHECKSUM_ALGORITHM="sha256" + FROM alpine:${ALPINE_VERSION} AS ffmpeg-download ARG FFMPEG_DATE ARG FFMPEG_VERSION @@ -21,7 +24,8 @@ ARG FFMPEG_PREFIX_FILE ARG FFMPEG_SUFFIX_FILE ARG SHA256_FFMPEG_AMD64 ARG SHA256_FFMPEG_ARM64 -ARG CHECKSUM_ALGORITHM="sha256" +ARG FFMPEG_CHECKSUM_ALGORITHM +ARG CHECKSUM_ALGORITHM="${FFMPEG_CHECKSUM_ALGORITHM}" ARG FFMPEG_CHECKSUM_AMD64="${SHA256_FFMPEG_AMD64}" ARG FFMPEG_CHECKSUM_ARM64="${SHA256_FFMPEG_ARM64}" @@ -57,6 +61,7 @@ RUN set -eu ; \ } ; \ \ FFMPEG_ARCH="$(decide_arch)" ; \ + FFMPEG_PREFIX_FILE="$( printf -- '%s' "${FFMPEG_PREFIX_FILE}" | cut -d '-' -f 1,2 )" ; \ for url in $(awk ' \ $2 ~ /^[*]?'"${FFMPEG_PREFIX_FILE}"'/ && /-'"${FFMPEG_ARCH}"'-/ { $1=""; print; } \ ' "${DESTDIR}/${FFMPEG_FILE_SUMS}") ; \ @@ -75,7 +80,7 @@ RUN set -eu ; \ --summary-interval=0 \ --input-file /tmp/downloads ; \ \ - apk --no-cache --no-progress add cmd:awk "cmd:${CHECKSUM_ALGORITHM}sum" ; \ + apk --no-cache --no-progress add "cmd:${CHECKSUM_ALGORITHM}sum" ; \ \ decide_expected() { \ case "${TARGETARCH}" in \ @@ -90,43 +95,44 @@ RUN set -eu ; \ if [ -n "${FFMPEG_HASH}" ] ; \ then \ printf -- '%s *%s\n' "${FFMPEG_HASH}" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" >> /tmp/SUMS ; \ - "${CHECKSUM_ALGORITHM}sum" --check --strict /tmp/SUMS || exit ; \ + "${CHECKSUM_ALGORITHM}sum" --check --warn --strict 
/tmp/SUMS || exit ; \ fi ; \ - "${CHECKSUM_ALGORITHM}sum" --check --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" ; \ + "${CHECKSUM_ALGORITHM}sum" --check --warn --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" ; \ \ mkdir -v -p "/verified/${TARGETARCH}" ; \ ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" ; \ rm -rf "${DESTDIR}" ; FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted -COPY --link --from=ffmpeg-download /verified /verified +COPY --from=ffmpeg-download /verified /verified ARG FFMPEG_PREFIX_FILE ARG FFMPEG_SUFFIX_FILE ARG TARGETARCH -RUN set -eu ; \ - apk --no-cache --no-progress add cmd:tar cmd:xz ; \ -\ +RUN set -eux ; \ mkdir -v /extracted ; \ cd /extracted ; \ - set -x ; \ - tar -xp \ + ln -s "/verified/${TARGETARCH}"/"${FFMPEG_PREFIX_FILE}"*"${FFMPEG_SUFFIX_FILE}" "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" ; \ + tar -tf "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" | grep '/bin/\(ffmpeg\|ffprobe\)' > /tmp/files ; \ + tar -xop \ --strip-components=2 \ - --no-anchored \ - --no-same-owner \ - -f "/verified/${TARGETARCH}"/"${FFMPEG_PREFIX_FILE}"*"${FFMPEG_SUFFIX_FILE}" \ - 'ffmpeg' 'ffprobe' ; \ + -f "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" \ + -T /tmp/files ; \ \ ls -AlR /extracted ; -FROM scratch AS s6-overlay-download +FROM scratch AS ffmpeg +COPY --from=ffmpeg-extracted /extracted /usr/local/bin/ + +FROM alpine:${ALPINE_VERSION} AS s6-overlay-download ARG S6_VERSION ARG SHA256_S6_AMD64 ARG SHA256_S6_ARM64 ARG SHA256_S6_NOARCH ARG DESTDIR="/downloaded" -ARG CHECKSUM_ALGORITHM="sha256" +ARG S6_CHECKSUM_ALGORITHM +ARG CHECKSUM_ALGORITHM="${S6_CHECKSUM_ALGORITHM}" ARG S6_CHECKSUM_AMD64="${CHECKSUM_ALGORITHM}:${SHA256_S6_AMD64}" ARG S6_CHECKSUM_ARM64="${CHECKSUM_ALGORITHM}:${SHA256_S6_ARM64}" @@ -144,12 +150,28 @@ ADD "${S6_OVERLAY_URL}/${S6_FILE_AMD64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" ADD "${S6_OVERLAY_URL}/${S6_FILE_ARM64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" ADD 
"${S6_OVERLAY_URL}/${S6_FILE_NOARCH}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" -ADD --checksum="${S6_CHECKSUM_AMD64}" "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/" -ADD --checksum="${S6_CHECKSUM_ARM64}" "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/" -ADD --checksum="${S6_CHECKSUM_NOARCH}" "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/" +##ADD --checksum="${S6_CHECKSUM_AMD64}" "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/" +##ADD --checksum="${S6_CHECKSUM_ARM64}" "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/" +##ADD --checksum="${S6_CHECKSUM_NOARCH}" "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/" + +# --checksum wasn't recognized, so use busybox to check the sums instead +ADD "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/" +RUN set -eu ; checksum="${S6_CHECKSUM_AMD64}" ; file="${S6_FILE_AMD64}" ; cd "${DESTDIR}/" && \ + printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw + +ADD "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/" +RUN set -eu ; checksum="${S6_CHECKSUM_ARM64}" ; file="${S6_FILE_ARM64}" ; cd "${DESTDIR}/" && \ + printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw + +ADD "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/" +RUN set -eu ; checksum="${S6_CHECKSUM_NOARCH}" ; file="${S6_FILE_NOARCH}" ; cd "${DESTDIR}/" && \ + printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted -COPY --link --from=s6-overlay-download /downloaded /downloaded +COPY --from=s6-overlay-download /downloaded /downloaded + +ARG S6_CHECKSUM_ALGORITHM +ARG CHECKSUM_ALGORITHM="${S6_CHECKSUM_ALGORITHM}" ARG TARGETARCH @@ -168,11 +190,12 @@ RUN set -eu ; \ unset -v arg1 ; \ } ; \ \ + apk --no-cache --no-progress add "cmd:${CHECKSUM_ALGORITHM}sum" ; \ mkdir -v /verified ; \ cd /downloaded ; \ for f in *.sha256 ; \ do \ - 
sha256sum -c < "${f}" || exit ; \ + "${CHECKSUM_ALGORITHM}sum" --check --warn --strict "${f}" || exit ; \ ln -v "${f%.sha256}" /verified/ || exit ; \ done ; \ unset -v f ; \ @@ -191,6 +214,9 @@ RUN set -eu ; \ set +x ; \ unset -v f ; +FROM scratch AS s6-overlay +COPY --from=s6-overlay-extracted /s6-overlay-rootfs / + FROM debian:bookworm-slim AS tubesync ARG TARGETARCH @@ -214,8 +240,8 @@ ENV DEBIAN_FRONTEND="noninteractive" \ S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0" # Install third party software -COPY --link --from=s6-overlay-extracted /s6-overlay-rootfs / -COPY --link --from=ffmpeg-extracted /extracted /usr/local/bin/ +COPY --from=s6-overlay / / +COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables RUN set -x && \ From 45d7039188c746e9726562808caa7ed8bbc5f6ee Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 14 Jan 2025 05:34:59 -0500 Subject: [PATCH 44/79] Only log the extra messages with the new setting --- tubesync/sync/models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index bb850af3..a65abdf8 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1174,7 +1174,8 @@ class Media(models.Model): @property def loaded_metadata(self): - self.reduce_data + if getattr(settings, 'SHRINK_OLD_MEDIA_METADATA', False): + self.reduce_data try: data = json.loads(self.metadata) if not isinstance(data, dict): From ebf9ff8ebae8ef7b6eec0a0d64b6352c269d4bbf Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 15 Jan 2025 04:26:55 -0500 Subject: [PATCH 45/79] Add environment variables for container - TUBESYNC_SHRINK_NEW - TUBESYNC_SHRINK_OLD These must be set to the word 'True' (case insensitive) to enable the setting. 
--- tubesync/tubesync/local_settings.py.container | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tubesync/tubesync/local_settings.py.container b/tubesync/tubesync/local_settings.py.container index e75778b8..20f55098 100644 --- a/tubesync/tubesync/local_settings.py.container +++ b/tubesync/tubesync/local_settings.py.container @@ -87,6 +87,11 @@ SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR = os.getenv('TUBESYNC_DIRECTORY_PREFIX', 'T SOURCE_DOWNLOAD_DIRECTORY_PREFIX = True if SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR == 'true' else False +SHRINK_NEW_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_NEW', 'false').strip().lower() +SHRINK_NEW_MEDIA_METADATA = ( 'true' == SHRINK_NEW_MEDIA_METADATA_STR ) +SHRINK_OLD_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_OLD', 'false').strip().lower() +SHRINK_OLD_MEDIA_METADATA = ( 'true' == SHRINK_OLD_MEDIA_METADATA_STR ) + VIDEO_HEIGHT_CUTOFF = int(os.getenv("TUBESYNC_VIDEO_HEIGHT_CUTOFF", "240")) From d349bd55c4d883353622884126db8e7060a4b298 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 15 Jan 2025 04:43:58 -0500 Subject: [PATCH 46/79] Avoid env for healthcheck This is only intended for use in containers, so we know where python3 should be installed. This is called very often, so we should try to use as few resources as we can. 
--- tubesync/healthcheck.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/healthcheck.py b/tubesync/healthcheck.py index 840da640..5bc127b0 100755 --- a/tubesync/healthcheck.py +++ b/tubesync/healthcheck.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/python3 ''' Perform an HTTP request to a URL and exit with an exit code of 1 if the From 35f6a54823bd598b8c076ad6dd935919865d5235 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 15 Jan 2025 06:17:33 -0500 Subject: [PATCH 47/79] Balance blank lines --- tubesync/tubesync/local_settings.py.container | 1 + 1 file changed, 1 insertion(+) diff --git a/tubesync/tubesync/local_settings.py.container b/tubesync/tubesync/local_settings.py.container index 20f55098..0114e76d 100644 --- a/tubesync/tubesync/local_settings.py.container +++ b/tubesync/tubesync/local_settings.py.container @@ -92,6 +92,7 @@ SHRINK_NEW_MEDIA_METADATA = ( 'true' == SHRINK_NEW_MEDIA_METADATA_STR ) SHRINK_OLD_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_OLD', 'false').strip().lower() SHRINK_OLD_MEDIA_METADATA = ( 'true' == SHRINK_OLD_MEDIA_METADATA_STR ) + VIDEO_HEIGHT_CUTOFF = int(os.getenv("TUBESYNC_VIDEO_HEIGHT_CUTOFF", "240")) From 0bf72fd5f061c9c851aa4ae8f055e8d23eae68b3 Mon Sep 17 00:00:00 2001 From: FaySmash <30392780+FaySmash@users.noreply.github.com> Date: Wed, 15 Jan 2025 18:46:05 +0100 Subject: [PATCH 48/79] Update README.md Added a small section about potential permission issues with volumes. --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index a01f9830..e49a30c7 100644 --- a/README.md +++ b/README.md @@ -138,6 +138,8 @@ services: - PGID=1000 ``` +> [!IMPORTANT] +> If the `/downloads` directory is mounted to a volume which points to a remote storage, make sure to suppy the `UID` and `GID` parameters in the driver options, to match the `PUID` and `PGID` specified as environment variables to prevent permission issues. 
[See this issue for details](https://github.com/meeb/tubesync/issues/616#issuecomment-2593458282) ## Optional authentication From af0aae3de4ef85513de39201a0e6d94310a92f5f Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 16 Jan 2025 01:18:20 -0500 Subject: [PATCH 49/79] Don't write 'None' in default rating It's better to not have a rating than to create parsing problems. An example of what is avoided: ``` None 16781 ``` --- tubesync/sync/models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 14ce4cf0..59d9e6d8 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1426,7 +1426,8 @@ class Media(models.Model): rating.tail = '\n ' ratings = nfo.makeelement('ratings', {}) ratings.text = '\n ' - ratings.append(rating) + if self.rating is not None: + ratings.append(rating) ratings.tail = '\n ' nfo.append(ratings) # plot = media metadata description From 57417915bf9357c170ba5748dde2d812c61dad4a Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 16 Jan 2025 01:27:41 -0500 Subject: [PATCH 50/79] lowercase the true value I doubt it's unsupported by any parser, but every example uses lowercase for this, we may as well also. 
--- tubesync/sync/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 59d9e6d8..44a94454 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1418,7 +1418,7 @@ class Media(models.Model): rating_attrs = OrderedDict() rating_attrs['name'] = 'youtube' rating_attrs['max'] = '5' - rating_attrs['default'] = 'True' + rating_attrs['default'] = 'true' rating = nfo.makeelement('rating', rating_attrs) rating.text = '\n ' rating.append(value) From ab7b601ad27e680e9aff58e814195563719fc2ce Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 16 Jan 2025 01:31:07 -0500 Subject: [PATCH 51/79] Don't write zero into MPAA in .nfo --- tubesync/sync/models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 14ce4cf0..546828f1 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1443,7 +1443,8 @@ class Media(models.Model): mpaa = nfo.makeelement('mpaa', {}) mpaa.text = str(self.age_limit) mpaa.tail = '\n ' - nfo.append(mpaa) + if self.age_limit and self.age_limit > 0: + nfo.append(mpaa) # runtime = media metadata duration in seconds runtime = nfo.makeelement('runtime', {}) runtime.text = str(self.duration) From 6f00ce812061318cdd50d8808332a971d8bbf201 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 16 Jan 2025 03:01:10 -0500 Subject: [PATCH 52/79] Create upgrade_yt-dlp.sh --- tubesync/upgrade_yt-dlp.sh | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 tubesync/upgrade_yt-dlp.sh diff --git a/tubesync/upgrade_yt-dlp.sh b/tubesync/upgrade_yt-dlp.sh new file mode 100644 index 00000000..e4fdd171 --- /dev/null +++ b/tubesync/upgrade_yt-dlp.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +pip3() { + local pip_whl + pip_whl="$(ls -1r /usr/share/python-wheels/pip-*-py3-none-any.whl | head -n 1)" + + python3 "${pip_whl}/pip" "$@" +} + +pip3 install --upgrade --break-system-packages yt-dlp + From 
d7f9fa45ecb2d94fe48ea2778538fea2a961ca83 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 16 Jan 2025 08:16:31 +0000 Subject: [PATCH 53/79] Add executable to sh script --- tubesync/upgrade_yt-dlp.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 tubesync/upgrade_yt-dlp.sh diff --git a/tubesync/upgrade_yt-dlp.sh b/tubesync/upgrade_yt-dlp.sh old mode 100644 new mode 100755 From 862c17b67656980ed852f0c88a71f67c41be5990 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 16 Jan 2025 03:30:06 -0500 Subject: [PATCH 54/79] Bump ffmpeg & yt-dlp --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 554e0aaf..a83fa1da 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,8 @@ # syntax=docker/dockerfile:1 # check=error=true -ARG FFMPEG_DATE="2025-01-10-19-43" -ARG FFMPEG_VERSION="N-118280-g5cd49e1bfd" +ARG FFMPEG_DATE="2025-01-15-14-13" +ARG FFMPEG_VERSION="N-118315-g4f3c9f2f03" ARG S6_VERSION="3.2.0.2" From 4e51d54ec122dc10550be77618250276e6a7b4b2 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 16 Jan 2025 08:24:11 -0500 Subject: [PATCH 55/79] Use pip runner from pipenv --- tubesync/upgrade_yt-dlp.sh | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tubesync/upgrade_yt-dlp.sh b/tubesync/upgrade_yt-dlp.sh index e4fdd171..21d51564 100755 --- a/tubesync/upgrade_yt-dlp.sh +++ b/tubesync/upgrade_yt-dlp.sh @@ -1,10 +1,16 @@ #!/usr/bin/env bash pip3() { - local pip_whl - pip_whl="$(ls -1r /usr/share/python-wheels/pip-*-py3-none-any.whl | head -n 1)" + local pip_runner pip_whl run_whl - python3 "${pip_whl}/pip" "$@" + # pipenv + pip_runner='/usr/lib/python3/dist-packages/pipenv/patched/pip/__pip-runner__.py' + + # python3-pip-whl + pip_whl="$(ls -1r /usr/share/python-wheels/pip-*-py3-none-any.whl | head -n 1)" + run_whl="${pip_whl}/pip" + + python3 "${pip_runner}" "$@" } pip3 install --upgrade --break-system-packages yt-dlp From 
ccdd43845c0ae28ee803e75cd3781bb04df2113c Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 12:46:11 -0500 Subject: [PATCH 56/79] Cache for pipenv & apt - Consolidate apk & apt commands - Consolidate ENV layers - Consolidate RUN layers - Remove unused variables - Remove packages that are no longer needed - Bind mount /app/Pipfile `/cache` is now a `tmpfs` mount that has a `cache` mount on top for `pipenv` to use. `apt` has `cache` mounts in the standard places: - /var/lib/apt - /var/cache/apt --- Dockerfile | 106 +++++++++++++++++++++++++---------------------------- 1 file changed, 50 insertions(+), 56 deletions(-) diff --git a/Dockerfile b/Dockerfile index a83fa1da..4a366e96 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,12 +11,15 @@ ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f6 ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae" ARG ALPINE_VERSION="latest" +ARG DEBIAN_VERSION="bookworm-slim" + ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION}" ARG FFMPEG_SUFFIX_FILE=".tar.xz" ARG FFMPEG_CHECKSUM_ALGORITHM="sha256" ARG S6_CHECKSUM_ALGORITHM="sha256" + FROM alpine:${ALPINE_VERSION} AS ffmpeg-download ARG FFMPEG_DATE ARG FFMPEG_VERSION @@ -36,7 +39,7 @@ ARG DESTDIR="/downloaded" ARG TARGETARCH ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" RUN set -eu ; \ - apk --no-cache --no-progress add cmd:aria2c cmd:awk ; \ + apk --no-cache --no-progress add cmd:aria2c cmd:awk "cmd:${CHECKSUM_ALGORITHM}sum" ; \ \ aria2c_options() { \ algorithm="${CHECKSUM_ALGORITHM%[0-9]??}" ; \ @@ -80,8 +83,6 @@ RUN set -eu ; \ --summary-interval=0 \ --input-file /tmp/downloads ; \ \ - apk --no-cache --no-progress add "cmd:${CHECKSUM_ALGORITHM}sum" ; \ -\ decide_expected() { \ case "${TARGETARCH}" in \ (amd64) printf -- '%s' "${FFMPEG_CHECKSUM_AMD64}" ;; \ @@ -217,54 +218,53 @@ RUN set -eu ; \ FROM scratch AS s6-overlay COPY --from=s6-overlay-extracted /s6-overlay-rootfs / -FROM debian:bookworm-slim AS 
tubesync - -ARG TARGETARCH -ARG TARGETPLATFORM +FROM debian:${DEBIAN_VERSION} AS tubesync ARG S6_VERSION ARG FFMPEG_DATE ARG FFMPEG_VERSION -ENV S6_VERSION="${S6_VERSION}" \ - FFMPEG_DATE="${FFMPEG_DATE}" \ - FFMPEG_VERSION="${FFMPEG_VERSION}" - ENV DEBIAN_FRONTEND="noninteractive" \ - HOME="/root" \ - LANGUAGE="en_US.UTF-8" \ - LANG="en_US.UTF-8" \ - LC_ALL="en_US.UTF-8" \ - TERM="xterm" \ - S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0" + HOME="/root" \ + LANGUAGE="en_US.UTF-8" \ + LANG="en_US.UTF-8" \ + LC_ALL="en_US.UTF-8" \ + TERM="xterm" \ + # Do not include compiled byte-code + PIP_NO_COMPILE=1 \ + PIP_ROOT_USER_ACTION='ignore' \ + S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0" + +ENV S6_VERSION="${S6_VERSION}" \ + FFMPEG_DATE="${FFMPEG_DATE}" \ + FFMPEG_VERSION="${FFMPEG_VERSION}" # Install third party software COPY --from=s6-overlay / / COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables -RUN set -x && \ +RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ + --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ + set -x && \ + # Update from the network and keep cache + rm -f /etc/apt/apt.conf.d/docker-clean && \ apt-get update && \ + # Install locales apt-get -y --no-install-recommends install locales && \ printf -- "en_US.UTF-8 UTF-8\n" > /etc/locale.gen && \ locale-gen en_US.UTF-8 && \ - # Install required distro packages - apt-get -y --no-install-recommends install curl ca-certificates file binutils xz-utils && \ + # Install file + apt-get -y --no-install-recommends install file && \ # Installed s6 (using COPY earlier) file -L /command/s6-overlay-suexec && \ # Installed ffmpeg (using COPY earlier) /usr/local/bin/ffmpeg -version && \ file /usr/local/bin/ff* && \ - # Clean up - apt-get -y autoremove --purge file binutils xz-utils && \ - rm -rf /var/lib/apt/lists/* && \ - rm -rf /var/cache/apt/* && \ - rm -rf /tmp/* - -# Install dependencies we keep -RUN set -x && \ - apt-get update && 
\ + # Clean up file + apt-get -y autoremove --purge file && \ + # Install dependencies we keep # Install required distro packages apt-get -y --no-install-recommends install \ libjpeg62-turbo \ @@ -279,29 +279,27 @@ RUN set -x && \ redis-server \ curl \ less \ - && apt-get -y autoclean && \ - rm -rf /var/lib/apt/lists/* && \ - rm -rf /var/cache/apt/* && \ + && \ + # Clean up + apt-get -y autopurge && \ + apt-get -y autoclean && \ rm -rf /tmp/* # Copy over pip.conf to use piwheels COPY pip.conf /etc/pip.conf -# Add Pipfile -COPY Pipfile /app/Pipfile - -# Do not include compiled byte-code -ENV PIP_NO_COMPILE=1 \ - PIP_NO_CACHE_DIR=1 \ - PIP_ROOT_USER_ACTION='ignore' - # Switch workdir to the the app WORKDIR /app # Set up the app -#BuildKit#RUN --mount=type=bind,source=Pipfile,target=/app/Pipfile \ -RUN \ +RUN --mount=type=tmpfs,target=/cache \ + --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ + --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ + --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ + --mount=type=bind,source=Pipfile,target=/app/Pipfile \ set -x && \ + # Update from the network and keep cache + rm -f /etc/apt/apt.conf.d/docker-clean && \ apt-get update && \ # Install required build packages apt-get -y --no-install-recommends install \ @@ -322,10 +320,11 @@ RUN \ useradd -M -d /app -s /bin/false -g app app && \ # Install non-distro packages cp -at /tmp/ "${HOME}" && \ - PIPENV_VERBOSITY=64 HOME="/tmp/${HOME#/}" pipenv install --system --skip-lock && \ + HOME="/tmp/${HOME#/}" \ + XDG_CACHE_HOME='/cache' \ + PIPENV_VERBOSITY=64 \ + pipenv install --system --skip-lock && \ # Clean up - rm /app/Pipfile && \ - pipenv --clear && \ apt-get -y autoremove --purge \ default-libmysqlclient-dev \ g++ \ @@ -339,12 +338,9 @@ RUN \ python3-pip \ zlib1g-dev \ && \ - apt-get -y autoremove && \ + apt-get -y autopurge && \ apt-get -y autoclean && \ - rm -rf /var/lib/apt/lists/* && \ - rm -rf 
/var/cache/apt/* && \ - rm -rf /tmp/* - + rm -v -rf /tmp/* # Copy app COPY tubesync /app @@ -362,11 +358,8 @@ RUN set -x && \ mkdir -v -p /config/media && \ mkdir -v -p /config/cache/pycache && \ mkdir -v -p /downloads/audio && \ - mkdir -v -p /downloads/video - - -# Append software versions -RUN set -x && \ + mkdir -v -p /downloads/video && \ + # Append software versions ffmpeg_version=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \ test -n "${ffmpeg_version}" && \ printf -- "ffmpeg_version = '%s'\n" "${ffmpeg_version}" >> /app/common/third_party_versions.py @@ -378,7 +371,8 @@ COPY config/root / HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"] # ENVS and ports -ENV PYTHONPATH="/app" PYTHONPYCACHEPREFIX="/config/cache/pycache" +ENV PYTHONPATH="/app" \ + PYTHONPYCACHEPREFIX="/config/cache/pycache" EXPOSE 4848 # Volumes From ae07c1ce8942385897e1ee743aaf5bf3270d1d52 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 13:03:06 -0500 Subject: [PATCH 57/79] Fallback to run_whl Prefer `pip_runner` but try the `pip` wheel too, if it's missing. 
--- tubesync/upgrade_yt-dlp.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/upgrade_yt-dlp.sh b/tubesync/upgrade_yt-dlp.sh index 21d51564..c3a7edab 100755 --- a/tubesync/upgrade_yt-dlp.sh +++ b/tubesync/upgrade_yt-dlp.sh @@ -5,12 +5,13 @@ pip3() { # pipenv pip_runner='/usr/lib/python3/dist-packages/pipenv/patched/pip/__pip-runner__.py' + test -s "${pip_runner}" || pip_runner='' # python3-pip-whl pip_whl="$(ls -1r /usr/share/python-wheels/pip-*-py3-none-any.whl | head -n 1)" run_whl="${pip_whl}/pip" - python3 "${pip_runner}" "$@" + python3 "${pip_runner:-"${run_whl}"}" "$@" } pip3 install --upgrade --break-system-packages yt-dlp From eff92e3469accb46e941f75e969d86eaf89baf17 Mon Sep 17 00:00:00 2001 From: FaySmash <30392780+FaySmash@users.noreply.github.com> Date: Fri, 17 Jan 2025 19:46:10 +0100 Subject: [PATCH 58/79] Update README.md Revised version of the section about potential permission issues with Samba volumes. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e49a30c7..d695221f 100644 --- a/README.md +++ b/README.md @@ -139,7 +139,7 @@ services: ``` > [!IMPORTANT] -> If the `/downloads` directory is mounted to a volume which points to a remote storage, make sure to suppy the `UID` and `GID` parameters in the driver options, to match the `PUID` and `PGID` specified as environment variables to prevent permission issues. [See this issue for details](https://github.com/meeb/tubesync/issues/616#issuecomment-2593458282) +> If the `/downloads` directory is mounted to a [Samba volume](https://docs.docker.com/engine/storage/volumes/#create-cifssamba-volumes), make sure to suppy the `UID` and `GID` parameters in the driver options. These have to be the same as the `PUID` and `PGID`, which were specified as environment variables. This prevents issues when executing file actions (like writing metadata). 
[See this issue for details](https://github.com/meeb/tubesync/issues/616#issuecomment-2593458282) ## Optional authentication From c5cdbe4a550fc5f38e7cd0533f7ad28e237f8390 Mon Sep 17 00:00:00 2001 From: FaySmash <30392780+FaySmash@users.noreply.github.com> Date: Sun, 19 Jan 2025 17:18:11 +0100 Subject: [PATCH 59/79] Update README.md Fix the suppy => supply misspelling +alternate phrasing Co-authored-by: tcely --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d695221f..ad437bba 100644 --- a/README.md +++ b/README.md @@ -139,7 +139,10 @@ services: ``` > [!IMPORTANT] -> If the `/downloads` directory is mounted to a [Samba volume](https://docs.docker.com/engine/storage/volumes/#create-cifssamba-volumes), make sure to suppy the `UID` and `GID` parameters in the driver options. These have to be the same as the `PUID` and `PGID`, which were specified as environment variables. This prevents issues when executing file actions (like writing metadata). [See this issue for details](https://github.com/meeb/tubesync/issues/616#issuecomment-2593458282) +> If the `/downloads` directory is mounted from a [Samba volume](https://docs.docker.com/engine/storage/volumes/#create-cifssamba-volumes), be sure to also supply the `uid` and `gid` mount parameters in the driver options. +> These must be matched to the `PUID` and `PGID` values, which were specified as environment variables. +> +> Matching these user and group ID numbers prevents issues when executing file actions, such as writing metadata. See [this issue](https://github.com/meeb/tubesync/issues/616#issuecomment-2593458282) for details. 
## Optional authentication From 44db638b122d608cf3f2957dae2a4708892e9caa Mon Sep 17 00:00:00 2001 From: meeb Date: Tue, 21 Jan 2025 00:21:03 +1100 Subject: [PATCH 60/79] fix num workers comparison check, resolves #634 --- tubesync/tubesync/gunicorn.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tubesync/tubesync/gunicorn.py b/tubesync/tubesync/gunicorn.py index d59c1389..0058fa65 100644 --- a/tubesync/tubesync/gunicorn.py +++ b/tubesync/tubesync/gunicorn.py @@ -10,9 +10,10 @@ def get_num_workers(): num_workers = int(os.getenv('GUNICORN_WORKERS', 3)) except ValueError: num_workers = cpu_workers - if 0 > num_workers > cpu_workers: - num_workers = cpu_workers - return num_workers + if 0 < num_workers < cpu_workers: + return num_workers + else: + return cpu_workers def get_bind(): From 66e51929803cace51ba946eece5af1822e225d51 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 20 Jan 2025 08:49:14 -0500 Subject: [PATCH 61/79] Warn against regular updating of yt-dlp --- tubesync/upgrade_yt-dlp.sh | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tubesync/upgrade_yt-dlp.sh b/tubesync/upgrade_yt-dlp.sh index c3a7edab..b92e1fd0 100755 --- a/tubesync/upgrade_yt-dlp.sh +++ b/tubesync/upgrade_yt-dlp.sh @@ -1,5 +1,14 @@ #!/usr/bin/env bash +warning_message() { + cat <&2 + pip3() { local pip_runner pip_whl run_whl @@ -14,5 +23,8 @@ pip3() { python3 "${pip_runner:-"${run_whl}"}" "$@" } +warning_message +test -n "${TUBESYNC_DEBUG}" || exit 1 + pip3 install --upgrade --break-system-packages yt-dlp From f65f6f1de5637b29ebee76c5ccb32388c7752c72 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 21 Jan 2025 01:12:24 -0500 Subject: [PATCH 62/79] Treat static_url the same as other URLs --- tubesync/tubesync/wsgi.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tubesync/tubesync/wsgi.py b/tubesync/tubesync/wsgi.py index 71c61003..123dfde6 100644 --- a/tubesync/tubesync/wsgi.py +++ b/tubesync/tubesync/wsgi.py @@ -1,5 +1,4 @@ 
import os -from urllib.parse import urljoin from django.core.wsgi import get_wsgi_application @@ -17,9 +16,8 @@ def application(environ, start_response): raise Exception(f'DJANGO_URL_PREFIX must end with a /, ' f'got: {DJANGO_URL_PREFIX}') if script_name: - static_url = urljoin(script_name, 'static/') environ['SCRIPT_NAME'] = script_name path_info = environ['PATH_INFO'] - if path_info.startswith(script_name) and not path_info.startswith(static_url): + if path_info.startswith(script_name): environ['PATH_INFO'] = path_info[len(script_name) - 1:] return _application(environ, start_response) From 52d703ff1ff358f9ae4a44d54b33601160a82737 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 21 Jan 2025 01:55:13 -0500 Subject: [PATCH 63/79] Better check for script_name --- tubesync/tubesync/wsgi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/tubesync/wsgi.py b/tubesync/tubesync/wsgi.py index 123dfde6..74912aef 100644 --- a/tubesync/tubesync/wsgi.py +++ b/tubesync/tubesync/wsgi.py @@ -15,7 +15,7 @@ def application(environ, start_response): else: raise Exception(f'DJANGO_URL_PREFIX must end with a /, ' f'got: {DJANGO_URL_PREFIX}') - if script_name: + if script_name is not None: environ['SCRIPT_NAME'] = script_name path_info = environ['PATH_INFO'] if path_info.startswith(script_name): From 1f95b858f2ef7ec6cc9c2994327474be8c4cbd5f Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 21 Jan 2025 03:08:24 -0500 Subject: [PATCH 64/79] Use --break-system-packages with pip Unfortunately, both versions of `pip` don't have this flag. Check the version, then add the flag if it is not too old. 
--- tubesync/upgrade_yt-dlp.sh | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tubesync/upgrade_yt-dlp.sh b/tubesync/upgrade_yt-dlp.sh index b92e1fd0..9da6d555 100755 --- a/tubesync/upgrade_yt-dlp.sh +++ b/tubesync/upgrade_yt-dlp.sh @@ -26,5 +26,13 @@ pip3() { warning_message test -n "${TUBESYNC_DEBUG}" || exit 1 -pip3 install --upgrade --break-system-packages yt-dlp +# Use the flag added in 23.0.1, if possible. +# https://github.com/pypa/pip/pull/11780 +break_system_packages='--break-system-packages' +pip_version="$(pip3 --version | awk '$1 = "pip" { print $2; exit; }')" +if [[ "${pip_version}" < "23.0.1" ]]; then + break_system_packages='' +fi + +pip3 install --upgrade ${break_system_packages} yt-dlp From 0ea508443ade4ae05b9ea19500c3d7f1dc1a8b93 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 21 Jan 2025 04:21:47 -0500 Subject: [PATCH 65/79] Pipefile -> Pipfile --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ad437bba..dbf4b6fe 100644 --- a/README.md +++ b/README.md @@ -325,7 +325,7 @@ Notable libraries and software used: * [django-sass](https://github.com/coderedcorp/django-sass/) * The container bundles with `s6-init` and `nginx` -See the [Pipefile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a full list. +See the [Pipfile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a full list. ### Can I get access to the full Django admin? From 96078f8d40cfd3b4f75e3d51ac1667cefad78aad Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 21 Jan 2025 04:38:06 -0500 Subject: [PATCH 66/79] Update architectures FAQ --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index ad437bba..ce89cb7a 100644 --- a/README.md +++ b/README.md @@ -353,7 +353,12 @@ etc.). Configuration of this is beyond the scope of this README. ### What architectures does the container support? -Just `amd64` for the moment. 
Others may be made available if there is demand. +Only two are supported, for the moment: +- `amd64` (most desktop PCs and servers) +- `arm64` +(modern ARM computers, such as the Raspberry Pi 3 or later) + +Others may be made available, if there is demand. ### The pipenv install fails with "Locking failed"! From 1671c6e7066e83d8ee684c9256726fbcada329ee Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 22 Jan 2025 04:55:57 -0500 Subject: [PATCH 67/79] DRY YouTube domain list I am tired of links copied from YouTube not working without adjustments. --- tubesync/sync/views.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index 52090042..8ca853de 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -193,10 +193,15 @@ class ValidateSourceView(FormView): Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: ('https://www.youtube.com/playlist?list=' 'PL590L5WQmH8dpP0RyH5pCfIaDEdt9nk7r') } + _youtube_domains = frozenset({ + 'youtube.com', + 'm.youtube.com', + 'www.youtube.com', + }) validation_urls = { Source.SOURCE_TYPE_YOUTUBE_CHANNEL: { 'scheme': 'https', - 'domains': ('m.youtube.com', 'www.youtube.com'), + 'domains': _youtube_domains, 'path_regex': '^\/(c\/)?([^\/]+)(\/videos)?$', 'path_must_not_match': ('/playlist', '/c/playlist'), 'qs_args': [], @@ -205,7 +210,7 @@ class ValidateSourceView(FormView): }, Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: { 'scheme': 'https', - 'domains': ('m.youtube.com', 'www.youtube.com'), + 'domains': _youtube_domains, 'path_regex': '^\/channel\/([^\/]+)(\/videos)?$', 'path_must_not_match': ('/playlist', '/c/playlist'), 'qs_args': [], @@ -214,7 +219,7 @@ class ValidateSourceView(FormView): }, Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: { 'scheme': 'https', - 'domains': ('m.youtube.com', 'www.youtube.com'), + 'domains': _youtube_domains, 'path_regex': '^\/(playlist|watch)$', 'path_must_not_match': (), 'qs_args': ('list',), From b38c7d7c7f03598c4450d71433292ff92c9f7eb5 Mon Sep 17
00:00:00 2001 From: meeb Date: Wed, 22 Jan 2025 23:59:41 +1100 Subject: [PATCH 68/79] bump ffmpeg and yt-dlp --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4a366e96..023f4fd8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,8 @@ # syntax=docker/dockerfile:1 # check=error=true -ARG FFMPEG_DATE="2025-01-15-14-13" -ARG FFMPEG_VERSION="N-118315-g4f3c9f2f03" +ARG FFMPEG_DATE="2025-01-21-14-19" +ARG FFMPEG_VERSION="N-118328-g504df09c34" ARG S6_VERSION="3.2.0.2" From ba321945b6239f99403e429c6ff268c9de336537 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 22 Jan 2025 19:41:03 -0500 Subject: [PATCH 69/79] Automated channel_id extraction --- tubesync/sync/views.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index 8ca853de..ff4a87f4 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -291,11 +291,24 @@ class ValidateSourceView(FormView): url = reverse_lazy('sync:add-source') fields_to_populate = self.prepopulate_fields.get(self.source_type) fields = {} + value = self.key + use_channel_id = ( + 'youtube-channel' == self.source_type_str and + '@' == self.key[0] + ) + if use_channel_id: + self.source_type_str = 'youtube-channel-id' + self.source_type = self.source_types.get(self.source_type_str, None) + self.key = youtube.get_channel_id( + Source.create_index_url(self.source_type, self.key, 'videos') + ) for field in fields_to_populate: if field == 'source_type': fields[field] = self.source_type - elif field in ('key', 'name', 'directory'): + elif field == 'key': fields[field] = self.key + elif field in ('name', 'directory'): + fields[field] = value return append_uri_params(url, fields) From e5b4e9dbc0eb418827f3fac308dcd8b58118911c Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 22 Jan 2025 20:07:59 -0500 Subject: [PATCH 70/79] Add get_channel_id --- tubesync/sync/youtube.py | 25 +++++++++++++++++++++++++ 1 
file changed, 25 insertions(+) diff --git a/tubesync/sync/youtube.py b/tubesync/sync/youtube.py index 5fdef3cb..5371c937 100644 --- a/tubesync/sync/youtube.py +++ b/tubesync/sync/youtube.py @@ -45,6 +45,31 @@ def get_yt_opts(): opts.update({'cookiefile': cookie_file_path}) return opts +def get_channel_id(url): + # yt-dlp --simulate --no-check-formats --playlist-items 1 + # --print 'pre_process:%(playlist_channel_id,playlist_id,channel_id)s' + opts = get_yt_opts() + opts.update({ + 'skip_download': True, + 'simulate': True, + 'logger': log, + 'extract_flat': True, # Change to False to get detailed info + 'check_formats': False, + 'playlist_items': '1', + }) + + with yt_dlp.YoutubeDL(opts) as y: + try: + response = y.extract_info(url, download=False) + + channel_id = response['channel_id'] + playlist_id = response['playlist_id'] + playlist_channel_id = response['playlist_channel_id'] + except yt_dlp.utils.DownloadError as e: + raise YouTubeError(f'Failed to extract channel ID for "{url}": {e}') from e + else: + return playlist_channel_id or playlist_id or channel_id + def get_channel_image_info(url): opts = get_yt_opts() opts.update({ From 5546c5dad24aaae939392de851024ff298edbd42 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 22 Jan 2025 20:21:23 -0500 Subject: [PATCH 71/79] Use the channel type to fetch channel_id --- tubesync/sync/views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index ff4a87f4..17f88522 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -297,10 +297,11 @@ class ValidateSourceView(FormView): '@' == self.key[0] ) if use_channel_id: + source_type = self.source_type self.source_type_str = 'youtube-channel-id' self.source_type = self.source_types.get(self.source_type_str, None) self.key = youtube.get_channel_id( - Source.create_index_url(self.source_type, self.key, 'videos') + Source.create_index_url(source_type, self.key, 'videos') ) for field in 
fields_to_populate: if field == 'source_type': From 25892388a1316b67dd16e38cc1fd31c50651b1e6 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 22 Jan 2025 20:42:18 -0500 Subject: [PATCH 72/79] Remove the /channel/ from the URL --- tubesync/sync/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index 17f88522..fe49f105 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -297,11 +297,11 @@ class ValidateSourceView(FormView): '@' == self.key[0] ) if use_channel_id: - source_type = self.source_type self.source_type_str = 'youtube-channel-id' self.source_type = self.source_types.get(self.source_type_str, None) + url = Source.create_index_url(self.source_type, self.key, 'videos') self.key = youtube.get_channel_id( - Source.create_index_url(source_type, self.key, 'videos') + url.replace('/channel/', '/') ) for field in fields_to_populate: if field == 'source_type': From fb87b54300bd590f7a703a4b10788cdf7ef34d39 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 22 Jan 2025 20:59:21 -0500 Subject: [PATCH 73/79] channel_id is the only available key --- tubesync/sync/youtube.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tubesync/sync/youtube.py b/tubesync/sync/youtube.py index 5371c937..27dc88fc 100644 --- a/tubesync/sync/youtube.py +++ b/tubesync/sync/youtube.py @@ -61,14 +61,15 @@ def get_channel_id(url): with yt_dlp.YoutubeDL(opts) as y: try: response = y.extract_info(url, download=False) - - channel_id = response['channel_id'] - playlist_id = response['playlist_id'] - playlist_channel_id = response['playlist_channel_id'] except yt_dlp.utils.DownloadError as e: raise YouTubeError(f'Failed to extract channel ID for "{url}": {e}') from e else: - return playlist_channel_id or playlist_id or channel_id + try: + channel_id = response['channel_id'] + except Exception as e: + raise YouTubeError(f'Failed to extract channel ID for "{url}": {e}') from e + else: 
+ return channel_id def get_channel_image_info(url): opts = get_yt_opts() From ed381715b5840c8976f62b63020e5e480b3c3e29 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 22 Jan 2025 21:09:03 -0500 Subject: [PATCH 74/79] Fail to previous behavior --- tubesync/sync/views.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index fe49f105..dccf1820 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -297,12 +297,23 @@ class ValidateSourceView(FormView): '@' == self.key[0] ) if use_channel_id: + old_key = self.key + old_source_type = self.source_type + old_source_type_str = self.source_type_str + self.source_type_str = 'youtube-channel-id' self.source_type = self.source_types.get(self.source_type_str, None) - url = Source.create_index_url(self.source_type, self.key, 'videos') - self.key = youtube.get_channel_id( - url.replace('/channel/', '/') - ) + index_url = Source.create_index_url(self.source_type, self.key, 'videos') + try: + self.key = youtube.get_channel_id( + index_url.replace('/channel/', '/') + ) + except youtube.YouTubeError as e: + # It did not work, revert to previous behavior + self.key = old_key + self.source_type = old_source_type + self.source_type_str = old_source_type_str + for field in fields_to_populate: if field == 'source_type': fields[field] = self.source_type From 3d148fc5aafb47c95e07a228eeb33ceb656e299a Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 24 Jan 2025 08:22:46 -0500 Subject: [PATCH 75/79] Return `format_note` also --- tubesync/sync/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 1d67af38..5d2c6921 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -304,6 +304,7 @@ def parse_media_format(format_dict): return { 'id': format_dict.get('format_id', ''), 'format': format_str, + 'format_note': format_dict.get('format_note', ''), 'format_verbose': 
format_dict.get('format', ''), 'height': height, 'width': width, From d4a5a78831bf2cb27236a7d39fedd3e730343736 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 24 Jan 2025 08:26:19 -0500 Subject: [PATCH 76/79] Display `format_note` after audio-only formats --- tubesync/sync/templates/sync/media-item.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/templates/sync/media-item.html b/tubesync/sync/templates/sync/media-item.html index 6f751be6..026e5a54 100644 --- a/tubesync/sync/templates/sync/media-item.html +++ b/tubesync/sync/templates/sync/media-item.html @@ -146,7 +146,7 @@
ID: {{ format.format_id }} {% if format.vcodec|lower != 'none' %}, {{ format.format_note }} ({{ format.width }}x{{ format.height }}), fps:{{ format.fps|lower }}, video:{{ format.vcodec }} @{{ format.tbr }}k{% endif %} - {% if format.acodec|lower != 'none' %}, audio:{{ format.acodec }} @{{ format.abr }}k / {{ format.asr }}Hz{% endif %} + {% if format.acodec|lower != 'none' %}, audio:{{ format.acodec }} @{{ format.abr }}k / {{ format.asr }}Hz {{ format.format_note }}{% endif %} {% if format.format_id == combined_format or format.format_id == audio_format or format.format_id == video_format %}(matched){% endif %}
{% empty %} From ee303c638b066cc74669214ba235e6874eb2eaa2 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 24 Jan 2025 09:14:53 -0500 Subject: [PATCH 77/79] Display database_filesize --- tubesync/sync/templates/sync/dashboard.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/templates/sync/dashboard.html b/tubesync/sync/templates/sync/dashboard.html index 8c27684c..4d9fc2da 100644 --- a/tubesync/sync/templates/sync/dashboard.html +++ b/tubesync/sync/templates/sync/dashboard.html @@ -125,7 +125,7 @@ Database - Database
{{ database_connection }} + Database
{{ database_connection }} {{ database_filesize|filesizeformat }} From b65ecc43ffc7ad40c68fe171fc9c9df42c93a534 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 24 Jan 2025 09:20:12 -0500 Subject: [PATCH 78/79] Pass raw bytes count as `database_filesize` --- tubesync/sync/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index 3fb23044..7f77e858 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -86,11 +86,11 @@ class DashboardView(TemplateView): data['downloads_dir'] = str(settings.DOWNLOAD_ROOT) data['database_connection'] = settings.DATABASE_CONNECTION_STR # Add the database filesize when using db.sqlite3 + data['database_filesize'] = None db_name = str(connection.get_connection_params()['database']) db_path = pathlib.Path(db_name) if '/' == db_name[0] else None if db_path and 'sqlite' == connection.vendor: - db_size = db_path.stat().st_size - data['database_connection'] += f' ({db_size:,} bytes)' + data['database_filesize'] = db_path.stat().st_size return data From e16f6bb86a723760b9ccd10815bfc432f82fde0a Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 24 Jan 2025 09:26:04 -0500 Subject: [PATCH 79/79] Display `database_filesize` only if set --- tubesync/sync/templates/sync/dashboard.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/templates/sync/dashboard.html b/tubesync/sync/templates/sync/dashboard.html index 4d9fc2da..ccf4a6c3 100644 --- a/tubesync/sync/templates/sync/dashboard.html +++ b/tubesync/sync/templates/sync/dashboard.html @@ -125,7 +125,7 @@ Database - Database
{{ database_connection }} {{ database_filesize|filesizeformat }} + Database
{{ database_connection }}{% if database_filesize %} {{ database_filesize|filesizeformat }}{% endif %}