Merge branch 'main' into patch-7

This commit is contained in:
tcely 2025-06-19 11:30:53 -04:00 committed by GitHub
commit 0e93af60d3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 53 additions and 28 deletions

View File

@@ -26,7 +26,8 @@ def h_q_dict(q, /):
return dict(
scheduled=(q.scheduled_count(), q.scheduled(),),
pending=(q.pending_count(), q.pending(),),
result=(q.result_count(), list(q.all_results().keys()),),
running=q._tasks_in_flight,
results=(q.result_count(), list(q.all_results().keys()),),
)

View File

@@ -722,8 +722,19 @@ class Media(models.Model):
@property
def slugtitle(self):
replaced = self.title.replace('_', '-').replace('&', 'and').replace('+', 'and')
return slugify(replaced)[:80]
transtab = str.maketrans({
'&': 'and', '+': 'and',
})
slugified = slugify(
self.title.translate(transtab),
allow_unicode=True,
)
encoding = os.sys.getfilesystemencoding()
decoded = slugified.encode(
encoding=encoding,
errors='ignore',
).decode(encoding=encoding)
return decoded[:80]
@property
def thumbnail(self):

View File

@@ -27,7 +27,7 @@ from django.utils.translation import gettext_lazy as _
from background_task import background
from background_task.exceptions import InvalidTaskError
from background_task.models import Task, CompletedTask
from django_huey import task as huey_task # noqa
from django_huey import lock_task as huey_lock_task, task as huey_task # noqa
from django_huey import db_periodic_task, db_task, signal as huey_signal
from huey import crontab as huey_crontab, signals as huey_signals
from common.huey import CancelExecution, dynamic_retry, register_huey_signals
@@ -402,22 +402,26 @@ def migrate_to_metadata(media_id):
except Metadata.DoesNotExist as e:
raise CancelExecution(_('no indexed data to migrate to metadata'), retry=False) from e
video = data.value
fields = lambda f, m: m.get_metadata_field(f)
timestamp = video.get(fields('timestamp', media), None)
for key in ('epoch', 'availability', 'extractor_key',):
field = fields(key, media)
value = video.get(field)
existing_value = media.get_metadata_first_value(key)
if value is None:
if 'epoch' == key:
value = timestamp
elif 'extractor_key' == key:
value = data.site
if value is not None:
if existing_value and ('epoch' == key or value == existing_value):
continue
media.save_to_metadata(field, value)
with huey_lock_task(
f'media:{media.uuid}',
queue=Val(TaskQueue.DB),
):
video = data.value
fields = lambda f, m: m.get_metadata_field(f)
timestamp = video.get(fields('timestamp', media), None)
for key in ('epoch', 'availability', 'extractor_key',):
field = fields(key, media)
value = video.get(field)
existing_value = media.get_metadata_first_value(key)
if value is None:
if 'epoch' == key:
value = timestamp
elif 'extractor_key' == key:
value = data.site
if value is not None:
if existing_value and ('epoch' == key or value == existing_value):
continue
media.save_to_metadata(field, value)
@background(schedule=dict(priority=0, run_at=0), queue=Val(TaskQueue.NET), remove_existing_tasks=False)
@@ -1004,7 +1008,7 @@ def refresh_formats(media_id):
retry=retry,
)
# combine the strings
exc.args = (' '.join(exc.args),)
exc.args = (' '.join(map(str, exc.args)),)
# store instance details
exc.instance = dict(
key=media.key,
@@ -1020,17 +1024,21 @@ def refresh_formats(media_id):
@db_task(delay=60, priority=80, retries=5, retry_delay=60, queue=Val(TaskQueue.FS))
@atomic(durable=True)
def rename_media(media_id):
try:
media = Media.objects.get(pk=media_id)
except Media.DoesNotExist as e:
raise CancelExecution(_('no such media'), retry=False) from e
else:
with atomic():
with huey_lock_task(
f'media:{media.uuid}',
queue=Val(TaskQueue.DB),
):
media.rename_files()
@background(schedule=dict(priority=20, run_at=300), queue=Val(TaskQueue.FS), remove_existing_tasks=True)
@db_task(delay=300, priority=80, retries=5, retry_delay=600, queue=Val(TaskQueue.FS))
@atomic(durable=True)
def rename_all_media_for_source(source_id):
try:
@@ -1039,7 +1047,7 @@ def rename_all_media_for_source(source_id):
# Task triggered but the source no longer exists, do nothing
log.error(f'Task rename_all_media_for_source(pk={source_id}) called but no '
f'source exists with ID: {source_id}')
raise InvalidTaskError(_('no such source')) from e
raise CancelExecution(_('no such source'), retry=False) from e
# Check that the settings allow renaming
rename_sources_setting = getattr(settings, 'RENAME_SOURCES') or list()
create_rename_tasks = (
@@ -1050,14 +1058,18 @@ def rename_all_media_for_source(source_id):
getattr(settings, 'RENAME_ALL_SOURCES', False)
)
if not create_rename_tasks:
return
return None
mqs = Media.objects.all().filter(
source=source,
downloaded=True,
)
for media in qs_gen(mqs):
with atomic():
media.rename_files()
with huey_lock_task(
f'media:{media.uuid}',
queue=Val(TaskQueue.DB),
):
with atomic():
media.rename_files()
@background(schedule=dict(priority=0, run_at=60), queue=Val(TaskQueue.DB), remove_existing_tasks=True)

View File

@@ -31,7 +31,8 @@
{% if source.has_failed %}
<span class="error-text"><i class="fas fa-exclamation-triangle"></i> <strong>Source has permanent failures</strong></span>
{% else %}
<strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %}
<strong>{{ source.media_count }}</strong> media items, <strong>{{ source.downloaded_count }}</strong> downloaded{% if source.delete_old_media and source.days_to_keep > 0 %}, keeping {{ source.days_to_keep }} days of media{% endif %}<br>
Next update target: <strong>{% if source.target_schedule %}{{ source.target_schedule|date:'l, h:00 A' }}{% else %}Not set{% endif %}</strong>
{% endif %}
</a>
<a href="{% url 'sync:source-sync-now' pk=source.pk %}" class="btn">Sync Now</a>