Merge pull request #860 from tcely/patch-12

Better indexing of inactive sources
This commit is contained in:
meeb 2025-03-19 15:29:53 +11:00 committed by GitHub
commit dbae674bb8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 39 additions and 31 deletions

View File

@@ -374,17 +374,3 @@ def media_post_delete(sender, instance, **kwargs):
             log.info(f'Deleting file for: {instance} path: {file}')
             delete_file(file)
-    if not instance.source.is_active:
-        return
-    # Schedule a task to update media servers
-    for mediaserver in MediaServer.objects.all():
-        log.info(f'Scheduling media server updates')
-        verbose_name = _('Request media server rescan for "{}"')
-        rescan_media_server(
-            str(mediaserver.pk),
-            schedule=5,
-            priority=0,
-            verbose_name=verbose_name.format(mediaserver),
-            remove_existing_tasks=True
-        )

View File

@@ -160,24 +160,46 @@ def cleanup_completed_tasks():
     CompletedTask.objects.filter(run_at__lt=delta).delete()
+
+
+def schedule_media_servers_update():
+    with atomic():
+        # Schedule a task to update media servers
+        log.info(f'Scheduling media server updates')
+        verbose_name = _('Request media server rescan for "{}"')
+        for mediaserver in MediaServer.objects.all():
+            rescan_media_server(
+                str(mediaserver.pk),
+                priority=30,
+                verbose_name=verbose_name.format(mediaserver),
+                remove_existing_tasks=True,
+            )
 def cleanup_old_media():
-    for source in Source.objects.filter(delete_old_media=True, days_to_keep__gt=0):
-        delta = timezone.now() - timedelta(days=source.days_to_keep)
-        for media in source.media_source.filter(downloaded=True, download_date__lt=delta):
-            log.info(f'Deleting expired media: {source} / {media} '
-                     f'(now older than {source.days_to_keep} days / '
-                     f'download_date before {delta})')
-            # .delete() also triggers a pre_delete signal that removes the files
-            media.delete()
+    with atomic():
+        for source in Source.objects.filter(delete_old_media=True, days_to_keep__gt=0):
+            delta = timezone.now() - timedelta(days=source.days_to_keep)
+            for media in source.media_source.filter(downloaded=True, download_date__lt=delta):
+                log.info(f'Deleting expired media: {source} / {media} '
+                         f'(now older than {source.days_to_keep} days / '
+                         f'download_date before {delta})')
+                with atomic():
+                    # .delete() also triggers a pre_delete/post_delete signals that remove files
+                    media.delete()
+    schedule_media_servers_update()
 def cleanup_removed_media(source, videos):
+    if not source.delete_removed_media:
+        return
+    log.info(f'Cleaning up media no longer in source: {source}')
     media_objects = Media.objects.filter(source=source)
     for media in media_objects:
         matching_source_item = [video['id'] for video in videos if video['id'] == media.key]
         if not matching_source_item:
             log.info(f'{media.name} is no longer in source, removing')
-            media.delete()
+            with atomic():
+                media.delete()
+    schedule_media_servers_update()


 @background(schedule=300, remove_existing_tasks=True)
@@ -185,11 +207,17 @@ def index_source_task(source_id):
     '''
         Indexes media available from a Source object.
     '''
+    cleanup_completed_tasks()
+    # deleting expired media should happen any time an index task is requested
+    cleanup_old_media()
     try:
         source = Source.objects.get(pk=source_id)
     except Source.DoesNotExist:
         # Task triggered but the Source has been deleted, delete the task
         return
+    # An inactive Source would return an empty list for videos anyway
+    if not source.is_active:
+        return
     # Reset any errors
     source.has_failed = False
     source.save()
@@ -245,14 +273,8 @@ def index_source_task(source_id):
                 priority=20,
                 verbose_name=verbose_name.format(media.pk),
             )
-    # Tack on a cleanup of old completed tasks
-    cleanup_completed_tasks()
-    with atomic(durable=True):
-        # Tack on a cleanup of old media
-        cleanup_old_media()
-        if source.delete_removed_media:
-            log.info(f'Cleaning up media no longer in source: {source}')
-            cleanup_removed_media(source, videos)
+    # Cleanup of media no longer available from the source
+    cleanup_removed_media(source, videos)


 @background(schedule=0)