Mirror of https://github.com/meeb/tubesync.git (synced 2025-06-23 21:46:44 +00:00)

Merge branch 'main' into patch-3

This commit is contained in:
commit 5dc3cc2564

407	Dockerfile
@@ -1,124 +1,270 @@
FROM debian:bookworm-slim
# syntax=docker/dockerfile:1
# check=error=true

ARG TARGETARCH
ARG TARGETPLATFORM
ARG FFMPEG_DATE="2025-01-21-14-19"
ARG FFMPEG_VERSION="N-118328-g504df09c34"

ARG S6_VERSION="3.2.0.2"

ARG SHA256_S6_AMD64="59289456ab1761e277bd456a95e737c06b03ede99158beb24f12b165a904f478"
ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f66f785"
ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae"

ARG FFMPEG_DATE="autobuild-2024-12-24-14-15"
ARG FFMPEG_VERSION="N-118163-g954d55c2a4"
ARG SHA256_FFMPEG_AMD64="798a7e5a0724139e6bb70df8921522b23be27028f9f551dfa83c305ec4ffaf3a"
ARG SHA256_FFMPEG_ARM64="c3e6cc0fec42cc7e3804014fbb02c1384a1a31ef13f6f9a36121f2e1216240c0"
ARG ALPINE_VERSION="latest"
ARG DEBIAN_VERSION="bookworm-slim"

ENV S6_VERSION="${S6_VERSION}" \
    FFMPEG_DATE="${FFMPEG_DATE}" \
    FFMPEG_VERSION="${FFMPEG_VERSION}"
ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION}"
ARG FFMPEG_SUFFIX_FILE=".tar.xz"

ARG FFMPEG_CHECKSUM_ALGORITHM="sha256"
ARG S6_CHECKSUM_ALGORITHM="sha256"


FROM alpine:${ALPINE_VERSION} AS ffmpeg-download
ARG FFMPEG_DATE
ARG FFMPEG_VERSION
ARG FFMPEG_PREFIX_FILE
ARG FFMPEG_SUFFIX_FILE
ARG SHA256_FFMPEG_AMD64
ARG SHA256_FFMPEG_ARM64
ARG FFMPEG_CHECKSUM_ALGORITHM
ARG CHECKSUM_ALGORITHM="${FFMPEG_CHECKSUM_ALGORITHM}"
ARG FFMPEG_CHECKSUM_AMD64="${SHA256_FFMPEG_AMD64}"
ARG FFMPEG_CHECKSUM_ARM64="${SHA256_FFMPEG_ARM64}"

ARG FFMPEG_FILE_SUMS="checksums.${CHECKSUM_ALGORITHM}"
ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-${FFMPEG_DATE}"

ARG DESTDIR="/downloaded"
ARG TARGETARCH
ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/"
RUN set -eu ; \
    apk --no-cache --no-progress add cmd:aria2c cmd:awk "cmd:${CHECKSUM_ALGORITHM}sum" ; \
    \
    aria2c_options() { \
        algorithm="${CHECKSUM_ALGORITHM%[0-9]??}" ; \
        bytes="${CHECKSUM_ALGORITHM#${algorithm}}" ; \
        hash="$( awk -v fn="${1##*/}" '$0 ~ fn"$" { print $1; exit; }' "${DESTDIR}/${FFMPEG_FILE_SUMS}" )" ; \
        \
        printf -- '\t%s\n' \
            'allow-overwrite=true' \
            'always-resume=false' \
            'check-integrity=true' \
            "checksum=${algorithm}-${bytes}=${hash}" \
            'max-connection-per-server=2' \
        ; \
        printf -- '\n' ; \
    } ; \
    \
    decide_arch() { \
        case "${TARGETARCH}" in \
            (amd64) printf -- 'linux64' ;; \
            (arm64) printf -- 'linuxarm64' ;; \
        esac ; \
    } ; \
    \
    FFMPEG_ARCH="$(decide_arch)" ; \
    FFMPEG_PREFIX_FILE="$( printf -- '%s' "${FFMPEG_PREFIX_FILE}" | cut -d '-' -f 1,2 )" ; \
    for url in $(awk ' \
        $2 ~ /^[*]?'"${FFMPEG_PREFIX_FILE}"'/ && /-'"${FFMPEG_ARCH}"'-/ { $1=""; print; } \
        ' "${DESTDIR}/${FFMPEG_FILE_SUMS}") ; \
    do \
        url="${FFMPEG_URL}/${url# }" ; \
        printf -- '%s\n' "${url}" ; \
        aria2c_options "${url}" ; \
        printf -- '\n' ; \
    done > /tmp/downloads ; \
    unset -v url ; \
    \
    aria2c --no-conf=true \
        --dir /downloaded \
        --lowest-speed-limit='16K' \
        --show-console-readout=false \
        --summary-interval=0 \
        --input-file /tmp/downloads ; \
    \
    decide_expected() { \
        case "${TARGETARCH}" in \
            (amd64) printf -- '%s' "${FFMPEG_CHECKSUM_AMD64}" ;; \
            (arm64) printf -- '%s' "${FFMPEG_CHECKSUM_ARM64}" ;; \
        esac ; \
    } ; \
    \
    FFMPEG_HASH="$(decide_expected)" ; \
    \
    cd "${DESTDIR}" ; \
    if [ -n "${FFMPEG_HASH}" ] ; \
    then \
        printf -- '%s *%s\n' "${FFMPEG_HASH}" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" >> /tmp/SUMS ; \
        "${CHECKSUM_ALGORITHM}sum" --check --warn --strict /tmp/SUMS || exit ; \
    fi ; \
    "${CHECKSUM_ALGORITHM}sum" --check --warn --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" ; \
    \
    mkdir -v -p "/verified/${TARGETARCH}" ; \
    ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" ; \
    rm -rf "${DESTDIR}" ;

FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted
COPY --from=ffmpeg-download /verified /verified

ARG FFMPEG_PREFIX_FILE
ARG FFMPEG_SUFFIX_FILE
ARG TARGETARCH
RUN set -eux ; \
    mkdir -v /extracted ; \
    cd /extracted ; \
    ln -s "/verified/${TARGETARCH}"/"${FFMPEG_PREFIX_FILE}"*"${FFMPEG_SUFFIX_FILE}" "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" ; \
    tar -tf "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" | grep '/bin/\(ffmpeg\|ffprobe\)' > /tmp/files ; \
    tar -xop \
        --strip-components=2 \
        -f "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" \
        -T /tmp/files ; \
    \
    ls -AlR /extracted ;

FROM scratch AS ffmpeg
COPY --from=ffmpeg-extracted /extracted /usr/local/bin/

FROM alpine:${ALPINE_VERSION} AS s6-overlay-download
ARG S6_VERSION
ARG SHA256_S6_AMD64
ARG SHA256_S6_ARM64
ARG SHA256_S6_NOARCH

ARG DESTDIR="/downloaded"
ARG S6_CHECKSUM_ALGORITHM
ARG CHECKSUM_ALGORITHM="${S6_CHECKSUM_ALGORITHM}"

ARG S6_CHECKSUM_AMD64="${CHECKSUM_ALGORITHM}:${SHA256_S6_AMD64}"
ARG S6_CHECKSUM_ARM64="${CHECKSUM_ALGORITHM}:${SHA256_S6_ARM64}"
ARG S6_CHECKSUM_NOARCH="${CHECKSUM_ALGORITHM}:${SHA256_S6_NOARCH}"

ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}"
ARG S6_PREFIX_FILE="s6-overlay-"
ARG S6_SUFFIX_FILE=".tar.xz"

ARG S6_FILE_AMD64="${S6_PREFIX_FILE}x86_64${S6_SUFFIX_FILE}"
ARG S6_FILE_ARM64="${S6_PREFIX_FILE}aarch64${S6_SUFFIX_FILE}"
ARG S6_FILE_NOARCH="${S6_PREFIX_FILE}noarch${S6_SUFFIX_FILE}"

ADD "${S6_OVERLAY_URL}/${S6_FILE_AMD64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/"
ADD "${S6_OVERLAY_URL}/${S6_FILE_ARM64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/"
ADD "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/"

##ADD --checksum="${S6_CHECKSUM_AMD64}" "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/"
##ADD --checksum="${S6_CHECKSUM_ARM64}" "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/"
##ADD --checksum="${S6_CHECKSUM_NOARCH}" "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/"

# --checksum wasn't recognized, so use busybox to check the sums instead
ADD "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/"
RUN set -eu ; checksum="${S6_CHECKSUM_AMD64}" ; file="${S6_FILE_AMD64}" ; cd "${DESTDIR}/" && \
    printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw

ADD "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/"
RUN set -eu ; checksum="${S6_CHECKSUM_ARM64}" ; file="${S6_FILE_ARM64}" ; cd "${DESTDIR}/" && \
    printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw

ADD "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/"
RUN set -eu ; checksum="${S6_CHECKSUM_NOARCH}" ; file="${S6_FILE_NOARCH}" ; cd "${DESTDIR}/" && \
    printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw

FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted
COPY --from=s6-overlay-download /downloaded /downloaded

ARG S6_CHECKSUM_ALGORITHM
ARG CHECKSUM_ALGORITHM="${S6_CHECKSUM_ALGORITHM}"

ARG TARGETARCH

RUN set -eu ; \
    \
    decide_arch() { \
        local arg1 ; \
        arg1="${1:-$(uname -m)}" ; \
        \
        case "${arg1}" in \
            (amd64) printf -- 'x86_64' ;; \
            (arm64) printf -- 'aarch64' ;; \
            (armv7l) printf -- 'arm' ;; \
            (*) printf -- '%s' "${arg1}" ;; \
        esac ; \
        unset -v arg1 ; \
    } ; \
    \
    apk --no-cache --no-progress add "cmd:${CHECKSUM_ALGORITHM}sum" ; \
    mkdir -v /verified ; \
    cd /downloaded ; \
    for f in *.sha256 ; \
    do \
        "${CHECKSUM_ALGORITHM}sum" --check --warn --strict "${f}" || exit ; \
        ln -v "${f%.sha256}" /verified/ || exit ; \
    done ; \
    unset -v f ; \
    \
    S6_ARCH="$(decide_arch "${TARGETARCH}")" ; \
    set -x ; \
    mkdir -v /s6-overlay-rootfs ; \
    cd /s6-overlay-rootfs ; \
    for f in /verified/*.tar* ; \
    do \
        case "${f}" in \
            (*-noarch.tar*|*-"${S6_ARCH}".tar*) \
                tar -xpf "${f}" || exit ;; \
        esac ; \
    done ; \
    set +x ; \
    unset -v f ;

FROM scratch AS s6-overlay
COPY --from=s6-overlay-extracted /s6-overlay-rootfs /

FROM debian:${DEBIAN_VERSION} AS tubesync

ARG S6_VERSION

ARG FFMPEG_DATE
ARG FFMPEG_VERSION

ENV DEBIAN_FRONTEND="noninteractive" \
    HOME="/root" \
    LANGUAGE="en_US.UTF-8" \
    LANG="en_US.UTF-8" \
    LC_ALL="en_US.UTF-8" \
    TERM="xterm" \
    S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0"
    HOME="/root" \
    LANGUAGE="en_US.UTF-8" \
    LANG="en_US.UTF-8" \
    LC_ALL="en_US.UTF-8" \
    TERM="xterm" \
    # Do not include compiled byte-code
    PIP_NO_COMPILE=1 \
    PIP_ROOT_USER_ACTION='ignore' \
    S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0"

ENV S6_VERSION="${S6_VERSION}" \
    FFMPEG_DATE="${FFMPEG_DATE}" \
    FFMPEG_VERSION="${FFMPEG_VERSION}"

# Install third party software
COPY --from=s6-overlay / /
COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/

# Reminder: the SHELL handles all variables
RUN decide_arch() { \
        case "${TARGETARCH:=amd64}" in \
            (arm64) printf -- 'aarch64' ;; \
            (*) printf -- '%s' "${TARGETARCH}" ;; \
        esac ; \
    } && \
    decide_expected() { \
        case "${1}" in \
            (ffmpeg) case "${2}" in \
                (amd64) printf -- '%s' "${SHA256_FFMPEG_AMD64}" ;; \
                (arm64) printf -- '%s' "${SHA256_FFMPEG_ARM64}" ;; \
            esac ;; \
            (s6) case "${2}" in \
                (amd64) printf -- '%s' "${SHA256_S6_AMD64}" ;; \
                (arm64) printf -- '%s' "${SHA256_S6_ARM64}" ;; \
                (noarch) printf -- '%s' "${SHA256_S6_NOARCH}" ;; \
            esac ;; \
        esac ; \
    } && \
    decide_url() { \
        case "${1}" in \
            (ffmpeg) printf -- \
                'https://github.com/yt-dlp/FFmpeg-Builds/releases/download/%s/ffmpeg-%s-linux%s-gpl%s.tar.xz' \
                "${FFMPEG_DATE}" \
                "${FFMPEG_VERSION}" \
                "$(case "${2}" in \
                    (amd64) printf -- '64' ;; \
                    (*) printf -- '%s' "${2}" ;; \
                esac)" \
                "$(case "${FFMPEG_VERSION%%-*}" in \
                    (n*) printf -- '-%s\n' "${FFMPEG_VERSION#n}" | cut -d '-' -f 1,2 ;; \
                    (*) printf -- '' ;; \
                esac)" ;; \
            (s6) printf -- \
                'https://github.com/just-containers/s6-overlay/releases/download/v%s/s6-overlay-%s.tar.xz' \
                "${S6_VERSION}" \
                "$(case "${2}" in \
                    (amd64) printf -- 'x86_64' ;; \
                    (arm64) printf -- 'aarch64' ;; \
                    (*) printf -- '%s' "${2}" ;; \
                esac)" ;; \
        esac ; \
    } && \
    verify_download() { \
        while [ $# -ge 2 ] ; do \
            sha256sum "${2}" ; \
            printf -- '%s %s\n' "${1}" "${2}" | sha256sum -c || return ; \
            shift ; shift ; \
        done ; \
    } && \
    download_expected_file() { \
        local arg1 expected file url ; \
        arg1="$(printf -- '%s\n' "${1}" | awk '{print toupper($0);}')" ; \
        expected="$(decide_expected "${1}" "${2}")" ; \
        file="${3}" ; \
        url="$(decide_url "${1}" "${2}")" ; \
        printf -- '%s\n' \
            "Building for arch: ${2}|${ARCH}, downloading ${arg1} from: ${url}, expecting ${arg1} SHA256: ${expected}" && \
        rm -rf "${file}" && \
        curl --disable --output "${file}" --clobber --location --no-progress-meter --url "${url}" && \
        verify_download "${expected}" "${file}" ; \
    } && \
    export ARCH="$(decide_arch)" && \
RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \
    --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \
    set -x && \
    # Update from the network and keep cache
    rm -f /etc/apt/apt.conf.d/docker-clean && \
    apt-get update && \
    # Install locales
    apt-get -y --no-install-recommends install locales && \
    printf -- "en_US.UTF-8 UTF-8\n" > /etc/locale.gen && \
    locale-gen en_US.UTF-8 && \
    # Install required distro packages
    apt-get -y --no-install-recommends install curl ca-certificates file binutils xz-utils && \
    # Install s6
    _file="/tmp/s6-overlay-noarch.tar.xz" && \
    download_expected_file s6 noarch "${_file}" && \
    tar -C / -xpf "${_file}" && rm -f "${_file}" && \
    _file="/tmp/s6-overlay-${ARCH}.tar.xz" && \
    download_expected_file s6 "${TARGETARCH}" "${_file}" && \
    tar -C / -xpf "${_file}" && rm -f "${_file}" && \
    # Install file
    apt-get -y --no-install-recommends install file && \
    # Installed s6 (using COPY earlier)
    file -L /command/s6-overlay-suexec && \
    # Install ffmpeg
    _file="/tmp/ffmpeg-${ARCH}.tar.xz" && \
    download_expected_file ffmpeg "${TARGETARCH}" "${_file}" && \
    tar -xvvpf "${_file}" --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" "ffprobe" && rm -f "${_file}" && \
    # Installed ffmpeg (using COPY earlier)
    /usr/local/bin/ffmpeg -version && \
    file /usr/local/bin/ff* && \
    # Clean up
    apt-get -y autoremove --purge curl file binutils xz-utils && \
    rm -rf /var/lib/apt/lists/* && \
    rm -rf /var/cache/apt/* && \
    rm -rf /tmp/*

# Install dependencies we keep
RUN set -x && \
    apt-get update && \
    # Clean up file
    apt-get -y autoremove --purge file && \
    # Install dependencies we keep
    # Install required distro packages
    apt-get -y --no-install-recommends install \
        libjpeg62-turbo \
@@ -131,27 +277,29 @@ RUN set -x && \
        python3 \
        python3-wheel \
        redis-server \
    && apt-get -y autoclean && \
    rm -rf /var/lib/apt/lists/* && \
    rm -rf /var/cache/apt/* && \
        curl \
        less \
    && \
    # Clean up
    apt-get -y autopurge && \
    apt-get -y autoclean && \
    rm -rf /tmp/*

# Copy over pip.conf to use piwheels
COPY pip.conf /etc/pip.conf

# Add Pipfile
COPY Pipfile /app/Pipfile

# Do not include compiled byte-code
ENV PIP_NO_COMPILE=1 \
    PIP_NO_CACHE_DIR=1 \
    PIP_ROOT_USER_ACTION='ignore'

# Switch workdir to the app
WORKDIR /app

# Set up the app
RUN set -x && \
RUN --mount=type=tmpfs,target=/cache \
    --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \
    --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \
    --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \
    --mount=type=bind,source=Pipfile,target=/app/Pipfile \
    set -x && \
    # Update from the network and keep cache
    rm -f /etc/apt/apt.conf.d/docker-clean && \
    apt-get update && \
    # Install required build packages
    apt-get -y --no-install-recommends install \
@@ -172,10 +320,11 @@ RUN set -x && \
    useradd -M -d /app -s /bin/false -g app app && \
    # Install non-distro packages
    cp -at /tmp/ "${HOME}" && \
    PIPENV_VERBOSITY=64 HOME="/tmp/${HOME#/}" pipenv install --system --skip-lock && \
    HOME="/tmp/${HOME#/}" \
    XDG_CACHE_HOME='/cache' \
    PIPENV_VERBOSITY=64 \
    pipenv install --system --skip-lock && \
    # Clean up
    rm /app/Pipfile && \
    pipenv --clear && \
    apt-get -y autoremove --purge \
        default-libmysqlclient-dev \
        g++ \
@@ -189,12 +338,9 @@ RUN set -x && \
        python3-pip \
        zlib1g-dev \
    && \
    apt-get -y autoremove && \
    apt-get -y autopurge && \
    apt-get -y autoclean && \
    rm -rf /var/lib/apt/lists/* && \
    rm -rf /var/cache/apt/* && \
    rm -rf /tmp/*
    rm -v -rf /tmp/*

# Copy app
COPY tubesync /app
@@ -212,24 +358,21 @@ RUN set -x && \
    mkdir -v -p /config/media && \
    mkdir -v -p /config/cache/pycache && \
    mkdir -v -p /downloads/audio && \
    mkdir -v -p /downloads/video


# Append software versions
RUN set -x && \
    /usr/local/bin/ffmpeg -version && \
    FFMPEG_VERSION=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \
    test -n "${FFMPEG_VERSION}" && \
    printf -- "ffmpeg_version = '%s'\n" "${FFMPEG_VERSION}" >> /app/common/third_party_versions.py
    mkdir -v -p /downloads/video && \
    # Append software versions
    ffmpeg_version=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \
    test -n "${ffmpeg_version}" && \
    printf -- "ffmpeg_version = '%s'\n" "${ffmpeg_version}" >> /app/common/third_party_versions.py

# Copy root
COPY config/root /

# Create a healthcheck
HEALTHCHECK --interval=1m --timeout=10s CMD /app/healthcheck.py http://127.0.0.1:8080/healthcheck
HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"]

# ENVS and ports
ENV PYTHONPATH="/app" PYTHONPYCACHEPREFIX="/config/cache/pycache"
ENV PYTHONPATH="/app" \
    PYTHONPYCACHEPREFIX="/config/cache/pycache"
EXPOSE 4848

# Volumes
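A note on the arch handling above: Docker's `TARGETARCH` values are translated into each upstream project's naming by the small `decide_arch` case statements, which appear several times in slightly different forms. The same translation, restated in Python purely for clarity (illustrative only, not part of the image):

```python
# Restating the Dockerfile's decide_arch case statements in Python, purely
# to clarify the naming translation (illustrative, not part of the image).
S6_ARCH = {'amd64': 'x86_64', 'arm64': 'aarch64', 'armv7l': 'arm'}
FFMPEG_ARCH = {'amd64': 'linux64', 'arm64': 'linuxarm64'}

def decide_arch(targetarch: str, table: dict) -> str:
    # Fall back to the input value, as the shell functions do
    return table.get(targetarch, targetarch)

assert decide_arch('arm64', S6_ARCH) == 'aarch64'
assert decide_arch('amd64', FFMPEG_ARCH) == 'linux64'
```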
14	README.md
@@ -138,6 +138,11 @@ services:
      - PGID=1000
```

> [!IMPORTANT]
> If the `/downloads` directory is mounted from a [Samba volume](https://docs.docker.com/engine/storage/volumes/#create-cifssamba-volumes), be sure to also supply the `uid` and `gid` mount parameters in the driver options.
> These must be matched to the `PUID` and `PGID` values, which were specified as environment variables.
>
> Matching these user and group ID numbers prevents issues when executing file actions, such as writing metadata. See [this issue](https://github.com/meeb/tubesync/issues/616#issuecomment-2593458282) for details.

## Optional authentication

@@ -320,7 +325,7 @@ Notable libraries and software used:
* [django-sass](https://github.com/coderedcorp/django-sass/)
* The container bundles with `s6-init` and `nginx`

See the [Pipefile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a full list.
See the [Pipfile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a full list.

### Can I get access to the full Django admin?

@@ -348,7 +353,12 @@ etc.). Configuration of this is beyond the scope of this README.

### What architectures does the container support?

Just `amd64` for the moment. Others may be made available if there is demand.
Only two are supported, for the moment:
- `amd64` (most desktop PCs and servers)
- `arm64` (modern ARM computers, such as the Raspberry Pi 3 or later)

Others may be made available, if there is demand.

### The pipenv install fails with "Locking failed"!
@@ -2,6 +2,7 @@ daemon off;

user app;
worker_processes auto;
worker_cpu_affinity auto;
pid /run/nginx.pid;

events {

@@ -2,4 +2,4 @@

cd /

/usr/sbin/nginx
exec /usr/sbin/nginx

@@ -1,4 +1,4 @@
#!/command/with-contenv bash

exec s6-setuidgid app \
exec nice -n "${TUBESYNC_NICE:-1}" s6-setuidgid app \
    /usr/bin/python3 /app/manage.py process_tasks
@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/python3
'''

Perform an HTTP request to a URL and exit with an exit code of 1 if the
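The docstring is truncated in this view. For orientation, a minimal sketch of what a healthcheck script with this contract can look like, assuming it takes the URL as its only argument and exits non-zero on failure; the real `/app/healthcheck.py` may differ:

```python
#!/usr/bin/python3
# Minimal sketch of a container healthcheck script (assumed behavior based on
# the truncated docstring above; the real /app/healthcheck.py may differ).
import sys
from urllib.error import URLError
from urllib.request import urlopen

def main() -> int:
    if len(sys.argv) != 2:
        return 1
    try:
        with urlopen(sys.argv[1], timeout=5) as response:
            # Any non-2xx status counts as unhealthy
            return 0 if 200 <= response.status < 300 else 1
    except (URLError, OSError):
        return 1

if __name__ == '__main__':
    sys.exit(main())
```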
@@ -3,6 +3,25 @@ from django.db import models
from typing import Any, Optional, Dict
from django.utils.translation import gettext_lazy as _


# as stolen from:
# - https://wiki.sponsor.ajay.app/w/Types
# - https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/postprocessor/sponsorblock.py
#
# The spacing is a little odd, it is for easy copy/paste selection.
# Please don't change it.
# Every possible category fits in a string < 128 characters
class SponsorBlock_Category(models.TextChoices):
    SPONSOR = 'sponsor', _( 'Sponsor' )
    INTRO = 'intro', _( 'Intermission/Intro Animation' )
    OUTRO = 'outro', _( 'Endcards/Credits' )
    SELFPROMO = 'selfpromo', _( 'Unpaid/Self Promotion' )
    PREVIEW = 'preview', _( 'Preview/Recap' )
    FILLER = 'filler', _( 'Filler Tangent' )
    INTERACTION = 'interaction', _( 'Interaction Reminder' )
    MUSIC_OFFTOPIC = 'music_offtopic', _( 'Non-Music Section' )


# this is a form field!
class CustomCheckboxSelectMultiple(CheckboxSelectMultiple):
    template_name = 'widgets/checkbox_select.html'
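For readers unfamiliar with Django's `TextChoices`: the enum keeps the same `(value, label)` pairs as the old plain tuple while also providing named constants. A hedged sketch (assumes the `sync` app is importable):

```python
# Illustrative only: how the enum relates to the old tuple of pairs.
from sync.fields import SponsorBlock_Category

# Named constant compares equal to its stored value
assert SponsorBlock_Category.SPONSOR == 'sponsor'

# .choices yields (value, label) pairs, equivalent to the old
# SPONSORBLOCK_CATEGORIES_CHOICES tuple on the Source model
for value, label in SponsorBlock_Category.choices:
    print(f'{value}: {label}')
```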
@@ -32,24 +51,28 @@ class CustomCheckboxSelectMultiple(CheckboxSelectMultiple):
class CommaSepChoiceField(models.Field):
    "Implements comma-separated storage of lists"

    def __init__(self, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, *args, **kwargs):
        self.separator = separator
    # If 'text' isn't correct add the vendor override here.
    _DB_TYPES = {}

    def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, **kwargs):
        super().__init__(*args, **kwargs)
        self.separator = str(separator)
        self.possible_choices = possible_choices
        self.selected_choices = []
        self.allow_all = allow_all
        self.all_label = all_label
        self.all_choice = all_choice
        super().__init__(*args, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if self.separator != ",":
        if ',' != self.separator:
            kwargs['separator'] = self.separator
        kwargs['possible_choices'] = self.possible_choices
        return name, path, args, kwargs

    def db_type(self, connection):
        return 'text'
        value = self._DB_TYPES.get(connection.vendor, None)
        return value if value is not None else 'text'

    def get_my_choices(self):
        choiceArray = []

@@ -60,7 +83,7 @@ class CommaSepChoiceField(models.Field):

        for t in self.possible_choices:
            choiceArray.append(t)

        return choiceArray

    def formfield(self, **kwargs):

@@ -72,21 +95,13 @@ class CommaSepChoiceField(models.Field):
            'label': '',
            'required': False}
        defaults.update(kwargs)
        #del defaults.required
        return super().formfield(**defaults)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # Only include kwarg if it's not the default
        if self.separator != ",":
            kwargs['separator'] = self.separator
        return name, path, args, kwargs

    def from_db_value(self, value, expr, conn):
        if value is None:
        if 0 == len(value) or value is None:
            self.selected_choices = []
        else:
            self.selected_choices = value.split(",")
            self.selected_choices = value.split(self.separator)

        return self

@@ -97,7 +112,7 @@ class CommaSepChoiceField(models.Field):
            return ""

        if self.all_choice not in value:
            return ",".join(value)
            return self.separator.join(value)
        else:
            return self.all_choice
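A sketch of the round-trip this field implements, with plain functions standing in for the `get_prep_value()` and `from_db_value()` methods (illustrative values only):

```python
# Sketch of the storage round-trip (illustrative; not part of the diff).
# A list of selected choices is stored as one separator-joined text column,
# with a special all_choice token standing in for "every option selected".
separator = ','
all_choice = 'all'

def to_db(selected):   # mirrors get_prep_value()
    if not selected:
        return ''
    if all_choice not in selected:
        return separator.join(selected)
    return all_choice

def from_db(value):    # mirrors from_db_value()
    if not value:
        return []
    return value.split(separator)

assert to_db(['sponsor', 'intro']) == 'sponsor,intro'
assert from_db('sponsor,intro') == ['sponsor', 'intro']
assert to_db(['all', 'sponsor']) == 'all'
```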
@@ -0,0 +1,19 @@
# Generated by Django 3.2.25 on 2025-01-29 06:14

from django.db import migrations
import sync.fields


class Migration(migrations.Migration):

    dependencies = [
        ('sync', '0026_alter_source_sub_langs'),
    ]

    operations = [
        migrations.AlterField(
            model_name='source',
            name='sponsorblock_categories',
            field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], verbose_name=''),
        ),
    ]
@@ -14,16 +14,18 @@ from django.core.validators import RegexValidator
from django.utils.text import slugify
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from common.logger import log
from common.errors import NoFormatException
from common.utils import clean_filename, clean_emoji
from .youtube import (get_media_info as get_youtube_media_info,
                      download_media as download_youtube_media,
                      get_channel_image_info as get_youtube_channel_image_info)
from .utils import seconds_to_timestr, parse_media_format
from .utils import (seconds_to_timestr, parse_media_format, filter_response,
                    write_text_file, mkdir_p, directory_and_stem, glob_quote)
from .matching import (get_best_combined_format, get_best_audio_format,
                       get_best_video_format)
from .mediaservers import PlexMediaServer
from .fields import CommaSepChoiceField
from .fields import CommaSepChoiceField, SponsorBlock_Category

media_file_storage = FileSystemStorage(location=str(settings.DOWNLOAD_ROOT), base_url='/media-data/')

@@ -114,21 +116,9 @@ class Source(models.Model):
    EXTENSION_MKV = 'mkv'
    EXTENSIONS = (EXTENSION_M4A, EXTENSION_OGG, EXTENSION_MKV)

    # as stolen from: https://wiki.sponsor.ajay.app/w/Types / https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/postprocessor/sponsorblock.py
    SPONSORBLOCK_CATEGORIES_CHOICES = (
        ('sponsor', 'Sponsor'),
        ('intro', 'Intermission/Intro Animation'),
        ('outro', 'Endcards/Credits'),
        ('selfpromo', 'Unpaid/Self Promotion'),
        ('preview', 'Preview/Recap'),
        ('filler', 'Filler Tangent'),
        ('interaction', 'Interaction Reminder'),
        ('music_offtopic', 'Non-Music Section'),
    )

    sponsorblock_categories = CommaSepChoiceField(
        _(''),
        possible_choices=SPONSORBLOCK_CATEGORIES_CHOICES,
        possible_choices=SponsorBlock_Category.choices,
        all_choice='all',
        allow_all=True,
        all_label='(all options)',

@@ -537,7 +527,7 @@
    def get_image_url(self):
        if self.source_type == self.SOURCE_TYPE_YOUTUBE_PLAYLIST:
            raise SuspiciousOperation('This source is a playlist so it doesn\'t have thumbnail.')

        return get_youtube_channel_image_info(self.url)

@@ -589,6 +579,7 @@
            'key': 'SoMeUnIqUiD',
            'format': '-'.join(fmt),
            'playlist_title': 'Some Playlist Title',
            'video_order': '01',
            'ext': self.extension,
            'resolution': self.source_resolution if self.source_resolution else '',
            'height': '720' if self.source_resolution else '',

@@ -966,7 +957,7 @@

    def get_best_video_format(self):
        return get_best_video_format(self)

    def get_format_str(self):
        '''
        Returns a youtube-dl compatible format string for the best matches

@@ -991,7 +982,7 @@
        else:
            return False
        return False

    def get_display_format(self, format_str):
        '''
        Returns a tuple used in the format component of the output filename. This

@@ -1128,6 +1119,7 @@
            'key': self.key,
            'format': '-'.join(display_format['format']),
            'playlist_title': self.playlist_title,
            'video_order': self.get_episode_str(True),
            'ext': self.source.extension,
            'resolution': display_format['resolution'],
            'height': display_format['height'],

@@ -1143,8 +1135,39 @@
    def has_metadata(self):
        return self.metadata is not None

    @property
    def reduce_data(self):
        try:
            from common.logger import log
            from common.utils import json_serial

            old_mdl = len(self.metadata or "")
            data = json.loads(self.metadata or "{}")
            compact_json = json.dumps(data, separators=(',', ':'), default=json_serial)

            filtered_data = filter_response(data, True)
            filtered_json = json.dumps(filtered_data, separators=(',', ':'), default=json_serial)
        except Exception as e:
            log.exception('reduce_data: %s', e)
        else:
            # log the results of filtering / compacting on metadata size
            new_mdl = len(compact_json)
            if old_mdl > new_mdl:
                delta = old_mdl - new_mdl
                log.info(f'{self.key}: metadata compacted by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})')
            new_mdl = len(filtered_json)
            if old_mdl > new_mdl:
                delta = old_mdl - new_mdl
                log.info(f'{self.key}: metadata reduced by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})')
            if getattr(settings, 'SHRINK_OLD_MEDIA_METADATA', False):
                self.metadata = filtered_json

    @property
    def loaded_metadata(self):
        if getattr(settings, 'SHRINK_OLD_MEDIA_METADATA', False):
            self.reduce_data
        try:
            data = json.loads(self.metadata)
            if not isinstance(data, dict):
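Why `separators=(',', ':')` in `reduce_data` shrinks the stored metadata: `json.dumps()` inserts a space after every `,` and `:` by default, and yt-dlp metadata contains many thousands of them. A self-contained illustration:

```python
# Compact separators drop the default spaces after ',' and ':' without
# changing the decoded data. Illustrative only.
import json

data = {'id': 'abc123', 'formats': [{'format_id': '251', 'acodec': 'opus'}]}
default_json = json.dumps(data)
compact_json = json.dumps(data, separators=(',', ':'))

print(len(default_json), len(compact_json))  # the compact form is smaller
assert json.loads(default_json) == json.loads(compact_json)
```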
@@ -1263,20 +1286,26 @@

    @property
    def directory_path(self):
        dirname = self.source.directory_path / self.filename
        return dirname.parent
        return self.filepath.parent

    @property
    def filepath(self):
        return self.source.directory_path / self.filename

    @property
    def thumbname(self):
    def filename_prefix(self):
        if self.downloaded and self.media_file:
            filename = self.media_file.path
        else:
            filename = self.filename
        # The returned prefix should not contain any directories.
        # So, we do not care about the different directories
        # used for filename in the cases above.
        prefix, ext = os.path.splitext(os.path.basename(filename))
        return prefix

    @property
    def thumbname(self):
        prefix = self.filename_prefix()
        return f'{prefix}.jpg'

    @property

@@ -1285,26 +1314,18 @@

    @property
    def nfoname(self):
        if self.downloaded and self.media_file:
            filename = self.media_file.path
        else:
            filename = self.filename
        prefix, ext = os.path.splitext(os.path.basename(filename))
        prefix = self.filename_prefix()
        return f'{prefix}.nfo'

    @property
    def nfopath(self):
        return self.directory_path / self.nfoname

    @property
    def jsonname(self):
        if self.downloaded and self.media_file:
            filename = self.media_file.path
        else:
            filename = self.filename
        prefix, ext = os.path.splitext(os.path.basename(filename))
        prefix = self.filename_prefix()
        return f'{prefix}.info.json'

    @property
    def jsonpath(self):
        return self.directory_path / self.jsonname

@@ -1373,8 +1394,7 @@
        nfo.append(season)
        # episode = number of video in the year
        episode = nfo.makeelement('episode', {})
        episode_number = self.calculate_episode_number()
        episode.text = str(episode_number) if episode_number else ''
        episode.text = self.get_episode_str()
        episode.tail = '\n '
        nfo.append(episode)
        # ratings = media metadata youtube rating

@@ -1387,7 +1407,7 @@
        rating_attrs = OrderedDict()
        rating_attrs['name'] = 'youtube'
        rating_attrs['max'] = '5'
        rating_attrs['default'] = 'True'
        rating_attrs['default'] = 'true'
        rating = nfo.makeelement('rating', rating_attrs)
        rating.text = '\n '
        rating.append(value)

@@ -1395,7 +1415,8 @@
        rating.tail = '\n '
        ratings = nfo.makeelement('ratings', {})
        ratings.text = '\n '
        ratings.append(rating)
        if self.rating is not None:
            ratings.append(rating)
        ratings.tail = '\n '
        nfo.append(ratings)
        # plot = media metadata description

@@ -1412,7 +1433,8 @@
        mpaa = nfo.makeelement('mpaa', {})
        mpaa.text = str(self.age_limit)
        mpaa.tail = '\n '
        nfo.append(mpaa)
        if self.age_limit and self.age_limit > 0:
            nfo.append(mpaa)
        # runtime = media metadata duration in seconds
        runtime = nfo.makeelement('runtime', {})
        runtime.text = str(self.duration)
@@ -1524,6 +1546,89 @@
                return position_counter
            position_counter += 1

    def get_episode_str(self, use_padding=False):
        episode_number = self.calculate_episode_number()
        if not episode_number:
            return ''

        if use_padding:
            return f'{episode_number:02}'

        return str(episode_number)

    def rename_files(self):
        if self.downloaded and self.media_file:
            old_video_path = Path(self.media_file.path)
            new_video_path = Path(get_media_file_path(self, None))
            if old_video_path.exists() and not new_video_path.exists():
                old_video_path = old_video_path.resolve(strict=True)

                # move video to destination
                mkdir_p(new_video_path.parent)
                log.debug(f'{self!s}: {old_video_path!s} => {new_video_path!s}')
                old_video_path.rename(new_video_path)
                log.info(f'Renamed video file for: {self!s}')

                # collect the list of files to move
                # this should not include the video we just moved
                (old_prefix_path, old_stem) = directory_and_stem(old_video_path)
                other_paths = list(old_prefix_path.glob(glob_quote(old_stem) + '*'))
                log.info(f'Collected {len(other_paths)} other paths for: {self!s}')

                # adopt orphaned files, if possible
                media_format = str(self.source.media_format)
                top_dir_path = Path(self.source.directory_path)
                if '{key}' in media_format:
                    fuzzy_paths = list(top_dir_path.rglob('*' + glob_quote(str(self.key)) + '*'))
                    log.info(f'Collected {len(fuzzy_paths)} fuzzy paths for: {self!s}')

                if new_video_path.exists():
                    new_video_path = new_video_path.resolve(strict=True)

                    # update the media_file in the db
                    self.media_file.name = str(new_video_path.relative_to(self.media_file.storage.location))
                    self.save()
                    log.info(f'Updated "media_file" in the database for: {self!s}')

                    (new_prefix_path, new_stem) = directory_and_stem(new_video_path)

                    # move and change names to match stem
                    for other_path in other_paths:
                        old_file_str = other_path.name
                        new_file_str = new_stem + old_file_str[len(old_stem):]
                        new_file_path = Path(new_prefix_path / new_file_str)
                        log.debug(f'Considering replace for: {self!s}\n\t{other_path!s}\n\t{new_file_path!s}')
                        # it should exist, but check anyway
                        if other_path.exists():
                            log.debug(f'{self!s}: {other_path!s} => {new_file_path!s}')
                            other_path.replace(new_file_path)

                    for fuzzy_path in fuzzy_paths:
                        (fuzzy_prefix_path, fuzzy_stem) = directory_and_stem(fuzzy_path)
                        old_file_str = fuzzy_path.name
                        new_file_str = new_stem + old_file_str[len(fuzzy_stem):]
                        new_file_path = Path(new_prefix_path / new_file_str)
                        log.debug(f'Considering rename for: {self!s}\n\t{fuzzy_path!s}\n\t{new_file_path!s}')
                        # it quite possibly was renamed already
                        if fuzzy_path.exists() and not new_file_path.exists():
                            log.debug(f'{self!s}: {fuzzy_path!s} => {new_file_path!s}')
                            fuzzy_path.rename(new_file_path)

                    # The thumbpath inside the .nfo file may have changed
                    if self.source.write_nfo and self.source.copy_thumbnails:
                        write_text_file(new_prefix_path / self.nfopath.name, self.nfoxml)
                        log.info(f'Wrote new ".nfo" file for: {self!s}')

                    # try to remove empty dirs
                    parent_dir = old_video_path.parent
                    try:
                        while parent_dir.is_dir():
                            parent_dir.rmdir()
                            log.info(f'Removed empty directory: {parent_dir!s}')
                            parent_dir = parent_dir.parent
                    except OSError as e:
                        pass


class MediaServer(models.Model):
    '''
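The stem-swap in `rename_files()` moves a video's sidecar files with it by reattaching everything after the old stem to the new stem. A minimal sketch with made-up filenames:

```python
# Sketch of the stem-swap used above: a sidecar file keeps everything after
# the old stem, reattached to the new stem. Illustrative values only.
old_stem = 'Old Title [abc123]'
new_stem = 'New Title [abc123]'

for sidecar in ('Old Title [abc123].jpg', 'Old Title [abc123].info.json'):
    renamed = new_stem + sidecar[len(old_stem):]
    print(renamed)
# -> 'New Title [abc123].jpg'
# -> 'New Title [abc123].info.json'
```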
@@ -13,7 +13,8 @@ from .tasks import (delete_task_by_source, delete_task_by_media, index_source_task,
                    download_media_thumbnail, download_media_metadata,
                    map_task_to_instance, check_source_directory_exists,
                    download_media, rescan_media_server, download_source_images,
                    save_all_media_for_source, get_media_metadata_task)
                    save_all_media_for_source, rename_all_media_for_source,
                    get_media_metadata_task)
from .utils import delete_file, glob_quote
from .filtering import filter_media

@@ -54,7 +55,7 @@ def source_post_save(sender, instance, created, **kwargs):
    if instance.source_type != Source.SOURCE_TYPE_YOUTUBE_PLAYLIST and instance.copy_channel_images:
        download_source_images(
            str(instance.pk),
            priority=0,
            priority=2,
            verbose_name=verbose_name.format(instance.name)
        )
    if instance.index_schedule > 0:

@@ -69,10 +70,28 @@ def source_post_save(sender, instance, created, **kwargs):
            verbose_name=verbose_name.format(instance.name),
            remove_existing_tasks=True
        )
    # Check settings before any rename tasks are scheduled
    rename_sources_setting = settings.RENAME_SOURCES or list()
    create_rename_task = (
        (
            instance.directory and
            instance.directory in rename_sources_setting
        ) or
        settings.RENAME_ALL_SOURCES
    )
    if create_rename_task:
        verbose_name = _('Renaming all media for source "{}"')
        rename_all_media_for_source(
            str(instance.pk),
            queue=str(instance.pk),
            priority=1,
            verbose_name=verbose_name.format(instance.name),
            remove_existing_tasks=False
        )
    verbose_name = _('Checking all media for source "{}"')
    save_all_media_for_source(
        str(instance.pk),
        priority=0,
        priority=2,
        verbose_name=verbose_name.format(instance.name),
        remove_existing_tasks=True
    )
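The rename gate above reads two settings. A hypothetical settings fragment (setting names taken from the signal; the values are examples only) that would cause `create_rename_task` to evaluate true:

```python
# Example-only values; only the setting names come from the signal above.
RENAME_SOURCES = ['my-channel']   # rename media for sources in these directories
RENAME_ALL_SOURCES = False        # or flip this on to rename every source
```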
@@ -175,7 +194,7 @@ def media_post_save(sender, instance, created, **kwargs):
        download_media(
            str(instance.pk),
            queue=str(instance.source.pk),
            priority=15,
            priority=10,
            verbose_name=verbose_name.format(instance.name),
            remove_existing_tasks=True
        )
@@ -26,7 +26,7 @@ from common.errors import NoMediaException, DownloadFailedException
from common.utils import json_serial
from .models import Source, Media, MediaServer
from .utils import (get_remote_image, resize_image_to_height, delete_file,
                    write_text_file)
                    write_text_file, filter_response)
from .filtering import filter_media


@@ -51,6 +51,7 @@ def map_task_to_instance(task):
    'sync.tasks.download_media': Media,
    'sync.tasks.download_media_metadata': Media,
    'sync.tasks.save_all_media_for_source': Source,
    'sync.tasks.rename_all_media_for_source': Source,
}
MODEL_URL_MAP = {
    Source: 'sync:source',

@@ -304,7 +305,10 @@ def download_media_metadata(media_id):
        return
    source = media.source
    metadata = media.index_metadata()
    media.metadata = json.dumps(metadata, default=json_serial)
    response = metadata
    if getattr(settings, 'SHRINK_NEW_MEDIA_METADATA', False):
        response = filter_response(metadata, True)
    media.metadata = json.dumps(response, separators=(',', ':'), default=json_serial)
    upload_date = media.upload_date
    # Media must have a valid upload date
    if upload_date:

@@ -439,14 +443,26 @@ def download_media(media_id):
        media.downloaded_format = 'audio'
    media.save()
    # If selected, copy the thumbnail over as well
    if media.source.copy_thumbnails and media.thumb:
        log.info(f'Copying media thumbnail from: {media.thumb.path} '
                 f'to: {media.thumbpath}')
        copyfile(media.thumb.path, media.thumbpath)
    if media.source.copy_thumbnails:
        if not media.thumb_file_exists:
            thumbnail_url = media.thumbnail
            if thumbnail_url:
                args = ( str(media.pk), thumbnail_url, )
                delete_task_by_media('sync.tasks.download_media_thumbnail', args)
                if download_media_thumbnail.now(*args):
                    media.refresh_from_db()
        if media.thumb_file_exists:
            log.info(f'Copying media thumbnail from: {media.thumb.path} '
                     f'to: {media.thumbpath}')
            copyfile(media.thumb.path, media.thumbpath)
    # If selected, write an NFO file
    if media.source.write_nfo:
        log.info(f'Writing media NFO file to: {media.nfopath}')
        write_text_file(media.nfopath, media.nfoxml)
        try:
            write_text_file(media.nfopath, media.nfoxml)
        except PermissionError as e:
            log.warn(f'A permissions problem occurred when writing the new media NFO file: {e.msg}')
            pass
    # Schedule a task to update media servers
    for mediaserver in MediaServer.objects.all():
        log.info(f'Scheduling media server updates')

@@ -501,3 +517,18 @@ def save_all_media_for_source(source_id):
    # flags may need to be recalculated
    for media in Media.objects.filter(source=source):
        media.save()


@background(schedule=0)
def rename_all_media_for_source(source_id):
    try:
        source = Source.objects.get(pk=source_id)
    except Source.DoesNotExist:
        # Task triggered but the source no longer exists, do nothing
        log.error(f'Task rename_all_media_for_source(pk={source_id}) called but no '
                  f'source exists with ID: {source_id}')
        return
    for media in Media.objects.filter(source=source):
        media.rename_files()
@@ -73,6 +73,11 @@
        <td>Playlist title of media, if it's in a playlist</td>
        <td>Some Playlist</td>
      </tr>
      <tr>
        <td>{video_order}</td>
        <td>Episode order in playlist, if in playlist <sub><sup>(can cause issues if playlist is changed after adding)</sup></sub></td>
        <td>01</td>
      </tr>
      <tr>
        <td>{ext}</td>
        <td>File extension</td>

@@ -125,7 +125,7 @@
      </tr>
      <tr title="Database connection used by TubeSync">
        <td class="hide-on-small-only">Database</td>
        <td><span class="hide-on-med-and-up">Database<br></span><strong>{{ database_connection }}</strong></td>
        <td><span class="hide-on-med-and-up">Database<br></span><strong>{{ database_connection }}{% if database_filesize %} {{ database_filesize|filesizeformat }}{% endif %}</strong></td>
      </tr>
    </table>
  </div>
@@ -1,4 +1,4 @@
{% extends 'base.html' %}{% load static %}
{% extends 'base.html' %}{% load static %}{% load humanize %}

{% block headtitle %}Media - {{ media.key }}{% endblock %}

@@ -142,12 +142,12 @@
      <tr title="The available media formats">
        <td class="hide-on-small-only">Available formats</td>
        <td><span class="hide-on-med-and-up">Available formats<br></span>
          {% for format in media.formats %}
          {% for format in media.iter_formats %}
          <div>
            ID: <strong>{{ format.format_id }}</strong>
            {% if format.vcodec|lower != 'none' %}, {{ format.format_note }} ({{ format.width }}x{{ format.height }}), fps:{{ format.fps|lower }}, video:{{ format.vcodec }} @{{ format.tbr }}k{% endif %}
            {% if format.acodec|lower != 'none' %}, audio:{{ format.acodec }} @{{ format.abr }}k / {{ format.asr }}Hz{% endif %}
            {% if format.format_id == combined_format or format.format_id == audio_format or format.format_id == video_format %}<strong>(matched)</strong>{% endif %}
            ID: <strong>{{ format.id }}</strong>
            {% if format.vcodec|lower != 'none' %}, {{ format.format_note }} ({{ format.width }}x{{ format.height }}), fps:{{ format.fps|lower }}, video:{{ format.vcodec }} @{{ format.vbr }}k{% endif %}
            {% if format.acodec|lower != 'none' %}, audio:{{ format.acodec }} {% if format.abr %}@{{ format.abr }}k / {% endif %}{{ format.asr|intcomma }}Hz{% if format.language_code %} [{{ format.language_code }}]{% endif %}{% if format.abr %} {{ format.format_note }}{% endif %}{% endif %}
            {% if format.id == combined_format or format.id == audio_format or format.id == video_format %}<strong>(matched)</strong>{% endif %}
          </div>
          {% empty %}
          Media has no indexed available formats
@@ -18,6 +18,7 @@ from background_task.models import Task
from .models import Source, Media
from .tasks import cleanup_old_media
from .filtering import filter_media
from .utils import filter_response


class FrontEndTestCase(TestCase):

@@ -1709,6 +1710,84 @@ class FormatMatchingTestCase(TestCase):
                         f'expected {expected_match_result}')


class ResponseFilteringTestCase(TestCase):

    def setUp(self):
        # Disable general logging for test case
        logging.disable(logging.CRITICAL)
        # Add a test source
        self.source = Source.objects.create(
            source_type=Source.SOURCE_TYPE_YOUTUBE_CHANNEL,
            key='testkey',
            name='testname',
            directory='testdirectory',
            index_schedule=3600,
            delete_old_media=False,
            days_to_keep=14,
            source_resolution=Source.SOURCE_RESOLUTION_1080P,
            source_vcodec=Source.SOURCE_VCODEC_VP9,
            source_acodec=Source.SOURCE_ACODEC_OPUS,
            prefer_60fps=False,
            prefer_hdr=False,
            fallback=Source.FALLBACK_FAIL
        )
        # Add some media
        self.media = Media.objects.create(
            key='mediakey',
            source=self.source,
            metadata='{}'
        )

    def test_metadata_20230629(self):
        self.media.metadata = all_test_metadata['20230629']
        self.media.save()

        unfiltered = self.media.loaded_metadata
        filtered = filter_response(self.media.loaded_metadata)
        self.assertIn('formats', unfiltered.keys())
        self.assertIn('formats', filtered.keys())
        # filtered 'downloader_options'
        self.assertIn('downloader_options', unfiltered['formats'][10].keys())
        self.assertNotIn('downloader_options', filtered['formats'][10].keys())
        # filtered 'http_headers'
        self.assertIn('http_headers', unfiltered['formats'][0].keys())
        self.assertNotIn('http_headers', filtered['formats'][0].keys())
        # did not lose any formats
        self.assertEqual(48, len(unfiltered['formats']))
        self.assertEqual(48, len(filtered['formats']))
        self.assertEqual(len(unfiltered['formats']), len(filtered['formats']))
        # did not remove everything with url
        self.assertIn('original_url', unfiltered.keys())
        self.assertIn('original_url', filtered.keys())
        self.assertEqual(unfiltered['original_url'], filtered['original_url'])
        # did reduce the size of the metadata
        self.assertTrue(len(str(filtered)) < len(str(unfiltered)))

        url_keys = []
        for format in unfiltered['formats']:
            for key in format.keys():
                if 'url' in key:
                    url_keys.append((format['format_id'], key, format[key],))
        unfiltered_url_keys = url_keys
        self.assertEqual(63, len(unfiltered_url_keys), msg=str(unfiltered_url_keys))

        url_keys = []
        for format in filtered['formats']:
            for key in format.keys():
                if 'url' in key:
                    url_keys.append((format['format_id'], key, format[key],))
        filtered_url_keys = url_keys
        self.assertEqual(3, len(filtered_url_keys), msg=str(filtered_url_keys))

        url_keys = []
        for lang_code, captions in filtered['automatic_captions'].items():
            for caption in captions:
                for key in caption.keys():
                    if 'url' in key:
                        url_keys.append((lang_code, caption['ext'], caption[key],))
        self.assertEqual(0, len(url_keys), msg=str(url_keys))


class TasksTestCase(TestCase):

    def setUp(self):
@@ -17,7 +17,7 @@ urlpatterns = [
    path('',
         DashboardView.as_view(),
         name='dashboard'),

    # Source URLs

    path('sources',
@@ -1,6 +1,7 @@
import os
import re
import math
from copy import deepcopy
from operator import itemgetter
from pathlib import Path
from tempfile import NamedTemporaryFile

@@ -128,6 +129,23 @@ def file_is_editable(filepath):
    return False


def directory_and_stem(arg_path):
    filepath = Path(arg_path)
    stem = Path(filepath.stem)
    while stem.suffixes and '' != stem.suffix:
        stem = Path(stem.stem)
    stem = str(stem)
    return (filepath.parent, stem,)


def mkdir_p(arg_path, mode=0o777):
    '''
    Reminder: mode only affects the last directory
    '''
    dirpath = Path(arg_path)
    return dirpath.mkdir(mode=mode, parents=True, exist_ok=True)


def write_text_file(filepath, filedata):
    if not isinstance(filedata, str):
        raise TypeError(f'filedata must be a str, got "{type(filedata)}"')
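`directory_and_stem()` strips all stacked suffixes, so a `.info.json` sidecar reduces to the same stem as its video file. A runnable illustration that mirrors the helper above:

```python
# Mirrors the directory_and_stem() helper defined above, repeated here only
# so the example is self-contained.
from pathlib import Path

def directory_and_stem(arg_path):
    filepath = Path(arg_path)
    stem = Path(filepath.stem)
    while stem.suffixes and '' != stem.suffix:
        stem = Path(stem.stem)
    return (filepath.parent, str(stem))

print(directory_and_stem('/downloads/video/Some Title [abc123].info.json'))
# -> (PosixPath('/downloads/video'), 'Some Title [abc123]')
```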
@@ -137,7 +155,8 @@ def write_text_file(filepath, filedata):
        bytes_written = f.write(filedata)
        # chmod a+r temp_file
        old_mode = new_filepath.stat().st_mode
        new_filepath.chmod(0o444 | old_mode)
        if 0o444 != (0o444 & old_mode):
            new_filepath.chmod(0o444 | old_mode)
    if not file_is_editable(new_filepath):
        new_filepath.unlink()
        raise ValueError(f'File cannot be edited or removed: {filepath}')

@@ -184,6 +203,95 @@ def normalize_codec(codec_str):
    return result


def _url_keys(arg_dict, filter_func):
    result = {}
    for key in arg_dict.keys():
        if 'url' in key:
            result.update(
                {key: filter_func(key=key, url=arg_dict[key])}
            )
    return result


def _drop_url_keys(arg_dict, key, filter_func):
    if key in arg_dict.keys():
        for val_dict in arg_dict[key]:
            for url_key, remove in _url_keys(val_dict, filter_func).items():
                if remove is True:
                    del val_dict[url_key]


def filter_response(arg_dict, copy_arg=False):
    '''
    Clean up the response so as to not store useless metadata in the database.
    '''
    response_dict = arg_dict
    # raise an exception for an unexpected argument type
    if not isinstance(response_dict, dict):
        raise TypeError(f'response_dict must be a dict, got "{type(response_dict)}"')

    if copy_arg:
        response_dict = deepcopy(arg_dict)

    # optimize the empty case
    if not response_dict:
        return response_dict

    # beginning of formats cleanup {{{
    # drop urls that expire, or restrict IPs
    def drop_format_url(**kwargs):
        url = kwargs['url']
        return (
            url
            and '://' in url
            and (
                '/ip/' in url
                or 'ip=' in url
                or '/expire/' in url
                or 'expire=' in url
            )
        )

    # these format keys are not useful to us
    drop_keys = frozenset((
        'downloader_options',
        'fragments',
        'http_headers',
        '__needs_testing',
        '__working',
    ))
    for key in frozenset(('formats', 'requested_formats',)):
        _drop_url_keys(response_dict, key, drop_format_url)
        if key in response_dict.keys():
            for format in response_dict[key]:
                for drop_key in drop_keys:
                    if drop_key in format.keys():
                        del format[drop_key]
    # end of formats cleanup }}}

    # beginning of subtitles cleanup {{{
    # drop urls that expire
    def drop_subtitles_url(**kwargs):
        url = kwargs['url']
        return (
            url
            and '://' in url
            and (
                '/expire/' in url
                or '&expire=' in url
            )
        )

    for key in frozenset(('subtitles', 'automatic_captions',)):
        if key in response_dict.keys():
            key_dict = response_dict[key]
            for lang_code in key_dict:
                _drop_url_keys(key_dict, lang_code, drop_subtitles_url)
    # end of subtitles cleanup }}}

    return response_dict


def parse_media_format(format_dict):
    '''
    This parser primarily adapts the format dict returned by youtube-dl into a
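A small usage sketch of `filter_response()` on a fabricated yt-dlp-style dict (field values are invented for illustration): expiring format URLs and bulky keys are dropped, while stable top-level fields survive:

```python
# Fabricated input; only the key names follow the yt-dlp response shape.
response = {
    'original_url': 'https://www.youtube.com/watch?v=abc123',
    'formats': [{
        'format_id': '251',
        'url': 'https://example.invalid/videoplayback?expire=1700000000',
        'http_headers': {'User-Agent': 'yt-dlp'},
    }],
}
filtered = filter_response(response, copy_arg=True)
assert 'url' not in filtered['formats'][0]            # expiring URL dropped
assert 'http_headers' not in filtered['formats'][0]   # bulky key dropped
assert filtered['original_url'] == response['original_url']
```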
@ -224,10 +332,13 @@ def parse_media_format(format_dict):
|
||||
format_str = f'{height}P'
|
||||
else:
|
||||
format_str = None
|
||||
|
||||
return {
|
||||
'id': format_dict.get('format_id', ''),
|
||||
'format': format_str,
|
||||
'format_note': format_dict.get('format_note', ''),
|
||||
'format_verbose': format_dict.get('format', ''),
|
||||
'language_code': format_dict.get('language', None),
|
||||
'height': height,
|
||||
'width': width,
|
||||
'vcodec': vcodec,
|
||||
@ -235,6 +346,7 @@ def parse_media_format(format_dict):
|
||||
'vbr': format_dict.get('tbr', 0),
|
||||
'acodec': acodec,
|
||||
'abr': format_dict.get('abr', 0),
|
||||
'asr': format_dict.get('asr', 0),
|
||||
'is_60fps': fps > 50,
|
||||
'is_hdr': 'HDR' in format_dict.get('format', '').upper(),
|
||||
'is_hls': is_hls,
|
||||
|
@ -14,7 +14,7 @@ from django.views.generic.detail import SingleObjectMixin
from django.core.exceptions import SuspiciousFileOperation
from django.http import HttpResponse
from django.urls import reverse_lazy
from django.db import IntegrityError
from django.db import connection, IntegrityError
from django.db.models import Q, Count, Sum, When, Case
from django.forms import Form, ValidationError
from django.utils.text import slugify
@ -85,6 +85,12 @@ class DashboardView(TemplateView):
        data['config_dir'] = str(settings.CONFIG_BASE_DIR)
        data['downloads_dir'] = str(settings.DOWNLOAD_ROOT)
        data['database_connection'] = settings.DATABASE_CONNECTION_STR
        # Add the database filesize when using db.sqlite3
        data['database_filesize'] = None
        db_name = str(connection.get_connection_params()['database'])
        db_path = pathlib.Path(db_name) if '/' == db_name[0] else None
        if db_path and 'sqlite' == connection.vendor:
            data['database_filesize'] = db_path.stat().st_size
        return data


@ -106,7 +112,7 @@ class SourcesView(ListView):
        sobj = Source.objects.get(pk=kwargs["pk"])
        if sobj is None:
            return HttpResponseNotFound()

        verbose_name = _('Index media from source "{}" once')
        index_source_task(
            str(sobj.pk),
@ -193,10 +199,15 @@ class ValidateSourceView(FormView):
        Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: ('https://www.youtube.com/playlist?list='
                                              'PL590L5WQmH8dpP0RyH5pCfIaDEdt9nk7r')
    }
    _youtube_domains = frozenset({
        'youtube.com',
        'm.youtube.com',
        'www.youtube.com',
    })
    validation_urls = {
        Source.SOURCE_TYPE_YOUTUBE_CHANNEL: {
            'scheme': 'https',
            'domains': ('m.youtube.com', 'www.youtube.com'),
            'domains': _youtube_domains,
            'path_regex': '^\/(c\/)?([^\/]+)(\/videos)?$',
            'path_must_not_match': ('/playlist', '/c/playlist'),
            'qs_args': [],
@ -205,7 +216,7 @@ class ValidateSourceView(FormView):
        },
        Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: {
            'scheme': 'https',
            'domains': ('m.youtube.com', 'www.youtube.com'),
            'domains': _youtube_domains,
            'path_regex': '^\/channel\/([^\/]+)(\/videos)?$',
            'path_must_not_match': ('/playlist', '/c/playlist'),
            'qs_args': [],
@ -214,7 +225,7 @@ class ValidateSourceView(FormView):
        },
        Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: {
            'scheme': 'https',
            'domains': ('m.youtube.com', 'www.youtube.com'),
            'domains': _youtube_domains,
            'path_regex': '^\/(playlist|watch)$',
            'path_must_not_match': (),
            'qs_args': ('list',),
@ -286,11 +297,36 @@ class ValidateSourceView(FormView):
        url = reverse_lazy('sync:add-source')
        fields_to_populate = self.prepopulate_fields.get(self.source_type)
        fields = {}
        value = self.key
        use_channel_id = (
            'youtube-channel' == self.source_type_str and
            '@' == self.key[0]
        )
        if use_channel_id:
            old_key = self.key
            old_source_type = self.source_type
            old_source_type_str = self.source_type_str

            self.source_type_str = 'youtube-channel-id'
            self.source_type = self.source_types.get(self.source_type_str, None)
            index_url = Source.create_index_url(self.source_type, self.key, 'videos')
            try:
                self.key = youtube.get_channel_id(
                    index_url.replace('/channel/', '/')
                )
            except youtube.YouTubeError as e:
                # It did not work, revert to previous behavior
                self.key = old_key
                self.source_type = old_source_type
                self.source_type_str = old_source_type_str

        for field in fields_to_populate:
            if field == 'source_type':
                fields[field] = self.source_type
            elif field in ('key', 'name', 'directory'):
            elif field == 'key':
                fields[field] = self.key
            elif field in ('name', 'directory'):
                fields[field] = value
        return append_uri_params(url, fields)


@ -318,7 +354,7 @@ class EditSourceMixin:
        obj = form.save(commit=False)
        source_type = form.cleaned_data['media_format']
        example_media_file = obj.get_example_media_format()

        if example_media_file == '':
            form.add_error(
                'media_format',
@ -488,20 +524,15 @@ class MediaView(ListView):
        return super().dispatch(request, *args, **kwargs)

    def get_queryset(self):
        q = Media.objects.all()

        if self.filter_source:
            if self.show_skipped:
                q = Media.objects.filter(source=self.filter_source)
            elif self.only_skipped:
                q = Media.objects.filter(Q(source=self.filter_source) & (Q(skip=True) | Q(manual_skip=True)))
            else:
                q = Media.objects.filter(Q(source=self.filter_source) & (Q(skip=False) & Q(manual_skip=False)))
        else:
            if self.show_skipped:
                q = Media.objects.all()
            elif self.only_skipped:
                q = Media.objects.filter(Q(skip=True)|Q(manual_skip=True))
            else:
                q = Media.objects.filter(Q(skip=False)&Q(manual_skip=False))
            q = q.filter(source=self.filter_source)
        if self.only_skipped:
            q = q.filter(Q(can_download=False) | Q(skip=True) | Q(manual_skip=True))
        elif not self.show_skipped:
            q = q.filter(Q(can_download=True) & Q(skip=False) & Q(manual_skip=False))

        return q.order_by('-published', '-created')

    def get_context_data(self, *args, **kwargs):
@ -740,18 +771,18 @@ class MediaContent(DetailView):
            pth = pth[1]
        else:
            pth = pth[0]

        # build final path
        filepth = pathlib.Path(str(settings.DOWNLOAD_ROOT) + pth)

        if filepth.exists():
            # return file
            response = FileResponse(open(filepth,'rb'))
            return response
        else:
            return HttpResponseNotFound()

        else:
            headers = {
                'Content-Type': self.object.content_type,
@ -5,10 +5,16 @@


import os
from pathlib import Path
from django.conf import settings
from copy import copy

from collections import namedtuple
from common.logger import log
from copy import deepcopy
from pathlib import Path
from tempfile import TemporaryDirectory
from urllib.parse import urlsplit, parse_qs

from django.conf import settings
from .utils import mkdir_p
import yt_dlp


@ -21,7 +27,7 @@ _youtubedl_tempdir = getattr(settings, 'YOUTUBE_DL_TEMPDIR', None)
if _youtubedl_tempdir:
    _youtubedl_tempdir = str(_youtubedl_tempdir)
    _youtubedl_tempdir_path = Path(_youtubedl_tempdir)
    _youtubedl_tempdir_path.mkdir(parents=True, exist_ok=True)
    mkdir_p(_youtubedl_tempdir_path)
    (_youtubedl_tempdir_path / '.ignore').touch(exist_ok=True)
    _paths = _defaults.get('paths', {})
    _paths.update({ 'temp': _youtubedl_tempdir, })
@ -37,7 +43,7 @@ class YouTubeError(yt_dlp.utils.DownloadError):


def get_yt_opts():
    opts = copy(_defaults)
    opts = deepcopy(_defaults)
    cookie_file = settings.COOKIES_FILE
    if cookie_file.is_file():
        cookie_file_path = str(cookie_file.resolve())
@ -45,6 +51,32 @@ def get_yt_opts():
        opts.update({'cookiefile': cookie_file_path})
    return opts

def get_channel_id(url):
    # yt-dlp --simulate --no-check-formats --playlist-items 1
    # --print 'pre_process:%(playlist_channel_id,playlist_id,channel_id)s'
    opts = get_yt_opts()
    opts.update({
        'skip_download': True,
        'simulate': True,
        'logger': log,
        'extract_flat': True,  # Change to False to get detailed info
        'check_formats': False,
        'playlist_items': '1',
    })

    with yt_dlp.YoutubeDL(opts) as y:
        try:
            response = y.extract_info(url, download=False)
        except yt_dlp.utils.DownloadError as e:
            raise YouTubeError(f'Failed to extract channel ID for "{url}": {e}') from e
        else:
            try:
                channel_id = response['channel_id']
            except Exception as e:
                raise YouTubeError(f'Failed to extract channel ID for "{url}": {e}') from e
            else:
                return channel_id

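Note: this is the helper ValidateSourceView calls above to resolve an @handle
URL to a canonical channel ID. A usage sketch (hypothetical handle; the
returned 'UC...' value depends on the channel):

    channel_id = get_channel_id('https://www.youtube.com/@SomeHandle/videos')
    # -> 'UCxxxxxxxxxxxxxxxxxxxxxx' on success; raises YouTubeError otherwise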
def get_channel_image_info(url):
    opts = get_yt_opts()
    opts.update({
@ -57,7 +89,7 @@ def get_channel_image_info(url):
    with yt_dlp.YoutubeDL(opts) as y:
        try:
            response = y.extract_info(url, download=False)

            avatar_url = None
            banner_url = None
            for thumbnail in response['thumbnails']:
@ -67,7 +99,7 @@ def get_channel_image_info(url):
                    banner_url = thumbnail['url']
                if banner_url != None and avatar_url != None:
                    break

            return avatar_url, banner_url
        except yt_dlp.utils.DownloadError as e:
            raise YouTubeError(f'Failed to extract channel info for "{url}": {e}') from e
@ -81,6 +113,8 @@ def _subscriber_only(msg='', response=None):
        return True
    if ': Join this channel' in msg:
        return True
    if 'Join this YouTube channel' in msg:
        return True
    else:
        # ignore msg entirely
        if not isinstance(response, dict):
@ -179,6 +213,23 @@ def download_media(url, media_format, extension, output_file, info_json,
            log.warn(f'[youtube-dl] unknown event: {str(event)}')

    hook.download_progress = 0

    default_opts = yt_dlp.parse_options([]).options
    pp_opts = deepcopy(default_opts)
    pp_opts.__dict__.update({
        'embedthumbnail': embed_thumbnail,
        'addmetadata': embed_metadata,
        'addchapters': True,
        'embed_infojson': False,
        'writethumbnail': False,
        'force_keyframes_at_cuts': True,
        'sponskrub': False,
    })

    if skip_sponsors:
        pp_opts.sponsorblock_mark.update('all,-chapter'.split(','))
        pp_opts.sponsorblock_remove.update(sponsor_categories or {})

    ytopts = {
        'format': media_format,
        'merge_output_format': extension,
@ -192,28 +243,48 @@ def download_media(url, media_format, extension, output_file, info_json,
        'writesubtitles': write_subtitles,
        'writeautomaticsub': auto_subtitles,
        'subtitleslangs': sub_langs.split(','),
    }
    if not sponsor_categories:
        sponsor_categories = []
    sbopt = {
        'key': 'SponsorBlock',
        'categories': sponsor_categories
    }
    ffmdopt = {
        'key': 'FFmpegMetadata',
        'add_chapters': embed_metadata,
        'add_metadata': embed_metadata
        'writethumbnail': True,
        'check_formats': False,
        'overwrites': None,
        'sleep_interval': 30,
        'max_sleep_interval': 600,
        'sleep_interval_requests': 30,
    }
    opts = get_yt_opts()
    ytopts['paths'] = opts.get('paths', {})
    output_dir = os.path.dirname(output_file)
    temp_dir_parent = output_dir
    temp_dir_prefix = '.yt_dlp-'
    if 'temp' in ytopts['paths']:
        v_key = parse_qs(urlsplit(url).query).get('v').pop()
        temp_dir_parent = ytopts['paths']['temp']
        temp_dir_prefix = f'{temp_dir_prefix}{v_key}-'
    temp_dir = TemporaryDirectory(prefix=temp_dir_prefix,dir=temp_dir_parent)
    (Path(temp_dir.name) / '.ignore').touch(exist_ok=True)
    ytopts['paths'].update({
        'home': os.path.dirname(output_file),
        'home': output_dir,
        'temp': temp_dir.name,
    })
    if embed_thumbnail:
        ytopts['postprocessors'].append({'key': 'EmbedThumbnail'})
    if skip_sponsors:
        ytopts['postprocessors'].append(sbopt)
    ytopts['postprocessors'].append(ffmdopt)

    codec_options = []
    ofn = ytopts['outtmpl']
    if 'av1-' in ofn:
        codec_options = ['-c:v', 'libsvtav1', '-preset', '8', '-crf', '35']
    elif 'vp9-' in ofn:
        codec_options = ['-c:v', 'libvpx-vp9', '-b:v', '0', '-crf', '31']
    ytopts['postprocessor_args'] = opts.get('postprocessor_args', {})
    set_ffmpeg_codec = not (
        ytopts['postprocessor_args'] and
        ytopts['postprocessor_args']['modifychapters+ffmpeg']
    )
    if set_ffmpeg_codec and codec_options:
        ytopts['postprocessor_args'].update({
            'modifychapters+ffmpeg': codec_options,
        })

    # create the post processors list
    ytopts['postprocessors'] = list(yt_dlp.get_postprocessors(pp_opts))

    opts.update(ytopts)

    with yt_dlp.YoutubeDL(opts) as y:
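Note: the post-processor chain is no longer hand-assembled from dicts like the
dropped sbopt/ffmdopt; it starts from yt-dlp's own option defaults and lets
yt-dlp build the ordered list. The pattern in isolation (flag values are
illustrative):

    import yt_dlp
    from copy import deepcopy

    default_opts = yt_dlp.parse_options([]).options  # yt-dlp's built-in defaults
    pp_opts = deepcopy(default_opts)
    pp_opts.__dict__.update({'addmetadata': True, 'addchapters': True})
    postprocessors = list(yt_dlp.get_postprocessors(pp_opts))
    # then: yt_dlp.YoutubeDL({'postprocessors': postprocessors, ...})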
@ -10,9 +10,10 @@ def get_num_workers():
        num_workers = int(os.getenv('GUNICORN_WORKERS', 3))
    except ValueError:
        num_workers = cpu_workers
    if 0 > num_workers > cpu_workers:
        num_workers = cpu_workers
    return num_workers
    if 0 < num_workers < cpu_workers:
        return num_workers
    else:
        return cpu_workers


def get_bind():
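Note: the old guard 0 > num_workers > cpu_workers is a chained comparison that
can never be true (a value cannot be below 0 and above cpu_workers at once),
so GUNICORN_WORKERS was previously returned unclamped. The rewrite only honors
values strictly inside the bounds; assuming cpu_workers = 4:

    # GUNICORN_WORKERS=2 -> 2   (0 < 2 < 4)
    # GUNICORN_WORKERS=0 -> 4   (falls back to cpu_workers)
    # GUNICORN_WORKERS=9 -> 4   (falls back to cpu_workers)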
@ -87,6 +87,20 @@ SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR = os.getenv('TUBESYNC_DIRECTORY_PREFIX', 'T
SOURCE_DOWNLOAD_DIRECTORY_PREFIX = True if SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR == 'true' else False


SHRINK_NEW_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_NEW', 'false').strip().lower()
SHRINK_NEW_MEDIA_METADATA = ( 'true' == SHRINK_NEW_MEDIA_METADATA_STR )
SHRINK_OLD_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_OLD', 'false').strip().lower()
SHRINK_OLD_MEDIA_METADATA = ( 'true' == SHRINK_OLD_MEDIA_METADATA_STR )


# TUBESYNC_RENAME_ALL_SOURCES: True or False
RENAME_ALL_SOURCES_STR = os.getenv('TUBESYNC_RENAME_ALL_SOURCES', 'False').strip().lower()
RENAME_ALL_SOURCES = ( 'true' == RENAME_ALL_SOURCES_STR )
# TUBESYNC_RENAME_SOURCES: A comma-separated list of Source directories
RENAME_SOURCES_STR = os.getenv('TUBESYNC_RENAME_SOURCES', '')
RENAME_SOURCES = RENAME_SOURCES_STR.split(',') if RENAME_SOURCES_STR else None


VIDEO_HEIGHT_CUTOFF = int(os.getenv("TUBESYNC_VIDEO_HEIGHT_CUTOFF", "240"))

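Note: these flags share one convention — only the exact string 'true',
case-insensitively and ignoring surrounding whitespace, enables a setting, so
values like '1' or 'yes' stay False:

    parse_bool = lambda value: 'true' == value.strip().lower()
    # parse_bool(' True ') -> True ; parse_bool('1') -> False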
@ -177,6 +177,10 @@ COOKIES_FILE = CONFIG_BASE_DIR / 'cookies.txt'
MEDIA_FORMATSTR_DEFAULT = '{yyyy_mm_dd}_{source}_{title}_{key}_{format}.{ext}'


RENAME_ALL_SOURCES = False
RENAME_SOURCES = None


try:
    from .local_settings import *
except ImportError as e:
@ -1,5 +1,4 @@
import os
from urllib.parse import urljoin
from django.core.wsgi import get_wsgi_application


@ -16,10 +15,9 @@ def application(environ, start_response):
    else:
        raise Exception(f'DJANGO_URL_PREFIX must end with a /, '
                        f'got: {DJANGO_URL_PREFIX}')
    if script_name:
        static_url = urljoin(script_name, 'static/')
    if script_name is not None:
        environ['SCRIPT_NAME'] = script_name
        path_info = environ['PATH_INFO']
        if path_info.startswith(script_name) and not path_info.startswith(static_url):
        if path_info.startswith(script_name):
            environ['PATH_INFO'] = path_info[len(script_name) - 1:]
    return _application(environ, start_response)
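Note: a worked example of the simplified rewrite, assuming
DJANGO_URL_PREFIX='/tubesync/' so that script_name is '/tubesync/':

    # environ['PATH_INFO'] = '/tubesync/sources'[len('/tubesync/') - 1:]
    #                      = '/tubesync/sources'[9:]
    #                      = '/sources'
    # the '- 1' keeps the leading slash Django expects; dropping the old
    # static_url exclusion means static paths are now rewritten the same way.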
38
tubesync/upgrade_yt-dlp.sh
Executable file
@ -0,0 +1,38 @@
#!/usr/bin/env bash

warning_message() {
    cat <<EOM
Please report any issues that you have encountered before updating yt-dlp.

This is a tool to assist developers with debugging YouTube issues.
It should not be used as an alternative to updating container images!
EOM
} 1>&2

pip3() {
    local pip_runner pip_whl run_whl

    # pipenv
    pip_runner='/usr/lib/python3/dist-packages/pipenv/patched/pip/__pip-runner__.py'
    test -s "${pip_runner}" || pip_runner=''

    # python3-pip-whl
    pip_whl="$(ls -1r /usr/share/python-wheels/pip-*-py3-none-any.whl | head -n 1)"
    run_whl="${pip_whl}/pip"

    python3 "${pip_runner:-"${run_whl}"}" "$@"
}

warning_message
test -n "${TUBESYNC_DEBUG}" || exit 1

# Use the flag added in 23.0.1, if possible.
# https://github.com/pypa/pip/pull/11780
break_system_packages='--break-system-packages'
pip_version="$(pip3 --version | awk '$1 == "pip" { print $2; exit; }')"
if [[ "${pip_version}" < "23.0.1" ]]; then
    break_system_packages=''
fi

pip3 install --upgrade ${break_system_packages} yt-dlp
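Note: the pip3() function above runs pip without a pip executable being
installed, either via pipenv's vendored runner or directly from the
python3-pip-whl wheel (wheels are zip-importable, and pip supports being
invoked from its own wheel path). The same trick from Python, with a
hypothetical wheel filename:

    import subprocess
    subprocess.run([
        'python3',
        '/usr/share/python-wheels/pip-23.0.1-py3-none-any.whl/pip',  # zip-import pip
        'install', '--upgrade', '--break-system-packages', 'yt-dlp',
    ], check=True)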