Mirror of https://github.com/meeb/tubesync.git (synced 2025-06-24 14:06:36 +00:00)
Merge branch 'main' into rename-files-with-source-format-issue-185

Commit: a44e856378
Dockerfile (407 lines changed)
@@ -1,124 +1,270 @@
-FROM debian:bookworm-slim
+# syntax=docker/dockerfile:1
+# check=error=true
 
-ARG TARGETARCH
-ARG TARGETPLATFORM
+ARG FFMPEG_DATE="2025-01-21-14-19"
+ARG FFMPEG_VERSION="N-118328-g504df09c34"
 
 ARG S6_VERSION="3.2.0.2"
 
 ARG SHA256_S6_AMD64="59289456ab1761e277bd456a95e737c06b03ede99158beb24f12b165a904f478"
 ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f66f785"
 ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae"
 
-ARG FFMPEG_DATE="autobuild-2024-12-24-14-15"
-ARG FFMPEG_VERSION="N-118163-g954d55c2a4"
-ARG SHA256_FFMPEG_AMD64="798a7e5a0724139e6bb70df8921522b23be27028f9f551dfa83c305ec4ffaf3a"
-ARG SHA256_FFMPEG_ARM64="c3e6cc0fec42cc7e3804014fbb02c1384a1a31ef13f6f9a36121f2e1216240c0"
+ARG ALPINE_VERSION="latest"
+ARG DEBIAN_VERSION="bookworm-slim"
 
-ENV S6_VERSION="${S6_VERSION}" \
-    FFMPEG_DATE="${FFMPEG_DATE}" \
-    FFMPEG_VERSION="${FFMPEG_VERSION}"
+ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION}"
+ARG FFMPEG_SUFFIX_FILE=".tar.xz"
+
+ARG FFMPEG_CHECKSUM_ALGORITHM="sha256"
+ARG S6_CHECKSUM_ALGORITHM="sha256"
+
+
+FROM alpine:${ALPINE_VERSION} AS ffmpeg-download
+ARG FFMPEG_DATE
+ARG FFMPEG_VERSION
+ARG FFMPEG_PREFIX_FILE
+ARG FFMPEG_SUFFIX_FILE
+ARG SHA256_FFMPEG_AMD64
+ARG SHA256_FFMPEG_ARM64
+ARG FFMPEG_CHECKSUM_ALGORITHM
+ARG CHECKSUM_ALGORITHM="${FFMPEG_CHECKSUM_ALGORITHM}"
+ARG FFMPEG_CHECKSUM_AMD64="${SHA256_FFMPEG_AMD64}"
+ARG FFMPEG_CHECKSUM_ARM64="${SHA256_FFMPEG_ARM64}"
+
+ARG FFMPEG_FILE_SUMS="checksums.${CHECKSUM_ALGORITHM}"
+ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-${FFMPEG_DATE}"
+
+ARG DESTDIR="/downloaded"
+ARG TARGETARCH
+ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/"
+RUN set -eu ; \
+    apk --no-cache --no-progress add cmd:aria2c cmd:awk "cmd:${CHECKSUM_ALGORITHM}sum" ; \
+    \
+    aria2c_options() { \
+        algorithm="${CHECKSUM_ALGORITHM%[0-9]??}" ; \
+        bytes="${CHECKSUM_ALGORITHM#${algorithm}}" ; \
+        hash="$( awk -v fn="${1##*/}" '$0 ~ fn"$" { print $1; exit; }' "${DESTDIR}/${FFMPEG_FILE_SUMS}" )" ; \
+        \
+        printf -- '\t%s\n' \
+            'allow-overwrite=true' \
+            'always-resume=false' \
+            'check-integrity=true' \
+            "checksum=${algorithm}-${bytes}=${hash}" \
+            'max-connection-per-server=2' \
+        ; \
+        printf -- '\n' ; \
+    } ; \
+    \
+    decide_arch() { \
+        case "${TARGETARCH}" in \
+            (amd64) printf -- 'linux64' ;; \
+            (arm64) printf -- 'linuxarm64' ;; \
+        esac ; \
+    } ; \
+    \
+    FFMPEG_ARCH="$(decide_arch)" ; \
+    FFMPEG_PREFIX_FILE="$( printf -- '%s' "${FFMPEG_PREFIX_FILE}" | cut -d '-' -f 1,2 )" ; \
+    for url in $(awk ' \
+        $2 ~ /^[*]?'"${FFMPEG_PREFIX_FILE}"'/ && /-'"${FFMPEG_ARCH}"'-/ { $1=""; print; } \
+    ' "${DESTDIR}/${FFMPEG_FILE_SUMS}") ; \
+    do \
+        url="${FFMPEG_URL}/${url# }" ; \
+        printf -- '%s\n' "${url}" ; \
+        aria2c_options "${url}" ; \
+        printf -- '\n' ; \
+    done > /tmp/downloads ; \
+    unset -v url ; \
+    \
+    aria2c --no-conf=true \
+        --dir /downloaded \
+        --lowest-speed-limit='16K' \
+        --show-console-readout=false \
+        --summary-interval=0 \
+        --input-file /tmp/downloads ; \
+    \
+    decide_expected() { \
+        case "${TARGETARCH}" in \
+            (amd64) printf -- '%s' "${FFMPEG_CHECKSUM_AMD64}" ;; \
+            (arm64) printf -- '%s' "${FFMPEG_CHECKSUM_ARM64}" ;; \
+        esac ; \
+    } ; \
+    \
+    FFMPEG_HASH="$(decide_expected)" ; \
+    \
+    cd "${DESTDIR}" ; \
+    if [ -n "${FFMPEG_HASH}" ] ; \
+    then \
+        printf -- '%s *%s\n' "${FFMPEG_HASH}" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" >> /tmp/SUMS ; \
+        "${CHECKSUM_ALGORITHM}sum" --check --warn --strict /tmp/SUMS || exit ; \
+    fi ; \
+    "${CHECKSUM_ALGORITHM}sum" --check --warn --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" ; \
+    \
+    mkdir -v -p "/verified/${TARGETARCH}" ; \
+    ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" ; \
+    rm -rf "${DESTDIR}" ;
+
+FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted
+COPY --from=ffmpeg-download /verified /verified
+
+ARG FFMPEG_PREFIX_FILE
+ARG FFMPEG_SUFFIX_FILE
+ARG TARGETARCH
+RUN set -eux ; \
+    mkdir -v /extracted ; \
+    cd /extracted ; \
+    ln -s "/verified/${TARGETARCH}"/"${FFMPEG_PREFIX_FILE}"*"${FFMPEG_SUFFIX_FILE}" "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" ; \
+    tar -tf "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" | grep '/bin/\(ffmpeg\|ffprobe\)' > /tmp/files ; \
+    tar -xop \
+        --strip-components=2 \
+        -f "/tmp/ffmpeg${FFMPEG_SUFFIX_FILE}" \
+        -T /tmp/files ; \
+    \
+    ls -AlR /extracted ;
+
+FROM scratch AS ffmpeg
+COPY --from=ffmpeg-extracted /extracted /usr/local/bin/
+
+FROM alpine:${ALPINE_VERSION} AS s6-overlay-download
+ARG S6_VERSION
+ARG SHA256_S6_AMD64
+ARG SHA256_S6_ARM64
+ARG SHA256_S6_NOARCH
+
+ARG DESTDIR="/downloaded"
+ARG S6_CHECKSUM_ALGORITHM
+ARG CHECKSUM_ALGORITHM="${S6_CHECKSUM_ALGORITHM}"
+
+ARG S6_CHECKSUM_AMD64="${CHECKSUM_ALGORITHM}:${SHA256_S6_AMD64}"
+ARG S6_CHECKSUM_ARM64="${CHECKSUM_ALGORITHM}:${SHA256_S6_ARM64}"
+ARG S6_CHECKSUM_NOARCH="${CHECKSUM_ALGORITHM}:${SHA256_S6_NOARCH}"
+
+ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}"
+ARG S6_PREFIX_FILE="s6-overlay-"
+ARG S6_SUFFIX_FILE=".tar.xz"
+
+ARG S6_FILE_AMD64="${S6_PREFIX_FILE}x86_64${S6_SUFFIX_FILE}"
+ARG S6_FILE_ARM64="${S6_PREFIX_FILE}aarch64${S6_SUFFIX_FILE}"
+ARG S6_FILE_NOARCH="${S6_PREFIX_FILE}noarch${S6_SUFFIX_FILE}"
+
+ADD "${S6_OVERLAY_URL}/${S6_FILE_AMD64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/"
+ADD "${S6_OVERLAY_URL}/${S6_FILE_ARM64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/"
+ADD "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/"
+
+##ADD --checksum="${S6_CHECKSUM_AMD64}" "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/"
+##ADD --checksum="${S6_CHECKSUM_ARM64}" "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/"
+##ADD --checksum="${S6_CHECKSUM_NOARCH}" "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/"
+
+# --checksum wasn't recognized, so use busybox to check the sums instead
+ADD "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/"
+RUN set -eu ; checksum="${S6_CHECKSUM_AMD64}" ; file="${S6_FILE_AMD64}" ; cd "${DESTDIR}/" && \
+    printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw
+
+ADD "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/"
+RUN set -eu ; checksum="${S6_CHECKSUM_ARM64}" ; file="${S6_FILE_ARM64}" ; cd "${DESTDIR}/" && \
+    printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw
+
+ADD "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/"
+RUN set -eu ; checksum="${S6_CHECKSUM_NOARCH}" ; file="${S6_FILE_NOARCH}" ; cd "${DESTDIR}/" && \
+    printf -- '%s *%s\n' "$(printf -- '%s' "${checksum}" | cut -d : -f 2-)" "${file}" | "${CHECKSUM_ALGORITHM}sum" -cw
+
+FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted
+COPY --from=s6-overlay-download /downloaded /downloaded
+
+ARG S6_CHECKSUM_ALGORITHM
+ARG CHECKSUM_ALGORITHM="${S6_CHECKSUM_ALGORITHM}"
+
+ARG TARGETARCH
+
+RUN set -eu ; \
+    \
+    decide_arch() { \
+        local arg1 ; \
+        arg1="${1:-$(uname -m)}" ; \
+        \
+        case "${arg1}" in \
+            (amd64) printf -- 'x86_64' ;; \
+            (arm64) printf -- 'aarch64' ;; \
+            (armv7l) printf -- 'arm' ;; \
+            (*) printf -- '%s' "${arg1}" ;; \
+        esac ; \
+        unset -v arg1 ; \
+    } ; \
+    \
+    apk --no-cache --no-progress add "cmd:${CHECKSUM_ALGORITHM}sum" ; \
+    mkdir -v /verified ; \
+    cd /downloaded ; \
+    for f in *.sha256 ; \
+    do \
+        "${CHECKSUM_ALGORITHM}sum" --check --warn --strict "${f}" || exit ; \
+        ln -v "${f%.sha256}" /verified/ || exit ; \
+    done ; \
+    unset -v f ; \
+    \
+    S6_ARCH="$(decide_arch "${TARGETARCH}")" ; \
+    set -x ; \
+    mkdir -v /s6-overlay-rootfs ; \
+    cd /s6-overlay-rootfs ; \
+    for f in /verified/*.tar* ; \
+    do \
+        case "${f}" in \
+            (*-noarch.tar*|*-"${S6_ARCH}".tar*) \
+                tar -xpf "${f}" || exit ;; \
+        esac ; \
+    done ; \
+    set +x ; \
+    unset -v f ;
+
+FROM scratch AS s6-overlay
+COPY --from=s6-overlay-extracted /s6-overlay-rootfs /
+
+FROM debian:${DEBIAN_VERSION} AS tubesync
+
+ARG S6_VERSION
+
+ARG FFMPEG_DATE
+ARG FFMPEG_VERSION
 
 ENV DEBIAN_FRONTEND="noninteractive" \
     HOME="/root" \
     LANGUAGE="en_US.UTF-8" \
     LANG="en_US.UTF-8" \
     LC_ALL="en_US.UTF-8" \
     TERM="xterm" \
-    S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0"
+    # Do not include compiled byte-code
+    PIP_NO_COMPILE=1 \
+    PIP_ROOT_USER_ACTION='ignore' \
+    S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0"
+
+ENV S6_VERSION="${S6_VERSION}" \
+    FFMPEG_DATE="${FFMPEG_DATE}" \
+    FFMPEG_VERSION="${FFMPEG_VERSION}"
 
 # Install third party software
+COPY --from=s6-overlay / /
+COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/
+
 # Reminder: the SHELL handles all variables
-RUN decide_arch() { \
-      case "${TARGETARCH:=amd64}" in \
-          (arm64) printf -- 'aarch64' ;; \
-          (*) printf -- '%s' "${TARGETARCH}" ;; \
-      esac ; \
-    } && \
-    decide_expected() { \
-      case "${1}" in \
-          (ffmpeg) case "${2}" in \
-              (amd64) printf -- '%s' "${SHA256_FFMPEG_AMD64}" ;; \
-              (arm64) printf -- '%s' "${SHA256_FFMPEG_ARM64}" ;; \
-          esac ;; \
-          (s6) case "${2}" in \
-              (amd64) printf -- '%s' "${SHA256_S6_AMD64}" ;; \
-              (arm64) printf -- '%s' "${SHA256_S6_ARM64}" ;; \
-              (noarch) printf -- '%s' "${SHA256_S6_NOARCH}" ;; \
-          esac ;; \
-      esac ; \
-    } && \
-    decide_url() { \
-      case "${1}" in \
-          (ffmpeg) printf -- \
-              'https://github.com/yt-dlp/FFmpeg-Builds/releases/download/%s/ffmpeg-%s-linux%s-gpl%s.tar.xz' \
-              "${FFMPEG_DATE}" \
-              "${FFMPEG_VERSION}" \
-              "$(case "${2}" in \
-                  (amd64) printf -- '64' ;; \
-                  (*) printf -- '%s' "${2}" ;; \
-              esac)" \
-              "$(case "${FFMPEG_VERSION%%-*}" in \
-                  (n*) printf -- '-%s\n' "${FFMPEG_VERSION#n}" | cut -d '-' -f 1,2 ;; \
-                  (*) printf -- '' ;; \
-              esac)" ;; \
-          (s6) printf -- \
-              'https://github.com/just-containers/s6-overlay/releases/download/v%s/s6-overlay-%s.tar.xz' \
-              "${S6_VERSION}" \
-              "$(case "${2}" in \
-                  (amd64) printf -- 'x86_64' ;; \
-                  (arm64) printf -- 'aarch64' ;; \
-                  (*) printf -- '%s' "${2}" ;; \
-              esac)" ;; \
-      esac ; \
-    } && \
-    verify_download() { \
-      while [ $# -ge 2 ] ; do \
-          sha256sum "${2}" ; \
-          printf -- '%s %s\n' "${1}" "${2}" | sha256sum -c || return ; \
-          shift ; shift ; \
-      done ; \
-    } && \
-    download_expected_file() { \
-      local arg1 expected file url ; \
-      arg1="$(printf -- '%s\n' "${1}" | awk '{print toupper($0);}')" ; \
-      expected="$(decide_expected "${1}" "${2}")" ; \
-      file="${3}" ; \
-      url="$(decide_url "${1}" "${2}")" ; \
-      printf -- '%s\n' \
-          "Building for arch: ${2}|${ARCH}, downloading ${arg1} from: ${url}, expecting ${arg1} SHA256: ${expected}" && \
-      rm -rf "${file}" && \
-      curl --disable --output "${file}" --clobber --location --no-progress-meter --url "${url}" && \
-      verify_download "${expected}" "${file}" ; \
-    } && \
-    export ARCH="$(decide_arch)" && \
+RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \
+    --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \
     set -x && \
+    # Update from the network and keep cache
+    rm -f /etc/apt/apt.conf.d/docker-clean && \
     apt-get update && \
+    # Install locales
     apt-get -y --no-install-recommends install locales && \
     printf -- "en_US.UTF-8 UTF-8\n" > /etc/locale.gen && \
     locale-gen en_US.UTF-8 && \
-    # Install required distro packages
-    apt-get -y --no-install-recommends install curl ca-certificates file binutils xz-utils && \
-    # Install s6
-    _file="/tmp/s6-overlay-noarch.tar.xz" && \
-    download_expected_file s6 noarch "${_file}" && \
-    tar -C / -xpf "${_file}" && rm -f "${_file}" && \
-    _file="/tmp/s6-overlay-${ARCH}.tar.xz" && \
-    download_expected_file s6 "${TARGETARCH}" "${_file}" && \
-    tar -C / -xpf "${_file}" && rm -f "${_file}" && \
+    # Install file
+    apt-get -y --no-install-recommends install file && \
+    # Installed s6 (using COPY earlier)
     file -L /command/s6-overlay-suexec && \
-    # Install ffmpeg
-    _file="/tmp/ffmpeg-${ARCH}.tar.xz" && \
-    download_expected_file ffmpeg "${TARGETARCH}" "${_file}" && \
-    tar -xvvpf "${_file}" --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" "ffprobe" && rm -f "${_file}" && \
+    # Installed ffmpeg (using COPY earlier)
+    /usr/local/bin/ffmpeg -version && \
     file /usr/local/bin/ff* && \
-    # Clean up
-    apt-get -y autoremove --purge curl file binutils xz-utils && \
-    rm -rf /var/lib/apt/lists/* && \
-    rm -rf /var/cache/apt/* && \
-    rm -rf /tmp/*
-
-# Install dependencies we keep
-RUN set -x && \
-    apt-get update && \
+    # Clean up file
+    apt-get -y autoremove --purge file && \
+    # Install dependencies we keep
     # Install required distro packages
     apt-get -y --no-install-recommends install \
         libjpeg62-turbo \
@@ -131,27 +277,29 @@ RUN set -x && \
         python3 \
         python3-wheel \
         redis-server \
-    && apt-get -y autoclean && \
-    rm -rf /var/lib/apt/lists/* && \
-    rm -rf /var/cache/apt/* && \
+        curl \
+        less \
+    && \
+    # Clean up
+    apt-get -y autopurge && \
+    apt-get -y autoclean && \
     rm -rf /tmp/*
 
 # Copy over pip.conf to use piwheels
 COPY pip.conf /etc/pip.conf
 
-# Add Pipfile
-COPY Pipfile /app/Pipfile
-
-# Do not include compiled byte-code
-ENV PIP_NO_COMPILE=1 \
-    PIP_NO_CACHE_DIR=1 \
-    PIP_ROOT_USER_ACTION='ignore'
-
 # Switch workdir to the the app
 WORKDIR /app
 
 # Set up the app
-RUN set -x && \
+RUN --mount=type=tmpfs,target=/cache \
+    --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \
+    --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \
+    --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \
+    --mount=type=bind,source=Pipfile,target=/app/Pipfile \
+    set -x && \
+    # Update from the network and keep cache
+    rm -f /etc/apt/apt.conf.d/docker-clean && \
    apt-get update && \
    # Install required build packages
    apt-get -y --no-install-recommends install \
@@ -172,10 +320,11 @@ RUN set -x && \
     useradd -M -d /app -s /bin/false -g app app && \
     # Install non-distro packages
     cp -at /tmp/ "${HOME}" && \
-    PIPENV_VERBOSITY=64 HOME="/tmp/${HOME#/}" pipenv install --system --skip-lock && \
+    HOME="/tmp/${HOME#/}" \
+    XDG_CACHE_HOME='/cache' \
+    PIPENV_VERBOSITY=64 \
+    pipenv install --system --skip-lock && \
     # Clean up
-    rm /app/Pipfile && \
-    pipenv --clear && \
     apt-get -y autoremove --purge \
         default-libmysqlclient-dev \
         g++ \
@@ -189,12 +338,9 @@ RUN set -x && \
         python3-pip \
         zlib1g-dev \
     && \
-    apt-get -y autoremove && \
+    apt-get -y autopurge && \
     apt-get -y autoclean && \
-    rm -rf /var/lib/apt/lists/* && \
-    rm -rf /var/cache/apt/* && \
-    rm -rf /tmp/*
+    rm -v -rf /tmp/*
 
 # Copy app
 COPY tubesync /app
@@ -212,24 +358,21 @@ RUN set -x && \
     mkdir -v -p /config/media && \
     mkdir -v -p /config/cache/pycache && \
     mkdir -v -p /downloads/audio && \
-    mkdir -v -p /downloads/video
-
-# Append software versions
-RUN set -x && \
-    /usr/local/bin/ffmpeg -version && \
-    FFMPEG_VERSION=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \
-    test -n "${FFMPEG_VERSION}" && \
-    printf -- "ffmpeg_version = '%s'\n" "${FFMPEG_VERSION}" >> /app/common/third_party_versions.py
+    mkdir -v -p /downloads/video && \
+    # Append software versions
+    ffmpeg_version=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \
+    test -n "${ffmpeg_version}" && \
+    printf -- "ffmpeg_version = '%s'\n" "${ffmpeg_version}" >> /app/common/third_party_versions.py
 
 # Copy root
 COPY config/root /
 
 # Create a healthcheck
-HEALTHCHECK --interval=1m --timeout=10s CMD /app/healthcheck.py http://127.0.0.1:8080/healthcheck
+HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"]
 
 # ENVS and ports
-ENV PYTHONPATH="/app" PYTHONPYCACHEPREFIX="/config/cache/pycache"
+ENV PYTHONPATH="/app" \
+    PYTHONPYCACHEPREFIX="/config/cache/pycache"
 EXPOSE 4848
 
 # Volumes
README.md (14 lines changed)
@@ -138,6 +138,11 @@ services:
       - PGID=1000
 ```
 
+> [!IMPORTANT]
+> If the `/downloads` directory is mounted from a [Samba volume](https://docs.docker.com/engine/storage/volumes/#create-cifssamba-volumes), be sure to also supply the `uid` and `gid` mount parameters in the driver options.
+> These must be matched to the `PUID` and `PGID` values, which were specified as environment variables.
+>
+> Matching these user and group ID numbers prevents issues when executing file actions, such as writing metadata. See [this issue](https://github.com/meeb/tubesync/issues/616#issuecomment-2593458282) for details.
+
 ## Optional authentication
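Editor's note: the added README callout is easier to apply with a concrete volume definition. A minimal sketch of a CIFS/Samba named volume whose `uid`/`gid` driver options match the `PUID`/`PGID` environment variables; the server address, share name and credentials below are placeholders, and `1000` assumes the IDs from the compose example above.

```yaml
volumes:
  downloads:
    driver: local
    driver_opts:
      type: cifs
      device: //192.168.1.10/tubesync-downloads   # hypothetical share
      # uid/gid must match the PUID/PGID environment variables
      o: "username=example,password=example,uid=1000,gid=1000"
```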
@@ -320,7 +325,7 @@ Notable libraries and software used:
 * [django-sass](https://github.com/coderedcorp/django-sass/)
 * The container bundles with `s6-init` and `nginx`
 
-See the [Pipefile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a full list.
+See the [Pipfile](https://github.com/meeb/tubesync/blob/main/Pipfile) for a full list.
 
 ### Can I get access to the full Django admin?
 
@@ -348,7 +353,12 @@ etc.). Configuration of this is beyond the scope of this README.
 
 ### What architectures does the container support?
 
-Just `amd64` for the moment. Others may be made available if there is demand.
+Only two are supported, for the moment:
+
+- `amd64` (most desktop PCs and servers)
+- `arm64` (modern ARM computers, such as the Rasperry Pi 3 or later)
+
+Others may be made available, if there is demand.
 
 ### The pipenv install fails with "Locking failed"!
 
@@ -2,6 +2,7 @@ daemon off;
 
 user app;
 worker_processes auto;
+worker_cpu_affinity auto;
 pid /run/nginx.pid;
 
 events {
@@ -2,4 +2,4 @@
 
 cd /
 
-/usr/sbin/nginx
+exec /usr/sbin/nginx
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 '''
 
 Perform an HTTP request to a URL and exit with an exit code of 1 if the
@@ -20,8 +20,8 @@ from common.utils import clean_filename, clean_emoji
 from .youtube import (get_media_info as get_youtube_media_info,
                       download_media as download_youtube_media,
                       get_channel_image_info as get_youtube_channel_image_info)
-from .utils import (seconds_to_timestr, parse_media_format, write_text_file,
-                    mkdir_p, directory_and_stem, glob_quote)
+from .utils import (seconds_to_timestr, parse_media_format, filter_response,
+                    write_text_file, mkdir_p, directory_and_stem, glob_quote)
 from .matching import (get_best_combined_format, get_best_audio_format,
                        get_best_video_format)
 from .mediaservers import PlexMediaServer
@@ -591,6 +591,7 @@ class Source(models.Model):
             'key': 'SoMeUnIqUiD',
             'format': '-'.join(fmt),
             'playlist_title': 'Some Playlist Title',
+            'video_order': '01',
             'ext': self.extension,
             'resolution': self.source_resolution if self.source_resolution else '',
             'height': '720' if self.source_resolution else '',
@@ -1130,6 +1131,7 @@ class Media(models.Model):
             'key': self.key,
             'format': '-'.join(display_format['format']),
             'playlist_title': self.playlist_title,
+            'video_order': self.get_episode_str(True),
             'ext': self.source.extension,
             'resolution': display_format['resolution'],
             'height': display_format['height'],
@@ -1145,8 +1147,39 @@ class Media(models.Model):
     def has_metadata(self):
         return self.metadata is not None
 
+    @property
+    def reduce_data(self):
+        try:
+            from common.logger import log
+            from common.utils import json_serial
+
+            old_mdl = len(self.metadata or "")
+            data = json.loads(self.metadata or "")
+            compact_json = json.dumps(data, separators=(',', ':'), default=json_serial)
+
+            filtered_data = filter_response(data, True)
+            filtered_json = json.dumps(filtered_data, separators=(',', ':'), default=json_serial)
+        except Exception as e:
+            log.exception('reduce_data: %s', e)
+        else:
+            # log the results of filtering / compacting on metadata size
+            new_mdl = len(compact_json)
+            if old_mdl > new_mdl:
+                delta = old_mdl - new_mdl
+                log.info(f'{self.key}: metadata compacted by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})')
+            new_mdl = len(filtered_json)
+            if old_mdl > new_mdl:
+                delta = old_mdl - new_mdl
+                log.info(f'{self.key}: metadata reduced by {delta:,} characters ({old_mdl:,} -> {new_mdl:,})')
+            if getattr(settings, 'SHRINK_OLD_MEDIA_METADATA', False):
+                self.metadata = filtered_json
+
     @property
     def loaded_metadata(self):
+        if getattr(settings, 'SHRINK_OLD_MEDIA_METADATA', False):
+            self.reduce_data
         try:
             data = json.loads(self.metadata)
             if not isinstance(data, dict):
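Editor's note: part of the saving that `reduce_data` logs comes simply from re-serialising with `separators=(',', ':')`, because the default `json.dumps` pads every separator with a space. A standalone sketch using only the standard library (the sample dict is made up for illustration):

```python
import json

sample = {"formats": [{"format_id": "251", "acodec": "opus"}] * 1000}

default_json = json.dumps(sample)
compact_json = json.dumps(sample, separators=(',', ':'))

# The compact form drops the ", " and ": " padding, so it is always
# at least as short as the default serialisation.
print(len(default_json), len(compact_json))
```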
@@ -1265,8 +1298,7 @@ class Media(models.Model):
 
     @property
     def directory_path(self):
-        dirname = self.source.directory_path / self.filename
-        return dirname.parent
+        return self.filepath.parent
 
     @property
     def filepath(self):
@@ -1375,8 +1407,7 @@ class Media(models.Model):
         nfo.append(season)
         # episode = number of video in the year
         episode = nfo.makeelement('episode', {})
-        episode_number = self.calculate_episode_number()
-        episode.text = str(episode_number) if episode_number else ''
+        episode.text = self.get_episode_str()
         episode.tail = '\n '
         nfo.append(episode)
         # ratings = media metadata youtube rating
@@ -1389,7 +1420,7 @@ class Media(models.Model):
         rating_attrs = OrderedDict()
         rating_attrs['name'] = 'youtube'
         rating_attrs['max'] = '5'
-        rating_attrs['default'] = 'True'
+        rating_attrs['default'] = 'true'
         rating = nfo.makeelement('rating', rating_attrs)
         rating.text = '\n '
         rating.append(value)
@@ -1397,7 +1428,8 @@ class Media(models.Model):
         rating.tail = '\n '
         ratings = nfo.makeelement('ratings', {})
         ratings.text = '\n '
-        ratings.append(rating)
+        if self.rating is not None:
+            ratings.append(rating)
         ratings.tail = '\n '
         nfo.append(ratings)
         # plot = media metadata description
@@ -1414,7 +1446,8 @@ class Media(models.Model):
         mpaa = nfo.makeelement('mpaa', {})
         mpaa.text = str(self.age_limit)
         mpaa.tail = '\n '
-        nfo.append(mpaa)
+        if self.age_limit and self.age_limit > 0:
+            nfo.append(mpaa)
         # runtime = media metadata duration in seconds
         runtime = nfo.makeelement('runtime', {})
         runtime.text = str(self.duration)
@@ -1526,6 +1559,16 @@ class Media(models.Model):
                 return position_counter
             position_counter += 1
 
+    def get_episode_str(self, use_padding=False):
+        episode_number = self.calculate_episode_number()
+        if not episode_number:
+            return ''
+
+        if use_padding:
+            return f'{episode_number:02}'
+
+        return str(episode_number)
+
     def rename_files(self):
         if self.downloaded and self.media_file:
             old_video_path = Path(self.media_file.path)
@@ -26,7 +26,7 @@ from common.errors import NoMediaException, DownloadFailedException
 from common.utils import json_serial
 from .models import Source, Media, MediaServer
 from .utils import (get_remote_image, resize_image_to_height, delete_file,
-                    write_text_file)
+                    write_text_file, filter_response)
 from .filtering import filter_media
 
 
@@ -305,7 +305,10 @@ def download_media_metadata(media_id):
         return
     source = media.source
     metadata = media.index_metadata()
-    media.metadata = json.dumps(metadata, default=json_serial)
+    response = metadata
+    if getattr(settings, 'SHRINK_NEW_MEDIA_METADATA', False):
+        response = filter_response(metadata, True)
+    media.metadata = json.dumps(response, separators=(',', ':'), default=json_serial)
     upload_date = media.upload_date
     # Media must have a valid upload date
     if upload_date:
@@ -447,7 +450,11 @@ def download_media(media_id):
         # If selected, write an NFO file
         if media.source.write_nfo:
             log.info(f'Writing media NFO file to: {media.nfopath}')
-            write_text_file(media.nfopath, media.nfoxml)
+            try:
+                write_text_file(media.nfopath, media.nfoxml)
+            except PermissionError as e:
+                log.warn(f'A permissions problem occured when writing the new media NFO file: {e.msg}')
+                pass
         # Schedule a task to update media servers
         for mediaserver in MediaServer.objects.all():
             log.info(f'Scheduling media server updates')
@@ -73,6 +73,11 @@
       <td>Playlist title of media, if it's in a playlist</td>
       <td>Some Playlist</td>
     </tr>
+    <tr>
+      <td>{video_order}</td>
+      <td>Episode order in playlist, if in playlist <sub><sup>(can cause issues if playlist is changed after adding)</sup></sub></td>
+      <td>01</td>
+    </tr>
     <tr>
       <td>{ext}</td>
       <td>File extension</td>
@@ -125,7 +125,7 @@
     </tr>
     <tr title="Database connection used by TubeSync">
       <td class="hide-on-small-only">Database</td>
-      <td><span class="hide-on-med-and-up">Database<br></span><strong>{{ database_connection }}</strong></td>
+      <td><span class="hide-on-med-and-up">Database<br></span><strong>{{ database_connection }}{% if database_filesize %} {{ database_filesize|filesizeformat }}{% endif %}</strong></td>
     </tr>
   </table>
 </div>
@@ -146,7 +146,7 @@
       <div>
         ID: <strong>{{ format.format_id }}</strong>
         {% if format.vcodec|lower != 'none' %}, {{ format.format_note }} ({{ format.width }}x{{ format.height }}), fps:{{ format.fps|lower }}, video:{{ format.vcodec }} @{{ format.tbr }}k{% endif %}
-        {% if format.acodec|lower != 'none' %}, audio:{{ format.acodec }} @{{ format.abr }}k / {{ format.asr }}Hz{% endif %}
+        {% if format.acodec|lower != 'none' %}, audio:{{ format.acodec }} @{{ format.abr }}k / {{ format.asr }}Hz {{ format.format_note }}{% endif %}
         {% if format.format_id == combined_format or format.format_id == audio_format or format.format_id == video_format %}<strong>(matched)</strong>{% endif %}
       </div>
       {% empty %}
@@ -18,6 +18,7 @@ from background_task.models import Task
 from .models import Source, Media
 from .tasks import cleanup_old_media
 from .filtering import filter_media
+from .utils import filter_response
 
 
 class FrontEndTestCase(TestCase):
@@ -1709,6 +1710,84 @@ class FormatMatchingTestCase(TestCase):
                          f'expected {expected_match_result}')
 
 
+class ResponseFilteringTestCase(TestCase):
+
+    def setUp(self):
+        # Disable general logging for test case
+        logging.disable(logging.CRITICAL)
+        # Add a test source
+        self.source = Source.objects.create(
+            source_type=Source.SOURCE_TYPE_YOUTUBE_CHANNEL,
+            key='testkey',
+            name='testname',
+            directory='testdirectory',
+            index_schedule=3600,
+            delete_old_media=False,
+            days_to_keep=14,
+            source_resolution=Source.SOURCE_RESOLUTION_1080P,
+            source_vcodec=Source.SOURCE_VCODEC_VP9,
+            source_acodec=Source.SOURCE_ACODEC_OPUS,
+            prefer_60fps=False,
+            prefer_hdr=False,
+            fallback=Source.FALLBACK_FAIL
+        )
+        # Add some media
+        self.media = Media.objects.create(
+            key='mediakey',
+            source=self.source,
+            metadata='{}'
+        )
+
+    def test_metadata_20230629(self):
+        self.media.metadata = all_test_metadata['20230629']
+        self.media.save()
+
+        unfiltered = self.media.loaded_metadata
+        filtered = filter_response(self.media.loaded_metadata)
+        self.assertIn('formats', unfiltered.keys())
+        self.assertIn('formats', filtered.keys())
+        # filtered 'downloader_options'
+        self.assertIn('downloader_options', unfiltered['formats'][10].keys())
+        self.assertNotIn('downloader_options', filtered['formats'][10].keys())
+        # filtered 'http_headers'
+        self.assertIn('http_headers', unfiltered['formats'][0].keys())
+        self.assertNotIn('http_headers', filtered['formats'][0].keys())
+        # did not lose any formats
+        self.assertEqual(48, len(unfiltered['formats']))
+        self.assertEqual(48, len(filtered['formats']))
+        self.assertEqual(len(unfiltered['formats']), len(filtered['formats']))
+        # did not remove everything with url
+        self.assertIn('original_url', unfiltered.keys())
+        self.assertIn('original_url', filtered.keys())
+        self.assertEqual(unfiltered['original_url'], filtered['original_url'])
+        # did reduce the size of the metadata
+        self.assertTrue(len(str(filtered)) < len(str(unfiltered)))
+
+        url_keys = []
+        for format in unfiltered['formats']:
+            for key in format.keys():
+                if 'url' in key:
+                    url_keys.append((format['format_id'], key, format[key],))
+        unfiltered_url_keys = url_keys
+        self.assertEqual(63, len(unfiltered_url_keys), msg=str(unfiltered_url_keys))
+
+        url_keys = []
+        for format in filtered['formats']:
+            for key in format.keys():
+                if 'url' in key:
+                    url_keys.append((format['format_id'], key, format[key],))
+        filtered_url_keys = url_keys
+        self.assertEqual(3, len(filtered_url_keys), msg=str(filtered_url_keys))
+
+        url_keys = []
+        for lang_code, captions in filtered['automatic_captions'].items():
+            for caption in captions:
+                for key in caption.keys():
+                    if 'url' in key:
+                        url_keys.append((lang_code, caption['ext'], caption[key],))
+        self.assertEqual(0, len(url_keys), msg=str(url_keys))
+
+
 class TasksTestCase(TestCase):
 
     def setUp(self):
@@ -1,6 +1,7 @@
 import os
 import re
 import math
+from copy import deepcopy
 from operator import itemgetter
 from pathlib import Path
 from tempfile import NamedTemporaryFile
@@ -154,7 +155,8 @@ def write_text_file(filepath, filedata):
         bytes_written = f.write(filedata)
     # chmod a+r temp_file
     old_mode = new_filepath.stat().st_mode
-    new_filepath.chmod(0o444 | old_mode)
+    if 0o444 != (0o444 & old_mode):
+        new_filepath.chmod(0o444 | old_mode)
     if not file_is_editable(new_filepath):
         new_filepath.unlink()
         raise ValueError(f'File cannot be edited or removed: {filepath}')
@@ -201,6 +203,95 @@ def normalize_codec(codec_str):
     return result
 
 
+def _url_keys(arg_dict, filter_func):
+    result = {}
+    for key in arg_dict.keys():
+        if 'url' in key:
+            result.update(
+                {key: filter_func(key=key, url=arg_dict[key])}
+            )
+    return result
+
+
+def _drop_url_keys(arg_dict, key, filter_func):
+    if key in arg_dict.keys():
+        for val_dict in arg_dict[key]:
+            for url_key, remove in _url_keys(val_dict, filter_func).items():
+                if remove is True:
+                    del val_dict[url_key]
+
+
+def filter_response(arg_dict, copy_arg=False):
+    '''
+        Clean up the response so as to not store useless metadata in the database.
+    '''
+    response_dict = arg_dict
+    # raise an exception for an unexpected argument type
+    if not isinstance(response_dict, dict):
+        raise TypeError(f'response_dict must be a dict, got "{type(response_dict)}"')
+
+    if copy_arg:
+        response_dict = deepcopy(arg_dict)
+
+    # optimize the empty case
+    if not response_dict:
+        return response_dict
+
+    # beginning of formats cleanup {{{
+    # drop urls that expire, or restrict IPs
+    def drop_format_url(**kwargs):
+        url = kwargs['url']
+        return (
+            url
+            and '://' in url
+            and (
+                '/ip/' in url
+                or 'ip=' in url
+                or '/expire/' in url
+                or 'expire=' in url
+            )
+        )
+
+    # these format keys are not useful to us
+    drop_keys = frozenset((
+        'downloader_options',
+        'fragments',
+        'http_headers',
+        '__needs_testing',
+        '__working',
+    ))
+    for key in frozenset(('formats', 'requested_formats',)):
+        _drop_url_keys(response_dict, key, drop_format_url)
+        if key in response_dict.keys():
+            for format in response_dict[key]:
+                for drop_key in drop_keys:
+                    if drop_key in format.keys():
+                        del format[drop_key]
+    # end of formats cleanup }}}
+
+    # beginning of subtitles cleanup {{{
+    # drop urls that expire
+    def drop_subtitles_url(**kwargs):
+        url = kwargs['url']
+        return (
+            url
+            and '://' in url
+            and (
+                '/expire/' in url
+                or '&expire=' in url
+            )
+        )
+
+    for key in frozenset(('subtitles', 'automatic_captions',)):
+        if key in response_dict.keys():
+            key_dict = response_dict[key]
+            for lang_code in key_dict:
+                _drop_url_keys(key_dict, lang_code, drop_subtitles_url)
+    # end of subtitles cleanup }}}
+
+    return response_dict
+
+
 def parse_media_format(format_dict):
     '''
     This parser primarily adapts the format dict returned by youtube-dl into a
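Editor's note: a minimal sketch of what the new `filter_response` helper does to a response, using a hand-built dict (the values are invented, not real yt-dlp output; the import path assumes the `sync` app layout of this repo):

```python
from sync.utils import filter_response

sample = {
    'original_url': 'https://www.youtube.com/watch?v=example',
    'formats': [{
        'format_id': '251',
        'url': 'https://example.invalid/video?expire=123',  # expiring URL, dropped
        'http_headers': {'User-Agent': 'test'},              # unneeded key, dropped
    }],
}

cleaned = filter_response(sample, copy_arg=True)  # copy_arg=True leaves sample untouched
assert 'url' not in cleaned['formats'][0]
assert 'http_headers' not in cleaned['formats'][0]
assert 'original_url' in cleaned  # top-level keys are kept
```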
@@ -244,6 +335,7 @@ def parse_media_format(format_dict):
     return {
         'id': format_dict.get('format_id', ''),
         'format': format_str,
+        'format_note': format_dict.get('format_note', ''),
         'format_verbose': format_dict.get('format', ''),
         'height': height,
         'width': width,
@@ -14,7 +14,7 @@ from django.views.generic.detail import SingleObjectMixin
 from django.core.exceptions import SuspiciousFileOperation
 from django.http import HttpResponse
 from django.urls import reverse_lazy
-from django.db import IntegrityError
+from django.db import connection, IntegrityError
 from django.db.models import Q, Count, Sum, When, Case
 from django.forms import Form, ValidationError
 from django.utils.text import slugify
@@ -85,6 +85,12 @@ class DashboardView(TemplateView):
         data['config_dir'] = str(settings.CONFIG_BASE_DIR)
         data['downloads_dir'] = str(settings.DOWNLOAD_ROOT)
         data['database_connection'] = settings.DATABASE_CONNECTION_STR
+        # Add the database filesize when using db.sqlite3
+        data['database_filesize'] = None
+        db_name = str(connection.get_connection_params()['database'])
+        db_path = pathlib.Path(db_name) if '/' == db_name[0] else None
+        if db_path and 'sqlite' == connection.vendor:
+            data['database_filesize'] = db_path.stat().st_size
         return data
 
@@ -193,10 +199,15 @@ class ValidateSourceView(FormView):
         Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: ('https://www.youtube.com/playlist?list='
                                               'PL590L5WQmH8dpP0RyH5pCfIaDEdt9nk7r')
     }
+    _youtube_domains = frozenset({
+        'youtube.com',
+        'm.youtube.com',
+        'www.youtube.com',
+    })
     validation_urls = {
         Source.SOURCE_TYPE_YOUTUBE_CHANNEL: {
             'scheme': 'https',
-            'domains': ('m.youtube.com', 'www.youtube.com'),
+            'domains': _youtube_domains,
             'path_regex': '^\/(c\/)?([^\/]+)(\/videos)?$',
             'path_must_not_match': ('/playlist', '/c/playlist'),
             'qs_args': [],
@@ -205,7 +216,7 @@
         },
         Source.SOURCE_TYPE_YOUTUBE_CHANNEL_ID: {
             'scheme': 'https',
-            'domains': ('m.youtube.com', 'www.youtube.com'),
+            'domains': _youtube_domains,
             'path_regex': '^\/channel\/([^\/]+)(\/videos)?$',
             'path_must_not_match': ('/playlist', '/c/playlist'),
             'qs_args': [],
@@ -214,7 +225,7 @@
         },
         Source.SOURCE_TYPE_YOUTUBE_PLAYLIST: {
             'scheme': 'https',
-            'domains': ('m.youtube.com', 'www.youtube.com'),
+            'domains': _youtube_domains,
             'path_regex': '^\/(playlist|watch)$',
             'path_must_not_match': (),
             'qs_args': ('list',),
@@ -286,11 +297,36 @@
         url = reverse_lazy('sync:add-source')
         fields_to_populate = self.prepopulate_fields.get(self.source_type)
         fields = {}
+        value = self.key
+        use_channel_id = (
+            'youtube-channel' == self.source_type_str and
+            '@' == self.key[0]
+        )
+        if use_channel_id:
+            old_key = self.key
+            old_source_type = self.source_type
+            old_source_type_str = self.source_type_str
+
+            self.source_type_str = 'youtube-channel-id'
+            self.source_type = self.source_types.get(self.source_type_str, None)
+            index_url = Source.create_index_url(self.source_type, self.key, 'videos')
+            try:
+                self.key = youtube.get_channel_id(
+                    index_url.replace('/channel/', '/')
+                )
+            except youtube.YouTubeError as e:
+                # It did not work, revert to previous behavior
+                self.key = old_key
+                self.source_type = old_source_type
+                self.source_type_str = old_source_type_str
+
         for field in fields_to_populate:
             if field == 'source_type':
                 fields[field] = self.source_type
-            elif field in ('key', 'name', 'directory'):
+            elif field == 'key':
                 fields[field] = self.key
+            elif field in ('name', 'directory'):
+                fields[field] = value
         return append_uri_params(url, fields)
|
@ -46,6 +46,32 @@ def get_yt_opts():
|
|||||||
opts.update({'cookiefile': cookie_file_path})
|
opts.update({'cookiefile': cookie_file_path})
|
||||||
return opts
|
return opts
|
||||||
|
|
||||||
|
def get_channel_id(url):
|
||||||
|
# yt-dlp --simulate --no-check-formats --playlist-items 1
|
||||||
|
# --print 'pre_process:%(playlist_channel_id,playlist_id,channel_id)s'
|
||||||
|
opts = get_yt_opts()
|
||||||
|
opts.update({
|
||||||
|
'skip_download': True,
|
||||||
|
'simulate': True,
|
||||||
|
'logger': log,
|
||||||
|
'extract_flat': True, # Change to False to get detailed info
|
||||||
|
'check_formats': False,
|
||||||
|
'playlist_items': '1',
|
||||||
|
})
|
||||||
|
|
||||||
|
with yt_dlp.YoutubeDL(opts) as y:
|
||||||
|
try:
|
||||||
|
response = y.extract_info(url, download=False)
|
||||||
|
except yt_dlp.utils.DownloadError as e:
|
||||||
|
raise YouTubeError(f'Failed to extract channel ID for "{url}": {e}') from e
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
channel_id = response['channel_id']
|
||||||
|
except Exception as e:
|
||||||
|
raise YouTubeError(f'Failed to extract channel ID for "{url}": {e}') from e
|
||||||
|
else:
|
||||||
|
return channel_id
|
||||||
|
|
||||||
def get_channel_image_info(url):
|
def get_channel_image_info(url):
|
||||||
opts = get_yt_opts()
|
opts = get_yt_opts()
|
||||||
opts.update({
|
opts.update({
|
||||||
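Editor's note: a quick sketch of how the new helper is intended to be called. The handle URL below is a placeholder, and the import path assumes the `sync` app layout; in the actual code path, `ValidateSourceView` calls it and falls back to the user-supplied key when `YouTubeError` is raised.

```python
from sync import youtube

try:
    # Resolve an @handle URL to its canonical UC... channel ID
    channel_id = youtube.get_channel_id('https://www.youtube.com/@SomeChannelHandle/videos')
except youtube.YouTubeError:
    channel_id = None  # fall back to the original key, as the view does
```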
@@ -82,6 +108,8 @@ def _subscriber_only(msg='', response=None):
             return True
         if ': Join this channel' in msg:
             return True
+        if 'Join this YouTube channel' in msg:
+            return True
     else:
         # ignore msg entirely
         if not isinstance(response, dict):
@@ -10,9 +10,10 @@ def get_num_workers():
         num_workers = int(os.getenv('GUNICORN_WORKERS', 3))
     except ValueError:
         num_workers = cpu_workers
-    if 0 > num_workers > cpu_workers:
-        num_workers = cpu_workers
-    return num_workers
+    if 0 < num_workers < cpu_workers:
+        return num_workers
+    else:
+        return cpu_workers
 
 
 def get_bind():
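Editor's note: the old guard `0 > num_workers > cpu_workers` could never be true (a value cannot be both negative and greater than the CPU count), so `GUNICORN_WORKERS` was effectively never validated. A standalone sketch of the corrected behaviour, with an assumed CPU worker count of 4:

```python
def choose_workers(requested, cpu_workers=4):
    # Mirrors the corrected logic: honour a sane request, otherwise fall back
    if 0 < requested < cpu_workers:
        return requested
    return cpu_workers

assert choose_workers(2) == 2    # within range, kept
assert choose_workers(0) == 4    # nonsensical, fall back to CPU count
assert choose_workers(99) == 4   # too high, use CPU count instead
```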
@@ -87,6 +87,12 @@ SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR = os.getenv('TUBESYNC_DIRECTORY_PREFIX', 'T
 SOURCE_DOWNLOAD_DIRECTORY_PREFIX = True if SOURCE_DOWNLOAD_DIRECTORY_PREFIX_STR == 'true' else False
 
 
+SHRINK_NEW_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_NEW', 'false').strip().lower()
+SHRINK_NEW_MEDIA_METADATA = ( 'true' == SHRINK_NEW_MEDIA_METADATA_STR )
+SHRINK_OLD_MEDIA_METADATA_STR = os.getenv('TUBESYNC_SHRINK_OLD', 'false').strip().lower()
+SHRINK_OLD_MEDIA_METADATA = ( 'true' == SHRINK_OLD_MEDIA_METADATA_STR )
+
+
 VIDEO_HEIGHT_CUTOFF = int(os.getenv("TUBESYNC_VIDEO_HEIGHT_CUTOFF", "240"))
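Editor's note: these settings are driven by the `TUBESYNC_SHRINK_NEW` and `TUBESYNC_SHRINK_OLD` environment variables, so enabling the metadata shrinking from Docker is just a matter of setting them on the container. A minimal compose-style sketch (the service name and the rest of the configuration are whatever your existing deployment already uses):

```yaml
services:
  tubesync:
    environment:
      - TUBESYNC_SHRINK_NEW=true   # filter metadata for newly indexed media
      - TUBESYNC_SHRINK_OLD=true   # also rewrite stored metadata when it is loaded
```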
@@ -1,5 +1,4 @@
 import os
-from urllib.parse import urljoin
 from django.core.wsgi import get_wsgi_application
 
 
@@ -16,10 +15,9 @@ def application(environ, start_response):
     else:
         raise Exception(f'DJANGO_URL_PREFIX must end with a /, '
                         f'got: {DJANGO_URL_PREFIX}')
-    if script_name:
-        static_url = urljoin(script_name, 'static/')
+    if script_name is not None:
         environ['SCRIPT_NAME'] = script_name
         path_info = environ['PATH_INFO']
-        if path_info.startswith(script_name) and not path_info.startswith(static_url):
+        if path_info.startswith(script_name):
             environ['PATH_INFO'] = path_info[len(script_name) - 1:]
     return _application(environ, start_response)
tubesync/upgrade_yt-dlp.sh (new executable file, 38 lines)
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+warning_message() {
+    cat <<EOM
+Please report any issues that you have encountered before updating yt-dlp.
+
+This is a tool to assist developers with debugging YouTube issues.
+It should not be used as an alternative to updating container images!
+EOM
+} 1>&2
+
+pip3() {
+    local pip_runner pip_whl run_whl
+
+    # pipenv
+    pip_runner='/usr/lib/python3/dist-packages/pipenv/patched/pip/__pip-runner__.py'
+    test -s "${pip_runner}" || pip_runner=''
+
+    # python3-pip-whl
+    pip_whl="$(ls -1r /usr/share/python-wheels/pip-*-py3-none-any.whl | head -n 1)"
+    run_whl="${pip_whl}/pip"
+
+    python3 "${pip_runner:-"${run_whl}"}" "$@"
+}
+
+warning_message
+test -n "${TUBESYNC_DEBUG}" || exit 1
+
+# Use the flag added in 23.0.1, if possible.
+# https://github.com/pypa/pip/pull/11780
+break_system_packages='--break-system-packages'
+pip_version="$(pip3 --version | awk '$1 = "pip" { print $2; exit; }')"
+if [[ "${pip_version}" < "23.0.1" ]]; then
+    break_system_packages=''
+fi
+
+pip3 install --upgrade ${break_system_packages} yt-dlp