with open("pyproject.toml", "wb") as f:
tomli_w.dump(pyproject, f)
- - name: Build and Push
+ # we use 3 different jobs here due to the fact that the architecture is named differently in buildx
+ # NOTE(review): all three arch jobs push the same tags, so each later push
+ # overwrites the previous arch's manifest — confirm this is intended (last arch wins per tag).
+ - name: Build and Push amd64
uses: docker/build-push-action@v4.1.1
with:
context: .
- platforms: linux/amd64,linux/arm64
+ platforms: linux/amd64
file: Dockerfile
tags: |-
ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.patch }},
+ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.minor }},
+ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.major }},
ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.channel }},
ghcr.io/${{ github.repository_owner }}/server:latest
push: true
- build-args:
+ build-args: |
"MASS_VERSION=${{ needs.build-and-publish-pypi.outputs.version }}"
+ "BUILD_ARCH=amd64"
+ - name: Build and Push arm64
+ uses: docker/build-push-action@v4.1.1
+ with:
+ context: .
+ platforms: linux/arm64
+ file: Dockerfile
+ tags: |-
+ ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.patch }},
+ ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.minor }},
+ ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.major }},
+ ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.channel }},
+ ghcr.io/${{ github.repository_owner }}/server:latest
+ push: true
+ build-args: |
+ "MASS_VERSION=${{ needs.build-and-publish-pypi.outputs.version }}"
+ "BUILD_ARCH=aarch64"
+ - name: Build and Push armv7
+ uses: docker/build-push-action@v4.1.1
+ with:
+ context: .
+ platforms: linux/arm/v7
+ file: Dockerfile
+ tags: |-
+ ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.patch }},
+ ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.minor }},
+ ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.major }},
+ ghcr.io/${{ github.repository_owner }}/server:${{ steps.tags.outputs.channel }},
+ ghcr.io/${{ github.repository_owner }}/server:latest
+ push: true
+ build-args: |
+ "MASS_VERSION=${{ needs.build-and-publish-pypi.outputs.version }}"
+ "BUILD_ARCH=armv7"
# syntax=docker/dockerfile:1
-ARG TARGETPLATFORM="linux/amd64"
-ARG BUILD_VERSION=latest
+ARG MASS_VERSION
+ARG BUILD_ARCH
ARG PYTHON_VERSION="3.11"
+ARG BASE_IMAGE_VERSION="3.11-alpine3.18"
-#####################################################################
-# #
-# Build Wheels #
-# #
-#####################################################################
-FROM python:${PYTHON_VERSION}-slim as wheels-builder
-ARG TARGETPLATFORM
+FROM ghcr.io/home-assistant/$BUILD_ARCH-base-python:${BASE_IMAGE_VERSION}
+
+# ARGs declared before FROM are cleared at the FROM boundary; re-declare them
+# here so ${MASS_VERSION} and ${BUILD_ARCH} still resolve in the RUN and
+# LABEL instructions of this stage (otherwise they expand to empty strings).
+ARG MASS_VERSION
+ARG BUILD_ARCH
-# Install buildtime packages
-RUN set -x \
- && apt-get update \
- && apt-get install -y --no-install-recommends \
- build-essential \
- libffi-dev \
- cargo \
- git \
- curl
-
-# build jemalloc
-ARG JEMALLOC_VERSION=5.3.0
-RUN curl -L -s https://github.com/jemalloc/jemalloc/releases/download/${JEMALLOC_VERSION}/jemalloc-${JEMALLOC_VERSION}.tar.bz2 \
- | tar -xjf - -C /tmp \
- && cd /tmp/jemalloc-${JEMALLOC_VERSION} \
- && ./configure \
- && make \
- && make install
-
-WORKDIR /wheels
-COPY requirements_all.txt .
-
-
-# build python wheels for all dependencies
-RUN set -x \
- && pip install --upgrade pip \
- && pip install build maturin \
- && pip wheel -r requirements_all.txt
+ENV S6_SERVICES_GRACETIME=220000
+ENV WHEELS_LINKS="https://wheels.home-assistant.io/musllinux/"
-# build music assistant wheel
-COPY music_assistant music_assistant
-COPY pyproject.toml .
-COPY MANIFEST.in .
-RUN python3 -m build --wheel --outdir /wheels --skip-dependency-check
+WORKDIR /usr/src
-#####################################################################
-# #
-# Final Image #
-# #
-#####################################################################
-FROM python:${PYTHON_VERSION}-slim AS final-build
-WORKDIR /app
-COPY --from=wheels-builder /usr/local/lib/libjemalloc.so /usr/local/lib/libjemalloc.so
-
-RUN set -x \
- && apt-get update \
- && apt-get install -y --no-install-recommends \
- ca-certificates \
- curl \
+# Install OS requirements.
+# `apk add --no-cache` fetches a fresh index on its own, so a separate
+# `apk update` is redundant and would only leave index files in the layer.
+RUN apk add --no-cache \
git \
wget \
- tzdata \
ffmpeg \
- libsox-fmt-all \
- libsox3 \
sox \
cifs-utils \
- libnfs-utils \
- libjemalloc2 \
- # cleanup
- && rm -rf /tmp/* \
- && rm -rf /var/lib/apt/lists/*
-
-
-# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount
-# Install all built wheels
-RUN --mount=type=bind,target=/tmp/wheels,source=/wheels,from=wheels-builder,rw \
- set -x \
- && pip install --upgrade pip \
- && pip install --no-cache-dir /tmp/wheels/*.whl
+ nfs-utils
-# Required to persist build arg
-ARG BUILD_VERSION
-ARG TARGETPLATFORM
-
-# Set some labels for the Home Assistant add-on
+## Setup Core dependencies
+COPY requirements_all.txt .
+RUN pip3 install \
+ --no-cache-dir \
+ --only-binary=:all: \
+ --find-links ${WHEELS_LINKS} \
+ -r requirements_all.txt
+
+# Install Music Assistant (the "server" extra) from PyPI, then precompile it.
+# NOTE: `python3 -m compileall` takes a filesystem path; the previous target
+# "music-assistant" is the PyPI distribution name, not a directory under
+# WORKDIR, so resolve the installed package location instead.
+RUN pip3 install \
+    --no-cache-dir \
+    music-assistant[server]==${MASS_VERSION} \
+    && python3 -m compileall -q "$(python3 -c 'import music_assistant; print(music_assistant.__path__[0])')"
+
# Set some labels
LABEL \
- io.hass.version=${BUILD_VERSION} \
+ org.opencontainers.image.title="Music Assistant" \
+ org.opencontainers.image.description="Music Assistant Server/Core" \
+ org.opencontainers.image.source="https://github.com/music-assistant/server" \
+ org.opencontainers.image.authors="The Music Assistant Team" \
+ org.opencontainers.image.documentation="https://github.com/orgs/music-assistant/discussions" \
+ org.opencontainers.image.licenses="Apache License 2.0" \
+ io.hass.version=${MASS_VERSION} \
io.hass.name="Music Assistant" \
io.hass.description="Music Assistant Server/Core" \
- io.hass.platform="${TARGETPLATFORM}" \
+ io.hass.platform="linux/${BUILD_ARCH}" \
io.hass.type="addon"
VOLUME [ "/data" ]
-ENV LD_PRELOAD=/usr/local/lib/libjemalloc.so
-ENTRYPOINT ["mass", "--config", "/data"]
+# S6-Overlay
+COPY rootfs /
+
+WORKDIR /data
import asyncio
import logging
import os
+import subprocess
import sys
import threading
from contextlib import suppress
FORMAT_TIME: Final = "%H:%M:%S"
FORMAT_DATETIME: Final = f"{FORMAT_DATE} {FORMAT_TIME}"
MAX_LOG_FILESIZE = 1000000 * 10 # 10 MB
+ALPINE_RELEASE_FILE = "/etc/alpine-release"
def get_arguments():
return logger
+def _enable_posix_spawn() -> None:
+    """Force the subprocess module to use posix_spawn() on Alpine Linux/musl."""
+    # pylint: disable=protected-access
+    if subprocess._USE_POSIX_SPAWN:
+        return
+
+    # The subprocess module does not know about Alpine Linux/musl and will
+    # use fork() instead of posix_spawn(), which is significantly less
+    # efficient. This is a workaround to force posix_spawn() on Alpine
+    # Linux, where it is supported by musl.
+    subprocess._USE_POSIX_SPAWN = os.path.exists(ALPINE_RELEASE_FILE)
+
+
def main():
"""Start MusicAssistant."""
# parse arguments
logger = setup_logger(data_dir, log_level)
mass = MusicAssistant(data_dir)
+ # enable alpine subprocess workaround
+ _enable_posix_spawn()
+
def on_shutdown(loop):
logger.info("shutdown requested!")
loop.run_until_complete(mass.stop())
run(
start_mass(),
- use_uvloop=False,
+ use_uvloop=True,
shutdown_callback=on_shutdown,
executor_workers=64,
)
iterator = (
ffmpeg_proc.iter_chunked(icy_meta_interval)
if enable_icy
- else ffmpeg_proc.iter_chunked(128000)
+ else ffmpeg_proc.iter_chunked(256000)
)
async for chunk in iterator:
try:
stderr=asyncio.subprocess.PIPE if self._enable_stderr else None,
close_fds=True,
)
-
- # Fix BrokenPipeError due to a race condition
- # by attaching a default done callback
- def _done_cb(fut: asyncio.Future):
- fut.exception()
-
- self._proc._transport._protocol._stdin_closed.add_done_callback(_done_cb)
return self
async def __aexit__(self, exc_type, exc_value, traceback) -> bool:
"shortuuid==1.0.11",
"zeroconf==0.70.0",
"cryptography==41.0.1",
- "ifaddr==0.2.0"
+ "ifaddr==0.2.0",
+ "uvloop==0.17.0"
]
test = [
"black==23.3.0",
shortuuid==1.0.11
soco==0.29.1
unidecode==1.3.6
+uvloop==0.17.0
xmltodict==0.13.0
ytmusicapi==1.0.0
zeroconf==0.70.0
--- /dev/null
+#!/usr/bin/env bashio
+# ==============================================================================
+# Take down the S6 supervision tree when Music Assistant fails
+# ==============================================================================
+# s6 invokes a service's ./finish script with:
+#   $1 - the service's exit code, or 256 if it was killed by a signal
+#   $2 - the signal number when $1 is 256
+# (per s6 servicedir docs — TODO confirm against the pinned s6-overlay version)
+declare RESTART_EXIT_CODE=100
+declare SIGNAL_EXIT_CODE=256
+declare SIGTERM=15
+declare APP_EXIT_CODE=${1}
+declare SIGNAL_NO=${2}
+declare NEW_EXIT_CODE=
+
+bashio::log.info "Music Assistant Core finish process exit code ${APP_EXIT_CODE}"
+
+# Exit code 100 means "restart me": return without halting the supervision
+# tree so s6 brings the service back up.
+if [[ ${APP_EXIT_CODE} -eq ${RESTART_EXIT_CODE} ]]; then
+  exit 0
+elif [[ ${APP_EXIT_CODE} -eq ${SIGNAL_EXIT_CODE} ]]; then
+  bashio::log.info "Music Assistant Core finish process received signal ${SIGNAL_NO}"
+
+  # Record the conventional 128+signal exit code as the container's result.
+  NEW_EXIT_CODE=$((128 + SIGNAL_NO))
+  echo ${NEW_EXIT_CODE} > /run/s6-linux-init-container-results/exitcode
+
+  # Only SIGTERM takes the whole container down; other signals leave the
+  # supervision tree running.
+  if [[ ${SIGNAL_NO} -eq ${SIGTERM} ]]; then
+    /run/s6/basedir/bin/halt
+  fi
+else
+  # Any other exit code: propagate it and stop the container.
+  bashio::log.info "Music Assistant Core service shutdown"
+
+  echo ${APP_EXIT_CODE} > /run/s6-linux-init-container-results/exitcode
+  /run/s6/basedir/bin/halt
+fi
--- /dev/null
+#!/usr/bin/with-contenv bashio
+# ==============================================================================
+# Start Music Assistant service
+# ==============================================================================
+
+cd /data || bashio::exit.nok "Can't find data folder!"
+
+# Enable jemalloc
+export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2"
+export MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000"
+exec python3 -m music_assistant --config /data
--- /dev/null
+#!/bin/sh -e
+# NOTE(review): this appears to be the stock s6-overlay stage-0 /init script,
+# vendored verbatim except for the custom CMD short-circuit below — keep it in
+# sync with the pinned s6-overlay release rather than editing it locally.
+
+# This is the first program launched at container start.
+# We don't know where our binaries are and we cannot guarantee
+# that the default PATH can access them.
+# So this script needs to be entirely self-contained until it has
+# at least /command, /usr/bin and /bin in its PATH.
+
+# Append $1 to PATH unless it is already one of PATH's components.
+addpath () {
+  x="$1"
+  IFS=:
+  set -- $PATH
+  IFS=
+  while test "$#" -gt 0 ; do
+    if test "$1" = "$x" ; then
+      return
+    fi
+    shift
+  done
+  PATH="${x}:$PATH"
+}
+
+if test -z "$PATH" ; then
+  PATH=/bin
+fi
+
+addpath /bin
+addpath /usr/bin
+addpath /command
+export PATH
+
+# Now we're good: s6-overlay-suexec is accessible via PATH, as are
+# all our binaries.
+
+# Skip further init if the user has a given CMD.
+# This is to prevent Home Assistant from starting twice if the user
+# decided to override/start via the CMD.
+if test $# -ne 0 ; then
+  exec "$@"
+fi
+
+# Run preinit as root, then run stage0 as the container's user (can be
+# root, can be a normal user).
+
+exec s6-overlay-suexec \
+  ' /package/admin/s6-overlay/libexec/preinit' \
+  '' \
+  /package/admin/s6-overlay/libexec/stage0 \
+  "$@"