def __post_init__(self):
"""Call after init."""
- if not self.name:
+ if self.name is None:
# we've got some reports where the name was empty, causing weird issues.
# e.g. here: https://github.com/music-assistant/hass-music-assistant/issues/1515
self.name = "[Unknown]"
- if not self.uri:
+ if self.uri is None:
self.uri = create_uri(self.media_type, self.provider, self.item_id)
- if not self.sort_name:
+ if self.sort_name is None:
self.sort_name = create_sort_name(self.name)
@property
metadata: MediaItemMetadata = field(default_factory=MediaItemMetadata)
favorite: bool = False
position: int | None = None # required for playlist tracks, optional for all other
- # timestamps to determine when the item was added/modified to the db
- timestamp_added: int = 0
- timestamp_modified: int = 0
@property
def available(self):
API_SCHEMA_VERSION: Final[int] = 24
MIN_SCHEMA_VERSION: Final[int] = 24
-DB_SCHEMA_VERSION: Final[int] = 30
+DB_SCHEMA_VERSION: Final[int] = 1
MASS_LOGGER_NAME: Final[str] = "music_assistant"
DB_TABLE_ARTISTS: Final[str] = "artists"
DB_TABLE_ALBUMS: Final[str] = "albums"
DB_TABLE_TRACKS: Final[str] = "tracks"
-DB_TABLE_ALBUM_TRACKS: Final[str] = "albumtracks"
-DB_TABLE_TRACK_ARTISTS: Final[str] = "trackartists"
-DB_TABLE_ALBUM_ARTISTS: Final[str] = "albumartists"
DB_TABLE_PLAYLISTS: Final[str] = "playlists"
DB_TABLE_RADIOS: Final[str] = "radios"
DB_TABLE_CACHE: Final[str] = "cache"
DB_TABLE_SETTINGS: Final[str] = "settings"
DB_TABLE_THUMBS: Final[str] = "thumbnails"
DB_TABLE_PROVIDER_MAPPINGS: Final[str] = "provider_mappings"
+DB_TABLE_ALBUM_TRACKS: Final[str] = "album_tracks"
+DB_TABLE_TRACK_ARTISTS: Final[str] = "track_artists"
+DB_TABLE_ALBUM_ARTISTS: Final[str] = "album_artists"
+
# all other
MASS_LOGO_ONLINE: Final[str] = (
import asyncio
import contextlib
+from collections.abc import Iterable
from random import choice, random
from typing import TYPE_CHECKING, cast
-from music_assistant.common.helpers.datetime import utc_timestamp
from music_assistant.common.helpers.global_cache import get_global_cache_value
from music_assistant.common.helpers.json import serialize_to_json
from music_assistant.common.models.enums import EventType, ProviderFeature
DB_TABLE_ALBUM_TRACKS,
DB_TABLE_ALBUMS,
DB_TABLE_ARTISTS,
- DB_TABLE_PROVIDER_MAPPINGS,
- DB_TABLE_TRACKS,
)
from music_assistant.server.controllers.media.base import MediaControllerBase
from music_assistant.server.helpers.compare import (
def __init__(self, *args, **kwargs) -> None:
"""Initialize class."""
super().__init__(*args, **kwargs)
- self._db_add_lock = asyncio.Lock()
self.base_query = f"""
- SELECT
- {self.db_table}.*,
- {DB_TABLE_ARTISTS}.sort_name AS sort_artist,
- json_group_array(
- DISTINCT json_object(
- 'item_id', {DB_TABLE_PROVIDER_MAPPINGS}.provider_item_id,
- 'provider_domain', {DB_TABLE_PROVIDER_MAPPINGS}.provider_domain,
- 'provider_instance', {DB_TABLE_PROVIDER_MAPPINGS}.provider_instance,
- 'available', {DB_TABLE_PROVIDER_MAPPINGS}.available,
- 'url', {DB_TABLE_PROVIDER_MAPPINGS}.url,
- 'audio_format', json({DB_TABLE_PROVIDER_MAPPINGS}.audio_format),
- 'details', {DB_TABLE_PROVIDER_MAPPINGS}.details
- )) filter ( where {DB_TABLE_PROVIDER_MAPPINGS}.item_id is not null) as {DB_TABLE_PROVIDER_MAPPINGS},
- json_group_array(
- DISTINCT json_object(
- 'item_id', {DB_TABLE_ARTISTS}.item_id,
- 'provider', 'library',
- 'name', {DB_TABLE_ARTISTS}.name,
- 'sort_name', {DB_TABLE_ARTISTS}.sort_name,
- 'media_type', 'artist'
- )) filter ( where {DB_TABLE_ARTISTS}.name is not null) as {DB_TABLE_ARTISTS}
- FROM {self.db_table}
- LEFT JOIN {DB_TABLE_ALBUM_ARTISTS} on {DB_TABLE_ALBUM_ARTISTS}.album_id = {self.db_table}.item_id
- LEFT JOIN {DB_TABLE_ARTISTS} on {DB_TABLE_ARTISTS}.item_id = {DB_TABLE_ALBUM_ARTISTS}.artist_id
- LEFT JOIN {DB_TABLE_PROVIDER_MAPPINGS}
- ON {self.db_table}.item_id = {DB_TABLE_PROVIDER_MAPPINGS}.item_id
- AND {DB_TABLE_PROVIDER_MAPPINGS}.media_type == '{self.media_type.value}'
+ SELECT
+ {self.db_table}.*
+ FROM {self.db_table}
+ LEFT JOIN {DB_TABLE_ALBUM_ARTISTS} on {DB_TABLE_ALBUM_ARTISTS}.album_id = {self.db_table}.item_id
+ LEFT JOIN {DB_TABLE_ARTISTS} on {DB_TABLE_ARTISTS}.item_id = {DB_TABLE_ALBUM_ARTISTS}.artist_id
""" # noqa: E501
+ self._db_add_lock = asyncio.Lock()
# register api handlers
self.mass.register_api_command("music/albums/library_items", self.library_items)
self.mass.register_api_command(
cur_item.item_id, item, overwrite=overwrite_existing
)
else:
- # search by name
- async for db_item in self.iter_library_items(search=item.name):
+ # search by (exact) name match
+ query = f"WHERE {self.db_table}.name = :name OR {self.db_table}.sort_name = :sort_name"
+ query_params = {"name": item.name, "sort_name": item.sort_name}
+ async for db_item in self.iter_library_items(
+ extra_query=query, extra_query_params=query_params
+ ):
if compare_album(db_item, item):
# existing item found: update it
library_item = await self.update_item_in_library(
else:
album_type = cur_item.album_type
cur_item.external_ids.update(update.external_ids)
+ provider_mappings = (
+ update.provider_mappings
+ if overwrite
+ else {*cur_item.provider_mappings, *update.provider_mappings}
+ )
await self.mass.music.database.update(
self.db_table,
{"item_id": db_id},
"external_ids": serialize_to_json(
update.external_ids if overwrite else cur_item.external_ids
),
- "timestamp_modified": int(utc_timestamp()),
},
)
+ self.logger.debug("updated %s in database: %s", update.name, db_id)
# update/set provider_mappings table
- await self._set_provider_mappings(db_id, update.provider_mappings, overwrite=overwrite)
+ await self._set_provider_mappings(db_id, provider_mappings, overwrite)
# set album artist(s)
- await self._set_album_artists(db_id, update.artists, overwrite=overwrite)
-
- self.logger.debug("updated %s in database: %s", update.name, db_id)
+ artists = update.artists if overwrite else cur_item.artists + update.artists
+ await self._set_album_artists(db_id, artists, overwrite=overwrite)
# get full created object
library_item = await self.get_library_item(db_id)
self.mass.signal_event(
return sorted(db_items, key=lambda x: (x.disc_number, x.track_number))
# return all (unique) items from all providers
result: list[AlbumTrack] = [*db_items]
- unique_ids: set[str] = set()
+ unique_ids: set[str] = {f"{x.disc_number or 1}.{x.track_number}" for x in db_items}
for provider_mapping in full_album.provider_mappings:
provider_tracks = await self._get_provider_album_tracks(
provider_mapping.item_id, provider_mapping.provider_instance
if unique_id in unique_ids:
continue
unique_ids.add(unique_id)
- # prefer db item
- if db_item := await self.mass.music.tracks.get_library_item_by_prov_id(
- provider_track.item_id, provider_track.provider
- ):
- if db_item in db_items:
- continue
- result.append(
- AlbumTrack.from_track(
- db_item,
- full_album,
- disc_number=provider_track.disc_number,
- track_number=provider_track.track_number,
- )
- )
- elif not in_library_only and provider_track not in result:
- result.append(AlbumTrack.from_track(provider_track, full_album))
+ result.append(AlbumTrack.from_track(provider_track, full_album))
# NOTE: we need to return the results sorted on disc/track here
# to ensure the correct order at playback
return sorted(result, key=lambda x: (x.disc_number, x.track_number))
item_id: str | int,
) -> list[AlbumTrack]:
"""Return in-database album tracks for the given database album."""
- subquery = (
- f"SELECT DISTINCT track_id FROM {DB_TABLE_ALBUM_TRACKS} "
- f"WHERE {DB_TABLE_ALBUM_TRACKS}.album_id = {item_id} AND albums.item_id = {item_id}"
- )
- query = f"WHERE {DB_TABLE_TRACKS}.item_id in ({subquery})"
- result = await self.mass.music.tracks.library_items(extra_query=query)
+ query = f"WHERE {DB_TABLE_ALBUM_TRACKS}.album_id = {item_id}"
+ result = await self.mass.music.tracks._get_library_items_by_query(extra_query=query)
if TYPE_CHECKING:
- return cast(list[AlbumTrack], result.items)
- return result.items
+ return cast(list[AlbumTrack], result)
+ return result
async def _add_library_item(self, item: Album) -> Album:
"""Add a new record to the database."""
"year": item.year,
"metadata": serialize_to_json(item.metadata),
"external_ids": serialize_to_json(item.external_ids),
- "timestamp_added": int(utc_timestamp()),
- "timestamp_modified": int(utc_timestamp()),
},
)
db_id = new_item["item_id"]
# update/set provider_mappings table
await self._set_provider_mappings(db_id, item.provider_mappings)
- # set album artist(s)
+ # set album artist(s)
await self._set_album_artists(db_id, item.artists)
- self.logger.debug("added %s to database", item.name)
+ self.logger.debug("added %s to database (item id %s)", item.name, db_id)
# return the full item we just added
return await self.get_library_item(db_id)
msg = "No Music Provider found that supports requesting similar tracks."
raise UnsupportedFeaturedException(msg)
+ async def _set_album_artists(
+ self, db_id: int, artists: Iterable[Artist | ItemMapping], overwrite: bool = False
+ ) -> None:
+ """Store Album Artists."""
+ if overwrite:
+ # on overwrite, clear the album_artists table first
+ await self.mass.music.database.delete(
+ DB_TABLE_ALBUM_ARTISTS,
+ {
+ "album_id": db_id,
+ },
+ )
+ artist_mappings: list[ItemMapping] = []
+ for artist in artists:
+ mapping = await self._set_album_artist(db_id, artist=artist, overwrite=overwrite)
+ artist_mappings.append(mapping)
+ # we (temporarily?) duplicate the artist mappings in a separate column of the media
+ # item's table, because the json_group_array query is very slow
+ await self.mass.music.database.update(
+ self.db_table,
+ {"item_id": db_id},
+ {"artists": serialize_to_json(artist_mappings)},
+ )
+
+ async def _set_album_artist(
+ self, db_id: int, artist: Artist | ItemMapping, overwrite: bool = False
+ ) -> ItemMapping:
+ """Store Album Artist info."""
+ db_artist: Artist | ItemMapping = None
+ if artist.provider == "library":
+ db_artist = artist
+ elif existing := await self.mass.music.artists.get_library_item_by_prov_id(
+ artist.item_id, artist.provider
+ ):
+ db_artist = existing
+
+ if not db_artist or overwrite:
+ db_artist = await self.mass.music.artists.add_item_to_library(
+ artist, metadata_lookup=False, overwrite_existing=overwrite
+ )
+ # write (or update) record in album_artists table
+ await self.mass.music.database.insert_or_replace(
+ DB_TABLE_ALBUM_ARTISTS,
+ {
+ "album_id": db_id,
+ "artist_id": int(db_artist.item_id),
+ },
+ )
+ return ItemMapping.from_item(db_artist)
+
async def _match(self, db_album: Album) -> None:
"""Try to find match on all (streaming) providers for the provided (database) album.
db_album.name,
provider.name,
)
-
- async def _set_album_artists(
- self, db_id: int, artists: list[Artist | ItemMapping], overwrite: bool = False
- ) -> None:
- """Store Album Artists."""
- if overwrite:
- # on overwrite, clear the album_artists table first
- await self.mass.music.database.delete(
- DB_TABLE_ALBUM_ARTISTS,
- {
- "album_id": db_id,
- },
- )
- for artist in artists:
- await self._set_album_artist(db_id, artist=artist, overwrite=overwrite)
-
- async def _set_album_artist(
- self, db_id: int, artist: Artist | ItemMapping, overwrite: bool = False
- ) -> None:
- """Store Album Artist info."""
- db_artist: Album | ItemMapping = None
- if artist.provider == "library":
- db_artist = artist
- elif existing := await self.mass.music.artists.get_library_item_by_prov_id(
- artist.item_id, artist.provider
- ):
- db_artist = existing
- else:
- # not an existing artist, we need to fetch before we can add it to the library
- if isinstance(artist, ItemMapping):
- artist = await self.mass.music.artists.get_provider_item(
- artist.item_id, artist.provider, fallback=artist
- )
- with contextlib.suppress(MediaNotFoundError, AssertionError, InvalidDataError):
- db_artist = await self.mass.music.artists.add_item_to_library(
- artist, metadata_lookup=False, overwrite_existing=overwrite
- )
- if not db_artist:
- # this should not happen but streaming providers can be awful sometimes
- self.logger.warning(
- "Unable to resolve Artist %s for album %s, "
- "album will be added to the library without this artist!",
- artist.uri,
- db_id,
- )
- return
- # write (or update) record in album_artists table
- await self.mass.music.database.insert_or_replace(
- DB_TABLE_ALBUM_ARTISTS,
- {
- "album_id": db_id,
- "artist_id": int(db_artist.item_id),
- },
- )
from random import choice, random
from typing import TYPE_CHECKING, Any
-from music_assistant.common.helpers.datetime import utc_timestamp
from music_assistant.common.helpers.json import serialize_to_json
from music_assistant.common.models.enums import EventType, ProviderFeature
from music_assistant.common.models.errors import MediaNotFoundError, UnsupportedFeaturedException
cur_item.item_id, item, overwrite=overwrite_existing
)
else:
- # search by name
- async for db_item in self.iter_library_items(search=item.name):
+ # search by (exact) name match
+ query = f"WHERE {self.db_table}.name = :name OR {self.db_table}.sort_name = :sort_name"
+ query_params = {"name": item.name, "sort_name": item.sort_name}
+ async for db_item in self.iter_library_items(
+ extra_query=query, extra_query_params=query_params
+ ):
if compare_artist(db_item, item):
# existing item found: update it
# NOTE: if we matched an artist by name this could theoretically lead to
update.mbid = VARIOUS_ARTISTS_ID_MBID
if update.mbid == VARIOUS_ARTISTS_ID_MBID:
update.name = VARIOUS_ARTISTS_NAME
+
await self.mass.music.database.update(
self.db_table,
{"item_id": db_id},
update.external_ids if overwrite else cur_item.external_ids
),
"metadata": serialize_to_json(metadata),
- "timestamp_modified": int(utc_timestamp()),
},
)
- ## update/set provider_mappings table
- await self._set_provider_mappings(db_id, update.provider_mappings, overwrite=overwrite)
self.logger.debug("updated %s in database: %s", update.name, db_id)
+ # update/set provider_mappings table
+ provider_mappings = (
+ update.provider_mappings
+ if overwrite
+ else {*cur_item.provider_mappings, *update.provider_mappings}
+ )
+ await self._set_provider_mappings(db_id, provider_mappings, overwrite)
# get full created object
library_item = await self.get_library_item(db_id)
self.mass.signal_event(
) -> PagedItems:
"""Get in-database (album) artists."""
if album_artists_only:
- artist_query = "artists.item_id in (select albumartists.artist_id from albumartists)"
+ artist_query = (
+ f"artists.item_id in (select {DB_TABLE_ALBUM_ARTISTS}.artist_id "
+ f"from {DB_TABLE_ALBUM_ARTISTS})"
+ )
extra_query = f"{extra_query} AND {artist_query}" if extra_query else artist_query
return await super().library_items(
favorite=favorite,
limit=5000,
):
with contextlib.suppress(MediaNotFoundError):
- await self.mass.music.albums.remove_item_from_library(db_row["album_id"])
+ await self.mass.music.albums.remove_item_from_library(db_row["item_id"])
# recursively also remove artist tracks
for db_row in await self.mass.music.database.get_rows_from_query(
- f"SELECT track_id FROM {DB_TABLE_TRACK_ARTISTS} WHERE artist_id = {db_id}",
+ f"SELECT track_id FROM {DB_TABLE_TRACKS} WHERE artist_id = {db_id}",
limit=5000,
):
with contextlib.suppress(MediaNotFoundError):
- await self.mass.music.tracks.remove_item_from_library(db_row["track_id"])
+ await self.mass.music.tracks.remove_item_from_library(db_row["item_id"])
# delete the artist itself from db
await super().remove_item_from_library(db_id)
item_id,
provider_instance_id_or_domain,
):
+ subquery = (
+ "SELECT item_id FROM provider_mappings WHERE "
+ "media_type = 'track' AND (provider_domain = :prov_id "
+ "OR provider_instance = :prov_id)"
+ )
query = (
- "WHERE trackartists.artist_id = :artist_id AND "
- "(provider_mappings.provider_domain = :prov_id OR "
- "provider_mappings.provider_instance = :prov_id)"
+ f"WHERE {DB_TABLE_TRACKS}.item_id IN ({subquery}) "
+ f"AND {DB_TABLE_TRACK_ARTISTS}.artist_id = :artist_id"
)
query_params = {
"artist_id": db_artist.item_id,
"prov_id": provider_instance_id_or_domain,
}
- paged_list = await self.mass.music.tracks.library_items(
+ return await self.mass.music.tracks._get_library_items_by_query(
extra_query=query, extra_query_params=query_params
)
- return paged_list.items
# store (serializable items) in cache
if prov.is_streaming_provider:
self.mass.create_task(self.mass.cache.set(cache_key, [x.to_dict() for x in items]))
item_id: str | int,
) -> list[Track]:
"""Return all tracks for an artist in the library/db."""
- subquery = f"SELECT track_id FROM {DB_TABLE_TRACK_ARTISTS} WHERE artist_id = {item_id}"
- query = f"WHERE {DB_TABLE_TRACKS}.item_id in ({subquery})"
- paged_list = await self.mass.music.tracks.library_items(extra_query=query)
- return paged_list.items
+ return await self.mass.music.tracks._get_library_items_by_query(
+ extra_query=f"WHERE {DB_TABLE_TRACK_ARTISTS}.artist_id = {item_id}",
+ )
async def get_provider_artist_albums(
self,
item_id,
provider_instance_id_or_domain,
):
+ subquery = (
+ "SELECT item_id FROM provider_mappings WHERE "
+ "media_type = 'album' AND (provider_domain = :prov_id "
+ "OR provider_instance = :prov_id)"
+ )
query = (
- f"WHERE albumartists.artist_id = {db_artist.item_id} AND "
- f'(provider_mappings.provider_domain = "{provider_instance_id_or_domain}" OR '
- f'provider_mappings.provider_instance = "{provider_instance_id_or_domain}")'
+ f"WHERE {DB_TABLE_ALBUMS}.item_id IN ({subquery}) "
+ f"AND {DB_TABLE_ALBUM_ARTISTS}.artist_id = :artist_id"
)
- paged_list = await self.mass.music.albums.library_items(extra_query=query)
- return paged_list.items
+ query_params = {
+ "prov_id": provider_instance_id_or_domain,
+ "artist_id": db_artist.item_id,
+ }
+ return await self.mass.music.albums._get_library_items_by_query(
+ extra_query=query, extra_query_params=query_params
+ )
+
# store (serializable items) in cache
if prov.is_streaming_provider:
self.mass.create_task(self.mass.cache.set(cache_key, [x.to_dict() for x in items]))
item_id: str | int,
) -> list[Album]:
"""Return all in-library albums for an artist."""
- subquery = f"SELECT album_id FROM {DB_TABLE_ALBUM_ARTISTS} WHERE artist_id = {item_id}"
- query = f"WHERE {DB_TABLE_ALBUMS}.item_id in ({subquery})"
- paged_list = await self.mass.music.albums.library_items(extra_query=query)
- return paged_list.items
+ query = f"WHERE {DB_TABLE_ALBUM_ARTISTS}.artist_id = {item_id}"
+ return await self.mass.music.albums._get_library_items_by_query(extra_query=query)
async def _add_library_item(self, item: Artist) -> Artist:
"""Add a new item record to the database."""
if item.mbid == VARIOUS_ARTISTS_ID_MBID:
item.name = VARIOUS_ARTISTS_NAME
# no existing item matched: insert item
- item.timestamp_added = int(utc_timestamp())
- item.timestamp_modified = int(utc_timestamp())
new_item = await self.mass.music.database.insert(
self.db_table,
{
"favorite": item.favorite,
"external_ids": serialize_to_json(item.external_ids),
"metadata": serialize_to_json(item.metadata),
- "timestamp_added": int(utc_timestamp()),
- "timestamp_modified": int(utc_timestamp()),
},
)
db_id = new_item["item_id"]
import logging
from abc import ABCMeta, abstractmethod
+from collections.abc import Iterable
from contextlib import suppress
from time import time
from typing import TYPE_CHECKING, Any, Generic, TypeVar
-from music_assistant.common.helpers.json import json_dumps, json_loads
+from music_assistant.common.helpers.json import json_loads, serialize_to_json
from music_assistant.common.models.enums import EventType, ExternalID, MediaType, ProviderFeature
from music_assistant.common.models.errors import MediaNotFoundError, ProviderUnavailableError
from music_assistant.common.models.media_items import (
Track,
media_from_dict,
)
-from music_assistant.constants import DB_TABLE_PROVIDER_MAPPINGS, MASS_LOGGER_NAME
+from music_assistant.constants import (
+ DB_TABLE_ALBUMS,
+ DB_TABLE_ARTISTS,
+ DB_TABLE_PROVIDER_MAPPINGS,
+ MASS_LOGGER_NAME,
+)
if TYPE_CHECKING:
- from collections.abc import AsyncGenerator, Iterable, Mapping
+ from collections.abc import AsyncGenerator, Mapping
from music_assistant.server import MusicAssistant
ItemCls = TypeVar("ItemCls", bound="MediaItemType")
REFRESH_INTERVAL = 60 * 60 * 24 * 30
-JSON_KEYS = ("artists", "album", "albums", "metadata", "provider_mappings", "external_ids")
+JSON_KEYS = ("artists", "album", "metadata", "provider_mappings", "external_ids")
class MediaControllerBase(Generic[ItemCls], metaclass=ABCMeta):
def __init__(self, mass: MusicAssistant) -> None:
"""Initialize class."""
self.mass = mass
- self.base_query = f"""
- SELECT
- {self.db_table}.*,
- json_group_array(
- DISTINCT json_object(
- 'item_id', {DB_TABLE_PROVIDER_MAPPINGS}.provider_item_id,
- 'provider_domain', {DB_TABLE_PROVIDER_MAPPINGS}.provider_domain,
- 'provider_instance', {DB_TABLE_PROVIDER_MAPPINGS}.provider_instance,
- 'available', {DB_TABLE_PROVIDER_MAPPINGS}.available,
- 'url', {DB_TABLE_PROVIDER_MAPPINGS}.url,
- 'audio_format', json({DB_TABLE_PROVIDER_MAPPINGS}.audio_format),
- 'details', {DB_TABLE_PROVIDER_MAPPINGS}.details
- )) filter ( where {DB_TABLE_PROVIDER_MAPPINGS}.item_id is not null) as {DB_TABLE_PROVIDER_MAPPINGS}
- FROM {self.db_table}
- LEFT JOIN {DB_TABLE_PROVIDER_MAPPINGS}
- ON {self.db_table}.item_id = {DB_TABLE_PROVIDER_MAPPINGS}.item_id
- AND {DB_TABLE_PROVIDER_MAPPINGS}.media_type == '{self.media_type.value}'
- """ # noqa: E501
- self.sql_group_by = f"{self.db_table}.item_id"
+ self.base_query = f"SELECT * FROM {self.db_table}"
self.logger = logging.getLogger(f"{MASS_LOGGER_NAME}.music.{self.media_type.value}")
@abstractmethod
self.db_table,
{"item_id": db_id},
)
- # update provider_mappings table
- await self.mass.music.database.delete(
- DB_TABLE_PROVIDER_MAPPINGS,
- {"media_type": self.media_type.value, "item_id": db_id},
- )
# NOTE: this does not delete any references to this item in other records,
# this is handled/overridden in the mediatype specific controllers
self.mass.signal_event(EventType.MEDIA_ITEM_DELETED, library_item.uri, library_item)
extra_query_params: dict[str, Any] | None = None,
) -> PagedItems:
"""Get in-database items."""
- sql_query = self.base_query
- params = extra_query_params or {}
- query_parts: list[str] = []
- if extra_query:
- # prevent duplicate where statement
- if extra_query.lower().startswith("where "):
- extra_query = extra_query[5:]
- query_parts.append(extra_query)
- if search:
- params["search"] = f"%{search}%"
- if self.media_type == MediaType.ALBUM:
- query_parts.append(
- f"({self.db_table}.name LIKE :search OR {self.db_table}.sort_name LIKE :search "
- "OR sort_artist LIKE :search)"
- )
- elif self.media_type == MediaType.TRACK:
- query_parts.append(
- f"({self.db_table}.name LIKE :search OR {self.db_table}.sort_name LIKE :search "
- "OR sort_artist LIKE :search OR sort_album LIKE :search)"
- )
- else:
- query_parts.append(
- f"{self.db_table}.name LIKE :search OR {self.db_table}.sort_name LIKE :search"
- )
- if favorite is not None:
- query_parts.append(f"{self.db_table}.favorite = :favorite")
- params["favorite"] = favorite
- if query_parts:
- # concetenate all where queries
- sql_query += " WHERE " + " AND ".join(query_parts)
- sql_query += f" GROUP BY {self.sql_group_by} ORDER BY {order_by}"
items = await self._get_library_items_by_query(
- sql_query, params, limit=limit, offset=offset
+ favorite=favorite,
+ search=search,
+ limit=limit,
+ offset=offset,
+ order_by=order_by,
+ extra_query=extra_query,
+ extra_query_params=extra_query_params,
)
count = len(items)
if 0 < count < limit:
total = offset + count
else:
- total = await self.mass.music.database.get_count_from_query(sql_query, params)
+ total = await self._get_library_items_by_query(
+ favorite=favorite,
+ search=search,
+ limit=limit,
+ offset=offset,
+ order_by=order_by,
+ extra_query=extra_query,
+ extra_query_params=extra_query_params,
+ count_only=True,
+ )
return PagedItems(items=items, count=count, limit=limit, offset=offset, total=total)
async def iter_library_items(
limit: int = 500
offset: int = 0
while True:
- next_items = await self.library_items(
+ next_items = await self._get_library_items_by_query(
favorite=favorite,
search=search,
limit=limit,
extra_query=extra_query,
extra_query_params=extra_query_params,
)
- for item in next_items.items:
+ for item in next_items:
yield item
- if next_items.count < limit:
+ if len(next_items) < limit:
break
offset += limit
if library_item and (time() - (library_item.metadata.last_refresh or 0)) > REFRESH_INTERVAL:
# it's been too long since the full metadata was last retrieved (or never at all)
metadata_lookup = True
- if library_item and force_refresh:
+ if library_item and (force_refresh or metadata_lookup):
# get (first) provider item id belonging to this library item
add_to_library = True
provider_instance_id_or_domain, item_id = await self.get_provider_mapping(library_item)
async def get_library_item(self, item_id: int | str) -> ItemCls:
"""Get single library item by id."""
db_id = int(item_id) # ensure integer
- extra_query = f"WHERE {self.db_table}.item_id is {item_id}"
+ extra_query = f"WHERE {self.db_table}.item_id = {item_id}"
async for db_item in self.iter_library_items(extra_query=extra_query):
return db_item
msg = f"{self.media_type.value} not found in library: {db_id}"
if provider_instance_id_or_domain == "library":
return await self.get_library_item(item_id)
for item in await self.get_library_items_by_prov_id(
- provider_instance_id_or_domain,
- provider_item_ids=(item_id,),
+ provider_instance_id_or_domain=provider_instance_id_or_domain,
+ provider_item_id=item_id,
):
return item
return None
# always prefer provider instance first
for mapping in provider_mappings:
for item in await self.get_library_items_by_prov_id(
- mapping.provider_instance,
- provider_item_ids=(mapping.item_id,),
+ provider_instance=mapping.provider_instance,
+ provider_item_id=mapping.item_id,
):
return item
# check by domain too
for mapping in provider_mappings:
for item in await self.get_library_items_by_prov_id(
- mapping.provider_domain,
- provider_item_ids=(mapping.item_id,),
+ provider_domain=mapping.provider_domain,
+ provider_item_id=mapping.item_id,
):
return item
return None
self, external_id: str, external_id_type: ExternalID | None = None
) -> ItemCls | None:
"""Get the library item for the given external id."""
- query = self.base_query + f" WHERE {self.db_table}.external_ids LIKE :external_id_str"
+ query = f"WHERE {self.db_table}.external_ids LIKE :external_id_str"
if external_id_type:
external_id_str = f'%"{external_id_type}","{external_id}"%'
else:
external_id_str = f'%"{external_id}"%'
- query += f" GROUP BY {self.sql_group_by}"
for item in await self._get_library_items_by_query(
- query=query, query_params={"external_id_str": external_id_str}
+ extra_query=query, extra_query_params={"external_id_str": external_id_str}
):
return item
return None
async def get_library_items_by_prov_id(
self,
- provider_instance_id_or_domain: str,
- provider_item_ids: tuple[str, ...] | None = None,
+ provider_domain: str | None = None,
+ provider_instance: str | None = None,
+ provider_instance_id_or_domain: str | None = None,
+ provider_item_id: str | None = None,
limit: int = 500,
offset: int = 0,
) -> list[ItemCls]:
"""Fetch all records from library for given provider."""
- query_parts = []
- query_params = {
- "prov_id": provider_instance_id_or_domain,
- }
-
- if provider_instance_id_or_domain == "library":
- # request for specific library id's
- if provider_item_ids:
- query_parts.append(f"{self.db_table}.item_id in :item_ids")
- query_params["item_ids"] = provider_item_ids
+ assert provider_instance_id_or_domain != "library"
+ assert provider_domain != "library"
+ assert provider_instance != "library"
+ subquery = f"WHERE provider_mappings.media_type = '{self.media_type.value}' "
+ if provider_instance:
+ query_params = {"prov_id": provider_instance}
+ subquery += "AND provider_mappings.provider_instance = :prov_id"
+ elif provider_domain:
+ query_params = {"prov_id": provider_domain}
+ subquery += "AND provider_mappings.provider_domain = :prov_id"
else:
- # provider filtered response
- query_parts.append(
- "(provider_mappings.provider_instance = :prov_id "
- "OR provider_mappings.provider_domain = :prov_id)"
+ query_params = {"prov_id": provider_instance_id_or_domain}
+ subquery += (
+ "AND (provider_mappings.provider_instance = :prov_id "
+ "OR provider_mappings.provider_domain = :prov_id) "
)
- if provider_item_ids:
- query_parts.append("provider_mappings.provider_item_id in :item_ids")
- query_params["item_ids"] = provider_item_ids
-
- # build final query
- query = "WHERE " + " AND ".join(query_parts)
- paged_list = await self.library_items(
+ if provider_item_id:
+ subquery += " AND provider_mappings.provider_item_id = :item_id"
+ query_params["item_id"] = provider_item_id
+ query = (
+ f"WHERE {self.db_table}.item_id in (SELECT item_id FROM provider_mappings {subquery})"
+ )
+ return await self._get_library_items_by_query(
limit=limit, offset=offset, extra_query=query, extra_query_params=query_params
)
- return paged_list.items
async def iter_library_items_by_prov_id(
self,
provider_instance_id_or_domain: str,
- provider_item_ids: tuple[str, ...] | None = None,
+ provider_item_id: str | None = None,
) -> AsyncGenerator[ItemCls, None]:
"""Iterate all records from database for given provider."""
limit: int = 500
offset: int = 0
while True:
next_items = await self.get_library_items_by_prov_id(
- provider_instance_id_or_domain,
- provider_item_ids=provider_item_ids,
+ provider_instance_id_or_domain=provider_instance_id_or_domain,
+ provider_item_id=provider_item_id,
limit=limit,
offset=offset,
)
# ignore if the mapping is already present
if provider_mapping in library_item.provider_mappings:
return
- # update provider_mappings table
- await self._set_provider_mappings(item_id=item_id, provider_mappings=[provider_mapping])
+ library_item.provider_mappings.add(provider_mapping)
+ await self._set_provider_mappings(db_id, library_item.provider_mappings)
async def remove_provider_mapping(
self, item_id: str | int, provider_instance_id: str, provider_item_id: str
except MediaNotFoundError:
# edge case: already deleted / race condition
return
-
+ library_item.provider_mappings = {
+ x
+ for x in library_item.provider_mappings
+ if x.provider_instance != provider_instance_id and x.item_id != provider_item_id
+ }
# update provider_mappings table
await self.mass.music.database.delete(
DB_TABLE_PROVIDER_MAPPINGS,
"provider_item_id": provider_item_id,
},
)
-
- # update the item in db (provider_mappings column only)
- library_item.provider_mappings = {
- x
- for x in library_item.provider_mappings
- if x.provider_instance != provider_instance_id and x.item_id != provider_item_id
- }
if library_item.provider_mappings:
+ await self._set_provider_mappings(db_id, library_item.provider_mappings)
self.logger.debug(
"removed provider_mapping %s/%s from item id %s",
provider_instance_id,
except MediaNotFoundError:
# edge case: already deleted / race condition
return
-
# update provider_mappings table
await self.mass.music.database.delete(
DB_TABLE_PROVIDER_MAPPINGS,
"provider_instance": provider_instance_id,
},
)
-
# update the item's provider mappings (and check if we still have any)
library_item.provider_mappings = {
x for x in library_item.provider_mappings if x.provider_instance != provider_instance_id
async def _get_library_items_by_query(
self,
- query: str,
- query_params: dict | None = None,
+ favorite: bool | None = None,
+ search: str | None = None,
limit: int = 500,
offset: int = 0,
- ) -> list[ItemCls]:
+ order_by: str | None = None,
+ extra_query: str | None = None,
+ extra_query_params: dict[str, Any] | None = None,
+ count_only: bool = False,
+ ) -> list[ItemCls] | int:
"""Fetch MediaItem records from database given a custom (WHERE) clause."""
- if query_params is None:
- query_params = {}
+ sql_query = self.base_query
+ query_params = extra_query_params or {}
+ query_parts: list[str] = []
+ # handle extra/custom query
+ if extra_query:
+ # prevent duplicate where statement
+ if extra_query.lower().startswith("where "):
+ extra_query = extra_query[5:]
+ query_parts.append(extra_query)
+ # handle basic search on name
+ if search:
+ query_params["search"] = f"%{search}%"
+ if self.media_type == MediaType.ALBUM:
+ query_parts.append(
+ f"({self.db_table}.name LIKE :search "
+ f"OR {DB_TABLE_ARTISTS}.name LIKE :search)"
+ )
+ elif self.media_type == MediaType.TRACK:
+ query_parts.append(
+ f"({self.db_table}.name LIKE :search "
+ f"OR {DB_TABLE_ARTISTS}.name LIKE :search "
+ f"OR {DB_TABLE_ALBUMS}.name LIKE :search)"
+ )
+ else:
+ query_parts.append(f"{self.db_table}.name LIKE :search")
+ # handle favorite filter
+ if favorite is not None:
+ query_parts.append(f"{self.db_table}.favorite = :favorite")
+ query_params["favorite"] = favorite
+ # concatenate all WHERE clauses
+ if query_parts:
+ sql_query += " WHERE " + " AND ".join(query_parts)
+ # build final query
+ if count_only:
+ return await self.mass.music.database.get_count_from_query(sql_query, query_params)
+ if order_by:
+ order_by = order_by.replace("sort_artist", f"{DB_TABLE_ARTISTS}.sort_name")
+ order_by = order_by.replace("sort_album", f"{DB_TABLE_ALBUMS}.sort_name")
+ sql_query += f" ORDER BY {order_by}"
+ # return dbresult parsed to media item model
return [
self.item_cls.from_dict(self._parse_db_row(db_row))
for db_row in await self.mass.music.database.get_rows_from_query(
- query, query_params, limit=limit, offset=offset
+ sql_query, query_params, limit=limit, offset=offset
)
]
"provider_item_id": provider_mapping.item_id,
"available": provider_mapping.available,
"url": provider_mapping.url,
- "audio_format": json_dumps(provider_mapping.audio_format),
+ "audio_format": serialize_to_json(provider_mapping.audio_format),
"details": provider_mapping.details,
},
)
+ # we (temporarily?) duplicate the provider mappings in a separate column of the media
+ # item's table, because the json_group_array query is very slow
+ await self.mass.music.database.update(
+ self.db_table,
+ {"item_id": db_id},
+ {"provider_mappings": serialize_to_json(provider_mappings)},
+ )
@staticmethod
def _parse_db_row(db_row: Mapping) -> dict[str, Any]:
"""Parse raw db Mapping into a dict."""
db_row_dict = dict(db_row)
db_row_dict["provider"] = "library"
+ db_row_dict["favorite"] = bool(db_row_dict["favorite"])
+ db_row_dict["item_id"] = str(db_row_dict["item_id"])
for key in JSON_KEYS:
if key in db_row_dict and db_row_dict[key] not in (None, ""):
db_row_dict[key] = json_loads(db_row_dict[key])
- if key == "provider_mappings":
- for prov_mapping_dict in db_row_dict[key]:
- prov_mapping_dict["available"] = bool(prov_mapping_dict["available"])
-
- if "favorite" in db_row_dict:
- db_row_dict["favorite"] = bool(db_row_dict["favorite"])
- if "item_id" in db_row_dict:
- db_row_dict["item_id"] = str(db_row_dict["item_id"])
- if "album" in db_row_dict and db_row_dict["album"]["item_id"] is None:
- db_row_dict.pop("album")
+
# copy album image to itemmapping single image
- if "album" in db_row_dict and (images := db_row_dict["album"].get("images")):
+ if (album := db_row_dict.get("album")) and (images := album.get("images")):
db_row_dict["album"]["image"] = next((x for x in images if x["type"] == "thumb"), None)
return db_row_dict
from collections.abc import AsyncGenerator
from typing import TYPE_CHECKING, Any, cast
-from music_assistant.common.helpers.datetime import utc_timestamp
from music_assistant.common.helpers.json import serialize_to_json
from music_assistant.common.helpers.uri import create_uri, parse_uri
from music_assistant.common.models.enums import EventType, MediaType, ProviderFeature
"external_ids": serialize_to_json(
update.external_ids if overwrite else cur_item.external_ids
),
- "timestamp_modified": int(utc_timestamp()),
},
)
# update/set provider_mappings table
- await self._set_provider_mappings(db_id, update.provider_mappings, overwrite=overwrite)
+ provider_mappings = (
+ update.provider_mappings
+ if overwrite
+ else {*cur_item.provider_mappings, *update.provider_mappings}
+ )
+ await self._set_provider_mappings(db_id, provider_mappings, overwrite)
self.logger.debug("updated %s in database: %s", update.name, db_id)
# get full created object
library_item = await self.get_library_item(db_id)
async def _add_library_item(self, item: Playlist) -> Playlist:
"""Add a new record to the database."""
- item.timestamp_added = int(utc_timestamp())
- item.timestamp_modified = int(utc_timestamp())
new_item = await self.mass.music.database.insert(
self.db_table,
{
"favorite": item.favorite,
"metadata": serialize_to_json(item.metadata),
"external_ids": serialize_to_json(item.external_ids),
- "timestamp_added": int(utc_timestamp()),
- "timestamp_modified": int(utc_timestamp()),
},
)
db_id = new_item["item_id"]
import asyncio
-from music_assistant.common.helpers.datetime import utc_timestamp
from music_assistant.common.helpers.json import serialize_to_json
from music_assistant.common.models.enums import EventType, MediaType
from music_assistant.common.models.errors import InvalidDataError
cur_item.item_id, item, overwrite=overwrite_existing
)
else:
- # search by name
- async for db_item in self.iter_library_items(search=item.name):
+ # search by (exact) name match
+ query = f"WHERE {self.db_table}.name = :name OR {self.db_table}.sort_name = :sort_name"
+ query_params = {"name": item.name, "sort_name": item.sort_name}
+ async for db_item in self.iter_library_items(
+ extra_query=query, extra_query_params=query_params
+ ):
if compare_strings(db_item.name, item.name, strict=True):
# existing item found: update it
library_item = await self.update_item_in_library(db_item.item_id, item)
"external_ids": serialize_to_json(
update.external_ids if overwrite else cur_item.external_ids
),
- "timestamp_modified": int(utc_timestamp()),
},
)
# update/set provider_mappings table
- await self._set_provider_mappings(db_id, update.provider_mappings, overwrite=overwrite)
+ provider_mappings = (
+ update.provider_mappings
+ if overwrite
+ else {*cur_item.provider_mappings, *update.provider_mappings}
+ )
+ await self._set_provider_mappings(db_id, provider_mappings, overwrite)
self.logger.debug("updated %s in database: %s", update.name, db_id)
# get full created object
library_item = await self.get_library_item(db_id)
async def _add_library_item(self, item: Radio) -> Radio:
"""Add a new item record to the database."""
- item.timestamp_added = int(utc_timestamp())
- item.timestamp_modified = int(utc_timestamp())
new_item = await self.mass.music.database.insert(
self.db_table,
{
"favorite": item.favorite,
"metadata": serialize_to_json(item.metadata),
"external_ids": serialize_to_json(item.external_ids),
- "timestamp_added": int(utc_timestamp()),
- "timestamp_modified": int(utc_timestamp()),
},
)
db_id = new_item["item_id"]
import asyncio
import urllib.parse
+from collections.abc import Iterable
from contextlib import suppress
-from music_assistant.common.helpers.datetime import utc_timestamp
from music_assistant.common.helpers.json import serialize_to_json
from music_assistant.common.models.enums import AlbumType, EventType, MediaType, ProviderFeature
from music_assistant.common.models.errors import (
DB_TABLE_ALBUM_TRACKS,
DB_TABLE_ALBUMS,
DB_TABLE_ARTISTS,
- DB_TABLE_PROVIDER_MAPPINGS,
DB_TABLE_TRACK_ARTISTS,
DB_TABLE_TRACKS,
)
"""Initialize class."""
super().__init__(*args, **kwargs)
self.base_query = f"""
- SELECT
- {self.db_table}.*,
- {DB_TABLE_ARTISTS}.sort_name AS sort_artist,
- {DB_TABLE_ALBUMS}.sort_name AS sort_album,
- json_group_array(
- DISTINCT json_object(
- 'item_id', {DB_TABLE_PROVIDER_MAPPINGS}.provider_item_id,
- 'provider_domain', {DB_TABLE_PROVIDER_MAPPINGS}.provider_domain,
- 'provider_instance', {DB_TABLE_PROVIDER_MAPPINGS}.provider_instance,
- 'available', {DB_TABLE_PROVIDER_MAPPINGS}.available,
- 'url', {DB_TABLE_PROVIDER_MAPPINGS}.url,
- 'audio_format', json({DB_TABLE_PROVIDER_MAPPINGS}.audio_format),
- 'details', {DB_TABLE_PROVIDER_MAPPINGS}.details
- )) filter ( where {DB_TABLE_PROVIDER_MAPPINGS}.item_id is not null) as {DB_TABLE_PROVIDER_MAPPINGS},
- json_group_array(
- DISTINCT json_object(
- 'item_id', {DB_TABLE_ARTISTS}.item_id,
- 'provider', 'library',
- 'name', {DB_TABLE_ARTISTS}.name,
- 'sort_name', {DB_TABLE_ARTISTS}.sort_name,
- 'media_type', 'artist'
- )) filter ( where {DB_TABLE_ARTISTS}.name is not null) as {DB_TABLE_ARTISTS},
- json_object(
- 'item_id', {DB_TABLE_ALBUMS}.item_id,
- 'provider', 'library',
- 'name', {DB_TABLE_ALBUMS}.name,
- 'sort_name', {DB_TABLE_ALBUMS}.sort_name,
- 'version', {DB_TABLE_ALBUMS}.version,
- 'images', json_extract({DB_TABLE_ALBUMS}.metadata, '$.images'),
- 'media_type', 'album'
- ) as album,
- {DB_TABLE_ALBUM_TRACKS}.disc_number,
- {DB_TABLE_ALBUM_TRACKS}.track_number
- FROM {self.db_table}
- LEFT JOIN {DB_TABLE_TRACK_ARTISTS} on {DB_TABLE_TRACK_ARTISTS}.track_id = {self.db_table}.item_id
- LEFT JOIN {DB_TABLE_ARTISTS} on {DB_TABLE_ARTISTS}.item_id = {DB_TABLE_TRACK_ARTISTS}.artist_id
- LEFT JOIN {DB_TABLE_ALBUM_TRACKS} on {DB_TABLE_ALBUM_TRACKS}.track_id = {self.db_table}.item_id
- LEFT JOIN {DB_TABLE_ALBUMS} on {DB_TABLE_ALBUMS}.item_id = {DB_TABLE_ALBUM_TRACKS}.album_id
- LEFT JOIN {DB_TABLE_PROVIDER_MAPPINGS}
- ON {self.db_table}.item_id = {DB_TABLE_PROVIDER_MAPPINGS}.item_id
- AND {DB_TABLE_PROVIDER_MAPPINGS}.media_type == '{self.media_type.value}'
+ SELECT
+ {self.db_table}.*,
+ CASE WHEN albums.item_id IS NULL THEN NULL ELSE
+ json_object(
+ 'item_id', {DB_TABLE_ALBUMS}.item_id,
+ 'provider', 'library',
+ 'name', {DB_TABLE_ALBUMS}.name,
+ 'sort_name', {DB_TABLE_ALBUMS}.sort_name,
+ 'version', {DB_TABLE_ALBUMS}.version,
+ 'images', json_extract({DB_TABLE_ALBUMS}.metadata, '$.images'),
+ 'media_type', 'album') END as album,
+ {DB_TABLE_ALBUM_TRACKS}.disc_number,
+ {DB_TABLE_ALBUM_TRACKS}.track_number
+ FROM {self.db_table}
+ LEFT JOIN {DB_TABLE_ALBUM_TRACKS} on {DB_TABLE_ALBUM_TRACKS}.track_id = {self.db_table}.item_id
+ LEFT JOIN {DB_TABLE_ALBUMS} on {DB_TABLE_ALBUMS}.item_id = {DB_TABLE_ALBUM_TRACKS}.album_id
+ LEFT JOIN {DB_TABLE_TRACK_ARTISTS} on {DB_TABLE_TRACK_ARTISTS}.track_id = {self.db_table}.item_id
+ LEFT JOIN {DB_TABLE_ARTISTS} on {DB_TABLE_ARTISTS}.item_id = {DB_TABLE_TRACK_ARTISTS}.artist_id
""" # noqa: E501
- self.sql_group_by = f"{self.db_table}.item_id, {DB_TABLE_ALBUMS}.item_id"
self._db_add_lock = asyncio.Lock()
# register api handlers
self.mass.register_api_command("music/tracks/library_items", self.library_items)
# grab additional metadata
if metadata_lookup:
await self.mass.metadata.get_track_metadata(item)
- # copy track image from album (only if albumtype = single !)
+ # copy album image from track (only if albumtype == single)
+ # this deals with embedded images from filesystem providers
if (
- not item.image
- and isinstance(item.album, Album)
- and item.album.image
+ isinstance(item.album, Album)
+ and not item.album.image
+ and item.image
and item.album.album_type == AlbumType.SINGLE
):
- item.metadata.images = [item.album.image]
+ item.album.metadata.images = [item.image]
# check for existing item first
library_item = None
if cur_item := await self.get_library_item_by_prov_id(item.item_id, item.provider):
cur_item.item_id, item, overwrite=overwrite_existing
)
else:
- # search by name
- async for db_item in self.iter_library_items(search=item.name):
+ # search by (exact) name match
+ query = f"WHERE {self.db_table}.name = :name OR {self.db_table}.sort_name = :sort_name"
+ query_params = {"name": item.name, "sort_name": item.sort_name}
+ async for db_item in self.iter_library_items(
+ extra_query=query, extra_query_params=query_params
+ ):
if compare_track(db_item, item):
# existing item found: update it
library_item = await self.update_item_in_library(
cur_item = await self.get_library_item(db_id)
metadata = update.metadata if overwrite else cur_item.metadata.update(update.metadata)
cur_item.external_ids.update(update.external_ids)
+
await self.mass.music.database.update(
self.db_table,
{"item_id": db_id},
"version": update.version if overwrite else cur_item.version or update.version,
"duration": update.duration if overwrite else cur_item.duration or update.duration,
"metadata": serialize_to_json(metadata),
- "timestamp_modified": int(utc_timestamp()),
"external_ids": serialize_to_json(
update.external_ids if overwrite else cur_item.external_ids
),
},
)
+
# update/set provider_mappings table
- await self._set_provider_mappings(db_id, update.provider_mappings, overwrite=overwrite)
+ provider_mappings = (
+ update.provider_mappings
+ if overwrite
+ else {*cur_item.provider_mappings, *update.provider_mappings}
+ )
+ await self._set_provider_mappings(db_id, provider_mappings, overwrite)
+ # set track artist(s)
+ artists = update.artists if overwrite else cur_item.artists + update.artists
+ await self._set_track_artists(db_id, artists, overwrite=overwrite)
# update/set track album
if update.album:
overwrite=overwrite,
)
- # set track artist(s)
- await self._set_track_artists(db_id, update.artists, overwrite=overwrite)
-
# get full/final created object
library_item = await self.get_library_item(db_id)
self.mass.signal_event(
item_id: str | int,
) -> list[Album]:
"""Return all in-library albums for a track."""
- subquery = f"SELECT album_id FROM {DB_TABLE_ALBUM_TRACKS} WHERE track_id = {item_id}"
+ subquery = (
+ f"SELECT album_id FROM {DB_TABLE_ALBUM_TRACKS} "
+ f"WHERE {DB_TABLE_ALBUM_TRACKS}.track_id = {item_id}"
+ )
query = f"WHERE {DB_TABLE_ALBUMS}.item_id in ({subquery})"
- paged_list = await self.mass.music.albums.library_items(extra_query=query)
- return paged_list.items
+ return await self.mass.music.albums._get_library_items_by_query(extra_query=query)
async def _match(self, db_track: Track) -> None:
"""Try to find matching track on all providers for the provided (database) track_id.
"favorite": item.favorite,
"external_ids": serialize_to_json(item.external_ids),
"metadata": serialize_to_json(item.metadata),
- "timestamp_added": int(utc_timestamp()),
- "timestamp_modified": int(utc_timestamp()),
},
)
db_id = new_item["item_id"]
)
async def _set_track_artists(
- self, db_id: int, artists: list[Artist | ItemMapping], overwrite: bool = False
+ self, db_id: int, artists: Iterable[Artist | ItemMapping], overwrite: bool = False
) -> None:
"""Store Track Artists."""
if overwrite:
"track_id": db_id,
},
)
+ artist_mappings: list[ItemMapping] = []
for artist in artists:
- await self._set_track_artist(db_id, artist=artist, overwrite=overwrite)
+ mapping = await self._set_track_artist(db_id, artist=artist, overwrite=overwrite)
+ artist_mappings.append(mapping)
+ # we (temporarily?) duplicate the artist mappings in a separate column of the media
+ # item's table, because the json_group_array query is very slow
+ await self.mass.music.database.update(
+ self.db_table,
+ {"item_id": db_id},
+ {"artists": serialize_to_json(artist_mappings)},
+ )
async def _set_track_artist(
self, db_id: int, artist: Artist | ItemMapping, overwrite: bool = False
- ) -> None:
+ ) -> ItemMapping:
"""Store Track Artist info."""
- db_artist: Album | ItemMapping = None
+ db_artist: Artist | ItemMapping = None
if artist.provider == "library":
db_artist = artist
elif existing := await self.mass.music.artists.get_library_item_by_prov_id(
artist.item_id, artist.provider
):
db_artist = existing
- else:
- # not an existing artist, we need to fetch before we can add it to the library
- if isinstance(artist, ItemMapping):
- artist = await self.mass.music.artists.get_provider_item(
- artist.item_id, artist.provider, fallback=artist
- )
- with suppress(MediaNotFoundError, AssertionError, InvalidDataError):
- db_artist = await self.mass.music.artists.add_item_to_library(
- artist, metadata_lookup=False, overwrite_existing=overwrite
- )
- if not db_artist:
- # this should not happen but streaming providers can be awful sometimes
- self.logger.warning(
- "Unable to resolve Artist %s for track %s, "
- "track will be added to the library without this artist!",
- artist.uri,
- db_id,
+
+ if not db_artist or overwrite:
+ db_artist = await self.mass.music.artists.add_item_to_library(
+ artist, metadata_lookup=False, overwrite_existing=overwrite
)
- return
- # write (or update) record in track_artists table
+ # write (or update) record in track_artists table
await self.mass.music.database.insert_or_replace(
DB_TABLE_TRACK_ARTISTS,
{
"artist_id": int(db_artist.item_id),
},
)
+ return ItemMapping.from_item(db_artist)
from music_assistant.common.helpers.datetime import utc_timestamp
from music_assistant.common.helpers.global_cache import get_global_cache_value
-from music_assistant.common.helpers.json import json_dumps, json_loads
from music_assistant.common.helpers.uri import parse_uri
from music_assistant.common.models.config_entries import ConfigEntry, ConfigValueType
from music_assistant.common.models.enums import (
self, media_type: MediaType, item_id: str, provider_instance_id_or_domain: str
) -> None:
"""Mark item as played in playlog."""
+ # TODO: also mark in media table (for library items)
timestamp = utc_timestamp()
await self.database.insert(
DB_TABLE_PLAYLOG,
self.mass.music.albums,
self.mass.music.artists,
):
- prov_items = await ctrl.get_library_items_by_prov_id(provider_instance)
+ prov_items = await ctrl.get_library_items_by_prov_id(
+ provider_instance=provider_instance
+ )
for item in prov_items:
try:
await ctrl.remove_provider_mappings(item.item_id, provider_instance)
DB_TABLE_SETTINGS,
{"key": "version", "value": str(DB_SCHEMA_VERSION), "type": "str"},
)
- # create indexes if needed
+ # create indexes and triggers if needed
await self.__create_database_indexes()
+ await self.__create_database_triggers()
# compact db
self.logger.debug("Compacting database...")
await self.database.vacuum()
async def __migrate_database(self, prev_version: int) -> None:
"""Perform a database migration."""
- if prev_version in (27, 28, 29):
- self.logger.info(
- "Performing database migration from %s to %s",
- prev_version,
- DB_SCHEMA_VERSION,
- )
- self.logger.warning("DATABASE MIGRATION IN PROGRESS - THIS CAN TAKE A WHILE")
-
- # recreate loudness measurements table
- if prev_version in (27, 28):
- await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_TRACK_LOUDNESS}")
- await self.__create_database_tables()
-
- # # migrate track artists
- async for db_track in self.database.iter_items(DB_TABLE_TRACKS):
- for track_artist in json_loads(db_track["artists"]):
- await self.database.insert_or_replace(
- DB_TABLE_TRACK_ARTISTS,
- {
- "track_id": db_track["item_id"],
- "artist_id": int(track_artist["item_id"]),
- },
- )
- await self.database.execute(f"ALTER TABLE {DB_TABLE_TRACKS} DROP COLUMN artists;")
- await self.database.execute(f"ALTER TABLE {DB_TABLE_TRACKS} DROP COLUMN sort_artist;")
-
- # # migrate album artists
- async for db_album in self.database.iter_items(DB_TABLE_ALBUMS):
- for album_artist in json_loads(db_album["artists"]):
- await self.database.insert_or_replace(
- DB_TABLE_ALBUM_ARTISTS,
- {
- "album_id": db_album["item_id"],
- "artist_id": int(album_artist["item_id"]),
- },
- )
- await self.database.execute(f"ALTER TABLE {DB_TABLE_ALBUMS} DROP COLUMN artists;")
- await self.database.execute(f"ALTER TABLE {DB_TABLE_ALBUMS} DROP COLUMN sort_artist;")
-
- # migrate provider_mappings
- await self.database.execute(
- f"ALTER TABLE {DB_TABLE_PROVIDER_MAPPINGS} ADD [available] BOOLEAN DEFAULT 1;"
- )
- await self.database.execute(f"ALTER TABLE {DB_TABLE_PROVIDER_MAPPINGS} ADD [url] TEXT;")
- await self.database.execute(
- f"ALTER TABLE {DB_TABLE_PROVIDER_MAPPINGS} ADD [audio_format] json;"
- )
- await self.database.execute(
- f"ALTER TABLE {DB_TABLE_PROVIDER_MAPPINGS} ADD [details] json;"
- )
-
- for media_type_str in ("track", "album", "artist", "playlist", "radio"):
- table = f"{media_type_str}s"
- async for db_item in self.database.iter_items(table):
- for db_prov_map in json_loads(db_item["provider_mappings"]):
- await self.database.insert_or_replace(
- DB_TABLE_PROVIDER_MAPPINGS,
- {
- "media_type": media_type_str,
- "item_id": int(db_item["item_id"]),
- "provider_domain": db_prov_map["provider_domain"],
- "provider_instance": db_prov_map["provider_instance"],
- "provider_item_id": db_prov_map["item_id"],
- "available": db_prov_map["available"],
- "url": db_prov_map["url"],
- "audio_format": json_dumps(db_prov_map["audio_format"])
- if db_prov_map["audio_format"]
- else None,
- "details": db_prov_map["details"],
- },
- )
- await self.database.execute(f"ALTER TABLE {table} DROP COLUMN provider_mappings;")
- self.logger.info(
- "Database migration to version %s completed",
- DB_SCHEMA_VERSION,
- )
- return
-
- # handle all other schema versions
- # we keep it simple and just recreate the tables
- # if the schema is too old (or too new)
- # we do migrations only for up to 1 schema version behind
self.logger.warning(
"Database schema too old - Resetting library/database - "
"a full rescan will be performed, this can take a while!"
DB_TABLE_ARTISTS,
DB_TABLE_PLAYLISTS,
DB_TABLE_RADIOS,
+ DB_TABLE_ALBUM_TRACKS,
+ DB_TABLE_PLAYLOG,
+ DB_TABLE_TRACK_LOUDNESS,
DB_TABLE_PROVIDER_MAPPINGS,
):
await self.database.execute(f"DROP TABLE IF EXISTS {table}")
"""Create database tables."""
await self.database.execute(
f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_SETTINGS}(
- key TEXT PRIMARY KEY,
- value TEXT,
- type TEXT
+ [key] TEXT PRIMARY KEY,
+ [value] TEXT,
+ [type] TEXT
);"""
)
await self.database.execute(
f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_TRACK_LOUDNESS}(
- item_id INTEGER NOT NULL,
- provider TEXT NOT NULL,
- integrated REAL,
- true_peak REAL,
- lra REAL,
- threshold REAL,
- target_offset REAL,
+ [id] INTEGER PRIMARY KEY AUTOINCREMENT,
+ [item_id] TEXT NOT NULL,
+ [provider] TEXT NOT NULL,
+ [integrated] REAL,
+ [true_peak] REAL,
+ [lra] REAL,
+ [threshold] REAL,
+ [target_offset] REAL,
UNIQUE(item_id, provider));"""
)
await self.database.execute(
f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_PLAYLOG}(
- item_id INTEGER NOT NULL,
- provider TEXT NOT NULL,
- media_type TEXT NOT NULL DEFAULT 'track',
- timestamp INTEGER DEFAULT 0,
+ [id] INTEGER PRIMARY KEY AUTOINCREMENT,
+ [item_id] TEXT NOT NULL,
+ [provider] TEXT NOT NULL,
+ [media_type] TEXT NOT NULL DEFAULT 'track',
+ [timestamp] INTEGER DEFAULT 0,
UNIQUE(item_id, provider, media_type));"""
)
await self.database.execute(
f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_ALBUMS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- sort_name TEXT NOT NULL,
- album_type TEXT NOT NULL,
- year INTEGER,
- version TEXT,
- favorite BOOLEAN DEFAULT 0,
- metadata json NOT NULL,
- external_ids json NOT NULL,
- timestamp_added INTEGER NOT NULL,
- timestamp_modified INTEGER NOT NULL
+ [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
+ [name] TEXT NOT NULL,
+ [sort_name] TEXT NOT NULL,
+ [version] TEXT,
+ [album_type] TEXT NOT NULL,
+ [year] INTEGER,
+ [favorite] BOOLEAN DEFAULT 0,
+ [metadata] json NOT NULL,
+ [external_ids] json NOT NULL,
+ [play_count] INTEGER DEFAULT 0,
+ [last_played] INTEGER DEFAULT 0,
+ [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
+ [timestamp_modified] INTEGER,
+
+ [artists] json DEFAULT '[]',
+ [provider_mappings] json DEFAULT '[]'
);"""
)
await self.database.execute(
- f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_ARTISTS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- sort_name TEXT NOT NULL,
- favorite BOOLEAN DEFAULT 0,
- metadata json NOT NULL,
- external_ids json NOT NULL,
- timestamp_added INTEGER NOT NULL,
- timestamp_modified INTEGER NOT NULL
- );"""
+ f"""
+ CREATE TABLE IF NOT EXISTS {DB_TABLE_ARTISTS}(
+ [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
+ [name] TEXT NOT NULL,
+ [sort_name] TEXT NOT NULL,
+ [favorite] BOOLEAN DEFAULT 0,
+ [metadata] json NOT NULL,
+ [external_ids] json NOT NULL,
+ [play_count] INTEGER DEFAULT 0,
+ [last_played] INTEGER DEFAULT 0,
+ [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
+ [timestamp_modified] INTEGER,
+
+ [provider_mappings] json DEFAULT '[]'
+ );"""
)
await self.database.execute(
- f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_TRACKS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- sort_name TEXT NOT NULL,
- version TEXT,
- duration INTEGER,
- favorite BOOLEAN DEFAULT 0,
- metadata json NOT NULL,
- external_ids json NOT NULL,
- timestamp_added INTEGER NOT NULL,
- timestamp_modified INTEGER NOT NULL
- );"""
+ f"""
+ CREATE TABLE IF NOT EXISTS {DB_TABLE_TRACKS}(
+ [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
+ [name] TEXT NOT NULL,
+ [sort_name] TEXT NOT NULL,
+ [version] TEXT,
+ [duration] INTEGER,
+ [favorite] BOOLEAN DEFAULT 0,
+ [metadata] json NOT NULL,
+ [external_ids] json NOT NULL,
+ [play_count] INTEGER DEFAULT 0,
+ [last_played] INTEGER DEFAULT 0,
+ [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
+ [timestamp_modified] INTEGER,
+
+ [artists] json DEFAULT '[]',
+ [provider_mappings] json DEFAULT '[]'
+ );"""
)
await self.database.execute(
- f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_ALBUM_TRACKS}(
- [id] INTEGER PRIMARY KEY AUTOINCREMENT,
- [track_id] INTEGER NOT NULL,
- [album_id] INTEGER NOT NULL,
- [disc_number] INTEGER NOT NULL,
- [track_number] INTEGER NOT NULL,
- UNIQUE(track_id, album_id)
- );"""
+ f"""
+ CREATE TABLE IF NOT EXISTS {DB_TABLE_PLAYLISTS}(
+ [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
+ [name] TEXT NOT NULL,
+ [sort_name] TEXT NOT NULL,
+ [owner] TEXT NOT NULL,
+ [is_editable] BOOLEAN NOT NULL,
+ [favorite] BOOLEAN DEFAULT 0,
+ [metadata] json NOT NULL,
+ [external_ids] json NOT NULL,
+ [play_count] INTEGER DEFAULT 0,
+ [last_played] INTEGER DEFAULT 0,
+ [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
+ [timestamp_modified] INTEGER,
+
+ [provider_mappings] json DEFAULT '[]'
+ );"""
)
await self.database.execute(
- f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_TRACK_ARTISTS}(
- [id] INTEGER PRIMARY KEY AUTOINCREMENT,
- [track_id] INTEGER NOT NULL,
- [artist_id] INTEGER NOT NULL,
- FOREIGN KEY([track_id]) REFERENCES [tracks]([item_id]),
- FOREIGN KEY([artist_id]) REFERENCES [artists]([item_id]),
- UNIQUE(track_id, artist_id)
- );"""
+ f"""
+ CREATE TABLE IF NOT EXISTS {DB_TABLE_RADIOS}(
+ [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
+ [name] TEXT NOT NULL,
+ [sort_name] TEXT NOT NULL,
+ [favorite] BOOLEAN DEFAULT 0,
+ [metadata] json NOT NULL,
+ [external_ids] json NOT NULL,
+ [play_count] INTEGER DEFAULT 0,
+ [last_played] INTEGER DEFAULT 0,
+ [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
+ [timestamp_modified] INTEGER,
+
+ [provider_mappings] json DEFAULT '[]'
+ );"""
)
await self.database.execute(
- f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_ALBUM_ARTISTS}(
- [id] INTEGER PRIMARY KEY AUTOINCREMENT,
- [album_id] INTEGER NOT NULL,
- [artist_id] INTEGER NOT NULL,
- FOREIGN KEY([album_id]) REFERENCES [albums]([item_id]),
- FOREIGN KEY([artist_id]) REFERENCES [artists]([item_id]),
- UNIQUE(album_id, artist_id)
- );"""
+ f"""
+ CREATE TABLE IF NOT EXISTS {DB_TABLE_ALBUM_TRACKS}(
+ [id] INTEGER PRIMARY KEY AUTOINCREMENT,
+ [track_id] INTEGER NOT NULL,
+ [album_id] INTEGER NOT NULL,
+ [disc_number] INTEGER NOT NULL,
+ [track_number] INTEGER NOT NULL,
+ FOREIGN KEY([track_id]) REFERENCES [tracks]([item_id]),
+ FOREIGN KEY([album_id]) REFERENCES [albums]([item_id]),
+ UNIQUE(track_id, album_id)
+ );"""
)
await self.database.execute(
- f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_PLAYLISTS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- sort_name TEXT NOT NULL,
- owner TEXT NOT NULL,
- is_editable BOOLEAN NOT NULL,
- favorite BOOLEAN DEFAULT 0,
- metadata json,
- external_ids json NOT NULL,
- timestamp_added INTEGER NOT NULL,
- timestamp_modified INTEGER NOT NULL
- );"""
+ f"""
+ CREATE TABLE IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}(
+ [media_type] TEXT NOT NULL,
+ [item_id] INTEGER NOT NULL,
+ [provider_domain] TEXT NOT NULL,
+ [provider_instance] TEXT NOT NULL,
+ [provider_item_id] TEXT NOT NULL,
+ [available] BOOLEAN DEFAULT 1,
+ [url] text,
+ [audio_format] json,
+ [details] json,
+ UNIQUE(media_type, provider_instance, provider_item_id)
+ );"""
)
await self.database.execute(
- f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_RADIOS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL UNIQUE,
- sort_name TEXT NOT NULL,
- favorite BOOLEAN DEFAULT 0,
- metadata json,
- external_ids json NOT NULL,
- timestamp_added INTEGER NOT NULL,
- timestamp_modified INTEGER NOT NULL
- );"""
+ f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_TRACK_ARTISTS}(
+ [track_id] INTEGER NOT NULL,
+ [artist_id] INTEGER NOT NULL,
+ FOREIGN KEY([track_id]) REFERENCES [tracks]([item_id]),
+ FOREIGN KEY([artist_id]) REFERENCES [artists]([item_id]),
+ UNIQUE(track_id, artist_id)
+ );"""
)
await self.database.execute(
- f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}(
- [id] INTEGER PRIMARY KEY AUTOINCREMENT,
- [media_type] TEXT NOT NULL,
- [item_id] INTEGER NOT NULL,
- [provider_domain] TEXT NOT NULL,
- [provider_instance] TEXT NOT NULL,
- [provider_item_id] TEXT NOT NULL,
- [available] BOOLEAN DEFAULT 1,
- [url] text,
- [audio_format] json,
- [details] json,
- UNIQUE(media_type, item_id, provider_instance, provider_item_id)
- );"""
+ f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_ALBUM_ARTISTS}(
+ [album_id] INTEGER NOT NULL,
+ [artist_id] INTEGER NOT NULL,
+ FOREIGN KEY([album_id]) REFERENCES [albums]([item_id]),
+ FOREIGN KEY([artist_id]) REFERENCES [artists]([item_id]),
+ UNIQUE(album_id, artist_id)
+ );"""
)
async def __create_database_indexes(self) -> None:
"""Create database indexes."""
+ for db_table in (
+ DB_TABLE_ARTISTS,
+ DB_TABLE_ALBUMS,
+ DB_TABLE_TRACKS,
+ DB_TABLE_PLAYLISTS,
+ DB_TABLE_RADIOS,
+ ):
+ # index on favorite column
+ await self.database.execute(
+ f"CREATE INDEX IF NOT EXISTS {db_table}_favorite_idx on {db_table}(favorite);"
+ )
+ # index on name
+ await self.database.execute(
+ f"CREATE INDEX IF NOT EXISTS {db_table}_name_idx on {db_table}(name);"
+ )
+ # index on sort_name
+ await self.database.execute(
+ f"CREATE INDEX IF NOT EXISTS {db_table}_sort_name_idx on {db_table}(sort_name);"
+ )
+ # index on external_ids
+ await self.database.execute(
+ f"CREATE INDEX IF NOT EXISTS {db_table}_external_ids_idx on {db_table}(external_ids);" # noqa: E501
+ )
+
+ # indexes on provider_mappings table
await self.database.execute(
- "CREATE INDEX IF NOT EXISTS artists_in_library_idx on artists(favorite);"
- )
- await self.database.execute(
- "CREATE INDEX IF NOT EXISTS albums_in_library_idx on albums(favorite);"
- )
- await self.database.execute(
- "CREATE INDEX IF NOT EXISTS tracks_in_library_idx on tracks(favorite);"
- )
- await self.database.execute(
- "CREATE INDEX IF NOT EXISTS playlists_in_library_idx on playlists(favorite);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}_media_type_item_id_idx "
+ f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,item_id);"
)
await self.database.execute(
- "CREATE INDEX IF NOT EXISTS radios_in_library_idx on radios(favorite);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}_provider_domain_idx "
+ f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,provider_domain,provider_item_id);"
)
await self.database.execute(
- "CREATE INDEX IF NOT EXISTS artists_sort_name_idx on artists(sort_name);"
+ f"CREATE UNIQUE INDEX IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}_provider_instance_idx "
+ f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,provider_instance,provider_item_id);"
)
+
+ # indexes on track_artists table
await self.database.execute(
- "CREATE INDEX IF NOT EXISTS albums_sort_name_idx on albums(sort_name);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_TRACK_ARTISTS}_track_id_idx "
+ f"on {DB_TABLE_TRACK_ARTISTS}(track_id);"
)
await self.database.execute(
- "CREATE INDEX IF NOT EXISTS tracks_sort_name_idx on tracks(sort_name);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_TRACK_ARTISTS}_artist_id_idx "
+ f"on {DB_TABLE_TRACK_ARTISTS}(artist_id);"
)
+ # indexes on album_artists table
await self.database.execute(
- "CREATE INDEX IF NOT EXISTS playlists_sort_name_idx on playlists(sort_name);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_ALBUM_ARTISTS}_album_id_idx "
+ f"on {DB_TABLE_ALBUM_ARTISTS}(album_id);"
)
await self.database.execute(
- "CREATE INDEX IF NOT EXISTS radios_sort_name_idx on radios(sort_name);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_ALBUM_ARTISTS}_artist_id_idx "
+ f"on {DB_TABLE_ALBUM_ARTISTS}(artist_id);"
)
+
+ async def __create_database_triggers(self) -> None:
+ """Create database triggers."""
+ # triggers to auto update timestamps
+ for db_table in ("artists", "albums", "tracks", "playlists", "radios"):
+ await self.database.execute(
+ f"""
+ CREATE TRIGGER IF NOT EXISTS update_{db_table}_timestamp
+ AFTER UPDATE ON {db_table} FOR EACH ROW
+ WHEN NEW.timestamp_modified <= OLD.timestamp_modified
+ BEGIN
+ UPDATE {db_table} set timestamp_modified=cast(strftime('%s','now') as int)
+ WHERE item_id=OLD.item_id;
+ END;
+ """
+ )
state: PlayerState
current_index: int | None
elapsed_time: int
+ stream_title: str | None
class PlayerQueuesController(CoreController):
state=PlayerState.IDLE,
current_index=None,
elapsed_time=0,
+ stream_title=None,
),
)
new_state = CompareState(
state=queue.state,
current_index=queue.current_index,
elapsed_time=queue.elapsed_time,
+ stream_title=queue.current_item.streamdetails.stream_title
+ if queue.current_item and queue.current_item.streamdetails
+ else None,
)
changed_keys = get_changed_keys(prev_state, new_state)
# return early if nothing changed
queue.next_item = None
# signal update and store state
self.signal_update(queue_id)
-
self._prev_states[queue_id] = new_state
# watch dynamic radio items refill if needed
if "current_index" in changed_keys:
from __future__ import annotations
import asyncio
+import logging
+import os
+import time
+from contextlib import asynccontextmanager
+from sqlite3 import OperationalError
from typing import TYPE_CHECKING, Any
import aiosqlite
+from music_assistant.constants import MASS_LOGGER_NAME
+
if TYPE_CHECKING:
from collections.abc import AsyncGenerator, Mapping
+LOGGER = logging.getLogger(f"{MASS_LOGGER_NAME}.database")
+
+ENABLE_DEBUG = os.environ.get("PYTHONDEVMODE", "0") not in ("", "0")
+
+
+@asynccontextmanager
+async def debug_query(sql_query: str):
+ """Time the processing time of an sql query."""
+ if not ENABLE_DEBUG:
+ yield
+ return
+ time_start = time.time()
+ try:
+ yield
+ except OperationalError as err:
+ LOGGER.error("%s\n%s", err, sql_query)
+ raise
+ finally:
+ process_time = time.time() - time_start
+ if process_time > 0.5:
+ LOGGER.warning("SQL Query took %s seconds!\n%s", process_time, sql_query)
+
def query_params(query: str, params: dict[str, Any] | None) -> tuple[str, dict[str, Any]]:
"""Extend query parameters support."""
if order_by is not None:
sql_query += f" ORDER BY {order_by}"
sql_query += f" LIMIT {limit} OFFSET {offset}"
- return await self._db.execute_fetchall(sql_query, match)
+ async with debug_query(sql_query):
+ return await self._db.execute_fetchall(sql_query, match)
async def get_rows_from_query(
self,
"""Get all rows for given custom query."""
query = f"{query} LIMIT {limit} OFFSET {offset}"
_query, _params = query_params(query, params)
- return await self._db.execute_fetchall(_query, _params)
+ async with debug_query(_query):
+ return await self._db.execute_fetchall(_query, _params)
async def get_count_from_query(
self,
"""Get row count for given custom query."""
query = f"SELECT count() FROM ({query})"
_query, _params = query_params(query, params)
- async with self._db.execute(_query, _params) as cursor:
- if result := await cursor.fetchone():
- return result[0]
- return 0
+ async with debug_query(_query):
+ async with self._db.execute(_query, _params) as cursor:
+ if result := await cursor.fetchone():
+ return result[0]
+ return 0
async def get_count(
self,
) -> int:
"""Get row count for given table."""
query = f"SELECT count(*) FROM {table}"
- async with self._db.execute(query) as cursor:
- if result := await cursor.fetchone():
- return result[0]
- return 0
+ async with debug_query(query):
+ async with self._db.execute(query) as cursor:
+ if result := await cursor.fetchone():
+ return result[0]
+ return 0
async def search(self, table: str, search: str, column: str = "name") -> list[Mapping]:
"""Search table by column."""
sql_query = f"SELECT * FROM {table} WHERE {table}.{column} LIKE :search"
params = {"search": f"%{search}%"}
- return await self._db.execute_fetchall(sql_query, params)
+ async with debug_query(sql_query):
+ return await self._db.execute_fetchall(sql_query, params)
async def get_row(self, table: str, match: dict[str, Any]) -> Mapping | None:
"""Get single row for given table where column matches keys/values."""
sql_query = f"SELECT * FROM {table} WHERE "
sql_query += " AND ".join(f"{table}.{x} = :{x}" for x in match)
- async with self._db.execute(sql_query, match) as cursor:
+ async with debug_query(sql_query), self._db.execute(sql_query, match) as cursor:
return await cursor.fetchone()
async def insert(
file_path,
"-an",
"-vcodec",
- "copy",
+ "mjpeg",
"-f",
"mjpeg",
"-",
if not await asyncio.to_thread(os.path.exists, self._playlists_dir):
await asyncio.to_thread(os.mkdir, self._playlists_dir)
- # TEMP: Migrate URL provider entries to builtin
- # TODO: Remove this once 2.0 is released!
- cache_key = f"{self.instance_id}.url_migration_done"
- if await self.mass.cache.get(cache_key):
- return
- self.logger.info("Starting migration...")
- url_instance_id: str | None = None
- for ctrl in (
- self.mass.music.radio,
- self.mass.music.tracks,
- self.mass.music.artists,
- ):
- prov_items = await ctrl.get_library_items_by_prov_id("url")
- for item in prov_items:
- try:
- existing_mapping = next(
- x for x in item.provider_mappings if x.provider_domain == "url"
- )
- # add new prov mapping for the builtin provider
- new_mapping = ProviderMapping.from_dict(existing_mapping.to_dict())
- new_mapping.provider_instance = self.instance_id
- new_mapping.provider_domain = self.domain
- new_mapping.available = True
- await ctrl.add_provider_mapping(item.item_id, new_mapping)
- # lookup instance id of the url provider if we dont have it yet
- url_instance_id = existing_mapping.provider_instance
- # remove the old provider mapping for url provider
- await ctrl.remove_provider_mappings(item.item_id, url_instance_id)
- # ensure its added to our local settings
- item.item_id = new_mapping.item_id
- item.provider = new_mapping.provider_instance
- await self.library_add(item)
- self.logger.info("Migrated item %s", item.name)
- except Exception as err:
- self.logger.exception(err)
- if url_instance_id:
- await self.mass.cache.clear(url_instance_id)
- await self.mass.cache.set(cache_key, True, expiration=365 * 86400)
-
@property
def is_streaming_provider(self) -> bool:
"""Return True if the provider is a streaming provider."""
self, builtin_playlist_id: str
) -> AsyncGenerator[Track, None]:
"""Get all playlist tracks for given builtin playlist id."""
- count = 0
if builtin_playlist_id == ALL_LIBRARY_TRACKS:
- async for item in self.mass.music.tracks.iter_library_items(order_by="RANDOM()"):
- count += 1
- item.position = count
+ res = await self.mass.music.tracks.library_items(limit=2500, order_by="RANDOM()")
+ for idx, item in enumerate(res.items, 1):
+ item.position = idx
yield item
return
if builtin_playlist_id == ALL_FAVORITE_TRACKS:
- async for item in self.mass.music.tracks.iter_library_items(
- favorite=True, order_by="RANDOM()"
- ):
- count += 1
- item.position = count
+ res = await self.mass.music.tracks.library_items(
+ favorite=True, limit=2500, order_by="RANDOM()"
+ )
+ for idx, item in enumerate(res.items, 1):
+ item.position = idx
yield item
return
if builtin_playlist_id == RANDOM_TRACKS:
- async for item in self.mass.music.tracks.iter_library_items(order_by="RANDOM()"):
- count += 1
- item.position = count
+ res = await self.mass.music.tracks.library_items(limit=100, order_by="RANDOM()")
+ for idx, item in enumerate(res.items, 1):
+ item.position = idx
yield item
- if count == 100:
- return
return
if builtin_playlist_id == RANDOM_ALBUM:
- async for random_album in self.mass.music.albums.iter_library_items(
- order_by="RANDOM()"
- ):
+ for random_album in (
+ await self.mass.music.albums.library_items(limit=1, order_by="RANDOM()")
+ ).items:
# use the function specified in the queue controller as that
# already handles unwrapping an album by user preference
- for album_track in await self.mass.player_queues.get_album_tracks(random_album):
- count += 1
- album_track.position = count
- yield album_track
+ tracks = await self.mass.music.albums.tracks(
+ random_album.item_id, random_album.provider
+ )
+ for idx, track in enumerate(tracks, 1):
+ track.position = idx
+ yield track
return
if builtin_playlist_id == RANDOM_ARTIST:
- async for random_artist in self.mass.music.artists.iter_library_items(
- order_by="RANDOM()"
- ):
+ for random_artist in (
+ await self.mass.music.artists.library_items(limit=1, order_by="RANDOM()")
+ ).items:
# use the function specified in the queue controller as that
# already handles unwrapping an artist by user preference
- for artist_track in await self.mass.player_queues.get_artist_tracks(random_artist):
- count += 1
- artist_track.position = count
- yield artist_track
+ tracks = await self.mass.music.artists.tracks(
+ random_artist.item_id, random_artist.provider
+ )
+ for idx, track in enumerate(tracks, 1):
+ track.position = idx
+ yield track
return
if builtin_playlist_id == RECENTLY_PLAYED:
- for track in await self.mass.music.recently_played(250, [MediaType.TRACK]):
- count += 1
- track.position = count
+ tracks = await self.mass.music.recently_played(250, [MediaType.TRACK])
+ for idx, track in enumerate(tracks, 1):
+ track.position = idx
yield track
return
"documentation": "https://music-assistant.io/music-providers/builtin/",
"multi_instance": false,
"builtin": true,
- "hidden": true
+ "hidden": false
}
from dataclasses import dataclass
from typing import TYPE_CHECKING
+import aiofiles
import cchardet
import xmltodict
+from music_assistant.common.helpers.json import JSON_DECODE_EXCEPTIONS, json_dumps, json_loads
from music_assistant.common.helpers.util import parse_title_and_version
from music_assistant.common.models.config_entries import (
ConfigEntry,
"""
write_access: bool = False
+ checksums_file: str
+ file_checksums: dict[str, int]
@property
def supported_features(self) -> tuple[ProviderFeature, ...]:
)
return SUPPORTED_FEATURES
+ async def loaded_in_mass(self) -> None:
+ """Call after the provider has been loaded."""
+ # load the checksums from disk and store in memory
+ self.checksums_file = os.path.join(self.mass.storage_path, f"{self.instance_id}.json")
+ self.file_checksums = {}
+ if await asyncio.to_thread(os.path.isfile, self.checksums_file):
+ try:
+ async with aiofiles.open(self.checksums_file, "r", encoding="utf-8") as _file:
+ self.file_checksums = json_loads(await _file.read())
+ self.logger.debug("Loaded persistent checksums from %s", self.checksums_file)
+ return
+ except FileNotFoundError:
+ pass
+ except JSON_DECODE_EXCEPTIONS: # pylint: disable=catching-non-exception
+ self.logger.exception(
+ "Error while reading persistent checksums file %s", self.checksums_file
+ )
+
@abstractmethod
async def listdir(
self, path: str, recursive: bool = False
"name": f"%{search_query}%",
"provider_instance": self.instance_id,
}
+ subquery = "WHERE "
# ruff: noqa: E501
if media_types is None or MediaType.TRACK in media_types:
- query = "WHERE tracks.name LIKE :name AND provider_mappings.provider_instance = :provider_instance"
- result.tracks = (
- await self.mass.music.tracks.library_items(
- extra_query=query, extra_query_params=params
- )
- ).items
+ subquery = (
+ "WHERE provider_mappings.media_type = 'track' "
+ "AND provider_mappings.provider_instance = :provider_instance"
+ )
+ query = (
+ "WHERE tracks.name LIKE :name AND tracks.item_id in "
+ f"(SELECT item_id FROM provider_mappings {subquery})"
+ )
+ result.tracks = await self.mass.music.tracks._get_library_items_by_query(
+ extra_query=query, extra_query_params=params
+ )
+
if media_types is None or MediaType.ALBUM in media_types:
- query = "WHERE albums.name LIKE :name AND provider_mappings.provider_instance = :provider_instance"
- result.albums = (
- await self.mass.music.albums.library_items(
- extra_query=query, extra_query_params=params
- )
- ).items
+ subquery = (
+ "WHERE provider_mappings.media_type = 'album' "
+ "AND provider_mappings.provider_instance = :provider_instance"
+ )
+ query = (
+ "WHERE albums.name LIKE :name AND albums.item_id in "
+ f"(SELECT item_id FROM provider_mappings {subquery})"
+ )
+ result.albums = await self.mass.music.albums._get_library_items_by_query(
+ extra_query=query, extra_query_params=params
+ )
+
if media_types is None or MediaType.ARTIST in media_types:
- query = "WHERE artists.name LIKE :name AND provider_mappings.provider_instance = :provider_instance"
- result.artists = (
- await self.mass.music.artists.library_items(
- extra_query=query, extra_query_params=params
- )
- ).items
+ subquery = (
+ "WHERE provider_mappings.media_type = 'artist' "
+ "AND provider_mappings.provider_instance = :provider_instance"
+ )
+ query = (
+ "WHERE artists.name LIKE :name AND artists.item_id in "
+ f"(SELECT item_id FROM provider_mappings {subquery})"
+ )
+ result.artists = await self.mass.music.artists._get_library_items_by_query(
+ extra_query=query, extra_query_params=params
+ )
if media_types is None or MediaType.PLAYLIST in media_types:
- query = "WHERE playlists.name LIKE :name AND provider_mappings.provider_instance = :provider_instance"
- result.playlists = (
- await self.mass.music.playlists.library_items(
- extra_query=query, extra_query_params=params
- )
- ).items
+ subquery = (
+ "WHERE provider_mappings.media_type = 'playlist' "
+ "AND provider_mappings.provider_instance = :provider_instance"
+ )
+ query = (
+ "WHERE playlists.name LIKE :name AND playlists.item_id in "
+ f"(SELECT item_id FROM provider_mappings {subquery})"
+ )
+ result.playlists = await self.mass.music.playlists._get_library_items_by_query(
+ extra_query=query, extra_query_params=params
+ )
return result
async def browse(self, path: str) -> AsyncGenerator[MediaItemType, None]:
async def sync_library(self, media_types: tuple[MediaType, ...]) -> None:
"""Run library sync for this provider."""
- # first build a listing of all current items and their checksums
- prev_checksums = {}
- for ctrl in (self.mass.music.tracks, self.mass.music.playlists):
- async for db_item in ctrl.iter_library_items_by_prov_id(self.instance_id):
- file_name = next(
- x.item_id
- for x in db_item.provider_mappings
- if x.provider_instance == self.instance_id
- )
- prev_checksums[file_name] = db_item.metadata.cache_checksum
-
- # process all deleted (or renamed) files first
- cur_filenames = set()
- async for item in self.listdir("", recursive=True):
- if "." not in item.filename or not item.ext:
- # skip system files and files without extension
- continue
-
- if item.ext not in SUPPORTED_EXTENSIONS:
- # unsupported file extension
- continue
- cur_filenames.add(item.path)
- # work out deletions
- deleted_files = set(prev_checksums.keys()) - cur_filenames
- await self._process_deletions(deleted_files)
-
# find all music files in the music directory and all subfolders
# we work bottom up, as-in we derive all info from the tracks
+ cur_filenames = set()
+ prev_filenames = set(self.file_checksums.keys())
async for item in self.listdir("", recursive=True):
if "." not in item.filename or not item.ext:
# skip system files and files without extension
# unsupported file extension
continue
+ cur_filenames.add(item.path)
try:
# continue if the item did not change (checksum still the same)
- if item.checksum == prev_checksums.get(item.path):
+ if item.checksum == self.file_checksums.get(item.path):
continue
self.logger.debug("Processing: %s", item.path)
if item.ext in TRACK_EXTENSIONS:
str(err),
exc_info=err if self.logger.isEnabledFor(logging.DEBUG) else None,
)
+ else:
+ self.file_checksums[item.path] = item.checksum
+ # save the checksums every 500 items to speed up scan restarts
+ if len(cur_filenames) % 500 == 0:
+ await self._async_save_checksums()
+
+ await self._async_save_checksums()
+ # work out deletions
+ deleted_files = prev_filenames - cur_filenames
+ await self._process_deletions(deleted_files)
async def _process_deletions(self, deleted_files: set[str]) -> None:
"""Process all deletions."""
)
break
return images
+
+ async def _async_save_checksums(self) -> None:
+ """Save persistent checksums data to disk."""
+ filename_backup = f"{self.checksums_file}.backup"
+ # make backup before we write a new file
+ if await asyncio.to_thread(os.path.isfile, self.checksums_file):
+ if await asyncio.to_thread(os.path.isfile, filename_backup):
+ await asyncio.to_thread(os.remove, filename_backup)
+ await asyncio.to_thread(os.rename, self.checksums_file, filename_backup)
+
+ async with aiofiles.open(self.checksums_file, "w", encoding="utf-8") as _file:
+ await _file.write(json_dumps(self.file_checksums, indent=True))
+ self.logger.debug("Saved data to persistent storage")
)
async def _get_or_create_artist_by_name(self, artist_name) -> Artist:
+ subquery = (
+ "WHERE provider_mappings.media_type = 'artist' "
+ "AND provider_mappings.provider_instance = :provider_instance"
+ )
query = (
- "WHERE artists.name = :name AND "
- "provider_mappings.provider_instance = :provider_instance"
+ "WHERE artists.name LIKE :name AND artists.item_id in "
+ f"(SELECT item_id FROM provider_mappings {subquery})"
)
query_params = {"name": artist_name, "provider_instance": self.instance_id}
- paged_list = await self.mass.music.artists.library_items(
+ if library_items := await self.mass.music.artists._get_library_items_by_query(
extra_query=query, extra_query_params=query_params
- )
- if paged_list and paged_list.items:
- return ItemMapping.from_item(paged_list.items[0])
+ ):
+ return ItemMapping.from_item(library_items[0])
artist_id = FAKE_ARTIST_PREFIX + artist_name
return Artist(
--- /dev/null
+"""Test/Demo provider that creates a collection of fake media items."""
+
+from __future__ import annotations
+
+from collections.abc import AsyncGenerator
+from typing import TYPE_CHECKING
+
+from music_assistant.common.models.config_entries import ConfigEntry
+from music_assistant.common.models.enums import (
+ ContentType,
+ ImageType,
+ MediaType,
+ ProviderFeature,
+ StreamType,
+)
+from music_assistant.common.models.media_items import (
+ Album,
+ Artist,
+ AudioFormat,
+ MediaItemImage,
+ MediaItemMetadata,
+ ProviderMapping,
+ Track,
+)
+from music_assistant.common.models.streamdetails import StreamDetails
+from music_assistant.constants import MASS_LOGO, VARIOUS_ARTISTS_FANART
+from music_assistant.server.models.music_provider import MusicProvider
+
+if TYPE_CHECKING:
+ from music_assistant.common.models.config_entries import ConfigValueType, ProviderConfig
+ from music_assistant.common.models.provider import ProviderManifest
+ from music_assistant.server import MusicAssistant
+ from music_assistant.server.models import ProviderInstanceType
+
+
+DEFAULT_THUMB = MediaItemImage(
+ type=ImageType.THUMB,
+ path=MASS_LOGO,
+ provider="builtin",
+ remotely_accessible=False,
+)
+
+DEFAULT_FANART = MediaItemImage(
+ type=ImageType.FANART,
+ path=VARIOUS_ARTISTS_FANART,
+ provider="builtin",
+ remotely_accessible=False,
+)
+
+
+async def setup(
+ mass: MusicAssistant, manifest: ProviderManifest, config: ProviderConfig
+) -> ProviderInstanceType:
+ """Initialize provider(instance) with given configuration."""
+ return TestProvider(mass, manifest, config)
+
+
+async def get_config_entries(
+ mass: MusicAssistant, # noqa: ARG001
+ instance_id: str | None = None, # noqa: ARG001
+ action: str | None = None, # noqa: ARG001
+ values: dict[str, ConfigValueType] | None = None, # noqa: ARG001
+) -> tuple[ConfigEntry, ...]:
+ """
+ Return Config entries to setup this provider.
+
+ instance_id: id of an existing provider instance (None if new instance setup).
+ action: [optional] action key called from config entries UI.
+ values: the (intermediate) raw values for config entries sent with the action.
+ """
+ return ()
+
+
+class TestProvider(MusicProvider):
+ """Test/Demo provider that creates a collection of fake media items."""
+
+ @property
+ def is_streaming_provider(self) -> bool:
+ """Return True if the provider is a streaming provider."""
+ return False
+
+ @property
+ def supported_features(self) -> tuple[ProviderFeature, ...]:
+ """Return the features supported by this Provider."""
+ return (ProviderFeature.LIBRARY_TRACKS,)
+
+ async def get_track(self, prov_track_id: str) -> Track:
+ """Get full track details by id."""
+ artist_idx, album_idx, track_idx = prov_track_id.split("_", 2)
+ return Track(
+ item_id=prov_track_id,
+ provider=self.instance_id,
+ name=f"Test Track {artist_idx} - {album_idx} - {track_idx}",
+ duration=5,
+ artists=[await self.get_artist(artist_idx)],
+ album=await self.get_album(f"{artist_idx}_{album_idx}"),
+ provider_mappings={
+ ProviderMapping(
+ item_id=prov_track_id,
+ provider_domain=self.domain,
+ provider_instance=self.instance_id,
+ ),
+ },
+ metadata=MediaItemMetadata(images=[DEFAULT_THUMB]),
+ disc_number=1,
+ track_number=int(track_idx),
+ )
+
+ async def get_artist(self, prov_artist_id: str) -> Artist:
+ """Get full artist details by id."""
+ return Artist(
+ item_id=prov_artist_id,
+ provider=self.instance_id,
+ name=f"Test Artist {prov_artist_id}",
+ metadata=MediaItemMetadata(images=[DEFAULT_THUMB, DEFAULT_FANART]),
+ provider_mappings={
+ ProviderMapping(
+ item_id=prov_artist_id,
+ provider_domain=self.domain,
+ provider_instance=self.instance_id,
+ )
+ },
+ )
+
+ async def get_album(self, prov_album_id: str) -> Album:
+ """Get full artist details by id."""
+ artist_idx, album_idx = prov_album_id.split("_", 1)
+ return Album(
+ item_id=prov_album_id,
+ provider=self.instance_id,
+ name=f"Test Album {album_idx}",
+ artists=[await self.get_artist(artist_idx)],
+ provider_mappings={
+ ProviderMapping(
+ item_id=prov_album_id,
+ provider_domain=self.domain,
+ provider_instance=self.instance_id,
+ )
+ },
+ metadata=MediaItemMetadata(images=[DEFAULT_THUMB]),
+ )
+
+ async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
+ """Retrieve library tracks from the provider."""
+ for artist_idx in range(50):
+ for album_idx in range(25):
+ for track_idx in range(25):
+ track_item_id = f"{artist_idx}_{album_idx}_{track_idx}"
+ yield await self.get_track(track_item_id)
+
+ async def get_stream_details(self, item_id: str) -> StreamDetails:
+ """Get streamdetails for a track/radio."""
+ media_info = await self._get_media_info(item_id)  # NOTE(review): _get_media_info is not defined on TestProvider — confirm this copied method is reachable/working
+ is_radio = media_info.get("icy-name") or not media_info.duration
+ return StreamDetails(
+ provider=self.instance_id,
+ item_id=item_id,
+ audio_format=AudioFormat(
+ content_type=ContentType.try_parse(media_info.format),
+ sample_rate=media_info.sample_rate,
+ bit_depth=media_info.bits_per_sample,
+ channels=media_info.channels,
+ ),
+ media_type=MediaType.RADIO if is_radio else MediaType.TRACK,
+ stream_type=StreamType.HTTP,
+ path=item_id,
+ can_seek=not is_radio,
+ )
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 25 25" version="1.1">
+<g id="surface1">
+<path style=" stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;" d="M 1.5 0 L 23.5 0 C 24.328125 0 25 0.671875 25 1.5 L 25 23.5 C 25 24.328125 24.328125 25 23.5 25 L 1.5 25 C 0.671875 25 0 24.328125 0 23.5 L 0 1.5 C 0 0.671875 0.671875 0 1.5 0 Z M 1.5 0 "/>
+<path style=" stroke:none;fill-rule:nonzero;fill:rgb(100%,100%,100%);fill-opacity:1;" d="M 10.386719 18.875 L 14.8125 7.125 L 16.113281 7.125 L 11.6875 18.875 Z M 10.386719 18.875 "/>
+<path style=" stroke:none;fill-rule:nonzero;fill:rgb(100%,100%,100%);fill-opacity:1;" d="M 21.371094 18.875 L 16.945312 7.125 L 18.246094 7.125 L 22.671875 18.875 Z M 21.371094 18.875 "/>
+<path style=" stroke:none;fill-rule:nonzero;fill:rgb(100%,100%,100%);fill-opacity:1;" d="M 2.636719 18.875 L 2.636719 7.125 L 3.875 7.125 L 3.875 18.875 Z M 2.636719 18.875 "/>
+<path style=" stroke:none;fill-rule:nonzero;fill:rgb(100%,100%,100%);fill-opacity:1;" d="M 5.445312 18.875 L 5.445312 7.125 L 6.683594 7.125 L 6.683594 18.875 Z M 5.445312 18.875 "/>
+<path style=" stroke:none;fill-rule:nonzero;fill:rgb(100%,100%,100%);fill-opacity:1;" d="M 8.253906 18.875 L 8.253906 7.125 L 9.492188 7.125 L 9.492188 18.875 Z M 8.253906 18.875 "/>
+</g>
+</svg>
--- /dev/null
+{
+ "type": "music",
+ "domain": "test",
+ "name": "Test / demo provider",
+ "description": "Test/Demo provider that creates a collection of fake media items.",
+ "codeowners": [
+ "@music-assistant"
+ ],
+ "requirements": [],
+ "documentation": "",
+ "multi_instance": false,
+ "builtin": false,
+ "hidden": false
+}
EventCallBackType, tuple[EventType, ...] | None, tuple[str, ...] | None
]
+ENABLE_DEBUG = os.environ.get("PYTHONDEVMODE", "0") not in ("", "0")
LOGGER = logging.getLogger(MASS_LOGGER_NAME)
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
async with asyncio.TaskGroup() as tg:
for dir_str in os.listdir(PROVIDERS_PATH):
dir_path = os.path.join(PROVIDERS_PATH, dir_str)
+ if dir_str == "test" and not ENABLE_DEBUG:
+ continue
if not await isdir(dir_path):
continue
tg.create_task(load_provider_manifest(dir_str, dir_path))