--- /dev/null
+"""Provides a simple stateless caching system."""
+from __future__ import annotations
+
+import asyncio
+import functools
+import json
+import time
+from collections import OrderedDict
+from collections.abc import MutableMapping
+from typing import TYPE_CHECKING, Any, Iterator, Optional
+
+from music_assistant.controllers.database import TABLE_CACHE
+
+if TYPE_CHECKING:
+ from music_assistant.mass import MusicAssistant
+
+
class CacheController:
    """Basic cache controller using both memory and database."""

    def __init__(self, mass: MusicAssistant) -> None:
        """Initialize our caching class."""
        self.mass = mass
        self.logger = mass.logger.getChild("cache")
        # bounded in-memory cache in front of the (persistent) db cache
        self._mem_cache = MemoryCache(500)

    async def setup(self) -> None:
        """Async initialize of cache module."""
        self.__schedule_cleanup_task()

    async def get(self, cache_key: str, checksum: Optional[str] = None, default=None):
        """
        Get object from cache and return the results.

        cache_key: the (unique) name of the cache object as reference
        checksum: optional argument to check if the checksum in the
            cacheobject matches the checksum provided
        """
        cur_time = int(time.time())
        if checksum is not None and not isinstance(checksum, str):
            checksum = str(checksum)

        # try memory cache first
        cache_data = self._mem_cache.get(cache_key)
        if (
            cache_data is not None
            and (not checksum or cache_data[1] == checksum)
            and cache_data[2] >= cur_time
        ):
            return cache_data[0]
        # fall back to db cache
        if db_row := await self.mass.database.get_row(TABLE_CACHE, {"key": cache_key}):
            # both the checksum (when given) AND the expiry must be valid;
            # the previous `or`/`and` mix skipped the expiry check entirely
            # when no checksum was provided (operator precedence).
            if (
                not checksum or db_row["checksum"] == checksum
            ) and db_row["expires"] >= cur_time:
                try:
                    data = await asyncio.get_running_loop().run_in_executor(
                        None, json.loads, db_row["data"]
                    )
                except Exception as exc:  # pylint: disable=broad-except
                    self.logger.exception(
                        "Error parsing cache data for %s", cache_key, exc_info=exc
                    )
                else:
                    # also store in memory cache for faster access
                    self._mem_cache[cache_key] = (
                        data,
                        db_row["checksum"],
                        db_row["expires"],
                    )
                    return data
        return default

    async def set(self, cache_key, data, checksum="", expiration=(86400 * 30)):
        """Set data in cache."""
        if not isinstance(checksum, str):
            checksum = str(checksum)
        expires = int(time.time() + expiration)
        self._mem_cache[cache_key] = (data, checksum, expires)
        if (expires - time.time()) < 3600 * 4:
            # do not cache items in db with short expiration
            return
        # serialize in executor to not block the event loop on large payloads
        data = await asyncio.get_running_loop().run_in_executor(None, json.dumps, data)
        await self.mass.database.insert(
            TABLE_CACHE,
            {"key": cache_key, "expires": expires, "checksum": checksum, "data": data},
            allow_replace=True,
        )

    async def delete(self, cache_key):
        """Delete data from cache."""
        self._mem_cache.pop(cache_key, None)
        await self.mass.database.delete(TABLE_CACHE, {"key": cache_key})

    async def clear(self, key_filter: Optional[str] = None) -> None:
        """Clear all/partial items from cache."""
        # keep the (bounded) MemoryCache instance instead of replacing it with
        # a plain dict, which would silently lose the maxlen eviction behavior
        self._mem_cache.clear()
        query = f"key LIKE '%{key_filter}%'" if key_filter else None
        await self.mass.database.delete(TABLE_CACHE, query=query)

    async def auto_cleanup(self):
        """Scheduled auto cleanup task."""
        # for now we simply reset the memory cache
        self._mem_cache.clear()
        cur_timestamp = int(time.time())
        # NOTE(review): get_rows returns at most its default limit (500) rows
        # per call — consider paginating if the cache table grows larger.
        for db_row in await self.mass.database.get_rows(TABLE_CACHE):
            # clean up db cache object only if expired
            if db_row["expires"] < cur_timestamp:
                await self.delete(db_row["key"])
        # compact db
        await self.mass.database.execute("VACUUM")

    def __schedule_cleanup_task(self):
        """Schedule the cleanup task."""
        self.mass.add_job(self.auto_cleanup(), "Cleanup cache")
        # reschedule self
        self.mass.loop.call_later(3600, self.__schedule_cleanup_task)
+
+
def use_cache(expiration=86400 * 30):
    """
    Return decorator that can be used to cache an async method's result.

    The decorated method may be called with two extra keyword arguments:
    skip_cache: bypass the cache lookup (the fresh result is still stored).
    cache_checksum: checksum forwarded to cache.get/cache.set for validation.
    """

    def wrapper(func):
        @functools.wraps(func)
        async def wrapped(*args, **kwargs):
            method_class = args[0]
            method_class_name = method_class.__class__.__name__
            skip_cache = kwargs.pop("skip_cache", False)
            cache_checksum = kwargs.pop("cache_checksum", "")
            cache_key_parts = [method_class_name, func.__name__]
            # positional args may be non-string (ints, enums, ...):
            # coerce to str so the key join below cannot raise TypeError
            cache_key_parts += [str(x) for x in args[1:]]
            # sort kwargs so the key is stable regardless of call order
            for key in sorted(kwargs.keys()):
                cache_key_parts.append(f"{key}{kwargs[key]}")
            cache_key = ".".join(cache_key_parts)

            # only hit the cache when not explicitly skipped
            # (previously the cache was fetched even when skip_cache was set)
            if not skip_cache:
                cachedata = await method_class.cache.get(
                    cache_key, checksum=cache_checksum
                )
                if cachedata is not None:
                    return cachedata
            result = await func(*args, **kwargs)
            # store the result in the background, do not delay the caller
            asyncio.create_task(
                method_class.cache.set(
                    cache_key, result, expiration=expiration, checksum=cache_checksum
                )
            )
            return result

        return wrapped

    return wrapper
+
+
class MemoryCache(MutableMapping):
    """Size-bounded in-memory mapping with least-recently-used eviction."""

    def __init__(self, maxlen: int):
        """Create the cache with room for at most `maxlen` entries."""
        self._maxlen = maxlen
        self._entries = OrderedDict()

    @property
    def maxlen(self) -> int:
        """Maximum number of entries kept in the cache."""
        return self._maxlen

    def get(self, key: str, default: Any = None) -> Any:
        """Return the value for key or default (does not touch LRU order)."""
        return self._entries.get(key, default)

    def pop(self, key: str, default: Any = None) -> Any:
        """Remove key and return its value, or default when missing."""
        return self._entries.pop(key, default)

    def __getitem__(self, key: str) -> Any:
        """Return the value for key, marking it as most recently used."""
        # move first: a missing key raises KeyError before any lookup
        self._entries.move_to_end(key)
        return self._entries[key]

    def __setitem__(self, key: str, value: Any) -> None:
        """Store value under key, evicting the oldest entry when full."""
        already_present = key in self._entries
        if already_present:
            self._entries.move_to_end(key)
        if not already_present and len(self._entries) == self._maxlen:
            # drop the least-recently-used entry to make room
            self._entries.popitem(last=False)
        self._entries[key] = value

    def __delitem__(self, key) -> None:
        """Remove key, raising KeyError when missing."""
        del self._entries[key]

    def __iter__(self) -> Iterator:
        """Iterate over the cached keys (oldest first)."""
        return iter(self._entries)

    def __len__(self) -> int:
        """Return the number of cached entries."""
        return len(self._entries)
--- /dev/null
+"""Database logic."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Union
+
+from databases import Database as Db
+from sqlalchemy.sql import ClauseElement
+
+if TYPE_CHECKING:
+ from music_assistant.mass import MusicAssistant
+
+
# Bump on every (breaking) change to the database schema;
# compared against the stored "version" setting in DatabaseController._migrate.
SCHEMA_VERSION = 18

# Canonical table names, shared by all controllers that touch the database.
TABLE_TRACK_LOUDNESS = "track_loudness"
TABLE_PLAYLOG = "playlog"
TABLE_ARTISTS = "artists"
TABLE_ALBUMS = "albums"
TABLE_TRACKS = "tracks"
TABLE_PLAYLISTS = "playlists"
TABLE_RADIOS = "radios"
TABLE_CACHE = "cache"
TABLE_SETTINGS = "settings"
TABLE_THUMBS = "thumbnails"
+
+
class DatabaseController:
    """Controller that holds the (connection to the) database."""

    def __init__(self, mass: MusicAssistant):
        """Initialize class."""
        self.url = mass.config.database_url
        self.mass = mass
        self.logger = mass.logger.getChild("db")
        # we maintain one global connection - otherwise we run into (dead)lock issues.
        # https://github.com/encode/databases/issues/456
        self._db = Db(self.url, timeout=360)

    async def setup(self) -> None:
        """Perform async initialization."""
        await self._db.connect()
        self.logger.info("Database connected.")
        await self._migrate()

    async def close(self) -> None:
        """Close db connection on exit."""
        self.logger.info("Database disconnected.")
        await self._db.disconnect()

    async def get_setting(self, key: str) -> str | None:
        """Get setting from settings table, or None when not present."""
        if db_row := await self.get_row(TABLE_SETTINGS, {"key": key}):
            return db_row["value"]
        return None

    async def set_setting(self, key: str, value: str) -> None:
        """Set (insert or replace) setting in settings table."""
        if not isinstance(value, str):
            value = str(value)
        return await self.insert(
            TABLE_SETTINGS, {"key": key, "value": value}, allow_replace=True
        )

    async def get_rows(
        self,
        table: str,
        match: Optional[dict] = None,
        order_by: Optional[str] = None,
        limit: int = 500,
        offset: int = 0,
    ) -> List[Mapping]:
        """Get all rows for given table, optionally filtered/ordered/paged."""
        sql_query = f"SELECT * FROM {table}"
        if match is not None:
            sql_query += " WHERE " + " AND ".join(f"{x} = :{x}" for x in match)
        if order_by is not None:
            sql_query += f" ORDER BY {order_by}"
        sql_query += f" LIMIT {limit} OFFSET {offset}"
        return await self._db.fetch_all(sql_query, match)

    async def get_rows_from_query(
        self,
        query: str,
        params: Optional[dict] = None,
        limit: int = 500,
        offset: int = 0,
    ) -> List[Mapping]:
        """Get all rows for given custom query (LIMIT/OFFSET appended)."""
        query = f"{query} LIMIT {limit} OFFSET {offset}"
        return await self._db.fetch_all(query, params)

    async def get_count_from_query(
        self,
        query: str,
        params: Optional[dict] = None,
    ) -> int:
        """Get row count for given custom query."""
        query = f"SELECT count() FROM ({query})"
        if result := await self._db.fetch_one(query, params):
            return result[0]
        return 0

    async def search(
        self, table: str, search: str, column: str = "name"
    ) -> List[Mapping]:
        """Search table by column (substring match)."""
        sql_query = f"SELECT * FROM {table} WHERE {column} LIKE :search"
        params = {"search": f"%{search}%"}
        return await self._db.fetch_all(sql_query, params)

    async def get_row(self, table: str, match: Dict[str, Any]) -> Mapping | None:
        """Get single row for given table where column matches keys/values."""
        sql_query = f"SELECT * FROM {table} WHERE "
        sql_query += " AND ".join(f"{x} = :{x}" for x in match)
        return await self._db.fetch_one(sql_query, match)

    async def insert(
        self,
        table: str,
        values: Dict[str, Any],
        allow_replace: bool = False,
    ) -> Mapping:
        """Insert data in given table and return the inserted/replaced row."""
        keys = tuple(values.keys())
        if allow_replace:
            sql_query = f'INSERT OR REPLACE INTO {table}({",".join(keys)})'
        else:
            sql_query = f'INSERT INTO {table}({",".join(keys)})'
        sql_query += f' VALUES ({",".join(f":{x}" for x in keys)})'
        await self.execute(sql_query, values)
        # return inserted/replaced item - look it up by its non-empty values
        # (empty/None values are skipped as they may have been defaulted by the db)
        lookup_vals = {
            key: value
            for key, value in values.items()
            if value is not None and value != ""
        }
        return await self.get_row(table, lookup_vals)

    async def insert_or_replace(self, table: str, values: Dict[str, Any]) -> Mapping:
        """Insert or replace data in given table."""
        return await self.insert(table=table, values=values, allow_replace=True)

    async def update(
        self,
        table: str,
        match: Dict[str, Any],
        values: Dict[str, Any],
    ) -> Mapping:
        """Update record(s) matching `match` and return the updated row."""
        keys = tuple(values.keys())
        sql_query = f'UPDATE {table} SET {",".join(f"{x}=:{x}" for x in keys)} WHERE '
        sql_query += " AND ".join(f"{x} = :{x}" for x in match)
        await self.execute(sql_query, {**match, **values})
        # return updated item
        return await self.get_row(table, match)

    async def delete(
        self, table: str, match: Optional[dict] = None, query: Optional[str] = None
    ) -> None:
        """
        Delete data in given table.

        match: column/value pairs to match (parameterized).
        query: a bare WHERE-condition string (without the WHERE keyword).
        When neither is given, ALL rows in the table are deleted.
        """
        assert not (query and "where" in query.lower()), "query must not contain WHERE"
        sql_query = f"DELETE FROM {table}"
        if match:
            sql_query += " WHERE " + " AND ".join(f"{x} = :{x}" for x in match)
        elif query:
            # `query` is a bare condition: prepend the WHERE keyword.
            # (the original branch tested for the substring "query" here,
            # an apparent typo for "where" that left a dead fallback branch)
            sql_query += " WHERE " + query
        await self.execute(sql_query, match)

    async def delete_where_query(self, table: str, query: Optional[str] = None) -> None:
        """Delete data in given table using given where clause."""
        sql_query = f"DELETE FROM {table} WHERE {query}"
        await self.execute(sql_query)

    async def execute(
        self, query: Union[ClauseElement, str], values: Optional[dict] = None
    ) -> Any:
        """Execute command on the database."""
        return await self._db.execute(query, values)

    async def _migrate(self):
        """Perform database migration actions if needed."""
        # always create db tables if they don't exist to prevent errors trying to access them later
        await self.__create_database_tables()
        try:
            if prev_version := await self.get_setting("version"):
                prev_version = int(prev_version)
            else:
                prev_version = 0
        except (KeyError, ValueError):
            # stored version missing or not an int: treat as fresh install
            prev_version = 0

        if SCHEMA_VERSION != prev_version:
            self.logger.info(
                "Performing database migration from %s to %s",
                prev_version,
                SCHEMA_VERSION,
            )

            if prev_version < 18:
                # too many changes, just recreate
                await self.execute(f"DROP TABLE IF EXISTS {TABLE_ARTISTS}")
                await self.execute(f"DROP TABLE IF EXISTS {TABLE_ALBUMS}")
                await self.execute(f"DROP TABLE IF EXISTS {TABLE_TRACKS}")
                await self.execute(f"DROP TABLE IF EXISTS {TABLE_PLAYLISTS}")
                await self.execute(f"DROP TABLE IF EXISTS {TABLE_RADIOS}")
                await self.execute(f"DROP TABLE IF EXISTS {TABLE_CACHE}")
                await self.execute(f"DROP TABLE IF EXISTS {TABLE_THUMBS}")
                await self.execute("DROP TABLE IF EXISTS provider_mappings")
                # recreate missing tables
                await self.__create_database_tables()

        # store current schema version
        await self.set_setting("version", str(SCHEMA_VERSION))

    async def __create_database_tables(self) -> None:
        """Init database tables (idempotent)."""
        await self.execute(
            """CREATE TABLE IF NOT EXISTS settings(
                key TEXT PRIMARY KEY,
                value TEXT
                );"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_TRACK_LOUDNESS}(
                item_id INTEGER NOT NULL,
                provider TEXT NOT NULL,
                loudness REAL,
                UNIQUE(item_id, provider));"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_PLAYLOG}(
                item_id INTEGER NOT NULL,
                provider TEXT NOT NULL,
                timestamp INTEGER DEFAULT 0,
                UNIQUE(item_id, provider));"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_ALBUMS}(
                    item_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    sort_name TEXT NOT NULL,
                    sort_artist TEXT,
                    album_type TEXT,
                    year INTEGER,
                    version TEXT,
                    in_library BOOLEAN DEFAULT 0,
                    upc TEXT,
                    musicbrainz_id TEXT,
                    artists json,
                    metadata json,
                    provider_ids json,
                    timestamp INTEGER DEFAULT 0
                );"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_ARTISTS}(
                    item_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    sort_name TEXT NOT NULL,
                    musicbrainz_id TEXT,
                    in_library BOOLEAN DEFAULT 0,
                    metadata json,
                    provider_ids json,
                    timestamp INTEGER DEFAULT 0
                    );"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_TRACKS}(
                    item_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    sort_name TEXT NOT NULL,
                    sort_artist TEXT,
                    sort_album TEXT,
                    version TEXT,
                    duration INTEGER,
                    in_library BOOLEAN DEFAULT 0,
                    isrc TEXT,
                    musicbrainz_id TEXT,
                    artists json,
                    albums json,
                    metadata json,
                    provider_ids json,
                    timestamp INTEGER DEFAULT 0
                );"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_PLAYLISTS}(
                    item_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    sort_name TEXT NOT NULL,
                    owner TEXT NOT NULL,
                    is_editable BOOLEAN NOT NULL,
                    in_library BOOLEAN DEFAULT 0,
                    metadata json,
                    provider_ids json,
                    timestamp INTEGER DEFAULT 0,
                    UNIQUE(name, owner)
                );"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_RADIOS}(
                    item_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL UNIQUE,
                    sort_name TEXT NOT NULL,
                    in_library BOOLEAN DEFAULT 0,
                    metadata json,
                    provider_ids json,
                    timestamp INTEGER DEFAULT 0
                );"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_CACHE}(
                key TEXT UNIQUE NOT NULL, expires INTEGER NOT NULL, data TEXT, checksum TEXT NULL)"""
        )
        await self.execute(
            f"""CREATE TABLE IF NOT EXISTS {TABLE_THUMBS}(
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                path TEXT NOT NULL,
                size INTEGER DEFAULT 0,
                data BLOB,
                UNIQUE(path, size));"""
        )
        # create indexes
        # TODO: create indexes for the json columns ?
        await self.execute(
            "CREATE INDEX IF NOT EXISTS artists_in_library_idx on artists(in_library);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS albums_in_library_idx on albums(in_library);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS tracks_in_library_idx on tracks(in_library);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS playlists_in_library_idx on playlists(in_library);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS radios_in_library_idx on radios(in_library);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS artists_sort_name_idx on artists(sort_name);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS albums_sort_name_idx on albums(sort_name);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS tracks_sort_name_idx on tracks(sort_name);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS playlists_sort_name_idx on playlists(sort_name);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS radios_sort_name_idx on radios(sort_name);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS artists_musicbrainz_id_idx on artists(musicbrainz_id);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS albums_musicbrainz_id_idx on albums(musicbrainz_id);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS tracks_musicbrainz_id_idx on tracks(musicbrainz_id);"
        )
        await self.execute(
            "CREATE INDEX IF NOT EXISTS tracks_isrc_idx on tracks(isrc);"
        )
        await self.execute("CREATE INDEX IF NOT EXISTS albums_upc_idx on albums(upc);")
--- /dev/null
+"""Package with Media controllers."""
--- /dev/null
+"""Manage MediaItems of type Album."""
+from __future__ import annotations
+
+import asyncio
+from random import choice, random
+from typing import TYPE_CHECKING, List, Optional, Union
+
+from music_assistant.constants import VARIOUS_ARTISTS
+from music_assistant.controllers.database import TABLE_ALBUMS, TABLE_TRACKS
+from music_assistant.controllers.media.base import MediaControllerBase
+from music_assistant.helpers.compare import compare_album, loose_compare_strings
+from music_assistant.helpers.json import json_serializer
+from music_assistant.models.enums import EventType, MusicProviderFeature, ProviderType
+from music_assistant.models.errors import (
+ MediaNotFoundError,
+ UnsupportedFeaturedException,
+)
+from music_assistant.models.event import MassEvent
+from music_assistant.models.media_items import (
+ Album,
+ AlbumType,
+ Artist,
+ ItemMapping,
+ MediaType,
+ Track,
+)
+
+if TYPE_CHECKING:
+ from music_assistant.models.music_provider import MusicProvider
+
+
class AlbumsController(MediaControllerBase[Album]):
    """Controller managing MediaItems of type Album."""

    db_table = TABLE_ALBUMS
    media_type = MediaType.ALBUM
    item_cls = Album

    async def get(self, *args, **kwargs) -> Album:
        """Return (full) details for a single media item."""
        album = await super().get(*args, **kwargs)
        # append full artist details to full album item
        if album.artist:
            album.artist = await self.mass.music.artists.get(
                album.artist.item_id, album.artist.provider
            )
        return album

    async def tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> List[Track]:
        """Return album tracks for the given provider album id."""

        if not (provider == ProviderType.DATABASE or provider_id == "database"):
            # return provider album tracks
            return await self._get_provider_album_tracks(item_id, provider, provider_id)

        # db_album requested: get results from first (non-file) provider
        return await self._get_db_album_tracks(item_id)

    async def versions(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> List[Album]:
        """Return all versions of an album we can find on all providers."""
        assert provider or provider_id, "Provider type or ID must be specified"
        album = await self.get(item_id, provider, provider_id)
        # perform a search on all provider(types) to collect all versions/variants
        prov_types = {item.type for item in self.mass.music.providers}
        search_query = f"{album.artist.name} - {album.name}"
        all_versions = {
            prov_item.item_id: prov_item
            for prov_items in await asyncio.gather(
                *[self.search(search_query, prov_type) for prov_type in prov_types]
            )
            for prov_item in prov_items
            if loose_compare_strings(album.name, prov_item.name)
        }
        # make sure that the 'base' version is included
        for prov_version in album.provider_ids:
            if prov_version.item_id in all_versions:
                continue
            album_copy = Album.from_dict(album.to_dict())
            album_copy.item_id = prov_version.item_id
            album_copy.provider = prov_version.prov_type
            album_copy.provider_ids = {prov_version}
            all_versions[prov_version.item_id] = album_copy

        # return the aggregated result as a real list (matches the annotation,
        # dict_values is not indexable/serializable for callers)
        return list(all_versions.values())

    async def add(self, item: Album) -> Album:
        """Add album to local db and return the database item."""
        # grab additional metadata
        await self.mass.metadata.get_album_metadata(item)
        existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
        if existing:
            db_item = await self.update_db_item(existing.item_id, item)
        else:
            db_item = await self.add_db_item(item)
        # also fetch same album on all providers
        await self._match(db_item)
        # return final db_item after all match/metadata actions
        db_item = await self.get_db_item(db_item.item_id)
        self.mass.signal_event(
            MassEvent(
                EventType.MEDIA_ITEM_UPDATED
                if existing
                else EventType.MEDIA_ITEM_ADDED,
                db_item.uri,
                db_item,
            )
        )
        return db_item

    async def add_db_item(self, item: Album, overwrite_existing: bool = False) -> Album:
        """Add a new record to the database."""
        assert item.provider_ids, f"Album {item.name} is missing provider id(s)"
        assert item.artist, f"Album {item.name} is missing artist"
        async with self._db_add_lock:
            cur_item = None
            # always try to grab existing item by musicbrainz_id/upc
            if item.musicbrainz_id:
                match = {"musicbrainz_id": item.musicbrainz_id}
                cur_item = await self.mass.database.get_row(self.db_table, match)
            if not cur_item and item.upc:
                match = {"upc": item.upc}
                cur_item = await self.mass.database.get_row(self.db_table, match)
            if not cur_item:
                # fallback to search and match
                for row in await self.mass.database.search(self.db_table, item.name):
                    row_album = Album.from_db_row(row)
                    if compare_album(row_album, item):
                        cur_item = row_album
                        break
            if cur_item:
                # update existing
                return await self.update_db_item(
                    cur_item.item_id, item, overwrite=overwrite_existing
                )

            # insert new item
            album_artists = await self._get_album_artists(item, cur_item)
            # first album artist determines the sort_artist column
            sort_artist = album_artists[0].sort_name if album_artists else ""
            new_item = await self.mass.database.insert(
                self.db_table,
                {
                    **item.to_db_row(),
                    "artists": json_serializer(album_artists) or None,
                    "sort_artist": sort_artist,
                },
            )
            item_id = new_item["item_id"]
            self.logger.debug("added %s to database", item.name)
            # return created object
            return await self.get_db_item(item_id)

    async def update_db_item(
        self,
        item_id: int,
        item: Album,
        overwrite: bool = False,
    ) -> Album:
        """Update Album record in the database."""
        assert item.provider_ids, f"Album {item.name} is missing provider id(s)"
        assert item.artist, f"Album {item.name} is missing artist"
        cur_item = await self.get_db_item(item_id)

        if overwrite:
            metadata = item.metadata
            metadata.last_refresh = None
            provider_ids = item.provider_ids
            album_artists = await self._get_album_artists(item, overwrite=True)
        else:
            metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
            provider_ids = {*cur_item.provider_ids, *item.provider_ids}
            album_artists = await self._get_album_artists(item, cur_item)

        # keep the existing album type unless the new item has a known one
        if item.album_type != AlbumType.UNKNOWN:
            album_type = item.album_type
        else:
            album_type = cur_item.album_type

        sort_artist = album_artists[0].sort_name if album_artists else ""

        await self.mass.database.update(
            self.db_table,
            {"item_id": item_id},
            {
                "name": item.name if overwrite else cur_item.name,
                "sort_name": item.sort_name if overwrite else cur_item.sort_name,
                "sort_artist": sort_artist,
                "version": item.version if overwrite else cur_item.version,
                "year": item.year or cur_item.year,
                "upc": item.upc or cur_item.upc,
                "album_type": album_type.value,
                "artists": json_serializer(album_artists) or None,
                "metadata": json_serializer(metadata),
                "provider_ids": json_serializer(provider_ids),
                "musicbrainz_id": item.musicbrainz_id or cur_item.musicbrainz_id,
            },
        )
        self.logger.debug("updated %s in database: %s", item.name, item_id)
        return await self.get_db_item(item_id)

    async def delete_db_item(self, item_id: int, recursive: bool = False) -> None:
        """Delete record from the database."""
        # check album tracks
        db_rows = await self.mass.database.get_rows_from_query(
            f"SELECT item_id FROM {TABLE_TRACKS} WHERE albums LIKE '%\"{item_id}\"%'",
            limit=5000,
        )
        assert not (db_rows and not recursive), "Tracks attached to album"
        for db_row in db_rows:
            try:
                # the rows come from the tracks table, so delete them through
                # the tracks controller (the original recursed into the albums
                # controller with a track id, deleting the wrong records)
                await self.mass.music.tracks.delete_db_item(
                    db_row["item_id"], recursive
                )
            except MediaNotFoundError:
                pass

        # delete the album itself from db
        await super().delete_db_item(item_id)

    async def _get_provider_album_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> List[Track]:
        """Return album tracks for the given provider album id."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if not prov:
            return []
        full_album = await self.get_provider_item(item_id, provider_id or provider)
        # prefer cache items (if any)
        cache_key = f"{prov.type.value}.albumtracks.{item_id}"
        cache_checksum = full_album.metadata.checksum
        if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
            return [Track.from_dict(x) for x in cache]
        # no items in cache - get listing from provider
        items = []
        for track in await prov.get_album_tracks(item_id):
            # make sure that the (full) album is stored on the tracks
            track.album = full_album
            if full_album.metadata.images:
                track.metadata.images = full_album.metadata.images
            items.append(track)
        # store (serializable items) in cache
        self.mass.create_task(
            self.mass.cache.set(
                cache_key, [x.to_dict() for x in items], checksum=cache_checksum
            )
        )
        return items

    async def _get_provider_dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ):
        """Generate a dynamic list of tracks based on the album content."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if (
            not prov
            or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
        ):
            return []
        album_tracks = await self._get_provider_album_tracks(
            item_id=item_id, provider=provider, provider_id=provider_id
        )
        if not album_tracks:
            # guard: choice() on an empty sequence raises IndexError
            return []
        # Grab a random track from the album that we use to obtain similar tracks for
        track = choice(album_tracks)
        # Calculate no of songs to grab from each list at a 10/90 ratio
        total_no_of_tracks = limit + limit % 2
        no_of_album_tracks = int(total_no_of_tracks * 10 / 100)
        no_of_similar_tracks = int(total_no_of_tracks * 90 / 100)
        # Grab similar tracks from the music provider
        similar_tracks = await prov.get_similar_tracks(
            prov_track_id=track.item_id, limit=no_of_similar_tracks
        )
        # Merge album content with similar tracks, each part shuffled
        dynamic_playlist = [
            *sorted(album_tracks, key=lambda n: random())[:no_of_album_tracks],
            *sorted(similar_tracks, key=lambda n: random())[:no_of_similar_tracks],
        ]
        return sorted(dynamic_playlist, key=lambda n: random())

    async def _get_dynamic_tracks(self, media_item: Album, limit=25) -> List[Track]:
        """Get dynamic list of tracks for given item, fallback/default implementation."""
        # TODO: query metadata provider(s) to get similar tracks (or tracks from similar artists)
        raise UnsupportedFeaturedException(
            "No Music Provider found that supports requesting similar tracks."
        )

    async def _get_db_album_tracks(
        self,
        item_id: str,
    ) -> List[Track]:
        """Return in-database album tracks for the given database album."""
        db_album = await self.get_db_item(item_id)
        # simply grab all tracks in the db that are linked to this album
        # TODO: adjust to json query instead of text search?
        query = f"SELECT * FROM tracks WHERE albums LIKE '%\"{item_id}\"%'"
        result = []
        for track in await self.mass.music.tracks.get_db_items_by_query(query):
            if album_mapping := next(
                (x for x in track.albums if x.item_id == db_album.item_id), None
            ):
                # make sure that the full album is set on the track and prefer the album's images
                track.album = db_album
                if db_album.metadata.images:
                    track.metadata.images = db_album.metadata.images
                # apply the disc and track number from the mapping
                track.disc_number = album_mapping.disc_number
                track.track_number = album_mapping.track_number
                result.append(track)
        return sorted(result, key=lambda x: (x.disc_number or 0, x.track_number or 0))

    async def _match(self, db_album: Album) -> None:
        """
        Try to find matching album on all providers for the provided (database) album.

        This is used to link objects of different providers/qualities together.
        """
        if db_album.provider != ProviderType.DATABASE:
            return  # Matching only supported for database items

        async def find_prov_match(provider: MusicProvider):
            self.logger.debug(
                "Trying to match album %s on provider %s", db_album.name, provider.name
            )
            match_found = False
            for search_str in (
                db_album.name,
                f"{db_album.artist.name} - {db_album.name}",
                f"{db_album.artist.name} {db_album.name}",
            ):
                if match_found:
                    break
                search_result = await self.search(search_str, provider.id)
                for search_result_item in search_result:
                    if not search_result_item.available:
                        continue
                    if not compare_album(search_result_item, db_album):
                        continue
                    # we must fetch the full album version, search results are simplified objects
                    prov_album = await self.get_provider_item(
                        search_result_item.item_id, search_result_item.provider
                    )
                    if compare_album(prov_album, db_album):
                        # 100% match, we can simply update the db with additional provider ids
                        await self.update_db_item(db_album.item_id, prov_album)
                        match_found = True
            return match_found

        # try to find match on all providers
        cur_prov_types = {x.prov_type for x in db_album.provider_ids}
        for provider in self.mass.music.providers:
            if provider.type in cur_prov_types:
                continue
            if MusicProviderFeature.SEARCH not in provider.supported_features:
                continue
            if await find_prov_match(provider):
                cur_prov_types.add(provider.type)
            else:
                self.logger.debug(
                    "Could not find match for Album %s on provider %s",
                    db_album.name,
                    provider.name,
                )

    async def _get_album_artists(
        self,
        db_album: Album,
        updated_album: Optional[Album] = None,
        overwrite: bool = False,
    ) -> List[ItemMapping]:
        """Extract (database) album artist(s) as ItemMapping."""
        # use intermediate set to prevent duplicates
        album_artists = set()
        for album in (updated_album, db_album):
            if not album:
                continue
            for artist in album.artists:
                album_artists.add(await self._get_artist_mapping(artist, overwrite))
        # filter various artists if multiple artists
        if len(album_artists) > 1:
            album_artists = {x for x in album_artists if (x.name != VARIOUS_ARTISTS)}
        return list(album_artists)

    async def _get_artist_mapping(
        self, artist: Union[Artist, ItemMapping], overwrite: bool = False
    ) -> ItemMapping:
        """Extract (database) track artist as ItemMapping."""
        if overwrite:
            artist = await self.mass.music.artists.add_db_item(
                artist, overwrite_existing=True
            )
        if artist.provider == ProviderType.DATABASE:
            if isinstance(artist, ItemMapping):
                return artist
            return ItemMapping.from_item(artist)

        if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
            artist.item_id, provider=artist.provider
        ):
            return ItemMapping.from_item(db_artist)

        db_artist = await self.mass.music.artists.add_db_item(artist)
        return ItemMapping.from_item(db_artist)
--- /dev/null
+"""Manage MediaItems of type Artist."""
+
+import asyncio
+import itertools
+from random import choice, random
+from time import time
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
+
+from music_assistant.constants import VARIOUS_ARTISTS, VARIOUS_ARTISTS_ID
+from music_assistant.controllers.database import (
+ TABLE_ALBUMS,
+ TABLE_ARTISTS,
+ TABLE_TRACKS,
+)
+from music_assistant.controllers.media.base import MediaControllerBase
+from music_assistant.helpers.compare import compare_strings
+from music_assistant.helpers.json import json_serializer
+from music_assistant.models.enums import EventType, MusicProviderFeature, ProviderType
+from music_assistant.models.errors import (
+ MediaNotFoundError,
+ UnsupportedFeaturedException,
+)
+from music_assistant.models.event import MassEvent
+from music_assistant.models.media_items import (
+ Album,
+ AlbumType,
+ Artist,
+ ItemMapping,
+ MediaType,
+ PagedItems,
+ Track,
+)
+
+if TYPE_CHECKING:
+ from music_assistant.models.music_provider import MusicProvider
+
+
class ArtistsController(MediaControllerBase[Artist]):
    """Controller managing MediaItems of type Artist."""

    db_table = TABLE_ARTISTS
    media_type = MediaType.ARTIST
    item_cls = Artist

    async def album_artists(
        self,
        in_library: Optional[bool] = None,
        search: Optional[str] = None,
        limit: int = 500,
        offset: int = 0,
        order_by: str = "sort_name",
    ) -> PagedItems:
        """Get in-database album artists."""
        return await self.db_items(
            in_library=in_library,
            search=search,
            limit=limit,
            offset=offset,
            order_by=order_by,
            query_parts=[
                "artists.sort_name in (select albums.sort_artist from albums)"
            ],
        )

    async def toptracks(
        self,
        item_id: Optional[str] = None,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        artist: Optional[Artist] = None,
    ) -> List[Track]:
        """Return top tracks for an artist."""
        if not artist:
            artist = await self.get(item_id, provider, provider_id)
        # get results from all providers
        coros = [
            self.get_provider_artist_toptracks(
                item.item_id,
                provider=item.prov_type,
                provider_id=item.prov_id,
                cache_checksum=artist.metadata.checksum,
            )
            for item in artist.provider_ids
        ]
        tracks = itertools.chain.from_iterable(await asyncio.gather(*coros))
        # merge duplicates using a dict
        final_items: Dict[str, Track] = {}
        for track in tracks:
            key = f".{track.name}.{track.version}"
            if key in final_items:
                final_items[key].provider_ids.update(track.provider_ids)
            else:
                final_items[key] = track
        return list(final_items.values())

    async def albums(
        self,
        item_id: Optional[str] = None,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        artist: Optional[Artist] = None,
    ) -> List[Album]:
        """Return (all/most popular) albums for an artist."""
        if not artist:
            artist = await self.get(item_id, provider, provider_id)
        # get results from all providers
        coros = [
            self.get_provider_artist_albums(
                item.item_id, item.prov_type, cache_checksum=artist.metadata.checksum
            )
            for item in artist.provider_ids
        ]
        albums = itertools.chain.from_iterable(await asyncio.gather(*coros))
        # merge duplicates using a dict
        final_items: Dict[str, Album] = {}
        for album in albums:
            key = f".{album.name}.{album.version}"
            if key in final_items:
                final_items[key].provider_ids.update(album.provider_ids)
            else:
                final_items[key] = album
            if album.in_library:
                final_items[key].in_library = True
        return list(final_items.values())

    async def add(self, item: Artist) -> Artist:
        """Add artist to local db and return the database item."""
        # grab musicbrainz id and additional metadata
        await self.mass.metadata.get_artist_metadata(item)
        existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
        if existing:
            db_item = await self.update_db_item(existing.item_id, item)
        else:
            db_item = await self.add_db_item(item)
        # also fetch same artist on all providers
        await self.match_artist(db_item)
        # return final db_item after all match/metadata actions
        db_item = await self.get_db_item(db_item.item_id)
        self.mass.signal_event(
            MassEvent(
                EventType.MEDIA_ITEM_UPDATED
                if existing
                else EventType.MEDIA_ITEM_ADDED,
                db_item.uri,
                db_item,
            )
        )
        return db_item

    async def match_artist(self, db_artist: Artist):
        """
        Try to find matching artists on all providers for the provided (database) item_id.

        This is used to link objects of different providers together.
        """
        assert (
            db_artist.provider == ProviderType.DATABASE
        ), "Matching only supported for database items!"
        cur_prov_types = {x.prov_type for x in db_artist.provider_ids}
        for provider in self.mass.music.providers:
            if provider.type in cur_prov_types:
                continue
            if MusicProviderFeature.SEARCH not in provider.supported_features:
                continue
            if await self._match(db_artist, provider):
                cur_prov_types.add(provider.type)
            else:
                self.logger.debug(
                    "Could not find match for Artist %s on provider %s",
                    db_artist.name,
                    provider.name,
                )

    async def get_provider_artist_toptracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        cache_checksum: Any = None,
    ) -> List[Track]:
        """Return top tracks for an artist on given provider."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if not prov:
            return []
        # prefer cache items (if any)
        cache_key = f"{prov.type.value}.artist_toptracks.{item_id}"
        if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
            return [Track.from_dict(x) for x in cache]
        # no items in cache - get listing from provider
        if MusicProviderFeature.ARTIST_TOPTRACKS in prov.supported_features:
            items = await prov.get_artist_toptracks(item_id)
        else:
            # fallback implementation using the db
            if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
                item_id, provider=provider, provider_id=provider_id
            ):
                prov_id = provider_id or provider.value
                # TODO: adjust to json query instead of text search?
                query = f"SELECT * FROM tracks WHERE artists LIKE '%\"{db_artist.item_id}\"%'"
                query += f" AND provider_ids LIKE '%\"{prov_id}\"%'"
                items = await self.mass.music.tracks.get_db_items_by_query(query)
            else:
                # edge case: artist not (yet) in the db
                # BUGFIX: without this branch `items` was unbound below
                # (the albums variant already had this guard)
                items = []
        # store (serializable items) in cache
        self.mass.create_task(
            self.mass.cache.set(
                cache_key, [x.to_dict() for x in items], checksum=cache_checksum
            )
        )
        return items

    async def get_provider_artist_albums(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        cache_checksum: Any = None,
    ) -> List[Album]:
        """Return albums for an artist on given provider."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if not prov:
            return []
        # prefer cache items (if any)
        cache_key = f"{prov.type.value}.artist_albums.{item_id}"
        if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
            return [Album.from_dict(x) for x in cache]
        # no items in cache - get listing from provider
        if MusicProviderFeature.ARTIST_ALBUMS in prov.supported_features:
            items = await prov.get_artist_albums(item_id)
        else:
            # fallback implementation using the db
            if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
                item_id, provider=provider, provider_id=provider_id
            ):
                prov_id = provider_id or provider.value
                # TODO: adjust to json query instead of text search?
                query = f"SELECT * FROM albums WHERE artists LIKE '%\"{db_artist.item_id}\"%'"
                query += f" AND provider_ids LIKE '%\"{prov_id}\"%'"
                items = await self.mass.music.albums.get_db_items_by_query(query)
            else:
                # edge case
                items = []
        # store (serializable items) in cache
        self.mass.create_task(
            self.mass.cache.set(
                cache_key, [x.to_dict() for x in items], checksum=cache_checksum
            )
        )
        return items

    async def add_db_item(
        self, item: Artist, overwrite_existing: bool = False
    ) -> Artist:
        """Add a new item record to the database."""
        assert isinstance(item, Artist), "Not a full Artist object"
        assert item.provider_ids, "Artist is missing provider id(s)"
        # enforce various artists name + id
        if compare_strings(item.name, VARIOUS_ARTISTS):
            item.musicbrainz_id = VARIOUS_ARTISTS_ID
        if item.musicbrainz_id == VARIOUS_ARTISTS_ID:
            item.name = VARIOUS_ARTISTS

        async with self._db_add_lock:
            # always try to grab existing item by musicbrainz_id
            cur_item = None
            if item.musicbrainz_id:
                match = {"musicbrainz_id": item.musicbrainz_id}
                cur_item = await self.mass.database.get_row(self.db_table, match)
            if not cur_item:
                # fallback to exact name match
                # NOTE: we match an artist by name which could theoretically lead to collisions
                # but the chance is so small it is not worth the additional overhead of grabbing
                # the musicbrainz id upfront
                match = {"sort_name": item.sort_name}
                for row in await self.mass.database.get_rows(self.db_table, match):
                    row_artist = Artist.from_db_row(row)
                    if row_artist.sort_name == item.sort_name:
                        cur_item = row_artist
                        break
            if cur_item:
                # update existing
                return await self.update_db_item(
                    cur_item.item_id, item, overwrite=overwrite_existing
                )

            # insert item
            if item.in_library and not item.timestamp:
                item.timestamp = int(time())
            new_item = await self.mass.database.insert(self.db_table, item.to_db_row())
            item_id = new_item["item_id"]
            self.logger.debug("added %s to database", item.name)
            # return created object
            return await self.get_db_item(item_id)

    async def update_db_item(
        self,
        item_id: int,
        item: Artist,
        overwrite: bool = False,
    ) -> Artist:
        """Update Artist record in the database."""
        cur_item = await self.get_db_item(item_id)
        if overwrite:
            metadata = item.metadata
            provider_ids = item.provider_ids
        else:
            metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
            provider_ids = {*cur_item.provider_ids, *item.provider_ids}

        # enforce various artists name + id
        if compare_strings(item.name, VARIOUS_ARTISTS):
            item.musicbrainz_id = VARIOUS_ARTISTS_ID
        if item.musicbrainz_id == VARIOUS_ARTISTS_ID:
            item.name = VARIOUS_ARTISTS

        await self.mass.database.update(
            self.db_table,
            {"item_id": item_id},
            {
                "name": item.name if overwrite else cur_item.name,
                "sort_name": item.sort_name if overwrite else cur_item.sort_name,
                "musicbrainz_id": item.musicbrainz_id or cur_item.musicbrainz_id,
                "metadata": json_serializer(metadata),
                "provider_ids": json_serializer(provider_ids),
            },
        )
        self.logger.debug("updated %s in database: %s", item.name, item_id)
        return await self.get_db_item(item_id)

    async def delete_db_item(self, item_id: int, recursive: bool = False) -> None:
        """Delete record from the database."""
        # check artist albums
        db_rows = await self.mass.database.get_rows_from_query(
            f"SELECT item_id FROM {TABLE_ALBUMS} WHERE artists LIKE '%\"{item_id}\"%'",
            limit=5000,
        )
        assert not (db_rows and not recursive), "Albums attached to artist"
        for db_row in db_rows:
            try:
                await self.mass.music.albums.delete_db_item(
                    db_row["item_id"], recursive
                )
            except MediaNotFoundError:
                pass

        # check artist tracks
        db_rows = await self.mass.database.get_rows_from_query(
            f"SELECT item_id FROM {TABLE_TRACKS} WHERE artists LIKE '%\"{item_id}\"%'",
            limit=5000,
        )
        assert not (db_rows and not recursive), "Tracks attached to artist"
        for db_row in db_rows:
            try:
                # BUGFIX: delete the attached tracks through the *tracks*
                # controller (previously this wrongly used the albums controller)
                await self.mass.music.tracks.delete_db_item(
                    db_row["item_id"], recursive
                )
            except MediaNotFoundError:
                pass

        # delete the artist itself from db
        await super().delete_db_item(item_id)

    async def _get_provider_dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ) -> List[Track]:
        """Generate a dynamic list of tracks based on the artist's top tracks."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if (
            not prov
            or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
        ):
            return []
        top_tracks = await self.get_provider_artist_toptracks(
            item_id=item_id, provider=provider, provider_id=provider_id
        )
        if not top_tracks:
            # ROBUSTNESS: choice() raises IndexError on an empty sequence
            return []
        # Grab a random track from the top tracks that we use to obtain similar tracks for
        track = choice(top_tracks)
        # Calculate no of songs to grab from each list at a 10/90 ratio
        total_no_of_tracks = limit + limit % 2
        no_of_artist_tracks = int(total_no_of_tracks * 10 / 100)
        no_of_similar_tracks = int(total_no_of_tracks * 90 / 100)
        # Grab similar tracks from the music provider
        similar_tracks = await prov.get_similar_tracks(
            prov_track_id=track.item_id, limit=no_of_similar_tracks
        )
        # Merge artist content with similar tracks
        dynamic_playlist = [
            *sorted(top_tracks, key=lambda n: random())[:no_of_artist_tracks],
            *sorted(similar_tracks, key=lambda n: random())[:no_of_similar_tracks],
        ]
        return sorted(dynamic_playlist, key=lambda n: random())

    async def _get_dynamic_tracks(
        self, media_item: Artist, limit: int = 25
    ) -> List[Track]:
        """Get dynamic list of tracks for given item, fallback/default implementation."""
        # TODO: query metadata provider(s) to get similar tracks (or tracks from similar artists)
        raise UnsupportedFeaturedException(
            "No Music Provider found that supports requesting similar tracks."
        )

    async def _match(self, db_artist: Artist, provider: "MusicProvider") -> bool:
        """Try to find matching artists on given provider for the provided (database) artist."""
        self.logger.debug(
            "Trying to match artist %s on provider %s", db_artist.name, provider.name
        )
        # try to get a match with some reference tracks of this artist
        for ref_track in await self.toptracks(
            db_artist.item_id, db_artist.provider, artist=db_artist
        ):
            # make sure we have a full track
            if isinstance(ref_track.album, ItemMapping):
                ref_track = await self.mass.music.tracks.get(
                    ref_track.item_id, ref_track.provider
                )
            for search_str in (
                f"{db_artist.name} - {ref_track.name}",
                f"{db_artist.name} {ref_track.name}",
                ref_track.name,
            ):
                search_results = await self.mass.music.tracks.search(
                    search_str, provider.type
                )
                for search_result_item in search_results:
                    if search_result_item.sort_name != ref_track.sort_name:
                        continue
                    # get matching artist from track
                    for search_item_artist in search_result_item.artists:
                        if search_item_artist.sort_name != db_artist.sort_name:
                            continue
                        # 100% match
                        # get full artist details so we have all metadata
                        prov_artist = await self.get_provider_item(
                            search_item_artist.item_id, search_item_artist.provider
                        )
                        await self.update_db_item(db_artist.item_id, prov_artist)
                        return True
        # try to get a match with some reference albums of this artist
        artist_albums = await self.albums(
            db_artist.item_id, db_artist.provider, artist=db_artist
        )
        for ref_album in artist_albums:
            if ref_album.album_type == AlbumType.COMPILATION:
                continue
            if ref_album.artist is None:
                continue
            for search_str in (
                ref_album.name,
                f"{db_artist.name} - {ref_album.name}",
                f"{db_artist.name} {ref_album.name}",
            ):
                search_result = await self.mass.music.albums.search(
                    search_str, provider.type
                )
                for search_result_item in search_result:
                    if search_result_item.artist is None:
                        continue
                    if search_result_item.sort_name != ref_album.sort_name:
                        continue
                    # artist must match 100%
                    if (
                        search_result_item.artist.sort_name
                        != ref_album.artist.sort_name
                    ):
                        continue
                    # 100% match
                    # get full artist details so we have all metadata
                    prov_artist = await self.get_provider_item(
                        search_result_item.artist.item_id,
                        search_result_item.artist.provider,
                    )
                    await self.update_db_item(db_artist.item_id, prov_artist)
                    return True
        return False
--- /dev/null
+"""Base (ABC) MediaType specific controller."""
+from __future__ import annotations
+
+import asyncio
+from abc import ABCMeta, abstractmethod
+from time import time
+from typing import (
+ TYPE_CHECKING,
+ AsyncGenerator,
+ Generic,
+ List,
+ Optional,
+ Tuple,
+ TypeVar,
+ Union,
+)
+
+from music_assistant.helpers.json import json_serializer
+from music_assistant.models.enums import (
+ EventType,
+ MediaType,
+ MusicProviderFeature,
+ ProviderType,
+)
+from music_assistant.models.errors import MediaNotFoundError
+from music_assistant.models.event import MassEvent
+from music_assistant.models.media_items import (
+ MediaItemType,
+ PagedItems,
+ Track,
+ media_from_dict,
+)
+
+if TYPE_CHECKING:
+ from music_assistant.mass import MusicAssistant
+
# TypeVar for the concrete media item type a controller subclass manages.
# NOTE(review): the bound looks wrong - "MediaControllerBase" is the controller
# class, not a media item; presumably this should be bound to MediaItemType.
# Confirm against the type checker before changing.
ItemCls = TypeVar("ItemCls", bound="MediaControllerBase")

# an item's full metadata is considered stale after 30 days (seconds)
REFRESH_INTERVAL = 60 * 60 * 24 * 30
+
+
class MediaControllerBase(Generic[ItemCls], metaclass=ABCMeta):
    """Base model for controller managing a MediaType."""

    media_type: MediaType
    item_cls: MediaItemType
    db_table: str

    def __init__(self, mass: MusicAssistant):
        """Initialize class."""
        self.mass = mass
        self.logger = mass.logger.getChild(f"music.{self.media_type.value}")
        self._db_add_lock = asyncio.Lock()

    @abstractmethod
    async def add(self, item: ItemCls) -> ItemCls:
        """Add item to local db and return the database item."""
        raise NotImplementedError

    @abstractmethod
    async def add_db_item(
        self, item: ItemCls, overwrite_existing: bool = False
    ) -> ItemCls:
        """Add a new record for this mediatype to the database."""
        raise NotImplementedError

    @abstractmethod
    async def update_db_item(
        self,
        item_id: int,
        item: ItemCls,
        overwrite: bool = False,
    ) -> ItemCls:
        """Update record in the database, merging data."""
        raise NotImplementedError

    async def db_items(
        self,
        in_library: Optional[bool] = None,
        search: Optional[str] = None,
        limit: int = 500,
        offset: int = 0,
        order_by: str = "sort_name",
        query_parts: Optional[List[str]] = None,
    ) -> PagedItems:
        """Get in-database items."""
        sql_query = f"SELECT * FROM {self.db_table}"
        params = {}
        query_parts = query_parts or []
        if search:
            params["search"] = f"%{search}%"
            if self.media_type in (MediaType.ALBUM, MediaType.TRACK):
                query_parts.append("(name LIKE :search or artists LIKE :search)")
            else:
                query_parts.append("name LIKE :search")
        if in_library is not None:
            query_parts.append("in_library = :in_library")
            params["in_library"] = in_library
        if query_parts:
            sql_query += " WHERE " + " AND ".join(query_parts)
        sql_query += f" ORDER BY {order_by}"
        items = await self.get_db_items_by_query(
            sql_query, params, limit=limit, offset=offset
        )
        count = len(items)
        if 0 < count < limit:
            # partial page: total can be derived without an extra COUNT query
            total = offset + count
        else:
            total = await self.mass.database.get_count_from_query(sql_query, params)
        return PagedItems(items, count, limit, offset, total)

    async def iter_db_items(
        self,
        in_library: Optional[bool] = None,
        search: Optional[str] = None,
        order_by: str = "sort_name",
    ) -> AsyncGenerator[ItemCls, None]:
        """Iterate all in-database items."""
        limit: int = 500
        offset: int = 0
        while True:
            next_items = await self.db_items(
                in_library=in_library,
                search=search,
                limit=limit,
                offset=offset,
                order_by=order_by,
            )
            for item in next_items.items:
                yield item
            if next_items.count < limit:
                break
            offset += limit

    async def get(
        self,
        provider_item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        force_refresh: bool = False,
        lazy: bool = True,
        details: Optional[ItemCls] = None,
    ) -> ItemCls:
        """Return (full) details for a single media item."""
        assert provider or provider_id, "provider or provider_id must be supplied"
        if isinstance(provider, str):
            provider = ProviderType(provider)
        db_item = await self.get_db_item_by_prov_id(
            provider_item_id=provider_item_id,
            provider=provider,
            provider_id=provider_id,
        )
        if db_item and (time() - db_item.last_refresh) > REFRESH_INTERVAL:
            # it's been too long since the full metadata was last retrieved (or never at all)
            force_refresh = True
        if db_item and force_refresh:
            # get (first) provider item id belonging to this db item
            provider_id, provider_item_id = await self.get_provider_id(db_item)
        elif db_item:
            # we have a db item and no refreshing is needed, return the results!
            return db_item
        if not details and provider_id:
            # no details provider nor in db, fetch them from the provider
            details = await self.get_provider_item(provider_item_id, provider_id)
        if not details and provider:
            # check providers for given provider type one by one
            for prov in self.mass.music.providers:
                if not prov.available:
                    continue
                if prov.type == provider:
                    try:
                        details = await self.get_provider_item(
                            provider_item_id, prov.id
                        )
                    except MediaNotFoundError:
                        pass
                    else:
                        break
        if not details:
            # we couldn't get a match from any of the providers, raise error
            # BUGFIX: `provider` may be None when only provider_id was given;
            # `provider.value or provider_id` raised AttributeError in that case
            raise MediaNotFoundError(
                f"Item not found: {provider.value if provider else provider_id}/{provider_item_id}"
            )
        # create job to add the item to the db, including matching metadata etc. takes some time
        # in 99% of the cases we just return lazy because we want the details as fast as possible
        # only if we really need to wait for the result (e.g. to prevent race conditions), we
        # can set lazy to false and we await to job to complete.
        add_job = self.mass.add_job(
            self.add(details),
            f"Add {details.uri} to database",
        )
        if not lazy:
            await add_job.wait()
            return add_job.result

        return details

    async def search(
        self,
        search_query: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ) -> List[ItemCls]:
        """Search database or provider with given query."""
        # create safe search string
        search_query = search_query.replace("/", " ").replace("'", "")
        if provider == ProviderType.DATABASE or provider_id == "database":
            return [
                self.item_cls.from_db_row(db_row)
                for db_row in await self.mass.database.search(
                    self.db_table, search_query
                )
            ]

        prov = self.mass.music.get_provider(provider_id or provider)
        if not prov or MusicProviderFeature.SEARCH not in prov.supported_features:
            return []
        if not prov.library_supported(self.media_type):
            # assume library supported also means that this mediatype is supported
            return []

        # prefer cache items (if any)
        cache_key = (
            f"{prov.type.value}.search.{self.media_type.value}.{search_query}.{limit}"
        )
        if cache := await self.mass.cache.get(cache_key):
            return [media_from_dict(x) for x in cache]
        # no items in cache - get listing from provider
        items = await prov.search(
            search_query,
            [self.media_type],
            limit,
        )
        # store (serializable items) in cache
        if not prov.type.is_file():  # do not cache filesystem results
            self.mass.create_task(
                self.mass.cache.set(
                    cache_key, [x.to_dict() for x in items], expiration=86400 * 7
                )
            )
        return items

    async def add_to_library(
        self,
        provider_item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> None:
        """Add an item to the library."""
        prov_item = await self.get_db_item_by_prov_id(
            provider_item_id, provider=provider, provider_id=provider_id
        )
        if prov_item is None:
            prov_item = await self.get_provider_item(
                provider_item_id, provider_id or provider
            )
        if prov_item.in_library is True:
            return
        # mark as favorite/library item on provider(s)
        for prov_id in prov_item.provider_ids:
            if prov := self.mass.music.get_provider(prov_id.prov_id):
                if not prov.library_edit_supported(self.media_type):
                    continue
                await prov.library_add(prov_id.item_id, self.media_type)
        # mark as library item in internal db if db item
        if prov_item.provider == ProviderType.DATABASE:
            if not prov_item.in_library:
                prov_item.in_library = True
                await self.set_db_library(prov_item.item_id, True)

    async def remove_from_library(
        self,
        provider_item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> None:
        """Remove item from the library."""
        prov_item = await self.get_db_item_by_prov_id(
            provider_item_id, provider=provider, provider_id=provider_id
        )
        if prov_item is None:
            prov_item = await self.get_provider_item(
                provider_item_id, provider_id or provider
            )
        if prov_item.in_library is False:
            return
        # unmark as favorite/library item on provider(s)
        for prov_id in prov_item.provider_ids:
            if prov := self.mass.music.get_provider(prov_id.prov_id):
                if not prov.library_edit_supported(self.media_type):
                    continue
                await prov.library_remove(prov_id.item_id, self.media_type)
        # unmark as library item in internal db if db item
        if prov_item.provider == ProviderType.DATABASE:
            prov_item.in_library = False
            await self.set_db_library(prov_item.item_id, False)

    async def get_provider_id(self, item: ItemCls) -> Tuple[str, str]:
        """Return (first) provider and item id."""
        if item.provider == ProviderType.DATABASE:
            # make sure we have a full object
            item = await self.get_db_item(item.item_id)
        for prefer_file in (True, False):
            for prov in item.provider_ids:
                # returns the first provider that is available
                if not prov.available:
                    continue
                if prefer_file and not prov.prov_type.is_file():
                    continue
                if self.mass.music.get_provider(prov.prov_id):
                    return (prov.prov_id, prov.item_id)
        return None, None

    async def get_db_items_by_query(
        self,
        custom_query: Optional[str] = None,
        query_params: Optional[dict] = None,
        limit: int = 500,
        offset: int = 0,
    ) -> List[ItemCls]:
        """Fetch MediaItem records from database given a custom query."""
        return [
            self.item_cls.from_db_row(db_row)
            for db_row in await self.mass.database.get_rows_from_query(
                custom_query, query_params, limit=limit, offset=offset
            )
        ]

    async def get_db_item(self, item_id: Union[int, str]) -> ItemCls:
        """Get record by id."""
        match = {"item_id": int(item_id)}
        if db_row := await self.mass.database.get_row(self.db_table, match):
            return self.item_cls.from_db_row(db_row)
        # BUGFIX: this base class serves all media types - the error message
        # previously always said "Album"
        raise MediaNotFoundError(
            f"{self.media_type.value} not found in database: {item_id}"
        )

    async def get_db_item_by_prov_id(
        self,
        provider_item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> ItemCls | None:
        """Get the database item for the given prov_id."""
        assert provider or provider_id, "provider or provider_id must be supplied"
        if isinstance(provider, str):
            provider = ProviderType(provider)
        if provider == ProviderType.DATABASE or provider_id == "database":
            return await self.get_db_item(provider_item_id)
        for item in await self.get_db_items_by_prov_id(
            provider=provider,
            provider_id=provider_id,
            provider_item_ids=(provider_item_id,),
        ):
            return item
        return None

    async def get_db_items_by_prov_id(
        self,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        provider_item_ids: Optional[Tuple[str]] = None,
        limit: int = 500,
        offset: int = 0,
    ) -> List[ItemCls]:
        """Fetch all records from database for given provider."""
        assert provider or provider_id, "provider or provider_id must be supplied"
        if isinstance(provider, str):
            provider = ProviderType(provider)
        if provider == ProviderType.DATABASE or provider_id == "database":
            return await self.get_db_items_by_query(limit=limit, offset=offset)

        query = f"SELECT * FROM {self.db_table}, json_each(provider_ids)"
        if provider_id is not None:
            query += (
                f" WHERE json_extract(json_each.value, '$.prov_id') = '{provider_id}'"
            )
        elif provider is not None:
            query += f" WHERE json_extract(json_each.value, '$.prov_type') = '{provider.value}'"
        if provider_item_ids is not None:
            # NOTE: item ids are interpolated into the SQL string; these are
            # internal/provider values, not direct user input
            prov_ids = str(tuple(provider_item_ids))
            if prov_ids.endswith(",)"):
                prov_ids = prov_ids.replace(",)", ")")
            query += f" AND json_extract(json_each.value, '$.item_id') in {prov_ids}"

        return await self.get_db_items_by_query(query, limit=limit, offset=offset)

    async def set_db_library(self, item_id: int, in_library: bool) -> None:
        """Set the in-library bool on a database item."""
        match = {"item_id": item_id}
        timestamp = int(time()) if in_library else 0
        await self.mass.database.update(
            self.db_table, match, {"in_library": in_library, "timestamp": timestamp}
        )
        db_item = await self.get_db_item(item_id)
        self.mass.signal_event(
            MassEvent(EventType.MEDIA_ITEM_UPDATED, db_item.uri, db_item)
        )

    async def get_provider_item(
        self,
        item_id: str,
        provider_id: Union[str, ProviderType],
    ) -> ItemCls:
        """Return item details for the given provider item id."""
        if provider_id in ("database", ProviderType.DATABASE):
            item = await self.get_db_item(item_id)
        else:
            provider = self.mass.music.get_provider(provider_id)
            item = await provider.get_item(self.media_type, item_id)
        if not item:
            raise MediaNotFoundError(
                f"{self.media_type.value} {item_id} not found on provider {provider.name}"
            )
        return item

    async def remove_prov_mapping(self, item_id: int, prov_id: str) -> None:
        """Remove provider id(s) from item."""
        try:
            db_item = await self.get_db_item(item_id)
        except MediaNotFoundError:
            # edge case: already deleted / race condition
            return

        db_item.provider_ids = {x for x in db_item.provider_ids if x.prov_id != prov_id}
        if not db_item.provider_ids:
            # item has no more provider_ids left, it is completely deleted
            try:
                await self.delete_db_item(db_item.item_id)
            except AssertionError:
                self.logger.debug(
                    "Could not delete %s: it has items attached", db_item.item_id
                )
            return

        # update the item in db (provider_ids column only)
        match = {"item_id": item_id}
        await self.mass.database.update(
            self.db_table,
            match,
            {"provider_ids": json_serializer(db_item.provider_ids)},
        )
        self.mass.signal_event(
            MassEvent(EventType.MEDIA_ITEM_UPDATED, db_item.uri, db_item)
        )

        self.logger.debug("removed provider %s from item id %s", prov_id, item_id)

    async def delete_db_item(self, item_id: int, recursive: bool = False) -> None:
        """Delete record from the database."""
        db_item = await self.get_db_item(item_id)
        assert db_item, f"Item does not exist: {item_id}"
        # delete item
        await self.mass.database.delete(
            self.db_table,
            {"item_id": int(item_id)},
        )
        # NOTE: this does not delete any references to this item in other records,
        # this is handled/overridden in the mediatype specific controllers
        self.mass.signal_event(
            MassEvent(EventType.MEDIA_ITEM_DELETED, db_item.uri, db_item)
        )
        self.logger.debug("deleted item with id %s from database", item_id)

    async def dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ) -> List[Track]:
        """Return a dynamic list of tracks based on the given item."""
        ref_item = await self.get(item_id, provider, provider_id)
        for prov_id in ref_item.provider_ids:
            prov = self.mass.music.get_provider(prov_id.prov_id)
            # BUGFIX: get_provider may return a falsy value - guard before
            # dereferencing .available
            if not prov or not prov.available:
                continue
            if MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features:
                continue
            return await self._get_provider_dynamic_tracks(
                item_id=prov_id.item_id,
                provider=prov_id.prov_type,
                provider_id=prov_id.prov_id,
                limit=limit,
            )
        # Fallback to the default implementation
        # BUGFIX: forward the caller's limit (was silently using the default)
        return await self._get_dynamic_tracks(ref_item, limit=limit)

    @abstractmethod
    async def _get_provider_dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ) -> List[Track]:
        """Generate a dynamic list of tracks based on the item's content."""

    @abstractmethod
    async def _get_dynamic_tracks(
        self, media_item: ItemCls, limit: int = 25
    ) -> List[Track]:
        """Get dynamic list of tracks for given item, fallback/default implementation."""
--- /dev/null
+"""Manage MediaItems of type Playlist."""
from __future__ import annotations

from ctypes import Union  # noqa: F401  # BUG: wrong module; shadowed by typing.Union below
from random import choice, random
from time import time
from typing import Any, List, Optional, Tuple, Union

from music_assistant.controllers.database import TABLE_PLAYLISTS
from music_assistant.helpers.json import json_serializer
from music_assistant.helpers.uri import create_uri
from music_assistant.models.enums import (
    EventType,
    MediaType,
    MusicProviderFeature,
    ProviderType,
)
from music_assistant.models.errors import (
    InvalidDataError,
    MediaNotFoundError,
    ProviderUnavailableError,
    UnsupportedFeaturedException,
)
from music_assistant.models.event import MassEvent
from music_assistant.models.media_items import Playlist, Track

from .base import MediaControllerBase
+
+
class PlaylistController(MediaControllerBase[Playlist]):
    """Controller managing MediaItems of type Playlist."""

    db_table = TABLE_PLAYLISTS
    media_type = MediaType.PLAYLIST
    item_cls = Playlist

    async def get_playlist_by_name(self, name: str) -> Playlist | None:
        """Get in-library playlist by name."""
        # NOTE(review): this returns the raw database row mapping, not a
        # Playlist model instance - confirm callers expect the row.
        return await self.mass.database.get_row(self.db_table, {"name": name})

    async def tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> List[Track]:
        """Return playlist tracks for the given provider playlist id."""
        playlist = await self.get(item_id, provider, provider_id)
        # a playlist is currently attached to a single provider
        prov = next(iter(playlist.provider_ids))
        return await self._get_provider_playlist_tracks(
            prov.item_id,
            provider=prov.prov_type,
            provider_id=prov.prov_id,
            # the metadata checksum invalidates the cached tracklisting
            cache_checksum=playlist.metadata.checksum,
        )

    async def add(self, item: Playlist) -> Playlist:
        """Add playlist to local db and return the new database item."""
        item.metadata.last_refresh = int(time())
        await self.mass.metadata.get_playlist_metadata(item)
        existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
        if existing:
            db_item = await self.update_db_item(existing.item_id, item)
        else:
            db_item = await self.add_db_item(item)
        # notify listeners whether this was an update or a fresh addition
        self.mass.signal_event(
            MassEvent(
                EventType.MEDIA_ITEM_UPDATED
                if existing
                else EventType.MEDIA_ITEM_ADDED,
                db_item.uri,
                db_item,
            )
        )
        return db_item

    async def create(
        self, name: str, prov_id: ProviderType | str | None = None
    ) -> Playlist:
        """Create new playlist.

        :param name: name for the new playlist.
        :param prov_id: optional provider (type or instance id) to create the
            playlist on. When omitted, prefer the local filesystem provider and
            fall back to the first provider supporting playlist creation.
        :raises ProviderUnavailableError: if no capable provider is available.
        """
        # BUGFIX: the annotation previously used `Union` imported from ctypes
        # (a struct base class), not typing.Union - replaced with PEP 604
        # syntax (annotations are lazy via `from __future__ import annotations`).
        if prov_id:
            provider = self.mass.music.get_provider(prov_id)
        else:
            # if prov_id is omitted, prefer file
            try:
                provider = self.mass.music.get_provider(ProviderType.FILESYSTEM_LOCAL)
            except ProviderUnavailableError:
                provider = next(
                    (
                        x
                        for x in self.mass.music.providers
                        if MusicProviderFeature.PLAYLIST_CREATE in x.supported_features
                    ),
                    None,
                )
        if provider is None:
            raise ProviderUnavailableError(
                "No provider available which allows playlists creation."
            )

        return await provider.create_playlist(name)

    async def add_playlist_tracks(self, db_playlist_id: str, uris: List[str]) -> None:
        """Add multiple tracks to playlist. Creates background tasks to process the action."""
        playlist = await self.get_db_item(db_playlist_id)
        if not playlist:
            raise MediaNotFoundError(f"Playlist with id {db_playlist_id} not found")
        if not playlist.is_editable:
            raise InvalidDataError(f"Playlist {playlist.name} is not editable")
        for uri in uris:
            # schedule one background job per track so failures are isolated
            job_desc = f"Add track {uri} to playlist {playlist.name}"
            self.mass.add_job(self.add_playlist_track(db_playlist_id, uri), job_desc)

    async def add_playlist_track(self, db_playlist_id: str, track_uri: str) -> None:
        """Add track to playlist - make sure we dont add duplicates."""
        # we can only edit playlists that are in the database (marked as editable)
        playlist = await self.get_db_item(db_playlist_id)
        if not playlist:
            raise MediaNotFoundError(f"Playlist with id {db_playlist_id} not found")
        if not playlist.is_editable:
            raise InvalidDataError(f"Playlist {playlist.name} is not editable")
        # make sure we have recent full track details
        track = await self.mass.music.get_item_by_uri(track_uri, lazy=False)
        assert track.media_type == MediaType.TRACK
        # a playlist can only have one provider (for now)
        playlist_prov = next(iter(playlist.provider_ids))
        # grab all existing track ids in the playlist so we can check for duplicates
        cur_playlist_track_ids = set()
        for item in await self.tracks(playlist_prov.item_id, playlist_prov.prov_type):
            cur_playlist_track_ids.update(
                {
                    i.item_id
                    for i in item.provider_ids
                    if i.prov_id == playlist_prov.prov_id
                }
            )
        # check for duplicates
        for track_prov in track.provider_ids:
            if (
                track_prov.prov_type == playlist_prov.prov_type
                and track_prov.item_id in cur_playlist_track_ids
            ):
                # BUGFIX: message was missing the f-prefix and rendered the
                # placeholder literally
                raise InvalidDataError(
                    f"Track already exists in playlist {playlist.name}"
                )
        # add track to playlist
        # we can only add a track to a provider playlist if track is available on that provider
        # a track can contain multiple versions on the same provider
        # simply sort by quality and just add the first one (assuming track is still available)
        track_id_to_add = None
        for track_version in sorted(
            track.provider_ids, key=lambda x: x.quality, reverse=True
        ):
            # BUGFIX: check availability of this specific version - the
            # original tested the loop-invariant `track.available`
            if not track_version.available:
                continue
            if playlist_prov.prov_type.is_file():
                # the file provider can handle uri's from all providers so simply add the uri
                track_id_to_add = track_version.url or create_uri(
                    MediaType.TRACK,
                    track_version.prov_type,
                    track_version.item_id,
                )
                break
            if track_version.prov_type == playlist_prov.prov_type:
                track_id_to_add = track_version.item_id
                break
        if not track_id_to_add:
            raise MediaNotFoundError(
                f"Track is not available on provider {playlist_prov.prov_type}"
            )
        # actually add the tracks to the playlist on the provider
        provider = self.mass.music.get_provider(playlist_prov.prov_id)
        await provider.add_playlist_tracks(playlist_prov.item_id, [track_id_to_add])
        # invalidate cache by updating the checksum
        await self.get(
            db_playlist_id, provider=ProviderType.DATABASE, force_refresh=True
        )

    async def remove_playlist_tracks(
        self, db_playlist_id: str, positions_to_remove: Tuple[int, ...]
    ) -> None:
        """Remove multiple tracks from playlist by their (0-based) positions."""
        playlist = await self.get_db_item(db_playlist_id)
        if not playlist:
            raise MediaNotFoundError(f"Playlist with id {db_playlist_id} not found")
        if not playlist.is_editable:
            raise InvalidDataError(f"Playlist {playlist.name} is not editable")
        for prov in playlist.provider_ids:
            provider = self.mass.music.get_provider(prov.prov_id)
            if (
                MusicProviderFeature.PLAYLIST_TRACKS_EDIT
                not in provider.supported_features
            ):
                # best-effort: skip (and log) providers without edit support
                self.logger.warning(
                    "Provider %s does not support editing playlists",
                    prov.prov_type.value,
                )
                continue
            await provider.remove_playlist_tracks(prov.item_id, positions_to_remove)
        # invalidate cache by updating the checksum
        await self.get(
            db_playlist_id, provider=ProviderType.DATABASE, force_refresh=True
        )

    async def add_db_item(
        self, item: Playlist, overwrite_existing: bool = False
    ) -> Playlist:
        """Add a new record to the database."""
        async with self._db_add_lock:
            # playlists are matched on name+owner (no reliable external ids)
            match = {"name": item.name, "owner": item.owner}
            if cur_item := await self.mass.database.get_row(self.db_table, match):
                # update existing
                return await self.update_db_item(
                    cur_item["item_id"], item, overwrite=overwrite_existing
                )

            # insert new item
            new_item = await self.mass.database.insert(self.db_table, item.to_db_row())
            item_id = new_item["item_id"]
            self.logger.debug("added %s to database", item.name)
            # return created object
            return await self.get_db_item(item_id)

    async def update_db_item(
        self,
        item_id: int,
        item: Playlist,
        overwrite: bool = False,
    ) -> Playlist:
        """Update Playlist record in the database.

        :param overwrite: when True, replace metadata/provider ids instead of merging.
        """
        cur_item = await self.get_db_item(item_id)
        if overwrite:
            metadata = item.metadata
            provider_ids = item.provider_ids
        else:
            metadata = cur_item.metadata.update(item.metadata)
            provider_ids = {*cur_item.provider_ids, *item.provider_ids}

        await self.mass.database.update(
            self.db_table,
            {"item_id": item_id},
            {
                # always prefer name/owner from updated item here
                "name": item.name,
                "sort_name": item.sort_name,
                "owner": item.owner,
                "is_editable": item.is_editable,
                "metadata": json_serializer(metadata),
                "provider_ids": json_serializer(provider_ids),
            },
        )
        self.logger.debug("updated %s in database: %s", item.name, item_id)
        return await self.get_db_item(item_id)

    async def _get_provider_playlist_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        cache_checksum: Any = None,
    ) -> List[Track]:
        """Return playlist tracks for the given provider playlist id."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if not prov:
            return []
        # prefer cache items (if any)
        cache_key = f"{prov.id}.playlist.{item_id}.tracks"
        if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
            return [Track.from_dict(x) for x in cache]
        # no items in cache - get listing from provider
        items = await prov.get_playlist_tracks(item_id)
        # double check if position set
        if items:
            assert (
                items[0].position is not None
            ), "Playlist items require position to be set"
        # store (serializable items) in cache
        self.mass.create_task(
            self.mass.cache.set(
                cache_key, [x.to_dict() for x in items], checksum=cache_checksum
            )
        )
        return items

    async def _get_provider_dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ) -> List[Track]:
        """Generate a dynamic list of tracks based on the playlist content."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if (
            not prov
            or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
        ):
            return []
        playlist_tracks = await self._get_provider_playlist_tracks(
            item_id=item_id, provider=provider, provider_id=provider_id
        )
        # BUGFIX: guard against an empty playlist - choice() raises IndexError
        # on an empty sequence
        if not playlist_tracks:
            return []
        # Grab a random track from the playlist that we use to obtain similar tracks for
        track = choice(playlist_tracks)
        # Calculate no of songs to grab from each list at a 50/50 ratio
        total_no_of_tracks = limit + limit % 2
        tracks_per_list = total_no_of_tracks // 2
        # Grab similar tracks from the music provider
        similar_tracks = await prov.get_similar_tracks(
            prov_track_id=track.item_id, limit=tracks_per_list
        )
        # Merge playlist content with similar tracks (shuffled samples of each)
        dynamic_playlist = [
            *sorted(playlist_tracks, key=lambda n: random())[:tracks_per_list],
            *sorted(similar_tracks, key=lambda n: random())[:tracks_per_list],
        ]
        # final shuffle of the combined result
        return sorted(dynamic_playlist, key=lambda n: random())

    async def _get_dynamic_tracks(
        self, media_item: Playlist, limit: int = 25
    ) -> List[Track]:
        """Get dynamic list of tracks for given item, fallback/default implementation."""
        # TODO: query metadata provider(s) to get similar tracks (or tracks from similar artists)
        raise UnsupportedFeaturedException(
            "No Music Provider found that supports requesting similar tracks."
        )
--- /dev/null
+"""Manage MediaItems of type Radio."""
+from __future__ import annotations
+
+import asyncio
+from time import time
+from typing import List, Optional
+
+from music_assistant.controllers.database import TABLE_RADIOS
+from music_assistant.helpers.compare import loose_compare_strings
+from music_assistant.helpers.json import json_serializer
+from music_assistant.models.enums import EventType, MediaType, ProviderType
+from music_assistant.models.event import MassEvent
+from music_assistant.models.media_items import Radio, Track
+
+from .base import MediaControllerBase
+
+
class RadioController(MediaControllerBase[Radio]):
    """Controller managing MediaItems of type Radio."""

    db_table = TABLE_RADIOS
    media_type = MediaType.RADIO
    item_cls = Radio

    async def get_radio_by_name(self, name: str) -> Radio | None:
        """Get in-library radio by name."""
        # NOTE(review): this returns the raw database row mapping, not a
        # Radio model instance - confirm callers expect the row.
        return await self.mass.database.get_row(self.db_table, {"name": name})

    async def versions(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> List[Radio]:
        """Return all versions of a radio station we can find on all providers."""
        assert provider or provider_id, "Provider type or ID must be specified"
        radio = await self.get(item_id, provider, provider_id)
        # perform a search on all provider(types) to collect all versions/variants
        prov_types = {item.type for item in self.mass.music.providers}
        all_versions = {
            prov_item.item_id: prov_item
            for prov_items in await asyncio.gather(
                *[self.search(radio.name, prov_type) for prov_type in prov_types]
            )
            for prov_item in prov_items
            if loose_compare_strings(radio.name, prov_item.name)
        }
        # make sure that the 'base' version is included
        for prov_version in radio.provider_ids:
            if prov_version.item_id in all_versions:
                continue
            radio_copy = Radio.from_dict(radio.to_dict())
            radio_copy.item_id = prov_version.item_id
            radio_copy.provider = prov_version.prov_type
            radio_copy.provider_ids = {prov_version}
            all_versions[prov_version.item_id] = radio_copy

        # return the aggregated result
        # BUGFIX: return an actual list to match the annotated return type
        # (dict.values() is a view object, not a List)
        return list(all_versions.values())

    async def add(self, item: Radio) -> Radio:
        """Add radio to local db and return the new database item."""
        item.metadata.last_refresh = int(time())
        await self.mass.metadata.get_radio_metadata(item)
        existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
        if existing:
            db_item = await self.update_db_item(existing.item_id, item)
        else:
            db_item = await self.add_db_item(item)
        # notify listeners whether this was an update or a fresh addition
        self.mass.signal_event(
            MassEvent(
                EventType.MEDIA_ITEM_UPDATED
                if existing
                else EventType.MEDIA_ITEM_ADDED,
                db_item.uri,
                db_item,
            )
        )
        return db_item

    async def add_db_item(self, item: Radio, overwrite_existing: bool = False) -> Radio:
        """Add a new item record to the database."""
        assert item.provider_ids
        async with self._db_add_lock:
            # radios are matched on name only (no reliable external ids)
            match = {"name": item.name}
            if cur_item := await self.mass.database.get_row(self.db_table, match):
                # update existing
                return await self.update_db_item(
                    cur_item["item_id"], item, overwrite=overwrite_existing
                )

            # insert new item
            new_item = await self.mass.database.insert(self.db_table, item.to_db_row())
            item_id = new_item["item_id"]
            self.logger.debug("added %s to database", item.name)
            # return created object
            return await self.get_db_item(item_id)

    async def update_db_item(
        self,
        item_id: int,
        item: Radio,
        overwrite: bool = False,
    ) -> Radio:
        """Update Radio record in the database.

        :param overwrite: when True, replace metadata/provider ids instead of merging.
        """
        cur_item = await self.get_db_item(item_id)
        if overwrite:
            metadata = item.metadata
            provider_ids = item.provider_ids
        else:
            metadata = cur_item.metadata.update(item.metadata)
            provider_ids = {*cur_item.provider_ids, *item.provider_ids}

        match = {"item_id": item_id}
        await self.mass.database.update(
            self.db_table,
            match,
            {
                # always prefer name from updated item here
                "name": item.name,
                "sort_name": item.sort_name,
                "metadata": json_serializer(metadata),
                "provider_ids": json_serializer(provider_ids),
            },
        )
        self.logger.debug("updated %s in database: %s", item.name, item_id)
        return await self.get_db_item(item_id)

    async def _get_provider_dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ) -> List[Track]:
        """Generate a dynamic list of tracks based on the item's content."""
        raise NotImplementedError("Dynamic tracks not supported for Radio MediaItem")

    async def _get_dynamic_tracks(
        self, media_item: Radio, limit: int = 25
    ) -> List[Track]:
        """Get dynamic list of tracks for given item, fallback/default implementation."""
        raise NotImplementedError("Dynamic tracks not supported for Radio MediaItem")
--- /dev/null
+"""Manage MediaItems of type Track."""
+from __future__ import annotations
+
+import asyncio
+from typing import List, Optional, Union
+
+from music_assistant.controllers.database import TABLE_TRACKS
+from music_assistant.helpers.compare import (
+ compare_artists,
+ compare_track,
+ loose_compare_strings,
+)
+from music_assistant.helpers.json import json_serializer
+from music_assistant.models.enums import (
+ EventType,
+ MediaType,
+ MusicProviderFeature,
+ ProviderType,
+)
+from music_assistant.models.errors import (
+ MediaNotFoundError,
+ UnsupportedFeaturedException,
+)
+from music_assistant.models.event import MassEvent
+from music_assistant.models.media_items import (
+ Album,
+ Artist,
+ ItemMapping,
+ Track,
+ TrackAlbumMapping,
+)
+
+from .base import MediaControllerBase
+
+
class TracksController(MediaControllerBase[Track]):
    """Controller managing MediaItems of type Track."""

    db_table = TABLE_TRACKS
    media_type = MediaType.TRACK
    item_cls = Track

    async def get(self, *args, **kwargs) -> Track:
        """Return (full) details for a single media item."""
        track = await super().get(*args, **kwargs)
        # append full album details to full track item
        if track.album:
            try:
                track.album = await self.mass.music.albums.get(
                    track.album.item_id, track.album.provider
                )
            except MediaNotFoundError:
                # edge case where playlist track has invalid albumdetails
                self.logger.warning("Unable to fetch album details %s", track.album.uri)
        # append full artist details to full track item
        track.artists = [
            await self.mass.music.artists.get(artist.item_id, artist.provider)
            for artist in track.artists
        ]
        return track

    async def add(self, item: Track) -> Track:
        """Add track to local db and return the new database item."""
        # make sure we have artists
        assert item.artists
        # grab additional metadata
        await self.mass.metadata.get_track_metadata(item)
        existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
        if existing:
            db_item = await self.update_db_item(existing.item_id, item)
        else:
            db_item = await self.add_db_item(item)
        # also fetch same track on all providers (will also get other quality versions)
        await self._match(db_item)
        # return final db_item after all match/metadata actions
        db_item = await self.get_db_item(db_item.item_id)
        self.mass.signal_event(
            MassEvent(
                EventType.MEDIA_ITEM_UPDATED
                if existing
                else EventType.MEDIA_ITEM_ADDED,
                db_item.uri,
                db_item,
            )
        )
        return db_item

    async def versions(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> List[Track]:
        """Return all versions of a track we can find on all providers."""
        assert provider or provider_id, "Provider type or ID must be specified"
        track = await self.get(item_id, provider, provider_id)
        # perform a search on all provider(types) to collect all versions/variants
        prov_types = {item.type for item in self.mass.music.providers}
        search_query = f"{track.artist.name} - {track.name}"
        all_versions = {
            prov_item.item_id: prov_item
            for prov_items in await asyncio.gather(
                *[self.search(search_query, prov_type) for prov_type in prov_types]
            )
            for prov_item in prov_items
            if loose_compare_strings(track.name, prov_item.name)
            and compare_artists(prov_item.artists, track.artists, any_match=True)
        }
        # make sure that the 'base' version is included
        for prov_version in track.provider_ids:
            if prov_version.item_id in all_versions:
                continue
            # grab full item here including album details etc
            prov_track = await self.get_provider_item(
                prov_version.item_id, prov_version.prov_id
            )
            all_versions[prov_version.item_id] = prov_track

        # return the aggregated result
        # BUGFIX: return an actual list to match the annotated return type
        # (dict.values() is a view object, not a List)
        return list(all_versions.values())

    async def _match(self, db_track: Track) -> None:
        """
        Try to find matching track on all providers for the provided (database) track_id.

        This is used to link objects of different providers/qualities together.
        """
        if db_track.provider != ProviderType.DATABASE:
            return  # Matching only supported for database items
        for provider in self.mass.music.providers:
            if MusicProviderFeature.SEARCH not in provider.supported_features:
                continue
            self.logger.debug(
                "Trying to match track %s on provider %s", db_track.name, provider.name
            )
            match_found = False
            # try several search-string shapes, most specific first
            for search_str in (
                db_track.name,
                f"{db_track.artists[0].name} - {db_track.name}",
                f"{db_track.artists[0].name} {db_track.name}",
            ):
                if match_found:
                    break
                search_result = await self.search(search_str, provider.type)
                for search_result_item in search_result:
                    if not search_result_item.available:
                        continue
                    if compare_track(search_result_item, db_track):
                        # 100% match, we can simply update the db with additional provider ids
                        match_found = True
                        await self.update_db_item(db_track.item_id, search_result_item)

            if not match_found:
                self.logger.debug(
                    "Could not find match for Track %s on provider %s",
                    db_track.name,
                    provider.name,
                )

    async def _get_provider_dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ) -> List[Track]:
        """Generate a dynamic list of tracks based on the track."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if (
            not prov
            or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
        ):
            return []
        # Grab similar tracks from the music provider
        similar_tracks = await prov.get_similar_tracks(
            prov_track_id=item_id, limit=limit
        )
        return similar_tracks

    async def _get_dynamic_tracks(
        self, media_item: Track, limit: int = 25
    ) -> List[Track]:
        """Get dynamic list of tracks for given item, fallback/default implementation."""
        # TODO: query metadata provider(s) to get similar tracks (or tracks from similar artists)
        raise UnsupportedFeaturedException(
            "No Music Provider found that supports requesting similar tracks."
        )

    async def add_db_item(self, item: Track, overwrite_existing: bool = False) -> Track:
        """Add a new item record to the database."""
        assert isinstance(item, Track), "Not a full Track object"
        assert item.artists, "Track is missing artist(s)"
        assert item.provider_ids, "Track is missing provider id(s)"
        async with self._db_add_lock:
            cur_item = None

            # always try to grab existing item by external_id
            if item.musicbrainz_id:
                match = {"musicbrainz_id": item.musicbrainz_id}
                cur_item = await self.mass.database.get_row(self.db_table, match)
            # BUGFIX: the isrc loop previously overwrote cur_item on every
            # iteration, clobbering a prior musicbrainz match with None and
            # discarding earlier isrc hits - now stop at the first match
            if not cur_item:
                for isrc in item.isrcs:
                    match = {"isrc": isrc}
                    if cur_item := await self.mass.database.get_row(
                        self.db_table, match
                    ):
                        break
            if not cur_item:
                # fallback to (fuzzy) matching on sort name
                match = {"sort_name": item.sort_name}
                for row in await self.mass.database.get_rows(self.db_table, match):
                    row_track = Track.from_db_row(row)
                    if compare_track(row_track, item):
                        cur_item = row_track
                        break
            if cur_item:
                # update existing
                return await self.update_db_item(
                    cur_item.item_id, item, overwrite=overwrite_existing
                )

            # no existing match found: insert new item
            track_artists = await self._get_track_artists(item)
            track_albums = await self._get_track_albums(
                item, overwrite=overwrite_existing
            )
            # denormalized sort columns for fast listings
            sort_artist = track_artists[0].sort_name if track_artists else ""
            sort_album = track_albums[0].sort_name if track_albums else ""
            new_item = await self.mass.database.insert(
                self.db_table,
                {
                    **item.to_db_row(),
                    "artists": json_serializer(track_artists),
                    "albums": json_serializer(track_albums),
                    "sort_artist": sort_artist,
                    "sort_album": sort_album,
                },
            )
            item_id = new_item["item_id"]
            # return created object
            self.logger.debug("added %s to database: %s", item.name, item_id)
            return await self.get_db_item(item_id)

    async def update_db_item(
        self,
        item_id: int,
        item: Track,
        overwrite: bool = False,
    ) -> Track:
        """Update Track record in the database, merging data."""
        cur_item = await self.get_db_item(item_id)

        if overwrite:
            metadata = item.metadata
            provider_ids = item.provider_ids
            # force a metadata refresh on next access
            metadata.last_refresh = None
            # we store a mapping to artists/albums on the item for easier access/listings
            track_artists = await self._get_track_artists(item, overwrite=True)
            track_albums = await self._get_track_albums(item, overwrite=True)
        else:
            metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
            provider_ids = {*cur_item.provider_ids, *item.provider_ids}
            track_artists = await self._get_track_artists(cur_item, item)
            track_albums = await self._get_track_albums(cur_item, item)

        await self.mass.database.update(
            self.db_table,
            {"item_id": item_id},
            {
                "name": item.name if overwrite else cur_item.name,
                "sort_name": item.sort_name if overwrite else cur_item.sort_name,
                "version": item.version if overwrite else cur_item.version,
                "duration": item.duration if overwrite else cur_item.duration,
                "artists": json_serializer(track_artists),
                "albums": json_serializer(track_albums),
                "metadata": json_serializer(metadata),
                "provider_ids": json_serializer(provider_ids),
                # NOTE(review): `isrc` (singular) while the model elsewhere
                # exposes `isrcs` (plural) - confirm the attribute exists
                "isrc": item.isrc or cur_item.isrc,
            },
        )
        self.logger.debug("updated %s in database: %s", item.name, item_id)
        return await self.get_db_item(item_id)

    async def _get_track_artists(
        self,
        base_track: Track,
        upd_track: Optional[Track] = None,
        overwrite: bool = False,
    ) -> List[ItemMapping]:
        """Extract all (unique) artists of track as ItemMapping."""
        if upd_track and upd_track.artists:
            track_artists = upd_track.artists
        else:
            track_artists = base_track.artists
        # use intermediate set to clear out duplicates
        return list(
            {await self._get_artist_mapping(x, overwrite) for x in track_artists}
        )

    async def _get_track_albums(
        self,
        base_track: Track,
        upd_track: Optional[Track] = None,
        overwrite: bool = False,
    ) -> List[TrackAlbumMapping]:
        """Extract all (unique) albums of track as TrackAlbumMapping."""
        track_albums: List[TrackAlbumMapping] = []
        # existing TrackAlbumMappings are starting point
        if base_track.albums:
            track_albums = base_track.albums
        elif upd_track and upd_track.albums:
            track_albums = upd_track.albums
        # append update item album if needed
        if upd_track and upd_track.album:
            mapping = await self._get_album_mapping(
                upd_track.album, overwrite=overwrite
            )
            mapping = TrackAlbumMapping.from_dict(
                {
                    **mapping.to_dict(),
                    "disc_number": upd_track.disc_number,
                    "track_number": upd_track.track_number,
                }
            )
            if mapping not in track_albums:
                track_albums.append(mapping)
        # append base item album if needed
        elif base_track and base_track.album:
            mapping = await self._get_album_mapping(
                base_track.album, overwrite=overwrite
            )
            mapping = TrackAlbumMapping.from_dict(
                {
                    **mapping.to_dict(),
                    "disc_number": base_track.disc_number,
                    "track_number": base_track.track_number,
                }
            )
            if mapping not in track_albums:
                track_albums.append(mapping)

        return track_albums

    async def _get_album_mapping(
        self,
        album: Union[Album, ItemMapping],
        overwrite: bool = False,
    ) -> ItemMapping:
        """Extract (database) album as ItemMapping."""
        # database items map directly
        if album.provider == ProviderType.DATABASE:
            if isinstance(album, ItemMapping):
                return album
            return ItemMapping.from_item(album)

        if overwrite:
            # forced refresh: (re)add and use the result directly
            # (the original discarded this result and re-fetched it)
            db_album = await self.mass.music.albums.add_db_item(
                album, overwrite_existing=True
            )
            return ItemMapping.from_item(db_album)

        if db_album := await self.mass.music.albums.get_db_item_by_prov_id(
            album.item_id, provider=album.provider
        ):
            return ItemMapping.from_item(db_album)

        db_album = await self.mass.music.albums.add_db_item(
            album, overwrite_existing=overwrite
        )
        return ItemMapping.from_item(db_album)

    async def _get_artist_mapping(
        self, artist: Union[Artist, ItemMapping], overwrite: bool = False
    ) -> ItemMapping:
        """Extract (database) track artist as ItemMapping."""
        # database items map directly
        if artist.provider == ProviderType.DATABASE:
            if isinstance(artist, ItemMapping):
                return artist
            return ItemMapping.from_item(artist)

        if overwrite:
            # forced refresh: (re)add and use the result directly
            db_artist = await self.mass.music.artists.add_db_item(
                artist, overwrite_existing=True
            )
            return ItemMapping.from_item(db_artist)

        if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
            artist.item_id, provider=artist.provider
        ):
            return ItemMapping.from_item(db_artist)

        # CONSISTENCY: forward overwrite like _get_album_mapping does
        db_artist = await self.mass.music.artists.add_db_item(
            artist, overwrite_existing=overwrite
        )
        return ItemMapping.from_item(db_artist)
-"""All logic for metadata retrieval."""
-from __future__ import annotations
+"""Package with Metadata controller and providers."""
-from base64 import b64encode
-from time import time
-from typing import TYPE_CHECKING, Optional
-
-from music_assistant.helpers.database import TABLE_THUMBS
-from music_assistant.helpers.images import create_collage, create_thumbnail
-from music_assistant.models.enums import ImageType, MediaType
-from music_assistant.models.media_items import (
- Album,
- Artist,
- ItemMapping,
- MediaItemImage,
- MediaItemType,
- Playlist,
- Radio,
- Track,
-)
-
-from .audiodb import TheAudioDb
-from .fanarttv import FanartTv
-from .musicbrainz import MusicBrainz
-
-if TYPE_CHECKING:
- from music_assistant.mass import MusicAssistant
-
-
-class MetaDataController:
- """Several helpers to search and store metadata for mediaitems."""
-
    def __init__(self, mass: MusicAssistant) -> None:
        """Initialize class."""
        self.mass = mass
        # shortcut to the global cache controller
        self.cache = mass.cache
        self.logger = mass.logger.getChild("metadata")
        # third-party metadata provider clients
        self.fanarttv = FanartTv(mass)
        self.musicbrainz = MusicBrainz(mass)
        self.audiodb = TheAudioDb(mass)
        # preferred metadata language (2-letter code), set once via the property setter
        self._pref_lang: Optional[str] = None
-
    @property
    def preferred_language(self) -> str:
        """
        Return preferred language for metadata as 2 letter country code (uppercase).

        Defaults to English (EN).
        """
        # falls back to "EN" until the setter has been called
        return self._pref_lang or "EN"
-
    @preferred_language.setter
    def preferred_language(self, lang: str) -> None:
        """
        Set preferred language to 2 letter country code.

        Can only be set once.
        """
        # subsequent assignments are silently ignored (first write wins)
        if self._pref_lang is None:
            self._pref_lang = lang.upper()
-
    async def setup(self) -> None:
        """Async initialize of module."""
        # no async setup required (yet); present for controller interface symmetry
-
    async def get_artist_metadata(self, artist: Artist) -> None:
        """Get/update rich metadata for an artist (mutates the given object)."""
        # set timestamp, used to determine when this function was last called
        artist.metadata.last_refresh = int(time())

        # try to resolve a musicbrainz id first, as the external
        # providers below are queried by musicbrainz id
        if not artist.musicbrainz_id:
            artist.musicbrainz_id = await self.get_artist_musicbrainz_id(artist)

        if artist.musicbrainz_id:
            if metadata := await self.fanarttv.get_artist_metadata(artist):
                artist.metadata.update(metadata)
            if metadata := await self.audiodb.get_artist_metadata(artist):
                artist.metadata.update(metadata)
-
    async def get_album_metadata(self, album: Album) -> None:
        """Get/update rich metadata for an album (mutates the given object)."""
        # set timestamp, used to determine when this function was last called
        album.metadata.last_refresh = int(time())

        # a lookup needs at least a musicbrainz id or an artist reference
        if not (album.musicbrainz_id or album.artist):
            return
        if metadata := await self.audiodb.get_album_metadata(album):
            album.metadata.update(metadata)
        if metadata := await self.fanarttv.get_album_metadata(album):
            album.metadata.update(metadata)
-
    async def get_track_metadata(self, track: Track) -> None:
        """Get/update rich metadata for a track (mutates the given object)."""
        # set timestamp, used to determine when this function was last called
        track.metadata.last_refresh = int(time())

        # audiodb lookup requires both album and artist details
        if not (track.album and track.artists):
            return
        if metadata := await self.audiodb.get_track_metadata(track):
            track.metadata.update(metadata)
-
    async def get_playlist_metadata(self, playlist: Playlist) -> None:
        """Get/update rich metadata for a playlist (mutates the given object)."""
        # set timestamp, used to determine when this function was last called
        playlist.metadata.last_refresh = int(time())
        # retrieve genres from tracks
        # TODO: retrieve style/mood ?
        playlist.metadata.genres = set()
        image_urls = set()
        for track in await self.mass.music.playlists.tracks(
            playlist.item_id, playlist.provider
        ):
            # collect track images only when the playlist itself has no image
            if not playlist.image and track.image:
                image_urls.add(track.image.url)
            if track.media_type != MediaType.TRACK:
                # filter out radio items
                continue
            # prefer the track's own genres, fall back to its album's genres
            if track.metadata.genres:
                playlist.metadata.genres.update(track.metadata.genres)
            elif track.album and track.album.metadata.genres:
                playlist.metadata.genres.update(track.album.metadata.genres)
        # create collage thumb/fanart from playlist tracks
        if image_urls:
            # store the generated collage in the thumbs table under a synthetic path
            fake_path = f"playlist_collage.{playlist.provider.value}.{playlist.item_id}"
            collage = await create_collage(self.mass, list(image_urls))
            match = {"path": fake_path, "size": 0}
            await self.mass.database.insert(
                TABLE_THUMBS, {**match, "data": collage}, allow_replace=True
            )
            playlist.metadata.images = [
                MediaItemImage(ImageType.THUMB, fake_path, True)
            ]
-
- async def get_radio_metadata(self, radio: Radio) -> None:
- """Get/update rich metadata for a radio station."""
- # NOTE: we do not have any metadata for radio so consider this future proofing ;-)
- radio.metadata.last_refresh = int(time())
-
- async def get_artist_musicbrainz_id(self, artist: Artist) -> str | None:
- """Fetch musicbrainz id by performing search using the artist name, albums and tracks."""
- ref_albums = await self.mass.music.artists.albums(artist=artist)
- # first try audiodb
- if musicbrainz_id := await self.audiodb.get_musicbrainz_id(artist, ref_albums):
- return musicbrainz_id
- # try again with musicbrainz with albums with upc
- for ref_album in ref_albums:
- if ref_album.upc:
- if musicbrainz_id := await self.musicbrainz.get_mb_artist_id(
- artist.name,
- album_upc=ref_album.upc,
- ):
- return musicbrainz_id
- if ref_album.musicbrainz_id:
- if musicbrainz_id := await self.musicbrainz.search_artist_by_album_mbid(
- artist.name, ref_album.musicbrainz_id
- ):
- return musicbrainz_id
-
- # try again with matching on track isrc
- ref_tracks = await self.mass.music.artists.toptracks(artist=artist)
- for ref_track in ref_tracks:
- for isrc in ref_track.isrcs:
- if musicbrainz_id := await self.musicbrainz.get_mb_artist_id(
- artist.name,
- track_isrc=isrc,
- ):
- return musicbrainz_id
-
- # last restort: track matching by name
- for ref_track in ref_tracks:
- if musicbrainz_id := await self.musicbrainz.get_mb_artist_id(
- artist.name,
- trackname=ref_track.name,
- ):
- return musicbrainz_id
- # lookup failed
- ref_albums_str = "/".join(x.name for x in ref_albums) or "none"
- ref_tracks_str = "/".join(x.name for x in ref_tracks) or "none"
- self.logger.info(
- "Unable to get musicbrainz ID for artist %s\n"
- " - using lookup-album(s): %s\n"
- " - using lookup-track(s): %s\n",
- artist.name,
- ref_albums_str,
- ref_tracks_str,
- )
- return None
-
- async def get_image_data_for_item(
- self,
- media_item: MediaItemType,
- img_type: ImageType = ImageType.THUMB,
- size: int = 0,
- ) -> bytes | None:
- """Get image data for given MedaItem."""
- img_path = await self.get_image_url_for_item(
- media_item=media_item,
- img_type=img_type,
- allow_local=True,
- local_as_base64=False,
- )
- if not img_path:
- return None
- return await self.get_thumbnail(img_path, size)
-
- async def get_image_url_for_item(
- self,
- media_item: MediaItemType,
- img_type: ImageType = ImageType.THUMB,
- allow_local: bool = True,
- local_as_base64: bool = False,
- ) -> str | None:
- """Get url to image for given media media_item."""
- if not media_item:
- return None
- if isinstance(media_item, ItemMapping):
- media_item = await self.mass.music.get_item_by_uri(media_item.uri)
- if media_item and media_item.metadata.images:
- for img in media_item.metadata.images:
- if img.type != img_type:
- continue
- if img.is_file and not allow_local:
- continue
- if img.is_file and local_as_base64:
- # return base64 string of the image (compatible with browsers)
- return await self.get_thumbnail(img.url, base64=True)
- return img.url
-
- # retry with track's album
- if media_item.media_type == MediaType.TRACK and media_item.album:
- return await self.get_image_url_for_item(
- media_item.album, img_type, allow_local, local_as_base64
- )
-
- # try artist instead for albums
- if media_item.media_type == MediaType.ALBUM and media_item.artist:
- return await self.get_image_url_for_item(
- media_item.artist, img_type, allow_local, local_as_base64
- )
-
- # last resort: track artist(s)
- if media_item.media_type == MediaType.TRACK and media_item.artists:
- for artist in media_item.artists:
- return await self.get_image_url_for_item(
- artist, img_type, allow_local, local_as_base64
- )
-
- return None
-
- async def get_thumbnail(
- self, path: str, size: int = 0, base64: bool = False
- ) -> bytes | str:
- """Get/create thumbnail image for path (image url or local path)."""
- # check if we already have this cached in the db
- match_path = path.split("?")[0].split("&")[0]
- match = {"path": match_path, "size": size}
- if result := await self.mass.database.get_row(TABLE_THUMBS, match):
- thumbnail = result["data"]
- else:
- # create thumbnail if it doesn't exist
- thumbnail = await create_thumbnail(self.mass, path, size)
- await self.mass.database.insert(
- TABLE_THUMBS, {**match, "data": thumbnail}, allow_replace=True
- )
- if base64:
- enc_image = b64encode(thumbnail).decode()
- thumbnail = f"data:image/png;base64,{enc_image}"
- return thumbnail
+from .metadata import MetaDataController # noqa
import aiohttp
from asyncio_throttle import Throttler
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.app_vars import ( # pylint: disable=no-name-in-module
app_var,
)
-from music_assistant.helpers.cache import use_cache
from music_assistant.helpers.compare import compare_strings
from music_assistant.models.media_items import (
Album,
import aiohttp
from asyncio_throttle import Throttler
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.app_vars import ( # pylint: disable=no-name-in-module
app_var,
)
-from music_assistant.helpers.cache import use_cache
from music_assistant.models.media_items import (
Album,
Artist,
--- /dev/null
+"""All logic for metadata retrieval."""
+from __future__ import annotations
+
+from base64 import b64encode
+from time import time
+from typing import TYPE_CHECKING, Optional
+
+from music_assistant.controllers.database import TABLE_THUMBS
+from music_assistant.helpers.images import create_collage, create_thumbnail
+from music_assistant.models.enums import ImageType, MediaType
+from music_assistant.models.media_items import (
+ Album,
+ Artist,
+ ItemMapping,
+ MediaItemImage,
+ MediaItemType,
+ Playlist,
+ Radio,
+ Track,
+)
+
+from .audiodb import TheAudioDb
+from .fanarttv import FanartTv
+from .musicbrainz import MusicBrainz
+
+if TYPE_CHECKING:
+ from music_assistant.mass import MusicAssistant
+
+
+class MetaDataController:
+    """Several helpers to search and store metadata for mediaitems."""
+
+    def __init__(self, mass: MusicAssistant) -> None:
+        """Initialize class."""
+        self.mass = mass
+        self.cache = mass.cache
+        self.logger = mass.logger.getChild("metadata")
+        # third-party metadata provider helpers
+        self.fanarttv = FanartTv(mass)
+        self.musicbrainz = MusicBrainz(mass)
+        self.audiodb = TheAudioDb(mass)
+        # preferred metadata language; stays None until set via the property setter
+        self._pref_lang: Optional[str] = None
+
+    @property
+    def preferred_language(self) -> str:
+        """
+        Return preferred language for metadata as 2 letter country code (uppercase).
+
+        Defaults to English (EN).
+        """
+        return self._pref_lang or "EN"
+
+    @preferred_language.setter
+    def preferred_language(self, lang: str) -> None:
+        """
+        Set preferred language to 2 letter country code.
+
+        Can only be set once; subsequent assignments are silently ignored.
+        """
+        if self._pref_lang is None:
+            self._pref_lang = lang.upper()
+
+    async def setup(self):
+        """Async initialize of module."""
+
+    async def get_artist_metadata(self, artist: Artist) -> None:
+        """Get/update rich metadata for an artist (mutates the given object)."""
+        # set timestamp, used to determine when this function was last called
+        artist.metadata.last_refresh = int(time())
+
+        # the providers below require a musicbrainz id, so resolve it first
+        if not artist.musicbrainz_id:
+            artist.musicbrainz_id = await self.get_artist_musicbrainz_id(artist)
+
+        if artist.musicbrainz_id:
+            if metadata := await self.fanarttv.get_artist_metadata(artist):
+                artist.metadata.update(metadata)
+            if metadata := await self.audiodb.get_artist_metadata(artist):
+                artist.metadata.update(metadata)
+
+    async def get_album_metadata(self, album: Album) -> None:
+        """Get/update rich metadata for an album (mutates the given object)."""
+        # set timestamp, used to determine when this function was last called
+        album.metadata.last_refresh = int(time())
+
+        # need either a musicbrainz id or an artist reference to match on
+        if not (album.musicbrainz_id or album.artist):
+            return
+        if metadata := await self.audiodb.get_album_metadata(album):
+            album.metadata.update(metadata)
+        if metadata := await self.fanarttv.get_album_metadata(album):
+            album.metadata.update(metadata)
+
+    async def get_track_metadata(self, track: Track) -> None:
+        """Get/update rich metadata for a track (mutates the given object)."""
+        # set timestamp, used to determine when this function was last called
+        track.metadata.last_refresh = int(time())
+
+        # both album and artist(s) are required to match a track
+        if not (track.album and track.artists):
+            return
+        if metadata := await self.audiodb.get_track_metadata(track):
+            track.metadata.update(metadata)
+
+    async def get_playlist_metadata(self, playlist: Playlist) -> None:
+        """Get/update rich metadata for a playlist (mutates the given object)."""
+        # set timestamp, used to determine when this function was last called
+        playlist.metadata.last_refresh = int(time())
+        # retrieve genres from tracks
+        # TODO: retrieve style/mood ?
+        playlist.metadata.genres = set()
+        image_urls = set()
+        for track in await self.mass.music.playlists.tracks(
+            playlist.item_id, playlist.provider
+        ):
+            if not playlist.image and track.image:
+                image_urls.add(track.image.url)
+            if track.media_type != MediaType.TRACK:
+                # filter out radio items
+                continue
+            # prefer the track's own genres, fall back to its album's genres
+            if track.metadata.genres:
+                playlist.metadata.genres.update(track.metadata.genres)
+            elif track.album and track.album.metadata.genres:
+                playlist.metadata.genres.update(track.album.metadata.genres)
+        # create collage thumb/fanart from playlist tracks
+        if image_urls:
+            fake_path = f"playlist_collage.{playlist.provider.value}.{playlist.item_id}"
+            collage = await create_collage(self.mass, list(image_urls))
+            match = {"path": fake_path, "size": 0}
+            # store the collage in the thumbs table under the fake path so
+            # get_thumbnail can serve it like any other cached image
+            await self.mass.database.insert(
+                TABLE_THUMBS, {**match, "data": collage}, allow_replace=True
+            )
+            playlist.metadata.images = [
+                MediaItemImage(ImageType.THUMB, fake_path, True)
+            ]
+
+    async def get_radio_metadata(self, radio: Radio) -> None:
+        """Get/update rich metadata for a radio station."""
+        # NOTE: we do not have any metadata for radio so consider this future proofing ;-)
+        radio.metadata.last_refresh = int(time())
+
+    async def get_artist_musicbrainz_id(self, artist: Artist) -> str | None:
+        """Fetch musicbrainz id by performing search using the artist name, albums and tracks."""
+        ref_albums = await self.mass.music.artists.albums(artist=artist)
+        # first try audiodb
+        if musicbrainz_id := await self.audiodb.get_musicbrainz_id(artist, ref_albums):
+            return musicbrainz_id
+        # try again with musicbrainz, using albums that have a upc or mbid
+        for ref_album in ref_albums:
+            if ref_album.upc:
+                if musicbrainz_id := await self.musicbrainz.get_mb_artist_id(
+                    artist.name,
+                    album_upc=ref_album.upc,
+                ):
+                    return musicbrainz_id
+            if ref_album.musicbrainz_id:
+                if musicbrainz_id := await self.musicbrainz.search_artist_by_album_mbid(
+                    artist.name, ref_album.musicbrainz_id
+                ):
+                    return musicbrainz_id
+
+        # try again with matching on track isrc
+        ref_tracks = await self.mass.music.artists.toptracks(artist=artist)
+        for ref_track in ref_tracks:
+            for isrc in ref_track.isrcs:
+                if musicbrainz_id := await self.musicbrainz.get_mb_artist_id(
+                    artist.name,
+                    track_isrc=isrc,
+                ):
+                    return musicbrainz_id
+
+        # last resort: track matching by name
+        for ref_track in ref_tracks:
+            if musicbrainz_id := await self.musicbrainz.get_mb_artist_id(
+                artist.name,
+                trackname=ref_track.name,
+            ):
+                return musicbrainz_id
+        # lookup failed
+        ref_albums_str = "/".join(x.name for x in ref_albums) or "none"
+        ref_tracks_str = "/".join(x.name for x in ref_tracks) or "none"
+        self.logger.info(
+            "Unable to get musicbrainz ID for artist %s\n"
+            " - using lookup-album(s): %s\n"
+            " - using lookup-track(s): %s\n",
+            artist.name,
+            ref_albums_str,
+            ref_tracks_str,
+        )
+        return None
+
+    async def get_image_data_for_item(
+        self,
+        media_item: MediaItemType,
+        img_type: ImageType = ImageType.THUMB,
+        size: int = 0,
+    ) -> bytes | None:
+        """Get image data for given MediaItem."""
+        # resolve a (local) path/url first, then turn it into (thumbnail) bytes
+        img_path = await self.get_image_url_for_item(
+            media_item=media_item,
+            img_type=img_type,
+            allow_local=True,
+            local_as_base64=False,
+        )
+        if not img_path:
+            return None
+        return await self.get_thumbnail(img_path, size)
+
+    async def get_image_url_for_item(
+        self,
+        media_item: MediaItemType,
+        img_type: ImageType = ImageType.THUMB,
+        allow_local: bool = True,
+        local_as_base64: bool = False,
+    ) -> str | None:
+        """Get url to image for given media_item."""
+        if not media_item:
+            return None
+        # resolve an ItemMapping into the full media item first
+        if isinstance(media_item, ItemMapping):
+            media_item = await self.mass.music.get_item_by_uri(media_item.uri)
+        if media_item and media_item.metadata.images:
+            for img in media_item.metadata.images:
+                if img.type != img_type:
+                    continue
+                if img.is_file and not allow_local:
+                    continue
+                if img.is_file and local_as_base64:
+                    # return base64 string of the image (compatible with browsers)
+                    return await self.get_thumbnail(img.url, base64=True)
+                return img.url
+
+        # retry with track's album
+        if media_item.media_type == MediaType.TRACK and media_item.album:
+            return await self.get_image_url_for_item(
+                media_item.album, img_type, allow_local, local_as_base64
+            )
+
+        # try artist instead for albums
+        if media_item.media_type == MediaType.ALBUM and media_item.artist:
+            return await self.get_image_url_for_item(
+                media_item.artist, img_type, allow_local, local_as_base64
+            )
+
+        # last resort: track artist(s)
+        # NOTE(review): the loop returns unconditionally, so only the first
+        # artist is ever tried - confirm whether trying the rest was intended
+        if media_item.media_type == MediaType.TRACK and media_item.artists:
+            for artist in media_item.artists:
+                return await self.get_image_url_for_item(
+                    artist, img_type, allow_local, local_as_base64
+                )
+
+        return None
+
+    async def get_thumbnail(
+        self, path: str, size: int = 0, base64: bool = False
+    ) -> bytes | str:
+        """Get/create thumbnail image for path (image url or local path)."""
+        # check if we already have this cached in the db;
+        # strip query parameters so the same image maps to one cache entry
+        match_path = path.split("?")[0].split("&")[0]
+        match = {"path": match_path, "size": size}
+        if result := await self.mass.database.get_row(TABLE_THUMBS, match):
+            thumbnail = result["data"]
+        else:
+            # create thumbnail if it doesn't exist
+            thumbnail = await create_thumbnail(self.mass, path, size)
+            await self.mass.database.insert(
+                TABLE_THUMBS, {**match, "data": thumbnail}, allow_replace=True
+            )
+        if base64:
+            # return as a browser-compatible data-url string instead of raw bytes
+            enc_image = b64encode(thumbnail).decode()
+            thumbnail = f"data:image/png;base64,{enc_image}"
+        return thumbnail
import aiohttp
from asyncio_throttle import Throttler
-from music_assistant.helpers.cache import use_cache
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.compare import compare_strings
from music_assistant.helpers.util import create_sort_name
--- /dev/null
+"""MusicController: Orchestrates all data from music providers and sync to internal database."""
+from __future__ import annotations
+
+import asyncio
+import itertools
+import statistics
+from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union
+
+from music_assistant.controllers.database import TABLE_PLAYLOG, TABLE_TRACK_LOUDNESS
+from music_assistant.controllers.media.albums import AlbumsController
+from music_assistant.controllers.media.artists import ArtistsController
+from music_assistant.controllers.media.playlists import PlaylistController
+from music_assistant.controllers.media.radio import RadioController
+from music_assistant.controllers.media.tracks import TracksController
+from music_assistant.helpers.datetime import utc_timestamp
+from music_assistant.helpers.uri import parse_uri
+from music_assistant.models.config import MusicProviderConfig
+from music_assistant.models.enums import MediaType, MusicProviderFeature, ProviderType
+from music_assistant.models.errors import (
+ MusicAssistantError,
+ ProviderUnavailableError,
+ SetupFailedError,
+)
+from music_assistant.models.media_items import (
+ BrowseFolder,
+ MediaItem,
+ MediaItemType,
+ media_from_dict,
+)
+from music_assistant.models.music_provider import MusicProvider
+from music_assistant.music_providers.filesystem.filesystem import FileSystemProvider
+from music_assistant.music_providers.qobuz.qobuz import QobuzProvider
+from music_assistant.music_providers.spotify import SpotifyProvider
+from music_assistant.music_providers.tunein.tunein import TuneInProvider
+from music_assistant.music_providers.url.url import PROVIDER_CONFIG as URL_CONFIG
+from music_assistant.music_providers.url.url import URLProvider
+from music_assistant.music_providers.ytmusic import YoutubeMusicProvider
+
+if TYPE_CHECKING:
+ from music_assistant.mass import MusicAssistant
+
+# Maps a configured ProviderType to the MusicProvider class implementing it.
+PROV_MAP = {
+    ProviderType.FILESYSTEM_LOCAL: FileSystemProvider,
+    ProviderType.SPOTIFY: SpotifyProvider,
+    ProviderType.QOBUZ: QobuzProvider,
+    ProviderType.TUNEIN: TuneInProvider,
+    ProviderType.YTMUSIC: YoutubeMusicProvider,
+}
+
+
+class MusicController:
+    """Several helpers around the musicproviders."""
+
+    def __init__(self, mass: MusicAssistant):
+        """Initialize class."""
+        self.logger = mass.logger.getChild("music")
+        self.mass = mass
+        # per media-type sub controllers
+        self.artists = ArtistsController(mass)
+        self.albums = AlbumsController(mass)
+        self.tracks = TracksController(mass)
+        self.radio = RadioController(mass)
+        self.playlists = PlaylistController(mass)
+        # registered music providers, keyed by provider id
+        self._providers: Dict[str, MusicProvider] = {}
+
+    async def setup(self):
+        """Async initialize of module."""
+        # register providers
+        for prov_conf in self.mass.config.providers:
+            prov_cls = PROV_MAP[prov_conf.type]
+            await self._register_provider(prov_cls(self.mass, prov_conf), prov_conf)
+        # always register url provider
+        await self._register_provider(URLProvider(self.mass, URL_CONFIG), URL_CONFIG)
+        # add job to cleanup old records from db
+        self.mass.add_job(
+            self._cleanup_library(),
+            "Cleanup removed items from database",
+            allow_duplicate=False,
+        )
+
+    async def start_sync(
+        self,
+        media_types: Optional[Tuple[MediaType]] = None,
+        prov_types: Optional[Tuple[ProviderType]] = None,
+        schedule: Optional[float] = None,
+    ) -> None:
+        """
+        Start running the sync of all registered providers.
+
+        media_types: only sync these media types. None for all.
+        prov_types: only sync these provider types. None for all.
+        schedule: schedule syncjob every X hours, set to None for just a manual sync run.
+        """
+
+        async def do_sync():
+            # loops forever when a schedule is set, otherwise runs exactly once
+            while True:
+                for prov in self.providers:
+                    if prov_types is not None and prov.type not in prov_types:
+                        continue
+                    self.mass.add_job(
+                        prov.sync_library(media_types),
+                        f"Library sync for provider {prov.name}",
+                        allow_duplicate=False,
+                    )
+                if schedule is None:
+                    return
+                await asyncio.sleep(3600 * schedule)
+
+        self.mass.create_task(do_sync())
+
+    @property
+    def provider_count(self) -> int:
+        """Return count of all registered music providers."""
+        return len(self._providers)
+
+    @property
+    def providers(self) -> Tuple[MusicProvider]:
+        """Return all (available) music providers."""
+        return tuple(x for x in self._providers.values() if x.available)
+
+    def get_provider(self, provider_id: Union[str, ProviderType]) -> MusicProvider:
+        """Return Music provider by id (or type)."""
+        # fast path: direct lookup by id
+        if prov := self._providers.get(provider_id):
+            return prov
+        # fall back to matching on type, id or type value
+        for prov in self._providers.values():
+            if provider_id in (prov.type, prov.id, prov.type.value):
+                return prov
+        raise ProviderUnavailableError(f"Provider {provider_id} is not available")
+
+    async def search(
+        self,
+        search_query,
+        media_types: List[MediaType] = MediaType.ALL,
+        limit: int = 10,
+    ) -> List[MediaItemType]:
+        """
+        Perform global search for media items on all providers.
+
+        :param search_query: Search query.
+        :param media_types: A list of media_types to include.
+        :param limit: number of items to return in the search (per type).
+        """
+        # include results from all music providers
+        provider_ids = [item.id for item in self.providers]
+        # TODO: sort by name and filter out duplicates ?
+        # NOTE(review): this returns an itertools.chain (iterator), not a List
+        # as annotated - confirm callers do not iterate it more than once
+        return itertools.chain.from_iterable(
+            await asyncio.gather(
+                *[
+                    self.search_provider(
+                        search_query, media_types, provider_id=prov_id, limit=limit
+                    )
+                    for prov_id in provider_ids
+                ]
+            )
+        )
+
+    async def search_provider(
+        self,
+        search_query: str,
+        media_types: List[MediaType] = MediaType.ALL,
+        provider: Optional[ProviderType] = None,
+        provider_id: Optional[str] = None,
+        limit: int = 10,
+    ) -> List[MediaItemType]:
+        """
+        Perform search on given provider.
+
+        :param search_query: Search query
+        :param provider_id: provider_id of the provider to perform the search on.
+        :param media_types: A list of media_types to include. All types if None.
+        :param limit: number of items to return in the search (per type).
+        """
+        assert provider or provider_id, "Provider needs to be supplied"
+        prov = self.get_provider(provider_id or provider)
+        if MusicProviderFeature.SEARCH not in prov.supported_features:
+            return []
+
+        # create safe search string
+        search_query = search_query.replace("/", " ").replace("'", "")
+
+        # prefer cache items (if any)
+        # cache key includes provider, query, limit and the requested media types
+        cache_key = f"{prov.type.value}.search.{search_query}.{limit}"
+        cache_key += "".join((x.value for x in media_types))
+
+        if cache := await self.mass.cache.get(cache_key):
+            return [media_from_dict(x) for x in cache]
+        # no items in cache - get listing from provider
+        items = await prov.search(
+            search_query,
+            media_types,
+            limit,
+        )
+        # store (serializable items) in cache
+        self.mass.create_task(
+            self.mass.cache.set(
+                cache_key, [x.to_dict() for x in items], expiration=86400 * 7
+            )
+        )
+        return items
+
+    async def browse(self, path: Optional[str] = None) -> BrowseFolder:
+        """Browse Music providers."""
+        # root level; folder per provider
+        if not path or path == "root":
+            return BrowseFolder(
+                item_id="root",
+                provider=ProviderType.DATABASE,
+                path="root",
+                label="browse",
+                name="",
+                items=[
+                    BrowseFolder(
+                        item_id="root",
+                        provider=prov.type,
+                        path=f"{prov.id}://",
+                        name=prov.name,
+                    )
+                    for prov in self.providers
+                    if MusicProviderFeature.BROWSE in prov.supported_features
+                ],
+            )
+        # provider level
+        provider_id = path.split("://", 1)[0]
+        prov = self.get_provider(provider_id)
+        return await prov.browse(path)
+
+    async def get_item_by_uri(
+        self, uri: str, force_refresh: bool = False, lazy: bool = True
+    ) -> MediaItemType:
+        """Fetch MediaItem by uri."""
+        media_type, provider, item_id = parse_uri(uri)
+        return await self.get_item(
+            item_id=item_id,
+            media_type=media_type,
+            provider=provider,
+            force_refresh=force_refresh,
+            lazy=lazy,
+        )
+
+    async def get_item(
+        self,
+        item_id: str,
+        media_type: MediaType,
+        provider: Optional[ProviderType] = None,
+        provider_id: Optional[str] = None,
+        force_refresh: bool = False,
+        lazy: bool = True,
+    ) -> MediaItemType:
+        """Get single music item by id and media type."""
+        assert provider or provider_id, "provider or provider_id must be supplied"
+        if provider == ProviderType.URL or provider_id == "url":
+            # handle special case of 'URL' MusicProvider which allows us to play regular url's
+            return await self.get_provider(ProviderType.URL).parse_item(item_id)
+        ctrl = self.get_controller(media_type)
+        return await ctrl.get(
+            provider_item_id=item_id,
+            provider=provider,
+            provider_id=provider_id,
+            force_refresh=force_refresh,
+            lazy=lazy,
+        )
+
+    async def add_to_library(
+        self,
+        media_type: MediaType,
+        provider_item_id: str,
+        provider: Optional[ProviderType] = None,
+        provider_id: Optional[str] = None,
+    ) -> None:
+        """Add an item to the library."""
+        ctrl = self.get_controller(media_type)
+        await ctrl.add_to_library(
+            provider_item_id, provider=provider, provider_id=provider_id
+        )
+
+    async def remove_from_library(
+        self,
+        media_type: MediaType,
+        provider_item_id: str,
+        provider: Optional[ProviderType] = None,
+        provider_id: Optional[str] = None,
+    ) -> None:
+        """Remove item from the library."""
+        ctrl = self.get_controller(media_type)
+        await ctrl.remove_from_library(
+            provider_item_id, provider=provider, provider_id=provider_id
+        )
+
+    async def delete_db_item(
+        self, media_type: MediaType, db_item_id: str, recursive: bool = False
+    ) -> None:
+        """Remove item from the library."""
+        ctrl = self.get_controller(media_type)
+        await ctrl.delete_db_item(db_item_id, recursive)
+
+    async def refresh_items(self, items: List[MediaItem]) -> None:
+        """
+        Refresh MediaItems to force retrieval of full info and matches.
+
+        Creates background tasks to process the action.
+        """
+        for media_item in items:
+            job_desc = f"Refresh metadata of {media_item.uri}"
+            self.mass.add_job(self.refresh_item(media_item), job_desc)
+
+    async def refresh_item(
+        self,
+        media_item: MediaItem,
+    ):
+        """Try to refresh a mediaitem by requesting its full object or search for substitutes."""
+        try:
+            return await self.get_item(
+                media_item.item_id,
+                media_item.media_type,
+                provider=media_item.provider,
+                force_refresh=True,
+                lazy=False,
+            )
+        except MusicAssistantError:
+            # direct fetch failed, fall through to name-based search below
+            pass
+
+        for item in await self.search(media_item.name, [media_item.media_type], 20):
+            if item.available:
+                await self.get_item(
+                    item.item_id, item.media_type, item.provider, lazy=False
+                )
+
+    async def set_track_loudness(
+        self, item_id: str, provider: ProviderType, loudness: int
+    ):
+        """Store integrated loudness for a track in db."""
+        await self.mass.database.insert(
+            TABLE_TRACK_LOUDNESS,
+            {"item_id": item_id, "provider": provider.value, "loudness": loudness},
+            allow_replace=True,
+        )
+
+    async def get_track_loudness(
+        self, provider_item_id: str, provider: ProviderType
+    ) -> float | None:
+        """Get integrated loudness for a track in db."""
+        if result := await self.mass.database.get_row(
+            TABLE_TRACK_LOUDNESS,
+            {
+                "item_id": provider_item_id,
+                "provider": provider.value,
+            },
+        ):
+            return result["loudness"]
+        return None
+
+    async def get_provider_loudness(self, provider: ProviderType) -> float | None:
+        """Get average integrated loudness for tracks of given provider."""
+        all_items = []
+        if provider == ProviderType.URL:
+            # this is not a very good idea for random urls
+            return None
+        for db_row in await self.mass.database.get_rows(
+            TABLE_TRACK_LOUDNESS,
+            {
+                "provider": provider.value,
+            },
+        ):
+            all_items.append(db_row["loudness"])
+        if all_items:
+            return statistics.fmean(all_items)
+        return None
+
+    async def mark_item_played(self, item_id: str, provider: ProviderType):
+        """Mark item as played in playlog."""
+        timestamp = utc_timestamp()
+        await self.mass.database.insert(
+            TABLE_PLAYLOG,
+            {"item_id": item_id, "provider": provider.value, "timestamp": timestamp},
+            allow_replace=True,
+        )
+
+    async def library_add_items(self, items: List[MediaItem]) -> None:
+        """
+        Add media item(s) to the library.
+
+        Creates background tasks to process the action.
+        """
+        for media_item in items:
+            job_desc = f"Add {media_item.uri} to library"
+            self.mass.add_job(
+                self.add_to_library(
+                    media_item.media_type, media_item.item_id, media_item.provider
+                ),
+                job_desc,
+            )
+
+    async def library_remove_items(self, items: List[MediaItem]) -> None:
+        """
+        Remove media item(s) from the library.
+
+        Creates background tasks to process the action.
+        """
+        for media_item in items:
+            job_desc = f"Remove {media_item.uri} from library"
+            self.mass.add_job(
+                self.remove_from_library(
+                    media_item.media_type, media_item.item_id, media_item.provider
+                ),
+                job_desc,
+            )
+
+    def get_controller(
+        self, media_type: MediaType
+    ) -> ArtistsController | AlbumsController | TracksController | RadioController | PlaylistController:
+        """Return controller for MediaType."""
+        # NOTE(review): implicitly returns None for any MediaType not listed
+        # below - confirm callers never pass an unhandled type
+        if media_type == MediaType.ARTIST:
+            return self.artists
+        if media_type == MediaType.ALBUM:
+            return self.albums
+        if media_type == MediaType.TRACK:
+            return self.tracks
+        if media_type == MediaType.RADIO:
+            return self.radio
+        if media_type == MediaType.PLAYLIST:
+            return self.playlists
+
+    async def _register_provider(
+        self, provider: MusicProvider, conf: MusicProviderConfig
+    ) -> None:
+        """Register a music provider."""
+        if provider.id in self._providers:
+            raise SetupFailedError(
+                f"Provider with id {provider.id} is already registered"
+            )
+        try:
+            # inject runtime dependencies before running the provider's setup
+            provider.config = conf
+            provider.mass = self.mass
+            provider.cache = self.mass.cache
+            provider.logger = self.logger.getChild(provider.type.value)
+            # only keep the provider if its setup reports success
+            if await provider.setup():
+                self._providers[provider.id] = provider
+        except Exception as err:  # pylint: disable=broad-except
+            raise SetupFailedError(
+                f"Setup failed of provider {provider.type.value}: {str(err)}"
+            ) from err
+
+    async def _cleanup_library(self) -> None:
+        """Cleanup deleted items from library/database."""
+        # compare the provider ids seen last run (persisted in cache)
+        # against the currently registered ones to detect removed providers
+        prev_providers = await self.mass.cache.get("prov_ids", default=[])
+        cur_providers = list(self._providers.keys())
+        removed_providers = {x for x in prev_providers if x not in cur_providers}
+
+        for prov_id in removed_providers:
+
+            # clean cache items from deleted provider(s)
+            await self.mass.cache.clear(prov_id)
+
+            # cleanup media items from db matched to deleted provider
+            for ctrl in (
+                # order is important here to recursively cleanup bottom up
+                self.mass.music.radio,
+                self.mass.music.playlists,
+                self.mass.music.tracks,
+                self.mass.music.albums,
+                self.mass.music.artists,
+            ):
+                prov_items = await ctrl.get_db_items_by_prov_id(provider_id=prov_id)
+                for item in prov_items:
+                    await ctrl.remove_prov_mapping(item.item_id, prov_id)
+        # remember the current provider ids for the next run
+        await self.mass.cache.set("prov_ids", cur_providers)
+++ /dev/null
-"""MusicController: Orchestrates all data from music providers and sync to internal database."""
-from __future__ import annotations
-
-import asyncio
-import itertools
-import statistics
-from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union
-
-from music_assistant.controllers.music.albums import AlbumsController
-from music_assistant.controllers.music.artists import ArtistsController
-from music_assistant.controllers.music.playlists import PlaylistController
-from music_assistant.controllers.music.radio import RadioController
-from music_assistant.controllers.music.tracks import TracksController
-from music_assistant.helpers.database import TABLE_PLAYLOG, TABLE_TRACK_LOUDNESS
-from music_assistant.helpers.datetime import utc_timestamp
-from music_assistant.helpers.uri import parse_uri
-from music_assistant.models.config import MusicProviderConfig
-from music_assistant.models.enums import MediaType, MusicProviderFeature, ProviderType
-from music_assistant.models.errors import (
- MusicAssistantError,
- ProviderUnavailableError,
- SetupFailedError,
-)
-from music_assistant.models.media_items import (
- BrowseFolder,
- MediaItem,
- MediaItemType,
- media_from_dict,
-)
-from music_assistant.models.music_provider import MusicProvider
-from music_assistant.music_providers.filesystem import FileSystemProvider
-from music_assistant.music_providers.qobuz import QobuzProvider
-from music_assistant.music_providers.spotify import SpotifyProvider
-from music_assistant.music_providers.tunein import TuneInProvider
-from music_assistant.music_providers.url import PROVIDER_CONFIG as URL_CONFIG
-from music_assistant.music_providers.url import URLProvider
-from music_assistant.music_providers.ytmusic import YoutubeMusicProvider
-
-if TYPE_CHECKING:
- from music_assistant.mass import MusicAssistant
-
-PROV_MAP = {
- ProviderType.FILESYSTEM_LOCAL: FileSystemProvider,
- ProviderType.SPOTIFY: SpotifyProvider,
- ProviderType.QOBUZ: QobuzProvider,
- ProviderType.TUNEIN: TuneInProvider,
- ProviderType.YTMUSIC: YoutubeMusicProvider,
-}
-
-
-class MusicController:
- """Several helpers around the musicproviders."""
-
- def __init__(self, mass: MusicAssistant):
- """Initialize class."""
- self.logger = mass.logger.getChild("music")
- self.mass = mass
- self.artists = ArtistsController(mass)
- self.albums = AlbumsController(mass)
- self.tracks = TracksController(mass)
- self.radio = RadioController(mass)
- self.playlists = PlaylistController(mass)
- self._providers: Dict[str, MusicProvider] = {}
-
- async def setup(self):
- """Async initialize of module."""
- # register providers
- for prov_conf in self.mass.config.providers:
- prov_cls = PROV_MAP[prov_conf.type]
- await self._register_provider(prov_cls(self.mass, prov_conf), prov_conf)
- # always register url provider
- await self._register_provider(URLProvider(self.mass, URL_CONFIG), URL_CONFIG)
- # add job to cleanup old records from db
- self.mass.add_job(
- self._cleanup_library(),
- "Cleanup removed items from database",
- allow_duplicate=False,
- )
-
- async def start_sync(
- self,
- media_types: Optional[Tuple[MediaType]] = None,
- prov_types: Optional[Tuple[ProviderType]] = None,
- schedule: Optional[float] = None,
- ) -> None:
- """
-        Start running the sync of all registered providers.
-
- media_types: only sync these media types. None for all.
- prov_types: only sync these provider types. None for all.
- schedule: schedule syncjob every X hours, set to None for just a manual sync run.
- """
-
- async def do_sync():
- while True:
- for prov in self.providers:
- if prov_types is not None and prov.type not in prov_types:
- continue
- self.mass.add_job(
- prov.sync_library(media_types),
- f"Library sync for provider {prov.name}",
- allow_duplicate=False,
- )
- if schedule is None:
- return
- await asyncio.sleep(3600 * schedule)
-
- self.mass.create_task(do_sync())
-
- @property
- def provider_count(self) -> int:
- """Return count of all registered music providers."""
- return len(self._providers)
-
- @property
- def providers(self) -> Tuple[MusicProvider]:
- """Return all (available) music providers."""
- return tuple(x for x in self._providers.values() if x.available)
-
- def get_provider(self, provider_id: Union[str, ProviderType]) -> MusicProvider:
- """Return Music provider by id (or type)."""
- if prov := self._providers.get(provider_id):
- return prov
- for prov in self._providers.values():
- if provider_id in (prov.type, prov.id, prov.type.value):
- return prov
- raise ProviderUnavailableError(f"Provider {provider_id} is not available")
-
- async def search(
- self,
- search_query,
- media_types: List[MediaType] = MediaType.ALL,
- limit: int = 10,
- ) -> List[MediaItemType]:
- """
- Perform global search for media items on all providers.
-
- :param search_query: Search query.
- :param media_types: A list of media_types to include.
- :param limit: number of items to return in the search (per type).
- """
- # include results from all music providers
- provider_ids = [item.id for item in self.providers]
- # TODO: sort by name and filter out duplicates ?
- return itertools.chain.from_iterable(
- await asyncio.gather(
- *[
- self.search_provider(
- search_query, media_types, provider_id=prov_id, limit=limit
- )
- for prov_id in provider_ids
- ]
- )
- )
-
- async def search_provider(
- self,
- search_query: str,
- media_types: List[MediaType] = MediaType.ALL,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- limit: int = 10,
- ) -> List[MediaItemType]:
- """
- Perform search on given provider.
-
- :param search_query: Search query
- :param provider_id: provider_id of the provider to perform the search on.
- :param media_types: A list of media_types to include. All types if None.
- :param limit: number of items to return in the search (per type).
- """
- assert provider or provider_id, "Provider needs to be supplied"
- prov = self.get_provider(provider_id or provider)
- if MusicProviderFeature.SEARCH not in prov.supported_features:
- return []
-
- # create safe search string
- search_query = search_query.replace("/", " ").replace("'", "")
-
- # prefer cache items (if any)
- cache_key = f"{prov.type.value}.search.{search_query}.{limit}"
- cache_key += "".join((x.value for x in media_types))
-
- if cache := await self.mass.cache.get(cache_key):
- return [media_from_dict(x) for x in cache]
- # no items in cache - get listing from provider
- items = await prov.search(
- search_query,
- media_types,
- limit,
- )
- # store (serializable items) in cache
- self.mass.create_task(
- self.mass.cache.set(
- cache_key, [x.to_dict() for x in items], expiration=86400 * 7
- )
- )
- return items
-
- async def browse(self, path: Optional[str] = None) -> BrowseFolder:
- """Browse Music providers."""
- # root level; folder per provider
- if not path or path == "root":
- return BrowseFolder(
- item_id="root",
- provider=ProviderType.DATABASE,
- path="root",
- label="browse",
- name="",
- items=[
- BrowseFolder(
- item_id="root",
- provider=prov.type,
- path=f"{prov.id}://",
- name=prov.name,
- )
- for prov in self.providers
- if MusicProviderFeature.BROWSE in prov.supported_features
- ],
- )
- # provider level
- provider_id = path.split("://", 1)[0]
- prov = self.get_provider(provider_id)
- return await prov.browse(path)
-
- async def get_item_by_uri(
- self, uri: str, force_refresh: bool = False, lazy: bool = True
- ) -> MediaItemType:
- """Fetch MediaItem by uri."""
- media_type, provider, item_id = parse_uri(uri)
- return await self.get_item(
- item_id=item_id,
- media_type=media_type,
- provider=provider,
- force_refresh=force_refresh,
- lazy=lazy,
- )
-
- async def get_item(
- self,
- item_id: str,
- media_type: MediaType,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- force_refresh: bool = False,
- lazy: bool = True,
- ) -> MediaItemType:
- """Get single music item by id and media type."""
- assert provider or provider_id, "provider or provider_id must be supplied"
- if provider == ProviderType.URL or provider_id == "url":
- # handle special case of 'URL' MusicProvider which allows us to play regular url's
- return await self.get_provider(ProviderType.URL).parse_item(item_id)
- ctrl = self.get_controller(media_type)
- return await ctrl.get(
- provider_item_id=item_id,
- provider=provider,
- provider_id=provider_id,
- force_refresh=force_refresh,
- lazy=lazy,
- )
-
- async def add_to_library(
- self,
- media_type: MediaType,
- provider_item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> None:
- """Add an item to the library."""
- ctrl = self.get_controller(media_type)
- await ctrl.add_to_library(
- provider_item_id, provider=provider, provider_id=provider_id
- )
-
- async def remove_from_library(
- self,
- media_type: MediaType,
- provider_item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> None:
- """Remove item from the library."""
- ctrl = self.get_controller(media_type)
- await ctrl.remove_from_library(
- provider_item_id, provider=provider, provider_id=provider_id
- )
-
- async def delete_db_item(
- self, media_type: MediaType, db_item_id: str, recursive: bool = False
- ) -> None:
- """Remove item from the library."""
- ctrl = self.get_controller(media_type)
- await ctrl.delete_db_item(db_item_id, recursive)
-
- async def refresh_items(self, items: List[MediaItem]) -> None:
- """
- Refresh MediaItems to force retrieval of full info and matches.
-
- Creates background tasks to process the action.
- """
- for media_item in items:
- job_desc = f"Refresh metadata of {media_item.uri}"
- self.mass.add_job(self.refresh_item(media_item), job_desc)
-
- async def refresh_item(
- self,
- media_item: MediaItem,
- ):
- """Try to refresh a mediaitem by requesting it's full object or search for substitutes."""
- try:
- return await self.get_item(
- media_item.item_id,
- media_item.media_type,
- provider=media_item.provider,
- force_refresh=True,
- lazy=False,
- )
- except MusicAssistantError:
- pass
-
- for item in await self.search(media_item.name, [media_item.media_type], 20):
- if item.available:
- await self.get_item(
- item.item_id, item.media_type, item.provider, lazy=False
- )
-
- async def set_track_loudness(
- self, item_id: str, provider: ProviderType, loudness: int
- ):
- """List integrated loudness for a track in db."""
- await self.mass.database.insert(
- TABLE_TRACK_LOUDNESS,
- {"item_id": item_id, "provider": provider.value, "loudness": loudness},
- allow_replace=True,
- )
-
- async def get_track_loudness(
- self, provider_item_id: str, provider: ProviderType
- ) -> float | None:
- """Get integrated loudness for a track in db."""
- if result := await self.mass.database.get_row(
- TABLE_TRACK_LOUDNESS,
- {
- "item_id": provider_item_id,
- "provider": provider.value,
- },
- ):
- return result["loudness"]
- return None
-
- async def get_provider_loudness(self, provider: ProviderType) -> float | None:
- """Get average integrated loudness for tracks of given provider."""
- all_items = []
- if provider == ProviderType.URL:
- # this is not a very good idea for random urls
- return None
- for db_row in await self.mass.database.get_rows(
- TABLE_TRACK_LOUDNESS,
- {
- "provider": provider.value,
- },
- ):
- all_items.append(db_row["loudness"])
- if all_items:
- return statistics.fmean(all_items)
- return None
-
- async def mark_item_played(self, item_id: str, provider: ProviderType):
- """Mark item as played in playlog."""
- timestamp = utc_timestamp()
- await self.mass.database.insert(
- TABLE_PLAYLOG,
- {"item_id": item_id, "provider": provider.value, "timestamp": timestamp},
- allow_replace=True,
- )
-
- async def library_add_items(self, items: List[MediaItem]) -> None:
- """
- Add media item(s) to the library.
-
- Creates background tasks to process the action.
- """
- for media_item in items:
- job_desc = f"Add {media_item.uri} to library"
- self.mass.add_job(
- self.add_to_library(
- media_item.media_type, media_item.item_id, media_item.provider
- ),
- job_desc,
- )
-
- async def library_remove_items(self, items: List[MediaItem]) -> None:
- """
- Remove media item(s) from the library.
-
- Creates background tasks to process the action.
- """
- for media_item in items:
- job_desc = f"Remove {media_item.uri} from library"
- self.mass.add_job(
- self.remove_from_library(
- media_item.media_type, media_item.item_id, media_item.provider
- ),
- job_desc,
- )
-
- def get_controller(
- self, media_type: MediaType
- ) -> ArtistsController | AlbumsController | TracksController | RadioController | PlaylistController:
- """Return controller for MediaType."""
- if media_type == MediaType.ARTIST:
- return self.artists
- if media_type == MediaType.ALBUM:
- return self.albums
- if media_type == MediaType.TRACK:
- return self.tracks
- if media_type == MediaType.RADIO:
- return self.radio
- if media_type == MediaType.PLAYLIST:
- return self.playlists
-
- async def _register_provider(
- self, provider: MusicProvider, conf: MusicProviderConfig
- ) -> None:
- """Register a music provider."""
- if provider.id in self._providers:
- raise SetupFailedError(
- f"Provider with id {provider.id} is already registered"
- )
- try:
- provider.config = conf
- provider.mass = self.mass
- provider.cache = self.mass.cache
- provider.logger = self.logger.getChild(provider.type.value)
- if await provider.setup():
- self._providers[provider.id] = provider
- except Exception as err: # pylint: disable=broad-except
- raise SetupFailedError(
- f"Setup failed of provider {provider.type.value}: {str(err)}"
- ) from err
-
- async def _cleanup_library(self) -> None:
- """Cleanup deleted items from library/database."""
- prev_providers = await self.mass.cache.get("prov_ids", default=[])
- cur_providers = list(self._providers.keys())
- removed_providers = {x for x in prev_providers if x not in cur_providers}
-
- for prov_id in removed_providers:
-
- # clean cache items from deleted provider(s)
- await self.mass.cache.clear(prov_id)
-
- # cleanup media items from db matched to deleted provider
- for ctrl in (
- # order is important here to recursively cleanup bottom up
- self.mass.music.radio,
- self.mass.music.playlists,
- self.mass.music.tracks,
- self.mass.music.albums,
- self.mass.music.artists,
- ):
- prov_items = await ctrl.get_db_items_by_prov_id(provider_id=prov_id)
- for item in prov_items:
- await ctrl.remove_prov_mapping(item.item_id, prov_id)
- await self.mass.cache.set("prov_ids", cur_providers)
+++ /dev/null
-"""Manage MediaItems of type Album."""
-from __future__ import annotations
-
-import asyncio
-from random import choice, random
-from typing import List, Optional, Union
-
-from music_assistant.constants import VARIOUS_ARTISTS
-from music_assistant.helpers.compare import compare_album, loose_compare_strings
-from music_assistant.helpers.database import TABLE_ALBUMS, TABLE_TRACKS
-from music_assistant.helpers.json import json_serializer
-from music_assistant.models.enums import EventType, MusicProviderFeature, ProviderType
-from music_assistant.models.errors import (
- MediaNotFoundError,
- UnsupportedFeaturedException,
-)
-from music_assistant.models.event import MassEvent
-from music_assistant.models.media_controller import MediaControllerBase
-from music_assistant.models.media_items import (
- Album,
- AlbumType,
- Artist,
- ItemMapping,
- MediaType,
- Track,
-)
-from music_assistant.models.music_provider import MusicProvider
-
-
-class AlbumsController(MediaControllerBase[Album]):
- """Controller managing MediaItems of type Album."""
-
- db_table = TABLE_ALBUMS
- media_type = MediaType.ALBUM
- item_cls = Album
-
- async def get(self, *args, **kwargs) -> Album:
- """Return (full) details for a single media item."""
- album = await super().get(*args, **kwargs)
- # append full artist details to full album item
- if album.artist:
- album.artist = await self.mass.music.artists.get(
- album.artist.item_id, album.artist.provider
- )
- return album
-
- async def tracks(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> List[Track]:
- """Return album tracks for the given provider album id."""
-
- if not (provider == ProviderType.DATABASE or provider_id == "database"):
- # return provider album tracks
- return await self._get_provider_album_tracks(item_id, provider, provider_id)
-
-        # db_album requested: return tracks from the in-database album
- return await self._get_db_album_tracks(item_id)
-
- async def versions(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> List[Album]:
- """Return all versions of an album we can find on all providers."""
- assert provider or provider_id, "Provider type or ID must be specified"
- album = await self.get(item_id, provider, provider_id)
- # perform a search on all provider(types) to collect all versions/variants
- prov_types = {item.type for item in self.mass.music.providers}
- search_query = f"{album.artist.name} - {album.name}"
- all_versions = {
- prov_item.item_id: prov_item
- for prov_items in await asyncio.gather(
- *[self.search(search_query, prov_type) for prov_type in prov_types]
- )
- for prov_item in prov_items
- if loose_compare_strings(album.name, prov_item.name)
- }
- # make sure that the 'base' version is included
- for prov_version in album.provider_ids:
- if prov_version.item_id in all_versions:
- continue
- album_copy = Album.from_dict(album.to_dict())
- album_copy.item_id = prov_version.item_id
- album_copy.provider = prov_version.prov_type
- album_copy.provider_ids = {prov_version}
- all_versions[prov_version.item_id] = album_copy
-
- # return the aggregated result
- return all_versions.values()
-
- async def add(self, item: Album) -> Album:
- """Add album to local db and return the database item."""
- # grab additional metadata
- await self.mass.metadata.get_album_metadata(item)
- existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
- if existing:
- db_item = await self.update_db_item(existing.item_id, item)
- else:
- db_item = await self.add_db_item(item)
- # also fetch same album on all providers
- await self._match(db_item)
- # return final db_item after all match/metadata actions
- db_item = await self.get_db_item(db_item.item_id)
- self.mass.signal_event(
- MassEvent(
- EventType.MEDIA_ITEM_UPDATED
- if existing
- else EventType.MEDIA_ITEM_ADDED,
- db_item.uri,
- db_item,
- )
- )
- return db_item
-
- async def add_db_item(self, item: Album, overwrite_existing: bool = False) -> Album:
- """Add a new record to the database."""
- assert item.provider_ids, f"Album {item.name} is missing provider id(s)"
- assert item.artist, f"Album {item.name} is missing artist"
- async with self._db_add_lock:
- cur_item = None
- # always try to grab existing item by musicbrainz_id/upc
- if item.musicbrainz_id:
- match = {"musicbrainz_id": item.musicbrainz_id}
- cur_item = await self.mass.database.get_row(self.db_table, match)
- if not cur_item and item.upc:
- match = {"upc": item.upc}
- cur_item = await self.mass.database.get_row(self.db_table, match)
- if not cur_item:
- # fallback to search and match
- for row in await self.mass.database.search(self.db_table, item.name):
- row_album = Album.from_db_row(row)
- if compare_album(row_album, item):
- cur_item = row_album
- break
- if cur_item:
- # update existing
- return await self.update_db_item(
- cur_item.item_id, item, overwrite=overwrite_existing
- )
-
- # insert new item
- album_artists = await self._get_album_artists(item, cur_item)
- if album_artists:
- sort_artist = album_artists[0].sort_name
- else:
- sort_artist = ""
- new_item = await self.mass.database.insert(
- self.db_table,
- {
- **item.to_db_row(),
- "artists": json_serializer(album_artists) or None,
- "sort_artist": sort_artist,
- },
- )
- item_id = new_item["item_id"]
- self.logger.debug("added %s to database", item.name)
- # return created object
- return await self.get_db_item(item_id)
-
- async def update_db_item(
- self,
- item_id: int,
- item: Album,
- overwrite: bool = False,
- ) -> Album:
- """Update Album record in the database."""
- assert item.provider_ids, f"Album {item.name} is missing provider id(s)"
- assert item.artist, f"Album {item.name} is missing artist"
- cur_item = await self.get_db_item(item_id)
-
- if overwrite:
- metadata = item.metadata
- metadata.last_refresh = None
- provider_ids = item.provider_ids
- album_artists = await self._get_album_artists(item, overwrite=True)
- else:
- metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
- provider_ids = {*cur_item.provider_ids, *item.provider_ids}
- album_artists = await self._get_album_artists(item, cur_item)
-
- if item.album_type != AlbumType.UNKNOWN:
- album_type = item.album_type
- else:
- album_type = cur_item.album_type
-
- if album_artists:
- sort_artist = album_artists[0].sort_name
- else:
- sort_artist = ""
-
- await self.mass.database.update(
- self.db_table,
- {"item_id": item_id},
- {
- "name": item.name if overwrite else cur_item.name,
- "sort_name": item.sort_name if overwrite else cur_item.sort_name,
- "sort_artist": sort_artist,
- "version": item.version if overwrite else cur_item.version,
- "year": item.year or cur_item.year,
- "upc": item.upc or cur_item.upc,
- "album_type": album_type.value,
- "artists": json_serializer(album_artists) or None,
- "metadata": json_serializer(metadata),
- "provider_ids": json_serializer(provider_ids),
- "musicbrainz_id": item.musicbrainz_id or cur_item.musicbrainz_id,
- },
- )
- self.logger.debug("updated %s in database: %s", item.name, item_id)
- return await self.get_db_item(item_id)
-
- async def delete_db_item(self, item_id: int, recursive: bool = False) -> None:
- """Delete record from the database."""
- # check album tracks
- db_rows = await self.mass.database.get_rows_from_query(
- f"SELECT item_id FROM {TABLE_TRACKS} WHERE albums LIKE '%\"{item_id}\"%'",
- limit=5000,
- )
- assert not (db_rows and not recursive), "Tracks attached to album"
- for db_row in db_rows:
- try:
- await self.mass.music.albums.delete_db_item(
- db_row["item_id"], recursive
- )
- except MediaNotFoundError:
- pass
-
- # delete the album itself from db
- await super().delete_db_item(item_id)
-
- async def _get_provider_album_tracks(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> List[Track]:
- """Return album tracks for the given provider album id."""
- prov = self.mass.music.get_provider(provider_id or provider)
- if not prov:
- return []
- full_album = await self.get_provider_item(item_id, provider_id or provider)
- # prefer cache items (if any)
- cache_key = f"{prov.type.value}.albumtracks.{item_id}"
- cache_checksum = full_album.metadata.checksum
- if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
- return [Track.from_dict(x) for x in cache]
- # no items in cache - get listing from provider
- items = []
- for track in await prov.get_album_tracks(item_id):
- # make sure that the (full) album is stored on the tracks
- track.album = full_album
- if full_album.metadata.images:
- track.metadata.images = full_album.metadata.images
- items.append(track)
- # store (serializable items) in cache
- self.mass.create_task(
- self.mass.cache.set(
- cache_key, [x.to_dict() for x in items], checksum=cache_checksum
- )
- )
- return items
-
- async def _get_provider_dynamic_tracks(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- limit: int = 25,
- ):
- """Generate a dynamic list of tracks based on the album content."""
- prov = self.mass.music.get_provider(provider_id or provider)
- if (
- not prov
- or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
- ):
- return []
- album_tracks = await self._get_provider_album_tracks(
- item_id=item_id, provider=provider, provider_id=provider_id
- )
- # Grab a random track from the album that we use to obtain similar tracks for
- track = choice(album_tracks)
- # Calculate no of songs to grab from each list at a 10/90 ratio
- total_no_of_tracks = limit + limit % 2
- no_of_album_tracks = int(total_no_of_tracks * 10 / 100)
- no_of_similar_tracks = int(total_no_of_tracks * 90 / 100)
- # Grab similar tracks from the music provider
- similar_tracks = await prov.get_similar_tracks(
- prov_track_id=track.item_id, limit=no_of_similar_tracks
- )
- # Merge album content with similar tracks
- dynamic_playlist = [
- *sorted(album_tracks, key=lambda n: random())[:no_of_album_tracks],
- *sorted(similar_tracks, key=lambda n: random())[:no_of_similar_tracks],
- ]
- return sorted(dynamic_playlist, key=lambda n: random())
-
- async def _get_dynamic_tracks(self, media_item: Album, limit=25) -> List[Track]:
- """Get dynamic list of tracks for given item, fallback/default implementation."""
- # TODO: query metadata provider(s) to get similar tracks (or tracks from similar artists)
- raise UnsupportedFeaturedException(
- "No Music Provider found that supports requesting similar tracks."
- )
-
- async def _get_db_album_tracks(
- self,
- item_id: str,
- ) -> List[Track]:
- """Return in-database album tracks for the given database album."""
- db_album = await self.get_db_item(item_id)
- # simply grab all tracks in the db that are linked to this album
- # TODO: adjust to json query instead of text search?
- query = f"SELECT * FROM tracks WHERE albums LIKE '%\"{item_id}\"%'"
- result = []
- for track in await self.mass.music.tracks.get_db_items_by_query(query):
- if album_mapping := next(
- (x for x in track.albums if x.item_id == db_album.item_id), None
- ):
- # make sure that the full album is set on the track and prefer the album's images
- track.album = db_album
- if db_album.metadata.images:
- track.metadata.images = db_album.metadata.images
- # apply the disc and track number from the mapping
- track.disc_number = album_mapping.disc_number
- track.track_number = album_mapping.track_number
- result.append(track)
- return sorted(result, key=lambda x: (x.disc_number or 0, x.track_number or 0))
-
- async def _match(self, db_album: Album) -> None:
- """
- Try to find matching album on all providers for the provided (database) album.
-
- This is used to link objects of different providers/qualities together.
- """
- if db_album.provider != ProviderType.DATABASE:
- return # Matching only supported for database items
-
- async def find_prov_match(provider: MusicProvider):
- self.logger.debug(
- "Trying to match album %s on provider %s", db_album.name, provider.name
- )
- match_found = False
- for search_str in (
- db_album.name,
- f"{db_album.artist.name} - {db_album.name}",
- f"{db_album.artist.name} {db_album.name}",
- ):
- if match_found:
- break
- search_result = await self.search(search_str, provider.id)
- for search_result_item in search_result:
- if not search_result_item.available:
- continue
- if not compare_album(search_result_item, db_album):
- continue
- # we must fetch the full album version, search results are simplified objects
- prov_album = await self.get_provider_item(
- search_result_item.item_id, search_result_item.provider
- )
- if compare_album(prov_album, db_album):
- # 100% match, we can simply update the db with additional provider ids
- await self.update_db_item(db_album.item_id, prov_album)
- match_found = True
- return match_found
-
- # try to find match on all providers
- cur_prov_types = {x.prov_type for x in db_album.provider_ids}
- for provider in self.mass.music.providers:
- if provider.type in cur_prov_types:
- continue
- if MusicProviderFeature.SEARCH not in provider.supported_features:
- continue
- if await find_prov_match(provider):
- cur_prov_types.add(provider.type)
- else:
- self.logger.debug(
- "Could not find match for Album %s on provider %s",
- db_album.name,
- provider.name,
- )
-
- async def _get_album_artists(
- self,
- db_album: Album,
- updated_album: Optional[Album] = None,
- overwrite: bool = False,
- ) -> List[ItemMapping]:
- """Extract (database) album artist(s) as ItemMapping."""
- album_artists = set()
- for album in (updated_album, db_album):
- if not album:
- continue
- for artist in album.artists:
- album_artists.add(await self._get_artist_mapping(artist, overwrite))
- # use intermediate set to prevent duplicates
- # filter various artists if multiple artists
- if len(album_artists) > 1:
- album_artists = {x for x in album_artists if (x.name != VARIOUS_ARTISTS)}
- return list(album_artists)
-
- async def _get_artist_mapping(
- self, artist: Union[Artist, ItemMapping], overwrite: bool = False
- ) -> ItemMapping:
- """Extract (database) track artist as ItemMapping."""
- if overwrite:
- artist = await self.mass.music.artists.add_db_item(
- artist, overwrite_existing=True
- )
- if artist.provider == ProviderType.DATABASE:
- if isinstance(artist, ItemMapping):
- return artist
- return ItemMapping.from_item(artist)
-
- if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
- artist.item_id, provider=artist.provider
- ):
- return ItemMapping.from_item(db_artist)
-
- db_artist = await self.mass.music.artists.add_db_item(artist)
- return ItemMapping.from_item(db_artist)
+++ /dev/null
-"""Manage MediaItems of type Artist."""
-
-import asyncio
-import itertools
-from random import choice, random
-from time import time
-from typing import Any, Dict, List, Optional
-
-from music_assistant.constants import VARIOUS_ARTISTS, VARIOUS_ARTISTS_ID
-from music_assistant.helpers.compare import compare_strings
-from music_assistant.helpers.database import TABLE_ALBUMS, TABLE_ARTISTS, TABLE_TRACKS
-from music_assistant.helpers.json import json_serializer
-from music_assistant.models.enums import EventType, MusicProviderFeature, ProviderType
-from music_assistant.models.errors import (
- MediaNotFoundError,
- UnsupportedFeaturedException,
-)
-from music_assistant.models.event import MassEvent
-from music_assistant.models.media_controller import MediaControllerBase
-from music_assistant.models.media_items import (
- Album,
- AlbumType,
- Artist,
- ItemMapping,
- MediaType,
- PagedItems,
- Track,
-)
-from music_assistant.models.music_provider import MusicProvider
-
-
-class ArtistsController(MediaControllerBase[Artist]):
- """Controller managing MediaItems of type Artist."""
-
- db_table = TABLE_ARTISTS
- media_type = MediaType.ARTIST
- item_cls = Artist
-
- async def album_artists(
- self,
- in_library: Optional[bool] = None,
- search: Optional[str] = None,
- limit: int = 500,
- offset: int = 0,
- order_by: str = "sort_name",
- ) -> PagedItems:
- """Get in-database album artists."""
- return await self.db_items(
- in_library=in_library,
- search=search,
- limit=limit,
- offset=offset,
- order_by=order_by,
- query_parts=[
- "artists.sort_name in (select albums.sort_artist from albums)"
- ],
- )
-
- async def toptracks(
- self,
- item_id: Optional[str] = None,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- artist: Optional[Artist] = None,
- ) -> List[Track]:
- """Return top tracks for an artist."""
- if not artist:
- artist = await self.get(item_id, provider, provider_id)
- # get results from all providers
- coros = [
- self.get_provider_artist_toptracks(
- item.item_id,
- provider=item.prov_type,
- provider_id=item.prov_id,
- cache_checksum=artist.metadata.checksum,
- )
- for item in artist.provider_ids
- ]
- tracks = itertools.chain.from_iterable(await asyncio.gather(*coros))
- # merge duplicates using a dict
- final_items: Dict[str, Track] = {}
- for track in tracks:
- key = f".{track.name}.{track.version}"
- if key in final_items:
- final_items[key].provider_ids.update(track.provider_ids)
- else:
- final_items[key] = track
- return list(final_items.values())
-
- async def albums(
- self,
- item_id: Optional[str] = None,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- artist: Optional[Artist] = None,
- ) -> List[Album]:
- """Return (all/most popular) albums for an artist."""
- if not artist:
- artist = await self.get(item_id, provider, provider_id)
- # get results from all providers
- coros = [
- self.get_provider_artist_albums(
- item.item_id, item.prov_type, cache_checksum=artist.metadata.checksum
- )
- for item in artist.provider_ids
- ]
- albums = itertools.chain.from_iterable(await asyncio.gather(*coros))
- # merge duplicates using a dict
- final_items: Dict[str, Album] = {}
- for album in albums:
- key = f".{album.name}.{album.version}"
- if key in final_items:
- final_items[key].provider_ids.update(album.provider_ids)
- else:
- final_items[key] = album
- if album.in_library:
- final_items[key].in_library = True
- return list(final_items.values())
-
- async def add(self, item: Artist) -> Artist:
- """Add artist to local db and return the database item."""
- # grab musicbrainz id and additional metadata
- await self.mass.metadata.get_artist_metadata(item)
- existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
- if existing:
- db_item = await self.update_db_item(existing.item_id, item)
- else:
- db_item = await self.add_db_item(item)
- # also fetch same artist on all providers
- await self.match_artist(db_item)
- # return final db_item after all match/metadata actions
- db_item = await self.get_db_item(db_item.item_id)
- self.mass.signal_event(
- MassEvent(
- EventType.MEDIA_ITEM_UPDATED
- if existing
- else EventType.MEDIA_ITEM_ADDED,
- db_item.uri,
- db_item,
- )
- )
- return db_item
-
- async def match_artist(self, db_artist: Artist):
- """
- Try to find matching artists on all providers for the provided (database) item_id.
-
- This is used to link objects of different providers together.
- """
- assert (
- db_artist.provider == ProviderType.DATABASE
- ), "Matching only supported for database items!"
- cur_prov_types = {x.prov_type for x in db_artist.provider_ids}
- for provider in self.mass.music.providers:
- if provider.type in cur_prov_types:
- continue
- if MusicProviderFeature.SEARCH not in provider.supported_features:
- continue
- if await self._match(db_artist, provider):
- cur_prov_types.add(provider.type)
- else:
- self.logger.debug(
- "Could not find match for Artist %s on provider %s",
- db_artist.name,
- provider.name,
- )
-
- async def get_provider_artist_toptracks(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- cache_checksum: Any = None,
- ) -> List[Track]:
- """Return top tracks for an artist on given provider."""
- prov = self.mass.music.get_provider(provider_id or provider)
- if not prov:
- return []
- # prefer cache items (if any)
- cache_key = f"{prov.type.value}.artist_toptracks.{item_id}"
- if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
- return [Track.from_dict(x) for x in cache]
- # no items in cache - get listing from provider
- if MusicProviderFeature.ARTIST_TOPTRACKS in prov.supported_features:
- items = await prov.get_artist_toptracks(item_id)
- else:
- # fallback implementation using the db
- if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
- item_id, provider=provider, provider_id=provider_id
- ):
- prov_id = provider_id or provider.value
- # TODO: adjust to json query instead of text search?
- query = f"SELECT * FROM tracks WHERE artists LIKE '%\"{db_artist.item_id}\"%'"
- query += f" AND provider_ids LIKE '%\"{prov_id}\"%'"
- items = await self.mass.music.tracks.get_db_items_by_query(query)
- # store (serializable items) in cache
- self.mass.create_task(
- self.mass.cache.set(
- cache_key, [x.to_dict() for x in items], checksum=cache_checksum
- )
- )
- return items
-
- async def get_provider_artist_albums(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- cache_checksum: Any = None,
- ) -> List[Album]:
- """Return albums for an artist on given provider."""
- prov = self.mass.music.get_provider(provider_id or provider)
- if not prov:
- return []
- # prefer cache items (if any)
- cache_key = f"{prov.type.value}.artist_albums.{item_id}"
- if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
- return [Album.from_dict(x) for x in cache]
- # no items in cache - get listing from provider
- if MusicProviderFeature.ARTIST_ALBUMS in prov.supported_features:
- items = await prov.get_artist_albums(item_id)
- else:
- # fallback implementation using the db
- if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
- item_id, provider=provider, provider_id=provider_id
- ):
- prov_id = provider_id or provider.value
- # TODO: adjust to json query instead of text search?
- query = f"SELECT * FROM albums WHERE artists LIKE '%\"{db_artist.item_id}\"%'"
- query += f" AND provider_ids LIKE '%\"{prov_id}\"%'"
- items = await self.mass.music.albums.get_db_items_by_query(query)
- else:
- # edge case
- items = []
- # store (serializable items) in cache
- self.mass.create_task(
- self.mass.cache.set(
- cache_key, [x.to_dict() for x in items], checksum=cache_checksum
- )
- )
- return items
-
    async def add_db_item(
        self, item: Artist, overwrite_existing: bool = False
    ) -> Artist:
        """
        Add a new item record to the database.

        When a matching record already exists (by musicbrainz id or exact
        sort name), the existing record is updated and returned instead.
        """
        assert isinstance(item, Artist), "Not a full Artist object"
        assert item.provider_ids, "Artist is missing provider id(s)"
        # enforce various artists name + id
        if compare_strings(item.name, VARIOUS_ARTISTS):
            item.musicbrainz_id = VARIOUS_ARTISTS_ID
        if item.musicbrainz_id == VARIOUS_ARTISTS_ID:
            item.name = VARIOUS_ARTISTS

        # lock guards against two concurrent adds inserting duplicate records
        async with self._db_add_lock:
            # always try to grab existing item by musicbrainz_id
            cur_item = None
            if item.musicbrainz_id:
                match = {"musicbrainz_id": item.musicbrainz_id}
                cur_item = await self.mass.database.get_row(self.db_table, match)
            if not cur_item:
                # fallback to exact name match
                # NOTE: we match an artist by name which could theoretically lead to collisions
                # but the chance is so small it is not worth the additional overhead of grabbing
                # the musicbrainz id upfront
                match = {"sort_name": item.sort_name}
                for row in await self.mass.database.get_rows(self.db_table, match):
                    row_artist = Artist.from_db_row(row)
                    if row_artist.sort_name == item.sort_name:
                        cur_item = row_artist
                        break
            if cur_item:
                # update existing
                return await self.update_db_item(
                    cur_item.item_id, item, overwrite=overwrite_existing
                )

            # insert item
            if item.in_library and not item.timestamp:
                item.timestamp = int(time())
            new_item = await self.mass.database.insert(self.db_table, item.to_db_row())
            item_id = new_item["item_id"]
            self.logger.debug("added %s to database", item.name)
            # return created object
            return await self.get_db_item(item_id)
-
    async def update_db_item(
        self,
        item_id: int,
        item: Artist,
        overwrite: bool = False,
    ) -> Artist:
        """
        Update Artist record in the database.

        item_id: database id of the existing record.
        item: the (new) data to merge into, or overwrite, the existing record.
        overwrite: when True replace metadata/provider ids instead of merging.
        """
        cur_item = await self.get_db_item(item_id)
        if overwrite:
            metadata = item.metadata
            provider_ids = item.provider_ids
        else:
            # merge metadata; the extra flag marks file-based providers -
            # presumably a preferred source, confirm in metadata.update()
            metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
            provider_ids = {*cur_item.provider_ids, *item.provider_ids}

        # enforce various artists name + id
        if compare_strings(item.name, VARIOUS_ARTISTS):
            item.musicbrainz_id = VARIOUS_ARTISTS_ID
        if item.musicbrainz_id == VARIOUS_ARTISTS_ID:
            item.name = VARIOUS_ARTISTS

        await self.mass.database.update(
            self.db_table,
            {"item_id": item_id},
            {
                # keep the existing name/sort_name unless overwrite is requested
                "name": item.name if overwrite else cur_item.name,
                "sort_name": item.sort_name if overwrite else cur_item.sort_name,
                "musicbrainz_id": item.musicbrainz_id or cur_item.musicbrainz_id,
                "metadata": json_serializer(metadata),
                "provider_ids": json_serializer(provider_ids),
            },
        )
        self.logger.debug("updated %s in database: %s", item.name, item_id)
        return await self.get_db_item(item_id)
-
- async def delete_db_item(self, item_id: int, recursive: bool = False) -> None:
- """Delete record from the database."""
- # check artist albums
- db_rows = await self.mass.database.get_rows_from_query(
- f"SELECT item_id FROM {TABLE_ALBUMS} WHERE artists LIKE '%\"{item_id}\"%'",
- limit=5000,
- )
- assert not (db_rows and not recursive), "Albums attached to artist"
- for db_row in db_rows:
- try:
- await self.mass.music.albums.delete_db_item(
- db_row["item_id"], recursive
- )
- except MediaNotFoundError:
- pass
-
- # check artist tracks
- db_rows = await self.mass.database.get_rows_from_query(
- f"SELECT item_id FROM {TABLE_TRACKS} WHERE artists LIKE '%\"{item_id}\"%'",
- limit=5000,
- )
- assert not (db_rows and not recursive), "Tracks attached to artist"
- for db_row in db_rows:
- try:
- await self.mass.music.albums.delete_db_item(
- db_row["item_id"], recursive
- )
- except MediaNotFoundError:
- pass
-
- # delete the artist itself from db
- await super().delete_db_item(item_id)
-
- async def _get_provider_dynamic_tracks(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- limit: int = 25,
- ):
- """Generate a dynamic list of tracks based on the artist's top tracks."""
- prov = self.mass.music.get_provider(provider_id or provider)
- if (
- not prov
- or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
- ):
- return []
- top_tracks = await self.get_provider_artist_toptracks(
- item_id=item_id, provider=provider, provider_id=provider_id
- )
- # Grab a random track from the album that we use to obtain similar tracks for
- track = choice(top_tracks)
- # Calculate no of songs to grab from each list at a 10/90 ratio
- total_no_of_tracks = limit + limit % 2
- no_of_artist_tracks = int(total_no_of_tracks * 10 / 100)
- no_of_similar_tracks = int(total_no_of_tracks * 90 / 100)
- # Grab similar tracks from the music provider
- similar_tracks = await prov.get_similar_tracks(
- prov_track_id=track.item_id, limit=no_of_similar_tracks
- )
- # Merge album content with similar tracks
- dynamic_playlist = [
- *sorted(top_tracks, key=lambda n: random())[:no_of_artist_tracks],
- *sorted(similar_tracks, key=lambda n: random())[:no_of_similar_tracks],
- ]
- return sorted(dynamic_playlist, key=lambda n: random())
-
- async def _get_dynamic_tracks(
- self, media_item: Artist, limit: int = 25
- ) -> List[Track]:
- """Get dynamic list of tracks for given item, fallback/default implementation."""
- # TODO: query metadata provider(s) to get similar tracks (or tracks from similar artists)
- raise UnsupportedFeaturedException(
- "No Music Provider found that supports requesting similar tracks."
- )
-
    async def _match(self, db_artist: Artist, provider: MusicProvider) -> bool:
        """
        Try to find matching artists on given provider for the provided (database) artist.

        Returns True when a match was found (and the db record was updated
        with the provider's artist details), False otherwise.
        """
        self.logger.debug(
            "Trying to match artist %s on provider %s", db_artist.name, provider.name
        )
        # try to get a match with some reference tracks of this artist
        for ref_track in await self.toptracks(
            db_artist.item_id, db_artist.provider, artist=db_artist
        ):
            # make sure we have a full track
            if isinstance(ref_track.album, ItemMapping):
                ref_track = await self.mass.music.tracks.get(
                    ref_track.item_id, ref_track.provider
                )
            # search with several query variants, most specific first
            for search_str in (
                f"{db_artist.name} - {ref_track.name}",
                f"{db_artist.name} {ref_track.name}",
                ref_track.name,
            ):
                search_results = await self.mass.music.tracks.search(
                    search_str, provider.type
                )
                for search_result_item in search_results:
                    # track name must match exactly (by sort name)
                    if search_result_item.sort_name != ref_track.sort_name:
                        continue
                    # get matching artist from track
                    for search_item_artist in search_result_item.artists:
                        if search_item_artist.sort_name != db_artist.sort_name:
                            continue
                        # 100% album match
                        # get full artist details so we have all metadata
                        prov_artist = await self.get_provider_item(
                            search_item_artist.item_id, search_item_artist.provider
                        )
                        await self.update_db_item(db_artist.item_id, prov_artist)
                        return True
        # try to get a match with some reference albums of this artist
        artist_albums = await self.albums(
            db_artist.item_id, db_artist.provider, artist=db_artist
        )
        for ref_album in artist_albums:
            # compilations list many artists and make poor references
            if ref_album.album_type == AlbumType.COMPILATION:
                continue
            if ref_album.artist is None:
                continue
            for search_str in (
                ref_album.name,
                f"{db_artist.name} - {ref_album.name}",
                f"{db_artist.name} {ref_album.name}",
            ):
                search_result = await self.mass.music.albums.search(
                    search_str, provider.type
                )
                for search_result_item in search_result:
                    if search_result_item.artist is None:
                        continue
                    if search_result_item.sort_name != ref_album.sort_name:
                        continue
                    # artist must match 100%
                    if (
                        search_result_item.artist.sort_name
                        != ref_album.artist.sort_name
                    ):
                        continue
                    # 100% match
                    # get full artist details so we have all metadata
                    prov_artist = await self.get_provider_item(
                        search_result_item.artist.item_id,
                        search_result_item.artist.provider,
                    )
                    await self.update_db_item(db_artist.item_id, prov_artist)
                    return True
        # no match found on this provider
        return False
+++ /dev/null
-"""Manage MediaItems of type Playlist."""
-from __future__ import annotations
-
-from ctypes import Union
-from random import choice, random
-from time import time
-from typing import Any, List, Optional, Tuple
-
-from music_assistant.helpers.database import TABLE_PLAYLISTS
-from music_assistant.helpers.json import json_serializer
-from music_assistant.helpers.uri import create_uri
-from music_assistant.models.enums import (
- EventType,
- MediaType,
- MusicProviderFeature,
- ProviderType,
-)
-from music_assistant.models.errors import (
- InvalidDataError,
- MediaNotFoundError,
- ProviderUnavailableError,
- UnsupportedFeaturedException,
-)
-from music_assistant.models.event import MassEvent
-from music_assistant.models.media_controller import MediaControllerBase
-from music_assistant.models.media_items import Playlist, Track
-
-
class PlaylistController(MediaControllerBase[Playlist]):
    """Controller managing MediaItems of type Playlist."""

    db_table = TABLE_PLAYLISTS
    media_type = MediaType.PLAYLIST
    item_cls = Playlist

    async def get_playlist_by_name(self, name: str) -> Playlist | None:
        """Get in-library playlist by name."""
        # NOTE(review): this returns the raw database row rather than a parsed
        # Playlist object - confirm callers rely on that before changing it.
        return await self.mass.database.get_row(self.db_table, {"name": name})

    async def tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> List[Track]:
        """Return playlist tracks for the given provider playlist id."""
        playlist = await self.get(item_id, provider, provider_id)
        if not playlist.provider_ids:
            # edge case: playlist without any provider mapping
            # (also prevents StopIteration, which PEP 479 turns into RuntimeError here)
            return []
        # a playlist can only have one provider (for now)
        prov = next(iter(playlist.provider_ids))
        return await self._get_provider_playlist_tracks(
            prov.item_id,
            provider=prov.prov_type,
            provider_id=prov.prov_id,
            cache_checksum=playlist.metadata.checksum,
        )

    async def add(self, item: Playlist) -> Playlist:
        """Add playlist to local db and return the new database item."""
        item.metadata.last_refresh = int(time())
        await self.mass.metadata.get_playlist_metadata(item)
        existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
        if existing:
            db_item = await self.update_db_item(existing.item_id, item)
        else:
            db_item = await self.add_db_item(item)
        self.mass.signal_event(
            MassEvent(
                EventType.MEDIA_ITEM_UPDATED
                if existing
                else EventType.MEDIA_ITEM_ADDED,
                db_item.uri,
                db_item,
            )
        )
        return db_item

    async def create(
        self, name: str, prov_id: ProviderType | str | None = None
    ) -> Playlist:
        """
        Create new playlist on a provider.

        name: name for the new playlist.
        prov_id: provider (type or instance id) that should host the playlist.
        """
        # NOTE: annotation fixed to PEP 604 union syntax; the previous
        # Union[...] resolved to `ctypes.Union` (wrong import), which only went
        # unnoticed because `from __future__ import annotations` keeps it lazy.
        # if prov_id is omitted, prefer file
        if prov_id:
            provider = self.mass.music.get_provider(prov_id)
        else:
            try:
                provider = self.mass.music.get_provider(ProviderType.FILESYSTEM_LOCAL)
            except ProviderUnavailableError:
                # fallback: first provider that supports playlist creation
                provider = next(
                    (
                        x
                        for x in self.mass.music.providers
                        if MusicProviderFeature.PLAYLIST_CREATE in x.supported_features
                    ),
                    None,
                )
                if provider is None:
                    raise ProviderUnavailableError(
                        "No provider available which allows playlists creation."
                    )

        return await provider.create_playlist(name)

    async def add_playlist_tracks(self, db_playlist_id: str, uris: List[str]) -> None:
        """Add multiple tracks to playlist. Creates background tasks to process the action."""
        playlist = await self.get_db_item(db_playlist_id)
        if not playlist:
            raise MediaNotFoundError(f"Playlist with id {db_playlist_id} not found")
        if not playlist.is_editable:
            raise InvalidDataError(f"Playlist {playlist.name} is not editable")
        # schedule one job per uri so failures do not abort the whole batch
        for uri in uris:
            job_desc = f"Add track {uri} to playlist {playlist.name}"
            self.mass.add_job(self.add_playlist_track(db_playlist_id, uri), job_desc)

    async def add_playlist_track(self, db_playlist_id: str, track_uri: str) -> None:
        """Add track to playlist - make sure we dont add duplicates."""
        # we can only edit playlists that are in the database (marked as editable)
        playlist = await self.get_db_item(db_playlist_id)
        if not playlist:
            raise MediaNotFoundError(f"Playlist with id {db_playlist_id} not found")
        if not playlist.is_editable:
            raise InvalidDataError(f"Playlist {playlist.name} is not editable")
        # make sure we have recent full track details
        track = await self.mass.music.get_item_by_uri(track_uri, lazy=False)
        assert track.media_type == MediaType.TRACK
        # a playlist can only have one provider (for now)
        playlist_prov = next(iter(playlist.provider_ids))
        # grab all existing track ids in the playlist so we can check for duplicates
        cur_playlist_track_ids = set()
        for item in await self.tracks(playlist_prov.item_id, playlist_prov.prov_type):
            cur_playlist_track_ids.update(
                {
                    i.item_id
                    for i in item.provider_ids
                    if i.prov_id == playlist_prov.prov_id
                }
            )
        # check for duplicates
        for track_prov in track.provider_ids:
            if (
                track_prov.prov_type == playlist_prov.prov_type
                and track_prov.item_id in cur_playlist_track_ids
            ):
                # BUGFIX: message was missing the f-prefix, so the playlist
                # name was never interpolated into the error text
                raise InvalidDataError(
                    f"Track already exists in playlist {playlist.name}"
                )
        # add track to playlist
        # we can only add a track to a provider playlist if track is available on that provider
        # a track can contain multiple versions on the same provider
        # simply sort by quality and just add the first one (assuming track is still available)
        track_id_to_add = None
        for track_version in sorted(
            track.provider_ids, key=lambda x: x.quality, reverse=True
        ):
            # BUGFIX: check availability of this specific version -
            # previously this tested the loop-invariant `track.available`
            if not track_version.available:
                continue
            if playlist_prov.prov_type.is_file():
                # the file provider can handle uri's from all providers so simply add the uri
                track_id_to_add = track_version.url or create_uri(
                    MediaType.TRACK,
                    track_version.prov_type,
                    track_version.item_id,
                )
                break
            if track_version.prov_type == playlist_prov.prov_type:
                track_id_to_add = track_version.item_id
                break
        if not track_id_to_add:
            raise MediaNotFoundError(
                f"Track is not available on provider {playlist_prov.prov_type}"
            )
        # actually add the tracks to the playlist on the provider
        provider = self.mass.music.get_provider(playlist_prov.prov_id)
        await provider.add_playlist_tracks(playlist_prov.item_id, [track_id_to_add])
        # invalidate cache by updating the checksum
        await self.get(
            db_playlist_id, provider=ProviderType.DATABASE, force_refresh=True
        )

    async def remove_playlist_tracks(
        self, db_playlist_id: str, positions_to_remove: Tuple[int]
    ) -> None:
        """Remove multiple tracks from playlist (by their positions)."""
        playlist = await self.get_db_item(db_playlist_id)
        if not playlist:
            raise MediaNotFoundError(f"Playlist with id {db_playlist_id} not found")
        if not playlist.is_editable:
            raise InvalidDataError(f"Playlist {playlist.name} is not editable")
        for prov in playlist.provider_ids:
            provider = self.mass.music.get_provider(prov.prov_id)
            if (
                MusicProviderFeature.PLAYLIST_TRACKS_EDIT
                not in provider.supported_features
            ):
                self.logger.warning(
                    "Provider %s does not support editing playlists",
                    prov.prov_type.value,
                )
                continue
            await provider.remove_playlist_tracks(prov.item_id, positions_to_remove)
        # invalidate cache by updating the checksum
        await self.get(
            db_playlist_id, provider=ProviderType.DATABASE, force_refresh=True
        )

    async def add_db_item(
        self, item: Playlist, overwrite_existing: bool = False
    ) -> Playlist:
        """
        Add a new record to the database.

        Updates and returns the existing record when a playlist with the
        same name and owner already exists.
        """
        # lock guards against two concurrent adds inserting duplicates
        async with self._db_add_lock:
            match = {"name": item.name, "owner": item.owner}
            if cur_item := await self.mass.database.get_row(self.db_table, match):
                # update existing
                return await self.update_db_item(
                    cur_item["item_id"], item, overwrite=overwrite_existing
                )

            # insert new item
            new_item = await self.mass.database.insert(self.db_table, item.to_db_row())
            item_id = new_item["item_id"]
            self.logger.debug("added %s to database", item.name)
            # return created object
            return await self.get_db_item(item_id)

    async def update_db_item(
        self,
        item_id: int,
        item: Playlist,
        overwrite: bool = False,
    ) -> Playlist:
        """
        Update Playlist record in the database.

        overwrite: when True replace metadata/provider ids instead of merging.
        """
        cur_item = await self.get_db_item(item_id)
        if overwrite:
            metadata = item.metadata
            provider_ids = item.provider_ids
        else:
            metadata = cur_item.metadata.update(item.metadata)
            provider_ids = {*cur_item.provider_ids, *item.provider_ids}

        await self.mass.database.update(
            self.db_table,
            {"item_id": item_id},
            {
                # always prefer name/owner from updated item here
                "name": item.name,
                "sort_name": item.sort_name,
                "owner": item.owner,
                "is_editable": item.is_editable,
                "metadata": json_serializer(metadata),
                "provider_ids": json_serializer(provider_ids),
            },
        )
        self.logger.debug("updated %s in database: %s", item.name, item_id)
        return await self.get_db_item(item_id)

    async def _get_provider_playlist_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        cache_checksum: Any = None,
    ) -> List[Track]:
        """Return playlist tracks for the given provider playlist id."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if not prov:
            return []
        # prefer cache items (if any)
        cache_key = f"{prov.id}.playlist.{item_id}.tracks"
        if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
            return [Track.from_dict(x) for x in cache]
        # no items in cache - get listing from provider
        items = await prov.get_playlist_tracks(item_id)
        # double check if position set
        if items:
            assert (
                items[0].position is not None
            ), "Playlist items require position to be set"
        # store (serializable items) in cache
        self.mass.create_task(
            self.mass.cache.set(
                cache_key, [x.to_dict() for x in items], checksum=cache_checksum
            )
        )
        return items

    async def _get_provider_dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ):
        """Generate a dynamic list of tracks based on the playlist content."""
        prov = self.mass.music.get_provider(provider_id or provider)
        if (
            not prov
            or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
        ):
            return []
        playlist_tracks = await self._get_provider_playlist_tracks(
            item_id=item_id, provider=provider, provider_id=provider_id
        )
        if not playlist_tracks:
            # BUGFIX: empty playlist has no seed track -
            # random.choice() on an empty list raises IndexError
            return []
        # Grab a random track from the playlist that we use to obtain similar tracks for
        track = choice(playlist_tracks)
        # Calculate no of songs to grab from each list at a 50/50 ratio
        total_no_of_tracks = limit + limit % 2
        tracks_per_list = int(total_no_of_tracks / 2)
        # Grab similar tracks from the music provider
        similar_tracks = await prov.get_similar_tracks(
            prov_track_id=track.item_id, limit=tracks_per_list
        )
        # Merge playlist content with similar tracks (shuffled samples of both)
        dynamic_playlist = [
            *sorted(playlist_tracks, key=lambda n: random())[:tracks_per_list],
            *sorted(similar_tracks, key=lambda n: random())[:tracks_per_list],
        ]
        return sorted(dynamic_playlist, key=lambda n: random())

    async def _get_dynamic_tracks(
        self, media_item: Playlist, limit: int = 25
    ) -> List[Track]:
        """Get dynamic list of tracks for given item, fallback/default implementation."""
        # TODO: query metadata provider(s) to get similar tracks (or tracks from similar artists)
        raise UnsupportedFeaturedException(
            "No Music Provider found that supports requesting similar tracks."
        )
+++ /dev/null
-"""Manage MediaItems of type Radio."""
-from __future__ import annotations
-
-import asyncio
-from time import time
-from typing import List, Optional
-
-from music_assistant.helpers.compare import loose_compare_strings
-from music_assistant.helpers.database import TABLE_RADIOS
-from music_assistant.helpers.json import json_serializer
-from music_assistant.models.enums import EventType, MediaType, ProviderType
-from music_assistant.models.event import MassEvent
-from music_assistant.models.media_controller import MediaControllerBase
-from music_assistant.models.media_items import Radio, Track
-
-
class RadioController(MediaControllerBase[Radio]):
    """Controller managing MediaItems of type Radio."""

    db_table = TABLE_RADIOS
    media_type = MediaType.RADIO
    item_cls = Radio

    async def get_radio_by_name(self, name: str) -> Radio | None:
        """Get in-library radio by name."""
        # NOTE(review): this returns the raw database row rather than a parsed
        # Radio object - confirm callers rely on that before changing it.
        return await self.mass.database.get_row(self.db_table, {"name": name})

    async def versions(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
    ) -> List[Radio]:
        """Return all versions of a radio station we can find on all providers."""
        assert provider or provider_id, "Provider type or ID must be specified"
        radio = await self.get(item_id, provider, provider_id)
        # perform a search on all provider(types) to collect all versions/variants
        prov_types = {item.type for item in self.mass.music.providers}
        all_versions = {
            prov_item.item_id: prov_item
            for prov_items in await asyncio.gather(
                *[self.search(radio.name, prov_type) for prov_type in prov_types]
            )
            for prov_item in prov_items
            if loose_compare_strings(radio.name, prov_item.name)
        }
        # make sure that the 'base' version is included
        for prov_version in radio.provider_ids:
            if prov_version.item_id in all_versions:
                continue
            radio_copy = Radio.from_dict(radio.to_dict())
            radio_copy.item_id = prov_version.item_id
            radio_copy.provider = prov_version.prov_type
            radio_copy.provider_ids = {prov_version}
            all_versions[prov_version.item_id] = radio_copy

        # BUGFIX: return a real list to match the annotated List[Radio]
        # (a dict values view is not indexable/serializable like a list)
        return list(all_versions.values())

    async def add(self, item: Radio) -> Radio:
        """Add radio to local db and return the new database item."""
        item.metadata.last_refresh = int(time())
        await self.mass.metadata.get_radio_metadata(item)
        existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
        if existing:
            db_item = await self.update_db_item(existing.item_id, item)
        else:
            db_item = await self.add_db_item(item)
        self.mass.signal_event(
            MassEvent(
                EventType.MEDIA_ITEM_UPDATED
                if existing
                else EventType.MEDIA_ITEM_ADDED,
                db_item.uri,
                db_item,
            )
        )
        return db_item

    async def add_db_item(self, item: Radio, overwrite_existing: bool = False) -> Radio:
        """
        Add a new item record to the database.

        Updates and returns the existing record when a radio with the same
        name already exists.
        """
        assert item.provider_ids
        # lock guards against two concurrent adds inserting duplicates
        async with self._db_add_lock:
            match = {"name": item.name}
            if cur_item := await self.mass.database.get_row(self.db_table, match):
                # update existing
                return await self.update_db_item(
                    cur_item["item_id"], item, overwrite=overwrite_existing
                )

            # insert new item
            new_item = await self.mass.database.insert(self.db_table, item.to_db_row())
            item_id = new_item["item_id"]
            self.logger.debug("added %s to database", item.name)
            # return created object
            return await self.get_db_item(item_id)

    async def update_db_item(
        self,
        item_id: int,
        item: Radio,
        overwrite: bool = False,
    ) -> Radio:
        """
        Update Radio record in the database.

        overwrite: when True replace metadata/provider ids instead of merging.
        """
        cur_item = await self.get_db_item(item_id)
        if overwrite:
            metadata = item.metadata
            provider_ids = item.provider_ids
        else:
            metadata = cur_item.metadata.update(item.metadata)
            provider_ids = {*cur_item.provider_ids, *item.provider_ids}

        match = {"item_id": item_id}
        await self.mass.database.update(
            self.db_table,
            match,
            {
                # always prefer name from updated item here
                "name": item.name,
                "sort_name": item.sort_name,
                "metadata": json_serializer(metadata),
                "provider_ids": json_serializer(provider_ids),
            },
        )
        self.logger.debug("updated %s in database: %s", item.name, item_id)
        return await self.get_db_item(item_id)

    async def _get_provider_dynamic_tracks(
        self,
        item_id: str,
        provider: Optional[ProviderType] = None,
        provider_id: Optional[str] = None,
        limit: int = 25,
    ) -> List[Track]:
        """Generate a dynamic list of tracks based on the item's content."""
        raise NotImplementedError("Dynamic tracks not supported for Radio MediaItem")

    async def _get_dynamic_tracks(
        self, media_item: Radio, limit: int = 25
    ) -> List[Track]:
        """Get dynamic list of tracks for given item, fallback/default implementation."""
        raise NotImplementedError("Dynamic tracks not supported for Radio MediaItem")
+++ /dev/null
-"""Manage MediaItems of type Track."""
-from __future__ import annotations
-
-import asyncio
-from typing import List, Optional, Union
-
-from music_assistant.helpers.compare import (
- compare_artists,
- compare_track,
- loose_compare_strings,
-)
-from music_assistant.helpers.database import TABLE_TRACKS
-from music_assistant.helpers.json import json_serializer
-from music_assistant.models.enums import (
- EventType,
- MediaType,
- MusicProviderFeature,
- ProviderType,
-)
-from music_assistant.models.errors import (
- MediaNotFoundError,
- UnsupportedFeaturedException,
-)
-from music_assistant.models.event import MassEvent
-from music_assistant.models.media_controller import MediaControllerBase
-from music_assistant.models.media_items import (
- Album,
- Artist,
- ItemMapping,
- Track,
- TrackAlbumMapping,
-)
-
-
-class TracksController(MediaControllerBase[Track]):
- """Controller managing MediaItems of type Track."""
-
- db_table = TABLE_TRACKS
- media_type = MediaType.TRACK
- item_cls = Track
-
- async def get(self, *args, **kwargs) -> Track:
- """Return (full) details for a single media item."""
- track = await super().get(*args, **kwargs)
- # append full album details to full track item
- if track.album:
- try:
- track.album = await self.mass.music.albums.get(
- track.album.item_id, track.album.provider
- )
- except MediaNotFoundError:
- # edge case where playlist track has invalid albumdetails
- self.logger.warning("Unable to fetch album details %s", track.album.uri)
- # append full artist details to full track item
- full_artists = []
- for artist in track.artists:
- full_artists.append(
- await self.mass.music.artists.get(artist.item_id, artist.provider)
- )
- track.artists = full_artists
- return track
-
- async def add(self, item: Track) -> Track:
- """Add track to local db and return the new database item."""
- # make sure we have artists
- assert item.artists
- # grab additional metadata
- await self.mass.metadata.get_track_metadata(item)
- existing = await self.get_db_item_by_prov_id(item.item_id, item.provider)
- if existing:
- db_item = await self.update_db_item(existing.item_id, item)
- else:
- db_item = await self.add_db_item(item)
- # also fetch same track on all providers (will also get other quality versions)
- await self._match(db_item)
- # return final db_item after all match/metadata actions
- db_item = await self.get_db_item(db_item.item_id)
- self.mass.signal_event(
- MassEvent(
- EventType.MEDIA_ITEM_UPDATED
- if existing
- else EventType.MEDIA_ITEM_ADDED,
- db_item.uri,
- db_item,
- )
- )
- return db_item
-
- async def versions(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> List[Track]:
- """Return all versions of a track we can find on all providers."""
- assert provider or provider_id, "Provider type or ID must be specified"
- track = await self.get(item_id, provider, provider_id)
- # perform a search on all provider(types) to collect all versions/variants
- prov_types = {item.type for item in self.mass.music.providers}
- search_query = f"{track.artist.name} - {track.name}"
- all_versions = {
- prov_item.item_id: prov_item
- for prov_items in await asyncio.gather(
- *[self.search(search_query, prov_type) for prov_type in prov_types]
- )
- for prov_item in prov_items
- if loose_compare_strings(track.name, prov_item.name)
- and compare_artists(prov_item.artists, track.artists, any_match=True)
- }
- # make sure that the 'base' version is included
- for prov_version in track.provider_ids:
- if prov_version.item_id in all_versions:
- continue
- # grab full item here including album details etc
- prov_track = await self.get_provider_item(
- prov_version.item_id, prov_version.prov_id
- )
- all_versions[prov_version.item_id] = prov_track
-
- # return the aggregated result
- return all_versions.values()
-
    async def _match(self, db_track: Track) -> None:
        """
        Try to find matching track on all providers for the provided (database) track_id.

        This is used to link objects of different providers/qualities together.
        """
        if db_track.provider != ProviderType.DATABASE:
            return  # Matching only supported for database items
        for provider in self.mass.music.providers:
            if MusicProviderFeature.SEARCH not in provider.supported_features:
                # provider can not be searched
                continue
            self.logger.debug(
                "Trying to match track %s on provider %s", db_track.name, provider.name
            )
            match_found = False
            # search with several query variants, least specific first
            for search_str in (
                db_track.name,
                f"{db_track.artists[0].name} - {db_track.name}",
                f"{db_track.artists[0].name} {db_track.name}",
            ):
                if match_found:
                    break
                search_result = await self.search(search_str, provider.type)
                for search_result_item in search_result:
                    if not search_result_item.available:
                        continue
                    if compare_track(search_result_item, db_track):
                        # 100% match, we can simply update the db with additional provider ids
                        # NOTE: no break here on purpose - multiple matches may exist
                        # (e.g. other quality versions on the same provider)
                        match_found = True
                        await self.update_db_item(db_track.item_id, search_result_item)

            if not match_found:
                self.logger.debug(
                    "Could not find match for Track %s on provider %s",
                    db_track.name,
                    provider.name,
                )
-
- async def _get_provider_dynamic_tracks(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- limit: int = 25,
- ):
- """Generate a dynamic list of tracks based on the track."""
- prov = self.mass.music.get_provider(provider_id or provider)
- if (
- not prov
- or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
- ):
- return []
- # Grab similar tracks from the music provider
- similar_tracks = await prov.get_similar_tracks(
- prov_track_id=item_id, limit=limit
- )
- return similar_tracks
-
- async def _get_dynamic_tracks(
- self, media_item: Track, limit: int = 25
- ) -> List[Track]:
- """Get dynamic list of tracks for given item, fallback/default implementation."""
- # TODO: query metadata provider(s) to get similar tracks (or tracks from similar artists)
- raise UnsupportedFeaturedException(
- "No Music Provider found that supports requesting similar tracks."
- )
-
- async def add_db_item(self, item: Track, overwrite_existing: bool = False) -> Track:
- """Add a new item record to the database."""
- assert isinstance(item, Track), "Not a full Track object"
- assert item.artists, "Track is missing artist(s)"
- assert item.provider_ids, "Track is missing provider id(s)"
- async with self._db_add_lock:
- cur_item = None
-
- # always try to grab existing item by external_id
- if item.musicbrainz_id:
- match = {"musicbrainz_id": item.musicbrainz_id}
- cur_item = await self.mass.database.get_row(self.db_table, match)
- for isrc in item.isrcs:
- match = {"isrc": isrc}
- cur_item = await self.mass.database.get_row(self.db_table, match)
- if not cur_item:
- # fallback to matching
- match = {"sort_name": item.sort_name}
- for row in await self.mass.database.get_rows(self.db_table, match):
- row_track = Track.from_db_row(row)
- if compare_track(row_track, item):
- cur_item = row_track
- break
- if cur_item:
- # update existing
- return await self.update_db_item(
- cur_item.item_id, item, overwrite=overwrite_existing
- )
-
- # no existing match found: insert new item
- track_artists = await self._get_track_artists(item)
- track_albums = await self._get_track_albums(
- item, overwrite=overwrite_existing
- )
- if track_artists:
- sort_artist = track_artists[0].sort_name
- else:
- sort_artist = ""
- if track_albums:
- sort_album = track_albums[0].sort_name
- else:
- sort_album = ""
- new_item = await self.mass.database.insert(
- self.db_table,
- {
- **item.to_db_row(),
- "artists": json_serializer(track_artists),
- "albums": json_serializer(track_albums),
- "sort_artist": sort_artist,
- "sort_album": sort_album,
- },
- )
- item_id = new_item["item_id"]
- # return created object
- self.logger.debug("added %s to database: %s", item.name, item_id)
- return await self.get_db_item(item_id)
-
- async def update_db_item(
- self,
- item_id: int,
- item: Track,
- overwrite: bool = False,
- ) -> Track:
- """Update Track record in the database, merging data."""
- cur_item = await self.get_db_item(item_id)
-
- if overwrite:
- metadata = item.metadata
- provider_ids = item.provider_ids
- metadata.last_refresh = None
- # we store a mapping to artists/albums on the item for easier access/listings
- track_artists = await self._get_track_artists(item, overwrite=True)
- track_albums = await self._get_track_albums(item, overwrite=True)
- else:
- metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
- provider_ids = {*cur_item.provider_ids, *item.provider_ids}
- track_artists = await self._get_track_artists(cur_item, item)
- track_albums = await self._get_track_albums(cur_item, item)
-
- await self.mass.database.update(
- self.db_table,
- {"item_id": item_id},
- {
- "name": item.name if overwrite else cur_item.name,
- "sort_name": item.sort_name if overwrite else cur_item.sort_name,
- "version": item.version if overwrite else cur_item.version,
- "duration": item.duration if overwrite else cur_item.duration,
- "artists": json_serializer(track_artists),
- "albums": json_serializer(track_albums),
- "metadata": json_serializer(metadata),
- "provider_ids": json_serializer(provider_ids),
- "isrc": item.isrc or cur_item.isrc,
- },
- )
- self.logger.debug("updated %s in database: %s", item.name, item_id)
- return await self.get_db_item(item_id)
-
- async def _get_track_artists(
- self,
- base_track: Track,
- upd_track: Optional[Track] = None,
- overwrite: bool = False,
- ) -> List[ItemMapping]:
- """Extract all (unique) artists of track as ItemMapping."""
- if upd_track and upd_track.artists:
- track_artists = upd_track.artists
- else:
- track_artists = base_track.artists
- # use intermediate set to clear out duplicates
- return list(
- {await self._get_artist_mapping(x, overwrite) for x in track_artists}
- )
-
- async def _get_track_albums(
- self,
- base_track: Track,
- upd_track: Optional[Track] = None,
- overwrite: bool = False,
- ) -> List[TrackAlbumMapping]:
- """Extract all (unique) albums of track as TrackAlbumMapping."""
- track_albums: List[TrackAlbumMapping] = []
- # existing TrackAlbumMappings are starting point
- if base_track.albums:
- track_albums = base_track.albums
- elif upd_track and upd_track.albums:
- track_albums = upd_track.albums
- # append update item album if needed
- if upd_track and upd_track.album:
- mapping = await self._get_album_mapping(
- upd_track.album, overwrite=overwrite
- )
- mapping = TrackAlbumMapping.from_dict(
- {
- **mapping.to_dict(),
- "disc_number": upd_track.disc_number,
- "track_number": upd_track.track_number,
- }
- )
- if mapping not in track_albums:
- track_albums.append(mapping)
- # append base item album if needed
- elif base_track and base_track.album:
- mapping = await self._get_album_mapping(
- base_track.album, overwrite=overwrite
- )
- mapping = TrackAlbumMapping.from_dict(
- {
- **mapping.to_dict(),
- "disc_number": base_track.disc_number,
- "track_number": base_track.track_number,
- }
- )
- if mapping not in track_albums:
- track_albums.append(mapping)
-
- return track_albums
-
- async def _get_album_mapping(
- self,
- album: Union[Album, ItemMapping],
- overwrite: bool = False,
- ) -> ItemMapping:
- """Extract (database) album as ItemMapping."""
-
- if album.provider == ProviderType.DATABASE:
- if isinstance(album, ItemMapping):
- return album
- return ItemMapping.from_item(album)
-
- if overwrite:
- db_album = await self.mass.music.albums.add_db_item(
- album, overwrite_existing=True
- )
-
- if db_album := await self.mass.music.albums.get_db_item_by_prov_id(
- album.item_id, provider=album.provider
- ):
- return ItemMapping.from_item(db_album)
-
- db_album = await self.mass.music.albums.add_db_item(
- album, overwrite_existing=overwrite
- )
- return ItemMapping.from_item(db_album)
-
- async def _get_artist_mapping(
- self, artist: Union[Artist, ItemMapping], overwrite: bool = False
- ) -> ItemMapping:
- """Extract (database) track artist as ItemMapping."""
-
- if artist.provider == ProviderType.DATABASE:
- if isinstance(artist, ItemMapping):
- return artist
- return ItemMapping.from_item(artist)
-
- if overwrite:
- artist = await self.mass.music.artists.add_db_item(
- artist, overwrite_existing=True
- )
-
- if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
- artist.item_id, provider=artist.provider
- ):
- return ItemMapping.from_item(db_artist)
-
- db_artist = await self.mass.music.artists.add_db_item(artist)
- return ItemMapping.from_item(db_artist)
+++ /dev/null
-"""Provides a simple stateless caching system."""
-from __future__ import annotations
-
-import asyncio
-import functools
-import json
-import time
-from collections import OrderedDict
-from collections.abc import MutableMapping
-from typing import TYPE_CHECKING, Any, Iterator, Optional
-
-from music_assistant.helpers.database import TABLE_CACHE
-
-if TYPE_CHECKING:
- from music_assistant.mass import MusicAssistant
-
-
-class Cache:
- """Basic cache using both memory and database."""
-
- def __init__(self, mass: MusicAssistant) -> None:
- """Initialize our caching class."""
- self.mass = mass
- self.logger = mass.logger.getChild("cache")
- self._mem_cache = MemoryCache(500)
-
- async def setup(self) -> None:
- """Async initialize of cache module."""
- self.__schedule_cleanup_task()
-
- async def get(self, cache_key: str, checksum: Optional[str] = None, default=None):
- """
- Get object from cache and return the results.
-
- cache_key: the (unique) name of the cache object as reference
- checkum: optional argument to check if the checksum in the
- cacheobject matches the checkum provided
- """
- cur_time = int(time.time())
- if checksum is not None and not isinstance(checksum, str):
- checksum = str(checksum)
-
- # try memory cache first
- cache_data = self._mem_cache.get(cache_key)
- if (
- cache_data
- and (not checksum or cache_data[1] == checksum)
- and cache_data[2] >= cur_time
- ):
- return cache_data[0]
- # fall back to db cache
- if db_row := await self.mass.database.get_row(TABLE_CACHE, {"key": cache_key}):
- if (
- not checksum
- or db_row["checksum"] == checksum
- and db_row["expires"] >= cur_time
- ):
- try:
- data = await asyncio.get_running_loop().run_in_executor(
- None, json.loads, db_row["data"]
- )
- except Exception as exc: # pylint: disable=broad-except
- self.logger.exception(
- "Error parsing cache data for %s", cache_key, exc_info=exc
- )
- else:
- # also store in memory cache for faster access
- self._mem_cache[cache_key] = (
- data,
- db_row["checksum"],
- db_row["expires"],
- )
- return data
- return default
-
- async def set(self, cache_key, data, checksum="", expiration=(86400 * 30)):
- """Set data in cache."""
- if not isinstance(checksum, str):
- checksum = str(checksum)
- expires = int(time.time() + expiration)
- self._mem_cache[cache_key] = (data, checksum, expires)
- if (expires - time.time()) < 3600 * 4:
- # do not cache items in db with short expiration
- return
- data = await asyncio.get_running_loop().run_in_executor(None, json.dumps, data)
- await self.mass.database.insert(
- TABLE_CACHE,
- {"key": cache_key, "expires": expires, "checksum": checksum, "data": data},
- allow_replace=True,
- )
-
- async def delete(self, cache_key):
- """Delete data from cache."""
- self._mem_cache.pop(cache_key, None)
- await self.mass.database.delete(TABLE_CACHE, {"key": cache_key})
-
- async def clear(self, key_filter: Optional[str] = None) -> None:
- """Clear all/partial items from cache."""
- self._mem_cache = {}
- query = f"key LIKE '%{key_filter}%'" if key_filter else None
- await self.mass.database.delete(TABLE_CACHE, query=query)
-
- async def auto_cleanup(self):
- """Sceduled auto cleanup task."""
- # for now we simply reset the memory cache
- self._mem_cache = {}
- cur_timestamp = int(time.time())
- for db_row in await self.mass.database.get_rows(TABLE_CACHE):
- # clean up db cache object only if expired
- if db_row["expires"] < cur_timestamp:
- await self.delete(db_row["key"])
- # compact db
- await self.mass.database.execute("VACUUM")
-
- def __schedule_cleanup_task(self):
- """Schedule the cleanup task."""
- self.mass.add_job(self.auto_cleanup(), "Cleanup cache")
- # reschedule self
- self.mass.loop.call_later(3600, self.__schedule_cleanup_task)
-
-
-def use_cache(expiration=86400 * 30):
- """Return decorator that can be used to cache a method's result."""
-
- def wrapper(func):
- @functools.wraps(func)
- async def wrapped(*args, **kwargs):
- method_class = args[0]
- method_class_name = method_class.__class__.__name__
- cache_key_parts = [method_class_name, func.__name__]
- skip_cache = kwargs.pop("skip_cache", False)
- cache_checksum = kwargs.pop("cache_checksum", "")
- if len(args) > 1:
- cache_key_parts += args[1:]
- for key in sorted(kwargs.keys()):
- cache_key_parts.append(f"{key}{kwargs[key]}")
- cache_key = ".".join(cache_key_parts)
-
- cachedata = await method_class.cache.get(cache_key, checksum=cache_checksum)
-
- if not skip_cache and cachedata is not None:
- return cachedata
- result = await func(*args, **kwargs)
- asyncio.create_task(
- method_class.cache.set(
- cache_key, result, expiration=expiration, checksum=cache_checksum
- )
- )
- return result
-
- return wrapped
-
- return wrapper
-
-
-class MemoryCache(MutableMapping):
- """Simple limited in-memory cache implementation."""
-
- def __init__(self, maxlen: int):
- """Initialize."""
- self._maxlen = maxlen
- self.d = OrderedDict()
-
- @property
- def maxlen(self) -> int:
- """Return max length."""
- return self._maxlen
-
- def get(self, key: str, default: Any = None) -> Any:
- """Return item or default."""
- return self.d.get(key, default)
-
- def pop(self, key: str, default: Any = None) -> Any:
- """Pop item from collection."""
- return self.d.pop(key, default)
-
- def __getitem__(self, key: str) -> Any:
- """Get item."""
- self.d.move_to_end(key)
- return self.d[key]
-
- def __setitem__(self, key: str, value: Any) -> None:
- """Set item."""
- if key in self.d:
- self.d.move_to_end(key)
- elif len(self.d) == self.maxlen:
- self.d.popitem(last=False)
- self.d[key] = value
-
- def __delitem__(self, key) -> None:
- """Delete item."""
- del self.d[key]
-
- def __iter__(self) -> Iterator:
- """Iterate items."""
- return self.d.__iter__()
-
- def __len__(self) -> int:
- """Return length."""
- return len(self.d)
+++ /dev/null
-"""Database logic."""
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Union
-
-from databases import Database as Db
-from sqlalchemy.sql import ClauseElement
-
-if TYPE_CHECKING:
- from music_assistant.mass import MusicAssistant
-
-
-SCHEMA_VERSION = 18
-
-TABLE_TRACK_LOUDNESS = "track_loudness"
-TABLE_PLAYLOG = "playlog"
-TABLE_ARTISTS = "artists"
-TABLE_ALBUMS = "albums"
-TABLE_TRACKS = "tracks"
-TABLE_PLAYLISTS = "playlists"
-TABLE_RADIOS = "radios"
-TABLE_CACHE = "cache"
-TABLE_SETTINGS = "settings"
-TABLE_THUMBS = "thumbnails"
-
-
-class Database:
- """Class that holds the (logic to the) database."""
-
- def __init__(self, mass: MusicAssistant):
- """Initialize class."""
- self.url = mass.config.database_url
- self.mass = mass
- self.logger = mass.logger.getChild("db")
- # we maintain one global connection - otherwise we run into (dead)lock issues.
- # https://github.com/encode/databases/issues/456
- self._db = Db(self.url, timeout=360)
-
- async def setup(self) -> None:
- """Perform async initialization."""
- await self._db.connect()
- self.logger.info("Database connected.")
- await self._migrate()
-
- async def close(self) -> None:
- """Close db connection on exit."""
- self.logger.info("Database disconnected.")
- await self._db.disconnect()
-
- async def get_setting(self, key: str) -> str | None:
- """Get setting from settings table."""
- if db_row := await self.get_row(TABLE_SETTINGS, {"key": key}):
- return db_row["value"]
- return None
-
- async def set_setting(self, key: str, value: str) -> None:
- """Set setting in settings table."""
- if not isinstance(value, str):
- value = str(value)
- return await self.insert(
- TABLE_SETTINGS, {"key": key, "value": value}, allow_replace=True
- )
-
- async def get_rows(
- self,
- table: str,
- match: dict = None,
- order_by: str = None,
- limit: int = 500,
- offset: int = 0,
- ) -> List[Mapping]:
- """Get all rows for given table."""
- sql_query = f"SELECT * FROM {table}"
- if match is not None:
- sql_query += " WHERE " + " AND ".join((f"{x} = :{x}" for x in match))
- if order_by is not None:
- sql_query += f" ORDER BY {order_by}"
- sql_query += f" LIMIT {limit} OFFSET {offset}"
- return await self._db.fetch_all(sql_query, match)
-
- async def get_rows_from_query(
- self,
- query: str,
- params: Optional[dict] = None,
- limit: int = 500,
- offset: int = 0,
- ) -> List[Mapping]:
- """Get all rows for given custom query."""
- query = f"{query} LIMIT {limit} OFFSET {offset}"
- return await self._db.fetch_all(query, params)
-
- async def get_count_from_query(
- self,
- query: str,
- params: Optional[dict] = None,
- ) -> int:
- """Get row count for given custom query."""
- query = f"SELECT count() FROM ({query})"
- if result := await self._db.fetch_one(query, params):
- return result[0]
- return 0
-
- async def search(
- self, table: str, search: str, column: str = "name"
- ) -> List[Mapping]:
- """Search table by column."""
- sql_query = f"SELECT * FROM {table} WHERE {column} LIKE :search"
- params = {"search": f"%{search}%"}
- return await self._db.fetch_all(sql_query, params)
-
- async def get_row(self, table: str, match: Dict[str, Any]) -> Mapping | None:
- """Get single row for given table where column matches keys/values."""
- sql_query = f"SELECT * FROM {table} WHERE "
- sql_query += " AND ".join((f"{x} = :{x}" for x in match))
- return await self._db.fetch_one(sql_query, match)
-
- async def insert(
- self,
- table: str,
- values: Dict[str, Any],
- allow_replace: bool = False,
- ) -> Mapping:
- """Insert data in given table."""
- keys = tuple(values.keys())
- if allow_replace:
- sql_query = f'INSERT OR REPLACE INTO {table}({",".join(keys)})'
- else:
- sql_query = f'INSERT INTO {table}({",".join(keys)})'
- sql_query += f' VALUES ({",".join((f":{x}" for x in keys))})'
- await self.execute(sql_query, values)
- # return inserted/replaced item
- lookup_vals = {
- key: value
- for key, value in values.items()
- if value is not None and value != ""
- }
- return await self.get_row(table, lookup_vals)
-
- async def insert_or_replace(self, table: str, values: Dict[str, Any]) -> Mapping:
- """Insert or replace data in given table."""
- return await self.insert(table=table, values=values, allow_replace=True)
-
- async def update(
- self,
- table: str,
- match: Dict[str, Any],
- values: Dict[str, Any],
- ) -> Mapping:
- """Update record."""
- keys = tuple(values.keys())
- sql_query = f'UPDATE {table} SET {",".join((f"{x}=:{x}" for x in keys))} WHERE '
- sql_query += " AND ".join((f"{x} = :{x}" for x in match))
- await self.execute(sql_query, {**match, **values})
- # return updated item
- return await self.get_row(table, match)
-
- async def delete(
- self, table: str, match: Optional[dict] = None, query: Optional[str] = None
- ) -> None:
- """Delete data in given table."""
- assert not (query and "where" in query.lower())
- sql_query = f"DELETE FROM {table} "
- if match:
- sql_query += " WHERE " + " AND ".join((f"{x} = :{x}" for x in match))
- elif query and "query" not in query.lower():
- sql_query += "WHERE " + query
- elif query:
- sql_query += query
-
- await self.execute(sql_query, match)
-
- async def delete_where_query(self, table: str, query: Optional[str] = None) -> None:
- """Delete data in given table using given where clausule."""
- sql_query = f"DELETE FROM {table} WHERE {query}"
- await self.execute(sql_query)
-
- async def execute(
- self, query: Union[ClauseElement, str], values: dict = None
- ) -> Any:
- """Execute command on the database."""
- return await self._db.execute(query, values)
-
- async def _migrate(self):
- """Perform database migration actions if needed."""
- # always create db tables if they don't exist to prevent errors trying to access them later
- await self.__create_database_tables()
- try:
- if prev_version := await self.get_setting("version"):
- prev_version = int(prev_version)
- else:
- prev_version = 0
- except (KeyError, ValueError):
- prev_version = 0
-
- if SCHEMA_VERSION != prev_version:
- self.logger.info(
- "Performing database migration from %s to %s",
- prev_version,
- SCHEMA_VERSION,
- )
-
- if prev_version < 18:
- # too many changes, just recreate
- await self.execute(f"DROP TABLE IF EXISTS {TABLE_ARTISTS}")
- await self.execute(f"DROP TABLE IF EXISTS {TABLE_ALBUMS}")
- await self.execute(f"DROP TABLE IF EXISTS {TABLE_TRACKS}")
- await self.execute(f"DROP TABLE IF EXISTS {TABLE_PLAYLISTS}")
- await self.execute(f"DROP TABLE IF EXISTS {TABLE_RADIOS}")
- await self.execute(f"DROP TABLE IF EXISTS {TABLE_CACHE}")
- await self.execute(f"DROP TABLE IF EXISTS {TABLE_THUMBS}")
- await self.execute("DROP TABLE IF EXISTS provider_mappings")
- # recreate missing tables
- await self.__create_database_tables()
-
- # store current schema version
- await self.set_setting("version", str(SCHEMA_VERSION))
-
- async def __create_database_tables(self) -> None:
- """Init database tables."""
- await self.execute(
- """CREATE TABLE IF NOT EXISTS settings(
- key TEXT PRIMARY KEY,
- value TEXT
- );"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_TRACK_LOUDNESS}(
- item_id INTEGER NOT NULL,
- provider TEXT NOT NULL,
- loudness REAL,
- UNIQUE(item_id, provider));"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_PLAYLOG}(
- item_id INTEGER NOT NULL,
- provider TEXT NOT NULL,
- timestamp INTEGER DEFAULT 0,
- UNIQUE(item_id, provider));"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_ALBUMS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- sort_name TEXT NOT NULL,
- sort_artist TEXT,
- album_type TEXT,
- year INTEGER,
- version TEXT,
- in_library BOOLEAN DEFAULT 0,
- upc TEXT,
- musicbrainz_id TEXT,
- artists json,
- metadata json,
- provider_ids json,
- timestamp INTEGER DEFAULT 0
- );"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_ARTISTS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- sort_name TEXT NOT NULL,
- musicbrainz_id TEXT,
- in_library BOOLEAN DEFAULT 0,
- metadata json,
- provider_ids json,
- timestamp INTEGER DEFAULT 0
- );"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_TRACKS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- sort_name TEXT NOT NULL,
- sort_artist TEXT,
- sort_album TEXT,
- version TEXT,
- duration INTEGER,
- in_library BOOLEAN DEFAULT 0,
- isrc TEXT,
- musicbrainz_id TEXT,
- artists json,
- albums json,
- metadata json,
- provider_ids json,
- timestamp INTEGER DEFAULT 0
- );"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_PLAYLISTS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- sort_name TEXT NOT NULL,
- owner TEXT NOT NULL,
- is_editable BOOLEAN NOT NULL,
- in_library BOOLEAN DEFAULT 0,
- metadata json,
- provider_ids json,
- timestamp INTEGER DEFAULT 0,
- UNIQUE(name, owner)
- );"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_RADIOS}(
- item_id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL UNIQUE,
- sort_name TEXT NOT NULL,
- in_library BOOLEAN DEFAULT 0,
- metadata json,
- provider_ids json,
- timestamp INTEGER DEFAULT 0
- );"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_CACHE}(
- key TEXT UNIQUE NOT NULL, expires INTEGER NOT NULL, data TEXT, checksum TEXT NULL)"""
- )
- await self.execute(
- f"""CREATE TABLE IF NOT EXISTS {TABLE_THUMBS}(
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- path TEXT NOT NULL,
- size INTEGER DEFAULT 0,
- data BLOB,
- UNIQUE(path, size));"""
- )
- # create indexes
- # TODO: create indexes for the json columns ?
- await self.execute(
- "CREATE INDEX IF NOT EXISTS artists_in_library_idx on artists(in_library);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS albums_in_library_idx on albums(in_library);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS tracks_in_library_idx on tracks(in_library);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS playlists_in_library_idx on playlists(in_library);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS radios_in_library_idx on radios(in_library);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS artists_sort_name_idx on artists(sort_name);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS albums_sort_name_idx on albums(sort_name);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS tracks_sort_name_idx on tracks(sort_name);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS playlists_sort_name_idx on playlists(sort_name);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS radios_sort_name_idx on radios(sort_name);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS artists_musicbrainz_id_idx on artists(musicbrainz_id);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS albums_musicbrainz_id_idx on albums(musicbrainz_id);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS tracks_musicbrainz_id_idx on tracks(musicbrainz_id);"
- )
- await self.execute(
- "CREATE INDEX IF NOT EXISTS tracks_isrc_idx on tracks(isrc);"
- )
- await self.execute("CREATE INDEX IF NOT EXISTS albums_upc_idx on albums(upc);")
from PIL import Image
-from music_assistant.helpers.database import TABLE_THUMBS
+from music_assistant.controllers.database import TABLE_THUMBS
from music_assistant.helpers.tags import get_embedded_image
if TYPE_CHECKING:
import aiohttp
from music_assistant.constants import ROOT_LOGGER_NAME
-from music_assistant.controllers.metadata import MetaDataController
+from music_assistant.controllers.cache import CacheController
+from music_assistant.controllers.database import DatabaseController
+from music_assistant.controllers.metadata.metadata import MetaDataController
from music_assistant.controllers.music import MusicController
from music_assistant.controllers.players import PlayerController
from music_assistant.controllers.streams import StreamsController
-from music_assistant.helpers.cache import Cache
-from music_assistant.helpers.database import Database
from music_assistant.models.background_job import BackgroundJob
from music_assistant.models.config import MassConfig
from music_assistant.models.enums import EventType, JobStatus
self._jobs_event = asyncio.Event()
# init core controllers
- self.database = Database(self)
- self.cache = Cache(self)
+ self.database = DatabaseController(self)
+ self.cache = CacheController(self)
self.metadata = MetaDataController(self)
self.music = MusicController(self)
self.players = PlayerController(self)
+++ /dev/null
-"""Model for a base media_controller."""
-from __future__ import annotations
-
-import asyncio
-from abc import ABCMeta, abstractmethod
-from time import time
-from typing import (
- TYPE_CHECKING,
- AsyncGenerator,
- Generic,
- List,
- Optional,
- Tuple,
- TypeVar,
- Union,
-)
-
-from music_assistant.helpers.json import json_serializer
-from music_assistant.models.errors import MediaNotFoundError
-from music_assistant.models.event import MassEvent
-
-from .enums import EventType, MediaType, MusicProviderFeature, ProviderType
-from .media_items import MediaItemType, PagedItems, Track, media_from_dict
-
-if TYPE_CHECKING:
- from music_assistant.mass import MusicAssistant
-
-ItemCls = TypeVar("ItemCls", bound="MediaControllerBase")
-
-REFRESH_INTERVAL = 60 * 60 * 24 * 30
-
-
-class MediaControllerBase(Generic[ItemCls], metaclass=ABCMeta):
- """Base model for controller managing a MediaType."""
-
- media_type: MediaType
- item_cls: MediaItemType
- db_table: str
-
- def __init__(self, mass: MusicAssistant):
- """Initialize class."""
- self.mass = mass
- self.logger = mass.logger.getChild(f"music.{self.media_type.value}")
- self._db_add_lock = asyncio.Lock()
-
- @abstractmethod
- async def add(self, item: ItemCls) -> ItemCls:
- """Add item to local db and return the database item."""
- raise NotImplementedError
-
- @abstractmethod
- async def add_db_item(
- self, item: ItemCls, overwrite_existing: bool = False
- ) -> ItemCls:
- """Add a new record for this mediatype to the database."""
- raise NotImplementedError
-
- @abstractmethod
- async def update_db_item(
- self,
- item_id: int,
- item: ItemCls,
- overwrite: bool = False,
- ) -> ItemCls:
- """Update record in the database, merging data."""
- raise NotImplementedError
-
- async def db_items(
- self,
- in_library: Optional[bool] = None,
- search: Optional[str] = None,
- limit: int = 500,
- offset: int = 0,
- order_by: str = "sort_name",
- query_parts: Optional[List[str]] = None,
- ) -> PagedItems:
- """Get in-database items."""
- sql_query = f"SELECT * FROM {self.db_table}"
- params = {}
- query_parts = query_parts or []
- if search:
- params["search"] = f"%{search}%"
- if self.media_type in (MediaType.ALBUM, MediaType.TRACK):
- query_parts.append("(name LIKE :search or artists LIKE :search)")
- else:
- query_parts.append("name LIKE :search")
- if in_library is not None:
- query_parts.append("in_library = :in_library")
- params["in_library"] = in_library
- if query_parts:
- sql_query += " WHERE " + " AND ".join(query_parts)
- sql_query += f" ORDER BY {order_by}"
- items = await self.get_db_items_by_query(
- sql_query, params, limit=limit, offset=offset
- )
- count = len(items)
- if 0 < count < limit:
- total = offset + count
- else:
- total = await self.mass.database.get_count_from_query(sql_query, params)
- return PagedItems(items, count, limit, offset, total)
-
- async def iter_db_items(
- self,
- in_library: Optional[bool] = None,
- search: Optional[str] = None,
- order_by: str = "sort_name",
- ) -> AsyncGenerator[ItemCls, None]:
- """Iterate all in-database items."""
- limit: int = 500
- offset: int = 0
- while True:
- next_items = await self.db_items(
- in_library=in_library,
- search=search,
- limit=limit,
- offset=offset,
- order_by=order_by,
- )
- for item in next_items.items:
- yield item
- if next_items.count < limit:
- break
- offset += limit
-
- async def get(
- self,
- provider_item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- force_refresh: bool = False,
- lazy: bool = True,
- details: ItemCls = None,
- ) -> ItemCls:
- """Return (full) details for a single media item."""
- assert provider or provider_id, "provider or provider_id must be supplied"
- if isinstance(provider, str):
- provider = ProviderType(provider)
- db_item = await self.get_db_item_by_prov_id(
- provider_item_id=provider_item_id,
- provider=provider,
- provider_id=provider_id,
- )
- if db_item and (time() - db_item.last_refresh) > REFRESH_INTERVAL:
- # it's been too long since the full metadata was last retrieved (or never at all)
- force_refresh = True
- if db_item and force_refresh:
- # get (first) provider item id belonging to this db item
- provider_id, provider_item_id = await self.get_provider_id(db_item)
- elif db_item:
- # we have a db item and no refreshing is needed, return the results!
- return db_item
- if not details and provider_id:
- # no details provider nor in db, fetch them from the provider
- details = await self.get_provider_item(provider_item_id, provider_id)
- if not details and provider:
- # check providers for given provider type one by one
- for prov in self.mass.music.providers:
- if not prov.available:
- continue
- if prov.type == provider:
- try:
- details = await self.get_provider_item(
- provider_item_id, prov.id
- )
- except MediaNotFoundError:
- pass
- else:
- break
- if not details:
- # we couldn't get a match from any of the providers, raise error
- raise MediaNotFoundError(
- f"Item not found: {provider.value or provider_id}/{provider_item_id}"
- )
- # create job to add the item to the db, including matching metadata etc. takes some time
- # in 99% of the cases we just return lazy because we want the details as fast as possible
- # only if we really need to wait for the result (e.g. to prevent race conditions), we
- # can set lazy to false and we await to job to complete.
- add_job = self.mass.add_job(
- self.add(details),
- f"Add {details.uri} to database",
- )
- if not lazy:
- await add_job.wait()
- return add_job.result
-
- return details
-
- async def search(
- self,
- search_query: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- limit: int = 25,
- ) -> List[ItemCls]:
- """Search database or provider with given query."""
- # create safe search string
- search_query = search_query.replace("/", " ").replace("'", "")
- if provider == ProviderType.DATABASE or provider_id == "database":
- return [
- self.item_cls.from_db_row(db_row)
- for db_row in await self.mass.database.search(
- self.db_table, search_query
- )
- ]
-
- prov = self.mass.music.get_provider(provider_id or provider)
- if not prov or MusicProviderFeature.SEARCH not in prov.supported_features:
- return []
- if not prov.library_supported(self.media_type):
- # assume library supported also means that this mediatype is supported
- return []
-
- # prefer cache items (if any)
- cache_key = (
- f"{prov.type.value}.search.{self.media_type.value}.{search_query}.{limit}"
- )
- if cache := await self.mass.cache.get(cache_key):
- return [media_from_dict(x) for x in cache]
- # no items in cache - get listing from provider
- items = await prov.search(
- search_query,
- [self.media_type],
- limit,
- )
- # store (serializable items) in cache
- if not prov.type.is_file(): # do not cache filesystem results
- self.mass.create_task(
- self.mass.cache.set(
- cache_key, [x.to_dict() for x in items], expiration=86400 * 7
- )
- )
- return items
-
- async def add_to_library(
- self,
- provider_item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> None:
- """Add an item to the library."""
- prov_item = await self.get_db_item_by_prov_id(
- provider_item_id, provider=provider, provider_id=provider_id
- )
- if prov_item is None:
- prov_item = await self.get_provider_item(
- provider_item_id, provider_id or provider
- )
- if prov_item.in_library is True:
- return
- # mark as favorite/library item on provider(s)
- for prov_id in prov_item.provider_ids:
- if prov := self.mass.music.get_provider(prov_id.prov_id):
- if not prov.library_edit_supported(self.media_type):
- continue
- await prov.library_add(prov_id.item_id, self.media_type)
- # mark as library item in internal db if db item
- if prov_item.provider == ProviderType.DATABASE:
- if not prov_item.in_library:
- prov_item.in_library = True
- await self.set_db_library(prov_item.item_id, True)
-
- async def remove_from_library(
- self,
- provider_item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> None:
- """Remove item from the library."""
- prov_item = await self.get_db_item_by_prov_id(
- provider_item_id, provider=provider, provider_id=provider_id
- )
- if prov_item is None:
- prov_item = await self.get_provider_item(
- provider_item_id, provider_id or provider
- )
- if prov_item.in_library is False:
- return
- # unmark as favorite/library item on provider(s)
- for prov_id in prov_item.provider_ids:
- if prov := self.mass.music.get_provider(prov_id.prov_id):
- if not prov.library_edit_supported(self.media_type):
- continue
- await prov.library_remove(prov_id.item_id, self.media_type)
- # unmark as library item in internal db if db item
- if prov_item.provider == ProviderType.DATABASE:
- prov_item.in_library = False
- await self.set_db_library(prov_item.item_id, False)
-
- async def get_provider_id(self, item: ItemCls) -> Tuple[str, str]:
- """Return (first) provider and item id."""
- if item.provider == ProviderType.DATABASE:
- # make sure we have a full object
- item = await self.get_db_item(item.item_id)
- for prefer_file in (True, False):
- for prov in item.provider_ids:
- # returns the first provider that is available
- if not prov.available:
- continue
- if prefer_file and not prov.prov_type.is_file():
- continue
- if self.mass.music.get_provider(prov.prov_id):
- return (prov.prov_id, prov.item_id)
- return None, None
-
- async def get_db_items_by_query(
- self,
- custom_query: Optional[str] = None,
- query_params: Optional[dict] = None,
- limit: int = 500,
- offset: int = 0,
- ) -> List[ItemCls]:
- """Fetch MediaItem records from database given a custom query."""
- return [
- self.item_cls.from_db_row(db_row)
- for db_row in await self.mass.database.get_rows_from_query(
- custom_query, query_params, limit=limit, offset=offset
- )
- ]
-
- async def get_db_item(self, item_id: Union[int, str]) -> ItemCls:
- """Get record by id."""
- match = {"item_id": int(item_id)}
- if db_row := await self.mass.database.get_row(self.db_table, match):
- return self.item_cls.from_db_row(db_row)
- raise MediaNotFoundError(f"Album not found in database: {item_id}")
-
- async def get_db_item_by_prov_id(
- self,
- provider_item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- ) -> ItemCls | None:
- """Get the database item for the given prov_id."""
- assert provider or provider_id, "provider or provider_id must be supplied"
- if isinstance(provider, str):
- provider = ProviderType(provider)
- if provider == ProviderType.DATABASE or provider_id == "database":
- return await self.get_db_item(provider_item_id)
- for item in await self.get_db_items_by_prov_id(
- provider=provider,
- provider_id=provider_id,
- provider_item_ids=(provider_item_id,),
- ):
- return item
- return None
-
- async def get_db_items_by_prov_id(
- self,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- provider_item_ids: Optional[Tuple[str]] = None,
- limit: int = 500,
- offset: int = 0,
- ) -> List[ItemCls]:
- """Fetch all records from database for given provider."""
- assert provider or provider_id, "provider or provider_id must be supplied"
- if isinstance(provider, str):
- provider = ProviderType(provider)
- if provider == ProviderType.DATABASE or provider_id == "database":
- return await self.get_db_items_by_query(limit=limit, offset=offset)
-
- query = f"SELECT * FROM {self.db_table}, json_each(provider_ids)"
- if provider_id is not None:
- query += (
- f" WHERE json_extract(json_each.value, '$.prov_id') = '{provider_id}'"
- )
- elif provider is not None:
- query += f" WHERE json_extract(json_each.value, '$.prov_type') = '{provider.value}'"
- if provider_item_ids is not None:
- prov_ids = str(tuple(provider_item_ids))
- if prov_ids.endswith(",)"):
- prov_ids = prov_ids.replace(",)", ")")
- query += f" AND json_extract(json_each.value, '$.item_id') in {prov_ids}"
-
- return await self.get_db_items_by_query(query, limit=limit, offset=offset)
-
- async def set_db_library(self, item_id: int, in_library: bool) -> None:
- """Set the in-library bool on a database item."""
- match = {"item_id": item_id}
- timestamp = int(time()) if in_library else 0
- await self.mass.database.update(
- self.db_table, match, {"in_library": in_library, "timestamp": timestamp}
- )
- db_item = await self.get_db_item(item_id)
- self.mass.signal_event(
- MassEvent(EventType.MEDIA_ITEM_UPDATED, db_item.uri, db_item)
- )
-
- async def get_provider_item(
- self,
- item_id: str,
- provider_id: Union[str, ProviderType],
- ) -> ItemCls:
- """Return item details for the given provider item id."""
- if provider_id in ("database", ProviderType.DATABASE):
- item = await self.get_db_item(item_id)
- else:
- provider = self.mass.music.get_provider(provider_id)
- item = await provider.get_item(self.media_type, item_id)
- if not item:
- raise MediaNotFoundError(
- f"{self.media_type.value} {item_id} not found on provider {provider.name}"
- )
- return item
-
- async def remove_prov_mapping(self, item_id: int, prov_id: str) -> None:
- """Remove provider id(s) from item."""
- try:
- db_item = await self.get_db_item(item_id)
- except MediaNotFoundError:
- # edge case: already deleted / race condition
- return
-
- db_item.provider_ids = {x for x in db_item.provider_ids if x.prov_id != prov_id}
- if not db_item.provider_ids:
- # item has no more provider_ids left, it is completely deleted
- try:
- await self.delete_db_item(db_item.item_id)
- except AssertionError:
- self.logger.debug(
- "Could not delete %s: it has items attached", db_item.item_id
- )
- return
-
- # update the item in db (provider_ids column only)
- match = {"item_id": item_id}
- await self.mass.database.update(
- self.db_table,
- match,
- {"provider_ids": json_serializer(db_item.provider_ids)},
- )
- self.mass.signal_event(
- MassEvent(EventType.MEDIA_ITEM_UPDATED, db_item.uri, db_item)
- )
-
- self.logger.debug("removed provider %s from item id %s", prov_id, item_id)
-
- async def delete_db_item(self, item_id: int, recursive: bool = False) -> None:
- """Delete record from the database."""
- db_item = await self.get_db_item(item_id)
- assert db_item, f"Item does not exist: {item_id}"
- # delete item
- await self.mass.database.delete(
- self.db_table,
- {"item_id": int(item_id)},
- )
- # NOTE: this does not delete any references to this item in other records,
- # this is handled/overridden in the mediatype specific controllers
- self.mass.signal_event(
- MassEvent(EventType.MEDIA_ITEM_DELETED, db_item.uri, db_item)
- )
- self.logger.debug("deleted item with id %s from database", item_id)
-
- async def dynamic_tracks(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- limit: int = 25,
- ) -> List[Track]:
- """Return a dynamic list of tracks based on the given item."""
- ref_item = await self.get(item_id, provider, provider_id)
- for prov_id in ref_item.provider_ids:
- prov = self.mass.music.get_provider(prov_id.prov_id)
- if not prov.available:
- continue
- if MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features:
- continue
- return await self._get_provider_dynamic_tracks(
- item_id=prov_id.item_id,
- provider=prov_id.prov_type,
- provider_id=prov_id.prov_id,
- limit=limit,
- )
- # Fallback to the default implementation
- return await self._get_dynamic_tracks(ref_item)
-
- @abstractmethod
- async def _get_provider_dynamic_tracks(
- self,
- item_id: str,
- provider: Optional[ProviderType] = None,
- provider_id: Optional[str] = None,
- limit: int = 25,
- ) -> List[Track]:
- """Generate a dynamic list of tracks based on the item's content."""
-
- @abstractmethod
- async def _get_dynamic_tracks(
- self, media_item: ItemCls, limit: int = 25
- ) -> List[Track]:
- """Get dynamic list of tracks for given item, fallback/default implementation."""
-"""Model for a Music Providers."""
+"""Model/base for a Music Provider implementation."""
from __future__ import annotations
from abc import abstractmethod
-"""Model and helpders for a PlayerQueue."""
+"""Model for a PlayerQueue."""
from __future__ import annotations
import asyncio
-"""Package with Music Providers."""
+"""Package with Music Provider controllers."""
+++ /dev/null
-"""Filesystem musicprovider support for MusicAssistant."""
-from __future__ import annotations
-
-import asyncio
-import logging
-import os
-import urllib.parse
-from contextlib import asynccontextmanager
-from time import time
-from typing import AsyncGenerator, List, Optional, Set, Tuple
-
-import aiofiles
-import xmltodict
-from aiofiles.os import wrap
-from aiofiles.threadpool.binary import AsyncFileIO
-
-from music_assistant.constants import VARIOUS_ARTISTS, VARIOUS_ARTISTS_ID
-from music_assistant.helpers.compare import compare_strings
-from music_assistant.helpers.playlists import parse_m3u, parse_pls
-from music_assistant.helpers.tags import parse_tags, split_items
-from music_assistant.helpers.util import create_safe_string, parse_title_and_version
-from music_assistant.models.enums import MusicProviderFeature, ProviderType
-from music_assistant.models.errors import MediaNotFoundError, MusicAssistantError
-from music_assistant.models.media_items import (
- Album,
- AlbumType,
- Artist,
- BrowseFolder,
- ContentType,
- ImageType,
- MediaItemImage,
- MediaItemProviderId,
- MediaItemType,
- MediaQuality,
- MediaType,
- Playlist,
- Radio,
- StreamDetails,
- Track,
-)
-from music_assistant.models.music_provider import MusicProvider
-
-TRACK_EXTENSIONS = ("mp3", "m4a", "mp4", "flac", "wav", "ogg", "aiff", "wma", "dsf")
-PLAYLIST_EXTENSIONS = ("m3u", "pls")
-SUPPORTED_EXTENSIONS = TRACK_EXTENSIONS + PLAYLIST_EXTENSIONS
-IMAGE_EXTENSIONS = ("jpg", "jpeg", "JPG", "JPEG", "png", "PNG", "gif", "GIF")
-SCHEMA_VERSION = 17
-LOGGER = logging.getLogger(__name__)
-
-listdir = wrap(os.listdir)
-isdir = wrap(os.path.isdir)
-isfile = wrap(os.path.isfile)
-
-
-async def scantree(path: str) -> AsyncGenerator[os.DirEntry, None]:
- """Recursively yield DirEntry objects for given directory."""
-
- def is_dir(entry: os.DirEntry) -> bool:
- return entry.is_dir(follow_symlinks=False)
-
- loop = asyncio.get_running_loop()
- for entry in await loop.run_in_executor(None, os.scandir, path):
- if entry.name.startswith("."):
- continue
- if await loop.run_in_executor(None, is_dir, entry):
- try:
- async for subitem in scantree(entry.path):
- yield subitem
- except (OSError, PermissionError) as err:
- LOGGER.warning("Skip folder %s: %s", entry.path, str(err))
- else:
- yield entry
-
-
-def get_parentdir(base_path: str, name: str) -> str | None:
- """Look for folder name in path (to find dedicated artist or album folder)."""
- parentdir = os.path.dirname(base_path)
- for _ in range(3):
- dirname = parentdir.rsplit(os.sep)[-1]
- if compare_strings(name, dirname, False):
- return parentdir
- parentdir = os.path.dirname(parentdir)
- return None
-
-
-class FileSystemProvider(MusicProvider):
- """
- Implementation of a musicprovider for local files.
-
- Reads ID3 tags from file and falls back to parsing filename.
- Optionally reads metadata from nfo files and images in folder structure <artist>/<album>.
- Supports m3u files only for playlists.
- Supports having URI's from streaming providers within m3u playlist.
- """
-
- _attr_name = "Filesystem"
- _attr_type = ProviderType.FILESYSTEM_LOCAL
-
- @property
- def supported_features(self) -> Tuple[MusicProviderFeature]:
- """Return the features supported by this MusicProvider."""
- return (
- MusicProviderFeature.LIBRARY_ARTISTS,
- MusicProviderFeature.LIBRARY_ALBUMS,
- MusicProviderFeature.LIBRARY_TRACKS,
- MusicProviderFeature.LIBRARY_PLAYLISTS,
- MusicProviderFeature.PLAYLIST_TRACKS_EDIT,
- MusicProviderFeature.PLAYLIST_CREATE,
- MusicProviderFeature.BROWSE,
- MusicProviderFeature.SEARCH,
- )
-
- async def setup(self) -> bool:
- """Handle async initialization of the provider."""
-
- if not await isdir(self.config.path):
- raise MediaNotFoundError(
- f"Music Directory {self.config.path} does not exist"
- )
-
- return True
-
- async def search(
- self, search_query: str, media_types=Optional[List[MediaType]], limit: int = 5
- ) -> List[MediaItemType]:
- """Perform search on musicprovider."""
- result = []
- # searching the filesystem is slow and unreliable,
- # instead we make some (slow) freaking queries to the db ;-)
- params = {"name": f"%{search_query}%", "prov_type": f"%{self.type.value}%"}
- if media_types is None or MediaType.TRACK in media_types:
- query = "SELECT * FROM tracks WHERE name LIKE :name AND provider_ids LIKE :prov_type"
- tracks = await self.mass.music.tracks.get_db_items_by_query(query, params)
- result += tracks
- if media_types is None or MediaType.ALBUM in media_types:
- query = "SELECT * FROM albums WHERE name LIKE :name AND provider_ids LIKE :prov_type"
- albums = await self.mass.music.albums.get_db_items_by_query(query, params)
- result += albums
- if media_types is None or MediaType.ARTIST in media_types:
- query = "SELECT * FROM artists WHERE name LIKE :name AND provider_ids LIKE :prov_type"
- artists = await self.mass.music.artists.get_db_items_by_query(query, params)
- result += artists
- if media_types is None or MediaType.PLAYLIST in media_types:
- query = "SELECT * FROM playlists WHERE name LIKE :name AND provider_ids LIKE :prov_type"
- playlists = await self.mass.music.playlists.get_db_items_by_query(
- query, params
- )
- result += playlists
- return result
-
- async def browse(self, path: str) -> BrowseFolder:
- """
- Browse this provider's items.
-
- :param path: The path to browse, (e.g. provid://artists).
- """
- _, sub_path = path.split("://")
- if not sub_path:
- item_path = self.config.path
- else:
- item_path = os.path.join(self.config.path, sub_path)
- subitems = []
- for filename in await listdir(item_path):
- full_path: str = os.path.join(item_path, filename)
- rel_path = full_path.replace(self.config.path + os.sep, "")
- if await isdir(full_path):
- subitems.append(
- BrowseFolder(
- item_id=rel_path,
- provider=self.type,
- path=f"{self.id}://{rel_path}",
- name=filename,
- )
- )
- continue
-
- if "." not in filename or filename.startswith("."):
- # skip system files and files without extension
- continue
-
- _, ext = filename.rsplit(".", 1)
-
- if ext in TRACK_EXTENSIONS:
- item_id = self._get_item_id(full_path)
- if db_item := await self.mass.music.tracks.get_db_item_by_prov_id(
- item_id, provider_id=self.id
- ):
- subitems.append(db_item)
- elif track := await self._parse_track(full_path):
- # make sure that the item exists
- # https://github.com/music-assistant/hass-music-assistant/issues/707
- db_item = await self.mass.music.tracks.add_db_item(track)
- subitems.append(db_item)
- continue
- if ext in PLAYLIST_EXTENSIONS:
- item_id = self._get_item_id(full_path)
- if db_item := await self.mass.music.playlists.get_db_item_by_prov_id(
- item_id, provider_id=self.id
- ):
- subitems.append(db_item)
- elif playlist := await self._parse_playlist(full_path):
- # make sure that the item exists
- # https://github.com/music-assistant/hass-music-assistant/issues/707
- db_item = await self.mass.music.playlists.add_db_item(playlist)
- subitems.append(db_item)
- continue
-
- return BrowseFolder(
- item_id=sub_path,
- provider=self.type,
- path=path,
- name=sub_path or self.name,
- # make sure to sort the resulting listing
- items=sorted(subitems, key=lambda x: x.name),
- )
-
- async def sync_library(
- self, media_types: Optional[Tuple[MediaType]] = None
- ) -> None:
- """Run library sync for this provider."""
- cache_key = f"{self.id}.checksums"
- prev_checksums = await self.mass.cache.get(cache_key, SCHEMA_VERSION)
- save_checksum_interval = 0
- if prev_checksums is None:
- prev_checksums = {}
-
- # find all music files in the music directory and all subfolders
- # we work bottom up, as-in we derive all info from the tracks
- cur_checksums = {}
- async for entry in scantree(self.config.path):
-
- if "." not in entry.path or entry.path.startswith("."):
- # skip system files and files without extension
- continue
-
- _, ext = entry.path.rsplit(".", 1)
- if ext not in SUPPORTED_EXTENSIONS:
- # unsupported file extension
- continue
-
- try:
- # mtime is used as file checksum
- stat = await asyncio.get_running_loop().run_in_executor(
- None, entry.stat
- )
- checksum = int(stat.st_mtime)
- cur_checksums[entry.path] = checksum
- if checksum == prev_checksums.get(entry.path):
- continue
-
- if ext in TRACK_EXTENSIONS:
- # add/update track to db
- track = await self._parse_track(entry.path)
- # if the track was edited on disk, always overwrite existing db details
- overwrite_existing = entry.path in prev_checksums
- await self.mass.music.tracks.add_db_item(
- track, overwrite_existing=overwrite_existing
- )
- elif ext in PLAYLIST_EXTENSIONS:
- playlist = await self._parse_playlist(entry.path)
- # add/update] playlist to db
- playlist.metadata.checksum = checksum
- # playlist is always in-library
- playlist.in_library = True
- await self.mass.music.playlists.add_db_item(playlist)
- except Exception as err: # pylint: disable=broad-except
- # we don't want the whole sync to crash on one file so we catch all exceptions here
- self.logger.exception("Error processing %s - %s", entry.path, str(err))
-
- # save checksums every 100 processed items
- # this allows us to pickup where we leftoff when initial scan gets interrupted
- if save_checksum_interval == 100:
- await self.mass.cache.set(cache_key, cur_checksums, SCHEMA_VERSION)
- save_checksum_interval = 0
- else:
- save_checksum_interval += 1
-
- # store (final) checksums in cache
- await self.mass.cache.set(cache_key, cur_checksums, SCHEMA_VERSION)
- # work out deletions
- deleted_files = set(prev_checksums.keys()) - set(cur_checksums.keys())
- await self._process_deletions(deleted_files)
-
- async def _process_deletions(self, deleted_files: Set[str]) -> None:
- """Process all deletions."""
- # process deleted tracks/playlists
- for file_path in deleted_files:
-
- if "." not in file_path or file_path.startswith("."):
- # skip system files and files without extension
- continue
-
- _, ext = file_path.rsplit(".", 1)
- if ext not in SUPPORTED_EXTENSIONS:
- # unsupported file extension
- continue
-
- item_id = self._get_item_id(file_path)
-
- if ext in PLAYLIST_EXTENSIONS:
- controller = self.mass.music.get_controller(MediaType.PLAYLIST)
- else:
- controller = self.mass.music.get_controller(MediaType.TRACK)
-
- if db_item := await controller.get_db_item_by_prov_id(item_id, self.type):
- await controller.remove_prov_mapping(db_item.item_id, self.id)
-
- async def get_artist(self, prov_artist_id: str) -> Artist:
- """Get full artist details by id."""
- db_artist = await self.mass.music.artists.get_db_item_by_prov_id(
- provider_item_id=prov_artist_id, provider_id=self.id
- )
- if db_artist is None:
- raise MediaNotFoundError(f"Artist not found: {prov_artist_id}")
- itempath = await self._get_filepath(MediaType.ARTIST, prov_artist_id)
- if await self.exists(itempath):
- # if path exists on disk allow parsing full details to allow refresh of metadata
- return await self._parse_artist(db_artist.name, artist_path=itempath)
- return db_artist
-
- async def get_album(self, prov_album_id: str) -> Album:
- """Get full album details by id."""
- db_album = await self.mass.music.albums.get_db_item_by_prov_id(
- provider_item_id=prov_album_id, provider_id=self.id
- )
- if db_album is None:
- raise MediaNotFoundError(f"Album not found: {prov_album_id}")
- itempath = await self._get_filepath(MediaType.ALBUM, prov_album_id)
- if await self.exists(itempath):
- # if path exists on disk allow parsing full details to allow refresh of metadata
- return await self._parse_album(db_album.name, itempath, db_album.artists)
- return db_album
-
- async def get_track(self, prov_track_id: str) -> Track:
- """Get full track details by id."""
- itempath = await self._get_filepath(MediaType.TRACK, prov_track_id)
- return await self._parse_track(itempath)
-
- async def get_playlist(self, prov_playlist_id: str) -> Playlist:
- """Get full playlist details by id."""
- itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
- return await self._parse_playlist(itempath)
-
- async def get_album_tracks(self, prov_album_id: str) -> List[Track]:
- """Get album tracks for given album id."""
- # filesystem items are always stored in db so we can query the database
- db_album = await self.mass.music.albums.get_db_item_by_prov_id(
- prov_album_id, provider_id=self.id
- )
- if db_album is None:
- raise MediaNotFoundError(f"Album not found: {prov_album_id}")
- # TODO: adjust to json query instead of text search
- query = f"SELECT * FROM tracks WHERE albums LIKE '%\"{db_album.item_id}\"%'"
- query += f" AND provider_ids LIKE '%\"{self.type.value}\"%'"
- result = []
- for track in await self.mass.music.tracks.get_db_items_by_query(query):
- track.album = db_album
- if album_mapping := next(
- (x for x in track.albums if x.item_id == db_album.item_id), None
- ):
- track.disc_number = album_mapping.disc_number
- track.track_number = album_mapping.track_number
- result.append(track)
- return sorted(result, key=lambda x: (x.disc_number or 0, x.track_number or 0))
-
- async def get_playlist_tracks(self, prov_playlist_id: str) -> List[Track]:
- """Get playlist tracks for given playlist id."""
- result = []
- playlist_path = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
- if not await self.exists(playlist_path):
- raise MediaNotFoundError(f"Playlist path does not exist: {playlist_path}")
- parentdir = os.path.dirname(playlist_path)
- _, ext = playlist_path.rsplit(".", 1)
- try:
- async with self.open_file(playlist_path, "r") as _file:
- playlist_data = await _file.read()
-
- if ext in ("m3u", "m3u8"):
- playlist_lines = await parse_m3u(playlist_data)
- else:
- playlist_lines = await parse_pls(playlist_data)
-
- for line_no, playlist_line in enumerate(playlist_lines):
-
- if media_item := await self._parse_playlist_line(
- playlist_line, parentdir
- ):
- # use the linenumber as position for easier deletions
- media_item.position = line_no
- result.append(media_item)
-
- except Exception as err: # pylint: disable=broad-except
- self.logger.warning(
- "Error while parsing playlist %s", playlist_path, exc_info=err
- )
- return result
-
- async def _parse_playlist_line(
- self, line: str, playlist_path: str
- ) -> Track | Radio | None:
- """Try to parse a track from a playlist line."""
- try:
- # try to treat uri as (relative) filename
- if "://" not in line:
- for filename in (line, os.path.join(playlist_path, line)):
- if not await self.exists(filename):
- continue
- file_path = await self.resolve(filename)
- return await self._parse_track(file_path)
- # fallback to generic uri parsing
- return await self.mass.music.get_item_by_uri(line)
- except MusicAssistantError as err:
- self.logger.warning(
- "Could not parse uri/file %s to track: %s", line, str(err)
- )
- return None
-
- async def add_playlist_tracks(
- self, prov_playlist_id: str, prov_track_ids: List[str]
- ) -> None:
- """Add track(s) to playlist."""
- itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
- if not await self.exists(itempath):
- raise MediaNotFoundError(f"Playlist path does not exist: {itempath}")
- async with self.open_file(itempath, "r") as _file:
- cur_data = await _file.read()
- async with self.open_file(itempath, "w") as _file:
- await _file.write(cur_data)
- for uri in prov_track_ids:
- await _file.write(f"\n{uri}")
-
- async def remove_playlist_tracks(
- self, prov_playlist_id: str, positions_to_remove: Tuple[int]
- ) -> None:
- """Remove track(s) from playlist."""
- itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
- if not await self.exists(itempath):
- raise MediaNotFoundError(f"Playlist path does not exist: {itempath}")
- cur_lines = []
- async with self.open_file(itempath, "r") as _file:
- for line_no, line in enumerate(await _file.readlines()):
- line = urllib.parse.unquote(line.strip())
- if line_no not in positions_to_remove:
- cur_lines.append(line)
- async with self.open_file(itempath, "w") as _file:
- for uri in cur_lines:
- await _file.write(f"{uri}\n")
-
- async def create_playlist(self, name: str) -> Playlist:
- """Create a new playlist on provider with given name."""
- # creating a new playlist on the filesystem is as easy
- # as creating a new (empty) file with the m3u extension...
- filename = await self.resolve(f"{name}.m3u")
- async with self.open_file(filename, "w") as _file:
- await _file.write("\n")
- playlist = await self._parse_playlist(filename)
- db_playlist = await self.mass.music.playlists.add_db_item(playlist)
- return db_playlist
-
- async def get_stream_details(self, item_id: str) -> StreamDetails:
- """Return the content details for the given track when it will be streamed."""
- itempath = await self._get_filepath(MediaType.TRACK, item_id)
- if not await self.exists(itempath):
- raise MediaNotFoundError(f"Track path does not exist: {itempath}")
-
- metadata = await parse_tags(itempath)
- stat = await self.mass.loop.run_in_executor(None, os.stat, itempath)
-
- return StreamDetails(
- provider=self.type,
- item_id=item_id,
- content_type=ContentType.try_parse(metadata.format),
- media_type=MediaType.TRACK,
- duration=metadata.duration,
- size=stat.st_size,
- sample_rate=metadata.sample_rate,
- bit_depth=metadata.bits_per_sample,
- direct=itempath,
- )
-
- async def _parse_track(self, track_path: str) -> Track:
- """Try to parse a track from a filename by reading its tags."""
-
- if not await self.exists(track_path):
- raise MediaNotFoundError(f"Track path does not exist: {track_path}")
-
- track_item_id = self._get_item_id(track_path)
-
- # parse tags
- tags = await parse_tags(track_path)
-
- name, version = parse_title_and_version(tags.title)
- track = Track(
- item_id=track_item_id,
- provider=self.type,
- name=name,
- version=version,
- )
-
- # album
- if tags.album:
- # work out if we have an album folder
- album_dir = get_parentdir(track_path, tags.album)
-
- # album artist(s)
- if tags.album_artists:
- album_artists = []
- for index, album_artist_str in enumerate(tags.album_artists):
- # work out if we have an artist folder
- artist_dir = get_parentdir(track_path, album_artist_str)
- artist = await self._parse_artist(
- album_artist_str, artist_path=artist_dir
- )
- if not artist.musicbrainz_id:
- try:
- artist.musicbrainz_id = tags.musicbrainz_albumartistids[
- index
- ]
- except IndexError:
- pass
- album_artists.append(artist)
- else:
- # always fallback to various artists as album artist if user did not tag album artist
- # ID3 tag properly because we must have an album artist
- self.logger.warning(
- "%s is missing ID3 tag [albumartist], using %s as fallback",
- track_path,
- VARIOUS_ARTISTS,
- )
- album_artists = [await self._parse_artist(name=VARIOUS_ARTISTS)]
-
- track.album = await self._parse_album(
- tags.album,
- album_dir,
- artists=album_artists,
- )
- else:
- self.logger.warning("%s is missing ID3 tag [album]", track_path)
-
- # track artist(s)
- for index, track_artist_str in enumerate(tags.artists):
- # re-use album artist details if possible
- if track.album:
- if artist := next(
- (x for x in track.album.artists if x.name == track_artist_str), None
- ):
- track.artists.append(artist)
- continue
- artist = await self._parse_artist(track_artist_str)
- if not artist.musicbrainz_id:
- try:
- artist.musicbrainz_id = tags.musicbrainz_artistids[index]
- except IndexError:
- pass
- track.artists.append(artist)
-
- # cover image - prefer album image, fallback to embedded
- if track.album and track.album.image:
- track.metadata.images = [track.album.image]
- elif tags.has_cover_image:
- # we do not actually embed the image in the metadata because that would consume too
- # much space and bandwidth. Instead we set the filename as value so the image can
- # be retrieved later in realtime.
- track.metadata.images = [MediaItemImage(ImageType.THUMB, track_path, True)]
- if track.album:
- # set embedded cover on album
- track.album.metadata.images = track.metadata.images
-
- # parse other info
- track.duration = tags.duration or 0
- track.metadata.genres = tags.genres
- track.disc_number = tags.disc
- track.track_number = tags.track
- track.isrc = tags.get("isrc")
- track.metadata.copyright = tags.get("copyright")
- track.metadata.lyrics = tags.get("lyrics")
- track.musicbrainz_id = tags.musicbrainz_trackid
- if track.album:
- if not track.album.musicbrainz_id:
- track.album.musicbrainz_id = tags.musicbrainz_releasegroupid
- if not track.album.year:
- track.album.year = tags.year
- if not track.album.upc:
- track.album.upc = tags.get("barcode")
- # try to parse albumtype
- if track.album and track.album.album_type == AlbumType.UNKNOWN:
- album_type = tags.album_type
- if album_type and "compilation" in album_type:
- track.album.album_type = AlbumType.COMPILATION
- elif album_type and "single" in album_type:
- track.album.album_type = AlbumType.SINGLE
- elif album_type and "album" in album_type:
- track.album.album_type = AlbumType.ALBUM
- elif track.album.sort_name in track.sort_name:
- track.album.album_type = AlbumType.SINGLE
-
- # set checksum to invalidate any cached listings
- checksum_timestamp = str(int(time()))
- track.metadata.checksum = checksum_timestamp
- if track.album:
- track.album.metadata.checksum = checksum_timestamp
- for artist in track.album.artists:
- artist.metadata.checksum = checksum_timestamp
-
- quality_details = ""
- content_type = ContentType.try_parse(tags.format)
- quality_details = f"{int(tags.bit_rate / 1000)} kbps"
- if content_type == ContentType.MP3:
- quality = MediaQuality.LOSSY_MP3
- elif content_type == ContentType.OGG:
- quality = MediaQuality.LOSSY_OGG
- elif content_type == ContentType.AAC:
- quality = MediaQuality.LOSSY_AAC
- elif content_type == ContentType.M4A:
- quality = MediaQuality.LOSSY_M4A
- elif content_type.is_lossless():
- quality = MediaQuality.LOSSLESS
- quality_details = f"{tags.sample_rate / 1000} Khz / {tags.bit_rate} bit"
- if tags.sample_rate > 192000:
- quality = MediaQuality.LOSSLESS_HI_RES_4
- elif tags.sample_rate > 96000:
- quality = MediaQuality.LOSSLESS_HI_RES_3
- elif tags.sample_rate > 48000:
- quality = MediaQuality.LOSSLESS_HI_RES_2
- elif tags.bits_per_sample > 16:
- quality = MediaQuality.LOSSLESS_HI_RES_1
- else:
- quality = MediaQuality.UNKNOWN
-
- track.add_provider_id(
- MediaItemProviderId(
- item_id=track_item_id,
- prov_type=self.type,
- prov_id=self.id,
- quality=quality,
- details=quality_details,
- url=track_path,
- )
- )
- return track
-
- async def _parse_artist(
- self,
- name: Optional[str] = None,
- artist_path: Optional[str] = None,
- ) -> Artist | None:
- """Lookup metadata in Artist folder."""
- assert name or artist_path
- if not artist_path:
- # create fake path
- artist_path = os.path.join(self.config.path, name)
-
- artist_item_id = self._get_item_id(artist_path)
- if not name:
- name = artist_path.split(os.sep)[-1]
-
- artist = Artist(
- artist_item_id,
- self.type,
- name,
- provider_ids={
- MediaItemProviderId(artist_item_id, self.type, self.id, url=artist_path)
- },
- musicbrainz_id=VARIOUS_ARTISTS_ID
- if compare_strings(name, VARIOUS_ARTISTS)
- else None,
- )
-
- if not await self.exists(artist_path):
- # return basic object if there is no dedicated artist folder
- return artist
-
- nfo_file = os.path.join(artist_path, "artist.nfo")
- if await self.exists(nfo_file):
- # found NFO file with metadata
- # https://kodi.wiki/view/NFO_files/Artists
- async with self.open_file(nfo_file, "r") as _file:
- data = await _file.read()
- info = await self.mass.loop.run_in_executor(None, xmltodict.parse, data)
- info = info["artist"]
- artist.name = info.get("title", info.get("name", name))
- if sort_name := info.get("sortname"):
- artist.sort_name = sort_name
- if musicbrainz_id := info.get("musicbrainzartistid"):
- artist.musicbrainz_id = musicbrainz_id
- if descripton := info.get("biography"):
- artist.metadata.description = descripton
- if genre := info.get("genre"):
- artist.metadata.genres = set(split_items(genre))
- # find local images
- artist.metadata.images = await self._get_local_images(artist_path) or None
-
- return artist
-
- async def _parse_album(
- self, name: Optional[str], album_path: Optional[str], artists: List[Artist]
- ) -> Album | None:
- """Lookup metadata in Album folder."""
- assert (name or album_path) and artists
- if not album_path:
- # create fake path
- album_path = os.path.join(self.config.path, artists[0].name, name)
-
- album_item_id = self._get_item_id(album_path)
- if not name:
- name = album_path.split(os.sep)[-1]
-
- album = Album(
- album_item_id,
- self.type,
- name,
- artists=artists,
- provider_ids={
- MediaItemProviderId(album_item_id, self.type, self.id, url=album_path)
- },
- )
-
- if not await self.exists(album_path):
- # return basic object if there is no dedicated album folder
- return album
-
- nfo_file = os.path.join(album_path, "album.nfo")
- if await self.exists(nfo_file):
- # found NFO file with metadata
- # https://kodi.wiki/view/NFO_files/Artists
- async with self.open_file(nfo_file) as _file:
- data = await _file.read()
- info = await self.mass.loop.run_in_executor(None, xmltodict.parse, data)
- info = info["album"]
- album.name = info.get("title", info.get("name", name))
- if sort_name := info.get("sortname"):
- album.sort_name = sort_name
- if musicbrainz_id := info.get("musicbrainzreleasegroupid"):
- album.musicbrainz_id = musicbrainz_id
- if mb_artist_id := info.get("musicbrainzalbumartistid"):
- if album.artist and not album.artist.musicbrainz_id:
- album.artist.musicbrainz_id = mb_artist_id
- if description := info.get("review"):
- album.metadata.description = description
- if year := info.get("year"):
- album.year = int(year)
- if genre := info.get("genre"):
- album.metadata.genres = set(split_items(genre))
- # parse name/version
- album.name, album.version = parse_title_and_version(album.name)
-
- # find local images
- album.metadata.images = await self._get_local_images(album_path) or None
-
- return album
-
- async def _parse_playlist(self, playlist_path: str) -> Playlist:
- """Parse playlist from file."""
- playlist_item_id = self._get_item_id(playlist_path)
-
- if not await self.exists(playlist_path):
- raise MediaNotFoundError(f"Playlist path does not exist: {playlist_path}")
-
- playlist_path_base, ext = playlist_path.rsplit(".", 1)
- name = playlist_path_base.split(os.sep)[-1]
-
- playlist = Playlist(playlist_item_id, provider=self.type, name=name)
- playlist.is_editable = ext != "pls" # can only edit m3u playlists
-
- playlist.add_provider_id(
- MediaItemProviderId(
- item_id=playlist_item_id,
- prov_type=self.type,
- prov_id=self.id,
- url=playlist_path,
- )
- )
- playlist.owner = self._attr_name
- getmtime = wrap(os.path.getmtime)
- mtime = await getmtime(playlist_path)
- checksum = f"{SCHEMA_VERSION}.{int(mtime)}"
- playlist.metadata.checksum = checksum
- return playlist
-
- async def exists(self, file_path: str) -> bool:
- """Return bool is this FileSystem musicprovider has given file/dir."""
- if not file_path:
- return False # guard
- file_path = await self.resolve(file_path)
- _exists = wrap(os.path.exists)
- return await _exists(file_path)
-
- @asynccontextmanager
- async def open_file(self, file_path: str, mode="rb") -> AsyncFileIO:
- """Return (async) handle to given file."""
- # ensure we have a full path and not relative
- if self.config.path not in file_path:
- file_path = os.path.join(self.config.path, file_path)
- file_path = await self.resolve(file_path)
- async with aiofiles.open(file_path, mode) as _file:
- yield _file
-
- async def resolve(self, file_path: str) -> str:
- """Resolve local accessible file."""
- # remote file locations should return a tempfile here so this is future proofing
- if self.config.path not in file_path:
- file_path = os.path.join(self.config.path, file_path)
- return file_path
-
- async def _get_filepath(
- self, media_type: MediaType, prov_item_id: str
- ) -> str | None:
- """Get full filepath on disk for item_id."""
- if prov_item_id is None:
- return None # guard
- # funky sql queries go here ;-)
- table = f"{media_type.value}s"
- query = (
- f"SELECT json_extract(json_each.value, '$.url') as url FROM {table}"
- " ,json_each(provider_ids) WHERE"
- f" json_extract(json_each.value, '$.prov_id') = '{self.id}'"
- f" AND json_extract(json_each.value, '$.item_id') = '{prov_item_id}'"
- )
- for db_row in await self.mass.database.get_rows_from_query(query):
- file_path = db_row["url"]
- # ensure we have a full path and not relative
- if self.config.path not in file_path:
- file_path = os.path.join(self.config.path, file_path)
- return file_path
- return None
-
- def _get_item_id(self, file_path: str) -> str:
- """Create item id from filename."""
- return create_safe_string(file_path.replace(self.config.path, ""))
-
- async def _get_local_images(self, folder: str) -> List[MediaItemImage]:
- """Return local images found in a given folderpath."""
- images = []
- async for _path in scantree(folder):
- if "." not in _path.path or _path.is_dir():
- continue
- for ext in IMAGE_EXTENSIONS:
- if not _path.path.endswith(f".{ext}"):
- continue
- filename = _path.path.rsplit(os.sep, 1)[-1].replace(f".{ext}", "")
- try:
- images.append(MediaItemImage(ImageType(filename), _path.path, True))
- except ValueError:
- if "folder" in filename:
- images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
- elif "AlbumArt" in filename:
- images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
- elif "Artist" in filename:
- images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
- return images
--- /dev/null
+"""Package with FileSystem Music provider(s)."""
+
+from .filesystem import FileSystemProvider # noqa
--- /dev/null
+"""Filesystem musicprovider support for MusicAssistant."""
+from __future__ import annotations
+
+import asyncio
+import logging
+import os
+import urllib.parse
+from contextlib import asynccontextmanager
+from time import time
+from typing import AsyncGenerator, List, Optional, Set, Tuple
+
+import aiofiles
+import xmltodict
+from aiofiles.os import wrap
+from aiofiles.threadpool.binary import AsyncFileIO
+
+from music_assistant.constants import VARIOUS_ARTISTS, VARIOUS_ARTISTS_ID
+from music_assistant.helpers.compare import compare_strings
+from music_assistant.helpers.playlists import parse_m3u, parse_pls
+from music_assistant.helpers.tags import parse_tags, split_items
+from music_assistant.helpers.util import create_safe_string, parse_title_and_version
+from music_assistant.models.enums import MusicProviderFeature, ProviderType
+from music_assistant.models.errors import MediaNotFoundError, MusicAssistantError
+from music_assistant.models.media_items import (
+ Album,
+ AlbumType,
+ Artist,
+ BrowseFolder,
+ ContentType,
+ ImageType,
+ MediaItemImage,
+ MediaItemProviderId,
+ MediaItemType,
+ MediaQuality,
+ MediaType,
+ Playlist,
+ Radio,
+ StreamDetails,
+ Track,
+)
+from music_assistant.models.music_provider import MusicProvider
+
# file extensions we consider playable tracks
TRACK_EXTENSIONS = ("mp3", "m4a", "mp4", "flac", "wav", "ogg", "aiff", "wma", "dsf")
# file extensions we consider playlists
PLAYLIST_EXTENSIONS = ("m3u", "pls")
SUPPORTED_EXTENSIONS = TRACK_EXTENSIONS + PLAYLIST_EXTENSIONS
# image files picked up as local artwork (matching is case sensitive, hence both cases)
IMAGE_EXTENSIONS = ("jpg", "jpeg", "JPG", "JPEG", "png", "PNG", "gif", "GIF")
# version marker used in cache/playlist checksums: bumping it invalidates cached data
SCHEMA_VERSION = 17
LOGGER = logging.getLogger(__name__)

# async wrappers around blocking os calls (run in the default thread pool)
listdir = wrap(os.listdir)
isdir = wrap(os.path.isdir)
isfile = wrap(os.path.isfile)
+
+
async def scantree(path: str) -> AsyncGenerator[os.DirEntry, None]:
    """Recursively yield DirEntry objects for given directory."""
    loop = asyncio.get_running_loop()
    # os.scandir is blocking, so run it in the executor
    for item in await loop.run_in_executor(None, os.scandir, path):
        if item.name.startswith("."):
            # skip hidden files/folders
            continue
        is_folder = await loop.run_in_executor(
            None, lambda entry=item: entry.is_dir(follow_symlinks=False)
        )
        if not is_folder:
            # plain (non hidden) file: yield it directly
            yield item
            continue
        # folder: recurse, but never let one unreadable folder abort the scan
        try:
            async for nested in scantree(item.path):
                yield nested
        except (OSError, PermissionError) as err:
            LOGGER.warning("Skip folder %s: %s", item.path, str(err))
+
+
def get_parentdir(base_path: str, name: str) -> str | None:
    """Look for folder name in path (to find dedicated artist or album folder)."""
    candidate = os.path.dirname(base_path)
    # only walk a few levels up, a match further away is meaningless
    for _ in range(3):
        if compare_strings(name, candidate.rsplit(os.sep)[-1], False):
            return candidate
        candidate = os.path.dirname(candidate)
    return None
+
+
class FileSystemProvider(MusicProvider):
    """
    Implementation of a musicprovider for local files.

    Reads ID3 tags from file and falls back to parsing filename.
    Optionally reads metadata from nfo files and images in folder structure <artist>/<album>.
    Supports m3u files only for playlists.
    Supports having URI's from streaming providers within m3u playlist.
    """

    # human readable name of this provider
    _attr_name = "Filesystem"
    # provider type identifier used throughout music-assistant
    _attr_type = ProviderType.FILESYSTEM_LOCAL
+
    @property
    def supported_features(self) -> Tuple[MusicProviderFeature, ...]:
        """Return the features supported by this MusicProvider."""
        return (
            MusicProviderFeature.LIBRARY_ARTISTS,
            MusicProviderFeature.LIBRARY_ALBUMS,
            MusicProviderFeature.LIBRARY_TRACKS,
            MusicProviderFeature.LIBRARY_PLAYLISTS,
            MusicProviderFeature.PLAYLIST_TRACKS_EDIT,
            MusicProviderFeature.PLAYLIST_CREATE,
            MusicProviderFeature.BROWSE,
            MusicProviderFeature.SEARCH,
        )
+
+ async def setup(self) -> bool:
+ """Handle async initialization of the provider."""
+
+ if not await isdir(self.config.path):
+ raise MediaNotFoundError(
+ f"Music Directory {self.config.path} does not exist"
+ )
+
+ return True
+
+ async def search(
+ self, search_query: str, media_types=Optional[List[MediaType]], limit: int = 5
+ ) -> List[MediaItemType]:
+ """Perform search on musicprovider."""
+ result = []
+ # searching the filesystem is slow and unreliable,
+ # instead we make some (slow) freaking queries to the db ;-)
+ params = {"name": f"%{search_query}%", "prov_type": f"%{self.type.value}%"}
+ if media_types is None or MediaType.TRACK in media_types:
+ query = "SELECT * FROM tracks WHERE name LIKE :name AND provider_ids LIKE :prov_type"
+ tracks = await self.mass.music.tracks.get_db_items_by_query(query, params)
+ result += tracks
+ if media_types is None or MediaType.ALBUM in media_types:
+ query = "SELECT * FROM albums WHERE name LIKE :name AND provider_ids LIKE :prov_type"
+ albums = await self.mass.music.albums.get_db_items_by_query(query, params)
+ result += albums
+ if media_types is None or MediaType.ARTIST in media_types:
+ query = "SELECT * FROM artists WHERE name LIKE :name AND provider_ids LIKE :prov_type"
+ artists = await self.mass.music.artists.get_db_items_by_query(query, params)
+ result += artists
+ if media_types is None or MediaType.PLAYLIST in media_types:
+ query = "SELECT * FROM playlists WHERE name LIKE :name AND provider_ids LIKE :prov_type"
+ playlists = await self.mass.music.playlists.get_db_items_by_query(
+ query, params
+ )
+ result += playlists
+ return result
+
    async def browse(self, path: str) -> BrowseFolder:
        """
        Browse this provider's items.

        :param path: The path to browse, (e.g. provid://artists).
        """
        # everything after :// is the folder path relative to the music dir root
        _, sub_path = path.split("://")
        if not sub_path:
            item_path = self.config.path
        else:
            item_path = os.path.join(self.config.path, sub_path)
        subitems = []
        for filename in await listdir(item_path):
            full_path: str = os.path.join(item_path, filename)
            rel_path = full_path.replace(self.config.path + os.sep, "")
            if await isdir(full_path):
                # directories are returned as browsable sub folders
                subitems.append(
                    BrowseFolder(
                        item_id=rel_path,
                        provider=self.type,
                        path=f"{self.id}://{rel_path}",
                        name=filename,
                    )
                )
                continue

            if "." not in filename or filename.startswith("."):
                # skip system files and files without extension
                continue

            _, ext = filename.rsplit(".", 1)

            if ext in TRACK_EXTENSIONS:
                # prefer the (enriched) database item over re-parsing the file
                item_id = self._get_item_id(full_path)
                if db_item := await self.mass.music.tracks.get_db_item_by_prov_id(
                    item_id, provider_id=self.id
                ):
                    subitems.append(db_item)
                elif track := await self._parse_track(full_path):
                    # make sure that the item exists
                    # https://github.com/music-assistant/hass-music-assistant/issues/707
                    db_item = await self.mass.music.tracks.add_db_item(track)
                    subitems.append(db_item)
                continue
            if ext in PLAYLIST_EXTENSIONS:
                # same db-first strategy for playlists
                item_id = self._get_item_id(full_path)
                if db_item := await self.mass.music.playlists.get_db_item_by_prov_id(
                    item_id, provider_id=self.id
                ):
                    subitems.append(db_item)
                elif playlist := await self._parse_playlist(full_path):
                    # make sure that the item exists
                    # https://github.com/music-assistant/hass-music-assistant/issues/707
                    db_item = await self.mass.music.playlists.add_db_item(playlist)
                    subitems.append(db_item)
                continue

        return BrowseFolder(
            item_id=sub_path,
            provider=self.type,
            path=path,
            name=sub_path or self.name,
            # make sure to sort the resulting listing
            items=sorted(subitems, key=lambda x: x.name),
        )
+
    async def sync_library(
        self, media_types: Optional[Tuple[MediaType]] = None
    ) -> None:
        """Run library sync for this provider."""
        cache_key = f"{self.id}.checksums"
        # SCHEMA_VERSION doubles as the cache checksum: bumping it forces a full rescan
        prev_checksums = await self.mass.cache.get(cache_key, SCHEMA_VERSION)
        save_checksum_interval = 0
        if prev_checksums is None:
            prev_checksums = {}

        # find all music files in the music directory and all subfolders
        # we work bottom up, as-in we derive all info from the tracks
        cur_checksums = {}
        async for entry in scantree(self.config.path):

            # NOTE(review): entry.path is a full path, so startswith(".")
            # can never match here; hidden entries are already filtered
            # inside scantree() though, so no functional harm — confirm intent.
            if "." not in entry.path or entry.path.startswith("."):
                # skip system files and files without extension
                continue

            _, ext = entry.path.rsplit(".", 1)
            if ext not in SUPPORTED_EXTENSIONS:
                # unsupported file extension
                continue

            try:
                # mtime is used as file checksum
                stat = await asyncio.get_running_loop().run_in_executor(
                    None, entry.stat
                )
                checksum = int(stat.st_mtime)
                cur_checksums[entry.path] = checksum
                if checksum == prev_checksums.get(entry.path):
                    # file unchanged since last sync: skip expensive parsing
                    continue

                if ext in TRACK_EXTENSIONS:
                    # add/update track to db
                    track = await self._parse_track(entry.path)
                    # if the track was edited on disk, always overwrite existing db details
                    overwrite_existing = entry.path in prev_checksums
                    await self.mass.music.tracks.add_db_item(
                        track, overwrite_existing=overwrite_existing
                    )
                elif ext in PLAYLIST_EXTENSIONS:
                    playlist = await self._parse_playlist(entry.path)
                    # add/update playlist to db
                    playlist.metadata.checksum = checksum
                    # playlist is always in-library
                    playlist.in_library = True
                    await self.mass.music.playlists.add_db_item(playlist)
            except Exception as err:  # pylint: disable=broad-except
                # we don't want the whole sync to crash on one file so we catch all exceptions here
                self.logger.exception("Error processing %s - %s", entry.path, str(err))

            # save checksums every 100 processed items
            # this allows us to pickup where we leftoff when initial scan gets interrupted
            if save_checksum_interval == 100:
                await self.mass.cache.set(cache_key, cur_checksums, SCHEMA_VERSION)
                save_checksum_interval = 0
            else:
                save_checksum_interval += 1

        # store (final) checksums in cache
        await self.mass.cache.set(cache_key, cur_checksums, SCHEMA_VERSION)
        # work out deletions: anything seen last sync but not this one
        deleted_files = set(prev_checksums.keys()) - set(cur_checksums.keys())
        await self._process_deletions(deleted_files)
+
+ async def _process_deletions(self, deleted_files: Set[str]) -> None:
+ """Process all deletions."""
+ # process deleted tracks/playlists
+ for file_path in deleted_files:
+
+ if "." not in file_path or file_path.startswith("."):
+ # skip system files and files without extension
+ continue
+
+ _, ext = file_path.rsplit(".", 1)
+ if ext not in SUPPORTED_EXTENSIONS:
+ # unsupported file extension
+ continue
+
+ item_id = self._get_item_id(file_path)
+
+ if ext in PLAYLIST_EXTENSIONS:
+ controller = self.mass.music.get_controller(MediaType.PLAYLIST)
+ else:
+ controller = self.mass.music.get_controller(MediaType.TRACK)
+
+ if db_item := await controller.get_db_item_by_prov_id(item_id, self.type):
+ await controller.remove_prov_mapping(db_item.item_id, self.id)
+
+ async def get_artist(self, prov_artist_id: str) -> Artist:
+ """Get full artist details by id."""
+ db_artist = await self.mass.music.artists.get_db_item_by_prov_id(
+ provider_item_id=prov_artist_id, provider_id=self.id
+ )
+ if db_artist is None:
+ raise MediaNotFoundError(f"Artist not found: {prov_artist_id}")
+ itempath = await self._get_filepath(MediaType.ARTIST, prov_artist_id)
+ if await self.exists(itempath):
+ # if path exists on disk allow parsing full details to allow refresh of metadata
+ return await self._parse_artist(db_artist.name, artist_path=itempath)
+ return db_artist
+
+ async def get_album(self, prov_album_id: str) -> Album:
+ """Get full album details by id."""
+ db_album = await self.mass.music.albums.get_db_item_by_prov_id(
+ provider_item_id=prov_album_id, provider_id=self.id
+ )
+ if db_album is None:
+ raise MediaNotFoundError(f"Album not found: {prov_album_id}")
+ itempath = await self._get_filepath(MediaType.ALBUM, prov_album_id)
+ if await self.exists(itempath):
+ # if path exists on disk allow parsing full details to allow refresh of metadata
+ return await self._parse_album(db_album.name, itempath, db_album.artists)
+ return db_album
+
+ async def get_track(self, prov_track_id: str) -> Track:
+ """Get full track details by id."""
+ itempath = await self._get_filepath(MediaType.TRACK, prov_track_id)
+ return await self._parse_track(itempath)
+
+ async def get_playlist(self, prov_playlist_id: str) -> Playlist:
+ """Get full playlist details by id."""
+ itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
+ return await self._parse_playlist(itempath)
+
    async def get_album_tracks(self, prov_album_id: str) -> List[Track]:
        """Get album tracks for given album id."""
        # filesystem items are always stored in db so we can query the database
        db_album = await self.mass.music.albums.get_db_item_by_prov_id(
            prov_album_id, provider_id=self.id
        )
        if db_album is None:
            raise MediaNotFoundError(f"Album not found: {prov_album_id}")
        # TODO: adjust to json query instead of text search
        query = f"SELECT * FROM tracks WHERE albums LIKE '%\"{db_album.item_id}\"%'"
        query += f" AND provider_ids LIKE '%\"{self.type.value}\"%'"
        result = []
        for track in await self.mass.music.tracks.get_db_items_by_query(query):
            track.album = db_album
            # a track may appear on multiple albums with different positions,
            # so take disc/track number from this album's mapping
            if album_mapping := next(
                (x for x in track.albums if x.item_id == db_album.item_id), None
            ):
                track.disc_number = album_mapping.disc_number
                track.track_number = album_mapping.track_number
            result.append(track)
        # sort by disc first, then track number (missing values sort first)
        return sorted(result, key=lambda x: (x.disc_number or 0, x.track_number or 0))
+
    async def get_playlist_tracks(self, prov_playlist_id: str) -> List[Track]:
        """Get playlist tracks for given playlist id."""
        result = []
        playlist_path = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
        if not await self.exists(playlist_path):
            raise MediaNotFoundError(f"Playlist path does not exist: {playlist_path}")
        # relative entries in the playlist resolve against the playlist's folder
        parentdir = os.path.dirname(playlist_path)
        _, ext = playlist_path.rsplit(".", 1)
        try:
            async with self.open_file(playlist_path, "r") as _file:
                playlist_data = await _file.read()

            # pick the parser matching the file extension (pls is the only non-m3u)
            if ext in ("m3u", "m3u8"):
                playlist_lines = await parse_m3u(playlist_data)
            else:
                playlist_lines = await parse_pls(playlist_data)

            for line_no, playlist_line in enumerate(playlist_lines):

                if media_item := await self._parse_playlist_line(
                    playlist_line, parentdir
                ):
                    # use the linenumber as position for easier deletions
                    media_item.position = line_no
                    result.append(media_item)

        except Exception as err:  # pylint: disable=broad-except
            # a malformed playlist must not crash callers; log and return what we have
            self.logger.warning(
                "Error while parsing playlist %s", playlist_path, exc_info=err
            )
        return result
+
+ async def _parse_playlist_line(
+ self, line: str, playlist_path: str
+ ) -> Track | Radio | None:
+ """Try to parse a track from a playlist line."""
+ try:
+ # try to treat uri as (relative) filename
+ if "://" not in line:
+ for filename in (line, os.path.join(playlist_path, line)):
+ if not await self.exists(filename):
+ continue
+ file_path = await self.resolve(filename)
+ return await self._parse_track(file_path)
+ # fallback to generic uri parsing
+ return await self.mass.music.get_item_by_uri(line)
+ except MusicAssistantError as err:
+ self.logger.warning(
+ "Could not parse uri/file %s to track: %s", line, str(err)
+ )
+ return None
+
+ async def add_playlist_tracks(
+ self, prov_playlist_id: str, prov_track_ids: List[str]
+ ) -> None:
+ """Add track(s) to playlist."""
+ itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
+ if not await self.exists(itempath):
+ raise MediaNotFoundError(f"Playlist path does not exist: {itempath}")
+ async with self.open_file(itempath, "r") as _file:
+ cur_data = await _file.read()
+ async with self.open_file(itempath, "w") as _file:
+ await _file.write(cur_data)
+ for uri in prov_track_ids:
+ await _file.write(f"\n{uri}")
+
+ async def remove_playlist_tracks(
+ self, prov_playlist_id: str, positions_to_remove: Tuple[int]
+ ) -> None:
+ """Remove track(s) from playlist."""
+ itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
+ if not await self.exists(itempath):
+ raise MediaNotFoundError(f"Playlist path does not exist: {itempath}")
+ cur_lines = []
+ async with self.open_file(itempath, "r") as _file:
+ for line_no, line in enumerate(await _file.readlines()):
+ line = urllib.parse.unquote(line.strip())
+ if line_no not in positions_to_remove:
+ cur_lines.append(line)
+ async with self.open_file(itempath, "w") as _file:
+ for uri in cur_lines:
+ await _file.write(f"{uri}\n")
+
+ async def create_playlist(self, name: str) -> Playlist:
+ """Create a new playlist on provider with given name."""
+ # creating a new playlist on the filesystem is as easy
+ # as creating a new (empty) file with the m3u extension...
+ filename = await self.resolve(f"{name}.m3u")
+ async with self.open_file(filename, "w") as _file:
+ await _file.write("\n")
+ playlist = await self._parse_playlist(filename)
+ db_playlist = await self.mass.music.playlists.add_db_item(playlist)
+ return db_playlist
+
+ async def get_stream_details(self, item_id: str) -> StreamDetails:
+ """Return the content details for the given track when it will be streamed."""
+ itempath = await self._get_filepath(MediaType.TRACK, item_id)
+ if not await self.exists(itempath):
+ raise MediaNotFoundError(f"Track path does not exist: {itempath}")
+
+ metadata = await parse_tags(itempath)
+ stat = await self.mass.loop.run_in_executor(None, os.stat, itempath)
+
+ return StreamDetails(
+ provider=self.type,
+ item_id=item_id,
+ content_type=ContentType.try_parse(metadata.format),
+ media_type=MediaType.TRACK,
+ duration=metadata.duration,
+ size=stat.st_size,
+ sample_rate=metadata.sample_rate,
+ bit_depth=metadata.bits_per_sample,
+ direct=itempath,
+ )
+
    async def _parse_track(self, track_path: str) -> Track:
        """Try to parse a track from a filename by reading its tags."""

        if not await self.exists(track_path):
            raise MediaNotFoundError(f"Track path does not exist: {track_path}")

        track_item_id = self._get_item_id(track_path)

        # parse tags
        tags = await parse_tags(track_path)

        # split e.g. "Title (Remix)" into name + version
        name, version = parse_title_and_version(tags.title)
        track = Track(
            item_id=track_item_id,
            provider=self.type,
            name=name,
            version=version,
        )

        # album
        if tags.album:
            # work out if we have an album folder
            album_dir = get_parentdir(track_path, tags.album)

            # album artist(s)
            if tags.album_artists:
                album_artists = []
                for index, album_artist_str in enumerate(tags.album_artists):
                    # work out if we have an artist folder
                    artist_dir = get_parentdir(track_path, album_artist_str)
                    artist = await self._parse_artist(
                        album_artist_str, artist_path=artist_dir
                    )
                    if not artist.musicbrainz_id:
                        # musicbrainz id tags are positional: same index as the artist tag
                        try:
                            artist.musicbrainz_id = tags.musicbrainz_albumartistids[
                                index
                            ]
                        except IndexError:
                            pass
                    album_artists.append(artist)
            else:
                # always fallback to various artists as album artist if user did not tag album artist
                # ID3 tag properly because we must have an album artist
                self.logger.warning(
                    "%s is missing ID3 tag [albumartist], using %s as fallback",
                    track_path,
                    VARIOUS_ARTISTS,
                )
                album_artists = [await self._parse_artist(name=VARIOUS_ARTISTS)]

            track.album = await self._parse_album(
                tags.album,
                album_dir,
                artists=album_artists,
            )
        else:
            self.logger.warning("%s is missing ID3 tag [album]", track_path)

        # track artist(s)
        for index, track_artist_str in enumerate(tags.artists):
            # re-use album artist details if possible
            if track.album:
                if artist := next(
                    (x for x in track.album.artists if x.name == track_artist_str), None
                ):
                    track.artists.append(artist)
                    continue
            artist = await self._parse_artist(track_artist_str)
            if not artist.musicbrainz_id:
                # positional lookup, same as for album artists above
                try:
                    artist.musicbrainz_id = tags.musicbrainz_artistids[index]
                except IndexError:
                    pass
            track.artists.append(artist)

        # cover image - prefer album image, fallback to embedded
        if track.album and track.album.image:
            track.metadata.images = [track.album.image]
        elif tags.has_cover_image:
            # we do not actually embed the image in the metadata because that would consume too
            # much space and bandwidth. Instead we set the filename as value so the image can
            # be retrieved later in realtime.
            track.metadata.images = [MediaItemImage(ImageType.THUMB, track_path, True)]
            if track.album:
                # set embedded cover on album
                track.album.metadata.images = track.metadata.images

        # parse other info
        track.duration = tags.duration or 0
        track.metadata.genres = tags.genres
        track.disc_number = tags.disc
        track.track_number = tags.track
        track.isrc = tags.get("isrc")
        track.metadata.copyright = tags.get("copyright")
        track.metadata.lyrics = tags.get("lyrics")
        track.musicbrainz_id = tags.musicbrainz_trackid
        if track.album:
            # backfill album details from the track tags where missing
            if not track.album.musicbrainz_id:
                track.album.musicbrainz_id = tags.musicbrainz_releasegroupid
            if not track.album.year:
                track.album.year = tags.year
            if not track.album.upc:
                track.album.upc = tags.get("barcode")
        # try to parse albumtype
        if track.album and track.album.album_type == AlbumType.UNKNOWN:
            album_type = tags.album_type
            if album_type and "compilation" in album_type:
                track.album.album_type = AlbumType.COMPILATION
            elif album_type and "single" in album_type:
                track.album.album_type = AlbumType.SINGLE
            elif album_type and "album" in album_type:
                track.album.album_type = AlbumType.ALBUM
            elif track.album.sort_name in track.sort_name:
                # album named after the track suggests a single release
                track.album.album_type = AlbumType.SINGLE

        # set checksum to invalidate any cached listings
        checksum_timestamp = str(int(time()))
        track.metadata.checksum = checksum_timestamp
        if track.album:
            track.album.metadata.checksum = checksum_timestamp
            for artist in track.album.artists:
                artist.metadata.checksum = checksum_timestamp

        # NOTE(review): this first assignment is a dead store, it is
        # unconditionally overwritten two statements below
        quality_details = ""
        content_type = ContentType.try_parse(tags.format)
        quality_details = f"{int(tags.bit_rate / 1000)} kbps"
        if content_type == ContentType.MP3:
            quality = MediaQuality.LOSSY_MP3
        elif content_type == ContentType.OGG:
            quality = MediaQuality.LOSSY_OGG
        elif content_type == ContentType.AAC:
            quality = MediaQuality.LOSSY_AAC
        elif content_type == ContentType.M4A:
            quality = MediaQuality.LOSSY_M4A
        elif content_type.is_lossless():
            quality = MediaQuality.LOSSLESS
            # NOTE(review): this interpolates bit_rate but labels it "bit";
            # bits_per_sample (bit depth) looks intended — confirm
            quality_details = f"{tags.sample_rate / 1000} Khz / {tags.bit_rate} bit"
            if tags.sample_rate > 192000:
                quality = MediaQuality.LOSSLESS_HI_RES_4
            elif tags.sample_rate > 96000:
                quality = MediaQuality.LOSSLESS_HI_RES_3
            elif tags.sample_rate > 48000:
                quality = MediaQuality.LOSSLESS_HI_RES_2
            elif tags.bits_per_sample > 16:
                quality = MediaQuality.LOSSLESS_HI_RES_1
        else:
            quality = MediaQuality.UNKNOWN

        track.add_provider_id(
            MediaItemProviderId(
                item_id=track_item_id,
                prov_type=self.type,
                prov_id=self.id,
                quality=quality,
                details=quality_details,
                url=track_path,
            )
        )
        return track
+
+ async def _parse_artist(
+ self,
+ name: Optional[str] = None,
+ artist_path: Optional[str] = None,
+ ) -> Artist | None:
+ """Lookup metadata in Artist folder."""
+ assert name or artist_path
+ if not artist_path:
+ # create fake path
+ artist_path = os.path.join(self.config.path, name)
+
+ artist_item_id = self._get_item_id(artist_path)
+ if not name:
+ name = artist_path.split(os.sep)[-1]
+
+ artist = Artist(
+ artist_item_id,
+ self.type,
+ name,
+ provider_ids={
+ MediaItemProviderId(artist_item_id, self.type, self.id, url=artist_path)
+ },
+ musicbrainz_id=VARIOUS_ARTISTS_ID
+ if compare_strings(name, VARIOUS_ARTISTS)
+ else None,
+ )
+
+ if not await self.exists(artist_path):
+ # return basic object if there is no dedicated artist folder
+ return artist
+
+ nfo_file = os.path.join(artist_path, "artist.nfo")
+ if await self.exists(nfo_file):
+ # found NFO file with metadata
+ # https://kodi.wiki/view/NFO_files/Artists
+ async with self.open_file(nfo_file, "r") as _file:
+ data = await _file.read()
+ info = await self.mass.loop.run_in_executor(None, xmltodict.parse, data)
+ info = info["artist"]
+ artist.name = info.get("title", info.get("name", name))
+ if sort_name := info.get("sortname"):
+ artist.sort_name = sort_name
+ if musicbrainz_id := info.get("musicbrainzartistid"):
+ artist.musicbrainz_id = musicbrainz_id
+ if descripton := info.get("biography"):
+ artist.metadata.description = descripton
+ if genre := info.get("genre"):
+ artist.metadata.genres = set(split_items(genre))
+ # find local images
+ artist.metadata.images = await self._get_local_images(artist_path) or None
+
+ return artist
+
    async def _parse_album(
        self, name: Optional[str], album_path: Optional[str], artists: List[Artist]
    ) -> Album | None:
        """Lookup metadata in Album folder."""
        assert (name or album_path) and artists
        if not album_path:
            # create fake path
            album_path = os.path.join(self.config.path, artists[0].name, name)

        album_item_id = self._get_item_id(album_path)
        if not name:
            name = album_path.split(os.sep)[-1]

        album = Album(
            album_item_id,
            self.type,
            name,
            artists=artists,
            provider_ids={
                MediaItemProviderId(album_item_id, self.type, self.id, url=album_path)
            },
        )

        if not await self.exists(album_path):
            # return basic object if there is no dedicated album folder
            return album

        nfo_file = os.path.join(album_path, "album.nfo")
        if await self.exists(nfo_file):
            # found NFO file with metadata
            # https://kodi.wiki/view/NFO_files/Artists
            async with self.open_file(nfo_file) as _file:
                data = await _file.read()
            info = await self.mass.loop.run_in_executor(None, xmltodict.parse, data)
            info = info["album"]
            album.name = info.get("title", info.get("name", name))
            if sort_name := info.get("sortname"):
                album.sort_name = sort_name
            if musicbrainz_id := info.get("musicbrainzreleasegroupid"):
                album.musicbrainz_id = musicbrainz_id
            if mb_artist_id := info.get("musicbrainzalbumartistid"):
                # NOTE(review): assumes Album exposes a singular `artist`
                # accessor alongside `artists` — confirm against the model
                if album.artist and not album.artist.musicbrainz_id:
                    album.artist.musicbrainz_id = mb_artist_id
            if description := info.get("review"):
                album.metadata.description = description
            if year := info.get("year"):
                album.year = int(year)
            if genre := info.get("genre"):
                album.metadata.genres = set(split_items(genre))
        # parse name/version
        album.name, album.version = parse_title_and_version(album.name)

        # find local images
        album.metadata.images = await self._get_local_images(album_path) or None

        return album
+
+ async def _parse_playlist(self, playlist_path: str) -> Playlist:
+ """Parse playlist from file."""
+ playlist_item_id = self._get_item_id(playlist_path)
+
+ if not await self.exists(playlist_path):
+ raise MediaNotFoundError(f"Playlist path does not exist: {playlist_path}")
+
+ playlist_path_base, ext = playlist_path.rsplit(".", 1)
+ name = playlist_path_base.split(os.sep)[-1]
+
+ playlist = Playlist(playlist_item_id, provider=self.type, name=name)
+ playlist.is_editable = ext != "pls" # can only edit m3u playlists
+
+ playlist.add_provider_id(
+ MediaItemProviderId(
+ item_id=playlist_item_id,
+ prov_type=self.type,
+ prov_id=self.id,
+ url=playlist_path,
+ )
+ )
+ playlist.owner = self._attr_name
+ getmtime = wrap(os.path.getmtime)
+ mtime = await getmtime(playlist_path)
+ checksum = f"{SCHEMA_VERSION}.{int(mtime)}"
+ playlist.metadata.checksum = checksum
+ return playlist
+
+ async def exists(self, file_path: str) -> bool:
+ """Return bool is this FileSystem musicprovider has given file/dir."""
+ if not file_path:
+ return False # guard
+ file_path = await self.resolve(file_path)
+ _exists = wrap(os.path.exists)
+ return await _exists(file_path)
+
+ @asynccontextmanager
+ async def open_file(self, file_path: str, mode="rb") -> AsyncFileIO:
+ """Return (async) handle to given file."""
+ # ensure we have a full path and not relative
+ if self.config.path not in file_path:
+ file_path = os.path.join(self.config.path, file_path)
+ file_path = await self.resolve(file_path)
+ async with aiofiles.open(file_path, mode) as _file:
+ yield _file
+
+ async def resolve(self, file_path: str) -> str:
+ """Resolve local accessible file."""
+ # remote file locations should return a tempfile here so this is future proofing
+ if self.config.path not in file_path:
+ file_path = os.path.join(self.config.path, file_path)
+ return file_path
+
    async def _get_filepath(
        self, media_type: MediaType, prov_item_id: str
    ) -> str | None:
        """Get full filepath on disk for item_id.

        :param media_type: media type to look up (determines the db table).
        :param prov_item_id: provider item id to resolve to a file path.
        :return: absolute file path, or None when no matching row is found.
        """
        if prov_item_id is None:
            return None  # guard
        # funky sql queries go here ;-)
        # table name derives from the media type value, e.g. "tracks"/"albums"
        table = f"{media_type.value}s"
        # NOTE(review): self.id and prov_item_id are interpolated directly into
        # the SQL string; ids appear to be internally generated, but bound
        # parameters would be safer if any id can ever contain a quote —
        # confirm whether the database API supports parameter binding here.
        query = (
            f"SELECT json_extract(json_each.value, '$.url') as url FROM {table}"
            " ,json_each(provider_ids) WHERE"
            f" json_extract(json_each.value, '$.prov_id') = '{self.id}'"
            f" AND json_extract(json_each.value, '$.item_id') = '{prov_item_id}'"
        )
        for db_row in await self.mass.database.get_rows_from_query(query):
            file_path = db_row["url"]
            # ensure we have a full path and not relative
            if self.config.path not in file_path:
                file_path = os.path.join(self.config.path, file_path)
            # first match wins
            return file_path
        return None
+
+ def _get_item_id(self, file_path: str) -> str:
+ """Create item id from filename."""
+ return create_safe_string(file_path.replace(self.config.path, ""))
+
+ async def _get_local_images(self, folder: str) -> List[MediaItemImage]:
+ """Return local images found in a given folderpath."""
+ images = []
+ async for _path in scantree(folder):
+ if "." not in _path.path or _path.is_dir():
+ continue
+ for ext in IMAGE_EXTENSIONS:
+ if not _path.path.endswith(f".{ext}"):
+ continue
+ filename = _path.path.rsplit(os.sep, 1)[-1].replace(f".{ext}", "")
+ try:
+ images.append(MediaItemImage(ImageType(filename), _path.path, True))
+ except ValueError:
+ if "folder" in filename:
+ images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
+ elif "AlbumArt" in filename:
+ images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
+ elif "Artist" in filename:
+ images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
+ return images
+++ /dev/null
-"""Qobuz musicprovider support for MusicAssistant."""
-from __future__ import annotations
-
-import datetime
-import hashlib
-import time
-from json import JSONDecodeError
-from typing import AsyncGenerator, List, Optional, Tuple
-
-import aiohttp
-from asyncio_throttle import Throttler
-
-from music_assistant.helpers.app_vars import ( # pylint: disable=no-name-in-module
- app_var,
-)
-from music_assistant.helpers.util import parse_title_and_version, try_parse_int
-from music_assistant.models.enums import MusicProviderFeature, ProviderType
-from music_assistant.models.errors import LoginFailed, MediaNotFoundError
-from music_assistant.models.media_items import (
- Album,
- AlbumType,
- Artist,
- ContentType,
- ImageType,
- MediaItemImage,
- MediaItemProviderId,
- MediaItemType,
- MediaQuality,
- MediaType,
- Playlist,
- StreamDetails,
- Track,
-)
-from music_assistant.models.music_provider import MusicProvider
-
-
-class QobuzProvider(MusicProvider):
- """Provider for the Qobux music service."""
-
- _attr_type = ProviderType.QOBUZ
- _attr_name = "Qobuz"
- _user_auth_info = None
- _throttler = Throttler(rate_limit=4, period=1)
-
- @property
- def supported_features(self) -> Tuple[MusicProviderFeature]:
- """Return the features supported by this MusicProvider."""
- return (
- MusicProviderFeature.LIBRARY_ARTISTS,
- MusicProviderFeature.LIBRARY_ALBUMS,
- MusicProviderFeature.LIBRARY_TRACKS,
- MusicProviderFeature.LIBRARY_PLAYLISTS,
- MusicProviderFeature.LIBRARY_ARTISTS_EDIT,
- MusicProviderFeature.LIBRARY_ALBUMS_EDIT,
- MusicProviderFeature.LIBRARY_PLAYLISTS_EDIT,
- MusicProviderFeature.LIBRARY_TRACKS_EDIT,
- MusicProviderFeature.PLAYLIST_TRACKS_EDIT,
- MusicProviderFeature.BROWSE,
- MusicProviderFeature.SEARCH,
- MusicProviderFeature.ARTIST_ALBUMS,
- MusicProviderFeature.ARTIST_TOPTRACKS,
- )
-
- async def setup(self) -> bool:
- """Handle async initialization of the provider."""
- if not self.config.enabled:
- return False
- if not self.config.username or not self.config.password:
- raise LoginFailed("Invalid login credentials")
- # try to get a token, raise if that fails
- token = await self._auth_token()
- if not token:
- raise LoginFailed(f"Login failed for user {self.config.username}")
- return True
-
- async def search(
- self, search_query: str, media_types=Optional[List[MediaType]], limit: int = 5
- ) -> List[MediaItemType]:
- """
- Perform search on musicprovider.
-
- :param search_query: Search query.
- :param media_types: A list of media_types to include. All types if None.
- :param limit: Number of items to return in the search (per type).
- """
- result = []
- params = {"query": search_query, "limit": limit}
- if len(media_types) == 1:
- # qobuz does not support multiple searchtypes, falls back to all if no type given
- if media_types[0] == MediaType.ARTIST:
- params["type"] = "artists"
- if media_types[0] == MediaType.ALBUM:
- params["type"] = "albums"
- if media_types[0] == MediaType.TRACK:
- params["type"] = "tracks"
- if media_types[0] == MediaType.PLAYLIST:
- params["type"] = "playlists"
- if searchresult := await self._get_data("catalog/search", **params):
- if "artists" in searchresult:
- result += [
- await self._parse_artist(item)
- for item in searchresult["artists"]["items"]
- if (item and item["id"])
- ]
- if "albums" in searchresult:
- result += [
- await self._parse_album(item)
- for item in searchresult["albums"]["items"]
- if (item and item["id"])
- ]
- if "tracks" in searchresult:
- result += [
- await self._parse_track(item)
- for item in searchresult["tracks"]["items"]
- if (item and item["id"])
- ]
- if "playlists" in searchresult:
- result += [
- await self._parse_playlist(item)
- for item in searchresult["playlists"]["items"]
- if (item and item["id"])
- ]
- return result
-
- async def get_library_artists(self) -> AsyncGenerator[Artist, None]:
- """Retrieve all library artists from Qobuz."""
- endpoint = "favorite/getUserFavorites"
- for item in await self._get_all_items(endpoint, key="artists", type="artists"):
- if item and item["id"]:
- yield await self._parse_artist(item)
-
- async def get_library_albums(self) -> AsyncGenerator[Album, None]:
- """Retrieve all library albums from Qobuz."""
- endpoint = "favorite/getUserFavorites"
- for item in await self._get_all_items(endpoint, key="albums", type="albums"):
- if item and item["id"]:
- yield await self._parse_album(item)
-
- async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
- """Retrieve library tracks from Qobuz."""
- endpoint = "favorite/getUserFavorites"
- for item in await self._get_all_items(endpoint, key="tracks", type="tracks"):
- if item and item["id"]:
- yield await self._parse_track(item)
-
- async def get_library_playlists(self) -> AsyncGenerator[Playlist, None]:
- """Retrieve all library playlists from the provider."""
- endpoint = "playlist/getUserPlaylists"
- for item in await self._get_all_items(endpoint, key="playlists"):
- if item and item["id"]:
- yield await self._parse_playlist(item)
-
- async def get_artist(self, prov_artist_id) -> Artist:
- """Get full artist details by id."""
- params = {"artist_id": prov_artist_id}
- artist_obj = await self._get_data("artist/get", **params)
- return (
- await self._parse_artist(artist_obj)
- if artist_obj and artist_obj["id"]
- else None
- )
-
- async def get_album(self, prov_album_id) -> Album:
- """Get full album details by id."""
- params = {"album_id": prov_album_id}
- album_obj = await self._get_data("album/get", **params)
- return (
- await self._parse_album(album_obj)
- if album_obj and album_obj["id"]
- else None
- )
-
- async def get_track(self, prov_track_id) -> Track:
- """Get full track details by id."""
- params = {"track_id": prov_track_id}
- track_obj = await self._get_data("track/get", **params)
- return (
- await self._parse_track(track_obj)
- if track_obj and track_obj["id"]
- else None
- )
-
- async def get_playlist(self, prov_playlist_id) -> Playlist:
- """Get full playlist details by id."""
- params = {"playlist_id": prov_playlist_id}
- playlist_obj = await self._get_data("playlist/get", **params)
- return (
- await self._parse_playlist(playlist_obj)
- if playlist_obj and playlist_obj["id"]
- else None
- )
-
- async def get_album_tracks(self, prov_album_id) -> List[Track]:
- """Get all album tracks for given album id."""
- params = {"album_id": prov_album_id}
- return [
- await self._parse_track(item)
- for item in await self._get_all_items("album/get", **params, key="tracks")
- if (item and item["id"])
- ]
-
- async def get_playlist_tracks(self, prov_playlist_id) -> List[Track]:
- """Get all playlist tracks for given playlist id."""
- count = 0
- result = []
- for item in await self._get_all_items(
- "playlist/get",
- key="tracks",
- playlist_id=prov_playlist_id,
- extra="tracks",
- ):
- if not (item and item["id"]):
- continue
- track = await self._parse_track(item)
- # use count as position
- track.position = count
- result.append(track)
- count += 1
- return result
-
- async def get_artist_albums(self, prov_artist_id) -> List[Album]:
- """Get a list of albums for the given artist."""
- endpoint = "artist/get"
- return [
- await self._parse_album(item)
- for item in await self._get_all_items(
- endpoint, key="albums", artist_id=prov_artist_id, extra="albums"
- )
- if (item and item["id"] and str(item["artist"]["id"]) == prov_artist_id)
- ]
-
- async def get_artist_toptracks(self, prov_artist_id) -> List[Track]:
- """Get a list of most popular tracks for the given artist."""
- result = await self._get_data(
- "artist/get",
- artist_id=prov_artist_id,
- extra="playlists",
- offset=0,
- limit=25,
- )
- if result and result["playlists"]:
- return [
- await self._parse_track(item)
- for item in result["playlists"][0]["tracks"]["items"]
- if (item and item["id"])
- ]
- # fallback to search
- artist = await self.get_artist(prov_artist_id)
- searchresult = await self._get_data(
- "catalog/search", query=artist.name, limit=25, type="tracks"
- )
- return [
- await self._parse_track(item)
- for item in searchresult["tracks"]["items"]
- if (
- item
- and item["id"]
- and "performer" in item
- and str(item["performer"]["id"]) == str(prov_artist_id)
- )
- ]
-
- async def get_similar_artists(self, prov_artist_id):
- """Get similar artists for given artist."""
- # https://www.qobuz.com/api.json/0.2/artist/getSimilarArtists?artist_id=220020&offset=0&limit=3
-
- async def library_add(self, prov_item_id, media_type: MediaType):
- """Add item to library."""
- result = None
- if media_type == MediaType.ARTIST:
- result = await self._get_data("favorite/create", artist_id=prov_item_id)
- elif media_type == MediaType.ALBUM:
- result = await self._get_data("favorite/create", album_ids=prov_item_id)
- elif media_type == MediaType.TRACK:
- result = await self._get_data("favorite/create", track_ids=prov_item_id)
- elif media_type == MediaType.PLAYLIST:
- result = await self._get_data(
- "playlist/subscribe", playlist_id=prov_item_id
- )
- return result
-
- async def library_remove(self, prov_item_id, media_type: MediaType):
- """Remove item from library."""
- result = None
- if media_type == MediaType.ARTIST:
- result = await self._get_data("favorite/delete", artist_ids=prov_item_id)
- elif media_type == MediaType.ALBUM:
- result = await self._get_data("favorite/delete", album_ids=prov_item_id)
- elif media_type == MediaType.TRACK:
- result = await self._get_data("favorite/delete", track_ids=prov_item_id)
- elif media_type == MediaType.PLAYLIST:
- playlist = await self.get_playlist(prov_item_id)
- if playlist.is_editable:
- result = await self._get_data(
- "playlist/delete", playlist_id=prov_item_id
- )
- else:
- result = await self._get_data(
- "playlist/unsubscribe", playlist_id=prov_item_id
- )
- return result
-
- async def add_playlist_tracks(
- self, prov_playlist_id: str, prov_track_ids: List[str]
- ) -> None:
- """Add track(s) to playlist."""
- return await self._get_data(
- "playlist/addTracks",
- playlist_id=prov_playlist_id,
- track_ids=",".join(prov_track_ids),
- playlist_track_ids=",".join(prov_track_ids),
- )
-
- async def remove_playlist_tracks(
- self, prov_playlist_id: str, positions_to_remove: Tuple[int]
- ) -> None:
- """Remove track(s) from playlist."""
- playlist_track_ids = set()
- for track in await self.get_playlist_tracks(prov_playlist_id):
- if track.position in positions_to_remove:
- playlist_track_ids.add(str(track["playlist_track_id"]))
- if len(playlist_track_ids) == positions_to_remove:
- break
- return await self._get_data(
- "playlist/deleteTracks",
- playlist_id=prov_playlist_id,
- playlist_track_ids=",".join(playlist_track_ids),
- )
-
- async def get_stream_details(self, item_id: str) -> StreamDetails:
- """Return the content details for the given track when it will be streamed."""
- streamdata = None
- for format_id in [27, 7, 6, 5]:
- # it seems that simply requesting for highest available quality does not work
- # from time to time the api response is empty for this request ?!
- result = await self._get_data(
- "track/getFileUrl",
- sign_request=True,
- format_id=format_id,
- track_id=item_id,
- intent="stream",
- )
- if result and result.get("url"):
- streamdata = result
- break
- if not streamdata:
- raise MediaNotFoundError(f"Unable to retrieve stream details for {item_id}")
- if streamdata["mime_type"] == "audio/mpeg":
- content_type = ContentType.MPEG
- elif streamdata["mime_type"] == "audio/flac":
- content_type = ContentType.FLAC
- else:
- raise MediaNotFoundError(f"Unsupported mime type for {item_id}")
- # report playback started as soon as the streamdetails are requested
- self.mass.create_task(self._report_playback_started(streamdata))
- return StreamDetails(
- item_id=str(item_id),
- provider=self.type,
- content_type=content_type,
- duration=streamdata["duration"],
- sample_rate=int(streamdata["sampling_rate"] * 1000),
- bit_depth=streamdata["bit_depth"],
- data=streamdata, # we need these details for reporting playback
- expires=time.time() + 1800, # not sure about the real allowed value
- direct=streamdata["url"],
- callback=self._report_playback_stopped,
- )
-
- async def _report_playback_started(self, streamdata: dict) -> None:
- """Report playback start to qobuz."""
- # TODO: need to figure out if the streamed track is purchased by user
- # https://www.qobuz.com/api.json/0.2/purchase/getUserPurchasesIds?limit=5000&user_id=xxxxxxx
- # {"albums":{"total":0,"items":[]},"tracks":{"total":0,"items":[]},"user":{"id":xxxx,"login":"xxxxx"}}
- device_id = self._user_auth_info["user"]["device"]["id"]
- credential_id = self._user_auth_info["user"]["credential"]["id"]
- user_id = self._user_auth_info["user"]["id"]
- format_id = streamdata["format_id"]
- timestamp = int(time.time())
- events = [
- {
- "online": True,
- "sample": False,
- "intent": "stream",
- "device_id": device_id,
- "track_id": streamdata["track_id"],
- "purchase": False,
- "date": timestamp,
- "credential_id": credential_id,
- "user_id": user_id,
- "local": False,
- "format_id": format_id,
- }
- ]
- await self._post_data("track/reportStreamingStart", data=events)
-
- async def _report_playback_stopped(self, streamdetails: StreamDetails) -> None:
- """Report playback stop to qobuz."""
- user_id = self._user_auth_info["user"]["id"]
- await self._get_data(
- "/track/reportStreamingEnd",
- user_id=user_id,
- track_id=str(streamdetails.item_id),
- duration=try_parse_int(streamdetails.seconds_streamed),
- )
-
- async def _parse_artist(self, artist_obj: dict):
- """Parse qobuz artist object to generic layout."""
- artist = Artist(
- item_id=str(artist_obj["id"]), provider=self.type, name=artist_obj["name"]
- )
- artist.add_provider_id(
- MediaItemProviderId(
- item_id=str(artist_obj["id"]),
- prov_type=self.type,
- prov_id=self.id,
- url=artist_obj.get(
- "url", f'https://open.qobuz.com/artist/{artist_obj["id"]}'
- ),
- )
- )
- if img := self.__get_image(artist_obj):
- artist.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
- if artist_obj.get("biography"):
- artist.metadata.description = artist_obj["biography"].get("content")
- return artist
-
- async def _parse_album(self, album_obj: dict, artist_obj: dict = None):
- """Parse qobuz album object to generic layout."""
- if not artist_obj and "artist" not in album_obj:
- # artist missing in album info, return full abum instead
- return await self.get_album(album_obj["id"])
- name, version = parse_title_and_version(
- album_obj["title"], album_obj.get("version")
- )
- album = Album(
- item_id=str(album_obj["id"]), provider=self.type, name=name, version=version
- )
- if album_obj["maximum_sampling_rate"] > 192:
- quality = MediaQuality.LOSSLESS_HI_RES_4
- elif album_obj["maximum_sampling_rate"] > 96:
- quality = MediaQuality.LOSSLESS_HI_RES_3
- elif album_obj["maximum_sampling_rate"] > 48:
- quality = MediaQuality.LOSSLESS_HI_RES_2
- elif album_obj["maximum_bit_depth"] > 16:
- quality = MediaQuality.LOSSLESS_HI_RES_1
- elif album_obj.get("format_id", 0) == 5:
- quality = MediaQuality.LOSSY_AAC
- else:
- quality = MediaQuality.LOSSLESS
- album.add_provider_id(
- MediaItemProviderId(
- item_id=str(album_obj["id"]),
- prov_type=self.type,
- prov_id=self.id,
- quality=quality,
- url=album_obj.get(
- "url", f'https://open.qobuz.com/album/{album_obj["id"]}'
- ),
- details=f'{album_obj["maximum_sampling_rate"]}kHz {album_obj["maximum_bit_depth"]}bit',
- available=album_obj["streamable"] and album_obj["displayable"],
- )
- )
-
- album.artist = await self._parse_artist(artist_obj or album_obj["artist"])
- if (
- album_obj.get("product_type", "") == "single"
- or album_obj.get("release_type", "") == "single"
- ):
- album.album_type = AlbumType.SINGLE
- elif (
- album_obj.get("product_type", "") == "compilation"
- or "Various" in album.artist.name
- ):
- album.album_type = AlbumType.COMPILATION
- elif (
- album_obj.get("product_type", "") == "album"
- or album_obj.get("release_type", "") == "album"
- ):
- album.album_type = AlbumType.ALBUM
- if "genre" in album_obj:
- album.metadata.genres = {album_obj["genre"]["name"]}
- if img := self.__get_image(album_obj):
- album.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
- if len(album_obj["upc"]) == 13:
- # qobuz writes ean as upc ?!
- album.upc = album_obj["upc"][1:]
- else:
- album.upc = album_obj["upc"]
- if "label" in album_obj:
- album.metadata.label = album_obj["label"]["name"]
- if album_obj.get("released_at"):
- album.year = datetime.datetime.fromtimestamp(album_obj["released_at"]).year
- if album_obj.get("copyright"):
- album.metadata.copyright = album_obj["copyright"]
- if album_obj.get("description"):
- album.metadata.description = album_obj["description"]
- return album
-
- async def _parse_track(self, track_obj: dict):
- """Parse qobuz track object to generic layout."""
- name, version = parse_title_and_version(
- track_obj["title"], track_obj.get("version")
- )
- track = Track(
- item_id=str(track_obj["id"]),
- provider=self.type,
- name=name,
- version=version,
- disc_number=track_obj["media_number"],
- track_number=track_obj["track_number"],
- duration=track_obj["duration"],
- position=track_obj.get("position"),
- )
- if track_obj.get("performer") and "Various " not in track_obj["performer"]:
- artist = await self._parse_artist(track_obj["performer"])
- if artist:
- track.artists.append(artist)
- if not track.artists:
- # try to grab artist from album
- if (
- track_obj.get("album")
- and track_obj["album"].get("artist")
- and "Various " not in track_obj["album"]["artist"]
- ):
- artist = await self._parse_artist(track_obj["album"]["artist"])
- if artist:
- track.artists.append(artist)
- if not track.artists:
- # last resort: parse from performers string
- for performer_str in track_obj["performers"].split(" - "):
- role = performer_str.split(", ")[1]
- name = performer_str.split(", ")[0]
- if "artist" in role.lower():
- artist = Artist(name, self.type, name)
- track.artists.append(artist)
- # TODO: fix grabbing composer from details
-
- if "album" in track_obj:
- album = await self._parse_album(track_obj["album"])
- if album:
- track.album = album
- if track_obj.get("isrc"):
- track.isrc = track_obj["isrc"]
- if track_obj.get("performers"):
- track.metadata.performers = {
- x.strip() for x in track_obj["performers"].split("-")
- }
- if track_obj.get("copyright"):
- track.metadata.copyright = track_obj["copyright"]
- if track_obj.get("audio_info"):
- track.metadata.replaygain = track_obj["audio_info"]["replaygain_track_gain"]
- if track_obj.get("parental_warning"):
- track.metadata.explicit = True
- if img := self.__get_image(track_obj):
- track.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
- # get track quality
- if track_obj["maximum_sampling_rate"] > 192:
- quality = MediaQuality.LOSSLESS_HI_RES_4
- elif track_obj["maximum_sampling_rate"] > 96:
- quality = MediaQuality.LOSSLESS_HI_RES_3
- elif track_obj["maximum_sampling_rate"] > 48:
- quality = MediaQuality.LOSSLESS_HI_RES_2
- elif track_obj["maximum_bit_depth"] > 16:
- quality = MediaQuality.LOSSLESS_HI_RES_1
- elif track_obj.get("format_id", 0) == 5:
- quality = MediaQuality.LOSSY_AAC
- else:
- quality = MediaQuality.LOSSLESS
- track.add_provider_id(
- MediaItemProviderId(
- item_id=str(track_obj["id"]),
- prov_type=self.type,
- prov_id=self.id,
- quality=quality,
- url=track_obj.get(
- "url", f'https://open.qobuz.com/track/{track_obj["id"]}'
- ),
- details=f'{track_obj["maximum_sampling_rate"]}kHz {track_obj["maximum_bit_depth"]}bit',
- available=track_obj["streamable"] and track_obj["displayable"],
- )
- )
- return track
-
- async def _parse_playlist(self, playlist_obj):
- """Parse qobuz playlist object to generic layout."""
- playlist = Playlist(
- item_id=str(playlist_obj["id"]),
- provider=self.type,
- name=playlist_obj["name"],
- owner=playlist_obj["owner"]["name"],
- )
- playlist.add_provider_id(
- MediaItemProviderId(
- item_id=str(playlist_obj["id"]),
- prov_type=self.type,
- prov_id=self.id,
- url=playlist_obj.get(
- "url", f'https://open.qobuz.com/playlist/{playlist_obj["id"]}'
- ),
- )
- )
- playlist.is_editable = (
- playlist_obj["owner"]["id"] == self._user_auth_info["user"]["id"]
- or playlist_obj["is_collaborative"]
- )
- if img := self.__get_image(playlist_obj):
- playlist.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
- playlist.metadata.checksum = str(playlist_obj["updated_at"])
- return playlist
-
- async def _auth_token(self):
- """Login to qobuz and store the token."""
- if self._user_auth_info:
- return self._user_auth_info["user_auth_token"]
- params = {
- "username": self.config.username,
- "password": self.config.password,
- "device_manufacturer_id": "music_assistant",
- }
- details = await self._get_data("user/login", **params)
- if details and "user" in details:
- self._user_auth_info = details
- self.logger.info(
- "Succesfully logged in to Qobuz as %s", details["user"]["display_name"]
- )
- self.mass.metadata.preferred_language = details["user"]["country_code"]
- return details["user_auth_token"]
-
- async def _get_all_items(self, endpoint, key="tracks", **kwargs):
- """Get all items from a paged list."""
- limit = 50
- offset = 0
- all_items = []
- while True:
- kwargs["limit"] = limit
- kwargs["offset"] = offset
- result = await self._get_data(endpoint, **kwargs)
- offset += limit
- if not result:
- break
- if not result.get(key) or not result[key].get("items"):
- break
- for item in result[key]["items"]:
- item["position"] = len(all_items) + 1
- all_items.append(item)
- if len(result[key]["items"]) < limit:
- break
- return all_items
-
- async def _get_data(self, endpoint, sign_request=False, **kwargs):
- """Get data from api."""
- url = f"http://www.qobuz.com/api.json/0.2/{endpoint}"
- headers = {"X-App-Id": app_var(0)}
- if endpoint != "user/login":
- auth_token = await self._auth_token()
- if not auth_token:
- self.logger.debug("Not logged in")
- return None
- headers["X-User-Auth-Token"] = auth_token
- if sign_request:
- signing_data = "".join(endpoint.split("/"))
- keys = list(kwargs.keys())
- keys.sort()
- for key in keys:
- signing_data += f"{key}{kwargs[key]}"
- request_ts = str(time.time())
- request_sig = signing_data + request_ts + app_var(1)
- request_sig = str(hashlib.md5(request_sig.encode()).hexdigest())
- kwargs["request_ts"] = request_ts
- kwargs["request_sig"] = request_sig
- kwargs["app_id"] = app_var(0)
- kwargs["user_auth_token"] = await self._auth_token()
- async with self._throttler:
- async with self.mass.http_session.get(
- url, headers=headers, params=kwargs, verify_ssl=False
- ) as response:
- try:
- # make sure status is 200
- assert response.status == 200
- result = await response.json()
- # check for error in json
- if error := result.get("error"):
- raise ValueError(error)
- if result.get("status") and "error" in result["status"]:
- raise ValueError(result["status"])
- except (
- aiohttp.ContentTypeError,
- JSONDecodeError,
- AssertionError,
- ValueError,
- ) as err:
- text = await response.text()
- self.logger.exception(
- "Error while processing %s: %s", endpoint, text, exc_info=err
- )
- return None
- return result
-
- async def _post_data(self, endpoint, params=None, data=None):
- """Post data to api."""
- if not params:
- params = {}
- if not data:
- data = {}
- url = f"http://www.qobuz.com/api.json/0.2/{endpoint}"
- params["app_id"] = app_var(0)
- params["user_auth_token"] = await self._auth_token()
- async with self.mass.http_session.post(
- url, params=params, json=data, verify_ssl=False
- ) as response:
- try:
- result = await response.json()
- # check for error in json
- if error := result.get("error"):
- raise ValueError(error)
- if result.get("status") and "error" in result["status"]:
- raise ValueError(result["status"])
- except (
- aiohttp.ContentTypeError,
- JSONDecodeError,
- AssertionError,
- ValueError,
- ) as err:
- text = await response.text()
- self.logger.exception(
- "Error while processing %s: %s", endpoint, text, exc_info=err
- )
- return None
- return result
-
- def __get_image(self, obj: dict) -> Optional[str]:
- """Try to parse image from Qobuz media object."""
- if obj.get("image"):
- for key in ["extralarge", "large", "medium", "small"]:
- if obj["image"].get(key):
- if "2a96cbd8b46e442fc41c2b86b821562f" in obj["image"][key]:
- continue
- return obj["image"][key]
- if obj.get("images300"):
- # playlists seem to use this strange format
- return obj["images300"][0]
- if obj.get("album"):
- return self.__get_image(obj["album"])
- if obj.get("artist"):
- return self.__get_image(obj["artist"])
- return None
--- /dev/null
+"""Package with Qobuz Music provider."""
+
+from .qobuz import QobuzProvider # noqa
--- /dev/null
+"""Qobuz musicprovider support for MusicAssistant."""
+from __future__ import annotations
+
+import datetime
+import hashlib
+import time
+from json import JSONDecodeError
+from typing import AsyncGenerator, List, Optional, Tuple
+
+import aiohttp
+from asyncio_throttle import Throttler
+
+from music_assistant.helpers.app_vars import ( # pylint: disable=no-name-in-module
+ app_var,
+)
+from music_assistant.helpers.util import parse_title_and_version, try_parse_int
+from music_assistant.models.enums import MusicProviderFeature, ProviderType
+from music_assistant.models.errors import LoginFailed, MediaNotFoundError
+from music_assistant.models.media_items import (
+ Album,
+ AlbumType,
+ Artist,
+ ContentType,
+ ImageType,
+ MediaItemImage,
+ MediaItemProviderId,
+ MediaItemType,
+ MediaQuality,
+ MediaType,
+ Playlist,
+ StreamDetails,
+ Track,
+)
+from music_assistant.models.music_provider import MusicProvider
+
+
+class QobuzProvider(MusicProvider):
+ """Provider for the Qobux music service."""
+
+ _attr_type = ProviderType.QOBUZ
+ _attr_name = "Qobuz"
+ _user_auth_info = None
+ _throttler = Throttler(rate_limit=4, period=1)
+
    @property
    def supported_features(self) -> Tuple[MusicProviderFeature, ...]:
        """Return the features supported by this MusicProvider."""
        # Qobuz supports full library management (read + edit) plus
        # browse/search and artist albums/top-tracks.
        return (
            MusicProviderFeature.LIBRARY_ARTISTS,
            MusicProviderFeature.LIBRARY_ALBUMS,
            MusicProviderFeature.LIBRARY_TRACKS,
            MusicProviderFeature.LIBRARY_PLAYLISTS,
            MusicProviderFeature.LIBRARY_ARTISTS_EDIT,
            MusicProviderFeature.LIBRARY_ALBUMS_EDIT,
            MusicProviderFeature.LIBRARY_PLAYLISTS_EDIT,
            MusicProviderFeature.LIBRARY_TRACKS_EDIT,
            MusicProviderFeature.PLAYLIST_TRACKS_EDIT,
            MusicProviderFeature.BROWSE,
            MusicProviderFeature.SEARCH,
            MusicProviderFeature.ARTIST_ALBUMS,
            MusicProviderFeature.ARTIST_TOPTRACKS,
        )
+
+ async def setup(self) -> bool:
+ """Handle async initialization of the provider."""
+ if not self.config.enabled:
+ return False
+ if not self.config.username or not self.config.password:
+ raise LoginFailed("Invalid login credentials")
+ # try to get a token, raise if that fails
+ token = await self._auth_token()
+ if not token:
+ raise LoginFailed(f"Login failed for user {self.config.username}")
+ return True
+
+ async def search(
+ self, search_query: str, media_types=Optional[List[MediaType]], limit: int = 5
+ ) -> List[MediaItemType]:
+ """
+ Perform search on musicprovider.
+
+ :param search_query: Search query.
+ :param media_types: A list of media_types to include. All types if None.
+ :param limit: Number of items to return in the search (per type).
+ """
+ result = []
+ params = {"query": search_query, "limit": limit}
+ if len(media_types) == 1:
+ # qobuz does not support multiple searchtypes, falls back to all if no type given
+ if media_types[0] == MediaType.ARTIST:
+ params["type"] = "artists"
+ if media_types[0] == MediaType.ALBUM:
+ params["type"] = "albums"
+ if media_types[0] == MediaType.TRACK:
+ params["type"] = "tracks"
+ if media_types[0] == MediaType.PLAYLIST:
+ params["type"] = "playlists"
+ if searchresult := await self._get_data("catalog/search", **params):
+ if "artists" in searchresult:
+ result += [
+ await self._parse_artist(item)
+ for item in searchresult["artists"]["items"]
+ if (item and item["id"])
+ ]
+ if "albums" in searchresult:
+ result += [
+ await self._parse_album(item)
+ for item in searchresult["albums"]["items"]
+ if (item and item["id"])
+ ]
+ if "tracks" in searchresult:
+ result += [
+ await self._parse_track(item)
+ for item in searchresult["tracks"]["items"]
+ if (item and item["id"])
+ ]
+ if "playlists" in searchresult:
+ result += [
+ await self._parse_playlist(item)
+ for item in searchresult["playlists"]["items"]
+ if (item and item["id"])
+ ]
+ return result
+
+ async def get_library_artists(self) -> AsyncGenerator[Artist, None]:
+ """Retrieve all library artists from Qobuz."""
+ endpoint = "favorite/getUserFavorites"
+ for item in await self._get_all_items(endpoint, key="artists", type="artists"):
+ if item and item["id"]:
+ yield await self._parse_artist(item)
+
+ async def get_library_albums(self) -> AsyncGenerator[Album, None]:
+ """Retrieve all library albums from Qobuz."""
+ endpoint = "favorite/getUserFavorites"
+ for item in await self._get_all_items(endpoint, key="albums", type="albums"):
+ if item and item["id"]:
+ yield await self._parse_album(item)
+
+ async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
+ """Retrieve library tracks from Qobuz."""
+ endpoint = "favorite/getUserFavorites"
+ for item in await self._get_all_items(endpoint, key="tracks", type="tracks"):
+ if item and item["id"]:
+ yield await self._parse_track(item)
+
+ async def get_library_playlists(self) -> AsyncGenerator[Playlist, None]:
+ """Retrieve all library playlists from the provider."""
+ endpoint = "playlist/getUserPlaylists"
+ for item in await self._get_all_items(endpoint, key="playlists"):
+ if item and item["id"]:
+ yield await self._parse_playlist(item)
+
+ async def get_artist(self, prov_artist_id) -> Artist:
+ """Get full artist details by id."""
+ params = {"artist_id": prov_artist_id}
+ artist_obj = await self._get_data("artist/get", **params)
+ return (
+ await self._parse_artist(artist_obj)
+ if artist_obj and artist_obj["id"]
+ else None
+ )
+
+ async def get_album(self, prov_album_id) -> Album:
+ """Get full album details by id."""
+ params = {"album_id": prov_album_id}
+ album_obj = await self._get_data("album/get", **params)
+ return (
+ await self._parse_album(album_obj)
+ if album_obj and album_obj["id"]
+ else None
+ )
+
+ async def get_track(self, prov_track_id) -> Track:
+ """Get full track details by id."""
+ params = {"track_id": prov_track_id}
+ track_obj = await self._get_data("track/get", **params)
+ return (
+ await self._parse_track(track_obj)
+ if track_obj and track_obj["id"]
+ else None
+ )
+
+ async def get_playlist(self, prov_playlist_id) -> Playlist:
+ """Get full playlist details by id."""
+ params = {"playlist_id": prov_playlist_id}
+ playlist_obj = await self._get_data("playlist/get", **params)
+ return (
+ await self._parse_playlist(playlist_obj)
+ if playlist_obj and playlist_obj["id"]
+ else None
+ )
+
+ async def get_album_tracks(self, prov_album_id) -> List[Track]:
+ """Get all album tracks for given album id."""
+ params = {"album_id": prov_album_id}
+ return [
+ await self._parse_track(item)
+ for item in await self._get_all_items("album/get", **params, key="tracks")
+ if (item and item["id"])
+ ]
+
+ async def get_playlist_tracks(self, prov_playlist_id) -> List[Track]:
+ """Get all playlist tracks for given playlist id."""
+ count = 0
+ result = []
+ for item in await self._get_all_items(
+ "playlist/get",
+ key="tracks",
+ playlist_id=prov_playlist_id,
+ extra="tracks",
+ ):
+ if not (item and item["id"]):
+ continue
+ track = await self._parse_track(item)
+ # use count as position
+ track.position = count
+ result.append(track)
+ count += 1
+ return result
+
+ async def get_artist_albums(self, prov_artist_id) -> List[Album]:
+ """Get a list of albums for the given artist."""
+ endpoint = "artist/get"
+ return [
+ await self._parse_album(item)
+ for item in await self._get_all_items(
+ endpoint, key="albums", artist_id=prov_artist_id, extra="albums"
+ )
+ if (item and item["id"] and str(item["artist"]["id"]) == prov_artist_id)
+ ]
+
+ async def get_artist_toptracks(self, prov_artist_id) -> List[Track]:
+ """Get a list of most popular tracks for the given artist."""
+ result = await self._get_data(
+ "artist/get",
+ artist_id=prov_artist_id,
+ extra="playlists",
+ offset=0,
+ limit=25,
+ )
+ if result and result["playlists"]:
+ return [
+ await self._parse_track(item)
+ for item in result["playlists"][0]["tracks"]["items"]
+ if (item and item["id"])
+ ]
+ # fallback to search
+ artist = await self.get_artist(prov_artist_id)
+ searchresult = await self._get_data(
+ "catalog/search", query=artist.name, limit=25, type="tracks"
+ )
+ return [
+ await self._parse_track(item)
+ for item in searchresult["tracks"]["items"]
+ if (
+ item
+ and item["id"]
+ and "performer" in item
+ and str(item["performer"]["id"]) == str(prov_artist_id)
+ )
+ ]
+
    async def get_similar_artists(self, prov_artist_id):
        """Get similar artists for given artist."""
        # Not implemented: the method body is empty and implicitly returns None.
        # Candidate endpoint for a future implementation:
        # https://www.qobuz.com/api.json/0.2/artist/getSimilarArtists?artist_id=220020&offset=0&limit=3
+
+ async def library_add(self, prov_item_id, media_type: MediaType):
+ """Add item to library."""
+ result = None
+ if media_type == MediaType.ARTIST:
+ result = await self._get_data("favorite/create", artist_id=prov_item_id)
+ elif media_type == MediaType.ALBUM:
+ result = await self._get_data("favorite/create", album_ids=prov_item_id)
+ elif media_type == MediaType.TRACK:
+ result = await self._get_data("favorite/create", track_ids=prov_item_id)
+ elif media_type == MediaType.PLAYLIST:
+ result = await self._get_data(
+ "playlist/subscribe", playlist_id=prov_item_id
+ )
+ return result
+
+ async def library_remove(self, prov_item_id, media_type: MediaType):
+ """Remove item from library."""
+ result = None
+ if media_type == MediaType.ARTIST:
+ result = await self._get_data("favorite/delete", artist_ids=prov_item_id)
+ elif media_type == MediaType.ALBUM:
+ result = await self._get_data("favorite/delete", album_ids=prov_item_id)
+ elif media_type == MediaType.TRACK:
+ result = await self._get_data("favorite/delete", track_ids=prov_item_id)
+ elif media_type == MediaType.PLAYLIST:
+ playlist = await self.get_playlist(prov_item_id)
+ if playlist.is_editable:
+ result = await self._get_data(
+ "playlist/delete", playlist_id=prov_item_id
+ )
+ else:
+ result = await self._get_data(
+ "playlist/unsubscribe", playlist_id=prov_item_id
+ )
+ return result
+
+ async def add_playlist_tracks(
+ self, prov_playlist_id: str, prov_track_ids: List[str]
+ ) -> None:
+ """Add track(s) to playlist."""
+ return await self._get_data(
+ "playlist/addTracks",
+ playlist_id=prov_playlist_id,
+ track_ids=",".join(prov_track_ids),
+ playlist_track_ids=",".join(prov_track_ids),
+ )
+
+ async def remove_playlist_tracks(
+ self, prov_playlist_id: str, positions_to_remove: Tuple[int]
+ ) -> None:
+ """Remove track(s) from playlist."""
+ playlist_track_ids = set()
+ for track in await self.get_playlist_tracks(prov_playlist_id):
+ if track.position in positions_to_remove:
+ playlist_track_ids.add(str(track["playlist_track_id"]))
+ if len(playlist_track_ids) == positions_to_remove:
+ break
+ return await self._get_data(
+ "playlist/deleteTracks",
+ playlist_id=prov_playlist_id,
+ playlist_track_ids=",".join(playlist_track_ids),
+ )
+
    async def get_stream_details(self, item_id: str) -> StreamDetails:
        """Return the content details for the given track when it will be streamed.

        Tries stream formats from highest to lowest quality until the API
        returns a usable url, then maps the mime type and reports playback
        started as a background task.

        :raises MediaNotFoundError: when no url could be obtained or the mime
            type is neither mpeg nor flac.
        """
        streamdata = None
        # format ids tried best-first; presumably 27/7/6 are hi-res/lossless
        # tiers and 5 is lossy — TODO confirm against Qobuz API docs
        for format_id in [27, 7, 6, 5]:
            # it seems that simply requesting for highest available quality does not work
            # from time to time the api response is empty for this request ?!
            result = await self._get_data(
                "track/getFileUrl",
                sign_request=True,
                format_id=format_id,
                track_id=item_id,
                intent="stream",
            )
            if result and result.get("url"):
                streamdata = result
                break
        if not streamdata:
            raise MediaNotFoundError(f"Unable to retrieve stream details for {item_id}")
        if streamdata["mime_type"] == "audio/mpeg":
            content_type = ContentType.MPEG
        elif streamdata["mime_type"] == "audio/flac":
            content_type = ContentType.FLAC
        else:
            raise MediaNotFoundError(f"Unsupported mime type for {item_id}")
        # report playback started as soon as the streamdetails are requested
        self.mass.create_task(self._report_playback_started(streamdata))
        return StreamDetails(
            item_id=str(item_id),
            provider=self.type,
            content_type=content_type,
            duration=streamdata["duration"],
            # sampling_rate comes back in kHz; convert to Hz
            sample_rate=int(streamdata["sampling_rate"] * 1000),
            bit_depth=streamdata["bit_depth"],
            data=streamdata,  # we need these details for reporting playback
            expires=time.time() + 1800,  # not sure about the real allowed value
            direct=streamdata["url"],
            callback=self._report_playback_stopped,
        )
+
+ async def _report_playback_started(self, streamdata: dict) -> None:
+ """Report playback start to qobuz."""
+ # TODO: need to figure out if the streamed track is purchased by user
+ # https://www.qobuz.com/api.json/0.2/purchase/getUserPurchasesIds?limit=5000&user_id=xxxxxxx
+ # {"albums":{"total":0,"items":[]},"tracks":{"total":0,"items":[]},"user":{"id":xxxx,"login":"xxxxx"}}
+ device_id = self._user_auth_info["user"]["device"]["id"]
+ credential_id = self._user_auth_info["user"]["credential"]["id"]
+ user_id = self._user_auth_info["user"]["id"]
+ format_id = streamdata["format_id"]
+ timestamp = int(time.time())
+ events = [
+ {
+ "online": True,
+ "sample": False,
+ "intent": "stream",
+ "device_id": device_id,
+ "track_id": streamdata["track_id"],
+ "purchase": False,
+ "date": timestamp,
+ "credential_id": credential_id,
+ "user_id": user_id,
+ "local": False,
+ "format_id": format_id,
+ }
+ ]
+ await self._post_data("track/reportStreamingStart", data=events)
+
+ async def _report_playback_stopped(self, streamdetails: StreamDetails) -> None:
+ """Report playback stop to qobuz."""
+ user_id = self._user_auth_info["user"]["id"]
+ await self._get_data(
+ "/track/reportStreamingEnd",
+ user_id=user_id,
+ track_id=str(streamdetails.item_id),
+ duration=try_parse_int(streamdetails.seconds_streamed),
+ )
+
+ async def _parse_artist(self, artist_obj: dict):
+ """Parse qobuz artist object to generic layout."""
+ artist = Artist(
+ item_id=str(artist_obj["id"]), provider=self.type, name=artist_obj["name"]
+ )
+ artist.add_provider_id(
+ MediaItemProviderId(
+ item_id=str(artist_obj["id"]),
+ prov_type=self.type,
+ prov_id=self.id,
+ url=artist_obj.get(
+ "url", f'https://open.qobuz.com/artist/{artist_obj["id"]}'
+ ),
+ )
+ )
+ if img := self.__get_image(artist_obj):
+ artist.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
+ if artist_obj.get("biography"):
+ artist.metadata.description = artist_obj["biography"].get("content")
+ return artist
+
    async def _parse_album(self, album_obj: dict, artist_obj: dict = None):
        """Parse qobuz album object to generic layout.

        :param album_obj: raw album dict from the Qobuz API.
        :param artist_obj: optional raw artist dict to use as the album
            artist; falls back to album_obj["artist"] when omitted.
        :returns: an Album, or the result of a full album lookup when the
            artist info is missing from the given dict.
        """
        if not artist_obj and "artist" not in album_obj:
            # artist missing in album info, return full abum instead
            return await self.get_album(album_obj["id"])
        name, version = parse_title_and_version(
            album_obj["title"], album_obj.get("version")
        )
        album = Album(
            item_id=str(album_obj["id"]), provider=self.type, name=name, version=version
        )
        # map sample rate / bit depth / format to a quality tier
        # (checked best-first; assumes maximum_sampling_rate is in kHz — TODO confirm)
        if album_obj["maximum_sampling_rate"] > 192:
            quality = MediaQuality.LOSSLESS_HI_RES_4
        elif album_obj["maximum_sampling_rate"] > 96:
            quality = MediaQuality.LOSSLESS_HI_RES_3
        elif album_obj["maximum_sampling_rate"] > 48:
            quality = MediaQuality.LOSSLESS_HI_RES_2
        elif album_obj["maximum_bit_depth"] > 16:
            quality = MediaQuality.LOSSLESS_HI_RES_1
        elif album_obj.get("format_id", 0) == 5:
            quality = MediaQuality.LOSSY_AAC
        else:
            quality = MediaQuality.LOSSLESS
        album.add_provider_id(
            MediaItemProviderId(
                item_id=str(album_obj["id"]),
                prov_type=self.type,
                prov_id=self.id,
                quality=quality,
                url=album_obj.get(
                    "url", f'https://open.qobuz.com/album/{album_obj["id"]}'
                ),
                details=f'{album_obj["maximum_sampling_rate"]}kHz {album_obj["maximum_bit_depth"]}bit',
                available=album_obj["streamable"] and album_obj["displayable"],
            )
        )

        album.artist = await self._parse_artist(artist_obj or album_obj["artist"])
        # classify album type from qobuz product/release type hints
        if (
            album_obj.get("product_type", "") == "single"
            or album_obj.get("release_type", "") == "single"
        ):
            album.album_type = AlbumType.SINGLE
        elif (
            album_obj.get("product_type", "") == "compilation"
            or "Various" in album.artist.name
        ):
            album.album_type = AlbumType.COMPILATION
        elif (
            album_obj.get("product_type", "") == "album"
            or album_obj.get("release_type", "") == "album"
        ):
            album.album_type = AlbumType.ALBUM
        if "genre" in album_obj:
            album.metadata.genres = {album_obj["genre"]["name"]}
        if img := self.__get_image(album_obj):
            album.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
        if len(album_obj["upc"]) == 13:
            # qobuz writes ean as upc ?!
            # strip the leading EAN digit to get a 12-digit UPC
            album.upc = album_obj["upc"][1:]
        else:
            album.upc = album_obj["upc"]
        if "label" in album_obj:
            album.metadata.label = album_obj["label"]["name"]
        if album_obj.get("released_at"):
            # released_at is a unix timestamp; only the year is kept
            album.year = datetime.datetime.fromtimestamp(album_obj["released_at"]).year
        if album_obj.get("copyright"):
            album.metadata.copyright = album_obj["copyright"]
        if album_obj.get("description"):
            album.metadata.description = album_obj["description"]
        return album
+
+ async def _parse_track(self, track_obj: dict):
+ """Parse qobuz track object to generic layout."""
+ name, version = parse_title_and_version(
+ track_obj["title"], track_obj.get("version")
+ )
+ track = Track(
+ item_id=str(track_obj["id"]),
+ provider=self.type,
+ name=name,
+ version=version,
+ disc_number=track_obj["media_number"],
+ track_number=track_obj["track_number"],
+ duration=track_obj["duration"],
+ position=track_obj.get("position"),
+ )
+ if track_obj.get("performer") and "Various " not in track_obj["performer"]:
+ artist = await self._parse_artist(track_obj["performer"])
+ if artist:
+ track.artists.append(artist)
+ if not track.artists:
+ # try to grab artist from album
+ if (
+ track_obj.get("album")
+ and track_obj["album"].get("artist")
+ and "Various " not in track_obj["album"]["artist"]
+ ):
+ artist = await self._parse_artist(track_obj["album"]["artist"])
+ if artist:
+ track.artists.append(artist)
+ if not track.artists:
+ # last resort: parse from performers string
+ for performer_str in track_obj["performers"].split(" - "):
+ role = performer_str.split(", ")[1]
+ name = performer_str.split(", ")[0]
+ if "artist" in role.lower():
+ artist = Artist(name, self.type, name)
+ track.artists.append(artist)
+ # TODO: fix grabbing composer from details
+
+ if "album" in track_obj:
+ album = await self._parse_album(track_obj["album"])
+ if album:
+ track.album = album
+ if track_obj.get("isrc"):
+ track.isrc = track_obj["isrc"]
+ if track_obj.get("performers"):
+ track.metadata.performers = {
+ x.strip() for x in track_obj["performers"].split("-")
+ }
+ if track_obj.get("copyright"):
+ track.metadata.copyright = track_obj["copyright"]
+ if track_obj.get("audio_info"):
+ track.metadata.replaygain = track_obj["audio_info"]["replaygain_track_gain"]
+ if track_obj.get("parental_warning"):
+ track.metadata.explicit = True
+ if img := self.__get_image(track_obj):
+ track.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
+ # get track quality
+ if track_obj["maximum_sampling_rate"] > 192:
+ quality = MediaQuality.LOSSLESS_HI_RES_4
+ elif track_obj["maximum_sampling_rate"] > 96:
+ quality = MediaQuality.LOSSLESS_HI_RES_3
+ elif track_obj["maximum_sampling_rate"] > 48:
+ quality = MediaQuality.LOSSLESS_HI_RES_2
+ elif track_obj["maximum_bit_depth"] > 16:
+ quality = MediaQuality.LOSSLESS_HI_RES_1
+ elif track_obj.get("format_id", 0) == 5:
+ quality = MediaQuality.LOSSY_AAC
+ else:
+ quality = MediaQuality.LOSSLESS
+ track.add_provider_id(
+ MediaItemProviderId(
+ item_id=str(track_obj["id"]),
+ prov_type=self.type,
+ prov_id=self.id,
+ quality=quality,
+ url=track_obj.get(
+ "url", f'https://open.qobuz.com/track/{track_obj["id"]}'
+ ),
+ details=f'{track_obj["maximum_sampling_rate"]}kHz {track_obj["maximum_bit_depth"]}bit',
+ available=track_obj["streamable"] and track_obj["displayable"],
+ )
+ )
+ return track
+
+ async def _parse_playlist(self, playlist_obj):
+ """Parse qobuz playlist object to generic layout."""
+ playlist = Playlist(
+ item_id=str(playlist_obj["id"]),
+ provider=self.type,
+ name=playlist_obj["name"],
+ owner=playlist_obj["owner"]["name"],
+ )
+ playlist.add_provider_id(
+ MediaItemProviderId(
+ item_id=str(playlist_obj["id"]),
+ prov_type=self.type,
+ prov_id=self.id,
+ url=playlist_obj.get(
+ "url", f'https://open.qobuz.com/playlist/{playlist_obj["id"]}'
+ ),
+ )
+ )
+ playlist.is_editable = (
+ playlist_obj["owner"]["id"] == self._user_auth_info["user"]["id"]
+ or playlist_obj["is_collaborative"]
+ )
+ if img := self.__get_image(playlist_obj):
+ playlist.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
+ playlist.metadata.checksum = str(playlist_obj["updated_at"])
+ return playlist
+
+ async def _auth_token(self):
+ """Login to qobuz and store the token."""
+ if self._user_auth_info:
+ return self._user_auth_info["user_auth_token"]
+ params = {
+ "username": self.config.username,
+ "password": self.config.password,
+ "device_manufacturer_id": "music_assistant",
+ }
+ details = await self._get_data("user/login", **params)
+ if details and "user" in details:
+ self._user_auth_info = details
+ self.logger.info(
+ "Succesfully logged in to Qobuz as %s", details["user"]["display_name"]
+ )
+ self.mass.metadata.preferred_language = details["user"]["country_code"]
+ return details["user_auth_token"]
+
+ async def _get_all_items(self, endpoint, key="tracks", **kwargs):
+ """Get all items from a paged list."""
+ limit = 50
+ offset = 0
+ all_items = []
+ while True:
+ kwargs["limit"] = limit
+ kwargs["offset"] = offset
+ result = await self._get_data(endpoint, **kwargs)
+ offset += limit
+ if not result:
+ break
+ if not result.get(key) or not result[key].get("items"):
+ break
+ for item in result[key]["items"]:
+ item["position"] = len(all_items) + 1
+ all_items.append(item)
+ if len(result[key]["items"]) < limit:
+ break
+ return all_items
+
    async def _get_data(self, endpoint, sign_request=False, **kwargs):
        """Get data from api.

        Performs a throttled GET against the Qobuz API and returns the parsed
        JSON dict, or None on any request/parse/API error.

        :param endpoint: API path relative to the base url.
        :param sign_request: when True, add the md5 request signature
            (endpoint + params sorted by key + timestamp + app secret)
            required by some endpoints such as track/getFileUrl.
        :param kwargs: sent as query string parameters (mutated when signing).
        """
        # NOTE(review): plain http and verify_ssl=False — confirm these are
        # deliberate; credentials and tokens travel over this connection.
        url = f"http://www.qobuz.com/api.json/0.2/{endpoint}"
        headers = {"X-App-Id": app_var(0)}
        if endpoint != "user/login":
            auth_token = await self._auth_token()
            if not auth_token:
                self.logger.debug("Not logged in")
                return None
            headers["X-User-Auth-Token"] = auth_token
        if sign_request:
            signing_data = "".join(endpoint.split("/"))
            # params must be concatenated in sorted key order for the signature
            keys = list(kwargs.keys())
            keys.sort()
            for key in keys:
                signing_data += f"{key}{kwargs[key]}"
            request_ts = str(time.time())
            request_sig = signing_data + request_ts + app_var(1)
            request_sig = str(hashlib.md5(request_sig.encode()).hexdigest())
            kwargs["request_ts"] = request_ts
            kwargs["request_sig"] = request_sig
            kwargs["app_id"] = app_var(0)
            kwargs["user_auth_token"] = await self._auth_token()
        async with self._throttler:
            async with self.mass.http_session.get(
                url, headers=headers, params=kwargs, verify_ssl=False
            ) as response:
                try:
                    # make sure status is 200
                    assert response.status == 200
                    result = await response.json()
                    # check for error in json
                    if error := result.get("error"):
                        raise ValueError(error)
                    if result.get("status") and "error" in result["status"]:
                        raise ValueError(result["status"])
                except (
                    aiohttp.ContentTypeError,
                    JSONDecodeError,
                    AssertionError,
                    ValueError,
                ) as err:
                    # best-effort: log the raw body and swallow the error
                    text = await response.text()
                    self.logger.exception(
                        "Error while processing %s: %s", endpoint, text, exc_info=err
                    )
                    return None
                return result
+
    async def _post_data(self, endpoint, params=None, data=None):
        """Post data to api.

        :param endpoint: API path relative to the base url.
        :param params: optional query string parameters; note the given dict
            is mutated (app_id and user_auth_token are added).
        :param data: JSON body payload.
        :returns: parsed JSON dict, or None on any request/parse/API error.
        """
        if not params:
            params = {}
        if not data:
            data = {}
        url = f"http://www.qobuz.com/api.json/0.2/{endpoint}"
        params["app_id"] = app_var(0)
        params["user_auth_token"] = await self._auth_token()
        async with self.mass.http_session.post(
            url, params=params, json=data, verify_ssl=False
        ) as response:
            try:
                result = await response.json()
                # check for error in json
                if error := result.get("error"):
                    raise ValueError(error)
                if result.get("status") and "error" in result["status"]:
                    raise ValueError(result["status"])
            except (
                aiohttp.ContentTypeError,
                JSONDecodeError,
                AssertionError,
                ValueError,
            ) as err:
                # best-effort: log the raw body and swallow the error
                text = await response.text()
                self.logger.exception(
                    "Error while processing %s: %s", endpoint, text, exc_info=err
                )
                return None
            return result
+
+ def __get_image(self, obj: dict) -> Optional[str]:
+ """Try to parse image from Qobuz media object."""
+ if obj.get("image"):
+ for key in ["extralarge", "large", "medium", "small"]:
+ if obj["image"].get(key):
+ if "2a96cbd8b46e442fc41c2b86b821562f" in obj["image"][key]:
+ continue
+ return obj["image"][key]
+ if obj.get("images300"):
+ # playlists seem to use this strange format
+ return obj["images300"][0]
+ if obj.get("album"):
+ return self.__get_image(obj["album"])
+ if obj.get("artist"):
+ return self.__get_image(obj["artist"])
+ return None
+++ /dev/null
-"""Spotify musicprovider support for MusicAssistant."""
-from __future__ import annotations
-
-import asyncio
-import json
-import os
-import platform
-import time
-from json.decoder import JSONDecodeError
-from tempfile import gettempdir
-from typing import AsyncGenerator, List, Optional, Tuple
-
-import aiohttp
-from asyncio_throttle import Throttler
-
-from music_assistant.helpers.app_vars import ( # noqa # pylint: disable=no-name-in-module
- app_var,
-)
-from music_assistant.helpers.process import AsyncProcess
-from music_assistant.helpers.util import parse_title_and_version
-from music_assistant.models.enums import MusicProviderFeature, ProviderType
-from music_assistant.models.errors import LoginFailed, MediaNotFoundError
-from music_assistant.models.media_items import (
- Album,
- AlbumType,
- Artist,
- ContentType,
- ImageType,
- MediaItemImage,
- MediaItemProviderId,
- MediaItemType,
- MediaQuality,
- MediaType,
- Playlist,
- StreamDetails,
- Track,
-)
-from music_assistant.models.music_provider import MusicProvider
-
-CACHE_DIR = gettempdir()
-
-
-class SpotifyProvider(MusicProvider):
- """Implementation of a Spotify MusicProvider."""
-
- _attr_type = ProviderType.SPOTIFY
- _attr_name = "Spotify"
- _auth_token = None
- _sp_user = None
- _librespot_bin = None
- _throttler = Throttler(rate_limit=4, period=1)
- _cache_dir = CACHE_DIR
- _ap_workaround = False
-
- @property
- def supported_features(self) -> Tuple[MusicProviderFeature]:
- """Return the features supported by this MusicProvider."""
- return (
- MusicProviderFeature.LIBRARY_ARTISTS,
- MusicProviderFeature.LIBRARY_ALBUMS,
- MusicProviderFeature.LIBRARY_TRACKS,
- MusicProviderFeature.LIBRARY_PLAYLISTS,
- MusicProviderFeature.LIBRARY_ARTISTS_EDIT,
- MusicProviderFeature.LIBRARY_ALBUMS_EDIT,
- MusicProviderFeature.LIBRARY_PLAYLISTS_EDIT,
- MusicProviderFeature.LIBRARY_TRACKS_EDIT,
- MusicProviderFeature.PLAYLIST_TRACKS_EDIT,
- MusicProviderFeature.BROWSE,
- MusicProviderFeature.SEARCH,
- MusicProviderFeature.ARTIST_ALBUMS,
- MusicProviderFeature.ARTIST_TOPTRACKS,
- MusicProviderFeature.SIMILAR_TRACKS,
- )
-
- async def setup(self) -> bool:
- """Handle async initialization of the provider."""
- if not self.config.enabled:
- return False
- # try to get a token, raise if that fails
- self._cache_dir = os.path.join(CACHE_DIR, self.id)
- # try login which will raise if it fails
- await self.login()
- return True
-
- async def search(
- self, search_query: str, media_types=Optional[List[MediaType]], limit: int = 5
- ) -> List[MediaItemType]:
- """
- Perform search on musicprovider.
-
- :param search_query: Search query.
- :param media_types: A list of media_types to include. All types if None.
- :param limit: Number of items to return in the search (per type).
- """
- result = []
- searchtypes = []
- if MediaType.ARTIST in media_types:
- searchtypes.append("artist")
- if MediaType.ALBUM in media_types:
- searchtypes.append("album")
- if MediaType.TRACK in media_types:
- searchtypes.append("track")
- if MediaType.PLAYLIST in media_types:
- searchtypes.append("playlist")
- searchtype = ",".join(searchtypes)
- search_query = search_query.replace("'", "")
- if searchresult := await self._get_data(
- "search", q=search_query, type=searchtype, limit=limit
- ):
- if "artists" in searchresult:
- result += [
- await self._parse_artist(item)
- for item in searchresult["artists"]["items"]
- if (item and item["id"])
- ]
- if "albums" in searchresult:
- result += [
- await self._parse_album(item)
- for item in searchresult["albums"]["items"]
- if (item and item["id"])
- ]
- if "tracks" in searchresult:
- result += [
- await self._parse_track(item)
- for item in searchresult["tracks"]["items"]
- if (item and item["id"])
- ]
- if "playlists" in searchresult:
- result += [
- await self._parse_playlist(item)
- for item in searchresult["playlists"]["items"]
- if (item and item["id"])
- ]
- return result
-
- async def get_library_artists(self) -> AsyncGenerator[Artist, None]:
- """Retrieve library artists from spotify."""
- endpoint = "me/following"
- while True:
- spotify_artists = await self._get_data(
- endpoint,
- type="artist",
- limit=50,
- )
- for item in spotify_artists["artists"]["items"]:
- if item and item["id"]:
- yield await self._parse_artist(item)
- if spotify_artists["artists"]["next"]:
- endpoint = spotify_artists["artists"]["next"]
- endpoint = endpoint.replace("https://api.spotify.com/v1/", "")
- else:
- break
-
- async def get_library_albums(self) -> AsyncGenerator[Album, None]:
- """Retrieve library albums from the provider."""
- for item in await self._get_all_items("me/albums"):
- if item["album"] and item["album"]["id"]:
- yield await self._parse_album(item["album"])
-
- async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
- """Retrieve library tracks from the provider."""
- for item in await self._get_all_items("me/tracks"):
- if item and item["track"]["id"]:
- yield await self._parse_track(item["track"])
-
- async def get_library_playlists(self) -> AsyncGenerator[Playlist, None]:
- """Retrieve playlists from the provider."""
- for item in await self._get_all_items("me/playlists"):
- if item and item["id"]:
- yield await self._parse_playlist(item)
-
- async def get_artist(self, prov_artist_id) -> Artist:
- """Get full artist details by id."""
- artist_obj = await self._get_data(f"artists/{prov_artist_id}")
- return await self._parse_artist(artist_obj) if artist_obj else None
-
- async def get_album(self, prov_album_id) -> Album:
- """Get full album details by id."""
- album_obj = await self._get_data(f"albums/{prov_album_id}")
- return await self._parse_album(album_obj) if album_obj else None
-
- async def get_track(self, prov_track_id) -> Track:
- """Get full track details by id."""
- track_obj = await self._get_data(f"tracks/{prov_track_id}")
- return await self._parse_track(track_obj) if track_obj else None
-
- async def get_playlist(self, prov_playlist_id) -> Playlist:
- """Get full playlist details by id."""
- playlist_obj = await self._get_data(f"playlists/{prov_playlist_id}")
- return await self._parse_playlist(playlist_obj) if playlist_obj else None
-
- async def get_album_tracks(self, prov_album_id) -> List[Track]:
- """Get all album tracks for given album id."""
- return [
- await self._parse_track(item)
- for item in await self._get_all_items(f"albums/{prov_album_id}/tracks")
- if (item and item["id"])
- ]
-
- async def get_playlist_tracks(self, prov_playlist_id) -> List[Track]:
- """Get all playlist tracks for given playlist id."""
- count = 0
- result = []
- for item in await self._get_all_items(
- f"playlists/{prov_playlist_id}/tracks",
- ):
- if not (item and item["track"] and item["track"]["id"]):
- continue
- track = await self._parse_track(item["track"])
- # use count as position
- track.position = count
- result.append(track)
- count += 1
- return result
-
- async def get_artist_albums(self, prov_artist_id) -> List[Album]:
- """Get a list of all albums for the given artist."""
- return [
- await self._parse_album(item)
- for item in await self._get_all_items(
- f"artists/{prov_artist_id}/albums?include_groups=album,single,compilation"
- )
- if (item and item["id"])
- ]
-
- async def get_artist_toptracks(self, prov_artist_id) -> List[Track]:
- """Get a list of 10 most popular tracks for the given artist."""
- artist = await self.get_artist(prov_artist_id)
- endpoint = f"artists/{prov_artist_id}/top-tracks"
- items = await self._get_data(endpoint)
- return [
- await self._parse_track(item, artist=artist)
- for item in items["tracks"]
- if (item and item["id"])
- ]
-
- async def library_add(self, prov_item_id, media_type: MediaType):
- """Add item to library."""
- result = False
- if media_type == MediaType.ARTIST:
- result = await self._put_data(
- "me/following", {"ids": prov_item_id, "type": "artist"}
- )
- elif media_type == MediaType.ALBUM:
- result = await self._put_data("me/albums", {"ids": prov_item_id})
- elif media_type == MediaType.TRACK:
- result = await self._put_data("me/tracks", {"ids": prov_item_id})
- elif media_type == MediaType.PLAYLIST:
- result = await self._put_data(
- f"playlists/{prov_item_id}/followers", data={"public": False}
- )
- return result
-
- async def library_remove(self, prov_item_id, media_type: MediaType):
- """Remove item from library."""
- result = False
- if media_type == MediaType.ARTIST:
- result = await self._delete_data(
- "me/following", {"ids": prov_item_id, "type": "artist"}
- )
- elif media_type == MediaType.ALBUM:
- result = await self._delete_data("me/albums", {"ids": prov_item_id})
- elif media_type == MediaType.TRACK:
- result = await self._delete_data("me/tracks", {"ids": prov_item_id})
- elif media_type == MediaType.PLAYLIST:
- result = await self._delete_data(f"playlists/{prov_item_id}/followers")
- return result
-
- async def add_playlist_tracks(
- self, prov_playlist_id: str, prov_track_ids: List[str]
- ):
- """Add track(s) to playlist."""
- track_uris = []
- for track_id in prov_track_ids:
- track_uris.append(f"spotify:track:{track_id}")
- data = {"uris": track_uris}
- return await self._post_data(f"playlists/{prov_playlist_id}/tracks", data=data)
-
- async def remove_playlist_tracks(
- self, prov_playlist_id: str, positions_to_remove: Tuple[int]
- ) -> None:
- """Remove track(s) from playlist."""
- track_uris = []
- for track in await self.get_playlist_tracks(prov_playlist_id):
- if track.position in positions_to_remove:
- track_uris.append({"uri": f"spotify:track:{track.item_id}"})
- if len(track_uris) == positions_to_remove:
- break
- data = {"tracks": track_uris}
- return await self._delete_data(
- f"playlists/{prov_playlist_id}/tracks", data=data
- )
-
- async def get_similar_tracks(self, prov_track_id, limit=25) -> List[Track]:
- """Retrieve a dynamic list of tracks based on the provided item."""
- endpoint = "recommendations"
- items = await self._get_data(endpoint, seed_tracks=prov_track_id, limit=limit)
- return [
- await self._parse_track(item)
- for item in items["tracks"]
- if (item and item["id"])
- ]
-
- async def get_stream_details(self, item_id: str) -> StreamDetails:
- """Return the content details for the given track when it will be streamed."""
- # make sure a valid track is requested.
- track = await self.get_track(item_id)
- if not track:
- raise MediaNotFoundError(f"track {item_id} not found")
- # make sure that the token is still valid by just requesting it
- await self.login()
- return StreamDetails(
- item_id=track.item_id,
- provider=self.type,
- content_type=ContentType.OGG,
- duration=track.duration,
- )
-
- async def get_audio_stream(
- self, streamdetails: StreamDetails, seek_position: int = 0
- ) -> AsyncGenerator[bytes, None]:
- """Return the audio stream for the provider item."""
- # make sure that the token is still valid by just requesting it
- await self.login()
- librespot = await self.get_librespot_binary()
- args = [
- librespot,
- "-c",
- self._cache_dir,
- "--pass-through",
- "-b",
- "320",
- "--single-track",
- f"spotify://track:{streamdetails.item_id}",
- ]
- if seek_position:
- args += ["--start-position", str(int(seek_position))]
- if self._ap_workaround:
- args += ["--ap-port", "12345"]
- bytes_sent = 0
- async with AsyncProcess(args) as librespot_proc:
- async for chunk in librespot_proc.iter_any():
- yield chunk
- bytes_sent += len(chunk)
-
- if bytes_sent == 0 and not self._ap_workaround:
- # AP resolve failure
- # https://github.com/librespot-org/librespot/issues/972
- # retry with ap-port set to invalid value, which will force fallback
- args += ["--ap-port", "12345"]
- async with AsyncProcess(args) as librespot_proc:
- async for chunk in librespot_proc.iter_any(64000):
- yield chunk
- self._ap_workaround = True
-
- async def _parse_artist(self, artist_obj):
- """Parse spotify artist object to generic layout."""
- artist = Artist(
- item_id=artist_obj["id"], provider=self.type, name=artist_obj["name"]
- )
- artist.add_provider_id(
- MediaItemProviderId(
- item_id=artist_obj["id"],
- prov_type=self.type,
- prov_id=self.id,
- url=artist_obj["external_urls"]["spotify"],
- )
- )
- if "genres" in artist_obj:
- artist.metadata.genres = set(artist_obj["genres"])
- if artist_obj.get("images"):
- for img in artist_obj["images"]:
- img_url = img["url"]
- if "2a96cbd8b46e442fc41c2b86b821562f" not in img_url:
- artist.metadata.images = [MediaItemImage(ImageType.THUMB, img_url)]
- break
- return artist
-
- async def _parse_album(self, album_obj: dict):
- """Parse spotify album object to generic layout."""
- name, version = parse_title_and_version(album_obj["name"])
- album = Album(
- item_id=album_obj["id"], provider=self.type, name=name, version=version
- )
- for artist_obj in album_obj["artists"]:
- album.artists.append(await self._parse_artist(artist_obj))
- if album_obj["album_type"] == "single":
- album.album_type = AlbumType.SINGLE
- elif album_obj["album_type"] == "compilation":
- album.album_type = AlbumType.COMPILATION
- elif album_obj["album_type"] == "album":
- album.album_type = AlbumType.ALBUM
- if "genres" in album_obj:
- album.metadata.genre = set(album_obj["genres"])
- if album_obj.get("images"):
- album.metadata.images = [
- MediaItemImage(ImageType.THUMB, album_obj["images"][0]["url"])
- ]
- if "external_ids" in album_obj and album_obj["external_ids"].get("upc"):
- album.upc = album_obj["external_ids"]["upc"]
- if "label" in album_obj:
- album.metadata.label = album_obj["label"]
- if album_obj.get("release_date"):
- album.year = int(album_obj["release_date"].split("-")[0])
- if album_obj.get("copyrights"):
- album.metadata.copyright = album_obj["copyrights"][0]["text"]
- if album_obj.get("explicit"):
- album.metadata.explicit = album_obj["explicit"]
- album.add_provider_id(
- MediaItemProviderId(
- item_id=album_obj["id"],
- prov_type=self.type,
- prov_id=self.id,
- quality=MediaQuality.LOSSY_OGG,
- url=album_obj["external_urls"]["spotify"],
- )
- )
- return album
-
- async def _parse_track(self, track_obj, artist=None):
- """Parse spotify track object to generic layout."""
- name, version = parse_title_and_version(track_obj["name"])
- track = Track(
- item_id=track_obj["id"],
- provider=self.type,
- name=name,
- version=version,
- duration=track_obj["duration_ms"] / 1000,
- disc_number=track_obj["disc_number"],
- track_number=track_obj["track_number"],
- position=track_obj.get("position"),
- )
- if artist:
- track.artists.append(artist)
- for track_artist in track_obj.get("artists", []):
- artist = await self._parse_artist(track_artist)
- if artist and artist.item_id not in {x.item_id for x in track.artists}:
- track.artists.append(artist)
-
- track.metadata.explicit = track_obj["explicit"]
- if "preview_url" in track_obj:
- track.metadata.preview = track_obj["preview_url"]
- if "external_ids" in track_obj and "isrc" in track_obj["external_ids"]:
- track.isrc = track_obj["external_ids"]["isrc"]
- if "album" in track_obj:
- track.album = await self._parse_album(track_obj["album"])
- if track_obj["album"].get("images"):
- track.metadata.images = [
- MediaItemImage(
- ImageType.THUMB, track_obj["album"]["images"][0]["url"]
- )
- ]
- if track_obj.get("copyright"):
- track.metadata.copyright = track_obj["copyright"]
- if track_obj.get("explicit"):
- track.metadata.explicit = True
- if track_obj.get("popularity"):
- track.metadata.popularity = track_obj["popularity"]
- track.add_provider_id(
- MediaItemProviderId(
- item_id=track_obj["id"],
- prov_type=self.type,
- prov_id=self.id,
- quality=MediaQuality.LOSSY_OGG,
- url=track_obj["external_urls"]["spotify"],
- available=not track_obj["is_local"] and track_obj["is_playable"],
- )
- )
- return track
-
- async def _parse_playlist(self, playlist_obj):
- """Parse spotify playlist object to generic layout."""
- playlist = Playlist(
- item_id=playlist_obj["id"],
- provider=self.type,
- name=playlist_obj["name"],
- owner=playlist_obj["owner"]["display_name"],
- )
- playlist.add_provider_id(
- MediaItemProviderId(
- item_id=playlist_obj["id"],
- prov_type=self.type,
- prov_id=self.id,
- url=playlist_obj["external_urls"]["spotify"],
- )
- )
- playlist.is_editable = (
- playlist_obj["owner"]["id"] == self._sp_user["id"]
- or playlist_obj["collaborative"]
- )
- if playlist_obj.get("images"):
- playlist.metadata.images = [
- MediaItemImage(ImageType.THUMB, playlist_obj["images"][0]["url"])
- ]
- playlist.metadata.checksum = str(playlist_obj["snapshot_id"])
- return playlist
-
- async def login(self) -> dict:
- """Log-in Spotify and return tokeninfo."""
- # return existing token if we have one in memory
- if (
- self._auth_token
- and os.path.isdir(self._cache_dir)
- and (self._auth_token["expiresAt"] > int(time.time()) + 20)
- ):
- return self._auth_token
- tokeninfo, userinfo = None, self._sp_user
- if not self.config.username or not self.config.password:
- raise LoginFailed("Invalid login credentials")
- # retrieve token with librespot
- retries = 0
- while retries < 20:
- try:
- retries += 1
- if not tokeninfo:
- tokeninfo = await asyncio.wait_for(self._get_token(), 5)
- if tokeninfo and not userinfo:
- userinfo = await asyncio.wait_for(
- self._get_data("me", tokeninfo=tokeninfo), 5
- )
- if tokeninfo and userinfo:
- # we have all info we need!
- break
- if retries > 2:
- # switch to ap workaround after 2 retries
- self._ap_workaround = True
- except asyncio.exceptions.TimeoutError:
- await asyncio.sleep(2)
- if tokeninfo and userinfo:
- self._auth_token = tokeninfo
- self._sp_user = userinfo
- self.mass.metadata.preferred_language = userinfo["country"]
- self.logger.info("Succesfully logged in to Spotify as %s", userinfo["id"])
- self._auth_token = tokeninfo
- return tokeninfo
- if tokeninfo and not userinfo:
- raise LoginFailed(
- "Unable to retrieve userdetails from Spotify API - probably just a temporary error"
- )
- if self.config.username.isnumeric():
- # a spotify free/basic account can be recognized when
- # the username consists of numbers only - check that here
- # an integer can be parsed of the username, this is a free account
- raise LoginFailed("Only Spotify Premium accounts are supported")
- raise LoginFailed(f"Login failed for user {self.config.username}")
-
- async def _get_token(self):
- """Get spotify auth token with librespot bin."""
- time_start = time.time()
- # authorize with username and password (NOTE: this can also be Spotify Connect)
- args = [
- await self.get_librespot_binary(),
- "-O",
- "-c",
- self._cache_dir,
- "-a",
- "-u",
- self.config.username,
- "-p",
- self.config.password,
- ]
- librespot = await asyncio.create_subprocess_exec(*args)
- await librespot.wait()
- # get token with (authorized) librespot
- scopes = [
- "user-read-playback-state",
- "user-read-currently-playing",
- "user-modify-playback-state",
- "playlist-read-private",
- "playlist-read-collaborative",
- "playlist-modify-public",
- "playlist-modify-private",
- "user-follow-modify",
- "user-follow-read",
- "user-library-read",
- "user-library-modify",
- "user-read-private",
- "user-read-email",
- "user-read-birthdate",
- "user-top-read",
- ]
- scope = ",".join(scopes)
- args = [
- await self.get_librespot_binary(),
- "-O",
- "-t",
- "--client-id",
- app_var(2),
- "--scope",
- scope,
- "-c",
- self._cache_dir,
- ]
- if self._ap_workaround:
- args += ["--ap-port", "12345"]
- librespot = await asyncio.create_subprocess_exec(
- *args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT
- )
- stdout, _ = await librespot.communicate()
- duration = round(time.time() - time_start, 2)
- try:
- result = json.loads(stdout)
- except JSONDecodeError:
- self.logger.warning(
- "Error while retrieving Spotify token after %s seconds, details: %s",
- duration,
- stdout.decode(),
- )
- return None
- self.logger.debug(
- "Retrieved Spotify token using librespot in %s seconds",
- duration,
- )
- # transform token info to spotipy compatible format
- if result and "accessToken" in result:
- tokeninfo = result
- tokeninfo["expiresAt"] = tokeninfo["expiresIn"] + int(time.time())
- return tokeninfo
- return None
-
- async def _get_all_items(self, endpoint, key="items", **kwargs) -> List[dict]:
- """Get all items from a paged list."""
- limit = 50
- offset = 0
- all_items = []
- while True:
- kwargs["limit"] = limit
- kwargs["offset"] = offset
- result = await self._get_data(endpoint, **kwargs)
- offset += limit
- if not result or key not in result or not result[key]:
- break
- for item in result[key]:
- item["position"] = len(all_items) + 1
- all_items.append(item)
- if len(result[key]) < limit:
- break
- return all_items
-
- async def _get_data(self, endpoint, tokeninfo: Optional[dict] = None, **kwargs):
- """Get data from api."""
- url = f"https://api.spotify.com/v1/{endpoint}"
- kwargs["market"] = "from_token"
- kwargs["country"] = "from_token"
- if tokeninfo is None:
- tokeninfo = await self.login()
- headers = {"Authorization": f'Bearer {tokeninfo["accessToken"]}'}
- async with self._throttler:
- time_start = time.time()
- try:
- async with self.mass.http_session.get(
- url, headers=headers, params=kwargs, verify_ssl=False, timeout=120
- ) as response:
- result = await response.json()
- if "error" in result or (
- "status" in result and "error" in result["status"]
- ):
- self.logger.error("%s - %s", endpoint, result)
- return None
- except (
- aiohttp.ContentTypeError,
- JSONDecodeError,
- ) as err:
- self.logger.error("%s - %s", endpoint, str(err))
- return None
- finally:
- self.logger.debug(
- "Processing GET/%s took %s seconds",
- endpoint,
- round(time.time() - time_start, 2),
- )
- return result
-
- async def _delete_data(self, endpoint, data=None, **kwargs):
- """Delete data from api."""
- url = f"https://api.spotify.com/v1/{endpoint}"
- token = await self.login()
- if not token:
- return None
- headers = {"Authorization": f'Bearer {token["accessToken"]}'}
- async with self.mass.http_session.delete(
- url, headers=headers, params=kwargs, json=data, verify_ssl=False
- ) as response:
- return await response.text()
-
- async def _put_data(self, endpoint, data=None, **kwargs):
- """Put data on api."""
- url = f"https://api.spotify.com/v1/{endpoint}"
- token = await self.login()
- if not token:
- return None
- headers = {"Authorization": f'Bearer {token["accessToken"]}'}
- async with self.mass.http_session.put(
- url, headers=headers, params=kwargs, json=data, verify_ssl=False
- ) as response:
- return await response.text()
-
- async def _post_data(self, endpoint, data=None, **kwargs):
- """Post data on api."""
- url = f"https://api.spotify.com/v1/{endpoint}"
- token = await self.login()
- if not token:
- return None
- headers = {"Authorization": f'Bearer {token["accessToken"]}'}
- async with self.mass.http_session.post(
- url, headers=headers, params=kwargs, json=data, verify_ssl=False
- ) as response:
- return await response.text()
-
- async def get_librespot_binary(self):
- """Find the correct librespot binary belonging to the platform."""
- if self._librespot_bin is not None:
- return self._librespot_bin
-
- async def check_librespot(librespot_path: str) -> str | None:
- try:
- librespot = await asyncio.create_subprocess_exec(
- *[librespot_path, "--check"], stdout=asyncio.subprocess.PIPE
- )
- stdout, _ = await librespot.communicate()
- if (
- librespot.returncode == 0
- and b"ok spotty" in stdout
- and b"using librespot" in stdout
- ):
- self._librespot_bin = librespot_path
- return librespot_path
- except OSError:
- return None
-
- base_path = os.path.join(os.path.dirname(__file__), "librespot")
- if platform.system() == "Windows":
- if librespot := await check_librespot(
- os.path.join(base_path, "windows", "librespot.exe")
- ):
- return librespot
- if platform.system() == "Darwin":
- # macos binary is x86_64 intel
- if librespot := await check_librespot(
- os.path.join(base_path, "osx", "librespot")
- ):
- return librespot
-
- if platform.system() == "FreeBSD":
- # FreeBSD binary is x86_64 intel
- if librespot := await check_librespot(
- os.path.join(base_path, "freebsd", "librespot")
- ):
- return librespot
-
- if platform.system() == "Linux":
- architecture = platform.machine()
- if architecture in ["AMD64", "x86_64"]:
- # generic linux x86_64 binary
- if librespot := await check_librespot(
- os.path.join(
- base_path,
- "linux",
- "librespot-x86_64",
- )
- ):
- return librespot
-
- # arm architecture... try all options one by one...
- for arch in ["aarch64", "armv7", "armhf", "arm"]:
- if librespot := await check_librespot(
- os.path.join(
- base_path,
- "linux",
- f"librespot-{arch}",
- )
- ):
- return librespot
-
- raise RuntimeError(
- f"Unable to locate Libespot for {platform.system()} ({platform.machine()})"
- )
--- /dev/null
+"""Package with Spotify Music provider."""
+
+from .spotify import SpotifyProvider # noqa
--- /dev/null
+"""Spotify musicprovider support for MusicAssistant."""
+from __future__ import annotations
+
+import asyncio
+import json
+import os
+import platform
+import time
+from json.decoder import JSONDecodeError
+from tempfile import gettempdir
+from typing import AsyncGenerator, List, Optional, Tuple
+
+import aiohttp
+from asyncio_throttle import Throttler
+
+from music_assistant.helpers.app_vars import ( # noqa # pylint: disable=no-name-in-module
+ app_var,
+)
+from music_assistant.helpers.process import AsyncProcess
+from music_assistant.helpers.util import parse_title_and_version
+from music_assistant.models.enums import MusicProviderFeature, ProviderType
+from music_assistant.models.errors import LoginFailed, MediaNotFoundError
+from music_assistant.models.media_items import (
+ Album,
+ AlbumType,
+ Artist,
+ ContentType,
+ ImageType,
+ MediaItemImage,
+ MediaItemProviderId,
+ MediaItemType,
+ MediaQuality,
+ MediaType,
+ Playlist,
+ StreamDetails,
+ Track,
+)
+from music_assistant.models.music_provider import MusicProvider
+
+CACHE_DIR = gettempdir()
+
+
+class SpotifyProvider(MusicProvider):
+ """Implementation of a Spotify MusicProvider."""
+
+ _attr_type = ProviderType.SPOTIFY
+ _attr_name = "Spotify"
+ _auth_token = None
+ _sp_user = None
+ _librespot_bin = None
+ _throttler = Throttler(rate_limit=4, period=1)
+ _cache_dir = CACHE_DIR
+ _ap_workaround = False
+
    @property
    def supported_features(self) -> Tuple[MusicProviderFeature]:
        """Return the features supported by this MusicProvider.

        Spotify supports the full library feature set (read and edit for
        artists, albums, tracks and playlists), playlist track editing,
        browse/search, and the artist-albums / artist-toptracks /
        similar-tracks lookup endpoints.
        """
        return (
            MusicProviderFeature.LIBRARY_ARTISTS,
            MusicProviderFeature.LIBRARY_ALBUMS,
            MusicProviderFeature.LIBRARY_TRACKS,
            MusicProviderFeature.LIBRARY_PLAYLISTS,
            MusicProviderFeature.LIBRARY_ARTISTS_EDIT,
            MusicProviderFeature.LIBRARY_ALBUMS_EDIT,
            MusicProviderFeature.LIBRARY_PLAYLISTS_EDIT,
            MusicProviderFeature.LIBRARY_TRACKS_EDIT,
            MusicProviderFeature.PLAYLIST_TRACKS_EDIT,
            MusicProviderFeature.BROWSE,
            MusicProviderFeature.SEARCH,
            MusicProviderFeature.ARTIST_ALBUMS,
            MusicProviderFeature.ARTIST_TOPTRACKS,
            MusicProviderFeature.SIMILAR_TRACKS,
        )
+
    async def setup(self) -> bool:
        """Handle async initialization of the provider.

        Returns False when the provider is disabled in config; otherwise
        performs a login (which raises LoginFailed on bad credentials)
        and returns True.
        """
        if not self.config.enabled:
            return False
        # try to get a token, raise if that fails
        # use a per-provider-instance cache dir so multiple Spotify accounts
        # don't share the same librespot credentials cache
        self._cache_dir = os.path.join(CACHE_DIR, self.id)
        # try login which will raise if it fails
        await self.login()
        return True
+
+ async def search(
+ self, search_query: str, media_types=Optional[List[MediaType]], limit: int = 5
+ ) -> List[MediaItemType]:
+ """
+ Perform search on musicprovider.
+
+ :param search_query: Search query.
+ :param media_types: A list of media_types to include. All types if None.
+ :param limit: Number of items to return in the search (per type).
+ """
+ result = []
+ searchtypes = []
+ if MediaType.ARTIST in media_types:
+ searchtypes.append("artist")
+ if MediaType.ALBUM in media_types:
+ searchtypes.append("album")
+ if MediaType.TRACK in media_types:
+ searchtypes.append("track")
+ if MediaType.PLAYLIST in media_types:
+ searchtypes.append("playlist")
+ searchtype = ",".join(searchtypes)
+ search_query = search_query.replace("'", "")
+ if searchresult := await self._get_data(
+ "search", q=search_query, type=searchtype, limit=limit
+ ):
+ if "artists" in searchresult:
+ result += [
+ await self._parse_artist(item)
+ for item in searchresult["artists"]["items"]
+ if (item and item["id"])
+ ]
+ if "albums" in searchresult:
+ result += [
+ await self._parse_album(item)
+ for item in searchresult["albums"]["items"]
+ if (item and item["id"])
+ ]
+ if "tracks" in searchresult:
+ result += [
+ await self._parse_track(item)
+ for item in searchresult["tracks"]["items"]
+ if (item and item["id"])
+ ]
+ if "playlists" in searchresult:
+ result += [
+ await self._parse_playlist(item)
+ for item in searchresult["playlists"]["items"]
+ if (item and item["id"])
+ ]
+ return result
+
+ async def get_library_artists(self) -> AsyncGenerator[Artist, None]:
+ """Retrieve library artists from spotify."""
+ endpoint = "me/following"
+ while True:
+ spotify_artists = await self._get_data(
+ endpoint,
+ type="artist",
+ limit=50,
+ )
+ for item in spotify_artists["artists"]["items"]:
+ if item and item["id"]:
+ yield await self._parse_artist(item)
+ if spotify_artists["artists"]["next"]:
+ endpoint = spotify_artists["artists"]["next"]
+ endpoint = endpoint.replace("https://api.spotify.com/v1/", "")
+ else:
+ break
+
+ async def get_library_albums(self) -> AsyncGenerator[Album, None]:
+ """Retrieve library albums from the provider."""
+ for item in await self._get_all_items("me/albums"):
+ if item["album"] and item["album"]["id"]:
+ yield await self._parse_album(item["album"])
+
+ async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
+ """Retrieve library tracks from the provider."""
+ for item in await self._get_all_items("me/tracks"):
+ if item and item["track"]["id"]:
+ yield await self._parse_track(item["track"])
+
+ async def get_library_playlists(self) -> AsyncGenerator[Playlist, None]:
+ """Retrieve playlists from the provider."""
+ for item in await self._get_all_items("me/playlists"):
+ if item and item["id"]:
+ yield await self._parse_playlist(item)
+
+ async def get_artist(self, prov_artist_id) -> Artist:
+ """Get full artist details by id."""
+ artist_obj = await self._get_data(f"artists/{prov_artist_id}")
+ return await self._parse_artist(artist_obj) if artist_obj else None
+
+ async def get_album(self, prov_album_id) -> Album:
+ """Get full album details by id."""
+ album_obj = await self._get_data(f"albums/{prov_album_id}")
+ return await self._parse_album(album_obj) if album_obj else None
+
+ async def get_track(self, prov_track_id) -> Track:
+ """Get full track details by id."""
+ track_obj = await self._get_data(f"tracks/{prov_track_id}")
+ return await self._parse_track(track_obj) if track_obj else None
+
+ async def get_playlist(self, prov_playlist_id) -> Playlist:
+ """Get full playlist details by id."""
+ playlist_obj = await self._get_data(f"playlists/{prov_playlist_id}")
+ return await self._parse_playlist(playlist_obj) if playlist_obj else None
+
+ async def get_album_tracks(self, prov_album_id) -> List[Track]:
+ """Get all album tracks for given album id."""
+ return [
+ await self._parse_track(item)
+ for item in await self._get_all_items(f"albums/{prov_album_id}/tracks")
+ if (item and item["id"])
+ ]
+
+ async def get_playlist_tracks(self, prov_playlist_id) -> List[Track]:
+ """Get all playlist tracks for given playlist id."""
+ count = 0
+ result = []
+ for item in await self._get_all_items(
+ f"playlists/{prov_playlist_id}/tracks",
+ ):
+ if not (item and item["track"] and item["track"]["id"]):
+ continue
+ track = await self._parse_track(item["track"])
+ # use count as position
+ track.position = count
+ result.append(track)
+ count += 1
+ return result
+
+ async def get_artist_albums(self, prov_artist_id) -> List[Album]:
+ """Get a list of all albums for the given artist."""
+ return [
+ await self._parse_album(item)
+ for item in await self._get_all_items(
+ f"artists/{prov_artist_id}/albums?include_groups=album,single,compilation"
+ )
+ if (item and item["id"])
+ ]
+
+ async def get_artist_toptracks(self, prov_artist_id) -> List[Track]:
+ """Get a list of 10 most popular tracks for the given artist."""
+ artist = await self.get_artist(prov_artist_id)
+ endpoint = f"artists/{prov_artist_id}/top-tracks"
+ items = await self._get_data(endpoint)
+ return [
+ await self._parse_track(item, artist=artist)
+ for item in items["tracks"]
+ if (item and item["id"])
+ ]
+
+ async def library_add(self, prov_item_id, media_type: MediaType):
+ """Add item to library."""
+ result = False
+ if media_type == MediaType.ARTIST:
+ result = await self._put_data(
+ "me/following", {"ids": prov_item_id, "type": "artist"}
+ )
+ elif media_type == MediaType.ALBUM:
+ result = await self._put_data("me/albums", {"ids": prov_item_id})
+ elif media_type == MediaType.TRACK:
+ result = await self._put_data("me/tracks", {"ids": prov_item_id})
+ elif media_type == MediaType.PLAYLIST:
+ result = await self._put_data(
+ f"playlists/{prov_item_id}/followers", data={"public": False}
+ )
+ return result
+
+ async def library_remove(self, prov_item_id, media_type: MediaType):
+ """Remove item from library."""
+ result = False
+ if media_type == MediaType.ARTIST:
+ result = await self._delete_data(
+ "me/following", {"ids": prov_item_id, "type": "artist"}
+ )
+ elif media_type == MediaType.ALBUM:
+ result = await self._delete_data("me/albums", {"ids": prov_item_id})
+ elif media_type == MediaType.TRACK:
+ result = await self._delete_data("me/tracks", {"ids": prov_item_id})
+ elif media_type == MediaType.PLAYLIST:
+ result = await self._delete_data(f"playlists/{prov_item_id}/followers")
+ return result
+
+ async def add_playlist_tracks(
+ self, prov_playlist_id: str, prov_track_ids: List[str]
+ ):
+ """Add track(s) to playlist."""
+ track_uris = []
+ for track_id in prov_track_ids:
+ track_uris.append(f"spotify:track:{track_id}")
+ data = {"uris": track_uris}
+ return await self._post_data(f"playlists/{prov_playlist_id}/tracks", data=data)
+
+ async def remove_playlist_tracks(
+ self, prov_playlist_id: str, positions_to_remove: Tuple[int]
+ ) -> None:
+ """Remove track(s) from playlist."""
+ track_uris = []
+ for track in await self.get_playlist_tracks(prov_playlist_id):
+ if track.position in positions_to_remove:
+ track_uris.append({"uri": f"spotify:track:{track.item_id}"})
+ if len(track_uris) == positions_to_remove:
+ break
+ data = {"tracks": track_uris}
+ return await self._delete_data(
+ f"playlists/{prov_playlist_id}/tracks", data=data
+ )
+
+ async def get_similar_tracks(self, prov_track_id, limit=25) -> List[Track]:
+ """Retrieve a dynamic list of tracks based on the provided item."""
+ endpoint = "recommendations"
+ items = await self._get_data(endpoint, seed_tracks=prov_track_id, limit=limit)
+ return [
+ await self._parse_track(item)
+ for item in items["tracks"]
+ if (item and item["id"])
+ ]
+
+ async def get_stream_details(self, item_id: str) -> StreamDetails:
+ """Return the content details for the given track when it will be streamed."""
+ # make sure a valid track is requested.
+ track = await self.get_track(item_id)
+ if not track:
+ raise MediaNotFoundError(f"track {item_id} not found")
+ # make sure that the token is still valid by just requesting it
+ await self.login()
+ return StreamDetails(
+ item_id=track.item_id,
+ provider=self.type,
+ content_type=ContentType.OGG,
+ duration=track.duration,
+ )
+
    async def get_audio_stream(
        self, streamdetails: StreamDetails, seek_position: int = 0
    ) -> AsyncGenerator[bytes, None]:
        """Return the audio stream for the provider item.

        Spawns librespot in pass-through mode and yields its raw output
        (OGG, 320kbps) chunk by chunk. If the first attempt produced no
        data at all, retries once with the AP-port workaround enabled.

        :param streamdetails: stream details as returned by get_stream_details.
        :param seek_position: optional start position in seconds.
        """
        # make sure that the token is still valid by just requesting it
        await self.login()
        librespot = await self.get_librespot_binary()
        args = [
            librespot,
            "-c",
            self._cache_dir,
            "--pass-through",
            "-b",
            "320",
            "--single-track",
            f"spotify://track:{streamdetails.item_id}",
        ]
        if seek_position:
            args += ["--start-position", str(int(seek_position))]
        if self._ap_workaround:
            args += ["--ap-port", "12345"]
        # track total output so we can detect a completely failed run
        bytes_sent = 0
        async with AsyncProcess(args) as librespot_proc:
            async for chunk in librespot_proc.iter_any():
                yield chunk
                bytes_sent += len(chunk)

        if bytes_sent == 0 and not self._ap_workaround:
            # AP resolve failure
            # https://github.com/librespot-org/librespot/issues/972
            # retry with ap-port set to invalid value, which will force fallback
            args += ["--ap-port", "12345"]
            async with AsyncProcess(args) as librespot_proc:
                async for chunk in librespot_proc.iter_any(64000):
                    yield chunk
            # remember the workaround so future streams use it right away
            self._ap_workaround = True
+
+ async def _parse_artist(self, artist_obj):
+ """Parse spotify artist object to generic layout."""
+ artist = Artist(
+ item_id=artist_obj["id"], provider=self.type, name=artist_obj["name"]
+ )
+ artist.add_provider_id(
+ MediaItemProviderId(
+ item_id=artist_obj["id"],
+ prov_type=self.type,
+ prov_id=self.id,
+ url=artist_obj["external_urls"]["spotify"],
+ )
+ )
+ if "genres" in artist_obj:
+ artist.metadata.genres = set(artist_obj["genres"])
+ if artist_obj.get("images"):
+ for img in artist_obj["images"]:
+ img_url = img["url"]
+ if "2a96cbd8b46e442fc41c2b86b821562f" not in img_url:
+ artist.metadata.images = [MediaItemImage(ImageType.THUMB, img_url)]
+ break
+ return artist
+
+ async def _parse_album(self, album_obj: dict):
+ """Parse spotify album object to generic layout."""
+ name, version = parse_title_and_version(album_obj["name"])
+ album = Album(
+ item_id=album_obj["id"], provider=self.type, name=name, version=version
+ )
+ for artist_obj in album_obj["artists"]:
+ album.artists.append(await self._parse_artist(artist_obj))
+ if album_obj["album_type"] == "single":
+ album.album_type = AlbumType.SINGLE
+ elif album_obj["album_type"] == "compilation":
+ album.album_type = AlbumType.COMPILATION
+ elif album_obj["album_type"] == "album":
+ album.album_type = AlbumType.ALBUM
+ if "genres" in album_obj:
+ album.metadata.genre = set(album_obj["genres"])
+ if album_obj.get("images"):
+ album.metadata.images = [
+ MediaItemImage(ImageType.THUMB, album_obj["images"][0]["url"])
+ ]
+ if "external_ids" in album_obj and album_obj["external_ids"].get("upc"):
+ album.upc = album_obj["external_ids"]["upc"]
+ if "label" in album_obj:
+ album.metadata.label = album_obj["label"]
+ if album_obj.get("release_date"):
+ album.year = int(album_obj["release_date"].split("-")[0])
+ if album_obj.get("copyrights"):
+ album.metadata.copyright = album_obj["copyrights"][0]["text"]
+ if album_obj.get("explicit"):
+ album.metadata.explicit = album_obj["explicit"]
+ album.add_provider_id(
+ MediaItemProviderId(
+ item_id=album_obj["id"],
+ prov_type=self.type,
+ prov_id=self.id,
+ quality=MediaQuality.LOSSY_OGG,
+ url=album_obj["external_urls"]["spotify"],
+ )
+ )
+ return album
+
    async def _parse_track(self, track_obj, artist=None):
        """Parse spotify track object to generic layout.

        :param track_obj: raw track object from the Spotify API.
        :param artist: optional pre-parsed Artist to prepend to the
            track artists (used by get_artist_toptracks).
        """
        name, version = parse_title_and_version(track_obj["name"])
        track = Track(
            item_id=track_obj["id"],
            provider=self.type,
            name=name,
            version=version,
            # Spotify reports duration in milliseconds
            duration=track_obj["duration_ms"] / 1000,
            disc_number=track_obj["disc_number"],
            track_number=track_obj["track_number"],
            # "position" is injected by _get_all_items, not a Spotify field
            position=track_obj.get("position"),
        )
        if artist:
            track.artists.append(artist)
        # NOTE(review): the loop below reassigns the `artist` parameter;
        # after the loop it no longer refers to the caller-supplied artist.
        for track_artist in track_obj.get("artists", []):
            artist = await self._parse_artist(track_artist)
            # de-duplicate against artists already attached to the track
            if artist and artist.item_id not in {x.item_id for x in track.artists}:
                track.artists.append(artist)

        track.metadata.explicit = track_obj["explicit"]
        if "preview_url" in track_obj:
            track.metadata.preview = track_obj["preview_url"]
        if "external_ids" in track_obj and "isrc" in track_obj["external_ids"]:
            track.isrc = track_obj["external_ids"]["isrc"]
        # track objects embed their album on most (but not all) endpoints
        if "album" in track_obj:
            track.album = await self._parse_album(track_obj["album"])
            if track_obj["album"].get("images"):
                track.metadata.images = [
                    MediaItemImage(
                        ImageType.THUMB, track_obj["album"]["images"][0]["url"]
                    )
                ]
        if track_obj.get("copyright"):
            track.metadata.copyright = track_obj["copyright"]
        if track_obj.get("explicit"):
            track.metadata.explicit = True
        if track_obj.get("popularity"):
            track.metadata.popularity = track_obj["popularity"]
        track.add_provider_id(
            MediaItemProviderId(
                item_id=track_obj["id"],
                prov_type=self.type,
                prov_id=self.id,
                quality=MediaQuality.LOSSY_OGG,
                url=track_obj["external_urls"]["spotify"],
                # local files and region-restricted tracks are not streamable
                available=not track_obj["is_local"] and track_obj["is_playable"],
            )
        )
        return track
+
+ async def _parse_playlist(self, playlist_obj):
+ """Parse spotify playlist object to generic layout."""
+ playlist = Playlist(
+ item_id=playlist_obj["id"],
+ provider=self.type,
+ name=playlist_obj["name"],
+ owner=playlist_obj["owner"]["display_name"],
+ )
+ playlist.add_provider_id(
+ MediaItemProviderId(
+ item_id=playlist_obj["id"],
+ prov_type=self.type,
+ prov_id=self.id,
+ url=playlist_obj["external_urls"]["spotify"],
+ )
+ )
+ playlist.is_editable = (
+ playlist_obj["owner"]["id"] == self._sp_user["id"]
+ or playlist_obj["collaborative"]
+ )
+ if playlist_obj.get("images"):
+ playlist.metadata.images = [
+ MediaItemImage(ImageType.THUMB, playlist_obj["images"][0]["url"])
+ ]
+ playlist.metadata.checksum = str(playlist_obj["snapshot_id"])
+ return playlist
+
+ async def login(self) -> dict:
+ """Log-in Spotify and return tokeninfo."""
+ # return existing token if we have one in memory
+ if (
+ self._auth_token
+ and os.path.isdir(self._cache_dir)
+ and (self._auth_token["expiresAt"] > int(time.time()) + 20)
+ ):
+ return self._auth_token
+ tokeninfo, userinfo = None, self._sp_user
+ if not self.config.username or not self.config.password:
+ raise LoginFailed("Invalid login credentials")
+ # retrieve token with librespot
+ retries = 0
+ while retries < 20:
+ try:
+ retries += 1
+ if not tokeninfo:
+ tokeninfo = await asyncio.wait_for(self._get_token(), 5)
+ if tokeninfo and not userinfo:
+ userinfo = await asyncio.wait_for(
+ self._get_data("me", tokeninfo=tokeninfo), 5
+ )
+ if tokeninfo and userinfo:
+ # we have all info we need!
+ break
+ if retries > 2:
+ # switch to ap workaround after 2 retries
+ self._ap_workaround = True
+ except asyncio.exceptions.TimeoutError:
+ await asyncio.sleep(2)
+ if tokeninfo and userinfo:
+ self._auth_token = tokeninfo
+ self._sp_user = userinfo
+ self.mass.metadata.preferred_language = userinfo["country"]
+ self.logger.info("Succesfully logged in to Spotify as %s", userinfo["id"])
+ self._auth_token = tokeninfo
+ return tokeninfo
+ if tokeninfo and not userinfo:
+ raise LoginFailed(
+ "Unable to retrieve userdetails from Spotify API - probably just a temporary error"
+ )
+ if self.config.username.isnumeric():
+ # a spotify free/basic account can be recognized when
+ # the username consists of numbers only - check that here
+ # an integer can be parsed of the username, this is a free account
+ raise LoginFailed("Only Spotify Premium accounts are supported")
+ raise LoginFailed(f"Login failed for user {self.config.username}")
+
+ async def _get_token(self):
+ """Get spotify auth token with librespot bin."""
+ time_start = time.time()
+ # authorize with username and password (NOTE: this can also be Spotify Connect)
+ args = [
+ await self.get_librespot_binary(),
+ "-O",
+ "-c",
+ self._cache_dir,
+ "-a",
+ "-u",
+ self.config.username,
+ "-p",
+ self.config.password,
+ ]
+ librespot = await asyncio.create_subprocess_exec(*args)
+ await librespot.wait()
+ # get token with (authorized) librespot
+ scopes = [
+ "user-read-playback-state",
+ "user-read-currently-playing",
+ "user-modify-playback-state",
+ "playlist-read-private",
+ "playlist-read-collaborative",
+ "playlist-modify-public",
+ "playlist-modify-private",
+ "user-follow-modify",
+ "user-follow-read",
+ "user-library-read",
+ "user-library-modify",
+ "user-read-private",
+ "user-read-email",
+ "user-read-birthdate",
+ "user-top-read",
+ ]
+ scope = ",".join(scopes)
+ args = [
+ await self.get_librespot_binary(),
+ "-O",
+ "-t",
+ "--client-id",
+ app_var(2),
+ "--scope",
+ scope,
+ "-c",
+ self._cache_dir,
+ ]
+ if self._ap_workaround:
+ args += ["--ap-port", "12345"]
+ librespot = await asyncio.create_subprocess_exec(
+ *args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT
+ )
+ stdout, _ = await librespot.communicate()
+ duration = round(time.time() - time_start, 2)
+ try:
+ result = json.loads(stdout)
+ except JSONDecodeError:
+ self.logger.warning(
+ "Error while retrieving Spotify token after %s seconds, details: %s",
+ duration,
+ stdout.decode(),
+ )
+ return None
+ self.logger.debug(
+ "Retrieved Spotify token using librespot in %s seconds",
+ duration,
+ )
+ # transform token info to spotipy compatible format
+ if result and "accessToken" in result:
+ tokeninfo = result
+ tokeninfo["expiresAt"] = tokeninfo["expiresIn"] + int(time.time())
+ return tokeninfo
+ return None
+
+ async def _get_all_items(self, endpoint, key="items", **kwargs) -> List[dict]:
+ """Get all items from a paged list."""
+ limit = 50
+ offset = 0
+ all_items = []
+ while True:
+ kwargs["limit"] = limit
+ kwargs["offset"] = offset
+ result = await self._get_data(endpoint, **kwargs)
+ offset += limit
+ if not result or key not in result or not result[key]:
+ break
+ for item in result[key]:
+ item["position"] = len(all_items) + 1
+ all_items.append(item)
+ if len(result[key]) < limit:
+ break
+ return all_items
+
+ async def _get_data(self, endpoint, tokeninfo: Optional[dict] = None, **kwargs):
+ """Get data from api."""
+ url = f"https://api.spotify.com/v1/{endpoint}"
+ kwargs["market"] = "from_token"
+ kwargs["country"] = "from_token"
+ if tokeninfo is None:
+ tokeninfo = await self.login()
+ headers = {"Authorization": f'Bearer {tokeninfo["accessToken"]}'}
+ async with self._throttler:
+ time_start = time.time()
+ try:
+ async with self.mass.http_session.get(
+ url, headers=headers, params=kwargs, verify_ssl=False, timeout=120
+ ) as response:
+ result = await response.json()
+ if "error" in result or (
+ "status" in result and "error" in result["status"]
+ ):
+ self.logger.error("%s - %s", endpoint, result)
+ return None
+ except (
+ aiohttp.ContentTypeError,
+ JSONDecodeError,
+ ) as err:
+ self.logger.error("%s - %s", endpoint, str(err))
+ return None
+ finally:
+ self.logger.debug(
+ "Processing GET/%s took %s seconds",
+ endpoint,
+ round(time.time() - time_start, 2),
+ )
+ return result
+
+ async def _delete_data(self, endpoint, data=None, **kwargs):
+ """Delete data from api."""
+ url = f"https://api.spotify.com/v1/{endpoint}"
+ token = await self.login()
+ if not token:
+ return None
+ headers = {"Authorization": f'Bearer {token["accessToken"]}'}
+ async with self.mass.http_session.delete(
+ url, headers=headers, params=kwargs, json=data, verify_ssl=False
+ ) as response:
+ return await response.text()
+
+ async def _put_data(self, endpoint, data=None, **kwargs):
+ """Put data on api."""
+ url = f"https://api.spotify.com/v1/{endpoint}"
+ token = await self.login()
+ if not token:
+ return None
+ headers = {"Authorization": f'Bearer {token["accessToken"]}'}
+ async with self.mass.http_session.put(
+ url, headers=headers, params=kwargs, json=data, verify_ssl=False
+ ) as response:
+ return await response.text()
+
+ async def _post_data(self, endpoint, data=None, **kwargs):
+ """Post data on api."""
+ url = f"https://api.spotify.com/v1/{endpoint}"
+ token = await self.login()
+ if not token:
+ return None
+ headers = {"Authorization": f'Bearer {token["accessToken"]}'}
+ async with self.mass.http_session.post(
+ url, headers=headers, params=kwargs, json=data, verify_ssl=False
+ ) as response:
+ return await response.text()
+
+ async def get_librespot_binary(self):
+ """Find the correct librespot binary belonging to the platform."""
+ if self._librespot_bin is not None:
+ return self._librespot_bin
+
+ async def check_librespot(librespot_path: str) -> str | None:
+ try:
+ librespot = await asyncio.create_subprocess_exec(
+ *[librespot_path, "--check"], stdout=asyncio.subprocess.PIPE
+ )
+ stdout, _ = await librespot.communicate()
+ if (
+ librespot.returncode == 0
+ and b"ok spotty" in stdout
+ and b"using librespot" in stdout
+ ):
+ self._librespot_bin = librespot_path
+ return librespot_path
+ except OSError:
+ return None
+
+ base_path = os.path.join(os.path.dirname(__file__), "librespot")
+ if platform.system() == "Windows":
+ if librespot := await check_librespot(
+ os.path.join(base_path, "windows", "librespot.exe")
+ ):
+ return librespot
+ if platform.system() == "Darwin":
+ # macos binary is x86_64 intel
+ if librespot := await check_librespot(
+ os.path.join(base_path, "osx", "librespot")
+ ):
+ return librespot
+
+ if platform.system() == "FreeBSD":
+ # FreeBSD binary is x86_64 intel
+ if librespot := await check_librespot(
+ os.path.join(base_path, "freebsd", "librespot")
+ ):
+ return librespot
+
+ if platform.system() == "Linux":
+ architecture = platform.machine()
+ if architecture in ["AMD64", "x86_64"]:
+ # generic linux x86_64 binary
+ if librespot := await check_librespot(
+ os.path.join(
+ base_path,
+ "linux",
+ "librespot-x86_64",
+ )
+ ):
+ return librespot
+
+ # arm architecture... try all options one by one...
+ for arch in ["aarch64", "armv7", "armhf", "arm"]:
+ if librespot := await check_librespot(
+ os.path.join(
+ base_path,
+ "linux",
+ f"librespot-{arch}",
+ )
+ ):
+ return librespot
+
+ raise RuntimeError(
+ f"Unable to locate Libespot for {platform.system()} ({platform.machine()})"
+ )
+++ /dev/null
-"""Tune-In musicprovider support for MusicAssistant."""
-from __future__ import annotations
-
-from time import time
-from typing import AsyncGenerator, List, Optional, Tuple
-
-from asyncio_throttle import Throttler
-
-from music_assistant.helpers.audio import get_radio_stream
-from music_assistant.helpers.playlists import fetch_playlist
-from music_assistant.helpers.util import create_sort_name
-from music_assistant.models.enums import MusicProviderFeature, ProviderType
-from music_assistant.models.errors import LoginFailed, MediaNotFoundError
-from music_assistant.models.media_items import (
- ContentType,
- ImageType,
- MediaItemImage,
- MediaItemProviderId,
- MediaQuality,
- MediaType,
- Radio,
- StreamDetails,
-)
-from music_assistant.models.music_provider import MusicProvider
-
-
-class TuneInProvider(MusicProvider):
- """Provider implementation for Tune In."""
-
- _attr_type = ProviderType.TUNEIN
- _attr_name = "Tune-in Radio"
- _throttler = Throttler(rate_limit=1, period=1)
-
- @property
- def supported_features(self) -> Tuple[MusicProviderFeature]:
- """Return the features supported by this MusicProvider."""
- return (
- MusicProviderFeature.LIBRARY_RADIOS,
- MusicProviderFeature.BROWSE,
- )
-
- async def setup(self) -> bool:
- """Handle async initialization of the provider."""
- if not self.config.enabled:
- return False
- if not self.config.username:
- raise LoginFailed("Username is invalid")
- if "@" in self.config.username:
- self.logger.warning(
- "Emailadress detected instead of username, "
- "it is advised to use the tunein username instead of email."
- )
- return True
-
- async def get_library_radios(self) -> AsyncGenerator[Radio, None]:
- """Retrieve library/subscribed radio stations from the provider."""
-
- async def parse_items(
- items: List[dict], folder: str = None
- ) -> AsyncGenerator[Radio, None]:
- for item in items:
- item_type = item.get("type", "")
- if item_type == "audio":
- if "preset_id" not in item:
- continue
- # each radio station can have multiple streams add each one as different quality
- stream_info = await self.__get_data(
- "Tune.ashx", id=item["preset_id"]
- )
- for stream in stream_info["body"]:
- yield await self._parse_radio(item, stream, folder)
- elif item_type == "link" and item.get("item") == "url":
- # custom url
- yield await self._parse_radio(item)
- elif item_type == "link":
- # stations are in sublevel (new style)
- if sublevel := await self.__get_data(item["URL"], render="json"):
- async for subitem in parse_items(
- sublevel["body"], item["text"]
- ):
- yield subitem
- elif item.get("children"):
- # stations are in sublevel (old style ?)
- async for subitem in parse_items(item["children"], item["text"]):
- yield subitem
-
- data = await self.__get_data("Browse.ashx", c="presets")
- if data and "body" in data:
- async for item in parse_items(data["body"]):
- yield item
-
- async def get_radio(self, prov_radio_id: str) -> Radio:
- """Get radio station details."""
- if not prov_radio_id.startswith("http"):
- prov_radio_id, media_type = prov_radio_id.split("--", 1)
- params = {"c": "composite", "detail": "listing", "id": prov_radio_id}
- result = await self.__get_data("Describe.ashx", **params)
- if result and result.get("body") and result["body"][0].get("children"):
- item = result["body"][0]["children"][0]
- stream_info = await self.__get_data("Tune.ashx", id=prov_radio_id)
- for stream in stream_info["body"]:
- if stream["media_type"] != media_type:
- continue
- return await self._parse_radio(item, stream)
- # fallback - e.g. for handle custom urls ...
- async for radio in self.get_library_radios():
- if radio.item_id == prov_radio_id:
- return radio
- return None
-
- async def _parse_radio(
- self, details: dict, stream: Optional[dict] = None, folder: Optional[str] = None
- ) -> Radio:
- """Parse Radio object from json obj returned from api."""
- if "name" in details:
- name = details["name"]
- else:
- # parse name from text attr
- name = details["text"]
- if " | " in name:
- name = name.split(" | ")[1]
- name = name.split(" (")[0]
-
- if stream is None:
- # custom url (no stream object present)
- url = details["URL"]
- item_id = url
- # TODO: parse header of stream for audio quality details?
- quality = MediaQuality.UNKNOWN
- else:
- url = stream["url"]
- item_id = f'{details["preset_id"]}--{stream["media_type"]}'
- if stream["media_type"] == "aac":
- quality = MediaQuality.LOSSY_AAC
- elif stream["media_type"] == "ogg":
- quality = MediaQuality.LOSSY_OGG
- else:
- quality = MediaQuality.LOSSY_MP3
-
- radio = Radio(item_id=item_id, provider=self.type, name=name)
- radio.add_provider_id(
- MediaItemProviderId(
- item_id=item_id,
- prov_type=self.type,
- prov_id=self.id,
- quality=quality,
- details=url,
- )
- )
- # preset number is used for sorting (not present at stream time)
- preset_number = details.get("preset_number")
- if preset_number and folder:
- radio.sort_name = f'{folder}-{details["preset_number"]}'
- elif preset_number:
- radio.sort_name = details["preset_number"]
- radio.sort_name += create_sort_name(name)
- if "text" in details:
- radio.metadata.description = details["text"]
- # images
- if img := details.get("image"):
- radio.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
- if img := details.get("logo"):
- radio.metadata.images = [MediaItemImage(ImageType.LOGO, img)]
- return radio
-
- async def get_stream_details(self, item_id: str) -> StreamDetails:
- """Get streamdetails for a radio station."""
- if item_id.startswith("http"):
- # custom url
- return StreamDetails(
- provider=self.type,
- item_id=item_id,
- content_type=ContentType.UNKNOWN,
- media_type=MediaType.RADIO,
- data=item_id,
- )
- item_id, media_type = item_id.split("--", 1)
- stream_info = await self.__get_data("Tune.ashx", id=item_id)
- for stream in stream_info["body"]:
-
- if stream["media_type"] != media_type:
- continue
- # check if the radio stream is not a playlist
- url = stream["url"]
- if url.endswith("m3u8") or url.endswith("m3u") or url.endswith("pls"):
- playlist = await fetch_playlist(self.mass, url)
- url = playlist[0]
- return StreamDetails(
- provider=self.type,
- item_id=item_id,
- content_type=ContentType(stream["media_type"]),
- media_type=MediaType.RADIO,
- data=url,
- expires=time() + 24 * 3600,
- )
- raise MediaNotFoundError(f"Unable to retrieve stream details for {item_id}")
-
- async def get_audio_stream(
- self, streamdetails: StreamDetails, seek_position: int = 0
- ) -> AsyncGenerator[bytes, None]:
- """Return the audio stream for the provider item."""
- async for chunk in get_radio_stream(
- self.mass, streamdetails.data, streamdetails
- ):
- yield chunk
-
- async def __get_data(self, endpoint: str, **kwargs):
- """Get data from api."""
- if endpoint.startswith("http"):
- url = endpoint
- else:
- url = f"https://opml.radiotime.com/{endpoint}"
- kwargs["formats"] = "ogg,aac,wma,mp3"
- kwargs["username"] = self.config.username
- kwargs["partnerId"] = "1"
- kwargs["render"] = "json"
- async with self._throttler:
- async with self.mass.http_session.get(
- url, params=kwargs, verify_ssl=False
- ) as response:
- result = await response.json()
- if not result or "error" in result:
- self.logger.error(url)
- self.logger.error(kwargs)
- result = None
- return result
--- /dev/null
+"""Package with Tune-In Music provider."""
+
+from .tunein import TuneInProvider # noqa
--- /dev/null
+"""Tune-In musicprovider support for MusicAssistant."""
+from __future__ import annotations
+
+from time import time
+from typing import AsyncGenerator, List, Optional, Tuple
+
+from asyncio_throttle import Throttler
+
+from music_assistant.helpers.audio import get_radio_stream
+from music_assistant.helpers.playlists import fetch_playlist
+from music_assistant.helpers.util import create_sort_name
+from music_assistant.models.enums import MusicProviderFeature, ProviderType
+from music_assistant.models.errors import LoginFailed, MediaNotFoundError
+from music_assistant.models.media_items import (
+ ContentType,
+ ImageType,
+ MediaItemImage,
+ MediaItemProviderId,
+ MediaQuality,
+ MediaType,
+ Radio,
+ StreamDetails,
+)
+from music_assistant.models.music_provider import MusicProvider
+
+
class TuneInProvider(MusicProvider):
    """Provider implementation for Tune In."""

    _attr_type = ProviderType.TUNEIN
    _attr_name = "Tune-in Radio"
    # shared throttler: at most 1 request per second to the TuneIn API
    _throttler = Throttler(rate_limit=1, period=1)

    @property
    def supported_features(self) -> Tuple[MusicProviderFeature]:
        """Return the features supported by this MusicProvider."""
        return (
            MusicProviderFeature.LIBRARY_RADIOS,
            MusicProviderFeature.BROWSE,
        )

    async def setup(self) -> bool:
        """Handle async initialization of the provider."""
        if not self.config.enabled:
            return False
        if not self.config.username:
            raise LoginFailed("Username is invalid")
        if "@" in self.config.username:
            self.logger.warning(
                "Emailadress detected instead of username, "
                "it is advised to use the tunein username instead of email."
            )
        return True

    async def get_library_radios(self) -> AsyncGenerator[Radio, None]:
        """Retrieve library/subscribed radio stations from the provider."""

        async def parse_items(
            items: List[dict], folder: str = None
        ) -> AsyncGenerator[Radio, None]:
            # recursively walk the (possibly nested) OPML preset listing
            for item in items:
                item_type = item.get("type", "")
                if item_type == "audio":
                    if "preset_id" not in item:
                        continue
                    # each radio station can have multiple streams add each one as different quality
                    stream_info = await self.__get_data(
                        "Tune.ashx", id=item["preset_id"]
                    )
                    # guard: __get_data returns None on an error response
                    for stream in (stream_info["body"] if stream_info else []):
                        yield await self._parse_radio(item, stream, folder)
                elif item_type == "link" and item.get("item") == "url":
                    # custom url
                    yield await self._parse_radio(item)
                elif item_type == "link":
                    # stations are in sublevel (new style)
                    if sublevel := await self.__get_data(item["URL"], render="json"):
                        async for subitem in parse_items(
                            sublevel["body"], item["text"]
                        ):
                            yield subitem
                elif item.get("children"):
                    # stations are in sublevel (old style ?)
                    async for subitem in parse_items(item["children"], item["text"]):
                        yield subitem

        data = await self.__get_data("Browse.ashx", c="presets")
        if data and "body" in data:
            async for item in parse_items(data["body"]):
                yield item

    async def get_radio(self, prov_radio_id: str) -> Radio:
        """Get radio station details."""
        if not prov_radio_id.startswith("http"):
            # regular item id is formatted as <preset_id>--<media_type>
            prov_radio_id, media_type = prov_radio_id.split("--", 1)
            params = {"c": "composite", "detail": "listing", "id": prov_radio_id}
            result = await self.__get_data("Describe.ashx", **params)
            if result and result.get("body") and result["body"][0].get("children"):
                item = result["body"][0]["children"][0]
                stream_info = await self.__get_data("Tune.ashx", id=prov_radio_id)
                # guard: __get_data returns None on an error response
                for stream in (stream_info["body"] if stream_info else []):
                    if stream["media_type"] != media_type:
                        continue
                    return await self._parse_radio(item, stream)
        # fallback - e.g. for handle custom urls ...
        async for radio in self.get_library_radios():
            if radio.item_id == prov_radio_id:
                return radio
        return None

    async def _parse_radio(
        self, details: dict, stream: Optional[dict] = None, folder: Optional[str] = None
    ) -> Radio:
        """Parse Radio object from json obj returned from api."""
        if "name" in details:
            name = details["name"]
        else:
            # parse name from text attr
            name = details["text"]
            if " | " in name:
                name = name.split(" | ")[1]
            name = name.split(" (")[0]

        if stream is None:
            # custom url (no stream object present)
            url = details["URL"]
            item_id = url
            # TODO: parse header of stream for audio quality details?
            quality = MediaQuality.UNKNOWN
        else:
            url = stream["url"]
            item_id = f'{details["preset_id"]}--{stream["media_type"]}'
            if stream["media_type"] == "aac":
                quality = MediaQuality.LOSSY_AAC
            elif stream["media_type"] == "ogg":
                quality = MediaQuality.LOSSY_OGG
            else:
                quality = MediaQuality.LOSSY_MP3

        radio = Radio(item_id=item_id, provider=self.type, name=name)
        radio.add_provider_id(
            MediaItemProviderId(
                item_id=item_id,
                prov_type=self.type,
                prov_id=self.id,
                quality=quality,
                details=url,
            )
        )
        # preset number is used for sorting (not present at stream time)
        preset_number = details.get("preset_number")
        if preset_number and folder:
            radio.sort_name = f'{folder}-{details["preset_number"]}'
        elif preset_number:
            # cast to str: the api may deliver the preset number as an int,
            # which would break the string concatenation below
            radio.sort_name = str(details["preset_number"])
        radio.sort_name += create_sort_name(name)
        if "text" in details:
            radio.metadata.description = details["text"]
        # images: collect both thumb and logo
        # (fix: the logo previously overwrote the thumb image)
        images = []
        if img := details.get("image"):
            images.append(MediaItemImage(ImageType.THUMB, img))
        if img := details.get("logo"):
            images.append(MediaItemImage(ImageType.LOGO, img))
        if images:
            radio.metadata.images = images
        return radio

    async def get_stream_details(self, item_id: str) -> StreamDetails:
        """Get streamdetails for a radio station."""
        if item_id.startswith("http"):
            # custom url
            return StreamDetails(
                provider=self.type,
                item_id=item_id,
                content_type=ContentType.UNKNOWN,
                media_type=MediaType.RADIO,
                data=item_id,
            )
        item_id, media_type = item_id.split("--", 1)
        stream_info = await self.__get_data("Tune.ashx", id=item_id)
        # guard: __get_data returns None on an error response
        for stream in (stream_info["body"] if stream_info else []):
            if stream["media_type"] != media_type:
                continue
            # check if the radio stream is not a playlist
            url = stream["url"]
            if url.endswith("m3u8") or url.endswith("m3u") or url.endswith("pls"):
                playlist = await fetch_playlist(self.mass, url)
                url = playlist[0]
            return StreamDetails(
                provider=self.type,
                item_id=item_id,
                content_type=ContentType(stream["media_type"]),
                media_type=MediaType.RADIO,
                data=url,
                expires=time() + 24 * 3600,
            )
        raise MediaNotFoundError(f"Unable to retrieve stream details for {item_id}")

    async def get_audio_stream(
        self, streamdetails: StreamDetails, seek_position: int = 0
    ) -> AsyncGenerator[bytes, None]:
        """Return the audio stream for the provider item."""
        async for chunk in get_radio_stream(
            self.mass, streamdetails.data, streamdetails
        ):
            yield chunk

    async def __get_data(self, endpoint: str, **kwargs):
        """Get data from api.

        Returns the parsed json response, or None when the api
        returned an error (which is logged).
        """
        if endpoint.startswith("http"):
            url = endpoint
        else:
            url = f"https://opml.radiotime.com/{endpoint}"
            kwargs["formats"] = "ogg,aac,wma,mp3"
            kwargs["username"] = self.config.username
            kwargs["partnerId"] = "1"
            kwargs["render"] = "json"
        async with self._throttler:
            async with self.mass.http_session.get(
                url, params=kwargs, verify_ssl=False
            ) as response:
                result = await response.json()
                if not result or "error" in result:
                    self.logger.error(url)
                    self.logger.error(kwargs)
                    result = None
                return result
+++ /dev/null
-"""Basic provider allowing for external URL's to be streamed."""
-from __future__ import annotations
-
-import os
-from typing import AsyncGenerator, Tuple
-
-from music_assistant.helpers.audio import (
- get_file_stream,
- get_http_stream,
- get_radio_stream,
-)
-from music_assistant.helpers.playlists import fetch_playlist
-from music_assistant.helpers.tags import AudioTags, parse_tags
-from music_assistant.models.config import MusicProviderConfig
-from music_assistant.models.enums import (
- ContentType,
- ImageType,
- MediaQuality,
- MediaType,
- ProviderType,
-)
-from music_assistant.models.media_items import (
- Artist,
- MediaItemImage,
- MediaItemProviderId,
- MediaItemType,
- Radio,
- StreamDetails,
- Track,
-)
-from music_assistant.models.music_provider import MusicProvider
-
-PROVIDER_CONFIG = MusicProviderConfig(ProviderType.URL)
-
-# pylint: disable=arguments-renamed
-
-
-class URLProvider(MusicProvider):
- """Music Provider for manual URL's/files added to the queue."""
-
- _attr_name: str = "URL"
- _attr_type: ProviderType = ProviderType.URL
- _attr_available: bool = True
- _full_url = {}
-
- async def setup(self) -> bool:
- """
- Handle async initialization of the provider.
-
- Called when provider is registered.
- """
- return True
-
- async def get_track(self, prov_track_id: str) -> Track:
- """Get full track details by id."""
- return await self.parse_item(prov_track_id)
-
- async def get_radio(self, prov_radio_id: str) -> Radio:
- """Get full radio details by id."""
- return await self.parse_item(prov_radio_id)
-
- async def get_artist(self, prov_artist_id: str) -> Track:
- """Get full artist details by id."""
- artist = prov_artist_id
- # this is here for compatibility reasons only
- return Artist(
- artist,
- self.type,
- artist,
- provider_ids={
- MediaItemProviderId(artist, self.type, self.id, available=False)
- },
- )
-
- async def get_item(self, media_type: MediaType, prov_item_id: str) -> MediaItemType:
- """Get single MediaItem from provider."""
- if media_type == MediaType.ARTIST:
- return await self.get_artist(prov_item_id)
- if media_type == MediaType.TRACK:
- return await self.get_track(prov_item_id)
- if media_type == MediaType.RADIO:
- return await self.get_radio(prov_item_id)
- if media_type == MediaType.UNKNOWN:
- return await self.parse_item(prov_item_id)
- raise NotImplementedError
-
- async def parse_item(
- self, item_id_or_url: str, force_refresh: bool = False
- ) -> Track | Radio:
- """Parse plain URL to MediaItem of type Radio or Track."""
- item_id, url, media_info = await self._get_media_info(
- item_id_or_url, force_refresh
- )
- is_radio = media_info.get("icy-name") or not media_info.duration
- if is_radio:
- # treat as radio
- media_item = Radio(
- item_id=item_id,
- provider=self.type,
- name=media_info.get("icy-name") or media_info.title,
- )
- else:
- media_item = Track(
- item_id=item_id,
- provider=self.type,
- name=media_info.title,
- duration=int(media_info.duration or 0),
- artists=[
- await self.get_artist(artist) for artist in media_info.artists
- ],
- )
-
- quality = MediaQuality.from_file_type(media_info.format)
- media_item.provider_ids = {
- MediaItemProviderId(item_id, self.type, self.id, quality=quality)
- }
- if media_info.has_cover_image:
- media_item.metadata.images = [MediaItemImage(ImageType.THUMB, url, True)]
- return media_item
-
- async def _get_media_info(
- self, item_id_or_url: str, force_refresh: bool = False
- ) -> Tuple[str, str, AudioTags]:
- """Retrieve (cached) mediainfo for url."""
- # check if the radio stream is not a playlist
- if (
- item_id_or_url.endswith("m3u8")
- or item_id_or_url.endswith("m3u")
- or item_id_or_url.endswith("pls")
- ):
- playlist = await fetch_playlist(self.mass, item_id_or_url)
- url = playlist[0]
- item_id = item_id_or_url
- self._full_url[item_id] = url
- elif "?" in item_id_or_url or "&" in item_id_or_url:
- # store the 'real' full url to be picked up later
- # this makes sure that we're not storing any temporary data like auth keys etc
- # a request for an url mediaitem always passes here first before streamdetails
- url = item_id_or_url
- item_id = item_id_or_url.split("?")[0].split("&")[0]
- self._full_url[item_id] = url
- else:
- url = self._full_url.get(item_id_or_url, item_id_or_url)
- item_id = item_id_or_url
- cache_key = f"{self.type.value}.media_info.{item_id}"
- # do we have some cached info for this url ?
- cached_info = await self.mass.cache.get(cache_key)
- if cached_info and not force_refresh:
- media_info = AudioTags.parse(cached_info)
- else:
- # parse info with ffprobe (and store in cache)
- media_info = await parse_tags(url)
- if "authSig" in url:
- media_info.has_cover_image = False
- await self.mass.cache.set(cache_key, media_info.raw)
- return (item_id, url, media_info)
-
- async def get_stream_details(self, item_id: str) -> StreamDetails | None:
- """Get streamdetails for a track/radio."""
- item_id, url, media_info = await self._get_media_info(item_id)
- is_radio = media_info.get("icy-name") or not media_info.duration
- return StreamDetails(
- provider=self.type,
- item_id=item_id,
- content_type=ContentType.try_parse(media_info.format),
- media_type=MediaType.RADIO if is_radio else MediaType.TRACK,
- sample_rate=media_info.sample_rate,
- bit_depth=media_info.bits_per_sample,
- direct=None if is_radio else url,
- data=url,
- )
-
- async def get_audio_stream(
- self, streamdetails: StreamDetails, seek_position: int = 0
- ) -> AsyncGenerator[bytes, None]:
- """Return the audio stream for the provider item."""
- if streamdetails.media_type == MediaType.RADIO:
- # radio stream url
- async for chunk in get_radio_stream(
- self.mass, streamdetails.data, streamdetails
- ):
- yield chunk
- elif os.path.isfile(streamdetails.data):
- # local file
- async for chunk in get_file_stream(
- self.mass, streamdetails.data, streamdetails, seek_position
- ):
- yield chunk
- else:
- # regular stream url (without icy meta)
- async for chunk in get_http_stream(
- self.mass, streamdetails.data, streamdetails, seek_position
- ):
- yield chunk
--- /dev/null
+"""Package with URL Music provider."""
+
+from .url import URLProvider # noqa
--- /dev/null
+"""Basic provider allowing for external URL's to be streamed."""
+from __future__ import annotations
+
+import os
+from typing import AsyncGenerator, Tuple
+
+from music_assistant.helpers.audio import (
+ get_file_stream,
+ get_http_stream,
+ get_radio_stream,
+)
+from music_assistant.helpers.playlists import fetch_playlist
+from music_assistant.helpers.tags import AudioTags, parse_tags
+from music_assistant.models.config import MusicProviderConfig
+from music_assistant.models.enums import (
+ ContentType,
+ ImageType,
+ MediaQuality,
+ MediaType,
+ ProviderType,
+)
+from music_assistant.models.media_items import (
+ Artist,
+ MediaItemImage,
+ MediaItemProviderId,
+ MediaItemType,
+ Radio,
+ StreamDetails,
+ Track,
+)
+from music_assistant.models.music_provider import MusicProvider
+
+PROVIDER_CONFIG = MusicProviderConfig(ProviderType.URL)
+
+# pylint: disable=arguments-renamed
+
+
class URLProvider(MusicProvider):
    """Music Provider for manual URL's/files added to the queue."""

    _attr_name: str = "URL"
    _attr_type: ProviderType = ProviderType.URL
    _attr_available: bool = True
    # maps stripped item_id -> full original url (incl. query part)
    # NOTE(review): class-level mutable dict, shared by all instances;
    # fine while this provider is effectively a singleton - confirm before reuse
    _full_url = {}

    async def setup(self) -> bool:
        """
        Handle async initialization of the provider.

        Called when provider is registered.
        """
        return True

    async def get_track(self, prov_track_id: str) -> Track:
        """Get full track details by id."""
        return await self.parse_item(prov_track_id)

    async def get_radio(self, prov_radio_id: str) -> Radio:
        """Get full radio details by id."""
        return await self.parse_item(prov_radio_id)

    async def get_artist(self, prov_artist_id: str) -> Artist:
        """Get full artist details by id."""
        artist = prov_artist_id
        # this is here for compatibility reasons only
        return Artist(
            artist,
            self.type,
            artist,
            provider_ids={
                MediaItemProviderId(artist, self.type, self.id, available=False)
            },
        )

    async def get_item(self, media_type: MediaType, prov_item_id: str) -> MediaItemType:
        """Get single MediaItem from provider."""
        if media_type == MediaType.ARTIST:
            return await self.get_artist(prov_item_id)
        if media_type == MediaType.TRACK:
            return await self.get_track(prov_item_id)
        if media_type == MediaType.RADIO:
            return await self.get_radio(prov_item_id)
        if media_type == MediaType.UNKNOWN:
            return await self.parse_item(prov_item_id)
        raise NotImplementedError

    async def parse_item(
        self, item_id_or_url: str, force_refresh: bool = False
    ) -> Track | Radio:
        """Parse plain URL to MediaItem of type Radio or Track."""
        item_id, url, media_info = await self._get_media_info(
            item_id_or_url, force_refresh
        )
        # an icy-name or missing duration indicates a live (radio) stream
        is_radio = media_info.get("icy-name") or not media_info.duration
        if is_radio:
            # treat as radio
            media_item = Radio(
                item_id=item_id,
                provider=self.type,
                name=media_info.get("icy-name") or media_info.title,
            )
        else:
            media_item = Track(
                item_id=item_id,
                provider=self.type,
                name=media_info.title,
                duration=int(media_info.duration or 0),
                artists=[
                    await self.get_artist(artist) for artist in media_info.artists
                ],
            )

        quality = MediaQuality.from_file_type(media_info.format)
        media_item.provider_ids = {
            MediaItemProviderId(item_id, self.type, self.id, quality=quality)
        }
        if media_info.has_cover_image:
            media_item.metadata.images = [MediaItemImage(ImageType.THUMB, url, True)]
        return media_item

    async def _get_media_info(
        self, item_id_or_url: str, force_refresh: bool = False
    ) -> Tuple[str, str, AudioTags]:
        """Retrieve (cached) mediainfo for url."""
        # check if the radio stream is not a playlist
        if (
            item_id_or_url.endswith("m3u8")
            or item_id_or_url.endswith("m3u")
            or item_id_or_url.endswith("pls")
        ):
            playlist = await fetch_playlist(self.mass, item_id_or_url)
            url = playlist[0]
            item_id = item_id_or_url
            self._full_url[item_id] = url
        elif "?" in item_id_or_url or "&" in item_id_or_url:
            # store the 'real' full url to be picked up later
            # this makes sure that we're not storing any temporary data like auth keys etc
            # a request for an url mediaitem always passes here first before streamdetails
            url = item_id_or_url
            item_id = item_id_or_url.split("?")[0].split("&")[0]
            self._full_url[item_id] = url
        else:
            url = self._full_url.get(item_id_or_url, item_id_or_url)
            item_id = item_id_or_url
        cache_key = f"{self.type.value}.media_info.{item_id}"
        # do we have some cached info for this url ?
        # (fix: skip the cache lookup entirely when a refresh is forced)
        cached_info = None if force_refresh else await self.mass.cache.get(cache_key)
        if cached_info:
            media_info = AudioTags.parse(cached_info)
        else:
            # parse info with ffprobe (and store in cache)
            media_info = await parse_tags(url)
            if "authSig" in url:
                # cover image retrieval would fail on signed urls
                media_info.has_cover_image = False
            await self.mass.cache.set(cache_key, media_info.raw)
        return (item_id, url, media_info)

    async def get_stream_details(self, item_id: str) -> StreamDetails | None:
        """Get streamdetails for a track/radio."""
        item_id, url, media_info = await self._get_media_info(item_id)
        is_radio = media_info.get("icy-name") or not media_info.duration
        return StreamDetails(
            provider=self.type,
            item_id=item_id,
            content_type=ContentType.try_parse(media_info.format),
            media_type=MediaType.RADIO if is_radio else MediaType.TRACK,
            sample_rate=media_info.sample_rate,
            bit_depth=media_info.bits_per_sample,
            # radio streams cannot be served directly (need icy handling)
            direct=None if is_radio else url,
            data=url,
        )

    async def get_audio_stream(
        self, streamdetails: StreamDetails, seek_position: int = 0
    ) -> AsyncGenerator[bytes, None]:
        """Return the audio stream for the provider item."""
        if streamdetails.media_type == MediaType.RADIO:
            # radio stream url
            async for chunk in get_radio_stream(
                self.mass, streamdetails.data, streamdetails
            ):
                yield chunk
        elif os.path.isfile(streamdetails.data):
            # local file
            async for chunk in get_file_stream(
                self.mass, streamdetails.data, streamdetails, seek_position
            ):
                yield chunk
        else:
            # regular stream url (without icy meta)
            async for chunk in get_http_stream(
                self.mass, streamdetails.data, streamdetails, seek_position
            ):
                yield chunk