channels=2,
)
-
-# CACHE categories
-
-CACHE_CATEGORY_DEFAULT: Final[int] = 0
-CACHE_CATEGORY_MUSIC_SEARCH: Final[int] = 1
-CACHE_CATEGORY_MUSIC_ALBUM_TRACKS: Final[int] = 2
-CACHE_CATEGORY_MUSIC_ARTIST_TRACKS: Final[int] = 3
-CACHE_CATEGORY_MUSIC_ARTIST_ALBUMS: Final[int] = 4
-CACHE_CATEGORY_MUSIC_PLAYLIST_TRACKS: Final[int] = 5
-CACHE_CATEGORY_MUSIC_PROVIDER_ITEM: Final[int] = 6
-CACHE_CATEGORY_PLAYER_QUEUE_STATE: Final[int] = 7
-CACHE_CATEGORY_MEDIA_INFO: Final[int] = 8
-CACHE_CATEGORY_LIBRARY_ITEMS: Final[int] = 9
-CACHE_CATEGORY_PLAYERS: Final[int] = 10
-CACHE_CATEGORY_RECOMMENDATIONS: Final[int] = 11
-CACHE_CATEGORY_OPEN_SUBSONIC: Final[int] = 12
-
-# CACHE base keys
-CACHE_KEY_PLAYER_POWER: Final[str] = "player_power"
-
-
# extra data / extra attributes keys
ATTR_FAKE_POWER: Final[str] = "fake_power"
ATTR_FAKE_VOLUME: Final[str] = "fake_volume_level"
import os
import time
from collections import OrderedDict
-from collections.abc import Callable, Iterator, MutableMapping
-from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar
+from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine, Iterator, MutableMapping
+from contextlib import asynccontextmanager
+from contextvars import ContextVar
+from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar, get_type_hints
from music_assistant_models.config_entries import ConfigEntry, ConfigValueType
from music_assistant_models.enums import ConfigEntryType
from music_assistant.constants import DB_TABLE_CACHE, DB_TABLE_SETTINGS, MASS_LOGGER_NAME
+from music_assistant.helpers.api import parse_value
from music_assistant.helpers.database import DatabaseConnection
from music_assistant.helpers.json import json_dumps, json_loads
from music_assistant.models.core_controller import CoreController
if TYPE_CHECKING:
from music_assistant_models.config_entries import CoreConfig
+ from music_assistant.models.provider import Provider
+
+
LOGGER = logging.getLogger(f"{MASS_LOGGER_NAME}.cache")
CONF_CLEAR_CACHE = "clear_cache"
-DB_SCHEMA_VERSION = 5
+DEFAULT_CACHE_EXPIRATION = 86400 * 30 # 30 days
+DB_SCHEMA_VERSION = 6
+
+BYPASS_CACHE: ContextVar[bool] = ContextVar("BYPASS_CACHE", default=False)
class CacheController(CoreController):
async def get(
self,
key: str,
- checksum: str | None = None,
- default=None,
+ provider: str = "default",
category: int = 0,
- base_key: str = "",
+ checksum: str | None = None,
+ default: Any = None,
+ allow_bypass: bool = True,
) -> Any:
"""Get object from cache and return the results.
- cache_key: the (unique) name of the cache object as reference
- checksum: optional argument to check if the checksum in the
- cacheobject matches the checksum provided
- category: optional category to group cache objects
- base_key: optional base key to group cache objects
+ - key: the (unique) lookup key of the cache object as reference
+ - provider: optional provider id to group cache objects
+ - category: optional category to group cache objects
+ - checksum: optional argument to check if the checksum in the
+ cache object matches the checksum provided
+ - default: value to return if no cache object is found
"""
- if not key:
- return None
+ assert key, "No key provided"
+ if allow_bypass and BYPASS_CACHE.get():
+ return default
cur_time = int(time.time())
if checksum is not None and not isinstance(checksum, str):
checksum = str(checksum)
-
# try memory cache first
- memory_key = f"{category}/{base_key}/{key}"
+ memory_key = f"{provider}/{category}/{key}"
cache_data = self._mem_cache.get(memory_key)
if cache_data and (not checksum or cache_data[1] == checksum) and cache_data[2] >= cur_time:
return cache_data[0]
# fall back to db cache
if (
db_row := await self.database.get_row(
- DB_TABLE_CACHE, {"category": category, "base_key": base_key, "sub_key": key}
+ DB_TABLE_CACHE, {"category": category, "provider": provider, "key": key}
)
) and (not checksum or (db_row["checksum"] == checksum and db_row["expires"] >= cur_time)):
try:
return default
async def set(
- self, key, data, checksum="", expiration=(86400 * 7), category: int = 0, base_key: str = ""
+ self,
+ key: str,
+ data: Any,
+ expiration: int = DEFAULT_CACHE_EXPIRATION,
+ provider: str = "default",
+ category: int = 0,
+ checksum: str | None = None,
+ persistent: bool = False,
) -> None:
- """Set data in cache."""
+ """
+ Set data in cache.
+
+ - key: the (unique) lookup key of the cache object as reference
+ - data: the actual data to store in the cache
+ - expiration: time in seconds the cache object should be valid
+ - provider: optional provider id to group cache objects
+ - category: optional category to group cache objects
+ - checksum: optional argument to store with the cache object
+ - persistent: if True the cache object will not be deleted when clearing the cache
+ """
if not key:
return
if checksum is not None and not isinstance(checksum, str):
checksum = str(checksum)
expires = int(time.time() + expiration)
- memory_key = f"{category}/{base_key}/{key}"
+ memory_key = f"{provider}/{category}/{key}"
self._mem_cache[memory_key] = (data, checksum, expires)
- if (expires - time.time()) < 3600 * 12:
+ if (expires - time.time()) < 1800:
# do not cache items in db with short expiration
return
data = await asyncio.to_thread(json_dumps, data)
DB_TABLE_CACHE,
{
"category": category,
- "base_key": base_key,
- "sub_key": key,
+ "provider": provider,
+ "key": key,
"expires": expires,
"checksum": checksum,
"data": data,
+ "persistent": persistent,
},
)
async def delete(
- self, key: str | None, category: int | None = None, base_key: str | None = None
+ self, key: str | None, category: int | None = None, provider: str | None = None
) -> None:
"""Delete data from cache."""
match: dict[str, str | int] = {}
if key is not None:
- match["sub_key"] = key
+ match["key"] = key
if category is not None:
match["category"] = category
- if base_key is not None:
- match["base_key"] = base_key
- if key is not None and category is not None and base_key is not None:
- self._mem_cache.pop(f"{category}/{base_key}/{key}", None)
+ if provider is not None:
+ match["provider"] = provider
+ if key is not None and category is not None and provider is not None:
+ self._mem_cache.pop(f"{provider}/{category}/{key}", None)
else:
self._mem_cache.clear()
await self.database.delete(DB_TABLE_CACHE, match)
async def clear(
self,
key_filter: str | None = None,
- category: int | None = None,
- base_key_filter: str | None = None,
+ category_filter: int | None = None,
+ provider_filter: str | None = None,
+ include_persistent: bool = False,
) -> None:
"""Clear all/partial items from cache."""
self._mem_cache.clear()
self.logger.info("Clearing database...")
query_parts: list[str] = []
- if category is not None:
- query_parts.append(f"category = {category}")
- if base_key_filter is not None:
- query_parts.append(f"base_key LIKE '%{base_key_filter}%'")
+ if category_filter is not None:
+ query_parts.append(f"category = {category_filter}")
+ if provider_filter is not None:
+ query_parts.append(f"provider LIKE '%{provider_filter}%'")
if key_filter is not None:
- query_parts.append(f"sub_key LIKE '%{key_filter}%'")
+ query_parts.append(f"key LIKE '%{key_filter}%'")
+ if not include_persistent:
+ query_parts.append("persistent = 0")
query = "WHERE " + " AND ".join(query_parts) if query_parts else None
await self.database.delete(DB_TABLE_CACHE, query=query)
self.logger.info("Clearing database DONE")
await asyncio.sleep(0) # yield to eventloop
self.logger.debug("Automatic cleanup finished (cleaned up %s records)", cleaned_records)
+ @asynccontextmanager
+ async def handle_refresh(self, bypass: bool) -> AsyncGenerator[None, None]:
+ """Handle the cache bypass."""
+ try:
+ token = BYPASS_CACHE.set(bypass)
+ yield None
+ finally:
+ BYPASS_CACHE.reset(token)
+
async def _setup_database(self) -> None:
"""Initialize database."""
db_path = os.path.join(self.mass.cache_path, "cache.db")
f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_CACHE}(
[id] INTEGER PRIMARY KEY AUTOINCREMENT,
[category] INTEGER NOT NULL DEFAULT 0,
- [base_key] TEXT NOT NULL,
- [sub_key] TEXT NOT NULL,
+ [key] TEXT NOT NULL,
+ [provider] TEXT NOT NULL,
[expires] INTEGER NOT NULL,
- [data] TEXT,
+ [data] TEXT NULL,
[checksum] TEXT NULL,
- UNIQUE(category, base_key, sub_key)
+ [persistent] INTEGER NOT NULL DEFAULT 0,
+ UNIQUE(category, key, provider)
)"""
)
f"ON {DB_TABLE_CACHE}(category);"
)
await self.database.execute(
- f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_base_key_idx "
- f"ON {DB_TABLE_CACHE}(base_key);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_key_idx ON {DB_TABLE_CACHE}(key);"
+ )
+ await self.database.execute(
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_provider_idx "
+ f"ON {DB_TABLE_CACHE}(provider);"
)
await self.database.execute(
- f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_sub_key_idx ON {DB_TABLE_CACHE}(sub_key);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_key_idx "
+ f"ON {DB_TABLE_CACHE}(category,key);"
)
await self.database.execute(
- f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_base_key_idx "
- f"ON {DB_TABLE_CACHE}(category,base_key);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_provider_idx "
+ f"ON {DB_TABLE_CACHE}(category,provider);"
)
await self.database.execute(
- f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_base_key_sub_key_idx "
- f"ON {DB_TABLE_CACHE}(category,base_key,sub_key);"
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_key_provider_idx "
+ f"ON {DB_TABLE_CACHE}(category,key,provider);"
+ )
+ await self.database.execute(
+ f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_key_provider_idx "
+ f"ON {DB_TABLE_CACHE}(key,provider);"
)
await self.database.commit()
RetType = TypeVar("RetType")
+ProviderT = TypeVar("ProviderT", bound="Provider | CoreController")
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
def use_cache(
- expiration: int = 86400 * 30,
+ expiration: int = DEFAULT_CACHE_EXPIRATION,
category: int = 0,
-) -> Callable[[Callable[Param, RetType]], Callable[Param, RetType]]:
+ persistent: bool = False,
+ cache_checksum: str | None = None,
+ allow_bypass: bool = True,
+) -> Callable[
+ [Callable[Concatenate[ProviderT, P], Awaitable[R]]],
+ Callable[Concatenate[ProviderT, P], Coroutine[Any, Any, R]],
+]:
"""Return decorator that can be used to cache a method's result."""
- def wrapper(func: Callable[Param, RetType]) -> Callable[Param, RetType]:
+ def _decorator(
+ func: Callable[Concatenate[ProviderT, P], Awaitable[R]],
+ ) -> Callable[Concatenate[ProviderT, P], Coroutine[Any, Any, R]]:
@functools.wraps(func)
- async def wrapped(*args: Param.args, **kwargs: Param.kwargs):
- method_class = args[0]
- method_class_name = method_class.__class__.__name__
- cache_base_key = f"{method_class_name}.{func.__name__}"
- cache_sub_key_parts = []
- skip_cache = kwargs.pop("skip_cache", False)
- cache_checksum = kwargs.pop("cache_checksum", "")
- if len(args) > 1:
- cache_sub_key_parts += args[1:]
- for key in sorted(kwargs.keys()):
- cache_sub_key_parts.append(f"{key}{kwargs[key]}")
- cache_sub_key = ".".join(cache_sub_key_parts)
+ async def wrapper(self: ProviderT, *args: P.args, **kwargs: P.kwargs) -> R:
+ cache = self.mass.cache
+ provider_id = getattr(self, "provider_id", self.domain)
- cachedata = await method_class.cache.get(
- cache_sub_key, checksum=cache_checksum, category=category, base_key=cache_base_key
+ # create a cache key dynamically based on the (remaining) args/kwargs
+ cache_key_parts = [func.__name__, *args]
+ for key in sorted(kwargs.keys()):
+ cache_key_parts.append(f"{key}{kwargs[key]}")
+ cache_key = ".".join(map(str, cache_key_parts))
+ # try to retrieve data from the cache
+ cachedata = await cache.get(
+ cache_key,
+ provider=provider_id,
+ checksum=cache_checksum,
+ category=category,
+ allow_bypass=allow_bypass,
)
-
- if not skip_cache and cachedata is not None:
- return cachedata
- result = await func(*args, **kwargs)
- asyncio.create_task(
- method_class.cache.set(
- cache_sub_key,
- result,
+ if cachedata is not None:
+ type_hints = get_type_hints(func)
+ return parse_value(func.__name__, cachedata, type_hints["return"])
+ # get data from method/provider
+ result = await func(self, *args, **kwargs)
+ # store result in cache (but don't await)
+ self.mass.create_task(
+ cache.set(
+ key=cache_key,
+ data=result,
expiration=expiration,
- checksum=cache_checksum,
+ provider=provider_id,
category=category,
- base_key=cache_base_key,
+ checksum=cache_checksum,
+ persistent=persistent,
)
)
return result
- return wrapped
+ return wrapper
- return wrapper
+ return _decorator
class MemoryCache(MutableMapping):
import contextlib
from collections.abc import Iterable
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
from music_assistant_models.enums import AlbumType, MediaType, ProviderFeature
from music_assistant_models.errors import InvalidDataError, MediaNotFoundError, MusicAssistantError
from music_assistant_models.media_items import Album, Artist, ItemMapping, Track, UniqueList
-from music_assistant.constants import (
- CACHE_CATEGORY_MUSIC_ALBUM_TRACKS,
- CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
- DB_TABLE_ALBUM_ARTISTS,
- DB_TABLE_ALBUM_TRACKS,
- DB_TABLE_ALBUMS,
-)
+from music_assistant.constants import DB_TABLE_ALBUM_ARTISTS, DB_TABLE_ALBUM_TRACKS, DB_TABLE_ALBUMS
from music_assistant.controllers.media.base import MediaControllerBase
from music_assistant.helpers.compare import (
compare_album,
self, item_id: str, provider_instance_id_or_domain: str
) -> list[Track]:
"""Return album tracks for the given provider album id."""
- prov: MusicProvider = self.mass.get_provider(provider_instance_id_or_domain)
- if prov is None:
- return []
- # prefer cache items (if any) - for streaming providers only
- cache_category = CACHE_CATEGORY_MUSIC_ALBUM_TRACKS
- cache_base_key = prov.lookup_key
- cache_key = item_id
- if (
- prov.is_streaming_provider
- and (
- cache := await self.mass.cache.get(
- cache_key, category=cache_category, base_key=cache_base_key
- )
- )
- is not None
- ):
- return [Track.from_dict(x) for x in cache]
- # no items in cache - get listing from provider
- items = await prov.get_album_tracks(item_id)
- # store (serializable items) in cache
- if prov.is_streaming_provider:
- self.mass.create_task(
- self.mass.cache.set(
- cache_key,
- [x.to_dict() for x in items],
- category=cache_category,
- base_key=cache_base_key,
- ),
- )
- for item in items:
- # if this is a complete track object, pre-cache it as
- # that will save us an (expensive) lookup later
- if item.image and item.artist_str and item.album and prov.domain != "builtin":
- await self.mass.cache.set(
- f"track.{item_id}",
- item.to_dict(),
- category=CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
- base_key=prov.lookup_key,
- )
- return items
+ if prov := self.mass.get_provider(provider_instance_id_or_domain):
+ prov = cast("MusicProvider", prov)
+ return await prov.get_album_tracks(item_id)
+ return []
async def radio_mode_base_tracks(
self,
import asyncio
import contextlib
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
from music_assistant_models.enums import AlbumType, MediaType, ProviderFeature
from music_assistant_models.errors import (
from music_assistant_models.media_items import Album, Artist, ItemMapping, Track, UniqueList
from music_assistant.constants import (
- CACHE_CATEGORY_MUSIC_ARTIST_ALBUMS,
- CACHE_CATEGORY_MUSIC_ARTIST_TRACKS,
- CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
DB_TABLE_ALBUM_ARTISTS,
DB_TABLE_ARTISTS,
DB_TABLE_TRACK_ARTISTS,
provider_instance_id_or_domain: str,
) -> list[Track]:
"""Return top tracks for an artist on given provider."""
- items = []
assert provider_instance_id_or_domain != "library"
- prov = self.mass.get_provider(provider_instance_id_or_domain)
- if prov is None:
+ if not (prov := self.mass.get_provider(provider_instance_id_or_domain)):
return []
- # prefer cache items (if any) - for streaming providers
- cache_category = CACHE_CATEGORY_MUSIC_ARTIST_TRACKS
- cache_base_key = prov.lookup_key
- cache_key = item_id
- if (
- prov.is_streaming_provider
- and (
- cache := await self.mass.cache.get(
- cache_key, category=cache_category, base_key=cache_base_key
- )
- )
- is not None
- ):
- return [Track.from_dict(x) for x in cache]
- # no items in cache - get listing from provider
+ prov = cast("MusicProvider", prov)
if ProviderFeature.ARTIST_TOPTRACKS in prov.supported_features:
- items = await prov.get_artist_toptracks(item_id)
- for item in items:
- # if this is a complete track object, pre-cache it as
- # that will save us an (expensive) lookup later
- if item.image and item.artist_str and item.album and prov.domain != "builtin":
- await self.mass.cache.set(
- f"track.{item_id}",
- item.to_dict(),
- category=CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
- base_key=prov.lookup_key,
- )
- else:
- # fallback implementation using the db
- if db_artist := await self.mass.music.artists.get_library_item_by_prov_id(
- item_id,
- provider_instance_id_or_domain,
- ):
- artist_id = db_artist.item_id
- subquery = (
- f"SELECT track_id FROM {DB_TABLE_TRACK_ARTISTS} WHERE artist_id = {artist_id}"
- )
- query = f"tracks.item_id in ({subquery})"
- return await self.mass.music.tracks._get_library_items_by_query(
- extra_query_parts=[query], provider=provider_instance_id_or_domain
- )
- # store (serializable items) in cache
- if prov.is_streaming_provider:
- self.mass.create_task(
- self.mass.cache.set(
- cache_key,
- [x.to_dict() for x in items],
- category=cache_category,
- base_key=cache_base_key,
- )
+ return await prov.get_artist_toptracks(item_id)
+ # fallback implementation using the library db
+ if db_artist := await self.mass.music.artists.get_library_item_by_prov_id(
+ item_id,
+ provider_instance_id_or_domain,
+ ):
+ artist_id = db_artist.item_id
+ subquery = (
+ f"SELECT track_id FROM {DB_TABLE_TRACK_ARTISTS} WHERE artist_id = {artist_id}"
+ )
+ query = f"tracks.item_id in ({subquery})"
+ return await self.mass.music.tracks._get_library_items_by_query(
+ extra_query_parts=[query], provider=provider_instance_id_or_domain
)
- return items
+ return []
async def get_library_artist_tracks(
self,
provider_instance_id_or_domain: str,
) -> list[Album]:
"""Return albums for an artist on given provider."""
- items = []
assert provider_instance_id_or_domain != "library"
- prov = self.mass.get_provider(provider_instance_id_or_domain)
+ if not (prov := self.mass.get_provider(provider_instance_id_or_domain)):
+ return []
+ prov = cast("MusicProvider", prov)
if prov is None:
return []
- # prefer cache items (if any)
- cache_category = CACHE_CATEGORY_MUSIC_ARTIST_ALBUMS
- cache_base_key = prov.lookup_key
- cache_key = item_id
- if (
- prov.is_streaming_provider
- and (
- cache := await self.mass.cache.get(
- cache_key, category=cache_category, base_key=cache_base_key
- )
- )
- is not None
- ):
- return [Album.from_dict(x) for x in cache]
- # no items in cache - get listing from provider
if ProviderFeature.ARTIST_ALBUMS in prov.supported_features:
- items = await prov.get_artist_albums(item_id)
- else:
- # fallback implementation using the db
- # ruff: noqa: PLR5501
- if db_artist := await self.mass.music.artists.get_library_item_by_prov_id(
- item_id,
- provider_instance_id_or_domain,
- ):
- artist_id = db_artist.item_id
- subquery = (
- f"SELECT album_id FROM {DB_TABLE_ALBUM_ARTISTS} WHERE artist_id = {artist_id}"
- )
- query = f"albums.item_id in ({subquery})"
- return await self.mass.music.albums._get_library_items_by_query(
- extra_query_parts=[query], provider=provider_instance_id_or_domain
- )
-
- # store (serializable items) in cache
- if prov.is_streaming_provider:
- self.mass.create_task(
- self.mass.cache.set(
- cache_key,
- [x.to_dict() for x in items],
- category=cache_category,
- base_key=cache_base_key,
- )
+ return await prov.get_artist_albums(item_id)
+ # fallback implementation using the db
+ if db_artist := await self.mass.music.artists.get_library_item_by_prov_id(
+ item_id,
+ provider_instance_id_or_domain,
+ ):
+ artist_id = db_artist.item_id
+ subquery = (
+ f"SELECT album_id FROM {DB_TABLE_ALBUM_ARTISTS} WHERE artist_id = {artist_id}"
+ )
+ query = f"albums.item_id in ({subquery})"
+ return await self.mass.music.albums._get_library_items_by_query(
+ extra_query_parts=[query], provider=provider_instance_id_or_domain
)
- return items
+ return []
async def get_library_artist_albums(
self,
from abc import ABCMeta, abstractmethod
from collections.abc import Iterable
from contextlib import suppress
-from typing import TYPE_CHECKING, Any, TypeVar
+from typing import TYPE_CHECKING, Any, TypeVar, cast
from music_assistant_models.enums import EventType, ExternalID, MediaType, ProviderFeature
from music_assistant_models.errors import MediaNotFoundError, ProviderUnavailableError
-from music_assistant_models.media_items import (
- Album,
- ItemMapping,
- MediaItemType,
- ProviderMapping,
- SearchResults,
- Track,
-)
+from music_assistant_models.media_items import ItemMapping, MediaItemType, ProviderMapping, Track
-from music_assistant.constants import (
- CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
- CACHE_CATEGORY_MUSIC_SEARCH,
- DB_TABLE_PLAYLOG,
- DB_TABLE_PROVIDER_MAPPINGS,
- MASS_LOGGER_NAME,
-)
+from music_assistant.constants import DB_TABLE_PLAYLOG, DB_TABLE_PROVIDER_MAPPINGS, MASS_LOGGER_NAME
from music_assistant.helpers.compare import compare_media_item, create_safe_string
from music_assistant.helpers.json import json_loads, serialize_to_json
+from music_assistant.helpers.util import guard_single_request
if TYPE_CHECKING:
from collections.abc import AsyncGenerator, Mapping
from music_assistant import MusicAssistant
+ from music_assistant.models import MusicProvider
ItemCls = TypeVar("ItemCls", bound="MediaItemType")
search_query = search_query.replace("/", " ").replace("'", "")
if provider_instance_id_or_domain == "library":
return await self.library_items(search=search_query, limit=limit)
- prov = self.mass.get_provider(provider_instance_id_or_domain)
- if prov is None:
+ if not (prov := self.mass.get_provider(provider_instance_id_or_domain)):
return []
+ prov = cast("MusicProvider", prov)
if ProviderFeature.SEARCH not in prov.supported_features:
return []
if not prov.library_supported(self.media_type):
# assume library supported also means that this mediatype is supported
return []
-
- # prefer cache items (if any)
- cache_category = CACHE_CATEGORY_MUSIC_SEARCH
- cache_base_key = prov.lookup_key
- cache_key = f"{search_query}.{limit}.{self.media_type.value}"
- if (
- cache := await self.mass.cache.get(
- cache_key, category=cache_category, base_key=cache_base_key
- )
- ) is not None:
- searchresult = SearchResults.from_dict(cache)
- else:
- # no items in cache - get listing from provider
- searchresult = await prov.search(
- search_query,
- [self.media_type],
- limit,
- )
- if self.media_type == MediaType.ARTIST:
- items = searchresult.artists
- elif self.media_type == MediaType.ALBUM:
- items = searchresult.albums
- elif self.media_type == MediaType.TRACK:
- items = searchresult.tracks
- elif self.media_type == MediaType.PLAYLIST:
- items = searchresult.playlists
- else:
- items = searchresult.radio
- # store (serializable items) in cache
- if prov.is_streaming_provider: # do not cache filesystem results
- self.mass.create_task(
- self.mass.cache.set(
- cache_key,
- searchresult.to_dict(),
- expiration=86400 * 7,
- category=cache_category,
- base_key=cache_base_key,
- ),
- )
- return items
+ searchresult = await prov.search(
+ search_query,
+ [self.media_type],
+ limit,
+ )
+ match self.media_type:
+ case MediaType.ARTIST:
+ return searchresult.artists
+ case MediaType.ALBUM:
+ return searchresult.albums
+ case MediaType.TRACK:
+ return searchresult.tracks
+ case MediaType.PLAYLIST:
+ return searchresult.playlists
+ case MediaType.AUDIOBOOK:
+ return searchresult.audiobooks
+ case MediaType.PODCAST:
+ return searchresult.podcasts
+ case MediaType.RADIO:
+ return searchresult.radio
+ case _:
+ return []
async def get_provider_mapping(self, item: ItemCls) -> tuple[str, str]:
"""Return (first) provider and item id."""
library_item = await self.get_library_item(db_id)
self.mass.signal_event(EventType.MEDIA_ITEM_UPDATED, library_item.uri, library_item)
+ @guard_single_request
async def get_provider_item(
self,
item_id: str,
return await self.get_library_item(item_id)
if not (provider := self.mass.get_provider(provider_instance_id_or_domain)):
raise ProviderUnavailableError(f"{provider_instance_id_or_domain} is not available")
-
- cache_category = CACHE_CATEGORY_MUSIC_PROVIDER_ITEM
- cache_base_key = provider.lookup_key
- cache_key = f"{self.media_type.value}.{item_id}"
- if not force_refresh and (
- cache := await self.mass.cache.get(
- cache_key, category=cache_category, base_key=cache_base_key
- )
- ):
- return self.item_cls.from_dict(cache)
if provider := self.mass.get_provider(provider_instance_id_or_domain):
+ provider = cast("MusicProvider", provider)
with suppress(MediaNotFoundError):
- if item := await provider.get_item(self.media_type, item_id):
- await self.mass.cache.set(
- cache_key,
- item.to_dict(),
- category=cache_category,
- base_key=cache_base_key,
- )
- return item
+ async with self.mass.cache.handle_refresh(force_refresh):
+ return await provider.get_item(self.media_type, item_id)
# if we reach this point all possibilities failed and the item could not be found.
# There is a possibility that the (streaming) provider changed the id of the item
# so we return the previous details (if we have any) marked as unavailable, so
fallback = fallback or await self.get_library_item_by_prov_id(
item_id, provider_instance_id_or_domain
)
- if fallback and not (isinstance(fallback, ItemMapping) and self.item_cls in (Track, Album)):
+ if (
+ fallback
+ and isinstance(fallback, ItemMapping)
+ and (fallback_provider := self.mass.get_provider(fallback.provider))
+ ):
+ # fallback is a ItemMapping, try to convert to full item
+ with suppress(LookupError, TypeError, ValueError):
+ return self.item_cls.from_dict(
+ {
+ **fallback.to_dict(),
+ "provider_mappings": [
+ {
+ "item_id": fallback.item_id,
+ "provider_domain": fallback_provider.domain,
+ "provider_instance": fallback_provider.instance_id,
+ "available": fallback.available,
+ }
+ ],
+ }
+ )
+ if fallback:
# simply return the fallback item
- # NOTE: we only accept ItemMapping as fallback for flat items
- # so not for tracks and albums (which rely on other objects)
return fallback
# all options exhausted, we really can not find this item
msg = (
join_parts.append(
f"JOIN provider_mappings ON provider_mappings.item_id = {self.db_table}.item_id "
f"AND provider_mappings.media_type = '{self.media_type.value}' "
+ "AND provider_mappings.in_library = 1 "
f"AND (provider_mappings.provider_instance = '{provider}' "
f"OR provider_mappings.provider_domain = '{provider}')"
)
from __future__ import annotations
-import time
from collections.abc import AsyncGenerator
-from typing import Any
+from typing import cast
from music_assistant_models.enums import MediaType, ProviderFeature
from music_assistant_models.errors import (
)
from music_assistant_models.media_items import Playlist, Track
-from music_assistant.constants import (
- CACHE_CATEGORY_MUSIC_PLAYLIST_TRACKS,
- CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
- DB_TABLE_PLAYLISTS,
-)
+from music_assistant.constants import DB_TABLE_PLAYLISTS
from music_assistant.helpers.compare import create_safe_string
from music_assistant.helpers.json import serialize_to_json
from music_assistant.helpers.uri import create_uri, parse_uri
+from music_assistant.helpers.util import guard_single_request
from music_assistant.models.music_provider import MusicProvider
from .base import MediaControllerBase
force_refresh: bool = False,
) -> AsyncGenerator[Track, None]:
"""Return playlist tracks for the given provider playlist id."""
- playlist = await self.get(
- item_id,
- provider_instance_id_or_domain,
- )
- # a playlist can only have one provider so simply pick the first one
- prov_map = next(x for x in playlist.provider_mappings)
- cache_checksum = playlist.cache_checksum
+ if provider_instance_id_or_domain == "library":
+ library_item = await self.get_library_item(item_id)
+ # a playlist can only have one provider so simply pick the first one
+ prov_map = next(x for x in library_item.provider_mappings)
+ item_id = prov_map.item_id
+ provider_instance_id_or_domain = prov_map.provider_instance
# playlist tracks are not stored in the db,
# we always fetched them (cached) from the provider
page = 0
while True:
tracks = await self._get_provider_playlist_tracks(
- prov_map.item_id,
- prov_map.provider_instance,
- cache_checksum=cache_checksum,
+ item_id,
+ provider_instance_id_or_domain,
page=page,
force_refresh=force_refresh,
)
# actually add the tracks to the playlist on the provider
await playlist_prov.add_playlist_tracks(playlist_prov_map.item_id, list(ids_to_add))
# invalidate cache so tracks get refreshed
- playlist.cache_checksum = str(time.time())
+ self._refresh_playlist_tracks(playlist)
await self.update_item_in_library(db_playlist_id, playlist)
async def add_playlist_track(self, db_playlist_id: str | int, track_uri: str) -> None:
)
continue
await provider.remove_playlist_tracks(prov_mapping.item_id, positions_to_remove)
- # invalidate cache so tracks get refreshed
- playlist.cache_checksum = str(time.time())
+
await self.update_item_in_library(db_playlist_id, playlist)
async def _add_library_item(self, item: Playlist) -> int:
"favorite": item.favorite,
"metadata": serialize_to_json(item.metadata),
"external_ids": serialize_to_json(item.external_ids),
- "cache_checksum": item.cache_checksum,
"search_name": create_safe_string(item.name, True, True),
"search_sort_name": create_safe_string(item.sort_name, True, True),
},
"external_ids": serialize_to_json(
update.external_ids if overwrite else cur_item.external_ids
),
- "cache_checksum": update.cache_checksum or cur_item.cache_checksum,
"search_name": create_safe_string(name, True, True),
"search_sort_name": create_safe_string(sort_name, True, True),
},
await self.set_provider_mappings(db_id, provider_mappings, overwrite)
self.logger.debug("updated %s in database: (id %s)", update.name, db_id)
+ @guard_single_request
async def _get_provider_playlist_tracks(
self,
item_id: str,
provider_instance_id_or_domain: str,
- cache_checksum: Any = None,
page: int = 0,
force_refresh: bool = False,
) -> list[Track]:
"""Return playlist tracks for the given provider playlist id."""
assert provider_instance_id_or_domain != "library"
- provider: MusicProvider = self.mass.get_provider(provider_instance_id_or_domain)
- if not provider:
+ if not (provider := self.mass.get_provider(provider_instance_id_or_domain)):
return []
- # prefer cache items (if any)
- cache_category = CACHE_CATEGORY_MUSIC_PLAYLIST_TRACKS
- cache_base_key = provider.lookup_key
- cache_key = f"{item_id}.{page}"
- if (
- not force_refresh
- and (
- cache := await self.mass.cache.get(
- cache_key,
- checksum=cache_checksum,
- category=cache_category,
- base_key=cache_base_key,
- )
- )
- is not None
- ):
- return [Track.from_dict(x) for x in cache]
- # no items in cache (or force_refresh) - get listing from provider
- items = await provider.get_playlist_tracks(item_id, page=page)
- # store (serializable items) in cache
- self.mass.create_task(
- self.mass.cache.set(
- cache_key,
- [x.to_dict() for x in items],
- checksum=cache_checksum,
- category=cache_category,
- base_key=cache_base_key,
- )
- )
- for item in items:
- # if this is a complete track object, pre-cache it as
- # that will save us an (expensive) lookup later
- if item.image and item.artist_str and item.album and provider.domain != "builtin":
- await self.mass.cache.set(
- f"track.{item_id}",
- item.to_dict(),
- category=CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
- base_key=provider.lookup_key,
- )
- return items
+ provider = cast("MusicProvider", provider)
+ async with self.mass.cache.handle_refresh(force_refresh):
+ return await provider.get_playlist_tracks(item_id, page=page)
async def radio_mode_base_tracks(
self,
This is used to link objects of different providers/qualities together.
"""
raise NotImplementedError
+
+ def _refresh_playlist_tracks(self, playlist: Playlist) -> None:
+ """Refresh playlist tracks by forcing a cache refresh."""
+
+ async def _refresh(playlist: Playlist) -> None:
+ # simply iterate all tracks with force_refresh=True to refresh the cache
+ async for _ in self.tracks(playlist.item_id, playlist.provider, force_refresh=True):
+ pass
+
+ task_id = f"refresh_playlist_tracks_{playlist.item_id}"
+ self.mass.call_later(5, _refresh, playlist, task_id=task_id) # debounce multiple calls
self._lookup_jobs: MetadataLookupQueue = MetadataLookupQueue(100)
self._lookup_task: asyncio.Task[None] | None = None
self._throttler = Throttler(1, 30)
- self._missing_metadata_scan_task: asyncio.Task[None] | None = None
async def get_config_entries(
self,
self.mass.streams.register_dynamic_route("/imageproxy", self.handle_imageproxy)
# the lookup task is used to process metadata lookup jobs
self._lookup_task = self.mass.create_task(self._process_metadata_lookup_jobs())
- # just tun the scan for missing metadata once at startup
- # TODO: allows to enable/disable this in the UI and configure interval/time
- self._missing_metadata_scan_task = self.mass.create_task(self._scan_missing_metadata())
+ # schedule the background scan for missing metadata
+ # (delayed after startup; it reschedules itself periodically)
+ self.mass.call_later(300, self._scan_missing_metadata)
# migrate old image path for collage images from absolute to relative
# TODO: remove this after 2.5+ release
old_path = f"{self.mass.storage_path}/collage_images/"
"""Handle logic on server stop."""
if self._lookup_task and not self._lookup_task.done():
self._lookup_task.cancel()
- if self._missing_metadata_scan_task and not self._missing_metadata_scan_task.done():
- self._missing_metadata_scan_task.cancel()
self.mass.streams.unregister_dynamic_route("/imageproxy")
@property
self, item: str | MediaItemType, force_refresh: bool = False
) -> MediaItemType:
"""Get/update extra/enhanced metadata for/on given MediaItem."""
- if isinstance(item, str):
- retrieved_item = await self.mass.music.get_item_by_uri(item)
- if isinstance(retrieved_item, BrowseFolder):
- raise TypeError("Cannot update metadata on a BrowseFolder item.")
- item = retrieved_item
-
- if item.provider != "library":
- # this shouldn't happen but just in case.
- raise RuntimeError("Metadata can only be updated for library items")
-
- # just in case it was in the queue, prevent duplicate lookups
- if item.uri:
- self._lookup_jobs.pop(item.uri)
- async with self._throttler:
- if item.media_type == MediaType.ARTIST:
- await self._update_artist_metadata(
- cast("Artist", item), force_refresh=force_refresh
- )
- if item.media_type == MediaType.ALBUM:
- await self._update_album_metadata(cast("Album", item), force_refresh=force_refresh)
- if item.media_type == MediaType.TRACK:
- await self._update_track_metadata(cast("Track", item), force_refresh=force_refresh)
- if item.media_type == MediaType.PLAYLIST:
- await self._update_playlist_metadata(
- cast("Playlist", item), force_refresh=force_refresh
- )
- return item
+ async with self.mass.cache.handle_refresh(force_refresh):
+ if isinstance(item, str):
+ retrieved_item = await self.mass.music.get_item_by_uri(item)
+ if isinstance(retrieved_item, BrowseFolder):
+ raise TypeError("Cannot update metadata on a BrowseFolder item.")
+ item = retrieved_item
+
+ if item.provider != "library":
+ # this shouldn't happen but just in case.
+ raise RuntimeError("Metadata can only be updated for library items")
+
+ # just in case it was in the queue, prevent duplicate lookups
+ if item.uri:
+ self._lookup_jobs.pop(item.uri)
+ async with self._throttler:
+ if item.media_type == MediaType.ARTIST:
+ await self._update_artist_metadata(
+ cast("Artist", item), force_refresh=force_refresh
+ )
+ if item.media_type == MediaType.ALBUM:
+ await self._update_album_metadata(
+ cast("Album", item), force_refresh=force_refresh
+ )
+ if item.media_type == MediaType.TRACK:
+ await self._update_track_metadata(
+ cast("Track", item), force_refresh=force_refresh
+ )
+ if item.media_type == MediaType.PLAYLIST:
+ await self._update_playlist_metadata(
+ cast("Playlist", item), force_refresh=force_refresh
+ )
+ return item
def schedule_update_metadata(self, uri: str) -> None:
"""Schedule metadata update for given MediaItem uri."""
await asyncio.sleep(60)
while True:
item_uri = await self._lookup_jobs.get()
+ self.logger.debug("Processing metadata lookup for %s", item_uri)
try:
item = await self.mass.music.get_item_by_uri(item_uri)
# Type check to ensure it's a valid MediaItemType
f"OR json_extract({DB_TABLE_ARTISTS}.metadata,'$.images') = '[]')"
)
for artist in await self.mass.music.artists.library_items(
- limit=25, order_by="random", extra_query=query
+ limit=5, order_by="random", extra_query=query
):
if artist.uri:
self.schedule_update_metadata(artist.uri)
+ await asyncio.sleep(30)
# Scan for missing album images
self.logger.debug("Start lookup for missing album images...")
):
if album.uri:
self.schedule_update_metadata(album.uri)
+ await asyncio.sleep(30)
# Force refresh playlist metadata every refresh interval
# this will e.g. update the playlist image and genres if the tracks have changed
f"OR json_extract({DB_TABLE_PLAYLISTS}.metadata,'$.last_refresh') < {timestamp}"
)
for playlist in await self.mass.music.playlists.library_items(
- limit=10, order_by="random", extra_query=query
+ limit=5, order_by="random", extra_query=query
):
if playlist.uri:
self.schedule_update_metadata(playlist.uri)
+ await asyncio.sleep(30)
+
+ # reschedule next scan
+ self.mass.call_later(PERIODIC_SCAN_INTERVAL, self._scan_missing_metadata)
class MetadataLookupQueue(asyncio.Queue[str]):
from music_assistant_models.unique_list import UniqueList
from music_assistant.constants import (
- CACHE_CATEGORY_MUSIC_SEARCH,
CONF_ENTRY_LIBRARY_EXPORT_ADD,
CONF_ENTRY_LIBRARY_EXPORT_REMOVE,
DB_TABLE_ALBUM_ARTISTS,
DEFAULT_SYNC_INTERVAL = 12 * 60 # default sync interval in minutes
CONF_SYNC_INTERVAL = "sync_interval"
CONF_DELETED_PROVIDERS = "deleted_providers"
-DB_SCHEMA_VERSION: Final[int] = 19
+DB_SCHEMA_VERSION: Final[int] = 20
+
+CACHE_CATEGORY_LAST_SYNC: Final[int] = 9
class MusicController(CoreController):
# create safe search string
search_query = search_query.replace("/", " ").replace("'", "")
-
- # prefer cache items (if any)
- media_types_str = ",".join(media_types)
- cache_category = CACHE_CATEGORY_MUSIC_SEARCH
- cache_base_key = prov.lookup_key
- cache_key = f"{search_query}.{limit}.{media_types_str}"
-
- if prov.is_streaming_provider and (
- cache := await self.mass.cache.get(
- cache_key, category=cache_category, base_key=cache_base_key
- )
- ):
- return SearchResults.from_dict(cache)
- # no items in cache - get listing from provider
- result = await prov.search(
+ return await prov.search(
search_query,
media_types,
limit,
)
- # store (serializable items) in cache
- if prov.is_streaming_provider:
- self.mass.create_task(
- self.mass.cache.set(
- cache_key,
- result.to_dict(),
- expiration=86400 * 7,
- category=cache_category,
- base_key=cache_base_key,
- )
- )
- return result
async def search_library(
self,
else:
self.logger.info("Sync task for %s/%ss completed", provider.name, media_type.value)
self.mass.signal_event(EventType.SYNC_TASKS_UPDATED, data=self.in_progress_syncs)
- cache_key = f"last_library_sync_{provider.instance_id}_{media_type.value}"
- self.mass.create_task(self.mass.cache.set, cache_key, self.mass.loop.time())
+ self.mass.create_task(
+ self.mass.cache.set(
+ key=media_type.value,
+ data=self.mass.loop.time(),
+ provider=provider.instance_id,
+ category=CACHE_CATEGORY_LAST_SYNC,
+ )
+ )
# schedule db cleanup after sync
if not self.in_progress_syncs:
self.mass.create_task(self._cleanup_database())
if is_initial:
# schedule the first sync run
- cache_key = f"last_library_sync_{provider.instance_id}_{media_type.value}"
initial_interval = 10
- if last_sync := await self.mass.cache.get(cache_key):
+ if last_sync := await self.mass.cache.get(
+ key=media_type.value,
+ provider=provider.instance_id,
+ category=CACHE_CATEGORY_LAST_SYNC,
+ ):
initial_interval += max(0, sync_interval - (self.mass.loop.time() - last_sync))
sync_interval = initial_interval
async def __migrate_database(self, prev_version: int) -> None:
"""Perform a database migration."""
- # ruff: noqa: PLR0915
self.logger.info(
"Migrating database from version %s to %s", prev_version, DB_SCHEMA_VERSION
)
- if prev_version < 7:
+ if prev_version < 15:
raise MusicAssistantError("Database schema version too old to migrate")
- if prev_version <= 7:
- # remove redundant artists and provider_mappings columns
- for table in (
- DB_TABLE_TRACKS,
- DB_TABLE_ALBUMS,
- DB_TABLE_ARTISTS,
- DB_TABLE_RADIOS,
- DB_TABLE_PLAYLISTS,
- ):
- for column in ("artists", "provider_mappings"):
- try:
- await self.database.execute(f"ALTER TABLE {table} DROP COLUMN {column}")
- except Exception as err:
- if "no such column" in str(err):
- continue
- raise
- # add cache_checksum column to playlists
- try:
- await self.database.execute(
- f"ALTER TABLE {DB_TABLE_PLAYLISTS} ADD COLUMN cache_checksum TEXT DEFAULT ''"
- )
- except Exception as err:
- if "duplicate column" not in str(err):
- raise
-
- if prev_version <= 8:
- # migrate track_loudness --> loudness_measurements
- async for db_row in self.database.iter_items("track_loudness"):
- if db_row["integrated"] == inf or db_row["integrated"] == -inf:
- continue
- if db_row["provider"] in ("radiobrowser", "tunein"):
- continue
- await self.database.insert_or_replace(
- DB_TABLE_LOUDNESS_MEASUREMENTS,
- {
- "item_id": db_row["item_id"],
- "media_type": "track",
- "provider": db_row["provider"],
- "loudness": db_row["integrated"],
- },
- )
- await self.database.execute("DROP TABLE IF EXISTS track_loudness")
-
- if prev_version <= 10:
- # Recreate playlog table due to complete new layout
- await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_PLAYLOG}")
- await self.__create_database_tables()
-
- if prev_version <= 12:
- # Need to drop the NOT NULL requirement on podcasts.publisher and audiobooks.publisher
- # However, because there is no ALTER COLUMN support in sqlite, we will need
- # to create the tables again.
- await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_AUDIOBOOKS}")
- await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_PODCASTS}")
- await self.__create_database_tables()
-
- if prev_version <= 13:
- # migrate chapters in metadata
- # this is leftover mess from the old chapters implementation
- for db_row in await self.database.search(DB_TABLE_TRACKS, "position_start", "metadata"):
- metadata = json_loads(db_row["metadata"])
- metadata["chapters"] = None
- await self.database.update(
- DB_TABLE_TRACKS,
- {"item_id": db_row["item_id"]},
- {"metadata": serialize_to_json(metadata)},
- )
-
- if prev_version <= 14:
- # Recreate playlog table due to complete new layout
- await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_PLAYLOG}")
- await self.__create_database_tables()
-
if prev_version <= 15:
# add search_name and search_sort_name columns to all tables
# and populate them with the name and sort_name values
"WHERE provider_domain in ('filesystem_local', 'filesystem_smb');"
)
+ if prev_version <= 19:
+ # drop column cache_checksum from playlists table
+ # this is no longer used and is a leftover from previous designs
+ await self.database.execute(
+ f"ALTER TABLE {DB_TABLE_PLAYLISTS} DROP COLUMN cache_checksum"
+ )
+
# save changes
await self.database.commit()
[sort_name] TEXT NOT NULL,
[owner] TEXT NOT NULL,
[is_editable] BOOLEAN NOT NULL,
- [cache_checksum] TEXT DEFAULT '',
[favorite] BOOLEAN NOT NULL DEFAULT 0,
[metadata] json NOT NULL,
[external_ids] json NOT NULL,
from music_assistant.constants import (
ATTR_ANNOUNCEMENT_IN_PROGRESS,
- CACHE_CATEGORY_PLAYER_QUEUE_STATE,
CONF_CROSSFADE,
CONF_FLOW_MODE,
MASS_LOGO_ONLINE,
CONF_DEFAULT_ENQUEUE_OPTION_FOLDER = "default_enqueue_option_folder"
CONF_DEFAULT_ENQUEUE_OPTION_UNKNOWN = "default_enqueue_option_unknown"
RADIO_TRACK_MAX_DURATION_SECS = 20 * 60 # 20 minutes
+CACHE_CATEGORY_PLAYER_QUEUE_STATE = 0
+CACHE_CATEGORY_PLAYER_QUEUE_ITEMS = 1
class CompareState(TypedDict):
queue = None
# try to restore previous state
if prev_state := await self.mass.cache.get(
- "state", category=CACHE_CATEGORY_PLAYER_QUEUE_STATE, base_key=queue_id
+ key=queue_id, provider=self.domain, category=CACHE_CATEGORY_PLAYER_QUEUE_STATE
):
try:
queue = PlayerQueue.from_cache(prev_state)
prev_items = await self.mass.cache.get(
- "items",
+ key=queue_id,
+ provider=self.domain,
+ category=CACHE_CATEGORY_PLAYER_QUEUE_ITEMS,
default=[],
- category=CACHE_CATEGORY_PLAYER_QUEUE_STATE,
- base_key=queue_id,
)
queue_items = [QueueItem.from_cache(x) for x in prev_items]
except Exception as err:
"""Call when a player is removed from the registry."""
if permanent:
# if the player is permanently removed, we also remove the cached queue data
- self.mass.create_task(self.mass.cache.delete(f"queue.state.{player_id}"))
- self.mass.create_task(self.mass.cache.delete(f"queue.items.{player_id}"))
+ self.mass.create_task(
+ self.mass.cache.delete(
+ key=player_id, provider=self.domain, category=CACHE_CATEGORY_PLAYER_QUEUE_STATE
+ )
+ )
+ self.mass.create_task(
+ self.mass.cache.delete(
+ key=player_id,
+ provider=self.domain,
+ category=CACHE_CATEGORY_PLAYER_QUEUE_ITEMS,
+ )
+ )
self._queues.pop(player_id, None)
self._queue_items.pop(player_id, None)
# save items in cache
self.mass.create_task(
self.mass.cache.set(
- "items",
- [x.to_cache() for x in self._queue_items[queue_id]],
- category=CACHE_CATEGORY_PLAYER_QUEUE_STATE,
- base_key=queue_id,
+ key=queue_id,
+ data=[x.to_cache() for x in self._queue_items[queue_id]],
+ provider=self.domain,
+ category=CACHE_CATEGORY_PLAYER_QUEUE_ITEMS,
)
)
# always send the base event
# save state
self.mass.create_task(
self.mass.cache.set(
- "state",
- queue.to_cache(),
+ key=queue_id,
+ data=queue.to_cache(),
+ provider=self.domain,
category=CACHE_CATEGORY_PLAYER_QUEUE_STATE,
- base_key=queue_id,
)
)
ATTR_GROUP_MEMBERS,
ATTR_LAST_POLL,
ATTR_PREVIOUS_VOLUME,
- CACHE_CATEGORY_PLAYERS,
- CACHE_KEY_PLAYER_POWER,
CONF_AUTO_PLAY,
CONF_ENTRY_ANNOUNCE_VOLUME,
CONF_ENTRY_ANNOUNCE_VOLUME_MAX,
from music_assistant_models.config_entries import CoreConfig, PlayerConfig
from music_assistant_models.player_queue import PlayerQueue
+CACHE_CATEGORY_PLAYER_POWER = 1
+
_PlayerControllerT = TypeVar("_PlayerControllerT", bound="PlayerController")
_R = TypeVar("_R")
# and store the state in the cache
player.extra_data[ATTR_FAKE_POWER] = powered
await self.mass.cache.set(
- player_id, powered, category=CACHE_CATEGORY_PLAYERS, base_key=CACHE_KEY_PLAYER_POWER
+ key=player_id,
+ data=powered,
+ provider=self.domain,
+ category=CACHE_CATEGORY_PLAYER_POWER,
)
else:
# handle external player control
# restore 'fake' power state from cache if available
cached_value = await self.mass.cache.get(
- player.player_id,
+ key=player.player_id,
+ provider=self.domain,
+ category=CACHE_CATEGORY_PLAYER_POWER,
default=False,
- category=CACHE_CATEGORY_PLAYERS,
- base_key=CACHE_KEY_PLAYER_POWER,
)
if cached_value is not None:
player.extra_data[ATTR_FAKE_POWER] = cached_value
SLOW_PROVIDERS = ("tidal", "ytmusic", "apple_music")
-CACHE_BASE_KEY: Final[str] = "audio_cache_path"
+CACHE_CATEGORY_AUDIO_CACHE: Final[int] = 99
+CACHE_CATEGORY_RESOLVED_RADIO_URL: Final[int] = 100
+CACHE_PROVIDER: Final[str] = "audio"
CACHE_FILES_IN_USE: set[str] = set()
"""Create the cache file (if needed)."""
if self._cache_file is None:
if cached_cache_path := await self.mass.cache.get(
- self.streamdetails.uri, base_key=CACHE_BASE_KEY
+ key=self.streamdetails.uri,
+ provider=CACHE_PROVIDER,
+ category=CACHE_CATEGORY_AUDIO_CACHE,
):
# we have a mapping stored for this uri, prefer that
self._cache_file = cached_cache_path
self.mass.streams.audio_cache_dir, cache_id
)
await self.mass.cache.set(
- self.streamdetails.uri, cache_file, base_key=CACHE_BASE_KEY
+ key=self.streamdetails.uri,
+ data=cache_file,
+ provider=CACHE_PROVIDER,
+ category=CACHE_CATEGORY_AUDIO_CACHE,
)
# mark file as in-use to prevent it being deleted
CACHE_FILES_IN_USE.add(self._cache_file)
- unfolded URL as string
- StreamType to determine ICY (radio) or HLS stream.
"""
- cache_base_key = "resolved_radio_info"
- if cache := await mass.cache.get(url, base_key=cache_base_key):
+ if cache := await mass.cache.get(
+ key=url, provider=CACHE_PROVIDER, category=CACHE_CATEGORY_RESOLVED_RADIO_URL
+ ):
return cast("tuple[str, StreamType]", cache)
stream_type = StreamType.HTTP
resolved_url = url
result = (resolved_url, stream_type)
cache_expiration = 3600 * 3
- await mass.cache.set(url, result, expiration=cache_expiration, base_key=cache_base_key)
+ await mass.cache.set(
+ key=url,
+ data=result,
+ expiration=cache_expiration,
+ provider=CACHE_PROVIDER,
+ category=CACHE_CATEGORY_RESOLVED_RADIO_URL,
+ )
return result
self, table: str, match: dict[str, Any] | None = None, query: str | None = None
) -> None:
"""Delete data in given table."""
- assert not (query and "where" in query.lower())
+ assert not (match and query), "Cannot use both match and query"
sql_query = f"DELETE FROM {table} "
if match:
sql_query += " WHERE " + " AND ".join(f"{x} = :{x}" for x in match)
async def bypass(self) -> AsyncGenerator[None, None]:
"""Bypass the throttler."""
try:
- BYPASS_THROTTLER.set(True)
+ token = BYPASS_THROTTLER.set(True)
yield None
finally:
- # TODO: token is unbound here
- # BYPASS_THROTTLER.reset(token)
- ...
+ BYPASS_THROTTLER.reset(token)
def throttle_with_retries[ProviderT: "Provider", **P, R](
from importlib.metadata import PackageNotFoundError
from importlib.metadata import version as pkg_version
from types import TracebackType
-from typing import TYPE_CHECKING, Any, ParamSpec, Self, TypeVar, cast
+from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, Self, TypeVar, cast
from urllib.parse import urlparse
import chardet
from music_assistant.mass import MusicAssistant
from music_assistant.models import ProviderModuleType
+ from music_assistant.models.core_controller import CoreController
+ from music_assistant.models.provider import Provider
from dataclasses import fields, is_dataclass
def __aiter__(self): # type: ignore[no-untyped-def]
"""Return the async iterator."""
return self._factory()
+
+
+def guard_single_request[ProviderT: "Provider | CoreController", **P, R](
+ func: Callable[Concatenate[ProviderT, P], Coroutine[Any, Any, R]],
+) -> Callable[Concatenate[ProviderT, P], Coroutine[Any, Any, R]]:
+ """Guard single request to a function."""
+
+ @functools.wraps(func)
+ async def wrapper(self: ProviderT, *args: P.args, **kwargs: P.kwargs) -> R:
+ mass = self.mass
+ # create a task_id dynamically based on the function and args/kwargs
+ # (use __qualname__ so the enclosing class name disambiguates the task)
+ task_id_parts = [func.__qualname__, *args]
+ for key in sorted(kwargs.keys()):
+ task_id_parts.append(f"{key}{kwargs[key]}")
+ task_id = ".".join(map(str, task_id_parts))
+ task: asyncio.Task[R] = mass.create_task(
+ func, self, *args, **kwargs, task_id=task_id, abort_existing=False
+ )
+ return await task
+
+ return wrapper
def create_task(
self,
- target: Callable[[MassEvent], Coroutine[Any, Any, None]] | Awaitable[_R],
+ target: Callable[..., Coroutine[Any, Any, _R]] | Awaitable[_R],
*args: Any,
task_id: str | None = None,
abort_existing: bool = False,
import asyncio
from collections.abc import Sequence
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING, Final, cast
from music_assistant_models.enums import MediaType, ProviderFeature
from music_assistant_models.errors import (
)
from music_assistant.constants import (
- CACHE_CATEGORY_LIBRARY_ITEMS,
CONF_ENTRY_LIBRARY_IMPORT_ALBUM_TRACKS,
CONF_ENTRY_LIBRARY_IMPORT_PLAYLIST_TRACKS,
)
from music_assistant_models.streamdetails import StreamDetails
+CACHE_CATEGORY_PREV_LIBRARY_IDS: Final[int] = 1
+
class MusicProvider(Provider):
"""Base representation of a Music Provider (controller).
return await self.get_podcast_episode(prov_item_id)
return await self.get_track(prov_item_id)
- async def browse(self, path: str) -> Sequence[MediaItemType | ItemMapping | BrowseFolder]: # noqa: PLR0911, PLR0915
+ async def browse(self, path: str) -> Sequence[MediaItemType | ItemMapping | BrowseFolder]:
"""Browse this provider's items.
:param path: The path to browse, (e.g. provider_id://artists).
subpath = path.split("://", 1)[1]
# this reference implementation can be overridden with a provider specific approach
if subpath == "artists":
- library_item_ids = await self.mass.cache.get(
- "artist",
- category=CACHE_CATEGORY_LIBRARY_ITEMS,
- base_key=self.instance_id,
- )
- if not library_item_ids:
- return [x async for x in self.get_library_artists()]
- library_items = cast("list[int]", library_item_ids)
- query = "artists.item_id in :ids"
- query_params = {"ids": library_items}
return await self.mass.music.artists.library_items(
provider=self.instance_id,
- extra_query=query,
- extra_query_params=query_params,
)
if subpath == "albums":
- library_item_ids = await self.mass.cache.get(
- "album",
- category=CACHE_CATEGORY_LIBRARY_ITEMS,
- base_key=self.instance_id,
- )
- if not library_item_ids:
- return [x async for x in self.get_library_albums()]
- library_item_ids = cast("list[int]", library_item_ids)
- query = "albums.item_id in :ids"
- query_params = {"ids": library_item_ids}
return await self.mass.music.albums.library_items(
- extra_query=query, extra_query_params=query_params
+ provider=self.instance_id,
)
if subpath == "tracks":
- library_item_ids = await self.mass.cache.get(
- "track",
- category=CACHE_CATEGORY_LIBRARY_ITEMS,
- base_key=self.instance_id,
- )
- if not library_item_ids:
- return [x async for x in self.get_library_tracks()]
- library_item_ids = cast("list[int]", library_item_ids)
- query = "tracks.item_id in :ids"
- query_params = {"ids": library_item_ids}
return await self.mass.music.tracks.library_items(
- extra_query=query, extra_query_params=query_params
+ provider=self.instance_id,
)
if subpath == "radios":
- library_item_ids = await self.mass.cache.get(
- "radio",
- category=CACHE_CATEGORY_LIBRARY_ITEMS,
- base_key=self.instance_id,
- )
- if not library_item_ids:
- return [x async for x in self.get_library_radios()]
- library_item_ids = cast("list[int]", library_item_ids)
- query = "radios.item_id in :ids"
- query_params = {"ids": library_item_ids}
return await self.mass.music.radio.library_items(
- extra_query=query, extra_query_params=query_params
+ provider=self.instance_id,
)
if subpath == "playlists":
- library_item_ids = await self.mass.cache.get(
- "playlist",
- category=CACHE_CATEGORY_LIBRARY_ITEMS,
- base_key=self.instance_id,
- )
- if not library_item_ids:
- return [x async for x in self.get_library_playlists()]
- library_item_ids = cast("list[int]", library_item_ids)
- query = "playlists.item_id in :ids"
- query_params = {"ids": library_item_ids}
return await self.mass.music.playlists.library_items(
- extra_query=query, extra_query_params=query_params
+ provider=self.instance_id,
)
if subpath == "audiobooks":
- library_item_ids = await self.mass.cache.get(
- "audiobook",
- category=CACHE_CATEGORY_LIBRARY_ITEMS,
- base_key=self.instance_id,
- )
- if not library_item_ids:
- return [x async for x in self.get_library_audiobooks()]
- library_item_ids = cast("list[int]", library_item_ids)
- query = "audiobooks.item_id in :ids"
- query_params = {"ids": library_item_ids}
return await self.mass.music.audiobooks.library_items(
- extra_query=query, extra_query_params=query_params
+ provider=self.instance_id,
)
if subpath == "podcasts":
- library_item_ids = await self.mass.cache.get(
- "podcast",
- category=CACHE_CATEGORY_LIBRARY_ITEMS,
- base_key=self.instance_id,
- )
- if not library_item_ids:
- return [x async for x in self.get_library_podcasts()]
- library_item_ids = cast("list[int]", library_item_ids)
- query = "podcasts.item_id in :ids"
- query_params = {"ids": library_item_ids}
return await self.mass.music.podcasts.library_items(
- extra_query=query, extra_query_params=query_params
+ provider=self.instance_id,
)
if subpath:
# unknown path
raise UnsupportedFeaturedException(f"Unexpected media type to sync: {media_type}")
# process deletions (= no longer in library)
- cache_category = CACHE_CATEGORY_LIBRARY_ITEMS
- cache_base_key = self.instance_id
-
prev_library_items: list[int] | None
controller = self.mass.music.get_controller(media_type)
if prev_library_items := await self.mass.cache.get(
- media_type.value, category=cache_category, base_key=cache_base_key
+ key=media_type.value,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_PREV_LIBRARY_IDS,
):
for db_id in prev_library_items:
if db_id not in cur_db_ids:
await asyncio.sleep(0) # yield to eventloop
# store current list of id's in cache so we can track changes
await self.mass.cache.set(
- media_type.value,
- list(cur_db_ids),
- category=cache_category,
- base_key=cache_base_key,
+ key=media_type.value,
+ data=list(cur_db_ids),
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_PREV_LIBRARY_IDS,
)
async def _sync_library_artists(self, import_as_favorite: bool) -> set[int]:
if import_as_favorite:
prov_item.favorite = True
library_item = await self.mass.music.playlists.add_item_to_library(prov_item)
- elif library_item.cache_checksum != prov_item.cache_checksum:
- # existing dbitem checksum changed (used to determine if a playlist has changed)
- library_item = await self.mass.music.playlists.update_item_in_library(
- library_item.item_id, prov_item
- )
elif not library_item.favorite and import_as_favorite:
# existing library item not favorite but should be
await self.mass.music.playlists.set_favorite(library_item.item_id, True)
"""Check if provider mapping(s) are consistent between library and provider items."""
for provider_mapping in provider_item.provider_mappings:
if provider_mapping.item_id != provider_item.item_id:
- raise MusicAssistantError("Inconsistent provider mapping item_id's found")
+ raise MusicAssistantError("Inconsistent provider mapping item_id found")
if provider_mapping.provider_instance != self.instance_id:
- raise MusicAssistantError("Inconsistent provider mapping instance_id's found")
+ raise MusicAssistantError("Inconsistent provider mapping instance_id found")
provider_mapping.in_library = in_library
library_mapping = next(
(
from __future__ import annotations
+from typing import Final
+
from music_assistant_models.enums import ContentType
from music_assistant_models.media_items import AudioFormat
DOMAIN = "airplay"
-CONF_ENCRYPTION = "encryption"
-CONF_ALAC_ENCODE = "alac_encode"
-CONF_VOLUME_START = "volume_start"
-CONF_PASSWORD = "password"
-CONF_READ_AHEAD_BUFFER = "read_ahead_buffer"
-CONF_IGNORE_VOLUME = "ignore_volume"
+CACHE_CATEGORY_PREV_VOLUME: Final[int] = 1
+
+CONF_ENCRYPTION: Final[str] = "encryption"
+CONF_ALAC_ENCODE: Final[str] = "alac_encode"
+CONF_VOLUME_START: Final[str] = "volume_start"
+CONF_PASSWORD: Final[str] = "password"
+CONF_READ_AHEAD_BUFFER: Final[str] = "read_ahead_buffer"
+CONF_IGNORE_VOLUME: Final[str] = "ignore_volume"
+CONF_CREDENTIALS: Final[str] = "credentials"
-BACKOFF_TIME_LOWER_LIMIT = 15 # seconds
-BACKOFF_TIME_UPPER_LIMIT = 300 # Five minutes
+BACKOFF_TIME_LOWER_LIMIT: Final[int] = 15 # seconds
+BACKOFF_TIME_UPPER_LIMIT: Final[int] = 300 # Five minutes
-CONF_CREDENTIALS = "credentials"
-CACHE_KEY_PREV_VOLUME = "airplay_prev_volume"
-FALLBACK_VOLUME = 20
+FALLBACK_VOLUME: Final[int] = 20
AIRPLAY_FLOW_PCM_FORMAT = AudioFormat(
content_type=DEFAULT_PCM_FORMAT.content_type,
from .constants import (
AIRPLAY_FLOW_PCM_FORMAT,
AIRPLAY_PCM_FORMAT,
- CACHE_KEY_PREV_VOLUME,
+ CACHE_CATEGORY_PREV_VOLUME,
CONF_ALAC_ENCODE,
CONF_ENCRYPTION,
CONF_IGNORE_VOLUME,
self._attr_volume_level = volume_level
self.update_state()
# store last state in cache
- await self.mass.cache.set(self.player_id, volume_level, base_key=CACHE_KEY_PREV_VOLUME)
+ await self.mass.cache.set(
+ key=self.player_id,
+ data=volume_level,
+ provider=self.provider.lookup_key,
+ category=CACHE_CATEGORY_PREV_VOLUME,
+ )
async def set_members(
self,
from music_assistant.helpers.util import get_ip_pton, select_free_port
from music_assistant.models.player_provider import PlayerProvider
-from .constants import CACHE_KEY_PREV_VOLUME, CONF_IGNORE_VOLUME, FALLBACK_VOLUME
+from .constants import CACHE_CATEGORY_PREV_VOLUME, CONF_IGNORE_VOLUME, FALLBACK_VOLUME
from .helpers import (
convert_airplay_volume,
get_cliraop_binary,
display_name += " (AirPlay)"
# Get volume from cache
- if not (volume := await self.mass.cache.get(player_id, base_key=CACHE_KEY_PREV_VOLUME)):
+ if not (
+ volume := await self.mass.cache.get(
+ key=player_id, provider=self.lookup_key, category=CACHE_CATEGORY_PREV_VOLUME
+ )
+ ):
volume = FALLBACK_VOLUME
player = AirPlayPlayer(
from pywidevine import PSSH, Cdm, Device, DeviceTypes
from pywidevine.license_protocol_pb2 import WidevinePsshData
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.app_vars import app_var
from music_assistant.helpers.auth import AuthenticationHelper
from music_assistant.helpers.json import json_loads
CONF_MUSIC_APP_TOKEN = "music_app_token"
CONF_MUSIC_USER_TOKEN = "music_user_token"
CONF_MUSIC_USER_TOKEN_TIMESTAMP = "music_user_token_timestamp"
+CACHE_CATEGORY_DECRYPT_KEY = 1
async def setup(
) as _file:
self._decrypt_private_key = await _file.read()
+ @use_cache()
async def search(
self, search_query: str, media_types: list[MediaType] | None, limit: int = 5
) -> SearchResults:
elif item and item["id"]:
yield self._parse_playlist(item)
+ @use_cache()
async def get_artist(self, prov_artist_id) -> Artist:
"""Get full artist details by id."""
endpoint = f"catalog/{self._storefront}/artists/{prov_artist_id}"
response = await self._get_data(endpoint, extend="editorialNotes")
return self._parse_artist(response["data"][0])
+ @use_cache()
async def get_album(self, prov_album_id) -> Album:
"""Get full album details by id."""
endpoint = f"catalog/{self._storefront}/albums/{prov_album_id}"
response = await self._get_data(endpoint, include="artists")
return self._parse_album(response["data"][0])
+ @use_cache()
async def get_track(self, prov_track_id) -> Track:
"""Get full track details by id."""
endpoint = f"catalog/{self._storefront}/songs/{prov_track_id}"
response = await self._get_data(endpoint, include="artists,albums")
return self._parse_track(response["data"][0])
+ @use_cache()
async def get_playlist(self, prov_playlist_id) -> Playlist:
"""Get full playlist details by id."""
if self._is_catalog_id(prov_playlist_id):
response = await self._get_data(endpoint)
return self._parse_playlist(response["data"][0])
+ @use_cache()
async def get_album_tracks(self, prov_album_id) -> list[Track]:
"""Get all album tracks for given album id."""
endpoint = f"catalog/{self._storefront}/albums/{prov_album_id}/tracks"
tracks.append(track)
return tracks
+ @use_cache(3600 * 3) # cache for 3 hours
async def get_playlist_tracks(self, prov_playlist_id, page: int = 0) -> list[Track]:
"""Get all playlist tracks for given playlist id."""
if self._is_catalog_id(prov_playlist_id):
result.append(parsed_track)
return result
+ @use_cache(3600 * 24 * 7) # cache for 7 days
async def get_artist_albums(self, prov_artist_id) -> list[Album]:
"""Get a list of all albums for the given artist."""
endpoint = f"catalog/{self._storefront}/artists/{prov_artist_id}/albums"
return []
return [self._parse_album(album) for album in response if album["id"]]
+ @use_cache(3600 * 24 * 7) # cache for 7 days
async def get_artist_toptracks(self, prov_artist_id) -> list[Track]:
"""Get a list of 10 most popular tracks for the given artist."""
endpoint = f"catalog/{self._storefront}/artists/{prov_artist_id}/view/top-songs"
"""Remove track(s) from playlist."""
raise NotImplementedError("Not implemented!")
+ @use_cache(3600 * 24) # cache for 24 hours
async def get_similar_tracks(self, prov_track_id, limit=25) -> list[Track]:
"""Retrieve a dynamic list of tracks based on the provided item."""
# Note, Apple music does not have an official endpoint for similar tracks.
)
if description := attributes.get("description"):
playlist.metadata.description = description.get("standard")
- if checksum := attributes.get("lastModifiedDate"):
- playlist.cache_checksum = checksum
return playlist
async def _get_all_items(self, endpoint, key="data", **kwargs) -> list[dict]:
self, license_url: str, key_id: bytes, uri: str, item_id: str
) -> str:
"""Get the decryption key for a song."""
- cache_key = f"decryption_key.{item_id}"
- if decryption_key := await self.mass.cache.get(cache_key, base_key=self.instance_id):
+ if decryption_key := await self.mass.cache.get(
+ key=item_id, provider=self.instance_id, category=CACHE_CATEGORY_DECRYPT_KEY
+ ):
self.logger.debug("Decryption key for %s found in cache.", item_id)
return decryption_key
pssh = self._get_pssh(key_id)
decryption_key = key.key.hex()
self.mass.create_task(
self.mass.cache.set(
- cache_key, decryption_key, expiration=7200, base_key=self.instance_id
+ key=item_id,
+ data=decryption_key,
+ expiration=7200,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_DECRYPT_KEY,
)
)
return decryption_key
"""Search and lookup always search remote."""
return True
+ @use_cache(3600 * 24 * 7) # cache for 7 days
async def search(
self,
search_query: str,
return SearchResults(podcasts=podcasts, radio=radios)
+ @use_cache(3600 * 24 * 7) # cache for 7 days
async def get_radio(self, prov_radio_id: str) -> Radio:
"""Get full radio details by id."""
# Get full details of a single Radio station.
return []
+ @use_cache(3600 * 24 * 7) # cache for 7 days
async def get_podcast(self, prov_podcast_id: str) -> Podcast:
"""Get podcast."""
async with await self.get_client() as session:
progress,
)
+ @use_cache(3600 * 24) # cache for 24 hours
async def get_podcast_episode(self, prov_episode_id: str) -> PodcastEpisode:
"""Get single podcast episode."""
await self._update_progress()
allow_seek=seek,
)
- @use_cache(3600)
+ @use_cache(3600 * 24 * 7) # cache for 7 days
async def get_organizations(self, path: str) -> list[BrowseFolder]:
"""Create a list of all available organizations."""
async with await self.get_client() as session:
return organizations
- @use_cache(3600)
+ @use_cache(3600 * 24 * 7) # cache for 7 days
async def get_publication_services(self, path: str, core_id: str) -> list[BrowseFolder]:
"""Create a list of publications for a given organization."""
async with await self.get_client() as session:
return publications
- @use_cache(3600)
+ @use_cache(3600 * 24 * 7) # cache for 7 days
async def get_publications_list(self, core_id: str) -> list[Radio | Podcast]:
"""Create list of available radio stations and shows for a publication service."""
async with await self.get_client() as session:
cached_book = None
if asin:
cached_book = await self.mass.cache.get(
- key=asin, base_key=CACHE_DOMAIN, category=CACHE_CATEGORY_AUDIOBOOK, default=None
+ key=asin,
+ provider=self.provider_instance,
+ category=CACHE_CATEGORY_AUDIOBOOK,
+ default=None,
)
try:
"""Fetch the audiobook by asin."""
if use_cache:
cached_book = await self.mass.cache.get(
- key=asin, base_key=CACHE_DOMAIN, category=CACHE_CATEGORY_AUDIOBOOK, default=None
+ key=asin,
+ provider=self.provider_instance,
+ category=CACHE_CATEGORY_AUDIOBOOK,
+ default=None,
)
if cached_book is not None:
return await self._parse_audiobook(cached_book)
await self.mass.cache.set(
key=asin,
- base_key=CACHE_DOMAIN,
+ provider=self.provider_instance,
category=CACHE_CATEGORY_AUDIOBOOK,
data=item_data,
)
return []
chapters_data: list[Any] = await self.mass.cache.get(
- base_key=CACHE_DOMAIN, category=CACHE_CATEGORY_CHAPTERS, key=asin, default=[]
+ key=asin, provider=self.provider_instance, category=CACHE_CATEGORY_CHAPTERS, default=[]
)
if not chapters_data:
chapters_data = chapter_info.get("chapters") or []
await self.mass.cache.set(
- base_key=CACHE_DOMAIN,
- category=CACHE_CATEGORY_CHAPTERS,
key=asin,
data=chapters_data,
+ provider=self.provider_instance,
+ category=CACHE_CATEGORY_CHAPTERS,
)
except Exception as exc:
self.logger.error(f"Error fetching chapters for ASIN {asin}: {exc}")
cache_key_with_params = f"{path}:{params_hash}"
if use_cache:
response = await self.mass.cache.get(
- key=cache_key_with_params, base_key=CACHE_DOMAIN, category=CACHE_CATEGORY_API
+ key=cache_key_with_params,
+ provider=self.provider_instance,
+ category=CACHE_CATEGORY_API,
)
if not response:
response = await self.client.get(path, **kwargs)
await self.mass.cache.set(
- key=cache_key_with_params, base_key=CACHE_DOMAIN, data=response
+ key=cache_key_with_params, provider=self.provider_instance, data=response
)
return response
from music_assistant_models.media_items.media_item import RecommendationFolder
from music_assistant_models.streamdetails import StreamDetails
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.audio import get_multi_file_stream
from music_assistant.models.music_provider import MusicProvider
from music_assistant.providers.audiobookshelf.parsers import (
self._client.server_settings.version,
)
- self.cache_base_key = self.instance_id
-
cached_libraries = await self.mass.cache.get(
key=CACHE_KEY_LIBRARIES,
- base_key=self.cache_base_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_LIBRARIES,
default=None,
)
return abs_podcast
+ @use_cache(3600)
@handle_refresh_token
async def get_podcast(self, prov_podcast_id: str) -> Podcast:
"""Get single podcast."""
return abs_audiobook
+ @use_cache(3600)
@handle_refresh_token
async def get_audiobook(self, prov_audiobook_id: str) -> Audiobook:
"""Get a single audiobook.
async def _cache_set_helper_libraries(self) -> None:
await self.mass.cache.set(
key=CACHE_KEY_LIBRARIES,
- base_key=self.cache_base_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_LIBRARIES,
data=self.libraries.to_dict(),
)
import os
import time
from collections.abc import AsyncGenerator
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING, Final, cast
import aiofiles
import shortuuid
)
from music_assistant_models.streamdetails import StreamDetails
-from music_assistant.constants import CACHE_CATEGORY_MEDIA_INFO, MASS_LOGO, VARIOUS_ARTISTS_FANART
+from music_assistant.constants import MASS_LOGO, VARIOUS_ARTISTS_FANART
from music_assistant.helpers.tags import AudioTags, async_parse_tags
from music_assistant.helpers.uri import parse_uri
from music_assistant.models.music_provider import MusicProvider
from music_assistant.mass import MusicAssistant
from music_assistant.models import ProviderInstanceType
+CACHE_CATEGORY_MEDIA_INFO: Final[int] = 1
SUPPORTED_FEATURES = {
ProviderFeature.BROWSE,
},
owner="Music Assistant",
is_editable=False,
- cache_checksum=str(int(time.time())),
metadata=MediaItemMetadata(
images=UniqueList([DEFAULT_THUMB])
if prov_playlist_id in COLLAGE_IMAGE_PLAYLISTS
owner="Music Assistant",
is_editable=True,
)
- playlist.cache_checksum = str(stored_item.get("last_updated"))
if image_url := stored_item.get("image_url"):
playlist.metadata.add_image(
MediaItemImage(
async def _get_media_info(self, url: str, force_refresh: bool = False) -> AudioTags:
"""Retrieve mediainfo for url."""
- cache_category = CACHE_CATEGORY_MEDIA_INFO
- cache_base_key = self.lookup_key
# do we have some cached info for this url ?
cached_info = await self.mass.cache.get(
- url, category=cache_category, base_key=cache_base_key
+ url, provider=self.instance_id, category=CACHE_CATEGORY_MEDIA_INFO
)
if cached_info and not force_refresh:
return AudioTags.parse(cached_info)
if "authSig" in url:
media_info.has_cover_image = False
await self.mass.cache.set(
- url, media_info.raw, category=cache_category, base_key=cache_base_key
+ url, media_info.raw, provider=self.instance_id, category=CACHE_CATEGORY_MEDIA_INFO
)
return media_info
from music_assistant_models.streamdetails import StreamDetails
from music_assistant import MusicAssistant
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.app_vars import app_var # type: ignore[attr-defined]
from music_assistant.helpers.auth import AuthenticationHelper
from music_assistant.helpers.datetime import utc_timestamp
)
await self.gw_client.setup()
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def search(
self, search_query: str, media_types: list[MediaType], limit: int = 5
) -> SearchResults:
async for track in await self.client.get_user_tracks():
yield self.parse_track(track=track, user_country=self.gw_client.user_country)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_artist(self, prov_artist_id: str) -> Artist:
"""Get full artist details by id."""
try:
self.logger.warning("Failed getting artist: %s", error)
raise MediaNotFoundError(f"Artist {prov_artist_id} not found on Deezer") from error
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_album(self, prov_album_id: str) -> Album:
"""Get full album details by id."""
try:
self.logger.warning("Failed getting album: %s", error)
raise MediaNotFoundError(f"Album {prov_album_id} not found on Deezer") from error
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_playlist(self, prov_playlist_id: str) -> Playlist:
"""Get full playlist details by id."""
try:
self.logger.warning("Failed getting playlist: %s", error)
raise MediaNotFoundError(f"Album {prov_playlist_id} not found on Deezer") from error
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_track(self, prov_track_id: str) -> Track:
"""Get full track details by id."""
try:
self.logger.warning("Failed getting track: %s", error)
raise MediaNotFoundError(f"Album {prov_track_id} not found on Deezer") from error
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
"""Get all tracks in an album."""
album = await self.client.get_album(album_id=int(prov_album_id))
for deezer_track in await album.get_tracks()
]
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
result: list[Track] = []
)
return result
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
"""Get albums by an artist."""
artist = await self.client.get_artist(artist_id=int(prov_artist_id))
return [self.parse_album(album=album) async for album in await artist.get_albums()]
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_artist_toptracks(self, prov_artist_id: str) -> list[Track]:
"""Get top 50 tracks of an artist."""
artist = await self.client.get_artist(artist_id=int(prov_artist_id))
raise NotImplementedError
return result
+ @use_cache(3600) # Cache for 1 hour
async def recommendations(self) -> list[RecommendationFolder]:
"""Get deezer's recommendations."""
return [
playlist = await self.client.get_playlist(playlist_id)
return self.parse_playlist(playlist=playlist)
+ @use_cache(3600 * 24) # Cache for 24 hours
async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
"""Retrieve a dynamic list of tracks based on the provided item."""
endpoint = "song.getSearchTrackMix"
),
is_editable=is_editable,
owner=creator.name,
- cache_checksum=playlist.checksum,
)
def get_playlist_creator(self, playlist: deezer.Playlist) -> deezer.User:
return metadata
return None
- @use_cache(86400 * 30)
+ @use_cache(86400 * 60) # Cache for 60 days
async def _get_data(self, endpoint: str, **kwargs: str) -> dict[str, Any] | None:
"""Get data from api."""
url = f"http://webservice.fanart.tv/v3/{endpoint}"
VARIOUS_ARTISTS_NAME,
VERBOSE_LOG_LEVEL,
)
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.compare import compare_strings, create_safe_string
from music_assistant.helpers.json import json_loads
from music_assistant.helpers.playlists import parse_m3u, parse_pls
from .constants import (
AUDIOBOOK_EXTENSIONS,
+ CACHE_CATEGORY_ALBUM_INFO,
+ CACHE_CATEGORY_ARTIST_INFO,
+ CACHE_CATEGORY_AUDIOBOOK_CHAPTERS,
+ CACHE_CATEGORY_FOLDER_IMAGES,
+ CACHE_CATEGORY_PODCAST_METADATA,
CONF_ENTRY_CONTENT_TYPE,
CONF_ENTRY_CONTENT_TYPE_READ_ONLY,
CONF_ENTRY_IGNORE_ALBUM_PLAYLISTS,
async def process_playlist() -> None:
playlist = await self.get_playlist(item.relative_path)
            # add/update playlist to db
- playlist.cache_checksum = item.checksum
playlist.favorite = import_as_favorite
await self.mass.music.playlists.add_item_to_library(
playlist,
if file_item.ext == "pls":
playlist.is_editable = False
playlist.owner = self.name
- checksum = str(file_item.checksum)
- playlist.cache_checksum = checksum
return playlist
async def get_audiobook(self, prov_audiobook_id: str) -> Audiobook:
if any(x.provider_instance == self.instance_id for x in track.provider_mappings)
]
+ @use_cache(3600) # Cache for 1 hour
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
result: list[Track] = []
break
# prefer (short lived) cache for a bit more speed
- cache_base_key = f"{self.instance_id}.artist"
- if artist_path and (cache := await self.cache.get(artist_path, base_key=cache_base_key)):
+ if artist_path and (
+ cache := await self.cache.get(
+ key=artist_path, provider=self.instance_id, category=CACHE_CATEGORY_ARTIST_INFO
+ )
+ ):
return cast("Artist", cache)
prov_artist_id = artist_path or name
if images := await self._get_local_images(artist_path, extra_thumb_names=("artist",)):
artist.metadata.images = UniqueList(images)
- await self.cache.set(artist_path, artist, base_key=cache_base_key, expiration=120)
+ await self.cache.set(
+ key=artist_path,
+ data=artist,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_ARTIST_INFO,
+ expiration=120,
+ )
return artist
track_dir = os.path.dirname(track_path)
album_dir = get_album_dir(track_dir, track_tags.album)
- cache_base_key = f"{self.instance_id}.album"
- if album_dir and (cache := await self.cache.get(album_dir, base_key=cache_base_key)):
+ if album_dir and (
+ cache := await self.cache.get(
+ key=album_dir,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_ALBUM_INFO,
+ )
+ ):
return cast("Album", cache)
# album artist(s)
album.metadata.images = UniqueList(images)
else:
album.metadata.images += images
- await self.cache.set(album_dir, album, base_key=cache_base_key, expiration=120)
+ await self.cache.set(
+ key=album_dir,
+ data=album,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_ALBUM_INFO,
+ expiration=120,
+ )
return album
async def _get_local_images(
self, folder: str, extra_thumb_names: tuple[str, ...] | None = None
) -> UniqueList[MediaItemImage]:
"""Return local images found in a given folderpath."""
- cache_base_key = f"{self.lookup_key}.folderimages"
- if (cache := await self.cache.get(folder, base_key=cache_base_key)) is not None:
+ if (
+ cache := await self.cache.get(
+ key=folder, provider=self.instance_id, category=CACHE_CATEGORY_FOLDER_IMAGES
+ )
+ ) is not None:
return cast("UniqueList[MediaItemImage]", cache)
if extra_thumb_names is None:
extra_thumb_names = ()
)
)
- await self.cache.set(folder, images, base_key=cache_base_key, expiration=120)
+ await self.cache.set(
+ key=folder,
+ data=images,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_FOLDER_IMAGES,
+ expiration=120,
+ )
return images
async def check_write_access(self) -> None:
prov_mapping = next(x for x in library_item.provider_mappings if x.item_id == item_id)
file_item = await self.resolve(item_id)
duration = library_item.duration
- chapters_cache_key = f"{self.lookup_key}.audiobook.chapters"
file_based_chapters: list[tuple[str, float]] | None = await self.cache.get(
- file_item.relative_path,
- base_key=chapters_cache_key,
+ key=file_item.relative_path,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_AUDIOBOOK_CHAPTERS,
)
if file_based_chapters is None:
# no cache available for this audiobook, we need to parse the chapters
tags = await async_parse_tags(file_item.absolute_path, file_item.file_size)
await self._parse_audiobook(file_item, tags)
file_based_chapters = await self.cache.get(
- file_item.relative_path,
- base_key=chapters_cache_key,
+ key=file_item.relative_path,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_AUDIOBOOK_CHAPTERS,
)
if file_based_chapters:
# store chapter files in cache
# for easy access from streamdetails
await self.cache.set(
- audiobook_file_item.relative_path,
- all_chapter_files,
- base_key=f"{self.lookup_key}.audiobook.chapters",
+ key=audiobook_file_item.relative_path,
+ data=all_chapter_files,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_AUDIOBOOK_CHAPTERS,
)
return (int(total_duration), chapters)
async def _get_podcast_metadata(self, podcast_folder: str) -> dict[str, Any]:
"""Return metadata for a podcast."""
- cache_base_key = f"{self.lookup_key}.podcastmetadata"
- if (cache := await self.cache.get(podcast_folder, base_key=cache_base_key)) is not None:
+ if (
+ cache := await self.cache.get(
+ key=podcast_folder,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_PODCAST_METADATA,
+ )
+ ) is not None:
return cast("dict[str, Any]", cache)
data: dict[str, Any] = {}
metadata_file = os.path.join(podcast_folder, "metadata.json")
metadata_file = self.get_absolute_path(metadata_file)
async with aiofiles.open(metadata_file) as _file:
data.update(json_loads(await _file.read()))
- await self.cache.set(podcast_folder, data, base_key=cache_base_key)
+ await self.cache.set(
+ key=podcast_folder,
+ data=data,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_PODCAST_METADATA,
+ )
return data
from __future__ import annotations
+from typing import Final
+
from music_assistant_models.config_entries import ConfigEntry, ConfigValueOption
from music_assistant_models.enums import ConfigEntryType, ProviderFeature
class IsChapterFile(Exception):
"""Exception to indicate that a file is part of a multi-part media (e.g. audiobook chapter)."""
+
+
+CACHE_CATEGORY_ARTIST_INFO: Final[int] = 1
+CACHE_CATEGORY_ALBUM_INFO: Final[int] = 2
+CACHE_CATEGORY_FOLDER_IMAGES: Final[int] = 3
+CACHE_CATEGORY_AUDIOBOOK_CHAPTERS: Final[int] = 4
+CACHE_CATEGORY_PODCAST_METADATA: Final[int] = 5
from music_assistant_models.enums import ProviderFeature
from music_assistant_models.media_items import MediaItemMetadata, Track
+from music_assistant.controllers.cache import use_cache
from music_assistant.models.metadata_provider import MetadataProvider
if TYPE_CHECKING:
)
return None
- song_lyrics = await asyncio.to_thread(self._fetch_lyrics, artist_name, track.name)
+ song_lyrics = await self.fetch_lyrics(artist_name, track.name)
if song_lyrics:
metadata = MediaItemMetadata()
self.logger.debug("No lyrics found for %s by %s", track.name, artist_name)
return None
- def _fetch_lyrics(self, artist: str, title: str) -> str | None:
- """Fetch lyrics - NOTE: not async friendly."""
- # blank artist / title?
- if artist is None or len(artist.strip()) == 0 or title is None or len(title.strip()) == 0:
- self.logger.error("Cannot fetch lyrics without artist and title")
- return None
-
- # clean song title to increase chance and accuracy of a result
- cleaned_title = clean_song_title(title)
- if cleaned_title != title:
- self.logger.debug(f'Song title was cleaned: "{title}" -> "{cleaned_title}"')
+ @use_cache(86400 * 7) # Cache for 7 days
+ async def fetch_lyrics(self, artist: str, title: str) -> str | None:
+ """Fetch lyrics for a given artist and title."""
+
+ def _fetch_lyrics(artist: str, title: str) -> str | None:
+ """Fetch lyrics - NOTE: not async friendly."""
+ # blank artist / title?
+ if (
+ artist is None
+ or len(artist.strip()) == 0
+ or title is None
+ or len(title.strip()) == 0
+ ):
+ self.logger.error("Cannot fetch lyrics without artist and title")
+ return None
+
+ # clean song title to increase chance and accuracy of a result
+ cleaned_title = clean_song_title(title)
+ if cleaned_title != title:
+ self.logger.debug(f'Song title was cleaned: "{title}" -> "{cleaned_title}"')
+
+ self.logger.info(f"Searching lyrics for artist='{artist}' and title='{cleaned_title}'")
- self.logger.info(f"Searching lyrics for artist='{artist}' and title='{cleaned_title}'")
+ # perform search
+ song = self._genius.search_song(cleaned_title, artist, get_full_info=False)
- # perform search
- song = self._genius.search_song(cleaned_title, artist, get_full_info=False)
+ # second search needed?
+ if not song and " - " in cleaned_title:
+ # aggressively truncate title from the first hyphen
+ cleaned_title = cleaned_title.split(" - ", 1)[0]
+ self.logger.info(f"Second attempt, aggressively cleaned title='{cleaned_title}'")
- # second search needed?
- if not song and " - " in cleaned_title:
- # aggressively truncate title from the first hyphen
- cleaned_title = cleaned_title.split(" - ", 1)[0]
- self.logger.info(f"Second attempt, aggressively cleaned title='{cleaned_title}'")
+ # perform search
+ song = self._genius.search_song(cleaned_title, artist, get_full_info=False)
- # perform search
- song = self._genius.search_song(cleaned_title, artist, get_full_info=False)
+ if song:
+ # attempts to clean lyrics of erroneous text
+ return cleanup_lyrics(song)
- if song:
- # attempts to clean lyrics of erroneous text
- return cleanup_lyrics(song)
+ return None
- return None
+ return await asyncio.to_thread(_fetch_lyrics, artist, title)
timestamps = await self.mass.cache.get(
key=CACHE_KEY_TIMESTAMP,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_OTHER,
default=None,
)
feeds = await self.mass.cache.get(
key=CACHE_KEY_FEEDS,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_OTHER,
default=None,
)
async def _cache_get_podcast(self, prov_podcast_id: str) -> dict[str, Any]:
parsed_podcast = await self.mass.cache.get(
key=prov_podcast_id,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_PODCAST_ITEMS,
default=None,
)
async def _cache_set_podcast(self, feed_url: str, parsed_podcast: dict[str, Any]) -> None:
await self.mass.cache.set(
key=feed_url,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_PODCAST_ITEMS,
data=parsed_podcast,
expiration=60 * 60 * 24, # 1 day
# seven days default
await self.mass.cache.set(
key=CACHE_KEY_TIMESTAMP,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_OTHER,
data=[self.timestamp_subscriptions, self.timestamp_actions],
)
# seven days default
await self.mass.cache.set(
key=CACHE_KEY_FEEDS,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_OTHER,
data=self.feeds,
)
VARIOUS_ARTISTS_MBID,
VARIOUS_ARTISTS_NAME,
)
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.util import infer_album_type, parse_title_and_version
from music_assistant.models.music_provider import MusicProvider
self.logger.debug("Parse album failed: %s", album, exc_info=error)
continue
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_album(self, prov_album_id: str) -> Album:
"""Get full album details by id."""
album_obj = await self._client.get_album(int(prov_album_id))
self.logger.debug("Parse artist failed: %s", artist, exc_info=error)
continue
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
"""Get a list of albums for the given artist."""
albums_objs = [
continue
return albums
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
"""Get album tracks for given album id."""
album = await self._client.get_album(int(prov_album_id))
return await self._get_tracks(album["tracks"])
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_track(self, prov_track_id: str) -> Track:
"""Get full track details by id."""
track_obj = await self._client.get_track(int(prov_track_id))
return await self._parse_track(track_obj)
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_artist(self, prov_artist_id: str) -> Artist:
"""Get full artist details by id."""
artist_obj = await self._client.get_artist(int(prov_artist_id))
if playlist["type"] != "recently-played" and playlist["type"] != "thumbsup":
yield await self._parse_playlist(playlist)
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_playlist(self, prov_playlist_id: str) -> Playlist:
"""Get full playlist details by id."""
playlist_obj = await self._client.get_playlist(int(prov_playlist_id))
self.logger.debug("Parse playlist failed: %s", playlist_obj, exc_info=error)
return playlist
+ @use_cache(3600) # Cache for 1 hour
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
tracks: list[Track] = []
)
from music_assistant_models.streamdetails import StreamDetails
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.podcast_parsers import (
get_podcastparser_dict,
parse_podcast,
# 20 requests per minute, be a bit below
self.throttler = ThrottlerManager(rate_limit=18, period=60)
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def search(
self, search_query: str, media_types: list[MediaType], limit: int = 10
) -> SearchResults:
async def _cache_get_podcast(self, prov_podcast_id: str) -> dict[str, Any]:
parsed_podcast = await self.mass.cache.get(
key=prov_podcast_id,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_PODCASTS,
default=None,
)
async def _cache_set_podcast(self, feed_url: str, parsed_podcast: dict[str, Any]) -> None:
await self.mass.cache.set(
key=feed_url,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_PODCASTS,
data=parsed_podcast,
expiration=60 * 60 * 24, # 1 day
async def _cache_set_top_podcasts(self, top_podcast_helper: TopPodcastsHelper) -> None:
await self.mass.cache.set(
key=CACHE_KEY_TOP_PODCASTS,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_RECOMMENDATIONS,
data=top_podcast_helper.to_dict(),
expiration=60 * 60 * 6, # 6 hours
async def _cache_get_top_podcasts(self) -> list[PodcastSearchResult]:
parsed_top_podcasts = await self.mass.cache.get(
key=CACHE_KEY_TOP_PODCASTS,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_RECOMMENDATIONS,
)
if parsed_top_podcasts is not None:
from music_assistant_models.streamdetails import StreamDetails
from music_assistant.constants import UNKNOWN_ARTIST_ID_MBID
+from music_assistant.controllers.cache import use_cache
from music_assistant.mass import MusicAssistant
from music_assistant.models import ProviderInstanceType
from music_assistant.models.music_provider import MusicProvider
playlists.append(parse_playlist(self.instance_id, self._client, item))
return playlists
+ @use_cache(60 * 15) # Cache for 15 minutes
async def search(
self,
search_query: str,
media_types: list[MediaType],
limit: int = 20,
) -> SearchResults:
- """Perform search on the plex library.
+ """Perform search on the Jellyfin library.
:param search_query: Search query.
:param media_types: A list of media_types to include. All types if None.
raise MediaNotFoundError(f"Item {prov_album_id} not found")
return parse_album(self.logger, self.instance_id, self._client, album)
+ @use_cache(3600) # Cache for 1 hour
async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
"""Get album tracks for given album id."""
jellyfin_album_tracks = (
for jellyfin_album_track in jellyfin_album_tracks["Items"]
]
+ @use_cache(60 * 15) # Cache for 15 minutes
async def get_artist(self, prov_artist_id: str) -> Artist:
"""Get full artist details by id."""
if prov_artist_id == UNKNOWN_ARTIST_MAPPING.item_id:
raise MediaNotFoundError(f"Item {prov_artist_id} not found")
return parse_artist(self.logger, self.instance_id, self._client, jellyfin_artist)
+ @use_cache(60 * 15) # Cache for 15 minutes
async def get_track(self, prov_track_id: str) -> Track:
"""Get full track details by id."""
try:
raise MediaNotFoundError(f"Item {prov_track_id} not found")
return parse_track(self.logger, self.instance_id, self._client, track)
+ @use_cache(60 * 15) # Cache for 15 minutes
async def get_playlist(self, prov_playlist_id: str) -> Playlist:
"""Get full playlist details by id."""
try:
raise MediaNotFoundError(f"Item {prov_playlist_id} not found")
return parse_playlist(self.instance_id, self._client, playlist)
+ @use_cache(3600) # Cache for 1 hour
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
result: list[Track] = []
)
return result
+ @use_cache(3600) # Cache for 1 hour
async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
"""Get a list of albums for the given artist."""
if not prov_artist_id.startswith(FAKE_ARTIST_PREFIX):
allow_seek=True,
)
+ @use_cache(3600) # Cache for 1 hour
async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
"""Retrieve a dynamic list of tracks based on the provided item."""
resp = await self._client.get_similar_tracks(
from music_assistant_models.enums import ConfigEntryType, ProviderFeature
from music_assistant_models.media_items import MediaItemMetadata, Track
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.throttle_retry import ThrottlerManager, throttle_with_retries
from music_assistant.models.metadata_provider import MetadataProvider
self.throttler = ThrottlerManager(rate_limit=1, period=1)
self.logger.debug("Using custom API endpoint: %s (throttling disabled)", self.api_url)
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
@throttle_with_retries
async def _get_data(self, **params: Any) -> dict[str, Any] | None:
"""Get data from LRCLib API with throttling and retries."""
return MusicBrainzArtist.from_raw(artist)
return None
- @use_cache(86400 * 30)
+ @use_cache(86400 * 30) # Cache for 30 days
@throttle_with_retries
async def get_data(self, endpoint: str, **kwargs: str) -> Any:
"""Get data from api."""
from music_assistant_models.streamdetails import StreamDetails
from music_assistant.constants import CONF_PASSWORD, CONF_USERNAME
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.json import json_loads
from music_assistant.helpers.util import infer_album_type
from music_assistant.models.music_provider import MusicProvider
if item and item["id"]:
yield self._parse_playlist(item)
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_artist(self, prov_artist_id: str) -> Artist:
"""Get artist details by id."""
endpoint = f"/releases/recent?limit=1&artistIds={prov_artist_id}"
artist_data = artist_response["items"][0]["artist"]
return self._parse_artist(artist_data)
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
"""Get a list of all albums for the given artist."""
params = {
if (item and item["id"])
]
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_album(self, prov_album_id: str) -> Album:
"""Get album details by id."""
endpoint = f"shows/{prov_album_id}"
response = await self._get_data("catalog", endpoint)
return self._parse_album(response["Response"])
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_playlist(self, prov_playlist_id: str) -> Playlist:
"""Get full playlist details by id."""
endpoint = f"playlists/{prov_playlist_id}"
response = await self._get_data("stash", endpoint)
return self._parse_playlist(response["items"])
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
"""Get all album tracks for given album id."""
endpoint = f"shows/{prov_album_id}"
if item["trackID"]
]
+ @use_cache(3600) # Cache for 1 hour
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
result: list[Track] = []
from music_assistant_models.streamdetails import StreamDetails
from music_assistant.constants import (
- CACHE_CATEGORY_OPEN_SUBSONIC,
CONF_PASSWORD,
CONF_PATH,
CONF_PORT,
from libopensonic.media import ArtistID3 as SonicArtist
from libopensonic.media import Bookmark as SonicBookmark
from libopensonic.media import Child as SonicSong
- from libopensonic.media import OpenSubsonicExtension
+ from libopensonic.media import OpenSubsonicExtension, PodcastChannel
from libopensonic.media import Playlist as SonicPlaylist
from libopensonic.media import PodcastEpisode as SonicEpisode
CONF_PLAYED_ALBUMS = "recommend_played"
CONF_RECO_SIZE = "recommendation_count"
+CACHE_CATEGORY_PODCAST_CHANNEL = 1
+CACHE_CATEGORY_PODCAST_EPISODES = 2
Param = ParamSpec("Param")
RetType = TypeVar("RetType")
_show_new: bool = True
_show_played: bool = True
_reco_limit: int = 10
- _cache_base_key: str = ""
async def handle_async_init(self) -> None:
"""Set up the music provider and test the connection."""
self._show_new = bool(self.config.get_value(CONF_NEW_ALBUMS))
self._show_played = bool(self.config.get_value(CONF_PLAYED_ALBUMS))
self._reco_limit = int(str(self.config.get_value(CONF_RECO_SIZE)))
- self._cache_base_key = f"{self.instance_id}/"
@property
def is_streaming_provider(self) -> bool:
self.logger.debug("Done streaming %s", streamdetails.item_id)
-    async def _get_podcast_channel_async(self, chan_id: str, base_key: str) -> None:
-        chan = await self._run_async(self.conn.get_podcasts, inc_episodes=True, pid=chan_id)
-        if not chan:
-            return
-        await self.mass.cache.set(
+    async def _get_podcast_channel_async(self, chan_id: str) -> PodcastChannel | None:
+        """Return the podcast channel for chan_id, preferring the cache (600s TTL)."""
+        if cache := await self.mass.cache.get(
            key=chan_id,
-            data=chan[0],
-            base_key=base_key,
-            expiration=600,
-            category=CACHE_CATEGORY_OPEN_SUBSONIC,
-        )
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_PODCAST_CHANNEL,
+        ):
+            return cache
+        # Cache miss: fetch from the (Open)Subsonic server and store the result.
+        if channels := await self._run_async(
+            self.conn.get_podcasts, inc_episodes=True, pid=chan_id
+        ):
+            channel = channels[0]
+            await self.mass.cache.set(
+                key=chan_id,
+                data=channel,
+                provider=self.instance_id,
+                expiration=600,
+                category=CACHE_CATEGORY_PODCAST_CHANNEL,
+            )
+            return channel
+        return None
async def _podcast_recommendations(self) -> RecommendationFolder:
podcasts: RecommendationFolder = RecommendationFolder(
sonic_episodes = await self._run_async(
self.conn.get_newest_podcasts, count=self._reco_limit
)
- chan_ids = set()
- chan_base_key = f"{self._cache_base_key}/podcast_channels/"
- async with TaskGroup() as tg:
- for ep in sonic_episodes:
- if ep.channel_id in chan_ids:
- continue
- tg.create_task(self._get_podcast_channel_async(ep.channel_id, chan_base_key))
- chan_ids.add(ep.channel_id)
-
for ep in sonic_episodes:
- chan = await self.mass.cache.get(
- key=ep.channel_id, base_key=chan_base_key, category=CACHE_CATEGORY_OPEN_SUBSONIC
- )
- if not chan:
- continue
- podcasts.items.append(parse_epsiode(self.instance_id, ep, chan))
+ if channel_info := await self._get_podcast_channel_async(ep.channel_id):
+ podcasts.items.append(parse_epsiode(self.instance_id, ep, channel_info))
return podcasts
async def _favorites_recommendation(self) -> RecommendationFolder:
from plexapi.server import PlexServer
from music_assistant.constants import UNKNOWN_ARTIST
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.auth import AuthenticationHelper
from music_assistant.helpers.tags import async_parse_tags
from music_assistant.helpers.util import parse_title_and_version
]
)
playlist.is_editable = not plex_playlist.smart
- playlist.cache_checksum = str(plex_playlist.updatedAt.timestamp())
-
return playlist
async def _parse_track(self, plex_track: PlexTrack) -> Track:
return track
+ @use_cache(3600) # Cache for 1 hour
async def search(
self,
search_query: str,
yield await self._parse_track(plex_track)
offset += page_size
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_album(self, prov_album_id: str) -> Album:
"""Get full album details by id."""
if plex_album := await self._get_data(prov_album_id, PlexAlbum):
msg = f"Item {prov_album_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
"""Get album tracks for given album id."""
plex_album: PlexAlbum = await self._get_data(prov_album_id, PlexAlbum)
tracks.append(track)
return tracks
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_artist(self, prov_artist_id: str) -> Artist:
"""Get full artist details by id."""
if prov_artist_id.startswith(FAKE_ARTIST_PREFIX):
msg = f"Item {prov_artist_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_track(self, prov_track_id: str) -> Track:
"""Get full track details by id."""
if plex_track := await self._get_data(prov_track_id, PlexTrack):
msg = f"Item {prov_track_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_playlist(self, prov_playlist_id: str) -> Playlist:
"""Get full playlist details by id."""
if plex_playlist := await self._get_data(prov_playlist_id, PlexPlaylist):
msg = f"Item {prov_playlist_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
result: list[Track] = []
result.append(track)
return result
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
"""Get a list of albums for the given artist."""
if not prov_artist_id.startswith(FAKE_ARTIST_PREFIX):
self.update_config_value(CONF_STORED_PODCASTS, stored_podcasts)
return True
- @use_cache(86400) # Cache for 24 hours
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_podcast(self, prov_podcast_id: str) -> Podcast:
"""Get podcast details."""
try:
raise MediaNotFoundError(f"Podcast {prov_podcast_id} not found")
- @use_cache(43200) # Cache for 12 hours
async def get_podcast_episodes(
self, prov_podcast_id: str
) -> AsyncGenerator[PodcastEpisode, None]:
from music_assistant_models.media_items import AudioFormat, Podcast, PodcastEpisode
from music_assistant_models.streamdetails import StreamDetails
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.compare import create_safe_string
from music_assistant.helpers.podcast_parsers import (
get_podcastparser_dict,
await self._cache_set_podcast()
yield await self._parse_podcast()
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_podcast(self, prov_podcast_id: str) -> Podcast:
"""Get full artist details by id."""
if prov_podcast_id != self.podcast_id:
raise RuntimeError(f"Podcast id not in provider: {prov_podcast_id}")
return await self._parse_podcast()
+ @use_cache(3600) # Cache for 1 hour
async def get_podcast_episode(self, prov_episode_id: str) -> PodcastEpisode:
"""Get (full) podcast episode details by id."""
for idx, episode in enumerate(self.parsed_podcast["episodes"]):
if mass_episode := self._parse_episode(episode, idx):
yield mass_episode
+ @use_cache(3600) # Cache for 1 hour
async def get_stream_details(self, item_id: str, media_type: MediaType) -> StreamDetails:
"""Get streamdetails for a track/radio."""
for episode in self.parsed_podcast["episodes"]:
async def _cache_get_podcast(self) -> dict[str, Any]:
parsed_podcast = await self.mass.cache.get(
key=self.podcast_id,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_PODCASTS,
default=None,
)
async def _cache_set_podcast(self) -> None:
await self.mass.cache.set(
key=self.podcast_id,
- base_key=self.lookup_key,
+ provider=self.instance_id,
category=CACHE_CATEGORY_PODCASTS,
data=self.parsed_podcast,
expiration=60 * 60 * 24, # 1 day
VARIOUS_ARTISTS_MBID,
VARIOUS_ARTISTS_NAME,
)
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.app_vars import app_var
from music_assistant.helpers.json import json_loads
from music_assistant.helpers.throttle_retry import ThrottlerManager, throttle_with_retries
msg = f"Login failed for user {self.config.get_value(CONF_USERNAME)}"
raise LoginFailed(msg)
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def search(
self, search_query: str, media_types: list[MediaType], limit: int = 5
) -> SearchResults:
if item and item["id"]:
yield self._parse_playlist(item)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_artist(self, prov_artist_id) -> Artist:
"""Get full artist details by id."""
params = {"artist_id": prov_artist_id}
msg = f"Item {prov_artist_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_album(self, prov_album_id) -> Album:
"""Get full album details by id."""
params = {"album_id": prov_album_id}
msg = f"Item {prov_album_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_track(self, prov_track_id) -> Track:
"""Get full track details by id."""
params = {"track_id": prov_track_id}
msg = f"Item {prov_track_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_playlist(self, prov_playlist_id) -> Playlist:
"""Get full playlist details by id."""
params = {"playlist_id": prov_playlist_id}
msg = f"Item {prov_playlist_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_album_tracks(self, prov_album_id) -> list[Track]:
"""Get all album tracks for given album id."""
params = {"album_id": prov_album_id}
if (item and item["id"])
]
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
result: list[Track] = []
result.append(track)
return result
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_artist_albums(self, prov_artist_id) -> list[Album]:
"""Get a list of albums for the given artist."""
result = await self._get_data(
if (item and item["id"] and str(item["artist"]["id"]) == prov_artist_id)
]
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_artist_toptracks(self, prov_artist_id) -> list[Track]:
"""Get a list of most popular tracks for the given artist."""
result = await self._get_data(
remotely_accessible=True,
)
]
- playlist.cache_checksum = str(playlist_obj["updated_at"])
return playlist
@lock
):
await self.library_add(await self.get_radio(db_row["provider_item_id"]))
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def search(
self, search_query: str, media_types: list[MediaType], limit: int = 10
) -> SearchResults:
self.update_config_value(CONF_STORED_RADIOS, stored_radios)
return True
- @use_cache(3600)
+ @use_cache(3600 * 6) # Cache for 6 hours
async def get_by_popularity(self) -> Sequence[Radio]:
"""Get radio stations by popularity."""
try:
except RadioBrowserError as err:
raise ProviderUnavailableError(f"Failed to fetch popular stations: {err}") from err
- @use_cache(3600)
+ @use_cache(3600 * 6) # Cache for 6 hours
async def get_by_votes(self) -> Sequence[Radio]:
"""Get radio stations by votes."""
try:
except RadioBrowserError as err:
raise ProviderUnavailableError(f"Failed to fetch stations by votes: {err}") from err
- @use_cache(3600 * 24)
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_country_folders(self, base_path: str) -> list[BrowseFolder]:
"""Get a list of country names as BrowseFolder."""
try:
items.append(folder)
return items
- @use_cache(3600 * 24)
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_language_folders(self, base_path: str) -> list[BrowseFolder]:
"""Get a list of language names as BrowseFolder."""
try:
for language in languages
]
- @use_cache(3600 * 24)
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_tag_folders(self, base_path: str) -> list[BrowseFolder]:
"""Get a list of tag names as BrowseFolder."""
try:
for tag in tags
]
- @use_cache(3600)
+ @use_cache(3600 * 24) # Cache for 1 day
async def get_by_country(self, country_code: str) -> list[Radio]:
"""Get radio stations by country."""
try:
f"Failed to fetch stations for country {country_code}: {err}"
) from err
- @use_cache(3600)
+ @use_cache(3600 * 24) # Cache for 1 day
async def get_by_language(self, language: str) -> list[Radio]:
"""Get radio stations by language."""
try:
f"Failed to fetch stations for language {language}: {err}"
) from err
- @use_cache(3600)
+ @use_cache(3600 * 24) # Cache for 1 day
async def get_by_tag(self, tag: str) -> list[Radio]:
"""Get radio stations by tag."""
try:
f"Failed to fetch stations for tag {tag}: {err}"
) from err
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_radio(self, prov_radio_id: str) -> Radio:
"""Get radio station details."""
try:
)
from music_assistant_models.streamdetails import StreamDetails
+from music_assistant.controllers.cache import use_cache
from music_assistant.models.music_provider import MusicProvider
from . import parsers
for channel_id in RADIO_PARADISE_CHANNELS:
yield self._parse_radio(channel_id)
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_radio(self, prov_radio_id: str) -> Radio:
"""Get full radio details by id."""
if prov_radio_id not in RADIO_PARADISE_CHANNELS:
from music_assistant_models.streamdetails import StreamDetails
from tenacity import RetryError
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.util import select_free_port
from music_assistant.helpers.webserver import Webserver
from music_assistant.models.music_provider import MusicProvider
if channel.is_favorite:
yield self._parse_radio(channel)
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_radio(self, prov_radio_id: str) -> Radio: # type: ignore[return]
"""Get full radio details by id."""
if prov_radio_id not in self._channels_by_id:
return self._current_stream_details
+ @use_cache(3600 * 3) # Cache for 3 hours
async def browse(self, path: str) -> Sequence[MediaItemType | ItemMapping | BrowseFolder]:
"""Browse this provider's items.
self._me = await self._soundcloud.get_account_details()
self._user_id = self._me["id"]
+ @use_cache(3600 * 48) # Cache for 48 hours
async def search(
self, search_query: str, media_types: list[MediaType], limit: int = 10
) -> SearchResults:
round(time.time() - time_start, 2),
)
- @use_cache(3600)
+ @use_cache(3600 * 3) # Cache for 3 hours
async def recommendations(self) -> list[RecommendationFolder]:
"""Get available recommendations."""
# Part 1, the mixed selections
folders.append(folder)
return folders
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_artist(self, prov_artist_id: str) -> Artist:
"""Get full artist details by id."""
artist_obj = await self._soundcloud.get_user_details(prov_artist_id)
self.logger.debug("Parse artist failed: %s", artist_obj, exc_info=error)
return artist
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_track(self, prov_track_id: str) -> Track:
"""Get full track details by id."""
track_obj = await self._soundcloud.get_track_details(prov_track_id)
self.logger.debug("Parse track failed: %s", track_obj, exc_info=error)
return track
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_playlist(self, prov_playlist_id: str) -> Playlist:
"""Get full playlist details by id."""
playlist_obj = await self._get_playlist_object(prov_playlist_id)
# Handle regular playlists
return await self._soundcloud.get_playlist_details(prov_playlist_id)
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
result: list[Track] = []
continue
return result
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_artist_toptracks(self, prov_artist_id: str) -> list[Track]:
"""Get a list of (max 500) tracks for the given artist."""
tracks_obj = await self._soundcloud.get_tracks_from_user(prov_artist_id, 500)
continue
return tracks
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
"""Retrieve a dynamic list of tracks based on the provided item."""
tracks_obj = await self._soundcloud.get_recommended(prov_track_id, limit)
return tracks
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_stream_details(self, item_id: str, media_type: MediaType) -> StreamDetails:
"""Return the content details for the given track when it will be streamed."""
url: str = await self._soundcloud.get_stream_url(track_id=item_id, presets=["mp3"])
)
playlist.metadata.images = parse_images(playlist_obj.get("images", []), provider.lookup_key)
- playlist.cache_checksum = str(playlist_obj["snapshot_id"])
return playlist
import os
import time
from collections.abc import AsyncGenerator
-from typing import TYPE_CHECKING, Any
+from typing import Any
import aiohttp
from music_assistant_models.enums import (
)
from .streaming import LibrespotStreamer
-if TYPE_CHECKING:
- from music_assistant_models.config_entries import ProviderConfig
- from music_assistant_models.provider import ProviderManifest
- from music_assistant import MusicAssistant
+class NotModifiedError(Exception):
+ """Exception raised when a resource has not been modified."""
class SpotifyProvider(MusicProvider):
_auth_info: dict[str, Any] | None = None
_sp_user: dict[str, Any] | None = None
_librespot_bin: str | None = None
+ _audiobooks_supported = False
custom_client_id_active: bool = False
throttler: ThrottlerManager
- def __init__(
- self,
- mass: MusicAssistant,
- manifest: ProviderManifest,
- config: ProviderConfig,
- supported_features: set[ProviderFeature],
- ) -> None:
- """Initialize the provider."""
- super().__init__(mass, manifest, config)
- self._base_supported_features = supported_features
-
async def handle_async_init(self) -> None:
"""Handle async initialization of the provider."""
self.cache_dir = os.path.join(self.mass.cache_path, self.instance_id)
"for supported countries."
)
- async def _test_audiobook_support(self) -> bool:
- """Test if audiobooks are supported in user's region."""
- try:
- await self._get_data("me/audiobooks", limit=1)
- return True
- except aiohttp.ClientResponseError as e:
- if e.status == 403:
- return False # Not available
- raise # Re-raise other HTTP errors
- except (MediaNotFoundError, ProviderUnavailableError):
- return False
-
@property
def audiobooks_supported(self) -> bool:
"""Check if audiobooks are supported for this user/region."""
- return getattr(self, "_audiobooks_supported", False)
+ return self._audiobooks_supported
@property
def audiobook_progress_sync_enabled(self) -> bool:
@property
def supported_features(self) -> set[ProviderFeature]:
"""Return the features supported by this Provider."""
- features = self._base_supported_features.copy()
+ features = self._supported_features.copy()
# Add audiobook features if enabled
if self.audiobooks_supported:
features.add(ProviderFeature.LIBRARY_AUDIOBOOKS)
features.add(ProviderFeature.LIBRARY_AUDIOBOOKS_EDIT)
-
if not self.custom_client_id_active:
# Spotify has killed the similar tracks api for developers
# https://developer.spotify.com/blog/2024-11-27-changes-to-the-web-api
return {*features, ProviderFeature.SIMILAR_TRACKS}
-
return features
@property
return str(self._sp_user["display_name"])
return None
+    ## Library retrieval methods (generators)
+    async def get_library_artists(self) -> AsyncGenerator[Artist, None]:
+        """Retrieve library artists from spotify."""
+        # The followed-artists endpoint is cursor-paginated: each response embeds
+        # the absolute URL of the next page in ["artists"]["next"].
+        endpoint = "me/following"
+        while True:
+            spotify_artists = await self._get_data(
+                endpoint,
+                type="artist",
+                limit=50,
+            )
+            for item in spotify_artists["artists"]["items"]:
+                if item and item["id"]:
+                    yield parse_artist(item, self)
+            if spotify_artists["artists"]["next"]:
+                # _get_data takes a relative endpoint, so strip the API base URL.
+                endpoint = spotify_artists["artists"]["next"]
+                endpoint = endpoint.replace("https://api.spotify.com/v1/", "")
+            else:
+                break
+
+    async def get_library_albums(self) -> AsyncGenerator[Album, None]:
+        """Retrieve library albums from the provider."""
+        # Saved albums come wrapped in an envelope; the album itself is in ["album"].
+        async for item in self._get_all_items("me/albums"):
+            if item["album"] and item["album"]["id"]:
+                yield parse_album(item["album"], self)
+
+    async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
+        """Retrieve library tracks from the provider."""
+        # Saved tracks come wrapped in an envelope; the track itself is in ["track"].
+        async for item in self._get_all_items("me/tracks"):
+            if item and item["track"]["id"]:
+                yield parse_track(item["track"], self)
+
+    async def get_library_podcasts(self) -> AsyncGenerator[Podcast, None]:
+        """Retrieve library podcasts from spotify."""
+        async for item in self._get_all_items("me/shows"):
+            if item["show"] and item["show"]["id"]:
+                show_obj = item["show"]
+                # Filter out audiobooks - they have a distinctive description format
+                # (heuristic: Spotify lists audiobooks under "shows" as well).
+                description = show_obj.get("description", "")
+                if description.startswith("Author(s):") and "Narrator(s):" in description:
+                    continue
+                yield parse_podcast(show_obj, self)
+
+    async def get_library_audiobooks(self) -> AsyncGenerator[Audiobook, None]:
+        """Retrieve library audiobooks from spotify."""
+        # Short-circuit when audiobooks are unavailable for this user/region.
+        if not self.audiobooks_supported:
+            return
+        async for item in self._get_all_items("me/audiobooks"):
+            if item and item["id"]:
+                # Parse the basic audiobook
+                audiobook = parse_audiobook(item, self)
+                # Add chapters from Spotify API data
+                await self._add_audiobook_chapters(audiobook)
+                yield audiobook
+
+    async def get_library_playlists(self) -> AsyncGenerator[Playlist, None]:
+        """Retrieve playlists from the provider."""
+        # Yield the synthetic "Liked Songs" playlist first, then the real ones.
+        yield await self._get_liked_songs_playlist()
+        async for item in self._get_all_items("me/playlists"):
+            if item and item["id"]:
+                yield parse_playlist(item, self)
+
# ruff: noqa: PLR0915
+ @use_cache()
async def search(
self, search_query: str, media_types: list[MediaType] | None = None, limit: int = 5
) -> SearchResults:
break
return searchresult
- async def get_library_artists(self) -> AsyncGenerator[Artist, None]:
- """Retrieve library artists from spotify."""
- endpoint = "me/following"
- while True:
- spotify_artists = await self._get_data(
- endpoint,
- type="artist",
- limit=50,
- )
- for item in spotify_artists["artists"]["items"]:
- if item and item["id"]:
- yield parse_artist(item, self)
- if spotify_artists["artists"]["next"]:
- endpoint = spotify_artists["artists"]["next"]
- endpoint = endpoint.replace("https://api.spotify.com/v1/", "")
- else:
- break
-
- async def get_library_albums(self) -> AsyncGenerator[Album, None]:
- """Retrieve library albums from the provider."""
- async for item in self._get_all_items("me/albums"):
- if item["album"] and item["album"]["id"]:
- yield parse_album(item["album"], self)
-
- async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
- """Retrieve library tracks from the provider."""
- async for item in self._get_all_items("me/tracks"):
- if item and item["track"]["id"]:
- yield parse_track(item["track"], self)
-
- async def get_library_podcasts(self) -> AsyncGenerator[Podcast, None]:
- """Retrieve library podcasts from spotify."""
- async for item in self._get_all_items("me/shows"):
- if item["show"] and item["show"]["id"]:
- show_obj = item["show"]
- # Filter out audiobooks - they have a distinctive description format
- description = show_obj.get("description", "")
- if description.startswith("Author(s):") and "Narrator(s):" in description:
- continue
- yield parse_podcast(show_obj, self)
-
- async def get_library_audiobooks(self) -> AsyncGenerator[Audiobook, None]:
- """Retrieve library audiobooks from spotify."""
- if not self.audiobooks_supported:
- return
- async for item in self._get_all_items("me/audiobooks"):
- if item and item["id"]:
- # Parse the basic audiobook
- audiobook = parse_audiobook(item, self)
- # Add chapters from Spotify API data
- await self._add_audiobook_chapters(audiobook)
- yield audiobook
-
- def _get_liked_songs_playlist_id(self) -> str:
- return f"{LIKED_SONGS_FAKE_PLAYLIST_ID_PREFIX}-{self.instance_id}"
-
- async def _get_liked_songs_playlist(self) -> Playlist:
- if self._sp_user is None:
- raise LoginFailed("User info not available - not logged in")
-
- liked_songs = Playlist(
- item_id=self._get_liked_songs_playlist_id(),
- provider=self.lookup_key,
- name=f"Liked Songs {self._sp_user['display_name']}", # TODO to be translated
- owner=self._sp_user["display_name"],
- provider_mappings={
- ProviderMapping(
- item_id=self._get_liked_songs_playlist_id(),
- provider_domain=self.domain,
- provider_instance=self.instance_id,
- url="https://open.spotify.com/collection/tracks",
- )
- },
- )
-
- liked_songs.is_editable = False # TODO Editing requires special endpoints
-
- # Add image to the playlist metadata
- image = MediaItemImage(
- type=ImageType.THUMB,
- path="https://misc.scdn.co/liked-songs/liked-songs-64.png",
- provider=self.lookup_key,
- remotely_accessible=True,
- )
- if liked_songs.metadata.images is None:
- liked_songs.metadata.images = UniqueList([image])
- else:
- liked_songs.metadata.add_image(image)
-
- liked_songs.cache_checksum = str(time.time())
-
- return liked_songs
-
- async def get_library_playlists(self) -> AsyncGenerator[Playlist, None]:
- """Retrieve playlists from the provider."""
- yield await self._get_liked_songs_playlist()
- async for item in self._get_all_items("me/playlists"):
- if item and item["id"]:
- yield parse_playlist(item, self)
-
+ @use_cache()
async def get_artist(self, prov_artist_id: str) -> Artist:
"""Get full artist details by id."""
artist_obj = await self._get_data(f"artists/{prov_artist_id}")
return parse_artist(artist_obj, self)
+ @use_cache()
async def get_album(self, prov_album_id: str) -> Album:
"""Get full album details by id."""
album_obj = await self._get_data(f"albums/{prov_album_id}")
return parse_album(album_obj, self)
+ @use_cache()
async def get_track(self, prov_track_id: str) -> Track:
"""Get full track details by id."""
track_obj = await self._get_data(f"tracks/{prov_track_id}")
return parse_track(track_obj, self)
+ @use_cache()
async def get_playlist(self, prov_playlist_id: str) -> Playlist:
"""Get full playlist details by id."""
if prov_playlist_id == self._get_liked_songs_playlist_id():
playlist_obj = await self._get_data(f"playlists/{prov_playlist_id}")
return parse_playlist(playlist_obj, self)
- @use_cache(86400) # 24 hours
+ @use_cache()
async def get_podcast(self, prov_podcast_id: str) -> Podcast:
"""Get full podcast details by id."""
podcast_obj = await self._get_data(f"shows/{prov_podcast_id}")
raise MediaNotFoundError(f"Podcast not found: {prov_podcast_id}")
return parse_podcast(podcast_obj, self)
- @use_cache(86400) # 24 hours
+ @use_cache()
async def get_audiobook(self, prov_audiobook_id: str) -> Audiobook:
"""Get full audiobook details by id."""
if not self.audiobooks_supported:
return audiobook
- async def _add_audiobook_chapters(self, audiobook: Audiobook) -> None:
- """Add chapter metadata to an audiobook from Spotify API data."""
- try:
- chapters_data = await self._get_audiobook_chapters_data(audiobook.item_id)
- if chapters_data:
- chapters = []
- total_duration_seconds = 0.0
-
- for idx, chapter in enumerate(chapters_data):
- duration_ms = chapter.get("duration_ms", 0)
- duration_seconds = duration_ms / 1000.0
-
- chapter_obj = MediaItemChapter(
- position=idx + 1,
- name=chapter.get("name", f"Chapter {idx + 1}"),
- start=total_duration_seconds,
- end=total_duration_seconds + duration_seconds,
- )
- chapters.append(chapter_obj)
- total_duration_seconds += duration_seconds
-
- audiobook.metadata.chapters = chapters
- audiobook.duration = int(total_duration_seconds)
-
- except (MediaNotFoundError, ResourceTemporarilyUnavailable, ProviderUnavailableError) as e:
- self.logger.warning(f"Failed to get chapters for audiobook {audiobook.item_id}: {e}")
-
- @use_cache(43200) # 12 hours - balances freshness with performance
- async def _get_podcast_episodes_data(self, prov_podcast_id: str) -> list[dict[str, Any]]:
- """Get raw episode data from Spotify API (cached).
-
- Args:
- prov_podcast_id: Spotify podcast ID
-
- Returns:
- List of episode data dictionaries
- """
- episodes_data: list[dict[str, Any]] = []
-
- try:
- async for item in self._get_all_items(
- f"shows/{prov_podcast_id}/episodes", market="from_token"
- ):
- if item and item.get("id"):
- episodes_data.append(item)
- except MediaNotFoundError:
- self.logger.warning("Podcast %s not found", prov_podcast_id)
- return []
- except ResourceTemporarilyUnavailable as err:
- self.logger.warning(
- "Temporary error fetching episodes for %s: %s", prov_podcast_id, err
- )
- raise
-
- return episodes_data
-
- @use_cache(7200) # 2 hours - shorter cache for resume point data
- async def _get_audiobook_chapters_data(self, prov_audiobook_id: str) -> list[dict[str, Any]]:
- """Get raw chapter data from Spotify API (cached).
-
- Args:
- prov_audiobook_id: Spotify audiobook ID
-
- Returns:
- List of chapter data dictionaries
- """
- chapters_data: list[dict[str, Any]] = []
-
- try:
- async for item in self._get_all_items(
- f"audiobooks/{prov_audiobook_id}/chapters", market="from_token"
- ):
- if item and item.get("id"):
- chapters_data.append(item)
- except MediaNotFoundError:
- self.logger.warning("Audiobook %s not found", prov_audiobook_id)
- return []
- except ResourceTemporarilyUnavailable as err:
- self.logger.warning(
- "Temporary error fetching chapters for %s: %s", prov_audiobook_id, err
- )
- raise
-
- return chapters_data
-
async def get_podcast_episodes(
self, prov_podcast_id: str
) -> AsyncGenerator[PodcastEpisode, None]:
"""Get all podcast episodes."""
# Get podcast object for context if available
- podcast: Podcast | None = None
- try:
- podcast = await self.mass.music.podcasts.get_provider_item(
- prov_podcast_id, self.instance_id
- )
- except MediaNotFoundError:
- # If not in MA library, get it via API (this is cached)
- try:
- podcast = await self.get_podcast(prov_podcast_id)
- except MediaNotFoundError:
- self.logger.warning(
- "Podcast with ID %s is no longer available on Spotify", prov_podcast_id
- )
+ podcast = await self.mass.music.podcasts.get_library_item_by_prov_id(
+ prov_podcast_id, self.instance_id
+ )
+ podcast = await self.get_podcast(prov_podcast_id)
- # Get cached episode data
+ # Get (cached) episode data
episodes_data = await self._get_podcast_episodes_data(prov_podcast_id)
# Parse and yield episodes with position
# The resume position will be automatically updated by MA's internal tracking
# and will be retrieved via get_audiobook() which combines MA + Spotify positions
+ @use_cache()
async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
"""Get all album tracks for given album id."""
return [
if item["id"]
]
+    @use_cache(3600 * 3)  # Cache for 3 hours
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
    """Get playlist tracks."""
    result: list[Track] = []
        if prov_playlist_id == self._get_liked_songs_playlist_id()
        else f"playlists/{prov_playlist_id}/tracks"
    )
+        # do single request to get the etag (which we use as checksum for caching)
+        cache_checksum = await self._get_etag(uri, limit=1, offset=0)
+
    page_size = 50
    offset = page * page_size
-        spotify_result = await self._get_data(uri, limit=page_size, offset=offset)
+        spotify_result = await self._get_data_with_caching(
+            uri, cache_checksum, limit=page_size, offset=offset
+        )
    for index, item in enumerate(spotify_result["items"], 1):
        if not (item and item["track"] and item["track"]["id"]):
            continue
    result.append(track)
    return result
+ @use_cache(86400 * 14) # 14 days
async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
"""Get a list of all albums for the given artist."""
return [
if (item and item["id"])
]
+ @use_cache(86400 * 14) # 14 days
async def get_artist_toptracks(self, prov_artist_id: str) -> list[Track]:
"""Get a list of 10 most popular tracks for the given artist."""
artist = await self.get_artist(prov_artist_id)
self._fix_create_playlist_api_bug(new_playlist)
return parse_playlist(new_playlist, self)
+ @use_cache(86400 * 14) # 14 days
async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
"""Retrieve a dynamic list of tracks based on the provided item."""
endpoint = "recommendations"
self.logger.info("Successfully logged in to Spotify as %s", userinfo["display_name"])
return auth_info
+    def _get_liked_songs_playlist_id(self) -> str:
+        """Return the per-instance item id used for the virtual Liked Songs playlist."""
+        return f"{LIKED_SONGS_FAKE_PLAYLIST_ID_PREFIX}-{self.instance_id}"
+
+    async def _get_liked_songs_playlist(self) -> Playlist:
+        """Build the virtual 'Liked Songs' playlist object for the logged-in user.
+
+        Raises:
+            LoginFailed: if user info is not available (not logged in).
+        """
+        if self._sp_user is None:
+            raise LoginFailed("User info not available - not logged in")
+
+        liked_songs = Playlist(
+            item_id=self._get_liked_songs_playlist_id(),
+            provider=self.lookup_key,
+            name=f"Liked Songs {self._sp_user['display_name']}",  # TODO to be translated
+            owner=self._sp_user["display_name"],
+            provider_mappings={
+                ProviderMapping(
+                    item_id=self._get_liked_songs_playlist_id(),
+                    provider_domain=self.domain,
+                    provider_instance=self.instance_id,
+                    url="https://open.spotify.com/collection/tracks",
+                )
+            },
+        )
+
+        liked_songs.is_editable = False  # TODO Editing requires special endpoints
+
+        # Add image to the playlist metadata
+        image = MediaItemImage(
+            type=ImageType.THUMB,
+            path="https://misc.scdn.co/liked-songs/liked-songs-64.png",
+            provider=self.lookup_key,
+            remotely_accessible=True,
+        )
+        if liked_songs.metadata.images is None:
+            liked_songs.metadata.images = UniqueList([image])
+        else:
+            liked_songs.metadata.add_image(image)
+
+        return liked_songs
+
+ async def _add_audiobook_chapters(self, audiobook: Audiobook) -> None:
+ """Add chapter metadata to an audiobook from Spotify API data."""
+ try:
+ chapters_data = await self._get_audiobook_chapters_data(audiobook.item_id)
+ if chapters_data:
+ chapters = []
+ total_duration_seconds = 0.0
+
+ for idx, chapter in enumerate(chapters_data):
+ duration_ms = chapter.get("duration_ms", 0)
+ duration_seconds = duration_ms / 1000.0
+
+ chapter_obj = MediaItemChapter(
+ position=idx + 1,
+ name=chapter.get("name", f"Chapter {idx + 1}"),
+ start=total_duration_seconds,
+ end=total_duration_seconds + duration_seconds,
+ )
+ chapters.append(chapter_obj)
+ total_duration_seconds += duration_seconds
+
+ audiobook.metadata.chapters = chapters
+ audiobook.duration = int(total_duration_seconds)
+
+ except (MediaNotFoundError, ResourceTemporarilyUnavailable, ProviderUnavailableError) as e:
+ self.logger.warning(f"Failed to get chapters for audiobook {audiobook.item_id}: {e}")
+
+    @use_cache(43200)  # 12 hours - balances freshness with performance
+    async def _get_podcast_episodes_data(self, prov_podcast_id: str) -> list[dict[str, Any]]:
+        """Get raw episode data from Spotify API (cached).
+
+        Args:
+            prov_podcast_id: Spotify podcast ID
+
+        Returns:
+            List of episode data dictionaries
+        """
+        episodes_data: list[dict[str, Any]] = []
+
+        try:
+            async for item in self._get_all_items(
+                f"shows/{prov_podcast_id}/episodes", market="from_token"
+            ):
+                # skip placeholder entries without an id
+                if item and item.get("id"):
+                    episodes_data.append(item)
+        except MediaNotFoundError:
+            # missing podcast: log and return an empty list
+            self.logger.warning("Podcast %s not found", prov_podcast_id)
+            return []
+        except ResourceTemporarilyUnavailable as err:
+            # transient error: log and re-raise so retry/throttle logic can handle it
+            self.logger.warning(
+                "Temporary error fetching episodes for %s: %s", prov_podcast_id, err
+            )
+            raise
+
+        return episodes_data
+
+    @use_cache(7200)  # 2 hours - keep chapter data reasonably fresh
+    async def _get_audiobook_chapters_data(self, prov_audiobook_id: str) -> list[dict[str, Any]]:
+        """Get raw chapter data from Spotify API (cached).
+
+        Args:
+            prov_audiobook_id: Spotify audiobook ID
+
+        Returns:
+            List of chapter data dictionaries
+        """
+        chapters_data: list[dict[str, Any]] = []
+
+        try:
+            async for item in self._get_all_items(
+                f"audiobooks/{prov_audiobook_id}/chapters", market="from_token"
+            ):
+                # skip placeholder entries without an id
+                if item and item.get("id"):
+                    chapters_data.append(item)
+        except MediaNotFoundError:
+            # missing audiobook: log and return an empty list
+            self.logger.warning("Audiobook %s not found", prov_audiobook_id)
+            return []
+        except ResourceTemporarilyUnavailable as err:
+            # transient error: log and re-raise so retry/throttle logic can handle it
+            self.logger.warning(
+                "Temporary error fetching chapters for %s: %s", prov_audiobook_id, err
+            )
+            raise
+
+        return chapters_data
+
async def _get_all_items(
self, endpoint: str, key: str = "items", **kwargs: Any
) -> AsyncGenerator[dict[str, Any], None]:
"""Get all items from a paged list."""
limit = 50
offset = 0
+ # do single request to get the etag (which we use as checksum for caching)
+ cache_checksum = await self._get_etag(endpoint, limit=1, offset=0, **kwargs)
while True:
- kwargs["limit"] = limit
- kwargs["offset"] = offset
- result = await self._get_data(endpoint, **kwargs)
+ result = await self._get_data_with_caching(
+ endpoint, cache_checksum=cache_checksum, limit=limit, offset=offset, **kwargs
+ )
offset += limit
if not result or key not in result or not result[key]:
break
if len(result[key]) < limit:
break
+ async def _get_data_with_caching(
+ self, endpoint: str, cache_checksum: str, **kwargs: Any
+ ) -> dict[str, Any]:
+ """Get data from api with caching."""
+ cache_key_parts = [endpoint]
+ for key in sorted(kwargs.keys()):
+ cache_key_parts.append(f"{key}{kwargs[key]}")
+ cache_key = ".".join(map(str, cache_key_parts))
+ if cached := await self.mass.cache.get(
+ cache_key, provider=self.instance_id, checksum=cache_checksum, allow_bypass=False
+ ):
+ return cached
+ result = await self._get_data(endpoint, **kwargs)
+ await self.mass.cache.set(
+ cache_key, result, provider=self.instance_id, checksum=cache_checksum
+ )
+ return result
+
+    @use_cache(120, allow_bypass=False)  # short cache for etags (subsequent calls use cached data)
+    async def _get_etag(self, endpoint: str, **kwargs: Any) -> str | None:
+        """Get etag for api endpoint (None if the api did not send one)."""
+        # _get_data copies the response's ETag header into the payload under "etag"
+        _res = await self._get_data(endpoint, **kwargs)
+        return _res.get("etag")
+
@throttle_with_retries
async def _get_data(self, endpoint: str, **kwargs: Any) -> dict[str, Any]:
"""Get data from api."""
locale = self.mass.metadata.locale.replace("_", "-")
language = locale.split("-")[0]
headers["Accept-Language"] = f"{locale}, {language};q=0.9, *;q=0.5"
+ self.logger.debug("handling get data %s with kwargs %s", url, kwargs)
async with (
self.mass.http_session.get(
url,
raise MediaNotFoundError(f"{endpoint} not found")
response.raise_for_status()
result: dict[str, Any] = await response.json(loads=json_loads)
+ if etag := response.headers.get("ETag"):
+ result["etag"] = etag
return result
@throttle_with_retries
self.logger.warning(
"FIXME: Spotify have fixed their Create Playlist API, this fix can be removed."
)
+
+    async def _test_audiobook_support(self) -> bool:
+        """Test if audiobooks are supported in user's region.
+
+        Probes the me/audiobooks endpoint with a minimal request (limit=1);
+        a 403 response means audiobooks are not available for this account.
+        """
+        try:
+            await self._get_data("me/audiobooks", limit=1)
+            return True
+        except aiohttp.ClientResponseError as e:
+            if e.status == 403:
+                return False  # Not available
+            raise  # Re-raise other HTTP errors
+        except (MediaNotFoundError, ProviderUnavailableError):
+            # treat "not found" / provider outage as unsupported rather than failing
+            return False
CONF_DISPLAY = "display"
CONF_VISUALIZATION = "visualization"
-CACHE_KEY_PREV_STATE = "slimproto_prev_state"
-
DEFAULT_PLAYER_VOLUME = 20
DEFAULT_VISUALIZATION = SlimVisualisationType.NONE
from music_assistant.models.player import DeviceInfo, Player, PlayerMedia
from .constants import (
- CACHE_KEY_PREV_STATE,
CONF_ENTRY_DISPLAY,
CONF_ENTRY_VISUALIZATION,
DEFAULT_PLAYER_VOLUME,
from .provider import SqueezelitePlayerProvider
+CACHE_CATEGORY_PREV_STATE = 0 # category for caching previous player state
+
+
class SqueezelitePlayer(Player):
"""Squeezelite Player implementation."""
# update all dynamic attributes
self.update_attributes()
# restore volume and power state
- if last_state := await self.mass.cache.get(player_id, base_key=CACHE_KEY_PREV_STATE):
+ if last_state := await self.mass.cache.get(
+ key=player_id, provider=self.provider.instance_id, category=CACHE_CATEGORY_PREV_STATE
+ ):
init_power = last_state[0]
init_volume = last_state[1]
else:
await self.client.power(powered)
# store last state in cache
await self.mass.cache.set(
- self.player_id, (powered, self.client.volume_level), base_key=CACHE_KEY_PREV_STATE
+ key=self.player_id,
+ data=(powered, self.client.volume_level),
+ provider=self.provider.instance_id,
+ category=CACHE_CATEGORY_PREV_STATE,
)
async def volume_set(self, volume_level: int) -> None:
await self.client.volume_set(volume_level)
# store last state in cache
await self.mass.cache.set(
- self.player_id, (self.client.powered, volume_level), base_key=CACHE_KEY_PREV_STATE
+ key=self.player_id,
+ data=(self.client.powered, volume_level),
+ provider=self.provider.instance_id,
+ category=CACHE_CATEGORY_PREV_STATE,
)
async def volume_mute(self, muted: bool) -> None:
await self.mass.music.albums.update_item_in_library(track.album.item_id, track.album)
return metadata
- @use_cache(86400 * 30)
+ @use_cache(86400 * 90, persistent=True) # Cache for 90 days
async def _get_data(self, endpoint: str, **kwargs: Any) -> dict[str, Any] | None:
"""Get data from api."""
url = f"https://theaudiodb.com/api/v1/json/{app_var(3)}/{endpoint}"
)
from music_assistant_models.streamdetails import StreamDetails
-from music_assistant.constants import CACHE_CATEGORY_DEFAULT, CACHE_CATEGORY_RECOMMENDATIONS
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.throttle_retry import ThrottlerManager, throttle_with_retries
from music_assistant.helpers.util import infer_album_type
from music_assistant.models.music_provider import MusicProvider
from .auth_manager import ManualAuthenticationHelper, TidalAuthManager
+from .constants import (
+ BROWSE_URL,
+ CACHE_CATEGORY_ISRC_MAP,
+ CACHE_CATEGORY_RECOMMENDATIONS,
+ CACHE_KEY_RECOMMENDATIONS_ALL,
+ CONF_ACTION_CLEAR_AUTH,
+ CONF_ACTION_COMPLETE_PKCE_LOGIN,
+ CONF_ACTION_START_PKCE_LOGIN,
+ CONF_AUTH_TOKEN,
+ CONF_EXPIRY_TIME,
+ CONF_OOPS_URL,
+ CONF_QUALITY,
+ CONF_REFRESH_TOKEN,
+ CONF_TEMP_SESSION,
+ CONF_USER_ID,
+ DEFAULT_LIMIT,
+ LABEL_COMPLETE_PKCE_LOGIN,
+ LABEL_OOPS_URL,
+ LABEL_START_PKCE_LOGIN,
+ RESOURCES_URL,
+)
from .tidal_page_parser import TidalPageParser
if TYPE_CHECKING:
from music_assistant.mass import MusicAssistant
from music_assistant.models import ProviderInstanceType
-TOKEN_TYPE = "Bearer"
-
-# Actions
-CONF_ACTION_START_PKCE_LOGIN = "start_pkce_login"
-CONF_ACTION_COMPLETE_PKCE_LOGIN = "auth"
-CONF_ACTION_CLEAR_AUTH = "clear_auth"
-
-# Intermediate steps
-CONF_TEMP_SESSION = "temp_session"
-CONF_OOPS_URL = "oops_url"
-
-# Config keys
-CONF_AUTH_TOKEN = "auth_token"
-CONF_REFRESH_TOKEN = "refresh_token"
-CONF_USER_ID = "user_id"
-CONF_EXPIRY_TIME = "expiry_time"
-CONF_COUNTRY_CODE = "country_code"
-CONF_SESSION_ID = "session_id"
-CONF_QUALITY = "quality"
-
-# Labels
-LABEL_START_PKCE_LOGIN = "start_pkce_login_label"
-LABEL_OOPS_URL = "oops_url_label"
-LABEL_COMPLETE_PKCE_LOGIN = "complete_pkce_login_label"
-
-BROWSE_URL = "https://tidal.com/browse"
-RESOURCES_URL = "https://resources.tidal.com/images"
-
-DEFAULT_LIMIT = 50
T = TypeVar("T")
api_result = await self._get_data(f"users/{prov_user_id}")
return self._extract_data(api_result)
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def search(
self,
search_query: str,
return parsed_results
+ @use_cache(3600 * 24) # Cache for 1 day
async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
"""Get similar tracks for given track id."""
try:
# ITEM RETRIEVAL METHODS
#
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_artist(self, prov_artist_id: str) -> Artist:
"""Get artist details for given artist id."""
try:
except (ClientError, KeyError, ValueError) as err:
raise MediaNotFoundError(f"Artist {prov_artist_id} not found") from err
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_album(self, prov_album_id: str) -> Album:
"""Get album details for given album id."""
try:
except (ClientError, KeyError, ValueError) as err:
raise MediaNotFoundError(f"Album {prov_album_id} not found") from err
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_track(self, prov_track_id: str) -> Track:
"""Get track details for given track id."""
try:
except (ClientError, KeyError, ValueError) as err:
raise MediaNotFoundError(f"Track {prov_track_id} not found") from err
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_playlist(self, prov_playlist_id: str) -> Playlist:
"""Get playlist details for given playlist id."""
# Check if this is a mix by ID prefix
except (ClientError, KeyError, ValueError) as err:
raise MediaNotFoundError(f"Mix {prov_mix_id} not found") from err
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
"""Get album tracks for given album id."""
try:
except (ClientError, KeyError, ValueError) as err:
raise MediaNotFoundError(f"Album {prov_album_id} not found") from err
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
"""Get a list of all albums for the given artist."""
try:
except (ClientError, KeyError, ValueError) as err:
raise MediaNotFoundError(f"Artist {prov_artist_id} not found") from err
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_artist_toptracks(self, prov_artist_id: str) -> list[Track]:
"""Get a list of 10 most popular tracks for the given artist."""
try:
except (ClientError, KeyError, ValueError) as err:
raise MediaNotFoundError(f"Artist {prov_artist_id} not found") from err
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks for either regular playlists or Tidal mixes."""
page_size = 200
async def recommendations(self) -> list[RecommendationFolder]:
"""Get this provider's recommendations organized into folders."""
# Check cache first
- cache_key = f"tidal_recommendations_{self.lookup_key}"
cached_recommendations: list[RecommendationFolder] = await self.mass.cache.get(
- cache_key, category=CACHE_CATEGORY_RECOMMENDATIONS, base_key=self.lookup_key
+ CACHE_KEY_RECOMMENDATIONS_ALL,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_RECOMMENDATIONS,
)
if cached_recommendations:
# Cache the results for 1 hour (3600 seconds)
await self.mass.cache.set(
- cache_key,
- results,
+ key=CACHE_KEY_RECOMMENDATIONS_ALL,
+ data=results,
+ provider=self.instance_id,
category=CACHE_CATEGORY_RECOMMENDATIONS,
- base_key=self.lookup_key,
expiration=3600,
)
async def _get_track_by_isrc(self, item_id: str) -> Track | None:
"""Get track by ISRC from library item, with caching."""
# Try to get from cache first
- cache_key = f"isrc_map_{item_id}"
cached_track_id = await self.mass.cache.get(
- cache_key, category=CACHE_CATEGORY_DEFAULT, base_key=self.lookup_key
+ item_id, provider=self.instance_id, category=CACHE_CATEGORY_ISRC_MAP
)
if cached_track_id:
except MediaNotFoundError:
# Track no longer exists, invalidate cache
await self.mass.cache.delete(
- cache_key, category=CACHE_CATEGORY_DEFAULT, base_key=self.lookup_key
+ item_id, provider=self.instance_id, category=CACHE_CATEGORY_ISRC_MAP
)
# Lookup by ISRC if no cache or cached track not found
# Cache the mapping for future use
await self.mass.cache.set(
- cache_key,
- track_id,
- category=CACHE_CATEGORY_DEFAULT,
- base_key=self.lookup_key,
+ key=item_id,
+ data=track_id,
+ provider=self.instance_id,
+ category=CACHE_CATEGORY_ISRC_MAP,
+ persistent=True,
+ expiration=(86400 * 90),
)
return await self.get_track(track_id)
self.logger.debug("Page '%s' indexed with: %s", page_path, parser.content_stats)
# Cache the parser data
- cache_key = f"tidal_page_{page_path}"
cache_data = {
"module_map": parser._module_map,
"content_map": parser._content_map,
"parsed_at": parser._parsed_at,
}
await self.mass.cache.set(
- cache_key,
- cache_data,
+ key=page_path,
+ data=cache_data,
+ provider=self.instance_id,
category=CACHE_CATEGORY_RECOMMENDATIONS,
- base_key=self.lookup_key,
expiration=self.page_cache_ttl,
)
)
# Metadata - different fields based on type
- if is_mix:
- playlist.cache_checksum = str(playlist_obj.get("updated", ""))
- else:
- playlist.cache_checksum = str(playlist_obj.get("lastUpdated", ""))
- if "popularity" in playlist_obj:
- playlist.metadata.popularity = playlist_obj.get("popularity", 0)
# Add the description from the subtitle for mixes
if is_mix:
--- /dev/null
+"""Constants for the Tidal music provider."""
+
+# API URLs
+from typing import Final
+
+BASE_URL = "https://api.tidal.com/v1"
+BASE_URL_V2 = "https://api.tidal.com/v2"
+OPEN_API_URL = "https://openapi.tidal.com/v2"
+BROWSE_URL = "https://tidal.com/browse"
+RESOURCES_URL = "https://resources.tidal.com/images"
+
+# Authentication
+TOKEN_TYPE = "Bearer"
+
+# Actions
+CONF_ACTION_START_PKCE_LOGIN = "start_pkce_login"
+CONF_ACTION_COMPLETE_PKCE_LOGIN = "auth"
+CONF_ACTION_CLEAR_AUTH = "clear_auth"
+
+# Intermediate steps
+CONF_TEMP_SESSION = "temp_session"
+CONF_OOPS_URL = "oops_url"
+
+# Config keys
+CONF_AUTH_TOKEN = "auth_token"
+CONF_REFRESH_TOKEN = "refresh_token"
+CONF_USER_ID = "user_id"
+CONF_EXPIRY_TIME = "expiry_time"
+CONF_COUNTRY_CODE = "country_code"
+CONF_SESSION_ID = "session_id"
+CONF_QUALITY = "quality"
+
+# Labels
+LABEL_START_PKCE_LOGIN = "start_pkce_login_label"
+LABEL_OOPS_URL = "oops_url_label"
+LABEL_COMPLETE_PKCE_LOGIN = "complete_pkce_login_label"
+
+# API defaults
+DEFAULT_LIMIT: Final[int] = 50
+
+# Cache keys
+CACHE_CATEGORY_DEFAULT: Final[int] = 0
+CACHE_CATEGORY_RECOMMENDATIONS: Final[int] = 1
+CACHE_CATEGORY_ISRC_MAP: Final[int] = 2
+CACHE_KEY_RECOMMENDATIONS_ALL: Final[str] = "all_recommendations"
from music_assistant_models.enums import MediaType
-from music_assistant.constants import CACHE_CATEGORY_RECOMMENDATIONS
+from .constants import CACHE_CATEGORY_RECOMMENDATIONS
if TYPE_CHECKING:
from music_assistant_models.media_items import Album, Artist, Playlist, Track
@classmethod
async def from_cache(cls, provider: TidalProvider, page_path: str) -> TidalPageParser | None:
"""Create a parser instance from cached data if available and valid."""
- cache_key = f"tidal_page_{page_path}"
cached_data = await provider.mass.cache.get(
- cache_key,
+ page_path,
+ provider=provider.instance_id,
category=CACHE_CATEGORY_RECOMMENDATIONS,
- base_key=provider.lookup_key,
)
if not cached_data:
return None
from music_assistant_models.streamdetails import StreamDetails
from music_assistant.constants import CONF_USERNAME
+from music_assistant.controllers.cache import use_cache
from music_assistant.helpers.throttle_retry import Throttler
from music_assistant.models.music_provider import MusicProvider
from music_assistant import MusicAssistant
from music_assistant.models import ProviderInstanceType
+
+CACHE_CATEGORY_STREAMS = 1
+
SUPPORTED_FEATURES = {
ProviderFeature.LIBRARY_RADIOS,
ProviderFeature.BROWSE,
async for item in parse_items(data["body"]):
yield item
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_radio(self, prov_radio_id: str) -> Radio:
"""Get radio station details."""
if not prov_radio_id.startswith("http"):
if "--" in prov_radio_id:
- prov_radio_id, media_type = prov_radio_id.split("--", 1)
- else:
- media_type = None
+ # handle this for backwards compatibility
+ prov_radio_id = prov_radio_id.split("--")[0]
params = {"c": "composite", "detail": "listing", "id": prov_radio_id}
result = await self.__get_data("Describe.ashx", **params)
if result and result.get("body") and result["body"][0].get("children"):
item = result["body"][0]["children"][0]
stream_info = await self._get_stream_info(prov_radio_id)
- for stream in stream_info:
- if media_type and stream["media_type"] != media_type:
- continue
- return self._parse_radio(item, [stream])
+ return self._parse_radio(item, stream_info)
# fallback - e.g. for handle custom urls ...
async for radio in self.get_library_radios():
if radio.item_id == prov_radio_id:
name = name.split(" (")[0]
if stream_info is not None:
- # stream info is provided: parse stream objects into provider mappings
+ # stream info is provided: parse first stream into provider mapping
+ # assuming here that the streams are sorted by quality (bitrate)
+ # and the first one is the best quality
+ preferred_stream = stream_info[0]
radio = Radio(
item_id=details["preset_id"],
provider=self.lookup_key,
name=name,
provider_mappings={
ProviderMapping(
- item_id=f"{details['preset_id']}--{stream['media_type']}",
+ item_id=details["preset_id"],
provider_domain=self.domain,
provider_instance=self.instance_id,
audio_format=AudioFormat(
- content_type=ContentType.try_parse(stream["media_type"]),
- bit_rate=stream.get("bitrate", 128),
+ content_type=ContentType.try_parse(preferred_stream["media_type"]),
+ bit_rate=preferred_stream.get("bitrate", 128),
),
- details=stream["url"],
+ details=preferred_stream["url"],
available=details.get("is_available", True),
)
- for stream in stream_info
},
)
else:
async def _get_stream_info(self, preset_id: str) -> list[dict]:
"""Get stream info for a radio station."""
- cache_base_key = "tunein_stream"
- if cache := await self.mass.cache.get(preset_id, base_key=cache_base_key):
+ if cache := await self.mass.cache.get(
+ preset_id, provider=self.instance_id, category=CACHE_CATEGORY_STREAMS
+ ):
return cache
result = (await self.__get_data("Tune.ashx", id=preset_id))["body"]
- await self.mass.cache.set(preset_id, result, base_key=cache_base_key)
+ await self.mass.cache.set(
+ key=preset_id, data=result, provider=self.instance_id, category=CACHE_CATEGORY_STREAMS
+ )
return result
async def get_stream_details(self, item_id: str, media_type: MediaType) -> StreamDetails:
can_seek=False,
)
if "--" in item_id:
- stream_item_id, media_type = item_id.split("--", 1)
- else:
- media_type = None
- stream_item_id = item_id
- for stream in await self._get_stream_info(stream_item_id):
- if media_type and stream["media_type"] != media_type:
- continue
+ # handle this for backwards compatibility
+ item_id = item_id.split("--")[0]
+ if stream_info := await self._get_stream_info(item_id):
+ # assuming here that the streams are sorted by quality (bitrate)
+ # and the first one is the best quality
+ preferred_stream = stream_info[0]
return StreamDetails(
provider=self.lookup_key,
item_id=item_id,
audio_format=AudioFormat(content_type=ContentType.UNKNOWN),
media_type=MediaType.RADIO,
stream_type=StreamType.HTTP,
- path=stream["url"],
+ path=preferred_stream["url"],
allow_seek=False,
can_seek=False,
)
msg = f"Unable to retrieve stream details for {item_id}"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def search(
self, search_query: str, media_types: list[MediaType], limit: int = 10
) -> SearchResults:
if not await self._user_has_ytm_premium():
raise LoginFailed("User does not have Youtube Music Premium")
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def search(
self, search_query: str, media_types=list[MediaType], limit: int = 5
) -> SearchResults:
for podcast in podcasts_obj:
yield self._parse_podcast(podcast)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_album(self, prov_album_id) -> Album:
"""Get full album details by id."""
if album_obj := await get_album(prov_album_id=prov_album_id, language=self.language):
msg = f"Item {prov_album_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
"""Get album tracks for given album id."""
album_obj = await get_album(prov_album_id=prov_album_id, language=self.language)
tracks.append(track)
return tracks
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_artist(self, prov_artist_id) -> Artist:
"""Get full artist details by id."""
if artist_obj := await get_artist(
msg = f"Item {prov_artist_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 24 * 30) # Cache for 30 days
async def get_track(self, prov_track_id) -> Track:
"""Get full track details by id."""
if track_obj := await get_track(
msg = f"Item {prov_track_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_playlist(self, prov_playlist_id) -> Playlist:
"""Get full playlist details by id."""
# Grab the playlist id from the full url in case of personal playlists
msg = f"Item {prov_playlist_id} not found"
raise MediaNotFoundError(msg)
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Return playlist tracks for the given provider playlist id."""
if page > 0:
# YTM doesn't seem to support paging so we ignore offset and limit
return result
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_artist_albums(self, prov_artist_id) -> list[Album]:
"""Get a list of albums for the given artist."""
artist_obj = await get_artist(prov_artist_id=prov_artist_id, headers=self._headers)
return albums
return []
+ @use_cache(3600 * 24 * 7) # Cache for 7 days
async def get_artist_toptracks(self, prov_artist_id) -> list[Track]:
"""Get a list of 25 most popular tracks for the given artist."""
artist_obj = await get_artist(prov_artist_id=prov_artist_id, headers=self._headers)
return playlist_tracks[:25]
return []
+ @use_cache(3600 * 24 * 14) # Cache for 14 days
async def get_podcast(self, prov_podcast_id: str) -> Podcast:
"""Get the full details of a Podcast."""
podcast_obj = await get_podcast(prov_podcast_id, headers=self._headers)
episode.position = ep_index
yield episode
+ @use_cache(3600 * 3) # Cache for 3 hours
async def get_podcast_episode(self, prov_episode_id: str) -> PodcastEpisode:
"""Get a single Podcast Episode."""
podcast_id, episode_id = prov_episode_id.split(PODCAST_EPISODE_SPLITTER)
user=self._yt_user,
)
+ @use_cache(3600 * 24) # Cache for 1 day
async def get_similar_tracks(self, prov_track_id, limit=25) -> list[Track]:
"""Retrieve a dynamic list of tracks based on the provided item."""
result = []
playlist.owner = authors["name"]
else:
playlist.owner = self.name
- playlist.cache_checksum = playlist_obj.get("checksum")
return playlist
def _parse_track(self, track_obj: dict) -> Track:
"ifaddr==0.2.0",
"mashumaro==3.16",
"music-assistant-frontend==2.16.1",
- "music-assistant-models==1.1.58",
+ "music-assistant-models==1.1.59",
"mutagen==1.47.0",
"orjson==3.11.3",
"pillow==11.3.0",
lyricsgenius==3.7.2
mashumaro==3.16
music-assistant-frontend==2.16.1
-music-assistant-models==1.1.58
+music-assistant-models==1.1.59
mutagen==1.47.0
orjson==3.11.3
pillow==11.3.0
# ---
# name: test_parse_playlist[gonic-sample.playlist]
dict({
- 'cache_checksum': None,
'external_ids': list([
]),
'favorite': False,