Refactor caching (#2430)
author: Marcel van der Veldt <m.vanderveldt@outlook.com>
Sat, 27 Sep 2025 00:10:24 +0000 (02:10 +0200)
committer: GitHub <noreply@github.com>
Sat, 27 Sep 2025 00:10:24 +0000 (02:10 +0200)
58 files changed:
music_assistant/constants.py
music_assistant/controllers/cache.py
music_assistant/controllers/media/albums.py
music_assistant/controllers/media/artists.py
music_assistant/controllers/media/base.py
music_assistant/controllers/media/playlists.py
music_assistant/controllers/metadata.py
music_assistant/controllers/music.py
music_assistant/controllers/player_queues.py
music_assistant/controllers/players.py
music_assistant/helpers/audio.py
music_assistant/helpers/database.py
music_assistant/helpers/throttle_retry.py
music_assistant/helpers/util.py
music_assistant/mass.py
music_assistant/models/music_provider.py
music_assistant/providers/airplay/constants.py
music_assistant/providers/airplay/player.py
music_assistant/providers/airplay/provider.py
music_assistant/providers/apple_music/__init__.py
music_assistant/providers/ard_audiothek/__init__.py
music_assistant/providers/audible/audible_helper.py
music_assistant/providers/audiobookshelf/__init__.py
music_assistant/providers/builtin/__init__.py
music_assistant/providers/deezer/__init__.py
music_assistant/providers/fanarttv/__init__.py
music_assistant/providers/filesystem_local/__init__.py
music_assistant/providers/filesystem_local/constants.py
music_assistant/providers/genius_lyrics/__init__.py
music_assistant/providers/gpodder/__init__.py
music_assistant/providers/ibroadcast/__init__.py
music_assistant/providers/itunes_podcasts/__init__.py
music_assistant/providers/jellyfin/__init__.py
music_assistant/providers/lrclib/__init__.py
music_assistant/providers/musicbrainz/__init__.py
music_assistant/providers/nugs/__init__.py
music_assistant/providers/opensubsonic/sonic_provider.py
music_assistant/providers/plex/__init__.py
music_assistant/providers/podcast-index/provider.py
music_assistant/providers/podcastfeed/__init__.py
music_assistant/providers/qobuz/__init__.py
music_assistant/providers/radiobrowser/__init__.py
music_assistant/providers/radioparadise/provider.py
music_assistant/providers/siriusxm/__init__.py
music_assistant/providers/soundcloud/__init__.py
music_assistant/providers/spotify/parsers.py
music_assistant/providers/spotify/provider.py
music_assistant/providers/squeezelite/constants.py
music_assistant/providers/squeezelite/player.py
music_assistant/providers/theaudiodb/__init__.py
music_assistant/providers/tidal/__init__.py
music_assistant/providers/tidal/constants.py [new file with mode: 0644]
music_assistant/providers/tidal/tidal_page_parser.py
music_assistant/providers/tunein/__init__.py
music_assistant/providers/ytmusic/__init__.py
pyproject.toml
requirements_all.txt
tests/providers/opensubsonic/__snapshots__/test_parsers.ambr

index a0bb0d41e240ba74d2acc8051386b09ce6401da1..9a88ddfc5b92d53ae72e0949a2e99fd64b6911d0 100644 (file)
@@ -952,27 +952,6 @@ DEFAULT_PCM_FORMAT = AudioFormat(
     channels=2,
 )
 
-
-# CACHE categories
-
-CACHE_CATEGORY_DEFAULT: Final[int] = 0
-CACHE_CATEGORY_MUSIC_SEARCH: Final[int] = 1
-CACHE_CATEGORY_MUSIC_ALBUM_TRACKS: Final[int] = 2
-CACHE_CATEGORY_MUSIC_ARTIST_TRACKS: Final[int] = 3
-CACHE_CATEGORY_MUSIC_ARTIST_ALBUMS: Final[int] = 4
-CACHE_CATEGORY_MUSIC_PLAYLIST_TRACKS: Final[int] = 5
-CACHE_CATEGORY_MUSIC_PROVIDER_ITEM: Final[int] = 6
-CACHE_CATEGORY_PLAYER_QUEUE_STATE: Final[int] = 7
-CACHE_CATEGORY_MEDIA_INFO: Final[int] = 8
-CACHE_CATEGORY_LIBRARY_ITEMS: Final[int] = 9
-CACHE_CATEGORY_PLAYERS: Final[int] = 10
-CACHE_CATEGORY_RECOMMENDATIONS: Final[int] = 11
-CACHE_CATEGORY_OPEN_SUBSONIC: Final[int] = 12
-
-# CACHE base keys
-CACHE_KEY_PLAYER_POWER: Final[str] = "player_power"
-
-
 # extra data / extra attributes keys
 ATTR_FAKE_POWER: Final[str] = "fake_power"
 ATTR_FAKE_VOLUME: Final[str] = "fake_volume_level"
index 8ca5116fea4091f79b15db5b8387582688f6e7b0..69931d8121bf30597756e11164e2083a4a4c0fe9 100644 (file)
@@ -8,13 +8,16 @@ import logging
 import os
 import time
 from collections import OrderedDict
-from collections.abc import Callable, Iterator, MutableMapping
-from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar
+from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine, Iterator, MutableMapping
+from contextlib import asynccontextmanager
+from contextvars import ContextVar
+from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar, get_type_hints
 
 from music_assistant_models.config_entries import ConfigEntry, ConfigValueType
 from music_assistant_models.enums import ConfigEntryType
 
 from music_assistant.constants import DB_TABLE_CACHE, DB_TABLE_SETTINGS, MASS_LOGGER_NAME
+from music_assistant.helpers.api import parse_value
 from music_assistant.helpers.database import DatabaseConnection
 from music_assistant.helpers.json import json_dumps, json_loads
 from music_assistant.models.core_controller import CoreController
@@ -22,9 +25,15 @@ from music_assistant.models.core_controller import CoreController
 if TYPE_CHECKING:
     from music_assistant_models.config_entries import CoreConfig
 
+    from music_assistant.models.provider import Provider
+
+
 LOGGER = logging.getLogger(f"{MASS_LOGGER_NAME}.cache")
 CONF_CLEAR_CACHE = "clear_cache"
-DB_SCHEMA_VERSION = 5
+DEFAULT_CACHE_EXPIRATION = 86400 * 30  # 30 days
+DB_SCHEMA_VERSION = 6
+
+BYPASS_CACHE: ContextVar[bool] = ContextVar("BYPASS_CACHE", default=False)
 
 
 class CacheController(CoreController):
@@ -81,34 +90,36 @@ class CacheController(CoreController):
     async def get(
         self,
         key: str,
-        checksum: str | None = None,
-        default=None,
+        provider: str = "default",
         category: int = 0,
-        base_key: str = "",
+        checksum: str | None = None,
+        default: Any = None,
+        allow_bypass: bool = True,
     ) -> Any:
         """Get object from cache and return the results.
 
-        cache_key: the (unique) name of the cache object as reference
-        checksum: optional argument to check if the checksum in the
-                    cacheobject matches the checksum provided
-        category: optional category to group cache objects
-        base_key: optional base key to group cache objects
+        - key: the (unique) lookup key of the cache object as reference
+        - provider: optional provider id to group cache objects
+        - category: optional category to group cache objects
+        - checksum: optional argument to check if the checksum in the
+                    cache object matches the checksum provided
+        - default: value to return if no cache object is found
         """
-        if not key:
-            return None
+        assert key, "No key provided"
+        if allow_bypass and BYPASS_CACHE.get():
+            return default
         cur_time = int(time.time())
         if checksum is not None and not isinstance(checksum, str):
             checksum = str(checksum)
-
         # try memory cache first
-        memory_key = f"{category}/{base_key}/{key}"
+        memory_key = f"{provider}/{category}/{key}"
         cache_data = self._mem_cache.get(memory_key)
         if cache_data and (not checksum or cache_data[1] == checksum) and cache_data[2] >= cur_time:
             return cache_data[0]
         # fall back to db cache
         if (
             db_row := await self.database.get_row(
-                DB_TABLE_CACHE, {"category": category, "base_key": base_key, "sub_key": key}
+                DB_TABLE_CACHE, {"category": category, "provider": provider, "key": key}
             )
         ) and (not checksum or (db_row["checksum"] == checksum and db_row["expires"] >= cur_time)):
             try:
@@ -131,17 +142,34 @@ class CacheController(CoreController):
         return default
 
     async def set(
-        self, key, data, checksum="", expiration=(86400 * 7), category: int = 0, base_key: str = ""
+        self,
+        key: str,
+        data: Any,
+        expiration: int = DEFAULT_CACHE_EXPIRATION,
+        provider: str = "default",
+        category: int = 0,
+        checksum: str | None = None,
+        persistent: bool = False,
     ) -> None:
-        """Set data in cache."""
+        """
+        Set data in cache.
+
+        - key: the (unique) lookup key of the cache object as reference
+        - data: the actual data to store in the cache
+        - expiration: time in seconds the cache object should be valid
+        - provider: optional provider id to group cache objects
+        - category: optional category to group cache objects
+        - checksum: optional argument to store with the cache object
+        - persistent: if True the cache object will not be deleted when clearing the cache
+        """
         if not key:
             return
         if checksum is not None and not isinstance(checksum, str):
             checksum = str(checksum)
         expires = int(time.time() + expiration)
-        memory_key = f"{category}/{base_key}/{key}"
+        memory_key = f"{provider}/{category}/{key}"
         self._mem_cache[memory_key] = (data, checksum, expires)
-        if (expires - time.time()) < 3600 * 12:
+        if (expires - time.time()) < 1800:
             # do not cache items in db with short expiration
             return
         data = await asyncio.to_thread(json_dumps, data)
@@ -149,27 +177,28 @@ class CacheController(CoreController):
             DB_TABLE_CACHE,
             {
                 "category": category,
-                "base_key": base_key,
-                "sub_key": key,
+                "provider": provider,
+                "key": key,
                 "expires": expires,
                 "checksum": checksum,
                 "data": data,
+                "persistent": persistent,
             },
         )
 
     async def delete(
-        self, key: str | None, category: int | None = None, base_key: str | None = None
+        self, key: str | None, category: int | None = None, provider: str | None = None
     ) -> None:
         """Delete data from cache."""
         match: dict[str, str | int] = {}
         if key is not None:
-            match["sub_key"] = key
+            match["key"] = key
         if category is not None:
             match["category"] = category
-        if base_key is not None:
-            match["base_key"] = base_key
-        if key is not None and category is not None and base_key is not None:
-            self._mem_cache.pop(f"{category}/{base_key}/{key}", None)
+        if provider is not None:
+            match["provider"] = provider
+        if key is not None and category is not None and provider is not None:
+            self._mem_cache.pop(f"{provider}/{category}/{key}", None)
         else:
             self._mem_cache.clear()
         await self.database.delete(DB_TABLE_CACHE, match)
@@ -177,19 +206,22 @@ class CacheController(CoreController):
     async def clear(
         self,
         key_filter: str | None = None,
-        category: int | None = None,
-        base_key_filter: str | None = None,
+        category_filter: int | None = None,
+        provider_filter: str | None = None,
+        include_persistent: bool = False,
     ) -> None:
         """Clear all/partial items from cache."""
         self._mem_cache.clear()
         self.logger.info("Clearing database...")
         query_parts: list[str] = []
-        if category is not None:
-            query_parts.append(f"category = {category}")
-        if base_key_filter is not None:
-            query_parts.append(f"base_key LIKE '%{base_key_filter}%'")
+        if category_filter is not None:
+            query_parts.append(f"category = {category_filter}")
+        if provider_filter is not None:
+            query_parts.append(f"provider LIKE '%{provider_filter}%'")
         if key_filter is not None:
-            query_parts.append(f"sub_key LIKE '%{key_filter}%'")
+            query_parts.append(f"key LIKE '%{key_filter}%'")
+        if not include_persistent:
+            query_parts.append("persistent = 0")
         query = "WHERE " + " AND ".join(query_parts) if query_parts else None
         await self.database.delete(DB_TABLE_CACHE, query=query)
         self.logger.info("Clearing database DONE")
@@ -209,6 +241,15 @@ class CacheController(CoreController):
             await asyncio.sleep(0)  # yield to eventloop
         self.logger.debug("Automatic cleanup finished (cleaned up %s records)", cleaned_records)
 
+    @asynccontextmanager
+    async def handle_refresh(self, bypass: bool) -> AsyncGenerator[None, None]:
+        """Handle the cache bypass."""
+        try:
+            token = BYPASS_CACHE.set(bypass)
+            yield None
+        finally:
+            BYPASS_CACHE.reset(token)
+
     async def _setup_database(self) -> None:
         """Initialize database."""
         db_path = os.path.join(self.mass.cache_path, "cache.db")
@@ -267,12 +308,13 @@ class CacheController(CoreController):
             f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_CACHE}(
                     [id] INTEGER PRIMARY KEY AUTOINCREMENT,
                     [category] INTEGER NOT NULL DEFAULT 0,
-                    [base_key] TEXT NOT NULL,
-                    [sub_key] TEXT NOT NULL,
+                    [key] TEXT NOT NULL,
+                    [provider] TEXT NOT NULL,
                     [expires] INTEGER NOT NULL,
-                    [data] TEXT,
+                    [data] TEXT NULL,
                     [checksum] TEXT NULL,
-                    UNIQUE(category, base_key, sub_key)
+                    [persistent] INTEGER NOT NULL DEFAULT 0,
+                    UNIQUE(category, key, provider)
                     )"""
         )
 
@@ -285,19 +327,27 @@ class CacheController(CoreController):
             f"ON {DB_TABLE_CACHE}(category);"
         )
         await self.database.execute(
-            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_base_key_idx "
-            f"ON {DB_TABLE_CACHE}(base_key);"
+            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_key_idx ON {DB_TABLE_CACHE}(key);"
+        )
+        await self.database.execute(
+            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_provider_idx "
+            f"ON {DB_TABLE_CACHE}(provider);"
         )
         await self.database.execute(
-            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_sub_key_idx ON {DB_TABLE_CACHE}(sub_key);"
+            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_key_idx "
+            f"ON {DB_TABLE_CACHE}(category,key);"
         )
         await self.database.execute(
-            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_base_key_idx "
-            f"ON {DB_TABLE_CACHE}(category,base_key);"
+            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_provider_idx "
+            f"ON {DB_TABLE_CACHE}(category,provider);"
         )
         await self.database.execute(
-            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_base_key_sub_key_idx "
-            f"ON {DB_TABLE_CACHE}(category,base_key,sub_key);"
+            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_category_key_provider_idx "
+            f"ON {DB_TABLE_CACHE}(category,key,provider);"
+        )
+        await self.database.execute(
+            f"CREATE INDEX IF NOT EXISTS {DB_TABLE_CACHE}_key_provider_idx "
+            f"ON {DB_TABLE_CACHE}(key,provider);"
         )
         await self.database.commit()
 
@@ -312,49 +362,66 @@ Param = ParamSpec("Param")
 RetType = TypeVar("RetType")
 
 
+ProviderT = TypeVar("ProviderT", bound="Provider | CoreController")
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
 def use_cache(
-    expiration: int = 86400 * 30,
+    expiration: int = DEFAULT_CACHE_EXPIRATION,
     category: int = 0,
-) -> Callable[[Callable[Param, RetType]], Callable[Param, RetType]]:
+    persistent: bool = False,
+    cache_checksum: str | None = None,
+    allow_bypass: bool = True,
+) -> Callable[
+    [Callable[Concatenate[ProviderT, P], Awaitable[R]]],
+    Callable[Concatenate[ProviderT, P], Coroutine[Any, Any, R]],
+]:
     """Return decorator that can be used to cache a method's result."""
 
-    def wrapper(func: Callable[Param, RetType]) -> Callable[Param, RetType]:
+    def _decorator(
+        func: Callable[Concatenate[ProviderT, P], Awaitable[R]],
+    ) -> Callable[Concatenate[ProviderT, P], Coroutine[Any, Any, R]]:
         @functools.wraps(func)
-        async def wrapped(*args: Param.args, **kwargs: Param.kwargs):
-            method_class = args[0]
-            method_class_name = method_class.__class__.__name__
-            cache_base_key = f"{method_class_name}.{func.__name__}"
-            cache_sub_key_parts = []
-            skip_cache = kwargs.pop("skip_cache", False)
-            cache_checksum = kwargs.pop("cache_checksum", "")
-            if len(args) > 1:
-                cache_sub_key_parts += args[1:]
-            for key in sorted(kwargs.keys()):
-                cache_sub_key_parts.append(f"{key}{kwargs[key]}")
-            cache_sub_key = ".".join(cache_sub_key_parts)
+        async def wrapper(self: ProviderT, *args: P.args, **kwargs: P.kwargs) -> R:
+            cache = self.mass.cache
+            provider_id = getattr(self, "provider_id", self.domain)
 
-            cachedata = await method_class.cache.get(
-                cache_sub_key, checksum=cache_checksum, category=category, base_key=cache_base_key
+            # create a cache key dynamically based on the (remaining) args/kwargs
+            cache_key_parts = [func.__name__, *args]
+            for key in sorted(kwargs.keys()):
+                cache_key_parts.append(f"{key}{kwargs[key]}")
+            cache_key = ".".join(map(str, cache_key_parts))
+            # try to retrieve data from the cache
+            cachedata = await cache.get(
+                cache_key,
+                provider=provider_id,
+                checksum=cache_checksum,
+                category=category,
+                allow_bypass=allow_bypass,
             )
-
-            if not skip_cache and cachedata is not None:
-                return cachedata
-            result = await func(*args, **kwargs)
-            asyncio.create_task(
-                method_class.cache.set(
-                    cache_sub_key,
-                    result,
+            if cachedata is not None:
+                type_hints = get_type_hints(func)
+                return parse_value(func.__name__, cachedata, type_hints["return"])
+            # get data from method/provider
+            result = await func(self, *args, **kwargs)
+            # store result in cache (but don't await)
+            self.mass.create_task(
+                cache.set(
+                    key=cache_key,
+                    data=result,
                     expiration=expiration,
-                    checksum=cache_checksum,
+                    provider=provider_id,
                     category=category,
-                    base_key=cache_base_key,
+                    checksum=cache_checksum,
+                    persistent=persistent,
                 )
             )
             return result
 
-        return wrapped
+        return wrapper
 
-    return wrapper
+    return _decorator
 
 
 class MemoryCache(MutableMapping):
index 0e00545642a04f5ba9804afd29cfb028034e8a0b..3d059746783b46a35ccac2718dee10b692e87f43 100644 (file)
@@ -4,19 +4,13 @@ from __future__ import annotations
 
 import contextlib
 from collections.abc import Iterable
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
 
 from music_assistant_models.enums import AlbumType, MediaType, ProviderFeature
 from music_assistant_models.errors import InvalidDataError, MediaNotFoundError, MusicAssistantError
 from music_assistant_models.media_items import Album, Artist, ItemMapping, Track, UniqueList
 
-from music_assistant.constants import (
-    CACHE_CATEGORY_MUSIC_ALBUM_TRACKS,
-    CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
-    DB_TABLE_ALBUM_ARTISTS,
-    DB_TABLE_ALBUM_TRACKS,
-    DB_TABLE_ALBUMS,
-)
+from music_assistant.constants import DB_TABLE_ALBUM_ARTISTS, DB_TABLE_ALBUM_TRACKS, DB_TABLE_ALBUMS
 from music_assistant.controllers.media.base import MediaControllerBase
 from music_assistant.helpers.compare import (
     compare_album,
@@ -394,46 +388,10 @@ class AlbumsController(MediaControllerBase[Album]):
         self, item_id: str, provider_instance_id_or_domain: str
     ) -> list[Track]:
         """Return album tracks for the given provider album id."""
-        prov: MusicProvider = self.mass.get_provider(provider_instance_id_or_domain)
-        if prov is None:
-            return []
-        # prefer cache items (if any) - for streaming providers only
-        cache_category = CACHE_CATEGORY_MUSIC_ALBUM_TRACKS
-        cache_base_key = prov.lookup_key
-        cache_key = item_id
-        if (
-            prov.is_streaming_provider
-            and (
-                cache := await self.mass.cache.get(
-                    cache_key, category=cache_category, base_key=cache_base_key
-                )
-            )
-            is not None
-        ):
-            return [Track.from_dict(x) for x in cache]
-        # no items in cache - get listing from provider
-        items = await prov.get_album_tracks(item_id)
-        # store (serializable items) in cache
-        if prov.is_streaming_provider:
-            self.mass.create_task(
-                self.mass.cache.set(
-                    cache_key,
-                    [x.to_dict() for x in items],
-                    category=cache_category,
-                    base_key=cache_base_key,
-                ),
-            )
-        for item in items:
-            # if this is a complete track object, pre-cache it as
-            # that will save us an (expensive) lookup later
-            if item.image and item.artist_str and item.album and prov.domain != "builtin":
-                await self.mass.cache.set(
-                    f"track.{item_id}",
-                    item.to_dict(),
-                    category=CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
-                    base_key=prov.lookup_key,
-                )
-        return items
+        if prov := self.mass.get_provider(provider_instance_id_or_domain):
+            prov = cast("MusicProvider", prov)
+            return await prov.get_album_tracks(item_id)
+        return []
 
     async def radio_mode_base_tracks(
         self,
index 4b7d91fc034456ce19a7a8db56f3eab63a378a86..883d94f611d5745837fd546e15b9ba5e368b7ce7 100644 (file)
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 import asyncio
 import contextlib
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
 
 from music_assistant_models.enums import AlbumType, MediaType, ProviderFeature
 from music_assistant_models.errors import (
@@ -15,9 +15,6 @@ from music_assistant_models.errors import (
 from music_assistant_models.media_items import Album, Artist, ItemMapping, Track, UniqueList
 
 from music_assistant.constants import (
-    CACHE_CATEGORY_MUSIC_ARTIST_ALBUMS,
-    CACHE_CATEGORY_MUSIC_ARTIST_TRACKS,
-    CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
     DB_TABLE_ALBUM_ARTISTS,
     DB_TABLE_ARTISTS,
     DB_TABLE_TRACK_ARTISTS,
@@ -205,63 +202,26 @@ class ArtistsController(MediaControllerBase[Artist]):
         provider_instance_id_or_domain: str,
     ) -> list[Track]:
         """Return top tracks for an artist on given provider."""
-        items = []
         assert provider_instance_id_or_domain != "library"
-        prov = self.mass.get_provider(provider_instance_id_or_domain)
-        if prov is None:
+        if not (prov := self.mass.get_provider(provider_instance_id_or_domain)):
             return []
-        # prefer cache items (if any) - for streaming providers
-        cache_category = CACHE_CATEGORY_MUSIC_ARTIST_TRACKS
-        cache_base_key = prov.lookup_key
-        cache_key = item_id
-        if (
-            prov.is_streaming_provider
-            and (
-                cache := await self.mass.cache.get(
-                    cache_key, category=cache_category, base_key=cache_base_key
-                )
-            )
-            is not None
-        ):
-            return [Track.from_dict(x) for x in cache]
-        # no items in cache - get listing from provider
+        prov = cast("MusicProvider", prov)
         if ProviderFeature.ARTIST_TOPTRACKS in prov.supported_features:
-            items = await prov.get_artist_toptracks(item_id)
-            for item in items:
-                # if this is a complete track object, pre-cache it as
-                # that will save us an (expensive) lookup later
-                if item.image and item.artist_str and item.album and prov.domain != "builtin":
-                    await self.mass.cache.set(
-                        f"track.{item_id}",
-                        item.to_dict(),
-                        category=CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
-                        base_key=prov.lookup_key,
-                    )
-        else:
-            # fallback implementation using the db
-            if db_artist := await self.mass.music.artists.get_library_item_by_prov_id(
-                item_id,
-                provider_instance_id_or_domain,
-            ):
-                artist_id = db_artist.item_id
-                subquery = (
-                    f"SELECT track_id FROM {DB_TABLE_TRACK_ARTISTS} WHERE artist_id = {artist_id}"
-                )
-                query = f"tracks.item_id in ({subquery})"
-                return await self.mass.music.tracks._get_library_items_by_query(
-                    extra_query_parts=[query], provider=provider_instance_id_or_domain
-                )
-        # store (serializable items) in cache
-        if prov.is_streaming_provider:
-            self.mass.create_task(
-                self.mass.cache.set(
-                    cache_key,
-                    [x.to_dict() for x in items],
-                    category=cache_category,
-                    base_key=cache_base_key,
-                )
+            return await prov.get_artist_toptracks(item_id)
+        # fallback implementation using the library db
+        if db_artist := await self.mass.music.artists.get_library_item_by_prov_id(
+            item_id,
+            provider_instance_id_or_domain,
+        ):
+            artist_id = db_artist.item_id
+            subquery = (
+                f"SELECT track_id FROM {DB_TABLE_TRACK_ARTISTS} WHERE artist_id = {artist_id}"
+            )
+            query = f"tracks.item_id in ({subquery})"
+            return await self.mass.music.tracks._get_library_items_by_query(
+                extra_query_parts=[query], provider=provider_instance_id_or_domain
             )
-        return items
+        return []
 
     async def get_library_artist_tracks(
         self,
@@ -278,55 +238,28 @@ class ArtistsController(MediaControllerBase[Artist]):
         provider_instance_id_or_domain: str,
     ) -> list[Album]:
         """Return albums for an artist on given provider."""
-        items = []
         assert provider_instance_id_or_domain != "library"
-        prov = self.mass.get_provider(provider_instance_id_or_domain)
+        if not (prov := self.mass.get_provider(provider_instance_id_or_domain)):
+            return []
+        prov = cast("MusicProvider", prov)
         if prov is None:
             return []
-        # prefer cache items (if any)
-        cache_category = CACHE_CATEGORY_MUSIC_ARTIST_ALBUMS
-        cache_base_key = prov.lookup_key
-        cache_key = item_id
-        if (
-            prov.is_streaming_provider
-            and (
-                cache := await self.mass.cache.get(
-                    cache_key, category=cache_category, base_key=cache_base_key
-                )
-            )
-            is not None
-        ):
-            return [Album.from_dict(x) for x in cache]
-        # no items in cache - get listing from provider
         if ProviderFeature.ARTIST_ALBUMS in prov.supported_features:
-            items = await prov.get_artist_albums(item_id)
-        else:
-            # fallback implementation using the db
-            # ruff: noqa: PLR5501
-            if db_artist := await self.mass.music.artists.get_library_item_by_prov_id(
-                item_id,
-                provider_instance_id_or_domain,
-            ):
-                artist_id = db_artist.item_id
-                subquery = (
-                    f"SELECT album_id FROM {DB_TABLE_ALBUM_ARTISTS} WHERE artist_id = {artist_id}"
-                )
-                query = f"albums.item_id in ({subquery})"
-                return await self.mass.music.albums._get_library_items_by_query(
-                    extra_query_parts=[query], provider=provider_instance_id_or_domain
-                )
-
-        # store (serializable items) in cache
-        if prov.is_streaming_provider:
-            self.mass.create_task(
-                self.mass.cache.set(
-                    cache_key,
-                    [x.to_dict() for x in items],
-                    category=cache_category,
-                    base_key=cache_base_key,
-                )
+            return await prov.get_artist_albums(item_id)
+        # fallback implementation using the db
+        if db_artist := await self.mass.music.artists.get_library_item_by_prov_id(
+            item_id,
+            provider_instance_id_or_domain,
+        ):
+            artist_id = db_artist.item_id
+            subquery = (
+                f"SELECT album_id FROM {DB_TABLE_ALBUM_ARTISTS} WHERE artist_id = {artist_id}"
+            )
+            query = f"albums.item_id in ({subquery})"
+            return await self.mass.music.albums._get_library_items_by_query(
+                extra_query_parts=[query], provider=provider_instance_id_or_domain
             )
-        return items
+        return []
 
     async def get_library_artist_albums(
         self,
index 5372fd9f6abd9c52919a2eb2be6b6ec95c9279fd..d1a4b58212fa9a0df4a32ecd435ea9e86fc85699 100644 (file)
@@ -7,33 +7,22 @@ import logging
 from abc import ABCMeta, abstractmethod
 from collections.abc import Iterable
 from contextlib import suppress
-from typing import TYPE_CHECKING, Any, TypeVar
+from typing import TYPE_CHECKING, Any, TypeVar, cast
 
 from music_assistant_models.enums import EventType, ExternalID, MediaType, ProviderFeature
 from music_assistant_models.errors import MediaNotFoundError, ProviderUnavailableError
-from music_assistant_models.media_items import (
-    Album,
-    ItemMapping,
-    MediaItemType,
-    ProviderMapping,
-    SearchResults,
-    Track,
-)
+from music_assistant_models.media_items import ItemMapping, MediaItemType, ProviderMapping, Track
 
-from music_assistant.constants import (
-    CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
-    CACHE_CATEGORY_MUSIC_SEARCH,
-    DB_TABLE_PLAYLOG,
-    DB_TABLE_PROVIDER_MAPPINGS,
-    MASS_LOGGER_NAME,
-)
+from music_assistant.constants import DB_TABLE_PLAYLOG, DB_TABLE_PROVIDER_MAPPINGS, MASS_LOGGER_NAME
 from music_assistant.helpers.compare import compare_media_item, create_safe_string
 from music_assistant.helpers.json import json_loads, serialize_to_json
+from music_assistant.helpers.util import guard_single_request
 
 if TYPE_CHECKING:
     from collections.abc import AsyncGenerator, Mapping
 
     from music_assistant import MusicAssistant
+    from music_assistant.models import MusicProvider
 
 
 ItemCls = TypeVar("ItemCls", bound="MediaItemType")
@@ -308,54 +297,36 @@ class MediaControllerBase[ItemCls: "MediaItemType"](metaclass=ABCMeta):
         search_query = search_query.replace("/", " ").replace("'", "")
         if provider_instance_id_or_domain == "library":
             return await self.library_items(search=search_query, limit=limit)
-        prov = self.mass.get_provider(provider_instance_id_or_domain)
-        if prov is None:
+        if not (prov := self.mass.get_provider(provider_instance_id_or_domain)):
             return []
+        prov = cast("MusicProvider", prov)
         if ProviderFeature.SEARCH not in prov.supported_features:
             return []
         if not prov.library_supported(self.media_type):
             # assume library supported also means that this mediatype is supported
             return []
-
-        # prefer cache items (if any)
-        cache_category = CACHE_CATEGORY_MUSIC_SEARCH
-        cache_base_key = prov.lookup_key
-        cache_key = f"{search_query}.{limit}.{self.media_type.value}"
-        if (
-            cache := await self.mass.cache.get(
-                cache_key, category=cache_category, base_key=cache_base_key
-            )
-        ) is not None:
-            searchresult = SearchResults.from_dict(cache)
-        else:
-            # no items in cache - get listing from provider
-            searchresult = await prov.search(
-                search_query,
-                [self.media_type],
-                limit,
-            )
-        if self.media_type == MediaType.ARTIST:
-            items = searchresult.artists
-        elif self.media_type == MediaType.ALBUM:
-            items = searchresult.albums
-        elif self.media_type == MediaType.TRACK:
-            items = searchresult.tracks
-        elif self.media_type == MediaType.PLAYLIST:
-            items = searchresult.playlists
-        else:
-            items = searchresult.radio
-        # store (serializable items) in cache
-        if prov.is_streaming_provider:  # do not cache filesystem results
-            self.mass.create_task(
-                self.mass.cache.set(
-                    cache_key,
-                    searchresult.to_dict(),
-                    expiration=86400 * 7,
-                    category=cache_category,
-                    base_key=cache_base_key,
-                ),
-            )
-        return items
+        searchresult = await prov.search(
+            search_query,
+            [self.media_type],
+            limit,
+        )
+        match self.media_type:
+            case MediaType.ARTIST:
+                return searchresult.artists
+            case MediaType.ALBUM:
+                return searchresult.albums
+            case MediaType.TRACK:
+                return searchresult.tracks
+            case MediaType.PLAYLIST:
+                return searchresult.playlists
+            case MediaType.AUDIOBOOK:
+                return searchresult.audiobooks
+            case MediaType.PODCAST:
+                return searchresult.podcasts
+            case MediaType.RADIO:
+                return searchresult.radio
+            case _:
+                return []
 
     async def get_provider_mapping(self, item: ItemCls) -> tuple[str, str]:
         """Return (first) provider and item id."""
@@ -524,6 +495,7 @@ class MediaControllerBase[ItemCls: "MediaItemType"](metaclass=ABCMeta):
         library_item = await self.get_library_item(db_id)
         self.mass.signal_event(EventType.MEDIA_ITEM_UPDATED, library_item.uri, library_item)
 
+    @guard_single_request
     async def get_provider_item(
         self,
         item_id: str,
@@ -536,26 +508,11 @@ class MediaControllerBase[ItemCls: "MediaItemType"](metaclass=ABCMeta):
             return await self.get_library_item(item_id)
         if not (provider := self.mass.get_provider(provider_instance_id_or_domain)):
             raise ProviderUnavailableError(f"{provider_instance_id_or_domain} is not available")
-
-        cache_category = CACHE_CATEGORY_MUSIC_PROVIDER_ITEM
-        cache_base_key = provider.lookup_key
-        cache_key = f"{self.media_type.value}.{item_id}"
-        if not force_refresh and (
-            cache := await self.mass.cache.get(
-                cache_key, category=cache_category, base_key=cache_base_key
-            )
-        ):
-            return self.item_cls.from_dict(cache)
         if provider := self.mass.get_provider(provider_instance_id_or_domain):
+            provider = cast("MusicProvider", provider)
             with suppress(MediaNotFoundError):
-                if item := await provider.get_item(self.media_type, item_id):
-                    await self.mass.cache.set(
-                        cache_key,
-                        item.to_dict(),
-                        category=cache_category,
-                        base_key=cache_base_key,
-                    )
-                    return item
+                async with self.mass.cache.handle_refresh(force_refresh):
+                    return await provider.get_item(self.media_type, item_id)
         # if we reach this point all possibilities failed and the item could not be found.
         # There is a possibility that the (streaming) provider changed the id of the item
         # so we return the previous details (if we have any) marked as unavailable, so
@@ -563,10 +520,28 @@ class MediaControllerBase[ItemCls: "MediaItemType"](metaclass=ABCMeta):
         fallback = fallback or await self.get_library_item_by_prov_id(
             item_id, provider_instance_id_or_domain
         )
-        if fallback and not (isinstance(fallback, ItemMapping) and self.item_cls in (Track, Album)):
+        if (
+            fallback
+            and isinstance(fallback, ItemMapping)
+            and (fallback_provider := self.mass.get_provider(fallback.provider))
+        ):
+            # fallback is a ItemMapping, try to convert to full item
+            with suppress(LookupError, TypeError, ValueError):
+                return self.item_cls.from_dict(
+                    {
+                        **fallback.to_dict(),
+                        "provider_mappings": [
+                            {
+                                "item_id": fallback.item_id,
+                                "provider_domain": fallback_provider.domain,
+                                "provider_instance": fallback_provider.instance_id,
+                                "available": fallback.available,
+                            }
+                        ],
+                    }
+                )
+        if fallback:
             # simply return the fallback item
-            # NOTE: we only accept ItemMapping as fallback for flat items
-            # so not for tracks and albums (which rely on other objects)
             return fallback
         # all options exhausted, we really can not find this item
         msg = (
@@ -842,6 +817,7 @@ class MediaControllerBase[ItemCls: "MediaItemType"](metaclass=ABCMeta):
             join_parts.append(
                 f"JOIN provider_mappings ON provider_mappings.item_id = {self.db_table}.item_id "
                 f"AND provider_mappings.media_type = '{self.media_type.value}' "
+                "AND provider_mappings.in_library = 1 "
                 f"AND (provider_mappings.provider_instance = '{provider}' "
                 f"OR provider_mappings.provider_domain = '{provider}')"
             )
index 35d3a178418252ac98bd2f1c2edf9fdec679ae76..476c884093074b16bf0989fe7c833b166cefcc1d 100644 (file)
@@ -2,9 +2,8 @@
 
 from __future__ import annotations
 
-import time
 from collections.abc import AsyncGenerator
-from typing import Any
+from typing import cast
 
 from music_assistant_models.enums import MediaType, ProviderFeature
 from music_assistant_models.errors import (
@@ -14,14 +13,11 @@ from music_assistant_models.errors import (
 )
 from music_assistant_models.media_items import Playlist, Track
 
-from music_assistant.constants import (
-    CACHE_CATEGORY_MUSIC_PLAYLIST_TRACKS,
-    CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
-    DB_TABLE_PLAYLISTS,
-)
+from music_assistant.constants import DB_TABLE_PLAYLISTS
 from music_assistant.helpers.compare import create_safe_string
 from music_assistant.helpers.json import serialize_to_json
 from music_assistant.helpers.uri import create_uri, parse_uri
+from music_assistant.helpers.util import guard_single_request
 from music_assistant.models.music_provider import MusicProvider
 
 from .base import MediaControllerBase
@@ -55,21 +51,19 @@ class PlaylistController(MediaControllerBase[Playlist]):
         force_refresh: bool = False,
     ) -> AsyncGenerator[Track, None]:
         """Return playlist tracks for the given provider playlist id."""
-        playlist = await self.get(
-            item_id,
-            provider_instance_id_or_domain,
-        )
-        # a playlist can only have one provider so simply pick the first one
-        prov_map = next(x for x in playlist.provider_mappings)
-        cache_checksum = playlist.cache_checksum
+        if provider_instance_id_or_domain == "library":
+            library_item = await self.get_library_item(item_id)
+            # a playlist can only have one provider so simply pick the first one
+            prov_map = next(x for x in library_item.provider_mappings)
+            item_id = prov_map.item_id
+            provider_instance_id_or_domain = prov_map.provider_instance
         # playlist tracks are not stored in the db,
         # we always fetched them (cached) from the provider
         page = 0
         while True:
             tracks = await self._get_provider_playlist_tracks(
-                prov_map.item_id,
-                prov_map.provider_instance,
-                cache_checksum=cache_checksum,
+                item_id,
+                provider_instance_id_or_domain,
                 page=page,
                 force_refresh=force_refresh,
             )
@@ -274,7 +268,7 @@ class PlaylistController(MediaControllerBase[Playlist]):
         # actually add the tracks to the playlist on the provider
         await playlist_prov.add_playlist_tracks(playlist_prov_map.item_id, list(ids_to_add))
         # invalidate cache so tracks get refreshed
-        playlist.cache_checksum = str(time.time())
+        self._refresh_playlist_tracks(playlist)
         await self.update_item_in_library(db_playlist_id, playlist)
 
     async def add_playlist_track(self, db_playlist_id: str | int, track_uri: str) -> None:
@@ -302,8 +296,7 @@ class PlaylistController(MediaControllerBase[Playlist]):
                 )
                 continue
             await provider.remove_playlist_tracks(prov_mapping.item_id, positions_to_remove)
-        # invalidate cache so tracks get refreshed
-        playlist.cache_checksum = str(time.time())
+
         await self.update_item_in_library(db_playlist_id, playlist)
 
     async def _add_library_item(self, item: Playlist) -> int:
@@ -318,7 +311,6 @@ class PlaylistController(MediaControllerBase[Playlist]):
                 "favorite": item.favorite,
                 "metadata": serialize_to_json(item.metadata),
                 "external_ids": serialize_to_json(item.external_ids),
-                "cache_checksum": item.cache_checksum,
                 "search_name": create_safe_string(item.name, True, True),
                 "search_sort_name": create_safe_string(item.sort_name, True, True),
             },
@@ -351,7 +343,6 @@ class PlaylistController(MediaControllerBase[Playlist]):
                 "external_ids": serialize_to_json(
                     update.external_ids if overwrite else cur_item.external_ids
                 ),
-                "cache_checksum": update.cache_checksum or cur_item.cache_checksum,
                 "search_name": create_safe_string(name, True, True),
                 "search_sort_name": create_safe_string(sort_name, True, True),
             },
@@ -365,59 +356,21 @@ class PlaylistController(MediaControllerBase[Playlist]):
         await self.set_provider_mappings(db_id, provider_mappings, overwrite)
         self.logger.debug("updated %s in database: (id %s)", update.name, db_id)
 
+    @guard_single_request
     async def _get_provider_playlist_tracks(
         self,
         item_id: str,
         provider_instance_id_or_domain: str,
-        cache_checksum: Any = None,
         page: int = 0,
         force_refresh: bool = False,
     ) -> list[Track]:
         """Return playlist tracks for the given provider playlist id."""
         assert provider_instance_id_or_domain != "library"
-        provider: MusicProvider = self.mass.get_provider(provider_instance_id_or_domain)
-        if not provider:
+        if not (provider := self.mass.get_provider(provider_instance_id_or_domain)):
             return []
-        # prefer cache items (if any)
-        cache_category = CACHE_CATEGORY_MUSIC_PLAYLIST_TRACKS
-        cache_base_key = provider.lookup_key
-        cache_key = f"{item_id}.{page}"
-        if (
-            not force_refresh
-            and (
-                cache := await self.mass.cache.get(
-                    cache_key,
-                    checksum=cache_checksum,
-                    category=cache_category,
-                    base_key=cache_base_key,
-                )
-            )
-            is not None
-        ):
-            return [Track.from_dict(x) for x in cache]
-        # no items in cache (or force_refresh) - get listing from provider
-        items = await provider.get_playlist_tracks(item_id, page=page)
-        # store (serializable items) in cache
-        self.mass.create_task(
-            self.mass.cache.set(
-                cache_key,
-                [x.to_dict() for x in items],
-                checksum=cache_checksum,
-                category=cache_category,
-                base_key=cache_base_key,
-            )
-        )
-        for item in items:
-            # if this is a complete track object, pre-cache it as
-            # that will save us an (expensive) lookup later
-            if item.image and item.artist_str and item.album and provider.domain != "builtin":
-                await self.mass.cache.set(
-                    f"track.{item_id}",
-                    item.to_dict(),
-                    category=CACHE_CATEGORY_MUSIC_PROVIDER_ITEM,
-                    base_key=provider.lookup_key,
-                )
-        return items
+        provider = cast("MusicProvider", provider)
+        async with self.mass.cache.handle_refresh(force_refresh):
+            return await provider.get_playlist_tracks(item_id, page=page)
 
     async def radio_mode_base_tracks(
         self,
@@ -438,3 +391,14 @@ class PlaylistController(MediaControllerBase[Playlist]):
         This is used to link objects of different providers/qualities together.
         """
         raise NotImplementedError
+
+    def _refresh_playlist_tracks(self, playlist: Playlist) -> None:
+        """Schedule a (debounced) forced cache refresh of the playlist's tracks."""
+
+        # NOTE: no `self` parameter here - the controller is captured via closure.
+        # call_later invokes _refresh(playlist) only, so an explicit `self` param
+        # would swallow the playlist argument and raise TypeError at call time.
+        async def _refresh(playlist: Playlist) -> None:
+            # simply iterate all tracks with force_refresh=True to refresh the cache
+            async for _ in self.tracks(playlist.item_id, playlist.provider, force_refresh=True):
+                pass
+
+        task_id = f"refresh_playlist_tracks_{playlist.item_id}"
+        self.mass.call_later(5, _refresh, playlist, task_id=task_id)  # debounce multiple calls
index 255f308722480d6f54612f7e7db5f73e8f884b63..b3f135e1617ce9cda2b3f5f7101a632fa0f1ec3e 100644 (file)
@@ -128,7 +128,6 @@ class MetaDataController(CoreController):
         self._lookup_jobs: MetadataLookupQueue = MetadataLookupQueue(100)
         self._lookup_task: asyncio.Task[None] | None = None
         self._throttler = Throttler(1, 30)
-        self._missing_metadata_scan_task: asyncio.Task[None] | None = None
 
     async def get_config_entries(
         self,
@@ -180,9 +179,9 @@ class MetaDataController(CoreController):
         self.mass.streams.register_dynamic_route("/imageproxy", self.handle_imageproxy)
         # the lookup task is used to process metadata lookup jobs
         self._lookup_task = self.mass.create_task(self._process_metadata_lookup_jobs())
-        # just tun the scan for missing metadata once at startup
-        # TODO: allows to enable/disable this in the UI and configure interval/time
-        self._missing_metadata_scan_task = self.mass.create_task(self._scan_missing_metadata())
+        # schedule the recurring background scan for missing metadata
+        # (first run 5 minutes after startup; the scan reschedules itself)
+        self.mass.call_later(300, self._scan_missing_metadata)
         # migrate old image path for collage images from absolute to relative
         # TODO: remove this after 2.5+ release
         old_path = f"{self.mass.storage_path}/collage_images/"
@@ -200,8 +199,6 @@ class MetaDataController(CoreController):
         """Handle logic on server stop."""
         if self._lookup_task and not self._lookup_task.done():
             self._lookup_task.cancel()
-        if self._missing_metadata_scan_task and not self._missing_metadata_scan_task.done():
-            self._missing_metadata_scan_task.cancel()
         self.mass.streams.unregister_dynamic_route("/imageproxy")
 
     @property
@@ -261,33 +258,38 @@ class MetaDataController(CoreController):
         self, item: str | MediaItemType, force_refresh: bool = False
     ) -> MediaItemType:
         """Get/update extra/enhanced metadata for/on given MediaItem."""
-        if isinstance(item, str):
-            retrieved_item = await self.mass.music.get_item_by_uri(item)
-            if isinstance(retrieved_item, BrowseFolder):
-                raise TypeError("Cannot update metadata on a BrowseFolder item.")
-            item = retrieved_item
-
-        if item.provider != "library":
-            # this shouldn't happen but just in case.
-            raise RuntimeError("Metadata can only be updated for library items")
-
-        # just in case it was in the queue, prevent duplicate lookups
-        if item.uri:
-            self._lookup_jobs.pop(item.uri)
-        async with self._throttler:
-            if item.media_type == MediaType.ARTIST:
-                await self._update_artist_metadata(
-                    cast("Artist", item), force_refresh=force_refresh
-                )
-            if item.media_type == MediaType.ALBUM:
-                await self._update_album_metadata(cast("Album", item), force_refresh=force_refresh)
-            if item.media_type == MediaType.TRACK:
-                await self._update_track_metadata(cast("Track", item), force_refresh=force_refresh)
-            if item.media_type == MediaType.PLAYLIST:
-                await self._update_playlist_metadata(
-                    cast("Playlist", item), force_refresh=force_refresh
-                )
-        return item
+        async with self.mass.cache.handle_refresh(force_refresh):
+            if isinstance(item, str):
+                retrieved_item = await self.mass.music.get_item_by_uri(item)
+                if isinstance(retrieved_item, BrowseFolder):
+                    raise TypeError("Cannot update metadata on a BrowseFolder item.")
+                item = retrieved_item
+
+            if item.provider != "library":
+                # this shouldn't happen but just in case.
+                raise RuntimeError("Metadata can only be updated for library items")
+
+            # just in case it was in the queue, prevent duplicate lookups
+            if item.uri:
+                self._lookup_jobs.pop(item.uri)
+            async with self._throttler:
+                if item.media_type == MediaType.ARTIST:
+                    await self._update_artist_metadata(
+                        cast("Artist", item), force_refresh=force_refresh
+                    )
+                if item.media_type == MediaType.ALBUM:
+                    await self._update_album_metadata(
+                        cast("Album", item), force_refresh=force_refresh
+                    )
+                if item.media_type == MediaType.TRACK:
+                    await self._update_track_metadata(
+                        cast("Track", item), force_refresh=force_refresh
+                    )
+                if item.media_type == MediaType.PLAYLIST:
+                    await self._update_playlist_metadata(
+                        cast("Playlist", item), force_refresh=force_refresh
+                    )
+            return item
 
     def schedule_update_metadata(self, uri: str) -> None:
         """Schedule metadata update for given MediaItem uri."""
@@ -780,6 +782,7 @@ class MetaDataController(CoreController):
         await asyncio.sleep(60)
         while True:
             item_uri = await self._lookup_jobs.get()
+            self.logger.debug("Processing metadata lookup for %s", item_uri)
             try:
                 item = await self.mass.music.get_item_by_uri(item_uri)
                 # Type check to ensure it's a valid MediaItemType
@@ -810,10 +813,11 @@ class MetaDataController(CoreController):
             f"OR json_extract({DB_TABLE_ARTISTS}.metadata,'$.images') = '[]')"
         )
         for artist in await self.mass.music.artists.library_items(
-            limit=25, order_by="random", extra_query=query
+            limit=5, order_by="random", extra_query=query
         ):
             if artist.uri:
                 self.schedule_update_metadata(artist.uri)
+            await asyncio.sleep(30)
 
         # Scan for missing album images
         self.logger.debug("Start lookup for missing album images...")
@@ -827,6 +831,7 @@ class MetaDataController(CoreController):
         ):
             if album.uri:
                 self.schedule_update_metadata(album.uri)
+            await asyncio.sleep(30)
 
         # Force refresh playlist metadata every refresh interval
         # this will e.g. update the playlist image and genres if the tracks have changed
@@ -836,10 +841,14 @@ class MetaDataController(CoreController):
             f"OR json_extract({DB_TABLE_PLAYLISTS}.metadata,'$.last_refresh') < {timestamp}"
         )
         for playlist in await self.mass.music.playlists.library_items(
-            limit=10, order_by="random", extra_query=query
+            limit=5, order_by="random", extra_query=query
         ):
             if playlist.uri:
                 self.schedule_update_metadata(playlist.uri)
+            await asyncio.sleep(30)
+
+        # reschedule next scan
+        self.mass.call_later(PERIODIC_SCAN_INTERVAL, self._scan_missing_metadata)
 
 
 class MetadataLookupQueue(asyncio.Queue[str]):
index 95bcde056dc78fead97656c39429c951cfff3547..9730c2ee3c4f013ca445303731b2b5fb8a9e3944 100644 (file)
@@ -41,7 +41,6 @@ from music_assistant_models.provider import SyncTask
 from music_assistant_models.unique_list import UniqueList
 
 from music_assistant.constants import (
-    CACHE_CATEGORY_MUSIC_SEARCH,
     CONF_ENTRY_LIBRARY_EXPORT_ADD,
     CONF_ENTRY_LIBRARY_EXPORT_REMOVE,
     DB_TABLE_ALBUM_ARTISTS,
@@ -88,7 +87,9 @@ CONF_RESET_DB = "reset_db"
 DEFAULT_SYNC_INTERVAL = 12 * 60  # default sync interval in minutes
 CONF_SYNC_INTERVAL = "sync_interval"
 CONF_DELETED_PROVIDERS = "deleted_providers"
-DB_SCHEMA_VERSION: Final[int] = 19
+DB_SCHEMA_VERSION: Final[int] = 20
+
+CACHE_CATEGORY_LAST_SYNC: Final[int] = 9
 
 
 class MusicController(CoreController):
@@ -355,37 +356,11 @@ class MusicController(CoreController):
 
         # create safe search string
         search_query = search_query.replace("/", " ").replace("'", "")
-
-        # prefer cache items (if any)
-        media_types_str = ",".join(media_types)
-        cache_category = CACHE_CATEGORY_MUSIC_SEARCH
-        cache_base_key = prov.lookup_key
-        cache_key = f"{search_query}.{limit}.{media_types_str}"
-
-        if prov.is_streaming_provider and (
-            cache := await self.mass.cache.get(
-                cache_key, category=cache_category, base_key=cache_base_key
-            )
-        ):
-            return SearchResults.from_dict(cache)
-        # no items in cache - get listing from provider
-        result = await prov.search(
+        return await prov.search(
             search_query,
             media_types,
             limit,
         )
-        # store (serializable items) in cache
-        if prov.is_streaming_provider:
-            self.mass.create_task(
-                self.mass.cache.set(
-                    cache_key,
-                    result.to_dict(),
-                    expiration=86400 * 7,
-                    category=cache_category,
-                    base_key=cache_base_key,
-                )
-            )
-        return result
 
     async def search_library(
         self,
@@ -1404,8 +1379,14 @@ class MusicController(CoreController):
             else:
                 self.logger.info("Sync task for %s/%ss completed", provider.name, media_type.value)
             self.mass.signal_event(EventType.SYNC_TASKS_UPDATED, data=self.in_progress_syncs)
-            cache_key = f"last_library_sync_{provider.instance_id}_{media_type.value}"
-            self.mass.create_task(self.mass.cache.set, cache_key, self.mass.loop.time())
+            self.mass.create_task(
+                self.mass.cache.set(
+                    key=media_type.value,
+                    data=self.mass.loop.time(),
+                    provider=provider.instance_id,
+                    category=CACHE_CATEGORY_LAST_SYNC,
+                )
+            )
             # schedule db cleanup after sync
             if not self.in_progress_syncs:
                 self.mass.create_task(self._cleanup_database())
@@ -1480,9 +1461,12 @@ class MusicController(CoreController):
 
         if is_initial:
             # schedule the first sync run
-            cache_key = f"last_library_sync_{provider.instance_id}_{media_type.value}"
             initial_interval = 10
-            if last_sync := await self.mass.cache.get(cache_key):
+            if last_sync := await self.mass.cache.get(
+                key=media_type.value,
+                provider=provider.instance_id,
+                category=CACHE_CATEGORY_LAST_SYNC,
+            ):
                 initial_interval += max(0, sync_interval - (self.mass.loop.time() - last_sync))
             sync_interval = initial_interval
 
@@ -1592,87 +1576,13 @@ class MusicController(CoreController):
 
     async def __migrate_database(self, prev_version: int) -> None:
         """Perform a database migration."""
-        # ruff: noqa: PLR0915
         self.logger.info(
             "Migrating database from version %s to %s", prev_version, DB_SCHEMA_VERSION
         )
 
-        if prev_version < 7:
+        if prev_version < 15:
             raise MusicAssistantError("Database schema version too old to migrate")
 
-        if prev_version <= 7:
-            # remove redundant artists and provider_mappings columns
-            for table in (
-                DB_TABLE_TRACKS,
-                DB_TABLE_ALBUMS,
-                DB_TABLE_ARTISTS,
-                DB_TABLE_RADIOS,
-                DB_TABLE_PLAYLISTS,
-            ):
-                for column in ("artists", "provider_mappings"):
-                    try:
-                        await self.database.execute(f"ALTER TABLE {table} DROP COLUMN {column}")
-                    except Exception as err:
-                        if "no such column" in str(err):
-                            continue
-                        raise
-            # add cache_checksum column to playlists
-            try:
-                await self.database.execute(
-                    f"ALTER TABLE {DB_TABLE_PLAYLISTS} ADD COLUMN cache_checksum TEXT DEFAULT ''"
-                )
-            except Exception as err:
-                if "duplicate column" not in str(err):
-                    raise
-
-        if prev_version <= 8:
-            # migrate track_loudness --> loudness_measurements
-            async for db_row in self.database.iter_items("track_loudness"):
-                if db_row["integrated"] == inf or db_row["integrated"] == -inf:
-                    continue
-                if db_row["provider"] in ("radiobrowser", "tunein"):
-                    continue
-                await self.database.insert_or_replace(
-                    DB_TABLE_LOUDNESS_MEASUREMENTS,
-                    {
-                        "item_id": db_row["item_id"],
-                        "media_type": "track",
-                        "provider": db_row["provider"],
-                        "loudness": db_row["integrated"],
-                    },
-                )
-            await self.database.execute("DROP TABLE IF EXISTS track_loudness")
-
-        if prev_version <= 10:
-            # Recreate playlog table due to complete new layout
-            await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_PLAYLOG}")
-            await self.__create_database_tables()
-
-        if prev_version <= 12:
-            # Need to drop the NOT NULL requirement on podcasts.publisher and audiobooks.publisher
-            # However, because there is no ALTER COLUMN support in sqlite, we will need
-            # to create the tables again.
-            await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_AUDIOBOOKS}")
-            await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_PODCASTS}")
-            await self.__create_database_tables()
-
-        if prev_version <= 13:
-            # migrate chapters in metadata
-            # this is leftover mess from the old chapters implementation
-            for db_row in await self.database.search(DB_TABLE_TRACKS, "position_start", "metadata"):
-                metadata = json_loads(db_row["metadata"])
-                metadata["chapters"] = None
-                await self.database.update(
-                    DB_TABLE_TRACKS,
-                    {"item_id": db_row["item_id"]},
-                    {"metadata": serialize_to_json(metadata)},
-                )
-
-        if prev_version <= 14:
-            # Recreate playlog table due to complete new layout
-            await self.database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_PLAYLOG}")
-            await self.__create_database_tables()
-
         if prev_version <= 15:
             # add search_name and search_sort_name columns to all tables
             # and populate them with the name and sort_name values
@@ -1758,6 +1668,13 @@ class MusicController(CoreController):
                 "WHERE provider_domain in ('filesystem_local', 'filesystem_smb');"
             )
 
+        if prev_version <= 20:
+            # drop column cache_checksum from playlists table
+            # this is no longer used and is a leftover from previous designs
+            await self.database.execute(
+                f"ALTER TABLE {DB_TABLE_PLAYLISTS} DROP COLUMN cache_checksum"
+            )
+
         # save changes
         await self.database.commit()
 
@@ -1858,7 +1775,6 @@ class MusicController(CoreController):
             [sort_name] TEXT NOT NULL,
             [owner] TEXT NOT NULL,
             [is_editable] BOOLEAN NOT NULL,
-            [cache_checksum] TEXT DEFAULT '',
             [favorite] BOOLEAN NOT NULL DEFAULT 0,
             [metadata] json NOT NULL,
             [external_ids] json NOT NULL,
index 7356c5684bb95d16500c2e8d78656a78aca72a95..5a12893d43d100d9070e8a366bb9c43e024532bb 100644 (file)
@@ -58,7 +58,6 @@ from music_assistant_models.queue_item import QueueItem
 
 from music_assistant.constants import (
     ATTR_ANNOUNCEMENT_IN_PROGRESS,
-    CACHE_CATEGORY_PLAYER_QUEUE_STATE,
     CONF_CROSSFADE,
     CONF_FLOW_MODE,
     MASS_LOGO_ONLINE,
@@ -103,6 +102,8 @@ CONF_DEFAULT_ENQUEUE_OPTION_PODCAST_EPISODE = "default_enqueue_option_podcast_ep
 CONF_DEFAULT_ENQUEUE_OPTION_FOLDER = "default_enqueue_option_folder"
 CONF_DEFAULT_ENQUEUE_OPTION_UNKNOWN = "default_enqueue_option_unknown"
 RADIO_TRACK_MAX_DURATION_SECS = 20 * 60  # 20 minutes
+CACHE_CATEGORY_PLAYER_QUEUE_STATE = 0
+CACHE_CATEGORY_PLAYER_QUEUE_ITEMS = 1
 
 
 class CompareState(TypedDict):
@@ -921,15 +922,15 @@ class PlayerQueuesController(CoreController):
         queue = None
         # try to restore previous state
         if prev_state := await self.mass.cache.get(
-            "state", category=CACHE_CATEGORY_PLAYER_QUEUE_STATE, base_key=queue_id
+            key=queue_id, provider=self.domain, category=CACHE_CATEGORY_PLAYER_QUEUE_STATE
         ):
             try:
                 queue = PlayerQueue.from_cache(prev_state)
                 prev_items = await self.mass.cache.get(
-                    "items",
+                    key=queue_id,
+                    provider=self.domain,
+                    category=CACHE_CATEGORY_PLAYER_QUEUE_ITEMS,
                     default=[],
-                    category=CACHE_CATEGORY_PLAYER_QUEUE_STATE,
-                    base_key=queue_id,
                 )
                 queue_items = [QueueItem.from_cache(x) for x in prev_items]
             except Exception as err:
@@ -990,8 +991,18 @@ class PlayerQueuesController(CoreController):
         """Call when a player is removed from the registry."""
         if permanent:
             # if the player is permanently removed, we also remove the cached queue data
-            self.mass.create_task(self.mass.cache.delete(f"queue.state.{player_id}"))
-            self.mass.create_task(self.mass.cache.delete(f"queue.items.{player_id}"))
+            self.mass.create_task(
+                self.mass.cache.delete(
+                    key=player_id, provider=self.domain, category=CACHE_CATEGORY_PLAYER_QUEUE_STATE
+                )
+            )
+            self.mass.create_task(
+                self.mass.cache.delete(
+                    key=player_id,
+                    provider=self.domain,
+                    category=CACHE_CATEGORY_PLAYER_QUEUE_ITEMS,
+                )
+            )
         self._queues.pop(player_id, None)
         self._queue_items.pop(player_id, None)
 
@@ -1236,10 +1247,10 @@ class PlayerQueuesController(CoreController):
             # save items in cache
             self.mass.create_task(
                 self.mass.cache.set(
-                    "items",
-                    [x.to_cache() for x in self._queue_items[queue_id]],
-                    category=CACHE_CATEGORY_PLAYER_QUEUE_STATE,
-                    base_key=queue_id,
+                    key=queue_id,
+                    data=[x.to_cache() for x in self._queue_items[queue_id]],
+                    provider=self.domain,
+                    category=CACHE_CATEGORY_PLAYER_QUEUE_ITEMS,
                 )
             )
         # always send the base event
@@ -1247,10 +1258,10 @@ class PlayerQueuesController(CoreController):
         # save state
         self.mass.create_task(
             self.mass.cache.set(
-                "state",
-                queue.to_cache(),
+                key=queue_id,
+                data=queue.to_cache(),
+                provider=self.domain,
                 category=CACHE_CATEGORY_PLAYER_QUEUE_STATE,
-                base_key=queue_id,
             )
         )
 
index 3ad662724c86d28792ded38d55963da722c76488..b9c7fded25a61f3a054bd58c49a87821d2a546f4 100644 (file)
@@ -55,8 +55,6 @@ from music_assistant.constants import (
     ATTR_GROUP_MEMBERS,
     ATTR_LAST_POLL,
     ATTR_PREVIOUS_VOLUME,
-    CACHE_CATEGORY_PLAYERS,
-    CACHE_KEY_PLAYER_POWER,
     CONF_AUTO_PLAY,
     CONF_ENTRY_ANNOUNCE_VOLUME,
     CONF_ENTRY_ANNOUNCE_VOLUME_MAX,
@@ -83,6 +81,8 @@ if TYPE_CHECKING:
     from music_assistant_models.config_entries import CoreConfig, PlayerConfig
     from music_assistant_models.player_queue import PlayerQueue
 
+CACHE_CATEGORY_PLAYER_POWER = 1
+
 
 _PlayerControllerT = TypeVar("_PlayerControllerT", bound="PlayerController")
 _R = TypeVar("_R")
@@ -513,7 +513,10 @@ class PlayerController(CoreController):
             # and store the state in the cache
             player.extra_data[ATTR_FAKE_POWER] = powered
             await self.mass.cache.set(
-                player_id, powered, category=CACHE_CATEGORY_PLAYERS, base_key=CACHE_KEY_PLAYER_POWER
+                key=player_id,
+                data=powered,
+                provider=self.domain,
+                category=CACHE_CATEGORY_PLAYER_POWER,
             )
         else:
             # handle external player control
@@ -1242,10 +1245,10 @@ class PlayerController(CoreController):
 
         # restore 'fake' power state from cache if available
         cached_value = await self.mass.cache.get(
-            player.player_id,
+            key=player.player_id,
+            provider=self.domain,
+            category=CACHE_CATEGORY_PLAYER_POWER,
             default=False,
-            category=CACHE_CATEGORY_PLAYERS,
-            base_key=CACHE_KEY_PLAYER_POWER,
         )
         if cached_value is not None:
             player.extra_data[ATTR_FAKE_POWER] = cached_value
index e1b6038b4eebf41bbf93bf76fb7bfd540c76547c..54cd0d177bbdd97b7420c93a85427cd72400588b 100644 (file)
@@ -74,7 +74,9 @@ HTTP_HEADERS_ICY = {**HTTP_HEADERS, "Icy-MetaData": "1"}
 
 SLOW_PROVIDERS = ("tidal", "ytmusic", "apple_music")
 
-CACHE_BASE_KEY: Final[str] = "audio_cache_path"
+CACHE_CATEGORY_AUDIO_CACHE: Final[int] = 99
+CACHE_CATEGORY_RESOLVED_RADIO_URL: Final[int] = 100
+CACHE_PROVIDER: Final[str] = "audio"
 CACHE_FILES_IN_USE: set[str] = set()
 
 
@@ -119,7 +121,9 @@ class StreamCache:
         """Create the cache file (if needed)."""
         if self._cache_file is None:
             if cached_cache_path := await self.mass.cache.get(
-                self.streamdetails.uri, base_key=CACHE_BASE_KEY
+                key=self.streamdetails.uri,
+                provider=CACHE_PROVIDER,
+                category=CACHE_CATEGORY_AUDIO_CACHE,
             ):
                 # we have a mapping stored for this uri, prefer that
                 self._cache_file = cached_cache_path
@@ -137,7 +141,10 @@ class StreamCache:
                     self.mass.streams.audio_cache_dir, cache_id
                 )
                 await self.mass.cache.set(
-                    self.streamdetails.uri, cache_file, base_key=CACHE_BASE_KEY
+                    key=self.streamdetails.uri,
+                    data=cache_file,
+                    provider=CACHE_PROVIDER,
+                    category=CACHE_CATEGORY_AUDIO_CACHE,
                 )
         # mark file as in-use to prevent it being deleted
         CACHE_FILES_IN_USE.add(self._cache_file)
@@ -1030,8 +1037,9 @@ async def resolve_radio_stream(mass: MusicAssistant, url: str) -> tuple[str, Str
     - unfolded URL as string
     - StreamType to determine ICY (radio) or HLS stream.
     """
-    cache_base_key = "resolved_radio_info"
-    if cache := await mass.cache.get(url, base_key=cache_base_key):
+    if cache := await mass.cache.get(
+        key=url, provider=CACHE_PROVIDER, category=CACHE_CATEGORY_RESOLVED_RADIO_URL
+    ):
         return cast("tuple[str, StreamType]", cache)
     stream_type = StreamType.HTTP
     resolved_url = url
@@ -1073,7 +1081,13 @@ async def resolve_radio_stream(mass: MusicAssistant, url: str) -> tuple[str, Str
 
     result = (resolved_url, stream_type)
     cache_expiration = 3600 * 3
-    await mass.cache.set(url, result, expiration=cache_expiration, base_key=cache_base_key)
+    await mass.cache.set(
+        url,
+        result,
+        expiration=cache_expiration,
+        provider=CACHE_PROVIDER,
+        category=CACHE_CATEGORY_RESOLVED_RADIO_URL,
+    )
     return result
 
 
index 95fb17e6a553298748bacfc7d843c425a5921187..e56124ccd33f5d888b541021a73c86fcb7b0977e 100644 (file)
@@ -235,7 +235,7 @@ class DatabaseConnection:
         self, table: str, match: dict[str, Any] | None = None, query: str | None = None
     ) -> None:
         """Delete data in given table."""
-        assert not (query and "where" in query.lower())
+        assert not (match and query), "Cannot use both match and query"
         sql_query = f"DELETE FROM {table} "
         if match:
             sql_query += " WHERE " + " AND ".join(f"{x} = :{x}" for x in match)
index 339b7b24dbc8f074e66495a5bdbaf048a1338297..906c6a39b8bbe903341c7c6bd33217737a4d26b1 100644 (file)
@@ -97,12 +97,10 @@ class ThrottlerManager:
     async def bypass(self) -> AsyncGenerator[None, None]:
         """Bypass the throttler."""
         try:
-            BYPASS_THROTTLER.set(True)
+            token = BYPASS_THROTTLER.set(True)
             yield None
         finally:
-            # TODO: token is unbound here
-            # BYPASS_THROTTLER.reset(token)
-            ...
+            BYPASS_THROTTLER.reset(token)
 
 
 def throttle_with_retries[ProviderT: "Provider", **P, R](
index c0e93388616b67b4fe2446a270d6c3c109f4ac26..6eb9111ac2022b8f0830fbcb3d0cce1f2cb7959a 100644 (file)
@@ -18,7 +18,7 @@ from functools import lru_cache
 from importlib.metadata import PackageNotFoundError
 from importlib.metadata import version as pkg_version
 from types import TracebackType
-from typing import TYPE_CHECKING, Any, ParamSpec, Self, TypeVar, cast
+from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, Self, TypeVar, cast
 from urllib.parse import urlparse
 
 import chardet
@@ -37,6 +37,8 @@ if TYPE_CHECKING:
 
     from music_assistant.mass import MusicAssistant
     from music_assistant.models import ProviderModuleType
+    from music_assistant.models.core_controller import CoreController
+    from music_assistant.models.provider import Provider
 
 from dataclasses import fields, is_dataclass
 
@@ -771,3 +773,24 @@ class TimedAsyncGenerator:
     def __aiter__(self):  # type: ignore[no-untyped-def]
         """Return the async iterator."""
         return self._factory()
+
+
+def guard_single_request[ProviderT: "Provider | CoreController", **P, R](
+    func: Callable[Concatenate[ProviderT, P], Coroutine[Any, Any, R]],
+) -> Callable[Concatenate[ProviderT, P], Coroutine[Any, Any, R]]:
+    """Deduplicate concurrent calls: identical in-flight requests share a single task."""
+
+    @functools.wraps(func)
+    async def wrapper(self: ProviderT, *args: P.args, **kwargs: P.kwargs) -> R:
+        mass = self.mass
+        # build a unique task_id from the function identity plus its args/kwargs
+        cache_key_parts = [func.__class__.__name__, func.__name__, *args]
+        for key in sorted(kwargs.keys()):
+            cache_key_parts.append(f"{key}{kwargs[key]}")
+        task_id = ".".join(map(str, cache_key_parts))
+        task: asyncio.Task[R] = mass.create_task(
+            func, self, *args, **kwargs, task_id=task_id, abort_existing=False
+        )
+        return await task
+
+    return wrapper
index 76ee6b17053beada8e16b557341594f08c263187..5678ed03649bba11b0daaa797ae859f89ed6ff49 100644 (file)
@@ -370,7 +370,7 @@ class MusicAssistant:
 
     def create_task(
         self,
-        target: Callable[[MassEvent], Coroutine[Any, Any, None]] | Awaitable[_R],
+        target: Callable[..., Coroutine[Any, Any, _R]] | Awaitable[_R],
         *args: Any,
         task_id: str | None = None,
         abort_existing: bool = False,
index 5961df4d9f9264fff53a995b07e965acae3d7d63..d170b7b3b83784a8ef080525a373ab37d8a35f26 100644 (file)
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 import asyncio
 from collections.abc import Sequence
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING, Final, cast
 
 from music_assistant_models.enums import MediaType, ProviderFeature
 from music_assistant_models.errors import (
@@ -29,7 +29,6 @@ from music_assistant_models.media_items import (
 )
 
 from music_assistant.constants import (
-    CACHE_CATEGORY_LIBRARY_ITEMS,
     CONF_ENTRY_LIBRARY_IMPORT_ALBUM_TRACKS,
     CONF_ENTRY_LIBRARY_IMPORT_PLAYLIST_TRACKS,
 )
@@ -41,6 +40,8 @@ if TYPE_CHECKING:
 
     from music_assistant_models.streamdetails import StreamDetails
 
+CACHE_CATEGORY_PREV_LIBRARY_IDS: Final[int] = 1
+
 
 class MusicProvider(Provider):
     """Base representation of a Music Provider (controller).
@@ -441,7 +442,7 @@ class MusicProvider(Provider):
             return await self.get_podcast_episode(prov_item_id)
         return await self.get_track(prov_item_id)
 
-    async def browse(self, path: str) -> Sequence[MediaItemType | ItemMapping | BrowseFolder]:  # noqa: PLR0911, PLR0915
+    async def browse(self, path: str) -> Sequence[MediaItemType | ItemMapping | BrowseFolder]:
         """Browse this provider's items.
 
         :param path: The path to browse, (e.g. provider_id://artists).
@@ -453,104 +454,32 @@ class MusicProvider(Provider):
         subpath = path.split("://", 1)[1]
         # this reference implementation can be overridden with a provider specific approach
         if subpath == "artists":
-            library_item_ids = await self.mass.cache.get(
-                "artist",
-                category=CACHE_CATEGORY_LIBRARY_ITEMS,
-                base_key=self.instance_id,
-            )
-            if not library_item_ids:
-                return [x async for x in self.get_library_artists()]
-            library_items = cast("list[int]", library_item_ids)
-            query = "artists.item_id in :ids"
-            query_params = {"ids": library_items}
             return await self.mass.music.artists.library_items(
                 provider=self.instance_id,
-                extra_query=query,
-                extra_query_params=query_params,
             )
         if subpath == "albums":
-            library_item_ids = await self.mass.cache.get(
-                "album",
-                category=CACHE_CATEGORY_LIBRARY_ITEMS,
-                base_key=self.instance_id,
-            )
-            if not library_item_ids:
-                return [x async for x in self.get_library_albums()]
-            library_item_ids = cast("list[int]", library_item_ids)
-            query = "albums.item_id in :ids"
-            query_params = {"ids": library_item_ids}
             return await self.mass.music.albums.library_items(
-                extra_query=query, extra_query_params=query_params
+                provider=self.instance_id,
             )
         if subpath == "tracks":
-            library_item_ids = await self.mass.cache.get(
-                "track",
-                category=CACHE_CATEGORY_LIBRARY_ITEMS,
-                base_key=self.instance_id,
-            )
-            if not library_item_ids:
-                return [x async for x in self.get_library_tracks()]
-            library_item_ids = cast("list[int]", library_item_ids)
-            query = "tracks.item_id in :ids"
-            query_params = {"ids": library_item_ids}
             return await self.mass.music.tracks.library_items(
-                extra_query=query, extra_query_params=query_params
+                provider=self.instance_id,
             )
         if subpath == "radios":
-            library_item_ids = await self.mass.cache.get(
-                "radio",
-                category=CACHE_CATEGORY_LIBRARY_ITEMS,
-                base_key=self.instance_id,
-            )
-            if not library_item_ids:
-                return [x async for x in self.get_library_radios()]
-            library_item_ids = cast("list[int]", library_item_ids)
-            query = "radios.item_id in :ids"
-            query_params = {"ids": library_item_ids}
             return await self.mass.music.radio.library_items(
-                extra_query=query, extra_query_params=query_params
+                provider=self.instance_id,
             )
         if subpath == "playlists":
-            library_item_ids = await self.mass.cache.get(
-                "playlist",
-                category=CACHE_CATEGORY_LIBRARY_ITEMS,
-                base_key=self.instance_id,
-            )
-            if not library_item_ids:
-                return [x async for x in self.get_library_playlists()]
-            library_item_ids = cast("list[int]", library_item_ids)
-            query = "playlists.item_id in :ids"
-            query_params = {"ids": library_item_ids}
             return await self.mass.music.playlists.library_items(
-                extra_query=query, extra_query_params=query_params
+                provider=self.instance_id,
             )
         if subpath == "audiobooks":
-            library_item_ids = await self.mass.cache.get(
-                "audiobook",
-                category=CACHE_CATEGORY_LIBRARY_ITEMS,
-                base_key=self.instance_id,
-            )
-            if not library_item_ids:
-                return [x async for x in self.get_library_audiobooks()]
-            library_item_ids = cast("list[int]", library_item_ids)
-            query = "audiobooks.item_id in :ids"
-            query_params = {"ids": library_item_ids}
             return await self.mass.music.audiobooks.library_items(
-                extra_query=query, extra_query_params=query_params
+                provider=self.instance_id,
             )
         if subpath == "podcasts":
-            library_item_ids = await self.mass.cache.get(
-                "podcast",
-                category=CACHE_CATEGORY_LIBRARY_ITEMS,
-                base_key=self.instance_id,
-            )
-            if not library_item_ids:
-                return [x async for x in self.get_library_podcasts()]
-            library_item_ids = cast("list[int]", library_item_ids)
-            query = "podcasts.item_id in :ids"
-            query_params = {"ids": library_item_ids}
             return await self.mass.music.podcasts.library_items(
-                extra_query=query, extra_query_params=query_params
+                provider=self.instance_id,
             )
         if subpath:
             # unknown path
@@ -676,13 +605,12 @@ class MusicProvider(Provider):
             raise UnsupportedFeaturedException(f"Unexpected media type to sync: {media_type}")
 
         # process deletions (= no longer in library)
-        cache_category = CACHE_CATEGORY_LIBRARY_ITEMS
-        cache_base_key = self.instance_id
-
         prev_library_items: list[int] | None
         controller = self.mass.music.get_controller(media_type)
         if prev_library_items := await self.mass.cache.get(
-            media_type.value, category=cache_category, base_key=cache_base_key
+            key=media_type.value,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_PREV_LIBRARY_IDS,
         ):
             for db_id in prev_library_items:
                 if db_id not in cur_db_ids:
@@ -730,10 +658,10 @@ class MusicProvider(Provider):
                     await asyncio.sleep(0)  # yield to eventloop
         # store current list of id's in cache so we can track changes
         await self.mass.cache.set(
-            media_type.value,
-            list(cur_db_ids),
-            category=cache_category,
-            base_key=cache_base_key,
+            key=media_type.value,
+            data=list(cur_db_ids),
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_PREV_LIBRARY_IDS,
         )
 
     async def _sync_library_artists(self, import_as_favorite: bool) -> set[int]:
@@ -900,11 +828,6 @@ class MusicProvider(Provider):
                     if import_as_favorite:
                         prov_item.favorite = True
                     library_item = await self.mass.music.playlists.add_item_to_library(prov_item)
-                elif library_item.cache_checksum != prov_item.cache_checksum:
-                    # existing dbitem checksum changed (used to determine if a playlist has changed)
-                    library_item = await self.mass.music.playlists.update_item_in_library(
-                        library_item.item_id, prov_item
-                    )
                 elif not library_item.favorite and import_as_favorite:
                     # existing library item not favorite but should be
                     await self.mass.music.playlists.set_favorite(library_item.item_id, True)
@@ -1158,9 +1081,9 @@ class MusicProvider(Provider):
         """Check if provider mapping(s) are consistent between library and provider items."""
         for provider_mapping in provider_item.provider_mappings:
             if provider_mapping.item_id != provider_item.item_id:
-                raise MusicAssistantError("Inconsistent provider mapping item_id's found")
+                raise MusicAssistantError("Inconsistent provider mapping item_id found")
             if provider_mapping.provider_instance != self.instance_id:
-                raise MusicAssistantError("Inconsistent provider mapping instance_id's found")
+                raise MusicAssistantError("Inconsistent provider mapping instance_id found")
             provider_mapping.in_library = in_library
             library_mapping = next(
                 (
index db4dc41c889da64a1441e1dfbf68118be3f584a4..edca96a1b3c69b5df2db99919a2979a098cd163a 100644 (file)
@@ -2,6 +2,8 @@
 
 from __future__ import annotations
 
+from typing import Final
+
 from music_assistant_models.enums import ContentType
 from music_assistant_models.media_items import AudioFormat
 
@@ -9,19 +11,20 @@ from music_assistant.constants import DEFAULT_PCM_FORMAT
 
 DOMAIN = "airplay"
 
-CONF_ENCRYPTION = "encryption"
-CONF_ALAC_ENCODE = "alac_encode"
-CONF_VOLUME_START = "volume_start"
-CONF_PASSWORD = "password"
-CONF_READ_AHEAD_BUFFER = "read_ahead_buffer"
-CONF_IGNORE_VOLUME = "ignore_volume"
+CACHE_CATEGORY_PREV_VOLUME: Final[int] = 1
+
+CONF_ENCRYPTION: Final[str] = "encryption"
+CONF_ALAC_ENCODE: Final[str] = "alac_encode"
+CONF_VOLUME_START: Final[str] = "volume_start"
+CONF_PASSWORD: Final[str] = "password"
+CONF_READ_AHEAD_BUFFER: Final[str] = "read_ahead_buffer"
+CONF_IGNORE_VOLUME: Final[str] = "ignore_volume"
+CONF_CREDENTIALS: Final[str] = "credentials"
 
-BACKOFF_TIME_LOWER_LIMIT = 15  # seconds
-BACKOFF_TIME_UPPER_LIMIT = 300  # Five minutes
+BACKOFF_TIME_LOWER_LIMIT: Final[int] = 15  # seconds
+BACKOFF_TIME_UPPER_LIMIT: Final[int] = 300  # Five minutes
 
-CONF_CREDENTIALS = "credentials"
-CACHE_KEY_PREV_VOLUME = "airplay_prev_volume"
-FALLBACK_VOLUME = 20
+FALLBACK_VOLUME: Final[int] = 20
 
 AIRPLAY_FLOW_PCM_FORMAT = AudioFormat(
     content_type=DEFAULT_PCM_FORMAT.content_type,
index 8ec1411110bd93c5e86981328697bcb383bf1802..08a14034063d3298d6c0a5455e9804f1e4d9ee49 100644 (file)
@@ -33,7 +33,7 @@ from music_assistant.providers.universal_group.constants import UGP_PREFIX
 from .constants import (
     AIRPLAY_FLOW_PCM_FORMAT,
     AIRPLAY_PCM_FORMAT,
-    CACHE_KEY_PREV_VOLUME,
+    CACHE_CATEGORY_PREV_VOLUME,
     CONF_ALAC_ENCODE,
     CONF_ENCRYPTION,
     CONF_IGNORE_VOLUME,
@@ -290,7 +290,12 @@ class AirPlayPlayer(Player):
         self._attr_volume_level = volume_level
         self.update_state()
         # store last state in cache
-        await self.mass.cache.set(self.player_id, volume_level, base_key=CACHE_KEY_PREV_VOLUME)
+        await self.mass.cache.set(
+            key=self.player_id,
+            data=volume_level,
+            provider=self.provider.lookup_key,
+            category=CACHE_CATEGORY_PREV_VOLUME,
+        )
 
     async def set_members(
         self,
index d7c7f4bbaf20e10e18abfb0635b40747c046c1a1..feaa9397204053b5d33607298bf051eae464eef5 100644 (file)
@@ -15,7 +15,7 @@ from music_assistant.helpers.datetime import utc
 from music_assistant.helpers.util import get_ip_pton, select_free_port
 from music_assistant.models.player_provider import PlayerProvider
 
-from .constants import CACHE_KEY_PREV_VOLUME, CONF_IGNORE_VOLUME, FALLBACK_VOLUME
+from .constants import CACHE_CATEGORY_PREV_VOLUME, CONF_IGNORE_VOLUME, FALLBACK_VOLUME
 from .helpers import (
     convert_airplay_volume,
     get_cliraop_binary,
@@ -153,7 +153,11 @@ class AirPlayProvider(PlayerProvider):
             display_name += " (AirPlay)"
 
         # Get volume from cache
-        if not (volume := await self.mass.cache.get(player_id, base_key=CACHE_KEY_PREV_VOLUME)):
+        if not (
+            volume := await self.mass.cache.get(
+                key=player_id, provider=self.lookup_key, category=CACHE_CATEGORY_PREV_VOLUME
+            )
+        ):
             volume = FALLBACK_VOLUME
 
         player = AirPlayPlayer(
index 5740aefed40b86229297f5995c209206351b6901..49759aa49b53657bcd4180592bbd0e5c1c739670 100644 (file)
@@ -60,6 +60,7 @@ from music_assistant_models.streamdetails import StreamDetails
 from pywidevine import PSSH, Cdm, Device, DeviceTypes
 from pywidevine.license_protocol_pb2 import WidevinePsshData
 
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.app_vars import app_var
 from music_assistant.helpers.auth import AuthenticationHelper
 from music_assistant.helpers.json import json_loads
@@ -99,6 +100,7 @@ UNKNOWN_PLAYLIST_NAME = "Unknown Apple Music Playlist"
 CONF_MUSIC_APP_TOKEN = "music_app_token"
 CONF_MUSIC_USER_TOKEN = "music_user_token"
 CONF_MUSIC_USER_TOKEN_TIMESTAMP = "music_user_token_timestamp"
+CACHE_CATEGORY_DECRYPT_KEY = 1
 
 
 async def setup(
@@ -279,6 +281,7 @@ class AppleMusicProvider(MusicProvider):
         ) as _file:
             self._decrypt_private_key = await _file.read()
 
+    @use_cache()
     async def search(
         self, search_query: str, media_types: list[MediaType] | None, limit: int = 5
     ) -> SearchResults:
@@ -377,24 +380,28 @@ class AppleMusicProvider(MusicProvider):
             elif item and item["id"]:
                 yield self._parse_playlist(item)
 
+    @use_cache()
     async def get_artist(self, prov_artist_id) -> Artist:
         """Get full artist details by id."""
         endpoint = f"catalog/{self._storefront}/artists/{prov_artist_id}"
         response = await self._get_data(endpoint, extend="editorialNotes")
         return self._parse_artist(response["data"][0])
 
+    @use_cache()
     async def get_album(self, prov_album_id) -> Album:
         """Get full album details by id."""
         endpoint = f"catalog/{self._storefront}/albums/{prov_album_id}"
         response = await self._get_data(endpoint, include="artists")
         return self._parse_album(response["data"][0])
 
+    @use_cache()
     async def get_track(self, prov_track_id) -> Track:
         """Get full track details by id."""
         endpoint = f"catalog/{self._storefront}/songs/{prov_track_id}"
         response = await self._get_data(endpoint, include="artists,albums")
         return self._parse_track(response["data"][0])
 
+    @use_cache()
     async def get_playlist(self, prov_playlist_id) -> Playlist:
         """Get full playlist details by id."""
         if self._is_catalog_id(prov_playlist_id):
@@ -405,6 +412,7 @@ class AppleMusicProvider(MusicProvider):
         response = await self._get_data(endpoint)
         return self._parse_playlist(response["data"][0])
 
+    @use_cache()
     async def get_album_tracks(self, prov_album_id) -> list[Track]:
         """Get all album tracks for given album id."""
         endpoint = f"catalog/{self._storefront}/albums/{prov_album_id}/tracks"
@@ -420,6 +428,7 @@ class AppleMusicProvider(MusicProvider):
             tracks.append(track)
         return tracks
 
+    @use_cache(3600 * 3)  # cache for 3 hours
     async def get_playlist_tracks(self, prov_playlist_id, page: int = 0) -> list[Track]:
         """Get all playlist tracks for given playlist id."""
         if self._is_catalog_id(prov_playlist_id):
@@ -441,6 +450,7 @@ class AppleMusicProvider(MusicProvider):
                 result.append(parsed_track)
         return result
 
+    @use_cache(3600 * 24 * 7)  # cache for 7 days
     async def get_artist_albums(self, prov_artist_id) -> list[Album]:
         """Get a list of all albums for the given artist."""
         endpoint = f"catalog/{self._storefront}/artists/{prov_artist_id}/albums"
@@ -452,6 +462,7 @@ class AppleMusicProvider(MusicProvider):
             return []
         return [self._parse_album(album) for album in response if album["id"]]
 
+    @use_cache(3600 * 24 * 7)  # cache for 7 days
     async def get_artist_toptracks(self, prov_artist_id) -> list[Track]:
         """Get a list of 10 most popular tracks for the given artist."""
         endpoint = f"catalog/{self._storefront}/artists/{prov_artist_id}/view/top-songs"
@@ -481,6 +492,7 @@ class AppleMusicProvider(MusicProvider):
         """Remove track(s) from playlist."""
         raise NotImplementedError("Not implemented!")
 
+    @use_cache(3600 * 24)  # cache for 24 hours
     async def get_similar_tracks(self, prov_track_id, limit=25) -> list[Track]:
         """Retrieve a dynamic list of tracks based on the provided item."""
         # Note, Apple music does not have an official endpoint for similar tracks.
@@ -770,8 +782,6 @@ class AppleMusicProvider(MusicProvider):
             )
         if description := attributes.get("description"):
             playlist.metadata.description = description.get("standard")
-        if checksum := attributes.get("lastModifiedDate"):
-            playlist.cache_checksum = checksum
         return playlist
 
     async def _get_all_items(self, endpoint, key="data", **kwargs) -> list[dict]:
@@ -928,8 +938,9 @@ class AppleMusicProvider(MusicProvider):
         self, license_url: str, key_id: bytes, uri: str, item_id: str
     ) -> str:
         """Get the decryption key for a song."""
-        cache_key = f"decryption_key.{item_id}"
-        if decryption_key := await self.mass.cache.get(cache_key, base_key=self.instance_id):
+        if decryption_key := await self.mass.cache.get(
+            key=item_id, provider=self.instance_id, category=CACHE_CATEGORY_DECRYPT_KEY
+        ):
             self.logger.debug("Decryption key for %s found in cache.", item_id)
             return decryption_key
         pssh = self._get_pssh(key_id)
@@ -952,7 +963,11 @@ class AppleMusicProvider(MusicProvider):
         decryption_key = key.key.hex()
         self.mass.create_task(
             self.mass.cache.set(
-                cache_key, decryption_key, expiration=7200, base_key=self.instance_id
+                key=item_id,
+                data=decryption_key,
+                expiration=7200,
+                provider=self.instance_id,
+                category=CACHE_CATEGORY_DECRYPT_KEY,
             )
         )
         return decryption_key
index 93912c5f7ec0369c7c4de1b42562fe9d77484aa3..66c5f1cf9a5da9a94b7a4627403786d47bfcb88f 100644 (file)
@@ -354,6 +354,7 @@ class ARDAudiothek(MusicProvider):
         """Search and lookup always search remote."""
         return True
 
+    @use_cache(3600 * 24 * 7)  # cache for 7 days
     async def search(
         self,
         search_query: str,
@@ -409,6 +410,7 @@ class ARDAudiothek(MusicProvider):
 
         return SearchResults(podcasts=podcasts, radio=radios)
 
+    @use_cache(3600 * 24 * 7)  # cache for 7 days
     async def get_radio(self, prov_radio_id: str) -> Radio:
         """Get full radio details by id."""
         # Get full details of a single Radio station.
@@ -463,6 +465,7 @@ class ARDAudiothek(MusicProvider):
 
         return []
 
+    @use_cache(3600 * 24 * 7)  # cache for 7 days
     async def get_podcast(self, prov_podcast_id: str) -> Podcast:
         """Get podcast."""
         async with await self.get_client() as session:
@@ -515,6 +518,7 @@ class ARDAudiothek(MusicProvider):
                         progress,
                     )
 
+    @use_cache(3600 * 24)  # cache for 24 hours
     async def get_podcast_episode(self, prov_episode_id: str) -> PodcastEpisode:
         """Get single podcast episode."""
         await self._update_progress()
@@ -574,7 +578,7 @@ class ARDAudiothek(MusicProvider):
             allow_seek=seek,
         )
 
-    @use_cache(3600)
+    @use_cache(3600 * 24 * 7)  # cache for 7 days
     async def get_organizations(self, path: str) -> list[BrowseFolder]:
         """Create a list of all available organizations."""
         async with await self.get_client() as session:
@@ -607,7 +611,7 @@ class ARDAudiothek(MusicProvider):
 
         return organizations
 
-    @use_cache(3600)
+    @use_cache(3600 * 24 * 7)  # cache for 7 days
     async def get_publication_services(self, path: str, core_id: str) -> list[BrowseFolder]:
         """Create a list of publications for a given organization."""
         async with await self.get_client() as session:
@@ -632,7 +636,7 @@ class ARDAudiothek(MusicProvider):
 
         return publications
 
-    @use_cache(3600)
+    @use_cache(3600 * 24 * 7)  # cache for 7 days
     async def get_publications_list(self, core_id: str) -> list[Radio | Podcast]:
         """Create list of available radio stations and shows for a publication service."""
         async with await self.get_client() as session:
index 404b9ae4a484b4c1bede5a5359626754b4e3eb24..1fd7078932fa5ba37ee532a5f8f8c55c2f274cbb 100644 (file)
@@ -89,7 +89,10 @@ class AudibleHelper:
         cached_book = None
         if asin:
             cached_book = await self.mass.cache.get(
-                key=asin, base_key=CACHE_DOMAIN, category=CACHE_CATEGORY_AUDIOBOOK, default=None
+                key=asin,
+                provider=self.provider_instance,
+                category=CACHE_CATEGORY_AUDIOBOOK,
+                default=None,
             )
 
         try:
@@ -198,7 +201,10 @@ class AudibleHelper:
         """Fetch the audiobook by asin."""
         if use_cache:
             cached_book = await self.mass.cache.get(
-                key=asin, base_key=CACHE_DOMAIN, category=CACHE_CATEGORY_AUDIOBOOK, default=None
+                key=asin,
+                provider=self.provider_instance,
+                category=CACHE_CATEGORY_AUDIOBOOK,
+                default=None,
             )
             if cached_book is not None:
                 return await self._parse_audiobook(cached_book)
@@ -219,7 +225,7 @@ class AudibleHelper:
 
         await self.mass.cache.set(
             key=asin,
-            base_key=CACHE_DOMAIN,
+            provider=self.provider_instance,
             category=CACHE_CATEGORY_AUDIOBOOK,
             data=item_data,
         )
@@ -300,7 +306,7 @@ class AudibleHelper:
             return []
 
         chapters_data: list[Any] = await self.mass.cache.get(
-            base_key=CACHE_DOMAIN, category=CACHE_CATEGORY_CHAPTERS, key=asin, default=[]
+            key=asin, provider=self.provider_instance, category=CACHE_CATEGORY_CHAPTERS, default=[]
         )
 
         if not chapters_data:
@@ -328,10 +334,10 @@ class AudibleHelper:
                 chapters_data = chapter_info.get("chapters") or []
 
                 await self.mass.cache.set(
-                    base_key=CACHE_DOMAIN,
-                    category=CACHE_CATEGORY_CHAPTERS,
                     key=asin,
                     data=chapters_data,
+                    provider=self.provider_instance,
+                    category=CACHE_CATEGORY_CHAPTERS,
                 )
             except Exception as exc:
                 self.logger.error(f"Error fetching chapters for ASIN {asin}: {exc}")
@@ -418,12 +424,14 @@ class AudibleHelper:
         cache_key_with_params = f"{path}:{params_hash}"
         if use_cache:
             response = await self.mass.cache.get(
-                key=cache_key_with_params, base_key=CACHE_DOMAIN, category=CACHE_CATEGORY_API
+                key=cache_key_with_params,
+                provider=self.provider_instance,
+                category=CACHE_CATEGORY_API,
             )
         if not response:
             response = await self.client.get(path, **kwargs)
             await self.mass.cache.set(
-                key=cache_key_with_params, base_key=CACHE_DOMAIN, data=response
+                key=cache_key_with_params, provider=self.provider_instance, data=response
             )
         return response
 
index 1af46cc247193f6dfa7939790140d688061f10a2..0a9bd084d8b2b77ea9267fc3b1ebc1e792025e7d 100644 (file)
@@ -60,6 +60,7 @@ from music_assistant_models.media_items import (
 from music_assistant_models.media_items.media_item import RecommendationFolder
 from music_assistant_models.streamdetails import StreamDetails
 
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.audio import get_multi_file_stream
 from music_assistant.models.music_provider import MusicProvider
 from music_assistant.providers.audiobookshelf.parsers import (
@@ -276,11 +277,9 @@ for more details.
                         self._client.server_settings.version,
                     )
 
-        self.cache_base_key = self.instance_id
-
         cached_libraries = await self.mass.cache.get(
             key=CACHE_KEY_LIBRARIES,
-            base_key=self.cache_base_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_LIBRARIES,
             default=None,
         )
@@ -406,6 +405,7 @@ for more details.
 
         return abs_podcast
 
+    @use_cache(3600)
     @handle_refresh_token
     async def get_podcast(self, prov_podcast_id: str) -> Podcast:
         """Get single podcast."""
@@ -522,6 +522,7 @@ for more details.
 
         return abs_audiobook
 
+    @use_cache(3600)
     @handle_refresh_token
     async def get_audiobook(self, prov_audiobook_id: str) -> Audiobook:
         """Get a single audiobook.
@@ -1538,7 +1539,7 @@ for more details.
     async def _cache_set_helper_libraries(self) -> None:
         await self.mass.cache.set(
             key=CACHE_KEY_LIBRARIES,
-            base_key=self.cache_base_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_LIBRARIES,
             data=self.libraries.to_dict(),
         )
index 108f6bb8b7db1f3511fff16304be25ffd38d967b..1fdb9d524b918bf6c8e2574a06d2c15f8e9c7619 100644 (file)
@@ -6,7 +6,7 @@ import asyncio
 import os
 import time
 from collections.abc import AsyncGenerator
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING, Final, cast
 
 import aiofiles
 import shortuuid
@@ -36,7 +36,7 @@ from music_assistant_models.media_items import (
 )
 from music_assistant_models.streamdetails import StreamDetails
 
-from music_assistant.constants import CACHE_CATEGORY_MEDIA_INFO, MASS_LOGO, VARIOUS_ARTISTS_FANART
+from music_assistant.constants import MASS_LOGO, VARIOUS_ARTISTS_FANART
 from music_assistant.helpers.tags import AudioTags, async_parse_tags
 from music_assistant.helpers.uri import parse_uri
 from music_assistant.models.music_provider import MusicProvider
@@ -73,6 +73,7 @@ if TYPE_CHECKING:
     from music_assistant.mass import MusicAssistant
     from music_assistant.models import ProviderInstanceType
 
+CACHE_CATEGORY_MEDIA_INFO: Final[int] = 1
 
 SUPPORTED_FEATURES = {
     ProviderFeature.BROWSE,
@@ -227,7 +228,6 @@ class BuiltinProvider(MusicProvider):
                 },
                 owner="Music Assistant",
                 is_editable=False,
-                cache_checksum=str(int(time.time())),
                 metadata=MediaItemMetadata(
                     images=UniqueList([DEFAULT_THUMB])
                     if prov_playlist_id in COLLAGE_IMAGE_PLAYLISTS
@@ -253,7 +253,6 @@ class BuiltinProvider(MusicProvider):
             owner="Music Assistant",
             is_editable=True,
         )
-        playlist.cache_checksum = str(stored_item.get("last_updated"))
         if image_url := stored_item.get("image_url"):
             playlist.metadata.add_image(
                 MediaItemImage(
@@ -511,11 +510,9 @@ class BuiltinProvider(MusicProvider):
 
     async def _get_media_info(self, url: str, force_refresh: bool = False) -> AudioTags:
         """Retrieve mediainfo for url."""
-        cache_category = CACHE_CATEGORY_MEDIA_INFO
-        cache_base_key = self.lookup_key
         # do we have some cached info for this url ?
         cached_info = await self.mass.cache.get(
-            url, category=cache_category, base_key=cache_base_key
+            url, provider=self.instance_id, category=CACHE_CATEGORY_MEDIA_INFO
         )
         if cached_info and not force_refresh:
             return AudioTags.parse(cached_info)
@@ -524,7 +521,7 @@ class BuiltinProvider(MusicProvider):
         if "authSig" in url:
             media_info.has_cover_image = False
         await self.mass.cache.set(
-            url, media_info.raw, category=cache_category, base_key=cache_base_key
+            url, media_info.raw, provider=self.instance_id, category=CACHE_CATEGORY_MEDIA_INFO
         )
         return media_info
 
index 1d2011e28630a62b8057ac2eecb9ce9d1b71e1c5..97e3d0e3394e7544937d341ae09f9cbacdf691e2 100644 (file)
@@ -43,6 +43,7 @@ from music_assistant_models.provider import ProviderManifest
 from music_assistant_models.streamdetails import StreamDetails
 
 from music_assistant import MusicAssistant
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.app_vars import app_var  # type: ignore[attr-defined]
 from music_assistant.helpers.auth import AuthenticationHelper
 from music_assistant.helpers.datetime import utc_timestamp
@@ -195,6 +196,7 @@ class DeezerProvider(MusicProvider):
         )
         await self.gw_client.setup()
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def search(
         self, search_query: str, media_types: list[MediaType], limit: int = 5
     ) -> SearchResults:
@@ -263,6 +265,7 @@ class DeezerProvider(MusicProvider):
         async for track in await self.client.get_user_tracks():
             yield self.parse_track(track=track, user_country=self.gw_client.user_country)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_artist(self, prov_artist_id: str) -> Artist:
         """Get full artist details by id."""
         try:
@@ -273,6 +276,7 @@ class DeezerProvider(MusicProvider):
             self.logger.warning("Failed getting artist: %s", error)
             raise MediaNotFoundError(f"Artist {prov_artist_id} not found on Deezer") from error
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_album(self, prov_album_id: str) -> Album:
         """Get full album details by id."""
         try:
@@ -281,6 +285,7 @@ class DeezerProvider(MusicProvider):
             self.logger.warning("Failed getting album: %s", error)
             raise MediaNotFoundError(f"Album {prov_album_id} not found on Deezer") from error
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_playlist(self, prov_playlist_id: str) -> Playlist:
         """Get full playlist details by id."""
         try:
@@ -291,6 +296,7 @@ class DeezerProvider(MusicProvider):
             self.logger.warning("Failed getting playlist: %s", error)
             raise MediaNotFoundError(f"Album {prov_playlist_id} not found on Deezer") from error
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_track(self, prov_track_id: str) -> Track:
         """Get full track details by id."""
         try:
@@ -302,6 +308,7 @@ class DeezerProvider(MusicProvider):
             self.logger.warning("Failed getting track: %s", error)
             raise MediaNotFoundError(f"Album {prov_track_id} not found on Deezer") from error
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
         """Get all tracks in an album."""
         album = await self.client.get_album(album_id=int(prov_album_id))
@@ -315,6 +322,7 @@ class DeezerProvider(MusicProvider):
             for deezer_track in await album.get_tracks()
         ]
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         result: list[Track] = []
@@ -334,11 +342,13 @@ class DeezerProvider(MusicProvider):
             )
         return result
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
         """Get albums by an artist."""
         artist = await self.client.get_artist(artist_id=int(prov_artist_id))
         return [self.parse_album(album=album) async for album in await artist.get_albums()]
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_artist_toptracks(self, prov_artist_id: str) -> list[Track]:
         """Get top 50 tracks of an artist."""
         artist = await self.client.get_artist(artist_id=int(prov_artist_id))
@@ -393,6 +403,7 @@ class DeezerProvider(MusicProvider):
             raise NotImplementedError
         return result
 
+    @use_cache(3600)  # Cache for 1 hour
     async def recommendations(self) -> list[RecommendationFolder]:
         """Get deezer's recommendations."""
         return [
@@ -434,6 +445,7 @@ class DeezerProvider(MusicProvider):
         playlist = await self.client.get_playlist(playlist_id)
         return self.parse_playlist(playlist=playlist)
 
+    @use_cache(3600 * 24)  # Cache for 24 hours
     async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
         """Retrieve a dynamic list of tracks based on the provided item."""
         endpoint = "song.getSearchTrackMix"
@@ -645,7 +657,6 @@ class DeezerProvider(MusicProvider):
             ),
             is_editable=is_editable,
             owner=creator.name,
-            cache_checksum=playlist.checksum,
         )
 
     def get_playlist_creator(self, playlist: deezer.Playlist) -> deezer.User:
index 9dd16db44b5b13be56738214cf8802b70d567b41..ac392c562806e7aab12c96f3e2266979e3b48bce 100644 (file)
@@ -153,7 +153,7 @@ class FanartTvMetadataProvider(MetadataProvider):
                     return metadata
         return None
 
-    @use_cache(86400 * 30)
+    @use_cache(86400 * 60)  # Cache for 60 days
     async def _get_data(self, endpoint: str, **kwargs: str) -> dict[str, Any] | None:
         """Get data from api."""
         url = f"http://webservice.fanart.tv/v3/{endpoint}"
index 86495aa046e75ad71eff09a6aee47118d3b1a1a1..b116c8e792b57795d89dfa85e9ec6c7bbb57b8b4 100644 (file)
@@ -59,6 +59,7 @@ from music_assistant.constants import (
     VARIOUS_ARTISTS_NAME,
     VERBOSE_LOG_LEVEL,
 )
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.compare import compare_strings, create_safe_string
 from music_assistant.helpers.json import json_loads
 from music_assistant.helpers.playlists import parse_m3u, parse_pls
@@ -73,6 +74,11 @@ from music_assistant.models.music_provider import MusicProvider
 
 from .constants import (
     AUDIOBOOK_EXTENSIONS,
+    CACHE_CATEGORY_ALBUM_INFO,
+    CACHE_CATEGORY_ARTIST_INFO,
+    CACHE_CATEGORY_AUDIOBOOK_CHAPTERS,
+    CACHE_CATEGORY_FOLDER_IMAGES,
+    CACHE_CATEGORY_PODCAST_METADATA,
     CONF_ENTRY_CONTENT_TYPE,
     CONF_ENTRY_CONTENT_TYPE_READ_ONLY,
     CONF_ENTRY_IGNORE_ALBUM_PLAYLISTS,
@@ -468,7 +474,6 @@ class LocalFileSystemProvider(MusicProvider):
                 async def process_playlist() -> None:
                     playlist = await self.get_playlist(item.relative_path)
                    # add/update playlist to db
-                    playlist.cache_checksum = item.checksum
                     playlist.favorite = import_as_favorite
                     await self.mass.music.playlists.add_item_to_library(
                         playlist,
@@ -652,8 +657,6 @@ class LocalFileSystemProvider(MusicProvider):
         if file_item.ext == "pls":
             playlist.is_editable = False
         playlist.owner = self.name
-        checksum = str(file_item.checksum)
-        playlist.cache_checksum = checksum
         return playlist
 
     async def get_audiobook(self, prov_audiobook_id: str) -> Audiobook:
@@ -691,6 +694,7 @@ class LocalFileSystemProvider(MusicProvider):
             if any(x.provider_instance == self.instance_id for x in track.provider_mappings)
         ]
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         result: list[Track] = []
@@ -1037,8 +1041,11 @@ class LocalFileSystemProvider(MusicProvider):
                         break
 
         # prefer (short lived) cache for a bit more speed
-        cache_base_key = f"{self.instance_id}.artist"
-        if artist_path and (cache := await self.cache.get(artist_path, base_key=cache_base_key)):
+        if artist_path and (
+            cache := await self.cache.get(
+                key=artist_path, provider=self.instance_id, category=CACHE_CATEGORY_ARTIST_INFO
+            )
+        ):
             return cast("Artist", cache)
 
         prov_artist_id = artist_path or name
@@ -1085,7 +1092,13 @@ class LocalFileSystemProvider(MusicProvider):
         if images := await self._get_local_images(artist_path, extra_thumb_names=("artist",)):
             artist.metadata.images = UniqueList(images)
 
-        await self.cache.set(artist_path, artist, base_key=cache_base_key, expiration=120)
+        await self.cache.set(
+            key=artist_path,
+            data=artist,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_ARTIST_INFO,
+            expiration=120,
+        )
 
         return artist
 
@@ -1339,8 +1352,13 @@ class LocalFileSystemProvider(MusicProvider):
         track_dir = os.path.dirname(track_path)
         album_dir = get_album_dir(track_dir, track_tags.album)
 
-        cache_base_key = f"{self.instance_id}.album"
-        if album_dir and (cache := await self.cache.get(album_dir, base_key=cache_base_key)):
+        if album_dir and (
+            cache := await self.cache.get(
+                key=album_dir,
+                provider=self.instance_id,
+                category=CACHE_CATEGORY_ALBUM_INFO,
+            )
+        ):
             return cast("Album", cache)
 
         # album artist(s)
@@ -1475,15 +1493,24 @@ class LocalFileSystemProvider(MusicProvider):
                     album.metadata.images = UniqueList(images)
                 else:
                     album.metadata.images += images
-        await self.cache.set(album_dir, album, base_key=cache_base_key, expiration=120)
+        await self.cache.set(
+            key=album_dir,
+            data=album,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_ALBUM_INFO,
+            expiration=120,
+        )
         return album
 
     async def _get_local_images(
         self, folder: str, extra_thumb_names: tuple[str, ...] | None = None
     ) -> UniqueList[MediaItemImage]:
         """Return local images found in a given folderpath."""
-        cache_base_key = f"{self.lookup_key}.folderimages"
-        if (cache := await self.cache.get(folder, base_key=cache_base_key)) is not None:
+        if (
+            cache := await self.cache.get(
+                key=folder, provider=self.instance_id, category=CACHE_CATEGORY_FOLDER_IMAGES
+            )
+        ) is not None:
             return cast("UniqueList[MediaItemImage]", cache)
         if extra_thumb_names is None:
             extra_thumb_names = ()
@@ -1524,7 +1551,13 @@ class LocalFileSystemProvider(MusicProvider):
                 )
             )
 
-        await self.cache.set(folder, images, base_key=cache_base_key, expiration=120)
+        await self.cache.set(
+            key=folder,
+            data=images,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_FOLDER_IMAGES,
+            expiration=120,
+        )
         return images
 
     async def check_write_access(self) -> None:
@@ -1650,18 +1683,19 @@ class LocalFileSystemProvider(MusicProvider):
         prov_mapping = next(x for x in library_item.provider_mappings if x.item_id == item_id)
         file_item = await self.resolve(item_id)
         duration = library_item.duration
-        chapters_cache_key = f"{self.lookup_key}.audiobook.chapters"
         file_based_chapters: list[tuple[str, float]] | None = await self.cache.get(
-            file_item.relative_path,
-            base_key=chapters_cache_key,
+            key=file_item.relative_path,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_AUDIOBOOK_CHAPTERS,
         )
         if file_based_chapters is None:
             # no cache available for this audiobook, we need to parse the chapters
             tags = await async_parse_tags(file_item.absolute_path, file_item.file_size)
             await self._parse_audiobook(file_item, tags)
             file_based_chapters = await self.cache.get(
-                file_item.relative_path,
-                base_key=chapters_cache_key,
+                key=file_item.relative_path,
+                provider=self.instance_id,
+                category=CACHE_CATEGORY_AUDIOBOOK_CHAPTERS,
             )
 
         if file_based_chapters:
@@ -1752,16 +1786,22 @@ class LocalFileSystemProvider(MusicProvider):
         # store chapter files in cache
         # for easy access from streamdetails
         await self.cache.set(
-            audiobook_file_item.relative_path,
-            all_chapter_files,
-            base_key=f"{self.lookup_key}.audiobook.chapters",
+            key=audiobook_file_item.relative_path,
+            data=all_chapter_files,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_AUDIOBOOK_CHAPTERS,
         )
         return (int(total_duration), chapters)
 
     async def _get_podcast_metadata(self, podcast_folder: str) -> dict[str, Any]:
         """Return metadata for a podcast."""
-        cache_base_key = f"{self.lookup_key}.podcastmetadata"
-        if (cache := await self.cache.get(podcast_folder, base_key=cache_base_key)) is not None:
+        if (
+            cache := await self.cache.get(
+                key=podcast_folder,
+                provider=self.instance_id,
+                category=CACHE_CATEGORY_PODCAST_METADATA,
+            )
+        ) is not None:
             return cast("dict[str, Any]", cache)
         data: dict[str, Any] = {}
         metadata_file = os.path.join(podcast_folder, "metadata.json")
@@ -1770,5 +1810,10 @@ class LocalFileSystemProvider(MusicProvider):
             metadata_file = self.get_absolute_path(metadata_file)
             async with aiofiles.open(metadata_file) as _file:
                 data.update(json_loads(await _file.read()))
-        await self.cache.set(podcast_folder, data, base_key=cache_base_key)
+        await self.cache.set(
+            key=podcast_folder,
+            data=data,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_PODCAST_METADATA,
+        )
         return data
index 55fe86c728bc0c9271e79571b0e34dc74163695f..3c54c5e6a4badacea8366a9e501d289d582a1315 100644 (file)
@@ -2,6 +2,8 @@
 
 from __future__ import annotations
 
+from typing import Final
+
 from music_assistant_models.config_entries import ConfigEntry, ConfigValueOption
 from music_assistant_models.enums import ConfigEntryType, ProviderFeature
 
@@ -179,3 +181,10 @@ SUPPORTED_FEATURES = {
 
 class IsChapterFile(Exception):
     """Exception to indicate that a file is part of a multi-part media (e.g. audiobook chapter)."""
+
+
+CACHE_CATEGORY_ARTIST_INFO: Final[int] = 1
+CACHE_CATEGORY_ALBUM_INFO: Final[int] = 2
+CACHE_CATEGORY_FOLDER_IMAGES: Final[int] = 3
+CACHE_CATEGORY_AUDIOBOOK_CHAPTERS: Final[int] = 4
+CACHE_CATEGORY_PODCAST_METADATA: Final[int] = 5
index 97a3ce24a1882c5d7066ae15f32ffd083390cbcf..e0cd89b499498902c7e6597b55ead89fb6a02621 100644 (file)
@@ -12,6 +12,7 @@ from typing import TYPE_CHECKING
 from music_assistant_models.enums import ProviderFeature
 from music_assistant_models.media_items import MediaItemMetadata, Track
 
+from music_assistant.controllers.cache import use_cache
 from music_assistant.models.metadata_provider import MetadataProvider
 
 if TYPE_CHECKING:
@@ -79,7 +80,7 @@ class GeniusProvider(MetadataProvider):
             )
             return None
 
-        song_lyrics = await asyncio.to_thread(self._fetch_lyrics, artist_name, track.name)
+        song_lyrics = await self.fetch_lyrics(artist_name, track.name)
 
         if song_lyrics:
             metadata = MediaItemMetadata()
@@ -91,34 +92,45 @@ class GeniusProvider(MetadataProvider):
         self.logger.debug("No lyrics found for %s by %s", track.name, artist_name)
         return None
 
-    def _fetch_lyrics(self, artist: str, title: str) -> str | None:
-        """Fetch lyrics - NOTE: not async friendly."""
-        # blank artist / title?
-        if artist is None or len(artist.strip()) == 0 or title is None or len(title.strip()) == 0:
-            self.logger.error("Cannot fetch lyrics without artist and title")
-            return None
-
-        # clean song title to increase chance and accuracy of a result
-        cleaned_title = clean_song_title(title)
-        if cleaned_title != title:
-            self.logger.debug(f'Song title was cleaned: "{title}"  ->  "{cleaned_title}"')
+    @use_cache(86400 * 7)  # Cache for 7 days
+    async def fetch_lyrics(self, artist: str, title: str) -> str | None:
+        """Fetch lyrics for a given artist and title."""
+
+        def _fetch_lyrics(artist: str, title: str) -> str | None:
+            """Fetch lyrics - NOTE: not async friendly."""
+            # blank artist / title?
+            if (
+                artist is None
+                or len(artist.strip()) == 0
+                or title is None
+                or len(title.strip()) == 0
+            ):
+                self.logger.error("Cannot fetch lyrics without artist and title")
+                return None
+
+            # clean song title to increase chance and accuracy of a result
+            cleaned_title = clean_song_title(title)
+            if cleaned_title != title:
+                self.logger.debug(f'Song title was cleaned: "{title}"  ->  "{cleaned_title}"')
+
+            self.logger.info(f"Searching lyrics for artist='{artist}' and title='{cleaned_title}'")
 
-        self.logger.info(f"Searching lyrics for artist='{artist}' and title='{cleaned_title}'")
+            # perform search
+            song = self._genius.search_song(cleaned_title, artist, get_full_info=False)
 
-        # perform search
-        song = self._genius.search_song(cleaned_title, artist, get_full_info=False)
+            # second search needed?
+            if not song and " - " in cleaned_title:
+                # aggressively truncate title from the first hyphen
+                cleaned_title = cleaned_title.split(" - ", 1)[0]
+                self.logger.info(f"Second attempt, aggressively cleaned title='{cleaned_title}'")
 
-        # second search needed?
-        if not song and " - " in cleaned_title:
-            # aggressively truncate title from the first hyphen
-            cleaned_title = cleaned_title.split(" - ", 1)[0]
-            self.logger.info(f"Second attempt, aggressively cleaned title='{cleaned_title}'")
+                # perform search
+                song = self._genius.search_song(cleaned_title, artist, get_full_info=False)
 
-            # perform search
-            song = self._genius.search_song(cleaned_title, artist, get_full_info=False)
+            if song:
+                # attempts to clean lyrics of erroneous text
+                return cleanup_lyrics(song)
 
-        if song:
-            # attempts to clean lyrics of erroneous text
-            return cleanup_lyrics(song)
+            return None
 
-        return None
+        return await asyncio.to_thread(_fetch_lyrics, artist, title)
index ce5fce6b3bb409379a0bf033a0075656fb5ab8cc..13cb26fa20376fa4b4d4c32a3529fc4148ec74bd 100644 (file)
@@ -288,7 +288,7 @@ class GPodder(MusicProvider):
 
         timestamps = await self.mass.cache.get(
             key=CACHE_KEY_TIMESTAMP,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_OTHER,
             default=None,
         )
@@ -306,7 +306,7 @@ class GPodder(MusicProvider):
 
         feeds = await self.mass.cache.get(
             key=CACHE_KEY_FEEDS,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_OTHER,
             default=None,
         )
@@ -610,7 +610,7 @@ class GPodder(MusicProvider):
     async def _cache_get_podcast(self, prov_podcast_id: str) -> dict[str, Any]:
         parsed_podcast = await self.mass.cache.get(
             key=prov_podcast_id,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_PODCAST_ITEMS,
             default=None,
         )
@@ -628,7 +628,7 @@ class GPodder(MusicProvider):
     async def _cache_set_podcast(self, feed_url: str, parsed_podcast: dict[str, Any]) -> None:
         await self.mass.cache.set(
             key=feed_url,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_PODCAST_ITEMS,
             data=parsed_podcast,
             expiration=60 * 60 * 24,  # 1 day
@@ -638,7 +638,7 @@ class GPodder(MusicProvider):
         # seven days default
         await self.mass.cache.set(
             key=CACHE_KEY_TIMESTAMP,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_OTHER,
             data=[self.timestamp_subscriptions, self.timestamp_actions],
         )
@@ -647,7 +647,7 @@ class GPodder(MusicProvider):
         # seven days default
         await self.mass.cache.set(
             key=CACHE_KEY_FEEDS,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_OTHER,
             data=self.feeds,
         )
index 106028675553d08ecb499c34095259c0391a8a2e..580711b508b3230aeeda138e8118e59ca664c66a 100644 (file)
@@ -36,6 +36,7 @@ from music_assistant.constants import (
     VARIOUS_ARTISTS_MBID,
     VARIOUS_ARTISTS_NAME,
 )
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.util import infer_album_type, parse_title_and_version
 from music_assistant.models.music_provider import MusicProvider
 
@@ -129,6 +130,7 @@ class IBroadcastProvider(MusicProvider):
                 self.logger.debug("Parse album failed: %s", album, exc_info=error)
                 continue
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_album(self, prov_album_id: str) -> Album:
         """Get full album details by id."""
         album_obj = await self._client.get_album(int(prov_album_id))
@@ -143,6 +145,7 @@ class IBroadcastProvider(MusicProvider):
                 self.logger.debug("Parse artist failed: %s", artist, exc_info=error)
                 continue
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
         """Get a list of albums for the given artist."""
         albums_objs = [
@@ -159,16 +162,19 @@ class IBroadcastProvider(MusicProvider):
                 continue
         return albums
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
         """Get album tracks for given album id."""
         album = await self._client.get_album(int(prov_album_id))
         return await self._get_tracks(album["tracks"])
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_track(self, prov_track_id: str) -> Track:
         """Get full track details by id."""
         track_obj = await self._client.get_track(int(prov_track_id))
         return await self._parse_track(track_obj)
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_artist(self, prov_artist_id: str) -> Artist:
         """Get full artist details by id."""
         artist_obj = await self._client.get_artist(int(prov_artist_id))
@@ -205,6 +211,7 @@ class IBroadcastProvider(MusicProvider):
             if playlist["type"] != "recently-played" and playlist["type"] != "thumbsup":
                 yield await self._parse_playlist(playlist)
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_playlist(self, prov_playlist_id: str) -> Playlist:
         """Get full playlist details by id."""
         playlist_obj = await self._client.get_playlist(int(prov_playlist_id))
@@ -214,6 +221,7 @@ class IBroadcastProvider(MusicProvider):
             self.logger.debug("Parse playlist failed: %s", playlist_obj, exc_info=error)
         return playlist
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         tracks: list[Track] = []
index c4e61d6bd9a7bd8a40cf4a7799ddc18e20e03f87..d6c123bedf2f3fa54ac3b87d4c09fa907f47898b 100644 (file)
@@ -31,6 +31,7 @@ from music_assistant_models.media_items import (
 )
 from music_assistant_models.streamdetails import StreamDetails
 
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.podcast_parsers import (
     get_podcastparser_dict,
     parse_podcast,
@@ -134,6 +135,7 @@ class ITunesPodcastsProvider(MusicProvider):
         # 20 requests per minute, be a bit below
         self.throttler = ThrottlerManager(rate_limit=18, period=60)
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def search(
         self, search_query: str, media_types: list[MediaType], limit: int = 10
     ) -> SearchResults:
@@ -329,7 +331,7 @@ class ITunesPodcastsProvider(MusicProvider):
     async def _cache_get_podcast(self, prov_podcast_id: str) -> dict[str, Any]:
         parsed_podcast = await self.mass.cache.get(
             key=prov_podcast_id,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_PODCASTS,
             default=None,
         )
@@ -350,7 +352,7 @@ class ITunesPodcastsProvider(MusicProvider):
     async def _cache_set_podcast(self, feed_url: str, parsed_podcast: dict[str, Any]) -> None:
         await self.mass.cache.set(
             key=feed_url,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_PODCASTS,
             data=parsed_podcast,
             expiration=60 * 60 * 24,  # 1 day
@@ -359,7 +361,7 @@ class ITunesPodcastsProvider(MusicProvider):
     async def _cache_set_top_podcasts(self, top_podcast_helper: TopPodcastsHelper) -> None:
         await self.mass.cache.set(
             key=CACHE_KEY_TOP_PODCASTS,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_RECOMMENDATIONS,
             data=top_podcast_helper.to_dict(),
             expiration=60 * 60 * 6,  # 6 hours
@@ -368,7 +370,7 @@ class ITunesPodcastsProvider(MusicProvider):
     async def _cache_get_top_podcasts(self) -> list[PodcastSearchResult]:
         parsed_top_podcasts = await self.mass.cache.get(
             key=CACHE_KEY_TOP_PODCASTS,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_RECOMMENDATIONS,
         )
         if parsed_top_podcasts is not None:
index c4b7b3ea82b01c4f657678693dba66aa457c568a..fb84d13d18a7452675ee80d0d15b2cacaecfd305 100644 (file)
@@ -25,6 +25,7 @@ from music_assistant_models.media_items import (
 from music_assistant_models.streamdetails import StreamDetails
 
 from music_assistant.constants import UNKNOWN_ARTIST_ID_MBID
+from music_assistant.controllers.cache import use_cache
 from music_assistant.mass import MusicAssistant
 from music_assistant.models import ProviderInstanceType
 from music_assistant.models.music_provider import MusicProvider
@@ -233,13 +234,14 @@ class JellyfinProvider(MusicProvider):
             playlists.append(parse_playlist(self.instance_id, self._client, item))
         return playlists
 
+    @use_cache(60 * 15)  # Cache for 15 minutes
     async def search(
         self,
         search_query: str,
         media_types: list[MediaType],
         limit: int = 20,
     ) -> SearchResults:
-        """Perform search on the plex library.
+        """Perform search on the Jellyfin library.
 
         :param search_query: Search query.
         :param media_types: A list of media_types to include. All types if None.
@@ -341,6 +343,7 @@ class JellyfinProvider(MusicProvider):
             raise MediaNotFoundError(f"Item {prov_album_id} not found")
         return parse_album(self.logger, self.instance_id, self._client, album)
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
         """Get album tracks for given album id."""
         jellyfin_album_tracks = (
@@ -354,6 +357,7 @@ class JellyfinProvider(MusicProvider):
             for jellyfin_album_track in jellyfin_album_tracks["Items"]
         ]
 
+    @use_cache(60 * 15)  # Cache for 15 minutes
     async def get_artist(self, prov_artist_id: str) -> Artist:
         """Get full artist details by id."""
         if prov_artist_id == UNKNOWN_ARTIST_MAPPING.item_id:
@@ -378,6 +382,7 @@ class JellyfinProvider(MusicProvider):
             raise MediaNotFoundError(f"Item {prov_artist_id} not found")
         return parse_artist(self.logger, self.instance_id, self._client, jellyfin_artist)
 
+    @use_cache(60 * 15)  # Cache for 15 minutes
     async def get_track(self, prov_track_id: str) -> Track:
         """Get full track details by id."""
         try:
@@ -386,6 +391,7 @@ class JellyfinProvider(MusicProvider):
             raise MediaNotFoundError(f"Item {prov_track_id} not found")
         return parse_track(self.logger, self.instance_id, self._client, track)
 
+    @use_cache(60 * 15)  # Cache for 15 minutes
     async def get_playlist(self, prov_playlist_id: str) -> Playlist:
         """Get full playlist details by id."""
         try:
@@ -394,6 +400,7 @@ class JellyfinProvider(MusicProvider):
             raise MediaNotFoundError(f"Item {prov_playlist_id} not found")
         return parse_playlist(self.instance_id, self._client, playlist)
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         result: list[Track] = []
@@ -419,6 +426,7 @@ class JellyfinProvider(MusicProvider):
                 )
         return result
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
         """Get a list of albums for the given artist."""
         if not prov_artist_id.startswith(FAKE_ARTIST_PREFIX):
@@ -453,6 +461,7 @@ class JellyfinProvider(MusicProvider):
             allow_seek=True,
         )
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
         """Retrieve a dynamic list of tracks based on the provided item."""
         resp = await self._client.get_similar_tracks(
index 167f9c9594f4c031b32546b79f1acf5ef1ab6f0d..8259f8940bcf11e20e9d33459ed18330313966be 100644 (file)
@@ -14,6 +14,7 @@ from music_assistant_models.config_entries import ConfigEntry
 from music_assistant_models.enums import ConfigEntryType, ProviderFeature
 from music_assistant_models.media_items import MediaItemMetadata, Track
 
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.throttle_retry import ThrottlerManager, throttle_with_retries
 from music_assistant.models.metadata_provider import MetadataProvider
 
@@ -77,6 +78,7 @@ class LrclibProvider(MetadataProvider):
             self.throttler = ThrottlerManager(rate_limit=1, period=1)
             self.logger.debug("Using custom API endpoint: %s (throttling disabled)", self.api_url)
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     @throttle_with_retries
     async def _get_data(self, **params: Any) -> dict[str, Any] | None:
         """Get data from LRCLib API with throttling and retries."""
index 44621755f849737af7fc78e178acef986a2db454..9724d1c9bdead0cd89b7e53d042866ab88ba37d9 100644 (file)
@@ -426,7 +426,7 @@ class MusicbrainzProvider(MetadataProvider):
                 return MusicBrainzArtist.from_raw(artist)
         return None
 
-    @use_cache(86400 * 30)
+    @use_cache(86400 * 30)  # Cache for 30 days
     @throttle_with_retries
     async def get_data(self, endpoint: str, **kwargs: str) -> Any:
         """Get data from api."""
index 53284ea191d242620b5f3bbb95172e3f30fe1a36..731103c0188d1468c0b16f6562d703ce5453be5b 100644 (file)
@@ -38,6 +38,7 @@ from music_assistant_models.media_items import (
 from music_assistant_models.streamdetails import StreamDetails
 
 from music_assistant.constants import CONF_PASSWORD, CONF_USERNAME
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.json import json_loads
 from music_assistant.helpers.util import infer_album_type
 from music_assistant.models.music_provider import MusicProvider
@@ -126,6 +127,7 @@ class NugsProvider(MusicProvider):
             if item and item["id"]:
                 yield self._parse_playlist(item)
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_artist(self, prov_artist_id: str) -> Artist:
         """Get artist details by id."""
         endpoint = f"/releases/recent?limit=1&artistIds={prov_artist_id}"
@@ -133,6 +135,7 @@ class NugsProvider(MusicProvider):
         artist_data = artist_response["items"][0]["artist"]
         return self._parse_artist(artist_data)
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
         """Get a list of all albums for the given artist."""
         params = {
@@ -145,18 +148,21 @@ class NugsProvider(MusicProvider):
             if (item and item["id"])
         ]
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_album(self, prov_album_id: str) -> Album:
         """Get album details by id."""
         endpoint = f"shows/{prov_album_id}"
         response = await self._get_data("catalog", endpoint)
         return self._parse_album(response["Response"])
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_playlist(self, prov_playlist_id: str) -> Playlist:
         """Get full playlist details by id."""
         endpoint = f"playlists/{prov_playlist_id}"
         response = await self._get_data("stash", endpoint)
         return self._parse_playlist(response["items"])
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
         """Get all album tracks for given album id."""
         endpoint = f"shows/{prov_album_id}"
@@ -173,6 +179,7 @@ class NugsProvider(MusicProvider):
             if item["trackID"]
         ]
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         result: list[Track] = []
index 162f0368cf48bb9e092872e4e7ac30f9fdbc737b..3f2e293cbf5dcfd5a1db5acbcca4e86097c2384a 100644 (file)
@@ -38,7 +38,6 @@ from music_assistant_models.media_items import (
 from music_assistant_models.streamdetails import StreamDetails
 
 from music_assistant.constants import (
-    CACHE_CATEGORY_OPEN_SUBSONIC,
     CONF_PASSWORD,
     CONF_PATH,
     CONF_PORT,
@@ -66,7 +65,7 @@ if TYPE_CHECKING:
     from libopensonic.media import ArtistID3 as SonicArtist
     from libopensonic.media import Bookmark as SonicBookmark
     from libopensonic.media import Child as SonicSong
-    from libopensonic.media import OpenSubsonicExtension
+    from libopensonic.media import OpenSubsonicExtension, PodcastChannel
     from libopensonic.media import Playlist as SonicPlaylist
     from libopensonic.media import PodcastEpisode as SonicEpisode
 
@@ -80,6 +79,8 @@ CONF_NEW_ALBUMS = "recommend_new"
 CONF_PLAYED_ALBUMS = "recommend_played"
 CONF_RECO_SIZE = "recommendation_count"
 
+CACHE_CATEGORY_PODCAST_CHANNEL = 1
+CACHE_CATEGORY_PODCAST_EPISODES = 2
 
 Param = ParamSpec("Param")
 RetType = TypeVar("RetType")
@@ -96,7 +97,6 @@ class OpenSonicProvider(MusicProvider):
     _show_new: bool = True
     _show_played: bool = True
     _reco_limit: int = 10
-    _cache_base_key: str = ""
 
     async def handle_async_init(self) -> None:
         """Set up the music provider and test the connection."""
@@ -139,7 +139,6 @@ class OpenSonicProvider(MusicProvider):
         self._show_new = bool(self.config.get_value(CONF_NEW_ALBUMS))
         self._show_played = bool(self.config.get_value(CONF_PLAYED_ALBUMS))
         self._reco_limit = int(str(self.config.get_value(CONF_RECO_SIZE)))
-        self._cache_base_key = f"{self.instance_id}/"
 
     @property
     def is_streaming_provider(self) -> bool:
@@ -755,17 +754,26 @@ class OpenSonicProvider(MusicProvider):
 
         self.logger.debug("Done streaming %s", streamdetails.item_id)
 
-    async def _get_podcast_channel_async(self, chan_id: str, base_key: str) -> None:
-        chan = await self._run_async(self.conn.get_podcasts, inc_episodes=True, pid=chan_id)
-        if not chan:
-            return
-        await self.mass.cache.set(
+    async def _get_podcast_channel_async(self, chan_id: str) -> PodcastChannel | None:
+        if cache := await self.mass.cache.get(
             key=chan_id,
-            data=chan[0],
-            base_key=base_key,
-            expiration=600,
-            category=CACHE_CATEGORY_OPEN_SUBSONIC,
-        )
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_PODCAST_CHANNEL,
+        ):
+            return cache
+        if channels := await self._run_async(
+            self.conn.get_podcasts, inc_episodes=True, pid=chan_id
+        ):
+            channel = channels[0]
+            await self.mass.cache.set(
+                key=chan_id,
+                data=channel,
+                provider=self.instance_id,
+                expiration=600,
+                category=CACHE_CATEGORY_PODCAST_CHANNEL,
+            )
+            return channel
+        return None
 
     async def _podcast_recommendations(self) -> RecommendationFolder:
         podcasts: RecommendationFolder = RecommendationFolder(
@@ -776,22 +784,9 @@ class OpenSonicProvider(MusicProvider):
         sonic_episodes = await self._run_async(
             self.conn.get_newest_podcasts, count=self._reco_limit
         )
-        chan_ids = set()
-        chan_base_key = f"{self._cache_base_key}/podcast_channels/"
-        async with TaskGroup() as tg:
-            for ep in sonic_episodes:
-                if ep.channel_id in chan_ids:
-                    continue
-                tg.create_task(self._get_podcast_channel_async(ep.channel_id, chan_base_key))
-                chan_ids.add(ep.channel_id)
-
         for ep in sonic_episodes:
-            chan = await self.mass.cache.get(
-                key=ep.channel_id, base_key=chan_base_key, category=CACHE_CATEGORY_OPEN_SUBSONIC
-            )
-            if not chan:
-                continue
-            podcasts.items.append(parse_epsiode(self.instance_id, ep, chan))
+            if channel_info := await self._get_podcast_channel_async(ep.channel_id):
+                podcasts.items.append(parse_epsiode(self.instance_id, ep, channel_info))
         return podcasts
 
     async def _favorites_recommendation(self) -> RecommendationFolder:
index bef07edb0e612ae0f64415fa1b2ab34812ef1aa6..9d2afef0d4eb2706b9fc83caa13e0d8ebf6164f4 100644 (file)
@@ -54,6 +54,7 @@ from plexapi.myplex import MyPlexAccount, MyPlexPinLogin
 from plexapi.server import PlexServer
 
 from music_assistant.constants import UNKNOWN_ARTIST
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.auth import AuthenticationHelper
 from music_assistant.helpers.tags import async_parse_tags
 from music_assistant.helpers.util import parse_title_and_version
@@ -660,8 +661,6 @@ class PlexProvider(MusicProvider):
                 ]
             )
         playlist.is_editable = not plex_playlist.smart
-        playlist.cache_checksum = str(plex_playlist.updatedAt.timestamp())
-
         return playlist
 
     async def _parse_track(self, plex_track: PlexTrack) -> Track:
@@ -741,6 +740,7 @@ class PlexProvider(MusicProvider):
 
         return track
 
+    @use_cache(3600)  # Cache for 1 hour
     async def search(
         self,
         search_query: str,
@@ -842,6 +842,7 @@ class PlexProvider(MusicProvider):
                 yield await self._parse_track(plex_track)
             offset += page_size
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_album(self, prov_album_id: str) -> Album:
         """Get full album details by id."""
         if plex_album := await self._get_data(prov_album_id, PlexAlbum):
@@ -849,6 +850,7 @@ class PlexProvider(MusicProvider):
         msg = f"Item {prov_album_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
         """Get album tracks for given album id."""
         plex_album: PlexAlbum = await self._get_data(prov_album_id, PlexAlbum)
@@ -860,6 +862,7 @@ class PlexProvider(MusicProvider):
             tracks.append(track)
         return tracks
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_artist(self, prov_artist_id: str) -> Artist:
         """Get full artist details by id."""
         if prov_artist_id.startswith(FAKE_ARTIST_PREFIX):
@@ -877,6 +880,7 @@ class PlexProvider(MusicProvider):
         msg = f"Item {prov_artist_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_track(self, prov_track_id: str) -> Track:
         """Get full track details by id."""
         if plex_track := await self._get_data(prov_track_id, PlexTrack):
@@ -884,6 +888,7 @@ class PlexProvider(MusicProvider):
         msg = f"Item {prov_track_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_playlist(self, prov_playlist_id: str) -> Playlist:
         """Get full playlist details by id."""
         if plex_playlist := await self._get_data(prov_playlist_id, PlexPlaylist):
@@ -891,6 +896,7 @@ class PlexProvider(MusicProvider):
         msg = f"Item {prov_playlist_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         result: list[Track] = []
@@ -906,6 +912,7 @@ class PlexProvider(MusicProvider):
                 result.append(track)
         return result
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
         """Get a list of albums for the given artist."""
         if not prov_artist_id.startswith(FAKE_ARTIST_PREFIX):
index 3685e7b4985f427b1f1ed6d7ffc4f1cac4f6046e..76a677f332941ff42322b5d0a3c879764068b05e 100644 (file)
@@ -272,7 +272,7 @@ class PodcastIndexProvider(MusicProvider):
         self.update_config_value(CONF_STORED_PODCASTS, stored_podcasts)
         return True
 
-    @use_cache(86400)  # Cache for 24 hours
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_podcast(self, prov_podcast_id: str) -> Podcast:
         """Get podcast details."""
         try:
@@ -292,7 +292,6 @@ class PodcastIndexProvider(MusicProvider):
 
         raise MediaNotFoundError(f"Podcast {prov_podcast_id} not found")
 
-    @use_cache(43200)  # Cache for 12 hours
     async def get_podcast_episodes(
         self, prov_podcast_id: str
     ) -> AsyncGenerator[PodcastEpisode, None]:
index b40bbc910728877f49072e2b368384b3520503b5..bdf3e3f6968ac576822806543b41820967d44064 100644 (file)
@@ -26,6 +26,7 @@ from music_assistant_models.errors import InvalidProviderURI, MediaNotFoundError
 from music_assistant_models.media_items import AudioFormat, Podcast, PodcastEpisode
 from music_assistant_models.streamdetails import StreamDetails
 
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.compare import create_safe_string
 from music_assistant.helpers.podcast_parsers import (
     get_podcastparser_dict,
@@ -132,12 +133,14 @@ class PodcastMusicprovider(MusicProvider):
         await self._cache_set_podcast()
         yield await self._parse_podcast()
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_podcast(self, prov_podcast_id: str) -> Podcast:
         """Get full artist details by id."""
         if prov_podcast_id != self.podcast_id:
             raise RuntimeError(f"Podcast id not in provider: {prov_podcast_id}")
         return await self._parse_podcast()
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_podcast_episode(self, prov_episode_id: str) -> PodcastEpisode:
         """Get (full) podcast episode details by id."""
         for idx, episode in enumerate(self.parsed_podcast["episodes"]):
@@ -161,6 +164,7 @@ class PodcastMusicprovider(MusicProvider):
             if mass_episode := self._parse_episode(episode, idx):
                 yield mass_episode
 
+    @use_cache(3600)  # Cache for 1 hour
     async def get_stream_details(self, item_id: str, media_type: MediaType) -> StreamDetails:
         """Get streamdetails for a track/radio."""
         for episode in self.parsed_podcast["episodes"]:
@@ -213,7 +217,7 @@ class PodcastMusicprovider(MusicProvider):
     async def _cache_get_podcast(self) -> dict[str, Any]:
         parsed_podcast = await self.mass.cache.get(
             key=self.podcast_id,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_PODCASTS,
             default=None,
         )
@@ -226,7 +230,7 @@ class PodcastMusicprovider(MusicProvider):
     async def _cache_set_podcast(self) -> None:
         await self.mass.cache.set(
             key=self.podcast_id,
-            base_key=self.lookup_key,
+            provider=self.instance_id,
             category=CACHE_CATEGORY_PODCASTS,
             data=self.parsed_podcast,
             expiration=60 * 60 * 24,  # 1 day
index 212b119f27c251e36d04d9797401427f735b518d..749f83ec54f51748b07b28652ab4e56b808e9cbf 100644 (file)
@@ -45,6 +45,7 @@ from music_assistant.constants import (
     VARIOUS_ARTISTS_MBID,
     VARIOUS_ARTISTS_NAME,
 )
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.app_vars import app_var
 from music_assistant.helpers.json import json_loads
 from music_assistant.helpers.throttle_retry import ThrottlerManager, throttle_with_retries
@@ -141,6 +142,7 @@ class QobuzProvider(MusicProvider):
             msg = f"Login failed for user {self.config.get_value(CONF_USERNAME)}"
             raise LoginFailed(msg)
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def search(
         self, search_query: str, media_types: list[MediaType], limit: int = 5
     ) -> SearchResults:
@@ -224,6 +226,7 @@ class QobuzProvider(MusicProvider):
             if item and item["id"]:
                 yield self._parse_playlist(item)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_artist(self, prov_artist_id) -> Artist:
         """Get full artist details by id."""
         params = {"artist_id": prov_artist_id}
@@ -232,6 +235,7 @@ class QobuzProvider(MusicProvider):
         msg = f"Item {prov_artist_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_album(self, prov_album_id) -> Album:
         """Get full album details by id."""
         params = {"album_id": prov_album_id}
@@ -240,6 +244,7 @@ class QobuzProvider(MusicProvider):
         msg = f"Item {prov_album_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_track(self, prov_track_id) -> Track:
         """Get full track details by id."""
         params = {"track_id": prov_track_id}
@@ -248,6 +253,7 @@ class QobuzProvider(MusicProvider):
         msg = f"Item {prov_track_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_playlist(self, prov_playlist_id) -> Playlist:
         """Get full playlist details by id."""
         params = {"playlist_id": prov_playlist_id}
@@ -256,6 +262,7 @@ class QobuzProvider(MusicProvider):
         msg = f"Item {prov_playlist_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_album_tracks(self, prov_album_id) -> list[Track]:
         """Get all album tracks for given album id."""
         params = {"album_id": prov_album_id}
@@ -265,6 +272,7 @@ class QobuzProvider(MusicProvider):
             if (item and item["id"])
         ]
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         result: list[Track] = []
@@ -286,6 +294,7 @@ class QobuzProvider(MusicProvider):
             result.append(track)
         return result
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_artist_albums(self, prov_artist_id) -> list[Album]:
         """Get a list of albums for the given artist."""
         result = await self._get_data(
@@ -301,6 +310,7 @@ class QobuzProvider(MusicProvider):
             if (item and item["id"] and str(item["artist"]["id"]) == prov_artist_id)
         ]
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_artist_toptracks(self, prov_artist_id) -> list[Track]:
         """Get a list of most popular tracks for the given artist."""
         result = await self._get_data(
@@ -706,7 +716,6 @@ class QobuzProvider(MusicProvider):
                     remotely_accessible=True,
                 )
             ]
-        playlist.cache_checksum = str(playlist_obj["updated_at"])
         return playlist
 
     @lock
index b7b29ed2280ed9735360749b6fdfe3ede99468c1..4d2cca683c36967bed42a55a485be55aa8522cbb 100644 (file)
@@ -148,6 +148,7 @@ class RadioBrowserProvider(MusicProvider):
             ):
                 await self.library_add(await self.get_radio(db_row["provider_item_id"]))
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def search(
         self, search_query: str, media_types: list[MediaType], limit: int = 10
     ) -> SearchResults:
@@ -293,7 +294,7 @@ class RadioBrowserProvider(MusicProvider):
         self.update_config_value(CONF_STORED_RADIOS, stored_radios)
         return True
 
-    @use_cache(3600)
+    @use_cache(3600 * 6)  # Cache for 6 hours
     async def get_by_popularity(self) -> Sequence[Radio]:
         """Get radio stations by popularity."""
         try:
@@ -307,7 +308,7 @@ class RadioBrowserProvider(MusicProvider):
         except RadioBrowserError as err:
             raise ProviderUnavailableError(f"Failed to fetch popular stations: {err}") from err
 
-    @use_cache(3600)
+    @use_cache(3600 * 6)  # Cache for 6 hours
     async def get_by_votes(self) -> Sequence[Radio]:
         """Get radio stations by votes."""
         try:
@@ -321,7 +322,7 @@ class RadioBrowserProvider(MusicProvider):
         except RadioBrowserError as err:
             raise ProviderUnavailableError(f"Failed to fetch stations by votes: {err}") from err
 
-    @use_cache(3600 * 24)
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_country_folders(self, base_path: str) -> list[BrowseFolder]:
         """Get a list of country names as BrowseFolder."""
         try:
@@ -347,7 +348,7 @@ class RadioBrowserProvider(MusicProvider):
             items.append(folder)
         return items
 
-    @use_cache(3600 * 24)
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_language_folders(self, base_path: str) -> list[BrowseFolder]:
         """Get a list of language names as BrowseFolder."""
         try:
@@ -367,7 +368,7 @@ class RadioBrowserProvider(MusicProvider):
             for language in languages
         ]
 
-    @use_cache(3600 * 24)
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_tag_folders(self, base_path: str) -> list[BrowseFolder]:
         """Get a list of tag names as BrowseFolder."""
         try:
@@ -391,7 +392,7 @@ class RadioBrowserProvider(MusicProvider):
             for tag in tags
         ]
 
-    @use_cache(3600)
+    @use_cache(3600 * 24)  # Cache for 1 day
     async def get_by_country(self, country_code: str) -> list[Radio]:
         """Get radio stations by country."""
         try:
@@ -409,7 +410,7 @@ class RadioBrowserProvider(MusicProvider):
                 f"Failed to fetch stations for country {country_code}: {err}"
             ) from err
 
-    @use_cache(3600)
+    @use_cache(3600 * 24)  # Cache for 1 day
     async def get_by_language(self, language: str) -> list[Radio]:
         """Get radio stations by language."""
         try:
@@ -427,7 +428,7 @@ class RadioBrowserProvider(MusicProvider):
                 f"Failed to fetch stations for language {language}: {err}"
             ) from err
 
-    @use_cache(3600)
+    @use_cache(3600 * 24)  # Cache for 1 day
     async def get_by_tag(self, tag: str) -> list[Radio]:
         """Get radio stations by tag."""
         try:
@@ -445,6 +446,7 @@ class RadioBrowserProvider(MusicProvider):
                 f"Failed to fetch stations for tag {tag}: {err}"
             ) from err
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_radio(self, prov_radio_id: str) -> Radio:
         """Get radio station details."""
         try:
index b5d60d3657cb595abc9103072d2100c7f377c5c7..d4ffb09f2e33a57d45509d877d30e3da29b87e90 100644 (file)
@@ -19,6 +19,7 @@ from music_assistant_models.media_items import (
 )
 from music_assistant_models.streamdetails import StreamDetails
 
+from music_assistant.controllers.cache import use_cache
 from music_assistant.models.music_provider import MusicProvider
 
 from . import parsers
@@ -39,6 +40,7 @@ class RadioParadiseProvider(MusicProvider):
         for channel_id in RADIO_PARADISE_CHANNELS:
             yield self._parse_radio(channel_id)
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_radio(self, prov_radio_id: str) -> Radio:
         """Get full radio details by id."""
         if prov_radio_id not in RADIO_PARADISE_CHANNELS:
index b0929574f29c6520b8662994b8104b97d741aa5b..70257ff61387237eee054edd17c00843b49c9be2 100644 (file)
@@ -29,6 +29,7 @@ from music_assistant_models.media_items import (
 from music_assistant_models.streamdetails import StreamDetails
 from tenacity import RetryError
 
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.util import select_free_port
 from music_assistant.helpers.webserver import Webserver
 from music_assistant.models.music_provider import MusicProvider
@@ -202,6 +203,7 @@ class SiriusXMProvider(MusicProvider):
             if channel.is_favorite:
                 yield self._parse_radio(channel)
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_radio(self, prov_radio_id: str) -> Radio:  # type: ignore[return]
         """Get full radio details by id."""
         if prov_radio_id not in self._channels_by_id:
@@ -241,6 +243,7 @@ class SiriusXMProvider(MusicProvider):
 
         return self._current_stream_details
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def browse(self, path: str) -> Sequence[MediaItemType | ItemMapping | BrowseFolder]:
         """Browse this provider's items.
 
index d662e5aecd5d1a4460eeecc172d2f19d79e7cd9b..afcaa49cc4c82b613057afdf022da71973156ab3 100644 (file)
@@ -114,6 +114,7 @@ class SoundcloudMusicProvider(MusicProvider):
         self._me = await self._soundcloud.get_account_details()
         self._user_id = self._me["id"]
 
+    @use_cache(3600 * 48)  # Cache for 48 hours
     async def search(
         self, search_query: str, media_types: list[MediaType], limit: int = 10
     ) -> SearchResults:
@@ -222,7 +223,7 @@ class SoundcloudMusicProvider(MusicProvider):
             round(time.time() - time_start, 2),
         )
 
-    @use_cache(3600)
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def recommendations(self) -> list[RecommendationFolder]:
         """Get available recommendations."""
         # Part 1, the mixed selections
@@ -265,6 +266,7 @@ class SoundcloudMusicProvider(MusicProvider):
             folders.append(folder)
         return folders
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_artist(self, prov_artist_id: str) -> Artist:
         """Get full artist details by id."""
         artist_obj = await self._soundcloud.get_user_details(prov_artist_id)
@@ -275,6 +277,7 @@ class SoundcloudMusicProvider(MusicProvider):
             self.logger.debug("Parse artist failed: %s", artist_obj, exc_info=error)
         return artist
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_track(self, prov_track_id: str) -> Track:
         """Get full track details by id."""
         track_obj = await self._soundcloud.get_track_details(prov_track_id)
@@ -284,6 +287,7 @@ class SoundcloudMusicProvider(MusicProvider):
             self.logger.debug("Parse track failed: %s", track_obj, exc_info=error)
         return track
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_playlist(self, prov_playlist_id: str) -> Playlist:
         """Get full playlist details by id."""
         playlist_obj = await self._get_playlist_object(prov_playlist_id)
@@ -302,6 +306,7 @@ class SoundcloudMusicProvider(MusicProvider):
             # Handle regular playlists
             return await self._soundcloud.get_playlist_details(prov_playlist_id)
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         result: list[Track] = []
@@ -328,6 +333,7 @@ class SoundcloudMusicProvider(MusicProvider):
                 continue
         return result
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_artist_toptracks(self, prov_artist_id: str) -> list[Track]:
         """Get a list of (max 500) tracks for the given artist."""
         tracks_obj = await self._soundcloud.get_tracks_from_user(prov_artist_id, 500)
@@ -343,6 +349,7 @@ class SoundcloudMusicProvider(MusicProvider):
                 continue
         return tracks
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
         """Retrieve a dynamic list of tracks based on the provided item."""
         tracks_obj = await self._soundcloud.get_recommended(prov_track_id, limit)
@@ -358,6 +365,7 @@ class SoundcloudMusicProvider(MusicProvider):
 
         return tracks
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_stream_details(self, item_id: str, media_type: MediaType) -> StreamDetails:
         """Return the content details for the given track when it will be streamed."""
         url: str = await self._soundcloud.get_stream_url(track_id=item_id, presets=["mp3"])
index 3211d7180da5bbea475be0610dc413e5fbf93dcf..e3014ab5826d5b9bd5e1044f9a6af96efc37bde5 100644 (file)
@@ -221,7 +221,6 @@ def parse_playlist(playlist_obj: dict[str, Any], provider: SpotifyProvider) -> P
     )
 
     playlist.metadata.images = parse_images(playlist_obj.get("images", []), provider.lookup_key)
-    playlist.cache_checksum = str(playlist_obj["snapshot_id"])
     return playlist
 
 
index 639edb2219cdc7169b2f0b27c68bf84dbf599708..f6be5c395d5b60dfa490a1c932f1b6348006cad2 100644 (file)
@@ -6,7 +6,7 @@ import asyncio
 import os
 import time
 from collections.abc import AsyncGenerator
-from typing import TYPE_CHECKING, Any
+from typing import Any
 
 import aiohttp
 from music_assistant_models.enums import (
@@ -68,11 +68,9 @@ from .parsers import (
 )
 from .streaming import LibrespotStreamer
 
-if TYPE_CHECKING:
-    from music_assistant_models.config_entries import ProviderConfig
-    from music_assistant_models.provider import ProviderManifest
 
-    from music_assistant import MusicAssistant
+class NotModifiedError(Exception):
+    """Exception raised when a resource has not been modified."""
 
 
 class SpotifyProvider(MusicProvider):
@@ -81,20 +79,10 @@ class SpotifyProvider(MusicProvider):
     _auth_info: dict[str, Any] | None = None
     _sp_user: dict[str, Any] | None = None
     _librespot_bin: str | None = None
+    _audiobooks_supported = False
     custom_client_id_active: bool = False
     throttler: ThrottlerManager
 
-    def __init__(
-        self,
-        mass: MusicAssistant,
-        manifest: ProviderManifest,
-        config: ProviderConfig,
-        supported_features: set[ProviderFeature],
-    ) -> None:
-        """Initialize the provider."""
-        super().__init__(mass, manifest, config)
-        self._base_supported_features = supported_features
-
     async def handle_async_init(self) -> None:
         """Handle async initialization of the provider."""
         self.cache_dir = os.path.join(self.mass.cache_path, self.instance_id)
@@ -116,22 +104,10 @@ class SpotifyProvider(MusicProvider):
                 "for supported countries."
             )
 
-    async def _test_audiobook_support(self) -> bool:
-        """Test if audiobooks are supported in user's region."""
-        try:
-            await self._get_data("me/audiobooks", limit=1)
-            return True
-        except aiohttp.ClientResponseError as e:
-            if e.status == 403:
-                return False  # Not available
-            raise  # Re-raise other HTTP errors
-        except (MediaNotFoundError, ProviderUnavailableError):
-            return False
-
     @property
     def audiobooks_supported(self) -> bool:
         """Check if audiobooks are supported for this user/region."""
-        return getattr(self, "_audiobooks_supported", False)
+        return self._audiobooks_supported
 
     @property
     def audiobook_progress_sync_enabled(self) -> bool:
@@ -147,17 +123,15 @@ class SpotifyProvider(MusicProvider):
     @property
     def supported_features(self) -> set[ProviderFeature]:
         """Return the features supported by this Provider."""
-        features = self._base_supported_features.copy()
+        features = self._supported_features.copy()
         # Add audiobook features if enabled
         if self.audiobooks_supported:
             features.add(ProviderFeature.LIBRARY_AUDIOBOOKS)
             features.add(ProviderFeature.LIBRARY_AUDIOBOOKS_EDIT)
-
         if not self.custom_client_id_active:
             # Spotify has killed the similar tracks api for developers
             # https://developer.spotify.com/blog/2024-11-27-changes-to-the-web-api
             return {*features, ProviderFeature.SIMILAR_TRACKS}
-
         return features
 
     @property
@@ -167,7 +141,69 @@ class SpotifyProvider(MusicProvider):
             return str(self._sp_user["display_name"])
         return None
 
+    ## Library retrieval methods (generators)
+    async def get_library_artists(self) -> AsyncGenerator[Artist, None]:
+        """Retrieve library artists from spotify."""
+        endpoint = "me/following"
+        while True:
+            spotify_artists = await self._get_data(
+                endpoint,
+                type="artist",
+                limit=50,
+            )
+            for item in spotify_artists["artists"]["items"]:
+                if item and item["id"]:
+                    yield parse_artist(item, self)
+            if spotify_artists["artists"]["next"]:
+                endpoint = spotify_artists["artists"]["next"]
+                endpoint = endpoint.replace("https://api.spotify.com/v1/", "")
+            else:
+                break
+
+    async def get_library_albums(self) -> AsyncGenerator[Album, None]:
+        """Retrieve library albums from the provider."""
+        async for item in self._get_all_items("me/albums"):
+            if item["album"] and item["album"]["id"]:
+                yield parse_album(item["album"], self)
+
+    async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
+        """Retrieve library tracks from the provider."""
+        async for item in self._get_all_items("me/tracks"):
+            if item and item["track"]["id"]:
+                yield parse_track(item["track"], self)
+
+    async def get_library_podcasts(self) -> AsyncGenerator[Podcast, None]:
+        """Retrieve library podcasts from spotify."""
+        async for item in self._get_all_items("me/shows"):
+            if item["show"] and item["show"]["id"]:
+                show_obj = item["show"]
+                # Filter out audiobooks - they have a distinctive description format
+                description = show_obj.get("description", "")
+                if description.startswith("Author(s):") and "Narrator(s):" in description:
+                    continue
+                yield parse_podcast(show_obj, self)
+
+    async def get_library_audiobooks(self) -> AsyncGenerator[Audiobook, None]:
+        """Retrieve library audiobooks from spotify."""
+        if not self.audiobooks_supported:
+            return
+        async for item in self._get_all_items("me/audiobooks"):
+            if item and item["id"]:
+                # Parse the basic audiobook
+                audiobook = parse_audiobook(item, self)
+                # Add chapters from Spotify API data
+                await self._add_audiobook_chapters(audiobook)
+                yield audiobook
+
+    async def get_library_playlists(self) -> AsyncGenerator[Playlist, None]:
+        """Retrieve playlists from the provider."""
+        yield await self._get_liked_songs_playlist()
+        async for item in self._get_all_items("me/playlists"):
+            if item and item["id"]:
+                yield parse_playlist(item, self)
+
     # ruff: noqa: PLR0915
+    @use_cache()
     async def search(
         self, search_query: str, media_types: list[MediaType] | None = None, limit: int = 5
     ) -> SearchResults:
@@ -263,121 +299,25 @@ class SpotifyProvider(MusicProvider):
                 break
         return searchresult
 
-    async def get_library_artists(self) -> AsyncGenerator[Artist, None]:
-        """Retrieve library artists from spotify."""
-        endpoint = "me/following"
-        while True:
-            spotify_artists = await self._get_data(
-                endpoint,
-                type="artist",
-                limit=50,
-            )
-            for item in spotify_artists["artists"]["items"]:
-                if item and item["id"]:
-                    yield parse_artist(item, self)
-            if spotify_artists["artists"]["next"]:
-                endpoint = spotify_artists["artists"]["next"]
-                endpoint = endpoint.replace("https://api.spotify.com/v1/", "")
-            else:
-                break
-
-    async def get_library_albums(self) -> AsyncGenerator[Album, None]:
-        """Retrieve library albums from the provider."""
-        async for item in self._get_all_items("me/albums"):
-            if item["album"] and item["album"]["id"]:
-                yield parse_album(item["album"], self)
-
-    async def get_library_tracks(self) -> AsyncGenerator[Track, None]:
-        """Retrieve library tracks from the provider."""
-        async for item in self._get_all_items("me/tracks"):
-            if item and item["track"]["id"]:
-                yield parse_track(item["track"], self)
-
-    async def get_library_podcasts(self) -> AsyncGenerator[Podcast, None]:
-        """Retrieve library podcasts from spotify."""
-        async for item in self._get_all_items("me/shows"):
-            if item["show"] and item["show"]["id"]:
-                show_obj = item["show"]
-                # Filter out audiobooks - they have a distinctive description format
-                description = show_obj.get("description", "")
-                if description.startswith("Author(s):") and "Narrator(s):" in description:
-                    continue
-                yield parse_podcast(show_obj, self)
-
-    async def get_library_audiobooks(self) -> AsyncGenerator[Audiobook, None]:
-        """Retrieve library audiobooks from spotify."""
-        if not self.audiobooks_supported:
-            return
-        async for item in self._get_all_items("me/audiobooks"):
-            if item and item["id"]:
-                # Parse the basic audiobook
-                audiobook = parse_audiobook(item, self)
-                # Add chapters from Spotify API data
-                await self._add_audiobook_chapters(audiobook)
-                yield audiobook
-
-    def _get_liked_songs_playlist_id(self) -> str:
-        return f"{LIKED_SONGS_FAKE_PLAYLIST_ID_PREFIX}-{self.instance_id}"
-
-    async def _get_liked_songs_playlist(self) -> Playlist:
-        if self._sp_user is None:
-            raise LoginFailed("User info not available - not logged in")
-
-        liked_songs = Playlist(
-            item_id=self._get_liked_songs_playlist_id(),
-            provider=self.lookup_key,
-            name=f"Liked Songs {self._sp_user['display_name']}",  # TODO to be translated
-            owner=self._sp_user["display_name"],
-            provider_mappings={
-                ProviderMapping(
-                    item_id=self._get_liked_songs_playlist_id(),
-                    provider_domain=self.domain,
-                    provider_instance=self.instance_id,
-                    url="https://open.spotify.com/collection/tracks",
-                )
-            },
-        )
-
-        liked_songs.is_editable = False  # TODO Editing requires special endpoints
-
-        # Add image to the playlist metadata
-        image = MediaItemImage(
-            type=ImageType.THUMB,
-            path="https://misc.scdn.co/liked-songs/liked-songs-64.png",
-            provider=self.lookup_key,
-            remotely_accessible=True,
-        )
-        if liked_songs.metadata.images is None:
-            liked_songs.metadata.images = UniqueList([image])
-        else:
-            liked_songs.metadata.add_image(image)
-
-        liked_songs.cache_checksum = str(time.time())
-
-        return liked_songs
-
-    async def get_library_playlists(self) -> AsyncGenerator[Playlist, None]:
-        """Retrieve playlists from the provider."""
-        yield await self._get_liked_songs_playlist()
-        async for item in self._get_all_items("me/playlists"):
-            if item and item["id"]:
-                yield parse_playlist(item, self)
-
+    @use_cache()
     async def get_artist(self, prov_artist_id: str) -> Artist:
         """Get full artist details by id."""
         artist_obj = await self._get_data(f"artists/{prov_artist_id}")
         return parse_artist(artist_obj, self)
 
+    @use_cache()
     async def get_album(self, prov_album_id: str) -> Album:
         """Get full album details by id."""
         album_obj = await self._get_data(f"albums/{prov_album_id}")
         return parse_album(album_obj, self)
 
+    @use_cache()
     async def get_track(self, prov_track_id: str) -> Track:
         """Get full track details by id."""
         track_obj = await self._get_data(f"tracks/{prov_track_id}")
         return parse_track(track_obj, self)
 
+    @use_cache()
     async def get_playlist(self, prov_playlist_id: str) -> Playlist:
         """Get full playlist details by id."""
         if prov_playlist_id == self._get_liked_songs_playlist_id():
@@ -386,7 +326,7 @@ class SpotifyProvider(MusicProvider):
         playlist_obj = await self._get_data(f"playlists/{prov_playlist_id}")
         return parse_playlist(playlist_obj, self)
 
-    @use_cache(86400)  # 24 hours
+    @use_cache()
     async def get_podcast(self, prov_podcast_id: str) -> Podcast:
         """Get full podcast details by id."""
         podcast_obj = await self._get_data(f"shows/{prov_podcast_id}")
@@ -394,7 +334,7 @@ class SpotifyProvider(MusicProvider):
             raise MediaNotFoundError(f"Podcast not found: {prov_podcast_id}")
         return parse_podcast(podcast_obj, self)
 
-    @use_cache(86400)  # 24 hours
+    @use_cache()
     async def get_audiobook(self, prov_audiobook_id: str) -> Audiobook:
         """Get full audiobook details by id."""
         if not self.audiobooks_supported:
@@ -415,111 +355,17 @@ class SpotifyProvider(MusicProvider):
 
         return audiobook
 
-    async def _add_audiobook_chapters(self, audiobook: Audiobook) -> None:
-        """Add chapter metadata to an audiobook from Spotify API data."""
-        try:
-            chapters_data = await self._get_audiobook_chapters_data(audiobook.item_id)
-            if chapters_data:
-                chapters = []
-                total_duration_seconds = 0.0
-
-                for idx, chapter in enumerate(chapters_data):
-                    duration_ms = chapter.get("duration_ms", 0)
-                    duration_seconds = duration_ms / 1000.0
-
-                    chapter_obj = MediaItemChapter(
-                        position=idx + 1,
-                        name=chapter.get("name", f"Chapter {idx + 1}"),
-                        start=total_duration_seconds,
-                        end=total_duration_seconds + duration_seconds,
-                    )
-                    chapters.append(chapter_obj)
-                    total_duration_seconds += duration_seconds
-
-                audiobook.metadata.chapters = chapters
-                audiobook.duration = int(total_duration_seconds)
-
-        except (MediaNotFoundError, ResourceTemporarilyUnavailable, ProviderUnavailableError) as e:
-            self.logger.warning(f"Failed to get chapters for audiobook {audiobook.item_id}: {e}")
-
-    @use_cache(43200)  # 12 hours - balances freshness with performance
-    async def _get_podcast_episodes_data(self, prov_podcast_id: str) -> list[dict[str, Any]]:
-        """Get raw episode data from Spotify API (cached).
-
-        Args:
-            prov_podcast_id: Spotify podcast ID
-
-        Returns:
-            List of episode data dictionaries
-        """
-        episodes_data: list[dict[str, Any]] = []
-
-        try:
-            async for item in self._get_all_items(
-                f"shows/{prov_podcast_id}/episodes", market="from_token"
-            ):
-                if item and item.get("id"):
-                    episodes_data.append(item)
-        except MediaNotFoundError:
-            self.logger.warning("Podcast %s not found", prov_podcast_id)
-            return []
-        except ResourceTemporarilyUnavailable as err:
-            self.logger.warning(
-                "Temporary error fetching episodes for %s: %s", prov_podcast_id, err
-            )
-            raise
-
-        return episodes_data
-
-    @use_cache(7200)  # 2 hours - shorter cache for resume point data
-    async def _get_audiobook_chapters_data(self, prov_audiobook_id: str) -> list[dict[str, Any]]:
-        """Get raw chapter data from Spotify API (cached).
-
-        Args:
-            prov_audiobook_id: Spotify audiobook ID
-
-        Returns:
-            List of chapter data dictionaries
-        """
-        chapters_data: list[dict[str, Any]] = []
-
-        try:
-            async for item in self._get_all_items(
-                f"audiobooks/{prov_audiobook_id}/chapters", market="from_token"
-            ):
-                if item and item.get("id"):
-                    chapters_data.append(item)
-        except MediaNotFoundError:
-            self.logger.warning("Audiobook %s not found", prov_audiobook_id)
-            return []
-        except ResourceTemporarilyUnavailable as err:
-            self.logger.warning(
-                "Temporary error fetching chapters for %s: %s", prov_audiobook_id, err
-            )
-            raise
-
-        return chapters_data
-
     async def get_podcast_episodes(
         self, prov_podcast_id: str
     ) -> AsyncGenerator[PodcastEpisode, None]:
         """Get all podcast episodes."""
         # Get podcast object for context if available
-        podcast: Podcast | None = None
-        try:
-            podcast = await self.mass.music.podcasts.get_provider_item(
-                prov_podcast_id, self.instance_id
-            )
-        except MediaNotFoundError:
-            # If not in MA library, get it via API (this is cached)
-            try:
-                podcast = await self.get_podcast(prov_podcast_id)
-            except MediaNotFoundError:
-                self.logger.warning(
-                    "Podcast with ID %s is no longer available on Spotify", prov_podcast_id
-                )
+        podcast = await self.mass.music.podcasts.get_library_item_by_prov_id(
+            prov_podcast_id, self.instance_id
+        )
+        podcast = podcast or await self.get_podcast(prov_podcast_id)
 
-        # Get cached episode data
+        # Get (cached) episode data
         episodes_data = await self._get_podcast_episodes_data(prov_podcast_id)
 
         # Parse and yield episodes with position
@@ -666,6 +512,7 @@ class SpotifyProvider(MusicProvider):
             # The resume position will be automatically updated by MA's internal tracking
             # and will be retrieved via get_audiobook() which combines MA + Spotify positions
 
+    @use_cache()
     async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
         """Get all album tracks for given album id."""
         return [
@@ -674,6 +521,7 @@ class SpotifyProvider(MusicProvider):
             if item["id"]
         ]
 
+    @use_cache(3600 * 3)  # 3 hours
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks."""
         result: list[Track] = []
@@ -682,9 +530,14 @@ class SpotifyProvider(MusicProvider):
             if prov_playlist_id == self._get_liked_songs_playlist_id()
             else f"playlists/{prov_playlist_id}/tracks"
         )
+        # do single request to get the etag (which we use as checksum for caching)
+        cache_checksum = await self._get_etag(uri, limit=1, offset=0)
+
         page_size = 50
         offset = page * page_size
-        spotify_result = await self._get_data(uri, limit=page_size, offset=offset)
+        spotify_result = await self._get_data_with_caching(
+            uri, cache_checksum, limit=page_size, offset=offset
+        )
         for index, item in enumerate(spotify_result["items"], 1):
             if not (item and item["track"] and item["track"]["id"]):
                 continue
@@ -694,6 +547,7 @@ class SpotifyProvider(MusicProvider):
             result.append(track)
         return result
 
+    @use_cache(86400 * 14)  # 14 days
     async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
         """Get a list of all albums for the given artist."""
         return [
@@ -704,6 +558,7 @@ class SpotifyProvider(MusicProvider):
             if (item and item["id"])
         ]
 
+    @use_cache(86400 * 14)  # 14 days
     async def get_artist_toptracks(self, prov_artist_id: str) -> list[Track]:
         """Get a list of 10 most popular tracks for the given artist."""
         artist = await self.get_artist(prov_artist_id)
@@ -812,6 +667,7 @@ class SpotifyProvider(MusicProvider):
         self._fix_create_playlist_api_bug(new_playlist)
         return parse_playlist(new_playlist, self)
 
+    @use_cache(86400 * 14)  # 14 days
     async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
         """Retrieve a dynamic list of tracks based on the provided item."""
         endpoint = "recommendations"
@@ -991,16 +847,141 @@ class SpotifyProvider(MusicProvider):
             self.logger.info("Successfully logged in to Spotify as %s", userinfo["display_name"])
         return auth_info
 
+    def _get_liked_songs_playlist_id(self) -> str:
+        return f"{LIKED_SONGS_FAKE_PLAYLIST_ID_PREFIX}-{self.instance_id}"
+
+    async def _get_liked_songs_playlist(self) -> Playlist:
+        if self._sp_user is None:
+            raise LoginFailed("User info not available - not logged in")
+
+        liked_songs = Playlist(
+            item_id=self._get_liked_songs_playlist_id(),
+            provider=self.lookup_key,
+            name=f"Liked Songs {self._sp_user['display_name']}",  # TODO to be translated
+            owner=self._sp_user["display_name"],
+            provider_mappings={
+                ProviderMapping(
+                    item_id=self._get_liked_songs_playlist_id(),
+                    provider_domain=self.domain,
+                    provider_instance=self.instance_id,
+                    url="https://open.spotify.com/collection/tracks",
+                )
+            },
+        )
+
+        liked_songs.is_editable = False  # TODO Editing requires special endpoints
+
+        # Add image to the playlist metadata
+        image = MediaItemImage(
+            type=ImageType.THUMB,
+            path="https://misc.scdn.co/liked-songs/liked-songs-64.png",
+            provider=self.lookup_key,
+            remotely_accessible=True,
+        )
+        if liked_songs.metadata.images is None:
+            liked_songs.metadata.images = UniqueList([image])
+        else:
+            liked_songs.metadata.add_image(image)
+
+        return liked_songs
+
+    async def _add_audiobook_chapters(self, audiobook: Audiobook) -> None:
+        """Add chapter metadata to an audiobook from Spotify API data."""
+        try:
+            chapters_data = await self._get_audiobook_chapters_data(audiobook.item_id)
+            if chapters_data:
+                chapters = []
+                total_duration_seconds = 0.0
+
+                for idx, chapter in enumerate(chapters_data):
+                    duration_ms = chapter.get("duration_ms", 0)
+                    duration_seconds = duration_ms / 1000.0
+
+                    chapter_obj = MediaItemChapter(
+                        position=idx + 1,
+                        name=chapter.get("name", f"Chapter {idx + 1}"),
+                        start=total_duration_seconds,
+                        end=total_duration_seconds + duration_seconds,
+                    )
+                    chapters.append(chapter_obj)
+                    total_duration_seconds += duration_seconds
+
+                audiobook.metadata.chapters = chapters
+                audiobook.duration = int(total_duration_seconds)
+
+        except (MediaNotFoundError, ResourceTemporarilyUnavailable, ProviderUnavailableError) as e:
+            self.logger.warning(f"Failed to get chapters for audiobook {audiobook.item_id}: {e}")
+
+    @use_cache(43200)  # 12 hours - balances freshness with performance
+    async def _get_podcast_episodes_data(self, prov_podcast_id: str) -> list[dict[str, Any]]:
+        """Get raw episode data from Spotify API (cached).
+
+        Args:
+            prov_podcast_id: Spotify podcast ID
+
+        Returns:
+            List of episode data dictionaries
+        """
+        episodes_data: list[dict[str, Any]] = []
+
+        try:
+            async for item in self._get_all_items(
+                f"shows/{prov_podcast_id}/episodes", market="from_token"
+            ):
+                if item and item.get("id"):
+                    episodes_data.append(item)
+        except MediaNotFoundError:
+            self.logger.warning("Podcast %s not found", prov_podcast_id)
+            return []
+        except ResourceTemporarilyUnavailable as err:
+            self.logger.warning(
+                "Temporary error fetching episodes for %s: %s", prov_podcast_id, err
+            )
+            raise
+
+        return episodes_data
+
+    @use_cache(7200)  # 2 hours - shorter cache for resume point data
+    async def _get_audiobook_chapters_data(self, prov_audiobook_id: str) -> list[dict[str, Any]]:
+        """Get raw chapter data from Spotify API (cached).
+
+        Args:
+            prov_audiobook_id: Spotify audiobook ID
+
+        Returns:
+            List of chapter data dictionaries
+        """
+        chapters_data: list[dict[str, Any]] = []
+
+        try:
+            async for item in self._get_all_items(
+                f"audiobooks/{prov_audiobook_id}/chapters", market="from_token"
+            ):
+                if item and item.get("id"):
+                    chapters_data.append(item)
+        except MediaNotFoundError:
+            self.logger.warning("Audiobook %s not found", prov_audiobook_id)
+            return []
+        except ResourceTemporarilyUnavailable as err:
+            self.logger.warning(
+                "Temporary error fetching chapters for %s: %s", prov_audiobook_id, err
+            )
+            raise
+
+        return chapters_data
+
     async def _get_all_items(
         self, endpoint: str, key: str = "items", **kwargs: Any
     ) -> AsyncGenerator[dict[str, Any], None]:
         """Get all items from a paged list."""
         limit = 50
         offset = 0
+        # do single request to get the etag (which we use as checksum for caching)
+        cache_checksum = await self._get_etag(endpoint, limit=1, offset=0, **kwargs)
         while True:
-            kwargs["limit"] = limit
-            kwargs["offset"] = offset
-            result = await self._get_data(endpoint, **kwargs)
+            result = await self._get_data_with_caching(
+                endpoint, cache_checksum=cache_checksum, limit=limit, offset=offset, **kwargs
+            )
             offset += limit
             if not result or key not in result or not result[key]:
                 break
@@ -1009,6 +990,30 @@ class SpotifyProvider(MusicProvider):
             if len(result[key]) < limit:
                 break
 
+    async def _get_data_with_caching(
+        self, endpoint: str, cache_checksum: str | None, **kwargs: Any
+    ) -> dict[str, Any]:
+        """Get data from api with caching."""
+        cache_key_parts = [endpoint]
+        for key in sorted(kwargs.keys()):
+            cache_key_parts.append(f"{key}{kwargs[key]}")
+        cache_key = ".".join(map(str, cache_key_parts))
+        if cached := await self.mass.cache.get(
+            cache_key, provider=self.instance_id, checksum=cache_checksum, allow_bypass=False
+        ):
+            return cached
+        result = await self._get_data(endpoint, **kwargs)
+        await self.mass.cache.set(
+            cache_key, result, provider=self.instance_id, checksum=cache_checksum
+        )
+        return result
+
+    @use_cache(120, allow_bypass=False)  # short cache for etags (subsequent calls use cached data)
+    async def _get_etag(self, endpoint: str, **kwargs: Any) -> str | None:
+        """Get etag for api endpoint."""
+        _res = await self._get_data(endpoint, **kwargs)
+        return _res.get("etag")
+
     @throttle_with_retries
     async def _get_data(self, endpoint: str, **kwargs: Any) -> dict[str, Any]:
         """Get data from api."""
@@ -1021,6 +1026,7 @@ class SpotifyProvider(MusicProvider):
         locale = self.mass.metadata.locale.replace("_", "-")
         language = locale.split("-")[0]
         headers["Accept-Language"] = f"{locale}, {language};q=0.9, *;q=0.5"
+        self.logger.debug("handling get data %s with kwargs %s", url, kwargs)
         async with (
             self.mass.http_session.get(
                 url,
@@ -1050,6 +1056,8 @@ class SpotifyProvider(MusicProvider):
                 raise MediaNotFoundError(f"{endpoint} not found")
             response.raise_for_status()
             result: dict[str, Any] = await response.json(loads=json_loads)
+            if etag := response.headers.get("ETag"):
+                result["etag"] = etag
             return result
 
     @throttle_with_retries
@@ -1142,3 +1150,15 @@ class SpotifyProvider(MusicProvider):
             self.logger.warning(
                 "FIXME: Spotify have fixed their Create Playlist API, this fix can be removed."
             )
+
+    async def _test_audiobook_support(self) -> bool:
+        """Test if audiobooks are supported in user's region."""
+        try:
+            await self._get_data("me/audiobooks", limit=1)
+            return True
+        except aiohttp.ClientResponseError as e:
+            if e.status == 403:
+                return False  # Not available
+            raise  # Re-raise other HTTP errors
+        except (MediaNotFoundError, ProviderUnavailableError):
+            return False
index 8faae126e5338ceef564399c1b23c9e90353724b..cbd412e18837619ae3f9de00ba50f86c9ed53f5b 100644 (file)
@@ -17,8 +17,6 @@ DEFAULT_SLIMPROTO_PORT = 3483
 CONF_DISPLAY = "display"
 CONF_VISUALIZATION = "visualization"
 
-CACHE_KEY_PREV_STATE = "slimproto_prev_state"
-
 DEFAULT_PLAYER_VOLUME = 20
 DEFAULT_VISUALIZATION = SlimVisualisationType.NONE
 
index d16325c92c68584ee0c34b1fc727ae2d00e8eacd..f50bcf006b2eac47ca17faf1b21a9add1ab5d40e 100644 (file)
@@ -42,7 +42,6 @@ from music_assistant.helpers.util import TaskManager
 from music_assistant.models.player import DeviceInfo, Player, PlayerMedia
 
 from .constants import (
-    CACHE_KEY_PREV_STATE,
     CONF_ENTRY_DISPLAY,
     CONF_ENTRY_VISUALIZATION,
     DEFAULT_PLAYER_VOLUME,
@@ -64,6 +63,9 @@ if TYPE_CHECKING:
     from .provider import SqueezelitePlayerProvider
 
 
+CACHE_CATEGORY_PREV_STATE = 0  # category for caching previous player state
+
+
 class SqueezelitePlayer(Player):
     """Squeezelite Player implementation."""
 
@@ -105,7 +107,9 @@ class SqueezelitePlayer(Player):
         # update all dynamic attributes
         self.update_attributes()
         # restore volume and power state
-        if last_state := await self.mass.cache.get(player_id, base_key=CACHE_KEY_PREV_STATE):
+        if last_state := await self.mass.cache.get(
+            key=player_id, provider=self.provider.instance_id, category=CACHE_CATEGORY_PREV_STATE
+        ):
             init_power = last_state[0]
             init_volume = last_state[1]
         else:
@@ -161,7 +165,10 @@ class SqueezelitePlayer(Player):
         await self.client.power(powered)
         # store last state in cache
         await self.mass.cache.set(
-            self.player_id, (powered, self.client.volume_level), base_key=CACHE_KEY_PREV_STATE
+            key=self.player_id,
+            data=(powered, self.client.volume_level),
+            provider=self.provider.instance_id,
+            category=CACHE_CATEGORY_PREV_STATE,
         )
 
     async def volume_set(self, volume_level: int) -> None:
@@ -169,7 +176,10 @@ class SqueezelitePlayer(Player):
         await self.client.volume_set(volume_level)
         # store last state in cache
         await self.mass.cache.set(
-            self.player_id, (self.client.powered, volume_level), base_key=CACHE_KEY_PREV_STATE
+            key=self.player_id,
+            data=(self.client.powered, volume_level),
+            provider=self.provider.instance_id,
+            category=CACHE_CATEGORY_PREV_STATE,
         )
 
     async def volume_mute(self, muted: bool) -> None:
index f1c62f0f22bc538a39ea721b347ef13eaf91f9d3..384c5f07c5c7f6274fa42a6a38d0f15621d5e67a 100644 (file)
@@ -389,7 +389,7 @@ class AudioDbMetadataProvider(MetadataProvider):
             await self.mass.music.albums.update_item_in_library(track.album.item_id, track.album)
         return metadata
 
-    @use_cache(86400 * 30)
+    @use_cache(86400 * 90, persistent=True)  # Cache for 90 days
     async def _get_data(self, endpoint: str, **kwargs: Any) -> dict[str, Any] | None:
         """Get data from api."""
         url = f"https://theaudiodb.com/api/v1/json/{app_var(3)}/{endpoint}"
index e4159419c36ea44810a723589715c59a736a2bc8..e040bbaaddb024dda953b91f02f07ea94460b00d 100644 (file)
@@ -50,12 +50,33 @@ from music_assistant_models.media_items import (
 )
 from music_assistant_models.streamdetails import StreamDetails
 
-from music_assistant.constants import CACHE_CATEGORY_DEFAULT, CACHE_CATEGORY_RECOMMENDATIONS
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.throttle_retry import ThrottlerManager, throttle_with_retries
 from music_assistant.helpers.util import infer_album_type
 from music_assistant.models.music_provider import MusicProvider
 
 from .auth_manager import ManualAuthenticationHelper, TidalAuthManager
+from .constants import (
+    BROWSE_URL,
+    CACHE_CATEGORY_ISRC_MAP,
+    CACHE_CATEGORY_RECOMMENDATIONS,
+    CACHE_KEY_RECOMMENDATIONS_ALL,
+    CONF_ACTION_CLEAR_AUTH,
+    CONF_ACTION_COMPLETE_PKCE_LOGIN,
+    CONF_ACTION_START_PKCE_LOGIN,
+    CONF_AUTH_TOKEN,
+    CONF_EXPIRY_TIME,
+    CONF_OOPS_URL,
+    CONF_QUALITY,
+    CONF_REFRESH_TOKEN,
+    CONF_TEMP_SESSION,
+    CONF_USER_ID,
+    DEFAULT_LIMIT,
+    LABEL_COMPLETE_PKCE_LOGIN,
+    LABEL_OOPS_URL,
+    LABEL_START_PKCE_LOGIN,
+    RESOURCES_URL,
+)
 from .tidal_page_parser import TidalPageParser
 
 if TYPE_CHECKING:
@@ -68,35 +89,6 @@ if TYPE_CHECKING:
     from music_assistant.mass import MusicAssistant
     from music_assistant.models import ProviderInstanceType
 
-TOKEN_TYPE = "Bearer"
-
-# Actions
-CONF_ACTION_START_PKCE_LOGIN = "start_pkce_login"
-CONF_ACTION_COMPLETE_PKCE_LOGIN = "auth"
-CONF_ACTION_CLEAR_AUTH = "clear_auth"
-
-# Intermediate steps
-CONF_TEMP_SESSION = "temp_session"
-CONF_OOPS_URL = "oops_url"
-
-# Config keys
-CONF_AUTH_TOKEN = "auth_token"
-CONF_REFRESH_TOKEN = "refresh_token"
-CONF_USER_ID = "user_id"
-CONF_EXPIRY_TIME = "expiry_time"
-CONF_COUNTRY_CODE = "country_code"
-CONF_SESSION_ID = "session_id"
-CONF_QUALITY = "quality"
-
-# Labels
-LABEL_START_PKCE_LOGIN = "start_pkce_login_label"
-LABEL_OOPS_URL = "oops_url_label"
-LABEL_COMPLETE_PKCE_LOGIN = "complete_pkce_login_label"
-
-BROWSE_URL = "https://tidal.com/browse"
-RESOURCES_URL = "https://resources.tidal.com/images"
-
-DEFAULT_LIMIT = 50
 
 T = TypeVar("T")
 
@@ -657,6 +649,7 @@ class TidalProvider(MusicProvider):
         api_result = await self._get_data(f"users/{prov_user_id}")
         return self._extract_data(api_result)
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def search(
         self,
         search_query: str,
@@ -739,6 +732,7 @@ class TidalProvider(MusicProvider):
 
         return parsed_results
 
+    @use_cache(3600 * 24)  # Cache for 1 day
     async def get_similar_tracks(self, prov_track_id: str, limit: int = 25) -> list[Track]:
         """Get similar tracks for given track id."""
         try:
@@ -756,6 +750,7 @@ class TidalProvider(MusicProvider):
     # ITEM RETRIEVAL METHODS
     #
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_artist(self, prov_artist_id: str) -> Artist:
         """Get artist details for given artist id."""
         try:
@@ -767,6 +762,7 @@ class TidalProvider(MusicProvider):
         except (ClientError, KeyError, ValueError) as err:
             raise MediaNotFoundError(f"Artist {prov_artist_id} not found") from err
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_album(self, prov_album_id: str) -> Album:
         """Get album details for given album id."""
         try:
@@ -778,6 +774,7 @@ class TidalProvider(MusicProvider):
         except (ClientError, KeyError, ValueError) as err:
             raise MediaNotFoundError(f"Album {prov_album_id} not found") from err
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_track(self, prov_track_id: str) -> Track:
         """Get track details for given track id."""
         try:
@@ -797,6 +794,7 @@ class TidalProvider(MusicProvider):
         except (ClientError, KeyError, ValueError) as err:
             raise MediaNotFoundError(f"Track {prov_track_id} not found") from err
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_playlist(self, prov_playlist_id: str) -> Playlist:
         """Get playlist details for given playlist id."""
         # Check if this is a mix by ID prefix
@@ -887,6 +885,7 @@ class TidalProvider(MusicProvider):
         except (ClientError, KeyError, ValueError) as err:
             raise MediaNotFoundError(f"Mix {prov_mix_id} not found") from err
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
         """Get album tracks for given album id."""
         try:
@@ -900,6 +899,7 @@ class TidalProvider(MusicProvider):
         except (ClientError, KeyError, ValueError) as err:
             raise MediaNotFoundError(f"Album {prov_album_id} not found") from err
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_artist_albums(self, prov_artist_id: str) -> list[Album]:
         """Get a list of all albums for the given artist."""
         try:
@@ -913,6 +913,7 @@ class TidalProvider(MusicProvider):
         except (ClientError, KeyError, ValueError) as err:
             raise MediaNotFoundError(f"Artist {prov_artist_id} not found") from err
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_artist_toptracks(self, prov_artist_id: str) -> list[Track]:
         """Get a list of 10 most popular tracks for the given artist."""
         try:
@@ -928,6 +929,7 @@ class TidalProvider(MusicProvider):
         except (ClientError, KeyError, ValueError) as err:
             raise MediaNotFoundError(f"Artist {prov_artist_id} not found") from err
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Get playlist tracks for either regular playlists or Tidal mixes."""
         page_size = 200
@@ -1011,9 +1013,10 @@ class TidalProvider(MusicProvider):
     async def recommendations(self) -> list[RecommendationFolder]:
         """Get this provider's recommendations organized into folders."""
         # Check cache first
-        cache_key = f"tidal_recommendations_{self.lookup_key}"
         cached_recommendations: list[RecommendationFolder] = await self.mass.cache.get(
-            cache_key, category=CACHE_CATEGORY_RECOMMENDATIONS, base_key=self.lookup_key
+            CACHE_KEY_RECOMMENDATIONS_ALL,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_RECOMMENDATIONS,
         )
 
         if cached_recommendations:
@@ -1045,10 +1048,10 @@ class TidalProvider(MusicProvider):
 
             # Cache the results for 1 hour (3600 seconds)
             await self.mass.cache.set(
-                cache_key,
-                results,
+                key=CACHE_KEY_RECOMMENDATIONS_ALL,
+                data=results,
+                provider=self.instance_id,
                 category=CACHE_CATEGORY_RECOMMENDATIONS,
-                base_key=self.lookup_key,
                 expiration=3600,
             )
 
@@ -1282,9 +1285,8 @@ class TidalProvider(MusicProvider):
     async def _get_track_by_isrc(self, item_id: str) -> Track | None:
         """Get track by ISRC from library item, with caching."""
         # Try to get from cache first
-        cache_key = f"isrc_map_{item_id}"
         cached_track_id = await self.mass.cache.get(
-            cache_key, category=CACHE_CATEGORY_DEFAULT, base_key=self.lookup_key
+            item_id, provider=self.instance_id, category=CACHE_CATEGORY_ISRC_MAP
         )
 
         if cached_track_id:
@@ -1296,7 +1298,7 @@ class TidalProvider(MusicProvider):
             except MediaNotFoundError:
                 # Track no longer exists, invalidate cache
                 await self.mass.cache.delete(
-                    cache_key, category=CACHE_CATEGORY_DEFAULT, base_key=self.lookup_key
+                    item_id, provider=self.instance_id, category=CACHE_CATEGORY_ISRC_MAP
                 )
 
         # Lookup by ISRC if no cache or cached track not found
@@ -1337,10 +1339,12 @@ class TidalProvider(MusicProvider):
 
         # Cache the mapping for future use
         await self.mass.cache.set(
-            cache_key,
-            track_id,
-            category=CACHE_CATEGORY_DEFAULT,
-            base_key=self.lookup_key,
+            key=item_id,
+            data=track_id,
+            provider=self.instance_id,
+            category=CACHE_CATEGORY_ISRC_MAP,
+            persistent=True,
+            expiration=(86400 * 90),
         )
 
         return await self.get_track(track_id)
@@ -1393,17 +1397,16 @@ class TidalProvider(MusicProvider):
             self.logger.debug("Page '%s' indexed with: %s", page_path, parser.content_stats)
 
             # Cache the parser data
-            cache_key = f"tidal_page_{page_path}"
             cache_data = {
                 "module_map": parser._module_map,
                 "content_map": parser._content_map,
                 "parsed_at": parser._parsed_at,
             }
             await self.mass.cache.set(
-                cache_key,
-                cache_data,
+                key=page_path,
+                data=cache_data,
+                provider=self.instance_id,
                 category=CACHE_CATEGORY_RECOMMENDATIONS,
-                base_key=self.lookup_key,
                 expiration=self.page_cache_ttl,
             )
 
@@ -1888,12 +1891,6 @@ class TidalProvider(MusicProvider):
         )
 
         # Metadata - different fields based on type
-        if is_mix:
-            playlist.cache_checksum = str(playlist_obj.get("updated", ""))
-        else:
-            playlist.cache_checksum = str(playlist_obj.get("lastUpdated", ""))
-            if "popularity" in playlist_obj:
-                playlist.metadata.popularity = playlist_obj.get("popularity", 0)
 
         # Add the description from the subtitle for mixes
         if is_mix:
diff --git a/music_assistant/providers/tidal/constants.py b/music_assistant/providers/tidal/constants.py
new file mode 100644 (file)
index 0000000..bceefd3
--- /dev/null
@@ -0,0 +1,45 @@
+"""Constants for the Tidal music provider."""
+
+# API URLs
+from typing import Final
+
+BASE_URL = "https://api.tidal.com/v1"
+BASE_URL_V2 = "https://api.tidal.com/v2"
+OPEN_API_URL = "https://openapi.tidal.com/v2"
+BROWSE_URL = "https://tidal.com/browse"
+RESOURCES_URL = "https://resources.tidal.com/images"
+
+# Authentication
+TOKEN_TYPE = "Bearer"
+
+# Actions
+CONF_ACTION_START_PKCE_LOGIN = "start_pkce_login"
+CONF_ACTION_COMPLETE_PKCE_LOGIN = "auth"
+CONF_ACTION_CLEAR_AUTH = "clear_auth"
+
+# Intermediate steps
+CONF_TEMP_SESSION = "temp_session"
+CONF_OOPS_URL = "oops_url"
+
+# Config keys
+CONF_AUTH_TOKEN = "auth_token"
+CONF_REFRESH_TOKEN = "refresh_token"
+CONF_USER_ID = "user_id"
+CONF_EXPIRY_TIME = "expiry_time"
+CONF_COUNTRY_CODE = "country_code"
+CONF_SESSION_ID = "session_id"
+CONF_QUALITY = "quality"
+
+# Labels
+LABEL_START_PKCE_LOGIN = "start_pkce_login_label"
+LABEL_OOPS_URL = "oops_url_label"
+LABEL_COMPLETE_PKCE_LOGIN = "complete_pkce_login_label"
+
+# API defaults
+DEFAULT_LIMIT: Final[int] = 50
+
+# Cache categories and keys
+CACHE_CATEGORY_DEFAULT: Final[int] = 0
+CACHE_CATEGORY_RECOMMENDATIONS: Final[int] = 1
+CACHE_CATEGORY_ISRC_MAP: Final[int] = 2
+CACHE_KEY_RECOMMENDATIONS_ALL: Final[str] = "all_recommendations"
index d1dd24e9311657ced9c8e32ea0100ab26701f925..c6833909789937d553508c1c40417377da8dfcae 100644 (file)
@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, Any
 
 from music_assistant_models.enums import MediaType
 
-from music_assistant.constants import CACHE_CATEGORY_RECOMMENDATIONS
+from .constants import CACHE_CATEGORY_RECOMMENDATIONS
 
 if TYPE_CHECKING:
     from music_assistant_models.media_items import Album, Artist, Playlist, Track
@@ -391,11 +391,10 @@ class TidalPageParser:
     @classmethod
     async def from_cache(cls, provider: TidalProvider, page_path: str) -> TidalPageParser | None:
         """Create a parser instance from cached data if available and valid."""
-        cache_key = f"tidal_page_{page_path}"
         cached_data = await provider.mass.cache.get(
-            cache_key,
+            page_path,
+            provider=provider.instance_id,
             category=CACHE_CATEGORY_RECOMMENDATIONS,
-            base_key=provider.lookup_key,
         )
         if not cached_data:
             return None
index b3e19ff56670562f8fb2d9e1f43f3e58f11c05e4..f12171578fbf1a012214269cde9a029ba2c8f844 100644 (file)
@@ -25,6 +25,7 @@ from music_assistant_models.media_items import (
 from music_assistant_models.streamdetails import StreamDetails
 
 from music_assistant.constants import CONF_USERNAME
+from music_assistant.controllers.cache import use_cache
 from music_assistant.helpers.throttle_retry import Throttler
 from music_assistant.models.music_provider import MusicProvider
 
@@ -37,6 +38,9 @@ if TYPE_CHECKING:
     from music_assistant import MusicAssistant
     from music_assistant.models import ProviderInstanceType
 
+
+CACHE_CATEGORY_STREAMS = 1
+
 SUPPORTED_FEATURES = {
     ProviderFeature.LIBRARY_RADIOS,
     ProviderFeature.BROWSE,
@@ -133,22 +137,19 @@ class TuneInProvider(MusicProvider):
             async for item in parse_items(data["body"]):
                 yield item
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_radio(self, prov_radio_id: str) -> Radio:
         """Get radio station details."""
         if not prov_radio_id.startswith("http"):
             if "--" in prov_radio_id:
-                prov_radio_id, media_type = prov_radio_id.split("--", 1)
-            else:
-                media_type = None
+                # handle this for backwards compatibility
+                prov_radio_id = prov_radio_id.split("--")[0]
             params = {"c": "composite", "detail": "listing", "id": prov_radio_id}
             result = await self.__get_data("Describe.ashx", **params)
             if result and result.get("body") and result["body"][0].get("children"):
                 item = result["body"][0]["children"][0]
                 stream_info = await self._get_stream_info(prov_radio_id)
-                for stream in stream_info:
-                    if media_type and stream["media_type"] != media_type:
-                        continue
-                    return self._parse_radio(item, [stream])
+                return self._parse_radio(item, stream_info)
         # fallback - e.g. for handle custom urls ...
         async for radio in self.get_library_radios():
             if radio.item_id == prov_radio_id:
@@ -170,24 +171,26 @@ class TuneInProvider(MusicProvider):
             name = name.split(" (")[0]
 
         if stream_info is not None:
-            # stream info is provided: parse stream objects into provider mappings
+            # stream info is provided: parse first stream into provider mapping
+            # assuming here that the streams are sorted by quality (bitrate)
+            # and the first one is the best quality
+            preferred_stream = stream_info[0]
             radio = Radio(
                 item_id=details["preset_id"],
                 provider=self.lookup_key,
                 name=name,
                 provider_mappings={
                     ProviderMapping(
-                        item_id=f"{details['preset_id']}--{stream['media_type']}",
+                        item_id=details["preset_id"],
                         provider_domain=self.domain,
                         provider_instance=self.instance_id,
                         audio_format=AudioFormat(
-                            content_type=ContentType.try_parse(stream["media_type"]),
-                            bit_rate=stream.get("bitrate", 128),
+                            content_type=ContentType.try_parse(preferred_stream["media_type"]),
+                            bit_rate=preferred_stream.get("bitrate", 128),
                         ),
-                        details=stream["url"],
+                        details=preferred_stream["url"],
                         available=details.get("is_available", True),
                     )
-                    for stream in stream_info
                 },
             )
         else:
@@ -229,11 +232,14 @@ class TuneInProvider(MusicProvider):
 
     async def _get_stream_info(self, preset_id: str) -> list[dict]:
         """Get stream info for a radio station."""
-        cache_base_key = "tunein_stream"
-        if cache := await self.mass.cache.get(preset_id, base_key=cache_base_key):
+        if cache := await self.mass.cache.get(
+            preset_id, provider=self.instance_id, category=CACHE_CATEGORY_STREAMS
+        ):
             return cache
         result = (await self.__get_data("Tune.ashx", id=preset_id))["body"]
-        await self.mass.cache.set(preset_id, result, base_key=cache_base_key)
+        await self.mass.cache.set(
+            key=preset_id, data=result, provider=self.instance_id, category=CACHE_CATEGORY_STREAMS
+        )
         return result
 
     async def get_stream_details(self, item_id: str, media_type: MediaType) -> StreamDetails:
@@ -253,13 +259,12 @@ class TuneInProvider(MusicProvider):
                 can_seek=False,
             )
         if "--" in item_id:
-            stream_item_id, media_type = item_id.split("--", 1)
-        else:
-            media_type = None
-            stream_item_id = item_id
-        for stream in await self._get_stream_info(stream_item_id):
-            if media_type and stream["media_type"] != media_type:
-                continue
+            # handle this for backwards compatibility
+            item_id = item_id.split("--")[0]
+        if stream_info := await self._get_stream_info(item_id):
+            # assuming here that the streams are sorted by quality (bitrate)
+            # and the first one is the best quality
+            preferred_stream = stream_info[0]
             return StreamDetails(
                 provider=self.lookup_key,
                 item_id=item_id,
@@ -267,13 +272,14 @@ class TuneInProvider(MusicProvider):
                 audio_format=AudioFormat(content_type=ContentType.UNKNOWN),
                 media_type=MediaType.RADIO,
                 stream_type=StreamType.HTTP,
-                path=stream["url"],
+                path=preferred_stream["url"],
                 allow_seek=False,
                 can_seek=False,
             )
         msg = f"Unable to retrieve stream details for {item_id}"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def search(
         self, search_query: str, media_types: list[MediaType], limit: int = 10
     ) -> SearchResults:
index 91f334ce5de55eb88c67e2438545996873e35571..973706cefccab51f34ed738cb1931685d23a0f32 100644 (file)
@@ -227,6 +227,7 @@ class YoutubeMusicProvider(MusicProvider):
         if not await self._user_has_ytm_premium():
             raise LoginFailed("User does not have Youtube Music Premium")
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def search(
         self, search_query: str, media_types=list[MediaType], limit: int = 5
     ) -> SearchResults:
@@ -319,6 +320,7 @@ class YoutubeMusicProvider(MusicProvider):
         for podcast in podcasts_obj:
             yield self._parse_podcast(podcast)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_album(self, prov_album_id) -> Album:
         """Get full album details by id."""
         if album_obj := await get_album(prov_album_id=prov_album_id, language=self.language):
@@ -326,6 +328,7 @@ class YoutubeMusicProvider(MusicProvider):
         msg = f"Item {prov_album_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_album_tracks(self, prov_album_id: str) -> list[Track]:
         """Get album tracks for given album id."""
         album_obj = await get_album(prov_album_id=prov_album_id, language=self.language)
@@ -340,6 +343,7 @@ class YoutubeMusicProvider(MusicProvider):
             tracks.append(track)
         return tracks
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_artist(self, prov_artist_id) -> Artist:
         """Get full artist details by id."""
         if artist_obj := await get_artist(
@@ -349,6 +353,7 @@ class YoutubeMusicProvider(MusicProvider):
         msg = f"Item {prov_artist_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 24 * 30)  # Cache for 30 days
     async def get_track(self, prov_track_id) -> Track:
         """Get full track details by id."""
         if track_obj := await get_track(
@@ -360,6 +365,7 @@ class YoutubeMusicProvider(MusicProvider):
         msg = f"Item {prov_track_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_playlist(self, prov_playlist_id) -> Playlist:
         """Get full playlist details by id."""
         # Grab the playlist id from the full url in case of personal playlists
@@ -375,6 +381,7 @@ class YoutubeMusicProvider(MusicProvider):
         msg = f"Item {prov_playlist_id} not found"
         raise MediaNotFoundError(msg)
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
         """Return playlist tracks for the given provider playlist id."""
         if page > 0:
@@ -410,6 +417,7 @@ class YoutubeMusicProvider(MusicProvider):
         # YTM doesn't seem to support paging so we ignore offset and limit
         return result
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_artist_albums(self, prov_artist_id) -> list[Album]:
         """Get a list of albums for the given artist."""
         artist_obj = await get_artist(prov_artist_id=prov_artist_id, headers=self._headers)
@@ -424,6 +432,7 @@ class YoutubeMusicProvider(MusicProvider):
             return albums
         return []
 
+    @use_cache(3600 * 24 * 7)  # Cache for 7 days
     async def get_artist_toptracks(self, prov_artist_id) -> list[Track]:
         """Get a list of 25 most popular tracks for the given artist."""
         artist_obj = await get_artist(prov_artist_id=prov_artist_id, headers=self._headers)
@@ -433,6 +442,7 @@ class YoutubeMusicProvider(MusicProvider):
             return playlist_tracks[:25]
         return []
 
+    @use_cache(3600 * 24 * 14)  # Cache for 14 days
     async def get_podcast(self, prov_podcast_id: str) -> Podcast:
         """Get the full details of a Podcast."""
         podcast_obj = await get_podcast(prov_podcast_id, headers=self._headers)
@@ -451,6 +461,7 @@ class YoutubeMusicProvider(MusicProvider):
             episode.position = ep_index
             yield episode
 
+    @use_cache(3600 * 3)  # Cache for 3 hours
     async def get_podcast_episode(self, prov_episode_id: str) -> PodcastEpisode:
         """Get a single Podcast Episode."""
         podcast_id, episode_id = prov_episode_id.split(PODCAST_EPISODE_SPLITTER)
@@ -546,6 +557,7 @@ class YoutubeMusicProvider(MusicProvider):
             user=self._yt_user,
         )
 
+    @use_cache(3600 * 24)  # Cache for 1 day
     async def get_similar_tracks(self, prov_track_id, limit=25) -> list[Track]:
         """Retrieve a dynamic list of tracks based on the provided item."""
         result = []
@@ -812,7 +824,6 @@ class YoutubeMusicProvider(MusicProvider):
                 playlist.owner = authors["name"]
         else:
             playlist.owner = self.name
-        playlist.cache_checksum = playlist_obj.get("checksum")
         return playlist
 
     def _parse_track(self, track_obj: dict) -> Track:
index 20884e24924bdcfa4e67c1b46b4e43c663e6e142..b5fd71cf1254108f27225b433c8b7e7def1decdf 100644 (file)
@@ -25,7 +25,7 @@ dependencies = [
   "ifaddr==0.2.0",
   "mashumaro==3.16",
   "music-assistant-frontend==2.16.1",
-  "music-assistant-models==1.1.58",
+  "music-assistant-models==1.1.59",
   "mutagen==1.47.0",
   "orjson==3.11.3",
   "pillow==11.3.0",
index cbe2be7dfb4da876f7d67cb060559f57f4fa020c..670889c6b559d019e86ad824a39d48d5e8395a68 100644 (file)
@@ -33,7 +33,7 @@ liblistenbrainz==0.6.0
 lyricsgenius==3.7.2
 mashumaro==3.16
 music-assistant-frontend==2.16.1
-music-assistant-models==1.1.58
+music-assistant-models==1.1.59
 mutagen==1.47.0
 orjson==3.11.3
 pillow==11.3.0
index fe2049983d5037a580161d19e78f8b99413d9732..248f2bf84c1962b6c828dcc663881df59cb8f80b 100644 (file)
 # ---
 # name: test_parse_playlist[gonic-sample.playlist]
   dict({
-    'cache_checksum': None,
     'external_ids': list([
     ]),
     'favorite': False,