'audio_format', json(provider_mappings.audio_format),
'url', provider_mappings.url,
'details', provider_mappings.details,
- 'in_library', provider_mappings.in_library
+ 'in_library', provider_mappings.in_library,
+ 'is_unique', provider_mappings.is_unique
)) FROM provider_mappings WHERE provider_mappings.item_id = albums.item_id AND media_type = 'album') AS provider_mappings,
(SELECT JSON_GROUP_ARRAY(
json_object(
'audio_format', json(provider_mappings.audio_format),
'url', provider_mappings.url,
'details', provider_mappings.details,
- 'in_library', provider_mappings.in_library
+ 'in_library', provider_mappings.in_library,
+ 'is_unique', provider_mappings.is_unique
)) FROM provider_mappings WHERE provider_mappings.item_id = audiobooks.item_id AND media_type = 'audiobook') AS provider_mappings,
playlog.fully_played AS fully_played,
playlog.seconds_played AS seconds_played,
'audio_format', json(provider_mappings.audio_format),
'url', provider_mappings.url,
'details', provider_mappings.details,
- 'in_library', provider_mappings.in_library
+ 'in_library', provider_mappings.in_library,
+ 'is_unique', provider_mappings.is_unique
)) FROM provider_mappings WHERE provider_mappings.item_id = {self.db_table}.item_id
AND provider_mappings.media_type = '{self.media_type.value}') AS provider_mappings
FROM {self.db_table} """ # noqa: E501
except MediaNotFoundError:
# edge case: already deleted / race condition
return
- library_item.provider_mappings = {
- x
- for x in library_item.provider_mappings
- if x.provider_instance != provider_instance_id and x.item_id != provider_item_id
- }
+
# update provider_mappings table
await self.mass.music.database.delete(
DB_TABLE_PROVIDER_MAPPINGS,
"provider": provider_instance_id,
},
)
+ library_item.provider_mappings = {
+ x
+ for x in library_item.provider_mappings
+ if not (x.provider_instance == provider_instance_id and x.item_id == provider_item_id)
+ }
if library_item.provider_mappings:
self.logger.debug(
"removed provider_mapping %s/%s from item id %s",
'audio_format', json(provider_mappings.audio_format),
'url', provider_mappings.url,
'details', provider_mappings.details,
- 'in_library', provider_mappings.in_library
+ 'in_library', provider_mappings.in_library,
+ 'is_unique', provider_mappings.is_unique
)) FROM provider_mappings WHERE provider_mappings.item_id = tracks.item_id AND media_type = 'track') AS provider_mappings,
(SELECT JSON_GROUP_ARRAY(
)
await self._database.execute(
f"UPDATE {DB_TABLE_PROVIDER_MAPPINGS} SET in_library = 1 "
- "WHERE media_type IN "
- "('radio', 'playlist');"
+ "WHERE media_type = 'radio';"
)
# save changes
assert isinstance(base_item, MediaItem) # for type checking
for prov_l in base_item.provider_mappings:
if (
- prov_l.provider_domain == compare_item.provider
+ prov_l.provider_instance == compare_item.provider
and prov_l.item_id == compare_item.item_id
):
return True
if compare_prov_ids is not None:
assert isinstance(compare_item, MediaItem) # for type checking
for prov_r in compare_item.provider_mappings:
- if prov_r.provider_domain == base_item.provider and prov_r.item_id == base_item.item_id:
+ if (
+ prov_r.provider_instance == base_item.provider
+ and prov_r.item_id == base_item.item_id
+ ):
return True
if base_prov_ids is not None and compare_prov_ids is not None:
for prov_r in compare_item.provider_mappings:
if prov_l.provider_domain != prov_r.provider_domain:
continue
+ if (
+ prov_l.is_unique or prov_r.is_unique
+ ) and prov_l.provider_instance != prov_r.provider_instance:
+ continue
if prov_l.item_id == prov_r.item_id:
return True
return False
try:
if not library_item:
# add item to the library
+ for prov_map in prov_item.provider_mappings:
+ prov_map.in_library = True
library_item = await self.mass.music.artists.add_item_to_library(prov_item)
elif not self._check_provider_mappings(library_item, prov_item, True):
# existing library item but provider mapping doesn't match
try:
if not library_item:
# add item to the library
+ for prov_map in prov_item.provider_mappings:
+ prov_map.in_library = True
library_item = await self.mass.music.albums.add_item_to_library(prov_item)
elif not self._check_provider_mappings(library_item, prov_item, True):
# existing library item but provider mapping doesn't match
try:
if not library_track:
# add item to the library
+ for prov_map in prov_track.provider_mappings:
+ prov_map.in_library = True
library_track = await self.mass.music.tracks.add_item_to_library(prov_track)
elif not self._check_provider_mappings(library_track, prov_track, True):
# existing library track but provider mapping doesn't match
try:
if not library_item:
# add item to the library
+ for prov_map in prov_item.provider_mappings:
+ prov_map.in_library = True
library_item = await self.mass.music.audiobooks.add_item_to_library(prov_item)
elif not self._check_provider_mappings(library_item, prov_item, True):
# existing library item but provider mapping doesn't match
try:
if not library_item:
# add item to the library
+ for prov_map in prov_item.provider_mappings:
+ prov_map.in_library = True
library_item = await self.mass.music.playlists.add_item_to_library(prov_item)
elif not self._check_provider_mappings(library_item, prov_item, True):
# existing library item but provider mapping doesn't match
try:
if not library_track:
# add item to the library
+ for prov_map in prov_track.provider_mappings:
+ prov_map.in_library = True
library_track = await self.mass.music.tracks.add_item_to_library(prov_track)
elif not self._check_provider_mappings(library_track, prov_track, True):
# existing library track but provider mapping doesn't match
continue
if not library_item:
# add item to the library
+ for prov_map in prov_item.provider_mappings:
+ prov_map.in_library = True
library_item = await self.mass.music.tracks.add_item_to_library(prov_item)
elif not self._check_provider_mappings(library_item, prov_item, True):
# existing library item but provider mapping doesn't match
try:
if not library_item:
# add item to the library
+ for prov_map in prov_item.provider_mappings:
+ prov_map.in_library = True
library_item = await self.mass.music.podcasts.add_item_to_library(prov_item)
elif not self._check_provider_mappings(library_item, prov_item, True):
# existing library item but provider mapping doesn't match
try:
if not library_item:
# add item to the library
+ for prov_map in prov_item.provider_mappings:
+ prov_map.in_library = True
library_item = await self.mass.music.radio.add_item_to_library(prov_item)
elif not self._check_provider_mappings(library_item, prov_item, True):
# existing library item but provider mapping doesn't match
if prov_playlist_id == self._get_liked_songs_playlist_id():
return await self._get_liked_songs_playlist()
- # Use global session for Spotify-owned playlists (e.g., Daily Mix)
- # as they may not be accessible via the dev token
- use_global = await self._is_spotify_owned_playlist(prov_playlist_id)
- playlist_obj = await self._get_data(
- f"playlists/{prov_playlist_id}", use_global_session=use_global
- )
- return parse_playlist(playlist_obj, self)
+ # Check cache to see if this playlist requires global token
+ use_global = await self._playlist_requires_global_token(prov_playlist_id)
+ if use_global:
+ playlist_obj = await self._get_data(
+ f"playlists/{prov_playlist_id}", use_global_session=True
+ )
+ return parse_playlist(playlist_obj, self)
+
+ # Try with dev token first (if available), fallback to global on 400 error
+ # Some playlists like Spotify-owned (Daily Mix) or Liked Songs only work with global token
+ try:
+ playlist_obj = await self._get_data(f"playlists/{prov_playlist_id}")
+ return parse_playlist(playlist_obj, self)
+ except aiohttp.ClientResponseError as err:
+ if err.status == 400 and self.dev_session_active:
+ # Remember that this playlist requires global token
+ await self._set_playlist_requires_global_token(prov_playlist_id)
+ playlist_obj = await self._get_data(
+ f"playlists/{prov_playlist_id}", use_global_session=True
+ )
+ return parse_playlist(playlist_obj, self)
+ raise
@use_cache()
async def get_podcast(self, prov_podcast_id: str) -> Podcast:
@use_cache(2600 * 3) # 3 hours
async def get_playlist_tracks(self, prov_playlist_id: str, page: int = 0) -> list[Track]:
"""Get playlist tracks."""
- result: list[Track] = []
is_liked_songs = prov_playlist_id == self._get_liked_songs_playlist_id()
- uri = (
- "me/tracks"
- if prov_playlist_id == self._get_liked_songs_playlist_id()
- else f"playlists/{prov_playlist_id}/tracks"
- )
- # Use global session for liked songs or Spotify-owned playlists (e.g., Daily Mix)
- use_global = is_liked_songs or await self._is_spotify_owned_playlist(prov_playlist_id)
- # do single request to get the etag (which we use as checksum for caching)
- cache_checksum = await self._get_etag(uri, limit=1, offset=0, use_global_session=use_global)
+ uri = "me/tracks" if is_liked_songs else f"playlists/{prov_playlist_id}/tracks"
+ # Liked songs always require global session
+ # For other playlists, call get_playlist first to trigger the fallback logic
+ # and populate the cache for which token to use
+ if is_liked_songs:
+ use_global = True
+ else:
+ # This call is cached and will determine/cache if global token is needed
+ await self.get_playlist(prov_playlist_id)
+ use_global = await self._playlist_requires_global_token(prov_playlist_id)
+
+ result: list[Track] = []
page_size = 50
offset = page * page_size
+
+ # Get etag for caching
+ cache_checksum = await self._get_etag(uri, limit=1, offset=0, use_global_session=use_global)
+
spotify_result = await self._get_data_with_caching(
uri, cache_checksum, limit=page_size, offset=offset, use_global_session=use_global
)
for index, item in enumerate(spotify_result["items"], 1):
if not (item and item["track"] and item["track"]["id"]):
continue
- # use count as position
track = parse_track(item["track"], self)
track.position = offset + index
result.append(track)
return liked_songs
async def _playlist_requires_global_token(self, prov_playlist_id: str) -> bool:
    """Return whether this playlist is known to need the global token.

    The answer is read back from the provider cache, where it was stored
    earlier by ``_set_playlist_requires_global_token``.

    :param prov_playlist_id: The Spotify playlist ID.
    :returns: True if the playlist requires global token.
    """
    cached = await self.mass.cache.get(
        f"playlist_global_token_{prov_playlist_id}", provider=self.instance_id
    )
    return bool(cached)
async def _set_playlist_requires_global_token(self, prov_playlist_id: str) -> None:
    """Record in the provider cache that a playlist needs the global token.

    :param prov_playlist_id: The Spotify playlist ID.
    """
    # 90-day expiry: which token a playlist needs is effectively stable.
    ninety_days = 86400 * 90
    await self.mass.cache.set(
        f"playlist_global_token_{prov_playlist_id}",
        True,
        provider=self.instance_id,
        expiration=ninety_days,
    )
async def _add_audiobook_chapters(self, audiobook: Audiobook) -> None:
"""Add chapter metadata to an audiobook from Spotify API data."""