if new_val is None:
continue
cur_val = getattr(self, fld.name)
- if cur_val is None or allow_overwrite: # noqa: SIM114
- setattr(self, fld.name, new_val)
- elif isinstance(cur_val, list):
+ if isinstance(cur_val, list) and isinstance(new_val, list):
new_val = merge_lists(cur_val, new_val)
setattr(self, fld.name, new_val)
- elif isinstance(cur_val, set):
+ elif isinstance(cur_val, set) and isinstance(new_val, list):
cur_val.update(new_val)
setattr(self, fld.name, cur_val)
- elif new_val and fld.name in ("checksum", "popularity", "last_refresh"):
+ elif new_val and fld.name in ("checksum", "popularity", "last_refresh"): # noqa: SIM114
# some fields are always allowed to be overwritten
# (such as checksum and last_refresh)
setattr(self, fld.name, new_val)
+ elif cur_val is None or (allow_overwrite and new_val): # noqa: SIM114
+ setattr(self, fld.name, new_val)
return self
item_id: str,
provider_instance_id_or_domain: str,
) -> list[Album]:
- """Return all versions of an album we can find on the provider."""
+ """Return all versions of an album we can find on all providers."""
album = await self.get(item_id, provider_instance_id_or_domain, add_to_library=False)
search_query = f"{album.artists[0].name} - {album.name}"
- return [
- prov_item
- for prov_item in await self.search(search_query, provider_instance_id_or_domain)
- if loose_compare_strings(album.name, prov_item.name)
- and compare_artists(prov_item.artists, album.artists, any_match=True)
- # make sure that the 'base' version is NOT included
- and prov_item.item_id != item_id
- ]
+ result: list[Album] = []
+ for provider_id in self.mass.music.get_unique_providers():
+ provider = self.mass.get_provider(provider_id)
+ if not provider:
+ continue
+ if not provider.library_supported(MediaType.ALBUM):
+ continue
+ result += [
+ prov_item
+ for prov_item in await self.search(search_query, provider_id)
+ if loose_compare_strings(album.name, prov_item.name)
+ and compare_artists(prov_item.artists, album.artists, any_match=True)
+ # make sure that the 'base' version is NOT included
+ and prov_item.item_id != item_id
+ ]
+ return result
async def _add_library_item(self, item: Album) -> Album:
"""Add a new record to the database."""
return []
full_album = await self.get_provider_item(item_id, provider_instance_id_or_domain)
- # prefer cache items (if any)
+ # prefer cache items (if any) for streaming providers only
cache_key = f"{prov.instance_id}.albumtracks.{item_id}"
if isinstance(full_album, ItemMapping):
cache_checksum = None
else:
cache_checksum = full_album.metadata.checksum
- if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
+ if prov.is_streaming_provider and (
+ cache := await self.mass.cache.get(cache_key, checksum=cache_checksum)
+ ):
return [AlbumTrack.from_dict(x) for x in cache]
# no items in cache - get listing from provider
items = []
track.metadata.images = full_album.metadata.images
items.append(track)
# store (serializable items) in cache
- self.mass.create_task(
- self.mass.cache.set(cache_key, [x.to_dict() for x in items], checksum=cache_checksum)
- )
+ if prov.is_streaming_provider:
+ self.mass.create_task(
+ self.mass.cache.set(
+ cache_key, [x.to_dict() for x in items], checksum=cache_checksum
+ )
+ )
return items
async def _get_provider_dynamic_tracks(
offset: int = 0,
) -> list[ItemCls]:
"""Fetch MediaItem records from database given a custom query."""
+ if custom_query is None:
+ custom_query = f"SELECT * FROM {self.db_table}"
return [
self.item_cls.from_db_row(db_row)
for db_row in await self.mass.music.database.get_rows_from_query(
# we use the separate provider_mappings table to perform quick lookups
# from provider IDs to database IDs because this is faster
# (and more compatible) than querying the provider_mappings json column
- subquery = f"SELECT item_id FROM {DB_TABLE_PROVIDER_MAPPINGS} "
- subquery += f"WHERE (provider_instance = '{provider_instance_id_or_domain}'"
- subquery += f" OR provider_domain = '{provider_instance_id_or_domain}')"
+ subquery = (
+ f"SELECT item_id FROM {DB_TABLE_PROVIDER_MAPPINGS} "
+ "WHERE ( "
+ f"(provider_instance = '{provider_instance_id_or_domain}' "
+ f"OR provider_domain = '{provider_instance_id_or_domain}') "
+ ")"
+ )
if provider_item_ids is not None:
prov_ids = str(tuple(provider_item_ids))
if prov_ids.endswith(",)"):
prov_ids = prov_ids.replace(",)", ")")
subquery += f" AND provider_item_id in {prov_ids}"
+ subquery += f" AND media_type = '{self.media_type.value}'"
query = f"SELECT * FROM {self.db_table} WHERE item_id in ({subquery})"
return await self.get_library_items_by_query(query, limit=limit, offset=offset)
self,
provider_instance_id_or_domain: str,
provider_item_ids: tuple[str, ...] | None = None,
- limit: int = 500,
- offset: int = 0,
) -> AsyncGenerator[ItemCls, None]:
"""Iterate all records from database for given provider."""
limit: int = 500
# grab additional metadata
if not skip_metadata_lookup:
await self.mass.metadata.get_track_metadata(item)
+ # fallback track image from album
+ if not item.image and isinstance(item.album, Album) and item.album.image:
+ item.metadata.images.append(item.album.image)
# actually add (or update) the item in the library db
# use the lock to prevent a race condition of the same item being added twice
async with self._db_add_lock:
item_id: str,
provider_instance_id_or_domain: str,
) -> list[Track]:
- """Return all versions of a track we can find on the provider."""
+ """Return all versions of a track we can find on all providers."""
track = await self.get(item_id, provider_instance_id_or_domain, add_to_library=False)
search_query = f"{track.artists[0].name} - {track.name}"
- return [
- prov_item
- for prov_item in await self.search(search_query, provider_instance_id_or_domain)
- if loose_compare_strings(track.name, prov_item.name)
- and compare_artists(prov_item.artists, track.artists, any_match=True)
- # make sure that the 'base' version is NOT included
- and prov_item.item_id != item_id
- ]
+ result: list[Track] = []
+ for provider_id in self.mass.music.get_unique_providers():
+ provider = self.mass.get_provider(provider_id)
+ if not provider:
+ continue
+ if not provider.library_supported(MediaType.TRACK):
+ continue
+ result += [
+ prov_item
+ for prov_item in await self.search(search_query, provider_id)
+ if loose_compare_strings(track.name, prov_item.name)
+ and compare_artists(prov_item.artists, track.artists, any_match=True)
+ # make sure that the 'base' version is NOT included
+ and prov_item.item_id != item_id
+ ]
+ return result
async def albums(
self,
async def get_track_metadata(self, track: Track) -> None:
"""Get/update rich metadata for a track."""
- # set timestamp, used to determine when this function was last called
- track.metadata.last_refresh = int(time())
-
if not (track.album and track.artists):
return
# collect metadata from all providers
track.name,
provider.name,
)
+ # set timestamp, used to determine when this function was last called
+ track.metadata.last_refresh = int(time())
async def get_playlist_metadata(self, playlist: Playlist) -> None:
"""Get/update rich metadata for a playlist."""
- # set timestamp, used to determine when this function was last called
- playlist.metadata.last_refresh = int(time())
# retrieve genres from tracks
# TODO: retrieve style/mood ?
playlist.metadata.genres = set()
async for track in self.mass.music.playlists.tracks(
playlist.item_id, playlist.provider
):
- if not playlist.image and track.image:
+ if track.image:
images.add(track.image)
if track.media_type != MediaType.TRACK:
# filter out radio items
# create collage thumb/fanart from playlist tracks
if images:
- if playlist.image and self.mass.storage_path in playlist.image:
- img_path = playlist.image
+ if playlist.image and self.mass.storage_path in playlist.image.path:
+ img_path = playlist.image.path
else:
img_path = os.path.join(self.mass.storage_path, f"{uuid4().hex}.png")
- img_data = await create_collage(self.mass, list(images))
+ img_data = await create_collage(self.mass, list(images))
async with aiofiles.open(img_path, "wb") as _file:
await _file.write(img_data)
- playlist.metadata.images = [MediaItemImage(ImageType.THUMB, img_path, True)]
+ playlist.metadata.images = [MediaItemImage(ImageType.THUMB, img_path, "file")]
except Exception as err:
LOGGER.debug("Error while creating playlist image", exc_info=err)
+ # set timestamp, used to determine when this function was last called
+ playlist.metadata.last_refresh = int(time())
async def get_radio_metadata(self, radio: Radio) -> None:
"""Get/update rich metadata for a radio station."""
# check if both tracks are (not) explicit
if base_item.metadata.explicit is None and isinstance(base_item.album, Album):
base_item.metadata.explicit = base_item.album.metadata.explicit
- if compare_item.metadata.explicit is None and isinstance(base_item.album, Album):
+ if compare_item.metadata.explicit is None and isinstance(compare_item.album, Album):
compare_item.metadata.explicit = compare_item.album.metadata.explicit
if strict and not compare_explicit(base_item.metadata, compare_item.metadata):
return False
items=sorted(subitems, key=lambda x: (x.name.casefold(), x.name)),
)
- async def sync_library(self, media_types: tuple[MediaType, ...]) -> None:
+ async def sync_library(self, media_types: tuple[MediaType, ...]) -> None: # noqa: ARG002
"""Run library sync for this provider."""
- if MediaType.TRACK not in media_types or MediaType.PLAYLIST not in media_types:
- return
- cache_key = f"{self.instance_id}.checksums"
- prev_checksums = await self.mass.cache.get(cache_key, DB_SCHEMA_VERSION)
- save_checksum_interval = 0
- if prev_checksums is None:
- prev_checksums = {}
+ # first build a listing of all current items and their checksums
+ prev_checksums = {}
+ for ctrl in (self.mass.music.tracks, self.mass.music.playlists):
+ async for db_item in ctrl.iter_library_items_by_prov_id(self.instance_id):
+ file_name = next(
+ x.item_id
+ for x in db_item.provider_mappings
+ if x.provider_instance == self.instance_id
+ )
+ prev_checksums[file_name] = db_item.metadata.checksum
# process all deleted (or renamed) files first
cur_filenames = set()
# find all music files in the music directory and all subfolders
# we work bottom up, as-in we derive all info from the tracks
- cur_checksums = {}
async for item in self.listdir("", recursive=True):
if "." not in item.name or not item.ext:
# skip system files and files without extension
try:
# continue if the item did not change (checksum still the same)
if item.checksum == prev_checksums.get(item.path):
- cur_checksums[item.path] = item.checksum
continue
if item.ext in TRACK_EXTENSIONS:
except Exception as err: # pylint: disable=broad-except
# we don't want the whole sync to crash on one file so we catch all exceptions here
self.logger.exception("Error processing %s - %s", item.path, str(err))
- else:
- # save item's checksum only if the parse succeeded
- cur_checksums[item.path] = item.checksum
-
- # save checksums every 100 processed items
- # this allows us to pickup where we leftoff when initial scan gets interrupted
- if save_checksum_interval == 100:
- await self.mass.cache.set(cache_key, cur_checksums, DB_SCHEMA_VERSION)
- save_checksum_interval = 0
- else:
- save_checksum_interval += 1
-
- # store (final) checksums in cache
- await self.mass.cache.set(cache_key, cur_checksums, DB_SCHEMA_VERSION)
async def _process_deletions(self, deleted_files: set[str]) -> None:
"""Process all deletions."""
from typing import TYPE_CHECKING, Any
from aioslimproto.client import PlayerState as SlimPlayerState
-from aioslimproto.client import SlimClient as SlimClientOrg
+from aioslimproto.client import SlimClient
from aioslimproto.client import TransitionType as SlimTransition
from aioslimproto.const import EventType as SlimEventType
from aioslimproto.discovery import start_discovery
self.logger.debug("Socket client connected: %s", addr)
def client_callback(
- event_type: SlimEventType | str, client: SlimClient, data: Any = None # noqa: ARG001
+ event_type: SlimEventType, client: SlimClient, data: Any = None # noqa: ARG001
):
if event_type == SlimEventType.PLAYER_DISCONNECTED:
self.mass.create_task(self._handle_disconnected(client))
self.mass.create_task(self._handle_buffer_ready(client))
return
- if event_type == "output_underrun":
+ if event_type == SlimEventType.PLAYER_OUTPUT_UNDERRUN:
# player ran out of buffer
self.mass.create_task(self._handle_output_underrun(client))
return
self._handle_player_heartbeat(client)
return
- # ignore some uninteresting events
- if event_type in (
- SlimEventType.PLAYER_CLI_EVENT,
- SlimEventType.PLAYER_DECODER_ERROR,
- ):
- return
-
# forward player update to MA player controller
self.mass.create_task(self._handle_player_update(client))
return current_millis
-class SlimClient(SlimClientOrg):
- """Patched SLIMProto socket client."""
+# class SlimClient(SlimClientOrg):
+# """Patched SLIMProto socket client."""
- def _process_stat_stmo(self, data: bytes) -> None: # noqa: ARG002
- """Process incoming stat STMo message: Output Underrun."""
- self.callback("output_underrun", self)
+# def _process_stat_stmo(self, data: bytes) -> None: # noqa: ARG002
+# """Process incoming stat STMo message: Output Underrun."""
+# self.callback("output_underrun", self)
- def _process_stat_stmf(self, data: bytes) -> None: # noqa: ARG002
- """Process incoming stat STMf message (connection closed)."""
- self.logger.debug("STMf received - connection closed.")
- # we should ignore this event, its not relevant
+# def _process_stat_stmf(self, data: bytes) -> None: # noqa: ARG002
+# """Process incoming stat STMf message (connection closed)."""
+# self.logger.debug("STMf received - connection closed.")
+# # we should ignore this event, it's not relevant
"name": "Slimproto",
"description": "Support for slimproto based players (e.g. squeezebox, squeezelite).",
"codeowners": ["@music-assistant"],
- "requirements": ["aioslimproto==2.3.2"],
+ "requirements": ["aioslimproto==2.3.3"],
"documentation": "https://github.com/music-assistant/hass-music-assistant/discussions/1123",
"multi_instance": false,
"builtin": false,
# lookup by name
for track_artist in track.artists:
assert isinstance(track_artist, Artist)
- result = await self._get_data("searchtrack.php?", s=track_artist.name, t=track.name)
+ # make sure to include the version in the track name
+ search_name = track.name
+ if track.version:
+ search_name += f" {track.version}"
+ result = await self._get_data("searchtrack.php?", s=track_artist.name, t=search_name)
if result and result.get("track"):
for item in result["track"]:
if track_artist.mbid:
parent_source = player_id
for child_id in conf_members:
if child_player := self.mass.players.get(child_id, False):
+ if not child_player.available:
+ continue
# work out power state
if child_player.mute_as_power:
player_powered = child_player.powered and not child_player.volume_muted
aiofiles==23.1.0
aiohttp==3.8.5
aiorun==2022.11.1
-aioslimproto==2.3.2
+aioslimproto==2.3.3
aiosqlite==0.19.0
async-upnp-client==0.33.2
asyncio-throttle==1.0.2