import asyncio
import logging
import os
-import webbrowser
from os.path import abspath, dirname
from sys import path
required=False,
help="YoutubeMusic cookie",
)
+parser.add_argument(
+ "--smb-username",
+ required=False,
+ help="SMB username",
+)
+parser.add_argument(
+ "--smb-password",
+ required=False,
+ help="SMB password",
+)
+parser.add_argument(
+ "--smb-path",
+ required=False,
+ help="The NetBIOS machine name of the remote server + share (e.g. \\\\machine\\share).",
+)
parser.add_argument(
"--debug",
action="store_true",
logging.getLogger("asyncio").setLevel(logging.INFO)
logging.getLogger("aiosqlite").setLevel(logging.WARNING)
logging.getLogger("databases").setLevel(logging.INFO)
+logging.getLogger("SMB").setLevel(logging.INFO)
# default database based on sqlite
password=args.ytmusic_cookie,
)
)
+
if args.musicdir:
mass_conf.providers.append(
MusicProviderConfig(type=ProviderType.FILESYSTEM_LOCAL, path=args.musicdir)
)
+if args.smb_path:
+ mass_conf.providers.append(
+ MusicProviderConfig(
+ ProviderType.FILESYSTEM_SMB,
+ username=args.smb_username,
+ password=args.smb_password,
+ path=args.smb_path,
+ )
+ )
+
class TestPlayer(Player):
"""Demonstatration player implementation."""
print(f"stream url: {url}")
self._attr_current_url = url
self.update_state()
- # launch stream url in browser so we can hear it playing ;-)
+ # launch stream url with ffplay so we can hear it playing ;-)
# normally this url is sent to the actual player implementation
- webbrowser.open(url)
+ await asyncio.create_subprocess_exec(
+ "ffplay", "-hide_banner", "-loglevel", "quiet", "-i", url
+ )
async def stop(self) -> None:
"""Send STOP command to player."""
playlists = await mass.music.playlists.db_items()
playlists_lib = await mass.music.playlists.db_items(True)
print(
- f"Got {playlists_lib.total} tracks in library (of {playlists.total} total in db)"
+ f"Got {playlists_lib.total} playlists in library (of {playlists.total} total in db)"
)
# register a player
# or a list of items
if playlists.count > 0:
await test_player1.active_queue.play_media(playlists.items[0])
+ elif tracks.count > 0:
+ await test_player1.active_queue.play_media(tracks.items[0])
await asyncio.sleep(3600)
async def set(self, cache_key, data, checksum="", expiration=(86400 * 30)):
"""Set data in cache."""
- if not isinstance(checksum, str):
+ if checksum is not None and not isinstance(checksum, str):
checksum = str(checksum)
expires = int(time.time() + expiration)
self._mem_cache[cache_key] = (data, checksum, expires)
# clean up db cache object only if expired
if db_row["expires"] < cur_timestamp:
await self.delete(db_row["key"])
- # compact db
- await self.mass.database.execute("VACUUM")
def __schedule_cleanup_task(self):
"""Schedule the cleanup task."""
from music_assistant.mass import MusicAssistant
-SCHEMA_VERSION = 18
+SCHEMA_VERSION = 19
TABLE_TRACK_LOUDNESS = "track_loudness"
TABLE_PLAYLOG = "playlog"
except (KeyError, ValueError):
prev_version = 0
- if SCHEMA_VERSION != prev_version:
+ if prev_version not in (0, SCHEMA_VERSION):
self.logger.info(
"Performing database migration from %s to %s",
prev_version,
await self.execute(f"DROP TABLE IF EXISTS {TABLE_RADIOS}")
await self.execute(f"DROP TABLE IF EXISTS {TABLE_CACHE}")
await self.execute(f"DROP TABLE IF EXISTS {TABLE_THUMBS}")
- await self.execute("DROP TABLE IF EXISTS provider_mappings")
# recreate missing tables
await self.__create_database_tables()
+ if prev_version == 18:
+ # model for provider_mapping completely changed,
+ # we just drop the old provider_ids column and add the new provider_mappings column
+ # this will require a full resync of all providers including matching but at least
+ # the additional metadata is not lost
+ await self.execute(
+ f"ALTER TABLE {TABLE_ARTISTS} ADD provider_mappings json DEFAULT '[]';"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_ALBUMS} ADD provider_mappings json DEFAULT '[]';"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_TRACKS} ADD provider_mappings json DEFAULT '[]';"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_PLAYLISTS} ADD provider_mappings json DEFAULT '[]';"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_RADIOS} ADD provider_mappings json DEFAULT '[]';"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_ARTISTS} DROP column provider_ids;"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_ALBUMS} DROP column provider_ids;"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_TRACKS} DROP column provider_ids;"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_PLAYLISTS} DROP column provider_ids;"
+ )
+ await self.execute(
+ f"ALTER TABLE {TABLE_RADIOS} DROP column provider_ids;"
+ )
+ await self.execute(f"DROP TABLE IF EXISTS {TABLE_CACHE}")
+ # recreate missing table(s)
+ await self.__create_database_tables()
+
# store current schema version
await self.set_setting("version", str(SCHEMA_VERSION))
+ # compact db
+ await self.mass.database.execute("VACUUM")
async def __create_database_tables(self) -> None:
"""Init database tables."""
musicbrainz_id TEXT,
artists json,
metadata json,
- provider_ids json,
+ provider_mappings json,
timestamp INTEGER DEFAULT 0
);"""
)
musicbrainz_id TEXT,
in_library BOOLEAN DEFAULT 0,
metadata json,
- provider_ids json,
+ provider_mappings json,
timestamp INTEGER DEFAULT 0
);"""
)
artists json,
albums json,
metadata json,
- provider_ids json,
+ provider_mappings json,
timestamp INTEGER DEFAULT 0
);"""
)
is_editable BOOLEAN NOT NULL,
in_library BOOLEAN DEFAULT 0,
metadata json,
- provider_ids json,
+ provider_mappings json,
timestamp INTEGER DEFAULT 0,
UNIQUE(name, owner)
);"""
sort_name TEXT NOT NULL,
in_library BOOLEAN DEFAULT 0,
metadata json,
- provider_ids json,
+ provider_mappings json,
timestamp INTEGER DEFAULT 0
);"""
)
async def tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> List[Track]:
"""Return album tracks for the given provider album id."""
- if not (provider == ProviderType.DATABASE or provider_id == "database"):
+ if not (provider_type == ProviderType.DATABASE or provider_id == "database"):
# return provider album tracks
- return await self._get_provider_album_tracks(item_id, provider, provider_id)
+ return await self._get_provider_album_tracks(
+ item_id, provider_type, provider_id
+ )
# db_album requested: get results from first (non-file) provider
return await self._get_db_album_tracks(item_id)
async def versions(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> List[Album]:
"""Return all versions of an album we can find on all providers."""
- assert provider or provider_id, "Provider type or ID must be specified"
- album = await self.get(item_id, provider, provider_id)
+ assert provider_type or provider_id, "Provider type or ID must be specified"
+ album = await self.get(item_id, provider_type, provider_id)
# perform a search on all provider(types) to collect all versions/variants
- prov_types = {item.type for item in self.mass.music.providers}
+ provider_types = {item.type for item in self.mass.music.providers}
search_query = f"{album.artist.name} - {album.name}"
all_versions = {
prov_item.item_id: prov_item
for prov_items in await asyncio.gather(
- *[self.search(search_query, prov_type) for prov_type in prov_types]
+ *[
+ self.search(search_query, provider_type)
+ for provider_type in provider_types
+ ]
)
for prov_item in prov_items
if loose_compare_strings(album.name, prov_item.name)
}
# make sure that the 'base' version is included
- for prov_version in album.provider_ids:
+ for prov_version in album.provider_mappings:
if prov_version.item_id in all_versions:
continue
album_copy = Album.from_dict(album.to_dict())
album_copy.item_id = prov_version.item_id
- album_copy.provider = prov_version.prov_type
- album_copy.provider_ids = {prov_version}
+ album_copy.provider = prov_version.provider_type
+ album_copy.provider_mappings = {prov_version}
all_versions[prov_version.item_id] = album_copy
# return the aggregated result
# return final db_item after all match/metadata actions
db_item = await self.get_db_item(db_item.item_id)
# dump album tracks in db
- for prov in db_item.provider_ids:
+ for prov_mapping in db_item.provider_mappings:
for track in await self._get_provider_album_tracks(
- prov.item_id, prov.prov_id
+ prov_mapping.item_id, prov_mapping.provider_id
):
await self.mass.music.tracks.add_db_item(track)
self.mass.signal_event(
async def add_db_item(self, item: Album, overwrite_existing: bool = False) -> Album:
"""Add a new record to the database."""
- assert item.provider_ids, f"Album {item.name} is missing provider id(s)"
+ assert item.provider_mappings, f"Album {item.name} is missing provider id(s)"
assert item.artist, f"Album {item.name} is missing artist"
async with self._db_add_lock:
cur_item = None
overwrite: bool = False,
) -> Album:
"""Update Album record in the database."""
- assert item.provider_ids, f"Album {item.name} is missing provider id(s)"
+ assert item.provider_mappings, f"Album {item.name} is missing provider id(s)"
assert item.artist, f"Album {item.name} is missing artist"
cur_item = await self.get_db_item(item_id)
if overwrite:
metadata = item.metadata
metadata.last_refresh = None
- provider_ids = item.provider_ids
+ provider_mappings = item.provider_mappings
album_artists = await self._get_album_artists(item, overwrite=True)
else:
metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
- provider_ids = {*cur_item.provider_ids, *item.provider_ids}
+ provider_mappings = {*cur_item.provider_mappings, *item.provider_mappings}
album_artists = await self._get_album_artists(item, cur_item)
if item.album_type != AlbumType.UNKNOWN:
"album_type": album_type.value,
"artists": json_serializer(album_artists) or None,
"metadata": json_serializer(metadata),
- "provider_ids": json_serializer(provider_ids),
+ "provider_mappings": json_serializer(provider_mappings),
"musicbrainz_id": item.musicbrainz_id or cur_item.musicbrainz_id,
},
)
async def _get_provider_album_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> List[Track]:
"""Return album tracks for the given provider album id."""
- prov = self.mass.music.get_provider(provider_id or provider)
+ prov = self.mass.music.get_provider(provider_id or provider_type)
if not prov:
return []
- full_album = await self.get_provider_item(item_id, provider_id or provider)
+ full_album = await self.get_provider_item(item_id, provider_id or provider_type)
# prefer cache items (if any)
cache_key = f"{prov.type.value}.albumtracks.{item_id}"
cache_checksum = full_album.metadata.checksum
async def _get_provider_dynamic_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 25,
):
"""Generate a dynamic list of tracks based on the album content."""
- prov = self.mass.music.get_provider(provider_id or provider)
+ prov = self.mass.music.get_provider(provider_id or provider_type)
if (
not prov
or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
):
return []
album_tracks = await self._get_provider_album_tracks(
- item_id=item_id, provider=provider, provider_id=provider_id
+ item_id=item_id, provider_type=provider_type, provider_id=provider_id
)
# Grab a random track from the album that we use to obtain similar tracks for
track = choice(album_tracks)
if db_album.provider != ProviderType.DATABASE:
return # Matching only supported for database items
async def find_prov_match(provider: MusicProvider):
self.logger.debug(
"Trying to match album %s on provider %s", db_album.name, provider.name
)
return match_found
# try to find match on all providers
- cur_prov_types = {x.prov_type for x in db_album.provider_ids}
+ cur_provider_types = {x.provider_type for x in db_album.provider_mappings}
for provider in self.mass.music.providers:
- if provider.type in cur_prov_types:
+ if provider.type in cur_provider_types:
continue
if MusicProviderFeature.SEARCH not in provider.supported_features:
continue
if await find_prov_match(provider):
- cur_prov_types.add(provider.type)
+ cur_provider_types.add(provider.type)
else:
self.logger.debug(
"Could not find match for Album %s on provider %s",
return ItemMapping.from_item(artist)
if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
- artist.item_id, provider=artist.provider
+ artist.item_id, provider_type=artist.provider
):
return ItemMapping.from_item(db_artist)
async def tracks(
self,
item_id: Optional[str] = None,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
artist: Optional[Artist] = None,
) -> List[Track]:
"""Return top tracks for an artist."""
if not artist:
- artist = await self.get(item_id, provider, provider_id)
+ artist = await self.get(item_id, provider_type, provider_id)
# get results from all providers
coros = [
self.get_provider_artist_toptracks(
- item.item_id,
- provider=item.prov_type,
- provider_id=item.prov_id,
+ prov_mapping.item_id,
+ provider_type=prov_mapping.provider_type,
+ provider_id=prov_mapping.provider_id,
cache_checksum=artist.metadata.checksum,
)
- for item in artist.provider_ids
+ for prov_mapping in artist.provider_mappings
]
tracks = itertools.chain.from_iterable(await asyncio.gather(*coros))
# merge duplicates using a dict
for track in tracks:
key = f".{track.name}.{track.version}"
if key in final_items:
- final_items[key].provider_ids.update(track.provider_ids)
+ final_items[key].provider_mappings.update(track.provider_mappings)
else:
final_items[key] = track
return list(final_items.values())
async def albums(
self,
item_id: Optional[str] = None,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
artist: Optional[Artist] = None,
) -> List[Album]:
"""Return (all/most popular) albums for an artist."""
if not artist:
- artist = await self.get(item_id, provider, provider_id)
+ artist = await self.get(item_id, provider_type, provider_id)
# get results from all providers
coros = [
self.get_provider_artist_albums(
- item.item_id, item.prov_type, cache_checksum=artist.metadata.checksum
+ item.item_id,
+ item.provider_type,
+ cache_checksum=artist.metadata.checksum,
)
- for item in artist.provider_ids
+ for item in artist.provider_mappings
]
albums = itertools.chain.from_iterable(await asyncio.gather(*coros))
# merge duplicates using a dict
for album in albums:
key = f".{album.name}.{album.version}"
if key in final_items:
- final_items[key].provider_ids.update(album.provider_ids)
+ final_items[key].provider_mappings.update(album.provider_mappings)
else:
final_items[key] = album
if album.in_library:
assert (
db_artist.provider == ProviderType.DATABASE
), "Matching only supported for database items!"
- cur_prov_types = {x.prov_type for x in db_artist.provider_ids}
+ cur_provider_types = {x.provider_type for x in db_artist.provider_mappings}
for provider in self.mass.music.providers:
- if provider.type in cur_prov_types:
+ if provider.type in cur_provider_types:
continue
if MusicProviderFeature.SEARCH not in provider.supported_features:
continue
if await self._match(db_artist, provider):
- cur_prov_types.add(provider.type)
+ cur_provider_types.add(provider.type)
else:
self.logger.debug(
"Could not find match for Artist %s on provider %s",
async def get_provider_artist_toptracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
cache_checksum: Any = None,
) -> List[Track]:
"""Return top tracks for an artist on given provider."""
- prov = self.mass.music.get_provider(provider_id or provider)
+ prov = self.mass.music.get_provider(provider_id or provider_type)
if not prov:
return []
# prefer cache items (if any)
else:
# fallback implementation using the db
if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
- item_id, provider=provider, provider_id=provider_id
+ item_id, provider_type=provider_type, provider_id=provider_id
):
- prov_id = provider_id or provider.value
+ prov_id = provider_id or provider_type.value
# TODO: adjust to json query instead of text search?
query = f"SELECT * FROM tracks WHERE artists LIKE '%\"{db_artist.item_id}\"%'"
- query += f" AND provider_ids LIKE '%\"{prov_id}\"%'"
+ query += f" AND provider_mappings LIKE '%\"{prov_id}\"%'"
items = await self.mass.music.tracks.get_db_items_by_query(query)
# store (serializable items) in cache
self.mass.create_task(
async def get_provider_artist_albums(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
cache_checksum: Any = None,
) -> List[Album]:
"""Return albums for an artist on given provider."""
- prov = self.mass.music.get_provider(provider_id or provider)
+ prov = self.mass.music.get_provider(provider_id or provider_type)
if not prov:
return []
# prefer cache items (if any)
else:
# fallback implementation using the db
if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
- item_id, provider=provider, provider_id=provider_id
+ item_id, provider_type=provider_type, provider_id=provider_id
):
- prov_id = provider_id or provider.value
+ prov_id = provider_id or provider_type.value
# TODO: adjust to json query instead of text search?
query = f"SELECT * FROM albums WHERE artists LIKE '%\"{db_artist.item_id}\"%'"
- query += f" AND provider_ids LIKE '%\"{prov_id}\"%'"
+ query += f" AND provider_mappings LIKE '%\"{prov_id}\"%'"
items = await self.mass.music.albums.get_db_items_by_query(query)
else:
# edge case
) -> Artist:
"""Add a new item record to the database."""
assert isinstance(item, Artist), "Not a full Artist object"
- assert item.provider_ids, "Artist is missing provider id(s)"
+ assert item.provider_mappings, "Artist is missing provider id(s)"
# enforce various artists name + id
if compare_strings(item.name, VARIOUS_ARTISTS):
item.musicbrainz_id = VARIOUS_ARTISTS_ID
cur_item = await self.get_db_item(item_id)
if overwrite:
metadata = item.metadata
- provider_ids = item.provider_ids
+ provider_mappings = item.provider_mappings
else:
metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
- provider_ids = {*cur_item.provider_ids, *item.provider_ids}
+ provider_mappings = {*cur_item.provider_mappings, *item.provider_mappings}
# enforce various artists name + id
if compare_strings(item.name, VARIOUS_ARTISTS):
"sort_name": item.sort_name if overwrite else cur_item.sort_name,
"musicbrainz_id": item.musicbrainz_id or cur_item.musicbrainz_id,
"metadata": json_serializer(metadata),
- "provider_ids": json_serializer(provider_ids),
+ "provider_mappings": json_serializer(provider_mappings),
},
)
self.logger.debug("updated %s in database: %s", item.name, item_id)
async def _get_provider_dynamic_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 25,
):
"""Generate a dynamic list of tracks based on the artist's top tracks."""
- prov = self.mass.music.get_provider(provider_id or provider)
+ prov = self.mass.music.get_provider(provider_id or provider_type)
if (
not prov
or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
):
return []
top_tracks = await self.get_provider_artist_toptracks(
- item_id=item_id, provider=provider, provider_id=provider_id
+ item_id=item_id, provider_type=provider_type, provider_id=provider_id
)
# Grab a random track from the album that we use to obtain similar tracks for
track = choice(top_tracks)
async def get(
self,
provider_item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
force_refresh: bool = False,
lazy: bool = True,
details: ItemCls = None,
) -> ItemCls:
"""Return (full) details for a single media item."""
- assert provider or provider_id, "provider or provider_id must be supplied"
- if isinstance(provider, str):
- provider = ProviderType(provider)
+ assert (
+ provider_type or provider_id
+ ), "provider_type or provider_id must be supplied"
+ if isinstance(provider_type, str):
+ provider_type = ProviderType(provider_type)
db_item = await self.get_db_item_by_prov_id(
provider_item_id=provider_item_id,
- provider=provider,
+ provider_type=provider_type,
provider_id=provider_id,
)
if db_item and (time() - db_item.last_refresh) > REFRESH_INTERVAL:
force_refresh = True
if db_item and force_refresh:
# get (first) provider item id belonging to this db item
- provider_id, provider_item_id = await self.get_provider_id(db_item)
+ provider_id, provider_item_id = await self.get_provider_mapping(db_item)
elif db_item:
# we have a db item and no refreshing is needed, return the results!
return db_item
if not details and provider_id:
# no details provider nor in db, fetch them from the provider
details = await self.get_provider_item(provider_item_id, provider_id)
- if not details and provider:
+ if not details and provider_type:
# check providers for given provider type one by one
for prov in self.mass.music.providers:
if not prov.available:
continue
- if prov.type == provider:
+ if prov.type == provider_type:
try:
details = await self.get_provider_item(
provider_item_id, prov.id
if not details:
# we couldn't get a match from any of the providers, raise error
raise MediaNotFoundError(
- f"Item not found: {provider.value or provider_id}/{provider_item_id}"
+ f"Item not found: {provider_type.value if provider_type else provider_id}/{provider_item_id}"
)
# create job to add the item to the db, including matching metadata etc. takes some time
# in 99% of the cases we just return lazy because we want the details as fast as possible
async def search(
self,
search_query: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 25,
) -> List[ItemCls]:
"""Search database or provider with given query."""
# create safe search string
search_query = search_query.replace("/", " ").replace("'", "")
- if provider == ProviderType.DATABASE or provider_id == "database":
+ if provider_type == ProviderType.DATABASE or provider_id == "database":
return [
self.item_cls.from_db_row(db_row)
for db_row in await self.mass.database.search(
)
]
- prov = self.mass.music.get_provider(provider_id or provider)
+ prov = self.mass.music.get_provider(provider_id or provider_type)
if not prov or MusicProviderFeature.SEARCH not in prov.supported_features:
return []
if not prov.library_supported(self.media_type):
async def add_to_library(
self,
provider_item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> None:
"""Add an item to the library."""
prov_item = await self.get_db_item_by_prov_id(
- provider_item_id, provider=provider, provider_id=provider_id
+ provider_item_id, provider_type=provider_type, provider_id=provider_id
)
if prov_item is None:
prov_item = await self.get_provider_item(
- provider_item_id, provider_id or provider
+ provider_item_id, provider_id or provider_type
)
if prov_item.in_library is True:
return
# mark as favorite/library item on provider(s)
- for prov_id in prov_item.provider_ids:
- if prov := self.mass.music.get_provider(prov_id.prov_id):
+ for prov_mapping in prov_item.provider_mappings:
+ if prov := self.mass.music.get_provider(prov_mapping.provider_id):
if not prov.library_edit_supported(self.media_type):
continue
- await prov.library_add(prov_id.item_id, self.media_type)
+ await prov.library_add(prov_mapping.item_id, self.media_type)
# mark as library item in internal db if db item
if prov_item.provider == ProviderType.DATABASE:
if not prov_item.in_library:
async def remove_from_library(
self,
provider_item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> None:
"""Remove item from the library."""
prov_item = await self.get_db_item_by_prov_id(
- provider_item_id, provider=provider, provider_id=provider_id
+ provider_item_id, provider_type=provider_type, provider_id=provider_id
)
if prov_item is None:
prov_item = await self.get_provider_item(
- provider_item_id, provider_id or provider
+ provider_item_id, provider_id or provider_type
)
if prov_item.in_library is False:
return
# unmark as favorite/library item on provider(s)
- for prov_id in prov_item.provider_ids:
- if prov := self.mass.music.get_provider(prov_id.prov_id):
+ for prov_mapping in prov_item.provider_mappings:
+ if prov := self.mass.music.get_provider(prov_mapping.provider_id):
if not prov.library_edit_supported(self.media_type):
continue
- await prov.library_remove(prov_id.item_id, self.media_type)
+ await prov.library_remove(prov_mapping.item_id, self.media_type)
# unmark as library item in internal db if db item
if prov_item.provider == ProviderType.DATABASE:
prov_item.in_library = False
await self.set_db_library(prov_item.item_id, False)
- async def get_provider_id(self, item: ItemCls) -> Tuple[str, str]:
+ async def get_provider_mapping(self, item: ItemCls) -> Tuple[str, str]:
"""Return (first) provider and item id."""
if item.provider == ProviderType.DATABASE:
# make sure we have a full object
item = await self.get_db_item(item.item_id)
for prefer_file in (True, False):
- for prov in item.provider_ids:
+ for prov_mapping in item.provider_mappings:
# returns the first provider that is available
- if not prov.available:
+ if not prov_mapping.available:
continue
- if prefer_file and not prov.prov_type.is_file():
+ if prefer_file and not prov_mapping.provider_type.is_file():
continue
- if self.mass.music.get_provider(prov.prov_id):
- return (prov.prov_id, prov.item_id)
+ if self.mass.music.get_provider(prov_mapping.provider_id):
+ return (prov_mapping.provider_id, prov_mapping.item_id)
return None, None
async def get_db_items_by_query(
async def get_db_item_by_prov_id(
self,
provider_item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> ItemCls | None:
- """Get the database item for the given prov_id."""
- assert provider or provider_id, "provider or provider_id must be supplied"
- if isinstance(provider, str):
- provider = ProviderType(provider)
- if provider == ProviderType.DATABASE or provider_id == "database":
+ """Get the database item for the given provider_id."""
+ assert (
+ provider_type or provider_id
+ ), "provider_type or provider_id must be supplied"
+ if isinstance(provider_type, str):
+ provider_type = ProviderType(provider_type)
+ if provider_type == ProviderType.DATABASE or provider_id == "database":
return await self.get_db_item(provider_item_id)
for item in await self.get_db_items_by_prov_id(
- provider=provider,
+ provider_type=provider_type,
provider_id=provider_id,
provider_item_ids=(provider_item_id,),
):
async def get_db_items_by_prov_id(
self,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
provider_item_ids: Optional[Tuple[str]] = None,
limit: int = 500,
offset: int = 0,
) -> List[ItemCls]:
"""Fetch all records from database for given provider."""
- assert provider or provider_id, "provider or provider_id must be supplied"
- if isinstance(provider, str):
- provider = ProviderType(provider)
- if provider == ProviderType.DATABASE or provider_id == "database":
+ assert (
+ provider_type or provider_id
+ ), "provider_type or provider_id must be supplied"
+ if isinstance(provider_type, str):
+ provider_type = ProviderType(provider_type)
+ if provider_type == ProviderType.DATABASE or provider_id == "database":
return await self.get_db_items_by_query(limit=limit, offset=offset)
- query = f"SELECT * FROM {self.db_table}, json_each(provider_ids)"
+ query = f"SELECT * FROM {self.db_table}, json_each(provider_mappings)"
if provider_id is not None:
- query += (
- f" WHERE json_extract(json_each.value, '$.prov_id') = '{provider_id}'"
- )
- elif provider is not None:
- query += f" WHERE json_extract(json_each.value, '$.prov_type') = '{provider.value}'"
+ query += f" WHERE json_extract(json_each.value, '$.provider_id') = '{provider_id}'"
+ elif provider_type is not None:
+ query += f" WHERE json_extract(json_each.value, '$.provider_type') = '{provider_type.value}'"
if provider_item_ids is not None:
prov_ids = str(tuple(provider_item_ids))
if prov_ids.endswith(",)"):
async def get_provider_item(
self,
item_id: str,
- provider_id: Union[str, ProviderType],
+ provider_id_or_type: Union[str, ProviderType],
) -> ItemCls:
"""Return item details for the given provider item id."""
- if provider_id in ("database", ProviderType.DATABASE):
+ if provider_id_or_type in ("database", ProviderType.DATABASE):
item = await self.get_db_item(item_id)
else:
- provider = self.mass.music.get_provider(provider_id)
+ provider = self.mass.music.get_provider(provider_id_or_type)
item = await provider.get_item(self.media_type, item_id)
if not item:
raise MediaNotFoundError(
- f"{self.media_type.value} {item_id} not found on provider {provider.name}"
+ f"{self.media_type.value}//{item_id} not found on provider {provider_id_or_type}"
)
return item
- async def remove_prov_mapping(self, item_id: int, prov_id: str) -> None:
+ async def remove_prov_mapping(self, item_id: int, provider_id: str) -> None:
"""Remove provider id(s) from item."""
try:
db_item = await self.get_db_item(item_id)
# edge case: already deleted / race condition
return
- db_item.provider_ids = {x for x in db_item.provider_ids if x.prov_id != prov_id}
- if not db_item.provider_ids:
- # item has no more provider_ids left, it is completely deleted
+ db_item.provider_mappings = {
+ x for x in db_item.provider_mappings if x.provider_id != provider_id
+ }
+ if not db_item.provider_mappings:
+ # item has no more provider_mappings left, it is completely deleted
try:
await self.delete_db_item(db_item.item_id)
except AssertionError:
)
return
- # update the item in db (provider_ids column only)
+ # update the item in db (provider_mappings column only)
match = {"item_id": item_id}
await self.mass.database.update(
self.db_table,
match,
- {"provider_ids": json_serializer(db_item.provider_ids)},
+ {"provider_mappings": json_serializer(db_item.provider_mappings)},
)
self.mass.signal_event(
MassEvent(EventType.MEDIA_ITEM_UPDATED, db_item.uri, db_item)
)
- self.logger.debug("removed provider %s from item id %s", prov_id, item_id)
+ self.logger.debug("removed provider %s from item id %s", provider_id, item_id)
async def delete_db_item(self, item_id: int, recursive: bool = False) -> None:
"""Delete record from the database."""
async def dynamic_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 25,
) -> List[Track]:
"""Return a dynamic list of tracks based on the given item."""
- ref_item = await self.get(item_id, provider, provider_id)
- for prov_id in ref_item.provider_ids:
- prov = self.mass.music.get_provider(prov_id.prov_id)
+ ref_item = await self.get(item_id, provider_type, provider_id)
+ for prov_mapping in ref_item.provider_mappings:
+ prov = self.mass.music.get_provider(prov_mapping.provider_id)
if not prov.available:
continue
if MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features:
continue
return await self._get_provider_dynamic_tracks(
- item_id=prov_id.item_id,
- provider=prov_id.prov_type,
- provider_id=prov_id.prov_id,
+ item_id=prov_mapping.item_id,
+ provider_type=prov_mapping.provider_type,
+ provider_id=prov_mapping.provider_id,
limit=limit,
)
# Fallback to the default implementation
async def _get_provider_dynamic_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 25,
) -> List[Track]:
async def tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> List[Track]:
"""Return playlist tracks for the given provider playlist id."""
- playlist = await self.get(item_id, provider, provider_id)
- prov = next(x for x in playlist.provider_ids)
+ playlist = await self.get(item_id, provider_type, provider_id)
+ prov = next(x for x in playlist.provider_mappings)
return await self._get_provider_playlist_tracks(
prov.item_id,
- provider=prov.prov_type,
- provider_id=prov.prov_id,
+ provider_type=prov.provider_type,
+ provider_id=prov.provider_id,
cache_checksum=playlist.metadata.checksum,
)
return db_item
async def create(
- self, name: str, prov_id: Union[ProviderType, str, None] = None
+ self, name: str, prov_type_or_id: Union[ProviderType, str, None] = None
) -> Playlist:
"""Create new playlist."""
- # if prov_id is omitted, prefer file
- if prov_id:
- provider = self.mass.music.get_provider(prov_id)
+ # if prov_type_or_id is omitted, prefer file
+ if prov_type_or_id:
+ provider = self.mass.music.get_provider(prov_type_or_id)
else:
try:
provider = self.mass.music.get_provider(ProviderType.FILESYSTEM_LOCAL)
track = await self.mass.music.get_item_by_uri(track_uri, lazy=False)
assert track.media_type == MediaType.TRACK
# a playlist can only have one provider (for now)
- playlist_prov = next(iter(playlist.provider_ids))
+ playlist_prov = next(iter(playlist.provider_mappings))
# grab all existing track ids in the playlist so we can check for duplicates
cur_playlist_track_ids = set()
count = 0
- for item in await self.tracks(playlist_prov.item_id, playlist_prov.prov_type):
+ for item in await self.tracks(
+ playlist_prov.item_id, playlist_prov.provider_type
+ ):
count += 1
cur_playlist_track_ids.update(
{
i.item_id
- for i in item.provider_ids
- if i.prov_id == playlist_prov.prov_id
+ for i in item.provider_mappings
+ if i.provider_id == playlist_prov.provider_id
}
)
# check for duplicates
- for track_prov in track.provider_ids:
+ for track_prov in track.provider_mappings:
if (
- track_prov.prov_type == playlist_prov.prov_type
+ track_prov.provider_type == playlist_prov.provider_type
and track_prov.item_id in cur_playlist_track_ids
):
raise InvalidDataError(
# simply sort by quality and just add the first one (assuming track is still available)
track_id_to_add = None
for track_version in sorted(
- track.provider_ids, key=lambda x: x.quality, reverse=True
+ track.provider_mappings, key=lambda x: x.quality, reverse=True
):
if not track.available:
continue
- if playlist_prov.prov_type.is_file():
+ if playlist_prov.provider_type.is_file():
# the file provider can handle uri's from all providers so simply add the uri
track_id_to_add = track_version.url or create_uri(
MediaType.TRACK,
- track_version.prov_type,
+ track_version.provider_type,
track_version.item_id,
)
break
- if track_version.prov_type == playlist_prov.prov_type:
+ if track_version.provider_type == playlist_prov.provider_type:
track_id_to_add = track_version.item_id
break
if not track_id_to_add:
raise MediaNotFoundError(
- f"Track is not available on provider {playlist_prov.prov_type}"
+ f"Track is not available on provider {playlist_prov.provider_type}"
)
# actually add the tracks to the playlist on the provider
- provider = self.mass.music.get_provider(playlist_prov.prov_id)
+ provider = self.mass.music.get_provider(playlist_prov.provider_id)
await provider.add_playlist_tracks(playlist_prov.item_id, [track_id_to_add])
# invalidate cache by updating the checksum
await self.get(
- db_playlist_id, provider=ProviderType.DATABASE, force_refresh=True
+ db_playlist_id, provider_type=ProviderType.DATABASE, force_refresh=True
)
async def remove_playlist_tracks(
raise MediaNotFoundError(f"Playlist with id {db_playlist_id} not found")
if not playlist.is_editable:
raise InvalidDataError(f"Playlist {playlist.name} is not editable")
- for prov in playlist.provider_ids:
- provider = self.mass.music.get_provider(prov.prov_id)
+ for prov_mapping in playlist.provider_mappings:
+ provider = self.mass.music.get_provider(prov_mapping.provider_id)
if (
MusicProviderFeature.PLAYLIST_TRACKS_EDIT
not in provider.supported_features
):
self.logger.warning(
"Provider %s does not support editing playlists",
- prov.prov_type.value,
+ prov_mapping.provider_type.value,
)
continue
- await provider.remove_playlist_tracks(prov.item_id, positions_to_remove)
+ await provider.remove_playlist_tracks(
+ prov_mapping.item_id, positions_to_remove
+ )
# invalidate cache by updating the checksum
- await self.get(
- db_playlist_id, provider=ProviderType.DATABASE, force_refresh=True
- )
+ await self.get(db_playlist_id, ProviderType.DATABASE, force_refresh=True)
async def add_db_item(
self, item: Playlist, overwrite_existing: bool = False
cur_item = await self.get_db_item(item_id)
if overwrite:
metadata = item.metadata
- provider_ids = item.provider_ids
+ provider_mappings = item.provider_mappings
else:
metadata = cur_item.metadata.update(item.metadata)
- provider_ids = {*cur_item.provider_ids, *item.provider_ids}
+ provider_mappings = {*cur_item.provider_mappings, *item.provider_mappings}
await self.mass.database.update(
self.db_table,
"owner": item.owner,
"is_editable": item.is_editable,
"metadata": json_serializer(metadata),
- "provider_ids": json_serializer(provider_ids),
+ "provider_mappings": json_serializer(provider_mappings),
},
)
self.logger.debug("updated %s in database: %s", item.name, item_id)
async def _get_provider_playlist_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
cache_checksum: Any = None,
) -> List[Track]:
"""Return album tracks for the given provider album id."""
- prov = self.mass.music.get_provider(provider_id or provider)
- if not prov:
+ provider = self.mass.music.get_provider(provider_id or provider_type)
+ if not provider:
return []
# prefer cache items (if any)
- cache_key = f"{prov.id}.playlist.{item_id}.tracks"
+ cache_key = f"{provider.id}.playlist.{item_id}.tracks"
if cache := await self.mass.cache.get(cache_key, checksum=cache_checksum):
return [Track.from_dict(x) for x in cache]
# no items in cache - get listing from provider
- items = await prov.get_playlist_tracks(item_id)
+ items = await provider.get_playlist_tracks(item_id)
# double check if position set
if items:
assert (
async def _get_provider_dynamic_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 25,
):
"""Generate a dynamic list of tracks based on the playlist content."""
- prov = self.mass.music.get_provider(provider_id or provider)
+ provider = self.mass.music.get_provider(provider_id or provider_type)
if (
- not prov
- or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
+ not provider
+ or MusicProviderFeature.SIMILAR_TRACKS not in provider.supported_features
):
return []
playlist_tracks = await self._get_provider_playlist_tracks(
- item_id=item_id, provider=provider, provider_id=provider_id
+ item_id=item_id, provider_type=provider_type, provider_id=provider_id
)
# filter out unavailable tracks
playlist_tracks = [x for x in playlist_tracks if x.available]
final_items.update(base_tracks)
# get 5 suggestions for one of the base tracks
base_track = next(x for x in base_tracks if x.available)
- similar_tracks = await prov.get_similar_tracks(
+ similar_tracks = await provider.get_similar_tracks(
prov_track_id=base_track.item_id, limit=5
)
final_items.update(x for x in similar_tracks if x.available)
async def versions(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> List[Radio]:
"""Return all versions of a radio station we can find on all providers."""
- assert provider or provider_id, "Provider type or ID must be specified"
- radio = await self.get(item_id, provider, provider_id)
+ assert provider_type or provider_id, "Provider type or ID must be specified"
+ radio = await self.get(item_id, provider_type, provider_id)
# perform a search on all provider(types) to collect all versions/variants
- prov_types = {item.type for item in self.mass.music.providers}
+ provider_types = {item.type for item in self.mass.music.providers}
all_versions = {
prov_item.item_id: prov_item
for prov_items in await asyncio.gather(
- *[self.search(radio.name, prov_type) for prov_type in prov_types]
+ *[
+ self.search(radio.name, provider_type)
+ for provider_type in provider_types
+ ]
)
for prov_item in prov_items
if loose_compare_strings(radio.name, prov_item.name)
}
# make sure that the 'base' version is included
- for prov_version in radio.provider_ids:
+ for prov_version in radio.provider_mappings:
if prov_version.item_id in all_versions:
continue
radio_copy = Radio.from_dict(radio.to_dict())
radio_copy.item_id = prov_version.item_id
- radio_copy.provider = prov_version.prov_type
- radio_copy.provider_ids = {prov_version}
+ radio_copy.provider = prov_version.provider_type
+ radio_copy.provider_mappings = {prov_version}
all_versions[prov_version.item_id] = radio_copy
# return the aggregated result
async def add_db_item(self, item: Radio, overwrite_existing: bool = False) -> Radio:
"""Add a new item record to the database."""
- assert item.provider_ids
+ assert item.provider_mappings
async with self._db_add_lock:
match = {"name": item.name}
if cur_item := await self.mass.database.get_row(self.db_table, match):
cur_item = await self.get_db_item(item_id)
if overwrite:
metadata = item.metadata
- provider_ids = item.provider_ids
+ provider_mappings = item.provider_mappings
else:
metadata = cur_item.metadata.update(item.metadata)
- provider_ids = {*cur_item.provider_ids, *item.provider_ids}
+ provider_mappings = {*cur_item.provider_mappings, *item.provider_mappings}
match = {"item_id": item_id}
await self.mass.database.update(
"name": item.name,
"sort_name": item.sort_name,
"metadata": json_serializer(metadata),
- "provider_ids": json_serializer(provider_ids),
+ "provider_mappings": json_serializer(provider_mappings),
},
)
self.logger.debug("updated %s in database: %s", item.name, item_id)
async def _get_provider_dynamic_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 25,
) -> List[Track]:
async def versions(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> List[Track]:
"""Return all versions of a track we can find on all providers."""
- assert provider or provider_id, "Provider type or ID must be specified"
- track = await self.get(item_id, provider, provider_id)
+ assert provider_type or provider_id, "Provider type or ID must be specified"
+ track = await self.get(item_id, provider_type or provider_id)
# perform a search on all provider(types) to collect all versions/variants
- prov_types = {item.type for item in self.mass.music.providers}
+ provider_types = {item.type for item in self.mass.music.providers}
search_query = f"{track.artist.name} - {track.name}"
all_versions = {
prov_item.item_id: prov_item
for prov_items in await asyncio.gather(
- *[self.search(search_query, prov_type) for prov_type in prov_types]
+ *[
+ self.search(search_query, provider_type)
+ for provider_type in provider_types
+ ]
)
for prov_item in prov_items
if loose_compare_strings(track.name, prov_item.name)
and compare_artists(prov_item.artists, track.artists, any_match=True)
}
# make sure that the 'base' version is included
- for prov_version in track.provider_ids:
+ for prov_version in track.provider_mappings:
if prov_version.item_id in all_versions:
continue
# grab full item here including album details etc
prov_track = await self.get_provider_item(
- prov_version.item_id, prov_version.prov_id
+ prov_version.item_id, prov_version.provider_id
)
all_versions[prov_version.item_id] = prov_track
async def _get_provider_dynamic_tracks(
self,
item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 25,
):
"""Generate a dynamic list of tracks based on the track."""
- prov = self.mass.music.get_provider(provider_id or provider)
+ prov = self.mass.music.get_provider(provider_id or provider_type)
if (
not prov
or MusicProviderFeature.SIMILAR_TRACKS not in prov.supported_features
"""Add a new item record to the database."""
assert isinstance(item, Track), "Not a full Track object"
assert item.artists, "Track is missing artist(s)"
- assert item.provider_ids, "Track is missing provider id(s)"
+ assert item.provider_mappings, "Track is missing provider id(s)"
async with self._db_add_lock:
cur_item = None
if overwrite:
metadata = item.metadata
- provider_ids = item.provider_ids
+ provider_mappings = item.provider_mappings
metadata.last_refresh = None
# we store a mapping to artists/albums on the item for easier access/listings
track_artists = await self._get_track_artists(item, overwrite=True)
track_albums = await self._get_track_albums(item, overwrite=True)
else:
metadata = cur_item.metadata.update(item.metadata, item.provider.is_file())
- provider_ids = {*cur_item.provider_ids, *item.provider_ids}
+ provider_mappings = {*cur_item.provider_mappings, *item.provider_mappings}
track_artists = await self._get_track_artists(cur_item, item)
track_albums = await self._get_track_albums(cur_item, item)
"artists": json_serializer(track_artists),
"albums": json_serializer(track_albums),
"metadata": json_serializer(metadata),
- "provider_ids": json_serializer(provider_ids),
+ "provider_mappings": json_serializer(provider_mappings),
"isrc": item.isrc or cur_item.isrc,
},
)
)
if db_album := await self.mass.music.albums.get_db_item_by_prov_id(
- album.item_id, provider=album.provider
+ album.item_id, provider_type=album.provider
):
return ItemMapping.from_item(db_album)
)
if db_artist := await self.mass.music.artists.get_db_item_by_prov_id(
- artist.item_id, provider=artist.provider
+ artist.item_id, provider_type=artist.provider
):
return ItemMapping.from_item(db_artist)
media_from_dict,
)
from music_assistant.models.music_provider import MusicProvider
-from music_assistant.music_providers.filesystem import FileSystemProvider
+from music_assistant.music_providers.filesystem import (
+ LocalFileSystemProvider,
+ SMBFileSystemProvider,
+)
from music_assistant.music_providers.qobuz import QobuzProvider
from music_assistant.music_providers.spotify import SpotifyProvider
from music_assistant.music_providers.tunein import TuneInProvider
from music_assistant.mass import MusicAssistant
PROV_MAP = {
- ProviderType.FILESYSTEM_LOCAL: FileSystemProvider,
+ ProviderType.FILESYSTEM_LOCAL: LocalFileSystemProvider,
+ ProviderType.FILESYSTEM_SMB: SMBFileSystemProvider,
ProviderType.SPOTIFY: SpotifyProvider,
ProviderType.QOBUZ: QobuzProvider,
ProviderType.TUNEIN: TuneInProvider,
async def start_sync(
self,
media_types: Optional[Tuple[MediaType]] = None,
- prov_types: Optional[Tuple[ProviderType]] = None,
+ provider_types: Optional[Tuple[ProviderType]] = None,
schedule: Optional[float] = None,
) -> None:
"""
Start running the sync of all registred providers.
media_types: only sync these media types. None for all.
- prov_types: only sync these provider types. None for all.
+ provider_types: only sync these provider types. None for all.
schedule: schedule syncjob every X hours, set to None for just a manual sync run.
"""
async def do_sync():
while True:
for prov in self.providers:
- if prov_types is not None and prov.type not in prov_types:
+ if provider_types is not None and prov.type not in provider_types:
continue
self.mass.add_job(
prov.sync_library(media_types),
"""Return all (available) music providers."""
return tuple(x for x in self._providers.values() if x.available)
def get_provider(
    self, provider_id_or_type: Union[str, ProviderType]
) -> MusicProvider:
    """Return Music provider by id (or type)."""
    # fast path: the key may directly index the providers mapping
    direct_match = self._providers.get(provider_id_or_type)
    if direct_match:
        return direct_match
    # slow path: match against each provider's type, id or type-value
    candidate = next(
        (
            prov
            for prov in self._providers.values()
            if provider_id_or_type in (prov.type, prov.id, prov.type.value)
        ),
        None,
    )
    if candidate is not None:
        return candidate
    raise ProviderUnavailableError(
        f"Provider {provider_id_or_type} is not available"
    )
async def search(
self,
:param limit: number of items to return in the search (per type).
"""
# include results from all music providers
- provider_ids = [item.id for item in self.providers]
+ provider_ids = (item.id for item in self.providers)
# TODO: sort by name and filter out duplicates ?
return itertools.chain.from_iterable(
await asyncio.gather(
*[
self.search_provider(
- search_query, media_types, provider_id=prov_id, limit=limit
+ search_query, media_types, provider_id=provider_id, limit=limit
)
- for prov_id in provider_ids
+ for provider_id in provider_ids
]
)
)
self,
search_query: str,
media_types: List[MediaType] = MediaType.ALL,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
limit: int = 10,
) -> List[MediaItemType]:
Perform search on given provider.
:param search_query: Search query
- :param provider_id: provider_id of the provider to perform the search on.
+ :param provider_type: type of the provider to perform the search on.
+ :param provider_id: id of the provider to perform the search on.
:param media_types: A list of media_types to include. All types if None.
:param limit: number of items to return in the search (per type).
"""
- assert provider or provider_id, "Provider needs to be supplied"
- prov = self.get_provider(provider_id or provider)
+ assert provider_type or provider_id, "Provider needs to be supplied"
+ prov = self.get_provider(provider_id or provider_type)
if MusicProviderFeature.SEARCH not in prov.supported_features:
return []
self, uri: str, force_refresh: bool = False, lazy: bool = True
) -> MediaItemType:
"""Fetch MediaItem by uri."""
- media_type, provider, item_id = parse_uri(uri)
+ media_type, provider_type, item_id = parse_uri(uri)
return await self.get_item(
item_id=item_id,
media_type=media_type,
- provider=provider,
+ provider_type=provider_type,
force_refresh=force_refresh,
lazy=lazy,
)
self,
item_id: str,
media_type: MediaType,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
force_refresh: bool = False,
lazy: bool = True,
) -> MediaItemType:
"""Get single music item by id and media type."""
- assert provider or provider_id, "provider or provider_id must be supplied"
- if provider == ProviderType.URL or provider_id == "url":
+ assert (
+ provider_type or provider_id
+ ), "provider_type or provider_id must be supplied"
+ if provider_type == ProviderType.URL or provider_id == "url":
# handle special case of 'URL' MusicProvider which allows us to play regular url's
return await self.get_provider(ProviderType.URL).parse_item(item_id)
ctrl = self.get_controller(media_type)
return await ctrl.get(
provider_item_id=item_id,
- provider=provider,
+ provider_type=provider_type,
provider_id=provider_id,
force_refresh=force_refresh,
lazy=lazy,
self,
media_type: MediaType,
provider_item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> None:
"""Add an item to the library."""
ctrl = self.get_controller(media_type)
await ctrl.add_to_library(
- provider_item_id, provider=provider, provider_id=provider_id
+ provider_item_id, provider_type=provider_type, provider_id=provider_id
)
async def remove_from_library(
self,
media_type: MediaType,
provider_item_id: str,
- provider: Optional[ProviderType] = None,
+ provider_type: Optional[ProviderType] = None,
provider_id: Optional[str] = None,
) -> None:
"""Remove item from the library."""
ctrl = self.get_controller(media_type)
await ctrl.remove_from_library(
- provider_item_id, provider=provider, provider_id=provider_id
+ provider_item_id, provider_type=provider_type, provider_id=provider_id
)
async def delete_db_item(
return await self.get_item(
media_item.item_id,
media_item.media_type,
- provider=media_item.provider,
+ provider_type=media_item.provider,
force_refresh=True,
lazy=False,
)
)
async def set_track_loudness(
- self, item_id: str, provider: ProviderType, loudness: int
+ self, item_id: str, provider_type: ProviderType, loudness: int
):
"""List integrated loudness for a track in db."""
await self.mass.database.insert(
TABLE_TRACK_LOUDNESS,
- {"item_id": item_id, "provider": provider.value, "loudness": loudness},
+ {"item_id": item_id, "provider": provider_type.value, "loudness": loudness},
allow_replace=True,
)
async def get_track_loudness(
- self, provider_item_id: str, provider: ProviderType
+ self, provider_item_id: str, provider_type: ProviderType
) -> float | None:
"""Get integrated loudness for a track in db."""
if result := await self.mass.database.get_row(
TABLE_TRACK_LOUDNESS,
{
"item_id": provider_item_id,
- "provider": provider.value,
+ "provider": provider_type.value,
},
):
return result["loudness"]
return None
- async def get_provider_loudness(self, provider: ProviderType) -> float | None:
+ async def get_provider_loudness(self, provider_type: ProviderType) -> float | None:
"""Get average integrated loudness for tracks of given provider."""
all_items = []
- if provider == ProviderType.URL:
+ if provider_type == ProviderType.URL:
# this is not a very good idea for random urls
return None
for db_row in await self.mass.database.get_rows(
TABLE_TRACK_LOUDNESS,
{
- "provider": provider.value,
+ "provider": provider_type.value,
},
):
all_items.append(db_row["loudness"])
return statistics.fmean(all_items)
return None
- async def mark_item_played(self, item_id: str, provider: ProviderType):
+ async def mark_item_played(self, item_id: str, provider_type: ProviderType):
"""Mark item as played in playlog."""
timestamp = utc_timestamp()
await self.mass.database.insert(
TABLE_PLAYLOG,
- {"item_id": item_id, "provider": provider.value, "timestamp": timestamp},
+ {
+ "item_id": item_id,
+ "provider": provider_type.value,
+ "timestamp": timestamp,
+ },
allow_replace=True,
)
cur_providers = list(self._providers.keys())
removed_providers = {x for x in prev_providers if x not in cur_providers}
- for prov_id in removed_providers:
+ for provider_id in removed_providers:
# clean cache items from deleted provider(s)
- await self.mass.cache.clear(prov_id)
+ await self.mass.cache.clear(provider_id)
# cleanup media items from db matched to deleted provider
for ctrl in (
self.mass.music.albums,
self.mass.music.artists,
):
- prov_items = await ctrl.get_db_items_by_prov_id(provider_id=prov_id)
+ prov_items = await ctrl.get_db_items_by_prov_id(provider_id=provider_id)
for item in prov_items:
- await ctrl.remove_prov_mapping(item.item_id, prov_id)
+ await ctrl.remove_prov_mapping(item.item_id, provider_id)
await self.mass.cache.set("prov_ids", cur_providers)
if preview := track.metadata.preview:
return preview
enc_track_id = urllib.parse.quote(track_id)
- return f"{self.base_url}/preview?provider_id={provider.value}&item_id={enc_track_id}"
+ return (
+ f"{self.base_url}/preview?provider={provider.value}&item_id={enc_track_id}"
+ )
async def setup(self) -> None:
"""Async initialize of module."""
async def serve_preview(self, request: web.Request):
    """Serve short preview sample.

    Expects `provider` and `item_id` query parameters, as emitted by the
    preview-url builder.
    """
    # NOTE: must read the same query param name ("provider") that the
    # preview-url builder emits; reading "provider_mapping" here would
    # raise KeyError for every preview request.
    provider = request.query["provider"]
    item_id = urllib.parse.unquote(request.query["item_id"])
    resp = web.StreamResponse(
        status=200, reason="OK", headers={"Content-Type": "audio/mp3"}
    )
    await resp.prepare(request)
    async for chunk in get_preview_stream(self.mass, provider, item_id):
        await resp.write(chunk)
    return resp
full_item = await mass.music.get_item_by_uri(queue_item.uri)
# sort by quality and check track availability
for prov_media in sorted(
- full_item.provider_ids, key=lambda x: x.quality or 0, reverse=True
+ full_item.provider_mappings, key=lambda x: x.quality or 0, reverse=True
):
if not prov_media.available:
continue
# get streamdetails from provider
- music_prov = mass.music.get_provider(prov_media.prov_id)
+ music_prov = mass.music.get_provider(prov_media.provider_id)
if not music_prov or not music_prov.available:
continue # provider temporary unavailable ?
try:
async def get_preview_stream(
mass: MusicAssistant,
- provider_id: str,
+ provider_mapping: str,
track_id: str,
) -> AsyncGenerator[bytes, None]:
"""Create a 30 seconds preview audioclip for the given streamdetails."""
- music_prov = mass.music.get_provider(provider_id)
+ music_prov = mass.music.get_provider(provider_mapping)
streamdetails = await music_prov.get_stream_details(track_id)
):
return True
- left_prov_ids = getattr(left_item, "provider_ids", None)
- right_prov_ids = getattr(right_item, "provider_ids", None)
+ left_prov_ids = getattr(left_item, "provider_mappings", None)
+ right_prov_ids = getattr(right_item, "provider_mappings", None)
if left_prov_ids is not None:
- for prov_l in left_item.provider_ids:
+ for prov_l in left_item.provider_mappings:
if (
- prov_l.prov_type == right_item.provider
+ prov_l.provider_type == right_item.provider
and prov_l.item_id == right_item.item_id
):
return True
if right_prov_ids is not None:
- for prov_r in right_item.provider_ids:
+ for prov_r in right_item.provider_mappings:
if (
- prov_r.prov_type == left_item.provider
+ prov_r.provider_type == left_item.provider
and prov_r.item_id == left_item.item_id
):
return True
if left_prov_ids is not None and right_prov_ids is not None:
- for prov_l in left_item.provider_ids:
- for prov_r in right_item.provider_ids:
- if prov_l.prov_type != prov_r.prov_type:
+ for prov_l in left_item.provider_mappings:
+ for prov_r in right_item.provider_mappings:
+ if prov_l.provider_type != prov_r.provider_type:
continue
if prov_l.item_id == prov_r.item_id:
return True
DEFAULT_CHUNKSIZE = 128000
DEFAULT_TIMEOUT = 600
+DEFAULT_LIMIT = 64 * 1024 * 1024
# pylint: disable=invalid-name
args = " ".join(self._args)
else:
args = self._args
-
if isinstance(args, str):
self._proc = await asyncio.create_subprocess_shell(
args,
stdin=asyncio.subprocess.PIPE if self._enable_stdin else None,
stdout=asyncio.subprocess.PIPE if self._enable_stdout else None,
stderr=asyncio.subprocess.PIPE if self._enable_stderr else None,
- limit=64 * 1024 * 1024,
+ limit=DEFAULT_LIMIT,
close_fds=True,
)
else:
stdin=asyncio.subprocess.PIPE if self._enable_stdin else None,
stdout=asyncio.subprocess.PIPE if self._enable_stdout else None,
stderr=asyncio.subprocess.PIPE if self._enable_stderr else None,
- limit=64 * 1024 * 102,
+ limit=DEFAULT_LIMIT,
close_fds=True,
)
+
+ # Fix BrokenPipeError due to a race condition
+ # by attaching a default done callback
+ def _done_cb(fut: asyncio.Future):
+ fut.exception()
+
+ self._proc._transport._protocol._stdin_closed.add_done_callback(_done_cb)
+
return self
async def __aexit__(self, exc_type, exc_value, traceback) -> bool:
pass
if self._proc.returncode is None:
# prevent subprocess deadlocking, read remaining bytes
- await self._proc.communicate(b"" if self._enable_stdin else None)
+ await self._proc.communicate()
+ if self._enable_stdout and not self._proc.stdout.at_eof():
+ await self._proc.stdout.read()
+ if self._enable_stderr and not self._proc.stderr.at_eof():
+ await self._proc.stderr.read()
if self._proc.returncode is None:
# just in case?
self._proc.kill()
"""Yield chunks of n size from the process stdout."""
while True:
chunk = await self.readexactly(n)
+ if chunk == b"":
+ break
yield chunk
if len(chunk) < n:
break
async def iter_any(self, n: int = DEFAULT_CHUNKSIZE) -> AsyncGenerator[bytes, None]:
    """Yield chunks from the process stdout as soon as they come in."""
    # read() returns b"" at EOF; empty bytes are falsy, so the walrus
    # expression doubles as the end-of-stream check
    while chunk := await self.read(n):
        yield chunk
async def readexactly(self, n: int, timeout: int = DEFAULT_TIMEOUT) -> bytes:
"""Read exactly n bytes from the process stdout (or less if eof)."""
- if self.closed:
- return b""
try:
async with _timeout(timeout):
return await self._proc.stdout.readexactly(n)
and may return less or equal bytes than requested, but at least one byte.
If EOF was received before any byte is read, this function returns empty byte object.
"""
- if self.closed:
- return b""
async with _timeout(timeout):
return await self._proc.stdout.read(n)
async def write(self, data: bytes) -> None:
    """Write data to process stdin.

    Raises asyncio.CancelledError when the process (or its stdin pipe) is
    already closed, so stream-feeding callers stop cleanly instead of
    hitting BrokenPipeError.
    """
    if self.closed or self._proc.stdin.is_closing():
        raise asyncio.CancelledError()
    self._proc.stdin.write(data)
    # respect flow control: wait until the pipe buffer drains
    await self._proc.stdin.drain()

def write_eof(self) -> None:
    """Write end of file to process stdin."""
    try:
        if self._proc.stdin.can_write_eof():
            self._proc.stdin.write_eof()
    except (
        AttributeError,
        AssertionError,
        BrokenPipeError,
        RuntimeError,
        ConnectionResetError,
    ):
        # already exited, race condition
        return
async def communicate(
self, input_data: Optional[bytes] = None
import json
import os
from dataclasses import dataclass
-from typing import Any, Dict, Optional, Tuple, Union
+from typing import Any, AsyncGenerator, Dict, Optional, Tuple, Union
from requests import JSONDecodeError
return self.tags.get(key, default)
-async def parse_tags(file_path: str) -> AudioTags:
- """Parse tags from a media file."""
+async def parse_tags(input_file: Union[str, AsyncGenerator[bytes, None]]) -> AudioTags:
+ """
+ Parse tags from a media file.
+
+ input_file may be a (local) filename/url accessible by ffmpeg or
+ an AsyncGenerator which yields the file contents as bytes.
+ """
+ file_path = input_file if isinstance(input_file, str) else "-"
args = (
"ffprobe",
)
async with AsyncProcess(
- args, enable_stdin=False, enable_stdout=True, enable_stderr=False
+ args, enable_stdin=file_path == "-", enable_stdout=True, enable_stderr=False
) as proc:
+ if file_path == "-":
+ # feed the file contents to the process
+ async def chunk_feeder():
+ # pylint: disable=protected-access
+ async for chunk in input_file:
+ try:
+ await proc.write(chunk)
+ except BrokenPipeError:
+ break # race-condition: read enough data for tags
+
+ proc.attach_task(chunk_feeder())
+
try:
- res, _ = await proc.communicate()
+ res = await proc.read(-1)
data = json.loads(res)
if error := data.get("error"):
raise InvalidDataError(error["string"])
) from err
-async def get_embedded_image(file_path: str) -> bytes | None:
- """Return embedded image data."""
+async def get_embedded_image(
+ input_file: Union[str, AsyncGenerator[bytes, None]]
+) -> bytes | None:
+ """
+ Return embedded image data.
+
+ input_file may be a (local) filename/url accessible by ffmpeg or
+ an AsyncGenerator which yields the file contents as bytes.
+ """
+ file_path = input_file if isinstance(input_file, str) else "-"
args = (
"ffmpeg",
"-hide_banner",
)
async with AsyncProcess(
- args, enable_stdin=False, enable_stdout=True, enable_stderr=False
+ args, enable_stdin=file_path == "-", enable_stdout=True, enable_stderr=False
) as proc:
-        res, _ = await proc.communicate()
-        return res
+        if file_path == "-":
+            # feed the file contents to stdin in a background task so that
+            # reading stdout below is not blocked while input is written
+            async def chunk_feeder():
+                async for chunk in input_file:
+                    await proc.write(chunk)
+
+            proc.attach_task(chunk_feeder())
+
+        return await proc.read(-1)
"""Helper and utility functions."""
from __future__ import annotations
+import asyncio
import os
import platform
import re
return port
+async def get_ip_from_host(dns_name: str) -> str:
+    """Resolve (first) IP-address for given dns name.
+
+    Runs the blocking gethostbyname lookup in the default executor.
+    On any resolution failure the original name is returned unchanged,
+    so callers can always use the result as a connect target.
+    """
+
+    def _resolve():
+        try:
+            return socket.gethostbyname(dns_name)
+        except Exception:  # pylint: disable=broad-except
+            # fail gracefully!
+            return dns_name
+
+    # get_event_loop() is deprecated inside coroutines; use the running loop
+    loop = asyncio.get_running_loop()
+    return await loop.run_in_executor(None, _resolve)
+
+
def get_folder_size(folderpath):
"""Return folder size in gb."""
total_size = 0
"""Model for the Music Assisant runtime config."""
from dataclasses import dataclass, field
-from typing import List, Optional
+from typing import Dict, List, Optional
from databases import DatabaseURL
username: Optional[str] = None
password: Optional[str] = None
path: Optional[str] = None
+ options: Dict[str, str] = field(default_factory=dict)
# if id is omitted, an id is generated/derived from the other params
id: Optional[str] = None
"""All enums used by the Music Assistant models."""
-from enum import Enum, IntEnum
+from enum import Enum
from typing import List
]
-class MediaQuality(IntEnum):
- """Enum for Media Quality."""
-
- UNKNOWN = 0
- LOSSY_MP3 = 1
- LOSSY_OGG = 2
- LOSSY_AAC = 3
- LOSSY_M4A = 4
- LOSSLESS = 10 # 44.1/48khz 16 bits
- LOSSLESS_HI_RES_1 = 20 # 44.1/48khz 24 bits HI-RES
- LOSSLESS_HI_RES_2 = 21 # 88.2/96khz 24 bits HI-RES
- LOSSLESS_HI_RES_3 = 22 # 176/192khz 24 bits HI-RES
- LOSSLESS_HI_RES_4 = 23 # above 192khz 24 bits HI-RES
-
- @classmethod
- def from_file_type(cls, file_type: str) -> "MediaQuality":
- """Try to parse MediaQuality from file type/extension."""
- if "mp3" in file_type:
- return MediaQuality.LOSSY_MP3
- if "ogg" in file_type:
- return MediaQuality.LOSSY_OGG
- if "aac" in file_type:
- return MediaQuality.LOSSY_AAC
- if "m4a" in file_type:
- return MediaQuality.LOSSY_M4A
- if "flac" in file_type:
- return MediaQuality.LOSSLESS
- if "wav" in file_type:
- return MediaQuality.LOSSLESS
- return MediaQuality.UNKNOWN
-
-
class LinkType(Enum):
"""Enum wth link types."""
ContentType,
ImageType,
LinkType,
- MediaQuality,
MediaType,
ProviderType,
)
MetadataTypes = Union[int, bool, str, List[str]]
-JSON_KEYS = ("artists", "artist", "albums", "metadata", "provider_ids")
+JSON_KEYS = ("artists", "artist", "albums", "metadata", "provider_mappings")
@dataclass(frozen=True)
-class MediaItemProviderId(DataClassDictMixin):
- """Model for a MediaItem's provider id."""
+class ProviderMapping(DataClassDictMixin):
+ """Model for a MediaItem's provider mapping details."""
item_id: str
- prov_type: ProviderType
- prov_id: str
+ provider_type: ProviderType
+ provider_id: str
available: bool = True
- quality: Optional[MediaQuality] = None
+ # quality details (streamable content only)
+ content_type: ContentType = ContentType.UNKNOWN
+ sample_rate: int = 44100
+ bit_depth: int = 16
+ bit_rate: int = 320
+ # optional details to store provider specific details
details: Optional[str] = None
+ # url = link to provider details page if exists
url: Optional[str] = None
+    @property
+    def quality(self) -> int:
+        """Calculate quality score (higher is better).
+
+        Used to pick the best of multiple provider mappings for an item.
+        """
+        if self.content_type.is_lossless():
+            # lossless: sample rate (in kHz) plus bit depth dominate
+            return int(self.sample_rate / 1000) + self.bit_depth
+        # lossy content, bit_rate is most important score
+        # but prefer some codecs over others
+        score = self.bit_rate / 100
+        if self.content_type in (ContentType.AAC, ContentType.OGG):
+            score += 1
+        return int(score)
+
     def __hash__(self):
         """Return custom hash."""
-        return hash((self.prov_id, self.item_id, self.quality))
+        return hash((self.provider_type.value, self.item_id))
@dataclass(frozen=True)
item_id: str
provider: ProviderType
name: str
- provider_ids: Set[MediaItemProviderId] = field(default_factory=set)
+ provider_mappings: Set[ProviderMapping] = field(default_factory=set)
# optional fields below
metadata: MediaItemMetadata = field(default_factory=MediaItemMetadata)
@property
def available(self):
"""Return (calculated) availability."""
- return any(x.available for x in self.provider_ids)
+ return any(x.available for x in self.provider_mappings)
@property
def image(self) -> MediaItemImage | None:
(x for x in self.metadata.images if x.type == ImageType.THUMB), None
)
- def add_provider_id(self, prov_id: MediaItemProviderId) -> None:
+ def add_provider_mapping(self, prov_mapping: ProviderMapping) -> None:
"""Add provider ID, overwrite existing entry."""
- self.provider_ids = {
+ self.provider_mappings = {
x
- for x in self.provider_ids
- if not (x.item_id == prov_id.item_id and x.prov_id == prov_id.prov_id)
+ for x in self.provider_mappings
+ if not (
+ x.item_id == prov_mapping.item_id
+ and x.provider_id == prov_mapping.provider_id
+ )
}
- self.provider_ids.add(prov_id)
+ self.provider_mappings.add(prov_mapping)
@property
def last_refresh(self) -> int:
db_item: MediaItemType = await controller.get_db_item_by_prov_id(
provider_item_id=prov_item.item_id,
- provider=prov_item.provider,
+ provider_type=prov_item.provider,
)
if not db_item:
# dump the item in the db, rich metadata is lazy loaded later
async for db_item in controller.iter_db_items(True):
if db_item.item_id in cur_db_ids:
continue
- for prov_id in db_item.provider_ids:
- prov_types = {x.prov_type for x in db_item.provider_ids}
- if len(prov_types) > 1:
+ for prov_mapping in db_item.provider_mappings:
+ provider_types = {
+ x.provider_type for x in db_item.provider_mappings
+ }
+ if len(provider_types) > 1:
continue
- if prov_id.prov_id != self.id:
+ if prov_mapping.provider_id != self.id:
continue
# only mark the item as not in library and leave the metadata in db
await controller.set_db_library(db_item.item_id, False)
self._radio_source.append(media_item)
# if radio mode enabled, grab the first batch of tracks here
tracks += await ctrl.dynamic_tracks(
- item_id=media_item.item_id, provider=media_item.provider
+ item_id=media_item.item_id, provider_type=media_item.provider
)
elif media_item.media_type in (
MediaType.ARTIST,
MediaType.PLAYLIST,
):
tracks += await ctrl.tracks(
- media_item.item_id, provider=media_item.provider
+ media_item.item_id, provider_type=media_item.provider
)
else:
# single track or radio item
for radio_item in random.sample(self._radio_source, len(self._radio_source)):
ctrl = self.mass.music.get_controller(radio_item.media_type)
tracks += await ctrl.dynamic_tracks(
- item_id=radio_item.item_id, provider=radio_item.provider
+ item_id=radio_item.item_id, provider_type=radio_item.provider
)
# make sure we do not grab too much items
if len(tracks) >= 50:
"""Package with FileSystem Music provider(s)."""
-from .filesystem import FileSystemProvider # noqa
+from .base import FileSystemProviderBase # noqa
+from .local import LocalFileSystemProvider # noqa
+from .smb import SMBFileSystemProvider # noqa
--- /dev/null
+"""Filesystem musicprovider support for MusicAssistant."""
+from __future__ import annotations
+
+import os
+from abc import abstractmethod
+from dataclasses import dataclass
+from time import time
+from typing import AsyncGenerator, List, Optional, Set, Tuple
+
+import xmltodict
+
+from music_assistant.constants import VARIOUS_ARTISTS, VARIOUS_ARTISTS_ID
+from music_assistant.controllers.database import SCHEMA_VERSION
+from music_assistant.helpers.compare import compare_strings
+from music_assistant.helpers.playlists import parse_m3u, parse_pls
+from music_assistant.helpers.tags import parse_tags, split_items
+from music_assistant.helpers.util import parse_title_and_version
+from music_assistant.models.enums import MusicProviderFeature
+from music_assistant.models.errors import MediaNotFoundError, MusicAssistantError
+from music_assistant.models.media_items import (
+ Album,
+ AlbumType,
+ Artist,
+ BrowseFolder,
+ ContentType,
+ ImageType,
+ MediaItemImage,
+ MediaItemType,
+ MediaType,
+ Playlist,
+ ProviderMapping,
+ Radio,
+ StreamDetails,
+ Track,
+)
+from music_assistant.models.music_provider import MusicProvider
+
+from .helpers import get_parentdir
+
+TRACK_EXTENSIONS = ("mp3", "m4a", "mp4", "flac", "wav", "ogg", "aiff", "wma", "dsf")
+PLAYLIST_EXTENSIONS = ("m3u", "pls")
+SUPPORTED_EXTENSIONS = TRACK_EXTENSIONS + PLAYLIST_EXTENSIONS
+IMAGE_EXTENSIONS = ("jpg", "jpeg", "JPG", "JPEG", "png", "PNG", "gif", "GIF")
+
+
+@dataclass
+class FileSystemItem:
+    """
+    Representation of a single file or directory on a filesystem provider.
+
+    - name: Name (not path) of the file (or directory).
+    - path: Relative path to the item on this filesystem provider.
+    - absolute_path: Absolute (provider dependent) path to this item.
+    - is_file: Boolean if item is file (not directory or symlink).
+    - is_dir: Boolean if item is directory (not file).
+    - checksum: Checksum for this path (usually last modified time).
+    - file_size : File size in number of bytes or None if unknown (or not a file).
+    - local_path: Optional local accessible path to this (file)item, supported by ffmpeg.
+    """
+
+    name: str
+    path: str
+    absolute_path: str
+    is_file: bool
+    is_dir: bool
+    checksum: str
+    file_size: Optional[int] = None
+    local_path: Optional[str] = None
+
+    @property
+    def ext(self) -> str | None:
+        """Return the file extension (text after the last dot), or None."""
+        _, sep, extension = self.name.rpartition(".")
+        return extension if sep else None
+
+
+class FileSystemProviderBase(MusicProvider):
+ """
+ Base Implementation of a musicprovider for files.
+
+ Reads ID3 tags from file and falls back to parsing filename.
+ Optionally reads metadata from nfo files and images in folder structure <artist>/<album>.
+ Supports m3u files only for playlists.
+ Supports having URI's from streaming providers within m3u playlist.
+ """
+
+ @abstractmethod
+ async def setup(self) -> bool:
+ """Handle async initialization of the provider."""
+
+ @abstractmethod
+ async def listdir(
+ self, path: str, recursive: bool = False
+ ) -> AsyncGenerator[FileSystemItem, None]:
+ """
+ List contents of a given provider directory/path.
+
+ Parameters:
+ - path: path of the directory (relative or absolute) to list contents of.
+ Empty string for provider's root.
+ - recursive: If True will recursively keep unwrapping subdirectories (scandir equivalent).
+
+ Returns:
+ AsyncGenerator yielding FileSystemItem objects.
+
+ """
+ yield
+
+ @abstractmethod
+ async def resolve(self, file_path: str) -> FileSystemItem:
+ """Resolve (absolute or relative) path to FileSystemItem."""
+
+ @abstractmethod
+ async def exists(self, file_path: str) -> bool:
+ """Return bool is this FileSystem musicprovider has given file/dir."""
+
+ @abstractmethod
+ async def read_file_content(
+ self, file_path: str, seek: int = 0
+ ) -> AsyncGenerator[bytes, None]:
+ """Yield (binary) contents of file in chunks of bytes."""
+ yield
+
+ @abstractmethod
+ async def write_file_content(self, file_path: str, data: bytes) -> None:
+ """Write entire file content as bytes (e.g. for playlists)."""
+
+ ##############################################
+ # DEFAULT/GENERIC IMPLEMENTATION BELOW
+ # should normally not be needed to override
+
+ @property
+ def supported_features(self) -> Tuple[MusicProviderFeature]:
+ """Return the features supported by this MusicProvider."""
+ return (
+ MusicProviderFeature.LIBRARY_ARTISTS,
+ MusicProviderFeature.LIBRARY_ALBUMS,
+ MusicProviderFeature.LIBRARY_TRACKS,
+ MusicProviderFeature.LIBRARY_PLAYLISTS,
+ MusicProviderFeature.PLAYLIST_TRACKS_EDIT,
+ MusicProviderFeature.PLAYLIST_CREATE,
+ MusicProviderFeature.BROWSE,
+ MusicProviderFeature.SEARCH,
+ )
+
+    async def search(
+        self,
+        search_query: str,
+        media_types: Optional[List[MediaType]] = None,
+        limit: int = 5,
+    ) -> List[MediaItemType]:
+        """
+        Perform search on this file based musicprovider.
+
+        - search_query: substring to match against item names.
+        - media_types: restrict search to these media types (None = all types).
+        - limit: NOTE - currently not enforced, all db matches are returned.
+        """
+        result = []
+        # searching the filesystem is slow and unreliable,
+        # instead we make some (slow) freaking queries to the db ;-)
+        params = {"name": f"%{search_query}%", "provider_id": f"%{self.id}%"}
+        if media_types is None or MediaType.TRACK in media_types:
+            query = "SELECT * FROM tracks WHERE name LIKE :name AND provider_mappings LIKE :provider_id"
+            tracks = await self.mass.music.tracks.get_db_items_by_query(query, params)
+            result += tracks
+        if media_types is None or MediaType.ALBUM in media_types:
+            query = "SELECT * FROM albums WHERE name LIKE :name AND provider_mappings LIKE :provider_id"
+            albums = await self.mass.music.albums.get_db_items_by_query(query, params)
+            result += albums
+        if media_types is None or MediaType.ARTIST in media_types:
+            query = "SELECT * FROM artists WHERE name LIKE :name AND provider_mappings LIKE :provider_id"
+            artists = await self.mass.music.artists.get_db_items_by_query(query, params)
+            result += artists
+        if media_types is None or MediaType.PLAYLIST in media_types:
+            query = "SELECT * FROM playlists WHERE name LIKE :name AND provider_mappings LIKE :provider_id"
+            playlists = await self.mass.music.playlists.get_db_items_by_query(
+                query, params
+            )
+            result += playlists
+        return result
+
+ async def browse(self, path: str) -> BrowseFolder:
+ """
+ Browse this provider's items.
+
+ :param path: The path to browse, (e.g. provid://artists).
+ """
+ _, item_path = path.split("://")
+ if not item_path:
+ item_path = ""
+ subitems = []
+ async for item in self.listdir(item_path, recursive=False):
+ if item.is_dir:
+ subitems.append(
+ BrowseFolder(
+ item_id=item.path,
+ provider=self.type,
+ path=f"{self.id}://{item.path}",
+ name=item.name,
+ )
+ )
+ continue
+
+ if "." not in item.name or not item.ext:
+ # skip system files and files without extension
+ continue
+
+ if item.ext in TRACK_EXTENSIONS:
+ if db_item := await self.mass.music.tracks.get_db_item_by_prov_id(
+ item.path, provider_id=self.id
+ ):
+ subitems.append(db_item)
+ elif track := await self.get_track(item.path):
+ # make sure that the item exists
+ # https://github.com/music-assistant/hass-music-assistant/issues/707
+ db_item = await self.mass.music.tracks.add_db_item(track)
+ subitems.append(db_item)
+ continue
+ if item.ext in PLAYLIST_EXTENSIONS:
+ if db_item := await self.mass.music.playlists.get_db_item_by_prov_id(
+ item.path, provider_id=self.id
+ ):
+ subitems.append(db_item)
+ elif playlist := await self.get_playlist(item.path):
+ # make sure that the item exists
+ # https://github.com/music-assistant/hass-music-assistant/issues/707
+ db_item = await self.mass.music.playlists.add_db_item(playlist)
+ subitems.append(db_item)
+ continue
+
+ return BrowseFolder(
+ item_id=item_path,
+ provider=self.type,
+ path=path,
+ name=item_path or self.name,
+ # make sure to sort the resulting listing
+ items=sorted(subitems, key=lambda x: (x.name.casefold(), x.name)),
+ )
+
+ async def sync_library(
+ self, media_types: Optional[Tuple[MediaType]] = None
+ ) -> None:
+ """Run library sync for this provider."""
+ cache_key = f"{self.id}.checksums"
+ prev_checksums = await self.mass.cache.get(cache_key, SCHEMA_VERSION)
+ save_checksum_interval = 0
+ if prev_checksums is None:
+ prev_checksums = {}
+
+ # find all music files in the music directory and all subfolders
+ # we work bottom up, as-in we derive all info from the tracks
+ cur_checksums = {}
+ async for item in self.listdir("", recursive=True):
+
+ if "." not in item.name or not item.ext:
+ # skip system files and files without extension
+ continue
+
+ if item.ext not in SUPPORTED_EXTENSIONS:
+ # unsupported file extension
+ continue
+
+ try:
+ cur_checksums[item.path] = item.checksum
+ if item.checksum == prev_checksums.get(item.path):
+ continue
+
+ if item.ext in TRACK_EXTENSIONS:
+ # add/update track to db
+ track = await self.get_track(item.path)
+ # if the track was edited on disk, always overwrite existing db details
+ overwrite_existing = item.path in prev_checksums
+ await self.mass.music.tracks.add_db_item(
+ track, overwrite_existing=overwrite_existing
+ )
+ elif item.ext in PLAYLIST_EXTENSIONS:
+ playlist = await self.get_playlist(item.path)
+                    # add/update playlist to db
+ playlist.metadata.checksum = item.checksum
+ # playlist is always in-library
+ playlist.in_library = True
+ await self.mass.music.playlists.add_db_item(playlist)
+ except Exception as err: # pylint: disable=broad-except
+ # we don't want the whole sync to crash on one file so we catch all exceptions here
+ self.logger.exception("Error processing %s - %s", item.path, str(err))
+
+ # save checksums every 100 processed items
+ # this allows us to pickup where we leftoff when initial scan gets interrupted
+ if save_checksum_interval == 100:
+ await self.mass.cache.set(cache_key, cur_checksums, SCHEMA_VERSION)
+ save_checksum_interval = 0
+ else:
+ save_checksum_interval += 1
+
+ # store (final) checksums in cache
+ await self.mass.cache.set(cache_key, cur_checksums, SCHEMA_VERSION)
+ # work out deletions
+ deleted_files = set(prev_checksums.keys()) - set(cur_checksums.keys())
+ await self._process_deletions(deleted_files)
+
+ async def _process_deletions(self, deleted_files: Set[str]) -> None:
+ """Process all deletions."""
+ # process deleted tracks/playlists
+ for file_path in deleted_files:
+
+ _, ext = file_path.rsplit(".", 1)
+ if ext not in SUPPORTED_EXTENSIONS:
+ # unsupported file extension
+ continue
+
+ if ext in PLAYLIST_EXTENSIONS:
+ controller = self.mass.music.get_controller(MediaType.PLAYLIST)
+ else:
+ controller = self.mass.music.get_controller(MediaType.TRACK)
+
+ if db_item := await controller.get_db_item_by_prov_id(file_path, self.type):
+ await controller.remove_prov_mapping(db_item.item_id, self.id)
+
+ async def get_artist(self, prov_artist_id: str) -> Artist:
+ """Get full artist details by id."""
+ db_artist = await self.mass.music.artists.get_db_item_by_prov_id(
+ provider_item_id=prov_artist_id, provider_id=self.id
+ )
+ if db_artist is None:
+ raise MediaNotFoundError(f"Artist not found: {prov_artist_id}")
+ if await self.exists(prov_artist_id):
+ # if path exists on disk allow parsing full details to allow refresh of metadata
+ return await self._parse_artist(db_artist.name, artist_path=prov_artist_id)
+ return db_artist
+
+ async def get_album(self, prov_album_id: str) -> Album:
+ """Get full album details by id."""
+ db_album = await self.mass.music.albums.get_db_item_by_prov_id(
+ provider_item_id=prov_album_id, provider_id=self.id
+ )
+ if db_album is None:
+ raise MediaNotFoundError(f"Album not found: {prov_album_id}")
+ if await self.exists(prov_album_id):
+ # if path exists on disk allow parsing full details to allow refresh of metadata
+ return await self._parse_album(
+ db_album.name, prov_album_id, db_album.artists
+ )
+ return db_album
+
+ async def get_track(self, prov_track_id: str) -> Track:
+ """Get full track details by id."""
+ if not await self.exists(prov_track_id):
+ raise MediaNotFoundError(f"Track path does not exist: {prov_track_id}")
+
+ file_item = await self.resolve(prov_track_id)
+
+ # parse tags
+ input_file = file_item.local_path or self.read_file_content(
+ file_item.absolute_path
+ )
+ tags = await parse_tags(input_file)
+
+ name, version = parse_title_and_version(tags.title)
+ track = Track(
+ item_id=file_item.path,
+ provider=self.type,
+ name=name,
+ version=version,
+ )
+
+ # album
+ if tags.album:
+ # work out if we have an album folder
+ album_dir = get_parentdir(file_item.path, tags.album)
+
+ # album artist(s)
+ if tags.album_artists:
+ album_artists = []
+ for index, album_artist_str in enumerate(tags.album_artists):
+ # work out if we have an artist folder
+ artist_dir = get_parentdir(file_item.path, album_artist_str)
+ artist = await self._parse_artist(
+ album_artist_str, artist_path=artist_dir
+ )
+ if not artist.musicbrainz_id:
+ try:
+ artist.musicbrainz_id = tags.musicbrainz_albumartistids[
+ index
+ ]
+ except IndexError:
+ pass
+ album_artists.append(artist)
+ else:
+ # always fallback to various artists as album artist if user did not tag album artist
+ # ID3 tag properly because we must have an album artist
+ self.logger.warning(
+ "%s is missing ID3 tag [albumartist], using %s as fallback",
+ file_item.path,
+ VARIOUS_ARTISTS,
+ )
+ album_artists = [await self._parse_artist(name=VARIOUS_ARTISTS)]
+
+ track.album = await self._parse_album(
+ tags.album,
+ album_dir,
+ artists=album_artists,
+ )
+ else:
+ self.logger.warning("%s is missing ID3 tag [album]", file_item.path)
+
+ # track artist(s)
+ for index, track_artist_str in enumerate(tags.artists):
+ # re-use album artist details if possible
+ if track.album:
+ if artist := next(
+ (x for x in track.album.artists if x.name == track_artist_str), None
+ ):
+ track.artists.append(artist)
+ continue
+ artist = await self._parse_artist(track_artist_str)
+ if not artist.musicbrainz_id:
+ try:
+ artist.musicbrainz_id = tags.musicbrainz_artistids[index]
+ except IndexError:
+ pass
+ track.artists.append(artist)
+
+ # cover image - prefer album image, fallback to embedded
+ if track.album and track.album.image:
+ track.metadata.images = [track.album.image]
+ elif tags.has_cover_image:
+ # we do not actually embed the image in the metadata because that would consume too
+ # much space and bandwidth. Instead we set the filename as value so the image can
+ # be retrieved later in realtime.
+ track.metadata.images = [
+ MediaItemImage(ImageType.THUMB, file_item.path, True)
+ ]
+ if track.album:
+ # set embedded cover on album
+ track.album.metadata.images = track.metadata.images
+
+ # parse other info
+ track.duration = tags.duration or 0
+ track.metadata.genres = tags.genres
+ track.disc_number = tags.disc
+ track.track_number = tags.track
+ track.isrc = tags.get("isrc")
+ track.metadata.copyright = tags.get("copyright")
+ track.metadata.lyrics = tags.get("lyrics")
+ track.musicbrainz_id = tags.musicbrainz_trackid
+ if track.album:
+ if not track.album.musicbrainz_id:
+ track.album.musicbrainz_id = tags.musicbrainz_releasegroupid
+ if not track.album.year:
+ track.album.year = tags.year
+ if not track.album.upc:
+ track.album.upc = tags.get("barcode")
+ # try to parse albumtype
+ if track.album and track.album.album_type == AlbumType.UNKNOWN:
+ album_type = tags.album_type
+ if album_type and "compilation" in album_type:
+ track.album.album_type = AlbumType.COMPILATION
+ elif album_type and "single" in album_type:
+ track.album.album_type = AlbumType.SINGLE
+ elif album_type and "album" in album_type:
+ track.album.album_type = AlbumType.ALBUM
+ elif track.album.sort_name in track.sort_name:
+ track.album.album_type = AlbumType.SINGLE
+
+ # set checksum to invalidate any cached listings
+ checksum_timestamp = str(int(time()))
+ track.metadata.checksum = checksum_timestamp
+ if track.album:
+ track.album.metadata.checksum = checksum_timestamp
+ for artist in track.album.artists:
+ artist.metadata.checksum = checksum_timestamp
+
+ track.add_provider_mapping(
+ ProviderMapping(
+ item_id=file_item.path,
+ provider_type=self.type,
+ provider_id=self.id,
+ content_type=ContentType.try_parse(tags.format),
+ sample_rate=tags.sample_rate,
+ bit_depth=tags.bits_per_sample,
+ bit_rate=tags.bit_rate,
+ )
+ )
+ return track
+
+ async def get_playlist(self, prov_playlist_id: str) -> Playlist:
+ """Get full playlist details by id."""
+ if not await self.exists(prov_playlist_id):
+ raise MediaNotFoundError(
+ f"Playlist path does not exist: {prov_playlist_id}"
+ )
+
+ file_item = await self.resolve(prov_playlist_id)
+ playlist = Playlist(file_item.path, provider=self.type, name=file_item.name)
+ playlist.is_editable = file_item.ext != "pls" # can only edit m3u playlists
+
+ playlist.add_provider_mapping(
+ ProviderMapping(
+ item_id=file_item.path,
+ provider_type=self.type,
+ provider_id=self.id,
+ )
+ )
+ playlist.owner = self._attr_name
+ checksum = f"{SCHEMA_VERSION}.{file_item.checksum}"
+ playlist.metadata.checksum = checksum
+ return playlist
+
+ async def get_album_tracks(self, prov_album_id: str) -> List[Track]:
+ """Get album tracks for given album id."""
+ # filesystem items are always stored in db so we can query the database
+ db_album = await self.mass.music.albums.get_db_item_by_prov_id(
+ prov_album_id, provider_id=self.id
+ )
+ if db_album is None:
+ raise MediaNotFoundError(f"Album not found: {prov_album_id}")
+ # TODO: adjust to json query instead of text search
+ query = f"SELECT * FROM tracks WHERE albums LIKE '%\"{db_album.item_id}\"%'"
+ query += f" AND provider_mappings LIKE '%\"{self.id}\"%'"
+ result = []
+ for track in await self.mass.music.tracks.get_db_items_by_query(query):
+ track.album = db_album
+ if album_mapping := next(
+ (x for x in track.albums if x.item_id == db_album.item_id), None
+ ):
+ track.disc_number = album_mapping.disc_number
+ track.track_number = album_mapping.track_number
+ result.append(track)
+ return sorted(result, key=lambda x: (x.disc_number or 0, x.track_number or 0))
+
+ async def get_playlist_tracks(self, prov_playlist_id: str) -> List[Track]:
+ """Get playlist tracks for given playlist id."""
+ result = []
+ if not await self.exists(prov_playlist_id):
+ raise MediaNotFoundError(
+ f"Playlist path does not exist: {prov_playlist_id}"
+ )
+
+ _, ext = prov_playlist_id.rsplit(".", 1)
+ try:
+ # get playlist file contents
+ playlist_data = b""
+ async for chunk in self.read_file_content(prov_playlist_id):
+ playlist_data += chunk
+ playlist_data = playlist_data.decode("utf-8")
+
+ if ext in ("m3u", "m3u8"):
+ playlist_lines = await parse_m3u(playlist_data)
+ else:
+ playlist_lines = await parse_pls(playlist_data)
+
+ for line_no, playlist_line in enumerate(playlist_lines):
+
+ if media_item := await self._parse_playlist_line(
+ playlist_line, os.path.dirname(prov_playlist_id)
+ ):
+ # use the linenumber as position for easier deletions
+ media_item.position = line_no
+ result.append(media_item)
+
+ except Exception as err: # pylint: disable=broad-except
+ self.logger.warning(
+ "Error while parsing playlist %s", prov_playlist_id, exc_info=err
+ )
+ return result
+
+    async def _parse_playlist_line(
+        self, line: str, playlist_path: str
+    ) -> Track | Radio | None:
+        """Try to parse a track from a playlist line."""
+        try:
+            if "://" in line:
+                # scheme present: generic uri parsing (e.g. streaming provider uri)
+                return await self.mass.music.get_item_by_uri(line)
+            # no scheme: treat the line as an absolute or playlist-relative filename
+            for candidate in (line, os.path.join(playlist_path, line)):
+                if await self.exists(candidate):
+                    return await self.get_track(candidate)
+            # not found as a file either, fall back to generic uri parsing
+            return await self.mass.music.get_item_by_uri(line)
+        except MusicAssistantError as err:
+            self.logger.warning(
+                "Could not parse uri/file %s to track: %s", line, str(err)
+            )
+            return None
+
+ async def add_playlist_tracks(
+ self, prov_playlist_id: str, prov_track_ids: List[str]
+ ) -> None:
+ """Add track(s) to playlist."""
+ if not await self.exists(prov_playlist_id):
+ raise MediaNotFoundError(
+ f"Playlist path does not exist: {prov_playlist_id}"
+ )
+ playlist_data = b""
+ async for chunk in self.read_file_content(prov_playlist_id):
+ playlist_data += chunk
+ playlist_data = playlist_data.decode("utf-8")
+ for uri in prov_track_ids:
+ playlist_data += f"\n{uri}"
+
+ # write playlist file
+ await self.write_file_content(prov_playlist_id, playlist_data.encode("utf-8"))
+
+    async def remove_playlist_tracks(
+        self, prov_playlist_id: str, positions_to_remove: Tuple[int]
+    ) -> None:
+        """Remove track(s) from playlist by (0-based) line position."""
+        if not await self.exists(prov_playlist_id):
+            raise MediaNotFoundError(
+                f"Playlist path does not exist: {prov_playlist_id}"
+            )
+        cur_lines = []
+        _, ext = prov_playlist_id.rsplit(".", 1)
+
+        # get playlist file contents
+        playlist_data = b""
+        async for chunk in self.read_file_content(prov_playlist_id):
+            playlist_data += chunk
+        # decode result must be (re)assigned - bytes are immutable
+        playlist_data = playlist_data.decode("utf-8")
+
+        if ext in ("m3u", "m3u8"):
+            playlist_lines = await parse_m3u(playlist_data)
+        else:
+            playlist_lines = await parse_pls(playlist_data)
+
+        # keep only the lines whose position is not marked for removal
+        for line_no, playlist_line in enumerate(playlist_lines):
+            if line_no not in positions_to_remove:
+                cur_lines.append(playlist_line)
+
+        new_playlist_data = "\n".join(cur_lines)
+        # write playlist file
+        await self.write_file_content(
+            prov_playlist_id, new_playlist_data.encode("utf-8")
+        )
+
+    async def create_playlist(self, name: str) -> Playlist:
+        """Create a new playlist on provider with given name."""
+        # creating a new playlist on the filesystem is as easy
+        # as creating a new (empty) file with the m3u extension...
+        # NOTE: resolve() returns a FileSystemItem (and targets existing paths),
+        # so build the relative path directly and pass that around instead.
+        filename = f"{name}.m3u"
+        await self.write_file_content(filename, b"")
+        playlist = await self.get_playlist(filename)
+        db_playlist = await self.mass.music.playlists.add_db_item(playlist)
+        return db_playlist
+
+ async def get_stream_details(self, item_id: str) -> StreamDetails:
+ """Return the content details for the given track when it will be streamed."""
+ if not await self.exists(item_id):
+ raise MediaNotFoundError(f"Item path does not exist: {item_id}")
+
+ file_item = await self.resolve(item_id)
+
+ # parse tags
+ input_file = file_item.local_path or self.read_file_content(
+ file_item.absolute_path
+ )
+ tags = await parse_tags(input_file)
+
+ return StreamDetails(
+ provider=self.type,
+ item_id=item_id,
+ content_type=ContentType.try_parse(tags.format),
+ media_type=MediaType.TRACK,
+ duration=tags.duration,
+ size=file_item.file_size,
+ sample_rate=tags.sample_rate,
+ bit_depth=tags.bits_per_sample,
+ direct=file_item.local_path,
+ )
+
+    async def get_audio_stream(
+        self, streamdetails: StreamDetails, seek_position: int = 0
+    ) -> AsyncGenerator[bytes, None]:
+        """Return the audio stream for the provider item."""
+        # translate the seek position (seconds) to a byte offset,
+        # assuming a constant bitrate over the whole file
+        seek_bytes = 0
+        if seek_position:
+            assert streamdetails.duration, "Duration required for seek requests"
+            assert streamdetails.size, "Filesize required for seek requests"
+            bytes_per_second = streamdetails.size / streamdetails.duration
+            seek_bytes = int(bytes_per_second * seek_position)
+
+        async for chunk in self.read_file_content(streamdetails.item_id, seek_bytes):
+            yield chunk
+
+    async def _parse_artist(
+        self,
+        name: Optional[str] = None,
+        artist_path: Optional[str] = None,
+    ) -> Artist | None:
+        """Lookup metadata in Artist folder."""
+        assert name or artist_path
+        # derive whichever of name/path is missing from the other
+        if not artist_path:
+            artist_path = name
+        if not name:
+            name = artist_path.split(os.sep)[-1]
+
+        mb_id = (
+            VARIOUS_ARTISTS_ID if compare_strings(name, VARIOUS_ARTISTS) else None
+        )
+        artist = Artist(
+            artist_path,
+            self.type,
+            name,
+            provider_mappings={
+                ProviderMapping(artist_path, self.type, self.id, url=artist_path)
+            },
+            musicbrainz_id=mb_id,
+        )
+
+        if not await self.exists(artist_path):
+            # return basic object if there is no dedicated artist folder
+            return artist
+
+        nfo_file = os.path.join(artist_path, "artist.nfo")
+        if await self.exists(nfo_file):
+            # found NFO file with metadata
+            # https://kodi.wiki/view/NFO_files/Artists
+            raw_nfo = b""
+            async for chunk in self.read_file_content(nfo_file):
+                raw_nfo += chunk
+            parsed = await self.mass.loop.run_in_executor(
+                None, xmltodict.parse, raw_nfo
+            )
+            info = parsed["artist"]
+            artist.name = info.get("title", info.get("name", name))
+            if sort_name := info.get("sortname"):
+                artist.sort_name = sort_name
+            if musicbrainz_id := info.get("musicbrainzartistid"):
+                artist.musicbrainz_id = musicbrainz_id
+            if description := info.get("biography"):
+                artist.metadata.description = description
+            if genre := info.get("genre"):
+                artist.metadata.genres = set(split_items(genre))
+        # find local images
+        artist.metadata.images = await self._get_local_images(artist_path) or None
+
+        return artist
+
+ async def _parse_album(
+ self, name: Optional[str], album_path: Optional[str], artists: List[Artist]
+ ) -> Album | None:
+ """Lookup metadata in Album folder."""
+ assert (name or album_path) and artists
+ if not album_path:
+ # create fake path
+ album_path = artists[0].name + os.sep + name
+
+ if not name:
+ name = album_path.split(os.sep)[-1]
+
+ album = Album(
+ album_path,
+ self.type,
+ name,
+ artists=artists,
+ provider_mappings={
+ ProviderMapping(album_path, self.type, self.id, url=album_path)
+ },
+ )
+
+ if not await self.exists(album_path):
+ # return basic object if there is no dedicated album folder
+ return album
+
+ nfo_file = os.path.join(album_path, "album.nfo")
+ if await self.exists(nfo_file):
+ # found NFO file with metadata
+ # https://kodi.wiki/view/NFO_files/Albums
+ data = b""
+ async for chunk in self.read_file_content(nfo_file):
+ data += chunk
+ info = await self.mass.loop.run_in_executor(None, xmltodict.parse, data)
+ info = info["album"]
+ album.name = info.get("title", info.get("name", name))
+ if sort_name := info.get("sortname"):
+ album.sort_name = sort_name
+ if musicbrainz_id := info.get("musicbrainzreleasegroupid"):
+ album.musicbrainz_id = musicbrainz_id
+ if mb_artist_id := info.get("musicbrainzalbumartistid"):
+ if album.artist and not album.artist.musicbrainz_id:
+ album.artist.musicbrainz_id = mb_artist_id
+ if description := info.get("review"):
+ album.metadata.description = description
+ if year := info.get("year"):
+ album.year = int(year)
+ if genre := info.get("genre"):
+ album.metadata.genres = set(split_items(genre))
+ # parse name/version
+ album.name, album.version = parse_title_and_version(album.name)
+
+ # find local images
+ album.metadata.images = await self._get_local_images(album_path) or None
+
+ return album
+
+ async def _get_local_images(self, folder: str) -> List[MediaItemImage]:
+ """Return local images found in a given folder path."""
+ images = []
+ async for item in self.listdir(folder):
+ if "." not in item.path or item.is_dir:
+ continue
+ for ext in IMAGE_EXTENSIONS:
+ if item.ext != ext:
+ continue
+ try:
+ images.append(MediaItemImage(ImageType(item.name), item.path, True))
+ except ValueError:
+ if "folder" in item.name:
+ images.append(MediaItemImage(ImageType.THUMB, item.path, True))
+ elif "AlbumArt" in item.name:
+ images.append(MediaItemImage(ImageType.THUMB, item.path, True))
+ elif "Artist" in item.name:
+ images.append(MediaItemImage(ImageType.THUMB, item.path, True))
+ return images
+++ /dev/null
-"""Filesystem musicprovider support for MusicAssistant."""
-from __future__ import annotations
-
-import asyncio
-import logging
-import os
-import urllib.parse
-from contextlib import asynccontextmanager
-from time import time
-from typing import AsyncGenerator, List, Optional, Set, Tuple
-
-import aiofiles
-import xmltodict
-from aiofiles.os import wrap
-from aiofiles.threadpool.binary import AsyncFileIO
-
-from music_assistant.constants import VARIOUS_ARTISTS, VARIOUS_ARTISTS_ID
-from music_assistant.helpers.compare import compare_strings
-from music_assistant.helpers.playlists import parse_m3u, parse_pls
-from music_assistant.helpers.tags import parse_tags, split_items
-from music_assistant.helpers.util import create_safe_string, parse_title_and_version
-from music_assistant.models.enums import MusicProviderFeature, ProviderType
-from music_assistant.models.errors import MediaNotFoundError, MusicAssistantError
-from music_assistant.models.media_items import (
- Album,
- AlbumType,
- Artist,
- BrowseFolder,
- ContentType,
- ImageType,
- MediaItemImage,
- MediaItemProviderId,
- MediaItemType,
- MediaQuality,
- MediaType,
- Playlist,
- Radio,
- StreamDetails,
- Track,
-)
-from music_assistant.models.music_provider import MusicProvider
-
-TRACK_EXTENSIONS = ("mp3", "m4a", "mp4", "flac", "wav", "ogg", "aiff", "wma", "dsf")
-PLAYLIST_EXTENSIONS = ("m3u", "pls")
-SUPPORTED_EXTENSIONS = TRACK_EXTENSIONS + PLAYLIST_EXTENSIONS
-IMAGE_EXTENSIONS = ("jpg", "jpeg", "JPG", "JPEG", "png", "PNG", "gif", "GIF")
-SCHEMA_VERSION = 17
-LOGGER = logging.getLogger(__name__)
-
-listdir = wrap(os.listdir)
-isdir = wrap(os.path.isdir)
-isfile = wrap(os.path.isfile)
-
-
-async def scantree(path: str) -> AsyncGenerator[os.DirEntry, None]:
- """Recursively yield DirEntry objects for given directory."""
-
- def is_dir(entry: os.DirEntry) -> bool:
- return entry.is_dir(follow_symlinks=False)
-
- loop = asyncio.get_running_loop()
- for entry in await loop.run_in_executor(None, os.scandir, path):
- if entry.name.startswith("."):
- continue
- if await loop.run_in_executor(None, is_dir, entry):
- try:
- async for subitem in scantree(entry.path):
- yield subitem
- except (OSError, PermissionError) as err:
- LOGGER.warning("Skip folder %s: %s", entry.path, str(err))
- else:
- yield entry
-
-
-def get_parentdir(base_path: str, name: str) -> str | None:
- """Look for folder name in path (to find dedicated artist or album folder)."""
- parentdir = os.path.dirname(base_path)
- for _ in range(3):
- dirname = parentdir.rsplit(os.sep)[-1]
- if compare_strings(name, dirname, False):
- return parentdir
- parentdir = os.path.dirname(parentdir)
- return None
-
-
-class FileSystemProvider(MusicProvider):
- """
- Implementation of a musicprovider for local files.
-
- Reads ID3 tags from file and falls back to parsing filename.
- Optionally reads metadata from nfo files and images in folder structure <artist>/<album>.
- Supports m3u files only for playlists.
- Supports having URI's from streaming providers within m3u playlist.
- """
-
- _attr_name = "Filesystem"
- _attr_type = ProviderType.FILESYSTEM_LOCAL
-
- @property
- def supported_features(self) -> Tuple[MusicProviderFeature]:
- """Return the features supported by this MusicProvider."""
- return (
- MusicProviderFeature.LIBRARY_ARTISTS,
- MusicProviderFeature.LIBRARY_ALBUMS,
- MusicProviderFeature.LIBRARY_TRACKS,
- MusicProviderFeature.LIBRARY_PLAYLISTS,
- MusicProviderFeature.PLAYLIST_TRACKS_EDIT,
- MusicProviderFeature.PLAYLIST_CREATE,
- MusicProviderFeature.BROWSE,
- MusicProviderFeature.SEARCH,
- )
-
- async def setup(self) -> bool:
- """Handle async initialization of the provider."""
-
- if not await isdir(self.config.path):
- raise MediaNotFoundError(
- f"Music Directory {self.config.path} does not exist"
- )
-
- return True
-
- async def search(
- self, search_query: str, media_types=Optional[List[MediaType]], limit: int = 5
- ) -> List[MediaItemType]:
- """Perform search on musicprovider."""
- result = []
- # searching the filesystem is slow and unreliable,
- # instead we make some (slow) freaking queries to the db ;-)
- params = {"name": f"%{search_query}%", "prov_type": f"%{self.type.value}%"}
- if media_types is None or MediaType.TRACK in media_types:
- query = "SELECT * FROM tracks WHERE name LIKE :name AND provider_ids LIKE :prov_type"
- tracks = await self.mass.music.tracks.get_db_items_by_query(query, params)
- result += tracks
- if media_types is None or MediaType.ALBUM in media_types:
- query = "SELECT * FROM albums WHERE name LIKE :name AND provider_ids LIKE :prov_type"
- albums = await self.mass.music.albums.get_db_items_by_query(query, params)
- result += albums
- if media_types is None or MediaType.ARTIST in media_types:
- query = "SELECT * FROM artists WHERE name LIKE :name AND provider_ids LIKE :prov_type"
- artists = await self.mass.music.artists.get_db_items_by_query(query, params)
- result += artists
- if media_types is None or MediaType.PLAYLIST in media_types:
- query = "SELECT * FROM playlists WHERE name LIKE :name AND provider_ids LIKE :prov_type"
- playlists = await self.mass.music.playlists.get_db_items_by_query(
- query, params
- )
- result += playlists
- return result
-
- async def browse(self, path: str) -> BrowseFolder:
- """
- Browse this provider's items.
-
- :param path: The path to browse, (e.g. provid://artists).
- """
- _, sub_path = path.split("://")
- if not sub_path:
- item_path = self.config.path
- else:
- item_path = os.path.join(self.config.path, sub_path)
- subitems = []
- for filename in await listdir(item_path):
- full_path: str = os.path.join(item_path, filename)
- rel_path = full_path.replace(self.config.path + os.sep, "")
- if await isdir(full_path):
- subitems.append(
- BrowseFolder(
- item_id=rel_path,
- provider=self.type,
- path=f"{self.id}://{rel_path}",
- name=filename,
- )
- )
- continue
-
- if "." not in filename or filename.startswith("."):
- # skip system files and files without extension
- continue
-
- _, ext = filename.rsplit(".", 1)
-
- if ext in TRACK_EXTENSIONS:
- item_id = self._get_item_id(full_path)
- if db_item := await self.mass.music.tracks.get_db_item_by_prov_id(
- item_id, provider_id=self.id
- ):
- subitems.append(db_item)
- elif track := await self._parse_track(full_path):
- # make sure that the item exists
- # https://github.com/music-assistant/hass-music-assistant/issues/707
- db_item = await self.mass.music.tracks.add_db_item(track)
- subitems.append(db_item)
- continue
- if ext in PLAYLIST_EXTENSIONS:
- item_id = self._get_item_id(full_path)
- if db_item := await self.mass.music.playlists.get_db_item_by_prov_id(
- item_id, provider_id=self.id
- ):
- subitems.append(db_item)
- elif playlist := await self._parse_playlist(full_path):
- # make sure that the item exists
- # https://github.com/music-assistant/hass-music-assistant/issues/707
- db_item = await self.mass.music.playlists.add_db_item(playlist)
- subitems.append(db_item)
- continue
-
- return BrowseFolder(
- item_id=sub_path,
- provider=self.type,
- path=path,
- name=sub_path or self.name,
- # make sure to sort the resulting listing
- items=sorted(subitems, key=lambda x: (x.name.casefold(), x.name)),
- )
-
- async def sync_library(
- self, media_types: Optional[Tuple[MediaType]] = None
- ) -> None:
- """Run library sync for this provider."""
- cache_key = f"{self.id}.checksums"
- prev_checksums = await self.mass.cache.get(cache_key, SCHEMA_VERSION)
- save_checksum_interval = 0
- if prev_checksums is None:
- prev_checksums = {}
-
- # find all music files in the music directory and all subfolders
- # we work bottom up, as-in we derive all info from the tracks
- cur_checksums = {}
- async for entry in scantree(self.config.path):
-
- if "." not in entry.path or entry.path.startswith("."):
- # skip system files and files without extension
- continue
-
- _, ext = entry.path.rsplit(".", 1)
- if ext not in SUPPORTED_EXTENSIONS:
- # unsupported file extension
- continue
-
- try:
- # mtime is used as file checksum
- stat = await asyncio.get_running_loop().run_in_executor(
- None, entry.stat
- )
- checksum = int(stat.st_mtime)
- cur_checksums[entry.path] = checksum
- if checksum == prev_checksums.get(entry.path):
- continue
-
- if ext in TRACK_EXTENSIONS:
- # add/update track to db
- track = await self._parse_track(entry.path)
- # if the track was edited on disk, always overwrite existing db details
- overwrite_existing = entry.path in prev_checksums
- await self.mass.music.tracks.add_db_item(
- track, overwrite_existing=overwrite_existing
- )
- elif ext in PLAYLIST_EXTENSIONS:
- playlist = await self._parse_playlist(entry.path)
- # add/update] playlist to db
- playlist.metadata.checksum = checksum
- # playlist is always in-library
- playlist.in_library = True
- await self.mass.music.playlists.add_db_item(playlist)
- except Exception as err: # pylint: disable=broad-except
- # we don't want the whole sync to crash on one file so we catch all exceptions here
- self.logger.exception("Error processing %s - %s", entry.path, str(err))
-
- # save checksums every 100 processed items
- # this allows us to pickup where we leftoff when initial scan gets interrupted
- if save_checksum_interval == 100:
- await self.mass.cache.set(cache_key, cur_checksums, SCHEMA_VERSION)
- save_checksum_interval = 0
- else:
- save_checksum_interval += 1
-
- # store (final) checksums in cache
- await self.mass.cache.set(cache_key, cur_checksums, SCHEMA_VERSION)
- # work out deletions
- deleted_files = set(prev_checksums.keys()) - set(cur_checksums.keys())
- await self._process_deletions(deleted_files)
-
- async def _process_deletions(self, deleted_files: Set[str]) -> None:
- """Process all deletions."""
- # process deleted tracks/playlists
- for file_path in deleted_files:
-
- if "." not in file_path or file_path.startswith("."):
- # skip system files and files without extension
- continue
-
- _, ext = file_path.rsplit(".", 1)
- if ext not in SUPPORTED_EXTENSIONS:
- # unsupported file extension
- continue
-
- item_id = self._get_item_id(file_path)
-
- if ext in PLAYLIST_EXTENSIONS:
- controller = self.mass.music.get_controller(MediaType.PLAYLIST)
- else:
- controller = self.mass.music.get_controller(MediaType.TRACK)
-
- if db_item := await controller.get_db_item_by_prov_id(item_id, self.type):
- await controller.remove_prov_mapping(db_item.item_id, self.id)
-
- async def get_artist(self, prov_artist_id: str) -> Artist:
- """Get full artist details by id."""
- db_artist = await self.mass.music.artists.get_db_item_by_prov_id(
- provider_item_id=prov_artist_id, provider_id=self.id
- )
- if db_artist is None:
- raise MediaNotFoundError(f"Artist not found: {prov_artist_id}")
- itempath = await self._get_filepath(MediaType.ARTIST, prov_artist_id)
- if await self.exists(itempath):
- # if path exists on disk allow parsing full details to allow refresh of metadata
- return await self._parse_artist(db_artist.name, artist_path=itempath)
- return db_artist
-
- async def get_album(self, prov_album_id: str) -> Album:
- """Get full album details by id."""
- db_album = await self.mass.music.albums.get_db_item_by_prov_id(
- provider_item_id=prov_album_id, provider_id=self.id
- )
- if db_album is None:
- raise MediaNotFoundError(f"Album not found: {prov_album_id}")
- itempath = await self._get_filepath(MediaType.ALBUM, prov_album_id)
- if await self.exists(itempath):
- # if path exists on disk allow parsing full details to allow refresh of metadata
- return await self._parse_album(db_album.name, itempath, db_album.artists)
- return db_album
-
- async def get_track(self, prov_track_id: str) -> Track:
- """Get full track details by id."""
- itempath = await self._get_filepath(MediaType.TRACK, prov_track_id)
- return await self._parse_track(itempath)
-
- async def get_playlist(self, prov_playlist_id: str) -> Playlist:
- """Get full playlist details by id."""
- itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
- return await self._parse_playlist(itempath)
-
- async def get_album_tracks(self, prov_album_id: str) -> List[Track]:
- """Get album tracks for given album id."""
- # filesystem items are always stored in db so we can query the database
- db_album = await self.mass.music.albums.get_db_item_by_prov_id(
- prov_album_id, provider_id=self.id
- )
- if db_album is None:
- raise MediaNotFoundError(f"Album not found: {prov_album_id}")
- # TODO: adjust to json query instead of text search
- query = f"SELECT * FROM tracks WHERE albums LIKE '%\"{db_album.item_id}\"%'"
- query += f" AND provider_ids LIKE '%\"{self.type.value}\"%'"
- result = []
- for track in await self.mass.music.tracks.get_db_items_by_query(query):
- track.album = db_album
- if album_mapping := next(
- (x for x in track.albums if x.item_id == db_album.item_id), None
- ):
- track.disc_number = album_mapping.disc_number
- track.track_number = album_mapping.track_number
- result.append(track)
- return sorted(result, key=lambda x: (x.disc_number or 0, x.track_number or 0))
-
- async def get_playlist_tracks(self, prov_playlist_id: str) -> List[Track]:
- """Get playlist tracks for given playlist id."""
- result = []
- playlist_path = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
- if not await self.exists(playlist_path):
- raise MediaNotFoundError(f"Playlist path does not exist: {playlist_path}")
- parentdir = os.path.dirname(playlist_path)
- _, ext = playlist_path.rsplit(".", 1)
- try:
- async with self.open_file(playlist_path, "r") as _file:
- playlist_data = await _file.read()
-
- if ext in ("m3u", "m3u8"):
- playlist_lines = await parse_m3u(playlist_data)
- else:
- playlist_lines = await parse_pls(playlist_data)
-
- for line_no, playlist_line in enumerate(playlist_lines):
-
- if media_item := await self._parse_playlist_line(
- playlist_line, parentdir
- ):
- # use the linenumber as position for easier deletions
- media_item.position = line_no
- result.append(media_item)
-
- except Exception as err: # pylint: disable=broad-except
- self.logger.warning(
- "Error while parsing playlist %s", playlist_path, exc_info=err
- )
- return result
-
- async def _parse_playlist_line(
- self, line: str, playlist_path: str
- ) -> Track | Radio | None:
- """Try to parse a track from a playlist line."""
- try:
- # try to treat uri as (relative) filename
- if "://" not in line:
- for filename in (line, os.path.join(playlist_path, line)):
- if not await self.exists(filename):
- continue
- file_path = await self.resolve(filename)
- return await self._parse_track(file_path)
- # fallback to generic uri parsing
- return await self.mass.music.get_item_by_uri(line)
- except MusicAssistantError as err:
- self.logger.warning(
- "Could not parse uri/file %s to track: %s", line, str(err)
- )
- return None
-
- async def add_playlist_tracks(
- self, prov_playlist_id: str, prov_track_ids: List[str]
- ) -> None:
- """Add track(s) to playlist."""
- itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
- if not await self.exists(itempath):
- raise MediaNotFoundError(f"Playlist path does not exist: {itempath}")
- async with self.open_file(itempath, "r") as _file:
- cur_data = await _file.read()
- async with self.open_file(itempath, "w") as _file:
- await _file.write(cur_data)
- for uri in prov_track_ids:
- await _file.write(f"\n{uri}")
-
- async def remove_playlist_tracks(
- self, prov_playlist_id: str, positions_to_remove: Tuple[int]
- ) -> None:
- """Remove track(s) from playlist."""
- itempath = await self._get_filepath(MediaType.PLAYLIST, prov_playlist_id)
- if not await self.exists(itempath):
- raise MediaNotFoundError(f"Playlist path does not exist: {itempath}")
- cur_lines = []
- async with self.open_file(itempath, "r") as _file:
- for line_no, line in enumerate(await _file.readlines()):
- line = urllib.parse.unquote(line.strip())
- if line_no not in positions_to_remove:
- cur_lines.append(line)
- async with self.open_file(itempath, "w") as _file:
- for uri in cur_lines:
- await _file.write(f"{uri}\n")
-
- async def create_playlist(self, name: str) -> Playlist:
- """Create a new playlist on provider with given name."""
- # creating a new playlist on the filesystem is as easy
- # as creating a new (empty) file with the m3u extension...
- filename = await self.resolve(f"{name}.m3u")
- async with self.open_file(filename, "w") as _file:
- await _file.write("\n")
- playlist = await self._parse_playlist(filename)
- db_playlist = await self.mass.music.playlists.add_db_item(playlist)
- return db_playlist
-
- async def get_stream_details(self, item_id: str) -> StreamDetails:
- """Return the content details for the given track when it will be streamed."""
- itempath = await self._get_filepath(MediaType.TRACK, item_id)
- if not await self.exists(itempath):
- raise MediaNotFoundError(f"Track path does not exist: {itempath}")
-
- metadata = await parse_tags(itempath)
- stat = await self.mass.loop.run_in_executor(None, os.stat, itempath)
-
- return StreamDetails(
- provider=self.type,
- item_id=item_id,
- content_type=ContentType.try_parse(metadata.format),
- media_type=MediaType.TRACK,
- duration=metadata.duration,
- size=stat.st_size,
- sample_rate=metadata.sample_rate,
- bit_depth=metadata.bits_per_sample,
- direct=itempath,
- )
-
- async def _parse_track(self, track_path: str) -> Track:
- """Try to parse a track from a filename by reading its tags."""
-
- if not await self.exists(track_path):
- raise MediaNotFoundError(f"Track path does not exist: {track_path}")
-
- track_item_id = self._get_item_id(track_path)
-
- # parse tags
- tags = await parse_tags(track_path)
-
- name, version = parse_title_and_version(tags.title)
- track = Track(
- item_id=track_item_id,
- provider=self.type,
- name=name,
- version=version,
- )
-
- # album
- if tags.album:
- # work out if we have an album folder
- album_dir = get_parentdir(track_path, tags.album)
-
- # album artist(s)
- if tags.album_artists:
- album_artists = []
- for index, album_artist_str in enumerate(tags.album_artists):
- # work out if we have an artist folder
- artist_dir = get_parentdir(track_path, album_artist_str)
- artist = await self._parse_artist(
- album_artist_str, artist_path=artist_dir
- )
- if not artist.musicbrainz_id:
- try:
- artist.musicbrainz_id = tags.musicbrainz_albumartistids[
- index
- ]
- except IndexError:
- pass
- album_artists.append(artist)
- else:
- # always fallback to various artists as album artist if user did not tag album artist
- # ID3 tag properly because we must have an album artist
- self.logger.warning(
- "%s is missing ID3 tag [albumartist], using %s as fallback",
- track_path,
- VARIOUS_ARTISTS,
- )
- album_artists = [await self._parse_artist(name=VARIOUS_ARTISTS)]
-
- track.album = await self._parse_album(
- tags.album,
- album_dir,
- artists=album_artists,
- )
- else:
- self.logger.warning("%s is missing ID3 tag [album]", track_path)
-
- # track artist(s)
- for index, track_artist_str in enumerate(tags.artists):
- # re-use album artist details if possible
- if track.album:
- if artist := next(
- (x for x in track.album.artists if x.name == track_artist_str), None
- ):
- track.artists.append(artist)
- continue
- artist = await self._parse_artist(track_artist_str)
- if not artist.musicbrainz_id:
- try:
- artist.musicbrainz_id = tags.musicbrainz_artistids[index]
- except IndexError:
- pass
- track.artists.append(artist)
-
- # cover image - prefer album image, fallback to embedded
- if track.album and track.album.image:
- track.metadata.images = [track.album.image]
- elif tags.has_cover_image:
- # we do not actually embed the image in the metadata because that would consume too
- # much space and bandwidth. Instead we set the filename as value so the image can
- # be retrieved later in realtime.
- track.metadata.images = [MediaItemImage(ImageType.THUMB, track_path, True)]
- if track.album:
- # set embedded cover on album
- track.album.metadata.images = track.metadata.images
-
- # parse other info
- track.duration = tags.duration or 0
- track.metadata.genres = tags.genres
- track.disc_number = tags.disc
- track.track_number = tags.track
- track.isrc = tags.get("isrc")
- track.metadata.copyright = tags.get("copyright")
- track.metadata.lyrics = tags.get("lyrics")
- track.musicbrainz_id = tags.musicbrainz_trackid
- if track.album:
- if not track.album.musicbrainz_id:
- track.album.musicbrainz_id = tags.musicbrainz_releasegroupid
- if not track.album.year:
- track.album.year = tags.year
- if not track.album.upc:
- track.album.upc = tags.get("barcode")
- # try to parse albumtype
- if track.album and track.album.album_type == AlbumType.UNKNOWN:
- album_type = tags.album_type
- if album_type and "compilation" in album_type:
- track.album.album_type = AlbumType.COMPILATION
- elif album_type and "single" in album_type:
- track.album.album_type = AlbumType.SINGLE
- elif album_type and "album" in album_type:
- track.album.album_type = AlbumType.ALBUM
- elif track.album.sort_name in track.sort_name:
- track.album.album_type = AlbumType.SINGLE
-
- # set checksum to invalidate any cached listings
- checksum_timestamp = str(int(time()))
- track.metadata.checksum = checksum_timestamp
- if track.album:
- track.album.metadata.checksum = checksum_timestamp
- for artist in track.album.artists:
- artist.metadata.checksum = checksum_timestamp
-
- quality_details = ""
- content_type = ContentType.try_parse(tags.format)
- quality_details = f"{int(tags.bit_rate / 1000)} kbps"
- if content_type == ContentType.MP3:
- quality = MediaQuality.LOSSY_MP3
- elif content_type == ContentType.OGG:
- quality = MediaQuality.LOSSY_OGG
- elif content_type == ContentType.AAC:
- quality = MediaQuality.LOSSY_AAC
- elif content_type == ContentType.M4A:
- quality = MediaQuality.LOSSY_M4A
- elif content_type.is_lossless():
- quality = MediaQuality.LOSSLESS
- quality_details = f"{tags.sample_rate / 1000} Khz / {tags.bit_rate} bit"
- if tags.sample_rate > 192000:
- quality = MediaQuality.LOSSLESS_HI_RES_4
- elif tags.sample_rate > 96000:
- quality = MediaQuality.LOSSLESS_HI_RES_3
- elif tags.sample_rate > 48000:
- quality = MediaQuality.LOSSLESS_HI_RES_2
- elif tags.bits_per_sample > 16:
- quality = MediaQuality.LOSSLESS_HI_RES_1
- else:
- quality = MediaQuality.UNKNOWN
-
- track.add_provider_id(
- MediaItemProviderId(
- item_id=track_item_id,
- prov_type=self.type,
- prov_id=self.id,
- quality=quality,
- details=quality_details,
- url=track_path,
- )
- )
- return track
-
- async def _parse_artist(
- self,
- name: Optional[str] = None,
- artist_path: Optional[str] = None,
- ) -> Artist | None:
- """Lookup metadata in Artist folder."""
- assert name or artist_path
- if not artist_path:
- # create fake path
- artist_path = os.path.join(self.config.path, name)
-
- artist_item_id = self._get_item_id(artist_path)
- if not name:
- name = artist_path.split(os.sep)[-1]
-
- artist = Artist(
- artist_item_id,
- self.type,
- name,
- provider_ids={
- MediaItemProviderId(artist_item_id, self.type, self.id, url=artist_path)
- },
- musicbrainz_id=VARIOUS_ARTISTS_ID
- if compare_strings(name, VARIOUS_ARTISTS)
- else None,
- )
-
- if not await self.exists(artist_path):
- # return basic object if there is no dedicated artist folder
- return artist
-
- nfo_file = os.path.join(artist_path, "artist.nfo")
- if await self.exists(nfo_file):
- # found NFO file with metadata
- # https://kodi.wiki/view/NFO_files/Artists
- async with self.open_file(nfo_file, "r") as _file:
- data = await _file.read()
- info = await self.mass.loop.run_in_executor(None, xmltodict.parse, data)
- info = info["artist"]
- artist.name = info.get("title", info.get("name", name))
- if sort_name := info.get("sortname"):
- artist.sort_name = sort_name
- if musicbrainz_id := info.get("musicbrainzartistid"):
- artist.musicbrainz_id = musicbrainz_id
- if descripton := info.get("biography"):
- artist.metadata.description = descripton
- if genre := info.get("genre"):
- artist.metadata.genres = set(split_items(genre))
- # find local images
- artist.metadata.images = await self._get_local_images(artist_path) or None
-
- return artist
-
- async def _parse_album(
- self, name: Optional[str], album_path: Optional[str], artists: List[Artist]
- ) -> Album | None:
- """Lookup metadata in Album folder."""
- assert (name or album_path) and artists
- if not album_path:
- # create fake path
- album_path = os.path.join(self.config.path, artists[0].name, name)
-
- album_item_id = self._get_item_id(album_path)
- if not name:
- name = album_path.split(os.sep)[-1]
-
- album = Album(
- album_item_id,
- self.type,
- name,
- artists=artists,
- provider_ids={
- MediaItemProviderId(album_item_id, self.type, self.id, url=album_path)
- },
- )
-
- if not await self.exists(album_path):
- # return basic object if there is no dedicated album folder
- return album
-
- nfo_file = os.path.join(album_path, "album.nfo")
- if await self.exists(nfo_file):
- # found NFO file with metadata
- # https://kodi.wiki/view/NFO_files/Artists
- async with self.open_file(nfo_file) as _file:
- data = await _file.read()
- info = await self.mass.loop.run_in_executor(None, xmltodict.parse, data)
- info = info["album"]
- album.name = info.get("title", info.get("name", name))
- if sort_name := info.get("sortname"):
- album.sort_name = sort_name
- if musicbrainz_id := info.get("musicbrainzreleasegroupid"):
- album.musicbrainz_id = musicbrainz_id
- if mb_artist_id := info.get("musicbrainzalbumartistid"):
- if album.artist and not album.artist.musicbrainz_id:
- album.artist.musicbrainz_id = mb_artist_id
- if description := info.get("review"):
- album.metadata.description = description
- if year := info.get("year"):
- album.year = int(year)
- if genre := info.get("genre"):
- album.metadata.genres = set(split_items(genre))
- # parse name/version
- album.name, album.version = parse_title_and_version(album.name)
-
- # find local images
- album.metadata.images = await self._get_local_images(album_path) or None
-
- return album
-
- async def _parse_playlist(self, playlist_path: str) -> Playlist:
- """Parse playlist from file."""
- playlist_item_id = self._get_item_id(playlist_path)
-
- if not await self.exists(playlist_path):
- raise MediaNotFoundError(f"Playlist path does not exist: {playlist_path}")
-
- playlist_path_base, ext = playlist_path.rsplit(".", 1)
- name = playlist_path_base.split(os.sep)[-1]
-
- playlist = Playlist(playlist_item_id, provider=self.type, name=name)
- playlist.is_editable = ext != "pls" # can only edit m3u playlists
-
- playlist.add_provider_id(
- MediaItemProviderId(
- item_id=playlist_item_id,
- prov_type=self.type,
- prov_id=self.id,
- url=playlist_path,
- )
- )
- playlist.owner = self._attr_name
- getmtime = wrap(os.path.getmtime)
- mtime = await getmtime(playlist_path)
- checksum = f"{SCHEMA_VERSION}.{int(mtime)}"
- playlist.metadata.checksum = checksum
- return playlist
-
- async def exists(self, file_path: str) -> bool:
- """Return bool is this FileSystem musicprovider has given file/dir."""
- if not file_path:
- return False # guard
- file_path = await self.resolve(file_path)
- _exists = wrap(os.path.exists)
- return await _exists(file_path)
-
- @asynccontextmanager
- async def open_file(self, file_path: str, mode="rb") -> AsyncFileIO:
- """Return (async) handle to given file."""
- # ensure we have a full path and not relative
- if self.config.path not in file_path:
- file_path = os.path.join(self.config.path, file_path)
- file_path = await self.resolve(file_path)
- async with aiofiles.open(file_path, mode) as _file:
- yield _file
-
- async def resolve(self, file_path: str) -> str:
- """Resolve local accessible file."""
- # remote file locations should return a tempfile here so this is future proofing
- if self.config.path not in file_path:
- file_path = os.path.join(self.config.path, file_path)
- return file_path
-
- async def _get_filepath(
- self, media_type: MediaType, prov_item_id: str
- ) -> str | None:
- """Get full filepath on disk for item_id."""
- if prov_item_id is None:
- return None # guard
- # funky sql queries go here ;-)
- table = f"{media_type.value}s"
- query = (
- f"SELECT json_extract(json_each.value, '$.url') as url FROM {table}"
- " ,json_each(provider_ids) WHERE"
- f" json_extract(json_each.value, '$.prov_id') = '{self.id}'"
- f" AND json_extract(json_each.value, '$.item_id') = '{prov_item_id}'"
- )
- for db_row in await self.mass.database.get_rows_from_query(query):
- file_path = db_row["url"]
- # ensure we have a full path and not relative
- if self.config.path not in file_path:
- file_path = os.path.join(self.config.path, file_path)
- return file_path
- return None
-
- def _get_item_id(self, file_path: str) -> str:
- """Create item id from filename."""
- return create_safe_string(file_path.replace(self.config.path, ""))
-
- async def _get_local_images(self, folder: str) -> List[MediaItemImage]:
- """Return local images found in a given folderpath."""
- images = []
- async for _path in scantree(folder):
- if "." not in _path.path or _path.is_dir():
- continue
- for ext in IMAGE_EXTENSIONS:
- if not _path.path.endswith(f".{ext}"):
- continue
- filename = _path.path.rsplit(os.sep, 1)[-1].replace(f".{ext}", "")
- try:
- images.append(MediaItemImage(ImageType(filename), _path.path, True))
- except ValueError:
- if "folder" in filename:
- images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
- elif "AlbumArt" in filename:
- images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
- elif "Artist" in filename:
- images.append(MediaItemImage(ImageType.THUMB, _path.path, True))
- return images
--- /dev/null
+"""Some helpers for Filesystem based Musicproviders."""
+from __future__ import annotations
+
+import asyncio
+import os
+from io import BytesIO
+from typing import Any, AsyncGenerator, Dict
+
+from smb.base import SharedFile, SMBTimeout
+from smb.smb_structs import OperationFailure
+from smb.SMBConnection import SMBConnection
+
+from music_assistant.helpers.compare import compare_strings
+from music_assistant.models.errors import LoginFailed
+
+SERVICE_NAME = "music_assistant"
+
+
+def get_parentdir(base_path: str, name: str) -> str | None:
+    """Walk up to three parent folders of *base_path* looking for one called *name*.
+
+    Used to locate a dedicated artist or album folder; returns the matching
+    folder path, or None when no parent folder matches.
+    """
+    candidate = os.path.dirname(base_path)
+    for _ in range(3):
+        folder_name = candidate.rsplit(os.sep)[-1]
+        if compare_strings(name, folder_name, False):
+            return candidate
+        candidate = os.path.dirname(candidate)
+    return None
+
+
+def get_relative_path(base_path: str, path: str) -> str:
+    """Return *path* relative to *base_path*.
+
+    Strips a leading *base_path* prefix (when present) plus any leading
+    path separator and returns the remainder unchanged.
+    """
+    if path.startswith(base_path):
+        # slice off the prefix; the previous str.split(base_path)[1] also cut
+        # at any *later* occurrence of base_path inside the path and returned
+        # the wrong segment (e.g. base "/music" with path "/music/music/x")
+        path = path[len(base_path):]
+    for sep in ("/", "\\"):
+        if path.startswith(sep):
+            path = path[1:]
+    return path
+
+
+def get_absolute_path(base_path: str, path: str) -> str:
+    """Return the absolute path string for a (possibly relative) path."""
+    # a path already carrying the base prefix is considered absolute as-is
+    return path if path.startswith(base_path) else os.path.join(base_path, path)
+
+
+class AsyncSMB:
+    """Async wrapper around pysmb's blocking SMBConnection.
+
+    Every blocking pysmb call is off-loaded to the default thread executor so
+    it can be awaited from the event loop. Use as an async context manager:
+    the SMB session is established on enter and closed on exit.
+    """
+
+    def __init__(
+        self,
+        remote_name: str,
+        service_name: str,
+        username: str,
+        password: str,
+        target_ip: str,
+        options: Dict[str, Any],
+    ) -> None:
+        """Initialize instance."""
+        self._service_name = service_name
+        self._remote_name = remote_name
+        self._target_ip = target_ip
+        self._username = username
+        self._password = password
+        self._conn = SMBConnection(
+            username=self._username,
+            password=self._password,
+            my_name=SERVICE_NAME,
+            remote_name=self._remote_name,
+            # choose sane default options but allow user to override them via the options dict
+            domain=options.get("domain", ""),
+            use_ntlm_v2=options.get("use_ntlm_v2", False),
+            sign_options=options.get("sign_options", 2),
+            is_direct_tcp=options.get("is_direct_tcp", False),
+        )
+
+    async def list_path(self, path: str) -> list[SharedFile]:
+        """Retrieve a directory listing of files/folders at *path*."""
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(
+            None, self._conn.listPath, self._service_name, path
+        )
+
+    async def get_attributes(self, path: str) -> SharedFile:
+        """Retrieve information about the file at *path* on the *service_name*."""
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(
+            None, self._conn.getAttributes, self._service_name, path
+        )
+
+    async def retrieve_file(
+        self, path: str, offset: int = 0
+    ) -> AsyncGenerator[bytes, None]:
+        """Retrieve file contents, yielded in chunks of ~256 KB starting at *offset*."""
+        loop = asyncio.get_running_loop()
+
+        chunk_size = 256000
+        while True:
+            with BytesIO() as file_obj:
+                await loop.run_in_executor(
+                    None,
+                    self._conn.retrieveFileFromOffset,
+                    self._service_name,
+                    path,
+                    file_obj,
+                    offset,
+                    chunk_size,
+                )
+                file_obj.seek(0)
+                chunk = file_obj.read()
+                yield chunk
+                offset += len(chunk)
+                # a short read signals end-of-file
+                # NOTE(review): when the file size is an exact multiple of
+                # chunk_size, one final empty chunk is yielded before the loop
+                # exits - harmless for consumers that just concatenate.
+                if len(chunk) < chunk_size:
+                    break
+
+    async def write_file(self, path: str, data: bytes) -> None:
+        """Store the contents to the file at *path* (nothing is returned)."""
+        loop = asyncio.get_running_loop()
+        with BytesIO() as file_obj:
+            file_obj.write(data)
+            file_obj.seek(0)
+            await loop.run_in_executor(
+                None,
+                self._conn.storeFile,
+                self._service_name,
+                path,
+                file_obj,
+            )
+
+    async def path_exists(self, path: str) -> bool:
+        """Return True if a file/directory exists at *path* on the share."""
+        loop = asyncio.get_running_loop()
+        try:
+            await loop.run_in_executor(
+                None, self._conn.getAttributes, self._service_name, path
+            )
+        except (OperationFailure, SMBTimeout):
+            # OperationFailure: path missing/inaccessible; SMBTimeout: no answer
+            return False
+        return True
+
+    async def connect(self) -> None:
+        """Connect to the SMB server.
+
+        Raises LoginFailed when the connection or authentication fails.
+        """
+        loop = asyncio.get_running_loop()
+        try:
+            # NOTE(review): SMBConnection.connect returns False on failure; the
+            # assert turns that into an exception but is stripped when Python
+            # runs with -O - consider an explicit check instead.
+            assert (
+                await loop.run_in_executor(None, self._conn.connect, self._target_ip)
+                is True
+            )
+        except Exception as exc:
+            raise LoginFailed(f"SMB Connect failed to {self._remote_name}") from exc
+
+    async def __aenter__(self) -> "AsyncSMB":
+        """Enter context manager."""
+        # connect
+        await self.connect()
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback) -> None:
+        """Exit context manager (close the connection; exceptions propagate)."""
+        self._conn.close()
--- /dev/null
+"""Filesystem musicprovider support for MusicAssistant."""
+from __future__ import annotations
+
+import asyncio
+import os
+import os.path
+from typing import AsyncGenerator
+
+import aiofiles
+from aiofiles.os import wrap
+
+from music_assistant.models.enums import ProviderType
+from music_assistant.models.errors import SetupFailedError
+
+from .base import FileSystemItem, FileSystemProviderBase
+from .helpers import get_absolute_path, get_relative_path
+
+listdir = wrap(os.listdir)
+isdir = wrap(os.path.isdir)
+isfile = wrap(os.path.isfile)
+exists = wrap(os.path.exists)
+
+
+async def create_item(base_path: str, entry: os.DirEntry) -> FileSystemItem:
+    """Create FileSystemItem from os.DirEntry."""
+
+    def _build() -> FileSystemItem:
+        abs_path = get_absolute_path(base_path, entry.path)
+        entry_stat = entry.stat(follow_symlinks=False)
+        return FileSystemItem(
+            name=entry.name,
+            path=get_relative_path(base_path, entry.path),
+            absolute_path=abs_path,
+            is_file=entry.is_file(follow_symlinks=False),
+            is_dir=entry.is_dir(follow_symlinks=False),
+            checksum=str(int(entry_stat.st_mtime)),
+            file_size=entry_stat.st_size,
+            # a local file is by definition locally resolvable
+            local_path=abs_path,
+        )
+
+    # stat/is_file/is_dir may hit the disk, so off-load to the executor
+    return await asyncio.get_running_loop().run_in_executor(None, _build)
+
+
+class LocalFileSystemProvider(FileSystemProviderBase):
+    """Implementation of a musicprovider for local files."""
+
+    _attr_name = "Filesystem"
+    _attr_type = ProviderType.FILESYSTEM_LOCAL
+
+    async def setup(self) -> bool:
+        """Handle async initialization of the provider.
+
+        Raises SetupFailedError when the configured music directory is missing.
+        """
+
+        if not await isdir(self.config.path):
+            raise SetupFailedError(f"Music Directory {self.config.path} does not exist")
+
+        return True
+
+    async def listdir(
+        self, path: str, recursive: bool = False
+    ) -> AsyncGenerator[FileSystemItem, None]:
+        """
+        List contents of a given provider directory/path.
+
+        Parameters:
+        - path: path of the directory (relative or absolute) to list contents of.
+          Empty string for provider's root.
+        - recursive: If True will recursively keep unwrapping subdirectories (scandir equivalent).
+
+        Returns:
+            AsyncGenerator yielding FileSystemItem objects.
+
+        """
+        abs_path = get_absolute_path(self.config.path, path)
+        loop = asyncio.get_running_loop()
+        # os.scandir is blocking IO: run it in the default executor
+        for entry in await loop.run_in_executor(None, os.scandir, abs_path):
+            if entry.name.startswith("."):
+                # skip invalid/system files and dirs
+                continue
+            item = await create_item(self.config.path, entry)
+            if recursive and item.is_dir:
+                try:
+                    async for subitem in self.listdir(item.absolute_path, True):
+                        yield subitem
+                except (OSError, PermissionError) as err:
+                    # unreadable subfolder: log it and continue with the rest
+                    self.logger.warning("Skip folder %s: %s", item.path, str(err))
+            else:
+                yield item
+
+    async def resolve(
+        self, file_path: str, require_local: bool = False
+    ) -> FileSystemItem:
+        """
+        Resolve (absolute or relative) path to FileSystemItem.
+
+        If require_local is True, we prefer to have the `local_path` attribute filled
+        (e.g. with a tempfile), if supported by the provider/item.
+        NOTE(review): require_local is ignored here - a local file is always
+        locally resolvable, so local_path is always set.
+        """
+        absolute_path = get_absolute_path(self.config.path, file_path)
+
+        def _create_item():
+            stat = os.stat(absolute_path, follow_symlinks=False)
+            return FileSystemItem(
+                name=os.path.basename(file_path),
+                path=get_relative_path(self.config.path, file_path),
+                absolute_path=absolute_path,
+                is_dir=os.path.isdir(absolute_path),
+                is_file=os.path.isfile(absolute_path),
+                checksum=str(int(stat.st_mtime)),
+                file_size=stat.st_size,
+                # local filesystem is always local resolvable
+                local_path=absolute_path,
+            )
+
+        # run in executor because strictly taken this may be blocking IO
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, _create_item)
+
+    async def exists(self, file_path: str) -> bool:
+        """Return bool if this FileSystem musicprovider has given file/dir."""
+        if not file_path:
+            return False  # guard
+        abs_path = get_absolute_path(self.config.path, file_path)
+        # calls the module-level (aiofiles-wrapped) os.path.exists;
+        # the method name does not shadow it at runtime
+        return await exists(abs_path)
+
+    async def read_file_content(
+        self, file_path: str, seek: int = 0
+    ) -> AsyncGenerator[bytes, None]:
+        """Yield (binary) contents of file in chunks of bytes."""
+        abs_path = get_absolute_path(self.config.path, file_path)
+        chunk_size = 512000
+        async with aiofiles.open(abs_path, "rb") as _file:
+            if seek:
+                await _file.seek(seek)
+            # yield chunks of data from file
+            while True:
+                data = await _file.read(chunk_size)
+                if not data:
+                    break
+                yield data
+
+    async def write_file_content(self, file_path: str, data: bytes) -> None:
+        """Write entire file content as bytes (e.g. for playlists)."""
+        abs_path = get_absolute_path(self.config.path, file_path)
+        async with aiofiles.open(abs_path, "wb") as _file:
+            await _file.write(data)
--- /dev/null
+"""SMB filesystem provider for Music Assistant."""
+
+import contextvars
+import os
+from contextlib import asynccontextmanager
+from typing import AsyncGenerator
+
+from smb.base import SharedFile
+
+from music_assistant.helpers.util import get_ip_from_host
+from music_assistant.models.enums import ProviderType
+
+from .base import FileSystemItem, FileSystemProviderBase
+from .helpers import AsyncSMB, get_absolute_path, get_relative_path
+
+
+async def create_item(
+    file_path: str, entry: SharedFile, root_path: str
+) -> FileSystemItem:
+    """Create FileSystemItem from smb.SharedFile."""
+    is_dir = entry.isDirectory
+    return FileSystemItem(
+        name=entry.filename,
+        path=get_relative_path(root_path, file_path),
+        absolute_path=get_absolute_path(root_path, file_path),
+        is_file=not is_dir,
+        is_dir=is_dir,
+        checksum=str(int(entry.last_write_time)),
+        file_size=entry.file_size,
+    )
+
+
+smb_conn_ctx = contextvars.ContextVar("smb_conn_ctx", default=None)
+
+
+class SMBFileSystemProvider(FileSystemProviderBase):
+    """Implementation of an SMB File System Provider."""
+
+    _attr_name = "smb"
+    _attr_type = ProviderType.FILESYSTEM_SMB
+    _service_name = ""
+    _root_path = "/"
+    _remote_name = ""
+    _target_ip = ""
+
+    async def setup(self) -> bool:
+        """Handle async initialization of the provider.
+
+        Parses the configured path into remote name, share (service) name and
+        optional root path, resolves the target IP and verifies a connection
+        can be made (connect raises LoginFailed on failure).
+        """
+        # extract params from path, which may be given as
+        # \\machine\share[\path], smb://machine/share[/path] or machine/share[/path]
+        if self.config.path.startswith("\\\\"):
+            path_parts = self.config.path[2:].split("\\", 2)
+        elif self.config.path.startswith("smb://"):
+            path_parts = self.config.path[6:].split("/", 2)
+        else:
+            path_parts = self.config.path.split(os.sep)
+        # NOTE(review): a path without a share part (e.g. just "machine")
+        # raises IndexError below - consider validating with a clear error.
+        self._remote_name = path_parts[0]
+        self._service_name = path_parts[1]
+        if len(path_parts) > 2:
+            self._root_path = os.sep + path_parts[2]
+
+        default_target_ip = await get_ip_from_host(self._remote_name)
+        self._target_ip = self.config.options.get("target_ip", default_target_ip)
+        # opening a connection validates credentials and settings
+        async with self._get_smb_connection():
+            return True
+
+    async def listdir(
+        self,
+        path: str,
+        recursive: bool = False,
+    ) -> AsyncGenerator[FileSystemItem, None]:
+        """
+        List contents of a given provider directory/path.
+
+        Parameters:
+        - path: path of the directory (relative or absolute) to list contents of.
+          Empty string for provider's root.
+        - recursive: If True will recursively keep unwrapping subdirectories (scandir equivalent).
+
+        Returns:
+            AsyncGenerator yielding FileSystemItem objects.
+
+        """
+        abs_path = get_absolute_path(self._root_path, path)
+        async with self._get_smb_connection() as smb_conn:
+            path_result: list[SharedFile] = await smb_conn.list_path(abs_path)
+            for entry in path_result:
+                if entry.filename.startswith("."):
+                    # skip invalid/system files and dirs (also covers "." and "..")
+                    continue
+                file_path = os.path.join(path, entry.filename)
+                item = await create_item(file_path, entry, self._root_path)
+                if recursive and item.is_dir:
+                    # yield sublevel recursively
+                    try:
+                        async for subitem in self.listdir(file_path, True):
+                            yield subitem
+                    except (OSError, PermissionError) as err:
+                        self.logger.warning("Skip folder %s: %s", item.path, str(err))
+                elif item.is_file or item.is_dir:
+                    yield item
+
+    async def resolve(
+        self, file_path: str, require_local: bool = False
+    ) -> FileSystemItem:
+        """
+        Resolve (absolute or relative) path to FileSystemItem.
+
+        The require_local flag (default False) matches the signature of the
+        local filesystem provider; SMB items are not locally resolvable so it
+        is currently ignored (local_path stays unset).
+        """
+        abs_path = get_absolute_path(self._root_path, file_path)
+        async with self._get_smb_connection() as smb_conn:
+            entry: SharedFile = await smb_conn.get_attributes(abs_path)
+            return FileSystemItem(
+                name=file_path,
+                path=get_relative_path(self._root_path, file_path),
+                absolute_path=abs_path,
+                is_file=not entry.isDirectory,
+                is_dir=entry.isDirectory,
+                checksum=str(int(entry.last_write_time)),
+                file_size=entry.file_size,
+            )
+
+    async def exists(self, file_path: str) -> bool:
+        """Return bool if this FileSystem musicprovider has given file/dir."""
+        abs_path = get_absolute_path(self._root_path, file_path)
+        async with self._get_smb_connection() as smb_conn:
+            return await smb_conn.path_exists(abs_path)
+
+    async def read_file_content(
+        self, file_path: str, seek: int = 0
+    ) -> AsyncGenerator[bytes, None]:
+        """Yield (binary) contents of file in chunks of bytes."""
+        abs_path = get_absolute_path(self._root_path, file_path)
+
+        async with self._get_smb_connection() as smb_conn:
+            async for chunk in smb_conn.retrieve_file(abs_path, seek):
+                yield chunk
+
+    async def write_file_content(self, file_path: str, data: bytes) -> None:
+        """Write entire file content as bytes (e.g. for playlists)."""
+        abs_path = get_absolute_path(self._root_path, file_path)
+        async with self._get_smb_connection() as smb_conn:
+            await smb_conn.write_file(abs_path, data)
+
+    @asynccontextmanager
+    async def _get_smb_connection(self) -> AsyncGenerator[AsyncSMB, None]:
+        """Get instance of AsyncSMB."""
+
+        # for a task that consists of multiple steps,
+        # the smb connection may be reused (shared through a contextvar)
+        if existing := smb_conn_ctx.get():
+            yield existing
+            return
+
+        async with AsyncSMB(
+            remote_name=self._remote_name,
+            service_name=self._service_name,
+            username=self.config.username,
+            password=self.config.password,
+            target_ip=self._target_ip,
+            options=self.config.options,
+        ) as smb_conn:
+            token = smb_conn_ctx.set(smb_conn)
+            try:
+                yield smb_conn
+            finally:
+                # always clear the contextvar - even when the consumer raises
+                # at the yield - so a closed connection is never reused later
+                # by the same task (previously the reset was skipped on error)
+                smb_conn_ctx.reset(token)
ContentType,
ImageType,
MediaItemImage,
- MediaItemProviderId,
MediaItemType,
- MediaQuality,
MediaType,
Playlist,
+ ProviderMapping,
StreamDetails,
Track,
)
artist = Artist(
item_id=str(artist_obj["id"]), provider=self.type, name=artist_obj["name"]
)
- artist.add_provider_id(
- MediaItemProviderId(
+ artist.add_provider_mapping(
+ ProviderMapping(
item_id=str(artist_obj["id"]),
- prov_type=self.type,
- prov_id=self.id,
+ provider_type=self.type,
+ provider_id=self.id,
url=artist_obj.get(
"url", f'https://open.qobuz.com/artist/{artist_obj["id"]}'
),
album = Album(
item_id=str(album_obj["id"]), provider=self.type, name=name, version=version
)
- if album_obj["maximum_sampling_rate"] > 192:
- quality = MediaQuality.LOSSLESS_HI_RES_4
- elif album_obj["maximum_sampling_rate"] > 96:
- quality = MediaQuality.LOSSLESS_HI_RES_3
- elif album_obj["maximum_sampling_rate"] > 48:
- quality = MediaQuality.LOSSLESS_HI_RES_2
- elif album_obj["maximum_bit_depth"] > 16:
- quality = MediaQuality.LOSSLESS_HI_RES_1
- elif album_obj.get("format_id", 0) == 5:
- quality = MediaQuality.LOSSY_AAC
- else:
- quality = MediaQuality.LOSSLESS
- album.add_provider_id(
- MediaItemProviderId(
+ album.add_provider_mapping(
+ ProviderMapping(
item_id=str(album_obj["id"]),
- prov_type=self.type,
- prov_id=self.id,
- quality=quality,
+ provider_type=self.type,
+ provider_id=self.id,
+ available=album_obj["streamable"] and album_obj["displayable"],
+ content_type=ContentType.FLAC,
+ sample_rate=album_obj["maximum_sampling_rate"] * 1000,
+ bit_depth=album_obj["maximum_bit_depth"],
url=album_obj.get(
"url", f'https://open.qobuz.com/album/{album_obj["id"]}'
),
- details=f'{album_obj["maximum_sampling_rate"]}kHz {album_obj["maximum_bit_depth"]}bit',
- available=album_obj["streamable"] and album_obj["displayable"],
)
)
track.metadata.explicit = True
if img := self.__get_image(track_obj):
track.metadata.images = [MediaItemImage(ImageType.THUMB, img)]
- # get track quality
- if track_obj["maximum_sampling_rate"] > 192:
- quality = MediaQuality.LOSSLESS_HI_RES_4
- elif track_obj["maximum_sampling_rate"] > 96:
- quality = MediaQuality.LOSSLESS_HI_RES_3
- elif track_obj["maximum_sampling_rate"] > 48:
- quality = MediaQuality.LOSSLESS_HI_RES_2
- elif track_obj["maximum_bit_depth"] > 16:
- quality = MediaQuality.LOSSLESS_HI_RES_1
- elif track_obj.get("format_id", 0) == 5:
- quality = MediaQuality.LOSSY_AAC
- else:
- quality = MediaQuality.LOSSLESS
- track.add_provider_id(
- MediaItemProviderId(
+
+ track.add_provider_mapping(
+ ProviderMapping(
item_id=str(track_obj["id"]),
- prov_type=self.type,
- prov_id=self.id,
- quality=quality,
+ provider_type=self.type,
+ provider_id=self.id,
+ available=track_obj["streamable"] and track_obj["displayable"],
+ content_type=ContentType.FLAC,
+ sample_rate=track_obj["maximum_sampling_rate"] * 1000,
+ bit_depth=track_obj["maximum_bit_depth"],
url=track_obj.get(
"url", f'https://open.qobuz.com/track/{track_obj["id"]}'
),
- details=f'{track_obj["maximum_sampling_rate"]}kHz {track_obj["maximum_bit_depth"]}bit',
- available=track_obj["streamable"] and track_obj["displayable"],
)
)
return track
name=playlist_obj["name"],
owner=playlist_obj["owner"]["name"],
)
- playlist.add_provider_id(
- MediaItemProviderId(
+ playlist.add_provider_mapping(
+ ProviderMapping(
item_id=str(playlist_obj["id"]),
- prov_type=self.type,
- prov_id=self.id,
+ provider_type=self.type,
+ provider_id=self.id,
url=playlist_obj.get(
"url", f'https://open.qobuz.com/playlist/{playlist_obj["id"]}'
),
ContentType,
ImageType,
MediaItemImage,
- MediaItemProviderId,
MediaItemType,
- MediaQuality,
MediaType,
Playlist,
+ ProviderMapping,
StreamDetails,
Track,
)
artist = Artist(
item_id=artist_obj["id"], provider=self.type, name=artist_obj["name"]
)
- artist.add_provider_id(
- MediaItemProviderId(
+ artist.add_provider_mapping(
+ ProviderMapping(
item_id=artist_obj["id"],
- prov_type=self.type,
- prov_id=self.id,
+ provider_type=self.type,
+ provider_id=self.id,
url=artist_obj["external_urls"]["spotify"],
)
)
album.metadata.copyright = album_obj["copyrights"][0]["text"]
if album_obj.get("explicit"):
album.metadata.explicit = album_obj["explicit"]
- album.add_provider_id(
- MediaItemProviderId(
+ album.add_provider_mapping(
+ ProviderMapping(
item_id=album_obj["id"],
- prov_type=self.type,
- prov_id=self.id,
- quality=MediaQuality.LOSSY_OGG,
+ provider_type=self.type,
+ provider_id=self.id,
+ content_type=ContentType.OGG,
+ bit_rate=320,
url=album_obj["external_urls"]["spotify"],
)
)
track.metadata.explicit = True
if track_obj.get("popularity"):
track.metadata.popularity = track_obj["popularity"]
- track.add_provider_id(
- MediaItemProviderId(
+ track.add_provider_mapping(
+ ProviderMapping(
item_id=track_obj["id"],
- prov_type=self.type,
- prov_id=self.id,
- quality=MediaQuality.LOSSY_OGG,
+ provider_type=self.type,
+ provider_id=self.id,
+ content_type=ContentType.OGG,
+ bit_rate=320,
url=track_obj["external_urls"]["spotify"],
available=not track_obj["is_local"] and track_obj["is_playable"],
)
name=playlist_obj["name"],
owner=playlist_obj["owner"]["display_name"],
)
- playlist.add_provider_id(
- MediaItemProviderId(
+ playlist.add_provider_mapping(
+ ProviderMapping(
item_id=playlist_obj["id"],
- prov_type=self.type,
- prov_id=self.id,
+ provider_type=self.type,
+ provider_id=self.id,
url=playlist_obj["external_urls"]["spotify"],
)
)
from music_assistant.helpers.audio import get_radio_stream
from music_assistant.helpers.playlists import fetch_playlist
+from music_assistant.helpers.tags import parse_tags
from music_assistant.helpers.util import create_sort_name
from music_assistant.models.enums import MusicProviderFeature, ProviderType
from music_assistant.models.errors import LoginFailed, MediaNotFoundError
ContentType,
ImageType,
MediaItemImage,
- MediaItemProviderId,
- MediaQuality,
MediaType,
+ ProviderMapping,
Radio,
StreamDetails,
)
# custom url (no stream object present)
url = details["URL"]
item_id = url
- # TODO: parse header of stream for audio quality details?
- quality = MediaQuality.UNKNOWN
+ media_info = await parse_tags(url)
+ content_type = ContentType.try_parse(media_info.format)
+ bit_rate = media_info.bit_rate
else:
url = stream["url"]
item_id = f'{details["preset_id"]}--{stream["media_type"]}'
- if stream["media_type"] == "aac":
- quality = MediaQuality.LOSSY_AAC
- elif stream["media_type"] == "ogg":
- quality = MediaQuality.LOSSY_OGG
- else:
- quality = MediaQuality.LOSSY_MP3
+ content_type = ContentType.try_parse(stream["media_type"])
+ bit_rate = stream.get("bitrate", 128) # TODO !
radio = Radio(item_id=item_id, provider=self.type, name=name)
- radio.add_provider_id(
- MediaItemProviderId(
+ radio.add_provider_mapping(
+ ProviderMapping(
item_id=item_id,
- prov_type=self.type,
- prov_id=self.id,
- quality=quality,
+ provider_type=self.type,
+ provider_id=self.id,
+ content_type=content_type,
+ bit_rate=bit_rate,
details=url,
)
)
from music_assistant.helpers.playlists import fetch_playlist
from music_assistant.helpers.tags import AudioTags, parse_tags
from music_assistant.models.config import MusicProviderConfig
-from music_assistant.models.enums import (
- ContentType,
- ImageType,
- MediaQuality,
- MediaType,
- ProviderType,
-)
+from music_assistant.models.enums import ContentType, ImageType, MediaType, ProviderType
from music_assistant.models.media_items import (
Artist,
MediaItemImage,
- MediaItemProviderId,
MediaItemType,
+ ProviderMapping,
Radio,
StreamDetails,
Track,
artist,
self.type,
artist,
- provider_ids={
- MediaItemProviderId(artist, self.type, self.id, available=False)
+ provider_mappings={
+ ProviderMapping(artist, self.type, self.id, available=False)
},
)
],
)
- quality = MediaQuality.from_file_type(media_info.format)
- media_item.provider_ids = {
- MediaItemProviderId(item_id, self.type, self.id, quality=quality)
+ media_item.provider_mappings = {
+ ProviderMapping(
+ item_id=item_id,
+ provider_type=self.type,
+ provider_id=self.id,
+ content_type=ContentType.try_parse(media_info.format),
+ sample_rate=media_info.sample_rate,
+ bit_depth=media_info.bits_per_sample,
+ bit_rate=media_info.bit_rate,
+ )
}
if media_info.has_cover_image:
media_item.metadata.images = [MediaItemImage(ImageType.THUMB, url, True)]
import pytube
import ytmusicapi
-from music_assistant.models.enums import (
- MediaQuality,
- MusicProviderFeature,
- ProviderType,
-)
+from music_assistant.models.enums import MusicProviderFeature, ProviderType
from music_assistant.models.errors import (
InvalidDataError,
LoginFailed,
ContentType,
ImageType,
MediaItemImage,
- MediaItemProviderId,
MediaItemType,
MediaType,
Playlist,
+ ProviderMapping,
StreamDetails,
Track,
)
else:
album_type = AlbumType.UNKNOWN
album.album_type = album_type
- album.add_provider_id(
- MediaItemProviderId(
- item_id=str(album_id), prov_type=self.type, prov_id=self.id
+ album.add_provider_mapping(
+ ProviderMapping(
+ item_id=str(album_id), provider_type=self.type, provider_id=self.id
)
)
return album
artist.metadata.images = await self._parse_thumbnails(
artist_obj["thumbnails"]
)
- artist.add_provider_id(
- MediaItemProviderId(
+ artist.add_provider_mapping(
+ ProviderMapping(
item_id=str(artist_id),
- prov_type=self.type,
- prov_id=self.id,
+ provider_type=self.type,
+ provider_id=self.id,
url=f"https://music.youtube.com/channel/{artist_id}",
)
)
if playlist_obj.get("privacy") and playlist_obj.get("privacy") == "PRIVATE":
is_editable = True
playlist.is_editable = is_editable
- playlist.add_provider_id(
- MediaItemProviderId(
- item_id=playlist_obj["id"], prov_type=self.type, prov_id=self.id
+ playlist.add_provider_mapping(
+ ProviderMapping(
+ item_id=playlist_obj["id"], provider_type=self.type, provider_id=self.id
)
)
playlist.metadata.checksum = playlist_obj.get("checksum")
available = True
if "isAvailable" in track_obj:
available = track_obj["isAvailable"]
- track.add_provider_id(
- MediaItemProviderId(
+ track.add_provider_mapping(
+ ProviderMapping(
item_id=str(track_obj["videoId"]),
- prov_type=self.type,
- prov_id=self.id,
+ provider_type=self.type,
+ provider_id=self.id,
available=available,
- quality=MediaQuality.LOSSY_M4A,
+ content_type=ContentType.M4A,
)
)
return track
xmltodict>=0.12.0,<=0.13.0
ytmusicapi>=0.22.0,<=0.23.0
pytube>=12.1.0,<=12.2.0
+pysmb>=1.2.8,<=1.3.0