provider_id: Optional[str] = None,
) -> List[Track]:
"""Return album tracks for the given provider album id."""
- album = await self.get(item_id, provider, provider_id)
+ # if provider specific album is requested, return that directly
+ if not (provider == ProviderType.DATABASE or provider_id == "database"):
+ return await self.get_provider_album_tracks(
+ item_id, provider=provider, provider_id=provider_id
+ )
+
# get results from all providers
+ db_album = await self.get_db_item(item_id)
coros = [
self.get_provider_album_tracks(item.item_id, item.prov_id)
- for item in album.provider_ids
+ for item in db_album.provider_ids
]
tracks = itertools.chain.from_iterable(await asyncio.gather(*coros))
# merge duplicates using a dict
final_items: Dict[str, Track] = {}
for track in tracks:
- key = f".{track.name}.{track.version}.{track.disc_number}.{track.track_number}"
+ key = f".{track.name.lower()}.{track.version}.{track.disc_number}.{track.track_number}"
if key in final_items:
final_items[key].provider_ids.update(track.provider_ids)
else:
- track.album = album
+ track.album = db_album
final_items[key] = track
- if album.in_library:
- final_items[key].in_library = True
return list(final_items.values())
async def versions(
)
await resp.prepare(request)
if request.method == "GET":
- # service 60 seconds of silence while player is processing request
- async for chunk in get_silence(60, ContentType.WAV):
+ # service 1 second of silence while player is processing request
+ async for chunk in get_silence(1, ContentType.WAV):
await resp.write(chunk)
return resp
seek_position = self.seek_position
fade_in = self.fade_in
else:
- queue_index = self.queue.get_next_index(queue_index)
+ next_index = self.queue.get_next_index(queue_index)
+ # break here if repeat is enabled
+ if next_index <= queue_index:
+ self.signal_next = True
+ break
+ queue_index = next_index
seek_position = 0
fade_in = False
self.index_in_buffer = queue_index
"wav": ContentType.WAV,
"ogg": ContentType.OGG,
"wma": ContentType.WMA,
+ "aiff": ContentType.AIFF,
}
SCHEMA_VERSION = 17
cur_checksums = {}
async with self.mass.database.get_db() as db:
async for entry in scantree(self.config.path):
-
- # mtime is used as file checksum
- stat = await asyncio.get_running_loop().run_in_executor(
- None, entry.stat
- )
- checksum = int(stat.st_mtime)
- cur_checksums[entry.path] = checksum
- if checksum == prev_checksums.get(entry.path):
- continue
try:
+ # mtime is used as file checksum
+ stat = await asyncio.get_running_loop().run_in_executor(
+ None, entry.stat
+ )
+ checksum = int(stat.st_mtime)
+ cur_checksums[entry.path] = checksum
+ if checksum == prev_checksums.get(entry.path):
+ continue
+
if track := await self._parse_track(entry.path):
# process album
if track.album:
# add/update playlist to db
playlist.metadata.checksum = checksum
await self.mass.music.playlists.add_db_item(playlist, db=db)
- except Exception: # pylint: disable=broad-except
+ except Exception as err: # pylint: disable=broad-except
# we don't want the whole sync to crash on one file so we catch all exceptions here
- self.logger.exception("Error processing %s", entry.path)
+ self.logger.exception(
+ "Error processing %s - %s", entry.path, str(err)
+ )
# save checksums every 50 processed items
# this allows us to pick up where we left off when the initial scan gets interrupted