From: marcelveldt Date: Sat, 18 May 2019 23:05:15 +0000 (+0200) Subject: add basic support for r128 volume leveling X-Git-Url: https://git.kitaultman.com/?a=commitdiff_plain;h=85301544478fa65c8ca296b38c87bbd9b3481552;p=music-assistant-server.git add basic support for r128 volume leveling --- diff --git a/music_assistant/cache.py b/music_assistant/cache.py deleted file mode 100644 index 85945daa..00000000 --- a/music_assistant/cache.py +++ /dev/null @@ -1,237 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -'''provides a simple stateless caching system''' - -import datetime -import time -import sqlite3 -from functools import reduce -import os -from utils import run_periodic, LOGGER, parse_track_title -import functools -import asyncio - - -class Cache(object): - '''basic stateless caching system ''' - _exit = False - _mem_cache = {} - _busy_tasks = [] - _database = None - - def __init__(self, datapath): - '''Initialize our caching class''' - self._datapath = datapath - asyncio.ensure_future(self._do_cleanup()) - LOGGER.debug("Initialized") - - async def get(self, endpoint, checksum=""): - ''' - get object from cache and return the results - endpoint: the (unique) name of the cache object as reference - checkum: optional argument to check if the checksum in the cacheobject matches the checkum provided - ''' - checksum = self._get_checksum(checksum) - cur_time = self._get_timestamp(datetime.datetime.now()) - result = None - # 1: try memory cache first - result = await self._get_mem_cache(endpoint, checksum, cur_time) - # 2: fallback to _database cache - if result is None: - result = await self._get_db_cache(endpoint, checksum, cur_time) - return result - - async def set(self, endpoint, data, checksum="", expiration=datetime.timedelta(days=14)): - ''' - set data in cache - ''' - task_name = "set.%s" % endpoint - self._busy_tasks.append(task_name) - checksum = self._get_checksum(checksum) - expires = self._get_timestamp(datetime.datetime.now() + expiration) - - # 
memory cache - await self._set_mem_cache(endpoint, checksum, expires, data) - - # db cache - if not self._exit: - await self._set_db_cache(endpoint, checksum, expires, data) - - # remove this task from list - self._busy_tasks.remove(task_name) - - async def _get_mem_cache(self, endpoint, checksum, cur_time): - ''' - get cache data from memory cache - ''' - result = None - cachedata = self._mem_cache.get(endpoint) - if cachedata: - cachedata = cachedata - if cachedata[0] > cur_time: - if checksum == None or checksum == cachedata[2]: - result = cachedata[1] - return result - - async def _set_mem_cache(self, endpoint, checksum, expires, data): - ''' - put data in memory cache - ''' - cachedata = (expires, data, checksum) - self._mem_cache[endpoint] = cachedata - - async def _get_db_cache(self, endpoint, checksum, cur_time): - '''get cache data from sqllite database''' - result = None - query = "SELECT expires, data, checksum FROM simplecache WHERE id = ?" - cache_data = self._execute_sql(query, (endpoint,)) - if cache_data: - cache_data = cache_data.fetchone() - if cache_data and cache_data[0] > cur_time: - if checksum == None or cache_data[2] == checksum: - result = eval(cache_data[1]) - # also set result in memory cache for further access - await self._set_mem_cache(endpoint, checksum, cache_data[0], result) - return result - - async def _set_db_cache(self, endpoint, checksum, expires, data): - ''' store cache data in _database ''' - query = "INSERT OR REPLACE INTO simplecache( id, expires, data, checksum) VALUES (?, ?, ?, ?)" - data = repr(data) - self._execute_sql(query, (endpoint, expires, data, checksum)) - - @run_periodic(3600) - async def _do_cleanup(self): - '''perform cleanup task''' - if self._exit: - return - self._busy_tasks.append(__name__) - cur_time = datetime.datetime.now() - cur_timestamp = self._get_timestamp(cur_time) - LOGGER.debug("Running cleanup...") - query = "SELECT id, expires FROM simplecache" - for cache_data in 
self._execute_sql(query).fetchall(): - cache_id = cache_data[0] - cache_expires = cache_data[1] - if self._exit: - return - # always cleanup all memory objects on each interval - self._mem_cache.pop(cache_id, None) - # clean up db cache object only if expired - if cache_expires < cur_timestamp: - query = 'DELETE FROM simplecache WHERE id = ?' - self._execute_sql(query, (cache_id,)) - LOGGER.debug("delete from db %s" % cache_id) - - # compact db - self._execute_sql("VACUUM") - - # remove task from list - self._busy_tasks.remove(__name__) - LOGGER.debug("Auto cleanup done") - - def _get_database(self): - '''get reference to our sqllite _database - performs basic integrity check''' - dbfile = os.path.join(self._datapath, "simplecache.db") - try: - connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None) - connection.execute('SELECT * FROM simplecache LIMIT 1') - return connection - except Exception as error: - # our _database is corrupt or doesn't exist yet, we simply try to recreate it - if os.path.isfile(dbfile): - os.remove(dbfile) - try: - connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None) - connection.execute( - """CREATE TABLE IF NOT EXISTS simplecache( - id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""") - return connection - except Exception as error: - LOGGER.warning("Exception while initializing _database: %s" % str(error)) - return None - - def _execute_sql(self, query, data=None): - '''little wrapper around execute and executemany to just retry a db command if db is locked''' - retries = 0 - result = None - error = None - # always use new db object because we need to be sure that data is available for other simplecache instances - with self._get_database() as _database: - while not retries == 10: - if self._exit: - return None - try: - if isinstance(data, list): - result = _database.executemany(query, data) - elif data: - result = _database.execute(query, data) - else: - result = _database.execute(query) - 
return result - except sqlite3.OperationalError as error: - if "_database is locked" in error: - LOGGER.debug("retrying DB commit...") - retries += 1 - time.sleep(0.5) - else: - break - except Exception as error: - LOGGER.error("_database ERROR ! -- %s" % str(error)) - break - return None - - @staticmethod - def _get_timestamp(date_time): - '''Converts a datetime object to unix timestamp''' - return int(time.mktime(date_time.timetuple())) - - @staticmethod - def _get_checksum(stringinput): - '''get int checksum from string''' - if not stringinput: - return 0 - else: - stringinput = str(stringinput) - return reduce(lambda x, y: x + y, map(ord, stringinput)) - -def use_cache(cache_days=14, cache_hours=8): - def wrapper(func): - @functools.wraps(func) - async def wrapped(*args, **kwargs): - if kwargs.get("ignore_cache"): - return await func(*args, **kwargs) - cache_checksum = kwargs.get("cache_checksum") - method_class = args[0] - method_class_name = method_class.__class__.__name__ - cache_str = "%s.%s" % (method_class_name, func.__name__) - # append args to cache identifier - for item in args[1:]: - if isinstance(item, dict): - for subkey in sorted(list(item.keys())): - subvalue = item[subkey] - cache_str += ".%s%s" %(subkey,subvalue) - else: - cache_str += ".%s" % item - # append kwargs to cache identifier - for key in sorted(list(kwargs.keys())): - if key in ["ignore_cache", "cache_checksum"]: - continue - value = kwargs[key] - if isinstance(value, dict): - for subkey in sorted(list(value.keys())): - subvalue = value[subkey] - cache_str += ".%s%s" %(subkey,subvalue) - else: - cache_str += ".%s%s" %(key,value) - cache_str = cache_str.lower() - cachedata = await method_class.cache.get(cache_str, checksum=cache_checksum) - if cachedata is not None: - return cachedata - else: - result = await func(*args, **kwargs) - await method_class.cache.set(cache_str, result, checksum=cache_checksum, expiration=datetime.timedelta(days=cache_days, hours=cache_hours)) - return result 
- return wrapped - return wrapper diff --git a/music_assistant/database.py b/music_assistant/database.py index 3c856ce5..7e1fcd61 100755 --- a/music_assistant/database.py +++ b/music_assistant/database.py @@ -178,9 +178,10 @@ class Database(): if media_type == MediaType.Playlist: sql_query = 'DELETE FROM playlist_tracks WHERE playlist_id=?;' await db.execute(sql_query, (item_id,)) - if media_type == MediaType.Playlist: sql_query = 'DELETE FROM playlists WHERE playlist_id=?;' await db.execute(sql_query, (item_id,)) + sql_query = 'DELETE FROM provider_mappings WHERE item_id=? AND media_type=? AND provider=?;' + await db.execute(sql_query, (item_id,media_type, provider)) await db.commit() async def artists(self, filter_query=None, limit=100000, offset=0, orderby='name', fulldata=False) -> List[Artist]: diff --git a/music_assistant/main.py b/music_assistant/main.py index a3805a89..e3e0d8d2 100755 --- a/music_assistant/main.py +++ b/music_assistant/main.py @@ -15,11 +15,11 @@ import json import time from database import Database -from metadata import MetaData from utils import run_periodic, LOGGER -from cache import Cache -from music import Music -from player import Player +from modules.metadata import MetaData +from modules.cache import Cache +from modules.music import Music +from modules.player import Player from modules.homeassistant import setup as hass_setup from modules.web import setup as web_setup @@ -27,7 +27,7 @@ class Main(): def __init__(self, datapath): uvloop.install() - self._datapath = datapath + self.datapath = datapath self.parse_config() self.event_loop = asyncio.get_event_loop() self.bg_executor = ThreadPoolExecutor(max_workers=5) @@ -76,8 +76,8 @@ class Main(): def save_config(self): ''' save config to file ''' # backup existing file - conf_file = os.path.join(self._datapath, 'config.json') - conf_file_backup = os.path.join(self._datapath, 'config.json') + conf_file = os.path.join(self.datapath, 'config.json') + conf_file_backup = 
os.path.join(self.datapath, 'config.json') if os.path.isfile(conf_file): shutil.move(conf_file, conf_file_backup) with open(conf_file, 'w') as f: @@ -91,7 +91,7 @@ class Main(): "playerproviders": {}, "player_settings": {} } - conf_file = os.path.join(self._datapath, 'config.json') + conf_file = os.path.join(self.datapath, 'config.json') if os.path.isfile(conf_file): with open(conf_file) as f: data = f.read() diff --git a/music_assistant/metadata.py b/music_assistant/metadata.py deleted file mode 100755 index e3c14d2e..00000000 --- a/music_assistant/metadata.py +++ /dev/null @@ -1,174 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding:utf-8 -*- - -import asyncio -import os -from utils import run_periodic, LOGGER -import json -import aiohttp -from asyncio_throttle import Throttler -from difflib import SequenceMatcher as Matcher -from cache import use_cache -from yarl import URL -import re - -LUCENE_SPECIAL = r'([+\-&|!(){}\[\]\^"~*?:\\\/])' - -class MetaData(): - ''' several helpers to search and store mediadata for mediaitems ''' - - def __init__(self, event_loop, db, cache): - self.event_loop = event_loop - self.db = db - self.cache = cache - self.musicbrainz = MusicBrainz(event_loop, cache) - self.fanarttv = FanartTv(event_loop, cache) - - async def get_artist_metadata(self, mb_artist_id, cur_metadata): - ''' get/update rich metadata for an artist by providing the musicbrainz artist id ''' - metadata = cur_metadata - if not ('fanart' in metadata or 'thumb' in metadata): - res = await self.fanarttv.artist_images(mb_artist_id) - self.merge_metadata(cur_metadata, res) - return metadata - - async def get_mb_artist_id(self, artistname, albumname=None, album_upc=None, trackname=None, track_isrc=None): - ''' retrieve musicbrainz artist id for the given details ''' - LOGGER.debug('searching musicbrainz for %s (albumname: %s - album_upc: %s - trackname: %s - track_isrc: %s)' %(artistname, albumname, album_upc, trackname, track_isrc)) - mb_artist_id = None - if album_upc: - 
mb_artist_id = await self.musicbrainz.search_artist_by_album(artistname, None, album_upc) - if not mb_artist_id and track_isrc: - mb_artist_id = await self.musicbrainz.search_artist_by_track(artistname, None, track_isrc) - if not mb_artist_id and albumname: - mb_artist_id = await self.musicbrainz.search_artist_by_album(artistname, albumname) - if not mb_artist_id and trackname: - mb_artist_id = await self.musicbrainz.search_artist_by_track(artistname, trackname) - LOGGER.debug('Got musicbrainz artist id for artist %s --> %s' %(artistname, mb_artist_id)) - return mb_artist_id - - @staticmethod - def merge_metadata(cur_metadata, new_values): - ''' merge new info into the metadata dict without overwiteing existing values ''' - for key, value in new_values.items(): - if not cur_metadata.get(key): - cur_metadata[key] = value - return cur_metadata - -class MusicBrainz(): - - def __init__(self, event_loop, cache): - self.event_loop = event_loop - self.cache = cache - self.http_session = aiohttp.ClientSession(loop=event_loop, connector=aiohttp.TCPConnector(verify_ssl=False)) - self.throttler = Throttler(rate_limit=1, period=1) - - async def search_artist_by_album(self, artistname, albumname=None, album_upc=None): - ''' retrieve musicbrainz artist id by providing the artist name and albumname or upc ''' - if album_upc: - endpoint = 'release' - params = {'query': 'barcode:%s' % album_upc} - else: - searchartist = re.sub(LUCENE_SPECIAL, r'\\\1', artistname) - searchartist = searchartist.replace('/','').replace('\\','') - searchalbum = re.sub(LUCENE_SPECIAL, r'\\\1', albumname) - endpoint = 'release' - params = {'query': 'artist:"%s" AND release:"%s"' % (searchartist, searchalbum)} - result = await self.get_data(endpoint, params) - if result and result.get('releases'): - for strictness in [1, 0.95, 0.9]: - for item in result['releases']: - if album_upc or Matcher(None, item['title'].lower(), albumname.lower()).ratio() >= strictness: - for artist in item['artist-credit']: - 
artist = artist['artist'] - if Matcher(None, artist['name'].lower(), artistname.lower()).ratio() >= strictness: - return artist['id'] - for item in artist.get('aliases',[]): - if item['name'].lower() == artistname.lower(): - return artist['id'] - return '' - - async def search_artist_by_track(self, artistname, trackname=None, track_isrc=None): - ''' retrieve artist id by providing the artist name and trackname or track isrc ''' - endpoint = 'recording' - searchartist = re.sub(LUCENE_SPECIAL, r'\\\1', artistname) - searchartist = searchartist.replace('/','').replace('\\','') - if track_isrc: - endpoint = 'isrc/%s' % track_isrc - params = {'inc': 'artist-credits'} - else: - searchtrack = re.sub(LUCENE_SPECIAL, r'\\\1', trackname) - endpoint = 'recording' - params = {'query': '"%s" AND artist:"%s"' % (searchtrack, searchartist)} - result = await self.get_data(endpoint, params) - if result and result.get('recordings'): - for strictness in [1, 0.95]: - for item in result['recordings']: - if track_isrc or Matcher(None, item['title'].lower(), trackname.lower()).ratio() >= strictness: - for artist in item['artist-credit']: - artist = artist['artist'] - if Matcher(None, artist['name'].lower(), artistname.lower()).ratio() >= strictness: - return artist['id'] - for item in artist.get('aliases',[]): - if item['name'].lower() == artistname.lower(): - return artist['id'] - return '' - - @use_cache(30) - async def get_data(self, endpoint, params={}): - ''' get data from api''' - url = 'http://musicbrainz.org/ws/2/%s' % endpoint - headers = {'User-Agent': 'Music Assistant/1.0.0 https://github.com/marcelveldt'} - params['fmt'] = 'json' - async with self.throttler: - async with self.http_session.get(url, headers=headers, params=params) as response: - try: - result = await response.json() - except Exception as exc: - msg = await response.text() - LOGGER.exception("%s - %s" % (str(exc), msg)) - result = None - return result - - -class FanartTv(): - - def __init__(self, event_loop, 
cache): - self.event_loop = event_loop - self.cache = cache - self.http_session = aiohttp.ClientSession(loop=event_loop, connector=aiohttp.TCPConnector(verify_ssl=False)) - self.throttler = Throttler(rate_limit=1, period=1) - - async def artist_images(self, mb_artist_id): - ''' retrieve images by musicbrainz artist id ''' - metadata = {} - data = await self.get_data("music/%s" % mb_artist_id) - if data: - if data.get('hdmusiclogo'): - metadata['logo'] = data['hdmusiclogo'][0]["url"] - elif data.get('musiclogo'): - metadata['logo'] = data['musiclogo'][0]["url"] - if data.get('artistbackground'): - count = 0 - for item in data['artistbackground']: - key = "fanart" if count == 0 else "fanart.%s" % count - metadata[key] = item["url"] - if data.get('artistthumb'): - url = data['artistthumb'][0]["url"] - if not '2a96cbd8b46e442fc41c2b86b821562f' in url: - metadata['image'] = url - if data.get('musicbanner'): - metadata['banner'] = data['musicbanner'][0]["url"] - return metadata - - @use_cache(30) - async def get_data(self, endpoint, params={}): - ''' get data from api''' - url = 'http://webservice.fanart.tv/v3/%s' % endpoint - params['api_key'] = '639191cb0774661597f28a47e7e2bad5' - async with self.throttler: - async with self.http_session.get(url, params=params) as response: - result = await response.json() - if 'error' in result and 'limit' in result['error']: - raise Exception(result['error']) - return result diff --git a/music_assistant/models.py b/music_assistant/models.py index be3044b4..313df483 100755 --- a/music_assistant/models.py +++ b/music_assistant/models.py @@ -8,7 +8,7 @@ sys.path.append("..") from utils import run_periodic, LOGGER, parse_track_title from difflib import SequenceMatcher as Matcher import asyncio -from cache import use_cache +from modules.cache import use_cache class MediaType(IntEnum): @@ -99,8 +99,8 @@ class Track(object): self.artists = [] self.provider_ids = [] self.album = None - self.disc_number = 0 - self.track_number = 0 + 
self.disc_number = 1 + self.track_number = 1 self.media_type = MediaType.Track self.in_library = [] self.is_lazy = False @@ -109,7 +109,8 @@ class Track(object): return NotImplemented return (self.name == other.name and self.version == other.version and - self.item_id == other.item_id) + self.item_id == other.item_id and + self.provider == other.provider) def __ne__(self, other): return not self.__eq__(other) @@ -137,6 +138,7 @@ class MusicProvider(): name = 'My great Music provider' # display name prov_id = 'my_provider' # used as id icon = '' + audio_fmt = 'flac' # the audio format used by this provider when streaming def __init__(self, mass): self.mass = mass @@ -275,12 +277,10 @@ class MusicProvider(): prov_album_id = track_details.album.item_id track_details.album = await self.album(prov_album_id, lazy=False) item_id = await self.mass.db.add_track(track_details) - # also fetch same track on all providers + # also fetch same track on all providers (will also get other quality versions) new_track = await self.mass.db.track(item_id) - item_provider_keys = [item['provider'] for item in new_track.provider_ids] for prov_id, provider in self.mass.music.providers.items(): - if not prov_id in item_provider_keys: - await provider.match_track(new_track) + await provider.match_track(new_track) return item_id async def playlist(self, prov_playlist_id) -> Playlist: @@ -437,6 +437,14 @@ class MusicProvider(): async def remove_library(self, prov_item_id, media_type:MediaType): ''' remove item from library ''' raise NotImplementedError + + async def add_playlist_tracks(self, prov_playlist_id, prov_track_ids): + ''' add track(s) to playlist ''' + raise NotImplementedError + + async def remove_playlist_tracks(self, prov_playlist_id, prov_track_ids): + ''' remove track(s) from playlist ''' + raise NotImplementedError class PlayerState(str, Enum): Off = "off" diff --git a/music_assistant/modules/bs1770gain/linux64/bs1770gain b/music_assistant/modules/bs1770gain/linux64/bs1770gain 
new file mode 100755 index 00000000..3d143e56 Binary files /dev/null and b/music_assistant/modules/bs1770gain/linux64/bs1770gain differ diff --git a/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavcodec.so.58 b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavcodec.so.58 new file mode 100755 index 00000000..df615fdd Binary files /dev/null and b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavcodec.so.58 differ diff --git a/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavformat.so.58 b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavformat.so.58 new file mode 100755 index 00000000..88fe2e74 Binary files /dev/null and b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavformat.so.58 differ diff --git a/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavutil.so.56 b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavutil.so.56 new file mode 100755 index 00000000..56c2db9d Binary files /dev/null and b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libavutil.so.56 differ diff --git a/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libsox.so.3 b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libsox.so.3 new file mode 100755 index 00000000..c6d1607b Binary files /dev/null and b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libsox.so.3 differ diff --git a/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libswresample.so.3 b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libswresample.so.3 new file mode 100755 index 00000000..54fefdfc Binary files /dev/null and b/music_assistant/modules/bs1770gain/linux64/bs1770gain-tools/libswresample.so.3 differ diff --git a/music_assistant/modules/bs1770gain/osx/bs1770gain b/music_assistant/modules/bs1770gain/osx/bs1770gain new file mode 100755 index 00000000..38c156a1 Binary files /dev/null and 
b/music_assistant/modules/bs1770gain/osx/bs1770gain differ diff --git a/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avcodec-58.dll b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avcodec-58.dll new file mode 100755 index 00000000..9f311708 Binary files /dev/null and b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avcodec-58.dll differ diff --git a/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avformat-58.dll b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avformat-58.dll new file mode 100755 index 00000000..92029d63 Binary files /dev/null and b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avformat-58.dll differ diff --git a/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avutil-56.dll b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avutil-56.dll new file mode 100755 index 00000000..06ec56e3 Binary files /dev/null and b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/avutil-56.dll differ diff --git a/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/libsox-3.dll b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/libsox-3.dll new file mode 100755 index 00000000..8c3b8a9e Binary files /dev/null and b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/libsox-3.dll differ diff --git a/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/swresample-3.dll b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/swresample-3.dll new file mode 100755 index 00000000..fe43a4a8 Binary files /dev/null and b/music_assistant/modules/bs1770gain/win64/bs1770gain-tools/swresample-3.dll differ diff --git a/music_assistant/modules/bs1770gain/win64/bs1770gain.exe b/music_assistant/modules/bs1770gain/win64/bs1770gain.exe new file mode 100755 index 00000000..27188e8a Binary files /dev/null and b/music_assistant/modules/bs1770gain/win64/bs1770gain.exe differ diff --git a/music_assistant/modules/cache.py b/music_assistant/modules/cache.py new file mode 
100644 index 00000000..85945daa --- /dev/null +++ b/music_assistant/modules/cache.py @@ -0,0 +1,237 @@ +#!/usr/bin/python3 +# -*- coding: utf-8 -*- + +'''provides a simple stateless caching system''' + +import datetime +import time +import sqlite3 +from functools import reduce +import os +from utils import run_periodic, LOGGER, parse_track_title +import functools +import asyncio + + +class Cache(object): + '''basic stateless caching system ''' + _exit = False + _mem_cache = {} + _busy_tasks = [] + _database = None + + def __init__(self, datapath): + '''Initialize our caching class''' + self._datapath = datapath + asyncio.ensure_future(self._do_cleanup()) + LOGGER.debug("Initialized") + + async def get(self, endpoint, checksum=""): + ''' + get object from cache and return the results + endpoint: the (unique) name of the cache object as reference + checkum: optional argument to check if the checksum in the cacheobject matches the checkum provided + ''' + checksum = self._get_checksum(checksum) + cur_time = self._get_timestamp(datetime.datetime.now()) + result = None + # 1: try memory cache first + result = await self._get_mem_cache(endpoint, checksum, cur_time) + # 2: fallback to _database cache + if result is None: + result = await self._get_db_cache(endpoint, checksum, cur_time) + return result + + async def set(self, endpoint, data, checksum="", expiration=datetime.timedelta(days=14)): + ''' + set data in cache + ''' + task_name = "set.%s" % endpoint + self._busy_tasks.append(task_name) + checksum = self._get_checksum(checksum) + expires = self._get_timestamp(datetime.datetime.now() + expiration) + + # memory cache + await self._set_mem_cache(endpoint, checksum, expires, data) + + # db cache + if not self._exit: + await self._set_db_cache(endpoint, checksum, expires, data) + + # remove this task from list + self._busy_tasks.remove(task_name) + + async def _get_mem_cache(self, endpoint, checksum, cur_time): + ''' + get cache data from memory cache + ''' + result = 
None + cachedata = self._mem_cache.get(endpoint) + if cachedata: + cachedata = cachedata + if cachedata[0] > cur_time: + if checksum == None or checksum == cachedata[2]: + result = cachedata[1] + return result + + async def _set_mem_cache(self, endpoint, checksum, expires, data): + ''' + put data in memory cache + ''' + cachedata = (expires, data, checksum) + self._mem_cache[endpoint] = cachedata + + async def _get_db_cache(self, endpoint, checksum, cur_time): + '''get cache data from sqllite database''' + result = None + query = "SELECT expires, data, checksum FROM simplecache WHERE id = ?" + cache_data = self._execute_sql(query, (endpoint,)) + if cache_data: + cache_data = cache_data.fetchone() + if cache_data and cache_data[0] > cur_time: + if checksum == None or cache_data[2] == checksum: + result = eval(cache_data[1]) + # also set result in memory cache for further access + await self._set_mem_cache(endpoint, checksum, cache_data[0], result) + return result + + async def _set_db_cache(self, endpoint, checksum, expires, data): + ''' store cache data in _database ''' + query = "INSERT OR REPLACE INTO simplecache( id, expires, data, checksum) VALUES (?, ?, ?, ?)" + data = repr(data) + self._execute_sql(query, (endpoint, expires, data, checksum)) + + @run_periodic(3600) + async def _do_cleanup(self): + '''perform cleanup task''' + if self._exit: + return + self._busy_tasks.append(__name__) + cur_time = datetime.datetime.now() + cur_timestamp = self._get_timestamp(cur_time) + LOGGER.debug("Running cleanup...") + query = "SELECT id, expires FROM simplecache" + for cache_data in self._execute_sql(query).fetchall(): + cache_id = cache_data[0] + cache_expires = cache_data[1] + if self._exit: + return + # always cleanup all memory objects on each interval + self._mem_cache.pop(cache_id, None) + # clean up db cache object only if expired + if cache_expires < cur_timestamp: + query = 'DELETE FROM simplecache WHERE id = ?' 
+ self._execute_sql(query, (cache_id,)) + LOGGER.debug("delete from db %s" % cache_id) + + # compact db + self._execute_sql("VACUUM") + + # remove task from list + self._busy_tasks.remove(__name__) + LOGGER.debug("Auto cleanup done") + + def _get_database(self): + '''get reference to our sqllite _database - performs basic integrity check''' + dbfile = os.path.join(self._datapath, "simplecache.db") + try: + connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None) + connection.execute('SELECT * FROM simplecache LIMIT 1') + return connection + except Exception as error: + # our _database is corrupt or doesn't exist yet, we simply try to recreate it + if os.path.isfile(dbfile): + os.remove(dbfile) + try: + connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None) + connection.execute( + """CREATE TABLE IF NOT EXISTS simplecache( + id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""") + return connection + except Exception as error: + LOGGER.warning("Exception while initializing _database: %s" % str(error)) + return None + + def _execute_sql(self, query, data=None): + '''little wrapper around execute and executemany to just retry a db command if db is locked''' + retries = 0 + result = None + error = None + # always use new db object because we need to be sure that data is available for other simplecache instances + with self._get_database() as _database: + while not retries == 10: + if self._exit: + return None + try: + if isinstance(data, list): + result = _database.executemany(query, data) + elif data: + result = _database.execute(query, data) + else: + result = _database.execute(query) + return result + except sqlite3.OperationalError as error: + if "_database is locked" in error: + LOGGER.debug("retrying DB commit...") + retries += 1 + time.sleep(0.5) + else: + break + except Exception as error: + LOGGER.error("_database ERROR ! 
-- %s" % str(error)) + break + return None + + @staticmethod + def _get_timestamp(date_time): + '''Converts a datetime object to unix timestamp''' + return int(time.mktime(date_time.timetuple())) + + @staticmethod + def _get_checksum(stringinput): + '''get int checksum from string''' + if not stringinput: + return 0 + else: + stringinput = str(stringinput) + return reduce(lambda x, y: x + y, map(ord, stringinput)) + +def use_cache(cache_days=14, cache_hours=8): + def wrapper(func): + @functools.wraps(func) + async def wrapped(*args, **kwargs): + if kwargs.get("ignore_cache"): + return await func(*args, **kwargs) + cache_checksum = kwargs.get("cache_checksum") + method_class = args[0] + method_class_name = method_class.__class__.__name__ + cache_str = "%s.%s" % (method_class_name, func.__name__) + # append args to cache identifier + for item in args[1:]: + if isinstance(item, dict): + for subkey in sorted(list(item.keys())): + subvalue = item[subkey] + cache_str += ".%s%s" %(subkey,subvalue) + else: + cache_str += ".%s" % item + # append kwargs to cache identifier + for key in sorted(list(kwargs.keys())): + if key in ["ignore_cache", "cache_checksum"]: + continue + value = kwargs[key] + if isinstance(value, dict): + for subkey in sorted(list(value.keys())): + subvalue = value[subkey] + cache_str += ".%s%s" %(subkey,subvalue) + else: + cache_str += ".%s%s" %(key,value) + cache_str = cache_str.lower() + cachedata = await method_class.cache.get(cache_str, checksum=cache_checksum) + if cachedata is not None: + return cachedata + else: + result = await func(*args, **kwargs) + await method_class.cache.set(cache_str, result, checksum=cache_checksum, expiration=datetime.timedelta(days=cache_days, hours=cache_hours)) + return result + return wrapped + return wrapper diff --git a/music_assistant/modules/homeassistant.py b/music_assistant/modules/homeassistant.py index 16de0cb5..89bb38eb 100644 --- a/music_assistant/modules/homeassistant.py +++ 
b/music_assistant/modules/homeassistant.py @@ -5,11 +5,9 @@ import asyncio import os from typing import List import random -import sys -sys.path.append("..") -from utils import run_periodic, LOGGER, parse_track_title, try_parse_int -from models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist -from constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT +from music_assistant.utils import run_periodic, LOGGER, parse_track_title, try_parse_int +from music_assistant.models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist +from music_assistant.constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT import json import aiohttp import time @@ -17,7 +15,7 @@ import datetime import hashlib from asyncio_throttle import Throttler from aiocometd import Client, ConnectionType, Extension -from cache import use_cache +from music_assistant.modules.cache import use_cache import copy import slugify as slug diff --git a/music_assistant/modules/metadata.py b/music_assistant/modules/metadata.py new file mode 100755 index 00000000..0e89beb7 --- /dev/null +++ b/music_assistant/modules/metadata.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- + +import asyncio +import os +from music_assistant.utils import run_periodic, LOGGER +import json +import aiohttp +from asyncio_throttle import Throttler +from difflib import SequenceMatcher as Matcher +from music_assistant.modules.cache import use_cache +from yarl import URL +import re + +LUCENE_SPECIAL = r'([+\-&|!(){}\[\]\^"~*?:\\\/])' + +class MetaData(): + ''' several helpers to search and store mediadata for mediaitems ''' + + def __init__(self, event_loop, db, cache): + self.event_loop = event_loop + self.db = db + self.cache = cache + self.musicbrainz = MusicBrainz(event_loop, cache) + self.fanarttv = FanartTv(event_loop, cache) + + async def get_artist_metadata(self, mb_artist_id, 
cur_metadata): + ''' get/update rich metadata for an artist by providing the musicbrainz artist id ''' + metadata = cur_metadata + if not ('fanart' in metadata or 'thumb' in metadata): + res = await self.fanarttv.artist_images(mb_artist_id) + self.merge_metadata(cur_metadata, res) + return metadata + + async def get_mb_artist_id(self, artistname, albumname=None, album_upc=None, trackname=None, track_isrc=None): + ''' retrieve musicbrainz artist id for the given details ''' + LOGGER.debug('searching musicbrainz for %s (albumname: %s - album_upc: %s - trackname: %s - track_isrc: %s)' %(artistname, albumname, album_upc, trackname, track_isrc)) + mb_artist_id = None + if album_upc: + mb_artist_id = await self.musicbrainz.search_artist_by_album(artistname, None, album_upc) + if not mb_artist_id and track_isrc: + mb_artist_id = await self.musicbrainz.search_artist_by_track(artistname, None, track_isrc) + if not mb_artist_id and albumname: + mb_artist_id = await self.musicbrainz.search_artist_by_album(artistname, albumname) + if not mb_artist_id and trackname: + mb_artist_id = await self.musicbrainz.search_artist_by_track(artistname, trackname) + LOGGER.debug('Got musicbrainz artist id for artist %s --> %s' %(artistname, mb_artist_id)) + return mb_artist_id + + @staticmethod + def merge_metadata(cur_metadata, new_values): + ''' merge new info into the metadata dict without overwiteing existing values ''' + for key, value in new_values.items(): + if not cur_metadata.get(key): + cur_metadata[key] = value + return cur_metadata + +class MusicBrainz(): + + def __init__(self, event_loop, cache): + self.event_loop = event_loop + self.cache = cache + self.http_session = aiohttp.ClientSession(loop=event_loop, connector=aiohttp.TCPConnector(verify_ssl=False)) + self.throttler = Throttler(rate_limit=1, period=1) + + async def search_artist_by_album(self, artistname, albumname=None, album_upc=None): + ''' retrieve musicbrainz artist id by providing the artist name and albumname or 
upc ''' + if album_upc: + endpoint = 'release' + params = {'query': 'barcode:%s' % album_upc} + else: + searchartist = re.sub(LUCENE_SPECIAL, r'\\\1', artistname) + searchartist = searchartist.replace('/','').replace('\\','') + searchalbum = re.sub(LUCENE_SPECIAL, r'\\\1', albumname) + endpoint = 'release' + params = {'query': 'artist:"%s" AND release:"%s"' % (searchartist, searchalbum)} + result = await self.get_data(endpoint, params) + if result and result.get('releases'): + for strictness in [1, 0.95, 0.9]: + for item in result['releases']: + if album_upc or Matcher(None, item['title'].lower(), albumname.lower()).ratio() >= strictness: + for artist in item['artist-credit']: + artist = artist['artist'] + if Matcher(None, artist['name'].lower(), artistname.lower()).ratio() >= strictness: + return artist['id'] + for item in artist.get('aliases',[]): + if item['name'].lower() == artistname.lower(): + return artist['id'] + return '' + + async def search_artist_by_track(self, artistname, trackname=None, track_isrc=None): + ''' retrieve artist id by providing the artist name and trackname or track isrc ''' + endpoint = 'recording' + searchartist = re.sub(LUCENE_SPECIAL, r'\\\1', artistname) + searchartist = searchartist.replace('/','').replace('\\','') + if track_isrc: + endpoint = 'isrc/%s' % track_isrc + params = {'inc': 'artist-credits'} + else: + searchtrack = re.sub(LUCENE_SPECIAL, r'\\\1', trackname) + endpoint = 'recording' + params = {'query': '"%s" AND artist:"%s"' % (searchtrack, searchartist)} + result = await self.get_data(endpoint, params) + if result and result.get('recordings'): + for strictness in [1, 0.95]: + for item in result['recordings']: + if track_isrc or Matcher(None, item['title'].lower(), trackname.lower()).ratio() >= strictness: + for artist in item['artist-credit']: + artist = artist['artist'] + if Matcher(None, artist['name'].lower(), artistname.lower()).ratio() >= strictness: + return artist['id'] + for item in artist.get('aliases',[]): + 
if item['name'].lower() == artistname.lower(): + return artist['id'] + return '' + + @use_cache(30) + async def get_data(self, endpoint, params={}): + ''' get data from api''' + url = 'http://musicbrainz.org/ws/2/%s' % endpoint + headers = {'User-Agent': 'Music Assistant/1.0.0 https://github.com/marcelveldt'} + params['fmt'] = 'json' + async with self.throttler: + async with self.http_session.get(url, headers=headers, params=params) as response: + try: + result = await response.json() + except Exception as exc: + msg = await response.text() + LOGGER.exception("%s - %s" % (str(exc), msg)) + result = None + return result + + +class FanartTv(): + + def __init__(self, event_loop, cache): + self.event_loop = event_loop + self.cache = cache + self.http_session = aiohttp.ClientSession(loop=event_loop, connector=aiohttp.TCPConnector(verify_ssl=False)) + self.throttler = Throttler(rate_limit=1, period=1) + + async def artist_images(self, mb_artist_id): + ''' retrieve images by musicbrainz artist id ''' + metadata = {} + data = await self.get_data("music/%s" % mb_artist_id) + if data: + if data.get('hdmusiclogo'): + metadata['logo'] = data['hdmusiclogo'][0]["url"] + elif data.get('musiclogo'): + metadata['logo'] = data['musiclogo'][0]["url"] + if data.get('artistbackground'): + count = 0 + for item in data['artistbackground']: + key = "fanart" if count == 0 else "fanart.%s" % count + metadata[key] = item["url"] + if data.get('artistthumb'): + url = data['artistthumb'][0]["url"] + if not '2a96cbd8b46e442fc41c2b86b821562f' in url: + metadata['image'] = url + if data.get('musicbanner'): + metadata['banner'] = data['musicbanner'][0]["url"] + return metadata + + @use_cache(30) + async def get_data(self, endpoint, params={}): + ''' get data from api''' + url = 'http://webservice.fanart.tv/v3/%s' % endpoint + params['api_key'] = '639191cb0774661597f28a47e7e2bad5' + async with self.throttler: + async with self.http_session.get(url, params=params) as response: + result = await 
response.json() + if 'error' in result and 'limit' in result['error']: + raise Exception(result['error']) + return result diff --git a/music_assistant/modules/music.py b/music_assistant/modules/music.py new file mode 100755 index 00000000..2ddaf666 --- /dev/null +++ b/music_assistant/modules/music.py @@ -0,0 +1,377 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- + +import asyncio +import os +from music_assistant.utils import run_periodic, run_async_background_task, LOGGER, try_parse_int +import aiohttp +from difflib import SequenceMatcher as Matcher +from music_assistant.models import MediaType, Track, Artist, Album, Playlist +from typing import List +import toolz +import operator + + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +MODULES_PATH = os.path.join(BASE_DIR, "musicproviders" ) + +class Music(): + ''' several helpers around the musicproviders ''' + + def __init__(self, mass): + self.sync_running = False + self.mass = mass + self.providers = {} + # dynamically load musicprovider modules + self.load_music_providers() + # schedule sync task + mass.event_loop.create_task(self.sync_music_providers()) + + async def item(self, item_id, media_type:MediaType, provider='database', lazy=True): + ''' get single music item by id and media type''' + if media_type == MediaType.Artist: + return await self.artist(item_id, provider, lazy=lazy) + elif media_type == MediaType.Album: + return await self.album(item_id, provider, lazy=lazy) + elif media_type == MediaType.Track: + return await self.track(item_id, provider, lazy=lazy) + elif media_type == MediaType.Playlist: + return await self.playlist(item_id, provider) + else: + return None + + async def library_artists(self, limit=0, offset=0, orderby='name', provider_filter=None) -> List[Artist]: + ''' return all library artists, optionally filtered by provider ''' + return await self.mass.db.library_artists(provider=provider_filter, limit=limit, offset=offset, orderby=orderby) + + async def library_albums(self, 
limit=0, offset=0, orderby='name', provider_filter=None) -> List[Album]: + ''' return all library albums, optionally filtered by provider ''' + return await self.mass.db.library_albums(provider=provider_filter, limit=limit, offset=offset, orderby=orderby) + + async def library_tracks(self, limit=0, offset=0, orderby='name', provider_filter=None) -> List[Track]: + ''' return all library tracks, optionally filtered by provider ''' + return await self.mass.db.library_tracks(provider=provider_filter, limit=limit, offset=offset, orderby=orderby) + + async def playlists(self, limit=0, offset=0, orderby='name', provider_filter=None) -> List[Playlist]: + ''' return all library playlists, optionally filtered by provider ''' + return await self.mass.db.playlists(provider=provider_filter, limit=limit, offset=offset, orderby=orderby) + + async def library_items(self, media_type:MediaType, limit=0, offset=0, orderby='name', provider_filter=None) -> List[object]: + ''' get multiple music items in library''' + if media_type == MediaType.Artist: + return await self.library_artists(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter) + elif media_type == MediaType.Album: + return await self.library_albums(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter) + elif media_type == MediaType.Track: + return await self.library_tracks(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter) + elif media_type == MediaType.Playlist: + return await self.playlists(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter) + + async def artist(self, item_id, provider='database', lazy=True) -> Artist: + ''' get artist by id ''' + if not provider or provider == 'database': + return await self.mass.db.artist(item_id) + return await self.providers[provider].artist(item_id, lazy=lazy) + + async def album(self, item_id, provider='database', lazy=True) -> Album: + ''' get album by id ''' + if not provider or 
provider == 'database': + return await self.mass.db.album(item_id) + return await self.providers[provider].album(item_id, lazy=lazy) + + async def track(self, item_id, provider='database', lazy=True) -> Track: + ''' get track by id ''' + if not provider or provider == 'database': + return await self.mass.db.track(item_id) + return await self.providers[provider].track(item_id, lazy=lazy) + + async def playlist(self, item_id, provider='database') -> Playlist: + ''' get playlist by id ''' + if not provider or provider == 'database': + return await self.mass.db.playlist(item_id) + return await self.providers[provider].playlist(item_id) + + async def playlist_by_name(self, name) -> Playlist: + ''' get playlist by name ''' + for playlist in await self.playlists(): + if playlist.name == name: + return playlist + return None + + async def artist_toptracks(self, artist_id, provider='database') -> List[Track]: + ''' get top tracks for given artist ''' + artist = await self.artist(artist_id, provider) + # always append database tracks + items = await self.mass.db.artist_tracks(artist.item_id) + for prov_mapping in artist.provider_ids: + prov_id = prov_mapping['provider'] + prov_item_id = prov_mapping['item_id'] + prov_obj = self.providers[prov_id] + items += await prov_obj.artist_toptracks(prov_item_id) + items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) + items.sort(key=lambda x: x.name, reverse=False) + return items + + async def artist_albums(self, artist_id, provider='database') -> List[Album]: + ''' get (all) albums for given artist ''' + artist = await self.artist(artist_id, provider) + # always append database tracks + items = await self.mass.db.artist_albums(artist.item_id) + for prov_mapping in artist.provider_ids: + prov_id = prov_mapping['provider'] + prov_item_id = prov_mapping['item_id'] + prov_obj = self.providers[prov_id] + items += await prov_obj.artist_albums(prov_item_id) + items = list(toolz.unique(items, 
key=operator.attrgetter('item_id'))) + items.sort(key=lambda x: x.name, reverse=False) + return items + + async def album_tracks(self, album_id, provider='database') -> List[Track]: + ''' get the album tracks for given album ''' + items = [] + album = await self.album(album_id, provider) + for prov_mapping in album.provider_ids: + prov_id = prov_mapping['provider'] + prov_item_id = prov_mapping['item_id'] + prov_obj = self.providers[prov_id] + items += await prov_obj.album_tracks(prov_item_id) + items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) + items = sorted(items, key=operator.attrgetter('disc_number'), reverse=False) + items = sorted(items, key=operator.attrgetter('track_number'), reverse=False) + return items + + async def playlist_tracks(self, playlist_id, provider='database', offset=0, limit=50) -> List[Track]: + ''' get the tracks for given playlist ''' + playlist = None + if not provider or provider == 'database': + playlist = await self.mass.db.playlist(playlist_id) + if playlist and playlist.is_editable: + # database synced playlist, return tracks from db... 
+ return await self.mass.db.playlist_tracks(playlist.item_id, offset=offset, limit=limit) + else: + # return playlist tracks from provider + items = [] + playlist = await self.playlist(playlist_id) + for prov_mapping in playlist.provider_ids: + prov_id = prov_mapping['provider'] + prov_item_id = prov_mapping['item_id'] + prov_obj = self.providers[prov_id] + items += await prov_obj.playlist_tracks(prov_item_id, offset=offset, limit=limit) + if items: + break + items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) + return items + + async def search(self, searchquery, media_types:List[MediaType], limit=10, online=False) -> dict: + ''' search database or providers ''' + # get results from database + result = await self.mass.db.search(searchquery, media_types, limit) + if online: + # include results from music providers + for prov in self.providers.values(): + prov_results = await prov.search(searchquery, media_types, limit) + for item_type, items in prov_results.items(): + result[item_type] += items + # filter out duplicates + for item_type, items in result.items(): + items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) + return result + + async def item_action(self, item_id, media_type, provider='database', action=None): + ''' perform action on item (such as library add/remove) ''' + result = None + item = await self.item(item_id, media_type, provider) + if item and action in ['add', 'remove']: + # remove or add item to the library + for prov_mapping in result.provider_ids: + prov_id = prov_mapping['provider'] + prov_item_id = prov_mapping['item_id'] + for prov in self.providers.values(): + if prov.prov_id == prov_id: + if action == 'add': + result = await prov.add_library(prov_item_id, media_type) + elif action == 'remove': + result = await prov.remove_library(prov_item_id, media_type) + return result + + async def add_playlist_tracks(self, playlist_id, tracks:List[Track]): + ''' add tracks to playlist - make sure we dont add dupes 
''' + # we can only edit playlists that are in the database (marked as editable) + playlist = await self.playlist(playlist_id, 'database') + if not playlist or not playlist.is_editable: + LOGGER.warning("Playlist %s is not editable - skip addition of tracks" %(playlist.name)) + return False + playlist_prov = playlist.provider_ids[0] # playlist can only have one provider (for now) + cur_playlist_tracks = await self.mass.db.playlist_tracks(playlist_id, limit=0) + # grab all (database) track ids in the playlist so we can check for duplicates + cur_playlist_track_ids = [item.item_id for item in cur_playlist_tracks] + track_ids_to_add = [] + for track in tracks: + if not track.provider == 'database': + # make sure we have a database track + track = await self.track(track.item_id, track.provider, lazy=False) + if track.item_id in cur_playlist_track_ids: + LOGGER.warning("Track %s already in playlist %s - skip addition" %(track.name, playlist.name)) + continue + # we can only add a track to a provider playlist if the track is available on that provider + # exception is the file provider which does accept tracks from all providers in the m3u playlist + # this should all be handled in the frontend but these checks are here just to be safe + track_playlist_provs = [item['provider'] for item in track.provider_ids] + if playlist_prov['provider'] in track_playlist_provs: + # a track can contain multiple versions on the same provider + # # simply sort by quality and just add the first one (assuming the track is still available) + track_versions = sorted(track.provider_ids, key=operator.itemgetter('quality'), reverse=True) + for track_version in track_versions: + if track_version['provider'] == playlist_prov['provider']: + track_ids_to_add.append(track_version['item_id']) + break + elif playlist_prov['provider'] == 'file': + # the file provider can handle uri's from all providers in the file so simply add the db id + track_ids_to_add.append(track.item_id) + else: + 
LOGGER.warning("Track %s not available on provider %s - skip addition to playlist %s" %(track.name, playlist_prov['provider'], playlist.name)) + continue + # actually add the tracks to the playlist on the provider + await self.providers[playlist_prov['provider']].add_playlist_tracks(playlist_prov['item_id'], track_ids_to_add) + # schedule sync + self.mass.event_loop.create_task(self.sync_playlist_tracks(playlist.item_id, playlist_prov['provider'], playlist_prov['item_id'])) + + @run_periodic(3600) + async def sync_music_providers(self): + ''' periodic sync of all music providers ''' + if self.sync_running: + return + self.sync_running = True + for prov_id in self.providers.keys(): + # sync library artists + await self.sync_library_artists(prov_id) + await self.sync_library_albums(prov_id) + await self.sync_library_tracks(prov_id) + await self.sync_playlists(prov_id) + self.sync_running = False + + async def sync_library_artists(self, prov_id): + ''' sync library artists for given provider''' + music_provider = self.providers[prov_id] + prev_items = await self.library_artists(provider_filter=prov_id) + prev_db_ids = [item.item_id for item in prev_items] + cur_items = await music_provider.get_library_artists() + cur_db_ids = [] + for item in cur_items: + db_item = await music_provider.artist(item.item_id, lazy=False) + cur_db_ids.append(db_item.item_id) + if not db_item.item_id in prev_db_ids: + await self.mass.db.add_to_library(db_item.item_id, MediaType.Artist, prov_id) + # process deletions + for db_id in prev_db_ids: + if db_id not in cur_db_ids: + await self.mass.db.remove_from_library(db_id, MediaType.Artist, prov_id) + LOGGER.info("Finished syncing Artists for provider %s" % prov_id) + + async def sync_library_albums(self, prov_id): + ''' sync library albums for given provider''' + music_provider = self.providers[prov_id] + prev_items = await self.library_albums(provider_filter=prov_id) + prev_db_ids = [item.item_id for item in prev_items] + cur_items = await 
music_provider.get_library_albums() + cur_db_ids = [] + for item in cur_items: + db_item = await music_provider.album(item.item_id, lazy=False) + cur_db_ids.append(db_item.item_id) + # precache album tracks... + for album_track in await music_provider.get_album_tracks(item.item_id): + await music_provider.track(album_track.item_id) + if not db_item.item_id in prev_db_ids: + await self.mass.db.add_to_library(db_item.item_id, MediaType.Album, prov_id) + # process deletions + for db_id in prev_db_ids: + if db_id not in cur_db_ids: + await self.mass.db.remove_from_library(db_id, MediaType.Album, prov_id) + LOGGER.info("Finished syncing Albums for provider %s" % prov_id) + + async def sync_library_tracks(self, prov_id): + ''' sync library tracks for given provider''' + music_provider = self.providers[prov_id] + prev_items = await self.library_tracks(provider_filter=prov_id) + prev_db_ids = [item.item_id for item in prev_items] + cur_items = await music_provider.get_library_tracks() + cur_db_ids = [] + for item in cur_items: + db_item = await music_provider.track(item.item_id, lazy=False) + cur_db_ids.append(db_item.item_id) + if not db_item.item_id in prev_db_ids: + await self.mass.db.add_to_library(db_item.item_id, MediaType.Track, prov_id) + # process deletions + for db_id in prev_db_ids: + if db_id not in cur_db_ids: + await self.mass.db.remove_from_library(db_id, MediaType.Track, prov_id) + LOGGER.info("Finished syncing Tracks for provider %s" % prov_id) + + async def sync_playlists(self, prov_id): + ''' sync library playlists for given provider''' + music_provider = self.providers[prov_id] + prev_items = await self.playlists(provider_filter=prov_id) + prev_db_ids = [item.item_id for item in prev_items] + cur_items = await music_provider.get_playlists() + cur_db_ids = [] + for item in cur_items: + # always add to db because playlist attributes could have changed + db_id = await self.mass.db.add_playlist(item) + cur_db_ids.append(db_id) + if not db_id in prev_db_ids: 
+ await self.mass.db.add_to_library(db_id, MediaType.Playlist, prov_id) + if item.is_editable: + # precache/sync playlist tracks (user owned playlists only) + asyncio.create_task( self.sync_playlist_tracks(db_id, prov_id, item.item_id) ) + # process playlist deletions + for db_id in prev_db_ids: + if db_id not in cur_db_ids: + await self.mass.db.remove_from_library(db_id, MediaType.Playlist, prov_id) + LOGGER.info("Finished syncing Playlists for provider %s" % prov_id) + + async def sync_playlist_tracks(self, db_playlist_id, prov_id, prov_playlist_id): + ''' sync library playlists tracks for given provider''' + music_provider = self.providers[prov_id] + prev_items = await self.playlist_tracks(db_playlist_id) + prev_db_ids = [item.item_id for item in prev_items] + cur_items = await music_provider.get_playlist_tracks(prov_playlist_id, limit=0) + cur_db_ids = [] + pos = 0 + for item in cur_items: + # we need to do this the complicated way because the file provider can return tracks from other providers + for prov_mapping in item.provider_ids: + item_provider = prov_mapping['provider'] + prov_item_id = prov_mapping['item_id'] + db_item = await self.providers[item_provider].track(prov_item_id, lazy=False) + cur_db_ids.append(db_item.item_id) + if not db_item.item_id in prev_db_ids: + await self.mass.db.add_playlist_track(db_playlist_id, db_item.item_id, pos) + pos += 1 + # process playlist track deletions + for db_id in prev_db_ids: + if db_id not in cur_db_ids: + await self.mass.db.remove_playlist_track(db_playlist_id, db_id) + LOGGER.info("Finished syncing Playlist %s tracks for provider %s" % (prov_playlist_id, prov_id)) + + def load_music_providers(self): + ''' dynamically load musicproviders ''' + for item in os.listdir(MODULES_PATH): + if (os.path.isfile(os.path.join(MODULES_PATH, item)) and not item.startswith("_") and + item.endswith('.py') and not item.startswith('.')): + module_name = item.replace(".py","") + LOGGER.debug("Loading musicprovider module %s" % 
module_name) + try: + mod = __import__("modules.musicproviders." + module_name, fromlist=['']) + if not self.mass.config['musicproviders'].get(module_name): + self.mass.config['musicproviders'][module_name] = {} + self.mass.config['musicproviders'][module_name]['__desc__'] = mod.config_entries() + for key, def_value, desc in mod.config_entries(): + if not key in self.mass.config['musicproviders'][module_name]: + self.mass.config['musicproviders'][module_name][key] = def_value + mod = mod.setup(self.mass) + if mod: + self.providers[mod.prov_id] = mod + cls_name = mod.__class__.__name__ + LOGGER.info("Successfully initialized module %s" % cls_name) + except Exception as exc: + LOGGER.exception("Error loading module %s: %s" %(module_name, exc)) diff --git a/music_assistant/modules/musicproviders/file.py b/music_assistant/modules/musicproviders/file.py index 902151c3..82c9c609 100644 --- a/music_assistant/modules/musicproviders/file.py +++ b/music_assistant/modules/musicproviders/file.py @@ -6,12 +6,11 @@ import os from typing import List import sys import time -sys.path.append("..") -from utils import run_periodic, LOGGER, parse_track_title -from models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist -from constants import CONF_ENABLED +from music_assistant.utils import run_periodic, LOGGER, parse_track_title +from music_assistant.models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist +from music_assistant.constants import CONF_ENABLED import taglib -from cache import use_cache +from music_assistant.modules.cache import use_cache def setup(mass): @@ -204,7 +203,7 @@ class FileProvider(MusicProvider): track = await self.__parse_track_from_uri(line) if track: tracks.append(track) - if len(tracks) == limit: + if limit and len(tracks) == limit: break return tracks @@ -294,19 +293,31 @@ class FileProvider(MusicProvider): track.disc_number = int(song.tags['DISCNUMBER'][0]) if 'TRACKNUMBER' in 
song.tags: track.track_number = int(song.tags['TRACKNUMBER'][0]) + quality_details = "" if filename.endswith('.flac'): - # TODO: try to get more quality info + # TODO: get bit depth quality = TrackQuality.FLAC_LOSSLESS + if song.sampleRate > 192000: + quality = TrackQuality.FLAC_LOSSLESS_HI_RES_4 + elif song.sampleRate > 96000: + quality = TrackQuality.FLAC_LOSSLESS_HI_RES_3 + elif song.sampleRate > 48000: + quality = TrackQuality.FLAC_LOSSLESS_HI_RES_2 + quality_details = "%s Khz" % (song.sampleRate/1000) elif filename.endswith('.ogg'): quality = TrackQuality.LOSSY_OGG + quality_details = "%s kbps" % (song.bitrate) elif filename.endswith('.m4a'): quality = TrackQuality.LOSSY_AAC + quality_details = "%s kbps" % (song.bitrate) else: quality = TrackQuality.LOSSY_MP3 + quality_details = "%s kbps" % (song.bitrate) track.provider_ids.append({ "provider": self.prov_id, "item_id": filename, - "quality": quality + "quality": quality, + "details": quality_details }) return track @@ -330,4 +341,3 @@ class FileProvider(MusicProvider): if track: return track return None - diff --git a/music_assistant/modules/musicproviders/qobuz.py b/music_assistant/modules/musicproviders/qobuz.py index 84c613d6..bc8a4a01 100644 --- a/music_assistant/modules/musicproviders/qobuz.py +++ b/music_assistant/modules/musicproviders/qobuz.py @@ -4,18 +4,16 @@ import asyncio import os from typing import List -import sys -sys.path.append("..") -from utils import run_periodic, LOGGER, parse_track_title -from models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist -from constants import CONF_USERNAME, CONF_PASSWORD, CONF_ENABLED +from music_assistant.utils import run_periodic, LOGGER, parse_track_title +from music_assistant.models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist +from music_assistant.constants import CONF_USERNAME, CONF_PASSWORD, CONF_ENABLED import json import aiohttp import time import datetime import 
hashlib from asyncio_throttle import Throttler -from cache import use_cache +from music_assistant.modules.cache import use_cache def setup(mass): @@ -42,6 +40,7 @@ class QobuzProvider(MusicProvider): def __init__(self, mass, username, password): self.name = 'Qobuz' self.prov_id = 'qobuz' + self.audio_fmt = 'flac' self._cur_user = None self.mass = mass self.cache = mass.cache @@ -180,6 +179,7 @@ class QobuzProvider(MusicProvider): playlist_track = await self.__parse_track(track_obj) if playlist_track: tracks.append(playlist_track) + # TODO: should we look for an alternative track version if the original is marked unavailable ? return tracks async def get_artist_albums(self, prov_artist_id, limit=100, offset=0) -> List[Album]: @@ -235,26 +235,35 @@ class QobuzProvider(MusicProvider): await self.mass.db.remove_from_library(item.item_id, media_type, self.prov_id) LOGGER.debug("deleted item %s from %s - %s" %(prov_item_id, self.prov_id, result)) - async def get_stream_details(self, track_id): - ''' returns the stream details for the provider ''' - params = {'format_id': 27, 'track_id': track_id, 'intent': 'stream'} - return await self.__get_data('track/getFileUrl', params, sign_request=True, ignore_cache=True) + async def add_playlist_tracks(self, prov_playlist_id, prov_track_ids): + ''' add track(s) to playlist ''' + params = { + 'playlist_id': prov_playlist_id, + 'track_ids': ",".join(prov_track_ids) + } + return await self.__get_data('playlist/addTracks', params) + + async def remove_playlist_tracks(self, prov_playlist_id, prov_track_ids): + ''' remove track(s) from playlist ''' + playlist_track_ids = [] + params = {'playlist_id': prov_playlist_id, 'extra': 'tracks'} + for track in await self.__get_all_items("playlist/get", params, key='tracks', limit=0): + if track['id'] in prov_track_ids: + playlist_track_ids.append(track['playlist_track_id']) + params = {'playlist_id': prov_playlist_id, 'track_ids': ",".join(playlist_track_ids)} + return await 
self.__get_data('playlist/deleteTracks', params) - async def get_stream(self, track_id): + async def get_audio_stream(self, track_id): ''' get audio stream for a track ''' - sox_effects='vol -12 dB' - track_details = await self.get_stream_details(track_id) - url = track_details['url'] - env = os.environ.copy() - env["SOX_OPTS"] = "−−multi−threaded −−replay−gain track" - cmd = 'curl -s -X GET "%s" | sox -t flac - -t flac - %s' % (url, sox_effects) - process = await asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE, env=env) - while not process.stdout.at_eof(): - chunk = await process.stdout.readline() - if chunk: + params = {'format_id': 27, 'track_id': track_id, 'intent': 'stream'} + streamdetails = await self.__get_data('track/getFileUrl', params, sign_request=True, ignore_cache=True) + async with self.http_session.get(streamdetails['url']) as resp: + while True: + chunk = await resp.content.read(2000000) + if not chunk: + break yield chunk - else: - break + LOGGER.info("end of stream for track_id %s" % track_id) async def __parse_artist(self, artist_obj): ''' parse spotify artist object to generic layout ''' @@ -285,7 +294,7 @@ class QobuzProvider(MusicProvider): album = Album() if not album_obj.get('id') or not album_obj["streamable"] or not album_obj["displayable"]: # some safety checks - LOGGER.debug("invalid/unavailable album found: %s" % album_obj.get('id')) + LOGGER.warning("invalid/unavailable album found: %s" % album_obj.get('id')) return None album.item_id = album_obj['id'] album.provider = self.prov_id @@ -331,7 +340,7 @@ class QobuzProvider(MusicProvider): track = Track() if not track_obj.get('id') or not track_obj["streamable"] or not track_obj["displayable"]: # some safety checks - LOGGER.debug("invalid/unavailable track found: %s" % track_obj.get('id')) + LOGGER.warning("invalid/unavailable track found: %s - %s" % (track_obj.get('id'), track_obj.get('name'))) return None track.item_id = track_obj['id'] track.provider = self.prov_id 
diff --git a/music_assistant/modules/musicproviders/spotify.py b/music_assistant/modules/musicproviders/spotify.py index 6725943a..f9f7420e 100644 --- a/music_assistant/modules/musicproviders/spotify.py +++ b/music_assistant/modules/musicproviders/spotify.py @@ -6,15 +6,14 @@ import os from typing import List import sys import time -sys.path.append("..") -from utils import run_periodic, LOGGER, parse_track_title -from models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist -from constants import CONF_USERNAME, CONF_PASSWORD, CONF_ENABLED +from music_assistant.utils import run_periodic, LOGGER, parse_track_title, run_background_task +from music_assistant.models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist +from music_assistant.constants import CONF_USERNAME, CONF_PASSWORD, CONF_ENABLED from asyncio_throttle import Throttler import json import aiohttp -from cache import use_cache - +from music_assistant.modules.cache import use_cache +import concurrent def setup(mass): ''' setup the provider''' @@ -40,6 +39,7 @@ class SpotifyProvider(MusicProvider): def __init__(self, mass, username, password): self.name = 'Spotify' self.prov_id = 'spotify' + self.audio_fmt = 'ogg' self._cur_user = None self.mass = mass self.cache = mass.cache @@ -242,49 +242,30 @@ class SpotifyProvider(MusicProvider): elif offset_uri != None: # only for playlists/albums! 
opts["offset"] = {"uri": offset_uri } return await self.__put_data('me/player/play', {"device_id": device_id}, opts) - - async def get_stream_details(self, track_id): - ''' returns the stream details for the provider ''' - track = await self.track(track_id) - import socket - host = socket.gethostbyname(socket.gethostname()) - return { - 'mime_type': 'audio/flac', - 'duration': track.duration, - 'sampling_rate': 44100, - 'bit_depth': 16, - 'url': 'http://%s/stream/spotify/%s' % (host, track_id) - } - async def get_stream(self, track_id): - ''' get audio stream for a track ''' - sox_effects='vol -12 dB' - import subprocess - spotty = self.get_spotty_binary() - env = os.environ.copy() - env["SOX_OPTS"] = "−−multi−threaded −−replay−gain track" - cmd = spotty + ' -n temp -u ' + self._username + ' -p ' + self._password + ' --pass-through --single-track ' + track_id - cmd += ' | sox -t ogg - -t flac - %s' % sox_effects - process = await asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE, env=env) - while not process.stdout.at_eof(): - chunk = await process.stdout.readline() - if chunk: - yield chunk - await process.wait() - - async def get_stream__old(self, track_id, sox_effects='vol -12 dB'): + async def get_audio_stream(self, track_id): ''' get audio stream for a track ''' import subprocess spotty = self.get_spotty_binary() - os.environ["SOX_OPTS"] = "−−multi−threaded −−replay−gain track" - cmd = spotty + ' -n temp -u ' + self._username + ' -p ' + self._password + ' --pass-through --single-track ' + track_id + ' | sox -t ogg -- - -t flac - %s' % sox_effects - process = await asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE) - while not process.stdout.at_eof(): - chunk = await process.stdout.readline() - if chunk: - yield chunk - await process.wait() - + args = ['-n', 'temp', '-u', self._username, '-p', self._password, '--pass-through', '--single-track', track_id] + process = await asyncio.create_subprocess_exec(spotty, *args, 
stdout=asyncio.subprocess.PIPE) + try: + while not process.stdout.at_eof(): + chunk = await process.stdout.read(2000000) + if chunk: + yield chunk + else: + break + except (GeneratorExit, Exception): + while True: + if not await process.stdout.read(2000000): + break + await process.wait() + LOGGER.info("stream cancelled for track_id %s" % track_id) + else: + await process.wait() + LOGGER.info("end of stream for track_id %s" % track_id) + async def __parse_artist(self, artist_obj): ''' parse spotify artist object to generic layout ''' artist = Artist() @@ -357,6 +338,7 @@ class SpotifyProvider(MusicProvider): if 'track' in track_obj: track_obj = track_obj['track'] if track_obj['is_local'] or not track_obj['id'] or not track_obj['is_playable']: + LOGGER.warning("invalid/unavailable track found: %s - %s" % (track_obj.get('id'), track_obj.get('name'))) return None track = Track() track.item_id = track_obj['id'] diff --git a/music_assistant/modules/player.py b/music_assistant/modules/player.py new file mode 100755 index 00000000..c27ad3a4 --- /dev/null +++ b/music_assistant/modules/player.py @@ -0,0 +1,490 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- + +import asyncio +import os +from music_assistant.utils import run_periodic, LOGGER, try_parse_int, try_parse_float, kill_proc +import aiohttp +from difflib import SequenceMatcher as Matcher +from music_assistant.models import MediaType, PlayerState, MusicPlayer +from typing import List +import toolz +import operator +import socket +import random +from copy import deepcopy +import functools +import time +import shutil +import xml.etree.ElementTree as ET +import concurrent +import aiohttp +import random + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +MODULES_PATH = os.path.join(BASE_DIR, "playerproviders" ) +AUDIO_TEMP_DIR = "/tmp/audio_tmp" +AUDIO_CACHE_DIR = "/tmp/audio_cache" + +class Player(): + ''' several helpers to handle playback through player providers ''' + + def __init__(self, mass): + self.mass 
= mass + self.providers = {} + self._players = {} + self.create_config_entries() + # create needed temp/cache dirs + if not os.path.isdir(AUDIO_CACHE_DIR): + os.makedirs(AUDIO_CACHE_DIR) + if not os.path.isdir(AUDIO_TEMP_DIR): + os.makedirs(AUDIO_TEMP_DIR) + # dynamically load provider modules + self.load_providers() + + def create_config_entries(self): + ''' sets the config entries for this module (list with key/value pairs)''' + self.mass.config['player_settings']['__desc__'] = [ + ("enabled", False, "player_enabled"), + ("name", "", "player_name"), + ("group_parent", "", "player_group_with"), + ("mute_as_power", False, "player_mute_power"), + ("disable_volume", False, "player_disable_vol"), + ("apply_group_volume", False, "player_group_vol"), + ("apply_group_power", False, "player_group_pow"), + ("play_power_on", False, "player_power_play") + ] + + async def players(self): + ''' return all players ''' + items = list(self._players.values()) + items.sort(key=lambda x: x.name, reverse=False) + return items + + async def player(self, player_id): + ''' return players by id ''' + return self._players[player_id] + + async def player_command(self, player_id, cmd, cmd_args=None): + ''' issue command on player (play, pause, next, previous, stop, power, volume, mute) ''' + if player_id not in self._players: + return + player = self._players[player_id] + # handle some common workarounds + if cmd in ['pause', 'play'] and cmd_args == 'toggle': + cmd = 'pause' if player.state == PlayerState.Playing else 'play' + if cmd == 'power' and cmd_args == 'toggle': + cmd_args = 'off' if player.powered else 'on' + if cmd == 'volume' and cmd_args == 'up': + cmd_args = player.volume_level + 2 + elif cmd == 'volume' and cmd_args == 'down': + cmd_args = player.volume_level - 2 + # redirect playlist related commands to parent player + if player.group_parent and cmd not in ['power', 'volume', 'mute']: + return await self.player_command(player.group_parent, cmd, cmd_args) + # handle hass 
integration + await self.__player_command_hass_integration(player, cmd, cmd_args) + # handle mute as power + if cmd == 'power' and player.settings['mute_as_power']: + cmd = 'mute' + cmd_args = 'on' if cmd_args == 'off' else 'off' # invert logic (power ON is mute OFF) + # handle group volume for group players + player_childs = [item for item in self._players.values() if item.group_parent == player_id] + if player.is_group and cmd == 'volume' and player.settings['apply_group_volume']: + return await self.__player_command_group_volume(player, player_childs, cmd_args) + if player.is_group and cmd == 'power' and cmd_args == 'off': + for item in player_childs: + asyncio.create_task(self.player_command(item.player_id, cmd, cmd_args)) + # normal execution of command on player + prov_id = self._players[player_id].player_provider + prov = self.providers[prov_id] + await prov.player_command(player_id, cmd, cmd_args) + # handle play on power on + if cmd == 'power' and cmd_args == 'on' and player.settings['play_power_on']: + LOGGER.info('play_power_on %s' % player.name) + await prov.player_command(player_id, 'play') + + async def __player_command_hass_integration(self, player, cmd, cmd_args): + ''' handle hass integration in player command ''' + if not self.mass.hass: + return + if cmd == 'power' and player.settings.get('hass_power_entity') and player.settings.get('hass_power_entity_source'): + cur_source = await self.mass.hass.get_state(player.settings['hass_power_entity'], attribute='source') + if cmd_args == 'on' and not cur_source: + service_data = { 'entity_id': player.settings['hass_power_entity'], 'source':player.settings['hass_power_entity_source'] } + await self.mass.hass.call_service('media_player', 'select_source', service_data) + elif cmd_args == 'off' and cur_source == player.settings['hass_power_entity_source']: + service_data = { 'entity_id': player.settings['hass_power_entity'] } + await self.mass.hass.call_service('media_player', 'turn_off', service_data) + 
else: + LOGGER.warning('Ignoring power command as required source is not active') + elif cmd == 'power' and player.settings.get('hass_power_entity'): + domain = player.settings['hass_power_entity'].split('.')[0] + service_data = { 'entity_id': player.settings['hass_power_entity']} + await self.mass.hass.call_service(domain, 'turn_%s' % cmd_args, service_data) + if cmd == 'volume' and player.settings.get('hass_volume_entity'): + service_data = { 'entity_id': player.settings['hass_volume_entity'], 'volume_level': int(cmd_args)/100} + await self.mass.hass.call_service('media_player', 'volume_set', service_data) + cmd_args = 100 # just force full volume on actual player if volume is outsourced to hass + + async def __player_command_group_volume(self, player, player_childs, cmd_args): + ''' handle group volume if needed''' + cur_volume = player.volume_level + new_volume = try_parse_int(cmd_args) + volume_dif = new_volume - cur_volume + volume_dif_percent = volume_dif/cur_volume + for child_player in player_childs: + if child_player.enabled and child_player.powered: + cur_child_volume = child_player.volume_level + new_child_volume = cur_child_volume + (cur_child_volume * volume_dif_percent) + child_player.volume_level = new_child_volume + await self.player_command(child_player.player_id, 'volume', new_child_volume) + player.volume_level = new_volume + return True + + async def remove_player(self, player_id): + ''' handle a player remove ''' + self._players.pop(player_id, None) + asyncio.ensure_future(self.mass.event('player removed', player_id)) + + async def trigger_update(self, player_id): + ''' manually trigger update for a player ''' + await self.update_player(self._players[player_id]) + + async def update_player(self, player_details): + ''' update (or add) player ''' + player_details = deepcopy(player_details) + LOGGER.debug('Incoming msg from %s' % player_details.name) + player_id = player_details.player_id + player_changed = False + if not player_id in 
self._players: + # first message from player + self._players[player_id] = MusicPlayer() + player = self._players[player_id] + player.player_id = player_id + player.player_provider = player_details.player_provider + player_changed = True + else: + player = self._players[player_id] + player.settings = await self.__get_player_settings(player_id) + # handle basic player settings + player_details.enabled = player.settings['enabled'] + player_details.name = player.settings['name'] if player.settings['name'] else player_details.name + player_details.group_parent = player.settings['group_parent'] if player.settings['group_parent'] else player_details.group_parent + # handle hass integration + await self.__update_player_hass_settings(player_details, player.settings) + # handle mute as power setting + if player.settings['mute_as_power']: + player_details.powered = not player_details.muted + # combine state of group parent + if player_details.group_parent and player_details.group_parent in self._players: + parent_player = self._players[player_details.group_parent] + player_details.cur_item_time = parent_player.cur_item_time + player_details.cur_item = parent_player.cur_item + player_details.state = parent_player.state + # handle group volume/power setting + player_childs = [item for item in self._players.values() if item.group_parent == player_id] + player_details.is_group = len(player_childs) > 0 + if player_details.is_group and player.settings['apply_group_volume']: + await self.__update_player_group_volume(player_details, player_childs) + if player_details.is_group and player.settings['apply_group_power']: + await self.__update_player_group_power(player_details, player_childs) + # compare values to detect changes + if player.cur_item.name != player_details.cur_item.name: + player_changed = True + player.cur_item = player_details.cur_item + for key, cur_value in player.__dict__.items(): + if key != 'settings': + new_value = getattr(player_details, key) + if new_value != 
cur_value: + player_changed = True + setattr(player, key, new_value) + LOGGER.debug('key changed: %s for player %s - new value: %s' % (key, player.name, new_value)) + if player_changed: + # player is added or updated! + asyncio.ensure_future(self.mass.event('player updated', player)) + # is groupplayer, trigger update of its childs + for child in player_childs: + asyncio.create_task(self.trigger_update(child.player_id)) + # if child player in a group, trigger update of parent + if player.group_parent: + asyncio.create_task(self.trigger_update(player.group_parent)) + + async def __update_player_hass_settings(self, player_details, player_settings): + ''' handle home assistant integration on a player ''' + if not self.mass.hass: + return + player_id = player_details.player_id + player_settings = self.mass.config['player_settings'][player_id] + if player_settings.get('hass_power_entity') and player_settings.get('hass_power_entity_source'): + hass_state = await self.mass.hass.get_state( + player_settings['hass_power_entity'], + attribute='source', + register_listener=functools.partial(self.trigger_update, player_id)) + player_details.powered = hass_state == player_settings['hass_power_entity_source'] + elif player_settings.get('hass_power_entity'): + hass_state = await self.mass.hass.get_state( + player_settings['hass_power_entity'], + attribute='state', + register_listener=functools.partial(self.trigger_update, player_id)) + player_details.powered = hass_state != 'off' + if player_settings.get('hass_volume_entity'): + hass_state = await self.mass.hass.get_state( + player_settings['hass_volume_entity'], + attribute='volume_level', + register_listener=functools.partial(self.trigger_update, player_id)) + player_details.volume_level = int(try_parse_float(hass_state)*100) + + async def __update_player_group_volume(self, player_details, player_childs): + ''' handle group volume ''' + group_volume = 0 + active_players = 0 + for child_player in player_childs: + if 
child_player.enabled and child_player.powered: + group_volume += child_player.volume_level + active_players += 1 + group_volume = group_volume / active_players if active_players else 0 + player_details.volume_level = group_volume + + async def __update_player_group_power(self, player_details, player_childs): + ''' handle group power ''' + player_powered = False + for child_player in player_childs: + if child_player.powered: + player_powered = True + break + if player_details.powered and not player_powered: + # all childs turned off so turn off group player + LOGGER.info('all childs turned off so turn off group player %s' % player_details.name) + await self. player_command(player_details.player_id, 'power', 'off') + player_details.powered = False + elif not player_details.powered and player_powered: + # all childs turned off but group player still off, so turn it on + LOGGER.info('all childs turned off but group player still off, so turn it on %s' % player_details.name) + await self. player_command(player_details.player_id, 'power', 'on') + player_details.powered = True + + async def __get_player_settings(self, player_id): + ''' get (or create) player config ''' + player_settings = self.mass.config['player_settings'].get(player_id,{}) + for key, def_value, desc in self.mass.config['player_settings']['__desc__']: + if not key in player_settings: + player_settings[key] = def_value + self.mass.config['player_settings'][player_id] = player_settings + return player_settings + + async def play_media(self, player_id, media_item, queue_opt='play'): + ''' + play media on a player + player_id: id of the player + media_item: media item(s) that should be played (Track, Album, Artist, Playlist) + queue_opt: play, replace, next or add + ''' + if not player_id in self._players: + LOGGER.warning('Player %s not found' % player_id) + return False + player_prov = self.providers[self._players[player_id].player_provider] + # a single item or list of items may be provided + media_items = 
media_item if isinstance(media_item, list) else [media_item] + playable_tracks = [] + for media_item in media_items: + # collect tracks to play + if media_item.media_type == MediaType.Artist: + tracks = await self.mass.music.artist_toptracks(media_item.item_id, provider=media_item.provider) + elif media_item.media_type == MediaType.Album: + tracks = await self.mass.music.album_tracks(media_item.item_id, provider=media_item.provider) + elif media_item.media_type == MediaType.Playlist: + tracks = await self.mass.music.playlist_tracks(media_item.item_id, provider=media_item.provider, offset=0, limit=0) + else: + tracks = [media_item] # single track + # check supported music providers by this player and work out how to handle playback... + for track in tracks: + # sort by quality + match_found = False + for prov_media in sorted(track.provider_ids, key=operator.itemgetter('quality'), reverse=True): + media_provider = prov_media['provider'] + media_item_id = prov_media['item_id'] + player_supported_provs = player_prov.supported_musicproviders + if media_provider in player_supported_provs: + # the provider can handle this media_type directly ! + track.uri = await self.get_track_uri(media_item_id, media_provider) + playable_tracks.append(track) + match_found = True + elif 'http' in player_prov.supported_musicproviders: + # fallback to http streaming if supported + track.uri = await self.get_track_uri(media_item_id, media_provider, True) + playable_tracks.append(track) + match_found = True + if match_found: + break + if playable_tracks: + if self._players[player_id].shuffle_enabled: + random.shuffle(playable_tracks) + if queue_opt in ['next', 'play'] and len(playable_tracks) > 1: + queue_opt = 'replace' # always assume playback of multiple items as new queue + return await player_prov.play_media(player_id, playable_tracks, queue_opt) + else: + raise Exception("Musicprovider and/or media not supported by player %s !" 
% (player_id) ) + + async def get_track_uri(self, item_id, provider, http_stream=False): + ''' generate the URL/URI for a media item ''' + uri = "" + if http_stream: + host = socket.gethostbyname(socket.gethostname()) + uri = 'http://%s:8095/stream/%s/%s'% (host, provider, item_id) + elif provider == "spotify": + uri = 'spotify://spotify:track:%s' % item_id + elif provider == "qobuz": + uri = 'qobuz://%s.flac' % item_id + elif provider == "file": + uri = item_id + return uri + + async def player_queue(self, player_id, offset=0, limit=50): + ''' return the items in the player's queue ''' + player = self._players[player_id] + player_prov = self.providers[player.player_provider] + return await player_prov.player_queue(player_id, offset=offset, limit=limit) + + async def get_audio_stream(self, track_id, provider): + ''' get audio stream from provider and apply additional effects/processing where needed''' + input_audio_fmt = self.mass.music.providers[provider].audio_fmt + cachefile = self.__get_track_cache_filename(track_id, provider) + gain_correct = await self.__get_track_gain_correct(track_id, provider) + sox_effects=['vol', str(gain_correct), 'dB' ] + if os.path.isfile(cachefile): + # we have a temp file for this track which we can use + args = ['-t', input_audio_fmt, cachefile, '-t', 'flac', '-', *sox_effects] + process = await asyncio.create_subprocess_exec('sox', *args, + stdout=asyncio.subprocess.PIPE) + buffer_task = None + else: + # stream from provider + args = ['-t', input_audio_fmt, '-', '-t', 'flac', '-', *sox_effects] + process = await asyncio.create_subprocess_exec('sox', *args, + stdout=asyncio.subprocess.PIPE, stdin=asyncio.subprocess.PIPE) + buffer_task = asyncio.create_task( + self.__fill_audio_buffer(process.stdin, track_id, provider)) + try: + # yield the chunks from stdout + while not process.stdout.at_eof(): + chunk = await process.stdout.read(2000000) + if not chunk: + break + yield chunk + except (asyncio.CancelledError, 
concurrent.futures._base.CancelledError): + # client disconnected so cleanup + #if buffer_task: + # buffer_task.cancel() + # Could not figure out how to reliably close process without deadlocks + # so instead just read all data for a clean exit + while True: + if not await process.stdout.read(2000000): + break + await process.wait() + LOGGER.info("streaming of track_id %s aborted (client disconnect ?)" % track_id) + raise asyncio.CancelledError() + except Exception as exc: + LOGGER.error(exc) + else: + await process.wait() + LOGGER.info("streaming of track_id %s completed" % track_id) + + async def __analyze_track_audio(self, musicfile, track_id, provider): + ''' analyze track audio, for now we only calculate EBU R128 loudness ''' + import platform + analyse_dir = os.path.join(self.mass.datapath, 'analyse_info') + analysis_file = os.path.join(analyse_dir, "%s_%s.xml" %(provider, track_id.split(os.sep)[-1])) + if not os.path.isdir(analyse_dir): + os.makedirs(analyse_dir) + bs1770_binary = None + if platform.system() == "Windows": + bs1770_binary = os.path.join(os.path.dirname(__file__), "bs1770gain", "win64", "bs1770gain") + elif platform.system() == "Darwin": + # macos binary is x86_64 intel + bs1770_binary = os.path.join(os.path.dirname(__file__), "bs1770gain", "osx", "bs1770gain") + elif platform.system() == "Linux": + architecture = platform.machine() + if architecture.startswith('AMD64') or architecture.startswith('x86_64'): + bs1770_binary = os.path.join(os.path.dirname(__file__), "bs1770gain", "linux64", "bs1770gain") + # TODO: build armhf binary + cmd = '%s %s --loglevel quiet --xml --ebu -f %s' % (bs1770_binary, musicfile, analysis_file) + process = await asyncio.create_subprocess_shell(cmd) + await process.wait() + + async def __get_track_gain_correct(self, track_id, provider): + ''' get the gain correction that should be applied to a track ''' + target_gain = -23 + fallback_gain = -14 # fallback if no analyse info is available + analysis_file = 
os.path.join(self.mass.datapath, 'analyse_info', "%s_%s.xml" %(provider, track_id.split(os.sep)[-1])) + try: # read audio analysis if available + tree = ET.parse(analysis_file) + trackinfo = tree.getroot().find("album").find("track") + track_lufs = trackinfo.find('integrated').get('lufs') + gain_correct = target_gain - float(track_lufs) + LOGGER.info("apply gain correction of %s" % gain_correct) + except Exception: + gain_correct = fallback_gain # fallback value + if os.path.isfile(analysis_file): + os.remove(analysis_file) + cachefile = self.__get_track_cache_filename(track_id, provider) + # reschedule analyze task to try again + asyncio.create_task(self.__analyze_track_audio(cachefile, track_id, provider)) + return gain_correct + + async def __fill_audio_buffer(self, buf, track_id, provider): + ''' get audio data from provider and write to buffer''' + # fill the buffer with audio data + # a tempfile is created so we can do audio analysis + try: + tmpfile = os.path.join(AUDIO_TEMP_DIR, '%s%s%s.tmp' % (random.randint(0, 999), track_id, random.randint(0, 999))) + finalfile = self.__get_track_cache_filename(track_id, provider) + fd = open(tmpfile, 'wb') + async for chunk in self.mass.music.providers[provider].get_audio_stream(track_id): + buf.write(chunk) + await buf.drain() + fd.write(chunk) + await buf.drain() + buf.write_eof() + fd.close() + except Exception as exc: + LOGGER.error(exc) + else: + # successful completion + if os.path.isfile(tmpfile) and not os.path.isfile(finalfile): + shutil.move(tmpfile, finalfile) + asyncio.create_task(self.__analyze_track_audio(finalfile, track_id, provider)) + LOGGER.info("fill_audio_buffer complete for track %s" % track_id) + finally: + # always clean up temp file + if os.path.isfile(tmpfile): + os.remove(tmpfile) + + @staticmethod + def __get_track_cache_filename(track_id, provider): + ''' get filename for a track to use as cache file ''' + return os.path.join(AUDIO_CACHE_DIR, '%s_%s' %(provider, track_id.split(os.sep)[-1])) 
+ + + def load_providers(self): + ''' dynamically load providers ''' + for item in os.listdir(MODULES_PATH): + if (os.path.isfile(os.path.join(MODULES_PATH, item)) and not item.startswith("_") and + item.endswith('.py') and not item.startswith('.')): + module_name = item.replace(".py","") + LOGGER.debug("Loading playerprovider module %s" % module_name) + try: + mod = __import__("modules.playerproviders." + module_name, fromlist=['']) + if not self.mass.config['playerproviders'].get(module_name): + self.mass.config['playerproviders'][module_name] = {} + self.mass.config['playerproviders'][module_name]['__desc__'] = mod.config_entries() + for key, def_value, desc in mod.config_entries(): + if not key in self.mass.config['playerproviders'][module_name]: + self.mass.config['playerproviders'][module_name][key] = def_value + mod = mod.setup(self.mass) + if mod: + self.providers[mod.prov_id] = mod + cls_name = mod.__class__.__name__ + LOGGER.info("Successfully initialized module %s" % cls_name) + except Exception as exc: + LOGGER.exception("Error loading module %s: %s" %(module_name, exc)) diff --git a/music_assistant/modules/playerproviders/chromecast.py b/music_assistant/modules/playerproviders/chromecast.py index 29ff1dc5..8c6c8429 100644 --- a/music_assistant/modules/playerproviders/chromecast.py +++ b/music_assistant/modules/playerproviders/chromecast.py @@ -6,10 +6,9 @@ import os from typing import List import random import sys -sys.path.append("..") -from utils import run_periodic, run_background_task, LOGGER, parse_track_title, try_parse_int -from models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist -from constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT +from music_assistant.utils import run_periodic, run_background_task, LOGGER, parse_track_title, try_parse_int +from music_assistant.models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, 
Track, Playlist +from music_assistant.constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT import json import aiohttp import time @@ -17,7+16,7 @@ import datetime import hashlib from asyncio_throttle import Throttler from aiocometd import Client, ConnectionType, Extension -from cache import use_cache +from music_assistant.modules.cache import use_cache import copy import pychromecast from pychromecast.controllers.multizone import MultizoneController @@ -117,8 +116,9 @@ class ChromecastProvider(PlayerProvider): ''' play media on a player ''' - player = self._chromecasts[player_id] - media_controller = player.media_controller + castplayer = self._chromecasts[player_id] + player = self._players[player_id] + media_controller = castplayer.media_controller receiver_ctrl = media_controller._socket_client.receiver_controller cur_queue_index = 0 if media_controller.queue_cur_id != None: @@ -127,12 +127,12 @@ class ChromecastProvider(PlayerProvider): if item['itemId'] == media_controller.queue_cur_id: cur_queue_item = item # find out the current index - for counter, value in enumerate(player.queue): + for counter, value in enumerate(castplayer.queue): if value['media']['contentId'] == cur_queue_item['media']['contentId']: cur_queue_index = counter break break - if (not media_controller.queue_cur_id or not media_controller.status.media_session_id or not player.queue): + if (not media_controller.queue_cur_id or not media_controller.status.media_session_id or not castplayer.queue): queue_opt = 'replace' new_queue_items = [] @@ -141,23 +141,23 @@ class ChromecastProvider(PlayerProvider): new_queue_items.append(queue_item) if (queue_opt in ['replace', 'play'] or not media_controller.queue_cur_id or - not media_controller.status.media_session_id or not player.queue): + not media_controller.status.media_session_id or not castplayer.queue): # load new Chromecast queue with items if queue_opt == 'add': # append items to queue - player.queue = player.queue + new_queue_items 
+ castplayer.queue = castplayer.queue + new_queue_items startindex = cur_queue_index elif queue_opt == 'play': # keep current queue but append new items at begin and start playing first item - player.queue = new_queue_items + player.queue[cur_queue_index:] + player.queue[:cur_queue_index] + castplayer.queue = new_queue_items + castplayer.queue[cur_queue_index:] + castplayer.queue[:cur_queue_index] startindex = 0 elif queue_opt == 'next': # play the new items after the current playing item (insert before current next item) - player.queue = new_queue_items + player.queue[cur_queue_index:] + player.queue[:cur_queue_index] + castplayer.queue = new_queue_items + castplayer.queue[cur_queue_index:] + castplayer.queue[:cur_queue_index] startindex = cur_queue_index else: # overwrite the whole queue with new item(s) - player.queue = new_queue_items + castplayer.queue = new_queue_items startindex = 0 # load first 10 items as soon as possible queuedata = { @@ -166,17 +166,17 @@ "shuffle": player.shuffle_enabled, "queueType": "PLAYLIST", "startIndex": startindex, # Item index to play after this request or keep same item if undefined - "items": player.queue[:10] + "items": castplayer.queue[:10] } await self.__send_player_queue(receiver_ctrl, media_controller, queuedata) # append the rest of the items in the queue in chunks - for chunk in chunks(player.queue[10:], 100): + for chunk in chunks(castplayer.queue[10:], 100): await asyncio.sleep(1) queuedata = { "type": 'QUEUE_INSERT', "items": chunk } await self.__send_player_queue(receiver_ctrl, media_controller, queuedata) elif queue_opt == 'add': # existing queue is playing: simply append items to the end of the queue (in small chunks) - player.queue = player.queue + new_queue_items + castplayer.queue = castplayer.queue + new_queue_items insertbefore = None for chunk in chunks(new_queue_items, 100): queuedata = { "type": 'QUEUE_INSERT', "items": chunk } @@ -184,7 +184,7 @@ class 
ChromecastProvider(PlayerProvider): await asyncio.sleep(1) elif queue_opt == 'next': # play the new items after the current playing item (insert before current next item) - player.queue = player.queue[:cur_queue_index] + new_queue_items + player.queue[cur_queue_index:] + castplayer.queue = castplayer.queue[:cur_queue_index] + new_queue_items + castplayer.queue[cur_queue_index:] queuedata = { "type": 'QUEUE_INSERT', "insertBefore": media_controller.queue_cur_id+1, @@ -252,11 +252,9 @@ class ChromecastProvider(PlayerProvider): track_id = uri.replace('qobuz://','').replace('.flac','') track = await self.mass.music.providers['qobuz'].track(track_id) elif uri.startswith('http') and '/stream' in uri: - try: - item_id = uri.split('/')[-1] - provider = uri.split('/')[-2] - track = await self.mass.music.providers[provider].track(item_id) - except: pass + item_id = uri.split('/')[-1] + provider = uri.split('/')[-2] + track = await self.mass.music.providers[provider].track(item_id) return track async def __handle_group_members_update(self, mz, added_player=None, removed_player=None): diff --git a/music_assistant/modules/playerproviders/lms.py b/music_assistant/modules/playerproviders/lms.py index 8faf1601..640d8f2f 100644 --- a/music_assistant/modules/playerproviders/lms.py +++ b/music_assistant/modules/playerproviders/lms.py @@ -6,10 +6,9 @@ import os from typing import List import random import sys -sys.path.append("..") -from utils import run_periodic, LOGGER, parse_track_title -from models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist -from constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT +from music_assistant.utils import run_periodic, LOGGER, parse_track_title +from music_assistant.models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist +from music_assistant.constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT import json import 
aiohttp import time @@ -17,7 +16,7 @@ import datetime import hashlib from asyncio_throttle import Throttler from aiocometd import Client, ConnectionType, Extension -from cache import use_cache +from music_assistant.modules.cache import use_cache import copy def setup(mass): @@ -52,6 +51,7 @@ class LMSProvider(PlayerProvider): self._players = {} self.last_msg_received = 0 self.supported_musicproviders = ['qobuz', 'file', 'spotify', 'http'] + self.supported_musicproviders = ['http'] self.http_session = aiohttp.ClientSession(loop=mass.event_loop) # we use a combi of active polling and subscriptions because the cometd implementation of LMS is somewhat unreliable asyncio.ensure_future(self.__lms_events()) @@ -217,6 +217,10 @@ class LMSProvider(PlayerProvider): return await self.mass.music.providers['spotify'].track(track_id) except Exception as exc: LOGGER.error(exc) + elif track_url.startswith('http') and '/stream' in track_url: + item_id = track_url.split('/')[-1] + provider = track_url.split('/')[-2] + return await self.mass.music.providers[provider].track(item_id) # fallback to a generic track track = Track() track.name = track_details['title'] diff --git a/music_assistant/modules/web.py b/music_assistant/modules/web.py index 295139b1..949a5623 100755 --- a/music_assistant/modules/web.py +++ b/music_assistant/modules/web.py @@ -3,11 +3,11 @@ import asyncio import os -from utils import run_periodic, LOGGER +from music_assistant.utils import run_periodic, LOGGER import json import aiohttp from aiohttp import web -from models import MediaType, media_type_from_string +from music_assistant.models import MediaType, media_type_from_string from functools import partial json_serializer = partial(json.dumps, default=lambda x: x.__dict__) import ssl @@ -224,22 +224,11 @@ class Web(): await self.mass.remove_event_listener(cb_id) await ws.close() else: - # for now we only use WS for player commands + # for now we only use WS for (simple) player commands if msg.data == 'players': 
players = await self.mass.player.players() ws_msg = {'message': 'players', 'message_details': players} await ws.send_json(ws_msg, dumps=json_serializer) - # elif msg.data.startswith('players') and '/play_media/' in msg.data: - # #'players/{player_id}/play_media/{media_type}/{media_id}/{queue_opt}' - # msg_data_parts = msg.data.split('/') - # player_id = msg_data_parts[1] - # media_type = msg_data_parts[3] - # media_type = media_type_from_string(media_type) - # media_id = msg_data_parts[4] - # queue_opt = msg_data_parts[5] if len(msg_data_parts) == 6 else 'replace' - # media_item = await self.mass.music.item(media_id, media_type, lazy=True) - # await self.mass.player.play_media(player_id, media_item, queue_opt) - elif msg.data.startswith('players') and '/cmd/' in msg.data: # players/{player_id}/cmd/{cmd} or players/{player_id}/cmd/{cmd}/{cmd_args} msg_data_parts = msg.data.split('/') @@ -275,14 +264,10 @@ class Web(): ''' start streaming audio from provider ''' track_id = request.match_info.get('track_id') provider = request.match_info.get('provider') - #stream_details = await self.mass.music.providers[provider].get_stream_details(track_id) - # resp = web.StreamResponse(status=200, - # reason='OK', - # headers={'Content-Type': stream_details['mime_type']}) resp = web.StreamResponse(status=200, reason='OK', headers={'Content-Type': 'audio/flac'}) await resp.prepare(request) - async for chunk in self.mass.music.providers[provider].get_stream(track_id): + async for chunk in self.mass.player.get_audio_stream(track_id, provider): await resp.write(chunk) return resp \ No newline at end of file diff --git a/music_assistant/music.py b/music_assistant/music.py deleted file mode 100755 index 30264438..00000000 --- a/music_assistant/music.py +++ /dev/null @@ -1,333 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding:utf-8 -*- - -import asyncio -import os -from utils import run_periodic, run_async_background_task, LOGGER, try_parse_int -import aiohttp -from difflib import 
SequenceMatcher as Matcher -from models import MediaType -from typing import List -import toolz -import operator - - -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) -MODULES_PATH = os.path.join(BASE_DIR, "modules", "musicproviders" ) - -class Music(): - ''' several helpers around the musicproviders ''' - - def __init__(self, mass): - self.sync_running = False - self.mass = mass - self.providers = {} - # dynamically load musicprovider modules - self.load_music_providers() - # schedule sync task - mass.event_loop.create_task(self.sync_music_providers()) - - async def item(self, item_id, media_type:MediaType, provider='database', lazy=True): - ''' get single music item by id and media type''' - if media_type == MediaType.Artist: - return await self.artist(item_id, provider, lazy=lazy) - elif media_type == MediaType.Album: - return await self.album(item_id, provider, lazy=lazy) - elif media_type == MediaType.Track: - return await self.track(item_id, provider, lazy=lazy) - elif media_type == MediaType.Playlist: - return await self.playlist(item_id, provider) - else: - return None - - async def library_artists(self, limit=0, offset=0, orderby='name', provider_filter=None): - ''' return all library artists, optionally filtered by provider ''' - return await self.mass.db.library_artists(provider=provider_filter, limit=limit, offset=offset, orderby=orderby) - - async def library_albums(self, limit=0, offset=0, orderby='name', provider_filter=None): - ''' return all library albums, optionally filtered by provider ''' - return await self.mass.db.library_albums(provider=provider_filter, limit=limit, offset=offset, orderby=orderby) - - async def library_tracks(self, limit=0, offset=0, orderby='name', provider_filter=None): - ''' return all library tracks, optionally filtered by provider ''' - return await self.mass.db.library_tracks(provider=provider_filter, limit=limit, offset=offset, orderby=orderby) - - async def playlists(self, limit=0, offset=0, orderby='name', 
provider_filter=None): - ''' return all library playlists, optionally filtered by provider ''' - return await self.mass.db.playlists(provider=provider_filter, limit=limit, offset=offset, orderby=orderby) - - async def library_items(self, media_type:MediaType, limit=0, offset=0, orderby='name', provider_filter=None): - ''' get multiple music items in library''' - if media_type == MediaType.Artist: - return await self.library_artists(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter) - elif media_type == MediaType.Album: - return await self.library_albums(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter) - elif media_type == MediaType.Track: - return await self.library_tracks(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter) - elif media_type == MediaType.Playlist: - return await self.playlists(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter) - - async def artist(self, item_id, provider='database', lazy=True): - ''' get artist by id ''' - if not provider or provider == 'database': - return await self.mass.db.artist(item_id) - return await self.providers[provider].artist(item_id, lazy=lazy) - - async def album(self, item_id, provider='database', lazy=True): - ''' get album by id ''' - if not provider or provider == 'database': - return await self.mass.db.album(item_id) - return await self.providers[provider].album(item_id, lazy=lazy) - - async def track(self, item_id, provider='database', lazy=True): - ''' get track by id ''' - if not provider or provider == 'database': - return await self.mass.db.track(item_id) - return await self.providers[provider].track(item_id, lazy=lazy) - - async def playlist(self, item_id, provider='database'): - ''' get playlist by id ''' - if not provider or provider == 'database': - return await self.mass.db.playlist(item_id) - return await self.providers[provider].playlist(item_id) - - async def playlist_by_name(self, name): - 
''' get playlist by name ''' - for playlist in await self.playlists(): - if playlist.name == name: - return playlist - return None - - async def artist_toptracks(self, artist_id, provider='database'): - ''' get top tracks for given artist ''' - artist = await self.artist(artist_id, provider) - # always append database tracks - items = await self.mass.db.artist_tracks(artist.item_id) - for prov_mapping in artist.provider_ids: - prov_id = prov_mapping['provider'] - prov_item_id = prov_mapping['item_id'] - prov_obj = self.providers[prov_id] - items += await prov_obj.artist_toptracks(prov_item_id) - items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) - items.sort(key=lambda x: x.name, reverse=False) - return items - - async def artist_albums(self, artist_id, provider='database'): - ''' get (all) albums for given artist ''' - artist = await self.artist(artist_id, provider) - # always append database tracks - items = await self.mass.db.artist_albums(artist.item_id) - for prov_mapping in artist.provider_ids: - prov_id = prov_mapping['provider'] - prov_item_id = prov_mapping['item_id'] - prov_obj = self.providers[prov_id] - items += await prov_obj.artist_albums(prov_item_id) - items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) - items.sort(key=lambda x: x.name, reverse=False) - return items - - async def album_tracks(self, album_id, provider='database'): - ''' get the album tracks for given album ''' - items = [] - album = await self.album(album_id, provider) - for prov_mapping in album.provider_ids: - prov_id = prov_mapping['provider'] - prov_item_id = prov_mapping['item_id'] - prov_obj = self.providers[prov_id] - items += await prov_obj.album_tracks(prov_item_id) - items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) - return items - - async def playlist_tracks(self, playlist_id, provider='database', offset=0, limit=50): - ''' get the tracks for given playlist ''' - playlist = None - if not provider or provider == 
'database': - playlist = await self.mass.db.playlist(playlist_id) - if playlist and playlist.is_editable: - # database synced playlist, return tracks from db... - return await self.mass.db.playlist_tracks(playlist.item_id, offset=offset, limit=limit) - else: - # return playlist tracks from provider - items = [] - playlist = await self.playlist(playlist_id) - for prov_mapping in playlist.provider_ids: - prov_id = prov_mapping['provider'] - prov_item_id = prov_mapping['item_id'] - prov_obj = self.providers[prov_id] - items += await prov_obj.playlist_tracks(prov_item_id, offset=offset, limit=limit) - if items: - break - items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) - return items - - async def search(self, searchquery, media_types:List[MediaType], limit=10, online=False): - ''' search database or providers ''' - # get results from database - result = await self.mass.db.search(searchquery, media_types, limit) - if online: - # include results from music providers - for prov in self.providers.values(): - prov_results = await prov.search(searchquery, media_types, limit) - for item_type, items in prov_results.items(): - result[item_type] += items - # filter out duplicates - for item_type, items in result.items(): - items = list(toolz.unique(items, key=operator.attrgetter('item_id'))) - return result - - async def item_action(self, item_id, media_type, provider='database', action=None): - ''' perform action on item (such as library add/remove) ''' - result = None - item = await self.item(item_id, media_type, provider) - if item and action in ['add', 'remove']: - # remove or add item to the library - for prov_mapping in result.provider_ids: - prov_id = prov_mapping['provider'] - prov_item_id = prov_mapping['item_id'] - for prov in self.providers.values(): - if prov.prov_id == prov_id: - if action == 'add': - result = await prov.add_library(prov_item_id, media_type) - elif action == 'remove': - result = await prov.remove_library(prov_item_id, 
media_type) - return result - - @run_periodic(3600) - async def sync_music_providers(self): - ''' periodic sync of all music providers ''' - if self.sync_running: - return - self.sync_running = True - for prov_id in self.providers.keys(): - # sync library artists - await self.sync_library_artists(prov_id) - await self.sync_library_albums(prov_id) - await self.sync_library_tracks(prov_id) - await self.sync_playlists(prov_id) - self.sync_running = False - - async def sync_library_artists(self, prov_id): - ''' sync library artists for given provider''' - music_provider = self.providers[prov_id] - prev_items = await self.library_artists(provider_filter=prov_id) - prev_db_ids = [item.item_id for item in prev_items] - cur_items = await music_provider.get_library_artists() - cur_db_ids = [] - for item in cur_items: - db_item = await music_provider.artist(item.item_id, lazy=False) - cur_db_ids.append(db_item.item_id) - if not db_item.item_id in prev_db_ids: - await self.mass.db.add_to_library(db_item.item_id, MediaType.Artist, prov_id) - # process deletions - for db_id in prev_db_ids: - if db_id not in cur_db_ids: - await self.mass.db.remove_from_library(db_id, MediaType.Artist, prov_id) - LOGGER.info("Finished syncing Artists for provider %s" % prov_id) - - async def sync_library_albums(self, prov_id): - ''' sync library albums for given provider''' - music_provider = self.providers[prov_id] - prev_items = await self.library_albums(provider_filter=prov_id) - prev_db_ids = [item.item_id for item in prev_items] - cur_items = await music_provider.get_library_albums() - cur_db_ids = [] - for item in cur_items: - db_item = await music_provider.album(item.item_id, lazy=False) - cur_db_ids.append(db_item.item_id) - # precache album tracks... 
- for album_track in await music_provider.get_album_tracks(item.item_id): - await music_provider.track(album_track.item_id) - if not db_item.item_id in prev_db_ids: - await self.mass.db.add_to_library(db_item.item_id, MediaType.Album, prov_id) - # process deletions - for db_id in prev_db_ids: - if db_id not in cur_db_ids: - await self.mass.db.remove_from_library(db_id, MediaType.Album, prov_id) - LOGGER.info("Finished syncing Albums for provider %s" % prov_id) - - async def sync_library_tracks(self, prov_id): - ''' sync library tracks for given provider''' - music_provider = self.providers[prov_id] - prev_items = await self.library_tracks(provider_filter=prov_id) - prev_db_ids = [item.item_id for item in prev_items] - cur_items = await music_provider.get_library_tracks() - cur_db_ids = [] - for item in cur_items: - db_item = await music_provider.track(item.item_id, lazy=False) - cur_db_ids.append(db_item.item_id) - if not db_item.item_id in prev_db_ids: - await self.mass.db.add_to_library(db_item.item_id, MediaType.Track, prov_id) - # process deletions - for db_id in prev_db_ids: - if db_id not in cur_db_ids: - await self.mass.db.remove_from_library(db_id, MediaType.Track, prov_id) - LOGGER.info("Finished syncing Tracks for provider %s" % prov_id) - - async def sync_playlists(self, prov_id): - ''' sync library playlists for given provider''' - music_provider = self.providers[prov_id] - prev_items = await self.playlists(provider_filter=prov_id) - prev_db_ids = [item.item_id for item in prev_items] - cur_items = await music_provider.get_playlists() - cur_db_ids = [] - for item in cur_items: - # always add to db because playlist attributes could have changed - db_id = await self.mass.db.add_playlist(item) - cur_db_ids.append(db_id) - if not db_id in prev_db_ids: - await self.mass.db.add_to_library(db_id, MediaType.Playlist, prov_id) - if item.is_editable: - # precache/sync playlist tracks (user owned playlists only) - asyncio.create_task( 
self.sync_playlist_tracks(db_id, prov_id, item.item_id) ) - # process playlist deletions - for db_id in prev_db_ids: - if db_id not in cur_db_ids: - await self.mass.db.remove_from_library(db_id, MediaType.Playlist, prov_id) - LOGGER.info("Finished syncing Playlists for provider %s" % prov_id) - - async def sync_playlist_tracks(self, db_playlist_id, prov_id, prov_playlist_id): - ''' sync library playlists tracks for given provider''' - music_provider = self.providers[prov_id] - prev_items = await self.playlist_tracks(db_playlist_id) - prev_db_ids = [item.item_id for item in prev_items] - cur_items = await music_provider.get_playlist_tracks(prov_playlist_id, limit=0) - cur_db_ids = [] - pos = 0 - for item in cur_items: - # we need to do this the complicated way because the file provider can return tracks from other providers - for prov_mapping in item.provider_ids: - item_provider = prov_mapping['provider'] - prov_item_id = prov_mapping['item_id'] - db_item = await self.providers[item_provider].track(prov_item_id, lazy=False) - cur_db_ids.append(db_item.item_id) - if not db_item.item_id in prev_db_ids: - await self.mass.db.add_playlist_track(db_playlist_id, db_item.item_id, pos) - pos += 1 - # process playlist track deletions - for db_id in prev_db_ids: - if db_id not in cur_db_ids: - await self.mass.db.remove_playlist_track(db_playlist_id, db_id) - LOGGER.info("Finished syncing Playlist %s tracks for provider %s" % (prov_playlist_id, prov_id)) - - def load_music_providers(self): - ''' dynamically load musicproviders ''' - for item in os.listdir(MODULES_PATH): - if (os.path.isfile(os.path.join(MODULES_PATH, item)) and not item.startswith("_") and - item.endswith('.py') and not item.startswith('.')): - module_name = item.replace(".py","") - LOGGER.debug("Loading musicprovider module %s" % module_name) - try: - mod = __import__("modules.musicproviders." 
+ module_name, fromlist=['']) - if not self.mass.config['musicproviders'].get(module_name): - self.mass.config['musicproviders'][module_name] = {} - self.mass.config['musicproviders'][module_name]['__desc__'] = mod.config_entries() - for key, def_value, desc in mod.config_entries(): - if not key in self.mass.config['musicproviders'][module_name]: - self.mass.config['musicproviders'][module_name][key] = def_value - mod = mod.setup(self.mass) - if mod: - self.providers[mod.prov_id] = mod - cls_name = mod.__class__.__name__ - LOGGER.info("Successfully initialized module %s" % cls_name) - except Exception as exc: - LOGGER.exception("Error loading module %s: %s" %(module_name, exc)) diff --git a/music_assistant/player.py b/music_assistant/player.py deleted file mode 100755 index 54d3959d..00000000 --- a/music_assistant/player.py +++ /dev/null @@ -1,354 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding:utf-8 -*- - -import asyncio -import os -from utils import run_periodic, LOGGER, try_parse_int, try_parse_float -import aiohttp -from difflib import SequenceMatcher as Matcher -from models import MediaType, PlayerState, MusicPlayer -from typing import List -import toolz -import operator -import socket -import random -from copy import deepcopy -import functools - -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) -MODULES_PATH = os.path.join(BASE_DIR, "modules", "playerproviders" ) - -class Player(): - ''' several helpers to handle playback through player providers ''' - - def __init__(self, mass): - self.mass = mass - self.providers = {} - self._players = {} - self.create_config_entries() - # dynamically load provider modules - self.load_providers() - - def create_config_entries(self): - ''' sets the config entries for this module (list with key/value pairs)''' - self.mass.config['player_settings']['__desc__'] = [ - ("enabled", False, "player_enabled"), - ("name", "", "player_name"), - ("group_parent", "", "player_group_with"), - ("mute_as_power", False, 
"player_mute_power"), - ("disable_volume", False, "player_disable_vol"), - ("apply_group_volume", False, "player_group_vol"), - ("apply_group_power", False, "player_group_pow"), - ("play_power_on", False, "player_power_play") - ] - - async def players(self): - ''' return all players ''' - items = list(self._players.values()) - items.sort(key=lambda x: x.name, reverse=False) - return items - - async def player(self, player_id): - ''' return players by id ''' - return self._players[player_id] - - async def player_command(self, player_id, cmd, cmd_args=None): - ''' issue command on player (play, pause, next, previous, stop, power, volume, mute) ''' - if player_id not in self._players: - return - player = self._players[player_id] - # handle some common workarounds - if cmd in ['pause', 'play'] and cmd_args == 'toggle': - cmd = 'pause' if player.state == PlayerState.Playing else 'play' - if cmd == 'power' and cmd_args == 'toggle': - cmd_args = 'off' if player.powered else 'on' - if cmd == 'volume' and cmd_args == 'up': - cmd_args = player.volume_level + 2 - elif cmd == 'volume' and cmd_args == 'down': - cmd_args = player.volume_level - 2 - # redirect playlist related commands to parent player - if player.group_parent and cmd not in ['power', 'volume', 'mute']: - return await self.player_command(player.group_parent, cmd, cmd_args) - # handle hass integration - await self.__player_command_hass_integration(player, cmd, cmd_args) - # handle mute as power - if cmd == 'power' and player.settings['mute_as_power']: - cmd = 'mute' - cmd_args = 'on' if cmd_args == 'off' else 'off' # invert logic (power ON is mute OFF) - # handle group volume for group players - player_childs = [item for item in self._players.values() if item.group_parent == player_id] - if player.is_group and cmd == 'volume' and player.settings['apply_group_volume']: - return await self.__player_command_group_volume(player, player_childs, cmd_args) - if player.is_group and cmd == 'power' and cmd_args == 'off': - 
for item in player_childs: - asyncio.create_task(self.player_command(item.player_id, cmd, cmd_args)) - # normal execution of command on player - prov_id = self._players[player_id].player_provider - prov = self.providers[prov_id] - await prov.player_command(player_id, cmd, cmd_args) - # handle play on power on - if cmd == 'power' and cmd_args == 'on' and player.settings['play_power_on']: - LOGGER.info('play_power_on %s' % player.name) - await prov.player_command(player_id, 'play') - - async def __player_command_hass_integration(self, player, cmd, cmd_args): - ''' handle hass integration in player command ''' - if not self.mass.hass: - return - if cmd == 'power' and player.settings.get('hass_power_entity') and player.settings.get('hass_power_entity_source'): - cur_source = await self.mass.hass.get_state(player.settings['hass_power_entity'], attribute='source') - if cmd_args == 'on' and not cur_source: - service_data = { 'entity_id': player.settings['hass_power_entity'], 'source':player.settings['hass_power_entity_source'] } - await self.mass.hass.call_service('media_player', 'select_source', service_data) - elif cmd_args == 'off' and cur_source == player.settings['hass_power_entity_source']: - service_data = { 'entity_id': player.settings['hass_power_entity'] } - await self.mass.hass.call_service('media_player', 'turn_off', service_data) - else: - LOGGER.warning('Ignoring power command as required source is not active') - elif cmd == 'power' and player.settings.get('hass_power_entity'): - domain = player.settings['hass_power_entity'].split('.')[0] - service_data = { 'entity_id': player.settings['hass_power_entity']} - await self.mass.hass.call_service(domain, 'turn_%s' % cmd_args, service_data) - if cmd == 'volume' and player.settings.get('hass_volume_entity'): - service_data = { 'entity_id': player.settings['hass_power_entity'], 'volume_level': int(cmd_args)/100} - await self.mass.hass.call_service('media_player', 'volume_set', service_data) - cmd_args = 100 # just 
force full volume on actual player if volume is outsourced to hass - - async def __player_command_group_volume(self, player, player_childs, cmd_args): - ''' handle group volume if needed''' - cur_volume = player.volume_level - new_volume = try_parse_int(cmd_args) - volume_dif = new_volume - cur_volume - volume_dif_percent = volume_dif/cur_volume - for child_player in player_childs: - if child_player.enabled and child_player.powered: - cur_child_volume = child_player.volume_level - new_child_volume = cur_child_volume + (cur_child_volume * volume_dif_percent) - child_player.volume_level = new_child_volume - await self.player_command(child_player.player_id, 'volume', new_child_volume) - player.volume_level = new_volume - return True - - async def remove_player(self, player_id): - ''' handle a player remove ''' - self._players.pop(player_id, None) - asyncio.ensure_future(self.mass.event('player removed', player_id)) - - async def trigger_update(self, player_id): - ''' manually trigger update for a player ''' - await self.update_player(self._players[player_id]) - - async def update_player(self, player_details): - ''' update (or add) player ''' - player_details = deepcopy(player_details) - LOGGER.debug('Incoming msg from %s' % player_details.name) - player_id = player_details.player_id - player_changed = False - if not player_id in self._players: - # first message from player - self._players[player_id] = MusicPlayer() - player = self._players[player_id] - player.player_id = player_id - player.player_provider = player_details.player_provider - player_changed = True - else: - player = self._players[player_id] - player.settings = await self.__get_player_settings(player_id) - # handle basic player settings - player_details.enabled = player.settings['enabled'] - player_details.name = player.settings['name'] if player.settings['name'] else player_details.name - player_details.group_parent = player.settings['group_parent'] if player.settings['group_parent'] else 
player_details.group_parent - # handle hass integration - await self.__update_player_hass_settings(player_details, player.settings) - # handle mute as power setting - if player.settings['mute_as_power']: - player_details.powered = not player_details.muted - # combine state of group parent - if player_details.group_parent and player_details.group_parent in self._players: - parent_player = self._players[player_details.group_parent] - player_details.cur_item_time = parent_player.cur_item_time - player_details.cur_item = parent_player.cur_item - player_details.state = parent_player.state - # handle group volume/power setting - player_childs = [item for item in self._players.values() if item.group_parent == player_id] - player_details.is_group = len(player_childs) > 0 - if player_details.is_group and player.settings['apply_group_volume']: - await self.__update_player_group_volume(player_details, player_childs) - if player_details.is_group and player.settings['apply_group_power']: - await self.__update_player_group_power(player_details, player_childs) - # compare values to detect changes - for key, cur_value in player.__dict__.items(): - if key != 'settings': - new_value = getattr(player_details, key) - if new_value != cur_value: - player_changed = True - setattr(player, key, new_value) - LOGGER.debug('key changed: %s for player %s - new value: %s' % (key, player.name, new_value)) - if player_changed: - # player is added or updated! 
- asyncio.ensure_future(self.mass.event('player updated', player)) - # is groupplayer, trigger update of its childs - for child in player_childs: - asyncio.create_task(self.trigger_update(child.player_id)) - # if child player in a group, trigger update of parent - if player.group_parent: - asyncio.create_task(self.trigger_update(player.group_parent)) - - async def __update_player_hass_settings(self, player_details, player_settings): - ''' handle home assistant integration on a player ''' - if not self.mass.hass: - return - player_id = player_details.player_id - player_settings = self.mass.config['player_settings'][player_id] - if player_settings.get('hass_power_entity') and player_settings.get('hass_power_entity_source'): - hass_state = await self.mass.hass.get_state( - player_settings['hass_power_entity'], - attribute='source', - register_listener=functools.partial(self.trigger_update, player_id)) - player_details.powered = hass_state == player_settings['hass_power_entity_source'] - elif player_settings.get('hass_power_entity'): - hass_state = await self.mass.hass.get_state( - player_settings['hass_power_entity'], - attribute='state', - register_listener=functools.partial(self.trigger_update, player_id)) - player_details.powered = hass_state != 'off' - if player_settings.get('hass_volume_entity'): - hass_state = await self.mass.hass.get_state( - player_settings['hass_volume_entity'], - attribute='volume_level', - register_listener=functools.partial(self.trigger_update, player_id)) - player_details.volume_level = int(try_parse_float(hass_state)*100) - - async def __update_player_group_volume(self, player_details, player_childs): - ''' handle group volume ''' - group_volume = 0 - active_players = 0 - for child_player in player_childs: - if child_player.enabled and child_player.powered: - group_volume += child_player.volume_level - active_players += 1 - group_volume = group_volume / active_players if active_players else 0 - player_details.volume_level = group_volume 
- - async def __update_player_group_power(self, player_details, player_childs): - ''' handle group power ''' - player_powered = False - for child_player in player_childs: - if child_player.powered: - player_powered = True - break - if player_details.powered and not player_powered: - # all childs turned off so turn off group player - LOGGER.info('all childs turned off so turn off group player %s' % player_details.name) - await self. player_command(player_details.player_id, 'power', 'off') - player_details.powered = False - elif not player_details.powered and player_powered: - # all childs turned off but group player still off, so turn it on - LOGGER.info('all childs turned off but group player still off, so turn it on %s' % player_details.name) - await self. player_command(player_details.player_id, 'power', 'on') - player_details.powered = True - - async def __get_player_settings(self, player_id): - ''' get (or create) player config ''' - player_settings = self.mass.config['player_settings'].get(player_id,{}) - for key, def_value, desc in self.mass.config['player_settings']['__desc__']: - if not key in player_settings: - player_settings[key] = def_value - self.mass.config['player_settings'][player_id] = player_settings - return player_settings - - async def play_media(self, player_id, media_item, queue_opt='play'): - ''' - play media on a player - player_id: id of the player - media_item: media item(s) that should be played (Track, Album, Artist, Playlist) - queue_opt: play, replace, next or add - ''' - if not player_id in self._players: - LOGGER.warning('Player %s not found' % player_id) - return False - player_prov = self.providers[self._players[player_id].player_provider] - # a single item or list of items may be provided - media_items = media_item if isinstance(media_item, list) else [media_item] - playable_tracks = [] - for media_item in media_items: - # collect tracks to play - if media_item.media_type == MediaType.Artist: - tracks = await 
self.mass.music.artist_toptracks(media_item.item_id, provider=media_item.provider) - elif media_item.media_type == MediaType.Album: - tracks = await self.mass.music.album_tracks(media_item.item_id, provider=media_item.provider) - elif media_item.media_type == MediaType.Playlist: - tracks = await self.mass.music.playlist_tracks(media_item.item_id, provider=media_item.provider, offset=0, limit=0) - else: - tracks = [media_item] # single track - # check supported music providers by this player and work out how to handle playback... - for track in tracks: - # sort by quality - match_found = False - for prov_media in sorted(track.provider_ids, key=operator.itemgetter('quality'), reverse=True): - media_provider = prov_media['provider'] - media_item_id = prov_media['item_id'] - player_supported_provs = player_prov.supported_musicproviders - if media_provider in player_supported_provs: - # the provider can handle this media_type directly ! - track.uri = await self.get_track_uri(media_item_id, media_provider) - playable_tracks.append(track) - match_found = True - elif 'http' in player_prov.supported_musicproviders: - # fallback to http streaming if supported - track.uri = await self.get_track_uri(media_item_id, media_provider, True) - playable_tracks.append(track) - match_found = True - if match_found: - break - if playable_tracks: - if self._players[player_id].shuffle_enabled: - random.shuffle(playable_tracks) - if queue_opt in ['next', 'play'] and len(playable_tracks) > 1: - queue_opt = 'replace' # always assume playback of multiple items as new queue - return await player_prov.play_media(player_id, playable_tracks, queue_opt) - else: - raise Exception("Musicprovider and/or media not supported by player %s !" 
% (player_id) ) - - async def get_track_uri(self, item_id, provider, http_stream=False): - ''' generate the URL/URI for a media item ''' - uri = "" - if http_stream: - host = socket.gethostbyname(socket.gethostname()) - uri = 'http://%s:8095/stream/%s/%s'% (host, provider, item_id) - elif provider == "spotify": - uri = 'spotify://spotify:track:%s' % item_id - elif provider == "qobuz": - uri = 'qobuz://%s.flac' % item_id - elif provider == "file": - uri = 'file://%s' % item_id - return uri - - async def player_queue(self, player_id, offset=0, limit=50): - ''' return the items in the player's queue ''' - player = self._players[player_id] - player_prov = self.providers[player.player_provider] - return await player_prov.player_queue(player_id, offset=offset, limit=limit) - - def load_providers(self): - ''' dynamically load providers ''' - for item in os.listdir(MODULES_PATH): - if (os.path.isfile(os.path.join(MODULES_PATH, item)) and not item.startswith("_") and - item.endswith('.py') and not item.startswith('.')): - module_name = item.replace(".py","") - LOGGER.debug("Loading playerprovider module %s" % module_name) - try: - mod = __import__("modules.playerproviders." 
+ module_name, fromlist=['']) - if not self.mass.config['playerproviders'].get(module_name): - self.mass.config['playerproviders'][module_name] = {} - self.mass.config['playerproviders'][module_name]['__desc__'] = mod.config_entries() - for key, def_value, desc in mod.config_entries(): - if not key in self.mass.config['playerproviders'][module_name]: - self.mass.config['playerproviders'][module_name][key] = def_value - mod = mod.setup(self.mass) - if mod: - self.providers[mod.prov_id] = mod - cls_name = mod.__class__.__name__ - LOGGER.info("Successfully initialized module %s" % cls_name) - except Exception as exc: - LOGGER.exception("Error loading module %s: %s" %(module_name, exc)) diff --git a/music_assistant/utils.py b/music_assistant/utils.py index d6f2c1b3..3c75c320 100755 --- a/music_assistant/utils.py +++ b/music_assistant/utils.py @@ -92,3 +92,6 @@ def parse_track_title(track_title): version = version.strip().title() return title, version +async def kill_proc(proc): + proc.kill() + await proc.communicate() \ No newline at end of file diff --git a/music_assistant/web/components/listviewItem.vue.js b/music_assistant/web/components/listviewItem.vue.js index ff05edac..687c69c9 100755 --- a/music_assistant/web/components/listviewItem.vue.js +++ b/music_assistant/web/components/listviewItem.vue.js @@ -49,8 +49,8 @@ Vue.component("listviewItem", { favorite_border - Item is added to the library - Add item to the library + {{ $t('remove_library') }} + {{ $t('add_library') }} diff --git a/music_assistant/web/components/playmenu.vue.js b/music_assistant/web/components/playmenu.vue.js index d1a84d2c..cbe4433b 100644 --- a/music_assistant/web/components/playmenu.vue.js +++ b/music_assistant/web/components/playmenu.vue.js @@ -63,7 +63,7 @@ Vue.component("playmenu", { methods: { itemClick(cmd) { if (cmd == 'info') - this.$router.push({ path: '/tracks/' + this.$globals.playmenuitem.item_id, params: {provider: this.$globals.playmenuitem.provider}}) + this.$router.push({ 
path: '/tracks/' + this.$globals.playmenuitem.item_id, query: {provider: this.$globals.playmenuitem.provider}}) else this.$emit('playItem', this.$globals.playmenuitem, cmd) // close dialog diff --git a/music_assistant/web/css/vue-loading.css b/music_assistant/web/css/vue-loading.css new file mode 100644 index 00000000..6d62f807 --- /dev/null +++ b/music_assistant/web/css/vue-loading.css @@ -0,0 +1,36 @@ +.vld-overlay { + bottom: 0; + left: 0; + position: absolute; + right: 0; + top: 0; + align-items: center; + display: none; + justify-content: center; + overflow: hidden; + z-index: 1 +} + +.vld-overlay.is-active { + display: flex +} + +.vld-overlay.is-full-page { + z-index: 999; + position: fixed +} + +.vld-overlay .vld-background { + bottom: 0; + left: 0; + position: absolute; + right: 0; + top: 0; + background: #000; + opacity: 0.7 +} + +.vld-overlay .vld-icon, .vld-parent { + position: relative +} + diff --git a/music_assistant/web/index.html b/music_assistant/web/index.html index ed1448b2..6df94daa 100755 --- a/music_assistant/web/index.html +++ b/music_assistant/web/index.html @@ -12,6 +12,7 @@ + @@ -24,30 +25,11 @@ - - - - Please stand by - - - - + - @@ -56,7 +38,7 @@ - +