+++ /dev/null
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-
-'''provides a simple stateless caching system'''
-
-import datetime
-import time
-import sqlite3
-from functools import reduce
-import os
-from utils import run_periodic, LOGGER, parse_track_title
-import functools
-import asyncio
-
-
-class Cache(object):
- '''basic stateless caching system '''
- _exit = False
- _mem_cache = {}
- _busy_tasks = []
- _database = None
-
- def __init__(self, datapath):
- '''Initialize our caching class'''
- self._datapath = datapath
- asyncio.ensure_future(self._do_cleanup())
- LOGGER.debug("Initialized")
-
- async def get(self, endpoint, checksum=""):
- '''
- get object from cache and return the results
- endpoint: the (unique) name of the cache object as reference
- checkum: optional argument to check if the checksum in the cacheobject matches the checkum provided
- '''
- checksum = self._get_checksum(checksum)
- cur_time = self._get_timestamp(datetime.datetime.now())
- result = None
- # 1: try memory cache first
- result = await self._get_mem_cache(endpoint, checksum, cur_time)
- # 2: fallback to _database cache
- if result is None:
- result = await self._get_db_cache(endpoint, checksum, cur_time)
- return result
-
- async def set(self, endpoint, data, checksum="", expiration=datetime.timedelta(days=14)):
- '''
- set data in cache
- '''
- task_name = "set.%s" % endpoint
- self._busy_tasks.append(task_name)
- checksum = self._get_checksum(checksum)
- expires = self._get_timestamp(datetime.datetime.now() + expiration)
-
- # memory cache
- await self._set_mem_cache(endpoint, checksum, expires, data)
-
- # db cache
- if not self._exit:
- await self._set_db_cache(endpoint, checksum, expires, data)
-
- # remove this task from list
- self._busy_tasks.remove(task_name)
-
- async def _get_mem_cache(self, endpoint, checksum, cur_time):
- '''
- get cache data from memory cache
- '''
- result = None
- cachedata = self._mem_cache.get(endpoint)
- if cachedata:
- cachedata = cachedata
- if cachedata[0] > cur_time:
- if checksum == None or checksum == cachedata[2]:
- result = cachedata[1]
- return result
-
- async def _set_mem_cache(self, endpoint, checksum, expires, data):
- '''
- put data in memory cache
- '''
- cachedata = (expires, data, checksum)
- self._mem_cache[endpoint] = cachedata
-
- async def _get_db_cache(self, endpoint, checksum, cur_time):
- '''get cache data from sqllite database'''
- result = None
- query = "SELECT expires, data, checksum FROM simplecache WHERE id = ?"
- cache_data = self._execute_sql(query, (endpoint,))
- if cache_data:
- cache_data = cache_data.fetchone()
- if cache_data and cache_data[0] > cur_time:
- if checksum == None or cache_data[2] == checksum:
- result = eval(cache_data[1])
- # also set result in memory cache for further access
- await self._set_mem_cache(endpoint, checksum, cache_data[0], result)
- return result
-
- async def _set_db_cache(self, endpoint, checksum, expires, data):
- ''' store cache data in _database '''
- query = "INSERT OR REPLACE INTO simplecache( id, expires, data, checksum) VALUES (?, ?, ?, ?)"
- data = repr(data)
- self._execute_sql(query, (endpoint, expires, data, checksum))
-
- @run_periodic(3600)
- async def _do_cleanup(self):
- '''perform cleanup task'''
- if self._exit:
- return
- self._busy_tasks.append(__name__)
- cur_time = datetime.datetime.now()
- cur_timestamp = self._get_timestamp(cur_time)
- LOGGER.debug("Running cleanup...")
- query = "SELECT id, expires FROM simplecache"
- for cache_data in self._execute_sql(query).fetchall():
- cache_id = cache_data[0]
- cache_expires = cache_data[1]
- if self._exit:
- return
- # always cleanup all memory objects on each interval
- self._mem_cache.pop(cache_id, None)
- # clean up db cache object only if expired
- if cache_expires < cur_timestamp:
- query = 'DELETE FROM simplecache WHERE id = ?'
- self._execute_sql(query, (cache_id,))
- LOGGER.debug("delete from db %s" % cache_id)
-
- # compact db
- self._execute_sql("VACUUM")
-
- # remove task from list
- self._busy_tasks.remove(__name__)
- LOGGER.debug("Auto cleanup done")
-
- def _get_database(self):
- '''get reference to our sqllite _database - performs basic integrity check'''
- dbfile = os.path.join(self._datapath, "simplecache.db")
- try:
- connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
- connection.execute('SELECT * FROM simplecache LIMIT 1')
- return connection
- except Exception as error:
- # our _database is corrupt or doesn't exist yet, we simply try to recreate it
- if os.path.isfile(dbfile):
- os.remove(dbfile)
- try:
- connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
- connection.execute(
- """CREATE TABLE IF NOT EXISTS simplecache(
- id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""")
- return connection
- except Exception as error:
- LOGGER.warning("Exception while initializing _database: %s" % str(error))
- return None
-
- def _execute_sql(self, query, data=None):
- '''little wrapper around execute and executemany to just retry a db command if db is locked'''
- retries = 0
- result = None
- error = None
- # always use new db object because we need to be sure that data is available for other simplecache instances
- with self._get_database() as _database:
- while not retries == 10:
- if self._exit:
- return None
- try:
- if isinstance(data, list):
- result = _database.executemany(query, data)
- elif data:
- result = _database.execute(query, data)
- else:
- result = _database.execute(query)
- return result
- except sqlite3.OperationalError as error:
- if "_database is locked" in error:
- LOGGER.debug("retrying DB commit...")
- retries += 1
- time.sleep(0.5)
- else:
- break
- except Exception as error:
- LOGGER.error("_database ERROR ! -- %s" % str(error))
- break
- return None
-
- @staticmethod
- def _get_timestamp(date_time):
- '''Converts a datetime object to unix timestamp'''
- return int(time.mktime(date_time.timetuple()))
-
- @staticmethod
- def _get_checksum(stringinput):
- '''get int checksum from string'''
- if not stringinput:
- return 0
- else:
- stringinput = str(stringinput)
- return reduce(lambda x, y: x + y, map(ord, stringinput))
-
-def use_cache(cache_days=14, cache_hours=8):
- def wrapper(func):
- @functools.wraps(func)
- async def wrapped(*args, **kwargs):
- if kwargs.get("ignore_cache"):
- return await func(*args, **kwargs)
- cache_checksum = kwargs.get("cache_checksum")
- method_class = args[0]
- method_class_name = method_class.__class__.__name__
- cache_str = "%s.%s" % (method_class_name, func.__name__)
- # append args to cache identifier
- for item in args[1:]:
- if isinstance(item, dict):
- for subkey in sorted(list(item.keys())):
- subvalue = item[subkey]
- cache_str += ".%s%s" %(subkey,subvalue)
- else:
- cache_str += ".%s" % item
- # append kwargs to cache identifier
- for key in sorted(list(kwargs.keys())):
- if key in ["ignore_cache", "cache_checksum"]:
- continue
- value = kwargs[key]
- if isinstance(value, dict):
- for subkey in sorted(list(value.keys())):
- subvalue = value[subkey]
- cache_str += ".%s%s" %(subkey,subvalue)
- else:
- cache_str += ".%s%s" %(key,value)
- cache_str = cache_str.lower()
- cachedata = await method_class.cache.get(cache_str, checksum=cache_checksum)
- if cachedata is not None:
- return cachedata
- else:
- result = await func(*args, **kwargs)
- await method_class.cache.set(cache_str, result, checksum=cache_checksum, expiration=datetime.timedelta(days=cache_days, hours=cache_hours))
- return result
- return wrapped
- return wrapper
if media_type == MediaType.Playlist:
sql_query = 'DELETE FROM playlist_tracks WHERE playlist_id=?;'
await db.execute(sql_query, (item_id,))
- if media_type == MediaType.Playlist:
sql_query = 'DELETE FROM playlists WHERE playlist_id=?;'
await db.execute(sql_query, (item_id,))
+ sql_query = 'DELETE FROM provider_mappings WHERE item_id=? AND media_type=? AND provider=?;'
+        await db.execute(sql_query, (item_id, media_type, provider))
await db.commit()
async def artists(self, filter_query=None, limit=100000, offset=0, orderby='name', fulldata=False) -> List[Artist]:
import time
from database import Database
-from metadata import MetaData
from utils import run_periodic, LOGGER
-from cache import Cache
-from music import Music
-from player import Player
+from modules.metadata import MetaData
+from modules.cache import Cache
+from modules.music import Music
+from modules.player import Player
from modules.homeassistant import setup as hass_setup
from modules.web import setup as web_setup
def __init__(self, datapath):
uvloop.install()
- self._datapath = datapath
+ self.datapath = datapath
self.parse_config()
self.event_loop = asyncio.get_event_loop()
self.bg_executor = ThreadPoolExecutor(max_workers=5)
def save_config(self):
''' save config to file '''
# backup existing file
- conf_file = os.path.join(self._datapath, 'config.json')
- conf_file_backup = os.path.join(self._datapath, 'config.json')
+ conf_file = os.path.join(self.datapath, 'config.json')
+        conf_file_backup = os.path.join(self.datapath, 'config.json.backup')
if os.path.isfile(conf_file):
shutil.move(conf_file, conf_file_backup)
with open(conf_file, 'w') as f:
"playerproviders": {},
"player_settings": {}
}
- conf_file = os.path.join(self._datapath, 'config.json')
+ conf_file = os.path.join(self.datapath, 'config.json')
if os.path.isfile(conf_file):
with open(conf_file) as f:
data = f.read()
+++ /dev/null
-#!/usr/bin/env python3
-# -*- coding:utf-8 -*-
-
-import asyncio
-import os
-from utils import run_periodic, LOGGER
-import json
-import aiohttp
-from asyncio_throttle import Throttler
-from difflib import SequenceMatcher as Matcher
-from cache import use_cache
-from yarl import URL
-import re
-
-LUCENE_SPECIAL = r'([+\-&|!(){}\[\]\^"~*?:\\\/])'
-
-class MetaData():
- ''' several helpers to search and store mediadata for mediaitems '''
-
- def __init__(self, event_loop, db, cache):
- self.event_loop = event_loop
- self.db = db
- self.cache = cache
- self.musicbrainz = MusicBrainz(event_loop, cache)
- self.fanarttv = FanartTv(event_loop, cache)
-
- async def get_artist_metadata(self, mb_artist_id, cur_metadata):
- ''' get/update rich metadata for an artist by providing the musicbrainz artist id '''
- metadata = cur_metadata
- if not ('fanart' in metadata or 'thumb' in metadata):
- res = await self.fanarttv.artist_images(mb_artist_id)
- self.merge_metadata(cur_metadata, res)
- return metadata
-
- async def get_mb_artist_id(self, artistname, albumname=None, album_upc=None, trackname=None, track_isrc=None):
- ''' retrieve musicbrainz artist id for the given details '''
- LOGGER.debug('searching musicbrainz for %s (albumname: %s - album_upc: %s - trackname: %s - track_isrc: %s)' %(artistname, albumname, album_upc, trackname, track_isrc))
- mb_artist_id = None
- if album_upc:
- mb_artist_id = await self.musicbrainz.search_artist_by_album(artistname, None, album_upc)
- if not mb_artist_id and track_isrc:
- mb_artist_id = await self.musicbrainz.search_artist_by_track(artistname, None, track_isrc)
- if not mb_artist_id and albumname:
- mb_artist_id = await self.musicbrainz.search_artist_by_album(artistname, albumname)
- if not mb_artist_id and trackname:
- mb_artist_id = await self.musicbrainz.search_artist_by_track(artistname, trackname)
- LOGGER.debug('Got musicbrainz artist id for artist %s --> %s' %(artistname, mb_artist_id))
- return mb_artist_id
-
- @staticmethod
- def merge_metadata(cur_metadata, new_values):
- ''' merge new info into the metadata dict without overwiteing existing values '''
- for key, value in new_values.items():
- if not cur_metadata.get(key):
- cur_metadata[key] = value
- return cur_metadata
-
-class MusicBrainz():
-
- def __init__(self, event_loop, cache):
- self.event_loop = event_loop
- self.cache = cache
- self.http_session = aiohttp.ClientSession(loop=event_loop, connector=aiohttp.TCPConnector(verify_ssl=False))
- self.throttler = Throttler(rate_limit=1, period=1)
-
- async def search_artist_by_album(self, artistname, albumname=None, album_upc=None):
- ''' retrieve musicbrainz artist id by providing the artist name and albumname or upc '''
- if album_upc:
- endpoint = 'release'
- params = {'query': 'barcode:%s' % album_upc}
- else:
- searchartist = re.sub(LUCENE_SPECIAL, r'\\\1', artistname)
- searchartist = searchartist.replace('/','').replace('\\','')
- searchalbum = re.sub(LUCENE_SPECIAL, r'\\\1', albumname)
- endpoint = 'release'
- params = {'query': 'artist:"%s" AND release:"%s"' % (searchartist, searchalbum)}
- result = await self.get_data(endpoint, params)
- if result and result.get('releases'):
- for strictness in [1, 0.95, 0.9]:
- for item in result['releases']:
- if album_upc or Matcher(None, item['title'].lower(), albumname.lower()).ratio() >= strictness:
- for artist in item['artist-credit']:
- artist = artist['artist']
- if Matcher(None, artist['name'].lower(), artistname.lower()).ratio() >= strictness:
- return artist['id']
- for item in artist.get('aliases',[]):
- if item['name'].lower() == artistname.lower():
- return artist['id']
- return ''
-
- async def search_artist_by_track(self, artistname, trackname=None, track_isrc=None):
- ''' retrieve artist id by providing the artist name and trackname or track isrc '''
- endpoint = 'recording'
- searchartist = re.sub(LUCENE_SPECIAL, r'\\\1', artistname)
- searchartist = searchartist.replace('/','').replace('\\','')
- if track_isrc:
- endpoint = 'isrc/%s' % track_isrc
- params = {'inc': 'artist-credits'}
- else:
- searchtrack = re.sub(LUCENE_SPECIAL, r'\\\1', trackname)
- endpoint = 'recording'
- params = {'query': '"%s" AND artist:"%s"' % (searchtrack, searchartist)}
- result = await self.get_data(endpoint, params)
- if result and result.get('recordings'):
- for strictness in [1, 0.95]:
- for item in result['recordings']:
- if track_isrc or Matcher(None, item['title'].lower(), trackname.lower()).ratio() >= strictness:
- for artist in item['artist-credit']:
- artist = artist['artist']
- if Matcher(None, artist['name'].lower(), artistname.lower()).ratio() >= strictness:
- return artist['id']
- for item in artist.get('aliases',[]):
- if item['name'].lower() == artistname.lower():
- return artist['id']
- return ''
-
- @use_cache(30)
- async def get_data(self, endpoint, params={}):
- ''' get data from api'''
- url = 'http://musicbrainz.org/ws/2/%s' % endpoint
- headers = {'User-Agent': 'Music Assistant/1.0.0 https://github.com/marcelveldt'}
- params['fmt'] = 'json'
- async with self.throttler:
- async with self.http_session.get(url, headers=headers, params=params) as response:
- try:
- result = await response.json()
- except Exception as exc:
- msg = await response.text()
- LOGGER.exception("%s - %s" % (str(exc), msg))
- result = None
- return result
-
-
-class FanartTv():
-
- def __init__(self, event_loop, cache):
- self.event_loop = event_loop
- self.cache = cache
- self.http_session = aiohttp.ClientSession(loop=event_loop, connector=aiohttp.TCPConnector(verify_ssl=False))
- self.throttler = Throttler(rate_limit=1, period=1)
-
- async def artist_images(self, mb_artist_id):
- ''' retrieve images by musicbrainz artist id '''
- metadata = {}
- data = await self.get_data("music/%s" % mb_artist_id)
- if data:
- if data.get('hdmusiclogo'):
- metadata['logo'] = data['hdmusiclogo'][0]["url"]
- elif data.get('musiclogo'):
- metadata['logo'] = data['musiclogo'][0]["url"]
- if data.get('artistbackground'):
- count = 0
- for item in data['artistbackground']:
- key = "fanart" if count == 0 else "fanart.%s" % count
- metadata[key] = item["url"]
- if data.get('artistthumb'):
- url = data['artistthumb'][0]["url"]
- if not '2a96cbd8b46e442fc41c2b86b821562f' in url:
- metadata['image'] = url
- if data.get('musicbanner'):
- metadata['banner'] = data['musicbanner'][0]["url"]
- return metadata
-
- @use_cache(30)
- async def get_data(self, endpoint, params={}):
- ''' get data from api'''
- url = 'http://webservice.fanart.tv/v3/%s' % endpoint
- params['api_key'] = '639191cb0774661597f28a47e7e2bad5'
- async with self.throttler:
- async with self.http_session.get(url, params=params) as response:
- result = await response.json()
- if 'error' in result and 'limit' in result['error']:
- raise Exception(result['error'])
- return result
from utils import run_periodic, LOGGER, parse_track_title
from difflib import SequenceMatcher as Matcher
import asyncio
-from cache import use_cache
+from modules.cache import use_cache
class MediaType(IntEnum):
self.artists = []
self.provider_ids = []
self.album = None
- self.disc_number = 0
- self.track_number = 0
+ self.disc_number = 1
+ self.track_number = 1
self.media_type = MediaType.Track
self.in_library = []
self.is_lazy = False
return NotImplemented
return (self.name == other.name and
self.version == other.version and
- self.item_id == other.item_id)
+ self.item_id == other.item_id and
+ self.provider == other.provider)
def __ne__(self, other):
return not self.__eq__(other)
name = 'My great Music provider' # display name
prov_id = 'my_provider' # used as id
icon = ''
+ audio_fmt = 'flac' # the audio format used by this provider when streaming
def __init__(self, mass):
self.mass = mass
prov_album_id = track_details.album.item_id
track_details.album = await self.album(prov_album_id, lazy=False)
item_id = await self.mass.db.add_track(track_details)
- # also fetch same track on all providers
+ # also fetch same track on all providers (will also get other quality versions)
new_track = await self.mass.db.track(item_id)
- item_provider_keys = [item['provider'] for item in new_track.provider_ids]
for prov_id, provider in self.mass.music.providers.items():
- if not prov_id in item_provider_keys:
- await provider.match_track(new_track)
+ await provider.match_track(new_track)
return item_id
async def playlist(self, prov_playlist_id) -> Playlist:
async def remove_library(self, prov_item_id, media_type:MediaType):
''' remove item from library '''
raise NotImplementedError
+
+ async def add_playlist_tracks(self, prov_playlist_id, prov_track_ids):
+ ''' add track(s) to playlist '''
+ raise NotImplementedError
+
+ async def remove_playlist_tracks(self, prov_playlist_id, prov_track_ids):
+ ''' remove track(s) from playlist '''
+ raise NotImplementedError
class PlayerState(str, Enum):
Off = "off"
--- /dev/null
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+'''provides a simple stateless caching system'''
+
+import datetime
+import time
+import sqlite3
+from functools import reduce
+import os
+from utils import run_periodic, LOGGER, parse_track_title
+import functools
+import asyncio
+
+
+class Cache(object):
+ '''basic stateless caching system '''
+ _exit = False
+ _mem_cache = {}
+ _busy_tasks = []
+ _database = None
+
+ def __init__(self, datapath):
+ '''Initialize our caching class'''
+ self._datapath = datapath
+ asyncio.ensure_future(self._do_cleanup())
+ LOGGER.debug("Initialized")
+
+ async def get(self, endpoint, checksum=""):
+ '''
+ get object from cache and return the results
+ endpoint: the (unique) name of the cache object as reference
+        checksum: optional argument to check if the checksum in the cache object matches the checksum provided
+ '''
+ checksum = self._get_checksum(checksum)
+ cur_time = self._get_timestamp(datetime.datetime.now())
+ result = None
+ # 1: try memory cache first
+ result = await self._get_mem_cache(endpoint, checksum, cur_time)
+ # 2: fallback to _database cache
+ if result is None:
+ result = await self._get_db_cache(endpoint, checksum, cur_time)
+ return result
+
+ async def set(self, endpoint, data, checksum="", expiration=datetime.timedelta(days=14)):
+ '''
+ set data in cache
+ '''
+ task_name = "set.%s" % endpoint
+ self._busy_tasks.append(task_name)
+ checksum = self._get_checksum(checksum)
+ expires = self._get_timestamp(datetime.datetime.now() + expiration)
+
+ # memory cache
+ await self._set_mem_cache(endpoint, checksum, expires, data)
+
+ # db cache
+ if not self._exit:
+ await self._set_db_cache(endpoint, checksum, expires, data)
+
+ # remove this task from list
+ self._busy_tasks.remove(task_name)
+
+ async def _get_mem_cache(self, endpoint, checksum, cur_time):
+ '''
+ get cache data from memory cache
+ '''
+ result = None
+ cachedata = self._mem_cache.get(endpoint)
+ if cachedata:
+ cachedata = cachedata
+ if cachedata[0] > cur_time:
+                if checksum is None or checksum == cachedata[2]:
+ result = cachedata[1]
+ return result
+
+ async def _set_mem_cache(self, endpoint, checksum, expires, data):
+ '''
+ put data in memory cache
+ '''
+ cachedata = (expires, data, checksum)
+ self._mem_cache[endpoint] = cachedata
+
+ async def _get_db_cache(self, endpoint, checksum, cur_time):
+ '''get cache data from sqllite database'''
+ result = None
+ query = "SELECT expires, data, checksum FROM simplecache WHERE id = ?"
+ cache_data = self._execute_sql(query, (endpoint,))
+ if cache_data:
+ cache_data = cache_data.fetchone()
+ if cache_data and cache_data[0] > cur_time:
+                if checksum is None or cache_data[2] == checksum:
+                    # NOTE(review): eval() on data read back from the cache db
+                    # executes arbitrary expressions; json or ast.literal_eval
+                    # would be safer for the repr()'d payloads stored here
+                    result = eval(cache_data[1])
+ # also set result in memory cache for further access
+ await self._set_mem_cache(endpoint, checksum, cache_data[0], result)
+ return result
+
+ async def _set_db_cache(self, endpoint, checksum, expires, data):
+ ''' store cache data in _database '''
+ query = "INSERT OR REPLACE INTO simplecache( id, expires, data, checksum) VALUES (?, ?, ?, ?)"
+ data = repr(data)
+ self._execute_sql(query, (endpoint, expires, data, checksum))
+
+ @run_periodic(3600)
+ async def _do_cleanup(self):
+ '''perform cleanup task'''
+ if self._exit:
+ return
+ self._busy_tasks.append(__name__)
+ cur_time = datetime.datetime.now()
+ cur_timestamp = self._get_timestamp(cur_time)
+ LOGGER.debug("Running cleanup...")
+ query = "SELECT id, expires FROM simplecache"
+ for cache_data in self._execute_sql(query).fetchall():
+ cache_id = cache_data[0]
+ cache_expires = cache_data[1]
+ if self._exit:
+ return
+ # always cleanup all memory objects on each interval
+ self._mem_cache.pop(cache_id, None)
+ # clean up db cache object only if expired
+ if cache_expires < cur_timestamp:
+ query = 'DELETE FROM simplecache WHERE id = ?'
+ self._execute_sql(query, (cache_id,))
+ LOGGER.debug("delete from db %s" % cache_id)
+
+ # compact db
+ self._execute_sql("VACUUM")
+
+ # remove task from list
+ self._busy_tasks.remove(__name__)
+ LOGGER.debug("Auto cleanup done")
+
+ def _get_database(self):
+ '''get reference to our sqllite _database - performs basic integrity check'''
+ dbfile = os.path.join(self._datapath, "simplecache.db")
+ try:
+ connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
+ connection.execute('SELECT * FROM simplecache LIMIT 1')
+ return connection
+ except Exception as error:
+ # our _database is corrupt or doesn't exist yet, we simply try to recreate it
+ if os.path.isfile(dbfile):
+ os.remove(dbfile)
+ try:
+ connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
+ connection.execute(
+ """CREATE TABLE IF NOT EXISTS simplecache(
+ id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""")
+ return connection
+ except Exception as error:
+ LOGGER.warning("Exception while initializing _database: %s" % str(error))
+ return None
+
+ def _execute_sql(self, query, data=None):
+ '''little wrapper around execute and executemany to just retry a db command if db is locked'''
+ retries = 0
+ result = None
+ error = None
+ # always use new db object because we need to be sure that data is available for other simplecache instances
+ with self._get_database() as _database:
+ while not retries == 10:
+ if self._exit:
+ return None
+ try:
+ if isinstance(data, list):
+ result = _database.executemany(query, data)
+ elif data:
+ result = _database.execute(query, data)
+ else:
+ result = _database.execute(query)
+ return result
+ except sqlite3.OperationalError as error:
+                    # sqlite3 reports the literal message "database is locked";
+                    # the test must be against str(error), not the exception object
+                    if "database is locked" in str(error):
+ LOGGER.debug("retrying DB commit...")
+ retries += 1
+ time.sleep(0.5)
+ else:
+ break
+ except Exception as error:
+ LOGGER.error("_database ERROR ! -- %s" % str(error))
+ break
+ return None
+
+ @staticmethod
+ def _get_timestamp(date_time):
+ '''Converts a datetime object to unix timestamp'''
+ return int(time.mktime(date_time.timetuple()))
+
+ @staticmethod
+ def _get_checksum(stringinput):
+ '''get int checksum from string'''
+ if not stringinput:
+ return 0
+ else:
+ stringinput = str(stringinput)
+ return reduce(lambda x, y: x + y, map(ord, stringinput))
+
+def use_cache(cache_days=14, cache_hours=8):
+ def wrapper(func):
+ @functools.wraps(func)
+ async def wrapped(*args, **kwargs):
+ if kwargs.get("ignore_cache"):
+ return await func(*args, **kwargs)
+ cache_checksum = kwargs.get("cache_checksum")
+ method_class = args[0]
+ method_class_name = method_class.__class__.__name__
+ cache_str = "%s.%s" % (method_class_name, func.__name__)
+ # append args to cache identifier
+ for item in args[1:]:
+ if isinstance(item, dict):
+ for subkey in sorted(list(item.keys())):
+ subvalue = item[subkey]
+ cache_str += ".%s%s" %(subkey,subvalue)
+ else:
+ cache_str += ".%s" % item
+ # append kwargs to cache identifier
+ for key in sorted(list(kwargs.keys())):
+ if key in ["ignore_cache", "cache_checksum"]:
+ continue
+ value = kwargs[key]
+ if isinstance(value, dict):
+ for subkey in sorted(list(value.keys())):
+ subvalue = value[subkey]
+ cache_str += ".%s%s" %(subkey,subvalue)
+ else:
+ cache_str += ".%s%s" %(key,value)
+ cache_str = cache_str.lower()
+ cachedata = await method_class.cache.get(cache_str, checksum=cache_checksum)
+ if cachedata is not None:
+ return cachedata
+ else:
+ result = await func(*args, **kwargs)
+ await method_class.cache.set(cache_str, result, checksum=cache_checksum, expiration=datetime.timedelta(days=cache_days, hours=cache_hours))
+ return result
+ return wrapped
+ return wrapper
import os
from typing import List
import random
-import sys
-sys.path.append("..")
-from utils import run_periodic, LOGGER, parse_track_title, try_parse_int
-from models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
-from constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT
+from music_assistant.utils import run_periodic, LOGGER, parse_track_title, try_parse_int
+from music_assistant.models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
+from music_assistant.constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT
import json
import aiohttp
import time
import hashlib
from asyncio_throttle import Throttler
from aiocometd import Client, ConnectionType, Extension
-from cache import use_cache
+from music_assistant.modules.cache import use_cache
import copy
import slugify as slug
--- /dev/null
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+
+import asyncio
+import os
+from music_assistant.utils import run_periodic, LOGGER
+import json
+import aiohttp
+from asyncio_throttle import Throttler
+from difflib import SequenceMatcher as Matcher
+from music_assistant.modules.cache import use_cache
+from yarl import URL
+import re
+
+LUCENE_SPECIAL = r'([+\-&|!(){}\[\]\^"~*?:\\\/])'
+
class MetaData():
    ''' several helpers to search and store mediadata for mediaitems '''

    def __init__(self, event_loop, db, cache):
        self.event_loop = event_loop
        self.db = db
        self.cache = cache
        # lookup backends: musicbrainz for ids, fanart.tv for artwork
        self.musicbrainz = MusicBrainz(event_loop, cache)
        self.fanarttv = FanartTv(event_loop, cache)

    async def get_artist_metadata(self, mb_artist_id, cur_metadata):
        ''' get/update rich metadata for an artist by providing the musicbrainz artist id '''
        metadata = cur_metadata
        # only hit fanart.tv when no artwork is present at all
        has_artwork = 'fanart' in metadata or 'thumb' in metadata
        if not has_artwork:
            images = await self.fanarttv.artist_images(mb_artist_id)
            self.merge_metadata(cur_metadata, images)
        return metadata

    async def get_mb_artist_id(self, artistname, albumname=None, album_upc=None, trackname=None, track_isrc=None):
        ''' retrieve musicbrainz artist id for the given details '''
        LOGGER.debug('searching musicbrainz for %s (albumname: %s - album_upc: %s - trackname: %s - track_isrc: %s)' %(artistname, albumname, album_upc, trackname, track_isrc))
        # try the strongest identifiers (upc/isrc) first, then fall back to name matching
        mb_artist_id = None
        if album_upc:
            mb_artist_id = await self.musicbrainz.search_artist_by_album(artistname, None, album_upc)
        if not mb_artist_id and track_isrc:
            mb_artist_id = await self.musicbrainz.search_artist_by_track(artistname, None, track_isrc)
        if not mb_artist_id and albumname:
            mb_artist_id = await self.musicbrainz.search_artist_by_album(artistname, albumname)
        if not mb_artist_id and trackname:
            mb_artist_id = await self.musicbrainz.search_artist_by_track(artistname, trackname)
        LOGGER.debug('Got musicbrainz artist id for artist %s --> %s' %(artistname, mb_artist_id))
        return mb_artist_id

    @staticmethod
    def merge_metadata(cur_metadata, new_values):
        ''' merge new info into the metadata dict without overwriting existing values '''
        for key in new_values:
            # falsy existing values (None, '', 0) are treated as absent and replaced
            if cur_metadata.get(key):
                continue
            cur_metadata[key] = new_values[key]
        return cur_metadata
+
class MusicBrainz():
    ''' thin async client for the MusicBrainz WS/2 api (artist id lookups) '''

    def __init__(self, event_loop, cache):
        self.event_loop = event_loop
        self.cache = cache
        self.http_session = aiohttp.ClientSession(loop=event_loop, connector=aiohttp.TCPConnector(verify_ssl=False))
        # musicbrainz rate-limits anonymous clients to ~1 request/second
        self.throttler = Throttler(rate_limit=1, period=1)

    async def search_artist_by_album(self, artistname, albumname=None, album_upc=None):
        ''' retrieve musicbrainz artist id by providing the artist name and albumname or upc '''
        if album_upc:
            endpoint = 'release'
            params = {'query': 'barcode:%s' % album_upc}
        else:
            # strip slashes from the raw name FIRST, then escape lucene special
            # characters (the old order removed the escape backslashes again)
            searchartist = artistname.replace('/', '').replace('\\', '')
            searchartist = re.sub(LUCENE_SPECIAL, r'\\\1', searchartist)
            searchalbum = re.sub(LUCENE_SPECIAL, r'\\\1', albumname)
            endpoint = 'release'
            params = {'query': 'artist:"%s" AND release:"%s"' % (searchartist, searchalbum)}
        result = await self.get_data(endpoint, params)
        if result and result.get('releases'):
            # try exact matches first, then progressively fuzzier ones
            for strictness in [1, 0.95, 0.9]:
                for item in result['releases']:
                    if album_upc or Matcher(None, item['title'].lower(), albumname.lower()).ratio() >= strictness:
                        for artist in item['artist-credit']:
                            artist = artist['artist']
                            if Matcher(None, artist['name'].lower(), artistname.lower()).ratio() >= strictness:
                                return artist['id']
                            # BUGFIX: use a dedicated loop variable; reusing `item`
                            # here clobbered the outer releases loop variable
                            for alias in artist.get('aliases', []):
                                if alias['name'].lower() == artistname.lower():
                                    return artist['id']
        return ''

    async def search_artist_by_track(self, artistname, trackname=None, track_isrc=None):
        ''' retrieve artist id by providing the artist name and trackname or track isrc '''
        endpoint = 'recording'
        # strip slashes first, then escape lucene special characters
        searchartist = artistname.replace('/', '').replace('\\', '')
        searchartist = re.sub(LUCENE_SPECIAL, r'\\\1', searchartist)
        if track_isrc:
            endpoint = 'isrc/%s' % track_isrc
            params = {'inc': 'artist-credits'}
        else:
            searchtrack = re.sub(LUCENE_SPECIAL, r'\\\1', trackname)
            endpoint = 'recording'
            params = {'query': '"%s" AND artist:"%s"' % (searchtrack, searchartist)}
        result = await self.get_data(endpoint, params)
        if result and result.get('recordings'):
            for strictness in [1, 0.95]:
                for item in result['recordings']:
                    if track_isrc or Matcher(None, item['title'].lower(), trackname.lower()).ratio() >= strictness:
                        for artist in item['artist-credit']:
                            artist = artist['artist']
                            if Matcher(None, artist['name'].lower(), artistname.lower()).ratio() >= strictness:
                                return artist['id']
                            # BUGFIX: dedicated loop variable instead of shadowing `item`
                            for alias in artist.get('aliases', []):
                                if alias['name'].lower() == artistname.lower():
                                    return artist['id']
        return ''

    @use_cache(30)
    async def get_data(self, endpoint, params=None):
        ''' get data from api

        Returns the decoded json response, or None on decode failure.
        '''
        # BUGFIX: no mutable default argument, and copy before mutating so the
        # caller's dict is never modified
        params = dict(params) if params else {}
        url = 'http://musicbrainz.org/ws/2/%s' % endpoint
        headers = {'User-Agent': 'Music Assistant/1.0.0 https://github.com/marcelveldt'}
        params['fmt'] = 'json'
        async with self.throttler:
            async with self.http_session.get(url, headers=headers, params=params) as response:
                try:
                    result = await response.json()
                except Exception as exc:
                    msg = await response.text()
                    LOGGER.exception("%s - %s" % (str(exc), msg))
                    result = None
                return result
+
+
class FanartTv():
    ''' thin async client for the fanart.tv api (artist artwork lookups) '''

    def __init__(self, event_loop, cache):
        self.event_loop = event_loop
        self.cache = cache
        self.http_session = aiohttp.ClientSession(loop=event_loop, connector=aiohttp.TCPConnector(verify_ssl=False))
        self.throttler = Throttler(rate_limit=1, period=1)

    async def artist_images(self, mb_artist_id):
        ''' retrieve images by musicbrainz artist id

        Returns a dict with keys like logo, image, banner and
        fanart / fanart.1 / fanart.2 ... for multiple backgrounds.
        '''
        metadata = {}
        data = await self.get_data("music/%s" % mb_artist_id)
        if data:
            # prefer the hd logo, fall back to the regular one
            if data.get('hdmusiclogo'):
                metadata['logo'] = data['hdmusiclogo'][0]["url"]
            elif data.get('musiclogo'):
                metadata['logo'] = data['musiclogo'][0]["url"]
            if data.get('artistbackground'):
                count = 0
                for item in data['artistbackground']:
                    key = "fanart" if count == 0 else "fanart.%s" % count
                    metadata[key] = item["url"]
                    # BUGFIX: count was never incremented, so every background
                    # overwrote the single "fanart" key
                    count += 1
            if data.get('artistthumb'):
                url = data['artistthumb'][0]["url"]
                # skip this hash - presumably the known placeholder image; TODO confirm
                if not '2a96cbd8b46e442fc41c2b86b821562f' in url:
                    metadata['image'] = url
            if data.get('musicbanner'):
                metadata['banner'] = data['musicbanner'][0]["url"]
        return metadata

    @use_cache(30)
    async def get_data(self, endpoint, params=None):
        ''' get data from api

        Raises when fanart.tv reports a rate-limit error so callers can back off.
        '''
        # BUGFIX: no mutable default argument, and copy before mutating so the
        # caller's dict is never modified
        params = dict(params) if params else {}
        url = 'http://webservice.fanart.tv/v3/%s' % endpoint
        # NOTE(review): hardcoded api key - consider moving to configuration
        params['api_key'] = '639191cb0774661597f28a47e7e2bad5'
        async with self.throttler:
            async with self.http_session.get(url, params=params) as response:
                result = await response.json()
                if 'error' in result and 'limit' in result['error']:
                    raise Exception(result['error'])
                return result
--- /dev/null
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+
+import asyncio
+import os
+from music_assistant.utils import run_periodic, run_async_background_task, LOGGER, try_parse_int
+import aiohttp
+from difflib import SequenceMatcher as Matcher
+from music_assistant.models import MediaType, Track, Artist, Album, Playlist
+from typing import List
+import toolz
+import operator
+
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+MODULES_PATH = os.path.join(BASE_DIR, "musicproviders" )
+
class Music():
    ''' several helpers around the musicproviders '''

    def __init__(self, mass):
        self.sync_running = False
        self.mass = mass
        self.providers = {}
        # dynamically load musicprovider modules
        self.load_music_providers()
        # schedule sync task
        mass.event_loop.create_task(self.sync_music_providers())

    async def item(self, item_id, media_type:MediaType, provider='database', lazy=True):
        ''' get single music item by id and media type'''
        if media_type == MediaType.Artist:
            return await self.artist(item_id, provider, lazy=lazy)
        elif media_type == MediaType.Album:
            return await self.album(item_id, provider, lazy=lazy)
        elif media_type == MediaType.Track:
            return await self.track(item_id, provider, lazy=lazy)
        elif media_type == MediaType.Playlist:
            return await self.playlist(item_id, provider)
        else:
            return None

    async def library_artists(self, limit=0, offset=0, orderby='name', provider_filter=None) -> List[Artist]:
        ''' return all library artists, optionally filtered by provider '''
        return await self.mass.db.library_artists(provider=provider_filter, limit=limit, offset=offset, orderby=orderby)

    async def library_albums(self, limit=0, offset=0, orderby='name', provider_filter=None) -> List[Album]:
        ''' return all library albums, optionally filtered by provider '''
        return await self.mass.db.library_albums(provider=provider_filter, limit=limit, offset=offset, orderby=orderby)

    async def library_tracks(self, limit=0, offset=0, orderby='name', provider_filter=None) -> List[Track]:
        ''' return all library tracks, optionally filtered by provider '''
        return await self.mass.db.library_tracks(provider=provider_filter, limit=limit, offset=offset, orderby=orderby)

    async def playlists(self, limit=0, offset=0, orderby='name', provider_filter=None) -> List[Playlist]:
        ''' return all library playlists, optionally filtered by provider '''
        return await self.mass.db.playlists(provider=provider_filter, limit=limit, offset=offset, orderby=orderby)

    async def library_items(self, media_type:MediaType, limit=0, offset=0, orderby='name', provider_filter=None) -> List[object]:
        ''' get multiple music items in library'''
        if media_type == MediaType.Artist:
            return await self.library_artists(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter)
        elif media_type == MediaType.Album:
            return await self.library_albums(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter)
        elif media_type == MediaType.Track:
            return await self.library_tracks(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter)
        elif media_type == MediaType.Playlist:
            return await self.playlists(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter)

    async def artist(self, item_id, provider='database', lazy=True) -> Artist:
        ''' get artist by id '''
        if not provider or provider == 'database':
            return await self.mass.db.artist(item_id)
        return await self.providers[provider].artist(item_id, lazy=lazy)

    async def album(self, item_id, provider='database', lazy=True) -> Album:
        ''' get album by id '''
        if not provider or provider == 'database':
            return await self.mass.db.album(item_id)
        return await self.providers[provider].album(item_id, lazy=lazy)

    async def track(self, item_id, provider='database', lazy=True) -> Track:
        ''' get track by id '''
        if not provider or provider == 'database':
            return await self.mass.db.track(item_id)
        return await self.providers[provider].track(item_id, lazy=lazy)

    async def playlist(self, item_id, provider='database') -> Playlist:
        ''' get playlist by id '''
        if not provider or provider == 'database':
            return await self.mass.db.playlist(item_id)
        return await self.providers[provider].playlist(item_id)

    async def playlist_by_name(self, name) -> Playlist:
        ''' get playlist by name '''
        for playlist in await self.playlists():
            if playlist.name == name:
                return playlist
        return None

    async def artist_toptracks(self, artist_id, provider='database') -> List[Track]:
        ''' get top tracks for given artist '''
        artist = await self.artist(artist_id, provider)
        # always append database tracks
        items = await self.mass.db.artist_tracks(artist.item_id)
        for prov_mapping in artist.provider_ids:
            prov_id = prov_mapping['provider']
            prov_item_id = prov_mapping['item_id']
            prov_obj = self.providers[prov_id]
            items += await prov_obj.artist_toptracks(prov_item_id)
        items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
        items.sort(key=lambda x: x.name, reverse=False)
        return items

    async def artist_albums(self, artist_id, provider='database') -> List[Album]:
        ''' get (all) albums for given artist '''
        artist = await self.artist(artist_id, provider)
        # always append database albums
        items = await self.mass.db.artist_albums(artist.item_id)
        for prov_mapping in artist.provider_ids:
            prov_id = prov_mapping['provider']
            prov_item_id = prov_mapping['item_id']
            prov_obj = self.providers[prov_id]
            items += await prov_obj.artist_albums(prov_item_id)
        items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
        items.sort(key=lambda x: x.name, reverse=False)
        return items

    async def album_tracks(self, album_id, provider='database') -> List[Track]:
        ''' get the album tracks for given album '''
        items = []
        album = await self.album(album_id, provider)
        for prov_mapping in album.provider_ids:
            prov_id = prov_mapping['provider']
            prov_item_id = prov_mapping['item_id']
            prov_obj = self.providers[prov_id]
            items += await prov_obj.album_tracks(prov_item_id)
        items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
        # sort by disc first, then track number (stable sort keeps disc order)
        items = sorted(items, key=operator.attrgetter('disc_number'), reverse=False)
        items = sorted(items, key=operator.attrgetter('track_number'), reverse=False)
        return items

    async def playlist_tracks(self, playlist_id, provider='database', offset=0, limit=50) -> List[Track]:
        ''' get the tracks for given playlist '''
        playlist = None
        if not provider or provider == 'database':
            playlist = await self.mass.db.playlist(playlist_id)
        if playlist and playlist.is_editable:
            # database synced playlist, return tracks from db...
            return await self.mass.db.playlist_tracks(playlist.item_id, offset=offset, limit=limit)
        else:
            # return playlist tracks from provider
            items = []
            playlist = await self.playlist(playlist_id)
            for prov_mapping in playlist.provider_ids:
                prov_id = prov_mapping['provider']
                prov_item_id = prov_mapping['item_id']
                prov_obj = self.providers[prov_id]
                items += await prov_obj.playlist_tracks(prov_item_id, offset=offset, limit=limit)
                if items:
                    # first provider that returns tracks wins
                    break
            items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
            return items

    async def search(self, searchquery, media_types:List[MediaType], limit=10, online=False) -> dict:
        ''' search database or providers '''
        # get results from database
        result = await self.mass.db.search(searchquery, media_types, limit)
        if online:
            # include results from music providers
            for prov in self.providers.values():
                prov_results = await prov.search(searchquery, media_types, limit)
                for item_type, items in prov_results.items():
                    result[item_type] += items
            # filter out duplicates
            for item_type, items in result.items():
                # BUGFIX: write the deduped list back (it was assigned to a
                # discarded local before, so duplicates were never removed)
                result[item_type] = list(toolz.unique(items, key=operator.attrgetter('item_id')))
        return result

    async def item_action(self, item_id, media_type, provider='database', action=None):
        ''' perform action on item (such as library add/remove)

        Returns the result of the last provider call, or None if nothing matched.
        '''
        result = None
        item = await self.item(item_id, media_type, provider)
        if item and action in ['add', 'remove']:
            # remove or add item to the library
            # BUGFIX: iterate the fetched item's provider ids
            # (`result` was always None at this point)
            for prov_mapping in item.provider_ids:
                prov_id = prov_mapping['provider']
                prov_item_id = prov_mapping['item_id']
                for prov in self.providers.values():
                    if prov.prov_id == prov_id:
                        if action == 'add':
                            result = await prov.add_library(prov_item_id, media_type)
                        elif action == 'remove':
                            result = await prov.remove_library(prov_item_id, media_type)
        return result

    async def add_playlist_tracks(self, playlist_id, tracks:List[Track]):
        ''' add tracks to playlist - make sure we dont add dupes '''
        # we can only edit playlists that are in the database (marked as editable)
        playlist = await self.playlist(playlist_id, 'database')
        if not playlist or not playlist.is_editable:
            # BUGFIX: log the id, not playlist.name - playlist may be None here
            LOGGER.warning("Playlist %s is not editable - skip addition of tracks" % playlist_id)
            return False
        playlist_prov = playlist.provider_ids[0] # playlist can only have one provider (for now)
        cur_playlist_tracks = await self.mass.db.playlist_tracks(playlist_id, limit=0)
        # grab all (database) track ids in the playlist so we can check for duplicates
        cur_playlist_track_ids = [item.item_id for item in cur_playlist_tracks]
        track_ids_to_add = []
        for track in tracks:
            if not track.provider == 'database':
                # make sure we have a database track
                track = await self.track(track.item_id, track.provider, lazy=False)
            if track.item_id in cur_playlist_track_ids:
                LOGGER.warning("Track %s already in playlist %s - skip addition" %(track.name, playlist.name))
                continue
            # we can only add a track to a provider playlist if the track is available on that provider
            # exception is the file provider which does accept tracks from all providers in the m3u playlist
            # this should all be handled in the frontend but these checks are here just to be safe
            track_playlist_provs = [item['provider'] for item in track.provider_ids]
            if playlist_prov['provider'] in track_playlist_provs:
                # a track can contain multiple versions on the same provider
                # simply sort by quality and just add the first one (assuming the track is still available)
                track_versions = sorted(track.provider_ids, key=operator.itemgetter('quality'), reverse=True)
                for track_version in track_versions:
                    if track_version['provider'] == playlist_prov['provider']:
                        track_ids_to_add.append(track_version['item_id'])
                        break
            elif playlist_prov['provider'] == 'file':
                # the file provider can handle uri's from all providers in the file so simply add the db id
                track_ids_to_add.append(track.item_id)
            else:
                LOGGER.warning("Track %s not available on provider %s - skip addition to playlist %s" %(track.name, playlist_prov['provider'], playlist.name))
                continue
        # actually add the tracks to the playlist on the provider
        await self.providers[playlist_prov['provider']].add_playlist_tracks(playlist_prov['item_id'], track_ids_to_add)
        # schedule sync
        self.mass.event_loop.create_task(self.sync_playlist_tracks(playlist.item_id, playlist_prov['provider'], playlist_prov['item_id']))

    @run_periodic(3600)
    async def sync_music_providers(self):
        ''' periodic sync of all music providers '''
        if self.sync_running:
            return
        self.sync_running = True
        # BUGFIX: reset the flag even if a provider sync raises,
        # otherwise syncing is disabled forever after one failure
        try:
            for prov_id in self.providers.keys():
                # sync library artists
                await self.sync_library_artists(prov_id)
                await self.sync_library_albums(prov_id)
                await self.sync_library_tracks(prov_id)
                await self.sync_playlists(prov_id)
        finally:
            self.sync_running = False

    async def sync_library_artists(self, prov_id):
        ''' sync library artists for given provider'''
        music_provider = self.providers[prov_id]
        prev_items = await self.library_artists(provider_filter=prov_id)
        prev_db_ids = [item.item_id for item in prev_items]
        cur_items = await music_provider.get_library_artists()
        cur_db_ids = []
        for item in cur_items:
            db_item = await music_provider.artist(item.item_id, lazy=False)
            cur_db_ids.append(db_item.item_id)
            if not db_item.item_id in prev_db_ids:
                await self.mass.db.add_to_library(db_item.item_id, MediaType.Artist, prov_id)
        # process deletions
        for db_id in prev_db_ids:
            if db_id not in cur_db_ids:
                await self.mass.db.remove_from_library(db_id, MediaType.Artist, prov_id)
        LOGGER.info("Finished syncing Artists for provider %s" % prov_id)

    async def sync_library_albums(self, prov_id):
        ''' sync library albums for given provider'''
        music_provider = self.providers[prov_id]
        prev_items = await self.library_albums(provider_filter=prov_id)
        prev_db_ids = [item.item_id for item in prev_items]
        cur_items = await music_provider.get_library_albums()
        cur_db_ids = []
        for item in cur_items:
            db_item = await music_provider.album(item.item_id, lazy=False)
            cur_db_ids.append(db_item.item_id)
            # precache album tracks...
            for album_track in await music_provider.get_album_tracks(item.item_id):
                await music_provider.track(album_track.item_id)
            if not db_item.item_id in prev_db_ids:
                await self.mass.db.add_to_library(db_item.item_id, MediaType.Album, prov_id)
        # process deletions
        for db_id in prev_db_ids:
            if db_id not in cur_db_ids:
                await self.mass.db.remove_from_library(db_id, MediaType.Album, prov_id)
        LOGGER.info("Finished syncing Albums for provider %s" % prov_id)

    async def sync_library_tracks(self, prov_id):
        ''' sync library tracks for given provider'''
        music_provider = self.providers[prov_id]
        prev_items = await self.library_tracks(provider_filter=prov_id)
        prev_db_ids = [item.item_id for item in prev_items]
        cur_items = await music_provider.get_library_tracks()
        cur_db_ids = []
        for item in cur_items:
            db_item = await music_provider.track(item.item_id, lazy=False)
            cur_db_ids.append(db_item.item_id)
            if not db_item.item_id in prev_db_ids:
                await self.mass.db.add_to_library(db_item.item_id, MediaType.Track, prov_id)
        # process deletions
        for db_id in prev_db_ids:
            if db_id not in cur_db_ids:
                await self.mass.db.remove_from_library(db_id, MediaType.Track, prov_id)
        LOGGER.info("Finished syncing Tracks for provider %s" % prov_id)

    async def sync_playlists(self, prov_id):
        ''' sync library playlists for given provider'''
        music_provider = self.providers[prov_id]
        prev_items = await self.playlists(provider_filter=prov_id)
        prev_db_ids = [item.item_id for item in prev_items]
        cur_items = await music_provider.get_playlists()
        cur_db_ids = []
        for item in cur_items:
            # always add to db because playlist attributes could have changed
            db_id = await self.mass.db.add_playlist(item)
            cur_db_ids.append(db_id)
            if not db_id in prev_db_ids:
                await self.mass.db.add_to_library(db_id, MediaType.Playlist, prov_id)
            if item.is_editable:
                # precache/sync playlist tracks (user owned playlists only)
                # use the mass event loop for consistency with the rest of the class
                self.mass.event_loop.create_task(self.sync_playlist_tracks(db_id, prov_id, item.item_id))
        # process playlist deletions
        for db_id in prev_db_ids:
            if db_id not in cur_db_ids:
                await self.mass.db.remove_from_library(db_id, MediaType.Playlist, prov_id)
        LOGGER.info("Finished syncing Playlists for provider %s" % prov_id)

    async def sync_playlist_tracks(self, db_playlist_id, prov_id, prov_playlist_id):
        ''' sync library playlists tracks for given provider'''
        music_provider = self.providers[prov_id]
        prev_items = await self.playlist_tracks(db_playlist_id)
        prev_db_ids = [item.item_id for item in prev_items]
        cur_items = await music_provider.get_playlist_tracks(prov_playlist_id, limit=0)
        cur_db_ids = []
        pos = 0
        for item in cur_items:
            # we need to do this the complicated way because the file provider can return tracks from other providers
            for prov_mapping in item.provider_ids:
                item_provider = prov_mapping['provider']
                prov_item_id = prov_mapping['item_id']
                db_item = await self.providers[item_provider].track(prov_item_id, lazy=False)
                cur_db_ids.append(db_item.item_id)
                if not db_item.item_id in prev_db_ids:
                    await self.mass.db.add_playlist_track(db_playlist_id, db_item.item_id, pos)
            pos += 1
        # process playlist track deletions
        for db_id in prev_db_ids:
            if db_id not in cur_db_ids:
                await self.mass.db.remove_playlist_track(db_playlist_id, db_id)
        LOGGER.info("Finished syncing Playlist %s tracks for provider %s" % (prov_playlist_id, prov_id))

    def load_music_providers(self):
        ''' dynamically load musicproviders '''
        for item in os.listdir(MODULES_PATH):
            if (os.path.isfile(os.path.join(MODULES_PATH, item)) and not item.startswith("_") and
                    item.endswith('.py') and not item.startswith('.')):
                module_name = item.replace(".py","")
                LOGGER.debug("Loading musicprovider module %s" % module_name)
                try:
                    # absolute import path, consistent with the music_assistant
                    # package layout used by the rest of the imports
                    mod = __import__("music_assistant.modules.musicproviders." + module_name, fromlist=[''])
                    if not self.mass.config['musicproviders'].get(module_name):
                        self.mass.config['musicproviders'][module_name] = {}
                    self.mass.config['musicproviders'][module_name]['__desc__'] = mod.config_entries()
                    for key, def_value, desc in mod.config_entries():
                        if not key in self.mass.config['musicproviders'][module_name]:
                            self.mass.config['musicproviders'][module_name][key] = def_value
                    mod = mod.setup(self.mass)
                    if mod:
                        self.providers[mod.prov_id] = mod
                        cls_name = mod.__class__.__name__
                        LOGGER.info("Successfully initialized module %s" % cls_name)
                except Exception as exc:
                    LOGGER.exception("Error loading module %s: %s" %(module_name, exc))
from typing import List
import sys
import time
-sys.path.append("..")
-from utils import run_periodic, LOGGER, parse_track_title
-from models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
-from constants import CONF_ENABLED
+from music_assistant.utils import run_periodic, LOGGER, parse_track_title
+from music_assistant.models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
+from music_assistant.constants import CONF_ENABLED
import taglib
-from cache import use_cache
+from music_assistant.modules.cache import use_cache
def setup(mass):
track = await self.__parse_track_from_uri(line)
if track:
tracks.append(track)
- if len(tracks) == limit:
+ if limit and len(tracks) == limit:
break
return tracks
track.disc_number = int(song.tags['DISCNUMBER'][0])
if 'TRACKNUMBER' in song.tags:
track.track_number = int(song.tags['TRACKNUMBER'][0])
+ quality_details = ""
if filename.endswith('.flac'):
- # TODO: try to get more quality info
+ # TODO: get bit depth
quality = TrackQuality.FLAC_LOSSLESS
+ if song.sampleRate > 192000:
+ quality = TrackQuality.FLAC_LOSSLESS_HI_RES_4
+ elif song.sampleRate > 96000:
+ quality = TrackQuality.FLAC_LOSSLESS_HI_RES_3
+ elif song.sampleRate > 48000:
+ quality = TrackQuality.FLAC_LOSSLESS_HI_RES_2
+ quality_details = "%s Khz" % (song.sampleRate/1000)
elif filename.endswith('.ogg'):
quality = TrackQuality.LOSSY_OGG
+ quality_details = "%s kbps" % (song.bitrate)
elif filename.endswith('.m4a'):
quality = TrackQuality.LOSSY_AAC
+ quality_details = "%s kbps" % (song.bitrate)
else:
quality = TrackQuality.LOSSY_MP3
+ quality_details = "%s kbps" % (song.bitrate)
track.provider_ids.append({
"provider": self.prov_id,
"item_id": filename,
- "quality": quality
+ "quality": quality,
+ "details": quality_details
})
return track
if track:
return track
return None
-
import asyncio
import os
from typing import List
-import sys
-sys.path.append("..")
-from utils import run_periodic, LOGGER, parse_track_title
-from models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
-from constants import CONF_USERNAME, CONF_PASSWORD, CONF_ENABLED
+from music_assistant.utils import run_periodic, LOGGER, parse_track_title
+from music_assistant.models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
+from music_assistant.constants import CONF_USERNAME, CONF_PASSWORD, CONF_ENABLED
import json
import aiohttp
import time
import datetime
import hashlib
from asyncio_throttle import Throttler
-from cache import use_cache
+from music_assistant.modules.cache import use_cache
def setup(mass):
def __init__(self, mass, username, password):
self.name = 'Qobuz'
self.prov_id = 'qobuz'
+ self.audio_fmt = 'flac'
self._cur_user = None
self.mass = mass
self.cache = mass.cache
playlist_track = await self.__parse_track(track_obj)
if playlist_track:
tracks.append(playlist_track)
+ # TODO: should we look for an alternative track version if the original is marked unavailable ?
return tracks
async def get_artist_albums(self, prov_artist_id, limit=100, offset=0) -> List[Album]:
await self.mass.db.remove_from_library(item.item_id, media_type, self.prov_id)
LOGGER.debug("deleted item %s from %s - %s" %(prov_item_id, self.prov_id, result))
- async def get_stream_details(self, track_id):
- ''' returns the stream details for the provider '''
- params = {'format_id': 27, 'track_id': track_id, 'intent': 'stream'}
- return await self.__get_data('track/getFileUrl', params, sign_request=True, ignore_cache=True)
+ async def add_playlist_tracks(self, prov_playlist_id, prov_track_ids):
+ ''' add track(s) to playlist '''
+ params = {
+ 'playlist_id': prov_playlist_id,
+ 'track_ids': ",".join(prov_track_ids)
+ }
+ return await self.__get_data('playlist/addTracks', params)
+
+ async def remove_playlist_tracks(self, prov_playlist_id, prov_track_ids):
+ ''' remove track(s) from playlist '''
+ playlist_track_ids = []
+ params = {'playlist_id': prov_playlist_id, 'extra': 'tracks'}
+ for track in await self.__get_all_items("playlist/get", params, key='tracks', limit=0):
+ if track['id'] in prov_track_ids:
+ playlist_track_ids.append(track['playlist_track_id'])
+ params = {'playlist_id': prov_playlist_id, 'track_ids': ",".join(playlist_track_ids)}
+ return await self.__get_data('playlist/deleteTracks', params)
- async def get_stream(self, track_id):
+ async def get_audio_stream(self, track_id):
''' get audio stream for a track '''
- sox_effects='vol -12 dB'
- track_details = await self.get_stream_details(track_id)
- url = track_details['url']
- env = os.environ.copy()
- env["SOX_OPTS"] = "−−multi−threaded −−replay−gain track"
- cmd = 'curl -s -X GET "%s" | sox -t flac - -t flac - %s' % (url, sox_effects)
- process = await asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE, env=env)
- while not process.stdout.at_eof():
- chunk = await process.stdout.readline()
- if chunk:
+ params = {'format_id': 27, 'track_id': track_id, 'intent': 'stream'}
+ streamdetails = await self.__get_data('track/getFileUrl', params, sign_request=True, ignore_cache=True)
+ async with self.http_session.get(streamdetails['url']) as resp:
+ while True:
+ chunk = await resp.content.read(2000000)
+ if not chunk:
+ break
yield chunk
- else:
- break
+ LOGGER.info("end of stream for track_id %s" % track_id)
async def __parse_artist(self, artist_obj):
''' parse spotify artist object to generic layout '''
album = Album()
if not album_obj.get('id') or not album_obj["streamable"] or not album_obj["displayable"]:
# some safety checks
- LOGGER.debug("invalid/unavailable album found: %s" % album_obj.get('id'))
+ LOGGER.warning("invalid/unavailable album found: %s" % album_obj.get('id'))
return None
album.item_id = album_obj['id']
album.provider = self.prov_id
track = Track()
if not track_obj.get('id') or not track_obj["streamable"] or not track_obj["displayable"]:
# some safety checks
- LOGGER.debug("invalid/unavailable track found: %s" % track_obj.get('id'))
+ LOGGER.warning("invalid/unavailable track found: %s - %s" % (track_obj.get('id'), track_obj.get('name')))
return None
track.item_id = track_obj['id']
track.provider = self.prov_id
from typing import List
import sys
import time
-sys.path.append("..")
-from utils import run_periodic, LOGGER, parse_track_title
-from models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
-from constants import CONF_USERNAME, CONF_PASSWORD, CONF_ENABLED
+from music_assistant.utils import run_periodic, LOGGER, parse_track_title, run_background_task
+from music_assistant.models import MusicProvider, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
+from music_assistant.constants import CONF_USERNAME, CONF_PASSWORD, CONF_ENABLED
from asyncio_throttle import Throttler
import json
import aiohttp
-from cache import use_cache
-
+from music_assistant.modules.cache import use_cache
+import concurrent
def setup(mass):
''' setup the provider'''
def __init__(self, mass, username, password):
self.name = 'Spotify'
self.prov_id = 'spotify'
+ self.audio_fmt = 'ogg'
self._cur_user = None
self.mass = mass
self.cache = mass.cache
elif offset_uri != None: # only for playlists/albums!
opts["offset"] = {"uri": offset_uri }
return await self.__put_data('me/player/play', {"device_id": device_id}, opts)
-
- async def get_stream_details(self, track_id):
- ''' returns the stream details for the provider '''
- track = await self.track(track_id)
- import socket
- host = socket.gethostbyname(socket.gethostname())
- return {
- 'mime_type': 'audio/flac',
- 'duration': track.duration,
- 'sampling_rate': 44100,
- 'bit_depth': 16,
- 'url': 'http://%s/stream/spotify/%s' % (host, track_id)
- }
- async def get_stream(self, track_id):
- ''' get audio stream for a track '''
- sox_effects='vol -12 dB'
- import subprocess
- spotty = self.get_spotty_binary()
- env = os.environ.copy()
- env["SOX_OPTS"] = "−−multi−threaded −−replay−gain track"
- cmd = spotty + ' -n temp -u ' + self._username + ' -p ' + self._password + ' --pass-through --single-track ' + track_id
- cmd += ' | sox -t ogg - -t flac - %s' % sox_effects
- process = await asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE, env=env)
- while not process.stdout.at_eof():
- chunk = await process.stdout.readline()
- if chunk:
- yield chunk
- await process.wait()
-
- async def get_stream__old(self, track_id, sox_effects='vol -12 dB'):
+ async def get_audio_stream(self, track_id):
''' get audio stream for a track '''
import subprocess
spotty = self.get_spotty_binary()
- os.environ["SOX_OPTS"] = "−−multi−threaded −−replay−gain track"
- cmd = spotty + ' -n temp -u ' + self._username + ' -p ' + self._password + ' --pass-through --single-track ' + track_id + ' | sox -t ogg -- - -t flac - %s' % sox_effects
- process = await asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE)
- while not process.stdout.at_eof():
- chunk = await process.stdout.readline()
- if chunk:
- yield chunk
- await process.wait()
-
+ args = ['-n', 'temp', '-u', self._username, '-p', self._password, '--pass-through', '--single-track', track_id]
+ process = await asyncio.create_subprocess_exec(spotty, *args, stdout=asyncio.subprocess.PIPE)
+ try:
+ while not process.stdout.at_eof():
+ chunk = await process.stdout.read(2000000)
+ if chunk:
+ yield chunk
+ else:
+ break
+ except (GeneratorExit, Exception):
+ while True:
+ if not await process.stdout.read(2000000):
+ break
+ await process.wait()
+ LOGGER.info("stream cancelled for track_id %s" % track_id)
+ else:
+ await process.wait()
+ LOGGER.info("end of stream for track_id %s" % track_id)
+
async def __parse_artist(self, artist_obj):
''' parse spotify artist object to generic layout '''
artist = Artist()
if 'track' in track_obj:
track_obj = track_obj['track']
if track_obj['is_local'] or not track_obj['id'] or not track_obj['is_playable']:
+ LOGGER.warning("invalid/unavailable track found: %s - %s" % (track_obj.get('id'), track_obj.get('name')))
return None
track = Track()
track.item_id = track_obj['id']
--- /dev/null
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+
+import asyncio
+import os
+from music_assistant.utils import run_periodic, LOGGER, try_parse_int, try_parse_float, kill_proc
+import aiohttp
+from difflib import SequenceMatcher as Matcher
+from music_assistant.models import MediaType, PlayerState, MusicPlayer
+from typing import List
+import toolz
+import operator
+import socket
+import random
+from copy import deepcopy
+import functools
+import time
+import shutil
+import xml.etree.ElementTree as ET
+import concurrent
+import aiohttp
+import random
+
# paths used by load_providers() and the audio streaming helpers below
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
MODULES_PATH = os.path.join(BASE_DIR, "playerproviders" )
# temp dir holds in-progress downloads; cache dir holds completed tracks
AUDIO_TEMP_DIR = "/tmp/audio_tmp"
AUDIO_CACHE_DIR = "/tmp/audio_cache"
+
class Player():
    ''' several helpers to handle playback through player providers '''

    def __init__(self, mass):
        ''' initialize the player manager: config entries, audio dirs and providers '''
        self.mass = mass
        self.providers = {}
        self._players = {}
        self.create_config_entries()
        # make sure the needed temp/cache folders for audio data exist
        for directory in (AUDIO_CACHE_DIR, AUDIO_TEMP_DIR):
            if not os.path.isdir(directory):
                os.makedirs(directory)
        # dynamically load provider modules
        self.load_providers()
+
+ def create_config_entries(self):
+ ''' sets the config entries for this module (list with key/value pairs)'''
+ self.mass.config['player_settings']['__desc__'] = [
+ ("enabled", False, "player_enabled"),
+ ("name", "", "player_name"),
+ ("group_parent", "<player>", "player_group_with"),
+ ("mute_as_power", False, "player_mute_power"),
+ ("disable_volume", False, "player_disable_vol"),
+ ("apply_group_volume", False, "player_group_vol"),
+ ("apply_group_power", False, "player_group_pow"),
+ ("play_power_on", False, "player_power_play")
+ ]
+
+ async def players(self):
+ ''' return all players '''
+ items = list(self._players.values())
+ items.sort(key=lambda x: x.name, reverse=False)
+ return items
+
    async def player(self, player_id):
        ''' return a single player by its id '''
        # NOTE: raises KeyError for an unknown player_id
        return self._players[player_id]
+
    async def player_command(self, player_id, cmd, cmd_args=None):
        ''' issue command on player (play, pause, next, previous, stop, power, volume, mute) '''
        if player_id not in self._players:
            # silently ignore commands for unknown players
            return
        player = self._players[player_id]
        # handle some common workarounds
        if cmd in ['pause', 'play'] and cmd_args == 'toggle':
            # toggle play/pause based on the current playback state
            cmd = 'pause' if player.state == PlayerState.Playing else 'play'
        if cmd == 'power' and cmd_args == 'toggle':
            cmd_args = 'off' if player.powered else 'on'
        if cmd == 'volume' and cmd_args == 'up':
            # volume up/down moves in steps of 2
            cmd_args = player.volume_level + 2
        elif cmd == 'volume' and cmd_args == 'down':
            cmd_args = player.volume_level - 2
        # redirect playlist related commands to parent player
        if player.group_parent and cmd not in ['power', 'volume', 'mute']:
            return await self.player_command(player.group_parent, cmd, cmd_args)
        # handle hass integration
        await self.__player_command_hass_integration(player, cmd, cmd_args)
        # handle mute as power
        if cmd == 'power' and player.settings['mute_as_power']:
            cmd = 'mute'
            cmd_args = 'on' if cmd_args == 'off' else 'off' # invert logic (power ON is mute OFF)
        # handle group volume for group players
        player_childs = [item for item in self._players.values() if item.group_parent == player_id]
        if player.is_group and cmd == 'volume' and player.settings['apply_group_volume']:
            return await self.__player_command_group_volume(player, player_childs, cmd_args)
        if player.is_group and cmd == 'power' and cmd_args == 'off':
            # powering off a group player also powers off all childs (fire-and-forget)
            for item in player_childs:
                asyncio.create_task(self.player_command(item.player_id, cmd, cmd_args))
        # normal execution of command on player
        prov_id = self._players[player_id].player_provider
        prov = self.providers[prov_id]
        await prov.player_command(player_id, cmd, cmd_args)
        # handle play on power on
        if cmd == 'power' and cmd_args == 'on' and player.settings['play_power_on']:
            LOGGER.info('play_power_on %s' % player.name)
            await prov.player_command(player_id, 'play')
+
+ async def __player_command_hass_integration(self, player, cmd, cmd_args):
+ ''' handle hass integration in player command '''
+ if not self.mass.hass:
+ return
+ if cmd == 'power' and player.settings.get('hass_power_entity') and player.settings.get('hass_power_entity_source'):
+ cur_source = await self.mass.hass.get_state(player.settings['hass_power_entity'], attribute='source')
+ if cmd_args == 'on' and not cur_source:
+ service_data = { 'entity_id': player.settings['hass_power_entity'], 'source':player.settings['hass_power_entity_source'] }
+ await self.mass.hass.call_service('media_player', 'select_source', service_data)
+ elif cmd_args == 'off' and cur_source == player.settings['hass_power_entity_source']:
+ service_data = { 'entity_id': player.settings['hass_power_entity'] }
+ await self.mass.hass.call_service('media_player', 'turn_off', service_data)
+ else:
+ LOGGER.warning('Ignoring power command as required source is not active')
+ elif cmd == 'power' and player.settings.get('hass_power_entity'):
+ domain = player.settings['hass_power_entity'].split('.')[0]
+ service_data = { 'entity_id': player.settings['hass_power_entity']}
+ await self.mass.hass.call_service(domain, 'turn_%s' % cmd_args, service_data)
+ if cmd == 'volume' and player.settings.get('hass_volume_entity'):
+ service_data = { 'entity_id': player.settings['hass_power_entity'], 'volume_level': int(cmd_args)/100}
+ await self.mass.hass.call_service('media_player', 'volume_set', service_data)
+ cmd_args = 100 # just force full volume on actual player if volume is outsourced to hass
+
+ async def __player_command_group_volume(self, player, player_childs, cmd_args):
+ ''' handle group volume if needed'''
+ cur_volume = player.volume_level
+ new_volume = try_parse_int(cmd_args)
+ volume_dif = new_volume - cur_volume
+ volume_dif_percent = volume_dif/cur_volume
+ for child_player in player_childs:
+ if child_player.enabled and child_player.powered:
+ cur_child_volume = child_player.volume_level
+ new_child_volume = cur_child_volume + (cur_child_volume * volume_dif_percent)
+ child_player.volume_level = new_child_volume
+ await self.player_command(child_player.player_id, 'volume', new_child_volume)
+ player.volume_level = new_volume
+ return True
+
    async def remove_player(self, player_id):
        ''' handle a player remove '''
        # pop with default so removing an unknown player is a no-op
        self._players.pop(player_id, None)
        # fire-and-forget event so listeners (e.g. websocket clients) are notified
        asyncio.ensure_future(self.mass.event('player removed', player_id))
+
    async def trigger_update(self, player_id):
        ''' manually trigger update for a player '''
        # re-runs the full update logic with the player's currently stored details
        await self.update_player(self._players[player_id])
+
    async def update_player(self, player_details):
        ''' update (or add) player '''
        # work on a copy so we never mutate the provider's own object
        player_details = deepcopy(player_details)
        LOGGER.debug('Incoming msg from %s' % player_details.name)
        player_id = player_details.player_id
        player_changed = False
        if not player_id in self._players:
            # first message from player: register it
            self._players[player_id] = MusicPlayer()
            player = self._players[player_id]
            player.player_id = player_id
            player.player_provider = player_details.player_provider
            player_changed = True
        else:
            player = self._players[player_id]
        player.settings = await self.__get_player_settings(player_id)
        # handle basic player settings (user overrides from config)
        player_details.enabled = player.settings['enabled']
        player_details.name = player.settings['name'] if player.settings['name'] else player_details.name
        player_details.group_parent = player.settings['group_parent'] if player.settings['group_parent'] else player_details.group_parent
        # handle hass integration
        await self.__update_player_hass_settings(player_details, player.settings)
        # handle mute as power setting
        if player.settings['mute_as_power']:
            player_details.powered = not player_details.muted
        # combine state of group parent
        if player_details.group_parent and player_details.group_parent in self._players:
            parent_player = self._players[player_details.group_parent]
            player_details.cur_item_time = parent_player.cur_item_time
            player_details.cur_item = parent_player.cur_item
            player_details.state = parent_player.state
        # handle group volume/power setting
        player_childs = [item for item in self._players.values() if item.group_parent == player_id]
        player_details.is_group = len(player_childs) > 0
        if player_details.is_group and player.settings['apply_group_volume']:
            await self.__update_player_group_volume(player_details, player_childs)
        if player_details.is_group and player.settings['apply_group_power']:
            await self.__update_player_group_power(player_details, player_childs)
        # compare values to detect changes
        # NOTE(review): assumes cur_item is never None on either side — verify upstream
        if player.cur_item.name != player_details.cur_item.name:
            player_changed = True
            player.cur_item = player_details.cur_item
        for key, cur_value in player.__dict__.items():
            if key != 'settings':
                new_value = getattr(player_details, key)
                if new_value != cur_value:
                    player_changed = True
                    setattr(player, key, new_value)
                    LOGGER.debug('key changed: %s for player %s - new value: %s' % (key, player.name, new_value))
        if player_changed:
            # player is added or updated!
            asyncio.ensure_future(self.mass.event('player updated', player))
            # is groupplayer, trigger update of its childs
            for child in player_childs:
                asyncio.create_task(self.trigger_update(child.player_id))
            # if child player in a group, trigger update of parent
            if player.group_parent:
                asyncio.create_task(self.trigger_update(player.group_parent))
+
    async def __update_player_hass_settings(self, player_details, player_settings):
        ''' handle home assistant integration on a player '''
        if not self.mass.hass:
            # hass integration not enabled/connected
            return
        player_id = player_details.player_id
        # NOTE(review): the player_settings argument is immediately shadowed by this
        # config lookup — confirm whether the parameter can be dropped
        player_settings = self.mass.config['player_settings'][player_id]
        if player_settings.get('hass_power_entity') and player_settings.get('hass_power_entity_source'):
            # power state is bound to a specific source on a hass media_player entity
            hass_state = await self.mass.hass.get_state(
                player_settings['hass_power_entity'],
                attribute='source',
                register_listener=functools.partial(self.trigger_update, player_id))
            player_details.powered = hass_state == player_settings['hass_power_entity_source']
        elif player_settings.get('hass_power_entity'):
            # power state is bound to a plain hass entity (e.g. a switch)
            hass_state = await self.mass.hass.get_state(
                player_settings['hass_power_entity'],
                attribute='state',
                register_listener=functools.partial(self.trigger_update, player_id))
            player_details.powered = hass_state != 'off'
        if player_settings.get('hass_volume_entity'):
            # volume is read from a hass media_player entity (0.0-1.0 scaled to 0-100)
            hass_state = await self.mass.hass.get_state(
                player_settings['hass_volume_entity'],
                attribute='volume_level',
                register_listener=functools.partial(self.trigger_update, player_id))
            player_details.volume_level = int(try_parse_float(hass_state)*100)
+
+ async def __update_player_group_volume(self, player_details, player_childs):
+ ''' handle group volume '''
+ group_volume = 0
+ active_players = 0
+ for child_player in player_childs:
+ if child_player.enabled and child_player.powered:
+ group_volume += child_player.volume_level
+ active_players += 1
+ group_volume = group_volume / active_players if active_players else 0
+ player_details.volume_level = group_volume
+
+ async def __update_player_group_power(self, player_details, player_childs):
+ ''' handle group power '''
+ player_powered = False
+ for child_player in player_childs:
+ if child_player.powered:
+ player_powered = True
+ break
+ if player_details.powered and not player_powered:
+ # all childs turned off so turn off group player
+ LOGGER.info('all childs turned off so turn off group player %s' % player_details.name)
+ await self. player_command(player_details.player_id, 'power', 'off')
+ player_details.powered = False
+ elif not player_details.powered and player_powered:
+ # all childs turned off but group player still off, so turn it on
+ LOGGER.info('all childs turned off but group player still off, so turn it on %s' % player_details.name)
+ await self. player_command(player_details.player_id, 'power', 'on')
+ player_details.powered = True
+
+ async def __get_player_settings(self, player_id):
+ ''' get (or create) player config '''
+ player_settings = self.mass.config['player_settings'].get(player_id,{})
+ for key, def_value, desc in self.mass.config['player_settings']['__desc__']:
+ if not key in player_settings:
+ player_settings[key] = def_value
+ self.mass.config['player_settings'][player_id] = player_settings
+ return player_settings
+
    async def play_media(self, player_id, media_item, queue_opt='play'):
        '''
        play media on a player
        player_id: id of the player
        media_item: media item(s) that should be played (Track, Album, Artist, Playlist)
        queue_opt: play, replace, next or add
        '''
        if not player_id in self._players:
            LOGGER.warning('Player %s not found' % player_id)
            return False
        player_prov = self.providers[self._players[player_id].player_provider]
        # a single item or list of items may be provided
        media_items = media_item if isinstance(media_item, list) else [media_item]
        playable_tracks = []
        for media_item in media_items:
            # collect tracks to play: expand artist/album/playlist into individual tracks
            if media_item.media_type == MediaType.Artist:
                tracks = await self.mass.music.artist_toptracks(media_item.item_id, provider=media_item.provider)
            elif media_item.media_type == MediaType.Album:
                tracks = await self.mass.music.album_tracks(media_item.item_id, provider=media_item.provider)
            elif media_item.media_type == MediaType.Playlist:
                tracks = await self.mass.music.playlist_tracks(media_item.item_id, provider=media_item.provider, offset=0, limit=0)
            else:
                tracks = [media_item] # single track
            # check supported music providers by this player and work out how to handle playback...
            for track in tracks:
                # sort by quality: prefer the highest-quality version the player supports
                match_found = False
                for prov_media in sorted(track.provider_ids, key=operator.itemgetter('quality'), reverse=True):
                    media_provider = prov_media['provider']
                    media_item_id = prov_media['item_id']
                    player_supported_provs = player_prov.supported_musicproviders
                    if media_provider in player_supported_provs:
                        # the provider can handle this media_type directly !
                        track.uri = await self.get_track_uri(media_item_id, media_provider)
                        playable_tracks.append(track)
                        match_found = True
                    elif 'http' in player_prov.supported_musicproviders:
                        # fallback to http streaming if supported
                        track.uri = await self.get_track_uri(media_item_id, media_provider, True)
                        playable_tracks.append(track)
                        match_found = True
                    if match_found:
                        break
        if playable_tracks:
            if self._players[player_id].shuffle_enabled:
                random.shuffle(playable_tracks)
            if queue_opt in ['next', 'play'] and len(playable_tracks) > 1:
                queue_opt = 'replace' # always assume playback of multiple items as new queue
            return await player_prov.play_media(player_id, playable_tracks, queue_opt)
        else:
            raise Exception("Musicprovider and/or media not supported by player %s !" % (player_id) )
+
+ async def get_track_uri(self, item_id, provider, http_stream=False):
+ ''' generate the URL/URI for a media item '''
+ uri = ""
+ if http_stream:
+ host = socket.gethostbyname(socket.gethostname())
+ uri = 'http://%s:8095/stream/%s/%s'% (host, provider, item_id)
+ elif provider == "spotify":
+ uri = 'spotify://spotify:track:%s' % item_id
+ elif provider == "qobuz":
+ uri = 'qobuz://%s.flac' % item_id
+ elif provider == "file":
+ uri = item_id
+ return uri
+
    async def player_queue(self, player_id, offset=0, limit=50):
        ''' return the items in the player's queue '''
        # queue handling is fully delegated to the player's provider
        player = self._players[player_id]
        player_prov = self.providers[player.player_provider]
        return await player_prov.player_queue(player_id, offset=offset, limit=limit)
+
    async def get_audio_stream(self, track_id, provider):
        ''' get audio stream from provider and apply additional effects/processing where needed'''
        input_audio_fmt = self.mass.music.providers[provider].audio_fmt
        cachefile = self.__get_track_cache_filename(track_id, provider)
        gain_correct = await self.__get_track_gain_correct(track_id, provider)
        # sox applies the gain correction while transcoding to flac
        sox_effects=['vol', str(gain_correct), 'dB' ]
        if os.path.isfile(cachefile):
            # we have a temp file for this track which we can use
            args = ['-t', input_audio_fmt, cachefile, '-t', 'flac', '-', *sox_effects]
            process = await asyncio.create_subprocess_exec('sox', *args,
                stdout=asyncio.subprocess.PIPE)
            buffer_task = None
        else:
            # stream from provider: sox reads from stdin, filled by a background task
            args = ['-t', input_audio_fmt, '-', '-t', 'flac', '-', *sox_effects]
            process = await asyncio.create_subprocess_exec('sox', *args,
                stdout=asyncio.subprocess.PIPE, stdin=asyncio.subprocess.PIPE)
            buffer_task = asyncio.create_task(
                self.__fill_audio_buffer(process.stdin, track_id, provider))
        try:
            # yield the chunks from stdout (~2MB at a time)
            while not process.stdout.at_eof():
                chunk = await process.stdout.read(2000000)
                if not chunk:
                    break
                yield chunk
        except (asyncio.CancelledError, concurrent.futures._base.CancelledError):
            # client disconnected so cleanup
            #if buffer_task:
            #    buffer_task.cancel()
            # Could not figure out how to reliably close process without deadlocks
            # so instead just read all data for a clean exit
            while True:
                if not await process.stdout.read(2000000):
                    break
            await process.wait()
            LOGGER.info("streaming of track_id %s aborted (client disconnect ?)" % track_id)
            raise asyncio.CancelledError()
        except Exception as exc:
            LOGGER.error(exc)
        else:
            await process.wait()
            LOGGER.info("streaming of track_id %s completed" % track_id)
+
+ async def __analyze_track_audio(self, musicfile, track_id, provider):
+ ''' analyze track audio, for now we only calculate EBU R128 loudness '''
+ import platform
+ analyse_dir = os.path.join(self.mass.datapath, 'analyse_info')
+ analysis_file = os.path.join(analyse_dir, "%s_%s.xml" %(provider, track_id.split(os.sep)[-1]))
+ if not os.path.isdir(analyse_dir):
+ os.makedirs(analyse_dir)
+ bs1770_binary = None
+ if platform.system() == "Windows":
+ bs1770_binary = os.path.join(os.path.dirname(__file__), "bs1770gain", "win64", "bs1770gain")
+ elif platform.system() == "Darwin":
+ # macos binary is x86_64 intel
+ bs1770_binary = os.path.join(os.path.dirname(__file__), "bs1770gain", "osx", "bs1770gain")
+ elif platform.system() == "Linux":
+ architecture = platform.machine()
+ if architecture.startswith('AMD64') or architecture.startswith('x86_64'):
+ bs1770_binary = os.path.join(os.path.dirname(__file__), "bs1770gain", "linux64", "bs1770gain")
+ # TODO: build armhf binary
+ cmd = '%s %s --loglevel quiet --xml --ebu -f %s' % (bs1770_binary, musicfile, analysis_file)
+ process = await asyncio.create_subprocess_shell(cmd)
+ await process.wait()
+
    async def __get_track_gain_correct(self, track_id, provider):
        ''' get the gain correction that should be applied to a track '''
        target_gain = -23  # EBU R128 reference level (LUFS)
        fallback_gain = -14 # fallback if no analyse info is available
        analysis_file = os.path.join(self.mass.datapath, 'analyse_info', "%s_%s.xml" %(provider, track_id.split(os.sep)[-1]))
        try: # read audio analysis if available
            tree = ET.parse(analysis_file)
            trackinfo = tree.getroot().find("album").find("track")
            track_lufs = trackinfo.find('integrated').get('lufs')
            gain_correct = target_gain - float(track_lufs)
            LOGGER.info("apply gain correction of %s" % gain_correct)
        except Exception:
            gain_correct = fallback_gain # fallback value
            if os.path.isfile(analysis_file):
                # analysis file exists but could not be parsed: remove and re-analyze
                os.remove(analysis_file)
                cachefile = self.__get_track_cache_filename(track_id, provider)
                # reschedule analyze task to try again
                asyncio.create_task(self.__analyze_track_audio(cachefile, track_id, provider))
        return gain_correct
+
+ async def __fill_audio_buffer(self, buf, track_id, provider):
+ ''' get audio data from provider and write to buffer'''
+ # fill the buffer with audio data
+ # a tempfile is created so we can do audio analysis
+ try:
+ tmpfile = os.path.join(AUDIO_TEMP_DIR, '%s%s%s.tmp' % (random.randint(0, 999), track_id, random.randint(0, 999)))
+ finalfile = self.__get_track_cache_filename(track_id, provider)
+ fd = open(tmpfile, 'wb')
+ async for chunk in self.mass.music.providers[provider].get_audio_stream(track_id):
+ buf.write(chunk)
+ await buf.drain()
+ fd.write(chunk)
+ await buf.drain()
+ buf.write_eof()
+ fd.close()
+ except Exception as exc:
+ LOGGER.error(exc)
+ else:
+ # successfull completion
+ if os.path.isfile(tmpfile) and not os.path.isfile(finalfile):
+ shutil.move(tmpfile, finalfile)
+ asyncio.create_task(self.__analyze_track_audio(finalfile, track_id, provider))
+ LOGGER.info("fill_audio_buffer complete for track %s" % track_id)
+ finally:
+ # always clean up temp file
+ if os.path.isfile(tmpfile):
+ of.remove(tmpfile)
+
+ @staticmethod
+ def __get_track_cache_filename(track_id, provider):
+ ''' get filename for a track to use as cache file '''
+ return os.path.join(AUDIO_CACHE_DIR, '%s_%s' %(provider, track_id.split(os.sep)[-1]))
+
+
    def load_providers(self):
        ''' dynamically load providers '''
        # scan the playerproviders folder for python modules (skip private/hidden files)
        for item in os.listdir(MODULES_PATH):
            if (os.path.isfile(os.path.join(MODULES_PATH, item)) and not item.startswith("_") and
                    item.endswith('.py') and not item.startswith('.')):
                module_name = item.replace(".py","")
                LOGGER.debug("Loading playerprovider module %s" % module_name)
                try:
                    # NOTE(review): import prefix 'modules.playerproviders.' differs from the
                    # 'music_assistant.' package prefix used elsewhere — confirm it resolves
                    mod = __import__("modules.playerproviders." + module_name, fromlist=[''])
                    # make sure a config section exists, seeded with the module's defaults
                    if not self.mass.config['playerproviders'].get(module_name):
                        self.mass.config['playerproviders'][module_name] = {}
                    self.mass.config['playerproviders'][module_name]['__desc__'] = mod.config_entries()
                    for key, def_value, desc in mod.config_entries():
                        if not key in self.mass.config['playerproviders'][module_name]:
                            self.mass.config['playerproviders'][module_name][key] = def_value
                    # setup() returns the provider instance (falsy when disabled)
                    mod = mod.setup(self.mass)
                    if mod:
                        self.providers[mod.prov_id] = mod
                        cls_name = mod.__class__.__name__
                        LOGGER.info("Successfully initialized module %s" % cls_name)
                except Exception as exc:
                    LOGGER.exception("Error loading module %s: %s" %(module_name, exc))
from typing import List
import random
import sys
-sys.path.append("..")
-from utils import run_periodic, run_background_task, LOGGER, parse_track_title, try_parse_int
-from models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
-from constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT
+from music_assistant.utils import run_periodic, run_background_task, LOGGER, parse_track_title, try_parse_int
+from music_assistant.models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
+from music_assistant.constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT
import json
import aiohttp
import time
import hashlib
from asyncio_throttle import Throttler
from aiocometd import Client, ConnectionType, Extension
-from cache import use_cache
+from music_assistant.modules.cache import use_cache
import copy
import pychromecast
from pychromecast.controllers.multizone import MultizoneController
'''
play media on a player
'''
- player = self._chromecasts[player_id]
- media_controller = player.media_controller
+ castplayer = self._chromecasts[player_id]
+ player = self._players[player_id]
+ media_controller = castplayer.media_controller
receiver_ctrl = media_controller._socket_client.receiver_controller
cur_queue_index = 0
if media_controller.queue_cur_id != None:
if item['itemId'] == media_controller.queue_cur_id:
cur_queue_item = item
# find out the current index
- for counter, value in enumerate(player.queue):
+ for counter, value in enumerate(pcastplayerlayer.queue):
if value['media']['contentId'] == cur_queue_item['media']['contentId']:
cur_queue_index = counter
break
break
- if (not media_controller.queue_cur_id or not media_controller.status.media_session_id or not player.queue):
+ if (not media_controller.queue_cur_id or not media_controller.status.media_session_id or not castplayer.queue):
queue_opt = 'replace'
new_queue_items = []
new_queue_items.append(queue_item)
if (queue_opt in ['replace', 'play'] or not media_controller.queue_cur_id or
- not media_controller.status.media_session_id or not player.queue):
+ not media_controller.status.media_session_id or not castplayer.queue):
# load new Chromecast queue with items
if queue_opt == 'add':
# append items to queue
- player.queue = player.queue + new_queue_items
+ castplayer.queue = castplayer.queue + new_queue_items
startindex = cur_queue_index
elif queue_opt == 'play':
# keep current queue but append new items at begin and start playing first item
- player.queue = new_queue_items + player.queue[cur_queue_index:] + player.queue[:cur_queue_index]
+ castplayer.queue = new_queue_items + castplayer.queue[cur_queue_index:] + castplayer.queue[:cur_queue_index]
startindex = 0
elif queue_opt == 'next':
# play the new items after the current playing item (insert before current next item)
- player.queue = new_queue_items + player.queue[cur_queue_index:] + player.queue[:cur_queue_index]
+ castplayer.queue = new_queue_items + castplayer.queue[cur_queue_index:] + plcastplayerayer.queue[:cur_queue_index]
startindex = cur_queue_index
else:
# overwrite the whole queue with new item(s)
- player.queue = new_queue_items
+ castplayer.queue = new_queue_items
startindex = 0
# load first 10 items as soon as possible
queuedata = {
"shuffle": player.shuffle_enabled,
"queueType": "PLAYLIST",
"startIndex": startindex, # Item index to play after this request or keep same item if undefined
- "items": player.queue[:10]
+ "items": castplayer.queue[:10]
}
await self.__send_player_queue(receiver_ctrl, media_controller, queuedata)
# append the rest of the items in the queue in chunks
- for chunk in chunks(player.queue[10:], 100):
+ for chunk in chunks(castplayer.queue[10:], 100):
await asyncio.sleep(1)
queuedata = { "type": 'QUEUE_INSERT', "items": chunk }
await self.__send_player_queue(receiver_ctrl, media_controller, queuedata)
elif queue_opt == 'add':
# existing queue is playing: simply append items to the end of the queue (in small chunks)
- player.queue = player.queue + new_queue_items
+ castplayer.queue = castplayer.queue + new_queue_items
insertbefore = None
for chunk in chunks(new_queue_items, 100):
queuedata = { "type": 'QUEUE_INSERT', "items": chunk }
await asyncio.sleep(1)
elif queue_opt == 'next':
# play the new items after the current playing item (insert before current next item)
- player.queue = player.queue[:cur_queue_index] + new_queue_items + player.queue[cur_queue_index:]
+ player.queue = castplayer.queue[:cur_queue_index] + new_queue_items + castplayer.queue[cur_queue_index:]
queuedata = {
"type": 'QUEUE_INSERT',
"insertBefore": media_controller.queue_cur_id+1,
track_id = uri.replace('qobuz://','').replace('.flac','')
track = await self.mass.music.providers['qobuz'].track(track_id)
elif uri.startswith('http') and '/stream' in uri:
- try:
- item_id = uri.split('/')[-1]
- provider = uri.split('/')[-2]
- track = await self.mass.music.providers[provider].track(item_id)
- except: pass
+ item_id = uri.split('/')[-1]
+ provider = uri.split('/')[-2]
+ track = await self.mass.music.providers[provider].track(item_id)
return track
async def __handle_group_members_update(self, mz, added_player=None, removed_player=None):
from typing import List
import random
import sys
-sys.path.append("..")
-from utils import run_periodic, LOGGER, parse_track_title
-from models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
-from constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT
+from music_assistant.utils import run_periodic, LOGGER, parse_track_title
+from music_assistant.models import PlayerProvider, MusicPlayer, PlayerState, MediaType, TrackQuality, AlbumType, Artist, Album, Track, Playlist
+from music_assistant.constants import CONF_ENABLED, CONF_HOSTNAME, CONF_PORT
import json
import aiohttp
import time
import hashlib
from asyncio_throttle import Throttler
from aiocometd import Client, ConnectionType, Extension
-from cache import use_cache
+from music_assistant.modules.cache import use_cache
import copy
def setup(mass):
self._players = {}
self.last_msg_received = 0
self.supported_musicproviders = ['qobuz', 'file', 'spotify', 'http']
+ self.supported_musicproviders = ['http']
self.http_session = aiohttp.ClientSession(loop=mass.event_loop)
# we use a combi of active polling and subscriptions because the cometd implementation of LMS is somewhat unreliable
asyncio.ensure_future(self.__lms_events())
return await self.mass.music.providers['spotify'].track(track_id)
except Exception as exc:
LOGGER.error(exc)
+ elif track_url.startswith('http') and '/stream' in track_url:
+ item_id = track_url.split('/')[-1]
+ provider = track_url.split('/')[-2]
+ return await self.mass.music.providers[provider].track(item_id)
# fallback to a generic track
track = Track()
track.name = track_details['title']
import asyncio
import os
-from utils import run_periodic, LOGGER
+from music_assistant.utils import run_periodic, LOGGER
import json
import aiohttp
from aiohttp import web
-from models import MediaType, media_type_from_string
+from music_assistant.models import MediaType, media_type_from_string
from functools import partial
json_serializer = partial(json.dumps, default=lambda x: x.__dict__)
import ssl
await self.mass.remove_event_listener(cb_id)
await ws.close()
else:
- # for now we only use WS for player commands
+ # for now we only use WS for (simple) player commands
if msg.data == 'players':
players = await self.mass.player.players()
ws_msg = {'message': 'players', 'message_details': players}
await ws.send_json(ws_msg, dumps=json_serializer)
- # elif msg.data.startswith('players') and '/play_media/' in msg.data:
- # #'players/{player_id}/play_media/{media_type}/{media_id}/{queue_opt}'
- # msg_data_parts = msg.data.split('/')
- # player_id = msg_data_parts[1]
- # media_type = msg_data_parts[3]
- # media_type = media_type_from_string(media_type)
- # media_id = msg_data_parts[4]
- # queue_opt = msg_data_parts[5] if len(msg_data_parts) == 6 else 'replace'
- # media_item = await self.mass.music.item(media_id, media_type, lazy=True)
- # await self.mass.player.play_media(player_id, media_item, queue_opt)
-
elif msg.data.startswith('players') and '/cmd/' in msg.data:
# players/{player_id}/cmd/{cmd} or players/{player_id}/cmd/{cmd}/{cmd_args}
msg_data_parts = msg.data.split('/')
''' start streaming audio from provider '''
track_id = request.match_info.get('track_id')
provider = request.match_info.get('provider')
- #stream_details = await self.mass.music.providers[provider].get_stream_details(track_id)
- # resp = web.StreamResponse(status=200,
- # reason='OK',
- # headers={'Content-Type': stream_details['mime_type']})
resp = web.StreamResponse(status=200,
reason='OK',
headers={'Content-Type': 'audio/flac'})
await resp.prepare(request)
- async for chunk in self.mass.music.providers[provider].get_stream(track_id):
+ async for chunk in self.mass.player.get_audio_stream(track_id, provider):
await resp.write(chunk)
return resp
\ No newline at end of file
+++ /dev/null
-#!/usr/bin/env python3
-# -*- coding:utf-8 -*-
-
-import asyncio
-import os
-from utils import run_periodic, run_async_background_task, LOGGER, try_parse_int
-import aiohttp
-from difflib import SequenceMatcher as Matcher
-from models import MediaType
-from typing import List
-import toolz
-import operator
-
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-MODULES_PATH = os.path.join(BASE_DIR, "modules", "musicproviders" )
-
-class Music():
- ''' several helpers around the musicproviders '''
-
- def __init__(self, mass):
- self.sync_running = False
- self.mass = mass
- self.providers = {}
- # dynamically load musicprovider modules
- self.load_music_providers()
- # schedule sync task
- mass.event_loop.create_task(self.sync_music_providers())
-
- async def item(self, item_id, media_type:MediaType, provider='database', lazy=True):
- ''' get single music item by id and media type'''
- if media_type == MediaType.Artist:
- return await self.artist(item_id, provider, lazy=lazy)
- elif media_type == MediaType.Album:
- return await self.album(item_id, provider, lazy=lazy)
- elif media_type == MediaType.Track:
- return await self.track(item_id, provider, lazy=lazy)
- elif media_type == MediaType.Playlist:
- return await self.playlist(item_id, provider)
- else:
- return None
-
- async def library_artists(self, limit=0, offset=0, orderby='name', provider_filter=None):
- ''' return all library artists, optionally filtered by provider '''
- return await self.mass.db.library_artists(provider=provider_filter, limit=limit, offset=offset, orderby=orderby)
-
- async def library_albums(self, limit=0, offset=0, orderby='name', provider_filter=None):
- ''' return all library albums, optionally filtered by provider '''
- return await self.mass.db.library_albums(provider=provider_filter, limit=limit, offset=offset, orderby=orderby)
-
- async def library_tracks(self, limit=0, offset=0, orderby='name', provider_filter=None):
- ''' return all library tracks, optionally filtered by provider '''
- return await self.mass.db.library_tracks(provider=provider_filter, limit=limit, offset=offset, orderby=orderby)
-
- async def playlists(self, limit=0, offset=0, orderby='name', provider_filter=None):
- ''' return all library playlists, optionally filtered by provider '''
- return await self.mass.db.playlists(provider=provider_filter, limit=limit, offset=offset, orderby=orderby)
-
- async def library_items(self, media_type:MediaType, limit=0, offset=0, orderby='name', provider_filter=None):
- ''' get multiple music items in library'''
- if media_type == MediaType.Artist:
- return await self.library_artists(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter)
- elif media_type == MediaType.Album:
- return await self.library_albums(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter)
- elif media_type == MediaType.Track:
- return await self.library_tracks(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter)
- elif media_type == MediaType.Playlist:
- return await self.playlists(limit=limit, offset=offset, orderby=orderby, provider_filter=provider_filter)
-
- async def artist(self, item_id, provider='database', lazy=True):
- ''' get artist by id '''
- if not provider or provider == 'database':
- return await self.mass.db.artist(item_id)
- return await self.providers[provider].artist(item_id, lazy=lazy)
-
- async def album(self, item_id, provider='database', lazy=True):
- ''' get album by id '''
- if not provider or provider == 'database':
- return await self.mass.db.album(item_id)
- return await self.providers[provider].album(item_id, lazy=lazy)
-
- async def track(self, item_id, provider='database', lazy=True):
- ''' get track by id '''
- if not provider or provider == 'database':
- return await self.mass.db.track(item_id)
- return await self.providers[provider].track(item_id, lazy=lazy)
-
- async def playlist(self, item_id, provider='database'):
- ''' get playlist by id '''
- if not provider or provider == 'database':
- return await self.mass.db.playlist(item_id)
- return await self.providers[provider].playlist(item_id)
-
- async def playlist_by_name(self, name):
- ''' get playlist by name '''
- for playlist in await self.playlists():
- if playlist.name == name:
- return playlist
- return None
-
- async def artist_toptracks(self, artist_id, provider='database'):
- ''' get top tracks for given artist '''
- artist = await self.artist(artist_id, provider)
- # always append database tracks
- items = await self.mass.db.artist_tracks(artist.item_id)
- for prov_mapping in artist.provider_ids:
- prov_id = prov_mapping['provider']
- prov_item_id = prov_mapping['item_id']
- prov_obj = self.providers[prov_id]
- items += await prov_obj.artist_toptracks(prov_item_id)
- items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
- items.sort(key=lambda x: x.name, reverse=False)
- return items
-
- async def artist_albums(self, artist_id, provider='database'):
- ''' get (all) albums for given artist '''
- artist = await self.artist(artist_id, provider)
- # always append database tracks
- items = await self.mass.db.artist_albums(artist.item_id)
- for prov_mapping in artist.provider_ids:
- prov_id = prov_mapping['provider']
- prov_item_id = prov_mapping['item_id']
- prov_obj = self.providers[prov_id]
- items += await prov_obj.artist_albums(prov_item_id)
- items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
- items.sort(key=lambda x: x.name, reverse=False)
- return items
-
- async def album_tracks(self, album_id, provider='database'):
- ''' get the album tracks for given album '''
- items = []
- album = await self.album(album_id, provider)
- for prov_mapping in album.provider_ids:
- prov_id = prov_mapping['provider']
- prov_item_id = prov_mapping['item_id']
- prov_obj = self.providers[prov_id]
- items += await prov_obj.album_tracks(prov_item_id)
- items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
- return items
-
- async def playlist_tracks(self, playlist_id, provider='database', offset=0, limit=50):
- ''' get the tracks for given playlist '''
- playlist = None
- if not provider or provider == 'database':
- playlist = await self.mass.db.playlist(playlist_id)
- if playlist and playlist.is_editable:
- # database synced playlist, return tracks from db...
- return await self.mass.db.playlist_tracks(playlist.item_id, offset=offset, limit=limit)
- else:
- # return playlist tracks from provider
- items = []
- playlist = await self.playlist(playlist_id)
- for prov_mapping in playlist.provider_ids:
- prov_id = prov_mapping['provider']
- prov_item_id = prov_mapping['item_id']
- prov_obj = self.providers[prov_id]
- items += await prov_obj.playlist_tracks(prov_item_id, offset=offset, limit=limit)
- if items:
- break
- items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
- return items
-
- async def search(self, searchquery, media_types:List[MediaType], limit=10, online=False):
- ''' search database or providers '''
- # get results from database
- result = await self.mass.db.search(searchquery, media_types, limit)
- if online:
- # include results from music providers
- for prov in self.providers.values():
- prov_results = await prov.search(searchquery, media_types, limit)
- for item_type, items in prov_results.items():
- result[item_type] += items
- # filter out duplicates
- for item_type, items in result.items():
- items = list(toolz.unique(items, key=operator.attrgetter('item_id')))
- return result
-
- async def item_action(self, item_id, media_type, provider='database', action=None):
- ''' perform action on item (such as library add/remove) '''
- result = None
- item = await self.item(item_id, media_type, provider)
- if item and action in ['add', 'remove']:
- # remove or add item to the library
- for prov_mapping in result.provider_ids:
- prov_id = prov_mapping['provider']
- prov_item_id = prov_mapping['item_id']
- for prov in self.providers.values():
- if prov.prov_id == prov_id:
- if action == 'add':
- result = await prov.add_library(prov_item_id, media_type)
- elif action == 'remove':
- result = await prov.remove_library(prov_item_id, media_type)
- return result
-
- @run_periodic(3600)
- async def sync_music_providers(self):
- ''' periodic sync of all music providers '''
- if self.sync_running:
- return
- self.sync_running = True
- for prov_id in self.providers.keys():
- # sync library artists
- await self.sync_library_artists(prov_id)
- await self.sync_library_albums(prov_id)
- await self.sync_library_tracks(prov_id)
- await self.sync_playlists(prov_id)
- self.sync_running = False
-
- async def sync_library_artists(self, prov_id):
- ''' sync library artists for given provider'''
- music_provider = self.providers[prov_id]
- prev_items = await self.library_artists(provider_filter=prov_id)
- prev_db_ids = [item.item_id for item in prev_items]
- cur_items = await music_provider.get_library_artists()
- cur_db_ids = []
- for item in cur_items:
- db_item = await music_provider.artist(item.item_id, lazy=False)
- cur_db_ids.append(db_item.item_id)
- if not db_item.item_id in prev_db_ids:
- await self.mass.db.add_to_library(db_item.item_id, MediaType.Artist, prov_id)
- # process deletions
- for db_id in prev_db_ids:
- if db_id not in cur_db_ids:
- await self.mass.db.remove_from_library(db_id, MediaType.Artist, prov_id)
- LOGGER.info("Finished syncing Artists for provider %s" % prov_id)
-
- async def sync_library_albums(self, prov_id):
- ''' sync library albums for given provider'''
- music_provider = self.providers[prov_id]
- prev_items = await self.library_albums(provider_filter=prov_id)
- prev_db_ids = [item.item_id for item in prev_items]
- cur_items = await music_provider.get_library_albums()
- cur_db_ids = []
- for item in cur_items:
- db_item = await music_provider.album(item.item_id, lazy=False)
- cur_db_ids.append(db_item.item_id)
- # precache album tracks...
- for album_track in await music_provider.get_album_tracks(item.item_id):
- await music_provider.track(album_track.item_id)
- if not db_item.item_id in prev_db_ids:
- await self.mass.db.add_to_library(db_item.item_id, MediaType.Album, prov_id)
- # process deletions
- for db_id in prev_db_ids:
- if db_id not in cur_db_ids:
- await self.mass.db.remove_from_library(db_id, MediaType.Album, prov_id)
- LOGGER.info("Finished syncing Albums for provider %s" % prov_id)
-
- async def sync_library_tracks(self, prov_id):
- ''' sync library tracks for given provider'''
- music_provider = self.providers[prov_id]
- prev_items = await self.library_tracks(provider_filter=prov_id)
- prev_db_ids = [item.item_id for item in prev_items]
- cur_items = await music_provider.get_library_tracks()
- cur_db_ids = []
- for item in cur_items:
- db_item = await music_provider.track(item.item_id, lazy=False)
- cur_db_ids.append(db_item.item_id)
- if not db_item.item_id in prev_db_ids:
- await self.mass.db.add_to_library(db_item.item_id, MediaType.Track, prov_id)
- # process deletions
- for db_id in prev_db_ids:
- if db_id not in cur_db_ids:
- await self.mass.db.remove_from_library(db_id, MediaType.Track, prov_id)
- LOGGER.info("Finished syncing Tracks for provider %s" % prov_id)
-
- async def sync_playlists(self, prov_id):
- ''' sync library playlists for given provider'''
- music_provider = self.providers[prov_id]
- prev_items = await self.playlists(provider_filter=prov_id)
- prev_db_ids = [item.item_id for item in prev_items]
- cur_items = await music_provider.get_playlists()
- cur_db_ids = []
- for item in cur_items:
- # always add to db because playlist attributes could have changed
- db_id = await self.mass.db.add_playlist(item)
- cur_db_ids.append(db_id)
- if not db_id in prev_db_ids:
- await self.mass.db.add_to_library(db_id, MediaType.Playlist, prov_id)
- if item.is_editable:
- # precache/sync playlist tracks (user owned playlists only)
- asyncio.create_task( self.sync_playlist_tracks(db_id, prov_id, item.item_id) )
- # process playlist deletions
- for db_id in prev_db_ids:
- if db_id not in cur_db_ids:
- await self.mass.db.remove_from_library(db_id, MediaType.Playlist, prov_id)
- LOGGER.info("Finished syncing Playlists for provider %s" % prov_id)
-
- async def sync_playlist_tracks(self, db_playlist_id, prov_id, prov_playlist_id):
- ''' sync library playlists tracks for given provider'''
- music_provider = self.providers[prov_id]
- prev_items = await self.playlist_tracks(db_playlist_id)
- prev_db_ids = [item.item_id for item in prev_items]
- cur_items = await music_provider.get_playlist_tracks(prov_playlist_id, limit=0)
- cur_db_ids = []
- pos = 0
- for item in cur_items:
- # we need to do this the complicated way because the file provider can return tracks from other providers
- for prov_mapping in item.provider_ids:
- item_provider = prov_mapping['provider']
- prov_item_id = prov_mapping['item_id']
- db_item = await self.providers[item_provider].track(prov_item_id, lazy=False)
- cur_db_ids.append(db_item.item_id)
- if not db_item.item_id in prev_db_ids:
- await self.mass.db.add_playlist_track(db_playlist_id, db_item.item_id, pos)
- pos += 1
- # process playlist track deletions
- for db_id in prev_db_ids:
- if db_id not in cur_db_ids:
- await self.mass.db.remove_playlist_track(db_playlist_id, db_id)
- LOGGER.info("Finished syncing Playlist %s tracks for provider %s" % (prov_playlist_id, prov_id))
-
- def load_music_providers(self):
- ''' dynamically load musicproviders '''
- for item in os.listdir(MODULES_PATH):
- if (os.path.isfile(os.path.join(MODULES_PATH, item)) and not item.startswith("_") and
- item.endswith('.py') and not item.startswith('.')):
- module_name = item.replace(".py","")
- LOGGER.debug("Loading musicprovider module %s" % module_name)
- try:
- mod = __import__("modules.musicproviders." + module_name, fromlist=[''])
- if not self.mass.config['musicproviders'].get(module_name):
- self.mass.config['musicproviders'][module_name] = {}
- self.mass.config['musicproviders'][module_name]['__desc__'] = mod.config_entries()
- for key, def_value, desc in mod.config_entries():
- if not key in self.mass.config['musicproviders'][module_name]:
- self.mass.config['musicproviders'][module_name][key] = def_value
- mod = mod.setup(self.mass)
- if mod:
- self.providers[mod.prov_id] = mod
- cls_name = mod.__class__.__name__
- LOGGER.info("Successfully initialized module %s" % cls_name)
- except Exception as exc:
- LOGGER.exception("Error loading module %s: %s" %(module_name, exc))
+++ /dev/null
-#!/usr/bin/env python3
-# -*- coding:utf-8 -*-
-
-import asyncio
-import os
-from utils import run_periodic, LOGGER, try_parse_int, try_parse_float
-import aiohttp
-from difflib import SequenceMatcher as Matcher
-from models import MediaType, PlayerState, MusicPlayer
-from typing import List
-import toolz
-import operator
-import socket
-import random
-from copy import deepcopy
-import functools
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-MODULES_PATH = os.path.join(BASE_DIR, "modules", "playerproviders" )
-
-class Player():
- ''' several helpers to handle playback through player providers '''
-
- def __init__(self, mass):
- self.mass = mass
- self.providers = {}
- self._players = {}
- self.create_config_entries()
- # dynamically load provider modules
- self.load_providers()
-
- def create_config_entries(self):
- ''' sets the config entries for this module (list with key/value pairs)'''
- self.mass.config['player_settings']['__desc__'] = [
- ("enabled", False, "player_enabled"),
- ("name", "", "player_name"),
- ("group_parent", "<player>", "player_group_with"),
- ("mute_as_power", False, "player_mute_power"),
- ("disable_volume", False, "player_disable_vol"),
- ("apply_group_volume", False, "player_group_vol"),
- ("apply_group_power", False, "player_group_pow"),
- ("play_power_on", False, "player_power_play")
- ]
-
- async def players(self):
- ''' return all players '''
- items = list(self._players.values())
- items.sort(key=lambda x: x.name, reverse=False)
- return items
-
- async def player(self, player_id):
- ''' return players by id '''
- return self._players[player_id]
-
- async def player_command(self, player_id, cmd, cmd_args=None):
- ''' issue command on player (play, pause, next, previous, stop, power, volume, mute) '''
- if player_id not in self._players:
- return
- player = self._players[player_id]
- # handle some common workarounds
- if cmd in ['pause', 'play'] and cmd_args == 'toggle':
- cmd = 'pause' if player.state == PlayerState.Playing else 'play'
- if cmd == 'power' and cmd_args == 'toggle':
- cmd_args = 'off' if player.powered else 'on'
- if cmd == 'volume' and cmd_args == 'up':
- cmd_args = player.volume_level + 2
- elif cmd == 'volume' and cmd_args == 'down':
- cmd_args = player.volume_level - 2
- # redirect playlist related commands to parent player
- if player.group_parent and cmd not in ['power', 'volume', 'mute']:
- return await self.player_command(player.group_parent, cmd, cmd_args)
- # handle hass integration
- await self.__player_command_hass_integration(player, cmd, cmd_args)
- # handle mute as power
- if cmd == 'power' and player.settings['mute_as_power']:
- cmd = 'mute'
- cmd_args = 'on' if cmd_args == 'off' else 'off' # invert logic (power ON is mute OFF)
- # handle group volume for group players
- player_childs = [item for item in self._players.values() if item.group_parent == player_id]
- if player.is_group and cmd == 'volume' and player.settings['apply_group_volume']:
- return await self.__player_command_group_volume(player, player_childs, cmd_args)
- if player.is_group and cmd == 'power' and cmd_args == 'off':
- for item in player_childs:
- asyncio.create_task(self.player_command(item.player_id, cmd, cmd_args))
- # normal execution of command on player
- prov_id = self._players[player_id].player_provider
- prov = self.providers[prov_id]
- await prov.player_command(player_id, cmd, cmd_args)
- # handle play on power on
- if cmd == 'power' and cmd_args == 'on' and player.settings['play_power_on']:
- LOGGER.info('play_power_on %s' % player.name)
- await prov.player_command(player_id, 'play')
-
- async def __player_command_hass_integration(self, player, cmd, cmd_args):
- ''' handle hass integration in player command '''
- if not self.mass.hass:
- return
- if cmd == 'power' and player.settings.get('hass_power_entity') and player.settings.get('hass_power_entity_source'):
- cur_source = await self.mass.hass.get_state(player.settings['hass_power_entity'], attribute='source')
- if cmd_args == 'on' and not cur_source:
- service_data = { 'entity_id': player.settings['hass_power_entity'], 'source':player.settings['hass_power_entity_source'] }
- await self.mass.hass.call_service('media_player', 'select_source', service_data)
- elif cmd_args == 'off' and cur_source == player.settings['hass_power_entity_source']:
- service_data = { 'entity_id': player.settings['hass_power_entity'] }
- await self.mass.hass.call_service('media_player', 'turn_off', service_data)
- else:
- LOGGER.warning('Ignoring power command as required source is not active')
- elif cmd == 'power' and player.settings.get('hass_power_entity'):
- domain = player.settings['hass_power_entity'].split('.')[0]
- service_data = { 'entity_id': player.settings['hass_power_entity']}
- await self.mass.hass.call_service(domain, 'turn_%s' % cmd_args, service_data)
- if cmd == 'volume' and player.settings.get('hass_volume_entity'):
- service_data = { 'entity_id': player.settings['hass_power_entity'], 'volume_level': int(cmd_args)/100}
- await self.mass.hass.call_service('media_player', 'volume_set', service_data)
- cmd_args = 100 # just force full volume on actual player if volume is outsourced to hass
-
- async def __player_command_group_volume(self, player, player_childs, cmd_args):
- ''' handle group volume if needed'''
- cur_volume = player.volume_level
- new_volume = try_parse_int(cmd_args)
- volume_dif = new_volume - cur_volume
- volume_dif_percent = volume_dif/cur_volume
- for child_player in player_childs:
- if child_player.enabled and child_player.powered:
- cur_child_volume = child_player.volume_level
- new_child_volume = cur_child_volume + (cur_child_volume * volume_dif_percent)
- child_player.volume_level = new_child_volume
- await self.player_command(child_player.player_id, 'volume', new_child_volume)
- player.volume_level = new_volume
- return True
-
- async def remove_player(self, player_id):
- ''' handle a player remove '''
- self._players.pop(player_id, None)
- asyncio.ensure_future(self.mass.event('player removed', player_id))
-
- async def trigger_update(self, player_id):
- ''' manually trigger update for a player '''
- await self.update_player(self._players[player_id])
-
- async def update_player(self, player_details):
- ''' update (or add) player '''
- player_details = deepcopy(player_details)
- LOGGER.debug('Incoming msg from %s' % player_details.name)
- player_id = player_details.player_id
- player_changed = False
- if not player_id in self._players:
- # first message from player
- self._players[player_id] = MusicPlayer()
- player = self._players[player_id]
- player.player_id = player_id
- player.player_provider = player_details.player_provider
- player_changed = True
- else:
- player = self._players[player_id]
- player.settings = await self.__get_player_settings(player_id)
- # handle basic player settings
- player_details.enabled = player.settings['enabled']
- player_details.name = player.settings['name'] if player.settings['name'] else player_details.name
- player_details.group_parent = player.settings['group_parent'] if player.settings['group_parent'] else player_details.group_parent
- # handle hass integration
- await self.__update_player_hass_settings(player_details, player.settings)
- # handle mute as power setting
- if player.settings['mute_as_power']:
- player_details.powered = not player_details.muted
- # combine state of group parent
- if player_details.group_parent and player_details.group_parent in self._players:
- parent_player = self._players[player_details.group_parent]
- player_details.cur_item_time = parent_player.cur_item_time
- player_details.cur_item = parent_player.cur_item
- player_details.state = parent_player.state
- # handle group volume/power setting
- player_childs = [item for item in self._players.values() if item.group_parent == player_id]
- player_details.is_group = len(player_childs) > 0
- if player_details.is_group and player.settings['apply_group_volume']:
- await self.__update_player_group_volume(player_details, player_childs)
- if player_details.is_group and player.settings['apply_group_power']:
- await self.__update_player_group_power(player_details, player_childs)
- # compare values to detect changes
- for key, cur_value in player.__dict__.items():
- if key != 'settings':
- new_value = getattr(player_details, key)
- if new_value != cur_value:
- player_changed = True
- setattr(player, key, new_value)
- LOGGER.debug('key changed: %s for player %s - new value: %s' % (key, player.name, new_value))
- if player_changed:
- # player is added or updated!
- asyncio.ensure_future(self.mass.event('player updated', player))
- # is groupplayer, trigger update of its childs
- for child in player_childs:
- asyncio.create_task(self.trigger_update(child.player_id))
- # if child player in a group, trigger update of parent
- if player.group_parent:
- asyncio.create_task(self.trigger_update(player.group_parent))
-
- async def __update_player_hass_settings(self, player_details, player_settings):
- ''' handle home assistant integration on a player '''
- if not self.mass.hass:
- return
- player_id = player_details.player_id
- player_settings = self.mass.config['player_settings'][player_id]
- if player_settings.get('hass_power_entity') and player_settings.get('hass_power_entity_source'):
- hass_state = await self.mass.hass.get_state(
- player_settings['hass_power_entity'],
- attribute='source',
- register_listener=functools.partial(self.trigger_update, player_id))
- player_details.powered = hass_state == player_settings['hass_power_entity_source']
- elif player_settings.get('hass_power_entity'):
- hass_state = await self.mass.hass.get_state(
- player_settings['hass_power_entity'],
- attribute='state',
- register_listener=functools.partial(self.trigger_update, player_id))
- player_details.powered = hass_state != 'off'
- if player_settings.get('hass_volume_entity'):
- hass_state = await self.mass.hass.get_state(
- player_settings['hass_volume_entity'],
- attribute='volume_level',
- register_listener=functools.partial(self.trigger_update, player_id))
- player_details.volume_level = int(try_parse_float(hass_state)*100)
-
- async def __update_player_group_volume(self, player_details, player_childs):
- ''' handle group volume '''
- group_volume = 0
- active_players = 0
- for child_player in player_childs:
- if child_player.enabled and child_player.powered:
- group_volume += child_player.volume_level
- active_players += 1
- group_volume = group_volume / active_players if active_players else 0
- player_details.volume_level = group_volume
-
- async def __update_player_group_power(self, player_details, player_childs):
- ''' handle group power '''
- player_powered = False
- for child_player in player_childs:
- if child_player.powered:
- player_powered = True
- break
- if player_details.powered and not player_powered:
- # all childs turned off so turn off group player
- LOGGER.info('all childs turned off so turn off group player %s' % player_details.name)
- await self. player_command(player_details.player_id, 'power', 'off')
- player_details.powered = False
- elif not player_details.powered and player_powered:
- # all childs turned off but group player still off, so turn it on
- LOGGER.info('all childs turned off but group player still off, so turn it on %s' % player_details.name)
- await self. player_command(player_details.player_id, 'power', 'on')
- player_details.powered = True
-
- async def __get_player_settings(self, player_id):
- ''' get (or create) player config '''
- player_settings = self.mass.config['player_settings'].get(player_id,{})
- for key, def_value, desc in self.mass.config['player_settings']['__desc__']:
- if not key in player_settings:
- player_settings[key] = def_value
- self.mass.config['player_settings'][player_id] = player_settings
- return player_settings
-
- async def play_media(self, player_id, media_item, queue_opt='play'):
- '''
- play media on a player
- player_id: id of the player
- media_item: media item(s) that should be played (Track, Album, Artist, Playlist)
- queue_opt: play, replace, next or add
- '''
- if not player_id in self._players:
- LOGGER.warning('Player %s not found' % player_id)
- return False
- player_prov = self.providers[self._players[player_id].player_provider]
- # a single item or list of items may be provided
- media_items = media_item if isinstance(media_item, list) else [media_item]
- playable_tracks = []
- for media_item in media_items:
- # collect tracks to play
- if media_item.media_type == MediaType.Artist:
- tracks = await self.mass.music.artist_toptracks(media_item.item_id, provider=media_item.provider)
- elif media_item.media_type == MediaType.Album:
- tracks = await self.mass.music.album_tracks(media_item.item_id, provider=media_item.provider)
- elif media_item.media_type == MediaType.Playlist:
- tracks = await self.mass.music.playlist_tracks(media_item.item_id, provider=media_item.provider, offset=0, limit=0)
- else:
- tracks = [media_item] # single track
- # check supported music providers by this player and work out how to handle playback...
- for track in tracks:
- # sort by quality
- match_found = False
- for prov_media in sorted(track.provider_ids, key=operator.itemgetter('quality'), reverse=True):
- media_provider = prov_media['provider']
- media_item_id = prov_media['item_id']
- player_supported_provs = player_prov.supported_musicproviders
- if media_provider in player_supported_provs:
- # the provider can handle this media_type directly !
- track.uri = await self.get_track_uri(media_item_id, media_provider)
- playable_tracks.append(track)
- match_found = True
- elif 'http' in player_prov.supported_musicproviders:
- # fallback to http streaming if supported
- track.uri = await self.get_track_uri(media_item_id, media_provider, True)
- playable_tracks.append(track)
- match_found = True
- if match_found:
- break
- if playable_tracks:
- if self._players[player_id].shuffle_enabled:
- random.shuffle(playable_tracks)
- if queue_opt in ['next', 'play'] and len(playable_tracks) > 1:
- queue_opt = 'replace' # always assume playback of multiple items as new queue
- return await player_prov.play_media(player_id, playable_tracks, queue_opt)
- else:
- raise Exception("Musicprovider and/or media not supported by player %s !" % (player_id) )
-
- async def get_track_uri(self, item_id, provider, http_stream=False):
- ''' generate the URL/URI for a media item '''
- uri = ""
- if http_stream:
- host = socket.gethostbyname(socket.gethostname())
- uri = 'http://%s:8095/stream/%s/%s'% (host, provider, item_id)
- elif provider == "spotify":
- uri = 'spotify://spotify:track:%s' % item_id
- elif provider == "qobuz":
- uri = 'qobuz://%s.flac' % item_id
- elif provider == "file":
- uri = 'file://%s' % item_id
- return uri
-
- async def player_queue(self, player_id, offset=0, limit=50):
- ''' return the items in the player's queue '''
- player = self._players[player_id]
- player_prov = self.providers[player.player_provider]
- return await player_prov.player_queue(player_id, offset=offset, limit=limit)
-
- def load_providers(self):
- ''' dynamically load providers '''
- for item in os.listdir(MODULES_PATH):
- if (os.path.isfile(os.path.join(MODULES_PATH, item)) and not item.startswith("_") and
- item.endswith('.py') and not item.startswith('.')):
- module_name = item.replace(".py","")
- LOGGER.debug("Loading playerprovider module %s" % module_name)
- try:
- mod = __import__("modules.playerproviders." + module_name, fromlist=[''])
- if not self.mass.config['playerproviders'].get(module_name):
- self.mass.config['playerproviders'][module_name] = {}
- self.mass.config['playerproviders'][module_name]['__desc__'] = mod.config_entries()
- for key, def_value, desc in mod.config_entries():
- if not key in self.mass.config['playerproviders'][module_name]:
- self.mass.config['playerproviders'][module_name][key] = def_value
- mod = mod.setup(self.mass)
- if mod:
- self.providers[mod.prov_id] = mod
- cls_name = mod.__class__.__name__
- LOGGER.info("Successfully initialized module %s" % cls_name)
- except Exception as exc:
- LOGGER.exception("Error loading module %s: %s" %(module_name, exc))
version = version.strip().title()
return title, version
+async def kill_proc(proc):
+ proc.kill()
+ await proc.communicate()
\ No newline at end of file
<v-icon height="20" v-if="item.in_library.length == 0">favorite_border</v-icon>
</v-btn>
</template>
- <span v-if="item.in_library.length > 0">Item is added to the library</span>
- <span v-if="item.in_library.length == 0">Add item to the library</span>
+ <span v-if="item.in_library.length > 0">{{ $t('remove_library') }}</span>
+ <span v-if="item.in_library.length == 0">{{ $t('add_library') }}</span>
</v-tooltip>
</v-list-tile-action>
methods: { \r
itemClick(cmd) {\r
if (cmd == 'info')\r
- this.$router.push({ path: '/tracks/' + this.$globals.playmenuitem.item_id, params: {provider: this.$globals.playmenuitem.provider}})\r
+ this.$router.push({ path: '/tracks/' + this.$globals.playmenuitem.item_id, query: {provider: this.$globals.playmenuitem.provider}})\r
else\r
this.$emit('playItem', this.$globals.playmenuitem, cmd)\r
// close dialog\r
--- /dev/null
+.vld-overlay {
+ bottom: 0;
+ left: 0;
+ position: absolute;
+ right: 0;
+ top: 0;
+ align-items: center;
+ display: none;
+ justify-content: center;
+ overflow: hidden;
+ z-index: 1
+}
+
+.vld-overlay.is-active {
+ display: flex
+}
+
+.vld-overlay.is-full-page {
+ z-index: 999;
+ position: fixed
+}
+
+.vld-overlay .vld-background {
+ bottom: 0;
+ left: 0;
+ position: absolute;
+ right: 0;
+ top: 0;
+ background: #000;
+ opacity: 0.7
+}
+
+.vld-overlay .vld-icon, .vld-parent {
+ position: relative
+}
+
<link rel="apple-touch-icon" href="./images/icons/icon-apple.png">
<meta name="apple-mobile-web-app-capable" content="yes">
<link href="./css/site.css" rel="stylesheet">
+ <link href="./css/vue-loading.css" rel="stylesheet">
</head>
<body>
<router-view app :key="$route.path"></router-view>
<searchbox/>
</v-content>
- <v-dialog
- v-model="$globals.loading"
- persistent
- width="300"
- >
- <v-card
- color="primary"
- dark
- >
- <v-card-text>
- Please stand by
- <v-progress-linear
- indeterminate
- color="white"
- class="mb-0"
- ></v-progress-linear>
- </v-card-text>
- </v-card>
- </v-dialog>
+ <loading :active.sync="$globals.loading" :can-cancel="true" color="#2196f3" loader="dots"></loading>
</v-app>
</div>
- <!-- <script src="https://cdn.jsdelivr.net/npm/vue@2.6.10/dist/vue.js"></script> -->
<script src="https://unpkg.com/vue/dist/vue.js"></script>
<script src="https://unpkg.com/vue-i18n/dist/vue-i18n.js"></script>
<script src="https://cdn.jsdelivr.net/npm/vuetify/dist/vuetify.js"></script>
<script src="https://unpkg.com/axios/dist/axios.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/moment@2.20.1/moment.min.js"></script>
<script src="https://unpkg.com/vee-validate@2.0.0-rc.25/dist/vee-validate.js"></script>
- <script src="https://unpkg.com/http-vue-loader"></script>
+ <script src="./lib/vue-loading-overlay.js"></script>
<script>
function isMobile() {
endpoint = "/playlists/"
item_id = item.item_id.toString();
var url = endpoint + item_id;
- console.log(url + ' - ' + item.provider);
router.push({ path: url, query: {provider: item.provider}});
}
var url = endpoint + item_id + action;
console.log('loading ' + url);
axios
- .get(url)
+ .get(url, { params: { provider: item.provider }})
.then(result => {
data = result.data;
console.log(data);
Vue.use(VeeValidate);
Vue.use(Vuetify);
Vue.use(VueI18n);
+ Vue.use(VueLoading);
const routes = [
watch: {},
mounted() {
},
+ components: {
+ Loading: VueLoading
+ },
created() {
// little hack to force refresh PWA on iOS by simple reloading it every hour
var d = new Date();
--- /dev/null
+!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define("VueLoading",[],e):"object"==typeof exports?exports.VueLoading=e():t.VueLoading=e()}("undefined"!=typeof self?self:this,function(){return function(t){var e={};function i(n){if(e[n])return e[n].exports;var r=e[n]={i:n,l:!1,exports:{}};return t[n].call(r.exports,r,r.exports,i),r.l=!0,r.exports}return i.m=t,i.c=e,i.d=function(t,e,n){i.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:n})},i.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},i.t=function(t,e){if(1&e&&(t=i(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var n=Object.create(null);if(i.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var r in t)i.d(n,r,function(e){return t[e]}.bind(null,r));return n},i.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return i.d(e,"a",e),e},i.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},i.p="",i(i.s=1)}([function(t,e,i){},function(t,e,i){"use strict";i.r(e);var n="undefined"!=typeof window?window.HTMLElement:Object,r={mounted:function(){document.addEventListener("focusin",this.focusIn)},methods:{focusIn:function(t){if(this.isActive&&t.target!==this.$el&&!this.$el.contains(t.target)){var e=this.container?this.container:this.isFullPage?null:this.$el.parentElement;(this.isFullPage||e&&e.contains(t.target))&&(t.preventDefault(),this.$el.focus())}}},beforeDestroy:function(){document.removeEventListener("focusin",this.focusIn)}};function a(t,e,i,n,r,a,o,s){var u,l="function"==typeof 
t?t.options:t;if(e&&(l.render=e,l.staticRenderFns=i,l._compiled=!0),n&&(l.functional=!0),a&&(l._scopeId="data-v-"+a),o?(u=function(t){(t=t||this.$vnode&&this.$vnode.ssrContext||this.parent&&this.parent.$vnode&&this.parent.$vnode.ssrContext)||"undefined"==typeof __VUE_SSR_CONTEXT__||(t=__VUE_SSR_CONTEXT__),r&&r.call(this,t),t&&t._registeredComponents&&t._registeredComponents.add(o)},l._ssrRegister=u):r&&(u=s?function(){r.call(this,this.$root.$options.shadowRoot)}:r),u)if(l.functional){l._injectStyles=u;var c=l.render;l.render=function(t,e){return u.call(e),c(t,e)}}else{var d=l.beforeCreate;l.beforeCreate=d?[].concat(d,u):[u]}return{exports:t,options:l}}var o=a({name:"spinner",props:{color:{type:String,default:"#000"},height:{type:Number,default:64},width:{type:Number,default:64}}},function(){var t=this.$createElement,e=this._self._c||t;return e("svg",{attrs:{viewBox:"0 0 38 38",xmlns:"http://www.w3.org/2000/svg",width:this.width,height:this.height,stroke:this.color}},[e("g",{attrs:{fill:"none","fill-rule":"evenodd"}},[e("g",{attrs:{transform:"translate(1 1)","stroke-width":"2"}},[e("circle",{attrs:{"stroke-opacity":".25",cx:"18",cy:"18",r:"18"}}),e("path",{attrs:{d:"M36 18c0-9.94-8.06-18-18-18"}},[e("animateTransform",{attrs:{attributeName:"transform",type:"rotate",from:"0 18 18",to:"360 18 18",dur:"0.8s",repeatCount:"indefinite"}})],1)])])])},[],!1,null,null,null).exports,s=a({name:"dots",props:{color:{type:String,default:"#000"},height:{type:Number,default:240},width:{type:Number,default:60}}},function(){var t=this.$createElement,e=this._self._c||t;return e("svg",{attrs:{viewBox:"0 0 120 
30",xmlns:"http://www.w3.org/2000/svg",fill:this.color,width:this.width,height:this.height}},[e("circle",{attrs:{cx:"15",cy:"15",r:"15"}},[e("animate",{attrs:{attributeName:"r",from:"15",to:"15",begin:"0s",dur:"0.8s",values:"15;9;15",calcMode:"linear",repeatCount:"indefinite"}}),e("animate",{attrs:{attributeName:"fill-opacity",from:"1",to:"1",begin:"0s",dur:"0.8s",values:"1;.5;1",calcMode:"linear",repeatCount:"indefinite"}})]),e("circle",{attrs:{cx:"60",cy:"15",r:"9","fill-opacity":"0.3"}},[e("animate",{attrs:{attributeName:"r",from:"9",to:"9",begin:"0s",dur:"0.8s",values:"9;15;9",calcMode:"linear",repeatCount:"indefinite"}}),e("animate",{attrs:{attributeName:"fill-opacity",from:"0.5",to:"0.5",begin:"0s",dur:"0.8s",values:".5;1;.5",calcMode:"linear",repeatCount:"indefinite"}})]),e("circle",{attrs:{cx:"105",cy:"15",r:"15"}},[e("animate",{attrs:{attributeName:"r",from:"15",to:"15",begin:"0s",dur:"0.8s",values:"15;9;15",calcMode:"linear",repeatCount:"indefinite"}}),e("animate",{attrs:{attributeName:"fill-opacity",from:"1",to:"1",begin:"0s",dur:"0.8s",values:"1;.5;1",calcMode:"linear",repeatCount:"indefinite"}})])])},[],!1,null,null,null).exports,u=a({name:"bars",props:{color:{type:String,default:"#000"},height:{type:Number,default:40},width:{type:Number,default:40}}},function(){var t=this.$createElement,e=this._self._c||t;return e("svg",{attrs:{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 30 30",height:this.height,width:this.width,fill:this.color}},[e("rect",{attrs:{x:"0",y:"13",width:"4",height:"5"}},[e("animate",{attrs:{attributeName:"height",attributeType:"XML",values:"5;21;5",begin:"0s",dur:"0.6s",repeatCount:"indefinite"}}),e("animate",{attrs:{attributeName:"y",attributeType:"XML",values:"13; 5; 
13",begin:"0s",dur:"0.6s",repeatCount:"indefinite"}})]),e("rect",{attrs:{x:"10",y:"13",width:"4",height:"5"}},[e("animate",{attrs:{attributeName:"height",attributeType:"XML",values:"5;21;5",begin:"0.15s",dur:"0.6s",repeatCount:"indefinite"}}),e("animate",{attrs:{attributeName:"y",attributeType:"XML",values:"13; 5; 13",begin:"0.15s",dur:"0.6s",repeatCount:"indefinite"}})]),e("rect",{attrs:{x:"20",y:"13",width:"4",height:"5"}},[e("animate",{attrs:{attributeName:"height",attributeType:"XML",values:"5;21;5",begin:"0.3s",dur:"0.6s",repeatCount:"indefinite"}}),e("animate",{attrs:{attributeName:"y",attributeType:"XML",values:"13; 5; 13",begin:"0.3s",dur:"0.6s",repeatCount:"indefinite"}})])])},[],!1,null,null,null).exports,l=a({name:"vue-loading",mixins:[r],props:{active:Boolean,programmatic:Boolean,container:[Object,Function,n],isFullPage:{type:Boolean,default:!0},transition:{type:String,default:"fade"},canCancel:Boolean,onCancel:{type:Function,default:function(){}},color:String,backgroundColor:String,opacity:Number,width:Number,height:Number,zIndex:Number,loader:{type:String,default:"spinner"}},data:function(){return{isActive:this.active}},components:{Spinner:o,Dots:s,Bars:u},beforeMount:function(){this.programmatic&&(this.container?(this.isFullPage=!1,this.container.appendChild(this.$el)):document.body.appendChild(this.$el))},mounted:function(){this.programmatic&&(this.isActive=!0),document.addEventListener("keyup",this.keyPress)},methods:{cancel:function(){this.canCancel&&this.isActive&&(this.hide(),this.onCancel.apply(null,arguments))},hide:function(){var t=this;this.$emit("hide"),this.$emit("update:active",!1),this.programmatic&&(this.isActive=!1,setTimeout(function(){var e;t.$destroy(),void 0!==(e=t.$el).remove?e.remove():e.parentNode.removeChild(e)},150))},keyPress:function(t){27===t.keyCode&&this.cancel()}},watch:{active:function(t){this.isActive=t}},beforeDestroy:function(){document.removeEventListener("keyup",this.keyPress)}},function(){var 
t=this,e=t.$createElement,i=t._self._c||e;return i("transition",{attrs:{name:t.transition}},[i("div",{directives:[{name:"show",rawName:"v-show",value:t.isActive,expression:"isActive"}],staticClass:"vld-overlay is-active",class:{"is-full-page":t.isFullPage},style:{zIndex:this.zIndex},attrs:{tabindex:"0","aria-busy":t.isActive,"aria-label":"Loading"}},[i("div",{staticClass:"vld-background",style:{background:this.backgroundColor,opacity:this.opacity},on:{click:function(e){return e.preventDefault(),t.cancel(e)}}}),i("div",{staticClass:"vld-icon"},[t._t("before"),t._t("default",[i(t.loader,{tag:"component",attrs:{color:t.color,width:t.width,height:t.height}})]),t._t("after")],2)])])},[],!1,null,null,null).exports,c=function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return{show:function(){var n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:e,r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:i,a=Object.assign({},e,n,{programmatic:!0}),o=new(t.extend(l))({el:document.createElement("div"),propsData:a}),s=Object.assign({},i,r);return Object.keys(s).map(function(t){o.$slots[t]=s[t]}),o}}};i(0);l.install=function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=c(t,e,i);t.$loading=n,t.prototype.$loading=n};e.default=l}]).default});
\ No newline at end of file