diff --git a/dev/releases/3.1.yml b/dev/releases/3.1.yml new file mode 100644 index 0000000..b0ca543 --- /dev/null +++ b/dev/releases/3.1.yml @@ -0,0 +1,10 @@ +minor_release_name: "Soyeon" +3.1.0: + notes: + - "[Architecture] Cleaned up legacy process control" + - "[Architecture] Added proper exception framework to native API" + - "[Feature] Implemented track title and artist name editing from web interface" + - "[Feature] Implemented track and artist merging from web interface" + - "[Feature] Implemented scrobble reparsing from web interface" + - "[Performance] Adjusted cache sizes" + - "[Logging] Added cache memory use information" diff --git a/dev/releases/branch.yml b/dev/releases/branch.yml deleted file mode 100644 index 00203d9..0000000 --- a/dev/releases/branch.yml +++ /dev/null @@ -1,2 +0,0 @@ -- "[Performance] Adjusted cache sizes" -- "[Logging] Added cache memory use information" diff --git a/dev/testing/stresstest.py b/dev/testing/stresstest.py new file mode 100644 index 0000000..ad405e7 --- /dev/null +++ b/dev/testing/stresstest.py @@ -0,0 +1,43 @@ +import threading +import subprocess +import time +import requests +import os + +ACTIVE = True + +build_cmd = ["docker","build","-t","maloja",".","-f","Containerfile"] +subprocess.run(build_cmd) + +common_prc = ( + ["docker","run","--rm","-v",f"{os.path.abspath('./testdata')}:/mlj","-e","MALOJA_DATA_DIRECTORY=/mlj"], + ["maloja"] +) + +servers = [ + {'port': 42010}, + {'port': 42011, 'extraargs':["--memory=1g"]}, + {'port': 42012, 'extraargs':["--memory=500m"]} +] +for s in servers: + cmd = common_prc[0] + ["-p",f"{s['port']}:42010"] + s.get('extraargs',[]) + common_prc[1] + print(cmd) + t = threading.Thread(target=subprocess.run,args=(cmd,)) + s['thread'] = t + t.daemon = True + t.start() + time.sleep(5) + +time.sleep(5) +while ACTIVE: + time.sleep(1) + try: + for s in servers: + requests.get(f"http://localhost:{s['port']}") + except KeyboardInterrupt: + ACTIVE = False + except Exception: + pass + +for s in servers: + s['thread'].join() diff --git a/maloja/__init__.py b/maloja/__init__.py index ec1fd37..f43dd84 100644 --- a/maloja/__init__.py +++ b/maloja/__init__.py @@ -1,4 +1,4 @@ # monkey patching -from . import monkey +from .pkg_global import monkey # configuration before all else -from . import globalconf +from .pkg_global import conf diff --git a/maloja/__main__.py b/maloja/__main__.py index 4fcd284..28b66cc 100644 --- a/maloja/__main__.py +++ b/maloja/__main__.py @@ -1,4 +1,177 @@ -# make the package itself runnable with python -m maloja +import os +import signal +import subprocess +import time -from .proccontrol.control import main -main() +from setproctitle import setproctitle +from ipaddress import ip_address + +from doreah.control import mainfunction +from doreah.io import col +from doreah.logging import log + +from . 
import __pkginfo__ as pkginfo +from .pkg_global import conf +from .proccontrol import tasks +from .setup import setup +from .dev import generate + + + +def print_header_info(): + print() + #print("#####") + print(col['yellow']("Maloja"),f"v{pkginfo.VERSION}") + print(pkginfo.HOMEPAGE) + #print("#####") + print() + + + +def get_instance(): + try: + return int(subprocess.check_output(["pidof","maloja"])) + except Exception: + return None + +def get_instance_supervisor(): + try: + return int(subprocess.check_output(["pidof","maloja_supervisor"])) + except Exception: + return None + +def restart(): + if stop(): + start() + else: + print(col["red"]("Could not stop Maloja!")) + +def start(): + if get_instance_supervisor() is not None: + print("Maloja is already running.") + else: + print_header_info() + setup() + try: + #p = subprocess.Popen(["python3","-m","maloja.server"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL) + sp = subprocess.Popen(["python3","-m","maloja","supervisor"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL) + print(col["green"]("Maloja started!")) + + port = conf.malojaconfig["PORT"] + + print("Visit your server address (Port " + str(port) + ") to see your web interface. Visit /admin_setup to get started.") + print("If you're installing this on your local machine, these links should get you there:") + print("\t" + col["blue"]("http://localhost:" + str(port))) + print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup")) + return True + except Exception: + print("Error while starting Maloja.") + return False + + +def stop(): + + for attempt in [(signal.SIGTERM,2),(signal.SIGTERM,5),(signal.SIGKILL,3),(signal.SIGKILL,5)]: + + pid_sv = get_instance_supervisor() + pid = get_instance() + + if pid is None and pid_sv is None: + print("Maloja stopped!") + return True + + if pid_sv is not None: + os.kill(pid_sv,attempt[0]) + if pid is not None: + os.kill(pid,attempt[0]) + + time.sleep(attempt[1]) + + return False + + + + + + + print("Maloja stopped!") + return True + +def onlysetup(): + print_header_info() + setup() + print("Setup complete!") + +def run_server(): + print_header_info() + setup() + setproctitle("maloja") + from . 
import server + server.run_server() + +def run_supervisor(): + setproctitle("maloja_supervisor") + while True: + log("Maloja is not running, starting...",module="supervisor") + try: + process = subprocess.Popen( + ["python3", "-m", "maloja","run"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + except Exception as e: + log("Error starting Maloja: " + str(e),module="supervisor") + else: + try: + process.wait() + except Exception as e: + log("Maloja crashed: " + str(e),module="supervisor") + +def debug(): + os.environ["MALOJA_DEV_MODE"] = 'true' + conf.malojaconfig.load_environment() + run_server() + +def print_info(): + print_header_info() + print(col['lightblue']("Configuration Directory:"),conf.dir_settings['config']) + print(col['lightblue']("Data Directory: "),conf.dir_settings['state']) + print(col['lightblue']("Log Directory: "),conf.dir_settings['logs']) + print(col['lightblue']("Network: "),f"IPv{ip_address(conf.malojaconfig['host']).version}, Port {conf.malojaconfig['port']}") + print(col['lightblue']("Timezone: "),f"UTC{conf.malojaconfig['timezone']:+d}") + print() + print() + +@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images'],shield=True) +def main(*args,**kwargs): + + actions = { + # server + "start":start, + "restart":restart, + "stop":stop, + "run":run_server, + "supervisor":run_supervisor, + "debug":debug, + "setup":onlysetup, + # admin scripts + "import":tasks.import_scrobbles, # maloja import /x/y.csv + "backup":tasks.backup, # maloja backup --targetfolder /x/y --include_images + "generate":generate.generate_scrobbles, # maloja generate 400 + "export":tasks.export, # maloja export + # aux + "info":print_info + } + + if "version" in kwargs: + print(pkginfo.VERSION) + return True + else: + try: + action, *args = args + action = actions[action] + except (ValueError, KeyError): + print("Valid commands: " + " ".join(a for a in actions)) + return False + + return action(*args,**kwargs) diff --git a/maloja/apis/_apikeys.py b/maloja/apis/_apikeys.py index cc49c92..fd4773e 100644 --- a/maloja/apis/_apikeys.py +++ b/maloja/apis/_apikeys.py @@ -4,7 +4,7 @@ from doreah.keystore import KeyStore from doreah.logging import log -from ..globalconf import data_dir +from ..pkg_global.conf import data_dir apikeystore = KeyStore(file=data_dir['clients']("apikeys.yml"),save_endpoint="/apis/mlj_1/apikeys") diff --git a/maloja/apis/listenbrainz.py b/maloja/apis/listenbrainz.py index 507de60..129b581 100644 --- a/maloja/apis/listenbrainz.py +++ b/maloja/apis/listenbrainz.py @@ -4,7 +4,7 @@ from .. import database import datetime from ._apikeys import apikeystore -from ..globalconf import malojaconfig +from ..pkg_global.conf import malojaconfig class Listenbrainz(APIHandler): diff --git a/maloja/apis/native_v1.py b/maloja/apis/native_v1.py index 1d7f232..06aba79 100644 --- a/maloja/apis/native_v1.py +++ b/maloja/apis/native_v1.py @@ -1,5 +1,6 @@ import os import math +import traceback from bottle import response, static_file, request, FormsDict @@ -12,7 +13,7 @@ from nimrodel import Multi from .. import database -from ..globalconf import malojaconfig, data_dir +from ..pkg_global.conf import malojaconfig, data_dir @@ -39,15 +40,40 @@ api.__apipath__ = "mlj_1" errors = { - database.MissingScrobbleParameters: lambda e: (400,{ + database.exceptions.MissingScrobbleParameters: lambda e: (400,{ "status":"failure", "error":{ 'type':'missing_scrobble_data', 'value':e.params, - 'desc':"A scrobble requires these parameters."
+ 'desc':"The scrobble is missing needed parameters." } }), - Exception: lambda e: (500,{ + database.exceptions.MissingEntityParameter: lambda e: (400,{ + "status":"error", + "error":{ + 'type':'missing_entity_parameter', + 'value':None, + 'desc':"This API call is not valid without an entity (track or artist)." + } + }), + database.exceptions.EntityExists: lambda e: (409,{ + "status":"failure", + "error":{ + 'type':'entity_exists', + 'value':e.entitydict, + 'desc':"This entity already exists in the database. Consider merging instead." + } + }), + database.exceptions.DatabaseNotBuilt: lambda e: (503,{ + "status":"error", + "error":{ + 'type':'server_not_ready', + 'value':'db_upgrade', + 'desc':"The database is being upgraded. Please try again later." + } + }), + # for http errors, use their status code + Exception: lambda e: ((e.status_code if hasattr(e,'statuscode') else 500),{ "status":"failure", "error":{ 'type':'unknown_error', @@ -57,6 +83,21 @@ errors = { }) } +def catch_exceptions(func): + def protector(*args,**kwargs): + try: + return func(*args,**kwargs) + except Exception as e: + print(traceback.format_exc()) + for etype in errors: + if isinstance(e,etype): + errorhandling = errors[etype](e) + response.status = errorhandling[0] + return errorhandling[1] + + protector.__doc__ = func.__doc__ + protector.__annotations__ = func.__annotations__ + return protector def add_common_args_to_docstring(filterkeys=False,limitkeys=False,delimitkeys=False,amountkeys=False): @@ -94,6 +135,7 @@ def add_common_args_to_docstring(filterkeys=False,limitkeys=False,delimitkeys=Fa @api.get("test") +@catch_exceptions def test_server(key=None): """Pings the server. If an API key is supplied, the server will respond with 200 if the key is correct and 403 if it isn't. If no key is supplied, the server will @@ -119,6 +161,7 @@ def test_server(key=None): @api.get("serverinfo") +@catch_exceptions def server_info(): """Returns basic information about the server. @@ -141,6 +184,7 @@ def server_info(): @api.get("scrobbles") +@catch_exceptions @add_common_args_to_docstring(filterkeys=True,limitkeys=True,amountkeys=True) def get_scrobbles_external(**keys): """Returns a list of scrobbles. @@ -158,11 +202,13 @@ def get_scrobbles_external(**keys): if k_amount.get('perpage') is not math.inf: result = result[:k_amount.get('perpage')] return { + "status":"ok", "list":result } @api.get("numscrobbles") +@catch_exceptions @add_common_args_to_docstring(filterkeys=True,limitkeys=True,amountkeys=True) def get_scrobbles_num_external(**keys): """Returns amount of scrobbles. @@ -176,12 +222,14 @@ def get_scrobbles_num_external(**keys): result = database.get_scrobbles_num(**ckeys) return { + "status":"ok", "amount":result } @api.get("tracks") +@catch_exceptions @add_common_args_to_docstring(filterkeys=True) def get_tracks_external(**keys): """Returns all tracks (optionally of an artist). @@ -195,12 +243,14 @@ def get_tracks_external(**keys): result = database.get_tracks(**ckeys) return { + "status":"ok", "list":result } @api.get("artists") +@catch_exceptions @add_common_args_to_docstring() def get_artists_external(): """Returns all artists. 
@@ -210,6 +260,7 @@ def get_artists_external(): result = database.get_artists() return { + "status":"ok", "list":result } @@ -218,6 +269,7 @@ def get_artists_external(): @api.get("charts/artists") +@catch_exceptions @add_common_args_to_docstring(limitkeys=True) def get_charts_artists_external(**keys): """Returns artist charts @@ -230,12 +282,14 @@ def get_charts_artists_external(**keys): result = database.get_charts_artists(**ckeys) return { + "status":"ok", "list":result } @api.get("charts/tracks") +@catch_exceptions @add_common_args_to_docstring(filterkeys=True,limitkeys=True) def get_charts_tracks_external(**keys): """Returns track charts @@ -248,6 +302,7 @@ def get_charts_tracks_external(**keys): result = database.get_charts_tracks(**ckeys) return { + "status":"ok", "list":result } @@ -255,6 +310,7 @@ def get_charts_tracks_external(**keys): @api.get("pulse") +@catch_exceptions @add_common_args_to_docstring(filterkeys=True,limitkeys=True,delimitkeys=True,amountkeys=True) def get_pulse_external(**keys): """Returns amounts of scrobbles in specified time frames @@ -267,6 +323,7 @@ def get_pulse_external(**keys): results = database.get_pulse(**ckeys) return { + "status":"ok", "list":results } @@ -274,6 +331,7 @@ def get_pulse_external(**keys): @api.get("performance") +@catch_exceptions @add_common_args_to_docstring(filterkeys=True,limitkeys=True,delimitkeys=True,amountkeys=True) def get_performance_external(**keys): """Returns artist's or track's rank in specified time frames @@ -286,6 +344,7 @@ def get_performance_external(**keys): results = database.get_performance(**ckeys) return { + "status":"ok", "list":results } @@ -293,6 +352,7 @@ def get_performance_external(**keys): @api.get("top/artists") +@catch_exceptions @add_common_args_to_docstring(limitkeys=True,delimitkeys=True) def get_top_artists_external(**keys): """Returns respective number 1 artists in specified time frames @@ -305,6 +365,7 @@ def get_top_artists_external(**keys): results = database.get_top_artists(**ckeys) return { + "status":"ok", "list":results } @@ -312,6 +373,7 @@ def get_top_artists_external(**keys): @api.get("top/tracks") +@catch_exceptions @add_common_args_to_docstring(limitkeys=True,delimitkeys=True) def get_top_tracks_external(**keys): """Returns respective number 1 tracks in specified time frames @@ -326,6 +388,7 @@ def get_top_tracks_external(**keys): results = database.get_top_tracks(**ckeys) return { + "status":"ok", "list":results } @@ -333,6 +396,7 @@ def get_top_tracks_external(**keys): @api.get("artistinfo") +@catch_exceptions @add_common_args_to_docstring(filterkeys=True) def artist_info_external(**keys): """Returns information about an artist @@ -347,8 +411,9 @@ def artist_info_external(**keys): @api.get("trackinfo") +@catch_exceptions @add_common_args_to_docstring(filterkeys=True) -def track_info_external(artist:Multi[str],**keys): +def track_info_external(artist:Multi[str]=[],**keys): """Returns information about a track :return: track (Mapping), scrobbles (Integer), position (Integer), medals (Mapping), certification (String), topweeks (Integer) @@ -365,6 +430,7 @@ def track_info_external(artist:Multi[str],**keys): @api.post("newscrobble") @authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result') +@catch_exceptions def post_scrobble( artist:Multi=None, artists:list=[], @@ -406,46 +472,41 @@ def post_scrobble( # for logging purposes, don't pass values that we didn't actually supply rawscrobble = {k:rawscrobble[k] for k in rawscrobble if rawscrobble[k]} - try: - 
result = database.incoming_scrobble( - rawscrobble, - client='browser' if auth_result.get('doreah_native_auth_check') else auth_result.get('client'), - api='native/v1', - fix=(nofix is None) - ) - responsedict = { - 'status': 'success', - 'track': { - 'artists':result['track']['artists'], - 'title':result['track']['title'] - } - } - if extra_kwargs: - responsedict['warnings'] = [ - {'type':'invalid_keyword_ignored','value':k, - 'desc':"This key was not recognized by the server and has been discarded."} - for k in extra_kwargs - ] - if artist and artists: - responsedict['warnings'] = [ - {'type':'mixed_schema','value':['artist','artists'], - 'desc':"These two fields are meant as alternative methods to submit information. Use of both is discouraged, but works at the moment."} - ] - return responsedict - except Exception as e: - for etype in errors: - if isinstance(e,etype): - errorhandling = errors[etype](e) - response.status = errorhandling[0] - return errorhandling[1] + result = database.incoming_scrobble( + rawscrobble, + client='browser' if auth_result.get('doreah_native_auth_check') else auth_result.get('client'), + api='native/v1', + fix=(nofix is None) + ) + responsedict = { + 'status': 'success', + 'track': { + 'artists':result['track']['artists'], + 'title':result['track']['title'] + }, + 'desc':f"Scrobbled {result['track']['title']} by {', '.join(result['track']['artists'])}" + } + if extra_kwargs: + responsedict['warnings'] = [ + {'type':'invalid_keyword_ignored','value':k, + 'desc':"This key was not recognized by the server and has been discarded."} + for k in extra_kwargs + ] + if artist and artists: + responsedict['warnings'] = [ + {'type':'mixed_schema','value':['artist','artists'], + 'desc':"These two fields are meant as alternative methods to submit information. Use of both is discouraged, but works at the moment."} + ] + return responsedict @api.post("importrules") @authenticated_function(api=True) +@catch_exceptions def import_rulemodule(**keys): """Internal Use Only""" filename = keys.get("filename") @@ -464,6 +525,7 @@ def import_rulemodule(**keys): @api.post("rebuild") @authenticated_function(api=True) +@catch_exceptions def rebuild(**keys): """Internal Use Only""" log("Database rebuild initiated!") @@ -480,6 +542,7 @@ def rebuild(**keys): @api.get("search") +@catch_exceptions def search(**keys): """Internal Use Only""" query = keys.get("query") @@ -501,17 +564,19 @@ def search(**keys): artists_result = [] for a in artists: result = { - 'name': a, + 'artist': a, 'link': "/artist?" + compose_querystring(internal_to_uri({"artist": a})), + 'image': images.get_artist_image(a) } - result["image"] = images.get_artist_image(a) artists_result.append(result) tracks_result = [] for t in tracks: - result = t - result["link"] = "/track?" + compose_querystring(internal_to_uri({"track":t})) - result["image"] = images.get_track_image(t) + result = { + 'track': t, + 'link': "/track?" 
+ compose_querystring(internal_to_uri({"track":t})), + 'image': images.get_track_image(t) + } tracks_result.append(result) return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]} @@ -519,6 +584,7 @@ def search(**keys): @api.post("addpicture") @authenticated_function(api=True) +@catch_exceptions def add_picture(b64,artist:Multi=[],title=None): """Internal Use Only""" keys = FormsDict() @@ -532,6 +598,7 @@ def add_picture(b64,artist:Multi=[],title=None): @api.post("newrule") @authenticated_function(api=True) +@catch_exceptions def newrule(**keys): """Internal Use Only""" pass @@ -542,18 +609,21 @@ def newrule(**keys): @api.post("settings") @authenticated_function(api=True) +@catch_exceptions def set_settings(**keys): """Internal Use Only""" malojaconfig.update(keys) @api.post("apikeys") @authenticated_function(api=True) +@catch_exceptions def set_apikeys(**keys): """Internal Use Only""" apikeystore.update(keys) @api.post("import") @authenticated_function(api=True) +@catch_exceptions def import_scrobbles(identifier): """Internal Use Only""" from ..thirdparty import import_scrobbles @@ -561,6 +631,7 @@ def import_scrobbles(identifier): @api.get("backup") @authenticated_function(api=True) +@catch_exceptions def get_backup(**keys): """Internal Use Only""" from ..proccontrol.tasks.backup import backup @@ -573,6 +644,7 @@ def get_backup(**keys): @api.get("export") @authenticated_function(api=True) +@catch_exceptions def get_export(**keys): """Internal Use Only""" from ..proccontrol.tasks.export import export @@ -586,6 +658,71 @@ def get_export(**keys): @api.post("delete_scrobble") @authenticated_function(api=True) +@catch_exceptions def delete_scrobble(timestamp): """Internal Use Only""" - database.remove_scrobble(timestamp) + result = database.remove_scrobble(timestamp) + return { + "status":"success", + "desc":f"Scrobble was deleted!" + } + + +@api.post("edit_artist") +@authenticated_function(api=True) +@catch_exceptions +def edit_artist(id,name): + """Internal Use Only""" + result = database.edit_artist(id,name) + return { + "status":"success" + } + +@api.post("edit_track") +@authenticated_function(api=True) +@catch_exceptions +def edit_track(id,title): + """Internal Use Only""" + result = database.edit_track(id,{'title':title}) + return { + "status":"success" + } + + +@api.post("merge_tracks") +@authenticated_function(api=True) +@catch_exceptions +def merge_tracks(target_id,source_ids): + """Internal Use Only""" + result = database.merge_tracks(target_id,source_ids) + return { + "status":"success" + } + +@api.post("merge_artists") +@authenticated_function(api=True) +@catch_exceptions +def merge_artists(target_id,source_ids): + """Internal Use Only""" + result = database.merge_artists(target_id,source_ids) + return { + "status":"success" + } + +@api.post("reparse_scrobble") +@authenticated_function(api=True) +@catch_exceptions +def reparse_scrobble(timestamp): + """Internal Use Only""" + result = database.reparse_scrobble(timestamp) + if result: + return { + "status":"success", + "desc":f"Scrobble was reparsed!", + "scrobble":result + } + else: + return { + "status":"no_operation", + "desc":"The scrobble was not changed." 
+ } diff --git a/maloja/cleanup.py b/maloja/cleanup.py index de47920..eb97528 100644 --- a/maloja/cleanup.py +++ b/maloja/cleanup.py @@ -2,7 +2,7 @@ import re import os import csv -from .globalconf import data_dir, malojaconfig +from .pkg_global.conf import data_dir, malojaconfig # need to do this as a class so it can retain loaded settings from file # apparently this is not true diff --git a/maloja/database/__init__.py b/maloja/database/__init__.py index dbd51c0..473cb7b 100644 --- a/maloja/database/__init__.py +++ b/maloja/database/__init__.py @@ -1,5 +1,5 @@ # server -from bottle import request, response, FormsDict, HTTPError +from bottle import request, response, FormsDict # rest of the project from ..cleanup import CleanerAgent @@ -7,12 +7,13 @@ from .. import images from ..malojatime import register_scrobbletime, time_stamps, ranges, alltime from ..malojauri import uri_to_internal, internal_to_uri, compose_querystring from ..thirdparty import proxy_scrobble_all -from ..globalconf import data_dir, malojaconfig +from ..pkg_global.conf import data_dir, malojaconfig from ..apis import apikeystore #db from . import sqldb from . import cached from . import dbcache +from . import exceptions # doreah toolkit from doreah.logging import log @@ -42,23 +43,12 @@ dbstatus = { "rebuildinprogress":False, "complete":False # information is complete } -class DatabaseNotBuilt(HTTPError): - def __init__(self): - super().__init__( - status=503, - body="The Maloja Database is being upgraded to Version 3. This could take quite a long time! (~ 2-5 minutes per 10 000 scrobbles)", - headers={"Retry-After":120} - ) -class MissingScrobbleParameters(Exception): - def __init__(self,params=[]): - self.params = params - def waitfordb(func): def newfunc(*args,**kwargs): - if not dbstatus['healthy']: raise DatabaseNotBuilt() + if not dbstatus['healthy']: raise exceptions.DatabaseNotBuilt() return func(*args,**kwargs) return newfunc @@ -97,11 +87,45 @@ def incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None): missing.append(necessary_arg) if len(missing) > 0: log(f"Invalid Scrobble [Client: {client} | API: {api}]: {rawscrobble} ",color='red') - raise MissingScrobbleParameters(missing) + raise exceptions.MissingScrobbleParameters(missing) log(f"Incoming scrobble [Client: {client} | API: {api}]: {rawscrobble}") + scrobbledict = rawscrobble_to_scrobbledict(rawscrobble, fix, client) + + sqldb.add_scrobble(scrobbledict,dbconn=dbconn) + proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time']) + + dbcache.invalidate_caches(scrobbledict['time']) + + #return {"status":"success","scrobble":scrobbledict} + return scrobbledict + + +@waitfordb +def reparse_scrobble(timestamp): + log(f"Reparsing Scrobble {timestamp}") + scrobble = sqldb.get_scrobble(timestamp=timestamp, include_internal=True) + + if not scrobble or not scrobble['rawscrobble']: + return False + + newscrobble = rawscrobble_to_scrobbledict(scrobble['rawscrobble']) + + track_id = sqldb.get_track_id(newscrobble['track']) + + # check if id changed + if sqldb.get_track_id(scrobble['track']) != track_id: + sqldb.edit_scrobble(timestamp, {'track':newscrobble['track']}) + dbcache.invalidate_entity_cache() + dbcache.invalidate_caches() + return sqldb.get_scrobble(timestamp=timestamp) + + return False + + +def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None): # raw scrobble to processed info scrobbleinfo = {**rawscrobble} if fix: @@ -129,26 +153,58 @@ def 
incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None): "rawscrobble":rawscrobble } - - sqldb.add_scrobble(scrobbledict,dbconn=dbconn) - proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time']) - - dbcache.invalidate_caches(scrobbledict['time']) - - #return {"status":"success","scrobble":scrobbledict} return scrobbledict - @waitfordb def remove_scrobble(timestamp): log(f"Deleting Scrobble {timestamp}") result = sqldb.delete_scrobble(timestamp) dbcache.invalidate_caches(timestamp) + return result +@waitfordb +def edit_artist(id,artistinfo): + artist = sqldb.get_artist(id) + log(f"Renaming {artist} to {artistinfo}") + result = sqldb.edit_artist(id,artistinfo) + dbcache.invalidate_entity_cache() + dbcache.invalidate_caches() + return result +@waitfordb +def edit_track(id,trackinfo): + track = sqldb.get_track(id) + log(f"Renaming {track['title']} to {trackinfo['title']}") + result = sqldb.edit_track(id,trackinfo) + dbcache.invalidate_entity_cache() + dbcache.invalidate_caches() + + return result + +@waitfordb +def merge_artists(target_id,source_ids): + sources = [sqldb.get_artist(id) for id in source_ids] + target = sqldb.get_artist(target_id) + log(f"Merging {sources} into {target}") + result = sqldb.merge_artists(target_id,source_ids) + dbcache.invalidate_entity_cache() + dbcache.invalidate_caches() + + return result + +@waitfordb +def merge_tracks(target_id,source_ids): + sources = [sqldb.get_track(id) for id in source_ids] + target = sqldb.get_track(target_id) + log(f"Merging {sources} into {target}") + result = sqldb.merge_tracks(target_id,source_ids) + dbcache.invalidate_entity_cache() + dbcache.invalidate_caches() + + return result @@ -165,6 +221,7 @@ def get_scrobbles(dbconn=None,**keys): #return result[keys['page']*keys['perpage']:(keys['page']+1)*keys['perpage']] return list(reversed(result)) + @waitfordb def get_scrobbles_num(dbconn=None,**keys): (since,to) = keys.get('timerange').timestamps() @@ -242,6 +299,8 @@ def get_performance(dbconn=None,**keys): if c["artist"] == artist: rank = c["rank"] break + else: + raise exceptions.MissingEntityParameter() results.append({"range":rng,"rank":rank}) return results @@ -281,8 +340,10 @@ def get_top_tracks(dbconn=None,**keys): def artist_info(dbconn=None,**keys): artist = keys.get('artist') + if artist is None: raise exceptions.MissingEntityParameter() - artist = sqldb.get_artist(sqldb.get_artist_id(artist,dbconn=dbconn),dbconn=dbconn) + artist_id = sqldb.get_artist_id(artist,dbconn=dbconn) + artist = sqldb.get_artist(artist_id,dbconn=dbconn) alltimecharts = get_charts_artists(timerange=alltime(),dbconn=dbconn) scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn) #we cant take the scrobble number from the charts because that includes all countas scrobbles @@ -296,11 +357,12 @@ def artist_info(dbconn=None,**keys): "position":position, "associated":others, "medals":{ - "gold": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['gold']], - "silver": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['silver']], - "bronze": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['bronze']], + "gold": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['gold']], + "silver": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['silver']], + "bronze": [year for year in cached.medals_artists if artist_id in 
cached.medals_artists[year]['bronze']], }, - "topweeks":len([e for e in cached.weekly_topartists if e == artist]) + "topweeks":len([e for e in cached.weekly_topartists if e == artist_id]), + "id":artist_id } except Exception: # if the artist isnt in the charts, they are not being credited and we @@ -308,7 +370,13 @@ def artist_info(dbconn=None,**keys): replaceartist = sqldb.get_credited_artists(artist)[0] c = [e for e in alltimecharts if e["artist"] == replaceartist][0] position = c["rank"] - return {"artist":artist,"replace":replaceartist,"scrobbles":scrobbles,"position":position} + return { + "artist":artist, + "replace":replaceartist, + "scrobbles":scrobbles, + "position":position, + "id":artist_id + } @@ -317,8 +385,10 @@ def artist_info(dbconn=None,**keys): def track_info(dbconn=None,**keys): track = keys.get('track') + if track is None: raise exceptions.MissingEntityParameter() - track = sqldb.get_track(sqldb.get_track_id(track,dbconn=dbconn),dbconn=dbconn) + track_id = sqldb.get_track_id(track,dbconn=dbconn) + track = sqldb.get_track(track_id,dbconn=dbconn) alltimecharts = get_charts_tracks(timerange=alltime(),dbconn=dbconn) #scrobbles = get_scrobbles_num(track=track,timerange=alltime()) @@ -337,12 +407,13 @@ def track_info(dbconn=None,**keys): "scrobbles":scrobbles, "position":position, "medals":{ - "gold": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['gold']], - "silver": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['silver']], - "bronze": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['bronze']], + "gold": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['gold']], + "silver": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['silver']], + "bronze": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['bronze']], }, "certification":cert, - "topweeks":len([e for e in cached.weekly_toptracks if e == track]) + "topweeks":len([e for e in cached.weekly_toptracks if e == track_id]), + "id":track_id } diff --git a/maloja/database/associated.py b/maloja/database/associated.py index 00f6a5f..69ccc61 100644 --- a/maloja/database/associated.py +++ b/maloja/database/associated.py @@ -8,7 +8,7 @@ import csv import os from . import sqldb -from ..globalconf import data_dir +from ..pkg_global.conf import data_dir def load_associated_rules(): diff --git a/maloja/database/cached.py b/maloja/database/cached.py index 7967665..ea39a29 100644 --- a/maloja/database/cached.py +++ b/maloja/database/cached.py @@ -3,6 +3,7 @@ from doreah.regular import runyearly, rundaily from .. import database +from . import sqldb from .. 
import malojatime as mjt @@ -24,27 +25,29 @@ def update_medals(): medals_artists.clear() medals_tracks.clear() - for year in mjt.ranges(step="year"): - if year == mjt.thisyear(): break + with sqldb.engine.begin() as conn: + for year in mjt.ranges(step="year"): + if year == mjt.thisyear(): break - charts_artists = database.get_charts_artists(timerange=year) - charts_tracks = database.get_charts_tracks(timerange=year) + charts_artists = sqldb.count_scrobbles_by_artist(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=conn) + charts_tracks = sqldb.count_scrobbles_by_track(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=conn) - entry_artists = {'gold':[],'silver':[],'bronze':[]} - entry_tracks = {'gold':[],'silver':[],'bronze':[]} - medals_artists[year.desc()] = entry_artists - medals_tracks[year.desc()] = entry_tracks + entry_artists = {'gold':[],'silver':[],'bronze':[]} + entry_tracks = {'gold':[],'silver':[],'bronze':[]} + medals_artists[year.desc()] = entry_artists + medals_tracks[year.desc()] = entry_tracks + + for entry in charts_artists: + if entry['rank'] == 1: entry_artists['gold'].append(entry['artist_id']) + elif entry['rank'] == 2: entry_artists['silver'].append(entry['artist_id']) + elif entry['rank'] == 3: entry_artists['bronze'].append(entry['artist_id']) + else: break + for entry in charts_tracks: + if entry['rank'] == 1: entry_tracks['gold'].append(entry['track_id']) + elif entry['rank'] == 2: entry_tracks['silver'].append(entry['track_id']) + elif entry['rank'] == 3: entry_tracks['bronze'].append(entry['track_id']) + else: break - for entry in charts_artists: - if entry['rank'] == 1: entry_artists['gold'].append(entry['artist']) - elif entry['rank'] == 2: entry_artists['silver'].append(entry['artist']) - elif entry['rank'] == 3: entry_artists['bronze'].append(entry['artist']) - else: break - for entry in charts_tracks: - if entry['rank'] == 1: entry_tracks['gold'].append(entry['track']) - elif entry['rank'] == 2: entry_tracks['silver'].append(entry['track']) - elif entry['rank'] == 3: entry_tracks['bronze'].append(entry['track']) - else: break @@ -55,15 +58,17 @@ def update_weekly(): weekly_topartists.clear() weekly_toptracks.clear() - for week in mjt.ranges(step="week"): - if week == mjt.thisweek(): break + with sqldb.engine.begin() as conn: + for week in mjt.ranges(step="week"): + if week == mjt.thisweek(): break - charts_artists = database.get_charts_artists(timerange=week) - charts_tracks = database.get_charts_tracks(timerange=week) - for entry in charts_artists: - if entry['rank'] == 1: weekly_topartists.append(entry['artist']) - else: break - for entry in charts_tracks: - if entry['rank'] == 1: weekly_toptracks.append(entry['track']) - else: break + charts_artists = sqldb.count_scrobbles_by_artist(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=conn) + charts_tracks = sqldb.count_scrobbles_by_track(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=conn) + + for entry in charts_artists: + if entry['rank'] == 1: weekly_topartists.append(entry['artist_id']) + else: break + for entry in charts_tracks: + if entry['rank'] == 1: weekly_toptracks.append(entry['track_id']) + else: break diff --git a/maloja/database/dbcache.py b/maloja/database/dbcache.py index 6bc81df..cd208ad 100644 --- a/maloja/database/dbcache.py +++ b/maloja/database/dbcache.py @@ -9,7 +9,7 @@ import sys from doreah.regular import runhourly from doreah.logging import log -from ..globalconf import malojaconfig +from 
..pkg_global.conf import malojaconfig diff --git a/maloja/database/exceptions.py b/maloja/database/exceptions.py new file mode 100644 index 0000000..a2dadd9 --- /dev/null +++ b/maloja/database/exceptions.py @@ -0,0 +1,29 @@ +from bottle import HTTPError + +class EntityExists(Exception): + def __init__(self,entitydict): + self.entitydict = entitydict + + +class TrackExists(EntityExists): + pass + +class ArtistExists(EntityExists): + pass + + +class DatabaseNotBuilt(HTTPError): + def __init__(self): + super().__init__( + status=503, + body="The Maloja Database is being upgraded to Version 3. This could take quite a long time! (~ 2-5 minutes per 10 000 scrobbles)", + headers={"Retry-After":120} + ) + + +class MissingScrobbleParameters(Exception): + def __init__(self,params=[]): + self.params = params + +class MissingEntityParameter(Exception): + pass diff --git a/maloja/database/jinjaview.py b/maloja/database/jinjaview.py index 80d1b67..c0c48ee 100644 --- a/maloja/database/jinjaview.py +++ b/maloja/database/jinjaview.py @@ -3,7 +3,7 @@ from . sqldb import engine from .dbcache import serialize -from ..globalconf import malojaconfig +from ..pkg_global.conf import malojaconfig from doreah.logging import log diff --git a/maloja/database/sqldb.py b/maloja/database/sqldb.py index 496ab66..957243b 100644 --- a/maloja/database/sqldb.py +++ b/maloja/database/sqldb.py @@ -5,8 +5,9 @@ import math from datetime import datetime from threading import Lock -from ..globalconf import data_dir +from ..pkg_global.conf import data_dir from .dbcache import cached_wrapper, cached_wrapper_individual +from . import exceptions as exc from doreah.logging import log from doreah.regular import runhourly, runmonthly @@ -114,8 +115,9 @@ def connection_provider(func): return func(*args,**kwargs) else: with engine.connect() as connection: - kwargs['dbconn'] = connection - return func(*args,**kwargs) + with connection.begin(): + kwargs['dbconn'] = connection + return func(*args,**kwargs) wrapper.__innerfunc__ = func return wrapper @@ -209,21 +211,22 @@ def artist_db_to_dict(row,dbconn=None): ### DICT -> DB +# These should return None when no data is in the dict so they can be used for update statements def scrobble_dict_to_db(info,dbconn=None): return { - "timestamp":info['time'], - "origin":info['origin'], - "duration":info['duration'], - "track_id":get_track_id(info['track'],dbconn=dbconn), - "extra":json.dumps(info.get('extra',{})), - "rawscrobble":json.dumps(info.get('rawscrobble',{})) + "timestamp":info.get('time'), + "origin":info.get('origin'), + "duration":info.get('duration'), + "track_id":get_track_id(info.get('track'),dbconn=dbconn), + "extra":json.dumps(info.get('extra')) if info.get('extra') else None, + "rawscrobble":json.dumps(info.get('rawscrobble')) if info.get('rawscrobble') else None } def track_dict_to_db(info,dbconn=None): return { - "title":info['title'], - "title_normalized":normalize_name(info['title']), + "title":info.get('title'), + "title_normalized":normalize_name(info.get('title','')) or None, "length":info.get('length') } @@ -277,13 +280,16 @@ def delete_scrobble(scrobble_id,dbconn=None): DB['scrobbles'].c.timestamp == scrobble_id ) - dbconn.execute(op) + result = dbconn.execute(op) + + return True + ### these will 'get' the ID of an entity, creating it if necessary @cached_wrapper @connection_provider -def get_track_id(trackdict,dbconn=None): +def get_track_id(trackdict,create_new=True,dbconn=None): ntitle = normalize_name(trackdict['title']) artist_ids = [get_artist_id(a,dbconn=dbconn) for a 
in trackdict['artists']] artist_ids = list(set(artist_ids)) @@ -313,6 +319,8 @@ def get_track_id(trackdict,dbconn=None): #print("ID for",trackdict['title'],"was",row[0]) return row.id + if not create_new: return None + op = DB['tracks'].insert().values( **track_dict_to_db(trackdict,dbconn=dbconn) @@ -356,6 +364,137 @@ def get_artist_id(artistname,create_new=True,dbconn=None): return result.inserted_primary_key[0] +### Edit existing + + +@connection_provider +def edit_scrobble(scrobble_id,scrobbleupdatedict,dbconn=None): + + dbentry = scrobble_dict_to_db(scrobbleupdatedict,dbconn=dbconn) + dbentry = {k:v for k,v in dbentry.items() if v} + + print("Updating scrobble",dbentry) + + with SCROBBLE_LOCK: + + op = DB['scrobbles'].update().where( + DB['scrobbles'].c.timestamp == scrobble_id + ).values( + **dbentry + ) + + dbconn.execute(op) + + +@connection_provider +def edit_artist(id,artistupdatedict,dbconn=None): + + artist = get_artist(id) + changedartist = artistupdatedict # well + + dbentry = artist_dict_to_db(artistupdatedict,dbconn=dbconn) + dbentry = {k:v for k,v in dbentry.items() if v} + + existing_artist_id = get_artist_id(changedartist,create_new=False,dbconn=dbconn) + if existing_artist_id not in (None,id): + raise exc.ArtistExists(changedartist) + + op = DB['artists'].update().where( + DB['artists'].c.id==id + ).values( + **dbentry + ) + result = dbconn.execute(op) + + return True + +@connection_provider +def edit_track(id,trackupdatedict,dbconn=None): + + track = get_track(id,dbconn=dbconn) + changedtrack = {**track,**trackupdatedict} + + dbentry = track_dict_to_db(trackupdatedict,dbconn=dbconn) + dbentry = {k:v for k,v in dbentry.items() if v} + + existing_track_id = get_track_id(changedtrack,create_new=False,dbconn=dbconn) + if existing_track_id not in (None,id): + raise exc.TrackExists(changedtrack) + + op = DB['tracks'].update().where( + DB['tracks'].c.id==id + ).values( + **dbentry + ) + result = dbconn.execute(op) + + return True + + +### Merge + +@connection_provider +def merge_tracks(target_id,source_ids,dbconn=None): + + op = DB['scrobbles'].update().where( + DB['scrobbles'].c.track_id.in_(source_ids) + ).values( + track_id=target_id + ) + result = dbconn.execute(op) + clean_db(dbconn=dbconn) + + return True + +@connection_provider +def merge_artists(target_id,source_ids,dbconn=None): + + # some tracks could already have multiple of the to be merged artists + + # find literally all tracksartist entries that have any of the artists involved + op = DB['trackartists'].select().where( + DB['trackartists'].c.artist_id.in_(source_ids + [target_id]) + ) + result = dbconn.execute(op) + + track_ids = set(row.track_id for row in result) + + # now just delete them all lmao + op = DB['trackartists'].delete().where( + #DB['trackartists'].c.track_id.in_(track_ids), + DB['trackartists'].c.artist_id.in_(source_ids + [target_id]), + ) + + result = dbconn.execute(op) + + # now add back the real new artist + op = DB['trackartists'].insert().values([ + {'track_id':track_id,'artist_id':target_id} + for track_id in track_ids + ]) + + result = dbconn.execute(op) + +# tracks_artists = {} +# for row in result: +# tracks_artists.setdefault(row.track_id,[]).append(row.artist_id) +# +# multiple = {k:v for k,v in tracks_artists.items() if len(v) > 1} +# +# print([(get_track(k),[get_artist(a) for a in v]) for k,v in multiple.items()]) +# +# op = DB['trackartists'].update().where( +# DB['trackartists'].c.artist_id.in_(source_ids) +# ).values( +# artist_id=target_id +# ) +# result = dbconn.execute(op) + + 
# this could have created duplicate tracks + merge_duplicate_tracks(artist_id=target_id,dbconn=dbconn) + clean_db(dbconn=dbconn) + + return True @@ -488,7 +627,7 @@ def get_tracks(dbconn=None): @cached_wrapper @connection_provider -def count_scrobbles_by_artist(since,to,dbconn=None): +def count_scrobbles_by_artist(since,to,resolve_ids=True,dbconn=None): jointable = sql.join( DB['scrobbles'], DB['trackartists'], @@ -516,16 +655,18 @@ def count_scrobbles_by_artist(since,to,dbconn=None): ).order_by(sql.desc('count')) result = dbconn.execute(op).all() - - counts = [row.count for row in result] - artists = get_artists_map([row.artist_id for row in result],dbconn=dbconn) - result = [{'scrobbles':row.count,'artist':artists[row.artist_id]} for row in result] + if resolve_ids: + counts = [row.count for row in result] + artists = get_artists_map([row.artist_id for row in result],dbconn=dbconn) + result = [{'scrobbles':row.count,'artist':artists[row.artist_id]} for row in result] + else: + result = [{'scrobbles':row.count,'artist_id':row.artist_id} for row in result] result = rank(result,key='scrobbles') return result @cached_wrapper @connection_provider -def count_scrobbles_by_track(since,to,dbconn=None): +def count_scrobbles_by_track(since,to,resolve_ids=True,dbconn=None): op = sql.select( @@ -537,10 +678,12 @@ def count_scrobbles_by_track(since,to,dbconn=None): ).group_by(DB['scrobbles'].c.track_id).order_by(sql.desc('count')) result = dbconn.execute(op).all() - - counts = [row.count for row in result] - tracks = get_tracks_map([row.track_id for row in result],dbconn=dbconn) - result = [{'scrobbles':row.count,'track':tracks[row.track_id]} for row in result] + if resolve_ids: + counts = [row.count for row in result] + tracks = get_tracks_map([row.track_id for row in result],dbconn=dbconn) + result = [{'scrobbles':row.count,'track':tracks[row.track_id]} for row in result] + else: + result = [{'scrobbles':row.count,'track_id':row.track_id} for row in result] result = rank(result,key='scrobbles') return result @@ -693,6 +836,17 @@ def get_artist(id,dbconn=None): return artist_db_to_dict(artistinfo,dbconn=dbconn) +@cached_wrapper +@connection_provider +def get_scrobble(timestamp, include_internal=False, dbconn=None): + op = DB['scrobbles'].select().where( + DB['scrobbles'].c.timestamp==timestamp + ) + result = dbconn.execute(op).all() + + scrobble = result[0] + return scrobbles_db_to_dict(rows=[scrobble], include_internal=include_internal)[0] + @cached_wrapper @connection_provider def search_artist(searchterm,dbconn=None): @@ -717,38 +871,37 @@ def search_track(searchterm,dbconn=None): ##### MAINTENANCE @runhourly -def clean_db(): +@connection_provider +def clean_db(dbconn=None): - with SCROBBLE_LOCK: - with engine.begin() as conn: - log(f"Database Cleanup...") + log(f"Database Cleanup...") - to_delete = [ - # tracks with no scrobbles (trackartist entries first) - "from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))", - "from tracks where id not in (select track_id from scrobbles)", - # artists with no tracks - "from artists where id not in (select artist_id from trackartists) and id not in (select target_artist from associated_artists)", - # tracks with no artists (scrobbles first) - "from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))", - "from tracks where id not in (select track_id from trackartists)" - ] + to_delete = [ + # tracks with no scrobbles (trackartist entries first) + "from 
trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))", + "from tracks where id not in (select track_id from scrobbles)", + # artists with no tracks + "from artists where id not in (select artist_id from trackartists) and id not in (select target_artist from associated_artists)", + # tracks with no artists (scrobbles first) + "from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))", + "from tracks where id not in (select track_id from trackartists)" + ] - for d in to_delete: - selection = conn.execute(sql.text(f"select * {d}")) - for row in selection.all(): - log(f"Deleting {row}") - deletion = conn.execute(sql.text(f"delete {d}")) + for d in to_delete: + selection = dbconn.execute(sql.text(f"select * {d}")) + for row in selection.all(): + log(f"Deleting {row}") + deletion = dbconn.execute(sql.text(f"delete {d}")) - log("Database Cleanup complete!") + log("Database Cleanup complete!") - #if a2+a1>0: log(f"Deleted {a2} tracks without scrobbles ({a1} track artist entries)") + #if a2+a1>0: log(f"Deleted {a2} tracks without scrobbles ({a1} track artist entries)") - #if a3>0: log(f"Deleted {a3} artists without tracks") + #if a3>0: log(f"Deleted {a3} artists without tracks") - #if a5+a4>0: log(f"Deleted {a5} tracks without artists ({a4} scrobbles)") + #if a5+a4>0: log(f"Deleted {a5} tracks without artists ({a4} scrobbles)") @@ -769,6 +922,46 @@ def renormalize_names(): rows = conn.execute(DB['artists'].update().where(DB['artists'].c.id == id).values(name_normalized=norm_target)) +@connection_provider +def merge_duplicate_tracks(artist_id,dbconn=None): + rows = dbconn.execute( + DB['trackartists'].select().where( + DB['trackartists'].c.artist_id == artist_id + ) + ) + affected_tracks = [r.track_id for r in rows] + + track_artists = {} + rows = dbconn.execute( + DB['trackartists'].select().where( + DB['trackartists'].c.track_id.in_(affected_tracks) + ) + ) + + + for row in rows: + track_artists.setdefault(row.track_id,[]).append(row.artist_id) + + artist_combos = {} + for track_id in track_artists: + artist_combos.setdefault(tuple(sorted(track_artists[track_id])),[]).append(track_id) + + for c in artist_combos: + if len(artist_combos[c]) > 1: + track_identifiers = {} + for track_id in artist_combos[c]: + track_identifiers.setdefault(normalize_name(get_track(track_id)['title']),[]).append(track_id) + for track in track_identifiers: + if len(track_identifiers[track]) > 1: + target,*src = track_identifiers[track] + merge_tracks(target,src,dbconn=dbconn) + + + + + + + diff --git a/maloja/dev/__init__.py b/maloja/dev/__init__.py new file mode 100644 index 0000000..1220cb9 --- /dev/null +++ b/maloja/dev/__init__.py @@ -0,0 +1,2 @@ +### Subpackage that takes care of all things that concern the server process itself, +### e.g. 
analytics diff --git a/maloja/proccontrol/tasks/generate.py b/maloja/dev/generate.py similarity index 97% rename from maloja/proccontrol/tasks/generate.py rename to maloja/dev/generate.py index 225b6ce..0083f67 100644 --- a/maloja/proccontrol/tasks/generate.py +++ b/maloja/dev/generate.py @@ -1,5 +1,6 @@ import random import datetime + from doreah.io import ask @@ -66,10 +67,10 @@ def generate_track(): -def generate(n=200): +def generate_scrobbles(n=200): + + from ..database.sqldb import add_scrobbles - from ...database.sqldb import add_scrobbles - n = int(n) if ask("Generate random scrobbles?",default=False): diff --git a/maloja/proccontrol/profiler.py b/maloja/dev/profiler.py similarity index 95% rename from maloja/proccontrol/profiler.py rename to maloja/dev/profiler.py index e8342a8..8d41455 100644 --- a/maloja/proccontrol/profiler.py +++ b/maloja/dev/profiler.py @@ -2,11 +2,10 @@ import os import cProfile, pstats - from doreah.logging import log from doreah.timing import Clock -from ..globalconf import data_dir +from ..pkg_global.conf import data_dir profiler = cProfile.Profile() diff --git a/maloja/images.py b/maloja/images.py index 607885f..80802ed 100644 --- a/maloja/images.py +++ b/maloja/images.py @@ -1,4 +1,4 @@ -from .globalconf import data_dir, malojaconfig +from .pkg_global.conf import data_dir, malojaconfig from . import thirdparty from . import database diff --git a/maloja/jinjaenv/context.py b/maloja/jinjaenv/context.py index 48356e7..55e2f4e 100644 --- a/maloja/jinjaenv/context.py +++ b/maloja/jinjaenv/context.py @@ -1,5 +1,5 @@ from . import filters -from ..globalconf import malojaconfig +from ..pkg_global.conf import malojaconfig from .. import database, malojatime, images, malojauri, thirdparty, __pkginfo__ from ..database import jinjaview diff --git a/maloja/malojatime.py b/maloja/malojatime.py index 28ac383..b383a27 100644 --- a/maloja/malojatime.py +++ b/maloja/malojatime.py @@ -3,7 +3,7 @@ from calendar import monthrange from os.path import commonprefix import math -from .globalconf import malojaconfig +from .pkg_global.conf import malojaconfig OFFSET = malojaconfig["TIMEZONE"] diff --git a/maloja/globalconf.py b/maloja/pkg_global/conf.py similarity index 99% rename from maloja/globalconf.py rename to maloja/pkg_global/conf.py index 73515d6..bbe1e7c 100644 --- a/maloja/globalconf.py +++ b/maloja/pkg_global/conf.py @@ -3,7 +3,7 @@ from doreah.configuration import Configuration from doreah.configuration import types as tp -from .__pkginfo__ import VERSION +from ..__pkginfo__ import VERSION @@ -311,7 +311,7 @@ config( auth={ "multiuser":False, "cookieprefix":"maloja", - "stylesheets":["/style.css"], + "stylesheets":["/maloja.css"], "dbfile":data_dir['auth']("auth.ddb") }, logging={ diff --git a/maloja/monkey.py b/maloja/pkg_global/monkey.py similarity index 100% rename from maloja/monkey.py rename to maloja/pkg_global/monkey.py diff --git a/maloja/proccontrol/control.py b/maloja/proccontrol/control.py deleted file mode 100644 index 2442c9e..0000000 --- a/maloja/proccontrol/control.py +++ /dev/null @@ -1,140 +0,0 @@ -import subprocess -from doreah import settings -from doreah.control import mainfunction -from doreah.io import col -import os -import signal -from ipaddress import ip_address - -from .setup import setup -from . import tasks -from .. import __pkginfo__ as info -from .. 
import globalconf - - - -def print_header_info(): - print() - #print("#####") - print(col['yellow']("Maloja"),"v" + info.VERSION) - print(info.HOMEPAGE) - #print("#####") - print() - - - -def getInstance(): - try: - output = subprocess.check_output(["pidof","Maloja"]) - return int(output) - except Exception: - return None - -def getInstanceSupervisor(): - try: - output = subprocess.check_output(["pidof","maloja_supervisor"]) - return int(output) - except Exception: - return None - -def restart(): - stop() - start() - -def start(): - if getInstanceSupervisor() is not None: - print("Maloja is already running.") - else: - print_header_info() - setup() - try: - #p = subprocess.Popen(["python3","-m","maloja.server"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL) - sp = subprocess.Popen(["python3","-m","maloja.proccontrol.supervisor"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL) - print(col["green"]("Maloja started!")) - - port = globalconf.malojaconfig["PORT"] - - print("Visit your server address (Port " + str(port) + ") to see your web interface. Visit /admin_setup to get started.") - print("If you're installing this on your local machine, these links should get you there:") - print("\t" + col["blue"]("http://localhost:" + str(port))) - print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup")) - return True - except Exception: - print("Error while starting Maloja.") - return False - - -def stop(): - - pid_sv = getInstanceSupervisor() - if pid_sv is not None: - os.kill(pid_sv,signal.SIGTERM) - - pid = getInstance() - if pid is not None: - os.kill(pid,signal.SIGTERM) - - if pid is None and pid_sv is None: - return False - - print("Maloja stopped!") - return True - -def onlysetup(): - print_header_info() - setup() - print("Setup complete!") - -def direct(): - print_header_info() - setup() - from .. 
import server - server.run_server() - -def debug(): - os.environ["MALOJA_DEV_MODE"] = 'true' - globalconf.malojaconfig.load_environment() - direct() - -def print_info(): - print_header_info() - print(col['lightblue']("Configuration Directory:"),globalconf.dir_settings['config']) - print(col['lightblue']("Data Directory: "),globalconf.dir_settings['state']) - print(col['lightblue']("Log Directory: "),globalconf.dir_settings['logs']) - print(col['lightblue']("Network: "),f"IPv{ip_address(globalconf.malojaconfig['host']).version}, Port {globalconf.malojaconfig['port']}") - print(col['lightblue']("Timezone: "),f"UTC{globalconf.malojaconfig['timezone']:+d}") - print() - print() - -@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images'],shield=True) -def main(*args,**kwargs): - - actions = { - # server - "start":start, - "restart":restart, - "stop":stop, - "run":direct, - "debug":debug, - "setup":onlysetup, - # admin scripts - "import":tasks.import_scrobbles, # maloja import /x/y.csv - "backup":tasks.backup, # maloja backup --targetfolder /x/y --include_images - "generate":tasks.generate, # maloja generate 400 - "export":tasks.export, # maloja export - # aux - "info":print_info - } - - if "version" in kwargs: - print(info.VERSION) - return True - else: - try: - action, *args = args - action = actions[action] - except (ValueError, KeyError): - print("Valid commands: " + " ".join(a for a in actions)) - return False - - return action(*args,**kwargs) diff --git a/maloja/proccontrol/supervisor.py b/maloja/proccontrol/supervisor.py deleted file mode 100644 index f1ab668..0000000 --- a/maloja/proccontrol/supervisor.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python3 -import os - -from ..globalconf import malojaconfig - -import subprocess -import setproctitle -import signal -from doreah.logging import log - - -from .control import getInstance - - -setproctitle.setproctitle("maloja_supervisor") - -def start(): - try: - return subprocess.Popen( - ["python3", "-m", "maloja","run"], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) - except e: - log("Error starting Maloja: " + str(e),module="supervisor") - - - -while True: - log("Maloja is not running, starting...",module="supervisor") - process = start() - - process.wait() diff --git a/maloja/proccontrol/tasks/__init__.py b/maloja/proccontrol/tasks/__init__.py index eda907d..cf2cd85 100644 --- a/maloja/proccontrol/tasks/__init__.py +++ b/maloja/proccontrol/tasks/__init__.py @@ -1,4 +1,3 @@ from .import_scrobbles import import_scrobbles from .backup import backup -from .generate import generate from .export import export # read that line out loud diff --git a/maloja/proccontrol/tasks/backup.py b/maloja/proccontrol/tasks/backup.py index 73f797b..dadacfc 100644 --- a/maloja/proccontrol/tasks/backup.py +++ b/maloja/proccontrol/tasks/backup.py @@ -2,7 +2,7 @@ import tarfile import time import glob import os -from ...globalconf import dir_settings +from ...pkg_global.conf import dir_settings from pathlib import PurePath from doreah.logging import log diff --git a/maloja/proccontrol/tasks/import_scrobbles.py b/maloja/proccontrol/tasks/import_scrobbles.py index 41c76c7..e247521 100644 --- a/maloja/proccontrol/tasks/import_scrobbles.py +++ b/maloja/proccontrol/tasks/import_scrobbles.py @@ -4,7 +4,7 @@ import json, csv from doreah.io import col, ask, prompt from ...cleanup import * -from ...globalconf import data_dir +from ...pkg_global.conf import data_dir c = CleanerAgent() diff --git a/maloja/server.py 
b/maloja/server.py index 63359eb..8bbc001 100644 --- a/maloja/server.py +++ b/maloja/server.py @@ -2,9 +2,7 @@ import sys import os from threading import Thread -import setproctitle from importlib import resources -from css_html_js_minify import html_minify, css_minify import datauri import time @@ -22,12 +20,12 @@ from . import database from .database.jinjaview import JinjaDBConnection from .images import resolve_track_image, resolve_artist_image from .malojauri import uri_to_internal, remove_identical -from .globalconf import malojaconfig, data_dir +from .pkg_global.conf import malojaconfig, data_dir from .jinjaenv.context import jinja_environment from .apis import init_apis, apikeystore -from .proccontrol.profiler import profile +from .dev.profiler import profile ###### @@ -43,48 +41,6 @@ BaseRequest.MEMFILE_MAX = 15 * 1024 * 1024 webserver = Bottle() -#rename process, this is now required for the daemon manager to work -setproctitle.setproctitle("Maloja") - - -###### -### CSS -##### - - -def generate_css(): - cssstr = "" - with resources.files('maloja') / 'web' / 'static' as staticfolder: - - for file in os.listdir(os.path.join(staticfolder,"css")): - if file.endswith(".css"): - with open(os.path.join(staticfolder,"css",file),"r") as filed: - cssstr += filed.read() - - for file in os.listdir(data_dir['css']()): - if file.endswith(".css"): - with open(os.path.join(data_dir['css'](file)),"r") as filed: - cssstr += filed.read() - - cssstr = css_minify(cssstr) - return cssstr - -css = generate_css() - - - -###### -### MINIFY -##### - -def clean_html(inp): - return inp - - #if malojaconfig["DEV_MODE"]: return inp - #else: return html_minify(inp) - - - @@ -204,13 +160,6 @@ def static_image(pth): return resp -@webserver.route("/style.css") -def get_css(): - response.content_type = 'text/css' - if malojaconfig["DEV_MODE"]: return generate_css() - else: return css - - @webserver.route("/login") def login(): return auth.get_login_page() @@ -219,7 +168,7 @@ def login(): @webserver.route("/.") @webserver.route("/media/.") def static(name,ext): - assert ext in ["txt","ico","jpeg","jpg","png","less","js","ttf"] + assert ext in ["txt","ico","jpeg","jpg","png","less","js","ttf","css"] with resources.files('maloja') / 'web' / 'static' as staticfolder: response = static_file(ext + "/" + name + "." + ext,root=staticfolder) response.set_header("Cache-Control", "public, max-age=3600") @@ -263,7 +212,7 @@ def jinja_page(name): if malojaconfig["DEV_MODE"]: jinja_environment.cache.clear() - return clean_html(res) + return res @webserver.route("/") @auth.authenticated diff --git a/maloja/proccontrol/setup.py b/maloja/setup.py similarity index 96% rename from maloja/proccontrol/setup.py rename to maloja/setup.py index 8872357..b74dbab 100644 --- a/maloja/proccontrol/setup.py +++ b/maloja/setup.py @@ -1,10 +1,12 @@ -from importlib import resources -from distutils import dir_util -from doreah.io import col, ask, prompt -from doreah import auth import os -from ..globalconf import data_dir, dir_settings, malojaconfig +from importlib import resources +from distutils import dir_util + +from doreah.io import col, ask, prompt +from doreah import auth + +from .pkg_global.conf import data_dir, dir_settings, malojaconfig @@ -48,7 +50,7 @@ def setup(): # OWN API KEY - from ..apis import apikeystore + from .apis import apikeystore if len(apikeystore) == 0: answer = ask("Do you want to set up a key to enable scrobbling? 
Your scrobble extension needs that key so that only you can scrobble tracks to your database.",default=True,skip=SKIP) if answer: diff --git a/maloja/thirdparty/__init__.py b/maloja/thirdparty/__init__.py index 8d23109..135d792 100644 --- a/maloja/thirdparty/__init__.py +++ b/maloja/thirdparty/__init__.py @@ -13,7 +13,7 @@ import base64 from doreah.logging import log from threading import BoundedSemaphore -from ..globalconf import malojaconfig +from ..pkg_global.conf import malojaconfig from .. import database diff --git a/maloja/upgrade.py b/maloja/upgrade.py index 03b6a26..67d1176 100644 --- a/maloja/upgrade.py +++ b/maloja/upgrade.py @@ -7,7 +7,7 @@ import csv from doreah.logging import log from doreah.io import col -from .globalconf import data_dir, dir_settings +from .pkg_global.conf import data_dir, dir_settings from .apis import _apikeys diff --git a/maloja/web/jinja/abstracts/admin.jinja b/maloja/web/jinja/abstracts/admin.jinja index dae7397..ff14fd5 100644 --- a/maloja/web/jinja/abstracts/admin.jinja +++ b/maloja/web/jinja/abstracts/admin.jinja @@ -37,7 +37,6 @@

- diff --git a/maloja/web/jinja/abstracts/base.jinja b/maloja/web/jinja/abstracts/base.jinja index aa9cf33..1c6da0d 100644 --- a/maloja/web/jinja/abstracts/base.jinja +++ b/maloja/web/jinja/abstracts/base.jinja @@ -12,7 +12,7 @@ - + @@ -50,9 +50,7 @@ {% endblock %} {% endblock %} -
-
-
- -
+
+ {% block icon_bar %}{% endblock %} + {% include 'icons/settings.jinja' %} +
+ + +
+ +
+ diff --git a/maloja/web/jinja/admin_overview.jinja b/maloja/web/jinja/admin_overview.jinja index 36993a6..6e084f2 100644 --- a/maloja/web/jinja/admin_overview.jinja +++ b/maloja/web/jinja/admin_overview.jinja @@ -66,6 +66,9 @@
  • manually scrobble from track pages
  • delete scrobbles
+  • reparse scrobbles
+  • edit tracks and artists
+  • merge tracks and artists
  • upload artist and track art by dropping a file on the existing image on an artist or track page
  • see more detailed error pages
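The scrobble-level actions listed above (delete, and the new reparse) are performed by edit.js further down in this diff via JSON POSTs to the native API. A minimal Python sketch of the same two calls — the endpoint paths and payload shape are taken from edit.js; the default port 42010, the admin session cookie, and the timestamp value are assumptions:

    import requests

    BASE = "http://localhost:42010"        # assumed default port
    session = requests.Session()           # assumed to already carry an admin login cookie

    # re-run the server-side parser on a single scrobble, identified by its timestamp
    r = session.post(f"{BASE}/apis/mlj_1/reparse_scrobble", json={"timestamp": 1656878400})
    # edit.js treats a 200 response with status == 'no_operation' as "nothing changed"
    if r.ok and r.json().get("status") != "no_operation":
        print("reparsed into:", r.json()["scrobble"]["track"])

    # delete a single scrobble (the web UI fades the row out on success)
    session.post(f"{BASE}/apis/mlj_1/delete_scrobble", json={"timestamp": 1656878400})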
diff --git a/maloja/web/jinja/admin_setup.jinja b/maloja/web/jinja/admin_setup.jinja index e6313f1..7e00ad6 100644 --- a/maloja/web/jinja/admin_setup.jinja +++ b/maloja/web/jinja/admin_setup.jinja @@ -71,7 +71,7 @@ album Album title - optional albumartists List of album artists - optional duration Duration of play in seconds - optional - length Full length of the trackin seconds - optional + length Full length of the track in seconds - optional time UNIX timestamp - optional, defaults to time of request fix Set this to false to skip server-side metadata fixing - optional @@ -85,7 +85,7 @@

Import your Last.FM data

Switching from Last.fm? Download all your data and run the command maloja import (the file you just downloaded).
- You can also try out Multi-Scrobbler to import scrobbles from a wider range of sources. + You can also try out Multi-Scrobbler to import scrobbles from a wider range of sources.

Set up some rules

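The optional fields documented in the table earlier in this hunk (album, albumartists, duration, length, time, fix) are accepted by the same native-API endpoint that manualscrobble.js posts to. A hedged Python sketch — the field names match the table and manualscrobble.js, while the port and the "key" authentication field are assumptions:

    import time
    import requests

    payload = {
        "artists": ["Example Artist"],        # required; field name as in manualscrobble.js
        "title": "Example Title",             # required
        "album": "Example Album",             # optional
        "albumartists": ["Example Artist"],   # optional
        "duration": 192,                      # seconds actually played - optional
        "length": 240,                        # full length of the track in seconds - optional
        "time": int(time.time()),             # UNIX timestamp - optional, defaults to request time
        "fix": False,                         # skip server-side metadata fixing - optional
        "key": "YOUR_API_KEY",                # assumption: the scrobble key created during setup
    }
    requests.post("http://localhost:42010/apis/mlj_1/newscrobble", json=payload)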
diff --git a/maloja/web/jinja/artist.jinja b/maloja/web/jinja/artist.jinja index f738c8b..eb09c77 100644 --- a/maloja/web/jinja/artist.jinja +++ b/maloja/web/jinja/artist.jinja @@ -6,6 +6,7 @@ {% block scripts %} + {% endblock %} {% set artist = filterkeys.artist %} @@ -26,10 +27,23 @@ {% set encodedartist = mlj_uri.uriencode({'artist':artist}) %} +{% block icon_bar %} + {% if adminmode %} + {% include 'icons/edit.jinja' %} + {% include 'icons/merge.jinja' %} + {% include 'icons/merge_mark.jinja' %} + {% include 'icons/merge_cancel.jinja' %} + + {% endif %} +{% endblock %} {% block content %} - + @@ -47,7 +61,7 @@ {% endif %} -

{{ info.artist }}

+

{{ info.artist }}

{% if competes %}#{{ info.position }}{% endif %}
{% if competes and included %} @@ -56,7 +70,9 @@ Competing under {{ links.link(credited) }} (#{{ info.position }}) {% endif %} -

{{ info['scrobbles'] }} Scrobbles

+

+ {{ info['scrobbles'] }} Scrobbles +

@@ -72,6 +88,7 @@ +

Top Tracks

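The icon bar added to the artist page above drives the new entity editing and merging endpoints; edit.js (later in this diff) renames via edit_artist/edit_track and merges by collecting ids in sessionStorage before posting them. A minimal Python sketch of the artist-side calls, assuming an admin session cookie, the default port, and hypothetical database ids (payload shapes from edit.js):

    import requests

    BASE = "http://localhost:42010"        # assumed default port
    session = requests.Session()           # assumed to already carry an admin login cookie

    # rename artist 3 (tracks use /apis/mlj_1/edit_track with {'id': ..., 'title': ...})
    session.post(f"{BASE}/apis/mlj_1/edit_artist", json={"id": 3, "name": "Corrected Name"})

    # absorb artists 4 and 5 into artist 3 (tracks use /apis/mlj_1/merge_tracks)
    session.post(f"{BASE}/apis/mlj_1/merge_artists", json={"source_ids": [4, 5], "target_id": 3})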
diff --git a/maloja/web/jinja/icons/LICENSE-material b/maloja/web/jinja/icons/LICENSE-material new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/maloja/web/jinja/icons/LICENSE-material @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/maloja/web/jinja/icons/LICENSE-octicons b/maloja/web/jinja/icons/LICENSE-octicons new file mode 100644 index 0000000..00e9069 --- /dev/null +++ b/maloja/web/jinja/icons/LICENSE-octicons @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/maloja/web/jinja/icons/delete.jinja b/maloja/web/jinja/icons/delete.jinja new file mode 100644 index 0000000..c1498ed --- /dev/null +++ b/maloja/web/jinja/icons/delete.jinja @@ -0,0 +1,6 @@ +
+ + + + +
diff --git a/maloja/web/jinja/icons/edit.jinja b/maloja/web/jinja/icons/edit.jinja new file mode 100644 index 0000000..1b4e9f3 --- /dev/null +++ b/maloja/web/jinja/icons/edit.jinja @@ -0,0 +1,5 @@ +
+ + + +
diff --git a/maloja/web/jinja/icons/merge.jinja b/maloja/web/jinja/icons/merge.jinja new file mode 100644 index 0000000..dfe2dd9 --- /dev/null +++ b/maloja/web/jinja/icons/merge.jinja @@ -0,0 +1,5 @@ +
+ + + +
diff --git a/maloja/web/jinja/icons/merge_cancel.jinja b/maloja/web/jinja/icons/merge_cancel.jinja new file mode 100644 index 0000000..64c1d57 --- /dev/null +++ b/maloja/web/jinja/icons/merge_cancel.jinja @@ -0,0 +1,5 @@ +
+ + + +
diff --git a/maloja/web/jinja/icons/merge_mark.jinja b/maloja/web/jinja/icons/merge_mark.jinja new file mode 100644 index 0000000..8623dbc --- /dev/null +++ b/maloja/web/jinja/icons/merge_mark.jinja @@ -0,0 +1,5 @@ +
+ + + +
diff --git a/maloja/web/jinja/icons/nodata.jinja b/maloja/web/jinja/icons/nodata.jinja new file mode 100644 index 0000000..dde0cbb --- /dev/null +++ b/maloja/web/jinja/icons/nodata.jinja @@ -0,0 +1,7 @@ + + + + + +
No scrobbles yet! + diff --git a/maloja/web/jinja/icons/reparse.jinja b/maloja/web/jinja/icons/reparse.jinja new file mode 100644 index 0000000..2f13d39 --- /dev/null +++ b/maloja/web/jinja/icons/reparse.jinja @@ -0,0 +1,5 @@ +
+ + + +
diff --git a/maloja/web/jinja/icons/settings.jinja b/maloja/web/jinja/icons/settings.jinja new file mode 100644 index 0000000..f110b5c --- /dev/null +++ b/maloja/web/jinja/icons/settings.jinja @@ -0,0 +1,10 @@ + +
+ + + + + + +
+
diff --git a/maloja/web/jinja/partials/charts_artists_tiles.jinja b/maloja/web/jinja/partials/charts_artists_tiles.jinja index 78599ab..28efeda 100644 --- a/maloja/web/jinja/partials/charts_artists_tiles.jinja +++ b/maloja/web/jinja/partials/charts_artists_tiles.jinja @@ -9,8 +9,12 @@ {% set charts_cycler = cycler(*charts_14) %} + {% for segment in range(3) %} + {% if charts_14[0] is none and loop.first %} + {% include 'icons/nodata.jinja' %} + {% else %} + + {% endif %} {% endfor %}
{% set segmentsize = segment+1 %} @@ -35,6 +39,7 @@ {%- endfor -%}
-
diff --git a/maloja/web/jinja/partials/charts_tracks_tiles.jinja b/maloja/web/jinja/partials/charts_tracks_tiles.jinja index 6d597e7..90780bd 100644 --- a/maloja/web/jinja/partials/charts_tracks_tiles.jinja +++ b/maloja/web/jinja/partials/charts_tracks_tiles.jinja @@ -11,6 +11,9 @@ {% for segment in range(3) %} + {% if charts_14[0] is none and loop.first %} + {% include 'icons/nodata.jinja' %} + {% else %} + + {% endif %} {% endfor %}
{% set segmentsize = segment+1 %} @@ -35,6 +38,7 @@ {%- endfor %}
-
diff --git a/maloja/web/jinja/partials/scrobbles.jinja b/maloja/web/jinja/partials/scrobbles.jinja index 1fb1cbd..dc487c3 100644 --- a/maloja/web/jinja/partials/scrobbles.jinja +++ b/maloja/web/jinja/partials/scrobbles.jinja @@ -17,18 +17,28 @@ {{ entityrow.row(s.track) }} {% if adminmode %} - - - - + + + + + + + + + {% include 'icons/reparse.jinja' %} + - -
- - - -
+ + + + + + + + + {% include 'icons/delete.jinja' %} + diff --git a/maloja/web/jinja/start.jinja b/maloja/web/jinja/start.jinja index 424ac86..f8240da 100644 --- a/maloja/web/jinja/start.jinja +++ b/maloja/web/jinja/start.jinja @@ -75,7 +75,7 @@ - {%- with amountkeys = {"perpage":15,"page":0}, shortTimeDesc=True -%} + {%- with amountkeys = {"perpage":12,"page":0}, shortTimeDesc=True -%} {% include 'partials/scrobbles.jinja' %} {%- endwith -%} diff --git a/maloja/web/jinja/track.jinja b/maloja/web/jinja/track.jinja index 03762c6..291add0 100644 --- a/maloja/web/jinja/track.jinja +++ b/maloja/web/jinja/track.jinja @@ -5,6 +5,7 @@ {% block scripts %} + + {% endif %} +{% endblock %} + {% block content %} + + {% import 'partials/awards_track.jinja' as awards %} @@ -42,7 +59,7 @@ {{ links.links(track.artists) }}
-

{{ info.track.title }}

+

{{ info.track.title }}

{{ awards.certs(track) }} #{{ info.position }}
diff --git a/maloja/web/static/css/grisons.css b/maloja/web/static/css/grisons.css index 625b877..486fbf1 100644 --- a/maloja/web/static/css/grisons.css +++ b/maloja/web/static/css/grisons.css @@ -2,6 +2,8 @@ COMMON STYLES FOR MALOJA, ALBULA AND POSSIBLY OTHERS **/ +@import url("/grisonsfont.css"); + :root { --base-color: #232327; --base-color-dark: #090909; @@ -156,5 +158,5 @@ input:focus { .hide { - display:none; + display:none !important; } diff --git a/maloja/web/static/css/maloja.css b/maloja/web/static/css/maloja.css index badbeb9..68bba2b 100644 --- a/maloja/web/static/css/maloja.css +++ b/maloja/web/static/css/maloja.css @@ -1,3 +1,6 @@ +@import url("/grisons.css"); + + body { padding:15px; padding-bottom:35px; @@ -55,24 +58,32 @@ div.header h1 { settings icon **/ -div.clickable_icon { - display: inline-block; +svg { fill: var(--text-color); - cursor: pointer; -} -div.clickable_icon:hover { - fill: var(--text-color-focus); -} -div.clickable_icon.danger:hover { - fill: red; } -div#settingsicon { +div#icon_bar { position:fixed; right:30px; top:30px; } +div#icon_bar div.clickable_icon { + display: inline-block; + height:26px; + width:26px; +} +div.clickable_icon svg { + cursor: pointer; +} +div.clickable_icon:hover svg { + fill: var(--text-color-focus); +} +div.clickable_icon.danger:hover svg { + fill: red; +} + + /** Footer @@ -198,7 +209,7 @@ div#notification_area { div#notification_area div.notification { background-color:white; width:400px; - height:100px; + height:50px; margin-bottom:7px; padding:9px; opacity:0.4; @@ -512,7 +523,8 @@ table.list { table.list tr { background-color: var(--current-bg-color); border-color: var(--current-bg-color); - height: 1.4em; + height: 1.45em; + transition: opacity 2s; } @@ -610,31 +622,50 @@ table.list td.searchProvider:hover { color: gold; } -table.list td.delete_area { +table.list td.scrobble_action_area { text-align: right; - width:7em; + width:2em; + overflow:visible; +} + +table.list tr td.scrobble_action_area span.scrobble_action_type { + display:inline-block; + float:right; +} + +table.list td.scrobble_action_area span.scrobble_action_type.active { } /* rows that can be deleted in some form 'active' class on the delete area cell to toggle confirm prompt 'removed' class on the whole row to delete */ -table.list tr td.delete_area span.confirmactions { +table.list tr td.scrobble_action_area span.scrobble_action_type span.confirmactions { display: none; } -table.list tr td.delete_area span.initializeactions { +table.list tr td.scrobble_action_area span.scrobble_action_type span.initializeactions { display: initial; } -table.list tr td.delete_area.active span.confirmactions { + +/* when other action is active, hide all */ +table.list tr td.scrobble_action_area.active span.scrobble_action_type span.initializeactions { + display: none; +} +table.list tr td.scrobble_action_area.active span.scrobble_action_type span.initializeactions { + display: none; +} +/* except this one itself is active */ +table.list tr td.scrobble_action_area.active span.scrobble_action_type.active span.confirmactions { display: initial; } -table.list tr td.delete_area.active span.initializeactions { +table.list tr td.scrobble_action_area.active span.scrobble_action_type.active span.initializeactions { display: none; } -table.list tr.removed td.delete_area span.confirmactions { + +table.list tr.removed td.scrobble_action_area span.scrobble_action_type { display: none; } -table.list tr.removed td.delete_area span.initializeactions { +table.list tr.removed 
td.scrobble_action_area span.scrobble_action_type { display: none; } table.list tr.removed { @@ -643,6 +674,13 @@ table.list tr.removed { } +table.list tr.changed { + /*background-color: rgba(222,209,180,0.7) !important;*/ + opacity:0; + transition: opacity 0.2s; +} + + /* table td.artists div { overflow:hidden; diff --git a/maloja/web/static/js/edit.js b/maloja/web/static/js/edit.js index 0a415ba..4229ea5 100644 --- a/maloja/web/static/js/edit.js +++ b/maloja/web/static/js/edit.js @@ -1,12 +1,260 @@ // JS for all web interface editing / deletion of scrobble data +// HELPERS +function selectAll(e) { + // https://stackoverflow.com/a/6150060/6651341 + var range = document.createRange(); + range.selectNodeContents(e); + var sel = window.getSelection(); + sel.removeAllRanges(); + sel.addRange(range); +} + +// DELETION function toggleDeleteConfirm(element) { element.parentElement.parentElement.classList.toggle('active'); + element.parentElement.parentElement.parentElement.classList.toggle('active'); } function deleteScrobble(id,element) { - element.parentElement.parentElement.parentElement.classList.add('removed'); + var callback_func = function(req){ + if (req.status == 200) { + element.parentElement.parentElement.parentElement.parentElement.classList.add('removed'); + notifyCallback(req); + } + else { + notifyCallback(req); + } + }; - neo.xhttpreq("/apis/mlj_1/delete_scrobble",data={'timestamp':id},method="POST",callback=(()=>null),json=true); + neo.xhttpreq("/apis/mlj_1/delete_scrobble",data={'timestamp':id},method="POST",callback=callback_func,json=true); +} + +// REPARSING + +function toggleReparseConfirm(element) { + element.parentElement.parentElement.classList.toggle('active'); + element.parentElement.parentElement.parentElement.classList.toggle('active'); +} + +function reparseScrobble(id, element) { + toggleReparseConfirm(element); + + callback_func = function(req){ + if (req.status == 200) { + if (req.response.status != 'no_operation') { + //window.location.reload(); + notifyCallback(req); + var newtrack = req.response.scrobble.track; + var row = element.parentElement.parentElement.parentElement.parentElement; + changeScrobbleRow(row,newtrack); + } + else { + notifyCallback(req); + } + } + else { + notifyCallback(req); + } + }; + + neo.xhttpreq("/apis/mlj_1/reparse_scrobble",data={'timestamp':id},method="POST",callback=callback_func,json=true); } + +function changeScrobbleRow(element,newtrack) { + element.classList.add('changed'); + + setTimeout(function(){ + element.getElementsByClassName('track')[0].innerHTML = createTrackCell(newtrack); + },200); + setTimeout(function(){element.classList.remove('changed')},300); +} + +function createTrackCell(trackinfo) { + + var trackquery = new URLSearchParams(); + trackinfo.artists.forEach((a)=>trackquery.append('artist',a)); + trackquery.append('title',trackinfo.title); + + tracklink = document.createElement('a'); + tracklink.href = "/track?" + trackquery.toString(); + tracklink.textContent = trackinfo.title; + + artistelements = [] + var artistholder = document.createElement('span'); + artistholder.classList.add('artist_in_trackcolumn'); + for (var a of trackinfo.artists) { + var artistquery = new URLSearchParams(); + artistquery.append('artist',a); + + artistlink = document.createElement('a'); + artistlink.href = "/artist?" 
+ artistquery.toString(); + artistlink.textContent = a; + + artistelements.push(artistlink.outerHTML) + } + + artistholder.innerHTML = artistelements.join(", "); + return artistholder.outerHTML + " – " + tracklink.outerHTML; +} + + +// EDIT NAME +function editEntity() { + + var namefield = document.getElementById('main_entity_name'); + namefield.contentEditable = "plaintext-only"; + + namefield.addEventListener('keydown',function(e){ + // dont allow new lines, done on enter + if (e.key === "Enter") { + e.preventDefault(); + namefield.blur(); // this leads to below + } + // cancel on esc + else if (e.key === "Escape" || e.key === "Esc") { + e.preventDefault(); + namefield.textContent = entity_name; + namefield.blur(); + } + }) + + // emergency, not pretty because it will move cursor + namefield.addEventListener('input',function(e){ + if (namefield.textContent.includes("\n")) { + namefield.textContent = namefield.textContent.replace("\n",""); + } + }) + + // manually clicking away OR enter + namefield.addEventListener('blur',function(e){ + doneEditing(); + }) + + namefield.focus(); + selectAll(namefield); +} + +function doneEditing() { + window.getSelection().removeAllRanges(); + var namefield = document.getElementById('main_entity_name'); + namefield.contentEditable = "false"; + newname = namefield.textContent; + + if (newname != entity_name) { + var searchParams = new URLSearchParams(window.location.search); + + if (entity_type == 'artist') { + var endpoint = "/apis/mlj_1/edit_artist"; + searchParams.set("artist", newname); + var payload = {'id':entity_id,'name':newname}; + } + else if (entity_type == 'track') { + var endpoint = "/apis/mlj_1/edit_track"; + searchParams.set("title", newname); + var payload = {'id':entity_id,'title':newname} + } + + callback_func = function(req){ + if (req.status == 200) { + window.location = "?" 
+ searchParams.toString(); + } + else { + notifyCallback(req); + namefield.textContent = entity_name; + } + }; + + neo.xhttpreq( + endpoint, + data=payload, + method="POST", + callback=callback_func, + json=true + ); + } +} + +// MERGING + +function showValidMergeIcons() { + const lcst = window.sessionStorage; + var key = "marked_for_merge_" + entity_type; + var current_stored = (lcst.getItem(key) || '').split(","); + current_stored = current_stored.filter((x)=>x).map((x)=>parseInt(x)); + + var mergeicon = document.getElementById('mergeicon'); + var mergemarkicon = document.getElementById('mergemarkicon'); + var mergecancelicon = document.getElementById('mergecancelicon'); + + mergeicon.classList.add('hide'); + mergemarkicon.classList.add('hide'); + mergecancelicon.classList.add('hide'); + + if (current_stored.length == 0) { + mergemarkicon.classList.remove('hide'); + } + else { + mergecancelicon.classList.remove('hide'); + + if (current_stored.includes(entity_id)) { + + } + else { + mergemarkicon.classList.remove('hide'); + mergeicon.classList.remove('hide'); + } + } + +} + + +function markForMerge() { + const lcst = window.sessionStorage; + var key = "marked_for_merge_" + entity_type; + var current_stored = (lcst.getItem(key) || '').split(","); + current_stored = current_stored.filter((x)=>x).map((x)=>parseInt(x)); + current_stored.push(entity_id); + current_stored = [...new Set(current_stored)]; + lcst.setItem(key,current_stored); //this already formats it correctly + notify("Marked " + entity_name + " for merge","Currently " + current_stored.length + " marked!") + showValidMergeIcons(); +} + +function merge() { + const lcst = window.sessionStorage; + var key = "marked_for_merge_" + entity_type; + var current_stored = lcst.getItem(key).split(","); + current_stored = current_stored.filter((x)=>x).map((x)=>parseInt(x)); + + callback_func = function(req){ + if (req.status == 200) { + window.location.reload(); + } + else { + notifyCallback(req); + } + }; + + neo.xhttpreq( + "/apis/mlj_1/merge_" + entity_type + "s", + data={ + 'source_ids':current_stored, + 'target_id':entity_id + }, + method="POST", + callback=callback_func, + json=true + ); + + lcst.removeItem(key); +} + +function cancelMerge() { + const lcst = window.sessionStorage; + var key = "marked_for_merge_" + entity_type; + lcst.setItem(key,[]); + showValidMergeIcons(); + notify("Cancelled merge!","") +} diff --git a/maloja/web/static/js/manualscrobble.js b/maloja/web/static/js/manualscrobble.js index 1ae55de..4a58ee9 100644 --- a/maloja/web/static/js/manualscrobble.js +++ b/maloja/web/static/js/manualscrobble.js @@ -69,8 +69,9 @@ function scrobble(artists,title) { "title":title } + if (title != "" && artists.length > 0) { - neo.xhttpreq("/apis/mlj_1/newscrobble",data=payload,method="POST",callback=scrobbledone,json=true) + neo.xhttpreq("/apis/mlj_1/newscrobble",data=payload,method="POST",callback=notifyCallback,json=true) } document.getElementById("title").value = ""; diff --git a/maloja/web/static/js/notifications.js b/maloja/web/static/js/notifications.js index 5dae55e..1cdf323 100644 --- a/maloja/web/static/js/notifications.js +++ b/maloja/web/static/js/notifications.js @@ -6,7 +6,7 @@ const colors = { } const notification_template = info => ` -
+
${info.title}
${info.body} @@ -20,11 +20,11 @@ function htmlToElement(html) { return template.content.firstChild; } -function notify(title,msg,type='info',reload=false) { +function notify(title,msg,notification_type='info',reload=false) { info = { 'title':title, 'body':msg, - 'type':type + 'notification_type':notification_type } var element = htmlToElement(notification_template(info)); @@ -33,3 +33,22 @@ function notify(title,msg,type='info',reload=false) { setTimeout(function(e){e.remove();},7000,element); } + +function notifyCallback(request) { + var body = request.response; + var status = request.status; + + if (status == 200) { + var notification_type = 'info'; + var title = "Success!"; + var msg = body.desc || body; + } + else { + var notification_type = 'warning'; + var title = "Error: " + body.error.type; + var msg = body.error.desc || ""; + } + + + notify(title,msg,notification_type); +} diff --git a/maloja/web/static/js/search.js b/maloja/web/static/js/search.js index 4afa5be..f903bae 100644 --- a/maloja/web/static/js/search.js +++ b/maloja/web/static/js/search.js @@ -61,7 +61,7 @@ function searchresult() { } for (var i=0;i=0.12.16", - "waitress>=1.3", + "waitress>=2.1.0", "doreah>=1.9.1, <2", "nimrodel>=0.8.0", "setproctitle>=1.1.10", #"pyvips>=2.1.16", - "jinja2>=2.11", + "jinja2>=3.0.0", "lru-dict>=1.1.6", - "css_html_js_minify>=2.5.5", "psutil>=5.8.0", "sqlalchemy>=1.4", "python-datauri>=1.1.0", @@ -40,7 +39,7 @@ full = [ ] [project.scripts] -maloja = "maloja.proccontrol.control:main" +maloja = "maloja.__main__:main" [build-system] requires = ["flit_core >=3.2,<4"] diff --git a/requirements.txt b/requirements.txt index d4f1bf4..77b1e34 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,10 @@ bottle>=0.12.16 -waitress>=1.3 +waitress>=2.1.0 doreah>=1.9.1, <2 nimrodel>=0.8.0 setproctitle>=1.1.10 -jinja2>=2.11 +jinja2>=3.0.0 lru-dict>=1.1.6 -css_html_js_minify>=2.5.5 psutil>=5.8.0 sqlalchemy>=1.4 python-datauri>=1.1.0 diff --git a/screenshot.png b/screenshot.png index 5f31e70..6590cdc 100644 Binary files a/screenshot.png and b/screenshot.png differ
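Finally, the new notifyCallback helper in notifications.js relies on a consistent response shape from the native API: a 200 body that may carry a desc string, and error bodies with error.type and error.desc. A small Python sketch of a client reading responses by the same convention — the shape is inferred from notifications.js, not from a formal API spec, and the port and "key" field are assumptions:

    import requests

    def describe_response(resp):
        """Interpret a native-API response the way notifications.js does (inferred convention)."""
        body = resp.json()
        if resp.status_code == 200:
            return "Success: " + str(body.get("desc", body))    # success bodies may carry 'desc'
        err = body.get("error", {})                             # error bodies carry 'error.type' / 'error.desc'
        return f"Error {err.get('type', 'unknown')}: {err.get('desc', '')}"

    resp = requests.post(
        "http://localhost:42010/apis/mlj_1/newscrobble",
        json={"artists": ["Example Artist"], "title": "Example Title", "key": "YOUR_API_KEY"},
    )
    print(describe_response(resp))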