Mirror of https://github.com/krateng/maloja.git (synced 2025-04-23 03:50:28 +03:00)
Commit 3592571afd
dev/releases/3.1.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
minor_release_name: "Soyeon"
3.1.0:
  notes:
    - "[Architecture] Cleaned up legacy process control"
    - "[Architecture] Added proper exception framework to native API"
    - "[Feature] Implemented track title and artist name editing from web interface"
    - "[Feature] Implemented track and artist merging from web interface"
    - "[Feature] Implemented scrobble reparsing from web interface"
    - "[Performance] Adjusted cache sizes"
    - "[Logging] Added cache memory use information"

@@ -1,2 +0,0 @@
- "[Performance] Adjusted cache sizes"
- "[Logging] Added cache memory use information"
dev/testing/stresstest.py (new file, 43 lines)
@@ -0,0 +1,43 @@
import threading
import subprocess
import time
import requests
import os

ACTIVE = True

build_cmd = ["docker","build","-t","maloja",".","-f","Containerfile"]
subprocess.run(build_cmd)

common_prc = (
	["docker","run","--rm","-v",f"{os.path.abspath('./testdata')}:/mlj","-e","MALOJA_DATA_DIRECTORY=/mlj"],
	["maloja"]
)

servers = [
	{'port': 42010},
	{'port': 42011, 'extraargs':["--memory=1g"]},
	{'port': 42012, 'extraargs':["--memory=500m"]}
]
for s in servers:
	cmd = common_prc[0] + ["-p",f"{s['port']}:42010"] + s.get('extraargs',[]) + common_prc[1]
	print(cmd)
	t = threading.Thread(target=subprocess.run,args=(cmd,))
	s['thread'] = t
	t.daemon = True
	t.start()
	time.sleep(5)

time.sleep(5)
while ACTIVE:
	time.sleep(1)
	try:
		for s in servers:
			requests.get(f"http://localhost:{s['port']}")
	except KeyboardInterrupt:
		ACTIVE = False
	except Exception:
		pass

for s in servers:
	s['thread'].join()
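The loop above only fires requests and discards the results. A natural extension is to record status codes and latency per container so the memory-limited instances can be compared; the following is purely an illustrative sketch, not part of the commit:

import time
import requests

def poll_once(servers, timeout=5):
	# Hit each test container once and record status plus latency;
	# a failed request is recorded with status None.
	results = {}
	for s in servers:
		url = f"http://localhost:{s['port']}"
		start = time.monotonic()
		try:
			r = requests.get(url, timeout=timeout)
			results[s['port']] = (r.status_code, time.monotonic() - start)
		except requests.RequestException:
			results[s['port']] = (None, time.monotonic() - start)
	return results

# Example: one round of measurements for the three test servers.
# print(poll_once([{'port': 42010}, {'port': 42011}, {'port': 42012}]))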
@@ -1,4 +1,4 @@
# monkey patching
from . import monkey
from .pkg_global import monkey
# configuration before all else
from . import globalconf
from .pkg_global import conf
@@ -1,4 +1,177 @@
# make the package itself runnable with python -m maloja
import os
import signal
import subprocess
import time

from .proccontrol.control import main
main()
from setproctitle import setproctitle
from ipaddress import ip_address

from doreah.control import mainfunction
from doreah.io import col
from doreah.logging import log

from . import __pkginfo__ as pkginfo
from .pkg_global import conf
from .proccontrol import tasks
from .setup import setup
from .dev import generate



def print_header_info():
	print()
	#print("#####")
	print(col['yellow']("Maloja"),f"v{pkginfo.VERSION}")
	print(pkginfo.HOMEPAGE)
	#print("#####")
	print()



def get_instance():
	try:
		return int(subprocess.check_output(["pidof","maloja"]))
	except Exception:
		return None

def get_instance_supervisor():
	try:
		return int(subprocess.check_output(["pidof","maloja_supervisor"]))
	except Exception:
		return None

def restart():
	if stop():
		start()
	else:
		print(col["red"]("Could not stop Maloja!"))

def start():
	if get_instance_supervisor() is not None:
		print("Maloja is already running.")
	else:
		print_header_info()
		setup()
		try:
			#p = subprocess.Popen(["python3","-m","maloja.server"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
			sp = subprocess.Popen(["python3","-m","maloja","supervisor"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
			print(col["green"]("Maloja started!"))

			port = conf.malojaconfig["PORT"]

			print("Visit your server address (Port " + str(port) + ") to see your web interface. Visit /admin_setup to get started.")
			print("If you're installing this on your local machine, these links should get you there:")
			print("\t" + col["blue"]("http://localhost:" + str(port)))
			print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup"))
			return True
		except Exception:
			print("Error while starting Maloja.")
			return False


def stop():

	for attempt in [(signal.SIGTERM,2),(signal.SIGTERM,5),(signal.SIGKILL,3),(signal.SIGKILL,5)]:

		pid_sv = get_instance_supervisor()
		pid = get_instance()

		if pid is None and pid_sv is None:
			print("Maloja stopped!")
			return True

		if pid_sv is not None:
			os.kill(pid_sv,attempt[0])
		if pid is not None:
			os.kill(pid,attempt[0])

		time.sleep(attempt[1])

	return False


	print("Maloja stopped!")
	return True

def onlysetup():
	print_header_info()
	setup()
	print("Setup complete!")

def run_server():
	print_header_info()
	setup()
	setproctitle("maloja")
	from . import server
	server.run_server()

def run_supervisor():
	setproctitle("maloja_supervisor")
	while True:
		log("Maloja is not running, starting...",module="supervisor")
		try:
			process = subprocess.Popen(
				["python3", "-m", "maloja","run"],
				stdout=subprocess.DEVNULL,
				stderr=subprocess.DEVNULL,
			)
		except Exception as e:
			log("Error starting Maloja: " + str(e),module="supervisor")
		else:
			try:
				process.wait()
			except Exception as e:
				log("Maloja crashed: " + str(e),module="supervisor")

def debug():
	os.environ["MALOJA_DEV_MODE"] = 'true'
	conf.malojaconfig.load_environment()
	direct()

def print_info():
	print_header_info()
	print(col['lightblue']("Configuration Directory:"),conf.dir_settings['config'])
	print(col['lightblue']("Data Directory:         "),conf.dir_settings['state'])
	print(col['lightblue']("Log Directory:          "),conf.dir_settings['logs'])
	print(col['lightblue']("Network:                "),f"IPv{ip_address(conf.malojaconfig['host']).version}, Port {conf.malojaconfig['port']}")
	print(col['lightblue']("Timezone:               "),f"UTC{conf.malojaconfig['timezone']:+d}")
	print()
	print()

@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images'],shield=True)
def main(*args,**kwargs):

	actions = {
		# server
		"start":start,
		"restart":restart,
		"stop":stop,
		"run":run_server,
		"supervisor":run_supervisor,
		"debug":debug,
		"setup":onlysetup,
		# admin scripts
		"import":tasks.import_scrobbles,        # maloja import /x/y.csv
		"backup":tasks.backup,                  # maloja backup --targetfolder /x/y --include_images
		"generate":generate.generate_scrobbles, # maloja generate 400
		"export":tasks.export,                  # maloja export
		# aux
		"info":print_info
	}

	if "version" in kwargs:
		print(info.VERSION)
		return True
	else:
		try:
			action, *args = args
			action = actions[action]
		except (ValueError, KeyError):
			print("Valid commands: " + " ".join(a for a in actions))
			return False

		return action(*args,**kwargs)
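The new stop() escalates through a fixed list of (signal, wait) attempts instead of sending a single SIGTERM as the old control script did. The same pattern, written out as a standalone helper for one PID (illustrative only, not part of the commit):

import os
import signal
import time

def stop_pid(pid, attempts=((signal.SIGTERM, 2), (signal.SIGTERM, 5), (signal.SIGKILL, 3), (signal.SIGKILL, 5))):
	# Try gentle termination first, then escalate to SIGKILL,
	# giving the process a grace period after every signal.
	for sig, wait in attempts:
		try:
			os.kill(pid, sig)
		except ProcessLookupError:
			return True  # process already gone
		time.sleep(wait)
		try:
			os.kill(pid, 0)  # signal 0 only checks whether the process still exists
		except ProcessLookupError:
			return True
	return False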
@@ -4,7 +4,7 @@
from doreah.keystore import KeyStore
from doreah.logging import log

from ..globalconf import data_dir
from ..pkg_global.conf import data_dir

apikeystore = KeyStore(file=data_dir['clients']("apikeys.yml"),save_endpoint="/apis/mlj_1/apikeys")

@@ -4,7 +4,7 @@ from .. import database
import datetime
from ._apikeys import apikeystore

from ..globalconf import malojaconfig
from ..pkg_global.conf import malojaconfig


class Listenbrainz(APIHandler):
@ -1,5 +1,6 @@
|
||||
import os
|
||||
import math
|
||||
import traceback
|
||||
|
||||
from bottle import response, static_file, request, FormsDict
|
||||
|
||||
@ -12,7 +13,7 @@ from nimrodel import Multi
|
||||
|
||||
|
||||
from .. import database
|
||||
from ..globalconf import malojaconfig, data_dir
|
||||
from ..pkg_global.conf import malojaconfig, data_dir
|
||||
|
||||
|
||||
|
||||
@ -39,15 +40,40 @@ api.__apipath__ = "mlj_1"
|
||||
|
||||
|
||||
errors = {
|
||||
database.MissingScrobbleParameters: lambda e: (400,{
|
||||
database.exceptions.MissingScrobbleParameters: lambda e: (400,{
|
||||
"status":"failure",
|
||||
"error":{
|
||||
'type':'missing_scrobble_data',
|
||||
'value':e.params,
|
||||
'desc':"A scrobble requires these parameters."
|
||||
'desc':"The scrobble is missing needed parameters."
|
||||
}
|
||||
}),
|
||||
Exception: lambda e: (500,{
|
||||
database.exceptions.MissingEntityParameter: lambda e: (400,{
|
||||
"status":"error",
|
||||
"error":{
|
||||
'type':'missing_entity_parameter',
|
||||
'value':None,
|
||||
'desc':"This API call is not valid without an entity (track or artist)."
|
||||
}
|
||||
}),
|
||||
database.exceptions.EntityExists: lambda e: (409,{
|
||||
"status":"failure",
|
||||
"error":{
|
||||
'type':'entity_exists',
|
||||
'value':e.entitydict,
|
||||
'desc':"This entity already exists in the database. Consider merging instead."
|
||||
}
|
||||
}),
|
||||
database.exceptions.DatabaseNotBuilt: lambda e: (503,{
|
||||
"status":"error",
|
||||
"error":{
|
||||
'type':'server_not_ready',
|
||||
'value':'db_upgrade',
|
||||
'desc':"The database is being upgraded. Please try again later."
|
||||
}
|
||||
}),
|
||||
# for http errors, use their status code
|
||||
Exception: lambda e: ((e.status_code if hasattr(e,'statuscode') else 500),{
|
||||
"status":"failure",
|
||||
"error":{
|
||||
'type':'unknown_error',
|
||||
@ -57,6 +83,21 @@ errors = {
|
||||
})
|
||||
}
|
||||
|
||||
def catch_exceptions(func):
|
||||
def protector(*args,**kwargs):
|
||||
try:
|
||||
return func(*args,**kwargs)
|
||||
except Exception as e:
|
||||
print(traceback.format_exc())
|
||||
for etype in errors:
|
||||
if isinstance(e,etype):
|
||||
errorhandling = errors[etype](e)
|
||||
response.status = errorhandling[0]
|
||||
return errorhandling[1]
|
||||
|
||||
protector.__doc__ = func.__doc__
|
||||
protector.__annotations__ = func.__annotations__
|
||||
return protector
|
||||
|
||||
|
||||
def add_common_args_to_docstring(filterkeys=False,limitkeys=False,delimitkeys=False,amountkeys=False):
|
||||
@ -94,6 +135,7 @@ def add_common_args_to_docstring(filterkeys=False,limitkeys=False,delimitkeys=Fa
|
||||
|
||||
|
||||
@api.get("test")
|
||||
@catch_exceptions
|
||||
def test_server(key=None):
|
||||
"""Pings the server. If an API key is supplied, the server will respond with 200
|
||||
if the key is correct and 403 if it isn't. If no key is supplied, the server will
|
||||
@ -119,6 +161,7 @@ def test_server(key=None):
|
||||
|
||||
|
||||
@api.get("serverinfo")
|
||||
@catch_exceptions
|
||||
def server_info():
|
||||
"""Returns basic information about the server.
|
||||
|
||||
@ -141,6 +184,7 @@ def server_info():
|
||||
|
||||
|
||||
@api.get("scrobbles")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,amountkeys=True)
|
||||
def get_scrobbles_external(**keys):
|
||||
"""Returns a list of scrobbles.
|
||||
@ -158,11 +202,13 @@ def get_scrobbles_external(**keys):
|
||||
if k_amount.get('perpage') is not math.inf: result = result[:k_amount.get('perpage')]
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
@api.get("numscrobbles")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,amountkeys=True)
|
||||
def get_scrobbles_num_external(**keys):
|
||||
"""Returns amount of scrobbles.
|
||||
@ -176,12 +222,14 @@ def get_scrobbles_num_external(**keys):
|
||||
result = database.get_scrobbles_num(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"amount":result
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.get("tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
def get_tracks_external(**keys):
|
||||
"""Returns all tracks (optionally of an artist).
|
||||
@ -195,12 +243,14 @@ def get_tracks_external(**keys):
|
||||
result = database.get_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.get("artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring()
|
||||
def get_artists_external():
|
||||
"""Returns all artists.
|
||||
@ -210,6 +260,7 @@ def get_artists_external():
|
||||
result = database.get_artists()
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
@ -218,6 +269,7 @@ def get_artists_external():
|
||||
|
||||
|
||||
@api.get("charts/artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True)
|
||||
def get_charts_artists_external(**keys):
|
||||
"""Returns artist charts
|
||||
@ -230,12 +282,14 @@ def get_charts_artists_external(**keys):
|
||||
result = database.get_charts_artists(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.get("charts/tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True)
|
||||
def get_charts_tracks_external(**keys):
|
||||
"""Returns track charts
|
||||
@ -248,6 +302,7 @@ def get_charts_tracks_external(**keys):
|
||||
result = database.get_charts_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
@ -255,6 +310,7 @@ def get_charts_tracks_external(**keys):
|
||||
|
||||
|
||||
@api.get("pulse")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,delimitkeys=True,amountkeys=True)
|
||||
def get_pulse_external(**keys):
|
||||
"""Returns amounts of scrobbles in specified time frames
|
||||
@ -267,6 +323,7 @@ def get_pulse_external(**keys):
|
||||
results = database.get_pulse(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
@ -274,6 +331,7 @@ def get_pulse_external(**keys):
|
||||
|
||||
|
||||
@api.get("performance")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,delimitkeys=True,amountkeys=True)
|
||||
def get_performance_external(**keys):
|
||||
"""Returns artist's or track's rank in specified time frames
|
||||
@ -286,6 +344,7 @@ def get_performance_external(**keys):
|
||||
results = database.get_performance(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
@ -293,6 +352,7 @@ def get_performance_external(**keys):
|
||||
|
||||
|
||||
@api.get("top/artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True,delimitkeys=True)
|
||||
def get_top_artists_external(**keys):
|
||||
"""Returns respective number 1 artists in specified time frames
|
||||
@ -305,6 +365,7 @@ def get_top_artists_external(**keys):
|
||||
results = database.get_top_artists(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
@ -312,6 +373,7 @@ def get_top_artists_external(**keys):
|
||||
|
||||
|
||||
@api.get("top/tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True,delimitkeys=True)
|
||||
def get_top_tracks_external(**keys):
|
||||
"""Returns respective number 1 tracks in specified time frames
|
||||
@ -326,6 +388,7 @@ def get_top_tracks_external(**keys):
|
||||
results = database.get_top_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
@ -333,6 +396,7 @@ def get_top_tracks_external(**keys):
|
||||
|
||||
|
||||
@api.get("artistinfo")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
def artist_info_external(**keys):
|
||||
"""Returns information about an artist
|
||||
@ -347,8 +411,9 @@ def artist_info_external(**keys):
|
||||
|
||||
|
||||
@api.get("trackinfo")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
def track_info_external(artist:Multi[str],**keys):
|
||||
def track_info_external(artist:Multi[str]=[],**keys):
|
||||
"""Returns information about a track
|
||||
|
||||
:return: track (Mapping), scrobbles (Integer), position (Integer), medals (Mapping), certification (String), topweeks (Integer)
|
||||
@ -365,6 +430,7 @@ def track_info_external(artist:Multi[str],**keys):
|
||||
|
||||
@api.post("newscrobble")
|
||||
@authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
|
||||
@catch_exceptions
|
||||
def post_scrobble(
|
||||
artist:Multi=None,
|
||||
artists:list=[],
|
||||
@ -406,46 +472,41 @@ def post_scrobble(
|
||||
# for logging purposes, don't pass values that we didn't actually supply
|
||||
rawscrobble = {k:rawscrobble[k] for k in rawscrobble if rawscrobble[k]}
|
||||
|
||||
try:
|
||||
result = database.incoming_scrobble(
|
||||
rawscrobble,
|
||||
client='browser' if auth_result.get('doreah_native_auth_check') else auth_result.get('client'),
|
||||
api='native/v1',
|
||||
fix=(nofix is None)
|
||||
)
|
||||
|
||||
responsedict = {
|
||||
'status': 'success',
|
||||
'track': {
|
||||
'artists':result['track']['artists'],
|
||||
'title':result['track']['title']
|
||||
}
|
||||
}
|
||||
if extra_kwargs:
|
||||
responsedict['warnings'] = [
|
||||
{'type':'invalid_keyword_ignored','value':k,
|
||||
'desc':"This key was not recognized by the server and has been discarded."}
|
||||
for k in extra_kwargs
|
||||
]
|
||||
if artist and artists:
|
||||
responsedict['warnings'] = [
|
||||
{'type':'mixed_schema','value':['artist','artists'],
|
||||
'desc':"These two fields are meant as alternative methods to submit information. Use of both is discouraged, but works at the moment."}
|
||||
]
|
||||
return responsedict
|
||||
except Exception as e:
|
||||
for etype in errors:
|
||||
if isinstance(e,etype):
|
||||
errorhandling = errors[etype](e)
|
||||
response.status = errorhandling[0]
|
||||
return errorhandling[1]
|
||||
result = database.incoming_scrobble(
|
||||
rawscrobble,
|
||||
client='browser' if auth_result.get('doreah_native_auth_check') else auth_result.get('client'),
|
||||
api='native/v1',
|
||||
fix=(nofix is None)
|
||||
)
|
||||
|
||||
responsedict = {
|
||||
'status': 'success',
|
||||
'track': {
|
||||
'artists':result['track']['artists'],
|
||||
'title':result['track']['title']
|
||||
},
|
||||
'desc':f"Scrobbled {result['track']['title']} by {', '.join(result['track']['artists'])}"
|
||||
}
|
||||
if extra_kwargs:
|
||||
responsedict['warnings'] = [
|
||||
{'type':'invalid_keyword_ignored','value':k,
|
||||
'desc':"This key was not recognized by the server and has been discarded."}
|
||||
for k in extra_kwargs
|
||||
]
|
||||
if artist and artists:
|
||||
responsedict['warnings'] = [
|
||||
{'type':'mixed_schema','value':['artist','artists'],
|
||||
'desc':"These two fields are meant as alternative methods to submit information. Use of both is discouraged, but works at the moment."}
|
||||
]
|
||||
return responsedict
|
||||
|
||||
|
||||
|
||||
|
||||
@api.post("importrules")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def import_rulemodule(**keys):
|
||||
"""Internal Use Only"""
|
||||
filename = keys.get("filename")
|
||||
@ -464,6 +525,7 @@ def import_rulemodule(**keys):
|
||||
|
||||
@api.post("rebuild")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def rebuild(**keys):
|
||||
"""Internal Use Only"""
|
||||
log("Database rebuild initiated!")
|
||||
@ -480,6 +542,7 @@ def rebuild(**keys):
|
||||
|
||||
|
||||
@api.get("search")
|
||||
@catch_exceptions
|
||||
def search(**keys):
|
||||
"""Internal Use Only"""
|
||||
query = keys.get("query")
|
||||
@ -501,17 +564,19 @@ def search(**keys):
|
||||
artists_result = []
|
||||
for a in artists:
|
||||
result = {
|
||||
'name': a,
|
||||
'artist': a,
|
||||
'link': "/artist?" + compose_querystring(internal_to_uri({"artist": a})),
|
||||
'image': images.get_artist_image(a)
|
||||
}
|
||||
result["image"] = images.get_artist_image(a)
|
||||
artists_result.append(result)
|
||||
|
||||
tracks_result = []
|
||||
for t in tracks:
|
||||
result = t
|
||||
result["link"] = "/track?" + compose_querystring(internal_to_uri({"track":t}))
|
||||
result["image"] = images.get_track_image(t)
|
||||
result = {
|
||||
'track': t,
|
||||
'link': "/track?" + compose_querystring(internal_to_uri({"track":t})),
|
||||
'image': images.get_track_image(t)
|
||||
}
|
||||
tracks_result.append(result)
|
||||
|
||||
return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]}
|
||||
@ -519,6 +584,7 @@ def search(**keys):
|
||||
|
||||
@api.post("addpicture")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def add_picture(b64,artist:Multi=[],title=None):
|
||||
"""Internal Use Only"""
|
||||
keys = FormsDict()
|
||||
@ -532,6 +598,7 @@ def add_picture(b64,artist:Multi=[],title=None):
|
||||
|
||||
@api.post("newrule")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def newrule(**keys):
|
||||
"""Internal Use Only"""
|
||||
pass
|
||||
@ -542,18 +609,21 @@ def newrule(**keys):
|
||||
|
||||
@api.post("settings")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def set_settings(**keys):
|
||||
"""Internal Use Only"""
|
||||
malojaconfig.update(keys)
|
||||
|
||||
@api.post("apikeys")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def set_apikeys(**keys):
|
||||
"""Internal Use Only"""
|
||||
apikeystore.update(keys)
|
||||
|
||||
@api.post("import")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def import_scrobbles(identifier):
|
||||
"""Internal Use Only"""
|
||||
from ..thirdparty import import_scrobbles
|
||||
@ -561,6 +631,7 @@ def import_scrobbles(identifier):
|
||||
|
||||
@api.get("backup")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def get_backup(**keys):
|
||||
"""Internal Use Only"""
|
||||
from ..proccontrol.tasks.backup import backup
|
||||
@ -573,6 +644,7 @@ def get_backup(**keys):
|
||||
|
||||
@api.get("export")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def get_export(**keys):
|
||||
"""Internal Use Only"""
|
||||
from ..proccontrol.tasks.export import export
|
||||
@ -586,6 +658,71 @@ def get_export(**keys):
|
||||
|
||||
@api.post("delete_scrobble")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def delete_scrobble(timestamp):
|
||||
"""Internal Use Only"""
|
||||
database.remove_scrobble(timestamp)
|
||||
result = database.remove_scrobble(timestamp)
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"Scrobble was deleted!"
|
||||
}
|
||||
|
||||
|
||||
@api.post("edit_artist")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def edit_artist(id,name):
|
||||
"""Internal Use Only"""
|
||||
result = database.edit_artist(id,name)
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("edit_track")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def edit_track(id,title):
|
||||
"""Internal Use Only"""
|
||||
result = database.edit_track(id,{'title':title})
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
|
||||
@api.post("merge_tracks")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def merge_tracks(target_id,source_ids):
|
||||
"""Internal Use Only"""
|
||||
result = database.merge_tracks(target_id,source_ids)
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("merge_artists")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def merge_artists(target_id,source_ids):
|
||||
"""Internal Use Only"""
|
||||
result = database.merge_artists(target_id,source_ids)
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("reparse_scrobble")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def reparse_scrobble(timestamp):
|
||||
"""Internal Use Only"""
|
||||
result = database.reparse_scrobble(timestamp)
|
||||
if result:
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"Scrobble was reparsed!",
|
||||
"scrobble":result
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"status":"no_operation",
|
||||
"desc":"The scrobble was not changed."
|
||||
}
|
||||
|
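The endpoints above are what the new web-interface editing, merging and reparsing features talk to. A hedged sketch of how an external client might call them: the /apis/mlj_1 prefix matches the apikeystore save_endpoint earlier in this diff, but the host, passing the API key as "key", and the use of JSON bodies are assumptions:

import requests

BASE = "http://localhost:42010/apis/mlj_1"  # assumed local test instance
API_KEY = "your-api-key"                    # placeholder

# Rename a track (id/title as defined by edit_track above).
requests.post(f"{BASE}/edit_track", json={"id": 42, "title": "New Title", "key": API_KEY})

# Merge two artists into a target (as defined by merge_artists above).
requests.post(f"{BASE}/merge_artists", json={"target_id": 1, "source_ids": [2, 3], "key": API_KEY})

# Reparse a scrobble; the response reports "no_operation" if nothing changed.
r = requests.post(f"{BASE}/reparse_scrobble", json={"timestamp": 1656000000, "key": API_KEY})
print(r.json().get("status"))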
@@ -2,7 +2,7 @@ import re
import os
import csv

from .globalconf import data_dir, malojaconfig
from .pkg_global.conf import data_dir, malojaconfig

# need to do this as a class so it can retain loaded settings from file
# apparently this is not true
@ -1,5 +1,5 @@
|
||||
# server
|
||||
from bottle import request, response, FormsDict, HTTPError
|
||||
from bottle import request, response, FormsDict
|
||||
|
||||
# rest of the project
|
||||
from ..cleanup import CleanerAgent
|
||||
@ -7,12 +7,13 @@ from .. import images
|
||||
from ..malojatime import register_scrobbletime, time_stamps, ranges, alltime
|
||||
from ..malojauri import uri_to_internal, internal_to_uri, compose_querystring
|
||||
from ..thirdparty import proxy_scrobble_all
|
||||
from ..globalconf import data_dir, malojaconfig
|
||||
from ..pkg_global.conf import data_dir, malojaconfig
|
||||
from ..apis import apikeystore
|
||||
#db
|
||||
from . import sqldb
|
||||
from . import cached
|
||||
from . import dbcache
|
||||
from . import exceptions
|
||||
|
||||
# doreah toolkit
|
||||
from doreah.logging import log
|
||||
@ -42,23 +43,12 @@ dbstatus = {
|
||||
"rebuildinprogress":False,
|
||||
"complete":False # information is complete
|
||||
}
|
||||
class DatabaseNotBuilt(HTTPError):
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
status=503,
|
||||
body="The Maloja Database is being upgraded to Version 3. This could take quite a long time! (~ 2-5 minutes per 10 000 scrobbles)",
|
||||
headers={"Retry-After":120}
|
||||
)
|
||||
|
||||
|
||||
class MissingScrobbleParameters(Exception):
|
||||
def __init__(self,params=[]):
|
||||
self.params = params
|
||||
|
||||
|
||||
def waitfordb(func):
|
||||
def newfunc(*args,**kwargs):
|
||||
if not dbstatus['healthy']: raise DatabaseNotBuilt()
|
||||
if not dbstatus['healthy']: raise exceptions.DatabaseNotBuilt()
|
||||
return func(*args,**kwargs)
|
||||
return newfunc
|
||||
|
||||
@ -97,11 +87,45 @@ def incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None):
|
||||
missing.append(necessary_arg)
|
||||
if len(missing) > 0:
|
||||
log(f"Invalid Scrobble [Client: {client} | API: {api}]: {rawscrobble} ",color='red')
|
||||
raise MissingScrobbleParameters(missing)
|
||||
raise exceptions.MissingScrobbleParameters(missing)
|
||||
|
||||
|
||||
log(f"Incoming scrobble [Client: {client} | API: {api}]: {rawscrobble}")
|
||||
|
||||
scrobbledict = rawscrobble_to_scrobbledict(rawscrobble, fix, client)
|
||||
|
||||
sqldb.add_scrobble(scrobbledict,dbconn=dbconn)
|
||||
proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])
|
||||
|
||||
dbcache.invalidate_caches(scrobbledict['time'])
|
||||
|
||||
#return {"status":"success","scrobble":scrobbledict}
|
||||
return scrobbledict
|
||||
|
||||
|
||||
@waitfordb
|
||||
def reparse_scrobble(timestamp):
|
||||
log(f"Reparsing Scrobble {timestamp}")
|
||||
scrobble = sqldb.get_scrobble(timestamp=timestamp, include_internal=True)
|
||||
|
||||
if not scrobble or not scrobble['rawscrobble']:
|
||||
return False
|
||||
|
||||
newscrobble = rawscrobble_to_scrobbledict(scrobble['rawscrobble'])
|
||||
|
||||
track_id = sqldb.get_track_id(newscrobble['track'])
|
||||
|
||||
# check if id changed
|
||||
if sqldb.get_track_id(scrobble['track']) != track_id:
|
||||
sqldb.edit_scrobble(timestamp, {'track':newscrobble['track']})
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
return sqldb.get_scrobble(timestamp=timestamp)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
|
||||
# raw scrobble to processed info
|
||||
scrobbleinfo = {**rawscrobble}
|
||||
if fix:
|
||||
@ -129,26 +153,58 @@ def incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None):
|
||||
"rawscrobble":rawscrobble
|
||||
}
|
||||
|
||||
|
||||
sqldb.add_scrobble(scrobbledict,dbconn=dbconn)
|
||||
proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])
|
||||
|
||||
dbcache.invalidate_caches(scrobbledict['time'])
|
||||
|
||||
#return {"status":"success","scrobble":scrobbledict}
|
||||
return scrobbledict
|
||||
|
||||
|
||||
|
||||
@waitfordb
|
||||
def remove_scrobble(timestamp):
|
||||
log(f"Deleting Scrobble {timestamp}")
|
||||
result = sqldb.delete_scrobble(timestamp)
|
||||
dbcache.invalidate_caches(timestamp)
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def edit_artist(id,artistinfo):
|
||||
artist = sqldb.get_artist(id)
|
||||
log(f"Renaming {artist} to {artistinfo}")
|
||||
result = sqldb.edit_artist(id,artistinfo)
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def edit_track(id,trackinfo):
|
||||
track = sqldb.get_track(id)
|
||||
log(f"Renaming {track['title']} to {trackinfo['title']}")
|
||||
result = sqldb.edit_track(id,trackinfo)
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def merge_artists(target_id,source_ids):
|
||||
sources = [sqldb.get_artist(id) for id in source_ids]
|
||||
target = sqldb.get_artist(target_id)
|
||||
log(f"Merging {sources} into {target}")
|
||||
result = sqldb.merge_artists(target_id,source_ids)
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def merge_tracks(target_id,source_ids):
|
||||
sources = [sqldb.get_track(id) for id in source_ids]
|
||||
target = sqldb.get_track(target_id)
|
||||
log(f"Merging {sources} into {target}")
|
||||
result = sqldb.merge_tracks(target_id,source_ids)
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
|
||||
return result
|
||||
|
||||
|
||||
|
||||
@ -165,6 +221,7 @@ def get_scrobbles(dbconn=None,**keys):
|
||||
#return result[keys['page']*keys['perpage']:(keys['page']+1)*keys['perpage']]
|
||||
return list(reversed(result))
|
||||
|
||||
|
||||
@waitfordb
|
||||
def get_scrobbles_num(dbconn=None,**keys):
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
@ -242,6 +299,8 @@ def get_performance(dbconn=None,**keys):
|
||||
if c["artist"] == artist:
|
||||
rank = c["rank"]
|
||||
break
|
||||
else:
|
||||
raise exceptions.MissingEntityParameter()
|
||||
results.append({"range":rng,"rank":rank})
|
||||
|
||||
return results
|
||||
@ -281,8 +340,10 @@ def get_top_tracks(dbconn=None,**keys):
|
||||
def artist_info(dbconn=None,**keys):
|
||||
|
||||
artist = keys.get('artist')
|
||||
if artist is None: raise exceptions.MissingEntityParameter()
|
||||
|
||||
artist = sqldb.get_artist(sqldb.get_artist_id(artist,dbconn=dbconn),dbconn=dbconn)
|
||||
artist_id = sqldb.get_artist_id(artist,dbconn=dbconn)
|
||||
artist = sqldb.get_artist(artist_id,dbconn=dbconn)
|
||||
alltimecharts = get_charts_artists(timerange=alltime(),dbconn=dbconn)
|
||||
scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
|
||||
#we cant take the scrobble number from the charts because that includes all countas scrobbles
|
||||
@ -296,11 +357,12 @@ def artist_info(dbconn=None,**keys):
|
||||
"position":position,
|
||||
"associated":others,
|
||||
"medals":{
|
||||
"gold": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['gold']],
|
||||
"silver": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['silver']],
|
||||
"bronze": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['bronze']],
|
||||
"gold": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['gold']],
|
||||
"silver": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['silver']],
|
||||
"bronze": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['bronze']],
|
||||
},
|
||||
"topweeks":len([e for e in cached.weekly_topartists if e == artist])
|
||||
"topweeks":len([e for e in cached.weekly_topartists if e == artist_id]),
|
||||
"id":artist_id
|
||||
}
|
||||
except Exception:
|
||||
# if the artist isnt in the charts, they are not being credited and we
|
||||
@ -308,7 +370,13 @@ def artist_info(dbconn=None,**keys):
|
||||
replaceartist = sqldb.get_credited_artists(artist)[0]
|
||||
c = [e for e in alltimecharts if e["artist"] == replaceartist][0]
|
||||
position = c["rank"]
|
||||
return {"artist":artist,"replace":replaceartist,"scrobbles":scrobbles,"position":position}
|
||||
return {
|
||||
"artist":artist,
|
||||
"replace":replaceartist,
|
||||
"scrobbles":scrobbles,
|
||||
"position":position,
|
||||
"id":artist_id
|
||||
}
|
||||
|
||||
|
||||
|
||||
@ -317,8 +385,10 @@ def artist_info(dbconn=None,**keys):
|
||||
def track_info(dbconn=None,**keys):
|
||||
|
||||
track = keys.get('track')
|
||||
if track is None: raise exceptions.MissingEntityParameter()
|
||||
|
||||
track = sqldb.get_track(sqldb.get_track_id(track,dbconn=dbconn),dbconn=dbconn)
|
||||
track_id = sqldb.get_track_id(track,dbconn=dbconn)
|
||||
track = sqldb.get_track(track_id,dbconn=dbconn)
|
||||
alltimecharts = get_charts_tracks(timerange=alltime(),dbconn=dbconn)
|
||||
#scrobbles = get_scrobbles_num(track=track,timerange=alltime())
|
||||
|
||||
@ -337,12 +407,13 @@ def track_info(dbconn=None,**keys):
|
||||
"scrobbles":scrobbles,
|
||||
"position":position,
|
||||
"medals":{
|
||||
"gold": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['gold']],
|
||||
"silver": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['silver']],
|
||||
"bronze": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['bronze']],
|
||||
"gold": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['gold']],
|
||||
"silver": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['silver']],
|
||||
"bronze": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['bronze']],
|
||||
},
|
||||
"certification":cert,
|
||||
"topweeks":len([e for e in cached.weekly_toptracks if e == track])
|
||||
"topweeks":len([e for e in cached.weekly_toptracks if e == track_id]),
|
||||
"id":track_id
|
||||
}
|
||||
|
||||
|
||||
|
@@ -8,7 +8,7 @@ import csv
import os

from . import sqldb
from ..globalconf import data_dir
from ..pkg_global.conf import data_dir


def load_associated_rules():
@ -3,6 +3,7 @@
|
||||
|
||||
from doreah.regular import runyearly, rundaily
|
||||
from .. import database
|
||||
from . import sqldb
|
||||
from .. import malojatime as mjt
|
||||
|
||||
|
||||
@ -24,27 +25,29 @@ def update_medals():
|
||||
medals_artists.clear()
|
||||
medals_tracks.clear()
|
||||
|
||||
for year in mjt.ranges(step="year"):
|
||||
if year == mjt.thisyear(): break
|
||||
with sqldb.engine.begin() as conn:
|
||||
for year in mjt.ranges(step="year"):
|
||||
if year == mjt.thisyear(): break
|
||||
|
||||
charts_artists = database.get_charts_artists(timerange=year)
|
||||
charts_tracks = database.get_charts_tracks(timerange=year)
|
||||
charts_artists = sqldb.count_scrobbles_by_artist(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=conn)
|
||||
charts_tracks = sqldb.count_scrobbles_by_track(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=conn)
|
||||
|
||||
entry_artists = {'gold':[],'silver':[],'bronze':[]}
|
||||
entry_tracks = {'gold':[],'silver':[],'bronze':[]}
|
||||
medals_artists[year.desc()] = entry_artists
|
||||
medals_tracks[year.desc()] = entry_tracks
|
||||
entry_artists = {'gold':[],'silver':[],'bronze':[]}
|
||||
entry_tracks = {'gold':[],'silver':[],'bronze':[]}
|
||||
medals_artists[year.desc()] = entry_artists
|
||||
medals_tracks[year.desc()] = entry_tracks
|
||||
|
||||
for entry in charts_artists:
|
||||
if entry['rank'] == 1: entry_artists['gold'].append(entry['artist_id'])
|
||||
elif entry['rank'] == 2: entry_artists['silver'].append(entry['artist_id'])
|
||||
elif entry['rank'] == 3: entry_artists['bronze'].append(entry['artist_id'])
|
||||
else: break
|
||||
for entry in charts_tracks:
|
||||
if entry['rank'] == 1: entry_tracks['gold'].append(entry['track_id'])
|
||||
elif entry['rank'] == 2: entry_tracks['silver'].append(entry['track_id'])
|
||||
elif entry['rank'] == 3: entry_tracks['bronze'].append(entry['track_id'])
|
||||
else: break
|
||||
|
||||
for entry in charts_artists:
|
||||
if entry['rank'] == 1: entry_artists['gold'].append(entry['artist'])
|
||||
elif entry['rank'] == 2: entry_artists['silver'].append(entry['artist'])
|
||||
elif entry['rank'] == 3: entry_artists['bronze'].append(entry['artist'])
|
||||
else: break
|
||||
for entry in charts_tracks:
|
||||
if entry['rank'] == 1: entry_tracks['gold'].append(entry['track'])
|
||||
elif entry['rank'] == 2: entry_tracks['silver'].append(entry['track'])
|
||||
elif entry['rank'] == 3: entry_tracks['bronze'].append(entry['track'])
|
||||
else: break
|
||||
|
||||
|
||||
|
||||
@ -55,15 +58,17 @@ def update_weekly():
|
||||
weekly_topartists.clear()
|
||||
weekly_toptracks.clear()
|
||||
|
||||
for week in mjt.ranges(step="week"):
|
||||
if week == mjt.thisweek(): break
|
||||
with sqldb.engine.begin() as conn:
|
||||
for week in mjt.ranges(step="week"):
|
||||
if week == mjt.thisweek(): break
|
||||
|
||||
charts_artists = database.get_charts_artists(timerange=week)
|
||||
charts_tracks = database.get_charts_tracks(timerange=week)
|
||||
|
||||
for entry in charts_artists:
|
||||
if entry['rank'] == 1: weekly_topartists.append(entry['artist'])
|
||||
else: break
|
||||
for entry in charts_tracks:
|
||||
if entry['rank'] == 1: weekly_toptracks.append(entry['track'])
|
||||
else: break
|
||||
charts_artists = sqldb.count_scrobbles_by_artist(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=conn)
|
||||
charts_tracks = sqldb.count_scrobbles_by_track(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=conn)
|
||||
|
||||
for entry in charts_artists:
|
||||
if entry['rank'] == 1: weekly_topartists.append(entry['artist_id'])
|
||||
else: break
|
||||
for entry in charts_tracks:
|
||||
if entry['rank'] == 1: weekly_toptracks.append(entry['track_id'])
|
||||
else: break
|
||||
|
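The medal and weekly-top caches above now call the sqldb counting functions with resolve_ids=False and store bare artist/track IDs instead of resolved entity dicts, which is what the count_scrobbles_by_* changes later in this commit enable. A rough sketch of the branch that produces the two return shapes (the helper name is hypothetical, the logic mirrors the diff):

def format_chart_rows(rows, resolve_ids, entity_map=None):
	# Mirrors the branch added to count_scrobbles_by_artist: either resolve
	# each artist_id through a lookup map, or return the raw ID for cheap caching.
	if resolve_ids:
		return [{'scrobbles': r['count'], 'artist': entity_map[r['artist_id']]} for r in rows]
	return [{'scrobbles': r['count'], 'artist_id': r['artist_id']} for r in rows]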
@@ -9,7 +9,7 @@ import sys
from doreah.regular import runhourly
from doreah.logging import log

from ..globalconf import malojaconfig
from ..pkg_global.conf import malojaconfig

maloja/database/exceptions.py (new file, 29 lines)

@@ -0,0 +1,29 @@
from bottle import HTTPError

class EntityExists(Exception):
	def __init__(self,entitydict):
		self.entitydict = entitydict


class TrackExists(EntityExists):
	pass

class ArtistExists(EntityExists):
	pass


class DatabaseNotBuilt(HTTPError):
	def __init__(self):
		super().__init__(
			status=503,
			body="The Maloja Database is being upgraded to Version 3. This could take quite a long time! (~ 2-5 minutes per 10 000 scrobbles)",
			headers={"Retry-After":120}
		)


class MissingScrobbleParameters(Exception):
	def __init__(self,params=[]):
		self.params = params

class MissingEntityParameter(Exception):
	pass
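These exception classes are what the native API's new errors mapping earlier in this diff translates into HTTP responses. A condensed sketch of that translation step, assuming the same (status, body) convention and that the maloja package is importable:

from maloja.database import exceptions as exc

# Map exception types to (status code, JSON body) factories, most specific first.
error_handlers = {
	exc.MissingScrobbleParameters: lambda e: (400, {"status": "failure", "error": {"type": "missing_scrobble_data", "value": e.params}}),
	exc.EntityExists: lambda e: (409, {"status": "failure", "error": {"type": "entity_exists", "value": e.entitydict}}),
	Exception: lambda e: (500, {"status": "failure", "error": {"type": "unknown_error"}}),
}

def to_http(e):
	# First matching entry wins; Exception acts as the catch-all,
	# just like the loop inside catch_exceptions in the native API.
	for etype, handler in error_handlers.items():
		if isinstance(e, etype):
			return handler(e)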
@@ -3,7 +3,7 @@ from . sqldb import engine

from .dbcache import serialize

from ..globalconf import malojaconfig
from ..pkg_global.conf import malojaconfig

from doreah.logging import log

@ -5,8 +5,9 @@ import math
|
||||
from datetime import datetime
|
||||
from threading import Lock
|
||||
|
||||
from ..globalconf import data_dir
|
||||
from ..pkg_global.conf import data_dir
|
||||
from .dbcache import cached_wrapper, cached_wrapper_individual
|
||||
from . import exceptions as exc
|
||||
|
||||
from doreah.logging import log
|
||||
from doreah.regular import runhourly, runmonthly
|
||||
@ -114,8 +115,9 @@ def connection_provider(func):
|
||||
return func(*args,**kwargs)
|
||||
else:
|
||||
with engine.connect() as connection:
|
||||
kwargs['dbconn'] = connection
|
||||
return func(*args,**kwargs)
|
||||
with connection.begin():
|
||||
kwargs['dbconn'] = connection
|
||||
return func(*args,**kwargs)
|
||||
|
||||
wrapper.__innerfunc__ = func
|
||||
return wrapper
|
||||
@ -209,21 +211,22 @@ def artist_db_to_dict(row,dbconn=None):
|
||||
|
||||
|
||||
### DICT -> DB
|
||||
# These should return None when no data is in the dict so they can be used for update statements
|
||||
|
||||
def scrobble_dict_to_db(info,dbconn=None):
|
||||
return {
|
||||
"timestamp":info['time'],
|
||||
"origin":info['origin'],
|
||||
"duration":info['duration'],
|
||||
"track_id":get_track_id(info['track'],dbconn=dbconn),
|
||||
"extra":json.dumps(info.get('extra',{})),
|
||||
"rawscrobble":json.dumps(info.get('rawscrobble',{}))
|
||||
"timestamp":info.get('time'),
|
||||
"origin":info.get('origin'),
|
||||
"duration":info.get('duration'),
|
||||
"track_id":get_track_id(info.get('track'),dbconn=dbconn),
|
||||
"extra":json.dumps(info.get('extra')) if info.get('extra') else None,
|
||||
"rawscrobble":json.dumps(info.get('rawscrobble')) if info.get('rawscrobble') else None
|
||||
}
|
||||
|
||||
def track_dict_to_db(info,dbconn=None):
|
||||
return {
|
||||
"title":info['title'],
|
||||
"title_normalized":normalize_name(info['title']),
|
||||
"title":info.get('title'),
|
||||
"title_normalized":normalize_name(info.get('title','')) or None,
|
||||
"length":info.get('length')
|
||||
}
|
||||
|
||||
@ -277,13 +280,16 @@ def delete_scrobble(scrobble_id,dbconn=None):
|
||||
DB['scrobbles'].c.timestamp == scrobble_id
|
||||
)
|
||||
|
||||
dbconn.execute(op)
|
||||
result = dbconn.execute(op)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
### these will 'get' the ID of an entity, creating it if necessary
|
||||
|
||||
@cached_wrapper
|
||||
@connection_provider
|
||||
def get_track_id(trackdict,dbconn=None):
|
||||
def get_track_id(trackdict,create_new=True,dbconn=None):
|
||||
ntitle = normalize_name(trackdict['title'])
|
||||
artist_ids = [get_artist_id(a,dbconn=dbconn) for a in trackdict['artists']]
|
||||
artist_ids = list(set(artist_ids))
|
||||
@ -313,6 +319,8 @@ def get_track_id(trackdict,dbconn=None):
|
||||
#print("ID for",trackdict['title'],"was",row[0])
|
||||
return row.id
|
||||
|
||||
if not create_new: return None
|
||||
|
||||
|
||||
op = DB['tracks'].insert().values(
|
||||
**track_dict_to_db(trackdict,dbconn=dbconn)
|
||||
@ -356,6 +364,137 @@ def get_artist_id(artistname,create_new=True,dbconn=None):
|
||||
return result.inserted_primary_key[0]
|
||||
|
||||
|
||||
### Edit existing
|
||||
|
||||
|
||||
@connection_provider
|
||||
def edit_scrobble(scrobble_id,scrobbleupdatedict,dbconn=None):
|
||||
|
||||
dbentry = scrobble_dict_to_db(scrobbleupdatedict,dbconn=dbconn)
|
||||
dbentry = {k:v for k,v in dbentry.items() if v}
|
||||
|
||||
print("Updating scrobble",dbentry)
|
||||
|
||||
with SCROBBLE_LOCK:
|
||||
|
||||
op = DB['scrobbles'].update().where(
|
||||
DB['scrobbles'].c.timestamp == scrobble_id
|
||||
).values(
|
||||
**dbentry
|
||||
)
|
||||
|
||||
dbconn.execute(op)
|
||||
|
||||
|
||||
@connection_provider
|
||||
def edit_artist(id,artistupdatedict,dbconn=None):
|
||||
|
||||
artist = get_artist(id)
|
||||
changedartist = artistupdatedict # well
|
||||
|
||||
dbentry = artist_dict_to_db(artistupdatedict,dbconn=dbconn)
|
||||
dbentry = {k:v for k,v in dbentry.items() if v}
|
||||
|
||||
existing_artist_id = get_artist_id(changedartist,create_new=False,dbconn=dbconn)
|
||||
if existing_artist_id not in (None,id):
|
||||
raise exc.ArtistExists(changedartist)
|
||||
|
||||
op = DB['artists'].update().where(
|
||||
DB['artists'].c.id==id
|
||||
).values(
|
||||
**dbentry
|
||||
)
|
||||
result = dbconn.execute(op)
|
||||
|
||||
return True
|
||||
|
||||
@connection_provider
|
||||
def edit_track(id,trackupdatedict,dbconn=None):
|
||||
|
||||
track = get_track(id,dbconn=dbconn)
|
||||
changedtrack = {**track,**trackupdatedict}
|
||||
|
||||
dbentry = track_dict_to_db(trackupdatedict,dbconn=dbconn)
|
||||
dbentry = {k:v for k,v in dbentry.items() if v}
|
||||
|
||||
existing_track_id = get_track_id(changedtrack,create_new=False,dbconn=dbconn)
|
||||
if existing_track_id not in (None,id):
|
||||
raise exc.TrackExists(changedtrack)
|
||||
|
||||
op = DB['tracks'].update().where(
|
||||
DB['tracks'].c.id==id
|
||||
).values(
|
||||
**dbentry
|
||||
)
|
||||
result = dbconn.execute(op)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
### Merge
|
||||
|
||||
@connection_provider
|
||||
def merge_tracks(target_id,source_ids,dbconn=None):
|
||||
|
||||
op = DB['scrobbles'].update().where(
|
||||
DB['scrobbles'].c.track_id.in_(source_ids)
|
||||
).values(
|
||||
track_id=target_id
|
||||
)
|
||||
result = dbconn.execute(op)
|
||||
clean_db(dbconn=dbconn)
|
||||
|
||||
return True
|
||||
|
||||
@connection_provider
|
||||
def merge_artists(target_id,source_ids,dbconn=None):
|
||||
|
||||
# some tracks could already have multiple of the to be merged artists
|
||||
|
||||
# find literally all tracksartist entries that have any of the artists involved
|
||||
op = DB['trackartists'].select().where(
|
||||
DB['trackartists'].c.artist_id.in_(source_ids + [target_id])
|
||||
)
|
||||
result = dbconn.execute(op)
|
||||
|
||||
track_ids = set(row.track_id for row in result)
|
||||
|
||||
# now just delete them all lmao
|
||||
op = DB['trackartists'].delete().where(
|
||||
#DB['trackartists'].c.track_id.in_(track_ids),
|
||||
DB['trackartists'].c.artist_id.in_(source_ids + [target_id]),
|
||||
)
|
||||
|
||||
result = dbconn.execute(op)
|
||||
|
||||
# now add back the real new artist
|
||||
op = DB['trackartists'].insert().values([
|
||||
{'track_id':track_id,'artist_id':target_id}
|
||||
for track_id in track_ids
|
||||
])
|
||||
|
||||
result = dbconn.execute(op)
|
||||
|
||||
# tracks_artists = {}
|
||||
# for row in result:
|
||||
# tracks_artists.setdefault(row.track_id,[]).append(row.artist_id)
|
||||
#
|
||||
# multiple = {k:v for k,v in tracks_artists.items() if len(v) > 1}
|
||||
#
|
||||
# print([(get_track(k),[get_artist(a) for a in v]) for k,v in multiple.items()])
|
||||
#
|
||||
# op = DB['trackartists'].update().where(
|
||||
# DB['trackartists'].c.artist_id.in_(source_ids)
|
||||
# ).values(
|
||||
# artist_id=target_id
|
||||
# )
|
||||
# result = dbconn.execute(op)
|
||||
|
||||
# this could have created duplicate tracks
|
||||
merge_duplicate_tracks(artist_id=target_id,dbconn=dbconn)
|
||||
clean_db(dbconn=dbconn)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@ -488,7 +627,7 @@ def get_tracks(dbconn=None):
|
||||
|
||||
@cached_wrapper
|
||||
@connection_provider
|
||||
def count_scrobbles_by_artist(since,to,dbconn=None):
|
||||
def count_scrobbles_by_artist(since,to,resolve_ids=True,dbconn=None):
|
||||
jointable = sql.join(
|
||||
DB['scrobbles'],
|
||||
DB['trackartists'],
|
||||
@ -516,16 +655,18 @@ def count_scrobbles_by_artist(since,to,dbconn=None):
|
||||
).order_by(sql.desc('count'))
|
||||
result = dbconn.execute(op).all()
|
||||
|
||||
|
||||
counts = [row.count for row in result]
|
||||
artists = get_artists_map([row.artist_id for row in result],dbconn=dbconn)
|
||||
result = [{'scrobbles':row.count,'artist':artists[row.artist_id]} for row in result]
|
||||
if resolve_ids:
|
||||
counts = [row.count for row in result]
|
||||
artists = get_artists_map([row.artist_id for row in result],dbconn=dbconn)
|
||||
result = [{'scrobbles':row.count,'artist':artists[row.artist_id]} for row in result]
|
||||
else:
|
||||
result = [{'scrobbles':row.count,'artist_id':row.artist_id} for row in result]
|
||||
result = rank(result,key='scrobbles')
|
||||
return result
|
||||
|
||||
@cached_wrapper
|
||||
@connection_provider
|
||||
def count_scrobbles_by_track(since,to,dbconn=None):
|
||||
def count_scrobbles_by_track(since,to,resolve_ids=True,dbconn=None):
|
||||
|
||||
|
||||
op = sql.select(
|
||||
@ -537,10 +678,12 @@ def count_scrobbles_by_track(since,to,dbconn=None):
|
||||
).group_by(DB['scrobbles'].c.track_id).order_by(sql.desc('count'))
|
||||
result = dbconn.execute(op).all()
|
||||
|
||||
|
||||
counts = [row.count for row in result]
|
||||
tracks = get_tracks_map([row.track_id for row in result],dbconn=dbconn)
|
||||
result = [{'scrobbles':row.count,'track':tracks[row.track_id]} for row in result]
|
||||
if resolve_ids:
|
||||
counts = [row.count for row in result]
|
||||
tracks = get_tracks_map([row.track_id for row in result],dbconn=dbconn)
|
||||
result = [{'scrobbles':row.count,'track':tracks[row.track_id]} for row in result]
|
||||
else:
|
||||
result = [{'scrobbles':row.count,'track_id':row.track_id} for row in result]
|
||||
result = rank(result,key='scrobbles')
|
||||
return result
|
||||
|
||||
@ -693,6 +836,17 @@ def get_artist(id,dbconn=None):
|
||||
return artist_db_to_dict(artistinfo,dbconn=dbconn)
|
||||
|
||||
|
||||
@cached_wrapper
|
||||
@connection_provider
|
||||
def get_scrobble(timestamp, include_internal=False, dbconn=None):
|
||||
op = DB['scrobbles'].select().where(
|
||||
DB['scrobbles'].c.timestamp==timestamp
|
||||
)
|
||||
result = dbconn.execute(op).all()
|
||||
|
||||
scrobble = result[0]
|
||||
return scrobbles_db_to_dict(rows=[scrobble], include_internal=include_internal)[0]
|
||||
|
||||
@cached_wrapper
|
||||
@connection_provider
|
||||
def search_artist(searchterm,dbconn=None):
|
||||
@ -717,38 +871,37 @@ def search_track(searchterm,dbconn=None):
|
||||
##### MAINTENANCE
|
||||
|
||||
@runhourly
|
||||
def clean_db():
|
||||
@connection_provider
|
||||
def clean_db(dbconn=None):
|
||||
|
||||
with SCROBBLE_LOCK:
|
||||
with engine.begin() as conn:
|
||||
log(f"Database Cleanup...")
|
||||
log(f"Database Cleanup...")
|
||||
|
||||
to_delete = [
|
||||
# tracks with no scrobbles (trackartist entries first)
|
||||
"from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))",
|
||||
"from tracks where id not in (select track_id from scrobbles)",
|
||||
# artists with no tracks
|
||||
"from artists where id not in (select artist_id from trackartists) and id not in (select target_artist from associated_artists)",
|
||||
# tracks with no artists (scrobbles first)
|
||||
"from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))",
|
||||
"from tracks where id not in (select track_id from trackartists)"
|
||||
]
|
||||
to_delete = [
|
||||
# tracks with no scrobbles (trackartist entries first)
|
||||
"from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))",
|
||||
"from tracks where id not in (select track_id from scrobbles)",
|
||||
# artists with no tracks
|
||||
"from artists where id not in (select artist_id from trackartists) and id not in (select target_artist from associated_artists)",
|
||||
# tracks with no artists (scrobbles first)
|
||||
"from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))",
|
||||
"from tracks where id not in (select track_id from trackartists)"
|
||||
]
|
||||
|
||||
for d in to_delete:
|
||||
selection = conn.execute(sql.text(f"select * {d}"))
|
||||
for row in selection.all():
|
||||
log(f"Deleting {row}")
|
||||
deletion = conn.execute(sql.text(f"delete {d}"))
|
||||
for d in to_delete:
|
||||
selection = dbconn.execute(sql.text(f"select * {d}"))
|
||||
for row in selection.all():
|
||||
log(f"Deleting {row}")
|
||||
deletion = dbconn.execute(sql.text(f"delete {d}"))
|
||||
|
||||
log("Database Cleanup complete!")
|
||||
log("Database Cleanup complete!")
|
||||
|
||||
|
||||
|
||||
#if a2+a1>0: log(f"Deleted {a2} tracks without scrobbles ({a1} track artist entries)")
|
||||
#if a2+a1>0: log(f"Deleted {a2} tracks without scrobbles ({a1} track artist entries)")
|
||||
|
||||
#if a3>0: log(f"Deleted {a3} artists without tracks")
|
||||
#if a3>0: log(f"Deleted {a3} artists without tracks")
|
||||
|
||||
#if a5+a4>0: log(f"Deleted {a5} tracks without artists ({a4} scrobbles)")
|
||||
#if a5+a4>0: log(f"Deleted {a5} tracks without artists ({a4} scrobbles)")
|
||||
|
||||
|
||||
|
||||
@ -769,6 +922,46 @@ def renormalize_names():
|
||||
rows = conn.execute(DB['artists'].update().where(DB['artists'].c.id == id).values(name_normalized=norm_target))
|
||||
|
||||
|
||||
@connection_provider
|
||||
def merge_duplicate_tracks(artist_id,dbconn=None):
|
||||
rows = dbconn.execute(
|
||||
DB['trackartists'].select().where(
|
||||
DB['trackartists'].c.artist_id == artist_id
|
||||
)
|
||||
)
|
||||
affected_tracks = [r.track_id for r in rows]
|
||||
|
||||
track_artists = {}
|
||||
rows = dbconn.execute(
|
||||
DB['trackartists'].select().where(
|
||||
DB['trackartists'].c.track_id.in_(affected_tracks)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
for row in rows:
|
||||
track_artists.setdefault(row.track_id,[]).append(row.artist_id)
|
||||
|
||||
artist_combos = {}
|
||||
for track_id in track_artists:
|
||||
artist_combos.setdefault(tuple(sorted(track_artists[track_id])),[]).append(track_id)
|
||||
|
||||
for c in artist_combos:
|
||||
if len(artist_combos[c]) > 1:
|
||||
track_identifiers = {}
|
||||
for track_id in artist_combos[c]:
|
||||
track_identifiers.setdefault(normalize_name(get_track(track_id)['title']),[]).append(track_id)
|
||||
for track in track_identifiers:
|
||||
if len(track_identifiers[track]) > 1:
|
||||
target,*src = track_identifiers[track]
|
||||
merge_tracks(target,src,dbconn=dbconn)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
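One structural change in the sqldb section above is that connection_provider now opens its fallback connection inside a transaction (with connection.begin()), so multi-statement operations such as the new merge functions commit or roll back as a unit. A reduced sketch of that decorator pattern; the engine here is a stand-in, Maloja builds its own in sqldb.py:

import sqlalchemy
from functools import wraps

engine = sqlalchemy.create_engine("sqlite://")  # stand-in engine for the sketch

def connection_provider(func):
	# Reuse a passed-in dbconn, or open a connection and wrap the call in a
	# transaction so partial writes (e.g. during merges) are rolled back.
	@wraps(func)
	def wrapper(*args, **kwargs):
		if kwargs.get('dbconn') is not None:
			return func(*args, **kwargs)
		with engine.connect() as connection:
			with connection.begin():
				kwargs['dbconn'] = connection
				return func(*args, **kwargs)
	return wrapper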
maloja/dev/__init__.py (new file, 2 lines)

@@ -0,0 +1,2 @@
### Subpackage that takes care of all things that concern the server process itself,
### e.g. analytics
@@ -1,5 +1,6 @@
import random
import datetime

from doreah.io import ask


@@ -66,10 +67,10 @@


def generate(n=200):
def generate_scrobbles(n=200):

	from ..database.sqldb import add_scrobbles

	from ...database.sqldb import add_scrobbles

	n = int(n)

	if ask("Generate random scrobbles?",default=False):
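The renamed generator is also wired into the new CLI dispatcher in __main__.py above (the comment there shows "maloja generate 400"). Calling it from Python would look roughly like this, assuming the maloja package is installed; the function still asks for interactive confirmation before writing:

from maloja.dev import generate

# Insert 400 randomly generated scrobbles into the database
# (generate_scrobbles() prompts via doreah's ask() before doing so).
generate.generate_scrobbles(400)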
@@ -2,11 +2,10 @@ import os

import cProfile, pstats


from doreah.logging import log
from doreah.timing import Clock

from ..globalconf import data_dir
from ..pkg_global.conf import data_dir


profiler = cProfile.Profile()
@@ -1,4 +1,4 @@
from .globalconf import data_dir, malojaconfig
from .pkg_global.conf import data_dir, malojaconfig
from . import thirdparty
from . import database

@@ -1,5 +1,5 @@
from . import filters
from ..globalconf import malojaconfig
from ..pkg_global.conf import malojaconfig

from .. import database, malojatime, images, malojauri, thirdparty, __pkginfo__
from ..database import jinjaview
@@ -3,7 +3,7 @@ from calendar import monthrange
from os.path import commonprefix
import math

from .globalconf import malojaconfig
from .pkg_global.conf import malojaconfig


OFFSET = malojaconfig["TIMEZONE"]
@@ -3,7 +3,7 @@ from doreah.configuration import Configuration
from doreah.configuration import types as tp


from .__pkginfo__ import VERSION
from ..__pkginfo__ import VERSION



@@ -311,7 +311,7 @@ config(
	auth={
		"multiuser":False,
		"cookieprefix":"maloja",
		"stylesheets":["/style.css"],
		"stylesheets":["/maloja.css"],
		"dbfile":data_dir['auth']("auth.ddb")
	},
	logging={
@ -1,140 +0,0 @@
|
||||
import subprocess
|
||||
from doreah import settings
|
||||
from doreah.control import mainfunction
|
||||
from doreah.io import col
|
||||
import os
|
||||
import signal
|
||||
from ipaddress import ip_address
|
||||
|
||||
from .setup import setup
|
||||
from . import tasks
|
||||
from .. import __pkginfo__ as info
|
||||
from .. import globalconf
|
||||
|
||||
|
||||
|
||||
def print_header_info():
|
||||
print()
|
||||
#print("#####")
|
||||
print(col['yellow']("Maloja"),"v" + info.VERSION)
|
||||
print(info.HOMEPAGE)
|
||||
#print("#####")
|
||||
print()
|
||||
|
||||
|
||||
|
||||
def getInstance():
|
||||
try:
|
||||
output = subprocess.check_output(["pidof","Maloja"])
|
||||
return int(output)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def getInstanceSupervisor():
|
||||
try:
|
||||
output = subprocess.check_output(["pidof","maloja_supervisor"])
|
||||
return int(output)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def restart():
|
||||
stop()
|
||||
start()
|
||||
|
||||
def start():
|
||||
if getInstanceSupervisor() is not None:
|
||||
print("Maloja is already running.")
|
||||
else:
|
||||
print_header_info()
|
||||
setup()
|
||||
try:
|
||||
#p = subprocess.Popen(["python3","-m","maloja.server"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
|
||||
sp = subprocess.Popen(["python3","-m","maloja.proccontrol.supervisor"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
|
||||
print(col["green"]("Maloja started!"))
|
||||
|
||||
port = globalconf.malojaconfig["PORT"]
|
||||
|
||||
print("Visit your server address (Port " + str(port) + ") to see your web interface. Visit /admin_setup to get started.")
|
||||
print("If you're installing this on your local machine, these links should get you there:")
|
||||
print("\t" + col["blue"]("http://localhost:" + str(port)))
|
||||
print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup"))
|
||||
return True
|
||||
except Exception:
|
||||
print("Error while starting Maloja.")
|
||||
return False
|
||||
|
||||
|
||||
def stop():
|
||||
|
||||
pid_sv = getInstanceSupervisor()
|
||||
if pid_sv is not None:
|
||||
os.kill(pid_sv,signal.SIGTERM)
|
||||
|
||||
pid = getInstance()
|
||||
if pid is not None:
|
||||
os.kill(pid,signal.SIGTERM)
|
||||
|
||||
if pid is None and pid_sv is None:
|
||||
return False
|
||||
|
||||
print("Maloja stopped!")
|
||||
return True
|
||||
|
||||
def onlysetup():
|
||||
print_header_info()
|
||||
setup()
|
||||
print("Setup complete!")
|
||||
|
||||
def direct():
|
||||
print_header_info()
|
||||
setup()
|
||||
from .. import server
|
||||
server.run_server()
|
||||
|
||||
def debug():
|
||||
os.environ["MALOJA_DEV_MODE"] = 'true'
|
||||
globalconf.malojaconfig.load_environment()
|
||||
direct()
|
||||
|
||||
def print_info():
|
||||
print_header_info()
|
||||
print(col['lightblue']("Configuration Directory:"),globalconf.dir_settings['config'])
|
||||
print(col['lightblue']("Data Directory: "),globalconf.dir_settings['state'])
|
||||
print(col['lightblue']("Log Directory: "),globalconf.dir_settings['logs'])
|
||||
print(col['lightblue']("Network: "),f"IPv{ip_address(globalconf.malojaconfig['host']).version}, Port {globalconf.malojaconfig['port']}")
|
||||
print(col['lightblue']("Timezone: "),f"UTC{globalconf.malojaconfig['timezone']:+d}")
|
||||
print()
|
||||
print()
|
||||
|
||||
@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images'],shield=True)
|
||||
def main(*args,**kwargs):
|
||||
|
||||
actions = {
|
||||
# server
|
||||
"start":start,
|
||||
"restart":restart,
|
||||
"stop":stop,
|
||||
"run":direct,
|
||||
"debug":debug,
|
||||
"setup":onlysetup,
|
||||
# admin scripts
|
||||
"import":tasks.import_scrobbles, # maloja import /x/y.csv
|
||||
"backup":tasks.backup, # maloja backup --targetfolder /x/y --include_images
|
||||
"generate":tasks.generate, # maloja generate 400
|
||||
"export":tasks.export, # maloja export
|
||||
# aux
|
||||
"info":print_info
|
||||
}
|
||||
|
||||
if "version" in kwargs:
|
||||
print(info.VERSION)
|
||||
return True
|
||||
else:
|
||||
try:
|
||||
action, *args = args
|
||||
action = actions[action]
|
||||
except (ValueError, KeyError):
|
||||
print("Valid commands: " + " ".join(a for a in actions))
|
||||
return False
|
||||
|
||||
return action(*args,**kwargs)
|
@ -1,33 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
|
||||
from ..globalconf import malojaconfig
|
||||
|
||||
import subprocess
|
||||
import setproctitle
|
||||
import signal
|
||||
from doreah.logging import log
|
||||
|
||||
|
||||
from .control import getInstance
|
||||
|
||||
|
||||
setproctitle.setproctitle("maloja_supervisor")
|
||||
|
||||
def start():
|
||||
try:
|
||||
return subprocess.Popen(
|
||||
["python3", "-m", "maloja","run"],
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
except e:
|
||||
log("Error starting Maloja: " + str(e),module="supervisor")
|
||||
|
||||
|
||||
|
||||
while True:
|
||||
log("Maloja is not running, starting...",module="supervisor")
|
||||
process = start()
|
||||
|
||||
process.wait()
|
@ -1,4 +1,3 @@
|
||||
from .import_scrobbles import import_scrobbles
|
||||
from .backup import backup
|
||||
from .generate import generate
|
||||
from .export import export # read that line out loud
|
||||
|
@ -2,7 +2,7 @@ import tarfile
|
||||
import time
|
||||
import glob
|
||||
import os
|
||||
from ...globalconf import dir_settings
|
||||
from ...pkg_global.conf import dir_settings
|
||||
from pathlib import PurePath
|
||||
|
||||
from doreah.logging import log
|
||||
|
@ -4,7 +4,7 @@ import json, csv
|
||||
from doreah.io import col, ask, prompt
|
||||
|
||||
from ...cleanup import *
|
||||
from ...globalconf import data_dir
|
||||
from ...pkg_global.conf import data_dir
|
||||
|
||||
|
||||
c = CleanerAgent()
|
||||
|
@ -2,9 +2,7 @@
|
||||
import sys
|
||||
import os
|
||||
from threading import Thread
|
||||
import setproctitle
|
||||
from importlib import resources
|
||||
from css_html_js_minify import html_minify, css_minify
|
||||
import datauri
|
||||
import time
|
||||
|
||||
@ -22,12 +20,12 @@ from . import database
|
||||
from .database.jinjaview import JinjaDBConnection
|
||||
from .images import resolve_track_image, resolve_artist_image
|
||||
from .malojauri import uri_to_internal, remove_identical
|
||||
from .globalconf import malojaconfig, data_dir
|
||||
from .pkg_global.conf import malojaconfig, data_dir
|
||||
from .jinjaenv.context import jinja_environment
|
||||
from .apis import init_apis, apikeystore
|
||||
|
||||
|
||||
from .proccontrol.profiler import profile
|
||||
from .dev.profiler import profile
|
||||
|
||||
|
||||
######
|
||||
@ -43,48 +41,6 @@ BaseRequest.MEMFILE_MAX = 15 * 1024 * 1024
|
||||
|
||||
webserver = Bottle()
|
||||
|
||||
#rename process, this is now required for the daemon manager to work
|
||||
setproctitle.setproctitle("Maloja")
|
||||
|
||||
|
||||
######
|
||||
### CSS
|
||||
#####
|
||||
|
||||
|
||||
def generate_css():
|
||||
cssstr = ""
|
||||
with resources.files('maloja') / 'web' / 'static' as staticfolder:
|
||||
|
||||
for file in os.listdir(os.path.join(staticfolder,"css")):
|
||||
if file.endswith(".css"):
|
||||
with open(os.path.join(staticfolder,"css",file),"r") as filed:
|
||||
cssstr += filed.read()
|
||||
|
||||
for file in os.listdir(data_dir['css']()):
|
||||
if file.endswith(".css"):
|
||||
with open(os.path.join(data_dir['css'](file)),"r") as filed:
|
||||
cssstr += filed.read()
|
||||
|
||||
cssstr = css_minify(cssstr)
|
||||
return cssstr
|
||||
|
||||
css = generate_css()
|
||||
|
||||
|
||||
|
||||
######
|
||||
### MINIFY
|
||||
#####
|
||||
|
||||
def clean_html(inp):
|
||||
return inp
|
||||
|
||||
#if malojaconfig["DEV_MODE"]: return inp
|
||||
#else: return html_minify(inp)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@ -204,13 +160,6 @@ def static_image(pth):
|
||||
return resp
|
||||
|
||||
|
||||
@webserver.route("/style.css")
|
||||
def get_css():
|
||||
response.content_type = 'text/css'
|
||||
if malojaconfig["DEV_MODE"]: return generate_css()
|
||||
else: return css
|
||||
|
||||
|
||||
@webserver.route("/login")
|
||||
def login():
|
||||
return auth.get_login_page()
|
||||
@ -219,7 +168,7 @@ def login():
|
||||
@webserver.route("/<name>.<ext>")
|
||||
@webserver.route("/media/<name>.<ext>")
|
||||
def static(name,ext):
|
||||
assert ext in ["txt","ico","jpeg","jpg","png","less","js","ttf"]
|
||||
assert ext in ["txt","ico","jpeg","jpg","png","less","js","ttf","css"]
|
||||
with resources.files('maloja') / 'web' / 'static' as staticfolder:
|
||||
response = static_file(ext + "/" + name + "." + ext,root=staticfolder)
|
||||
response.set_header("Cache-Control", "public, max-age=3600")
|
||||
@ -263,7 +212,7 @@ def jinja_page(name):
|
||||
|
||||
if malojaconfig["DEV_MODE"]: jinja_environment.cache.clear()
|
||||
|
||||
return clean_html(res)
|
||||
return res
|
||||
|
||||
@webserver.route("/<name:re:admin.*>")
|
||||
@auth.authenticated
|
||||
|
@ -1,10 +1,12 @@
|
||||
from importlib import resources
|
||||
from distutils import dir_util
|
||||
from doreah.io import col, ask, prompt
|
||||
from doreah import auth
|
||||
import os
|
||||
|
||||
from ..globalconf import data_dir, dir_settings, malojaconfig
|
||||
from importlib import resources
|
||||
from distutils import dir_util
|
||||
|
||||
from doreah.io import col, ask, prompt
|
||||
from doreah import auth
|
||||
|
||||
from .pkg_global.conf import data_dir, dir_settings, malojaconfig
|
||||
|
||||
|
||||
|
||||
@ -48,7 +50,7 @@ def setup():
|
||||
|
||||
|
||||
# OWN API KEY
|
||||
from ..apis import apikeystore
|
||||
from .apis import apikeystore
|
||||
if len(apikeystore) == 0:
|
||||
answer = ask("Do you want to set up a key to enable scrobbling? Your scrobble extension needs that key so that only you can scrobble tracks to your database.",default=True,skip=SKIP)
|
||||
if answer:
|
2
maloja/thirdparty/__init__.py
vendored
2
maloja/thirdparty/__init__.py
vendored
@ -13,7 +13,7 @@ import base64
|
||||
from doreah.logging import log
|
||||
from threading import BoundedSemaphore
|
||||
|
||||
from ..globalconf import malojaconfig
|
||||
from ..pkg_global.conf import malojaconfig
|
||||
from .. import database
|
||||
|
||||
|
||||
|
@ -7,7 +7,7 @@ import csv
|
||||
from doreah.logging import log
|
||||
from doreah.io import col
|
||||
|
||||
from .globalconf import data_dir, dir_settings
|
||||
from .pkg_global.conf import data_dir, dir_settings
|
||||
from .apis import _apikeys
|
||||
|
||||
|
||||
|
@ -37,7 +37,6 @@
|
||||
|
||||
</span>
|
||||
<br/><br/>
|
||||
<span id="notification"></span>
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
@ -12,7 +12,7 @@
|
||||
<meta name="color-scheme" content="dark" />
|
||||
<meta name="darkreader" content="wat" />
|
||||
|
||||
<link rel="stylesheet" href="/style.css" />
|
||||
<link rel="stylesheet" href="/maloja.css" />
|
||||
<link rel="stylesheet" href="/static/css/themes/{{ settings.theme }}.css" />
|
||||
|
||||
<script src="/search.js"></script>
|
||||
@ -50,9 +50,7 @@
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
||||
<div id="notification_area">
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
<div class="footer">
|
||||
@ -84,9 +82,16 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<a href="/admin_overview"><div title="Server Administration" id="settingsicon" class="clickable_icon">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24"><path d="M17 12.645v-2.289c-1.17-.417-1.907-.533-2.28-1.431-.373-.9.07-1.512.6-2.625l-1.618-1.619c-1.105.525-1.723.974-2.626.6-.9-.374-1.017-1.117-1.431-2.281h-2.29c-.412 1.158-.53 1.907-1.431 2.28h-.001c-.9.374-1.51-.07-2.625-.6l-1.617 1.619c.527 1.11.973 1.724.6 2.625-.375.901-1.123 1.019-2.281 1.431v2.289c1.155.412 1.907.531 2.28 1.431.376.908-.081 1.534-.6 2.625l1.618 1.619c1.107-.525 1.724-.974 2.625-.6h.001c.9.373 1.018 1.118 1.431 2.28h2.289c.412-1.158.53-1.905 1.437-2.282h.001c.894-.372 1.501.071 2.619.602l1.618-1.619c-.525-1.107-.974-1.723-.601-2.625.374-.899 1.126-1.019 2.282-1.43zm-8.5 1.689c-1.564 0-2.833-1.269-2.833-2.834s1.269-2.834 2.833-2.834 2.833 1.269 2.833 2.834-1.269 2.834-2.833 2.834zm15.5 4.205v-1.077c-.55-.196-.897-.251-1.073-.673-.176-.424.033-.711.282-1.236l-.762-.762c-.52.248-.811.458-1.235.283-.424-.175-.479-.525-.674-1.073h-1.076c-.194.545-.25.897-.674 1.073-.424.176-.711-.033-1.235-.283l-.762.762c.248.523.458.812.282 1.236-.176.424-.528.479-1.073.673v1.077c.544.193.897.25 1.073.673.177.427-.038.722-.282 1.236l.762.762c.521-.248.812-.458 1.235-.283.424.175.479.526.674 1.073h1.076c.194-.545.25-.897.676-1.074h.001c.421-.175.706.034 1.232.284l.762-.762c-.247-.521-.458-.812-.282-1.235s.529-.481 1.073-.674zm-4 .794c-.736 0-1.333-.597-1.333-1.333s.597-1.333 1.333-1.333 1.333.597 1.333 1.333-.597 1.333-1.333 1.333z"/></svg>
|
||||
</div></a>
|
||||
<div id="icon_bar">
|
||||
{% block icon_bar %}{% endblock %}
|
||||
{% include 'icons/settings.jinja' %}
|
||||
</div>
|
||||
|
||||
|
||||
<div id="notification_area">
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
@ -66,6 +66,9 @@
|
||||
<ul>
|
||||
<li>manually scrobble from track pages</li>
|
||||
<li>delete scrobbles</li>
|
||||
<li>reparse scrobbles</li>
|
||||
<li>edit tracks and artists</li>
|
||||
<li>merge tracks and artists</li>
|
||||
<li>upload artist and track art by dropping a file on the existing image on an artist or track page</li>
|
||||
<li>see more detailed error pages</li>
|
||||
</ul>
|
||||
|
@ -71,7 +71,7 @@
|
||||
<tr> <td>album</td> <td><i>Album title - optional</i></td> </tr>
|
||||
<tr> <td>albumartists</td> <td><i>List of album artists - optional</i></td> </tr>
|
||||
<tr> <td>duration</td> <td><i>Duration of play in seconds - optional</i></td> </tr>
|
||||
<tr> <td>length</td> <td><i>Full length of the trackin seconds - optional</i></td> </tr>
|
||||
<tr> <td>length</td> <td><i>Full length of the track in seconds - optional</i></td> </tr>
|
||||
<tr> <td>time</td> <td><i>UNIX timestamp - optional, defaults to time of request</i></td> </tr>
|
||||
<tr> <td>fix</td> <td><i>Set this to false to skip server-side metadata fixing - optional</i></td> </tr>
|
||||
|
||||
@ -85,7 +85,7 @@
|
||||
<h2>Import your Last.FM data</h2>
|
||||
|
||||
Switching from Last.fm? <a class="textlink" href="https://benjaminbenben.com/lastfm-to-csv/">Download all your data</a> and run the command <span class="stats">maloja import <i>(the file you just downloaded)</i></span>.<br/>
|
||||
You can also try out <a href="https://github.com/FoxxMD/multi-scrobbler">Multi-Scrobbler</a> to import scrobbles from a wider range of sources.
|
||||
You can also try out <a class="textlink" href="https://github.com/FoxxMD/multi-scrobbler">Multi-Scrobbler</a> to import scrobbles from a wider range of sources.
|
||||
<br/><br/>
|
||||
|
||||
<h2>Set up some rules</h2>
|
||||
|
@ -6,6 +6,7 @@
|
||||
|
||||
{% block scripts %}
|
||||
<script src="/rangeselect.js"></script>
|
||||
<script src="/edit.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% set artist = filterkeys.artist %}
|
||||
@ -26,10 +27,23 @@
|
||||
|
||||
{% set encodedartist = mlj_uri.uriencode({'artist':artist}) %}
|
||||
|
||||
{% block icon_bar %}
|
||||
{% if adminmode %}
|
||||
{% include 'icons/edit.jinja' %}
|
||||
{% include 'icons/merge.jinja' %}
|
||||
{% include 'icons/merge_mark.jinja' %}
|
||||
{% include 'icons/merge_cancel.jinja' %}
|
||||
<script>showValidMergeIcons();</script>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
|
||||
<script>
|
||||
const entity_id = {{ info.id }};
|
||||
const entity_type = 'artist';
|
||||
const entity_name = {{ artist | tojson }};
|
||||
</script>
|
||||
|
||||
|
||||
|
||||
@ -47,7 +61,7 @@
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="text">
|
||||
<h1 class="headerwithextra">{{ info.artist }}</h1>
|
||||
<h1 id="main_entity_name" class="headerwithextra">{{ info.artist }}</h1>
|
||||
{% if competes %}<span class="rank"><a href="/charts_artists?max=100">#{{ info.position }}</a></span>{% endif %}
|
||||
<br/>
|
||||
{% if competes and included %}
|
||||
@ -56,7 +70,9 @@
|
||||
<span>Competing under {{ links.link(credited) }} (#{{ info.position }})</span>
|
||||
{% endif %}
|
||||
|
||||
<p class="stats"><a href="{{ mlj_uri.create_uri("/scrobbles",filterkeys) }}">{{ info['scrobbles'] }} Scrobbles</a></p>
|
||||
<p class="stats">
|
||||
<a href="{{ mlj_uri.create_uri("/scrobbles",filterkeys) }}">{{ info['scrobbles'] }} Scrobbles</a>
|
||||
</p>
|
||||
|
||||
|
||||
|
||||
@ -72,6 +88,7 @@
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
<h2><a href='{{ mlj_uri.create_uri("/charts_tracks",filterkeys) }}'>Top Tracks</a></h2>
|
||||
|
||||
|
||||
|
202
maloja/web/jinja/icons/LICENSE-material
Normal file
202
maloja/web/jinja/icons/LICENSE-material
Normal file
@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
21
maloja/web/jinja/icons/LICENSE-octicons
Normal file
21
maloja/web/jinja/icons/LICENSE-octicons
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2022 GitHub Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
6
maloja/web/jinja/icons/delete.jinja
Normal file
6
maloja/web/jinja/icons/delete.jinja
Normal file
@ -0,0 +1,6 @@
|
||||
<div class='deleteicon clickable_icon danger' onclick="toggleDeleteConfirm(this)" title="Delete scrobble">
|
||||
<svg height="16px" viewBox="0 0 24 24" width="16px">
|
||||
<path d="M0 0h24v24H0z" fill="none"/>
|
||||
<path d="M6 19c0 1.1.9 2 2 2h8c1.1 0 2-.9 2-2V7H6v12zM19 4h-3.5l-1-1h-5l-1 1H5v2h14V4z"/>
|
||||
</svg>
|
||||
</div>
|
5
maloja/web/jinja/icons/edit.jinja
Normal file
5
maloja/web/jinja/icons/edit.jinja
Normal file
@ -0,0 +1,5 @@
|
||||
<div title="Edit" id="editicon" class="clickable_icon" onclick="editEntity()">
|
||||
<svg width="24" height="24" viewBox="0 0 24 24">
|
||||
<path fill-rule="evenodd" d="M17.263 2.177a1.75 1.75 0 012.474 0l2.586 2.586a1.75 1.75 0 010 2.474L19.53 10.03l-.012.013L8.69 20.378a1.75 1.75 0 01-.699.409l-5.523 1.68a.75.75 0 01-.935-.935l1.673-5.5a1.75 1.75 0 01.466-.756L14.476 4.963l2.787-2.786zm-2.275 4.371l-10.28 9.813a.25.25 0 00-.067.108l-1.264 4.154 4.177-1.271a.25.25 0 00.1-.059l10.273-9.806-2.94-2.939zM19 8.44l2.263-2.262a.25.25 0 000-.354l-2.586-2.586a.25.25 0 00-.354 0L16.061 5.5 19 8.44z"/>
|
||||
</svg>
|
||||
</div>
|
5
maloja/web/jinja/icons/merge.jinja
Normal file
5
maloja/web/jinja/icons/merge.jinja
Normal file
@ -0,0 +1,5 @@
|
||||
<div title="Merge" id="mergeicon" class="clickable_icon hide" onclick="merge()">
|
||||
<svg viewBox="0 0 16 16" width="24" height="24">
|
||||
<path fill-rule="evenodd" d="M5 3.254V3.25v.005a.75.75 0 110-.005v.004zm.45 1.9a2.25 2.25 0 10-1.95.218v5.256a2.25 2.25 0 101.5 0V7.123A5.735 5.735 0 009.25 9h1.378a2.251 2.251 0 100-1.5H9.25a4.25 4.25 0 01-3.8-2.346zM12.75 9a.75.75 0 100-1.5.75.75 0 000 1.5zm-8.5 4.5a.75.75 0 100-1.5.75.75 0 000 1.5z"></path>
|
||||
</svg>
|
||||
</div>
|
5
maloja/web/jinja/icons/merge_cancel.jinja
Normal file
5
maloja/web/jinja/icons/merge_cancel.jinja
Normal file
@ -0,0 +1,5 @@
|
||||
<div title="Cancel merge" id="mergecancelicon" class="clickable_icon hide" onclick="cancelMerge()">
|
||||
<svg viewBox="0 0 16 16" width="24" height="24">
|
||||
<path fill-rule="evenodd" d="M10.72 1.227a.75.75 0 011.06 0l.97.97.97-.97a.75.75 0 111.06 1.061l-.97.97.97.97a.75.75 0 01-1.06 1.06l-.97-.97-.97.97a.75.75 0 11-1.06-1.06l.97-.97-.97-.97a.75.75 0 010-1.06zM12.75 6.5a.75.75 0 00-.75.75v3.378a2.251 2.251 0 101.5 0V7.25a.75.75 0 00-.75-.75zm0 5.5a.75.75 0 100 1.5.75.75 0 000-1.5zM2.5 3.25a.75.75 0 111.5 0 .75.75 0 01-1.5 0zM3.25 1a2.25 2.25 0 00-.75 4.372v5.256a2.251 2.251 0 101.5 0V5.372A2.25 2.25 0 003.25 1zm0 11a.75.75 0 100 1.5.75.75 0 000-1.5z"></path>
|
||||
</svg>
|
||||
</div>
|
5
maloja/web/jinja/icons/merge_mark.jinja
Normal file
5
maloja/web/jinja/icons/merge_mark.jinja
Normal file
@ -0,0 +1,5 @@
|
||||
<div title="Mark for merging" id="mergemarkicon" class="clickable_icon hide" onclick="markForMerge()">
|
||||
<svg viewBox="0 0 16 16" width="24" height="24">
|
||||
<path fill-rule="evenodd" d="M7.177 3.073L9.573.677A.25.25 0 0110 .854v4.792a.25.25 0 01-.427.177L7.177 3.427a.25.25 0 010-.354zM3.75 2.5a.75.75 0 100 1.5.75.75 0 000-1.5zm-2.25.75a2.25 2.25 0 113 2.122v5.256a2.251 2.251 0 11-1.5 0V5.372A2.25 2.25 0 011.5 3.25zM11 2.5h-1V4h1a1 1 0 011 1v5.628a2.251 2.251 0 101.5 0V5A2.5 2.5 0 0011 2.5zm1 10.25a.75.75 0 111.5 0 .75.75 0 01-1.5 0zM3.75 12a.75.75 0 100 1.5.75.75 0 000-1.5z"></path>
|
||||
</svg>
|
||||
</div>
|
7
maloja/web/jinja/icons/nodata.jinja
Normal file
7
maloja/web/jinja/icons/nodata.jinja
Normal file
@ -0,0 +1,7 @@
|
||||
<td style="opacity:0.5;text-align:center;">
|
||||
<svg height="96px" viewBox="0 0 24 24" width="96px">
|
||||
<path d="M0 0h24v24H0z" fill="none"/>
|
||||
<path d="M4.27 3L3 4.27l9 9v.28c-.59-.34-1.27-.55-2-.55-2.21 0-4 1.79-4 4s1.79 4 4 4 4-1.79 4-4v-1.73L19.73 21 21 19.73 4.27 3zM14 7h4V3h-6v5.18l2 2z"/>
|
||||
</svg>
|
||||
<br/>No scrobbles yet!
|
||||
</td>
|
5
maloja/web/jinja/icons/reparse.jinja
Normal file
5
maloja/web/jinja/icons/reparse.jinja
Normal file
@ -0,0 +1,5 @@
|
||||
<div class='refreshicon clickable_icon danger' onclick="toggleReparseConfirm(this)" title="Reparse original scrobble">
|
||||
<svg height="16px" viewBox="0 0 24 24" width="16px">
|
||||
<path d="M0 0h24v24H0z" fill="none"/><path d="M17.65 6.35C16.2 4.9 14.21 4 12 4c-4.42 0-7.99 3.58-7.99 8s3.57 8 7.99 8c3.73 0 6.84-2.55 7.73-6h-2.08c-.82 2.33-3.04 4-5.65 4-3.31 0-6-2.69-6-6s2.69-6 6-6c1.66 0 3.14.69 4.22 1.78L13 11h7V4l-2.35 2.35z"/>
|
||||
</svg>
|
||||
</div>
|
10
maloja/web/jinja/icons/settings.jinja
Normal file
10
maloja/web/jinja/icons/settings.jinja
Normal file
@ -0,0 +1,10 @@
|
||||
<a class='hidelink' href="/admin_overview">
|
||||
<div title="Server Administration" id="settingsicon" class="clickable_icon" style="margin-left:25px;">
|
||||
<svg enable-background="new 0 0 24 24" height="24px" viewBox="0 0 24 24" width="24px">
|
||||
<g>
|
||||
<path d="M0,0h24v24H0V0z" fill="none"/>
|
||||
<path d="M19.14,12.94c0.04-0.3,0.06-0.61,0.06-0.94c0-0.32-0.02-0.64-0.07-0.94l2.03-1.58c0.18-0.14,0.23-0.41,0.12-0.61 l-1.92-3.32c-0.12-0.22-0.37-0.29-0.59-0.22l-2.39,0.96c-0.5-0.38-1.03-0.7-1.62-0.94L14.4,2.81c-0.04-0.24-0.24-0.41-0.48-0.41 h-3.84c-0.24,0-0.43,0.17-0.47,0.41L9.25,5.35C8.66,5.59,8.12,5.92,7.63,6.29L5.24,5.33c-0.22-0.08-0.47,0-0.59,0.22L2.74,8.87 C2.62,9.08,2.66,9.34,2.86,9.48l2.03,1.58C4.84,11.36,4.8,11.69,4.8,12s0.02,0.64,0.07,0.94l-2.03,1.58 c-0.18,0.14-0.23,0.41-0.12,0.61l1.92,3.32c0.12,0.22,0.37,0.29,0.59,0.22l2.39-0.96c0.5,0.38,1.03,0.7,1.62,0.94l0.36,2.54 c0.05,0.24,0.24,0.41,0.48,0.41h3.84c0.24,0,0.44-0.17,0.47-0.41l0.36-2.54c0.59-0.24,1.13-0.56,1.62-0.94l2.39,0.96 c0.22,0.08,0.47,0,0.59-0.22l1.92-3.32c0.12-0.22,0.07-0.47-0.12-0.61L19.14,12.94z M12,15.6c-1.98,0-3.6-1.62-3.6-3.6 s1.62-3.6,3.6-3.6s3.6,1.62,3.6,3.6S13.98,15.6,12,15.6z"/>
|
||||
</g>
|
||||
</svg>
|
||||
</div>
|
||||
</a>
|
@ -9,8 +9,12 @@
|
||||
{% set charts_cycler = cycler(*charts_14) %}
|
||||
|
||||
|
||||
|
||||
<table class="tiles_top"><tr>
|
||||
{% for segment in range(3) %}
|
||||
{% if charts_14[0] is none and loop.first %}
|
||||
{% include 'icons/nodata.jinja' %}
|
||||
{% else %}
|
||||
<td>
|
||||
{% set segmentsize = segment+1 %}
|
||||
<table class="tiles_{{ segmentsize }}x{{ segmentsize }} tiles_sub">
|
||||
@ -35,6 +39,7 @@
|
||||
</tr>
|
||||
{%- endfor -%}
|
||||
</table>
|
||||
</td>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</tr></table>
|
||||
|
@ -11,6 +11,9 @@
|
||||
|
||||
<table class="tiles_top"><tr>
|
||||
{% for segment in range(3) %}
|
||||
{% if charts_14[0] is none and loop.first %}
|
||||
{% include 'icons/nodata.jinja' %}
|
||||
{% else %}
|
||||
<td>
|
||||
{% set segmentsize = segment+1 %}
|
||||
<table class="tiles_{{ segmentsize }}x{{ segmentsize }} tiles_sub">
|
||||
@ -35,6 +38,7 @@
|
||||
</tr>
|
||||
{%- endfor %}
|
||||
</table>
|
||||
</td>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</tr></table>
|
||||
|
@ -17,18 +17,28 @@
|
||||
{{ entityrow.row(s.track) }}
|
||||
{% if adminmode %}
|
||||
|
||||
<td class='delete_area'>
|
||||
<span class="confirmactions">
|
||||
<button class="smallbutton warning" onclick="deleteScrobble({{ s.time }},this)">Confirm</button>
|
||||
<button class="smallbutton" onclick="toggleDeleteConfirm(this)">Cancel</button>
|
||||
<td class='scrobble_action_area'>
|
||||
<span class='scrobble_action_type'>
|
||||
<span class="confirmactions">
|
||||
<button class="smallbutton warning" onclick="reparseScrobble({{ s.time }},this)">Reparse</button>
|
||||
<button class="smallbutton" onclick="toggleReparseConfirm(this)">Cancel</button>
|
||||
</span>
|
||||
|
||||
<span class="initializeactions">
|
||||
{% include 'icons/reparse.jinja' %}
|
||||
</span>
|
||||
</span>
|
||||
|
||||
<span class="initializeactions">
|
||||
<div class='deleteicon clickable_icon danger' onclick="toggleDeleteConfirm(this)">
|
||||
<svg style="width:14px;height:14px" viewBox="0 0 24 24">
|
||||
<path d="M19,4H15.5L14.5,3H9.5L8.5,4H5V6H19M6,19A2,2 0 0,0 8,21H16A2,2 0 0,0 18,19V7H6V19Z" />
|
||||
</svg>
|
||||
</div>
|
||||
<span class='scrobble_action_type'>
|
||||
|
||||
<span class="confirmactions">
|
||||
<button class="smallbutton warning" onclick="deleteScrobble({{ s.time }},this)">Delete</button>
|
||||
<button class="smallbutton" onclick="toggleDeleteConfirm(this)">Cancel</button>
|
||||
</span>
|
||||
|
||||
<span class="initializeactions">
|
||||
{% include 'icons/delete.jinja' %}
|
||||
</span>
|
||||
</span>
|
||||
|
||||
</td>
|
||||
|
@ -75,7 +75,7 @@
|
||||
<span class="stat_module">
|
||||
|
||||
|
||||
{%- with amountkeys = {"perpage":15,"page":0}, shortTimeDesc=True -%}
|
||||
{%- with amountkeys = {"perpage":12,"page":0}, shortTimeDesc=True -%}
|
||||
{% include 'partials/scrobbles.jinja' %}
|
||||
{%- endwith -%}
|
||||
</span>
|
||||
|
@ -5,6 +5,7 @@
|
||||
|
||||
{% block scripts %}
|
||||
<script src="/rangeselect.js"></script>
|
||||
<script src="/edit.js"></script>
|
||||
<script>
|
||||
function scrobble(encodedtrack) {
|
||||
neo.xhttprequest('/apis/mlj_1/newscrobble?nofix&' + encodedtrack,data={},method="POST").then(response=>{window.location.reload()});
|
||||
@ -21,8 +22,24 @@
|
||||
{% set encodedtrack = mlj_uri.uriencode({'track':track}) %}
|
||||
|
||||
|
||||
{% block icon_bar %}
|
||||
{% if adminmode %}
|
||||
{% include 'icons/edit.jinja' %}
|
||||
{% include 'icons/merge.jinja' %}
|
||||
{% include 'icons/merge_mark.jinja' %}
|
||||
{% include 'icons/merge_cancel.jinja' %}
|
||||
<script>showValidMergeIcons();</script>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
<script>
|
||||
const entity_id = {{ info.id }};
|
||||
const entity_type = 'track';
|
||||
const entity_name = {{ track.title | tojson }};
|
||||
</script>
|
||||
|
||||
|
||||
{% import 'partials/awards_track.jinja' as awards %}
|
||||
|
||||
@ -42,7 +59,7 @@
|
||||
</td>
|
||||
<td class="text">
|
||||
<span>{{ links.links(track.artists) }}</span><br/>
|
||||
<h1 class="headerwithextra">{{ info.track.title }}</h1>
|
||||
<h1 id="main_entity_name" class="headerwithextra">{{ info.track.title }}</h1>
|
||||
{{ awards.certs(track) }}
|
||||
<span class="rank"><a href="/charts_tracks?max=100">#{{ info.position }}</a></span>
|
||||
<br/>
|
||||
|
@ -2,6 +2,8 @@
|
||||
COMMON STYLES FOR MALOJA, ALBULA AND POSSIBLY OTHERS
|
||||
**/
|
||||
|
||||
@import url("/grisonsfont.css");
|
||||
|
||||
:root {
|
||||
--base-color: #232327;
|
||||
--base-color-dark: #090909;
|
||||
@ -156,5 +158,5 @@ input:focus {
|
||||
|
||||
|
||||
.hide {
|
||||
display:none;
|
||||
display:none !important;
|
||||
}
|
||||
|
@ -1,3 +1,6 @@
|
||||
@import url("/grisons.css");
|
||||
|
||||
|
||||
body {
|
||||
padding:15px;
|
||||
padding-bottom:35px;
|
||||
@ -55,24 +58,32 @@ div.header h1 {
|
||||
settings icon
|
||||
**/
|
||||
|
||||
div.clickable_icon {
|
||||
display: inline-block;
|
||||
svg {
|
||||
fill: var(--text-color);
|
||||
cursor: pointer;
|
||||
}
|
||||
div.clickable_icon:hover {
|
||||
fill: var(--text-color-focus);
|
||||
}
|
||||
div.clickable_icon.danger:hover {
|
||||
fill: red;
|
||||
}
|
||||
|
||||
div#settingsicon {
|
||||
div#icon_bar {
|
||||
position:fixed;
|
||||
right:30px;
|
||||
top:30px;
|
||||
}
|
||||
|
||||
div#icon_bar div.clickable_icon {
|
||||
display: inline-block;
|
||||
height:26px;
|
||||
width:26px;
|
||||
}
|
||||
div.clickable_icon svg {
|
||||
cursor: pointer;
|
||||
}
|
||||
div.clickable_icon:hover svg {
|
||||
fill: var(--text-color-focus);
|
||||
}
|
||||
div.clickable_icon.danger:hover svg {
|
||||
fill: red;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
Footer
|
||||
@ -198,7 +209,7 @@ div#notification_area {
|
||||
div#notification_area div.notification {
|
||||
background-color:white;
|
||||
width:400px;
|
||||
height:100px;
|
||||
height:50px;
|
||||
margin-bottom:7px;
|
||||
padding:9px;
|
||||
opacity:0.4;
|
||||
@ -512,7 +523,8 @@ table.list {
|
||||
table.list tr {
|
||||
background-color: var(--current-bg-color);
|
||||
border-color: var(--current-bg-color);
|
||||
height: 1.4em;
|
||||
height: 1.45em;
|
||||
transition: opacity 2s;
|
||||
|
||||
}
|
||||
|
||||
@ -610,31 +622,50 @@ table.list td.searchProvider:hover {
|
||||
color: gold;
|
||||
}
|
||||
|
||||
table.list td.delete_area {
|
||||
table.list td.scrobble_action_area {
|
||||
text-align: right;
|
||||
width:7em;
|
||||
width:2em;
|
||||
overflow:visible;
|
||||
}
|
||||
|
||||
table.list tr td.scrobble_action_area span.scrobble_action_type {
|
||||
display:inline-block;
|
||||
float:right;
|
||||
}
|
||||
|
||||
table.list td.scrobble_action_area span.scrobble_action_type.active {
|
||||
}
|
||||
|
||||
/* rows that can be deleted in some form
|
||||
'active' class on the delete area cell to toggle confirm prompt
|
||||
'removed' class on the whole row to delete
|
||||
*/
|
||||
table.list tr td.delete_area span.confirmactions {
|
||||
table.list tr td.scrobble_action_area span.scrobble_action_type span.confirmactions {
|
||||
display: none;
|
||||
}
|
||||
table.list tr td.delete_area span.initializeactions {
|
||||
table.list tr td.scrobble_action_area span.scrobble_action_type span.initializeactions {
|
||||
display: initial;
|
||||
}
|
||||
table.list tr td.delete_area.active span.confirmactions {
|
||||
|
||||
/* when other action is active, hide all */
|
||||
table.list tr td.scrobble_action_area.active span.scrobble_action_type span.initializeactions {
|
||||
display: none;
|
||||
}
|
||||
table.list tr td.scrobble_action_area.active span.scrobble_action_type span.initializeactions {
|
||||
display: none;
|
||||
}
|
||||
/* except this one itself is active */
|
||||
table.list tr td.scrobble_action_area.active span.scrobble_action_type.active span.confirmactions {
|
||||
display: initial;
|
||||
}
|
||||
table.list tr td.delete_area.active span.initializeactions {
|
||||
table.list tr td.scrobble_action_area.active span.scrobble_action_type.active span.initializeactions {
|
||||
display: none;
|
||||
}
|
||||
table.list tr.removed td.delete_area span.confirmactions {
|
||||
|
||||
table.list tr.removed td.scrobble_action_area span.scrobble_action_type {
|
||||
display: none;
|
||||
}
|
||||
table.list tr.removed td.delete_area span.initializeactions {
|
||||
table.list tr.removed td.scrobble_action_area span.scrobble_action_type {
|
||||
display: none;
|
||||
}
|
||||
table.list tr.removed {
|
||||
@ -643,6 +674,13 @@ table.list tr.removed {
|
||||
}
|
||||
|
||||
|
||||
table.list tr.changed {
|
||||
/*background-color: rgba(222,209,180,0.7) !important;*/
|
||||
opacity:0;
|
||||
transition: opacity 0.2s;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
table td.artists div {
|
||||
overflow:hidden;
|
||||
|
@ -1,12 +1,260 @@
|
||||
// JS for all web interface editing / deletion of scrobble data
|
||||
|
||||
// HELPERS
|
||||
function selectAll(e) {
|
||||
// https://stackoverflow.com/a/6150060/6651341
|
||||
var range = document.createRange();
|
||||
range.selectNodeContents(e);
|
||||
var sel = window.getSelection();
|
||||
sel.removeAllRanges();
|
||||
sel.addRange(range);
|
||||
}
|
||||
|
||||
// DELETION
|
||||
function toggleDeleteConfirm(element) {
|
||||
element.parentElement.parentElement.classList.toggle('active');
|
||||
element.parentElement.parentElement.parentElement.classList.toggle('active');
|
||||
}
|
||||
|
||||
function deleteScrobble(id,element) {
|
||||
element.parentElement.parentElement.parentElement.classList.add('removed');
|
||||
var callback_func = function(req){
|
||||
if (req.status == 200) {
|
||||
element.parentElement.parentElement.parentElement.parentElement.classList.add('removed');
|
||||
notifyCallback(req);
|
||||
}
|
||||
else {
|
||||
notifyCallback(req);
|
||||
}
|
||||
};
|
||||
|
||||
neo.xhttpreq("/apis/mlj_1/delete_scrobble",data={'timestamp':id},method="POST",callback=(()=>null),json=true);
|
||||
neo.xhttpreq("/apis/mlj_1/delete_scrobble",data={'timestamp':id},method="POST",callback=callback_func,json=true);
|
||||
}
|
||||
|
||||
// REPARSING
|
||||
|
||||
function toggleReparseConfirm(element) {
|
||||
element.parentElement.parentElement.classList.toggle('active');
|
||||
element.parentElement.parentElement.parentElement.classList.toggle('active');
|
||||
}
|
||||
|
||||
function reparseScrobble(id, element) {
|
||||
toggleReparseConfirm(element);
|
||||
|
||||
callback_func = function(req){
|
||||
if (req.status == 200) {
|
||||
if (req.response.status != 'no_operation') {
|
||||
//window.location.reload();
|
||||
notifyCallback(req);
|
||||
var newtrack = req.response.scrobble.track;
|
||||
var row = element.parentElement.parentElement.parentElement.parentElement;
|
||||
changeScrobbleRow(row,newtrack);
|
||||
}
|
||||
else {
|
||||
notifyCallback(req);
|
||||
}
|
||||
}
|
||||
else {
|
||||
notifyCallback(req);
|
||||
}
|
||||
};
|
||||
|
||||
neo.xhttpreq("/apis/mlj_1/reparse_scrobble",data={'timestamp':id},method="POST",callback=callback_func,json=true);
|
||||
|
||||
}
|
||||
|
||||
function changeScrobbleRow(element,newtrack) {
|
||||
element.classList.add('changed');
|
||||
|
||||
setTimeout(function(){
|
||||
element.getElementsByClassName('track')[0].innerHTML = createTrackCell(newtrack);
|
||||
},200);
|
||||
setTimeout(function(){element.classList.remove('changed')},300);
|
||||
}
|
||||
|
||||
function createTrackCell(trackinfo) {
|
||||
|
||||
var trackquery = new URLSearchParams();
|
||||
trackinfo.artists.forEach((a)=>trackquery.append('artist',a));
|
||||
trackquery.append('title',trackinfo.title);
|
||||
|
||||
tracklink = document.createElement('a');
|
||||
tracklink.href = "/track?" + trackquery.toString();
|
||||
tracklink.textContent = trackinfo.title;
|
||||
|
||||
artistelements = []
|
||||
var artistholder = document.createElement('span');
|
||||
artistholder.classList.add('artist_in_trackcolumn');
|
||||
for (var a of trackinfo.artists) {
|
||||
var artistquery = new URLSearchParams();
|
||||
artistquery.append('artist',a);
|
||||
|
||||
artistlink = document.createElement('a');
|
||||
artistlink.href = "/artist?" + artistquery.toString();
|
||||
artistlink.textContent = a;
|
||||
|
||||
artistelements.push(artistlink.outerHTML)
|
||||
}
|
||||
|
||||
artistholder.innerHTML = artistelements.join(", ");
|
||||
return artistholder.outerHTML + " – " + tracklink.outerHTML;
|
||||
}
|
||||
|
||||
|
||||
// EDIT NAME
|
||||
function editEntity() {
|
||||
|
||||
var namefield = document.getElementById('main_entity_name');
|
||||
namefield.contentEditable = "plaintext-only";
|
||||
|
||||
namefield.addEventListener('keydown',function(e){
|
||||
// dont allow new lines, done on enter
|
||||
if (e.key === "Enter") {
|
||||
e.preventDefault();
|
||||
namefield.blur(); // this leads to below
|
||||
}
|
||||
// cancel on esc
|
||||
else if (e.key === "Escape" || e.key === "Esc") {
|
||||
e.preventDefault();
|
||||
namefield.textContent = entity_name;
|
||||
namefield.blur();
|
||||
}
|
||||
})
|
||||
|
||||
// emergency, not pretty because it will move cursor
|
||||
namefield.addEventListener('input',function(e){
|
||||
if (namefield.textContent.includes("\n")) {
|
||||
namefield.textContent = namefield.textContent.replace("\n","");
|
||||
}
|
||||
})
|
||||
|
||||
// manually clicking away OR enter
|
||||
namefield.addEventListener('blur',function(e){
|
||||
doneEditing();
|
||||
})
|
||||
|
||||
namefield.focus();
|
||||
selectAll(namefield);
|
||||
}
|
||||
|
||||
function doneEditing() {
|
||||
window.getSelection().removeAllRanges();
|
||||
var namefield = document.getElementById('main_entity_name');
|
||||
namefield.contentEditable = "false";
|
||||
newname = namefield.textContent;
|
||||
|
||||
if (newname != entity_name) {
|
||||
var searchParams = new URLSearchParams(window.location.search);
|
||||
|
||||
if (entity_type == 'artist') {
|
||||
var endpoint = "/apis/mlj_1/edit_artist";
|
||||
searchParams.set("artist", newname);
|
||||
var payload = {'id':entity_id,'name':newname};
|
||||
}
|
||||
else if (entity_type == 'track') {
|
||||
var endpoint = "/apis/mlj_1/edit_track";
|
||||
searchParams.set("title", newname);
|
||||
var payload = {'id':entity_id,'title':newname}
|
||||
}
|
||||
|
||||
callback_func = function(req){
|
||||
if (req.status == 200) {
|
||||
window.location = "?" + searchParams.toString();
|
||||
}
|
||||
else {
|
||||
notifyCallback(req);
|
||||
namefield.textContent = entity_name;
|
||||
}
|
||||
};
|
||||
|
||||
neo.xhttpreq(
|
||||
endpoint,
|
||||
data=payload,
|
||||
method="POST",
|
||||
callback=callback_func,
|
||||
json=true
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// MERGING
|
||||
|
||||
function showValidMergeIcons() {
|
||||
const lcst = window.sessionStorage;
|
||||
var key = "marked_for_merge_" + entity_type;
|
||||
var current_stored = (lcst.getItem(key) || '').split(",");
|
||||
current_stored = current_stored.filter((x)=>x).map((x)=>parseInt(x));
|
||||
|
||||
var mergeicon = document.getElementById('mergeicon');
|
||||
var mergemarkicon = document.getElementById('mergemarkicon');
|
||||
var mergecancelicon = document.getElementById('mergecancelicon');
|
||||
|
||||
mergeicon.classList.add('hide');
|
||||
mergemarkicon.classList.add('hide');
|
||||
mergecancelicon.classList.add('hide');
|
||||
|
||||
if (current_stored.length == 0) {
|
||||
mergemarkicon.classList.remove('hide');
|
||||
}
|
||||
else {
|
||||
mergecancelicon.classList.remove('hide');
|
||||
|
||||
if (current_stored.includes(entity_id)) {
|
||||
|
||||
}
|
||||
else {
|
||||
mergemarkicon.classList.remove('hide');
|
||||
mergeicon.classList.remove('hide');
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
function markForMerge() {
|
||||
const lcst = window.sessionStorage;
|
||||
var key = "marked_for_merge_" + entity_type;
|
||||
var current_stored = (lcst.getItem(key) || '').split(",");
|
||||
current_stored = current_stored.filter((x)=>x).map((x)=>parseInt(x));
|
||||
current_stored.push(entity_id);
|
||||
current_stored = [...new Set(current_stored)];
|
||||
lcst.setItem(key,current_stored); //this already formats it correctly
|
||||
notify("Marked " + entity_name + " for merge","Currently " + current_stored.length + " marked!")
|
||||
showValidMergeIcons();
|
||||
}
|
||||
|
||||
function merge() {
|
||||
const lcst = window.sessionStorage;
|
||||
var key = "marked_for_merge_" + entity_type;
|
||||
var current_stored = lcst.getItem(key).split(",");
|
||||
current_stored = current_stored.filter((x)=>x).map((x)=>parseInt(x));
|
||||
|
||||
callback_func = function(req){
|
||||
if (req.status == 200) {
|
||||
window.location.reload();
|
||||
}
|
||||
else {
|
||||
notifyCallback(req);
|
||||
}
|
||||
};
|
||||
|
||||
neo.xhttpreq(
|
||||
"/apis/mlj_1/merge_" + entity_type + "s",
|
||||
data={
|
||||
'source_ids':current_stored,
|
||||
'target_id':entity_id
|
||||
},
|
||||
method="POST",
|
||||
callback=callback_func,
|
||||
json=true
|
||||
);
|
||||
|
||||
lcst.removeItem(key);
|
||||
}
|
||||
|
||||
function cancelMerge() {
|
||||
const lcst = window.sessionStorage;
|
||||
var key = "marked_for_merge_" + entity_type;
|
||||
lcst.setItem(key,[]);
|
||||
showValidMergeIcons();
|
||||
notify("Cancelled merge!","")
|
||||
}
|
||||
|
@ -69,8 +69,9 @@ function scrobble(artists,title) {
|
||||
"title":title
|
||||
}
|
||||
|
||||
|
||||
if (title != "" && artists.length > 0) {
|
||||
neo.xhttpreq("/apis/mlj_1/newscrobble",data=payload,method="POST",callback=scrobbledone,json=true)
|
||||
neo.xhttpreq("/apis/mlj_1/newscrobble",data=payload,method="POST",callback=notifyCallback,json=true)
|
||||
}
|
||||
|
||||
document.getElementById("title").value = "";
|
||||
|
@ -6,7 +6,7 @@ const colors = {
|
||||
}
|
||||
|
||||
const notification_template = info => `
|
||||
<div class="notification" style="background-color:${colors[type]};">
|
||||
<div class="notification" style="background-color:${colors[info.notification_type]};">
|
||||
<b>${info.title}</b><br/>
|
||||
<span>${info.body}</span>
|
||||
|
||||
@ -20,11 +20,11 @@ function htmlToElement(html) {
|
||||
return template.content.firstChild;
|
||||
}
|
||||
|
||||
function notify(title,msg,type='info',reload=false) {
|
||||
function notify(title,msg,notification_type='info',reload=false) {
|
||||
info = {
|
||||
'title':title,
|
||||
'body':msg,
|
||||
'type':type
|
||||
'notification_type':notification_type
|
||||
}
|
||||
|
||||
var element = htmlToElement(notification_template(info));
|
||||
@ -33,3 +33,22 @@ function notify(title,msg,type='info',reload=false) {
|
||||
|
||||
setTimeout(function(e){e.remove();},7000,element);
|
||||
}
|
||||
|
||||
function notifyCallback(request) {
|
||||
var body = request.response;
|
||||
var status = request.status;
|
||||
|
||||
if (status == 200) {
|
||||
var notification_type = 'info';
|
||||
var title = "Success!";
|
||||
var msg = body.desc || body;
|
||||
}
|
||||
else {
|
||||
var notification_type = 'warning';
|
||||
var title = "Error: " + body.error.type;
|
||||
var msg = body.error.desc || "";
|
||||
}
|
||||
|
||||
|
||||
notify(title,msg,notification_type);
|
||||
}
|
||||
|
@ -61,7 +61,7 @@ function searchresult() {
|
||||
}
|
||||
|
||||
for (var i=0;i<artists.length;i++) {
|
||||
name = artists[i]["name"];
|
||||
name = artists[i]["artist"];
|
||||
link = artists[i]["link"];
|
||||
image = artists[i]["image"];
|
||||
|
||||
@ -74,8 +74,8 @@ function searchresult() {
|
||||
}
|
||||
for (var i=0;i<tracks.length;i++) {
|
||||
|
||||
artists = tracks[i]["artists"].join(", ");
|
||||
title = tracks[i]["title"];
|
||||
artists = tracks[i]["track"]["artists"].join(", ");
|
||||
title = tracks[i]["track"]["title"];
|
||||
link = tracks[i]["link"];
|
||||
image = tracks[i]["image"];
|
||||
|
||||
|
@ -3,7 +3,7 @@ name = "malojaserver"
|
||||
version = "3.0.7"
|
||||
description = "Self-hosted music scrobble database"
|
||||
readme = "./README.md"
|
||||
requires-python = ">=3.6"
|
||||
requires-python = ">=3.7"
|
||||
license = { file="./LICENSE" }
|
||||
authors = [ { name="Johannes Krattenmacher", email="maloja@dev.krateng.ch" } ]
|
||||
|
||||
@ -20,14 +20,13 @@ classifiers = [
|
||||
|
||||
dependencies = [
|
||||
"bottle>=0.12.16",
|
||||
"waitress>=1.3",
|
||||
"waitress>=2.1.0",
|
||||
"doreah>=1.9.1, <2",
|
||||
"nimrodel>=0.8.0",
|
||||
"setproctitle>=1.1.10",
|
||||
#"pyvips>=2.1.16",
|
||||
"jinja2>=2.11",
|
||||
"jinja2>=3.0.0",
|
||||
"lru-dict>=1.1.6",
|
||||
"css_html_js_minify>=2.5.5",
|
||||
"psutil>=5.8.0",
|
||||
"sqlalchemy>=1.4",
|
||||
"python-datauri>=1.1.0",
|
||||
@ -40,7 +39,7 @@ full = [
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
maloja = "maloja.proccontrol.control:main"
|
||||
maloja = "maloja.__main__:main"
|
||||
|
||||
[build-system]
|
||||
requires = ["flit_core >=3.2,<4"]
|
||||
|
@ -1,11 +1,10 @@
|
||||
bottle>=0.12.16
|
||||
waitress>=1.3
|
||||
waitress>=2.1.0
|
||||
doreah>=1.9.1, <2
|
||||
nimrodel>=0.8.0
|
||||
setproctitle>=1.1.10
|
||||
jinja2>=2.11
|
||||
jinja2>=3.0.0
|
||||
lru-dict>=1.1.6
|
||||
css_html_js_minify>=2.5.5
|
||||
psutil>=5.8.0
|
||||
sqlalchemy>=1.4
|
||||
python-datauri>=1.1.0
|
||||
|
BIN
screenshot.png
BIN
screenshot.png
Binary file not shown.
Before Width: | Height: | Size: 1.0 MiB After Width: | Height: | Size: 1.0 MiB |
Loading…
x
Reference in New Issue
Block a user