diff --git a/.doreah b/.doreah
index 9a545e6..5d9d026 100644
--- a/.doreah
+++ b/.doreah
@@ -1,4 +1,10 @@
-logging.logfolder = logs
-settings.files = [ "settings/default.ini" , "settings/settings.ini" ]
-caching.folder = "cache/"
-regular.autostart = false
+logging:
+ logfolder: "logs"
+settings:
+ files:
+ - "settings/default.ini"
+ - "settings/settings.ini"
+caching:
+ folder: "cache/"
+regular:
+ autostart: false
diff --git a/.gitignore b/.gitignore
index 50cc9d4..ff3a5ed 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,7 @@
# generic temporary / dev files
*.pyc
*.sh
+!/update_requirements.sh
*.note
*.xcf
nohup.out
@@ -10,10 +11,10 @@ nohup.out
*.tsv
*.rulestate
*.log
+*.css
# currently not using
/screenshot*.png
-/proxyscrobble.py
# only for development, normally external
/doreah
diff --git a/README.md b/README.md
index fba5b56..a6f0a41 100644
--- a/README.md
+++ b/README.md
@@ -16,12 +16,8 @@ Also neat: You can use your **custom artist or track images**.
## Requirements
-* [python3](https://www.python.org/) - [GitHub](https://github.com/python/cpython)
-* [bottle.py](https://bottlepy.org/) - [GitHub](https://github.com/bottlepy/bottle)
-* [waitress](https://docs.pylonsproject.org/projects/waitress/) - [GitHub](https://github.com/Pylons/waitress)
-* [doreah](https://pypi.org/project/doreah/) - [GitHub](https://github.com/krateng/doreah) (at least Version 0.9.1)
-* [nimrodel](https://pypi.org/project/nimrodel/) - [GitHub](https://github.com/krateng/nimrodel) (at least Version 0.4.9)
-* [setproctitle](https://pypi.org/project/setproctitle/) - [GitHub](https://github.com/dvarrazzo/py-setproctitle)
+* Python 3
+* Pip packages specified in `requirements.txt`
* If you'd like to display images, you will need API keys for [Last.fm](https://www.last.fm/api/account/create) and [Fanart.tv](https://fanart.tv/get-an-api-key/). These are free of charge!
## How to install
@@ -68,9 +64,9 @@ If you didn't install Maloja from the package (and therefore don't have it in `/
### Native API
-If you use Plex Web or Youtube Music on Chromium, you can use the included extension (also available on the [Chrome Web Store](https://chrome.google.com/webstore/detail/maloja-scrobbler/cfnbifdmgbnaalphodcbandoopgbfeeh)). Make sure to enter the random key Maloja generates on first startup in the extension settings.
+If you use Plex Web, Spotify, Bandcamp, Soundcloud or Youtube Music on Chromium, you can use the included extension (also available on the [Chrome Web Store](https://chrome.google.com/webstore/detail/maloja-scrobbler/cfnbifdmgbnaalphodcbandoopgbfeeh)). Make sure to enter the random key Maloja generates on first startup in the extension settings.
-If you want to implement your own method of scrobbling, it's very simple: You only need one POST request to `/api/newscrobble` with the keys `artist`, `title` and `key` - either as from-data or json.
+If you want to implement your own method of scrobbling, it's very simple: You only need one POST request to `/api/newscrobble` with the keys `artist`, `title` and `key` - either as form-data or json.
### Standard-compliant API
diff --git a/cleanup.py b/cleanup.py
index 3012914..7961940 100644
--- a/cleanup.py
+++ b/cleanup.py
@@ -1,6 +1,6 @@
import re
import utilities
-from doreah import tsv
+from doreah import tsv, settings
# need to do this as a class so it can retain loaded settings from file
# apparently this is not true
@@ -11,11 +11,16 @@ class CleanerAgent:
self.updateRules()
def updateRules(self):
- raw = tsv.parse_all("rules","string","string","string")
- self.rules_belongtogether = [b for [a,b,c] in raw if a=="belongtogether"]
- self.rules_notanartist = [b for [a,b,c] in raw if a=="notanartist"]
- self.rules_replacetitle = {b.lower():c for [a,b,c] in raw if a=="replacetitle"}
- self.rules_replaceartist = {b.lower():c for [a,b,c] in raw if a=="replaceartist"}
+ raw = tsv.parse_all("rules","string","string","string","string")
+ self.rules_belongtogether = [b for [a,b,c,d] in raw if a=="belongtogether"]
+ self.rules_notanartist = [b for [a,b,c,d] in raw if a=="notanartist"]
+ self.rules_replacetitle = {b.lower():c for [a,b,c,d] in raw if a=="replacetitle"}
+ self.rules_replaceartist = {b.lower():c for [a,b,c,d] in raw if a=="replaceartist"}
+ self.rules_ignoreartist = [b.lower() for [a,b,c,d] in raw if a=="ignoreartist"]
+ self.rules_addartists = {c.lower():(b.lower(),d) for [a,b,c,d] in raw if a=="addartists"}
+ #self.rules_regexartist = [[b,c] for [a,b,c,d] in raw if a=="regexartist"]
+ #self.rules_regextitle = [[b,c] for [a,b,c,d] in raw if a=="regextitle"]
+ # TODO
# we always need to be able to tell if our current database is made with the current rules
self.checksums = utilities.checksumTSV("rules")
@@ -27,6 +32,12 @@ class CleanerAgent:
title = self.parseTitle(self.removespecial(title))
(title,moreartists) = self.parseTitleForArtists(title)
artists += moreartists
+ if title.lower() in self.rules_addartists:
+ reqartists, allartists = self.rules_addartists[title.lower()]
+ reqartists = reqartists.split("␟")
+ allartists = allartists.split("␟")
+ if set(reqartists).issubset(set(a.lower() for a in artists)):
+ artists += allartists
artists = list(set(artists))
artists.sort()
@@ -52,6 +63,12 @@ class CleanerAgent:
def parseArtists(self,a):
+ if a.strip() in settings.get_settings("INVALID_ARTISTS"):
+ return []
+
+ if a.strip().lower() in self.rules_ignoreartist:
+ return []
+
if a.strip() == "":
return []
diff --git a/compliant_api.py b/compliant_api.py
index 24e4019..5960efa 100644
--- a/compliant_api.py
+++ b/compliant_api.py
@@ -68,6 +68,7 @@ def handle(path,keys):
def scrobbletrack(artiststr,titlestr,timestamp):
try:
+ log("Incoming scrobble (compliant API): ARTISTS: " + artiststr + ", TRACK: " + titlestr,module="debug")
(artists,title) = cla.fullclean(artiststr,titlestr)
database.createScrobble(artists,title,timestamp)
database.sync()
diff --git a/database.py b/database.py
index 65fbc5f..47eb5aa 100644
--- a/database.py
+++ b/database.py
@@ -6,6 +6,7 @@ import utilities
from malojatime import register_scrobbletime, time_stamps, ranges
from urihandler import uri_to_internal, internal_to_uri, compose_querystring
import compliant_api
+from external import proxy_scrobble
# doreah toolkit
from doreah.logging import log
from doreah import tsv
@@ -49,8 +50,11 @@ TRACKS_LOWER = []
ARTISTS_LOWER = []
ARTIST_SET = set()
TRACK_SET = set()
+
MEDALS = {} #literally only changes once per year, no need to calculate that on the fly
MEDALS_TRACKS = {}
+WEEKLY_TOPTRACKS = {}
+WEEKLY_TOPARTISTS = {}
cla = CleanerAgent()
coa = CollectorAgent()
@@ -73,7 +77,12 @@ def loadAPIkeys():
log("Authenticated Machines: " + ", ".join([m[1] for m in clients]))
def checkAPIkey(k):
- return (k in [k for [k,d] in clients])
+ #return (k in [k for [k,d] in clients])
+ for key, identifier in clients:
+ if key == k: return identifier
+
+ return False
+
def allAPIkeys():
return [k for [k,d] in clients]
@@ -102,10 +111,23 @@ def get_track_dict(o):
def createScrobble(artists,title,time,volatile=False):
+
+ if len(artists) == 0 or title == "":
+ return {}
+
dblock.acquire()
+
+ i = getTrackID(artists,title)
+
+ # idempotence
+ if time in SCROBBLESDICT:
+ if i == SCROBBLESDICT[time].track:
+ dblock.release()
+ return get_track_dict(TRACKS[i])
+ # timestamp as unique identifier
while (time in SCROBBLESDICT):
time += 1
- i = getTrackID(artists,title)
+
obj = Scrobble(i,time,volatile) # if volatile generated, we simply pretend we have already saved it to disk
#SCROBBLES.append(obj)
# immediately insert scrobble correctly so we can guarantee sorted list
@@ -116,6 +138,8 @@ def createScrobble(artists,title,time,volatile=False):
invalidate_caches()
dblock.release()
+ proxy_scrobble(artists,title,time)
+
return get_track_dict(TRACKS[obj.track])
@@ -225,7 +249,22 @@ def get_scrobbles(**keys):
# return r
return r
+# info for comparison
+@dbserver.get("info")
+def info_external(**keys):
+ result = info()
+ return result
+def info():
+ totalscrobbles = get_scrobbles_num()
+ artists = {}
+
+ return {
+ "name":settings.get_settings("NAME"),
+ "artists":{
+ chartentry["artist"]:round(chartentry["scrobbles"] * 100 / totalscrobbles,3)
+ for chartentry in get_charts_artists() if chartentry["scrobbles"]/totalscrobbles >= 0}
+ }
@@ -517,7 +556,14 @@ def artistInfo(artist):
c = [e for e in charts if e["artist"] == artist][0]
others = [a for a in coa.getAllAssociated(artist) if a in ARTISTS]
position = c["rank"]
- return {"scrobbles":scrobbles,"position":position,"associated":others,"medals":MEDALS.get(artist)}
+ performance = get_performance(artist=artist,step="week")
+ return {
+ "scrobbles":scrobbles,
+ "position":position,
+ "associated":others,
+ "medals":MEDALS.get(artist),
+ "topweeks":WEEKLY_TOPARTISTS.get(artist,0)
+ }
except:
# if the artist isnt in the charts, they are not being credited and we
# need to show information about the credited one
@@ -555,11 +601,13 @@ def trackInfo(track):
elif scrobbles >= threshold_platinum: cert = "platinum"
elif scrobbles >= threshold_gold: cert = "gold"
+
return {
"scrobbles":scrobbles,
"position":position,
"medals":MEDALS_TRACKS.get((frozenset(track["artists"]),track["title"])),
- "certification":cert
+ "certification":cert,
+ "topweeks":WEEKLY_TOPTRACKS.get(((frozenset(track["artists"]),track["title"])),0)
}
@@ -573,13 +621,16 @@ def pseudo_post_scrobble(**keys):
artists = keys.get("artist")
title = keys.get("title")
apikey = keys.get("key")
- if not (checkAPIkey(apikey)):
+ client = checkAPIkey(apikey)
+ if client == False: # empty string allowed!
response.status = 403
return ""
try:
time = int(keys.get("time"))
except:
time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
+
+ log("Incoming scrobble (native API): Client " + client + ", ARTISTS: " + str(artists) + ", TRACK: " + title,module="debug")
(artists,title) = cla.fullclean(artists,title)
## this is necessary for localhost testing
@@ -587,8 +638,9 @@ def pseudo_post_scrobble(**keys):
trackdict = createScrobble(artists,title,time)
- if (time - lastsync) > 3600:
- sync()
+ sync()
+
+
return {"status":"success","track":trackdict}
@@ -597,7 +649,8 @@ def post_scrobble(**keys):
artists = keys.get("artist")
title = keys.get("title")
apikey = keys.get("key")
- if not (checkAPIkey(apikey)):
+ client = checkAPIkey(apikey)
+ if client == False: # empty string allowed!
response.status = 403
return ""
@@ -605,6 +658,8 @@ def post_scrobble(**keys):
time = int(keys.get("time"))
except:
time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
+
+ log("Incoming scrobble (native API): Client " + client + ", ARTISTS: " + str(artists) + ", TRACK: " + title,module="debug")
(artists,title) = cla.fullclean(artists,title)
## this is necessary for localhost testing
@@ -612,12 +667,11 @@ def post_scrobble(**keys):
trackdict = createScrobble(artists,title,time)
- #if (time - lastsync) > 3600:
- # sync()
sync()
#always sync, one filesystem access every three minutes shouldn't matter
+
return {"status":"success","track":trackdict}
@@ -644,8 +698,7 @@ def abouttoshutdown():
#sys.exit()
@dbserver.post("newrule")
-def newrule():
- keys = FormsDict.decode(request.forms)
+def newrule(**keys):
apikey = keys.pop("key",None)
if (checkAPIkey(apikey)):
tsv.add_entry("rules/webmade.tsv",[k for k in keys])
@@ -751,8 +804,7 @@ def issues():
@dbserver.post("importrules")
-def import_rulemodule():
- keys = FormsDict.decode(request.forms)
+def import_rulemodule(**keys):
apikey = keys.pop("key",None)
if (checkAPIkey(apikey)):
@@ -771,9 +823,7 @@ def import_rulemodule():
@dbserver.post("rebuild")
-def rebuild():
-
- keys = FormsDict.decode(request.forms)
+def rebuild(**keys):
apikey = keys.pop("key",None)
if (checkAPIkey(apikey)):
log("Database rebuild initiated!")
@@ -886,6 +936,7 @@ def build_db():
#start regular tasks
utilities.update_medals()
+ utilities.update_weekly()
global db_rulestate
db_rulestate = utilities.consistentRulestate("scrobbles",cla.checksums)
@@ -899,6 +950,7 @@ def sync():
# all entries by file collected
# so we don't open the same file for every entry
+ #log("Syncing",module="debug")
entries = {}
for idx in range(len(SCROBBLES)):
@@ -918,15 +970,19 @@ def sync():
SCROBBLES[idx] = (SCROBBLES[idx][0],SCROBBLES[idx][1],True)
+ #log("Sorted into months",module="debug")
+
for e in entries:
tsv.add_entries("scrobbles/" + e + ".tsv",entries[e],comments=False)
#addEntries("scrobbles/" + e + ".tsv",entries[e],escape=False)
utilities.combineChecksums("scrobbles/" + e + ".tsv",cla.checksums)
+ #log("Written files",module="debug")
+
global lastsync
lastsync = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
- log("Database saved to disk.")
+ #log("Database saved to disk.")
# save cached images
#saveCache()
diff --git a/external.py b/external.py
index c9c51fc..5f90197 100644
--- a/external.py
+++ b/external.py
@@ -3,6 +3,10 @@ import json
import base64
from doreah.settings import get_settings
from doreah.logging import log
+import hashlib
+import xml.etree.ElementTree as ET
+
+### PICTURES
apis_artists = []
@@ -130,3 +134,49 @@ def api_request_track(track):
pass
return None
+
+
+
+
+
+
+
+
+### SCROBBLING
+
+# creates signature and returns full query string
+def lfmbuild(parameters):
+ m = hashlib.md5()
+ keys = sorted(str(k) for k in parameters)
+ m.update(utf("".join(str(k) + str(parameters[k]) for k in keys)))
+ m.update(utf(get_settings("LASTFM_API_SECRET")))
+ sig = m.hexdigest()
+ return urllib.parse.urlencode(parameters) + "&api_sig=" + sig
+
+def utf(st):
+ return st.encode(encoding="UTF-8")
+
+
+
+apis_scrobble = []
+
+if get_settings("LASTFM_API_SK") not in [None,"ASK"] and get_settings("LASTFM_API_SECRET") not in [None,"ASK"] and get_settings("LASTFM_API_KEY") not in [None,"ASK"]:
+ apis_scrobble.append({
+ "name":"LastFM",
+ "scrobbleurl":"http://ws.audioscrobbler.com/2.0/",
+ "requestbody":lambda artists,title,timestamp: lfmbuild({"method":"track.scrobble","artist[0]":", ".join(artists),"track[0]":title,"timestamp":timestamp,"api_key":get_settings("LASTFM_API_KEY"),"sk":get_settings("LASTFM_API_SK")})
+ })
+
+
+
+
+def proxy_scrobble(artists,title,timestamp):
+ for api in apis_scrobble:
+ response = urllib.request.urlopen(api["scrobbleurl"],data=utf(api["requestbody"](artists,title,timestamp)))
+ xml = response.read()
+ data = ET.fromstring(xml)
+ if data.attrib.get("status") == "ok":
+ if data.find("scrobbles").attrib.get("ignored") == "0":
+ log(api["name"] + ": Scrobble accepted: " + "/".join(artists) + " - " + title)
+ else:
+ log(api["name"] + ": Scrobble not accepted: " + "/".join(artists) + " - " + title)
diff --git a/htmlgenerators.py b/htmlgenerators.py
index c8b7b5e..e443fd7 100644
--- a/htmlgenerators.py
+++ b/htmlgenerators.py
@@ -2,6 +2,8 @@ import urllib
from bottle import FormsDict
import datetime
from urihandler import compose_querystring
+import urllib.parse
+from doreah.settings import get_settings
# returns the proper column(s) for an artist or track
@@ -16,7 +18,9 @@ def entity_column(element,counting=[],image=None):
# track
# html += "
"
+ html += trackSearchLink(element)
+ html += html_links(element["artists"]) + " – " + html_link(element) + "
"
else:
# artist
html += "
" + html_link(element)
@@ -74,6 +78,33 @@ def trackLink(track):
#artists,title = track["artists"],track["title"]
#return "" + title + ""
+def trackSearchLink(track):
+ searchProvider = get_settings("TRACK_SEARCH_PROVIDER")
+ if searchProvider is None: return ""
+
+ link = "🎵"
+ return link
+
#def scrobblesTrackLink(artists,title,timekeys,amount=None,pixels=None):
def scrobblesTrackLink(track,timekeys,amount=None,percent=None):
artists,title = track["artists"],track["title"]
diff --git a/htmlmodules.py b/htmlmodules.py
index 5f27f07..6182af2 100644
--- a/htmlmodules.py
+++ b/htmlmodules.py
@@ -18,29 +18,38 @@ import math
# result.append(element.get("image"))
-# artist=None,track=None,since=None,to=None,within=None,associated=False,max_=None,pictures=False
-def module_scrobblelist(max_=None,pictures=False,shortTimeDesc=False,earlystop=False,**kwargs):
+#max_ indicates that no pagination should occur (because this is not the primary module)
+def module_scrobblelist(page=0,perpage=100,max_=None,pictures=False,shortTimeDesc=False,earlystop=False,**kwargs):
kwargs_filter = pickKeys(kwargs,"artist","track","associated")
kwargs_time = pickKeys(kwargs,"timerange","since","to","within")
+ if max_ is not None: perpage,page=max_,0
+
+ firstindex = page * perpage
+ lastindex = firstindex + perpage
# if earlystop, we don't care about the actual amount and only request as many from the db
# without, we request everything and filter on site
- maxkey = {"max_":max_} if earlystop else {}
+ maxkey = {"max_":lastindex} if earlystop else {}
scrobbles = database.get_scrobbles(**kwargs_time,**kwargs_filter,**maxkey)
if pictures:
- scrobbleswithpictures = scrobbles if max_ is None else scrobbles[:max_]
+ scrobbleswithpictures = [""] * firstindex + scrobbles[firstindex:lastindex]
#scrobbleimages = [e.get("image") for e in getTracksInfo(scrobbleswithpictures)] #will still work with scrobble objects as they are a technically a subset of track objects
#scrobbleimages = ["/image?title=" + urllib.parse.quote(t["title"]) + "&" + "&".join(["artist=" + urllib.parse.quote(a) for a in t["artists"]]) for t in scrobbleswithpictures]
scrobbleimages = [getTrackImage(t["artists"],t["title"],fast=True) for t in scrobbleswithpictures]
+ pages = math.ceil(len(scrobbles) / perpage)
+
representative = scrobbles[0] if len(scrobbles) is not 0 else None
# build list
i = 0
html = "
"
@@ -48,32 +57,38 @@ def module_scrobblelist(max_=None,pictures=False,shortTimeDesc=False,earlystop=F
img = scrobbleimages[i]
else: img = None
html += entity_column(s,image=img)
- # Alternative way: Do it in one cell
- #html += "
"
html += ""
i += 1
- if max_ is not None and i>=max_:
+ if i>=lastindex:
break
html += "
"
+ if max_ is None: html += module_paginate(page=page,pages=pages,perpage=perpage,**kwargs)
+
return (html,len(scrobbles),representative)
-def module_pulse(max_=None,**kwargs):
+def module_pulse(page=0,perpage=100,max_=None,**kwargs):
from doreah.timing import clock, clockp
kwargs_filter = pickKeys(kwargs,"artist","track","associated")
kwargs_time = pickKeys(kwargs,"since","to","within","timerange","step","stepn","trail")
+ if max_ is not None: perpage,page=max_,0
+
+ firstindex = page * perpage
+ lastindex = firstindex + perpage
+
ranges = database.get_pulse(**kwargs_time,**kwargs_filter)
+ pages = math.ceil(len(ranges) / perpage)
- if max_ is not None: ranges = ranges[:max_]
+ ranges = ranges[firstindex:lastindex]
# if time range not explicitly specified, only show from first appearance
# if "since" not in kwargs:
@@ -94,19 +109,27 @@ def module_pulse(max_=None,**kwargs):
html += ""
html += ""
+ if max_ is None: html += module_paginate(page=page,pages=pages,perpage=perpage,**kwargs)
return html
-def module_performance(max_=None,**kwargs):
+def module_performance(page=0,perpage=100,max_=None,**kwargs):
kwargs_filter = pickKeys(kwargs,"artist","track")
kwargs_time = pickKeys(kwargs,"since","to","within","timerange","step","stepn","trail")
+ if max_ is not None: perpage,page=max_,0
+
+ firstindex = page * perpage
+ lastindex = firstindex + perpage
+
ranges = database.get_performance(**kwargs_time,**kwargs_filter)
- if max_ is not None: ranges = ranges[:max_]
+ pages = math.ceil(len(ranges) / perpage)
+
+ ranges = ranges[firstindex:lastindex]
# if time range not explicitly specified, only show from first appearance
# if "since" not in kwargs:
@@ -130,18 +153,26 @@ def module_performance(max_=None,**kwargs):
html += ""
html += ""
+ if max_ is None: html += module_paginate(page=page,pages=pages,perpage=perpage,**kwargs)
return html
-def module_trackcharts(max_=None,**kwargs):
+def module_trackcharts(page=0,perpage=100,max_=None,**kwargs):
kwargs_filter = pickKeys(kwargs,"artist","associated")
kwargs_time = pickKeys(kwargs,"timerange","since","to","within")
+ if max_ is not None: perpage,page=max_,0
+
+ firstindex = page * perpage
+ lastindex = firstindex + perpage
+
tracks = database.get_charts_tracks(**kwargs_filter,**kwargs_time)
+ pages = math.ceil(len(tracks) / perpage)
+
# last time range (to compare)
try:
trackslast = database.get_charts_tracks(**kwargs_filter,timerange=kwargs_time["timerange"].next(step=-1))
@@ -167,13 +198,16 @@ def module_trackcharts(max_=None,**kwargs):
i = 0
html = "
"
for e in tracks:
+ if imax_:
+ if i>lastindex:
break
html += "
"
# rank
- if i == 1 or e["scrobbles"] < prev["scrobbles"]:
- html += "
#" + str(i) + "
"
+ if i == firstindex+1 or e["scrobbles"] < prev["scrobbles"]:
+ html += "
#" + str(e["rank"]) + "
"
else:
html += "
"
# rank change
@@ -196,16 +230,26 @@ def module_trackcharts(max_=None,**kwargs):
prev = e
html += "
"
+ if max_ is None: html += module_paginate(page=page,pages=pages,perpage=perpage,**kwargs)
+
return (html,representative)
-def module_artistcharts(max_=None,**kwargs):
+def module_artistcharts(page=0,perpage=100,max_=None,**kwargs):
kwargs_filter = pickKeys(kwargs,"associated") #not used right now
kwargs_time = pickKeys(kwargs,"timerange","since","to","within")
+ if max_ is not None: perpage,page=max_,0
+
+ firstindex = page * perpage
+ lastindex = firstindex + perpage
+
artists = database.get_charts_artists(**kwargs_filter,**kwargs_time)
+ pages = math.ceil(len(artists) / perpage)
+
+
# last time range (to compare)
try:
#from malojatime import _get_next
@@ -231,13 +275,16 @@ def module_artistcharts(max_=None,**kwargs):
i = 0
html = "
"
for e in artists:
+ if imax_:
+ if i>lastindex:
break
html += "
"
# rank
- if i == 1 or e["scrobbles"] < prev["scrobbles"]:
- html += "
#" + str(i) + "
"
+ if i == firstindex+1 or e["scrobbles"] < prev["scrobbles"]:
+ html += "
"
+ if max_ is None: html += module_paginate(page=page,pages=pages,perpage=perpage,**kwargs)
+
return (html, representative)
@@ -308,7 +357,7 @@ def module_toptracks(pictures=True,**kwargs):
if pictures:
html += "
"
+
+ if page > 1:
+ html += "" + "1" + ""
+ html += " | "
+
+ if page > 2:
+ html += " ... | "
+
+ if page > 0:
+ html += "" + str(page) + ""
+ html += " « "
+
+ html += "" + str(page+1) + ""
+
+ if page < pages-1:
+ html += " » "
+ html += "" + str(page+2) + ""
+
+ if page < pages-3:
+ html += " | ... "
+
+ if page < pages-2:
+ html += " | "
+ html += "" + str(pages) + ""
+
+
+ html += "
"
+
+ return html
+
+
+
# THIS FUNCTION USES THE ORIGINAL URI KEYS!!!
def module_filterselection(keys,time=True,delimit=False):
- filterkeys, timekeys, delimitkeys, extrakeys = uri_to_internal(keys)
+ from malojatime import today, thisweek, thismonth, thisyear, alltime
+ filterkeys, timekeys, delimitkeys, extrakeys = uri_to_internal(keys)
# drop keys that are not relevant so they don't clutter the URI
if not time: timekeys = {}
if not delimit: delimitkeys = {}
+ if "page" in extrakeys: del extrakeys["page"]
+ internalkeys = {**filterkeys,**timekeys,**delimitkeys,**extrakeys}
html = ""
- if time:
- # all other keys that will not be changed by clicking another filter
- #keystr = "?" + compose_querystring(keys,exclude=["since","to","in"])
- unchangedkeys = internal_to_uri({**filterkeys,**delimitkeys,**extrakeys})
+ if time:
# wonky selector for precise date range
@@ -513,139 +600,78 @@ def module_filterselection(keys,time=True,delimit=False):
# html += "to "
# html += ""
- from malojatime import today, thisweek, thismonth, thisyear
-
- ### temp!!! this will not allow weekly rank changes
- # weekday = ((now.isoweekday()) % 7)
- # weekbegin = now - datetime.timedelta(days=weekday)
- # weekend = weekbegin + datetime.timedelta(days=6)
- # weekbegin = [weekbegin.year,weekbegin.month,weekbegin.day]
- # weekend = [weekend.year,weekend.month,weekend.day]
- # weekbeginstr = "/".join((str(num) for num in weekbegin))
- # weekendstr = "/".join((str(num) for num in weekend))
-
-
-
# relative to current range
-
html += "
"
- # if timekeys.get("timerange").next(-1) is not None:
- # html += "«"
- # if timekeys.get("timerange").next(-1) is not None or timekeys.get("timerange").next(1) is not None:
- # html += " " + timekeys.get("timerange").desc() + " "
- # if timekeys.get("timerange").next(1) is not None:
- # html += "»"
- if timekeys.get("timerange").next(-1) is not None:
- prevrange = timekeys.get("timerange").next(-1)
- html += "" + prevrange.desc() + ""
+ thisrange = timekeys.get("timerange")
+ prevrange = thisrange.next(-1)
+ nextrange = thisrange.next(1)
+
+ if prevrange is not None:
+ link = compose_querystring(internal_to_uri({**internalkeys,"timerange":prevrange}))
+ html += "" + prevrange.desc() + ""
html += " « "
- if timekeys.get("timerange").next(-1) is not None or timekeys.get("timerange").next(1) is not None:
- html += "" + timekeys.get("timerange").desc() + ""
- if timekeys.get("timerange").next(1) is not None:
+ if prevrange is not None or nextrange is not None:
+ html += "" + thisrange.desc() + ""
+ if nextrange is not None:
html += " » "
- nextrange = timekeys.get("timerange").next(1)
- html += "" + nextrange.desc() + ""
-
- html += "
"
-
-
- # predefined ranges
-
- html += "
"
- if timekeys.get("timerange") == today():
- html += "Today"
- else:
- html += "Today"
- html += " | "
-
- if timekeys.get("timerange") == thisweek():
- html += "This Week"
- else:
- html += "This Week"
- html += " | "
-
- if timekeys.get("timerange") == thismonth():
- html += "This Month"
- else:
- html += "This Month"
- html += " | "
-
- if timekeys.get("timerange") == thisyear():
- html += "This Year"
- else:
- html += "This Year"
- html += " | "
-
- if timekeys.get("timerange") is None or timekeys.get("timerange").unlimited():
- html += "All Time"
- else:
- html += "All Time"
-
- html += "
"
-
- if delimit:
-
- #keystr = "?" + compose_querystring(keys,exclude=["step","stepn"])
- unchangedkeys = internal_to_uri({**filterkeys,**timekeys,**extrakeys})
-
- # only for this element (delimit selector consists of more than one)
- unchangedkeys_sub = internal_to_uri({k:delimitkeys[k] for k in delimitkeys if k not in ["step","stepn"]})
-
- html += "
"
- if delimitkeys.get("step") == "day" and delimitkeys.get("stepn") == 1:
- html += "Daily"
- else:
- html += "Daily"
- html += " | "
-
- if delimitkeys.get("step") == "week" and delimitkeys.get("stepn") == 1:
- html += "Weekly"
- else:
- html += "Weekly"
- html += " | "
-
- if delimitkeys.get("step") == "month" and delimitkeys.get("stepn") == 1:
- html += "Monthly"
- else:
- html += "Monthly"
- html += " | "
-
- if delimitkeys.get("step") == "year" and delimitkeys.get("stepn") == 1:
- html += "Yearly"
- else:
- html += "Yearly"
+ link = compose_querystring(internal_to_uri({**internalkeys,"timerange":nextrange}))
+ html += "" + nextrange.desc() + ""
html += "
"
- unchangedkeys_sub = internal_to_uri({k:delimitkeys[k] for k in delimitkeys if k != "trail"})
- html += "
"
- if delimitkeys.get("trail") == 1:
- html += "Standard"
- else:
- html += "Standard"
- html += " | "
+ categories = [
+ {
+ "active":time,
+ "options":{
+ "Today":{"timerange":today()},
+ "This Week":{"timerange":thisweek()},
+ "This Month":{"timerange":thismonth()},
+ "This Year":{"timerange":thisyear()},
+ "All Time":{"timerange":alltime()}
+ }
+ },
+ {
+ "active":delimit,
+ "options":{
+ "Daily":{"step":"day","stepn":1},
+ "Weekly":{"step":"week","stepn":1},
+ "Fortnightly":{"step":"week","stepn":2},
+ "Monthly":{"step":"month","stepn":1},
+ "Quarterly":{"step":"month","stepn":3},
+ "Yearly":{"step":"year","stepn":1}
+ }
+ },
+ {
+ "active":delimit,
+ "options":{
+ "Standard":{"trail":1},
+ "Trailing":{"trail":2},
+ "Long Trailing":{"trail":3},
+ "Inert":{"trail":10},
+ "Cumulative":{"trail":math.inf}
+ }
+ }
- if delimitkeys.get("trail") == 2:
- html += "Trailing"
- else:
- html += "Trailing"
- html += " | "
+ ]
- if delimitkeys.get("trail") == 3:
- html += "Long Trailing"
- else:
- html += "Long Trailing"
- html += " | "
+ for c in categories:
- if delimitkeys.get("trail") == math.inf:
- html += "Cumulative"
- else:
- html += "Cumulative"
+ if c["active"]:
- html += "
"
+ optionlist = []
+ for option in c["options"]:
+ values = c["options"][option]
+ link = "?" + compose_querystring(internal_to_uri({**internalkeys,**values}))
+
+ if all(internalkeys.get(k) == values[k] for k in values):
+ optionlist.append("" + option + "")
+ else:
+ optionlist.append("" + option + "")
+
+ html += "
" + " | ".join(optionlist) + "
"
return html
diff --git a/maloja b/maloja
index 5fc9eab..8019e20 100755
--- a/maloja
+++ b/maloja
@@ -183,12 +183,21 @@ def getInstance():
except:
return None
+def getInstanceSupervisor():
+ try:
+ output = subprocess.check_output(["pidof","maloja_supervisor"])
+ pid = int(output)
+ return pid
+ except:
+ return None
+
def start():
if install():
if gotodir():
setup()
p = subprocess.Popen(["python3","server.py"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
+ p = subprocess.Popen(["python3","supervisor.py"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
print(green("Maloja started!") + " PID: " + str(p.pid))
from doreah import settings
@@ -221,8 +230,12 @@ def restart():
return wasrunning
def stop():
+ pid_sv = getInstanceSupervisor()
+ if pid_sv is not None:
+ os.kill(pid_sv,signal.SIGTERM)
+
pid = getInstance()
- if pid == None:
+ if pid is None:
print("Server is not running")
return False
else:
@@ -283,8 +296,13 @@ def update():
print("Done!")
os.chmod("./maloja",os.stat("./maloja").st_mode | stat.S_IXUSR)
+ os.chmod("./update_requirements.sh",os.stat("./update_requirements.sh").st_mode | stat.S_IXUSR)
- print("Make sure to update required modules! (" + yellow("pip3 install -r requirements.txt --upgrade --no-cache-dir") + ")")
+ try:
+ returnval = os.system("./update_requirements.sh")
+ assert returnval == 0
+ except:
+ print("Make sure to update required modules! (" + yellow("./update_requirements.sh") + ")")
if stop(): start() #stop returns whether it was running before, in which case we restart it
diff --git a/malojatime.py b/malojatime.py
index d789b8c..7c8fd81 100644
--- a/malojatime.py
+++ b/malojatime.py
@@ -391,6 +391,7 @@ def time_fix(t):
if isinstance(t,MRangeDescriptor): return t
if isinstance(t, str):
+ if t in ["alltime"]: return None
tod = datetime.datetime.utcnow()
months = ["january","february","march","april","may","june","july","august","september","october","november","december"]
weekdays = ["sunday","monday","tuesday","wednesday","thursday","friday","saturday"]
@@ -545,9 +546,8 @@ def time_stamps(since=None,to=None,within=None,range=None):
def delimit_desc(step="month",stepn=1,trail=1):
txt = ""
- if stepn is not 1: txt += _num(stepn) + "-"
+ if stepn is not 1: txt += str(stepn) + "-"
txt += {"year":"Yearly","month":"Monthly","week":"Weekly","day":"Daily"}[step.lower()]
- #if trail is not 1: txt += " " + _num(trail) + "-Trailing"
if trail is math.inf: txt += " Cumulative"
elif trail is not 1: txt += " Trailing" #we don't need all the info in the title
@@ -587,10 +587,11 @@ def ranges(since=None,to=None,within=None,timerange=None,step="month",stepn=1,tr
d_start = d_start.next(stepn-1) #last part of first included range
i = 0
current_end = d_start
+ current_start = current_end.next((stepn*trail-1)*-1)
#ranges = []
- while current_end.first_stamp() <= lastincluded and (max_ is None or i < max_):
+ while current_end.first_stamp() < lastincluded and (max_ is None or i < max_):
+
- current_start = current_end.next((stepn*trail-1)*-1)
if current_start == current_end:
yield current_start
#ranges.append(current_start)
@@ -598,6 +599,7 @@ def ranges(since=None,to=None,within=None,timerange=None,step="month",stepn=1,tr
yield MRange(current_start,current_end)
#ranges.append(MRange(current_start,current_end))
current_end = current_end.next(stepn)
+ current_start = current_end.next((stepn*trail-1)*-1)
i += 1
@@ -619,6 +621,8 @@ def thismonth():
def thisyear():
tod = datetime.datetime.utcnow()
return MTime(tod.year)
+def alltime():
+ return MRange(None,None)
#def _get_start_of(timestamp,unit):
# date = datetime.datetime.utcfromtimestamp(timestamp)
diff --git a/packages/maloja.deb b/packages/maloja.deb
index b8ccf44..efd5cea 100644
Binary files a/packages/maloja.deb and b/packages/maloja.deb differ
diff --git a/requirements.txt b/requirements.txt
index a17d235..b5aa9c9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,7 @@
bottle>=0.12.16
waitress>=1.3
-doreah>=0.9.1
+doreah>=1.1.7
nimrodel>=0.4.9
setproctitle>=1.1.10
wand>=0.5.4
+lesscpy>=0.13
diff --git a/rules/predefined/krateng_artistsingroups.tsv b/rules/predefined/krateng_artistsingroups.tsv
index 9079410..39204cc 100644
--- a/rules/predefined/krateng_artistsingroups.tsv
+++ b/rules/predefined/krateng_artistsingroups.tsv
@@ -10,4 +10,5 @@ countas S Club 7 Tina Barrett
countas RenoakRhythm Approaching Nirvana
countas Shirley Manson Garbage
countas Lewis Brindley The Yogscast
-countas Sips The Yogscast
+countas Sips The Yogscast
+countas Sjin The Yogscast
diff --git a/rules/predefined/krateng_kpopgirlgroups.tsv b/rules/predefined/krateng_kpopgirlgroups.tsv
index e07c332..e4d3c1d 100644
--- a/rules/predefined/krateng_kpopgirlgroups.tsv
+++ b/rules/predefined/krateng_kpopgirlgroups.tsv
@@ -16,6 +16,7 @@ replacetitle Cause I'm God Girl Roll Deep
countas 2Yoon 4Minute
replaceartist 4minute 4Minute
replacetitle 미쳐 Crazy
+addartists HyunA Change Jun Hyung
# BLACKPINK
countas Jennie BLACKPINK
@@ -47,8 +48,8 @@ replacetitle 나비 (Butterfly) Butterfly
replacetitle Déjà vu Déjà Vu
replacetitle 라차타 (LA chA TA) LA chA TA
replacetitle 여우 같은 내 친구 (No More) No More
-replacetitle 시그널 (Signal) Signal
-replacetitle 미행 (그림자 : Shadow) Shadow
+replacetitle 시그널 (Signal) Signal
+replacetitle 미행 (그림자 : Shadow) Shadow
# Stellar
replaceartist STELLAR Stellar
@@ -58,6 +59,7 @@ replacetitle 찔려 Sting Sting
# Red Velvet
countas Seulgi Red Velvet
+countas Joy Red Velvet
replacetitle 러시안 룰렛 Russian Roulette Russian Roulette
replacetitle 피카부 Peek-a-Boo Peek-A-Boo
replacetitle 빨간 맛 Red Flavor Red Flavor
@@ -81,6 +83,7 @@ replacetitle CHEER UP Cheer Up
replacetitle OOH-AHH하게 Like OOH-AHH Like Ooh-Ahh
replacetitle OOH-AHH Like Ooh-Ahh
replacetitle LIKEY Likey
+countas Tzuyu TWICE
# AOA
countas AOA Black AOA
@@ -145,5 +148,8 @@ replaceartist A pink Apink
# Chungha & IOI
replaceartist CHUNG HA Chungha
replaceartist 청하 CHUNGHA Chungha
-#countas Chungha I.O.I # Chungha is too famous
+#countas Chungha I.O.I # Chungha is too famous
replacetitle 벌써 12시 Gotta Go Gotta Go
+
+# ITZY
+replacetitle 달라달라 (DALLA DALLA) Dalla Dalla
diff --git a/rules/rules.info b/rules/rules.info
index b28e612..c016d07 100644
--- a/rules/rules.info
+++ b/rules/rules.info
@@ -16,6 +16,10 @@ The first column defines the type of the rule:
This will not change the separation in the database and all effects of this rule will disappear as soon as it is no longer active.
Second column is the artist
Third column the replacement artist / grouping label
+ addartists Defines a certain combination of artists and song title that should always have other artists added.
+ Second column is artists that need to be already present for this rule to apply
+ Third column is the song title
+ Fourth column is artists that should be added, separated by ␟
Rules in non-tsv files are ignored. '#' is used for comments. Additional columns are ignored. To have a '#' in a name, use '\num'
Comments are not supported in scrobble lists, but you probably never edit these manually anyway.
@@ -30,3 +34,4 @@ replacetitle 첫 사랑니 (Rum Pum Pum Pum) Rum Pum Pum Pum
replaceartist Dal Shabet Dal★Shabet
replaceartist Mr FijiWiji, AgNO3 Mr FijiWiji␟AgNO3 # one artist is replaced by two artists
countas Trouble Maker HyunA
+addartists HyunA Change Jun Hyung
diff --git a/scrobblers/chromium-generic/background.js b/scrobblers/chromium-generic/background.js
index 9806105..32d4571 100644
--- a/scrobblers/chromium-generic/background.js
+++ b/scrobblers/chromium-generic/background.js
@@ -28,6 +28,18 @@ pages = {
"https://open.spotify.com"
],
"script":"spotify.js"
+ },
+ "Bandcamp":{
+ "patterns":[
+ "bandcamp.com"
+ ],
+ "script":"bandcamp.js"
+ },
+ "Soundcloud":{
+ "patterns":[
+ "https://soundcloud.com"
+ ],
+ "script":"soundcloud.js"
}
}
@@ -51,7 +63,7 @@ function onTabUpdated(tabId, changeInfo, tab) {
patterns = pages[page]["patterns"];
//console.log("Page was managed by a " + page + " manager")
for (var i=0;i")
@webserver.route("/")
@webserver.route("/")
+@webserver.route("/")
@webserver.route("/")
@webserver.route("/")
@webserver.route("/")
@@ -132,7 +145,7 @@ def static(name):
@webserver.route("/")
def static_html(name):
- linkheaders = ["; rel=preload; as=style"]
+ linkheaders = ["; rel=preload; as=style"]
keys = remove_identical(FormsDict.decode(request.query))
# if a pyhp file exists, use this
@@ -206,6 +219,16 @@ def static_html(name):
return html
#return static_file("website/" + name + ".html",root="")
+
+# Shortlinks
+
+@webserver.get("/artist/<artist>")
+def redirect_artist(artist):
+ redirect("/artist?artist=" + artist)
+@webserver.get("/track/<artists:path>/<title>")
+def redirect_track(artists,title):
+ redirect("/track?title=" + title + "&" + "&".join("artist=" + artist for artist in artists.split("/")))
+
#set graceful shutdown
signal.signal(signal.SIGINT, graceful_exit)
signal.signal(signal.SIGTERM, graceful_exit)
@@ -215,8 +238,7 @@ setproctitle.setproctitle("Maloja")
## start database
database.start_db()
-#database.register_subroutes(webserver,"/api")
database.dbserver.mount(server=webserver)
log("Starting up Maloja server...")
-run(webserver, host='::', port=MAIN_PORT, server='waitress')
+run(webserver, host=HOST, port=MAIN_PORT, server='waitress')
diff --git a/settings/default.ini b/settings/default.ini
index b6661ab..d96ec0b 100644
--- a/settings/default.ini
+++ b/settings/default.ini
@@ -1,19 +1,30 @@
+# Do not change settings in this file
+# Instead, simply write an entry with the same name in your own settings.ini file
+# Category headers in [brackets] are only for organization and not necessary
+
[HTTP]
WEB_PORT = 42010
+HOST = "::" # You most likely want either :: for IPv6 or 0.0.0.0 for IPv4 here
[Third Party Services]
LASTFM_API_KEY = "ASK" # 'ASK' signifies that the user has not yet indicated to not use any key at all.
+LASTFM_API_SECRET = "ASK"
FANARTTV_API_KEY = "ASK"
SPOTIFY_API_ID = "ASK"
SPOTIFY_API_SECRET = "ASK"
CACHE_EXPIRE_NEGATIVE = 30 # after how many days negative results should be tried again
CACHE_EXPIRE_POSITIVE = 300 # after how many days positive results should be refreshed
+# Can be 'YouTube', 'YouTube Music', 'Google Play Music', 'Spotify', 'Tidal', 'SoundCloud', 'Deezer', 'Amazon Music'
+# Omit or set to None to disable
+TRACK_SEARCH_PROVIDER = None
+
[Database]
DB_CACHE_SIZE = 8192 # how many MB on disk each database cache should have available.
+INVALID_ARTISTS = ["[Unknown Artist]","Unknown Artist"]
[Local Images]
@@ -27,8 +38,11 @@ LOCAL_IMAGE_ROTATE = 3600 # when multiple images are present locally, how many s
DEFAULT_RANGE_CHARTS_ARTISTS = year
DEFAULT_RANGE_CHARTS_TRACKS = year
# same for pulse view
-# can be days, weeks, months, years
-DEFAULT_RANGE_PULSE = months
+# can be day, week, month, year
+DEFAULT_STEP_PULSE = month
+
+# display top tiles on artist and track chart pages
+CHARTS_DISPLAY_TILES = false
[Fluff]
@@ -36,6 +50,8 @@ DEFAULT_RANGE_PULSE = months
SCROBBLES_GOLD = 250
SCROBBLES_PLATINUM = 500
SCROBBLES_DIAMOND = 1000
+# name for comparisons
+NAME = "Generic Maloja User"
[Misc]
diff --git a/supervisor.py b/supervisor.py
new file mode 100644
index 0000000..824c2a2
--- /dev/null
+++ b/supervisor.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+import subprocess
+import time
+import setproctitle
+import signal
+from doreah.logging import log
+
+
+setproctitle.setproctitle("maloja_supervisor")
+
+
+while True:
+ time.sleep(60)
+
+ try:
+ output = subprocess.check_output(["pidof","Maloja"])
+ pid = int(output)
+ log("Maloja is running, PID " + str(pid),module="supervisor")
+ except:
+ log("Maloja is not running, restarting...",module="supervisor")
+ try:
+ p = subprocess.Popen(["python3","server.py"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
+		except Exception as e:
+ log("Error starting Maloja: " + str(e),module="supervisor")
diff --git a/update_requirements.sh b/update_requirements.sh
new file mode 100644
index 0000000..6da8554
--- /dev/null
+++ b/update_requirements.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+pip3 install -r requirements.txt --upgrade --no-cache-dir
diff --git a/urihandler.py b/urihandler.py
index ae08440..9ed7943 100644
--- a/urihandler.py
+++ b/urihandler.py
@@ -103,8 +103,12 @@ def uri_to_internal(keys,forceTrack=False,forceArtist=False):
#4
- resultkeys4 = {"max_":300}
- if "max" in keys: resultkeys4["max_"] = int(keys["max"])
+ resultkeys4 = {"page":0,"perpage":100}
+# if "max" in keys: resultkeys4["max_"] = int(keys["max"])
+ if "max" in keys: resultkeys4["page"],resultkeys4["perpage"] = 0, int(keys["max"])
+ #different max than the internal one! the user doesn't get to disable pagination
+ if "page" in keys: resultkeys4["page"] = int(keys["page"])
+ if "perpage" in keys: resultkeys4["perpage"] = int(keys["perpage"])
return resultkeys1, resultkeys2, resultkeys3, resultkeys4
@@ -146,8 +150,12 @@ def internal_to_uri(keys):
urikeys.append("trail",str(keys["trail"]))
# stuff
- if "max_" in keys:
- urikeys.append("max",str(keys["max_"]))
+ #if "max_" in keys:
+ # urikeys.append("max",str(keys["max_"]))
+ if "page" in keys:
+ urikeys.append("page",str(keys["page"]))
+ if "perpage" in keys:
+ urikeys.append("perpage",str(keys["perpage"]))
return urikeys
diff --git a/utilities.py b/utilities.py
index 2b1b7e5..861488d 100644
--- a/utilities.py
+++ b/utilities.py
@@ -99,14 +99,11 @@ def consistentRulestate(folder,checksums):
if (scrobblefile.endswith(".tsv")):
try:
- f = open(folder + "/" + scrobblefile + ".rulestate","r")
- if f.read() != checksums:
- return False
-
+ with open(folder + "/" + scrobblefile + ".rulestate","r") as f:
+ if f.read() != checksums:
+ return False
except:
return False
- finally:
- f.close()
return True
@@ -442,8 +439,12 @@ def update_medals():
from database import MEDALS, MEDALS_TRACKS, STAMPS, get_charts_artists, get_charts_tracks
- firstyear = datetime.datetime.utcfromtimestamp(STAMPS[0]).year
currentyear = datetime.datetime.utcnow().year
+ try:
+ firstyear = datetime.datetime.utcfromtimestamp(STAMPS[0]).year
+ except:
+ firstyear = currentyear
+
MEDALS.clear()
for year in range(firstyear,currentyear):
@@ -468,3 +469,23 @@ def update_medals():
elif t["rank"] == 2: MEDALS_TRACKS.setdefault(track,{}).setdefault("silver",[]).append(year)
elif t["rank"] == 3: MEDALS_TRACKS.setdefault(track,{}).setdefault("bronze",[]).append(year)
else: break
+
+@daily
+def update_weekly():
+
+ from database import WEEKLY_TOPTRACKS, WEEKLY_TOPARTISTS, get_charts_artists, get_charts_tracks
+ from malojatime import ranges, thisweek
+
+
+ WEEKLY_TOPARTISTS.clear()
+ WEEKLY_TOPTRACKS.clear()
+
+ for week in ranges(step="week"):
+ if week == thisweek(): break
+ for a in get_charts_artists(timerange=week):
+ artist = a["artist"]
+ if a["rank"] == 1: WEEKLY_TOPARTISTS[artist] = WEEKLY_TOPARTISTS.setdefault(artist,0) + 1
+
+ for t in get_charts_tracks(timerange=week):
+ track = (frozenset(t["track"]["artists"]),t["track"]["title"])
+ if t["rank"] == 1: WEEKLY_TOPTRACKS[track] = WEEKLY_TOPTRACKS.setdefault(track,0) + 1
diff --git a/website/artist.html b/website/artist.html
index 50caf83..d9f4a03 100644
--- a/website/artist.html
+++ b/website/artist.html
@@ -4,7 +4,8 @@
Maloja - KEY_ARTISTNAME
-
+
+
@@ -15,13 +16,13 @@