diff --git a/dev/releases/3.2.yml b/dev/releases/3.2.yml
index 2f1226d..4a6442c 100644
--- a/dev/releases/3.2.yml
+++ b/dev/releases/3.2.yml
@@ -6,3 +6,4 @@ minor_release_name: "Momo"
- "[Performance] Improved image rendering"
- "[Bugfix] Fixed configuration of time format"
- "[Bugfix] Fixed search on manual scrobble page"
+ - "[Bugfix] Disabled DB maintenance while not running main server"
diff --git a/maloja/__main__.py b/maloja/__main__.py
index 5e6d4de..4694a40 100644
--- a/maloja/__main__.py
+++ b/maloja/__main__.py
@@ -166,6 +166,7 @@ def main(*args,**kwargs):
"generate":generate.generate_scrobbles, # maloja generate 400
"export":tasks.export, # maloja export
"apidebug":apidebug.run, # maloja apidebug
+ "parsealbums":tasks.parse_albums, # maloja parsealbums
# aux
"info":print_info
}
diff --git a/maloja/apis/native_v1.py b/maloja/apis/native_v1.py
index f7e36a7..e7d2ff4 100644
--- a/maloja/apis/native_v1.py
+++ b/maloja/apis/native_v1.py
@@ -592,6 +592,7 @@ def search(**keys):
artists = database.db_search(query,type="ARTIST")
tracks = database.db_search(query,type="TRACK")
+ albums = database.db_search(query,type="ALBUM")
@@ -599,6 +600,7 @@ def search(**keys):
# also, shorter is better (because longer titles would be easier to further specify)
artists.sort(key=lambda x: ((0 if x.lower().startswith(query) else 1 if " " + query in x.lower() else 2),len(x)))
tracks.sort(key=lambda x: ((0 if x["title"].lower().startswith(query) else 1 if " " + query in x["title"].lower() else 2),len(x["title"])))
+ albums.sort(key=lambda x: ((0 if x["albumtitle"].lower().startswith(query) else 1 if " " + query in x["albumtitle"].lower() else 2),len(x["albumtitle"])))
# add links
artists_result = []
@@ -619,7 +621,17 @@ def search(**keys):
}
tracks_result.append(result)
- return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]}
+ albums_result = []
+ for al in albums:
+ result = {
+ 'album': al,
+ 'link': "/album?" + compose_querystring(internal_to_uri({"album":al})),
+ 'image': images.get_album_image(al)
+ }
+ if not result['album']['artists']: result['album']['displayArtist'] = malojaconfig["DEFAULT_ALBUM_ARTIST"]
+ albums_result.append(result)
+
+ return {"artists":artists_result[:max_],"tracks":tracks_result[:max_],"albums":albums_result[:max_]}
@api.post("newrule")
@@ -714,6 +726,16 @@ def edit_track(id,title):
"status":"success"
}
+@api.post("edit_album")
+@authenticated_function(api=True)
+@catch_exceptions
+def edit_album(id,albumtitle):
+ """Internal Use Only"""
+ result = database.edit_album(id,{'albumtitle':albumtitle})
+ return {
+ "status":"success"
+ }
+
@api.post("merge_tracks")
@authenticated_function(api=True)
@@ -735,6 +757,16 @@ def merge_artists(target_id,source_ids):
"status":"success"
}
+@api.post("merge_albums")
+@authenticated_function(api=True)
+@catch_exceptions
+def merge_albums(target_id,source_ids):
+ """Internal Use Only"""
+ result = database.merge_albums(target_id,source_ids)
+ return {
+ "status":"success"
+ }
+
@api.post("reparse_scrobble")
@authenticated_function(api=True)
@catch_exceptions
diff --git a/maloja/database/__init__.py b/maloja/database/__init__.py
index 7cb3c6b..9d437a1 100644
--- a/maloja/database/__init__.py
+++ b/maloja/database/__init__.py
@@ -45,6 +45,16 @@ dbstatus = {
}
+# we're running an auxiliary task that doesn't require all the random background
+# nonsense to be fired up
+# this is temporary
+# FIX YO DAMN ARCHITECTURE ALREADY
+AUX_MODE = False
+def set_aux_mode():
+ global AUX_MODE
+ AUX_MODE = True
+
+
def waitfordb(func):
def newfunc(*args,**kwargs):
@@ -153,7 +163,8 @@ def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
"origin":f"client:{client}" if client else "generic",
"extra":{
k:scrobbleinfo[k] for k in scrobbleinfo if k not in
- ['scrobble_time','track_artists','track_title','track_length','scrobble_duration','album_title','album_artists']
+ ['scrobble_time','track_artists','track_title','track_length','scrobble_duration']#,'album_title','album_artists']
+ # we still save album info in extra because the user might select majority album authority
},
"rawscrobble":rawscrobble
}
@@ -189,6 +200,16 @@ def edit_track(id,trackinfo):
return result
+@waitfordb
+def edit_album(id,albuminfo):
+ album = sqldb.get_album(id)
+ log(f"Renaming {album['albumtitle']} to {albuminfo['albumtitle']}")
+ result = sqldb.edit_album(id,albuminfo)
+ dbcache.invalidate_entity_cache()
+ dbcache.invalidate_caches()
+
+ return result
+
@waitfordb
def merge_artists(target_id,source_ids):
sources = [sqldb.get_artist(id) for id in source_ids]
@@ -211,6 +232,17 @@ def merge_tracks(target_id,source_ids):
return result
+@waitfordb
+def merge_albums(target_id,source_ids):
+ sources = [sqldb.get_album(id) for id in source_ids]
+ target = sqldb.get_album(target_id)
+ log(f"Merging {sources} into {target}")
+ result = sqldb.merge_albums(target_id,source_ids)
+ dbcache.invalidate_entity_cache()
+ dbcache.invalidate_caches()
+
+ return result
+
@@ -257,6 +289,21 @@ def get_artists(dbconn=None):
return sqldb.get_artists(dbconn=dbconn)
+def get_albums_artist_appears_on(dbconn=None,**keys):
+
+ artist_id = sqldb.get_artist_id(keys['artist'],dbconn=dbconn)
+
+ albums = sqldb.get_albums_artists_appear_on([artist_id],dbconn=dbconn).get(artist_id) or []
+ ownalbums = sqldb.get_albums_of_artists([artist_id],dbconn=dbconn).get(artist_id) or []
+
+ result = {
+ "own_albums":ownalbums,
+ "appears_on":[a for a in albums if a not in ownalbums]
+ }
+
+ return result
+
+
@waitfordb
def get_charts_artists(dbconn=None,**keys):
(since,to) = keys.get('timerange').timestamps()
@@ -388,15 +435,27 @@ def artist_info(dbconn=None,**keys):
artist_id = sqldb.get_artist_id(artist,dbconn=dbconn)
artist = sqldb.get_artist(artist_id,dbconn=dbconn)
alltimecharts = get_charts_artists(timerange=alltime(),dbconn=dbconn)
- scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
#we cant take the scrobble number from the charts because that includes all countas scrobbles
- try:
- c = [e for e in alltimecharts if e["artist"] == artist][0]
+ scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
+ albums = sqldb.get_albums_of_artists(set([artist_id]),dbconn=dbconn)
+ isalbumartist = len(albums.get(artist_id,[]))>0
+
+
+ # base info for everyone
+ result = {
+ "artist":artist,
+ "scrobbles":scrobbles,
+ "id":artist_id,
+ "isalbumartist":isalbumartist
+ }
+
+ # check if credited to someone else
+ parent_artists = sqldb.get_credited_artists(artist)
+ if len(parent_artists) == 0:
+ c = [e for e in alltimecharts if e["artist"] == artist]
+ position = c[0]["rank"] if len(c) > 0 else None
others = sqldb.get_associated_artists(artist,dbconn=dbconn)
- position = c["rank"]
- return {
- "artist":artist,
- "scrobbles":scrobbles,
+ result.update({
"position":position,
"associated":others,
"medals":{
@@ -404,23 +463,19 @@ def artist_info(dbconn=None,**keys):
"silver": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['silver']],
"bronze": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['bronze']],
},
- "topweeks":len([e for e in cached.weekly_topartists if e == artist_id]),
- "id":artist_id
- }
- except Exception:
- # if the artist isnt in the charts, they are not being credited and we
- # need to show information about the credited one
- replaceartist = sqldb.get_credited_artists(artist)[0]
+ "topweeks":len([e for e in cached.weekly_topartists if e == artist_id])
+ })
+
+ else:
+ replaceartist = parent_artists[0]
c = [e for e in alltimecharts if e["artist"] == replaceartist][0]
position = c["rank"]
- return {
- "artist":artist,
+ result.update({
"replace":replaceartist,
- "scrobbles":scrobbles,
- "position":position,
- "id":artist_id
- }
+ "position":position
+ })
+ return result
@@ -570,4 +625,7 @@ def db_search(query,type=None):
results = sqldb.search_artist(query)
if type=="TRACK":
results = sqldb.search_track(query)
+ if type=="ALBUM":
+ results = sqldb.search_album(query)
+
return results
diff --git a/maloja/database/dbcache.py b/maloja/database/dbcache.py
index 9b092a9..582a9e7 100644
--- a/maloja/database/dbcache.py
+++ b/maloja/database/dbcache.py
@@ -22,8 +22,10 @@ if malojaconfig['USE_GLOBAL_CACHE']:
@runhourly
def maintenance():
- print_stats()
- trim_cache()
+ from . import AUX_MODE
+ if not AUX_MODE:
+ print_stats()
+ trim_cache()
def print_stats():
for name,c in (('Cache',cache),('Entity Cache',entitycache)):
diff --git a/maloja/database/sqldb.py b/maloja/database/sqldb.py
index 1c0e0c5..0497456 100644
--- a/maloja/database/sqldb.py
+++ b/maloja/database/sqldb.py
@@ -354,6 +354,11 @@ def add_track_to_album(track_id,album_id,replace=False,dbconn=None):
result = dbconn.execute(op)
return True
+@connection_provider
+def add_tracks_to_albums(track_to_album_id_dict,replace=False,dbconn=None):
+
+ for track_id in track_to_album_id_dict:
+ add_track_to_album(track_id,track_to_album_id_dict[track_id],dbconn=dbconn)
### these will 'get' the ID of an entity, creating it if necessary
@@ -368,9 +373,7 @@ def get_track_id(trackdict,create_new=True,update_album=False,dbconn=None):
- op = DB['tracks'].select(
-# DB['tracks'].c.id
- ).where(
+ op = DB['tracks'].select().where(
DB['tracks'].c.title_normalized==ntitle
)
result = dbconn.execute(op).all()
@@ -418,9 +421,7 @@ def get_artist_id(artistname,create_new=True,dbconn=None):
nname = normalize_name(artistname)
#print("looking for",nname)
- op = DB['artists'].select(
-# DB['artists'].c.id
- ).where(
+ op = DB['artists'].select().where(
DB['artists'].c.name_normalized==nname
)
result = dbconn.execute(op).all()
@@ -558,6 +559,28 @@ def edit_track(id,trackupdatedict,dbconn=None):
return True
+@connection_provider
+def edit_album(id,albumupdatedict,dbconn=None):
+
+ album = get_album(id,dbconn=dbconn)
+ changedalbum = {**album,**albumupdatedict}
+
+ dbentry = album_dict_to_db(albumupdatedict,dbconn=dbconn)
+ dbentry = {k:v for k,v in dbentry.items() if v}
+
+ existing_album_id = get_album_id(changedalbum,create_new=False,dbconn=dbconn)
+ if existing_album_id not in (None,id):
+ raise exc.TrackExists(changedalbum)
+
+ op = DB['albums'].update().where(
+ DB['albums'].c.id==id
+ ).values(
+ **dbentry
+ )
+ result = dbconn.execute(op)
+
+ return True
+
### Merge
@@ -603,6 +626,28 @@ def merge_artists(target_id,source_ids,dbconn=None):
result = dbconn.execute(op)
+
+ # same for albums
+ op = DB['albumartists'].select().where(
+ DB['albumartists'].c.artist_id.in_(source_ids + [target_id])
+ )
+ result = dbconn.execute(op)
+
+ album_ids = set(row.album_id for row in result)
+
+ op = DB['albumartists'].delete().where(
+ DB['albumartists'].c.artist_id.in_(source_ids + [target_id]),
+ )
+ result = dbconn.execute(op)
+
+ op = DB['albumartists'].insert().values([
+ {'album_id':album_id,'artist_id':target_id}
+ for album_id in album_ids
+ ])
+
+ result = dbconn.execute(op)
+
+
# tracks_artists = {}
# for row in result:
# tracks_artists.setdefault(row.track_id,[]).append(row.artist_id)
@@ -618,13 +663,27 @@ def merge_artists(target_id,source_ids,dbconn=None):
# )
# result = dbconn.execute(op)
- # this could have created duplicate tracks
+ # this could have created duplicate tracks and albums
merge_duplicate_tracks(artist_id=target_id,dbconn=dbconn)
+ merge_duplicate_albums(artist_id=target_id,dbconn=dbconn)
clean_db(dbconn=dbconn)
return True
+@connection_provider
+def merge_albums(target_id,source_ids,dbconn=None):
+
+ op = DB['tracks'].update().where(
+ DB['tracks'].c.album_id.in_(source_ids)
+ ).values(
+ album_id=target_id
+ )
+ result = dbconn.execute(op)
+ clean_db(dbconn=dbconn)
+
+ return True
+
### Functions that get rows according to parameters
@@ -1010,7 +1069,18 @@ def count_scrobbles_by_track_of_album(since,to,album,dbconn=None):
@cached_wrapper_individual
@connection_provider
def get_artists_of_tracks(track_ids,dbconn=None):
- op = sql.join(DB['trackartists'],DB['artists']).select().where(
+
+ jointable = sql.join(
+ DB['trackartists'],
+ DB['artists']
+ )
+
+ # we need to select to avoid multiple 'id' columns that will then
+ # be misinterpreted by the row-dict converter
+ op = sql.select(
+ DB['artists'],
+ DB['trackartists'].c.track_id
+ ).select_from(jointable).where(
DB['trackartists'].c.track_id.in_(track_ids)
)
result = dbconn.execute(op).all()
@@ -1023,7 +1093,18 @@ def get_artists_of_tracks(track_ids,dbconn=None):
@cached_wrapper_individual
@connection_provider
def get_artists_of_albums(album_ids,dbconn=None):
- op = sql.join(DB['albumartists'],DB['artists']).select().where(
+
+ jointable = sql.join(
+ DB['albumartists'],
+ DB['artists']
+ )
+
+ # we need to select to avoid multiple 'id' columns that will then
+ # be misinterpreted by the row-dict converter
+ op = sql.select(
+ DB['artists'],
+ DB['albumartists'].c.album_id
+ ).select_from(jointable).where(
DB['albumartists'].c.album_id.in_(album_ids)
)
result = dbconn.execute(op).all()
@@ -1033,6 +1114,65 @@ def get_artists_of_albums(album_ids,dbconn=None):
artists.setdefault(row.album_id,[]).append(artist_db_to_dict(row,dbconn=dbconn))
return artists
+@cached_wrapper_individual
+@connection_provider
+def get_albums_of_artists(artist_ids,dbconn=None):
+
+ jointable = sql.join(
+ DB['albumartists'],
+ DB['albums']
+ )
+
+ # we need to select to avoid multiple 'id' columns that will then
+ # be misinterpreted by the row-dict converter
+ op = sql.select(
+ DB["albums"],
+ DB['albumartists'].c.artist_id
+ ).select_from(jointable).where(
+ DB['albumartists'].c.artist_id.in_(artist_ids)
+ )
+ result = dbconn.execute(op).all()
+
+ albums = {}
+ for row in result:
+ albums.setdefault(row.artist_id,[]).append(album_db_to_dict(row,dbconn=dbconn))
+ return albums
+
+@cached_wrapper_individual
+@connection_provider
+# this includes the artists' own albums!
+def get_albums_artists_appear_on(artist_ids,dbconn=None):
+
+ jointable1 = sql.join(
+ DB["trackartists"],
+ DB["tracks"]
+ )
+ jointable2 = sql.join(
+ jointable1,
+ DB["albums"]
+ )
+
+ # we need to select to avoid multiple 'id' columns that will then
+ # be misinterpreted by the row-dict converter
+ op = sql.select(
+ DB["albums"],
+ DB["trackartists"].c.artist_id
+ ).select_from(jointable2).where(
+ DB['trackartists'].c.artist_id.in_(artist_ids)
+ )
+ result = dbconn.execute(op).all()
+
+ albums = {}
+ # avoid duplicates from multiple tracks in album by same artist
+ already_done = {}
+ for row in result:
+ if row.id in already_done.setdefault(row.artist_id,[]):
+ pass
+ else:
+ albums.setdefault(row.artist_id,[]).append(album_db_to_dict(row,dbconn=dbconn))
+ already_done[row.artist_id].append(row.id)
+ return albums
+
@cached_wrapper_individual
@connection_provider
@@ -1097,7 +1237,11 @@ def get_associated_artists(*artists,dbconn=None):
DB['associated_artists'].c.source_artist == DB['artists'].c.id
)
- op = jointable.select().where(
+ # we need to select to avoid multiple 'id' columns that will then
+ # be misinterpreted by the row-dict converter
+ op = sql.select(
+ DB['artists']
+ ).select_from(jointable).where(
DB['associated_artists'].c.target_artist.in_(artist_ids)
)
result = dbconn.execute(op).all()
@@ -1116,8 +1260,11 @@ def get_credited_artists(*artists,dbconn=None):
DB['associated_artists'].c.target_artist == DB['artists'].c.id
)
-
- op = jointable.select().where(
+ # we need to select to avoid multiple 'id' columns that will then
+ # be misinterpreted by the row-dict converter
+ op = sql.select(
+ DB['artists']
+ ).select_from(jointable).where(
DB['associated_artists'].c.source_artist.in_(artist_ids)
)
result = dbconn.execute(op).all()
@@ -1192,6 +1339,15 @@ def search_track(searchterm,dbconn=None):
return [get_track(row.id,dbconn=dbconn) for row in result]
+@cached_wrapper
+@connection_provider
+def search_album(searchterm,dbconn=None):
+ op = DB['albums'].select().where(
+ DB['albums'].c.albtitle_normalized.ilike(normalize_name(f"%{searchterm}%"))
+ )
+ result = dbconn.execute(op).all()
+
+ return [get_album(row.id,dbconn=dbconn) for row in result]
##### MAINTENANCE
@@ -1199,26 +1355,41 @@ def search_track(searchterm,dbconn=None):
@connection_provider
def clean_db(dbconn=None):
- log(f"Database Cleanup...")
+ from . import AUX_MODE
- to_delete = [
- # tracks with no scrobbles (trackartist entries first)
- "from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))",
- "from tracks where id not in (select track_id from scrobbles)",
- # artists with no tracks
- "from artists where id not in (select artist_id from trackartists) and id not in (select target_artist from associated_artists)",
- # tracks with no artists (scrobbles first)
- "from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))",
- "from tracks where id not in (select track_id from trackartists)"
- ]
+ if not AUX_MODE:
+ with SCROBBLE_LOCK:
+ log(f"Database Cleanup...")
- for d in to_delete:
- selection = dbconn.execute(sql.text(f"select * {d}"))
- for row in selection.all():
- log(f"Deleting {row}")
- deletion = dbconn.execute(sql.text(f"delete {d}"))
+ to_delete = [
+ # tracks with no scrobbles (trackartist entries first)
+ "from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))",
+ "from tracks where id not in (select track_id from scrobbles)",
+ # artists with no tracks AND no albums
+ "from artists where id not in (select artist_id from trackartists) \
+ and id not in (select target_artist from associated_artists) \
+ and id not in (select artist_id from albumartists)",
+ # tracks with no artists (scrobbles first)
+ "from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))",
+ "from tracks where id not in (select track_id from trackartists)",
+ # albums with no tracks (albumartist entries first)
+ "from albumartists where album_id in (select id from albums where id not in (select album_id from tracks where album_id is not null))",
+ "from albums where id not in (select album_id from tracks where album_id is not null)",
+ # albumartist entries that are missing a reference
+ "from albumartists where album_id not in (select album_id from tracks where album_id is not null)",
+ "from albumartists where artist_id not in (select id from artists)",
+ # trackartist entries that are missing a reference
+ "from trackartists where track_id not in (select id from tracks)",
+ "from trackartists where artist_id not in (select id from artists)"
+ ]
- log("Database Cleanup complete!")
+ for d in to_delete:
+ selection = dbconn.execute(sql.text(f"select * {d}"))
+ for row in selection.all():
+ log(f"Deleting {row}")
+ deletion = dbconn.execute(sql.text(f"delete {d}"))
+
+ log("Database Cleanup complete!")
@@ -1283,12 +1454,145 @@ def merge_duplicate_tracks(artist_id,dbconn=None):
+@connection_provider
+def merge_duplicate_albums(artist_id,dbconn=None):
+ rows = dbconn.execute(
+ DB['albumartists'].select().where(
+ DB['albumartists'].c.artist_id == artist_id
+ )
+ )
+ affected_albums = [r.album_id for r in rows]
+
+ album_artists = {}
+ rows = dbconn.execute(
+ DB['albumartists'].select().where(
+ DB['albumartists'].c.album_id.in_(affected_albums)
+ )
+ )
+
+
+ for row in rows:
+ album_artists.setdefault(row.album_id,[]).append(row.artist_id)
+
+ artist_combos = {}
+ for album_id in album_artists:
+ artist_combos.setdefault(tuple(sorted(album_artists[album_id])),[]).append(album_id)
+
+ for c in artist_combos:
+ if len(artist_combos[c]) > 1:
+ album_identifiers = {}
+ for album_id in artist_combos[c]:
+ album_identifiers.setdefault(normalize_name(get_album(album_id)['albumtitle']),[]).append(album_id)
+ for album in album_identifiers:
+ if len(album_identifiers[album]) > 1:
+ target,*src = album_identifiers[album]
+ merge_albums(target,src,dbconn=dbconn)
+@connection_provider
+def guess_albums(track_ids=None,replace=False,dbconn=None):
+
+ MIN_NUM_TO_ASSIGN = 1
+
+ jointable = sql.join(
+ DB['scrobbles'],
+ DB['tracks']
+ )
+
+ # get all scrobbles of the respective tracks that have some info
+ conditions = [
+ DB['scrobbles'].c.extra.isnot(None)
+ ]
+ if track_ids is not None:
+ # only do these tracks
+ conditions.append(
+ DB['scrobbles'].c.track_id.in_(track_ids)
+ )
+ if not replace:
+ # only tracks that have no album yet
+ conditions.append(
+ DB['tracks'].c.album_id.is_(None)
+ )
+
+ op = sql.select(
+ DB['scrobbles']
+ ).select_from(jointable).where(
+ *conditions
+ )
+
+ result = dbconn.execute(op).all()
+
+ # for each track, count what album info appears how often
+ possible_albums = {}
+ for row in result:
+ extrainfo = json.loads(row.extra)
+ albumtitle = extrainfo.get("album_name") or extrainfo.get("album_title")
+ albumartists = extrainfo.get("album_artists",[])
+ if not albumtitle:
+ # try the raw scrobble
+ extrainfo = json.loads(row.rawscrobble)
+ albumtitle = extrainfo.get("album_name") or extrainfo.get("album_title")
+ albumartists = albumartists or extrainfo.get("album_artists",[])
+ if albumtitle:
+ hashable_albuminfo = tuple([*albumartists,albumtitle])
+ possible_albums.setdefault(row.track_id,{}).setdefault(hashable_albuminfo,0)
+ possible_albums[row.track_id][hashable_albuminfo] += 1
+
+ res = {}
+ for track_id in possible_albums:
+ options = possible_albums[track_id]
+ if len(options)>0:
+ # pick the one with most occurences
+ mostnum = max(options[albuminfo] for albuminfo in options)
+ if mostnum >= MIN_NUM_TO_ASSIGN:
+ bestpick = [albuminfo for albuminfo in options if options[albuminfo] == mostnum][0]
+ #print("best pick",track_id,bestpick)
+ *artists,title = bestpick
+ res[track_id] = {"assigned":{
+ "artists":artists,
+ "albumtitle": title
+ }}
+ if len(artists) == 0:
+ # for albums without artist, assume track artist
+ res[track_id]["guess_artists"] = True
+ else:
+ res[track_id] = {"assigned":False,"reason":"Not enough data"}
+
+ else:
+ res[track_id] = {"assigned":False,"reason":"No scrobbles with album information found"}
+
+
+
+ missing_artists = [track_id for track_id in res if res[track_id].get("guess_artists")]
+
+ #we're pointlessly getting the albumartist names here even though the IDs would be enough
+ #but it's better for function separation I guess
+ jointable = sql.join(
+ DB['trackartists'],
+ DB['artists']
+ )
+ op = sql.select(
+ DB['trackartists'].c.track_id,
+ DB['artists']
+ ).select_from(jointable).where(
+ DB['trackartists'].c.track_id.in_(missing_artists)
+ )
+ result = dbconn.execute(op).all()
+
+ for row in result:
+ res[row.track_id]["assigned"]["artists"].append(row.name)
+ for track_id in res:
+ if res[track_id].get("guess_artists"):
+ del res[track_id]["guess_artists"]
+
+ return res
+
+
+
##### AUX FUNCS
diff --git a/maloja/pkg_global/conf.py b/maloja/pkg_global/conf.py
index 7224970..f519dc5 100644
--- a/maloja/pkg_global/conf.py
+++ b/maloja/pkg_global/conf.py
@@ -191,6 +191,7 @@ malojaconfig = Configuration(
"default_range_charts_tracks":(tp.Choice({'alltime':'All Time','year':'Year','month':"Month",'week':'Week'}), "Default Range Track Charts", "year"),
"default_step_pulse":(tp.Choice({'year':'Year','month':"Month",'week':'Week','day':'Day'}), "Default Pulse Step", "month"),
"charts_display_tiles":(tp.Boolean(), "Display Chart Tiles", False),
+ "album_showcase":(tp.Boolean(), "Display Album Showcase", True, "Display a graphical album showcase for artist overview pages instead of a chart list"),
"display_art_icons":(tp.Boolean(), "Display Album/Artist Icons", True),
"default_album_artist":(tp.String(), "Default Albumartist", "Various Artists"),
"discourage_cpu_heavy_stats":(tp.Boolean(), "Discourage CPU-heavy stats", False, "Prevent visitors from mindlessly clicking on CPU-heavy options. Does not actually disable them for malicious actors!"),
diff --git a/maloja/proccontrol/tasks/__init__.py b/maloja/proccontrol/tasks/__init__.py
index cf2cd85..90ce051 100644
--- a/maloja/proccontrol/tasks/__init__.py
+++ b/maloja/proccontrol/tasks/__init__.py
@@ -1,3 +1,4 @@
from .import_scrobbles import import_scrobbles
from .backup import backup
from .export import export # read that line out loud
+from .parse_albums import parse_albums
diff --git a/maloja/proccontrol/tasks/import_scrobbles.py b/maloja/proccontrol/tasks/import_scrobbles.py
index b5bf620..376591d 100644
--- a/maloja/proccontrol/tasks/import_scrobbles.py
+++ b/maloja/proccontrol/tasks/import_scrobbles.py
@@ -21,6 +21,9 @@ outputs = {
def import_scrobbles(inputf):
+ from ...database import set_aux_mode
+ set_aux_mode()
+
from ...database.sqldb import add_scrobbles
result = {
@@ -180,7 +183,7 @@ def parse_spotify_full(inputf):
if len(inputfiles) == 0:
print("No files found!")
return
-
+
if inputfiles != [inputf]:
print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
if not ask("Import " + ", ".join(col['yellow'](i) for i in inputfiles) + "?",default=True):
diff --git a/maloja/proccontrol/tasks/parse_albums.py b/maloja/proccontrol/tasks/parse_albums.py
new file mode 100644
index 0000000..125df0c
--- /dev/null
+++ b/maloja/proccontrol/tasks/parse_albums.py
@@ -0,0 +1,23 @@
+
+
+
+def parse_albums(replace=False):
+
+ from ...database import set_aux_mode
+ set_aux_mode()
+
+ from ...database.sqldb import guess_albums, get_album_id, add_track_to_album
+
+ print("Parsing album information...")
+ result = guess_albums(replace=replace)
+
+ result = {track_id:result[track_id] for track_id in result if result[track_id]["assigned"]}
+ print("Adding",len(result),"tracks to albums...")
+ i = 0
+ for track_id in result:
+ album_id = get_album_id(result[track_id]["assigned"])
+ add_track_to_album(track_id,album_id)
+ i += 1
+ if (i % 100) == 0:
+ print(i,"of",len(result))
+ print("Done!")
diff --git a/maloja/web/jinja/abstracts/base.jinja b/maloja/web/jinja/abstracts/base.jinja
index 3ca1619..8b3e8a6 100644
--- a/maloja/web/jinja/abstracts/base.jinja
+++ b/maloja/web/jinja/abstracts/base.jinja
@@ -80,7 +80,11 @@
Tracks
-
  |
+ + + + |
+ {{ links.links(album.artists) }} + {{ links.link(album) }} + |
Appears on |
+ + + + |
+ {{ links.links(album.artists) }} + {{ links.link(album) }} + |