Merge branch 'feature-albums' into next_minor_version

This commit is contained in:
krateng 2023-03-30 17:33:29 +02:00
commit d07cbed7fa
17 changed files with 659 additions and 64 deletions

View File

@ -6,3 +6,4 @@ minor_release_name: "Momo"
- "[Performance] Improved image rendering"
- "[Bugfix] Fixed configuration of time format"
- "[Bugfix] Fixed search on manual scrobble page"
- "[Bugfix] Disabled DB maintenance while not running main server"

View File

@ -166,6 +166,7 @@ def main(*args,**kwargs):
"generate":generate.generate_scrobbles, # maloja generate 400
"export":tasks.export, # maloja export
"apidebug":apidebug.run, # maloja apidebug
"parsealbums":tasks.parse_albums, # maloja parsealbums
# aux
"info":print_info
}

View File

@ -592,6 +592,7 @@ def search(**keys):
artists = database.db_search(query,type="ARTIST")
tracks = database.db_search(query,type="TRACK")
albums = database.db_search(query,type="ALBUM")
@ -599,6 +600,7 @@ def search(**keys):
# also, shorter is better (because longer titles would be easier to further specify)
artists.sort(key=lambda x: ((0 if x.lower().startswith(query) else 1 if " " + query in x.lower() else 2),len(x)))
tracks.sort(key=lambda x: ((0 if x["title"].lower().startswith(query) else 1 if " " + query in x["title"].lower() else 2),len(x["title"])))
albums.sort(key=lambda x: ((0 if x["albumtitle"].lower().startswith(query) else 1 if " " + query in x["albumtitle"].lower() else 2),len(x["albumtitle"])))
# add links
artists_result = []
@ -619,7 +621,17 @@ def search(**keys):
}
tracks_result.append(result)
return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]}
albums_result = []
for al in albums:
result = {
'album': al,
'link': "/album?" + compose_querystring(internal_to_uri({"album":al})),
'image': images.get_album_image(al)
}
if not result['album']['artists']: result['album']['displayArtist'] = malojaconfig["DEFAULT_ALBUM_ARTIST"]
albums_result.append(result)
return {"artists":artists_result[:max_],"tracks":tracks_result[:max_],"albums":albums_result[:max_]}
@api.post("newrule")
@ -714,6 +726,16 @@ def edit_track(id,title):
"status":"success"
}
@api.post("edit_album")
@authenticated_function(api=True)
@catch_exceptions
def edit_album(id,albumtitle):
	"""Internal Use Only

	Renames the album with the given id to albumtitle.
	"""
	# delegate the actual rename to the database layer
	database.edit_album(id,{'albumtitle':albumtitle})
	return {"status":"success"}
@api.post("merge_tracks")
@authenticated_function(api=True)
@ -735,6 +757,16 @@ def merge_artists(target_id,source_ids):
"status":"success"
}
@api.post("merge_albums")
@authenticated_function(api=True)
@catch_exceptions
def merge_albums(target_id,source_ids):
	"""Internal Use Only

	Merges the albums identified by source_ids into the album target_id.
	"""
	# BUG FIX: this handler was previously (copy-paste) named merge_artists,
	# which redefined and shadowed the real merge_artists endpoint handler
	# defined just above it in this module.
	result = database.merge_albums(target_id,source_ids)
	return {
		"status":"success"
	}
@api.post("reparse_scrobble")
@authenticated_function(api=True)
@catch_exceptions

View File

@ -45,6 +45,16 @@ dbstatus = {
}
# we're running an auxiliary task that doesn't require all the random background
# nonsense to be fired up
# this is temporary
# FIX YO DAMN ARCHITECTURE ALREADY
AUX_MODE = False

def set_aux_mode():
	# Flag that this process is an auxiliary one-off task (e.g. an import or
	# album parsing run) so that background maintenance / cleanup is skipped.
	global AUX_MODE
	AUX_MODE = True
def waitfordb(func):
def newfunc(*args,**kwargs):
@ -153,7 +163,8 @@ def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
"origin":f"client:{client}" if client else "generic",
"extra":{
k:scrobbleinfo[k] for k in scrobbleinfo if k not in
['scrobble_time','track_artists','track_title','track_length','scrobble_duration','album_title','album_artists']
['scrobble_time','track_artists','track_title','track_length','scrobble_duration']#,'album_title','album_artists']
# we still save album info in extra because the user might select majority album authority
},
"rawscrobble":rawscrobble
}
@ -189,6 +200,16 @@ def edit_track(id,trackinfo):
return result
@waitfordb
def edit_album(id,albuminfo):
	"""Applies albuminfo to the album with the given id and flushes caches
	that might still reference the old name."""
	album = sqldb.get_album(id)
	log(f"Renaming {album['albumtitle']} to {albuminfo['albumtitle']}")
	result = sqldb.edit_album(id,albuminfo)
	# both the entity cache and the result cache may hold stale data now
	dbcache.invalidate_entity_cache()
	dbcache.invalidate_caches()
	return result
@waitfordb
def merge_artists(target_id,source_ids):
sources = [sqldb.get_artist(id) for id in source_ids]
@ -211,6 +232,17 @@ def merge_tracks(target_id,source_ids):
return result
@waitfordb
def merge_albums(target_id,source_ids):
	"""Merges the source albums into the target album, then flushes caches."""
	# avoid shadowing the builtin 'id' in the loop variable
	sources = [sqldb.get_album(src) for src in source_ids]
	target = sqldb.get_album(target_id)
	log(f"Merging {sources} into {target}")
	result = sqldb.merge_albums(target_id,source_ids)
	# both the entity cache and the result cache may hold stale data now
	dbcache.invalidate_entity_cache()
	dbcache.invalidate_caches()
	return result
@ -257,6 +289,21 @@ def get_artists(dbconn=None):
return sqldb.get_artists(dbconn=dbconn)
def get_albums_artist_appears_on(dbconn=None,**keys):
	"""Returns the albums an artist released themselves ('own_albums') and
	the albums they merely appear on ('appears_on')."""
	artist_id = sqldb.get_artist_id(keys['artist'],dbconn=dbconn)
	all_albums = sqldb.get_albums_artists_appear_on([artist_id],dbconn=dbconn).get(artist_id) or []
	own = sqldb.get_albums_of_artists([artist_id],dbconn=dbconn).get(artist_id) or []
	# an artist's own albums are a subset of all albums they appear on
	appearances = [album for album in all_albums if album not in own]
	return {
		"own_albums": own,
		"appears_on": appearances
	}
@waitfordb
def get_charts_artists(dbconn=None,**keys):
(since,to) = keys.get('timerange').timestamps()
@ -388,15 +435,27 @@ def artist_info(dbconn=None,**keys):
artist_id = sqldb.get_artist_id(artist,dbconn=dbconn)
artist = sqldb.get_artist(artist_id,dbconn=dbconn)
alltimecharts = get_charts_artists(timerange=alltime(),dbconn=dbconn)
scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
#we cant take the scrobble number from the charts because that includes all countas scrobbles
try:
c = [e for e in alltimecharts if e["artist"] == artist][0]
scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
albums = sqldb.get_albums_of_artists(set([artist_id]),dbconn=dbconn)
isalbumartist = len(albums.get(artist_id,[]))>0
# base info for everyone
result = {
"artist":artist,
"scrobbles":scrobbles,
"id":artist_id,
"isalbumartist":isalbumartist
}
# check if credited to someone else
parent_artists = sqldb.get_credited_artists(artist)
if len(parent_artists) == 0:
c = [e for e in alltimecharts if e["artist"] == artist]
position = c[0]["rank"] if len(c) > 0 else None
others = sqldb.get_associated_artists(artist,dbconn=dbconn)
position = c["rank"]
return {
"artist":artist,
"scrobbles":scrobbles,
result.update({
"position":position,
"associated":others,
"medals":{
@ -404,23 +463,19 @@ def artist_info(dbconn=None,**keys):
"silver": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['silver']],
"bronze": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['bronze']],
},
"topweeks":len([e for e in cached.weekly_topartists if e == artist_id]),
"id":artist_id
}
except Exception:
# if the artist isnt in the charts, they are not being credited and we
# need to show information about the credited one
replaceartist = sqldb.get_credited_artists(artist)[0]
"topweeks":len([e for e in cached.weekly_topartists if e == artist_id])
})
else:
replaceartist = parent_artists[0]
c = [e for e in alltimecharts if e["artist"] == replaceartist][0]
position = c["rank"]
return {
"artist":artist,
result.update({
"replace":replaceartist,
"scrobbles":scrobbles,
"position":position,
"id":artist_id
}
"position":position
})
return result
@ -570,4 +625,7 @@ def db_search(query,type=None):
results = sqldb.search_artist(query)
if type=="TRACK":
results = sqldb.search_track(query)
if type=="ALBUM":
results = sqldb.search_album(query)
return results

View File

@ -22,8 +22,10 @@ if malojaconfig['USE_GLOBAL_CACHE']:
@runhourly
def maintenance():
print_stats()
trim_cache()
from . import AUX_MODE
if not AUX_MODE:
print_stats()
trim_cache()
def print_stats():
for name,c in (('Cache',cache),('Entity Cache',entitycache)):

View File

@ -354,6 +354,11 @@ def add_track_to_album(track_id,album_id,replace=False,dbconn=None):
result = dbconn.execute(op)
return True
@connection_provider
def add_tracks_to_albums(track_to_album_id_dict,replace=False,dbconn=None):
	"""Batch-assigns tracks to albums.

	track_to_album_id_dict maps track_id -> album_id.
	replace: overwrite an existing album assignment (forwarded to add_track_to_album).
	"""
	# BUG FIX: the replace flag was accepted but never forwarded, so existing
	# album assignments could never be overwritten through this helper.
	for track_id,album_id in track_to_album_id_dict.items():
		add_track_to_album(track_id,album_id,replace=replace,dbconn=dbconn)
### these will 'get' the ID of an entity, creating it if necessary
@ -368,9 +373,7 @@ def get_track_id(trackdict,create_new=True,update_album=False,dbconn=None):
op = DB['tracks'].select(
# DB['tracks'].c.id
).where(
op = DB['tracks'].select().where(
DB['tracks'].c.title_normalized==ntitle
)
result = dbconn.execute(op).all()
@ -418,9 +421,7 @@ def get_artist_id(artistname,create_new=True,dbconn=None):
nname = normalize_name(artistname)
#print("looking for",nname)
op = DB['artists'].select(
# DB['artists'].c.id
).where(
op = DB['artists'].select().where(
DB['artists'].c.name_normalized==nname
)
result = dbconn.execute(op).all()
@ -558,6 +559,28 @@ def edit_track(id,trackupdatedict,dbconn=None):
return True
@connection_provider
def edit_album(id,albumupdatedict,dbconn=None):
	# Applies the fields in albumupdatedict to the album row with this id.
	# Raises if the edited album would collide with another existing album.
	album = get_album(id,dbconn=dbconn)
	changedalbum = {**album,**albumupdatedict}

	# convert to db column format; drop falsy values so we don't blank out columns
	dbentry = album_dict_to_db(albumupdatedict,dbconn=dbconn)
	dbentry = {k:v for k,v in dbentry.items() if v}

	# check whether the edited album would now duplicate another album
	existing_album_id = get_album_id(changedalbum,create_new=False,dbconn=dbconn)
	if existing_album_id not in (None,id):
		# NOTE(review): raises TrackExists for an album collision — presumably
		# copied from edit_track; a dedicated AlbumExists exception would be
		# clearer, but callers may catch TrackExists. TODO confirm before changing.
		raise exc.TrackExists(changedalbum)

	op = DB['albums'].update().where(
		DB['albums'].c.id==id
	).values(
		**dbentry
	)
	result = dbconn.execute(op)

	return True
### Merge
@ -603,6 +626,28 @@ def merge_artists(target_id,source_ids,dbconn=None):
result = dbconn.execute(op)
# same for albums
op = DB['albumartists'].select().where(
DB['albumartists'].c.artist_id.in_(source_ids + [target_id])
)
result = dbconn.execute(op)
album_ids = set(row.album_id for row in result)
op = DB['albumartists'].delete().where(
DB['albumartists'].c.artist_id.in_(source_ids + [target_id]),
)
result = dbconn.execute(op)
op = DB['albumartists'].insert().values([
{'album_id':album_id,'artist_id':target_id}
for album_id in album_ids
])
result = dbconn.execute(op)
# tracks_artists = {}
# for row in result:
# tracks_artists.setdefault(row.track_id,[]).append(row.artist_id)
@ -618,13 +663,27 @@ def merge_artists(target_id,source_ids,dbconn=None):
# )
# result = dbconn.execute(op)
# this could have created duplicate tracks
# this could have created duplicate tracks and albums
merge_duplicate_tracks(artist_id=target_id,dbconn=dbconn)
merge_duplicate_albums(artist_id=target_id,dbconn=dbconn)
clean_db(dbconn=dbconn)
return True
@connection_provider
def merge_albums(target_id,source_ids,dbconn=None):
	"""Repoints all tracks of the source albums to the target album,
	then removes the now-orphaned source albums via cleanup."""
	# move every track that currently belongs to one of the source albums
	repoint = DB['tracks'].update().where(
		DB['tracks'].c.album_id.in_(source_ids)
	).values(album_id=target_id)
	dbconn.execute(repoint)
	# the source albums have no tracks left and are deleted by the cleanup
	clean_db(dbconn=dbconn)
	return True
### Functions that get rows according to parameters
@ -1010,7 +1069,18 @@ def count_scrobbles_by_track_of_album(since,to,album,dbconn=None):
@cached_wrapper_individual
@connection_provider
def get_artists_of_tracks(track_ids,dbconn=None):
op = sql.join(DB['trackartists'],DB['artists']).select().where(
jointable = sql.join(
DB['trackartists'],
DB['artists']
)
# we need to select to avoid multiple 'id' columns that will then
# be misinterpreted by the row-dict converter
op = sql.select(
DB['artists'],
DB['trackartists'].c.track_id
).select_from(jointable).where(
DB['trackartists'].c.track_id.in_(track_ids)
)
result = dbconn.execute(op).all()
@ -1023,7 +1093,18 @@ def get_artists_of_tracks(track_ids,dbconn=None):
@cached_wrapper_individual
@connection_provider
def get_artists_of_albums(album_ids,dbconn=None):
op = sql.join(DB['albumartists'],DB['artists']).select().where(
jointable = sql.join(
DB['albumartists'],
DB['artists']
)
# we need to select to avoid multiple 'id' columns that will then
# be misinterpreted by the row-dict converter
op = sql.select(
DB['artists'],
DB['albumartists'].c.album_id
).select_from(jointable).where(
DB['albumartists'].c.album_id.in_(album_ids)
)
result = dbconn.execute(op).all()
@ -1033,6 +1114,65 @@ def get_artists_of_albums(album_ids,dbconn=None):
artists.setdefault(row.album_id,[]).append(artist_db_to_dict(row,dbconn=dbconn))
return artists
@cached_wrapper_individual
@connection_provider
def get_albums_of_artists(artist_ids,dbconn=None):
	"""Maps each given artist id to the list of albums credited to that artist."""
	joined = sql.join(
		DB['albumartists'],
		DB['albums']
	)
	# select explicit columns to avoid two ambiguous 'id' columns that would
	# be misinterpreted by the row-dict converter
	op = sql.select(
		DB["albums"],
		DB['albumartists'].c.artist_id
	).select_from(joined).where(
		DB['albumartists'].c.artist_id.in_(artist_ids)
	)
	albums_by_artist = {}
	for row in dbconn.execute(op).all():
		albums_by_artist.setdefault(row.artist_id,[]).append(album_db_to_dict(row,dbconn=dbconn))
	return albums_by_artist
@cached_wrapper_individual
@connection_provider
# this includes the artists' own albums!
def get_albums_artists_appear_on(artist_ids,dbconn=None):
	"""Maps each given artist id to every album containing at least one of
	their tracks (including their own albums)."""
	tracks_with_artists = sql.join(
		DB["trackartists"],
		DB["tracks"]
	)
	full_join = sql.join(
		tracks_with_artists,
		DB["albums"]
	)
	# select explicit columns to avoid multiple ambiguous 'id' columns that
	# would be misinterpreted by the row-dict converter
	op = sql.select(
		DB["albums"],
		DB["trackartists"].c.artist_id
	).select_from(full_join).where(
		DB['trackartists'].c.artist_id.in_(artist_ids)
	)
	albums = {}
	# an artist may have several tracks on the same album — count each album once
	seen = {}
	for row in dbconn.execute(op).all():
		if row.id not in seen.setdefault(row.artist_id,[]):
			albums.setdefault(row.artist_id,[]).append(album_db_to_dict(row,dbconn=dbconn))
			seen[row.artist_id].append(row.id)
	return albums
@cached_wrapper_individual
@connection_provider
@ -1097,7 +1237,11 @@ def get_associated_artists(*artists,dbconn=None):
DB['associated_artists'].c.source_artist == DB['artists'].c.id
)
op = jointable.select().where(
# we need to select to avoid multiple 'id' columns that will then
# be misinterpreted by the row-dict converter
op = sql.select(
DB['artists']
).select_from(jointable).where(
DB['associated_artists'].c.target_artist.in_(artist_ids)
)
result = dbconn.execute(op).all()
@ -1116,8 +1260,11 @@ def get_credited_artists(*artists,dbconn=None):
DB['associated_artists'].c.target_artist == DB['artists'].c.id
)
op = jointable.select().where(
# we need to select to avoid multiple 'id' columns that will then
# be misinterpreted by the row-dict converter
op = sql.select(
DB['artists']
).select_from(jointable).where(
DB['associated_artists'].c.source_artist.in_(artist_ids)
)
result = dbconn.execute(op).all()
@ -1192,6 +1339,15 @@ def search_track(searchterm,dbconn=None):
return [get_track(row.id,dbconn=dbconn) for row in result]
@cached_wrapper
@connection_provider
def search_album(searchterm,dbconn=None):
	"""Case-insensitive substring search over normalized album titles."""
	query = DB['albums'].select().where(
		DB['albums'].c.albtitle_normalized.ilike(normalize_name(f"%{searchterm}%"))
	)
	rows = dbconn.execute(query).all()
	return [get_album(row.id,dbconn=dbconn) for row in rows]
##### MAINTENANCE
@ -1199,26 +1355,41 @@ def search_track(searchterm,dbconn=None):
@connection_provider
def clean_db(dbconn=None):
log(f"Database Cleanup...")
from . import AUX_MODE
to_delete = [
# tracks with no scrobbles (trackartist entries first)
"from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))",
"from tracks where id not in (select track_id from scrobbles)",
# artists with no tracks
"from artists where id not in (select artist_id from trackartists) and id not in (select target_artist from associated_artists)",
# tracks with no artists (scrobbles first)
"from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))",
"from tracks where id not in (select track_id from trackartists)"
]
if not AUX_MODE:
with SCROBBLE_LOCK:
log(f"Database Cleanup...")
for d in to_delete:
selection = dbconn.execute(sql.text(f"select * {d}"))
for row in selection.all():
log(f"Deleting {row}")
deletion = dbconn.execute(sql.text(f"delete {d}"))
to_delete = [
# tracks with no scrobbles (trackartist entries first)
"from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))",
"from tracks where id not in (select track_id from scrobbles)",
# artists with no tracks AND no albums
"from artists where id not in (select artist_id from trackartists) \
and id not in (select target_artist from associated_artists) \
and id not in (select artist_id from albumartists)",
# tracks with no artists (scrobbles first)
"from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))",
"from tracks where id not in (select track_id from trackartists)",
# albums with no tracks (albumartist entries first)
"from albumartists where album_id in (select id from albums where id not in (select album_id from tracks where album_id is not null))",
"from albums where id not in (select album_id from tracks where album_id is not null)",
# albumartist entries that are missing a reference
"from albumartists where album_id not in (select album_id from tracks where album_id is not null)",
"from albumartists where artist_id not in (select id from artists)",
# trackartist entries that are missing a reference
"from trackartists where track_id not in (select id from tracks)",
"from trackartists where artist_id not in (select id from artists)"
]
log("Database Cleanup complete!")
for d in to_delete:
selection = dbconn.execute(sql.text(f"select * {d}"))
for row in selection.all():
log(f"Deleting {row}")
deletion = dbconn.execute(sql.text(f"delete {d}"))
log("Database Cleanup complete!")
@ -1283,12 +1454,145 @@ def merge_duplicate_tracks(artist_id,dbconn=None):
@connection_provider
def merge_duplicate_albums(artist_id,dbconn=None):
	# Finds albums of this artist that share the exact same artist combination
	# and the same normalized title, and merges each such duplicate group into
	# a single album.
	rows = dbconn.execute(
		DB['albumartists'].select().where(
			DB['albumartists'].c.artist_id == artist_id
		)
	)
	affected_albums = [r.album_id for r in rows]

	# map album id -> full list of its artist ids
	album_artists = {}
	rows = dbconn.execute(
		DB['albumartists'].select().where(
			DB['albumartists'].c.album_id.in_(affected_albums)
		)
	)
	for row in rows:
		album_artists.setdefault(row.album_id,[]).append(row.artist_id)

	# group albums by their (sorted, hashable) artist combination
	artist_combos = {}
	for album_id in album_artists:
		artist_combos.setdefault(tuple(sorted(album_artists[album_id])),[]).append(album_id)

	for c in artist_combos:
		if len(artist_combos[c]) > 1:
			# same artist set — now group by normalized title to find true duplicates
			album_identifiers = {}
			for album_id in artist_combos[c]:
				album_identifiers.setdefault(normalize_name(get_album(album_id)['albumtitle']),[]).append(album_id)
			for album in album_identifiers:
				if len(album_identifiers[album]) > 1:
					# first id in the group becomes the merge target
					target,*src = album_identifiers[album]
					merge_albums(target,src,dbconn=dbconn)
@connection_provider
def guess_albums(track_ids=None,replace=False,dbconn=None):
	# For each track, guesses its album from the album info stored in the
	# scrobbles' 'extra' / raw data, picking the most frequent candidate.
	# Returns a dict track_id -> {"assigned": {...} or False, ...}.
	# track_ids: restrict to these tracks (None = all tracks)
	# replace: also consider tracks that already have an album assigned
	MIN_NUM_TO_ASSIGN = 1

	jointable = sql.join(
		DB['scrobbles'],
		DB['tracks']
	)

	# get all scrobbles of the respective tracks that have some info
	conditions = [
		DB['scrobbles'].c.extra.isnot(None)
	]
	if track_ids is not None:
		# only do these tracks
		conditions.append(
			DB['scrobbles'].c.track_id.in_(track_ids)
		)
	if not replace:
		# only tracks that have no album yet
		conditions.append(
			DB['tracks'].c.album_id.is_(None)
		)

	op = sql.select(
		DB['scrobbles']
	).select_from(jointable).where(
		*conditions
	)

	result = dbconn.execute(op).all()

	# for each track, count what album info appears how often
	possible_albums = {}
	for row in result:
		extrainfo = json.loads(row.extra)
		albumtitle = extrainfo.get("album_name") or extrainfo.get("album_title")
		albumartists = extrainfo.get("album_artists",[])
		if not albumtitle:
			# try the raw scrobble
			extrainfo = json.loads(row.rawscrobble)
			albumtitle = extrainfo.get("album_name") or extrainfo.get("album_title")
			albumartists = albumartists or extrainfo.get("album_artists",[])
		if albumtitle:
			# tuple so the album info can be used as a dict key
			hashable_albuminfo = tuple([*albumartists,albumtitle])
			possible_albums.setdefault(row.track_id,{}).setdefault(hashable_albuminfo,0)
			possible_albums[row.track_id][hashable_albuminfo] += 1

	res = {}
	for track_id in possible_albums:
		options = possible_albums[track_id]
		if len(options)>0:
			# pick the one with most occurences
			mostnum = max(options[albuminfo] for albuminfo in options)
			if mostnum >= MIN_NUM_TO_ASSIGN:
				bestpick = [albuminfo for albuminfo in options if options[albuminfo] == mostnum][0]
				#print("best pick",track_id,bestpick)
				*artists,title = bestpick
				res[track_id] = {"assigned":{
					"artists":artists,
					"albumtitle": title
				}}
				if len(artists) == 0:
					# for albums without artist, assume track artist
					res[track_id]["guess_artists"] = True
			else:
				# NOTE(review): with MIN_NUM_TO_ASSIGN = 1 this branch looks
				# unreachable, since every counted option occurs at least once
				res[track_id] = {"assigned":False,"reason":"Not enough data"}
		else:
			res[track_id] = {"assigned":False,"reason":"No scrobbles with album information found"}

	# tracks flagged above need their artists filled in from the track itself
	missing_artists = [track_id for track_id in res if res[track_id].get("guess_artists")]

	#we're pointlessly getting the albumartist names here even though the IDs would be enough
	#but it's better for function separation I guess
	jointable = sql.join(
		DB['trackartists'],
		DB['artists']
	)
	op = sql.select(
		DB['trackartists'].c.track_id,
		DB['artists']
	).select_from(jointable).where(
		DB['trackartists'].c.track_id.in_(missing_artists)
	)
	result = dbconn.execute(op).all()

	for row in result:
		res[row.track_id]["assigned"]["artists"].append(row.name)

	# the guess_artists marker was internal bookkeeping only — strip it
	for track_id in res:
		if res[track_id].get("guess_artists"):
			del res[track_id]["guess_artists"]

	return res
##### AUX FUNCS

View File

@ -191,6 +191,7 @@ malojaconfig = Configuration(
"default_range_charts_tracks":(tp.Choice({'alltime':'All Time','year':'Year','month':"Month",'week':'Week'}), "Default Range Track Charts", "year"),
"default_step_pulse":(tp.Choice({'year':'Year','month':"Month",'week':'Week','day':'Day'}), "Default Pulse Step", "month"),
"charts_display_tiles":(tp.Boolean(), "Display Chart Tiles", False),
"album_showcase":(tp.Boolean(), "Display Album Showcase", True, "Display a graphical album showcase for artist overview pages instead of a chart list"),
"display_art_icons":(tp.Boolean(), "Display Album/Artist Icons", True),
"default_album_artist":(tp.String(), "Default Albumartist", "Various Artists"),
"discourage_cpu_heavy_stats":(tp.Boolean(), "Discourage CPU-heavy stats", False, "Prevent visitors from mindlessly clicking on CPU-heavy options. Does not actually disable them for malicious actors!"),

View File

@ -1,3 +1,4 @@
from .import_scrobbles import import_scrobbles
from .backup import backup
from .export import export # read that line out loud
from .parse_albums import parse_albums

View File

@ -21,6 +21,9 @@ outputs = {
def import_scrobbles(inputf):
from ...database import set_aux_mode
set_aux_mode()
from ...database.sqldb import add_scrobbles
result = {
@ -180,7 +183,7 @@ def parse_spotify_full(inputf):
if len(inputfiles) == 0:
print("No files found!")
return
if inputfiles != [inputf]:
print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
if not ask("Import " + ", ".join(col['yellow'](i) for i in inputfiles) + "?",default=True):

View File

@ -0,0 +1,23 @@
def parse_albums(replace=False):
from ...database import set_aux_mode
set_aux_mode()
from ...database.sqldb import guess_albums, get_album_id, add_track_to_album
print("Parsing album information...")
result = guess_albums(replace=replace)
result = {track_id:result[track_id] for track_id in result if result[track_id]["assigned"]}
print("Adding",len(result),"tracks to albums...")
i = 0
for track_id in result:
album_id = get_album_id(result[track_id]["assigned"])
add_track_to_album(track_id,album_id)
i += 1
if (i % 100) == 0:
print(i,"of",len(result))
print("Done!")

View File

@ -80,7 +80,11 @@
</table>
<br/><br/>
<span>Tracks</span>
<table class="searchresults_tracks" id="searchresults_tracks">
<table class="searchresults_tracks searchresults_extrainfo" id="searchresults_tracks">
</table>
<br/><br/>
<span>Albums</span>
<table class="searchresults_albums searchresults_extrainfo" id="searchresults_albums">
</table>
</div>
</div>

View File

@ -63,7 +63,7 @@
</td>
<td class="text">
<h1 id="main_entity_name" class="headerwithextra">{{ info.artist | e }}</h1>
{% if competes %}<span class="rank"><a href="/charts_artists?max=100">#{{ info.position }}</a></span>{% endif %}
{% if competes and info['scrobbles']>0 %}<span class="rank"><a href="/charts_artists?max=100">#{{ info.position }}</a></span>{% endif %}
<br/>
{% if competes and included %}
<span>associated: {{ links.links(included) }}</span>
@ -90,19 +90,29 @@
</table>
<h2><a href='{{ mlj_uri.create_uri("/charts_albums",filterkeys) }}'>Top Albums</a></h2>
{% if info["isalbumartist"] %}
{% with amountkeys={"perpage":15,"page":0} %}
{% include 'partials/charts_albums.jinja' %}
{% endwith %}
{% if settings['ALBUM_SHOWCASE'] %}
<h2><a href='{{ mlj_uri.create_uri("/charts_albums",filterkeys) }}'>Albums</a></h2>
{% include 'partials/album_showcase.jinja' %}
{% else %}
<h2><a href='{{ mlj_uri.create_uri("/charts_albums",filterkeys) }}'>Top Albums</a></h2>
{% with amountkeys={"perpage":15,"page":0} %}
{% include 'partials/charts_albums.jinja' %}
{% endwith %}
{% endif %}
{% endif %}
{% if info['scrobbles']>0 %}
<h2><a href='{{ mlj_uri.create_uri("/charts_tracks",filterkeys) }}'>Top Tracks</a></h2>
{% with amountkeys={"perpage":15,"page":0} %}
{% include 'partials/charts_tracks.jinja' %}
{% endwith %}
<br/>
<table class="twopart">
@ -180,5 +190,6 @@
{% with amountkeys = {"perpage":15,"page":0} %}
{% include 'partials/scrobbles.jinja' %}
{% endwith %}
{% endif %}
{% endblock %}

View File

@ -0,0 +1,78 @@
{% import 'snippets/links.jinja' as links %}
{% set info = dbc.get_albums_artist_appears_on(filterkeys,limitkeys) %}
{% set ownalbums = info.own_albums %}
{% set otheralbums = info.appears_on %}

<div id="showcase_container">

	{% for album in ownalbums %}
	<table class="album">
		{# fixed: '&nbsp' was missing its terminating semicolon #}
		<tr><td>&nbsp;</td></tr>
		<tr><td>
			<a href="{{ links.url(album) }}">
				{# fixed: stray apostrophe after the data-bg attribute #}
				<div class="lazy" data-bg="{{ images.get_album_image(album) }}"></div>
			</a>
		</td></tr>
		<tr><td>
			<span class="album_artists">{{ links.links(album.artists) }}</span><br/>
			<span class="album_title">{{ links.link(album) }}</span>
		</td></tr>
	</table>
	{% endfor %}

	{% for album in otheralbums %}
	<table class="album">
		<tr><td>Appears on</td></tr>
		<tr><td>
			<a href="{{ links.url(album) }}">
				<div class="lazy" data-bg="{{ images.get_album_image(album) }}"></div>
			</a>
		</td></tr>
		<tr><td>
			<span class="album_artists">{{ links.links(album.artists) }}</span><br/>
			<span class="album_title">{{ links.link(album) }}</span>
		</td></tr>
	</table>
	{% endfor %}

	{# removed: large commented-out alternative single-table layout (dead markup) #}

</div>

View File

@ -28,7 +28,7 @@
<td>
<a href="{{ links.url(album) }}">
<div class="lazy" data-bg="{{ images.get_album_image(album) }}"'>
<span class='stats'>#{{ rank }}</span> <span>{{ album.title }}</span>
<span class='stats'>#{{ rank }}</span> <span>{{ album.albumtitle }}</span>
</div>
</a>
</td>

View File

@ -189,7 +189,7 @@ div.searchresults tr td:nth-child(2) {
padding-left:10px;
}
div.searchresults table.searchresults_tracks td span:nth-child(1) {
div.searchresults table.searchresults_extrainfo td span:nth-child(1) {
font-size:12px;
color:grey;
@ -898,6 +898,56 @@ table.tiles_sub a span {
}
div#showcase_container {
display: flex;
margin-top: -15px;
padding-bottom: 20px;
align-items: flex-start;
flex-wrap: wrap;
}
div#showcase_container table.album {
width: 180px;
}
div#showcase_container table.album tr td {
padding-left: 15px;
padding-right: 15px;
}
div#showcase_container table.album tr:nth-child(1) td {
height:8px;
opacity: 0.3;
text-align: center;
}
div#showcase_container table.album tr:nth-child(2) td {
height:150px;
padding-top:2px;
padding-bottom:2px;
}
div#showcase_container table.album tr:nth-child(3) td {
height:15px;
}
div#showcase_container div {
height: 150px;
width: 150px;
background-size: cover;
background-position: top;
box-shadow: 0px 0px 10px 10px rgba(0,0,0,0.5);
}
div#showcase_container table:hover div {
box-shadow: 0px 0px 10px 10px var(--ctrl-element-color-main);
}
div#showcase_container span.album_artists {
font-size: 80%;
}
div#showcase_container span.album_title {
font-weight: bold;
}
.summary_rank {
background-size:cover;

View File

@ -161,6 +161,11 @@ function doneEditing() {
searchParams.set("title", newname);
var payload = {'id':entity_id,'title':newname}
}
else if (entity_type == 'album') {
var endpoint = "/apis/mlj_1/edit_album";
searchParams.set("albumtitle", newname);
var payload = {'id':entity_id,'albumtitle':newname}
}
callback_func = function(req){
if (req.status == 200) {

View File

@ -23,11 +23,13 @@ function html_to_fragment(html) {
var results_artists;
var results_tracks;
var results_albums;
var searchresultwrap;
window.addEventListener("DOMContentLoaded",function(){
results_artists = document.getElementById("searchresults_artists");
results_tracks = document.getElementById("searchresults_tracks");
results_albums = document.getElementById("searchresults_albums");
searchresultwrap = document.getElementById("resultwrap");
});
@ -50,8 +52,9 @@ function searchresult() {
// any older searches are now rendered irrelevant
while (searches[0] != this) { searches.splice(0,1) }
var result = JSON.parse(this.responseText);
var artists = result["artists"].slice(0,5)
var tracks = result["tracks"].slice(0,5)
var artists = result["artists"].slice(0,4)
var tracks = result["tracks"].slice(0,4)
var albums = result["albums"].slice(0,4)
while (results_artists.firstChild) {
results_artists.removeChild(results_artists.firstChild);
@ -59,6 +62,9 @@ function searchresult() {
while (results_tracks.firstChild) {
results_tracks.removeChild(results_tracks.firstChild);
}
while (results_albums.firstChild) {
results_albums.removeChild(results_albums.firstChild);
}
for (var i=0;i<artists.length;i++) {
name = artists[i]["artist"];
@ -87,6 +93,21 @@ function searchresult() {
results_tracks.appendChild(node);
}
for (var i=0;i<albums.length;i++) {
artists = albums[i]["album"].hasOwnProperty("displayArtist") ? albums[i]["album"]["displayArtist"] : albums[i]["album"]["artists"].join(", ");
albumtitle = albums[i]["album"]["albumtitle"];
link = albums[i]["link"];
image = albums[i]["image"];
var node = oneresult.cloneNode(true);
node.setAttribute("onclick","goto('" + link + "')");
node.children[0].style.backgroundImage = "url('" + image + "')";
node.children[1].children[0].textContent = artists;
node.children[1].children[2].textContent = albumtitle;
results_albums.appendChild(node);
}
searchresultwrap.classList.remove("hide")
}