mirror of https://github.com/krateng/maloja.git
synced 2025-04-25 04:40:51 +03:00
Compare commits
No commits in common. "master" and "v3.1.2" have entirely different histories.
.dockerignore
@@ -1,6 +1,5 @@
*
!maloja
!container
!Containerfile
!requirements.txt
!pyproject.toml
.github/FUNDING.yml (vendored, 3 changes)
@@ -1,2 +1 @@
custom: ["https://paypal.me/krateng"]
patreon: krateng
custom: ["https://flattr.com/@Krateng", "https://paypal.me/krateng"]
.github/workflows/docker.yml (vendored, 19 changes)
@@ -4,7 +4,6 @@ on:
  push:
    tags:
      - 'v*'
      - 'runaction-docker'

jobs:
  push_to_registry:
@@ -12,18 +11,18 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b

      - name: Log in to Docker Hub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
        uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
        uses: docker/metadata-action@f2a13332ac1ce8c0a71aeac48a150dbb1838ab67
        with:
          images: |
            ${{ github.repository_owner }}/maloja
@@ -34,13 +33,13 @@ jobs:
            latest=true

      - name: Set up QEMU
        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
        uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
        uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25

      - name: Cache Docker layers
        uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
        uses: actions/cache@48af2dc4a9e8278b89d7fa154b955c30c6aaab09
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -48,14 +47,14 @@ jobs:
            ${{ runner.os }}-buildx-

      - name: Build and push Docker image
        uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09
        uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a
        with:
          context: .
          file: Containerfile
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64 #,linux/arm/v7 #build this ourselves GH: #229
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max

@@ -68,7 +67,7 @@ jobs:
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache

      - name: Update Readme and short description
        uses: peter-evans/dockerhub-description@dc67fad7001ef9e8e3c124cb7a64e16d0a63d864
        uses: peter-evans/dockerhub-description@836d7e6aa8f6f32dce26f5a1dd46d3dc24997eae
        continue-on-error: true
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
.github/workflows/pypi.yml (vendored, 12 changes)
@@ -4,20 +4,17 @@ on:
  push:
    tags:
      - 'v*'
      - 'runaction-pypi'

jobs:
  publish_to_pypi:
    name: Push Package to PyPI
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    steps:
      - name: Check out the repo
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
        uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579

      - name: Set up Python
        uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
        uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a
        with:
          python-version: '3.x'

@@ -28,4 +25,7 @@ jobs:
        run: python -m build

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@67339c736fd9354cd4f8cb0b744f2b82a74b5c70
        uses: pypa/gh-action-pypi-publish@717ba43cfbb0387f6ce311b169a825772f54d295
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}
.gitignore (vendored, 1 change)
@@ -3,6 +3,7 @@

# environments / builds
.venv/*
testdata*
/dist
/build
/*.egg-info
APKBUILD (new file, 36 lines)
@@ -0,0 +1,36 @@
# Contributor: Johannes Krattenmacher <maloja@dev.krateng.ch>
# Maintainer: Johannes Krattenmacher <maloja@dev.krateng.ch>
pkgname=maloja
pkgver=3.0.0-dev
pkgrel=0
pkgdesc="Self-hosted music scrobble database"
url="https://github.com/krateng/maloja"
arch="noarch"
license="GPL-3.0"
depends="python3 tzdata"
pkgusers=$pkgname
pkggroups=$pkgname
depends_dev="gcc g++ python3-dev libxml2-dev libxslt-dev libffi-dev libc-dev py3-pip linux-headers"
makedepends="$depends_dev"
source="
	$pkgname-$pkgver.tar.gz::https://github.com/krateng/maloja/archive/refs/tags/v$pkgver.tar.gz
"
builddir="$srcdir"/$pkgname-$pkgver


build() {
	cd $builddir
	python3 -m build .
	pip3 install dist/*.tar.gz
}

package() {
	mkdir -p /etc/$pkgname || return 1
	mkdir -p /var/lib/$pkgname || return 1
	mkdir -p /var/cache/$pkgname || return 1
	mkdir -p /var/logs/$pkgname || return 1
}

# TODO
sha512sums="a674eaaaa248fc2b315514d79f9a7a0bac6aa1582fe29554d9176e8b551e8aa3aa75abeebdd7713e9e98cc987e7bd57dc7a5e9a2fb85af98b9c18cb54de47bf7 $pkgname-${pkgver}.tar.gz"
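The `# TODO` above marks the checksum as a placeholder. A minimal sketch (not part of the repo) of how that value could be regenerated for the source tarball; the filename mirrors `$pkgname-$pkgver.tar.gz` from the APKBUILD:

```python
import hashlib

def sha512sum(path: str) -> str:
    # stream the tarball so large archives are not loaded into memory at once
    h = hashlib.sha512()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# example filename, matching the APKBUILD variables above
print(sha512sum("maloja-3.0.0-dev.tar.gz"), "maloja-3.0.0-dev.tar.gz")
```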
Containerfile
@@ -1,86 +1,40 @@
FROM lsiobase/alpine:3.21 AS base
FROM alpine:3.15
# Python image includes two Python versions, so use base Alpine

# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>

WORKDIR /usr/src/app

# Install run dependencies first
RUN apk add --no-cache python3 py3-lxml tzdata


COPY --chown=abc:abc ./requirements.txt ./requirements.txt

# based on https://github.com/linuxserver/docker-pyload-ng/blob/main/Dockerfile
# everything but the app installation is run in one command so we can purge
# all build dependencies and cache in the same layer
# it may be possible to decrease image size slightly by using build stage and
# copying all site-packages to runtime stage but the image is already pretty small
# system pip could be removed after build, but apk then decides to also remove all its
# python dependencies, even if they are explicitly installed as python packages
# whut
RUN \
	echo "" && \
	echo "**** install build packages ****" && \
	apk add --no-cache --virtual=build-deps \
		gcc \
		g++ \
		python3-dev \
		libxml2-dev \
		libxslt-dev \
		libffi-dev \
		libc-dev \
		py3-pip \
		linux-headers && \
	echo "" && \
	echo "**** install runtime packages ****" && \
	apk add --no-cache \
		python3 \
		py3-lxml \
		libmagic \
		tzdata && \
	echo "" && \
	echo "**** install pip dependencies ****" && \
	python3 -m venv /venv && \
	. /venv/bin/activate && \
	python3 -m ensurepip && \
	pip install -U --no-cache-dir \
		pip \
		wheel && \
	echo "" && \
	echo "**** install maloja requirements ****" && \
	pip install --no-cache-dir -r requirements.txt && \
	echo "" && \
	echo "**** cleanup ****" && \
	apk del --purge \
		build-deps && \
	rm -rf \
		/tmp/* \
		${HOME}/.cache
	apk add py3-pip && \
	pip install wheel

# actual installation in extra layer so we can cache the stuff above

COPY --chown=abc:abc . .
COPY ./requirements.txt ./requirements.txt

RUN \
	echo "" && \
	echo "**** install maloja ****" && \
	apk add --no-cache --virtual=install-deps \
		py3-pip && \
	python3 -m venv /venv && \
	. /venv/bin/activate && \
	pip3 install /usr/src/app && \
	apk del --purge \
		install-deps && \
	rm -rf \
		/tmp/* \
		${HOME}/.cache
	apk add --no-cache --virtual .build-deps gcc g++ python3-dev libxml2-dev libxslt-dev libffi-dev libc-dev py3-pip linux-headers && \
	pip install --no-cache-dir -r requirements.txt && \
	apk del .build-deps


# no chance for caching below here

COPY container/root/ /
COPY . .

ENV \
	# Docker-specific configuration
	MALOJA_SKIP_SETUP=yes \
	MALOJA_CONTAINER=yes \
	PYTHONUNBUFFERED=1 \
	# Prevents breaking change for previous container that ran maloja as root
	# On linux hosts (non-podman rootless) these variables should be set to the
	# host user that should own the host folder bound to MALOJA_DATA_DIRECTORY
	PUID=0 \
	PGID=0
RUN pip install /usr/src/app

# Docker-specific configuration
# defaulting to IPv4 is no longer necessary (default host is dual stack)
ENV MALOJA_SKIP_SETUP=yes
ENV PYTHONUNBUFFERED=1

EXPOSE 42010
# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint
ENTRYPOINT ["maloja", "run"]
@@ -9,14 +9,49 @@ Clone the repository and enter it.

## Environment

To avoid cluttering your system, consider using a [virtual environment](https://docs.python.org/3/tutorial/venv.html), or better yet run the included `docker-compose.yml` file.
Your IDE should let you run the file directly; otherwise, you can execute `docker compose -f dev/docker-compose.yml -p maloja up --force-recreate --build`.
To avoid cluttering your system, consider using a [virtual environment](https://docs.python.org/3/tutorial/venv.html).

Your system needs several packages installed. For supported distributions, this can be done with e.g.

```console
sh ./install/install_dependencies_alpine.sh
```

For other distros, try to find the equivalents of the packages listed or simply check your error output.

Then install all Python dependencies with

```console
pip install -r requirements.txt
```


## Running the server

Use the environment variable `MALOJA_DATA_DIRECTORY` to force all user files into one central directory - this way, you can also quickly change between multiple configurations.
For development, you might not want to install Maloja files all over your filesystem. Use the environment variable `MALOJA_DATA_DIRECTORY` to force all user files into one central directory - this way, you can also quickly change between multiple configurations.

You can quickly run the server with all your local changes with

```console
python3 -m maloja run
```

You can also build the package with

```console
pip install .
```


## Docker

You can also always build and run the server with

```console
sh ./dev/run_docker.sh
```

This will use the directory `testdata`.

## Further help
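A scripted variant of the development workflow above, as a sketch (this helper is not in the repo); it only combines the documented `MALOJA_DATA_DIRECTORY` variable with the documented run command:

```python
import os
import subprocess

# assumption: running from the repository root with dependencies installed
env = dict(os.environ, MALOJA_DATA_DIRECTORY="./testdata")
subprocess.run(["python3", "-m", "maloja", "run"], env=env, check=True)
```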
README.md (108 changes)
@@ -4,9 +4,12 @@
[](https://pypi.org/project/malojaserver/)
[](https://hub.docker.com/r/krateng/maloja)

Simple self-hosted music scrobble database to create personal listening statistics.
[](https://github.com/krateng/maloja/blob/master/LICENSE)
[](https://codeclimate.com/github/krateng/maloja)



Simple self-hosted music scrobble database to create personal listening statistics. No recommendations, no social network, no nonsense.



You can check [my own Maloja page](https://maloja.krateng.ch) as an example instance.

@@ -15,15 +18,19 @@ You can check [my own Maloja page](https://maloja.krateng.ch) as an example instance.
* [Features](#features)
* [How to install](#how-to-install)
  * [Requirements](#requirements)
  * [Docker / Podman](#docker--podman)
  * [PyPI](#pypi)
  * [From Source](#from-source)
  * [Docker / Podman](#docker-podman)
  * [Extras](#extras)
* [How to use](#how-to-use)
  * [Basic control](#basic-control)
  * [Data](#data)
  * [Customization](#customization)
* [How to scrobble](#how-to-scrobble)
  * [Native support](#native-support)
  * [Native API](#native-api)
  * [Standard-compliant API](#standard-compliant-api)
  * [Manual](#manual)
* [How to extend](#how-to-extend)

## Features
@@ -40,17 +47,50 @@ You can check [my own Maloja page](https://maloja.krateng.ch) as an example instance.

## How to install

To avoid issues with version / dependency mismatches, Maloja should only be used in **Docker** or **Podman**, not on bare metal.
I cannot offer any help for bare metal installations (but using venv should help).
### Requirements

Maloja should run on any x86 or ARM machine that runs Python.

I can support you with issues best if you use **Alpine Linux**.

Your CPU should have a single-core Passmark score of at the very least 1500. 500 MB RAM should give you a decent experience, but performance will benefit greatly from up to 2 GB.

### PyPI

You can install Maloja with

```console
pip install malojaserver
```

To make sure all dependencies are installed, you can also use one of the included scripts in the `install` folder.

### From Source

Clone this repository and enter the directory with

```console
git clone https://github.com/krateng/maloja
cd maloja
```

Then install all the requirements and build the package, e.g.:

```console
sh ./install/install_dependencies_alpine.sh
pip install -r requirements.txt
pip install .
```

### Docker / Podman

Pull the [latest image](https://hub.docker.com/r/krateng/maloja) or check out the repository and use the included Containerfile.

Of note are these settings which should be passed as environment variables to the container:

* `MALOJA_SKIP_SETUP` -- Make the server setup process non-interactive. Maloja will not work properly in a container without this variable set. This is done by default in the provided Containerfile.
* `MALOJA_FORCE_PASSWORD` -- Set an admin password for Maloja. You only need this on the first run.
* `MALOJA_DATA_DIRECTORY` -- Set the directory in the container where configuration folders/files should be located.
  * Mount a [volume](https://docs.docker.com/engine/reference/builder/#volume) to the specified directory to access these files outside the container (and to make them persistent).
* `MALOJA_FORCE_PASSWORD` -- Set an admin password for Maloja.

You must publish a port on your host machine to bind to the container's web port (default 42010). The container uses IPv4 per default.

@@ -60,43 +100,59 @@ An example of a minimum run configuration to access maloja via `localhost:42010`:

docker run -p 42010:42010 -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
```

If you are using [rootless containers with Podman](https://developers.redhat.com/blog/2020/09/25/rootless-containers-with-podman-the-basics#why_podman_), the following DOES NOT apply to you, but if you are running **Docker** on a **Linux host** you should specify the `user:group` IDs of the user who owns the folder on the host machine bound to `MALOJA_DATA_DIRECTORY` in order to avoid [Docker file permission problems](https://ikriv.com/blog/?p=4698). These can be specified using the [environment variables **PUID** and **PGID**](https://docs.linuxserver.io/general/understanding-puid-and-pgid).

To get the UID and GID for the current user, run these commands from a terminal:

* `id -u` -- prints UID (e.g. `1000`)
* `id -g` -- prints GID (e.g. `1001`)

The modified run command with these variables would look like:

```console
docker run -e PUID=1000 -e PGID=1001 -p 42010:42010 -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
```


### Extras

* If you'd like to display images, you will need API keys for [Last.fm](https://www.last.fm/api/account/create) and [Spotify](https://developer.spotify.com/dashboard/applications). These are free of charge!

* Put your server behind a reverse proxy for SSL encryption. Make sure that you're proxying to the IPv6 or IPv4 address according to your settings.

* You can set up a cronjob to start your server on system boot, and potentially restart it on a regular basis:

```
@reboot sleep 15 && maloja start
42 0 7 * * maloja restart
```


## How to use

### Basic control

When not running in a container, you can run the application with `maloja run`.
Start and stop the server in the background with

```console
maloja start
maloja stop
maloja restart
```

If you need to run the server in the foreground, use

```console
maloja run
```


### Data

If you would like to import your previous scrobbles, copy them into the import folder in your data directory. This works on:
If you would like to import your previous scrobbles, use the command `maloja import *filename*`. This works on:

* a Last.fm export generated by [ghan64's website](https://lastfm.ghan.nl/export/)
* a Last.fm export generated by [benfoxall's website](https://benjaminbenben.com/lastfm-to-csv/) ([GitHub page](https://github.com/benfoxall/lastfm-to-csv))
* an official [Spotify data export file](https://www.spotify.com/us/account/privacy/)
* an official [ListenBrainz export file](https://listenbrainz.org/profile/export/)
* the export of another Maloja instance

⚠️ Never import your data while Maloja is running. If you need to import inside a Docker container, start it in shell mode instead and perform the import before starting the container as mentioned above:

```console
docker run -it --entrypoint sh -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
cd /mljdata
maloja import my_last_fm_export.csv
```

---

To back up your data, run `maloja backup`, optionally with `--include_images`.

### Customization

* Have a look at the [available settings](settings.md) and specify your choices in `/etc/maloja/settings.ini`. You can also set each of these settings as an environment variable with the prefix `MALOJA_` (e.g. `MALOJA_SKIP_SETUP`).
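A short illustration of the `MALOJA_` prefix rule from the customization note above; both variable names appear in the README, but the values here are placeholders:

```python
import os

# same settings as in settings.ini, supplied via the environment instead
os.environ["MALOJA_SKIP_SETUP"] = "yes"
os.environ["MALOJA_FORCE_PASSWORD"] = "examplepassword"  # placeholder value
```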
@@ -11,8 +11,7 @@ const ALWAYS_SCROBBLE_SECONDS = 60*3;
// Longer songs are always scrobbled when playing at least 2 minutes

pages = {
	"plex":{
		"name":"Plex",
	"Plex Web":{
		"patterns":[
			"https://app.plex.tv",
			"http://app.plex.tv",
@@ -21,36 +20,31 @@ pages = {
		],
		"script":"plex.js"
	},
	"ytmusic":{
		"name":"YouTube Music",
	"YouTube Music":{
		"patterns":[
			"https://music.youtube.com"
		],
		"script":"ytmusic.js"
	},
	"spotify":{
		"name":"Spotify",
	"Spotify Web":{
		"patterns":[
			"https://open.spotify.com"
		],
		"script":"spotify.js"
	},
	"bandcamp":{
		"name":"Bandcamp",
	"Bandcamp":{
		"patterns":[
			"bandcamp.com"
		],
		"script":"bandcamp.js"
	},
	"soundcloud":{
		"name":"Soundcloud",
	"Soundcloud":{
		"patterns":[
			"https://soundcloud.com"
		],
		"script":"soundcloud.js"
	},
	"navidrome":{
		"name":"Navidrome",
	"Navidrome":{
		"patterns":[
			"https://navidrome.",
			"http://navidrome."
@@ -83,13 +77,6 @@ function onTabUpdated(tabId, changeInfo, tab) {
			//console.log("Still on same page!")
			tabManagers[tabId].update();

			// check if the setting for this page is still active
			chrome.storage.local.get(["service_active_" + page],function(result){
				if (!result["service_active_" + page]) {
					delete tabManagers[tabId];
				}
			});

			return
		}
	}
@@ -103,21 +90,13 @@ function onTabUpdated(tabId, changeInfo, tab) {
		patterns = pages[key]["patterns"];
		for (var i=0;i<patterns.length;i++) {
			if (tab.url.includes(patterns[i])) {
				console.log("New page on tab " + tabId + " will be handled by new " + key + " manager!");
				tabManagers[tabId] = new Controller(tabId,key);
				updateTabNum();
				return
				//chrome.tabs.executeScript(tab.id,{"file":"sitescripts/" + pages[key]["script"]})

				// check if we even like that page
				chrome.storage.local.get(["service_active_" + key],function(result){
					if (result["service_active_" + key]) {
						console.log("New page on tab " + tabId + " will be handled by new " + key + " manager!");
						tabManagers[tabId] = new Controller(tabId,key);
						updateTabNum();
						//chrome.tabs.executeScript(tab.id,{"file":"sitescripts/" + pages[key]["script"]})
					}
					else {
						console.log("New page on tab " + tabId + " is " + key + ", not enabled!");
					}
				});

				return;
			}
		}
	}
@@ -148,10 +127,10 @@ function onInternalMessage(request,sender) {
	for (tabId in tabManagers) {
		manager = tabManagers[tabId]
		if (manager.currentlyPlaying) {
			answer.push([pages[manager.page]['name'],manager.currentArtist,manager.currentTitle]);
			answer.push([manager.page,manager.currentArtist,manager.currentTitle]);
		}
		else {
			answer.push([pages[manager.page]['name'],null]);
			answer.push([manager.page,null]);
		}

	}
@@ -1,6 +1,6 @@
{
	"name": "Maloja Scrobbler",
	"version": "1.13",
	"version": "1.11",
	"description": "Scrobbles tracks from various sites to your Maloja server",
	"manifest_version": 2,
	"permissions": [
@@ -14,7 +14,7 @@
		color:beige;
		font-family:'Ubuntu';
	}
	input[type=text] {
	input {
		width:270px;
		font-family:'Ubuntu';
		outline:none;
@@ -33,14 +33,10 @@
	<br /><br />
	<span id="checkmark_key"></span> <span>API key:</span><br />
	<input type="text" id="apikey" />
	<hr/>
	<br/><br/>
	<span>Tabs:</span>
	<list id="playinglist">
	</list>
	<hr/>
	<span>Services:</span>
	<list id="sitelist">
	</list>
	</div>
</body>
</html>
@@ -1,71 +1,26 @@
// duplicate this info for now, don't know if there is a better way than sending messages
var pages = {
	"plex":"Plex",
	"ytmusic":"YouTube Music",
	"spotify":"Spotify",
	"bandcamp":"Bandcamp",
	"soundcloud":"Soundcloud",
	"navidrome":"Navidrome"
}

var config_defaults = {
	serverurl:"http://localhost:42010",
	apikey:"BlackPinkInYourArea"
}

for (var key in pages) {
	config_defaults["service_active_" + key] = true;
}


document.addEventListener("DOMContentLoaded",function() {

	var sitelist = document.getElementById("sitelist");

	for (var identifier in pages) {
		sitelist.append(document.createElement('br'));
		var checkbox = document.createElement('input');
		checkbox.type = "checkbox";
		checkbox.id = "service_active_" + identifier;
		var label = document.createElement('label');
		label.for = checkbox.id;
		label.textContent = pages[identifier];
		sitelist.appendChild(checkbox);
		sitelist.appendChild(label);

		checkbox.addEventListener("change",toggleSite);

	}


	document.getElementById("serverurl").addEventListener("change",checkServer);
	document.getElementById("apikey").addEventListener("change",checkServer);

	document.getElementById("serverurl").addEventListener("focusout",checkServer);
	document.getElementById("apikey").addEventListener("focusout",checkServer);

	document.getElementById("serverurl").addEventListener("input",saveServer);
	document.getElementById("apikey").addEventListener("input",saveServer);
	document.getElementById("serverurl").addEventListener("input",saveConfig);
	document.getElementById("apikey").addEventListener("input",saveConfig);


	chrome.runtime.onMessage.addListener(onInternalMessage);

	chrome.storage.local.get(config_defaults,function(result){
		console.log(result);
		for (var key in result) {

			// booleans
			if (result[key] == true || result[key] == false) {
				document.getElementById(key).checked = result[key];
			}

			// text
			else{
				document.getElementById(key).value = result[key];
			}

			document.getElementById(key).value = result[key];
		}
		checkServer();
	})
@@ -76,11 +31,6 @@ document.addEventListener("DOMContentLoaded",function() {

});

function toggleSite(evt) {
	var element = evt.target;
	chrome.storage.local.set({ [element.id]: element.checked });
}


function onInternalMessage(request,sender) {
	if (request.type == "response") {
@@ -100,8 +50,8 @@ function onInternalMessage(request,sender) {



function saveServer() {
	for (var key of ["serverurl","apikey"]) {
function saveConfig() {
	for (var key in config_defaults) {
		var value = document.getElementById(key).value;
		chrome.storage.local.set({ [key]: value });
	}
@@ -1,10 +0,0 @@
#!/usr/bin/with-contenv bash

if [ "$(s6-setuidgid abc id -u)" = "0" ]; then
	echo "-------------------------------------"
	echo "WARN: Running as root! If you meant to do this then this message can be ignored."
	echo "If you are running this container on a *linux* host and are not using podman rootless you SHOULD"
	echo "change the ENVs PUID and PGID for this container to ensure correct permissions on your config folder."
	echo -e "See: https://github.com/krateng/maloja#linux-host\n"
	echo -e "-------------------------------------\n"
fi
@@ -1 +0,0 @@
oneshot
@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-permission-check/run
@@ -1,7 +0,0 @@
#!/usr/bin/with-contenv bash

# used https://github.com/linuxserver/docker-wikijs/blob/master/root/etc/s6-overlay/s6-rc.d/svc-wikijs/run as a template

echo -e "\nMaloja is starting!"
exec \
	s6-setuidgid abc /venv/bin/python -m maloja run
@@ -1 +0,0 @@
longrun
@@ -1,3 +0,0 @@
sudo rm -r ./testdata
mkdir ./testdata
chmod 777 ./testdata
@@ -1,13 +0,0 @@
services:
  maloja:
    build:
      context: ..
      dockerfile: ./Containerfile
    ports:
      - "42010:42010"
    volumes:
      - "./testdata:/data"
    environment:
      - "MALOJA_DATA_DIRECTORY=/data"
      - "PUID=1000"
      - "PGID=1000"
@@ -1 +0,0 @@
git tag -l '*.0' -n1 --sort=v:refname
dev/package.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import toml
import os

with open("pyproject.toml") as filed:
	data = toml.load(filed)

info = {
	'name':data['project']['name'],
	'license':"GPLv3",
	'version':data['project']['version'],
	'architecture':'all',
	'description':'"' + data['project']['description'] + '"',
	'url':'"' + data['project']['urls']['homepage'] + '"',
	'maintainer':f"\"{data['project']['authors'][0]['name']} <{data['project']['authors'][0]['email']}>\"",
}


for target in ["apk","deb"]:
	lcmd = f"fpm {' '.join(f'--{key} {info[key]}' for key in info)} -s python -t {target} . "
	print(lcmd)
	os.system(lcmd)
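For reference, a sketch of roughly what one assembled command looks like (the real values come from `pyproject.toml`); `shlex.join` is used here only to make explicit the quoting that the string concatenation above handles manually:

```python
import shlex

# hypothetical values standing in for the pyproject.toml fields
args = ["fpm", "--name", "maloja", "--license", "GPLv3", "--version", "3.0.0",
        "--architecture", "all", "-s", "python", "-t", "apk", "."]
print(shlex.join(args))  # fpm --name maloja --license GPLv3 --version 3.0.0 ...
```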
@@ -16,28 +16,8 @@ minor_release_name: "Soyeon"
    - "[Bugfix] Fixed inclusion of custom css files"
    - "[Bugfix] Fixed list values in configuration"
3.1.2:
  commit: "a0739306013cd9661f028fb5b2620cfa2d298aa4"
  notes:
    - "[Feature] Added remix artist parsing"
    - "[Feature] Added API debug mode"
    - "[Bugfix] Fixed leftover whitespaces when parsing titles"
    - "[Bugfix] Fixed handling of fallthrough values in config file"
3.1.3:
  commit: "f3a04c79b1c37597cdf3cafcd95e3c923cd6a53f"
  notes:
    - "[Bugfix] Fixed infinite recursion with capitalized featuring delimiters"
    - "[Bugfix] Fixed favicon display"
3.1.4:
  commit: "ef06f2262205c903e7c3060e2d2d52397f8ffc9d"
  notes:
    - "[Feature] Expanded information saved from Listenbrainz API"
    - "[Feature] Added import for Listenbrainz exports"
    - "[Bugfix] Sanitized artists and tracks with html-like structure"
3.1.5:
  commit: "4330b0294bc0a01cdb841e2e3db370108da901db"
  notes:
    - "[Feature] Made image upload part of regular API"
    - "[Bugfix] Additional entity name sanitization"
    - "[Bugfix] Fixed image display on Safari"
    - "[Bugfix] Fixed entity editing on Firefox"
    - "[Bugfix] Made compatible with SQLAlchemy 2.0"
@@ -1,57 +0,0 @@
minor_release_name: "Nicole"
3.2.0:
  commit: "34d0a49eb8deae2fb95233289521bb817732c772"
  notes:
    - "[Architecture] Switched to linuxserver.io container base image"
    - "[Architecture] Reworked image handling"
    - "[Architecture] Removed pre-calculated stats"
    - "[Feature] Added support for albums"
    - "[Feature] New start page"
    - "[Feature] Added UI for track-artist, track-album and album-artist association"
    - "[Feature] Added inline UI for association and merging in chart lists"
    - "[Feature] Added UI selector for including associated artists"
    - "[Feature] Added UI distinction for associated scrobbles in chart bars"
    - "[Performance] Improved image rendering"
    - "[Performance] Optimized several database calls"
    - "[Bugfix] Fixed configuration of time format"
    - "[Bugfix] Fixed search on manual scrobble page"
    - "[Bugfix] Disabled DB maintenance while not running main server"
    - "[Bugfix] Removed some nonsensical ephemeral database entry creations"
    - "[Bugfix] Fixed API endpoint for track charts with no artist provided"
    - "[Technical] Bumped Python and SQLAlchemy versions"
    - "[Distribution] Removed build of arm/v7 image"
3.2.1:
  commit: "5495d6e38d95c0c2128e1de9a9553b55b6be945b"
  notes:
    - "[Feature] Added setting for custom week offset"
    - "[Feature] Added Musicbrainz album art fetching"
    - "[Bugfix] Fixed album entity rows being marked as track entity rows"
    - "[Bugfix] Fixed scrobbling of tracks when all artists have been removed by server parsing"
    - "[Bugfix] Fixed Spotify import of multiple files"
    - "[Bugfix] Fixed process control on FreeBSD"
    - "[Bugfix] Fixed Spotify authentication thread blocking the process from terminating"
    - "[Technical] Upgraded all third party modules to use requests module and send User Agent"
3.2.2:
  commit: "febaff97228b37a192f2630aa331cac5e5c3e98e"
  notes:
    - "[Security] Fixed XSS vulnerability in error page (Disclosed by https://github.com/NULLYUKI)"
    - "[Architecture] Reworked the default directory selection"
    - "[Feature] Added option to show scrobbles on tile charts"
    - "[Bugfix] Fixed Last.fm authentication"
3.2.3:
  commit: "a7dcd3df8a6b051a1f6d0b7d10cc5af83502445c"
  notes:
    - "[Architecture] Upgraded doreah, significant rework of authentication"
    - "[Bugfix] Fixed initial permission check"
    - "[Bugfix] Fixed and updated various texts"
    - "[Bugfix] Fixed moving tracks to different album"
3.2.4:
  notes:
    - "[Architecture] Removed daemonization capabilities"
    - "[Architecture] Moved import to main server process"
    - "[Feature] Implemented support for ghan's csv Last.fm export"
    - "[Performance] Debounced search"
    - "[Bugfix] Fixed stuck scrobbling from Navidrome"
    - "[Bugfix] Fixed missing image mimetype"
    - "[Technical] Pinned dependencies"
    - "[Technical] Upgraded Python and Alpine"
dev/run_docker.sh (new file, 2 lines)
@@ -0,0 +1,2 @@
docker build -t maloja . -f Containerfile
docker run --rm -p 42010:42010 -v $PWD/testdata:/mlj -e MALOJA_DATA_DIRECTORY=/mlj maloja

dev/run_podman.sh (new file, 2 lines)
@@ -0,0 +1,2 @@
podman build -t maloja . -f Containerfile
podman run --rm -p 42010:42010 -v $PWD/testdata:/mlj -e MALOJA_DATA_DIRECTORY=/mlj maloja
dev/templates/APKBUILD.jinja (new file, 36 lines)
@@ -0,0 +1,36 @@
# Contributor: Johannes Krattenmacher <maloja@dev.krateng.ch>
# Maintainer: Johannes Krattenmacher <maloja@dev.krateng.ch>
pkgname={{ tool.flit.module.name }}
pkgver={{ project.version }}
pkgrel=0
pkgdesc="{{ project.description }}"
url="{{ project.urls.homepage }}"
arch="noarch"
license="GPL-3.0"
depends="{{ tool.osreqs.alpine.run | join(' ') }}"
pkgusers=$pkgname
pkggroups=$pkgname
depends_dev="{{ tool.osreqs.alpine.build | join(' ') }}"
makedepends="$depends_dev"
source="
	$pkgname-$pkgver.tar.gz::{{ project.urls.repository }}/archive/refs/tags/v$pkgver.tar.gz
"
builddir="$srcdir"/$pkgname-$pkgver


build() {
	cd $builddir
	python3 -m build .
	pip3 install dist/*.tar.gz
}

package() {
	mkdir -p /etc/$pkgname || return 1
	mkdir -p /var/lib/$pkgname || return 1
	mkdir -p /var/cache/$pkgname || return 1
	mkdir -p /var/logs/$pkgname || return 1
}

# TODO
sha512sums="a674eaaaa248fc2b315514d79f9a7a0bac6aa1582fe29554d9176e8b551e8aa3aa75abeebdd7713e9e98cc987e7bd57dc7a5e9a2fb85af98b9c18cb54de47bf7 $pkgname-${pkgver}.tar.gz"
dev/templates/Containerfile.jinja (new file, 40 lines)
@@ -0,0 +1,40 @@
FROM alpine:3.15
# Python image includes two Python versions, so use base Alpine

# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>

WORKDIR /usr/src/app

# Install run dependencies first
RUN apk add --no-cache {{ tool.osreqs.alpine.run | join(' ') }}

# system pip could be removed after build, but apk then decides to also remove all its
# python dependencies, even if they are explicitly installed as python packages
# whut
RUN \
	apk add py3-pip && \
	pip install wheel


COPY ./requirements.txt ./requirements.txt

RUN \
	apk add --no-cache --virtual .build-deps {{ tool.osreqs.alpine.build | join(' ') }} && \
	pip install --no-cache-dir -r requirements.txt && \
	apk del .build-deps


# no chance for caching below here

COPY . .

RUN pip install /usr/src/app

# Docker-specific configuration
# defaulting to IPv4 is no longer necessary (default host is dual stack)
ENV MALOJA_SKIP_SETUP=yes
ENV PYTHONUNBUFFERED=1

EXPOSE 42010
# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint
ENTRYPOINT ["maloja", "run"]
dev/templates/install/install_alpine.sh.jinja (new file, 4 lines)
@@ -0,0 +1,4 @@
{% include 'install/install_dependencies_alpine.sh.jinja' %}
apk add py3-pip
pip install wheel
pip install malojaserver

dev/templates/install/install_debian.sh.jinja (new file, 4 lines)
@@ -0,0 +1,4 @@
{% include 'install/install_dependencies_debian.sh.jinja' %}
apt install python3-pip
pip install wheel
pip install malojaserver
dev/templates/install/install_dependencies_alpine.sh.jinja (new file, 4 lines)
@@ -0,0 +1,4 @@
#!/usr/bin/env sh
apk update
apk add \
	{{ (tool.osreqs.alpine.build + tool.osreqs.alpine.run + tool.osreqs.alpine.opt) | join(' \\\n\t') }}

dev/templates/install/install_dependencies_debian.sh.jinja (new file, 4 lines)
@@ -0,0 +1,4 @@
#!/usr/bin/env sh
apt update
apt install \
	{{ (tool.osreqs.debian.build + tool.osreqs.debian.run + tool.osreqs.debian.opt) | join(' \\\n\t') }}
@@ -189,7 +189,7 @@
],
"body": {
	"mode": "raw",
	"raw": "{\n \"key\": \"{{api_key}}\",\n \"artist\": \"{{data.artist1}}\",\n \"title\": \"{{data.title1}}\",\n \"album\": \"{{data.album}}\",\n \"albumartists\":[\n \"{{data.artist1}}\",\n \"{{data.artist3}}\"\n ]\n}"
	"raw": "{\n \"key\": \"{{api_key}}\",\n \"artist\": \"{{data.artist1}}\",\n \"title\": \"{{data.title1}}\"\n}"
},
"url": {
	"raw": "{{url}}/apis/mlj_1/newscrobble",
@@ -219,7 +219,7 @@
],
"body": {
	"mode": "raw",
	"raw": "{\n \"key\": \"{{api_key}}\",\n \"artists\": [\"{{data.artist1}}\",\"{{data.artist2}}\"],\n \"title\": \"{{data.title1}}\",\n \"album\": \"{{data.album}}\",\n \"albumartists\":[\n \"{{data.artist1}}\",\n \"{{data.artist3}}\"\n ]\n}"
	"raw": "{\n \"key\": \"{{api_key}}\",\n \"artists\": [\"{{data.artist1}}\",\"{{data.artist2}}\"],\n \"title\": \"{{data.title1}}\"\n}"
},
"url": {
	"raw": "{{url}}/apis/mlj_1/newscrobble",
@@ -867,11 +867,6 @@
	"key": "data.title3",
	"value": "One in a Million"
},
{
	"key": "data.album",
	"value": "The Epic Collection",
	"type": "default"
},
{
	"key": "data.timestamp1",
	"value": ""
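The collection entries above exercise the native `/apis/mlj_1/newscrobble` endpoint. A standalone sketch of the same request, with the server URL and API key as placeholders:

```python
import json
from urllib import request

payload = {
    "key": "your-api-key",                 # placeholder
    "artists": ["Artist A", "Artist B"],
    "title": "Track Title",
    "album": "The Epic Collection",        # optional, as in the request bodies above
}
req = request.Request(
    "http://localhost:42010/apis/mlj_1/newscrobble",
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
)
print(request.urlopen(req).read().decode())
```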
@@ -1,21 +1,17 @@
"""
Create necessary files from sources of truth. Currently just the requirements.txt files.
"""

import toml
import os
import jinja2

env = jinja2.Environment(
	loader=jinja2.FileSystemLoader('./templates'),
	loader=jinja2.FileSystemLoader('dev/templates'),
	autoescape=jinja2.select_autoescape(['html', 'xml']),
	keep_trailing_newline=True
)

with open("../pyproject.toml") as filed:
with open("pyproject.toml") as filed:
	data = toml.load(filed)

templatedir = "./templates"
templatedir = "./dev/templates"

for root,dirs,files in os.walk(templatedir):

@@ -27,7 +23,7 @@ for root,dirs,files in os.walk(templatedir):
		if not f.endswith('.jinja'): continue

		srcfile = os.path.join(root,f)
		trgfile = os.path.join("..", reldirpath,f.replace(".jinja",""))
		trgfile = os.path.join(reldirpath,f.replace(".jinja",""))


		template = env.get_template(relfilepath)
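A minimal standalone rendering of one template, under the same assumptions the newer layout above makes (`pyproject.toml` at the repository root, templates under `dev/templates`):

```python
import toml
import jinja2

env = jinja2.Environment(
    loader=jinja2.FileSystemLoader("dev/templates"),
    keep_trailing_newline=True,
)
with open("pyproject.toml") as filed:
    data = toml.load(filed)

# the templates address the toml tables directly, e.g. {{ project.version }}
print(env.get_template("Containerfile.jinja").render(**data))
```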
@@ -1,7 +1,3 @@
"""
Read the changelogs / version metadata and create all git tags
"""

import os
import subprocess as sp
import yaml
install/install_alpine.sh (new file, 20 lines)
@@ -0,0 +1,20 @@
#!/usr/bin/env sh
apk update
apk add \
	gcc \
	g++ \
	python3-dev \
	libxml2-dev \
	libxslt-dev \
	libffi-dev \
	libc-dev \
	py3-pip \
	linux-headers \
	python3 \
	py3-lxml \
	tzdata \
	vips

apk add py3-pip
pip install wheel
pip install malojaserver

install/install_debian.sh (new file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/env sh
apt update
apt install \
	python3-pip \
	python3

apt install python3-pip
pip install wheel
pip install malojaserver

install/install_dependencies_alpine.sh (new file, 16 lines)
@@ -0,0 +1,16 @@
#!/usr/bin/env sh
apk update
apk add \
	gcc \
	g++ \
	python3-dev \
	libxml2-dev \
	libxslt-dev \
	libffi-dev \
	libc-dev \
	py3-pip \
	linux-headers \
	python3 \
	py3-lxml \
	tzdata \
	vips

install/install_dependencies_debian.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env sh
apt update
apt install \
	python3-pip \
	python3
@@ -26,6 +26,77 @@ def print_header_info():
	#print("#####")
	print()



def get_instance():
	try:
		return int(subprocess.check_output(["pidof","maloja"]))
	except Exception:
		return None

def get_instance_supervisor():
	try:
		return int(subprocess.check_output(["pidof","maloja_supervisor"]))
	except Exception:
		return None

def restart():
	if stop():
		start()
	else:
		print(col["red"]("Could not stop Maloja!"))

def start():
	if get_instance_supervisor() is not None:
		print("Maloja is already running.")
	else:
		print_header_info()
		setup()
		try:
			#p = subprocess.Popen(["python3","-m","maloja.server"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
			sp = subprocess.Popen(["python3","-m","maloja","supervisor"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
			print(col["green"]("Maloja started!"))

			port = conf.malojaconfig["PORT"]

			print("Visit your server address (Port " + str(port) + ") to see your web interface. Visit /admin_setup to get started.")
			print("If you're installing this on your local machine, these links should get you there:")
			print("\t" + col["blue"]("http://localhost:" + str(port)))
			print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup"))
			return True
		except Exception:
			print("Error while starting Maloja.")
			return False


def stop():

	for attempt in [(signal.SIGTERM,2),(signal.SIGTERM,5),(signal.SIGKILL,3),(signal.SIGKILL,5)]:

		pid_sv = get_instance_supervisor()
		pid = get_instance()

		if pid is None and pid_sv is None:
			print("Maloja stopped!")
			return True

		if pid_sv is not None:
			os.kill(pid_sv,attempt[0])
		if pid is not None:
			os.kill(pid,attempt[0])

		time.sleep(attempt[1])

	return False

	print("Maloja stopped!")
	return True

def onlysetup():
	print_header_info()
	setup()
@@ -38,54 +109,49 @@ def run_server():
	from . import server
	server.run_server()

def run_supervisor():
	setproctitle("maloja_supervisor")
	while True:
		log("Maloja is not running, starting...",module="supervisor")
		try:
			process = subprocess.Popen(
				["python3", "-m", "maloja","run"],
				stdout=subprocess.DEVNULL,
				stderr=subprocess.DEVNULL,
			)
		except Exception as e:
			log("Error starting Maloja: " + str(e),module="supervisor")
		else:
			try:
				process.wait()
			except Exception as e:
				log("Maloja crashed: " + str(e),module="supervisor")

def debug():
	os.environ["MALOJA_DEV_MODE"] = 'true'
	conf.malojaconfig.load_environment()
	run_server()
	direct()

def print_info():
	print_header_info()
	print(col['lightblue']("Configuration Directory:"),conf.dir_settings['config'])
	print(col['lightblue']("State Directory:        "),conf.dir_settings['state'])
	print(col['lightblue']("Data Directory:         "),conf.dir_settings['state'])
	print(col['lightblue']("Log Directory:          "),conf.dir_settings['logs'])
	print(col['lightblue']("Network:                "),f"Dual Stack, Port {conf.malojaconfig['port']}" if conf.malojaconfig['host'] == "*" else f"IPv{ip_address(conf.malojaconfig['host']).version}, Port {conf.malojaconfig['port']}")
	print(col['lightblue']("Network:                "),f"IPv{ip_address(conf.malojaconfig['host']).version}, Port {conf.malojaconfig['port']}")
	print(col['lightblue']("Timezone:               "),f"UTC{conf.malojaconfig['timezone']:+d}")
	print(col['lightblue']("Location Timezone:      "),conf.malojaconfig['location_timezone'])
	print()
	try:
		from importlib.metadata import distribution
		for pkg in ("sqlalchemy","waitress","bottle","doreah","jinja2"):
			print(col['cyan'](f"{pkg}:".ljust(13)),distribution(pkg).version)
	except Exception:
		print("Could not determine dependency versions.")
	print()
	try:
		import platform
		pyimpl = platform.python_implementation()
		pyvers = '.'.join(platform.python_version_tuple())
		print(col['magenta'](f"Python:".ljust(13)),pyimpl,pyvers)
		osname = platform.system()
		osvers = platform.release()
		print(col['magenta'](f"OS:".ljust(13)),osname,osvers)
		arch = platform.machine()
		print(col['magenta'](f"Architecture:".ljust(13)),arch)
	except Exception:
		print("Could not determine system information.")


def print_settings():
	print_header_info()
	maxlen = max(len(k) for k in conf.malojaconfig)
	for k in conf.malojaconfig:
		print(col['lightblue'](k.ljust(maxlen+2)),conf.malojaconfig[k])


@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images','prefer_existing'],shield=True)
@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images'],shield=True)
def main(*args,**kwargs):

	actions = {
		# server
		"start":start,
		"restart":restart,
		"stop":stop,
		"run":run_server,
		"supervisor":run_supervisor,
		"debug":debug,
		"setup":onlysetup,
		# admin scripts
@@ -94,14 +160,12 @@ def main(*args,**kwargs):
		"generate":generate.generate_scrobbles,	# maloja generate 400
		"export":tasks.export,			# maloja export
		"apidebug":apidebug.run,		# maloja apidebug
		"parsealbums":tasks.parse_albums,	# maloja parsealbums --strategy majority
		# aux
		"info":print_info,
		"settings":print_settings
		"info":print_info
	}

	if "version" in kwargs:
		print(pkginfo.VERSION)
		print(info.VERSION)
		return True
	else:
		try:
@@ -4,7 +4,7 @@
# you know what f*ck it
# this is hardcoded for now because of that damn project / package name discrepancy
# i'll fix it one day
VERSION = "3.2.4"
VERSION = "3.1.2"
HOMEPAGE = "https://github.com/krateng/maloja"
@@ -47,12 +47,9 @@ def init_apis(server):
	server.get(altpath_empty_cl)(alias_api)
	server.post(altpath_empty_cl)(alias_api)

	def invalid_api(pth=''):
	def invalid_api(pth):
		response.status = 404
		return {"error":"Invalid API"}

	server.get("/apis/<pth:path>")(invalid_api)
	server.post("/apis/<pth:path>")(invalid_api)

	server.get("/apis")(invalid_api)
	server.post("/apis")(invalid_api)
@@ -25,20 +25,9 @@ __logmodulename__ = "apis"

cla = CleanerAgent()



# wrapper method: calls handle. final net to catch exceptions and map them to the handlers proper json / xml response
# handle method: finds the method for this path / query. can only raise InvalidMethodException
# scrobble: NOT the exposed scrobble method - helper for all APIs to scrobble their results with self-identification


class APIHandler:

	__apiname__: str
	errors: dict
	# make these classes singletons
	_instance = None

	def __new__(cls, *args, **kwargs):
		if not isinstance(cls._instance, cls):
			cls._instance = object.__new__(cls, *args, **kwargs)
@@ -73,33 +62,37 @@ class APIHandler:

		try:
			response.status,result = self.handle(path,keys)
		except Exception as e:
			for exc_type, exc_response in self.errors.items():
				if isinstance(e, exc_type):
					response.status, result = exc_response
					log(f"Error with {self.__apiname__} API: {e} (Request: {path})")
					break
		except Exception:
			exceptiontype = sys.exc_info()[0]
			if exceptiontype in self.errors:
				response.status,result = self.errors[exceptiontype]
				log(f"Error with {self.__apiname__} API: {exceptiontype} (Request: {path})")
			else:
				# THIS SHOULD NOT HAPPEN
				response.status, result = 500, {"status": "Unknown error", "code": 500}
				log(f"Unhandled Exception with {self.__apiname__} API: {e} (Request: {path})")
				response.status,result = 500,{"status":"Unknown error","code":500}
				log(f"Unhandled Exception with {self.__apiname__} API: {exceptiontype} (Request: {path})")

		return result
		#else:
		#	result = {"error":"Invalid scrobble protocol"}
		#	response.status = 500


	def handle(self,path,keys):

		try:
			methodname = self.get_method(path, keys)
			methodname = self.get_method(path,keys)
			method = self.methods[methodname]
		except KeyError:
			log(f"Could not find a handler for method {methodname} in API {self.__apiname__}", module="debug")
			log(f"Keys: {keys}", module="debug")
		except Exception:
			log("Could not find a handler for method " + str(methodname) + " in API " + self.__apiname__,module="debug")
			log("Keys: " + str(keys),module="debug")
			raise InvalidMethodException()
		return method(path, keys)
		return method(path,keys)


	def scrobble(self,rawscrobble,client=None):

		# fixing etc is handled by the main scrobble function
		return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
		try:
			return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
		except Exception:
			raise ScrobblingException()
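Following the wrapper/handle/scrobble comments above, a hypothetical minimal subclass (not one of the shipped APIs) to show how the pieces fit together:

```python
class ExampleAPI(APIHandler):
	__apiname__ = "example"

	def __init__(self):
		self.methods = {"newscrobble": self.submit_scrobble}
		# exception type -> (status, response body), consumed by the wrapper
		self.errors = {InvalidMethodException: (200, {"error": "Invalid method"})}

	def get_method(self, pathnodes, keys):
		# an unknown name surfaces as InvalidMethodException via handle()
		return keys.get("method")

	def submit_scrobble(self, pathnodes, keys):
		# self.scrobble() forwards to the database with self-identification
		self.scrobble({"track_artists": [keys["artist"]], "track_title": keys["title"]})
		return 200, {"status": "ok"}
```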
@@ -3,4 +3,4 @@ class InvalidAuthException(Exception): pass
class InvalidMethodException(Exception): pass
class InvalidSessionKey(Exception): pass
class MalformedJSONException(Exception): pass

class ScrobblingException(Exception): pass
@@ -21,22 +21,13 @@ class Audioscrobbler(APIHandler):
			"track.scrobble":self.submit_scrobble
		}
		self.errors = {
			BadAuthException: (400, {"error": 6, "message": "Requires authentication"}),
			InvalidAuthException: (401, {"error": 4, "message": "Invalid credentials"}),
			InvalidMethodException: (200, {"error": 3, "message": "Invalid method"}),
			InvalidSessionKey: (403, {"error": 9, "message": "Invalid session key"}),
			Exception: (500, {"error": 8, "message": "Operation failed"})
			BadAuthException:(400,{"error":6,"message":"Requires authentication"}),
			InvalidAuthException:(401,{"error":4,"message":"Invalid credentials"}),
			InvalidMethodException:(200,{"error":3,"message":"Invalid method"}),
			InvalidSessionKey:(403,{"error":9,"message":"Invalid session key"}),
			ScrobblingException:(500,{"error":8,"message":"Operation failed"})
		}

	# xml string escaping: https://stackoverflow.com/a/28703510
	def xml_escape(self, str_xml: str):
		str_xml = str_xml.replace("&", "&amp;")
		str_xml = str_xml.replace("<", "&lt;")
		str_xml = str_xml.replace(">", "&gt;")
		str_xml = str_xml.replace("\"", "&quot;")
		str_xml = str_xml.replace("'", "&apos;")
		return str_xml

	def get_method(self,pathnodes,keys):
		return keys.get("method")

@@ -54,22 +45,12 @@ class Audioscrobbler(APIHandler):
		token = keys.get("authToken")
		user = keys.get("username")
		password = keys.get("password")
		format = keys.get("format") or "xml" # Audioscrobbler 2.0 uses XML by default
		# either username and password
		if user is not None and password is not None:
			client = apikeystore.check_and_identify_key(password)
			if client:
				sessionkey = self.generate_key(client)
				if format == "json":
					return 200,{"session":{"key":sessionkey}}
				else:
					return 200,"""<lfm status="ok">
	<session>
		<name>%s</name>
		<key>%s</key>
		<subscriber>0</subscriber>
	</session>
</lfm>""" % (self.xml_escape(user), self.xml_escape(sessionkey))
				return 200,{"session":{"key":sessionkey}}
			else:
				raise InvalidAuthException()
		# or username and token (deprecated by lastfm)
@@ -78,16 +59,7 @@ class Audioscrobbler(APIHandler):
			key = apikeystore[client]
			if md5(user + md5(key)) == token:
				sessionkey = self.generate_key(client)
				if format == "json":
					return 200,{"session":{"key":sessionkey}}
				else:
					return 200,"""<lfm status="ok">
	<session>
		<name>%s</name>
		<key>%s</key>
		<subscriber>0</subscriber>
	</session>
</lfm>""" % (self.xml_escape(user), self.xml_escape(sessionkey))
				return 200,{"session":{"key":sessionkey}}
			raise InvalidAuthException()
		else:
			raise BadAuthException()
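The deprecated token branch above checks `md5(user + md5(key)) == token`. A self-contained sketch of that computation; the credentials are placeholders (the API key default is borrowed from the extension's popup.js):

```python
from hashlib import md5 as _md5

def md5(s: str) -> str:
    # the handler hashes strings, so encode and return the hex digest
    return _md5(s.encode()).hexdigest()

user, key = "exampleuser", "BlackPinkInYourArea"
token = md5(user + md5(key))  # what a legacy client would send as authToken
assert token == md5(user + md5(key))
```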
@ -23,11 +23,11 @@ class AudioscrobblerLegacy(APIHandler):
"scrobble":self.submit_scrobble
}
self.errors = {
BadAuthException: (403, "BADAUTH\n"),
InvalidAuthException: (403, "BADAUTH\n"),
InvalidMethodException: (400, "FAILED\n"),
InvalidSessionKey: (403, "BADSESSION\n"),
Exception: (500, "FAILED\n")
BadAuthException:(403,"BADAUTH\n"),
InvalidAuthException:(403,"BADAUTH\n"),
InvalidMethodException:(400,"FAILED\n"),
InvalidSessionKey:(403,"BADSESSION\n"),
ScrobblingException:(500,"FAILED\n")
}

def get_method(self,pathnodes,keys):
@ -73,8 +73,6 @@ class AudioscrobblerLegacy(APIHandler):
client = self.mobile_sessions.get(key)
for count in range(50):
artist_key = f"a[{count}]"
album_key = f"b[{count}]"
length_key = f"l[{count}]"
track_key = f"t[{count}]"
time_key = f"i[{count}]"
if artist_key not in keys or track_key not in keys:
@ -84,19 +82,12 @@ class AudioscrobblerLegacy(APIHandler):
timestamp = int(keys[time_key])
except Exception:
timestamp = None

scrobble = {
#database.createScrobble(artists,title,timestamp)
self.scrobble({
'track_artists':[artiststr],
'track_title':titlestr,
'scrobble_time':timestamp,
}
if album_key in keys:
scrobble['album_name'] = keys[album_key]
if length_key in keys:
scrobble['track_length'] = keys[length_key]

#database.createScrobble(artists,title,timestamp)
self.scrobble(scrobble, client=client)
'scrobble_time':timestamp
},client=client)
return 200,"OK\n"


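For orientation, the legacy submission endpoint above reads numbered form fields per scrobble: a[n], t[n], i[n] for artist, track and timestamp, plus optional b[n] (album) and l[n] (length). A hedged sketch of a two-track submission body (session key field and all values invented for illustration):

# hypothetical two-track legacy submission matching the a[n]/t[n]/i[n] loop above
keys = {
    "s": "<session key>",
    "a[0]": "Artist A", "t[0]": "Track One", "i[0]": "1700000000",
    "a[1]": "Artist B", "t[1]": "Track Two", "i[1]": "1700000200",
    "b[1]": "Some Album",  # optional album
    "l[1]": "215",         # optional track length in seconds
}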
@ -3,7 +3,6 @@ from ._exceptions import *
from .. import database
import datetime
from ._apikeys import apikeystore
from ..database.exceptions import DuplicateScrobble, DuplicateTimestamp

from ..pkg_global.conf import malojaconfig

@ -22,13 +21,11 @@ class Listenbrainz(APIHandler):
"validate-token":self.validate_token
}
self.errors = {
BadAuthException: (401, {"code": 401, "error": "You need to provide an Authorization header."}),
InvalidAuthException: (401, {"code": 401, "error": "Incorrect Authorization"}),
InvalidMethodException: (200, {"code": 200, "error": "Invalid Method"}),
MalformedJSONException: (400, {"code": 400, "error": "Invalid JSON document submitted."}),
DuplicateScrobble: (200, {"status": "ok"}),
DuplicateTimestamp: (409, {"error": "Scrobble with the same timestamp already exists."}),
Exception: (500, {"code": 500, "error": "Unspecified server error."})
BadAuthException:(401,{"code":401,"error":"You need to provide an Authorization header."}),
InvalidAuthException:(401,{"code":401,"error":"Incorrect Authorization"}),
InvalidMethodException:(200,{"code":200,"error":"Invalid Method"}),
MalformedJSONException:(400,{"code":400,"error":"Invalid JSON document submitted."}),
ScrobblingException:(500,{"code":500,"error":"Unspecified server error."})
}

def get_method(self,pathnodes,keys):
@ -58,8 +55,6 @@ class Listenbrainz(APIHandler):
try:
metadata = listen["track_metadata"]
artiststr, titlestr = metadata["artist_name"], metadata["track_name"]
albumstr = metadata.get("release_name")
additional = metadata.get("additional_info",{})
try:
timestamp = int(listen["listened_at"])
except Exception:
@ -67,21 +62,10 @@ class Listenbrainz(APIHandler):
except Exception:
raise MalformedJSONException()

extrafields = {
# fields that will not be consumed by regular scrobbling
# will go into 'extra'
k:additional[k]
for k in ['track_mbid', 'release_mbid', 'artist_mbids','recording_mbid','tags']
if k in additional
}

self.scrobble({
'track_artists':[artiststr],
'track_title':titlestr,
'album_title':albumstr,
'scrobble_time':timestamp,
'track_length': additional.get("duration"),
**extrafields
'scrobble_time':timestamp
},client=client)

return 200,{"status":"ok"}

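The ListenBrainz handler above reads artist_name and track_name from track_metadata, an optional release_name, and duration plus MBID fields from additional_info. A sketch of a single listen it would accept (all values invented; the extra fields land in the scrobble's 'extra' via extrafields):

# hypothetical submit-listens payload matching the fields parsed above
listen = {
    "listened_at": 1700000000,
    "track_metadata": {
        "artist_name": "Artist A",
        "track_name": "Track One",
        "release_name": "Some Album",
        "additional_info": {
            "duration": 215,
            "recording_mbid": "00000000-0000-0000-0000-000000000000",
            "tags": ["example"]
        }
    }
}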
@ -2,11 +2,10 @@ import os
import math
import traceback

from bottle import response, static_file, FormsDict

from inspect import signature
from bottle import response, static_file, request, FormsDict

from doreah.logging import log
from doreah.auth import authenticated_api, authenticated_api_with_alternate, authenticated_function

# nimrodel API
from nimrodel import EAPI as API
@ -14,12 +13,12 @@ from nimrodel import Multi


from .. import database
from ..pkg_global.conf import malojaconfig, data_dir, auth
from ..pkg_global.conf import malojaconfig, data_dir



from ..__pkginfo__ import VERSION
from ..malojauri import uri_to_internal, compose_querystring, internal_to_uri, create_uri
from ..malojauri import uri_to_internal, compose_querystring, internal_to_uri
from .. import images
from ._apikeys import apikeystore, api_key_correct

@ -73,40 +72,6 @@ errors = {
'desc':"The database is being upgraded. Please try again later."
}
}),
database.exceptions.EntityDoesNotExist: lambda e: (404,{
"status":"error",
"error":{
'type':'entity_does_not_exist',
'value':e.entitydict,
'desc':"This entity does not exist in the database."
}
}),
database.exceptions.DuplicateTimestamp: lambda e: (409,{
"status":"error",
"error":{
'type':'duplicate_timestamp',
'value':e.rejected_scrobble,
'desc':"A scrobble is already registered with this timestamp."
}
}),
database.exceptions.DuplicateScrobble: lambda e: (200,{
"status": "success",
"desc": "The scrobble is present in the database.",
"track": {},
"warnings": [{
'type': 'scrobble_exists',
'value': None,
'desc': 'This scrobble exists in the database (same timestamp and track). The submitted scrobble was not added.'
}]
}),
images.MalformedB64: lambda e: (400,{
"status":"failure",
"error":{
'type':'malformed_b64',
'value':None,
'desc':"The provided base 64 string is not valid."
}
}),
# for http errors, use their status code
Exception: lambda e: ((e.status_code if hasattr(e,'status_code') else 500),{
"status":"failure",
|
||||
@ -118,8 +83,6 @@ errors = {
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
# decorator to catch exceptions and return proper json responses
|
||||
def catch_exceptions(func):
|
||||
def protector(*args,**kwargs):
|
||||
try:
|
||||
@ -134,11 +97,9 @@ def catch_exceptions(func):
|
||||
|
||||
protector.__doc__ = func.__doc__
|
||||
protector.__annotations__ = func.__annotations__
|
||||
protector.__name__ = f"EXCPR_{func.__name__}"
|
||||
return protector
|
||||
|
||||
|
||||
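The hunk above elides the body of catch_exceptions; conceptually it runs the wrapped endpoint and, on failure, looks the raised exception up in the errors mapping to produce a JSON response. A minimal sketch under that assumption (not the verbatim implementation; it relies on the module-level errors dict and bottle's response):

def catch_exceptions_sketch(func):
    def protector(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            # walk the MRO so the most specific registered handler wins
            for cls in type(e).__mro__:
                if cls in errors:
                    status, body = errors[cls](e)
                    response.status = status
                    return body
            raise  # unreachable in practice: Exception itself is registered
    return protector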
# decorator to expand the docstring with common arguments for the API explorer. DOESNT WRAP
def add_common_args_to_docstring(filterkeys=False,limitkeys=False,delimitkeys=False,amountkeys=False):
def decorator(func):
timeformats = "Possible formats include '2022', '2022/08', '2022/08/01', '2022/W42', 'today', 'thismonth', 'monday', 'august'"
@ -172,64 +133,6 @@ def add_common_args_to_docstring(filterkeys=False,limitkeys=False,delimitkeys=Fa
return decorator


# decorator to take the URI keys and convert them into internal keys
def convert_kwargs(func):

#params = tuple(p for p in signature(func).parameters)

def wrapper(*args,albumartist:Multi[str]=[],trackartist:Multi[str]=[],**kwargs):

kwargs = FormsDict(kwargs)
for a in albumartist:
kwargs.append("albumartist",a)
for a in trackartist:
kwargs.append("trackartist",a)

k_filter, k_limit, k_delimit, k_amount, k_special = uri_to_internal(kwargs,api=True)

try:
return func(*args,k_filter=k_filter, k_limit=k_limit, k_delimit=k_delimit, k_amount=k_amount)
except TypeError:
return func(*args,k_filter=k_filter, k_limit=k_limit, k_delimit=k_delimit, k_amount=k_amount,k_special=k_special)
# TODO: ....really?

wrapper.__doc__ = func.__doc__
wrapper.__name__ = f"CVKWA_{func.__name__}"
return wrapper


# decorator to add pagination info to endpoints (like links to other pages)
# this expects already converted uri args!!!
def add_pagination(endpoint,filterkeys=False,limitkeys=False,delimitkeys=False):

def decorator(func):
def wrapper(*args,k_filter, k_limit, k_delimit, k_amount):

keydicts = []
if filterkeys: keydicts.append(k_filter)
if limitkeys: keydicts.append(k_limit)
if delimitkeys: keydicts.append(k_delimit)
keydicts.append(k_amount)


result = func(*args,k_filter=k_filter, k_limit=k_limit, k_delimit=k_delimit, k_amount=k_amount)

result['pagination'] = {
'page': k_amount['page'],
'perpage': k_amount['perpage'] if (k_amount['perpage'] is not math.inf) else None,
'next_page': create_uri(api.pathprefix + '/' + endpoint,*keydicts,{'page':k_amount['page']+1}) if len(result.get('list',[]))==k_amount['perpage'] else None,
'prev_page': create_uri(api.pathprefix + '/' + endpoint,*keydicts,{'page':k_amount['page']-1}) if k_amount['page'] > 0 else None
}

return result

wrapper.__doc__ = func.__doc__
wrapper.__annotations__ = func.__annotations__
wrapper.__name__ = f"PGNAT_{func.__name__}"
return wrapper

return decorator


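To make the pagination block above concrete: next_page is only linked when the current page came back full (len(result['list']) == perpage), and prev_page whenever page > 0. A small worked trace with invented numbers:

# hypothetical values tracing the pagination logic above
k_amount = {'page': 2, 'perpage': 50}
page_came_back_full = True                 # len(result['list']) == 50

has_next = page_came_back_full             # -> 'next_page' points at page 3
has_prev = (k_amount['page'] > 0)          # -> 'prev_page' points at page 1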
@api.get("test")
|
||||
@catch_exceptions
|
||||
@ -283,22 +186,20 @@ def server_info():
|
||||
@api.get("scrobbles")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,amountkeys=True)
|
||||
@convert_kwargs
|
||||
@add_pagination("scrobbles",filterkeys=True,limitkeys=True)
|
||||
def get_scrobbles_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_scrobbles_external(**keys):
|
||||
"""Returns a list of scrobbles.
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
k_filter, k_time, _, k_amount, _ = uri_to_internal(keys,api=True)
|
||||
ckeys = {**k_filter, **k_time, **k_amount}
|
||||
|
||||
ckeys = {**k_filter, **k_limit, **k_amount}
|
||||
result = database.get_scrobbles(**ckeys)
|
||||
|
||||
# this should now all be served by the inner function
|
||||
#offset = (k_amount.get('page') * k_amount.get('perpage')) if k_amount.get('perpage') is not math.inf else 0
|
||||
#result = result[offset:]
|
||||
#if k_amount.get('perpage') is not math.inf: result = result[:k_amount.get('perpage')]
|
||||
offset = (k_amount.get('page') * k_amount.get('perpage')) if k_amount.get('perpage') is not math.inf else 0
|
||||
result = result[offset:]
|
||||
if k_amount.get('perpage') is not math.inf: result = result[:k_amount.get('perpage')]
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
@ -309,15 +210,15 @@ def get_scrobbles_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
@api.get("numscrobbles")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,amountkeys=True)
|
||||
@convert_kwargs
|
||||
def get_scrobbles_num_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_scrobbles_num_external(**keys):
|
||||
"""Returns amount of scrobbles.
|
||||
|
||||
:return: amount (Integer)
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
k_filter, k_time, _, k_amount, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_filter, **k_time, **k_amount}
|
||||
|
||||
ckeys = {**k_filter, **k_limit, **k_amount}
|
||||
result = database.get_scrobbles_num(**ckeys)
|
||||
|
||||
return {
|
||||
@ -326,18 +227,19 @@ def get_scrobbles_num_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
}
|
||||
|
||||
|
||||
|
||||
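As a usage sketch for the scrobbles endpoint above (URL prefix and exact parameter spellings assumed, not verified here):

# GET /apis/mlj_1/scrobbles?artist=Artist%20A&page=0&perpage=2
#
# expected response shape:
# {"status": "ok",
#  "list": [<scrobble>, <scrobble>],
#  "pagination": {"page": 0, "perpage": 2,
#                 "next_page": "...page=1", "prev_page": null}}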
@api.get("tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
@convert_kwargs
|
||||
def get_tracks_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
"""Returns all tracks (optionally of an artist or on an album).
|
||||
def get_tracks_external(**keys):
|
||||
"""Returns all tracks (optionally of an artist).
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
|
||||
k_filter, _, _, _, _ = uri_to_internal(keys,forceArtist=True)
|
||||
ckeys = {**k_filter}
|
||||
|
||||
result = database.get_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
@ -346,16 +248,15 @@ def get_tracks_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.get("artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring()
|
||||
@convert_kwargs
|
||||
def get_artists_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_artists_external():
|
||||
"""Returns all artists.
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
|
||||
result = database.get_artists()
|
||||
|
||||
return {
|
||||
@ -364,36 +265,20 @@ def get_artists_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
}
|
||||
|
||||
|
||||
@api.get("albums")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
@convert_kwargs
|
||||
def get_albums_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
"""Returns all albums (optionally of an artist).
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
|
||||
ckeys = {**k_filter}
|
||||
result = database.get_albums(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
@api.get("charts/artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True)
|
||||
@convert_kwargs
|
||||
def get_charts_artists_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_charts_artists_external(**keys):
|
||||
"""Returns artist charts
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
_, k_time, _, _, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_time}
|
||||
|
||||
ckeys = {**k_limit}
|
||||
result = database.get_charts_artists(**ckeys)
|
||||
|
||||
return {
|
||||
@ -402,17 +287,18 @@ def get_charts_artists_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.get("charts/tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True)
|
||||
@convert_kwargs
|
||||
def get_charts_tracks_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_charts_tracks_external(**keys):
|
||||
"""Returns track charts
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
k_filter, k_time, _, _, _ = uri_to_internal(keys,forceArtist=True)
|
||||
ckeys = {**k_filter, **k_time}
|
||||
|
||||
ckeys = {**k_filter, **k_limit}
|
||||
result = database.get_charts_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
@ -421,36 +307,19 @@ def get_charts_tracks_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
}
|
||||
|
||||
|
||||
@api.get("charts/albums")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True)
|
||||
@convert_kwargs
|
||||
def get_charts_albums_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
"""Returns album charts
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
|
||||
ckeys = {**k_filter, **k_limit}
|
||||
result = database.get_charts_albums(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
@api.get("pulse")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,delimitkeys=True,amountkeys=True)
|
||||
@convert_kwargs
|
||||
def get_pulse_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_pulse_external(**keys):
|
||||
"""Returns amounts of scrobbles in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
k_filter, k_time, k_internal, k_amount, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_filter, **k_time, **k_internal, **k_amount}
|
||||
|
||||
ckeys = {**k_filter, **k_limit, **k_delimit, **k_amount}
|
||||
results = database.get_pulse(**ckeys)
|
||||
|
||||
return {
|
||||
@ -459,17 +328,19 @@ def get_pulse_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("performance")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,delimitkeys=True,amountkeys=True)
|
||||
@convert_kwargs
|
||||
def get_performance_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_performance_external(**keys):
|
||||
"""Returns artist's or track's rank in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
k_filter, k_time, k_internal, k_amount, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_filter, **k_time, **k_internal, **k_amount}
|
||||
|
||||
ckeys = {**k_filter, **k_limit, **k_delimit, **k_amount}
|
||||
results = database.get_performance(**ckeys)
|
||||
|
||||
return {
|
||||
@ -483,15 +354,15 @@ def get_performance_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
@api.get("top/artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True,delimitkeys=True)
|
||||
@convert_kwargs
|
||||
def get_top_artists_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_top_artists_external(**keys):
|
||||
"""Returns respective number 1 artists in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
_, k_time, k_internal, _, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_time, **k_internal}
|
||||
|
||||
ckeys = {**k_limit, **k_delimit}
|
||||
results = database.get_top_artists(**ckeys,compatibility=True)
|
||||
results = database.get_top_artists(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
@ -499,19 +370,22 @@ def get_top_artists_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("top/tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True,delimitkeys=True)
|
||||
@convert_kwargs
|
||||
def get_top_tracks_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def get_top_tracks_external(**keys):
|
||||
"""Returns respective number 1 tracks in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
_, k_time, k_internal, _, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_time, **k_internal}
|
||||
|
||||
ckeys = {**k_limit, **k_delimit}
|
||||
results = database.get_top_tracks(**ckeys,compatibility=True)
|
||||
# IMPLEMENT THIS FOR TOP TRACKS OF ARTIST/ALBUM AS WELL?
|
||||
# IMPLEMENT THIS FOR TOP TRACKS OF ARTIST AS WELL?
|
||||
|
||||
results = database.get_top_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
@ -519,36 +393,17 @@ def get_top_tracks_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
}
|
||||
|
||||
|
||||
@api.get("top/albums")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True,delimitkeys=True)
|
||||
@convert_kwargs
|
||||
def get_top_albums_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
"""Returns respective number 1 albums in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
|
||||
ckeys = {**k_limit, **k_delimit}
|
||||
results = database.get_top_albums(**ckeys,compatibility=True)
|
||||
# IMPLEMENT THIS FOR TOP ALBUMS OF ARTIST AS WELL?
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
|
||||
@api.get("artistinfo")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
@convert_kwargs
|
||||
def artist_info_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def artist_info_external(**keys):
|
||||
"""Returns information about an artist
|
||||
|
||||
:return: artist (String), scrobbles (Integer), position (Integer), associated (List), medals (Mapping), topweeks (Integer)
|
||||
:rtype: Dictionary"""
|
||||
|
||||
k_filter, _, _, _, _ = uri_to_internal(keys,forceArtist=True)
|
||||
ckeys = {**k_filter}
|
||||
|
||||
return database.artist_info(**ckeys)
|
||||
@ -558,40 +413,30 @@ def artist_info_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
@api.get("trackinfo")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
@convert_kwargs
|
||||
def track_info_external(k_filter, k_limit, k_delimit, k_amount):
|
||||
def track_info_external(artist:Multi[str]=[],**keys):
|
||||
"""Returns information about a track
|
||||
|
||||
:return: track (Mapping), scrobbles (Integer), position (Integer), medals (Mapping), certification (String), topweeks (Integer)
|
||||
:rtype: Dictionary"""
|
||||
|
||||
# transform into a multidict so we can use our normal uri_to_internal function
keys = FormsDict(keys)
for a in artist:
keys.append("artist",a)
k_filter, _, _, _, _ = uri_to_internal(keys,forceTrack=True)
ckeys = {**k_filter}

return database.track_info(**ckeys)


@api.get("albuminfo")
@catch_exceptions
@add_common_args_to_docstring(filterkeys=True)
@convert_kwargs
def album_info_external(k_filter, k_limit, k_delimit, k_amount):
"""Returns information about an album

:return: album (Mapping), scrobbles (Integer), position (Integer), medals (Mapping), certification (String), topweeks (Integer)
:rtype: Dictionary"""

ckeys = {**k_filter}
return database.album_info(**ckeys)


@api.post("newscrobble")
@auth.authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
@authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
@catch_exceptions
def post_scrobble(
artist:Multi=None,
artists:list=[],
title:str="",
album:str=None,
albumartists:list=None,
albumartists:list=[],
duration:int=None,
length:int=None,
time:int=None,
@ -617,7 +462,7 @@ def post_scrobble(
rawscrobble = {
'track_artists':(artist or []) + artists,
'track_title':title,
'album_title':album,
'album_name':album,
'album_artists':albumartists,
'scrobble_duration':duration,
'track_length':length,
@ -625,7 +470,7 @@ def post_scrobble(
}

# for logging purposes, don't pass values that we didn't actually supply
rawscrobble = {k:rawscrobble[k] for k in rawscrobble if rawscrobble[k] is not None} # [] should be passed
rawscrobble = {k:rawscrobble[k] for k in rawscrobble if rawscrobble[k]}


result = database.incoming_scrobble(
@ -641,53 +486,26 @@ def post_scrobble(
'artists':result['track']['artists'],
'title':result['track']['title']
},
'desc':f"Scrobbled {result['track']['title']} by {', '.join(result['track']['artists'])}",
'warnings':[]
'desc':f"Scrobbled {result['track']['title']} by {', '.join(result['track']['artists'])}"
}
if extra_kwargs:
responsedict['warnings'] += [
responsedict['warnings'] = [
{'type':'invalid_keyword_ignored','value':k,
'desc':"This key was not recognized by the server and has been discarded."}
for k in extra_kwargs
]
if artist and artists:
responsedict['warnings'] += [
responsedict['warnings'] = [
{'type':'mixed_schema','value':['artist','artists'],
'desc':"These two fields are meant as alternative methods to submit information. Use of both is discouraged, but works at the moment."}
]

if len(responsedict['warnings']) == 0: del responsedict['warnings']

return responsedict




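A sketch of a minimal body for the newscrobble endpoint above, illustrating the artist/artists alternatives that trigger the mixed_schema warning (all values invented; the api key parameter name is assumed):

# hypothetical newscrobble payload; use either 'artist' or 'artists', not both
payload = {
    "title": "Track One",
    "artists": ["Artist A", "Artist B"],
    "album": "Some Album",
    "length": 215,          # track length in seconds
    "time": 1700000000,     # scrobble timestamp; omitted -> server uses now
    "key": "<api key>"      # api key parameter name assumed
}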
@api.post("addpicture")
|
||||
@auth.authenticated_function(alternate=api_key_correct,api=True)
|
||||
@catch_exceptions
|
||||
@convert_kwargs
|
||||
def add_picture(k_filter, k_limit, k_delimit, k_amount, k_special):
|
||||
"""Uploads a new image for an artist, album or track.
|
||||
|
||||
param string b64: Base 64 representation of the image
|
||||
|
||||
"""
|
||||
|
||||
if "associated" in k_filter: del k_filter["associated"]
|
||||
if "track" in k_filter: k_filter = k_filter["track"]
|
||||
elif "album" in k_filter: k_filter = k_filter["album"]
|
||||
url = images.set_image(k_special['b64'],**k_filter)
|
||||
|
||||
return {
|
||||
'status': 'success',
|
||||
'url': url
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.post("importrules")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def import_rulemodule(**keys):
|
||||
"""Internal Use Only"""
|
||||
@ -706,7 +524,7 @@ def import_rulemodule(**keys):
|
||||
|
||||
|
||||
@api.post("rebuild")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def rebuild(**keys):
|
||||
"""Internal Use Only"""
|
||||
@ -734,7 +552,6 @@ def search(**keys):
|
||||
|
||||
artists = database.db_search(query,type="ARTIST")
|
||||
tracks = database.db_search(query,type="TRACK")
|
||||
albums = database.db_search(query,type="ALBUM")
|
||||
|
||||
|
||||
|
||||
@ -742,7 +559,6 @@ def search(**keys):
|
||||
# also, shorter is better (because longer titles would be easier to further specify)
|
||||
artists.sort(key=lambda x: ((0 if x.lower().startswith(query) else 1 if " " + query in x.lower() else 2),len(x)))
|
||||
tracks.sort(key=lambda x: ((0 if x["title"].lower().startswith(query) else 1 if " " + query in x["title"].lower() else 2),len(x["title"])))
|
||||
albums.sort(key=lambda x: ((0 if x["albumtitle"].lower().startswith(query) else 1 if " " + query in x["albumtitle"].lower() else 2),len(x["albumtitle"])))
|
||||
|
||||
# add links
|
||||
artists_result = []
|
||||
@ -763,26 +579,25 @@ def search(**keys):
|
||||
}
|
||||
tracks_result.append(result)
|
||||
|
||||
albums_result = []
|
||||
for al in albums:
|
||||
result = {
|
||||
'album': al,
|
||||
'link': "/album?" + compose_querystring(internal_to_uri({"album":al})),
|
||||
'image': images.get_album_image(al)
|
||||
}
|
||||
mutable_result = result.copy()
|
||||
mutable_result['album'] = result['album'].copy()
|
||||
if not mutable_result['album']['artists']: mutable_result['album']['displayArtist'] = malojaconfig["DEFAULT_ALBUM_ARTIST"]
|
||||
# we don't wanna actually mutate the dict here because this is in the cache
|
||||
# TODO: This should be globally solved!!!!! immutable dicts with mutable overlays???
|
||||
# this is a major flaw in the architecture!
|
||||
albums_result.append(mutable_result)
|
||||
return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]}
|
||||
|
||||
return {"artists":artists_result[:max_],"tracks":tracks_result[:max_],"albums":albums_result[:max_]}
|
||||
|
||||
@api.post("addpicture")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def add_picture(b64,artist:Multi=[],title=None):
|
||||
"""Internal Use Only"""
|
||||
keys = FormsDict()
|
||||
for a in artist:
|
||||
keys.append("artist",a)
|
||||
if title is not None: keys.append("title",title)
|
||||
k_filter, _, _, _, _ = uri_to_internal(keys)
|
||||
if "track" in k_filter: k_filter = k_filter["track"]
|
||||
images.set_image(b64,**k_filter)
|
||||
|
||||
|
||||
@api.post("newrule")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def newrule(**keys):
|
||||
"""Internal Use Only"""
|
||||
@ -793,21 +608,21 @@ def newrule(**keys):
|
||||
|
||||
|
||||
@api.post("settings")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def set_settings(**keys):
|
||||
"""Internal Use Only"""
|
||||
malojaconfig.update(keys)
|
||||
|
||||
@api.post("apikeys")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def set_apikeys(**keys):
|
||||
"""Internal Use Only"""
|
||||
apikeystore.update(keys)
|
||||
|
||||
@api.post("import")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def import_scrobbles(identifier):
|
||||
"""Internal Use Only"""
|
||||
@ -815,7 +630,7 @@ def import_scrobbles(identifier):
|
||||
import_scrobbles(identifier)
|
||||
|
||||
@api.get("backup")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def get_backup(**keys):
|
||||
"""Internal Use Only"""
|
||||
@ -828,7 +643,7 @@ def get_backup(**keys):
|
||||
return static_file(os.path.basename(archivefile),root=tmpfolder)
|
||||
|
||||
@api.get("export")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def get_export(**keys):
|
||||
"""Internal Use Only"""
|
||||
@ -842,7 +657,7 @@ def get_export(**keys):
|
||||
|
||||
|
||||
@api.post("delete_scrobble")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def delete_scrobble(timestamp):
|
||||
"""Internal Use Only"""
|
||||
@ -854,7 +669,7 @@ def delete_scrobble(timestamp):
|
||||
|
||||
|
||||
@api.post("edit_artist")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def edit_artist(id,name):
|
||||
"""Internal Use Only"""
|
||||
@ -864,7 +679,7 @@ def edit_artist(id,name):
|
||||
}
|
||||
|
||||
@api.post("edit_track")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def edit_track(id,title):
|
||||
"""Internal Use Only"""
|
||||
@ -873,88 +688,29 @@ def edit_track(id,title):
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("edit_album")
|
||||
@auth.authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def edit_album(id,albumtitle):
|
||||
"""Internal Use Only"""
|
||||
result = database.edit_album(id,{'albumtitle':albumtitle})
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
|
||||
@api.post("merge_tracks")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def merge_tracks(target_id,source_ids):
|
||||
"""Internal Use Only"""
|
||||
result = database.merge_tracks(target_id,source_ids)
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"{', '.join(src['title'] for src in result['sources'])} were merged into {result['target']['title']}"
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("merge_artists")
|
||||
@auth.authenticated_function(api=True)
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def merge_artists(target_id,source_ids):
|
||||
"""Internal Use Only"""
|
||||
result = database.merge_artists(target_id,source_ids)
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"{', '.join(src for src in result['sources'])} were merged into {result['target']}"
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("merge_albums")
|
||||
@auth.authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def merge_albums(target_id,source_ids):
"""Internal Use Only"""
|
||||
result = database.merge_albums(target_id,source_ids)
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"{', '.join(src['albumtitle'] for src in result['sources'])} were merged into {result['target']['albumtitle']}"
|
||||
}
|
||||
|
||||
@api.post("associate_albums_to_artist")
|
||||
@auth.authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def associate_albums_to_artist(target_id,source_ids,remove=False):
|
||||
result = database.associate_albums_to_artist(target_id,source_ids,remove=remove)
|
||||
descword = "removed" if remove else "added"
|
||||
if result:
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"{result['target']} was {descword} as album artist of {', '.join(src['albumtitle'] for src in result['sources'])}"
|
||||
}
|
||||
|
||||
@api.post("associate_tracks_to_artist")
|
||||
@auth.authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def associate_tracks_to_artist(target_id,source_ids,remove=False):
|
||||
result = database.associate_tracks_to_artist(target_id,source_ids,remove=remove)
|
||||
descword = "removed" if remove else "added"
|
||||
if result:
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"{result['target']} was {descword} as artist for {', '.join(src['title'] for src in result['sources'])}"
|
||||
}
|
||||
|
||||
@api.post("associate_tracks_to_album")
|
||||
@auth.authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def associate_tracks_to_album(target_id,source_ids):
|
||||
result = database.associate_tracks_to_album(target_id,source_ids)
|
||||
if result:
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"{', '.join(src['title'] for src in result['sources'])} were " + f"added to {result['target']['albumtitle']}" if target_id else "removed from their album"
|
||||
}


@api.post("reparse_scrobble")
@auth.authenticated_function(api=True)
@authenticated_function(api=True)
@catch_exceptions
def reparse_scrobble(timestamp):
"""Internal Use Only"""

@ -15,20 +15,17 @@ class CleanerAgent:
def updateRules(self):

rawrules = []
try:
for f in os.listdir(data_dir["rules"]()):
if f.split('.')[-1].lower() != 'tsv': continue
filepath = data_dir["rules"](f)
with open(filepath,'r') as filed:
reader = csv.reader(filed,delimiter="\t")
rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
except FileNotFoundError:
pass
for f in os.listdir(data_dir["rules"]()):
if f.split('.')[-1].lower() != 'tsv': continue
filepath = data_dir["rules"](f)
with open(filepath,'r') as filed:
reader = csv.reader(filed,delimiter="\t")
rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]


self.rules_belongtogether = [r[1] for r in rawrules if r[0]=="belongtogether"]
self.rules_notanartist = [r[1] for r in rawrules if r[0]=="notanartist"]
self.rules_replacetitle = {r[1].lower():r[2] for r in rawrules if r[0]=="replacetitle"}
self.rules_replacealbumtitle = {r[1].lower():r[2] for r in rawrules if r[0]=="replacealbumtitle"}
self.rules_replaceartist = {r[1].lower():r[2] for r in rawrules if r[0]=="replaceartist"}
self.rules_ignoreartist = [r[1].lower() for r in rawrules if r[0]=="ignoreartist"]
self.rules_addartists = {r[2].lower():(r[1].lower(),r[3]) for r in rawrules if r[0]=="addartists"}
@ -85,7 +82,7 @@ class CleanerAgent:

def parseArtists(self,a):

if isinstance(a,list) or isinstance(a,tuple):
if isinstance(a,list):
res = [self.parseArtists(art) for art in a]
return [a for group in res for a in group]

@ -112,9 +109,9 @@ class CleanerAgent:


for d in self.delimiters_feat:
if re.match(r"(.*) [\(\[]" + d + " (.*)[\)\]]",a,flags=re.IGNORECASE) is not None:
return self.parseArtists(re.sub(r"(.*) [\(\[]" + d + " (.*)[\)\]]",r"\1",a,flags=re.IGNORECASE)) + \
self.parseArtists(re.sub(r"(.*) [\(\[]" + d + " (.*)[\)\]]",r"\2",a,flags=re.IGNORECASE))
if re.match(r"(.*) [\(\[]" + d + " (.*)[\)\]]",a,re.IGNORECASE) is not None:
return self.parseArtists(re.sub(r"(.*) [\(\[]" + d + " (.*)[\)\]]",r"\1",a,re.IGNORECASE)) + \
self.parseArtists(re.sub(r"(.*) [\(\[]" + d + " (.*)[\)\]]",r"\2",a,re.IGNORECASE))



@ -159,46 +156,42 @@ class CleanerAgent:
# t = p(t).strip()
return t

def parseTitleForArtists(self,title):
def parseTitleForArtists(self,t):
for d in self.delimiters_feat:
if re.match(r"(.*) [\(\[]" + d + " (.*?)[\)\]]",t,re.IGNORECASE) is not None:
(title,artists) = self.parseTitleForArtists(re.sub(r"(.*) [\(\[]" + d + " (.*?)[\)\]]",r"\1",t,re.IGNORECASE))
artists += self.parseArtists(re.sub(r"(.*) [\(\[]" + d + " (.*?)[\)\]].*",r"\2",t,re.IGNORECASE))
return (title,artists)
if re.match(r"(.*) - " + d + " (.*)",t,re.IGNORECASE) is not None:
(title,artists) = self.parseTitleForArtists(re.sub(r"(.*) - " + d + " (.*)",r"\1",t,re.IGNORECASE))
artists += self.parseArtists(re.sub(r"(.*) - " + d + " (.*).*",r"\2",t,re.IGNORECASE))
return (title,artists)
if re.match(r"(.*) " + d + " (.*)",t,re.IGNORECASE) is not None:
(title,artists) = self.parseTitleForArtists(re.sub(r"(.*) " + d + " (.*)",r"\1",t,re.IGNORECASE))
artists += self.parseArtists(re.sub(r"(.*) " + d + " (.*).*",r"\2",t,re.IGNORECASE))
return (title,artists)

artists = []
for delimiter in malojaconfig["DELIMITERS_FEAT"]:
for pattern in [
r" [\(\[]" + re.escape(delimiter) + " (.*?)[\)\]]",
r" - " + re.escape(delimiter) + " (.*)",
r" " + re.escape(delimiter) + " (.*)"
]:
matches = re.finditer(pattern,title,flags=re.IGNORECASE)
for match in matches:
title = match.re.sub('',match.string) # Remove matched part
artists += self.parseArtists(match.group(1)) # Parse matched artist string



if malojaconfig["PARSE_REMIX_ARTISTS"]:
for filter in malojaconfig["FILTERS_REMIX"]:
for pattern in [
r" [\(\[](.*)" + re.escape(filter) + "[\)\]]", # match remix in brackets
r" - (.*)" + re.escape(filter) # match remix split with "-"
]:
match = re.search(pattern,title,flags=re.IGNORECASE)
if match:
# title stays the same
artists += self.parseArtists(match.group(1))

# match remix in brackets
m = re.match(r".*[\(\[](.*)" + filter + "[\)\]]", t, re.IGNORECASE)
if m:
artists += self.parseArtists(m.groups()[0])

# match remix split with "-"
m = re.match(r".*-(.*)" + filter, t, re.IGNORECASE)
if m:
artists += self.parseArtists(m.groups()[0])

for st in self.rules_artistintitle:
if st in title.lower(): artists += self.rules_artistintitle[st].split("␟")
return (title,artists)
if st in t.lower(): artists += self.rules_artistintitle[st].split("␟")
return (t,artists)


def parseAlbumtitle(self,t):
if t.strip().lower() in self.rules_replacealbumtitle:
return self.rules_replacealbumtitle[t.strip().lower()]

t = t.replace("[","(").replace("]",")")

t = t.strip()
return t


def flatten(lis):

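To illustrate the rewritten feature-artist parsing above, assuming "feat." is among the configured DELIMITERS_FEAT:

# hypothetical trace of parseTitleForArtists
title = "Song One (feat. Artist B)"
# pattern r" [\(\[]feat\. (.*?)[\)\]]" matches " (feat. Artist B)";
# the matched span is removed from the title and the group parsed as artists
expected = ("Song One", ["Artist B"])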
@ -8,7 +8,6 @@ countas Trouble Maker HyunA
countas S Club 7 Tina Barrett
countas 4Minute HyunA
countas I.O.I Chungha
countas TrySail Sora Amamiya
# Group more famous than single artist
countas RenoakRhythm Approaching Nirvana
countas Shirley Manson Garbage
@ -22,4 +21,3 @@ countas Gawr Gura Hololive EN
countas Mori Calliope Hololive EN
countas Ninomae Ina'nis Hololive EN
countas Takanashi Kiara Hololive EN
countas Ceres Fauna Hololive EN

@ -1,20 +0,0 @@
# NAME: JPop
# DESC: Fixes and romanizes various Japanese tracks and artists


belongtogether Myth & Roid


# Sora-chan
replaceartist Amamiya Sora Sora Amamiya
replacetitle エデンの旅人 Eden no Tabibito
replacetitle 月灯り Tsukiakari
replacetitle 火花 Hibana
replacetitle ロンリーナイト・ディスコティック Lonely Night Discotheque
replacetitle 羽根輪舞 Hane Rinbu
replacetitle メリーゴーランド Merry-go-round
replacetitle フリイジア Fressia
replacetitle 誓い Chikai

# ReoNa
replacetitle ないない nainai

@ -21,7 +21,7 @@ addartists HyunA Change Jun Hyung
# BLACKPINK
countas Jennie BLACKPINK
countas Rosé BLACKPINK
countas Lalisa BLACKPINK
countas Lisa BLACKPINK
countas Jisoo BLACKPINK
replacetitle AS IF IT'S YOUR LAST As If It's Your Last
replacetitle BOOMBAYAH Boombayah
@ -98,7 +98,6 @@ countas Jeongyeon TWICE
countas Chaeyoung TWICE
countas Nayeon TWICE
countas Sana TWICE
countas MISAMO TWICE

# AOA
countas AOA Black AOA
@ -160,8 +159,8 @@ replaceartist 여자친구 GFriend GFriend
# Girl's Generation
replaceartist 소녀시대 Girls' Generation
replaceartist SNSD Girls' Generation
replaceartist Girls' Generation-TTS TaeTiSeo
countas TaeTiSeo Girls' Generation
replaceartist Girls' Generation-TTS TaeTiSeo
countas TaeTiSeo Girls' Generation

# Apink
replaceartist A Pink Apink
@ -183,9 +182,6 @@ countas Akali K/DA
# (G)I-DLE
countas Soyeon (G)I-DLE
countas Miyeon (G)I-DLE
countas Yuqi (G)I-DLE
countas Minnie (G)I-DLE
countas Shuhua (G)I-DLE
replaceartist Jeon Soyeon Soyeon


@ -204,21 +200,10 @@ countas ACE IZ*ONE
countas Chaewon IZ*ONE
countas Minju IZ*ONE

# ITZY
countas Yeji ITZY

# IVE
countas Yeji ITZY
countas Wonyoung IVE
countas Yujin IVE
countas Gaeul IVE

# Pristin
countas Pristin V Pristin

# CLC
countas Sorn CLC
countas Yeeun CLC
countas Seungyeon CLC

# Popular Remixes
artistintitle Areia Remix Areia

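The rule files above are tab-separated: a verb followed by its arguments, with '#' lines skipped by updateRules. For orientation (tabs shown as <TAB>):

# countas<TAB>Jennie<TAB>BLACKPINK             -> count this artist's scrobbles toward the group
# replaceartist<TAB>SNSD<TAB>Girls' Generation -> canonicalize an artist name
# artistintitle<TAB>Areia Remix<TAB>Areia      -> credit an artist whenever the phrase appears in a title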
@ -1,34 +1,23 @@
# server
from bottle import request, response, FormsDict

from ..pkg_global import conf


# decorator that makes sure this function is only run in normal operation,
# not when we run a task that needs to access the database
def no_aux_mode(func):
def wrapper(*args,**kwargs):
from ..pkg_global import conf
if conf.AUX_MODE: return
return func(*args,**kwargs)
return wrapper


# rest of the project
from ..cleanup import CleanerAgent
from .. import images
from ..malojatime import register_scrobbletime, ranges, alltime, today, thisweek, thisyear, MTRangeComposite
from ..malojatime import register_scrobbletime, time_stamps, ranges, alltime
from ..malojauri import uri_to_internal, internal_to_uri, compose_querystring
from ..thirdparty import proxy_scrobble_all
from ..pkg_global.conf import data_dir, malojaconfig
from ..apis import apikeystore
#db
from . import sqldb
from . import cached
from . import dbcache
from . import exceptions

# doreah toolkit
from doreah.logging import log
from doreah.auth import authenticated_api, authenticated_api_with_alternate
import doreah


@ -43,7 +32,6 @@ from collections import namedtuple
from threading import Lock
import yaml, json
import math
from itertools import takewhile

# url handling
import urllib
@ -58,15 +46,10 @@ dbstatus = {



def waitfordb(func):
def newfunc(*args,**kwargs):
if not dbstatus['healthy']: raise exceptions.DatabaseNotBuilt()
return func(*args,**kwargs)

newfunc.__name__ = func.__name__
return newfunc


@ -110,20 +93,14 @@ def incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None):
log(f"Incoming scrobble [Client: {client} | API: {api}]: {rawscrobble}")

scrobbledict = rawscrobble_to_scrobbledict(rawscrobble, fix, client)
albumupdate = (malojaconfig["ALBUM_INFORMATION_TRUST"] == 'last')

if scrobbledict:
sqldb.add_scrobble(scrobbledict,dbconn=dbconn)
proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])

sqldb.add_scrobble(scrobbledict,update_album=albumupdate,dbconn=dbconn)
proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])
dbcache.invalidate_caches(scrobbledict['time'])

dbcache.invalidate_caches(scrobbledict['time'])

#return {"status":"success","scrobble":scrobbledict}
return scrobbledict

else:
raise exceptions.MissingScrobbleParameters('artist')
#return {"status":"success","scrobble":scrobbledict}
return scrobbledict


@waitfordb
@ -136,16 +113,14 @@ def reparse_scrobble(timestamp):

newscrobble = rawscrobble_to_scrobbledict(scrobble['rawscrobble'])

if newscrobble:
track_id = sqldb.get_track_id(newscrobble['track'])

track_id = sqldb.get_track_id(newscrobble['track'])

# check if id changed
if sqldb.get_track_id(scrobble['track']) != track_id:
sqldb.edit_scrobble(timestamp, {'track':newscrobble['track']})
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()
return sqldb.get_scrobble(timestamp=timestamp)
# check if id changed
if sqldb.get_track_id(scrobble['track']) != track_id:
sqldb.edit_scrobble(timestamp, {'track':newscrobble['track']})
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()
return sqldb.get_scrobble(timestamp=timestamp)

return False

@ -155,23 +130,8 @@ def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
scrobbleinfo = {**rawscrobble}
if fix:
scrobbleinfo['track_artists'],scrobbleinfo['track_title'] = cla.fullclean(scrobbleinfo['track_artists'],scrobbleinfo['track_title'])
if scrobbleinfo.get('album_artists'):
scrobbleinfo['album_artists'] = cla.parseArtists(scrobbleinfo['album_artists'])
if scrobbleinfo.get("album_title"):
scrobbleinfo['album_title'] = cla.parseAlbumtitle(scrobbleinfo['album_title'])
scrobbleinfo['scrobble_time'] = scrobbleinfo.get('scrobble_time') or int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())

# if we send [] as albumartists, it means various
# if we send nothing, the scrobbler just doesnt support it and we assume track artists
if ('album_title' in scrobbleinfo) and ('album_artists' not in scrobbleinfo):
scrobbleinfo['album_artists'] = scrobbleinfo.get('track_artists')

# New plan, do this further down
# NONE always means there is simply no info, so make a guess or whatever the options say
# could use the track artists, but probably check if any album with the same name exists first
# various artists always needs to be specified via []
# TODO

# processed info to internal scrobble dict
scrobbledict = {
"time":scrobbleinfo.get('scrobble_time'),
@ -179,7 +139,7 @@ def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
"artists":scrobbleinfo.get('track_artists'),
"title":scrobbleinfo.get('track_title'),
"album":{
"albumtitle":scrobbleinfo.get('album_title'),
"name":scrobbleinfo.get('album_name'),
"artists":scrobbleinfo.get('album_artists')
},
"length":scrobbleinfo.get('track_length')
@ -188,20 +148,11 @@ def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
"origin":f"client:{client}" if client else "generic",
"extra":{
k:scrobbleinfo[k] for k in scrobbleinfo if k not in
['scrobble_time','track_artists','track_title','track_length','scrobble_duration']#,'album_title','album_artists']
# we still save album info in extra because the user might select majority album authority
['scrobble_time','track_artists','track_title','track_length','scrobble_duration','album_name','album_artists']
},
"rawscrobble":rawscrobble
}

if not scrobbledict["track"]["album"]["albumtitle"]:
del scrobbledict["track"]["album"]

# discard if invalid
if len(scrobbledict['track']['artists']) == 0:
return None
# TODO: other checks

return scrobbledict

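A sketch of the transformation above: a raw scrobble with flat track_* keys becomes a nested scrobble dict, and unconsumed keys land in 'extra' (values invented, shape abridged):

# hypothetical input/output for rawscrobble_to_scrobbledict
rawscrobble = {
    'track_artists': ['Artist A'],
    'track_title': 'Track One',
    'scrobble_time': 1700000000,
    'album_title': 'Some Album',
    'track_mbid': '00000000-0000-0000-0000-000000000000',  # ends up in 'extra'
}
# expected result (abridged):
# {'time': 1700000000,
#  'track': {'artists': ['Artist A'], 'title': 'Track One',
#            'album': {'albumtitle': 'Some Album', ...}, 'length': None},
#  'origin': 'generic',
#  'extra': {'track_mbid': '...'},
#  'rawscrobble': {...}}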
@ -233,23 +184,12 @@ def edit_track(id,trackinfo):

return result

@waitfordb
def edit_album(id,albuminfo):
album = sqldb.get_album(id)
log(f"Renaming {album['albumtitle']} to {albuminfo['albumtitle']}")
result = sqldb.edit_album(id,albuminfo)
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()

return result

@waitfordb
def merge_artists(target_id,source_ids):
sources = [sqldb.get_artist(id) for id in source_ids]
target = sqldb.get_artist(target_id)
log(f"Merging {sources} into {target}")
sqldb.merge_artists(target_id,source_ids)
result = {'sources':sources,'target':target}
result = sqldb.merge_artists(target_id,source_ids)
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()

@ -260,116 +200,35 @@ def merge_tracks(target_id,source_ids):
sources = [sqldb.get_track(id) for id in source_ids]
target = sqldb.get_track(target_id)
log(f"Merging {sources} into {target}")
sqldb.merge_tracks(target_id,source_ids)
result = {'sources':sources,'target':target}
result = sqldb.merge_tracks(target_id,source_ids)
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()

return result

@waitfordb
def merge_albums(target_id,source_ids):
sources = [sqldb.get_album(id) for id in source_ids]
target = sqldb.get_album(target_id)
log(f"Merging {sources} into {target}")
sqldb.merge_albums(target_id,source_ids)
result = {'sources':sources,'target':target}
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()

return result



@waitfordb
def associate_albums_to_artist(target_id,source_ids,remove=False):
sources = [sqldb.get_album(id) for id in source_ids]
target = sqldb.get_artist(target_id)
if remove:
log(f"Removing {sources} from {target}")
sqldb.remove_artists_from_albums(artist_ids=[target_id],album_ids=source_ids)
else:
log(f"Adding {sources} into {target}")
sqldb.add_artists_to_albums(artist_ids=[target_id],album_ids=source_ids)
result = {'sources':sources,'target':target}
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()

return result

@waitfordb
def associate_tracks_to_artist(target_id,source_ids,remove=False):
sources = [sqldb.get_track(id) for id in source_ids]
target = sqldb.get_artist(target_id)
if remove:
log(f"Removing {sources} from {target}")
sqldb.remove_artists_from_tracks(artist_ids=[target_id],track_ids=source_ids)
else:
log(f"Adding {sources} into {target}")
sqldb.add_artists_to_tracks(artist_ids=[target_id],track_ids=source_ids)
result = {'sources':sources,'target':target}
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()

return result

@waitfordb
def associate_tracks_to_album(target_id,source_ids):
# target_id None means remove from current album!
sources = [sqldb.get_track(id) for id in source_ids]
if target_id:
target = sqldb.get_album(target_id)
log(f"Adding {sources} into {target}")
sqldb.add_tracks_to_albums({src:target_id for src in source_ids},replace=True)
else:
target = None # removing from album: no target to report below
sqldb.remove_album(source_ids)
result = {'sources':sources,'target':target}
dbcache.invalidate_entity_cache()
dbcache.invalidate_caches()

return result



@waitfordb
def get_scrobbles(dbconn=None,**keys):
(since,to) = keys.get('timerange').timestamps()

reverse = keys.get('reverse',True) # compatibility with old calls
if keys.get('perpage',math.inf) is not math.inf:
limit = (keys.get('page',0)+1) * keys.get('perpage',100)
behead = keys.get('page',0) * keys.get('perpage',100)
else:
limit = None
behead = 0


associated = keys.get('associated',False)
if 'artist' in keys:
result = sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,associated=associated,limit=limit,reverse=reverse,dbconn=dbconn)
result = sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,dbconn=dbconn)
elif 'track' in keys:
result = sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,limit=limit,reverse=reverse,dbconn=dbconn)
elif 'album' in keys:
result = sqldb.get_scrobbles_of_album(album=keys['album'],since=since,to=to,limit=limit,reverse=reverse,dbconn=dbconn)
result = sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,dbconn=dbconn)
else:
result = sqldb.get_scrobbles(since=since,to=to,limit=limit,reverse=reverse,dbconn=dbconn)
result = sqldb.get_scrobbles(since=since,to=to,dbconn=dbconn)
#return result[keys['page']*keys['perpage']:(keys['page']+1)*keys['perpage']]

#print(result)

return list(result[behead:])
return list(reversed(result))


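The limit/behead arithmetic above fetches everything up to the end of the requested page and then drops the preceding pages. Worked numbers:

# hypothetical paging math for get_scrobbles with page=2, perpage=50
page, perpage = 2, 50
limit = (page + 1) * perpage   # 150: fetch up to the end of page 2
behead = page * perpage        # 100: then drop pages 0 and 1
# result[behead:] serves items 100..149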
@waitfordb
|
||||
def get_scrobbles_num(dbconn=None,**keys):
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
associated = keys.get('associated',False)
|
||||
if 'artist' in keys:
|
||||
result = len(sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,associated=associated,resolve_references=False,dbconn=dbconn))
|
||||
result = len(sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,resolve_references=False,dbconn=dbconn))
|
||||
elif 'track' in keys:
|
||||
result = len(sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,resolve_references=False,dbconn=dbconn))
|
||||
elif 'album' in keys:
|
||||
result = len(sqldb.get_scrobbles_of_album(album=keys['album'],since=since,to=to,resolve_references=False,dbconn=dbconn))
|
||||
else:
|
||||
result = sqldb.get_scrobbles_num(since=since,to=to,dbconn=dbconn)
|
||||
return result
|
||||
@ -384,186 +243,60 @@ def get_tracks(dbconn=None,**keys):
|
||||
result = sqldb.get_tracks_of_artist(keys.get('artist'),dbconn=dbconn)
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_albums(dbconn=None,**keys):
|
||||
if keys.get('artist') is None:
|
||||
result = sqldb.get_albums(dbconn=dbconn)
|
||||
else:
|
||||
result = sqldb.get_albums_of_artists([sqldb.get_artist_id(keys.get('artist'),create_new=False)],dbconn=dbconn)
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_artists(dbconn=None):
|
||||
return sqldb.get_artists(dbconn=dbconn)
|
||||
|
||||
@waitfordb
|
||||
def get_albums_artist_appears_on(dbconn=None,**keys):
|
||||
|
||||
artist_id = sqldb.get_artist_id(keys['artist'],dbconn=dbconn)
|
||||
|
||||
albums = sqldb.get_albums_artists_appear_on([artist_id],dbconn=dbconn).get(artist_id) or []
|
||||
ownalbums = sqldb.get_albums_of_artists([artist_id],dbconn=dbconn).get(artist_id) or []
|
||||
|
||||
result = {
|
||||
"own_albums":ownalbums,
|
||||
"appears_on":[a for a in albums if a not in ownalbums]
|
||||
}
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_tracks_without_album(dbconn=None,resolve_ids=True):
|
||||
return get_charts_tracks(album=None,timerange=alltime(),resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
|
||||
@waitfordb
|
||||
def get_charts_artists(dbconn=None,resolve_ids=True,**keys):
|
||||
def get_charts_artists(dbconn=None,**keys):
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
separate = keys.get('separate',False)
|
||||
result = sqldb.count_scrobbles_by_artist(since=since,to=to,resolve_ids=resolve_ids,associated=(not separate),dbconn=dbconn)
|
||||
|
||||
if resolve_ids:
|
||||
# only add associated info if we resolve
|
||||
map = sqldb.get_associated_artist_map(artist_ids=[entry['artist_id'] for entry in result if 'artist_id' in entry])
|
||||
for entry in result:
|
||||
if "artist_id" in entry:
|
||||
entry['associated_artists'] = map[entry['artist_id']]
|
||||
result = sqldb.count_scrobbles_by_artist(since=since,to=to,dbconn=dbconn)
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_charts_tracks(dbconn=None,resolve_ids=True,**keys):
|
||||
def get_charts_tracks(dbconn=None,**keys):
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
if 'artist' in keys:
|
||||
result = sqldb.count_scrobbles_by_track_of_artist(since=since,to=to,artist=keys['artist'],associated=keys.get('associated',False),resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
elif 'album' in keys:
|
||||
result = sqldb.count_scrobbles_by_track_of_album(since=since,to=to,album=keys['album'],resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
result = sqldb.count_scrobbles_by_track_of_artist(since=since,to=to,artist=keys['artist'],dbconn=dbconn)
|
||||
else:
|
||||
result = sqldb.count_scrobbles_by_track(since=since,to=to,resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_charts_albums(dbconn=None,resolve_ids=True,only_own_albums=False,**keys):
|
||||
# TODO: different scrobble numbers for only own tracks on own album etc?
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
|
||||
if 'artist' in keys:
|
||||
artist = sqldb.get_artist(sqldb.get_artist_id(keys['artist']))
|
||||
result = sqldb.count_scrobbles_by_album_combined(since=since,to=to,artist=artist,associated=keys.get('associated',False),resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
if only_own_albums:
|
||||
# TODO: this doesnt take associated into account and doesnt change ranks
|
||||
result = [e for e in result if artist in (e['album']['artists'] or [])]
|
||||
else:
|
||||
result = sqldb.count_scrobbles_by_album(since=since,to=to,resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
result = sqldb.count_scrobbles_by_track(since=since,to=to,dbconn=dbconn)
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_pulse(dbconn=None,**keys):
|
||||
|
||||
# amountkeys for pulse and performance aren't really necessary
|
||||
# since the amount of entries is completely determined by the time keys
|
||||
# but lets just include it in case
|
||||
reverse = keys.get('reverse',False)
|
||||
if keys.get('perpage',math.inf) is not math.inf:
|
||||
limit = (keys.get('page',0)+1) * keys.get('perpage',100)
|
||||
behead = keys.get('page',0) * keys.get('perpage',100)
|
||||
else:
|
||||
limit = math.inf
|
||||
behead = 0
|
||||
|
||||
rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
|
||||
if reverse: rngs = reversed(list(rngs))
|
||||
results = []
|
||||
for rng in rngs:
|
||||
|
||||
# count down how many we need
|
||||
if limit==0:
|
||||
break
|
||||
limit -= 1
|
||||
|
||||
# skip prev pages
|
||||
if behead>0:
|
||||
behead -= 1
|
||||
continue
|
||||
|
||||
res = get_scrobbles_num(timerange=rng,**{k:keys[k] for k in keys if k != 'timerange'},dbconn=dbconn)
|
||||
if keys.get('artist') and keys.get('associated',False):
|
||||
res_real = get_scrobbles_num(timerange=rng,**{k:keys[k] for k in keys if k not in ['timerange','associated']},associated=False,dbconn=dbconn)
|
||||
# this isnt really efficient, we could do that in one db call, but i dont wanna reorganize rn
|
||||
else:
|
||||
res_real = res
|
||||
results.append({"range":rng,"scrobbles":res,"real_scrobbles":res_real})
|
||||
results.append({"range":rng,"scrobbles":res})
|
||||
|
||||
return results
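
A minimal sketch (not part of the diff) of the limit/behead pagination that the new get_pulse and get_performance share: 'behead' skips entries belonging to earlier pages, while 'limit' counts down the total entries needed for the requested page. The standalone function name is illustrative.

import math

def paginate(entries, page=0, perpage=None):
	# limit counts down for every entry seen; behead consumes the skipped ones
	limit = math.inf if perpage is None else (page + 1) * perpage
	behead = 0 if perpage is None else page * perpage
	out = []
	for e in entries:
		if limit == 0:
			break
		limit -= 1
		if behead > 0:
			behead -= 1
			continue
		out.append(e)
	return out

# paginate(range(10), page=1, perpage=3) -> [3, 4, 5]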

@waitfordb
def get_performance(dbconn=None,**keys):

# amountkeys for pulse and performance aren't really necessary
# since the amount of entries is completely determined by the time keys
# but lets just include it in case
reverse = keys.get('reverse',False)
if keys.get('perpage',math.inf) is not math.inf:
limit = (keys.get('page',0)+1) * keys.get('perpage',100)
behead = keys.get('page',0) * keys.get('perpage',100)
else:
limit = math.inf
behead = 0

separate = keys.get('separate')

rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
if reverse: rngs = reversed(list(rngs))
results = []

for rng in rngs:

# count down how many we need
if limit==0:
break
limit -= 1

# skip prev pages
if behead>0:
behead -= 1
continue

if "track" in keys:
track_id = sqldb.get_track_id(keys['track'],create_new=False,dbconn=dbconn)
if not track_id:
raise exceptions.TrackDoesNotExist(keys['track'])
#track = sqldb.get_track(track_id,dbconn=dbconn)
charts = get_charts_tracks(timerange=rng,resolve_ids=False,dbconn=dbconn)
track = sqldb.get_track(sqldb.get_track_id(keys['track'],dbconn=dbconn),dbconn=dbconn)
charts = get_charts_tracks(timerange=rng,dbconn=dbconn)
rank = None
for c in charts:
if c["track_id"] == track_id:
if c["track"] == track:
rank = c["rank"]
break
elif "artist" in keys:
artist_id = sqldb.get_artist_id(keys['artist'],create_new=False,dbconn=dbconn)
if not artist_id:
raise exceptions.ArtistDoesNotExist(keys['artist'])
#artist = sqldb.get_artist(artist_id,dbconn=dbconn)
artist = sqldb.get_artist(sqldb.get_artist_id(keys['artist'],dbconn=dbconn),dbconn=dbconn)
# ^this is the most useless line in programming history
# but I like consistency
charts = get_charts_artists(timerange=rng,resolve_ids=False,separate=separate,dbconn=dbconn)
charts = get_charts_artists(timerange=rng,dbconn=dbconn)
rank = None
for c in charts:
if c["artist_id"] == artist_id:
rank = c["rank"]
break
elif "album" in keys:
album_id = sqldb.get_album_id(keys['album'],create_new=False,dbconn=dbconn)
if not album_id:
raise exceptions.AlbumDoesNotExist(keys['album'])
#album = sqldb.get_album(album_id,dbconn=dbconn)
charts = get_charts_albums(timerange=rng,resolve_ids=False,dbconn=dbconn)
rank = None
for c in charts:
if c["album_id"] == album_id:
if c["artist"] == artist:
rank = c["rank"]
break
else:
@@ -573,81 +306,33 @@ def get_performance(dbconn=None,**keys):
return results


@waitfordb
def get_top_artists(dbconn=None,compatibility=True,**keys):

separate = keys.get('separate')
def get_top_artists(dbconn=None,**keys):

rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
results = []

for rng in rngs:
result = {'range':rng}
res = get_charts_artists(timerange=rng,separate=separate,dbconn=dbconn)

result['top'] = [
{'artist': r['artist'], 'scrobbles': r['scrobbles'], 'real_scrobbles':r['real_scrobbles'], 'associated_artists': sqldb.get_associated_artists(r['artist'])}
for r in takewhile(lambda x:x['rank']==1,res)
]
# for third party applications
if compatibility:
if result['top']:
result.update(result['top'][0])
else:
result.update({'artist':None,'scrobbles':0,'real_scrobbles':0})

results.append(result)
try:
res = get_charts_artists(timerange=rng,dbconn=dbconn)[0]
results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"]})
except Exception:
results.append({"range":rng,"artist":None,"scrobbles":0})

return results
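
Illustrative sketch of the compatibility flattening the new get_top_* functions perform: with compatibility=True, the first rank-1 entry is merged into the result dict, so older consumers that expect flat entries keep working while ties remain exposed under 'top'. The values below are hypothetical stand-ins.

result = {
	'range': '2023',    # stand-in for the actual range object
	'top': [
		{'artist': 'A', 'scrobbles': 42, 'real_scrobbles': 40, 'associated_artists': []},
		{'artist': 'B', 'scrobbles': 42, 'real_scrobbles': 42, 'associated_artists': []},
	],
}
if result['top']:
	result.update(result['top'][0])   # flat keys mirror the first tied winner
else:
	result.update({'artist': None, 'scrobbles': 0, 'real_scrobbles': 0})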


@waitfordb
def get_top_tracks(dbconn=None,compatibility=True,**keys):
def get_top_tracks(dbconn=None,**keys):

rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
results = []

for rng in rngs:
result = {'range':rng}
res = get_charts_tracks(timerange=rng,dbconn=dbconn)

result['top'] = [
{'track': r['track'], 'scrobbles': r['scrobbles']}
for r in takewhile(lambda x:x['rank']==1,res)
]
# for third party applications
if compatibility:
if result['top']:
result.update(result['top'][0])
else:
result.update({'track':None,'scrobbles':0})

results.append(result)

return results


@waitfordb
def get_top_albums(dbconn=None,compatibility=True,**keys):

rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
results = []

for rng in rngs:

result = {'range':rng}
res = get_charts_albums(timerange=rng,dbconn=dbconn)

result['top'] = [
{'album': r['album'], 'scrobbles': r['scrobbles']}
for r in takewhile(lambda x:x['rank']==1,res)
]
# for third party applications
if compatibility:
if result['top']:
result.update(result['top'][0])
else:
result.update({'album':None,'scrobbles':0})

results.append(result)
try:
res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
except Exception:
results.append({"range":rng,"track":None,"scrobbles":0})

return results

@@ -657,91 +342,42 @@ def artist_info(dbconn=None,**keys):
artist = keys.get('artist')
if artist is None: raise exceptions.MissingEntityParameter()

artist_id = sqldb.get_artist_id(artist,create_new=False,dbconn=dbconn)
if not artist_id: raise exceptions.ArtistDoesNotExist(artist)

artist_id = sqldb.get_artist_id(artist,dbconn=dbconn)
artist = sqldb.get_artist(artist_id,dbconn=dbconn)
alltimecharts = get_charts_artists(timerange=alltime(),dbconn=dbconn)
#we cant take the scrobble number from the charts because that includes all countas scrobbles
scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
albums = sqldb.get_albums_of_artists(set([artist_id]),dbconn=dbconn)
isalbumartist = len(albums.get(artist_id,[]))>0

cert = None
own_track_charts = get_charts_tracks(timerange=alltime(),resolve_ids=False,artist=artist,dbconn=dbconn)
own_album_charts = get_charts_albums(timerange=alltime(),resolve_ids=True,artist=artist,dbconn=dbconn)
# we resolve ids here which we don't need to. however, on the jinja page we make that same call
# later again with resolve ids, so its a cache miss and it doubles page load time
# TODO: find better solution
if own_track_charts:
c = own_track_charts[0]
tscrobbles = c["scrobbles"]
threshold_gold, threshold_platinum, threshold_diamond = malojaconfig["SCROBBLES_GOLD","SCROBBLES_PLATINUM","SCROBBLES_DIAMOND"]
if tscrobbles >= threshold_diamond: cert = "diamond"
elif tscrobbles >= threshold_platinum: cert = "platinum"
elif tscrobbles >= threshold_gold: cert = "gold"
if own_album_charts:
c = own_album_charts[0]
ascrobbles = c["scrobbles"]
threshold_gold, threshold_platinum, threshold_diamond = malojaconfig["SCROBBLES_GOLD_ALBUM","SCROBBLES_PLATINUM_ALBUM","SCROBBLES_DIAMOND_ALBUM"]
if ascrobbles >= threshold_diamond: cert = "diamond"
elif ascrobbles >= threshold_platinum and cert != "diamond": cert = "platinum"
elif ascrobbles >= threshold_gold and not cert: cert = "gold"

twk = thisweek()
tyr = thisyear()

# base info for everyone
result = {
"artist":artist,
"scrobbles":scrobbles,
"id":artist_id,
"isalbumartist":isalbumartist,
"certification":cert,
}

# check if credited to someone else
parent_artists = sqldb.get_credited_artists(artist)
if len(parent_artists) == 0:
c = [e for e in alltimecharts if e["artist"] == artist]
position = c[0]["rank"] if len(c) > 0 else None
#we cant take the scrobble number from the charts because that includes all countas scrobbles
try:
c = [e for e in alltimecharts if e["artist"] == artist][0]
others = sqldb.get_associated_artists(artist,dbconn=dbconn)
result.update({
position = c["rank"]
return {
"artist":artist,
"scrobbles":scrobbles,
"position":position,
"associated":others,
"medals":{
"gold": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('artist_id') == artist_id) and (e.get('rank') == 1) for e in
sqldb.count_scrobbles_by_artist(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)],
"silver": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('artist_id') == artist_id) and (e.get('rank') == 2) for e in
sqldb.count_scrobbles_by_artist(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)],
"bronze": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('artist_id') == artist_id) and (e.get('rank') == 3) for e in
sqldb.count_scrobbles_by_artist(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)]
"gold": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['gold']],
"silver": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['silver']],
"bronze": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['bronze']],
},
"topweeks":len([
week for week in ranges(step="week") if (week != twk) and any(
(e.get('artist_id') == artist_id) and (e.get('rank') == 1) for e in
sqldb.count_scrobbles_by_artist(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,associated=True,dbconn=dbconn)
)
# we don't need to check the whole thing, just until rank is lower, but... well, its a list comprehension
])
})

else:
replaceartist = parent_artists[0]
"topweeks":len([e for e in cached.weekly_topartists if e == artist_id]),
"id":artist_id
}
except Exception:
# if the artist isnt in the charts, they are not being credited and we
# need to show information about the credited one
replaceartist = sqldb.get_credited_artists(artist)[0]
c = [e for e in alltimecharts if e["artist"] == replaceartist][0]
position = c["rank"]
result.update({
return {
"artist":artist,
"replace":replaceartist,
"position":position
})
"scrobbles":scrobbles,
"position":position,
"id":artist_id
}

return result
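
A sketch of the certification ladder the new artist_info/track_info/album_info share. The threshold values here are assumed defaults for illustration; the real values come from malojaconfig keys such as SCROBBLES_GOLD.

def certification(scrobbles, gold=250, platinum=500, diamond=1000):
	# highest threshold that the scrobble count clears wins
	if scrobbles >= diamond: return "diamond"
	if scrobbles >= platinum: return "platinum"
	if scrobbles >= gold: return "gold"
	return None

# certification(600) -> 'platinum'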



@@ -751,14 +387,12 @@ def track_info(dbconn=None,**keys):
track = keys.get('track')
if track is None: raise exceptions.MissingEntityParameter()

track_id = sqldb.get_track_id(track,create_new=False,dbconn=dbconn)
if not track_id: raise exceptions.TrackDoesNotExist(track)

track_id = sqldb.get_track_id(track,dbconn=dbconn)
track = sqldb.get_track(track_id,dbconn=dbconn)
alltimecharts = get_charts_tracks(timerange=alltime(),resolve_ids=False,dbconn=dbconn)
alltimecharts = get_charts_tracks(timerange=alltime(),dbconn=dbconn)
#scrobbles = get_scrobbles_num(track=track,timerange=alltime())

c = [e for e in alltimecharts if e["track_id"] == track_id][0]
c = [e for e in alltimecharts if e["track"] == track][0]
scrobbles = c["scrobbles"]
position = c["rank"]
cert = None
@@ -767,129 +401,22 @@ def track_info(dbconn=None,**keys):
elif scrobbles >= threshold_platinum: cert = "platinum"
elif scrobbles >= threshold_gold: cert = "gold"

twk = thisweek()
tyr = thisyear()

return {
"track":track,
"scrobbles":scrobbles,
"position":position,
"medals":{
"gold": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('track_id') == track_id) and (e.get('rank') == 1) for e in
sqldb.count_scrobbles_by_track(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)],
"silver": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('track_id') == track_id) and (e.get('rank') == 2) for e in
sqldb.count_scrobbles_by_track(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)],
"bronze": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('track_id') == track_id) and (e.get('rank') == 3) for e in
sqldb.count_scrobbles_by_track(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)]
"gold": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['gold']],
"silver": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['silver']],
"bronze": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['bronze']],
},
"certification":cert,
"topweeks":len([
week for week in ranges(step="week") if (week != twk) and any(
(e.get('track_id') == track_id) and (e.get('rank') == 1) for e in
sqldb.count_scrobbles_by_track(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=dbconn)
)
]),
"topweeks":len([e for e in cached.weekly_toptracks if e == track_id]),
"id":track_id
}


@waitfordb
def album_info(dbconn=None,reduced=False,**keys):

album = keys.get('album')
if album is None: raise exceptions.MissingEntityParameter()

album_id = sqldb.get_album_id(album,create_new=False,dbconn=dbconn)
if not album_id: raise exceptions.AlbumDoesNotExist(album)

album = sqldb.get_album(album_id,dbconn=dbconn)

extrainfo = {}

if reduced:
scrobbles = get_scrobbles_num(album=album,timerange=alltime())
else:
alltimecharts = get_charts_albums(timerange=alltime(),dbconn=dbconn)
c = [e for e in alltimecharts if e["album"] == album][0]
scrobbles = c["scrobbles"]
position = c["rank"]
extrainfo['position'] = position

cert = None
threshold_gold, threshold_platinum, threshold_diamond = malojaconfig["SCROBBLES_GOLD_ALBUM","SCROBBLES_PLATINUM_ALBUM","SCROBBLES_DIAMOND_ALBUM"]
if scrobbles >= threshold_diamond: cert = "diamond"
elif scrobbles >= threshold_platinum: cert = "platinum"
elif scrobbles >= threshold_gold: cert = "gold"

if reduced:
pass
else:
twk = thisweek()
tyr = thisyear()
extrainfo.update({
"medals":{
"gold": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('album_id') == album_id) and (e.get('rank') == 1) for e in
sqldb.count_scrobbles_by_album(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)],
"silver": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('album_id') == album_id) and (e.get('rank') == 2) for e in
sqldb.count_scrobbles_by_album(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)],
"bronze": [year.desc() for year in ranges(step='year') if (year != tyr) and any(
(e.get('album_id') == album_id) and (e.get('rank') == 3) for e in
sqldb.count_scrobbles_by_album(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=dbconn)
)]
},
"topweeks":len([
week for week in ranges(step="week") if (week != twk) and any(
(e.get('album_id') == album_id) and (e.get('rank') == 1) for e in
sqldb.count_scrobbles_by_album(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=dbconn)
)
])
})

return {
"album":album,
"scrobbles":scrobbles,
"certification":cert,
"id":album_id,
**extrainfo
}



### TODO: FIND COOL ALGORITHM TO SELECT FEATURED STUFF
@waitfordb
def get_featured(dbconn=None):
# temporary stand-in
ranges = [
MTRangeComposite(since=today().next(-14),to=today()),
MTRangeComposite(since=thisweek().next(-12),to=thisweek()),
MTRangeComposite(since=thisweek().next(-52),to=thisweek()),
alltime()
]
funcs = {
"artist": (get_charts_artists,{'associated':False}),
"album": (get_charts_albums,{}),
"track": (get_charts_tracks,{})
}
result = {t:None for t in funcs}

for entity_type in funcs:
for r in ranges:
func,kwargs = funcs[entity_type]
chart = func(timerange=r,**kwargs)
if chart:
result[entity_type] = chart[0][entity_type]
break
return result
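
Illustrative sketch of the fallback pattern get_featured uses: try progressively wider time ranges and take the chart leader from the first range that returns any data. The helper name is hypothetical.

def first_nonempty_leader(chart_func, ranges_to_try, **kwargs):
	# widest range last, so recent activity wins when it exists
	for r in ranges_to_try:
		chart = chart_func(timerange=r, **kwargs)
		if chart:
			return chart[0]
	return None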

def get_predefined_rulesets(dbconn=None):
validchars = "-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
@@ -934,44 +461,31 @@ def get_predefined_rulesets(dbconn=None):


def start_db():

conf.AUX_MODE = True # that is, without a doubt, the worst python code you have ever seen

# Upgrade database
from .. import upgrade
upgrade.upgrade_db(sqldb.add_scrobbles)
upgrade.parse_old_albums()

# Load temporary tables
from . import associated
associated.load_associated_rules()

# import scrobbles
from ..proccontrol.tasks.import_scrobbles import import_scrobbles #lmao this codebase is so fucked
for f in os.listdir(data_dir['import']()):
if f != 'dummy':
import_scrobbles(data_dir['import'](f))

dbstatus['healthy'] = True

conf.AUX_MODE = False # but you have seen it

# inform time module about begin of scrobbling
try:
firstscrobble = sqldb.get_scrobbles(limit=1)[0]
firstscrobble = sqldb.get_scrobbles()[0]
register_scrobbletime(firstscrobble['time'])
except IndexError:
register_scrobbletime(int(datetime.datetime.now().timestamp()))


# create cached information
cached.update_medals()
cached.update_weekly()

dbstatus['complete'] = True

# cache some stuff that we'll probably need
with sqldb.engine.connect() as dbconn:
with dbconn.begin():
for week in ranges(step='week'):
sqldb.count_scrobbles_by_artist(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,associated=True,dbconn=dbconn)
sqldb.count_scrobbles_by_track(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=dbconn)
sqldb.count_scrobbles_by_album(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=dbconn)


@@ -983,7 +497,4 @@ def db_search(query,type=None):
results = sqldb.search_artist(query)
if type=="TRACK":
results = sqldb.search_track(query)
if type=="ALBUM":
results = sqldb.search_album(query)

return results
@@ -19,16 +19,12 @@ def load_associated_rules():

# load from file
rawrules = []
try:
for f in os.listdir(data_dir["rules"]()):
if f.split('.')[-1].lower() != 'tsv': continue
filepath = data_dir["rules"](f)
with open(filepath,'r') as filed:
reader = csv.reader(filed,delimiter="\t")
rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
except FileNotFoundError:
return

for f in os.listdir(data_dir["rules"]()):
if f.split('.')[-1].lower() != 'tsv': continue
filepath = data_dir["rules"](f)
with open(filepath,'r') as filed:
reader = csv.reader(filed,delimiter="\t")
rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
rules = [{'source_artist':r[1],'target_artist':r[2]} for r in rawrules if r[0]=="countas"]

#for rule in rules:

maloja/database/cached.py (new file, 74 lines)
@@ -0,0 +1,74 @@
# for information that is not authorative, but should be saved anyway because it
# changes infrequently and DB access is expensive

from doreah.regular import runyearly, rundaily
from .. import database
from . import sqldb
from .. import malojatime as mjt


medals_artists = {
# year: {'gold':[],'silver':[],'bronze':[]}
}
medals_tracks = {
# year: {'gold':[],'silver':[],'bronze':[]}
}

weekly_topartists = []
weekly_toptracks = []

@runyearly
def update_medals():

global medals_artists, medals_tracks
medals_artists.clear()
medals_tracks.clear()

with sqldb.engine.begin() as conn:
for year in mjt.ranges(step="year"):
if year == mjt.thisyear(): break

charts_artists = sqldb.count_scrobbles_by_artist(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=conn)
charts_tracks = sqldb.count_scrobbles_by_track(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=conn)

entry_artists = {'gold':[],'silver':[],'bronze':[]}
entry_tracks = {'gold':[],'silver':[],'bronze':[]}
medals_artists[year.desc()] = entry_artists
medals_tracks[year.desc()] = entry_tracks

for entry in charts_artists:
if entry['rank'] == 1: entry_artists['gold'].append(entry['artist_id'])
elif entry['rank'] == 2: entry_artists['silver'].append(entry['artist_id'])
elif entry['rank'] == 3: entry_artists['bronze'].append(entry['artist_id'])
else: break
for entry in charts_tracks:
if entry['rank'] == 1: entry_tracks['gold'].append(entry['track_id'])
elif entry['rank'] == 2: entry_tracks['silver'].append(entry['track_id'])
elif entry['rank'] == 3: entry_tracks['bronze'].append(entry['track_id'])
else: break


@rundaily
def update_weekly():

global weekly_topartists, weekly_toptracks
weekly_topartists.clear()
weekly_toptracks.clear()

with sqldb.engine.begin() as conn:
for week in mjt.ranges(step="week"):
if week == mjt.thisweek(): break

charts_artists = sqldb.count_scrobbles_by_artist(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=conn)
charts_tracks = sqldb.count_scrobbles_by_track(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=conn)

for entry in charts_artists:
if entry['rank'] == 1: weekly_topartists.append(entry['artist_id'])
else: break
for entry in charts_tracks:
if entry['rank'] == 1: weekly_toptracks.append(entry['track_id'])
else: break
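
Illustrative only: how the precomputed tables above are meant to be consumed by the info functions (compare the cached.medals_artists lookups in the old artist_info path), turning a per-year DB aggregation into a plain dict scan. The helper name is hypothetical.

def medals_for_artist(artist_id, medals_artists):
	# collect every cached year in which this artist placed at each rank
	return {
		color: [year for year in medals_artists if artist_id in medals_artists[year][color]]
		for color in ('gold', 'silver', 'bronze')
	}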
@@ -10,7 +10,7 @@ from doreah.regular import runhourly
from doreah.logging import log

from ..pkg_global.conf import malojaconfig
from . import no_aux_mode


if malojaconfig['USE_GLOBAL_CACHE']:
@@ -21,7 +21,6 @@ if malojaconfig['USE_GLOBAL_CACHE']:

@runhourly
@no_aux_mode
def maintenance():
print_stats()
trim_cache()
@@ -43,7 +42,6 @@ if malojaconfig['USE_GLOBAL_CACHE']:
conn = None
global hits, misses
key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))
# TODO: also factor in default values to get better chance of hits

try:
return cache[key]
@@ -52,8 +50,6 @@ if malojaconfig['USE_GLOBAL_CACHE']:
cache[key] = result
return result

outer_func.__name__ = f"CACHD_{inner_func.__name__}"

return outer_func


@@ -84,23 +80,22 @@ if malojaconfig['USE_GLOBAL_CACHE']:

return outer_func

@no_aux_mode
def invalidate_caches(scrobbletime=None):

cleared, kept = 0, 0
for k in cache.keys():
# VERY BIG TODO: differentiate between None as in 'unlimited timerange' and None as in 'time doesnt matter here'!
if scrobbletime is None or ((k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4])):
if scrobbletime is None or (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
cleared += 1
del cache[k]
else:
kept += 1
log(f"Invalidated {cleared} of {cleared+kept} DB cache entries")

@no_aux_mode
def invalidate_entity_cache():
entitycache.clear()


def trim_cache():
ramprct = psutil.virtual_memory().percent
if ramprct > malojaconfig["DB_MAX_MEMORY"]:
@@ -137,7 +132,7 @@ else:
def serialize(obj):
try:
return serialize(obj.hashable())
except AttributeError:
except Exception:
try:
return json.dumps(obj)
except Exception:
@@ -1,72 +1,29 @@
from bottle import HTTPError


class EntityExists(Exception):
def __init__(self, entitydict):
def __init__(self,entitydict):
self.entitydict = entitydict


class TrackExists(EntityExists):
pass


class ArtistExists(EntityExists):
pass


class AlbumExists(EntityExists):
pass


# if the scrobbles dont match
class DuplicateTimestamp(Exception):
def __init__(self, existing_scrobble, rejected_scrobble):
self.existing_scrobble = existing_scrobble
self.rejected_scrobble = rejected_scrobble


# if it's the same scrobble
class DuplicateScrobble(Exception):
def __init__(self, scrobble):
self.scrobble = scrobble


class DatabaseNotBuilt(HTTPError):
def __init__(self):
super().__init__(
status=503,
body="The Maloja Database is being upgraded to support new Maloja features. This could take a while.",
headers={"Retry-After": 120}
body="The Maloja Database is being upgraded to Version 3. This could take quite a long time! (~ 2-5 minutes per 10 000 scrobbles)",
headers={"Retry-After":120}
)


class MissingScrobbleParameters(Exception):
def __init__(self, params=[]):
def __init__(self,params=[]):
self.params = params


class MissingEntityParameter(Exception):
pass


class EntityDoesNotExist(HTTPError):
entitytype = 'Entity'

def __init__(self,entitydict):
self.entitydict = entitydict
super().__init__(
status=404,
body=f"The {self.entitytype} '{self.entitydict}' does not exist in the database."
)


class ArtistDoesNotExist(EntityDoesNotExist):
entitytype = 'Artist'


class AlbumDoesNotExist(EntityDoesNotExist):
entitytype = 'Album'


class TrackDoesNotExist(EntityDoesNotExist):
entitytype = 'Track'
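
Usage sketch (hypothetical call site, mirroring the get_performance changes earlier in this diff): the *DoesNotExist classes double as bottle HTTP responses, so a handler can raise them directly after a failed id lookup and let them propagate as a 404.

from maloja.database import sqldb, exceptions

def require_artist_id(artist):
	artist_id = sqldb.get_artist_id(artist, create_new=False)
	if not artist_id:
		raise exceptions.ArtistDoesNotExist(artist)  # served as a 404 with a message body
	return artist_id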

File diff suppressed because it is too large
@@ -1,65 +1,39 @@
import os

import cProfile, pstats
import time

from doreah.logging import log
from doreah.timing import Clock

from ..pkg_global.conf import data_dir


profiler = cProfile.Profile()

FULL_PROFILE = False
SINGLE_CALLS = False
# only save the last single call instead of adding up all calls
# of that function for more representative performance result

if not SINGLE_CALLS:
profilers = {}
times = {}

def profile(func):

realfunc = func
while hasattr(realfunc, '__innerfunc__'):
realfunc = realfunc.__innerfunc__

def newfunc(*args,**kwargs):

starttime = time.time()

if FULL_PROFILE:
benchmarkfolder = data_dir['logs']("benchmarks")
os.makedirs(benchmarkfolder,exist_ok=True)
if SINGLE_CALLS:
localprofiler = cProfile.Profile()
else:
localprofiler = profilers.setdefault(realfunc,cProfile.Profile())
localprofiler.enable()

clock = Clock()
clock.start()

if FULL_PROFILE:
profiler.enable()
result = func(*args,**kwargs)

if FULL_PROFILE:
localprofiler.disable()

seconds = time.time() - starttime

if not SINGLE_CALLS:
times.setdefault(realfunc,[]).append(seconds)

if SINGLE_CALLS:
log(f"Executed {realfunc.__name__} ({args}, {kwargs}) in {seconds:.3f}s",module="debug_performance")
else:
log(f"Executed {realfunc.__name__} ({args}, {kwargs}) in {seconds:.3f}s (Average: { sum(times[realfunc])/len(times[realfunc]):.3f}s)",module="debug_performance")
profiler.disable()

log(f"Executed {func.__name__} ({args}, {kwargs}) in {clock.stop():.2f}s",module="debug_performance")
if FULL_PROFILE:
targetfilename = os.path.join(benchmarkfolder,f"{realfunc.__name__}.stats")
try:
pstats.Stats(localprofiler).dump_stats(targetfilename)
log(f"Saved benchmark as {targetfilename}")
pstats.Stats(profiler).dump_stats(os.path.join(benchmarkfolder,f"{func.__name__}.stats"))
except Exception:
log(f"Failed to save benchmark as {targetfilename}")
pass

return result
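
Usage sketch (illustrative, assuming the decorator is imported from this module): decorating a hot function logs its runtime per call, and with FULL_PROFILE enabled additionally accumulates per-function cProfile stats that get dumped to the benchmarks folder.

@profile
def expensive_query():
	return sum(i * i for i in range(10_000))

expensive_query()
# logs something like: "Executed expensive_query ((), {}) in 0.002s (Average: 0.002s)"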

maloja/images.py (365 lines changed)
@@ -12,330 +12,195 @@ import base64
import requests
import datauri
import io
from threading import Lock
from concurrent.futures import ThreadPoolExecutor
from threading import Thread, Timer, BoundedSemaphore
import re
import datetime
import time

import sqlalchemy as sql


MAX_RESOLVE_THREADS = 5
MAX_SECONDS_TO_RESOLVE_REQUEST = 5


# remove old db file (columns missing)
try:
os.remove(data_dir['cache']('images.sqlite'))
except:
pass

DB = {}
engine = sql.create_engine(f"sqlite:///{data_dir['cache']('imagecache.sqlite')}", echo = False)
engine = sql.create_engine(f"sqlite:///{data_dir['cache']('images.sqlite')}", echo = False)
meta = sql.MetaData()

dblock = Lock()

DB['artists'] = sql.Table(
'artists', meta,
sql.Column('id',sql.Integer,primary_key=True),
sql.Column('url',sql.String),
sql.Column('expire',sql.Integer),
# sql.Column('raw',sql.String)
sql.Column('local',sql.Boolean),
sql.Column('localproxyurl',sql.String)
sql.Column('raw',sql.String)
)
DB['tracks'] = sql.Table(
'tracks', meta,
sql.Column('id',sql.Integer,primary_key=True),
sql.Column('url',sql.String),
sql.Column('expire',sql.Integer),
# sql.Column('raw',sql.String)
sql.Column('local',sql.Boolean),
sql.Column('localproxyurl',sql.String)
)
DB['albums'] = sql.Table(
'albums', meta,
sql.Column('id',sql.Integer,primary_key=True),
sql.Column('url',sql.String),
sql.Column('expire',sql.Integer),
# sql.Column('raw',sql.String)
sql.Column('local',sql.Boolean),
sql.Column('localproxyurl',sql.String)
sql.Column('raw',sql.String)
)

meta.create_all(engine)
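
Illustrative query against the schema above (the new 'local' and 'localproxyurl' columns replace the old 'raw' data-URI column): fetch the unexpired cache row for one artist id. The id value is a stand-in.

import time
with engine.begin() as conn:
	rows = conn.execute(
		DB['artists'].select().where(
			DB['artists'].c.id == 1,                 # hypothetical artist id
			DB['artists'].c.expire > int(time.time())  # only rows that haven't expired
		)
	).all()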

def get_id_and_table(track_id=None,artist_id=None,album_id=None):
if track_id:
return track_id,'tracks'
elif album_id:
return album_id,'albums'
elif artist_id:
return artist_id,'artists'

def get_image_from_cache(track_id=None,artist_id=None,album_id=None):
def get_image_from_cache(id,table):
now = int(datetime.datetime.now().timestamp())
entity_id, table = get_id_and_table(track_id=track_id,artist_id=artist_id,album_id=album_id)

with engine.begin() as conn:
op = DB[table].select().where(
DB[table].c.id==entity_id,
DB[table].c.id==id,
DB[table].c.expire>now
)
result = conn.execute(op).all()
for row in result:
if row.local:
return {'type':'localurl','value':row.url}
elif row.localproxyurl:
return {'type':'localurl','value':row.localproxyurl}
if row.raw is not None:
return {'type':'raw','value':row.raw}
else:
return {'type':'url','value':row.url or None}
# value none means nonexistence is cached
# for some reason this can also be an empty string, so use or None here to unify
return {'type':'url','value':row.url} # returns None as value if nonexistence cached
return None # no cache entry

def set_image_in_cache(url,track_id=None,artist_id=None,album_id=None,local=False):
remove_image_from_cache(track_id=track_id,artist_id=artist_id,album_id=album_id)
entity_id, table = get_id_and_table(track_id=track_id,artist_id=artist_id,album_id=album_id)
def set_image_in_cache(id,table,url):
remove_image_from_cache(id,table)
now = int(datetime.datetime.now().timestamp())
if url is None:
expire = now + (malojaconfig["CACHE_EXPIRE_NEGATIVE"] * 24 * 3600)
else:
expire = now + (malojaconfig["CACHE_EXPIRE_POSITIVE"] * 24 * 3600)

with dblock:
now = int(datetime.datetime.now().timestamp())
if url is None:
expire = now + (malojaconfig["CACHE_EXPIRE_NEGATIVE"] * 24 * 3600)
else:
expire = now + (malojaconfig["CACHE_EXPIRE_POSITIVE"] * 24 * 3600)
raw = dl_image(url)

if not local and malojaconfig["PROXY_IMAGES"] and url is not None:
localproxyurl = dl_image(url)
else:
localproxyurl = None

with engine.begin() as conn:
op = DB[table].insert().values(
id=entity_id,
url=url,
expire=expire,
local=local,
localproxyurl=localproxyurl
)
result = conn.execute(op)

def remove_image_from_cache(track_id=None,artist_id=None,album_id=None):
entity_id, table = get_id_and_table(track_id=track_id,artist_id=artist_id,album_id=album_id)

with dblock:
with engine.begin() as conn:
op = DB[table].delete().where(
DB[table].c.id==entity_id,
).returning(
DB[table].c.id,
DB[table].c.localproxyurl
)
result = conn.execute(op).all()

for row in result:
try:
targetpath = data_dir['cache']('images',row.localproxyurl.split('/')[-1])
os.remove(targetpath)
except:
pass
with engine.begin() as conn:
op = DB[table].insert().values(
id=id,
url=url,
expire=expire,
raw=raw
)
result = conn.execute(op)

def remove_image_from_cache(id,table):
with engine.begin() as conn:
op = DB[table].delete().where(
DB[table].c.id==id,
)
result = conn.execute(op)

def dl_image(url):
if not malojaconfig["PROXY_IMAGES"]: return None
if url is None: return None
if url.startswith("/"): return None #local image
try:
r = requests.get(url)
mime = r.headers.get('content-type') or 'image/jpg'
data = io.BytesIO(r.content).read()
#uri = datauri.DataURI.make(mime,charset='ascii',base64=True,data=data)
targetname = '%030x' % random.getrandbits(128)
targetpath = data_dir['cache']('images',targetname)
with open(targetpath,'wb') as fd:
fd.write(data)
return os.path.join("/cacheimages",targetname)
uri = datauri.DataURI.make(mime,charset='ascii',base64=True,data=data)
log(f"Downloaded {url} for local caching")
return uri
except Exception:
log(f"Image {url} could not be downloaded for local caching")
return None
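
Illustrative only: with PROXY_IMAGES enabled, the new dl_image stores the downloaded bytes under a random hex name and returns the local proxy path that set_image_in_cache records in 'localproxyurl'. The URL below is a hypothetical example.

local_url = dl_image("https://example.com/cover.jpg")  # assumed reachable URL
# -> "/cacheimages/<random-hex>" on success, None if proxying is disabled or the download fails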


resolver = ThreadPoolExecutor(max_workers=MAX_RESOLVE_THREADS,thread_name_prefix='image_resolve')

### getting images for any website embedding now ALWAYS returns just the generic link
### even if we have already cached it, we will handle that on request
def get_track_image(track=None,track_id=None):
if track_id is None:
track_id = database.sqldb.get_track_id(track,create_new=False)
track_id = database.sqldb.get_track_id(track)

if malojaconfig["USE_ALBUM_ARTWORK_FOR_TRACKS"]:
if track is None:
track = database.sqldb.get_track(track_id)
if track.get("album"):
album_id = database.sqldb.get_album_id(track["album"])
return get_album_image(album_id=album_id)
return f"/image?type=track&id={track_id}"

resolver.submit(resolve_image,track_id=track_id)

return f"/image?track_id={track_id}"

def get_artist_image(artist=None,artist_id=None):
if artist_id is None:
artist_id = database.sqldb.get_artist_id(artist,create_new=False)
artist_id = database.sqldb.get_artist_id(artist)

resolver.submit(resolve_image,artist_id=artist_id)

return f"/image?artist_id={artist_id}"

def get_album_image(album=None,album_id=None):
if album_id is None:
album_id = database.sqldb.get_album_id(album,create_new=False)

resolver.submit(resolve_image,album_id=album_id)

return f"/image?album_id={album_id}"


# this is to keep track of what is currently being resolved
# so new requests know that they don't need to queue another resolve
image_resolve_controller_lock = Lock()
image_resolve_controller = {
'artists':set(),
'albums':set(),
'tracks':set()
}

# this function doesn't need to return any info
# it runs async to do all the work that takes time and only needs to write the result
# to the cache so the synchronous functions (http requests) can access it
def resolve_image(artist_id=None,track_id=None,album_id=None):
result = get_image_from_cache(artist_id=artist_id,track_id=track_id,album_id=album_id)
if result is not None:
# No need to do anything
return

if artist_id:
entitytype = 'artist'
table = 'artists'
getfunc, entity_id = database.sqldb.get_artist, artist_id
elif track_id:
entitytype = 'track'
table = 'tracks'
getfunc, entity_id = database.sqldb.get_track, track_id
elif album_id:
entitytype = 'album'
table = 'albums'
getfunc, entity_id = database.sqldb.get_album, album_id
return f"/image?type=artist&id={artist_id}"


# is another thread already working on this?
with image_resolve_controller_lock:
if entity_id in image_resolve_controller[table]:
return
else:
image_resolve_controller[table].add(entity_id)
resolve_semaphore = BoundedSemaphore(8)


def resolve_track_image(track_id):

with resolve_semaphore:
# check cache
result = get_image_from_cache(track_id,'tracks')
if result is not None:
return result

try:
entity = getfunc(entity_id)
track = database.sqldb.get_track(track_id)

# local image
if malojaconfig["USE_LOCAL_IMAGES"]:
images = local_files(**{entitytype: entity})
images = local_files(artists=track['artists'],title=track['title'])
if len(images) != 0:
result = random.choice(images)
result = urllib.parse.quote(result)
result = {'type':'localurl','value':result}
set_image_in_cache(artist_id=artist_id,track_id=track_id,album_id=album_id,url=result['value'],local=True)
result = {'type':'url','value':result}
set_image_in_cache(track_id,'tracks',result['value'])
return result

# third party
if artist_id:
result = thirdparty.get_image_artist_all(entity)
elif track_id:
result = thirdparty.get_image_track_all((entity['artists'],entity['title']))
elif album_id:
result = thirdparty.get_image_album_all((entity['artists'],entity['albumtitle']))
result = thirdparty.get_image_track_all((track['artists'],track['title']))
result = {'type':'url','value':result}
set_image_in_cache(track_id,'tracks',result['value'])

result = {'type':'url','value':result or None}
set_image_in_cache(artist_id=artist_id,track_id=track_id,album_id=album_id,url=result['value'])
finally:
with image_resolve_controller_lock:
image_resolve_controller[table].remove(entity_id)
return result


def resolve_artist_image(artist_id):

# the actual http request for the full image
def image_request(artist_id=None,track_id=None,album_id=None):

# because we use lazyload, we can allow our http requests to take a little while at least
# not the full backend request, but a few seconds to give us time to fetch some images
# because 503 retry-after doesn't seem to be honored
attempt = 0
while attempt < MAX_SECONDS_TO_RESOLVE_REQUEST:
attempt += 1
with resolve_semaphore:
# check cache
result = get_image_from_cache(artist_id=artist_id,track_id=track_id,album_id=album_id)
result = get_image_from_cache(artist_id,'artists')
if result is not None:
# we got an entry, even if it's that there is no image (value None)
if result['value'] is None:
# fallback to album regardless of setting (because we have no image)
if track_id:
track = database.sqldb.get_track(track_id)
if track.get("album"):
album_id = database.sqldb.get_album_id(track["album"])
return image_request(album_id=album_id)
# use placeholder
if malojaconfig["FANCY_PLACEHOLDER_ART"]:
placeholder_url = "https://generative-placeholders.glitch.me/image?width=300&height=300&style="
if artist_id:
result['value'] = placeholder_url + f"tiles&colors={artist_id % 100}"
if track_id:
result['value'] = placeholder_url + f"triangles&colors={track_id % 100}"
if album_id:
result['value'] = placeholder_url + f"joy-division&colors={album_id % 100}"
else:
if artist_id:
result['value'] = "/static/svg/placeholder_artist.svg"
if track_id:
result['value'] = "/static/svg/placeholder_track.svg"
if album_id:
result['value'] = "/static/svg/placeholder_album.svg"
return result
time.sleep(1)

# no entry, which means we're still working on it
return {'type':'noimage','value':'wait'}
artist = database.sqldb.get_artist(artist_id)

# local image
if malojaconfig["USE_LOCAL_IMAGES"]:
images = local_files(artist=artist)
if len(images) != 0:
result = random.choice(images)
result = urllib.parse.quote(result)
result = {'type':'url','value':result}
set_image_in_cache(artist_id,'artists',result['value'])
return result

# third party
result = thirdparty.get_image_artist_all(artist)
result = {'type':'url','value':result}
set_image_in_cache(artist_id,'artists',result['value'])

return result


# removes emojis and weird shit from names
def clean(name):
return "".join(c for c in name if c.isalnum() or c in []).strip()

# new and improved
def get_all_possible_filenames(artist=None,track=None,album=None):
if track:
title, artists = clean(track['title']), [clean(a) for a in track['artists']]
superfolder = "tracks/"
elif album:
title, artists = clean(album['albumtitle']), [clean(a) for a in album.get('artists') or []]
superfolder = "albums/"
elif artist:
def get_all_possible_filenames(artist=None,artists=None,title=None):
# check if we're dealing with a track or artist, then clean up names
# (only remove non-alphanumeric, allow korean and stuff)

if title is not None and artists is not None:
track = True
title, artists = clean(title), [clean(a) for a in artists]
elif artist is not None:
track = False
artist = clean(artist)
superfolder = "artists/"
else:
return []
else: return []


superfolder = "tracks/" if track else "artists/"

filenames = []

if track or album:
if track:
#unsafeartists = [artist.translate(None,"-_./\\") for artist in artists]
safeartists = [re.sub("[^a-zA-Z0-9]","",artist) for artist in artists]
#unsafetitle = title.translate(None,"-_./\\")
safetitle = re.sub("[^a-zA-Z0-9]","",title)

if len(artists) < 4:
@@ -345,6 +210,7 @@ def get_all_possible_filenames(artist=None,track=None,album=None):
unsafeperms = [sorted(artists)]
safeperms = [sorted(safeartists)]

for unsafeartistlist in unsafeperms:
filename = "-".join(unsafeartistlist) + "_" + title
if filename != "":
@@ -375,11 +241,10 @@ def get_all_possible_filenames(artist=None,track=None,album=None):

return [superfolder + name for name in filenames]


def local_files(artist=None,album=None,track=None):
def local_files(artist=None,artists=None,title=None):

filenames = get_all_possible_filenames(artist=artist,album=album,track=track)
filenames = get_all_possible_filenames(artist,artists,title)

images = []

@@ -402,50 +267,34 @@ def local_files(artist=None,album=None,track=None):


class MalformedB64(Exception):
pass

def set_image(b64,**keys):
if "title" in keys:
entity = {"track":keys}
id = database.sqldb.get_track_id(entity['track'])
idkeys = {'track_id':id}
dbtable = "tracks"
elif "albumtitle" in keys:
entity = {"album":keys}
id = database.sqldb.get_album_id(entity['album'])
idkeys = {'album_id':id}
dbtable = "albums"
elif "artist" in keys:
entity = keys
id = database.sqldb.get_artist_id(entity['artist'])
idkeys = {'artist_id':id}
dbtable = "artists"
track = "title" in keys
if track:
entity = {'artists':keys['artists'],'title':keys['title']}
id = database.sqldb.get_track_id(entity)
else:
entity = keys['artist']
id = database.sqldb.get_artist_id(entity)

log("Trying to set image, b64 string: " + str(b64[:30] + "..."),module="debug")

regex = r"data:image/(\w+);base64,(.+)"
match = re.fullmatch(regex,b64)
if not match: raise MalformedB64()

type,b64 = match.groups()
type,b64 = re.fullmatch(regex,b64).groups()
b64 = base64.b64decode(b64)
filename = "webupload" + str(int(datetime.datetime.now().timestamp())) + "." + type
for folder in get_all_possible_filenames(**entity):
for folder in get_all_possible_filenames(**keys):
if os.path.exists(data_dir['images'](folder)):
with open(data_dir['images'](folder,filename),"wb") as f:
f.write(b64)
break
else:
folder = get_all_possible_filenames(**entity)[0]
folder = get_all_possible_filenames(**keys)[0]
os.makedirs(data_dir['images'](folder))
with open(data_dir['images'](folder,filename),"wb") as f:
f.write(b64)


log("Saved image as " + data_dir['images'](folder,filename),module="debug")

# set as current picture in rotation
set_image_in_cache(**idkeys,url=os.path.join("/images",folder,filename),local=True)

return os.path.join("/images",folder,filename)
if track: set_image_in_cache(id,'tracks',os.path.join("/images",folder,filename))
else: set_image_in_cache(id,'artists',os.path.join("/images",folder,filename))
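
Illustrative only: the payload set_image accepts is a standard base64 data URI, and the new version validates it with re.fullmatch instead of letting a failed match crash with an AttributeError. The payload below is a hypothetical stand-in.

import re

b64 = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUg=="  # tiny stand-in payload
match = re.fullmatch(r"data:image/(\w+);base64,(.+)", b64)
if not match:
	raise ValueError("malformed data URI")  # the module raises MalformedB64() here
imagetype, payload = match.groups()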

@@ -26,6 +26,8 @@ def update_jinja_environment():

JINJA_CONTEXT = {
# maloja
"db": database, #TODO: move these to connection manager as well
#"dbp":dbp,
"malojatime": malojatime,
"images": images,
"mlj_uri": malojauri,
@@ -70,14 +72,6 @@ def update_jinja_environment():
{"identifier":"longtrailing","replacekeys":{"trail":3},"localisation":"Long Trailing"},
{"identifier":"inert","replacekeys":{"trail":10},"localisation":"Inert","heavy":True},
{"identifier":"cumulative","replacekeys":{"trail":math.inf},"localisation":"Cumulative","heavy":True}
],
"xassociated": [
{"identifier":"include_associated","replacekeys":{"associated":True},"localisation":"Associated"},
{"identifier":"exclude_associated","replacekeys":{"associated":False},"localisation":"Exclusive"}
],
"xseparate": [
{"identifier":"count_combined","replacekeys":{"separate":False},"localisation":"Combined"},
{"identifier":"count_separate","replacekeys":{"separate":True},"localisation":"Separate"}
]
}
|
||||
|
||||
|
@ -1,18 +1,16 @@
|
||||
from datetime import timezone, timedelta, date, time, datetime
|
||||
from calendar import monthrange
|
||||
from os.path import commonprefix
|
||||
import math
|
||||
import zoneinfo
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from .pkg_global.conf import malojaconfig
|
||||
|
||||
|
||||
OFFSET = malojaconfig["TIMEZONE"]
|
||||
LOCATION_TIMEZONE = malojaconfig["LOCATION_TIMEZONE"]
|
||||
TIMEZONE = timezone(timedelta(hours=OFFSET)) if not LOCATION_TIMEZONE or LOCATION_TIMEZONE not in zoneinfo.available_timezones() else zoneinfo.ZoneInfo(LOCATION_TIMEZONE)
|
||||
TIMEZONE = timezone(timedelta(hours=OFFSET))
|
||||
UTC = timezone.utc
|
||||
|
||||
FIRST_SCROBBLE = int(datetime.now(UTC).timestamp())
|
||||
FIRST_SCROBBLE = int(datetime.utcnow().replace(tzinfo=UTC).timestamp())
|
||||
|
||||
def register_scrobbletime(timestamp):
    global FIRST_SCROBBLE
@ -30,7 +28,7 @@ def register_scrobbletime(timestamp):

# Generic Time Range
class MTRangeGeneric(ABC):
class MTRangeGeneric:

    # despite the above, ranges that refer to the exact same real time range should evaluate as equal
    def __eq__(self,other):
@ -65,20 +63,11 @@ class MTRangeGeneric(ABC):

    # whether we currently live or will ever again live in this range
    def active(self):
        return (self.last_stamp() > datetime.now(timezone.utc).timestamp())
        return (self.last_stamp() > datetime.utcnow().timestamp())

    def __contains__(self,timestamp):
        return timestamp >= self.first_stamp() and timestamp <= self.last_stamp()

    @abstractmethod
    def first_stamp(self):
        pass

    @abstractmethod
    def last_stamp(self):
        pass


# Any range that has one defining base unit, whether week, year, etc.
class MTRangeSingular(MTRangeGeneric):
    def fromstr(self):
@ -113,7 +102,7 @@ class MTRangeGregorian(MTRangeSingular):

    # whether we currently live or will ever again live in this range
    # USE GENERIC SUPER METHOD INSTEAD
    # def active(self):
    #     tod = datetime.datetime.now(timezone.utc).date()
    #     tod = datetime.datetime.utcnow().date()
    #     if tod.year > self.year: return False
    #     if self.precision == 1: return True
    #     if tod.year == self.year:
@ -214,15 +203,16 @@ class MTRangeWeek(MTRangeSingular):
    def __init__(self,year=None,week=None):

        # do this so we can construct the week with overflow (eg 2020/-3)
        thisisoyear_firstday = date.fromisocalendar(year,1,1) + timedelta(days=malojaconfig['WEEK_OFFSET']-1)
        thisisoyear_firstday = date.fromchrcalendar(year,1,1)
        self.firstday = thisisoyear_firstday + timedelta(days=7*(week-1))
        self.firstday = date(self.firstday.year,self.firstday.month,self.firstday.day)
        # for compatibility with pre python3.8 (https://bugs.python.org/issue32417)

        self.lastday = self.firstday + timedelta(days=6)

        # now get the actual year and week number (in case of overflow)
        fakedate = self.firstday - timedelta(days=malojaconfig['WEEK_OFFSET']-1)
        # fake date that gives the correct iso return for the real date considering our week offset
        self.year,self.week,_ = fakedate.isocalendar()
        self.year,self.week,_ = self.firstday.chrcalendar()
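The fake-date trick above is easy to misread: to honor a configurable week start, the new code shifts the real date by WEEK_OFFSET-1 days, asks Python's Monday-first isocalendar() for year/week, and shifts back when constructing. A rough illustration, assuming WEEK_OFFSET = 0 means Sunday-first as documented in the settings further down:

# sketch: week number under a configurable week start (0 = Sunday ... 6 = Saturday)
from datetime import date, timedelta

def week_of(d, week_offset=0):
    # shifting by week_offset-1 days makes Python's Monday-first
    # isocalendar() report the week as if it started on the chosen day
    fakedate = d - timedelta(days=week_offset - 1)
    y, w, _ = fakedate.isocalendar()
    return y, w

# Sunday 2024-01-07: ISO still counts it as week 1,
# but with Sunday-first weeks it already opens week 2
assert date(2024, 1, 7).isocalendar()[1] == 1
assert week_of(date(2024, 1, 7), week_offset=0) == (2024, 2)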
@ -330,7 +320,7 @@ class MTRangeComposite(MTRangeGeneric):
        if self.since is None: return FIRST_SCROBBLE
        else: return self.since.first_stamp()
    def last_stamp(self):
        #if self.to is None: return int(datetime.now(timezone.utc).timestamp())
        #if self.to is None: return int(datetime.utcnow().replace(tzinfo=timezone.utc).timestamp())
        if self.to is None: return today().last_stamp()
        else: return self.to.last_stamp()

@ -360,9 +350,7 @@ def today():
def thisweek():
    tod = datetime.now(tz=TIMEZONE)
    tod = date(tod.year,tod.month,tod.day)
    fakedate = tod - timedelta(days=malojaconfig['WEEK_OFFSET']-1)
    # fake date for correct iso representation
    y,w,_ = fakedate.isocalendar()
    y,w,_ = tod.chrcalendar()
    return MTRangeWeek(y,w)
def thismonth():
    tod = datetime.now(tz=TIMEZONE)
@ -423,8 +411,8 @@ def get_last_instance(category,current,target,amount):

str_to_time_range = {
    **{s:callable for callable,strlist in currenttime_string_representations for s in strlist},
    **{s:(lambda i=index:get_last_instance(thismonth,datetime.now(timezone.utc).month,i,12)) for index,strlist in enumerate(month_string_representations,1) for s in strlist},
    **{s:(lambda i=index:get_last_instance(today,datetime.now(timezone.utc).isoweekday()+1%7,i,7)) for index,strlist in enumerate(weekday_string_representations,1) for s in strlist}
    **{s:(lambda i=index:get_last_instance(thismonth,datetime.utcnow().month,i,12)) for index,strlist in enumerate(month_string_representations,1) for s in strlist},
    **{s:(lambda i=index:get_last_instance(today,datetime.utcnow().isoweekday()+1%7,i,7)) for index,strlist in enumerate(weekday_string_representations,1) for s in strlist}
}
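One detail in the dict above that trips people up regardless of the utcnow change: `lambda i=index: ...` is the standard Python idiom for freezing the loop variable per iteration, because a plain closure would see only the final value of `index`. A quick demonstration:

# sketch: why the lambdas bind index as a default argument
late = [lambda: i for i in range(3)]
early = [lambda i=i: i for i in range(3)]

assert [f() for f in late] == [2, 2, 2]   # all share the final i
assert [f() for f in early] == [0, 1, 2]  # each froze its own i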
@ -567,9 +555,7 @@ def year_from_timestamp(stamp):
def week_from_timestamp(stamp):
    dt = datetime.fromtimestamp(stamp,tz=TIMEZONE)
    d = date(dt.year,dt.month,dt.day)
    fakedate = d - timedelta(days=malojaconfig['WEEK_OFFSET']-1)
    # fake date for correct iso representation
    y,w,_ = fakedate.isocalendar()
    y,w,_ = d.chrcalendar()
    return MTRangeWeek(y,w)

def from_timestamp(stamp,unit):
@ -4,7 +4,7 @@ import urllib
import math

# this also sets defaults!
def uri_to_internal(keys,accepted_entities=('artist','track','album'),forceTrack=False,forceArtist=False,forceAlbum=False,api=False):
def uri_to_internal(keys,forceTrack=False,forceArtist=False,api=False):

    # output:
    # 1 keys that define the filtered object like artist or track
@ -12,35 +12,14 @@ def uri_to_internal(keys,accepted_entities=('artist','track','album'),forceTrack
    # 3 keys that define internal time ranges
    # 4 keys that define amount limits

    # if we force a type, that only means that the other types are not allowed
    # it could still have no type at all (any call that isn't filtering by entity)

    if forceTrack: accepted_entities = ('track',)
    if forceArtist: accepted_entities = ('artist',)
    if forceAlbum: accepted_entities = ('album',)

    # API backwards compatibility
    if "artist" in keys and "artist" not in accepted_entities:
        if "track" in accepted_entities:
            keys['trackartist'] = keys['artist']
        elif "album" in accepted_entities:
            keys['albumartist'] = keys['artist']

    # 1
    filterkeys = {}
    # this only takes care of the logic - what kind of entity we're dealing with
    # it does not check with the database if it exists or what the canonical name is!!!
    if "track" in accepted_entities and "title" in keys:
        filterkeys.update({"track":{"artists":keys.getall("trackartist"),"title":keys.get("title")}})
    if "artist" in accepted_entities and "artist" in keys:
        filterkeys.update({"artist": keys.get("artist"), "associated": (keys.get('associated', 'no').lower() == 'yes')})
        # associated is only used for filtering by artist, to indicate that we include associated artists
        # for actual artist charts, to show that we want to count them, use 'unified'
    if "album" in accepted_entities and "albumtitle" in keys:
        filterkeys.update({"album":{"artists":keys.getall("albumartist"),"albumtitle":keys.get("albumtitle")}})

    if "title" in keys and not forceArtist:
        filterkeys = {"track":{"artists":keys.getall("artist"),"title":keys.get("title")}}
    elif "artist" in keys and not forceTrack:
        filterkeys = {"artist":keys.get("artist")}
        if "associated" in keys: filterkeys["associated"] = True
    else:
        filterkeys = {}

    # 2
    limitkeys = {}
@ -72,20 +51,11 @@ def uri_to_internal(keys,accepted_entities=('artist','track','album'),forceTrack
    #different max than the internal one! the user doesn't get to disable pagination
    if "page" in keys: amountkeys["page"] = int(keys["page"])
    if "perpage" in keys: amountkeys["perpage"] = int(keys["perpage"])
    #amountkeys["reverse"] = (keys.get("reverse","no").lower() == "yes")
    # we have different defaults for different things, so here we need to actually pass true false or nothing dependent
    # on whether it's specified
    if keys.get("reverse","").lower() == 'yes': amountkeys['reverse'] = True
    elif keys.get("reverse","").lower() == 'no': amountkeys['reverse'] = False

    #5
    specialkeys = {}
    #if "remote" in keys: specialkeys["remote"] = keys["remote"]
    specialkeys["separate"] = (keys.get('separate','no').lower() == 'yes')
    for k in keys:
        if k in ['remote','b64']:
            # TODO: better solution!
            specialkeys[k] = keys[k]
    if "remote" in keys: specialkeys["remote"] = keys["remote"]

    return filterkeys, limitkeys, delimitkeys, amountkeys, specialkeys
@ -110,15 +80,10 @@ def internal_to_uri(keys):
    if "artist" in keys:
        urikeys.append("artist",keys["artist"])
        if keys.get("associated"): urikeys.append("associated","yes")
    if "track" in keys:
    elif "track" in keys:
        for a in keys["track"]["artists"]:
            urikeys.append("trackartist",a)
            urikeys.append("artist",a)
        urikeys.append("title",keys["track"]["title"])
    if "album" in keys:
        for a in keys["album"].get("artists") or []:
            urikeys.append("albumartist",a)
        urikeys.append("albumtitle",keys["album"]["albumtitle"])

    #time
    if "timerange" in keys:
@ -151,11 +116,6 @@ def internal_to_uri(keys):
        urikeys.append("page",str(keys["page"]))
    if "perpage" in keys:
        urikeys.append("perpage",str(keys["perpage"]))
    if "reverse" in keys:
        urikeys.append("reverse","yes" if keys['reverse'] else "no")

    if keys.get("separate",False):
        urikeys.append("separate","yes")

    return urikeys
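Taken together, uri_to_internal and internal_to_uri are meant to round-trip. A hedged sketch of the intended mapping (keys is a bottle FormsDict in the real code; the values below are invented for illustration):

# sketch of the intended round-trip (values are made up)
# query string:  ?artist=Amaranthe&associated=yes&page=2
# uri_to_internal -> filterkeys  {'artist': 'Amaranthe', 'associated': True}
#                    amountkeys  {'page': 2}
# internal_to_uri -> [('artist','Amaranthe'), ('associated','yes'), ('page','2')]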
@ -1,7 +1,4 @@
import os

import doreah.auth
import doreah.logging
from doreah.configuration import Configuration
from doreah.configuration import types as tp

@ -9,8 +6,6 @@ from doreah.configuration import types as tp
from ..__pkginfo__ import VERSION


# this mode specifies whether we run some auxiliary task instead of the main server
AUX_MODE = True


# if DATA_DIRECTORY is specified, this is the directory to use for EVERYTHING, no matter what
@ -20,11 +15,9 @@ AUX_MODE = True
# DIRECTORY_CONFIG, DIRECTORY_STATE, DIRECTORY_LOGS and DIRECTORY_CACHE
# config can only be determined by environment variable, the others can be loaded
# from the config files
# explicit settings will always be respected, fallback to default

# we don't specify 'default' values in the normal sense of the config object
# the default is none, meaning the app should figure it out (depending on environment)
# the actual 'default' values of our folders are simply in code since they are dependent on environment (container?)
# and we need to actually distinguish them from the user having specified something
# if default isn't usable, and config writable, find alternative and fix it in settings

# USEFUL FUNCS
pthj = os.path.join
@ -32,7 +25,9 @@ pthj = os.path.join
def is_dir_usable(pth):
    try:
        os.makedirs(pth,exist_ok=True)
        return os.access(pth,os.W_OK)
        os.mknod(pthj(pth,".test"))
        os.remove(pthj(pth,".test"))
        return True
    except Exception:
        return False
@ -43,10 +38,7 @@ def get_env_vars(key,pathsuffix=[]):

directory_info = {
    "config":{
        "sentinel":".maloja_config_sentinel",
        "possible_folders_container":[
            "/config/config"
        ],
        "sentinel":"rules",
        "possible_folders":[
            "/etc/maloja",
            os.path.expanduser("~/.local/share/maloja")
@ -54,22 +46,15 @@ directory_info = {
        "setting":"directory_config"
    },
    "cache":{
        "sentinel":".maloja_cache_sentinel",
        "possible_folders_container":[
            "/config/cache"
        ],
        "sentinel":"dummy",
        "possible_folders":[
            "/var/cache/maloja",
            os.path.expanduser("~/.local/share/maloja/cache"),
            "/tmp/maloja"
            os.path.expanduser("~/.local/share/maloja/cache")
        ],
        "setting":"directory_cache"
    },
    "state":{
        "sentinel":".maloja_state_sentinel",
        "possible_folders_container":[
            "/config/state"
        ],
        "sentinel":"scrobbles",
        "possible_folders":[
            "/var/lib/maloja",
            os.path.expanduser("~/.local/share/maloja")
@ -77,10 +62,7 @@ directory_info = {
        "setting":"directory_state"
    },
    "logs":{
        "sentinel":".maloja_logs_sentinel",
        "possible_folders_container":[
            "/config/logs"
        ],
        "sentinel":"dbfix",
        "possible_folders":[
            "/var/log/maloja",
            os.path.expanduser("~/.local/share/maloja/logs")
@ -93,51 +75,51 @@ directory_info = {
# checks if one has been in use before and writes it to dict/config
# if not, determines which to use and writes it to dict/config
# returns determined folder
def find_good_folder(datatype):
def find_good_folder(datatype,configobject):
    info = directory_info[datatype]

    possible_folders = info['possible_folders']
    if os.environ.get("MALOJA_CONTAINER"):
        possible_folders = info['possible_folders_container'] + possible_folders

    # check each possible folder if it's used
    for p in possible_folders:
    for p in info['possible_folders']:
        if os.path.exists(pthj(p,info['sentinel'])):
            if is_dir_usable(p):
                #print(p,"was apparently used as maloja's folder for",datatype,"- fixing in settings")
                return p
            else:
                raise PermissionError(f"Can no longer use previously used {datatype} folder {p}")
            #print(p,"has been determined as maloja's folder for",datatype)
            configobject[info['setting']] = p
            return p

    #print("Could not find previous",datatype,"folder")
    # check which one we can use
    for p in possible_folders:
    for p in info['possible_folders']:
        if is_dir_usable(p):
            #print(p,"has been selected as maloja's folder for",datatype)
            configobject[info['setting']] = p
            return p
    #print("No folder can be used for",datatype)
    #print("This should not happen!")
    raise PermissionError(f"No folder could be found for {datatype}")

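In short, the rewritten find_good_folder first looks for a sentinel file that marks a previously used directory, and a container build (MALOJA_CONTAINER set) pushes the /config/* candidates to the front of the queue. A hedged sketch of the resulting candidate order for one data type:

# sketch: candidate order for the 'state' directory
import os

candidates = ["/var/lib/maloja", os.path.expanduser("~/.local/share/maloja")]
if os.environ.get("MALOJA_CONTAINER"):
    candidates = ["/config/state"] + candidates

# first pass: a folder that already contains the sentinel wins (or errors if unusable)
# second pass: otherwise the first writable candidate is chosen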
### STEP 1 - find out where the settings file is

# environment variables
maloja_dir_config = os.environ.get("MALOJA_DATA_DIRECTORY") or os.environ.get("MALOJA_DIRECTORY_CONFIG")

if maloja_dir_config is None:
    # if nothing is set, we set our own
    maloja_dir_config = find_good_folder('config')
    maloja_dir_config = find_good_folder('config',{})
    found_new_config_dir = True
else:
    pass
    # if there is an environment variable, this is 100% explicitly defined by the user, so we respect it
    # the user might run more than one instance on the same machine, so we don't do any heuristics here
    # if you define this, we believe it!
    found_new_config_dir = False
    # remember whether we had to find our config dir or it was user-specified

os.makedirs(maloja_dir_config,exist_ok=True)
settingsfile = pthj(maloja_dir_config,"settings.ini")

oldsettingsfile = pthj(maloja_dir_config,"settings","settings.ini")
newsettingsfile = pthj(maloja_dir_config,"settings.ini")

if os.path.exists(oldsettingsfile):
    os.rename(oldsettingsfile,newsettingsfile)

### STEP 2 - create settings object

@ -147,10 +129,10 @@ malojaconfig = Configuration(
    settings={
        "Setup":{
            "data_directory":(tp.String(), "Data Directory", None, "Folder for all user data. Overwrites all choices for specific directories."),
            "directory_config":(tp.String(), "Config Directory", None, "Folder for config data. Only applied when global data directory is not set."),
            "directory_state":(tp.String(), "State Directory", None, "Folder for state data. Only applied when global data directory is not set."),
            "directory_logs":(tp.String(), "Log Directory", None, "Folder for log data. Only applied when global data directory is not set."),
            "directory_cache":(tp.String(), "Cache Directory", None, "Folder for cache data. Only applied when global data directory is not set."),
            "directory_config":(tp.String(), "Config Directory", "/etc/maloja", "Folder for config data. Only applied when global data directory is not set."),
            "directory_state":(tp.String(), "State Directory", "/var/lib/maloja", "Folder for state data. Only applied when global data directory is not set."),
            "directory_logs":(tp.String(), "Log Directory", "/var/log/maloja", "Folder for log data. Only applied when global data directory is not set."),
            "directory_cache":(tp.String(), "Cache Directory", "/var/cache/maloja", "Folder for cache data. Only applied when global data directory is not set."),
            "skip_setup":(tp.Boolean(), "Skip Setup", False, "Make server setup process non-interactive. Vital for Docker."),
            "force_password":(tp.String(), "Force Password", None, "On startup, overwrite admin password with this one. This should usually only be done via environment variable in Docker."),
            "clean_output":(tp.Boolean(), "Avoid Mutable Console Output", False, "Use if console output will be redirected e.g. to a web interface.")
@ -166,21 +148,18 @@ malojaconfig = Configuration(
        "Technical":{
            "cache_expire_positive":(tp.Integer(), "Image Cache Expiration", 60, "Days until images are refetched"),
            "cache_expire_negative":(tp.Integer(), "Image Cache Negative Expiration", 5, "Days until failed image fetches are reattempted"),
            "db_max_memory":(tp.Integer(min=0,max=100), "RAM Percentage soft limit", 70, "RAM Usage in percent at which Maloja should no longer increase its database cache."),
            "db_max_memory":(tp.Integer(min=0,max=100), "RAM Percentage soft limit", 50, "RAM Usage in percent at which Maloja should no longer increase its database cache."),
            "use_request_cache":(tp.Boolean(), "Use request-local DB Cache", False),
            "use_global_cache":(tp.Boolean(), "Use global DB Cache", True, "This is vital for Maloja's performance. Do not disable this unless you have a strong reason to.")
            "use_global_cache":(tp.Boolean(), "Use global DB Cache", True)
        },
        "Fluff":{
            "scrobbles_gold":(tp.Integer(), "Scrobbles for Gold (Track)", 250, "How many scrobbles a track needs to be considered 'Gold' status"),
            "scrobbles_platinum":(tp.Integer(), "Scrobbles for Platinum (Track)",500, "How many scrobbles a track needs to be considered 'Platinum' status"),
            "scrobbles_diamond":(tp.Integer(), "Scrobbles for Diamond (Track)",1000, "How many scrobbles a track needs to be considered 'Diamond' status"),
            "scrobbles_gold_album":(tp.Integer(), "Scrobbles for Gold (Album)", 500, "How many scrobbles an album needs to be considered 'Gold' status"),
            "scrobbles_platinum_album":(tp.Integer(), "Scrobbles for Platinum (Album)",750, "How many scrobbles an album needs to be considered 'Platinum' status"),
            "scrobbles_diamond_album":(tp.Integer(), "Scrobbles for Diamond (Album)",1500, "How many scrobbles an album needs to be considered 'Diamond' status"),
            "scrobbles_gold":(tp.Integer(), "Scrobbles for Gold", 250, "How many scrobbles a track needs to be considered 'Gold' status"),
            "scrobbles_platinum":(tp.Integer(), "Scrobbles for Platinum", 500, "How many scrobbles a track needs to be considered 'Platinum' status"),
            "scrobbles_diamond":(tp.Integer(), "Scrobbles for Diamond", 1000, "How many scrobbles a track needs to be considered 'Diamond' status"),
            "name":(tp.String(), "Name", "Generic Maloja User")
        },
        "Third Party Services":{
            "metadata_providers":(tp.List(tp.String()), "Metadata Providers", ['lastfm','spotify','deezer','audiodb','musicbrainz'], "List of which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first."),
            "metadata_providers":(tp.List(tp.String()), "Metadata Providers", ['lastfm','spotify','deezer','musicbrainz'], "Which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first."),
            "scrobble_lastfm":(tp.Boolean(), "Proxy-Scrobble to Last.fm", False),
            "lastfm_api_key":(tp.String(), "Last.fm API Key", None),
            "lastfm_api_secret":(tp.String(), "Last.fm API Secret", None),
@ -198,44 +177,39 @@ malojaconfig = Configuration(

        },
        "Database":{
            "album_information_trust":(tp.Choice({'first':"First",'last':"Last",'majority':"Majority"}), "Album Information Authority","first", "Whether to trust the first album information that is sent with a track or update every time a different album is sent"),
            "invalid_artists":(tp.Set(tp.String()), "Invalid Artists", ["[Unknown Artist]","Unknown Artist","Spotify"], "Artists that should be discarded immediately"),
            "remove_from_title":(tp.Set(tp.String()), "Remove from Title", ["(Original Mix)","(Radio Edit)","(Album Version)","(Explicit Version)","(Bonus Track)"], "Phrases that should be removed from song titles"),
            "delimiters_feat":(tp.Set(tp.String()), "Featuring Delimiters", ["ft.","ft","feat.","feat","featuring"], "Delimiters used for extra artists, even when in the title field"),
            "delimiters_informal":(tp.Set(tp.String()), "Informal Delimiters", ["vs.","vs","&"], "Delimiters in informal artist strings with spaces expected around them"),
            "delimiters_formal":(tp.Set(tp.String()), "Formal Delimiters", [";","/","|","␝","␞","␟"], "Delimiters used to tag multiple artists when only one tag field is available"),
            "filters_remix":(tp.Set(tp.String()), "Remix Filters", ["Remix", "Remix Edit", "Short Mix", "Extended Mix", "Soundtrack Version"], "Filters used to recognize the remix artists in the title"),
            "parse_remix_artists":(tp.Boolean(), "Parse Remix Artists", False),
            "week_offset":(tp.Integer(), "Week Begin Offset", 0, "Start of the week for the purpose of weekly statistics. 0 = Sunday, 6 = Saturday"),
            "timezone":(tp.Integer(), "UTC Offset", 0),
            "location_timezone":(tp.String(), "Location Timezone", None)
            "parse_remix_artists":(tp.Boolean(), "Parse Remix Artists", False)
        },
        "Web Interface":{
            "default_range_startpage":(tp.Choice({'alltime':'All Time','year':'Year','month':"Month",'week':'Week'}), "Default Range for Startpage Stats", "year"),
            "default_range_charts_artists":(tp.Choice({'alltime':'All Time','year':'Year','month':"Month",'week':'Week'}), "Default Range Artist Charts", "year"),
            "default_range_charts_tracks":(tp.Choice({'alltime':'All Time','year':'Year','month':"Month",'week':'Week'}), "Default Range Track Charts", "year"),
            "default_step_pulse":(tp.Choice({'year':'Year','month':"Month",'week':'Week','day':'Day'}), "Default Pulse Step", "month"),
            "charts_display_tiles":(tp.Boolean(), "Display Chart Tiles", False),
            "album_showcase":(tp.Boolean(), "Display Album Showcase", True, "Display a graphical album showcase for artist overview pages instead of a chart list"),
            "display_art_icons":(tp.Boolean(), "Display Album/Artist Icons", True),
            "default_album_artist":(tp.String(), "Default Albumartist", "Various Artists"),
            "use_album_artwork_for_tracks":(tp.Boolean(), "Use Album Artwork for tracks", True),
            "fancy_placeholder_art":(tp.Boolean(), "Use fancy placeholder artwork",False),
            "show_play_number_on_tiles":(tp.Boolean(), "Show amount of plays on tiles",False),
            "discourage_cpu_heavy_stats":(tp.Boolean(), "Discourage CPU-heavy stats", False, "Prevent visitors from mindlessly clicking on CPU-heavy options. Does not actually disable them for malicious actors!"),
            "use_local_images":(tp.Boolean(), "Use Local Images", True),
            #"local_image_rotate":(tp.Integer(), "Local Image Rotate", 3600),
            "timezone":(tp.Integer(), "UTC Offset", 0),
            "time_format":(tp.String(), "Time Format", "%d. %b %Y %I:%M %p"),
            "theme":(tp.String(), "Theme", "maloja")
        }
    },
    configfile=settingsfile,
    configfile=newsettingsfile,
    save_endpoint="/apis/mlj_1/settings",
    env_prefix="MALOJA_",
    extra_files=["/run/secrets/maloja.yml","/run/secrets/maloja.ini"]

)
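Since the Configuration object is built with env_prefix="MALOJA_", any of the settings above can be supplied as an environment variable instead of via settings.ini. A hedged example (variable names are derived from the keys above; the values are invented):

# sketch: the same settings supplied docker-style via the environment
#   MALOJA_SKIP_SETUP=true        -> malojaconfig["SKIP_SETUP"]
#   MALOJA_FORCE_PASSWORD=hunter2 -> malojaconfig["FORCE_PASSWORD"]
#   MALOJA_WEEK_OFFSET=1          -> malojaconfig["WEEK_OFFSET"]
import os
os.environ["MALOJA_SKIP_SETUP"] = "true"  # must be set before conf.py is imported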
if not malojaconfig.readonly:
    malojaconfig["DIRECTORY_CONFIG"] = maloja_dir_config
if found_new_config_dir:
    try:
        malojaconfig["DIRECTORY_CONFIG"] = maloja_dir_config
    except PermissionError as e:
        pass
    # this really doesn't matter because when are we gonna load info about where
    # the settings file is stored from the settings file
    # but oh well
@ -257,17 +231,17 @@ except PermissionError as e:
    pass

### STEP 3 - now check the other directories
### STEP 3 - check all possible folders for files (old installation)

if not malojaconfig.readonly:
    for datatype in ("state","cache","logs"):
        # if the setting is specified in the file or via a user environment variable, we accept it (we'll check later if it's usable)
        if malojaconfig[directory_info[datatype]['setting']] or malojaconfig['DATA_DIRECTORY']:
            pass
        # otherwise, find a good one
        else:
            malojaconfig[directory_info[datatype]['setting']] = find_good_folder(datatype)
        # obviously default values shouldn't trigger this
        # if user has nothing specified, we need to use this
        if malojaconfig.get_specified(directory_info[datatype]['setting']) is None and malojaconfig.get_specified('DATA_DIRECTORY') is None:
            find_good_folder(datatype,malojaconfig)

@ -295,11 +269,11 @@ else:
    "logs":pthj(malojaconfig['DATA_DIRECTORY'],"logs"),
}

data_directories = {
    "auth":pthj(dir_settings['state'],"auth"),
    "backups":pthj(dir_settings['state'],"backups"),
    "images":pthj(dir_settings['state'],"images"),
    "import":pthj(dir_settings['state'],"import"),
    "scrobbles":pthj(dir_settings['state']),
    "rules":pthj(dir_settings['config'],"rules"),
    "clients":pthj(dir_settings['config']),
@ -313,54 +287,50 @@ data_directories = {
}

for identifier,path in data_directories.items():
    if path is None:
        continue

    if malojaconfig.readonly and (path == dir_settings['config'] or path.startswith(dir_settings['config']+'/')):
        continue

    try:
        os.makedirs(path,exist_ok=True)
        if not is_dir_usable(path): raise PermissionError(f"Directory {path} is not usable!")
    except PermissionError:
        # special case: cache does not contain info that can't be refetched, so no need to require user intervention
        # just move to the next one
        if identifier in ['cache']:
            print("Cannot use",path,"for cache, finding new folder...")
            data_directories['cache'] = dir_settings['cache'] = malojaconfig['DIRECTORY_CACHE'] = find_good_folder('cache')
        else:
            print(f"Directory for {identifier} ({path}) is not writeable.")
            print("Please change permissions or settings!")
            print("Make sure Maloja has write and execute access to this directory.")
            raise

class DataDirs:
    def __init__(self, dirs):
        self.dirs = dirs

    def __getitem__(self, key):
        return lambda *x, k=key: pthj(self.dirs[k], *x)

data_dir = DataDirs(data_directories)
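The DataDirs indexer returns a path-joining callable rather than a string, which is why call sites below read like data_dir['logs']() or data_dir['auth']("auth.sqlite"). A tiny usage sketch with an invented directory:

# sketch: data_dir[key] yields a function that joins onto that base folder
dirs = DataDirs({"auth": "/var/lib/maloja/auth"})
assert dirs["auth"]("auth.sqlite") == "/var/lib/maloja/auth/auth.sqlite"
assert dirs["auth"]() == "/var/lib/maloja/auth"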
### DOREAH OBJECTS

auth = doreah.auth.AuthManager(singleuser=True,cookieprefix='maloja',stylesheets=("/maloja.css",),dbfile=data_dir['auth']("auth.sqlite"))

#logger = doreah.logging.Logger(logfolder=data_dir['logs']() if malojaconfig["LOGGING"] else None)
#log = logger.log

# this is not how it's supposed to be done, but let's ease the transition
doreah.logging.defaultlogger.logfolder = data_dir['logs']() if malojaconfig["LOGGING"] else None
    os.makedirs(path,exist_ok=True)

try:
    custom_css_files = [f for f in os.listdir(data_dir['css']()) if f.lower().endswith('.css')]
except FileNotFoundError:
    custom_css_files = []
data_dir = {
    k:lambda *x,k=k: pthj(data_directories[k],*x) for k in data_directories
}


### write down the last ran version
with open(pthj(dir_settings['state'],".lastmalojaversion"),"w") as filed:
    filed.write(VERSION)
    filed.write("\n")


### DOREAH CONFIGURATION

from doreah import config

config(
    auth={
        "multiuser":False,
        "cookieprefix":"maloja",
        "stylesheets":["/maloja.css"],
        "dbfile":data_dir['auth']("auth.ddb")
    },
    logging={
        "logfolder": data_dir['logs']() if malojaconfig["LOGGING"] else None
    },
    regular={
        "offset": malojaconfig["TIMEZONE"]
    }
)


custom_css_files = [f for f in os.listdir(data_dir['css']()) if f.lower().endswith('.css')]

from ..database.sqldb import set_maloja_info
set_maloja_info({'last_run_version':VERSION})

# what the fuck did i just write
# this spaghetti file is proudly sponsored by the rice crackers i'm eating at the
@ -28,3 +28,40 @@ try:
except Exception:
    pass


# proper sunday-first weeks
# damn iso heathens

from datetime import date, timedelta
import datetime

class expandeddate(date):

    def chrweekday(self):
        return self.isoweekday() + 1 % 7

    def chrcalendar(self):
        tomorrow = self + timedelta(days=1)
        cal = tomorrow.isocalendar()
        return (cal[0],cal[1],cal[2])

    @classmethod
    def fromchrcalendar(cls,y,w,d):
        try:
            return datetime.date.fromisocalendar(y,w,d) - timedelta(days=1) #sunday instead of monday
        except Exception:
            # pre python3.8 compatibility

            firstdayofyear = datetime.date(y,1,1)
            wkday = firstdayofyear.isoweekday()
            if wkday <= 4: # day up to thursday -> this week belongs to the new year
                firstisodayofyear = firstdayofyear - timedelta(days=wkday) #this also shifts to sunday-first weeks
            else: # if not, still old year
                firstisodayofyear = firstdayofyear + timedelta(days=7-wkday) #same
            return firstisodayofyear + timedelta(days=(w-1)*7) + timedelta(days=d-1)


datetime.date = expandeddate
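A concrete check of the Sunday-first helpers above, plus one caveat worth flagging: in `self.isoweekday() + 1 % 7`, Python binds `%` tighter than `+`, so the expression evaluates as `isoweekday() + (1 % 7)`, i.e. plain `isoweekday() + 1`, and a Sunday yields 8 rather than wrapping; the same expression appears in the str_to_time_range weekday lookup earlier. The sketch below assumes the class exactly as defined above:

# sketch: Sunday-first calendar arithmetic with the patched date class
d = expandeddate(2024, 1, 7)              # a Sunday; ISO says (2024, week 1)
assert tuple(d.isocalendar())[:2] == (2024, 1)
assert d.chrcalendar()[:2] == (2024, 2)   # Sunday already opens the next chr-week

# the precedence pitfall mentioned above:
assert d.isoweekday() + 1 % 7 == 8        # not 1 - % binds tighter than +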
@ -1,4 +1,3 @@
from .import_scrobbles import import_scrobbles
from .backup import backup
from .export import export # read that line out loud
from .parse_albums import parse_albums
@ -12,12 +12,11 @@ def export(targetfolder=None):
        targetfolder = os.getcwd()

    timestr = time.strftime("%Y_%m_%d_%H_%M_%S")
    timestamp = int(time.time()) # ok this is technically a separate time get from above, but those ms are not gonna matter, and im too lazy to change it all to datetime
    filename = f"maloja_export_{timestr}.json"
    outputfile = os.path.join(targetfolder,filename)
    assert not os.path.exists(outputfile)

    data = {'maloja':{'export_time': timestamp },'scrobbles':get_scrobbles()}
    data = {'scrobbles':get_scrobbles()}
    with open(outputfile,'w') as outfd:
        json.dump(data,outfd,indent=3)
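The new top-level 'maloja' key is what lets the importer below recognize arbitrary *.json files as Maloja exports. A sketch of the resulting file shape (field values invented):

# sketch: shape of maloja_export_<timestamp>.json after this change
example = {
    "maloja": {"export_time": 1700000000},   # marker block checked by the importer
    "scrobbles": [
        {"time": 1699999000, "track": {"artists": ["Artist"], "title": "Song"}},
    ],
}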
@ -32,62 +32,30 @@ def import_scrobbles(inputf):
    }

    filename = os.path.basename(inputf)
    importfunc = None

    if re.match(r"recenttracks-.*\.csv", filename):
        typeid, typedesc = "lastfm", "Last.fm (ghan CSV)"
        importfunc = parse_lastfm_ghan_csv

    elif re.match(r".*\.csv", filename):
        typeid,typedesc = "lastfm", "Last.fm (benjaminbenben CSV)"
    if re.match(".*\.csv",filename):
        typeid,typedesc = "lastfm","Last.fm"
        importfunc = parse_lastfm

    elif re.match(r"Streaming_History_Audio.+\.json", filename):
        typeid,typedesc = "spotify", "Spotify"
    elif re.match("endsong_[0-9]+\.json",filename):
        typeid,typedesc = "spotify","Spotify"
        importfunc = parse_spotify_full

    elif re.match("StreamingHistory[0-9]+\.json",filename):
        typeid,typedesc = "spotify","Spotify"
        importfunc = parse_spotify_lite

    elif re.match(r"endsong_[0-9]+\.json", filename):
        typeid,typedesc = "spotify", "Spotify"
        importfunc = parse_spotify

    elif re.match(r"StreamingHistory[0-9]+\.json", filename):
        typeid,typedesc = "spotify", "Spotify"
        importfunc = parse_spotify_lite_legacy

    elif re.match(r"maloja_export[_0-9]*\.json", filename):
        typeid,typedesc = "maloja", "Maloja"
    elif re.match("maloja_export_[0-9]+\.json",filename):
        typeid,typedesc = "maloja","Maloja"
        importfunc = parse_maloja

    # username_lb-YYYY-MM-DD.json
    elif re.match(r".*_lb-[0-9-]+\.json", filename):
        typeid,typedesc = "listenbrainz", "ListenBrainz"
        importfunc = parse_listenbrainz

    elif re.match(r"\.scrobbler\.log", filename):
        typeid,typedesc = "rockbox", "Rockbox"
        importfunc = parse_rockbox

    elif re.match(r"recenttracks-.*\.json", filename):
        typeid, typedesc = "lastfm", "Last.fm (ghan JSON)"
        importfunc = parse_lastfm_ghan_json

    elif re.match(r".*\.json",filename):
        try:
            with open(filename,'r') as fd:
                data = json.load(fd)
            if 'maloja' in data:
                typeid,typedesc = "maloja","Maloja"
                importfunc = parse_maloja
        except Exception:
            pass

    if not importfunc:
    else:
        print("File",inputf,"could not be identified as a valid import source.")
        return result

    print(f"Parsing {col['yellow'](inputf)} as {col['cyan'](typedesc)} export.")
    print(col['red']("Please double-check if this is correct - if the import fails, the file might have been interpreted as the wrong type."))
    print(f"Parsing {col['yellow'](inputf)} as {col['cyan'](typedesc)} export")
    print("This could take a while...")

    timestamps = set()
    scrobblebuffer = []
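For orientation, the detection chain above is first-match-wins on the base filename. A hedged sketch of where some sample names (invented) would land under the new regexes:

# sketch: which importer each example filename would select
samples = {
    "recenttracks-user.csv": "lastfm (ghan CSV)",
    "anything.csv": "lastfm (benjaminbenben CSV)",
    "Streaming_History_Audio_2023_1.json": "spotify (full)",
    "endsong_0.json": "spotify",
    "StreamingHistory0.json": "spotify (lite, legacy)",
    "maloja_export_1700000000.json": "maloja",
    "user_lb-2023-01-01.json": "listenbrainz",
    ".scrobbler.log": "rockbox",
}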
@ -107,18 +75,16 @@ def import_scrobbles(inputf):

        # extra info
        extrainfo = {}
        if scrobble.get('album_name'): extrainfo['album_name'] = scrobble['album_name']
        # saving this in the scrobble instead of the track because for now it's not meant
        # to be authoritative information, just payload of the scrobble

        scrobblebuffer.append({
            "time":scrobble['scrobble_time'],
            "track":{
                "artists":scrobble['track_artists'],
                "title":scrobble['track_title'],
                "length":scrobble['track_length'],
                "album":{
                    "albumtitle":scrobble.get('album_name') or None,
                    "artists":scrobble.get('album_artists') or scrobble['track_artists'] or None
                    # TODO: use same heuristics as with parsing to determine album?
                } if scrobble.get('album_name') else None
                "length":None
            },
            "duration":scrobble['scrobble_duration'],
            "origin":"import:" + typeid,
@ -150,29 +116,24 @@ def import_scrobbles(inputf):

    return result
def parse_spotify_lite_legacy(inputf):
def parse_spotify_lite(inputf):
    pth = os.path
    # use absolute paths internally for peace of mind. just change representation for console output
    inputf = pth.abspath(inputf)
    inputfolder = pth.dirname(inputf)
    inputfolder = pth.relpath(pth.dirname(pth.abspath(inputf)))
    filenames = re.compile(r'StreamingHistory[0-9]+\.json')
    #inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    inputfiles = [inputf]
    inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]

    #if len(inputfiles) == 0:
    #    print("No files found!")
    #    return
    if len(inputfiles) == 0:
        print("No files found!")
        return

    #if inputfiles != [inputf]:
    #    print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
    #    if not ask("Import " + ", ".join(col['yellow'](pth.basename(i)) for i in inputfiles) + "?",default=True):
    #        inputfiles = [inputf]
    #        print("Only importing", col['yellow'](pth.basename(inputf)))
    if inputfiles != [inputf]:
        print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
        if not ask("Import " + ", ".join(col['yellow'](i) for i in inputfiles) + "?",default=True):
            inputfiles = [inputf]

    for inputf in inputfiles:

        #print("Importing",col['yellow'](inputf),"...")
        print("Importing",col['yellow'](inputf),"...")
        with open(inputf,'r') as inputfd:
            data = json.load(inputfd)

@ -193,7 +154,6 @@ def parse_spotify_lite_legacy(inputf):
            yield ("CONFIDENT_IMPORT",{
                'track_title':title,
                'track_artists': artist,
                'track_length': None,
                'scrobble_time': timestamp,
                'scrobble_duration':played,
                'album_name': None
@ -205,85 +165,20 @@ def parse_spotify_lite_legacy(inputf):
    print()

def parse_spotify_lite(inputf):
def parse_spotify_full(inputf):
    pth = os.path
    # use absolute paths internally for peace of mind. just change representation for console output
    inputf = pth.abspath(inputf)
    inputfolder = pth.dirname(inputf)
    filenames = re.compile(r'Streaming_History_Audio.+\.json')
    #inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    inputfiles = [inputf]

    #if len(inputfiles) == 0:
    #    print("No files found!")
    #    return

    #if inputfiles != [inputf]:
    #    print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
    #    if not ask("Import " + ", ".join(col['yellow'](pth.basename(i)) for i in inputfiles) + "?",default=True):
    #        inputfiles = [inputf]
    #        print("Only importing", col['yellow'](pth.basename(inputf)))

    for inputf in inputfiles:

        #print("Importing",col['yellow'](inputf),"...")
        with open(inputf,'r') as inputfd:
            data = json.load(inputfd)

        for entry in data:

            try:
                played = int(entry['ms_played'] / 1000)
                timestamp = int(
                    datetime.datetime.strptime(entry['ts'],"%Y-%m-%dT%H:%M:%SZ").timestamp()
                )
                artist = entry['master_metadata_album_artist_name'] # hmmm
                title = entry['master_metadata_track_name']
                album = entry['master_metadata_album_album_name']
                albumartist = entry['master_metadata_album_artist_name']

                if None in [title,artist]:
                    yield ('CONFIDENT_SKIP',None,f"{entry} has relevant fields set to null, skipping...")
                    continue

                if played < 30:
                    yield ('CONFIDENT_SKIP',None,f"{entry} is shorter than 30 seconds, skipping...")
                    continue

                yield ("CONFIDENT_IMPORT",{
                    'track_title':title,
                    'track_artists': artist,
                    'track_length': None,
                    'scrobble_time': timestamp,
                    'scrobble_duration':played,
                    'album_name': album,
                    'album_artist': albumartist
                },'')
            except Exception as e:
                yield ('FAIL',None,f"{entry} could not be parsed. Scrobble not imported. ({repr(e)})")
                continue

    print()

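One hedged observation on the timestamp handling above: strptime() produces a naive datetime, so .timestamp() interprets the 'Z'-suffixed UTC string in the server's local timezone. If that offset matters for an import, the usual fix is to attach UTC explicitly; a sketch:

# sketch: parsing Spotify's UTC timestamps without depending on local time
import datetime

ts = "2023-05-01T12:00:00Z"                      # example value, invented
naive = datetime.datetime.strptime(ts, "%Y-%m-%dT%H:%M:%SZ")
aware = naive.replace(tzinfo=datetime.timezone.utc)
# aware.timestamp() is the same on every machine; naive.timestamp() is not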
def parse_spotify(inputf):
    pth = os.path
    # use absolute paths internally for peace of mind. just change representation for console output
    inputf = pth.abspath(inputf)
    inputfolder = pth.dirname(inputf)
    inputfolder = pth.relpath(pth.dirname(pth.abspath(inputf)))
    filenames = re.compile(r'endsong_[0-9]+\.json')
    #inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    inputfiles = [inputf]
    inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]

    #if len(inputfiles) == 0:
    #    print("No files found!")
    #    return

    #if inputfiles != [inputf]:
    #    print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
    #    if not ask("Import " + ", ".join(col['yellow'](pth.basename(i)) for i in inputfiles) + "?",default=True):
    #        inputfiles = [inputf]
    #        print("Only importing", col['yellow'](pth.basename(inputf)))
    if len(inputfiles) == 0:
        print("No files found!")
        return

    if inputfiles != [inputf]:
        print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
        if not ask("Import " + ", ".join(col['yellow'](i) for i in inputfiles) + "?",default=True):
            inputfiles = [inputf]

    # we keep timestamps here as well to remove duplicates because spotify's export
    # is messy - this is specific to this import type and should not be mixed with
@ -294,7 +189,7 @@ def parse_spotify(inputf):

    for inputf in inputfiles:

        #print("Importing",col['yellow'](inputf),"...")
        print("Importing",col['yellow'](inputf),"...")
        with open(inputf,'r') as inputfd:
            data = json.load(inputfd)

@ -367,7 +262,6 @@ def parse_spotify(inputf):
            yield (status,{
                'track_title':title,
                'track_artists': artist,
                'track_length': None,
                'album_name': album,
                'scrobble_time': timestamp,
                'scrobble_duration':played
@ -378,7 +272,6 @@ def parse_spotify(inputf):

    print()

def parse_lastfm(inputf):

    with open(inputf,'r',newline='') as inputfd:
@ -401,7 +294,6 @@ def parse_lastfm(inputf):
            yield ('CONFIDENT_IMPORT',{
                'track_title': title,
                'track_artists': artist,
                'track_length': None,
                'album_name': album,
                'scrobble_time': int(datetime.datetime.strptime(
                    time + '+0000',
@ -414,93 +306,6 @@ def parse_lastfm(inputf):
                continue

def parse_lastfm_ghan_json(inputf):
    with open(inputf, 'r') as inputfd:
        data = json.load(inputfd)

    skip = 50000
    for entry in data:
        for track in entry['track']:
            skip -= 1
            #if skip: continue
            #print(track)
            #input()

            yield ('CONFIDENT_IMPORT', {
                'track_title': track['name'],
                'track_artists': track['artist']['#text'],
                'track_length': None,
                'album_name': track['album']['#text'],
                'scrobble_time': int(track['date']['uts']),
                'scrobble_duration': None
            }, '')


def parse_lastfm_ghan_csv(inputf):
    with open(inputf, 'r') as inputfd:
        reader = csv.DictReader(inputfd)

        for row in reader:
            yield ('CONFIDENT_IMPORT', {
                'track_title': row['track'],
                'track_artists': row['artist'],
                'track_length': None,
                'album_name': row['album'],
                'scrobble_time': int(row['uts']),
                'scrobble_duration': None
            }, '')


def parse_listenbrainz(inputf):

    with open(inputf,'r') as inputfd:
        data = json.load(inputfd)

    for entry in data:

        try:
            track_metadata = entry['track_metadata']
            additional_info = track_metadata.get('additional_info', {})

            yield ("CONFIDENT_IMPORT",{
                'track_title': track_metadata['track_name'],
                'track_artists': additional_info.get('artist_names') or track_metadata['artist_name'],
                'track_length': int(additional_info.get('duration_ms', 0) / 1000) or additional_info.get('duration'),
                'album_name': track_metadata.get('release_name'),
                'scrobble_time': entry['listened_at'],
                'scrobble_duration': None,
            },'')
        except Exception as e:
            yield ('FAIL',None,f"{entry} could not be parsed. Scrobble not imported. ({repr(e)})")
            continue

def parse_rockbox(inputf):
    with open(inputf,'r') as inputfd:
        for line in inputfd.readlines():
            if line == "#TZ/UNKNOWN":
                use_local_time = True
            elif line == "#TZ/UTC":
                use_local_time = False
            line = line.split("#")[0].split("\n")[0]
            if line:
                try:
                    artist,album,track,pos,duration,rate,timestamp,track_id, *_ = line.split("\t") + [None]
                    if rate == 'L':
                        yield ("CONFIDENT_IMPORT",{
                            'track_title':track,
                            'track_artists':artist,
                            'track_length':duration,
                            'album_name':album,
                            'scrobble_time':timestamp,
                            'scrobble_duration': None
                        },'')
                    else:
                        yield ('CONFIDENT_SKIP',None,f"{track} at {timestamp} is marked as skipped.")
                except Exception as e:
                    yield ('FAIL',None,f"{line} could not be parsed. Scrobble not imported. ({repr(e)})")
                    continue


def parse_maloja(inputf):

    with open(inputf,'r') as inputfd:
@ -513,9 +318,7 @@ def parse_maloja(inputf):
            yield ('CONFIDENT_IMPORT',{
                'track_title': s['track']['title'],
                'track_artists': s['track']['artists'],
                'track_length': s['track']['length'],
                'album_name': s['track'].get('album',{}).get('albumtitle','') if s['track'].get('album') is not None else '',
                'album_artists': s['track'].get('album',{}).get('artists',None) if s['track'].get('album') is not None else '',
                'album_name': s['track'].get('album',{}).get('name',''),
                'scrobble_time': s['time'],
                'scrobble_duration': s['duration']
            },'')
@ -1,108 +0,0 @@
from doreah.io import col

def parse_albums(strategy=None,prefer_existing=False):

    if strategy not in ("track","none","all","majority","most"):
        print("""
Please specify your album parsing strategy:

    --strategy          Specify what strategy to use when the scrobble contains
                        no information about album artists.
        track           Take the track artists. This can lead to
                        separate albums being created for compilation
                        albums or albums that have collaboration tracks.
        none            Merge all albums with the same name and assign
                        'Various Artists' as the album artist.
        all             Merge all albums with the same name and assign
                        every artist that appears on the album as an album
                        artist.
        majority        Merge all albums with the same name and assign
                        artists that appear in at least half the tracks
                        of the album as album artists. [RECOMMENDED]
        most            Merge all albums with the same name and assign
                        the artist that appears most on the album as album
                        artist.
    --prefer_existing   If an album with the same name already exists, use it
                        without further examination of track artists.
""")
        return

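To make the strategies concrete, here is a hedged worked example (counts invented) for an album whose four tracks credit the following artists:

# sketch: artist counts for one album title across its 4 tracks
artistoptions = {"Artist A": 4, "Artist B": 2, "Artist C": 1}
track_count = 4

all_     = [a for a in artistoptions]                                       # A, B, C
majority = [a for a in artistoptions if artistoptions[a] >= track_count/2]  # A, B
most     = [max(artistoptions, key=artistoptions.get)]                      # A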
    from ...database.sqldb import guess_albums, get_album_id, add_track_to_album

    print("Parsing album information...")
    result = guess_albums()

    result = {track_id:result[track_id] for track_id in result if result[track_id]["assigned"]}
    print("Found",col['yellow'](len(result)),"Tracks to assign albums to")

    result_authorative = {track_id:result[track_id] for track_id in result if result[track_id]["assigned"]["artists"]}
    result_guesswork = {track_id:result[track_id] for track_id in result if not result[track_id]["assigned"]["artists"]}

    i = 0

    def countup(i):
        i+=1
        if (i % 100) == 0:
            print(f"Added album information for {i} of {len(result)} tracks...")
        return i

    for track_id in result_authorative:
        albuminfo = result[track_id]['assigned']
        album_id = get_album_id(albuminfo)
        add_track_to_album(track_id,album_id)
        i=countup(i)

    albums = {}
    for track_id in result_guesswork:
        albuminfo = result[track_id]['assigned']

        # check if already exists
        if prefer_existing:
            album_id = get_album_id(albuminfo,ignore_albumartists=True,create_new=False)
            if album_id:
                add_track_to_album(track_id,album_id)
                i=countup(i)
                continue

        if strategy == 'track':
            albuminfo['artists'] = result[track_id]['guess_artists']
            album_id = get_album_id(albuminfo)
            add_track_to_album(track_id,album_id)
            i=countup(i)
            continue

        if strategy == 'none':
            albuminfo['artists'] = []
            album_id = get_album_id(albuminfo)
            add_track_to_album(track_id,album_id)
            i=countup(i)
            continue

        if strategy in ['all','majority','most']:
            cleantitle = albuminfo['albumtitle'].lower()
            albums.setdefault(cleantitle,{'track_ids':[],'artists':{},'title':albuminfo['albumtitle']})
            albums[cleantitle]['track_ids'].append(track_id)
            for a in result[track_id]['guess_artists']:
                albums[cleantitle]['artists'].setdefault(a,0)
                albums[cleantitle]['artists'][a] += 1

    for cleantitle in albums:
        artistoptions = albums[cleantitle]['artists']
        track_ids = albums[cleantitle]['track_ids']
        realtitle = albums[cleantitle]['title']
        if strategy == 'all':
            artists = [a for a in artistoptions]
        elif strategy == 'majority':
            artists = [a for a in artistoptions if artistoptions[a] >= (len(track_ids) / 2)]
        elif strategy == 'most':
            artists = [max(artistoptions,key=artistoptions.get)]

        for track_id in track_ids:
            album_id = get_album_id({'albumtitle':realtitle,'artists':artists})
            add_track_to_album(track_id,album_id)
            i=countup(i)

    print(col['lawngreen']("Done!"))
@ -1,26 +1,26 @@
# technical
import sys
import os
from threading import Thread
from importlib import resources
import datauri
import time
from magic import from_file

# server stuff
from bottle import Bottle, static_file, request, response, FormsDict, redirect, BaseRequest, abort
import waitress
from jinja2.exceptions import TemplateNotFound

# doreah toolkit
from doreah.logging import log
from doreah import auth

# rest of the project
from . import database
from .database.jinjaview import JinjaDBConnection
from .images import image_request
from .images import resolve_track_image, resolve_artist_image
from .malojauri import uri_to_internal, remove_identical
from .pkg_global.conf import malojaconfig, data_dir, auth
from .pkg_global import conf
from .pkg_global.conf import malojaconfig, data_dir
from .jinjaenv.context import jinja_environment
from .apis import init_apis, apikeystore

@ -97,7 +97,7 @@ aliases = {

### API

conf.auth.authapi.mount(server=webserver)
auth.authapi.mount(server=webserver)
init_apis(webserver)

# redirects for backwards compatibility
@ -119,14 +119,20 @@ def deprecated_api(pth):
@webserver.route("/image")
def dynamic_image():
    keys = FormsDict.decode(request.query)
    result = image_request(**{k:int(keys[k]) for k in keys})
    if keys['type'] == 'track':
        result = resolve_track_image(keys['id'])
    elif keys['type'] == 'artist':
        result = resolve_artist_image(keys['id'])

    if result['type'] == 'noimage' and result['value'] == 'wait':
        # still being worked on
        response.status = 202
        response.set_header('Retry-After',15)
        return
    if result['type'] in ('url','localurl'):
    if result is None or result['value'] in [None,'']:
        return ""
    if result['type'] == 'raw':
        # data uris are directly served as image because a redirect to a data uri
        # doesn't work
        duri = datauri.DataURI(result['value'])
        response.content_type = duri.mimetype
        return duri.data
    if result['type'] == 'url':
        redirect(result['value'],307)
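The 202 + Retry-After branch above turns image fetching into a small polling protocol: the client is told the image is still being resolved and when to ask again. A hedged client-side sketch (URL and attempt count invented):

# sketch: how a client could poll the /image endpoint
import time
import urllib.request

def fetch_image(url, attempts=5):
    for _ in range(attempts):
        with urllib.request.urlopen(url) as resp:
            if resp.status != 202:
                return resp.read()
            wait = int(resp.headers.get("Retry-After", "15"))
        time.sleep(wait)  # image still being resolved server-side
    return None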
@webserver.route("/images/<pth:re:.*\\.jpeg>")
|
||||
@ -153,10 +159,6 @@ def static_image(pth):
|
||||
resp.set_header("Content-Type", "image/" + ext)
|
||||
return resp
|
||||
|
||||
@webserver.route("/cacheimages/<uuid>")
|
||||
def static_proxied_image(uuid):
|
||||
mimetype = from_file(os.path.join(data_dir['cache']('images'),uuid),True)
|
||||
return static_file(uuid,root=data_dir['cache']('images'),mimetype=mimetype)
|
||||
|
||||
@webserver.route("/login")
|
||||
def login():
|
||||
@ -167,16 +169,16 @@ def login():
|
||||
@webserver.route("/media/<name>.<ext>")
|
||||
def static(name,ext):
|
||||
assert ext in ["txt","ico","jpeg","jpg","png","less","js","ttf","css"]
|
||||
staticfolder = resources.files('maloja') / 'web' / 'static'
|
||||
response = static_file(ext + "/" + name + "." + ext,root=staticfolder)
|
||||
with resources.files('maloja') / 'web' / 'static' as staticfolder:
|
||||
response = static_file(ext + "/" + name + "." + ext,root=staticfolder)
|
||||
response.set_header("Cache-Control", "public, max-age=3600")
|
||||
return response
|
||||
|
||||
# new, direct reference
|
||||
@webserver.route("/static/<path:path>")
|
||||
def static(path):
|
||||
staticfolder = resources.files('maloja') / 'web' / 'static'
|
||||
response = static_file(path,root=staticfolder)
|
||||
with resources.files('maloja') / 'web' / 'static' as staticfolder:
|
||||
response = static_file(path,root=staticfolder)
|
||||
response.set_header("Cache-Control", "public, max-age=3600")
|
||||
return response
|
||||
|
||||
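The switch from the `with resources.files(...) as ...` form to a plain assignment above is worth a note: `resources.files()` returns a Traversable, which is not guaranteed to be a context manager, and when an on-disk path is genuinely required the documented route is `importlib.resources.as_file()`. A hedged sketch of that alternative:

# sketch: materializing a packaged folder as a filesystem path when needed
from importlib import resources

traversable = resources.files("maloja") / "web" / "static"
with resources.as_file(traversable) as staticfolder:
    # staticfolder is a pathlib.Path for the duration of the block
    print(staticfolder)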
@ -198,7 +200,7 @@ def jinja_page(name):
    if name in aliases: redirect(aliases[name])
    keys = remove_identical(FormsDict.decode(request.query))

    adminmode = request.cookies.get("adminmode") == "true" and auth.check_request(request)
    adminmode = request.cookies.get("adminmode") == "true" and auth.check(request)

    with JinjaDBConnection() as conn:

@ -210,20 +212,19 @@ def jinja_page(name):
            "_urikeys":keys, #temporary!
        }
        loc_context["filterkeys"], loc_context["limitkeys"], loc_context["delimitkeys"], loc_context["amountkeys"], loc_context["specialkeys"] = uri_to_internal(keys)

        template = jinja_environment.get_template(name + '.jinja')
        try:
            template = jinja_environment.get_template(name + '.jinja')
            res = template.render(**loc_context)
        except TemplateNotFound:
            abort(404,f"Not found: '{name}'")
        #except (ValueError, IndexError):
        #    abort(404,"This Artist or Track does not exist")
        except (ValueError, IndexError):
            abort(404,"This Artist or Track does not exist")

    if malojaconfig["DEV_MODE"]: jinja_environment.cache.clear()

    return res

@webserver.route("/<name:re:admin.*>")
@auth.authenticated_function()
@auth.authenticated
def jinja_page_private(name):
    return jinja_page(name)

@ -280,8 +281,6 @@ logging.getLogger().addHandler(WaitressLogHandler())

def run_server():
    conf.AUX_MODE = False

    log("Starting up Maloja server...")

    ## start database
123 maloja/setup.py
@ -1,13 +1,12 @@
import os
import shutil
import stat

from importlib import resources
from pathlib import PosixPath
from distutils import dir_util

from doreah.io import col, ask, prompt
from doreah import auth

from .pkg_global.conf import data_dir, dir_settings, malojaconfig, auth
from .pkg_global.conf import data_dir, dir_settings, malojaconfig

@ -22,86 +21,60 @@ ext_apikeys = [

def copy_initial_local_files():
    data_file_source = resources.files("maloja") / 'data_files'
    for cat in dir_settings:
        if dir_settings[cat] is None:
            continue
        if cat == 'config' and malojaconfig.readonly:
            continue

        # to avoid permission problems with the root dir
        for subfolder in os.listdir(data_file_source / cat):
            src = data_file_source / cat / subfolder
            dst = PosixPath(dir_settings[cat]) / subfolder
            if os.path.isdir(src):
                shutil.copytree(src, dst, dirs_exist_ok=True)
                # fix permissions (u+w)
                for dirpath, _, filenames in os.walk(dst):
                    os.chmod(dirpath, os.stat(dirpath).st_mode | stat.S_IWUSR)
                    for filename in filenames:
                        filepath = os.path.join(dirpath, filename)
                        os.chmod(filepath, os.stat(filepath).st_mode | stat.S_IWUSR)

    with resources.files("maloja") / 'data_files' as folder:
        for cat in dir_settings:
            dir_util.copy_tree(os.path.join(folder,cat),dir_settings[cat],update=False)
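The permission fix above only ORs the owner-write bit onto whatever mode the copied files already have, leaving all other bits untouched. A quick sketch of the bit arithmetic:

# sketch: adding u+w without disturbing other mode bits
import stat

mode = 0o555                           # example: read/execute only
assert mode | stat.S_IWUSR == 0o755    # owner can now write; group/other unchanged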
charset = list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") + list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
def randomstring(length=32):
    import random
    return "".join(str(random.choice(charset)) for _ in range(length))

def setup():

copy_initial_local_files()
SKIP = malojaconfig["SKIP_SETUP"]

try:
print("Various external services can be used to display images. If not enough of them are set up, only local images will be used.")
for k in ext_apikeys:
keyname = malojaconfig.get_setting_info(k)['name']
key = malojaconfig[k]
if key is False:
print(f"\tCurrently not using a {col['red'](keyname)} for image display.")
elif key is None or key == "ASK":
if malojaconfig.readonly:
print(f"\tCurrently not using a {col['red'](keyname)} for image display - config is read only.")
else:
promptmsg = f"\tPlease enter your {col['gold'](keyname)}. If you do not want to use one at this moment, simply leave this empty and press Enter."
key = prompt(promptmsg,types=(str,),default=False,skip=SKIP)
malojaconfig[k] = key
print("Various external services can be used to display images. If not enough of them are set up, only local images will be used.")
for k in ext_apikeys:
keyname = malojaconfig.get_setting_info(k)['name']
key = malojaconfig[k]
if key is False:
print(f"\tCurrently not using a {col['red'](keyname)} for image display.")
elif key is None or key == "ASK":
promptmsg = f"\tPlease enter your {col['gold'](keyname)}. If you do not want to use one at this moment, simply leave this empty and press Enter."
key = prompt(promptmsg,types=(str,),default=False,skip=SKIP)
malojaconfig[k] = key
else:
print(f"\t{col['lawngreen'](keyname)} found.")

# OWN API KEY
from .apis import apikeystore
if len(apikeystore) == 0:
answer = ask("Do you want to set up a key to enable scrobbling? Your scrobble extension needs that key so that only you can scrobble tracks to your database.",default=True,skip=SKIP)
if answer:
key = apikeystore.generate_key('default')
print("Your API Key: " + col["yellow"](key))

# PASSWORD
forcepassword = malojaconfig["FORCE_PASSWORD"]
# this is mainly meant for docker, supply password via environment variable

if forcepassword is not None:
# user has specified to force the pw, nothing else matters
auth.defaultuser.setpw(forcepassword)
print("Password has been set.")
elif auth.defaultuser.checkpw("admin"):
# if the actual pw is admin, it means we've never set this up properly (eg first start after update)
while True:
newpw = prompt("Please set a password for web backend access. Leave this empty to generate a random password.",skip=SKIP,secret=True)
if newpw is None:
newpw = randomstring(32)
print("Generated password:",col["yellow"](newpw))
break
else:
print(f"\t{col['lawngreen'](keyname)} found.")

# OWN API KEY
from .apis import apikeystore
if len(apikeystore) == 0:
answer = ask("Do you want to set up a key to enable scrobbling? Your scrobble extension needs that key so that only you can scrobble tracks to your database.",default=True,skip=SKIP)
if answer:
key = apikeystore.generate_key('default')
print("Your API Key: " + col["yellow"](key))

# PASSWORD
forcepassword = malojaconfig["FORCE_PASSWORD"]
# this is mainly meant for docker, supply password via environment variable

if forcepassword is not None:
# user has specified to force the pw, nothing else matters
auth.change_pw(password=forcepassword)
print("Password has been set.")
elif auth.still_has_factory_default_user():
# this means we've never set this up properly (eg first start after update)
while True:
newpw = prompt("Please set a password for web backend access. Leave this empty to generate a random password.",skip=SKIP,secret=True)
if newpw is None:
newpw = randomstring(32)
print("Generated password:",col["yellow"](newpw))
break
else:
newpw_repeat = prompt("Please type again to confirm.",skip=SKIP,secret=True)
if newpw != newpw_repeat: print("Passwords do not match!")
else: break
auth.change_pw(password=newpw)

except EOFError:
print("No user input possible. If you are running inside a container, set the environment variable",col['yellow']("MALOJA_SKIP_SETUP=yes"))
raise SystemExit
newpw_repeat = prompt("Please type again to confirm.",skip=SKIP,secret=True)
if newpw != newpw_repeat: print("Passwords do not match!")
else: break
auth.defaultuser.setpw(newpw)
maloja/thirdparty/__init__.py (130 changed lines, vendored)
@ -7,16 +7,14 @@
# pls don't sue me

import xml.etree.ElementTree as ElementTree
import requests
import urllib.parse
import json
import urllib.parse, urllib.request
import base64
import time
from doreah.logging import log
from threading import BoundedSemaphore, Thread
from threading import BoundedSemaphore

from ..pkg_global.conf import malojaconfig
from .. import database
from ..__pkginfo__ import USER_AGENT

services = {
@ -25,14 +23,6 @@ services = {
"metadata":[]
}

class InvalidResponse(Exception):
"""Invalid Response from Third Party"""

class RateLimitExceeded(Exception):
"""Rate Limit exceeded"""

# have a limited number of worker threads so we don't completely hog the cpu with
# these requests. they are mostly network bound, so python will happily open up 200 new
# requests and then when all the responses come in we suddenly can't load pages anymore
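The comment above is the rationale for the thirdpartylock used below: a fixed-size BoundedSemaphore caps how many of these network-bound lookups run at once, so a burst of responses cannot starve the page-serving threads. A self-contained sketch of the pattern (the worker body is a stand-in for the real request):

import time
from threading import BoundedSemaphore, Thread

worker_slots = BoundedSemaphore(4)  # at most 4 lookups in flight at a time

def lookup(i):
    with worker_slots:       # blocks while all 4 slots are taken
        time.sleep(0.1)      # stand-in for the actual network request
        print("finished lookup", i)

threads = [Thread(target=lookup, args=(i,)) for i in range(20)]
for t in threads: t.start()
for t in threads: t.join()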
@ -52,42 +42,28 @@ def proxy_scrobble_all(artists,title,timestamp):

def get_image_track_all(track):
with thirdpartylock:
for service in services["metadata"]:
if "track" not in service.metadata["enabled_entity_types"]: continue
try:
res = service.get_image_track(track)
if res:
if res is not None:
log("Got track image for " + str(track) + " from " + service.name)
return res
else:
log(f"Could not get track image for {track} from {service.name}")
log("Could not get track image for " + str(track) + " from " + service.name)
except Exception as e:
log(f"Error getting track image from {service.name}: {e.__doc__}")
log("Error getting track image from " + service.name + ": " + repr(e))

def get_image_artist_all(artist):
with thirdpartylock:
for service in services["metadata"]:
if "artist" not in service.metadata["enabled_entity_types"]: continue
try:
res = service.get_image_artist(artist)
if res:
if res is not None:
log("Got artist image for " + str(artist) + " from " + service.name)
return res
else:
log(f"Could not get artist image for {artist} from {service.name}")
log("Could not get artist image for " + str(artist) + " from " + service.name)
except Exception as e:
log(f"Error getting artist image from {service.name}: {e.__doc__}")

def get_image_album_all(album):
with thirdpartylock:
for service in services["metadata"]:
if "album" not in service.metadata["enabled_entity_types"]: continue
try:
res = service.get_image_album(album)
if res:
log("Got album image for " + str(album) + " from " + service.name)
return res
else:
log(f"Could not get album image for {album} from {service.name}")
except Exception as e:
log(f"Error getting album image from {service.name}: {e.__doc__}")
log("Error getting artist image from " + service.name + ": " + repr(e))

class GenericInterface:
@ -104,17 +80,12 @@ class GenericInterface:
scrobbleimport = {}
metadata = {}

useragent = USER_AGENT

def __init__(self):
# populate from settings file once on creation
# avoid constant disk access, restart on adding services is acceptable
for key in self.settings:
self.settings[key] = malojaconfig[self.settings[key]]
t = Thread(target=self.authorize)
t.daemon = True
t.start()
#self.authorize()
self.authorize()

# this makes sure that of every class we define, we immediately create an
# instance (de facto singleton). then each instance checks if the requirements
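The comment above (cut off at the hunk boundary) describes the registration trick: defining a service class immediately creates its single instance, which then registers itself if its requirements are met. The `abstract=True` keyword visible on ProxyScrobbleInterface below suggests an __init_subclass__ hook; a sketch of how such a hook can work (an assumption about the mechanism, not the verbatim implementation):

services = {"metadata": []}

class GenericInterface:
    def active_metadata(self):
        return True  # the real check looks at settings and required keys

    def __init_subclass__(cls, abstract=False, **kwargs):
        super().__init_subclass__(**kwargs)
        if not abstract:
            instance = cls()                 # de-facto singleton per service
            if instance.active_metadata():
                services["metadata"].append(instance)

class SomeService(GenericInterface):
    pass  # merely defining the class registers an instance

print(services["metadata"])  # [<SomeService object at ...>]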
@ -136,6 +107,16 @@ class GenericInterface:
return True
# per default. no authorization is necessary

# wrapper method
def request(self,url,data,responsetype):
response = urllib.request.urlopen(
url,
data=utf(data)
)
responsedata = response.read()
if responsetype == "xml":
data = ElementTree.fromstring(responsedata)
return data

# proxy scrobbler
class ProxyScrobbleInterface(GenericInterface,abstract=True):
@ -154,15 +135,11 @@ class ProxyScrobbleInterface(GenericInterface,abstract=True):
)

def scrobble(self,artists,title,timestamp):
response = requests.post(
url=self.proxyscrobble["scrobbleurl"],
data=self.proxyscrobble_postdata(artists,title,timestamp),
headers={
"User-Agent":self.useragent
}
)
response = urllib.request.urlopen(
self.proxyscrobble["scrobbleurl"],
data=utf(self.proxyscrobble_postdata(artists,title,timestamp)))
responsedata = response.read()
if self.proxyscrobble["response_type"] == "xml":
responsedata = response.text
data = ElementTree.fromstring(responsedata)
return self.proxyscrobble_parse_response(data)

@ -200,8 +177,6 @@ class MetadataInterface(GenericInterface,abstract=True):
"activated_setting":None
}

delay = 0

# service provides this role only if the setting is active AND all
# necessary auth settings exist
def active_metadata(self):
@ -214,58 +189,32 @@ class MetadataInterface(GenericInterface,abstract=True):
artists, title = track
artiststring = urllib.parse.quote(", ".join(artists))
titlestring = urllib.parse.quote(title)
response = requests.get(
self.metadata["trackurl"].format(artist=artiststring,title=titlestring,**self.settings),
headers={
"User-Agent":self.useragent
}
response = urllib.request.urlopen(
self.metadata["trackurl"].format(artist=artiststring,title=titlestring,**self.settings)
)

responsedata = response.read()
if self.metadata["response_type"] == "json":
data = response.json()
data = json.loads(responsedata)
imgurl = self.metadata_parse_response_track(data)
else:
imgurl = None
if imgurl is not None: imgurl = self.postprocess_url(imgurl)
time.sleep(self.delay)
return imgurl

def get_image_artist(self,artist):
artiststring = urllib.parse.quote(artist)
response = requests.get(
self.metadata["artisturl"].format(artist=artiststring,**self.settings),
headers={
"User-Agent":self.useragent
}
response = urllib.request.urlopen(
self.metadata["artisturl"].format(artist=artiststring,**self.settings)
)

responsedata = response.read()
if self.metadata["response_type"] == "json":
data = response.json()
data = json.loads(responsedata)
imgurl = self.metadata_parse_response_artist(data)
else:
imgurl = None
if imgurl is not None: imgurl = self.postprocess_url(imgurl)
time.sleep(self.delay)
return imgurl

def get_image_album(self,album):
artists, title = album
artiststring = urllib.parse.quote(", ".join(artists or []))
titlestring = urllib.parse.quote(title)
response = requests.get(
self.metadata["albumurl"].format(artist=artiststring,title=titlestring,**self.settings),
headers={
"User-Agent":self.useragent
}
)

if self.metadata["response_type"] == "json":
data = response.json()
imgurl = self.metadata_parse_response_album(data)
else:
imgurl = None
if imgurl is not None: imgurl = self.postprocess_url(imgurl)
time.sleep(self.delay)
return imgurl

# default function to parse response by descending down nodes
@ -276,30 +225,19 @@ class MetadataInterface(GenericInterface,abstract=True):
def metadata_parse_response_track(self,data):
return self._parse_response("response_parse_tree_track", data)

def metadata_parse_response_album(self,data):
return self._parse_response("response_parse_tree_album", data)

def _parse_response(self, resp, data):
res = data
for node in self.metadata[resp]:
try:
res = res[node]
except Exception:
handleresult = self.handle_json_result_error(data) #allow the handler to throw custom exceptions
# it can also return True to indicate that this is not an error, but simply an instance of 'this api doesnt have any info'
if handleresult is True:
return None
#throw the generic error if the handler refused to do anything
raise InvalidResponse()
return None
return res

def postprocess_url(self,url):
url = url.replace("http:","https:",1)
return url

def handle_json_result_error(self,result):
raise InvalidResponse()
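The response_parse_tree_* lists used by every service below are just paths into the decoded JSON: _parse_response above walks them one dict key or list index at a time and hands off to handle_json_result_error when a step is missing. A self-contained illustration of the descent:

data = {"data": [{"album": {"cover_medium": "https://example.invalid/cover.jpg"}}]}
path = ["data", 0, "album", "cover_medium"]  # same shape as the Deezer tree below

res = data
for node in path:
    res = res[node]   # dict key or list index, both use []
print(res)            # https://example.invalid/cover.jpg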
maloja/thirdparty/audiodb.py (9 changed lines, vendored)
@ -9,18 +9,13 @@ class AudioDB(MetadataInterface):
}

metadata = {
#"trackurl": "https://theaudiodb.com/api/v1/json/{api_key}/searchtrack.php?s={artist}&t={title}", #patreon
#"trackurl": "https://theaudiodb.com/api/v1/json/{api_key}/searchtrack.php?s={artist}&t={title}",
"artisturl": "https://www.theaudiodb.com/api/v1/json/{api_key}/search.php?s={artist}",
#"albumurl": "https://www.theaudiodb.com/api/v1/json/{api_key}/searchalbum.php?s={artist}&a={title}", #patreon
"response_type":"json",
#"response_parse_tree_track": ["tracks",0,"astrArtistThumb"],
"response_parse_tree_artist": ["artists",0,"strArtistThumb"],
"required_settings": ["api_key"],
"enabled_entity_types": ["artist"]
}

def get_image_track(self,track):
return None

def get_image_album(self,album):
def get_image_track(self,artist):
return None
maloja/thirdparty/deezer.py (29 changed lines, vendored)
@ -1,5 +1,4 @@
from . import MetadataInterface, RateLimitExceeded

from . import MetadataInterface

class Deezer(MetadataInterface):
name = "Deezer"
@ -9,32 +8,10 @@ class Deezer(MetadataInterface):
}

metadata = {
#"trackurl": "https://api.deezer.com/search?q={artist}%20{title}",
"trackurl": "https://api.deezer.com/search?q={artist}%20{title}",
"artisturl": "https://api.deezer.com/search?q={artist}",
"albumurl": "https://api.deezer.com/search?q={artist}%20{title}",
"response_type":"json",
#"response_parse_tree_track": ["data",0,"album","cover_medium"],
"response_parse_tree_track": ["data",0,"album","cover_medium"],
"response_parse_tree_artist": ["data",0,"artist","picture_medium"],
"response_parse_tree_album": ["data",0,"album","cover_medium"],
"required_settings": [],
"enabled_entity_types": ["artist","album"]
}

delay = 1

def get_image_track(self,track):
return None
# we can use the album pic from the track search,
# but should do so via maloja logic

def handle_json_result_error(self,result):
if result.get('data') == []:
return True
if result.get('error',{}).get('code',None) == 4:
self.delay += 1
# this is permanent (for the lifetime of the process)
# but that's actually ok
# since hitting the rate limit means we are doing this too fast
# and these requests arent really time sensitive
raise RateLimitExceeded()
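The Deezer error handler above is a simple adaptive backoff: every rate-limit response (error code 4) permanently bumps the per-request delay for the rest of the process, on the theory that these lookups are not time-sensitive. A self-contained sketch of that idea (the API response is stubbed):

import time

class RateLimitExceeded(Exception):
    """Rate limit exceeded"""

class Client:
    delay = 1  # seconds slept before each request; grows on every limit hit

    def fetch(self, query):
        time.sleep(self.delay)
        result = {"error": {"code": 4}}  # stand-in for the real API response
        if result.get("error", {}).get("code") == 4:
            self.delay += 1              # permanent for the process lifetime
            raise RateLimitExceeded()
        return result

client = Client()
try:
    client.fetch("arctic monkeys")
except RateLimitExceeded:
    print("backing off; next delay:", client.delay)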
maloja/thirdparty/lastfm.py (58 changed lines, vendored)
@ -1,7 +1,6 @@
from . import MetadataInterface, ProxyScrobbleInterface, utf
import hashlib
import requests
import xml.etree.ElementTree as ElementTree
import urllib.parse, urllib.request
from doreah.logging import log

class LastFM(MetadataInterface, ProxyScrobbleInterface):
@ -23,23 +22,15 @@ class LastFM(MetadataInterface, ProxyScrobbleInterface):
"activated_setting": "SCROBBLE_LASTFM"
}
metadata = {
#"artisturl": "https://ws.audioscrobbler.com/2.0/?method=artist.getinfo&artist={artist}&api_key={apikey}&format=json"
"trackurl": "https://ws.audioscrobbler.com/2.0/?method=track.getinfo&track={title}&artist={artist}&api_key={apikey}&format=json",
"albumurl": "https://ws.audioscrobbler.com/2.0/?method=album.getinfo&api_key={apikey}&artist={artist}&album={title}&format=json",
"response_type":"json",
"response_parse_tree_track": ["track","album","image",-1,"#text"],
# technically just the album artwork, but we use it for now
#"response_parse_tree_artist": ["artist","image",-1,"#text"],
"response_parse_tree_album": ["album","image",-1,"#text"],
"required_settings": ["apikey"],
"enabled_entity_types": ["track","album"]
}

def get_image_artist(self,artist):
return None
# lastfm still provides that endpoint with data,
# but doesn't provide actual images

# lastfm doesn't provide artist images

def proxyscrobble_parse_response(self,data):
return data.attrib.get("status") == "ok" and data.find("scrobbles").attrib.get("ignored") == "0"
@ -55,39 +46,28 @@ class LastFM(MetadataInterface, ProxyScrobbleInterface):
})

def authorize(self):
if all(self.settings[key] not in [None,"ASK",False] for key in ["username","password","apikey","secret"]):
try:
response = requests.post(
url=self.proxyscrobble['scrobbleurl'],
params=self.query_compose({
"method":"auth.getMobileSession",
"username":self.settings["username"],
"password":self.settings["password"],
"api_key":self.settings["apikey"]
}),
headers={
"User-Agent":self.useragent
}
)

data = ElementTree.fromstring(response.text)
self.settings["sk"] = data.find("session").findtext("key")
except Exception as e:
log("Error while authenticating with LastFM: " + repr(e))
try:
result = self.request(
self.proxyscrobble['scrobbleurl'],
self.query_compose({
"method":"auth.getMobileSession",
"username":self.settings["username"],
"password":self.settings["password"],
"api_key":self.settings["apikey"]
}),
responsetype="xml"
)
self.settings["sk"] = result.find("session").findtext("key")
except Exception as e:
pass
#log("Error while authenticating with LastFM: " + repr(e))

# creates signature and returns full query
# creates signature and returns full query string
def query_compose(self,parameters):
m = hashlib.md5()
keys = sorted(str(k) for k in parameters)
m.update(utf("".join(str(k) + str(parameters[k]) for k in keys)))
m.update(utf(self.settings["secret"]))
sig = m.hexdigest()
return {**parameters,"api_sig":sig}

def handle_json_result_error(self,result):
if "track" in result and not result.get("track").get('album',{}):
return True

if "error" in result and result.get("error") == 6:
return True
return urllib.parse.urlencode(parameters) + "&api_sig=" + sig
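query_compose above implements the standard Last.fm request signature: concatenate every parameter name and value in sorted key order, append the shared secret, and md5 the result. A self-contained sketch with dummy values:

import hashlib

def sign(parameters, secret):
    # key1value1key2value2... in sorted key order, then the secret
    payload = "".join(str(k) + str(parameters[k]) for k in sorted(parameters))
    sig = hashlib.md5((payload + secret).encode("utf-8")).hexdigest()
    return {**parameters, "api_sig": sig}

print(sign({"method": "auth.getMobileSession", "api_key": "abc123"}, "hunter2"))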
maloja/thirdparty/maloja.py (6 changed lines, vendored)
@ -1,5 +1,5 @@
from . import ProxyScrobbleInterface, ImportInterface
import requests
import urllib.request
from doreah.logging import log
import json

@ -32,8 +32,8 @@ class OtherMalojaInstance(ProxyScrobbleInterface, ImportInterface):
def get_remote_scrobbles(self):
url = f"{self.settings['instance']}/apis/mlj_1/scrobbles"

response = requests.get(url)
data = response.json()
response = urllib.request.urlopen(url)
data = json.loads(response.read().decode('utf-8'))

for scrobble in data['list']:
yield scrobble
maloja/thirdparty/musicbrainz.py (122 changed lines, vendored)
@ -1,7 +1,9 @@
from . import MetadataInterface
import requests
import urllib.parse, urllib.request
import json
import time
import threading
from ..__pkginfo__ import USER_AGENT

class MusicBrainz(MetadataInterface):
name = "MusicBrainz"
@ -9,123 +11,51 @@ class MusicBrainz(MetadataInterface):

# musicbrainz is rate-limited
lock = threading.Lock()

thumbnailsize_order = ['500','large','1200','250','small']
useragent = USER_AGENT

settings = {
}

metadata = {
"response_type":"json",
"response_parse_tree_track": ["images",0,"image"],
"required_settings": [],
"enabled_entity_types": ["album","track"]
}

def get_image_artist(self,artist):
return None
# not supported

def get_image_album(self,album):
self.lock.acquire()
try:
artists, title = album
searchstr = f'release:"{title}"'
for artist in artists:
searchstr += f' artist:"{artist}"'
res = requests.get(**{
"url":"https://musicbrainz.org/ws/2/release",
"params":{
"fmt":"json",
"query":searchstr
},
"headers":{
"User-Agent":self.useragent
}
})
data = res.json()
entity = data["releases"][0]
coverartendpoint = "release"
while True:
mbid = entity["id"]
try:
response = requests.get(
f"https://coverartarchive.org/{coverartendpoint}/{mbid}",
params={
"fmt":"json"
},
headers={
"User-Agent":self.useragent
}
)
data = response.json()
thumbnails = data['images'][0]['thumbnails']
for size in self.thumbnailsize_order:
if thumbnails.get(size) is not None:
imgurl = thumbnails.get(size)
continue
except:
imgurl = None
if imgurl is None:
entity = entity["release-group"]
# this will raise an error so we don't stay in the while loop forever
coverartendpoint = "release-group"
continue

imgurl = self.postprocess_url(imgurl)
return imgurl

except Exception:
return None
finally:
time.sleep(2)
self.lock.release()

def get_image_track(self,track):
self.lock.acquire()
try:
artists, title = track
searchstr = f'recording:"{title}"'
for artist in artists:
searchstr += f' artist:"{artist}"'
res = requests.get(**{
"url":"https://musicbrainz.org/ws/2/recording",
"params":{
"fmt":"json",
"query":searchstr
},
artiststring = ", ".join(artists) #Join artists collection into string
titlestring = title
querystr = urllib.parse.urlencode({
"fmt":"json",
"query":"{title} {artist}".format(artist=artiststring,title=titlestring)
})
req = urllib.request.Request(**{
"url":"https://musicbrainz.org/ws/2/release?" + querystr,
"method":"GET",
"headers":{
"User-Agent":self.useragent
}
})
data = res.json()
entity = data["recordings"][0]["releases"][0]
coverartendpoint = "release"
while True:
mbid = entity["id"]
try:
response = requests.get(
f"https://coverartarchive.org/{coverartendpoint}/{mbid}",
params={
"fmt":"json"
}
)
data = response.json()
thumbnails = data['images'][0]['thumbnails']
for size in self.thumbnailsize_order:
if thumbnails.get(size) is not None:
imgurl = thumbnails.get(size)
continue
except:
imgurl = None
if imgurl is None:
entity = entity["release-group"]
# this will raise an error so we don't stay in the while loop forever
coverartendpoint = "release-group"
continue

imgurl = self.postprocess_url(imgurl)
return imgurl
response = urllib.request.urlopen(req)
responsedata = response.read()
data = json.loads(responsedata)
mbid = data["releases"][0]["id"]
response = urllib.request.urlopen(
"https://coverartarchive.org/release/{mbid}?fmt=json".format(mbid=mbid)
)
responsedata = response.read()
data = json.loads(responsedata)
imgurl = self.metadata_parse_response_track(data)
if imgurl is not None: imgurl = self.postprocess_url(imgurl)
return imgurl

except Exception:
return None
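The while-loop in both MusicBrainz getters encodes a two-step fallback: ask the Cover Art Archive for the release first, and if no usable thumbnail comes back, retry once with the parent release-group; a second miss raises a KeyError, which exits the loop instead of spinning forever (that is what the inline comment refers to). A compressed, self-contained sketch of that control flow (the archive request is stubbed):

def fetch_thumbnail(endpoint, mbid):
    # stand-in for the Cover Art Archive request; pretend only the
    # release-group has artwork
    art = {("release-group", "rg-1"): "https://example.invalid/cover.jpg"}
    return art.get((endpoint, mbid))

entity = {"id": "rel-1", "release-group": {"id": "rg-1"}}
coverartendpoint = "release"

while True:
    imgurl = fetch_thumbnail(coverartendpoint, entity["id"])
    if imgurl is None:
        # on the second miss this raises KeyError and ends the loop
        entity = entity["release-group"]
        coverartendpoint = "release-group"
        continue
    break

print(imgurl)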
maloja/thirdparty/spotify.py (34 changed lines, vendored)
@ -1,5 +1,6 @@
from . import MetadataInterface, utf, b64
import requests
import urllib.parse, urllib.request
import json
from threading import Timer
from doreah.logging import log

@ -13,15 +14,12 @@ class Spotify(MetadataInterface):
}

metadata = {
"trackurl": "https://api.spotify.com/v1/search?q={title}%20artist:{artist}&type=track&access_token={token}",
"albumurl": "https://api.spotify.com/v1/search?q={title}%20artist:{artist}&type=album&access_token={token}",
"artisturl": "https://api.spotify.com/v1/search?q={artist}&type=artist&access_token={token}",
"trackurl": "https://api.spotify.com/v1/search?q=artist:{artist}%20track:{title}&type=track&access_token={token}",
"artisturl": "https://api.spotify.com/v1/search?q=artist:{artist}&type=artist&access_token={token}",
"response_type":"json",
"response_parse_tree_track": ["tracks","items",0,"album","images",0,"url"], # use album art
"response_parse_tree_album": ["albums","items",0,"images",0,"url"],
"response_parse_tree_track": ["tracks","items",0,"album","images",0,"url"],
"response_parse_tree_artist": ["artists","items",0,"images",0,"url"],
"required_settings": ["apiid","secret"],
"enabled_entity_types": ["artist","album","track"]
}

def authorize(self):
@ -31,28 +29,22 @@ class Spotify(MetadataInterface):
try:
keys = {
"url":"https://accounts.spotify.com/api/token",
"method":"POST",
"headers":{
"Authorization":"Basic " + b64(utf(self.settings["apiid"] + ":" + self.settings["secret"])).decode("utf-8"),
"User-Agent": self.useragent
"Authorization":"Basic " + b64(utf(self.settings["apiid"] + ":" + self.settings["secret"])).decode("utf-8")
},
"data":{"grant_type":"client_credentials"}
"data":bytes(urllib.parse.urlencode({"grant_type":"client_credentials"}),encoding="utf-8")
}
res = requests.post(**keys)
responsedata = res.json()
req = urllib.request.Request(**keys)
response = urllib.request.urlopen(req)
responsedata = json.loads(response.read())
if "error" in responsedata:
log("Error authenticating with Spotify: " + responsedata['error_description'])
expire = 3600
else:
expire = responsedata.get("expires_in",3600)
self.settings["token"] = responsedata["access_token"]
#log("Successfully authenticated with Spotify")
t = Timer(expire,self.authorize)
t.daemon = True
t.start()
log("Successfully authenticated with Spotify")
Timer(expire,self.authorize).start()
except Exception as e:
log("Error while authenticating with Spotify: " + repr(e))

def handle_json_result_error(self,result):
result = result.get('tracks') or result.get('albums') or result.get('artists')
if not result['items']:
return True
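Spotify's authorize() above is the OAuth client-credentials flow made self-renewing: fetch a token, then schedule a threading.Timer to call authorize() again when the token expires, with daemon=True so the pending timer can never keep the process alive on shutdown. A minimal sketch of the rescheduling skeleton (the token request is stubbed):

from threading import Timer

def fetch_token():
    # stand-in for POST https://accounts.spotify.com/api/token
    return {"access_token": "xyz", "expires_in": 3600}

def authorize():
    data = fetch_token()
    expire = data.get("expires_in", 3600)
    t = Timer(expire, authorize)  # re-authorize when the token runs out
    t.daemon = True               # don't block interpreter shutdown
    t.start()

authorize()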
@ -11,9 +11,6 @@ from .pkg_global.conf import data_dir, dir_settings
from .apis import _apikeys

from .database.sqldb import get_maloja_info, set_maloja_info

# Dealing with old style tsv files - these should be phased out everywhere
def read_tsvs(path,types):
result = []

@ -43,7 +40,7 @@ def upgrade_apikeys():
except Exception:
pass

# v2 to v3 upgrade
def upgrade_db(callback_add_scrobbles):
oldfolder = os.path.join(dir_settings['state'],"scrobbles")

@ -91,13 +88,3 @@ def upgrade_db(callback_add_scrobbles):
callback_add_scrobbles(scrobblelist)
os.rename(os.path.join(oldfolder,sf),os.path.join(newfolder,sf))
log("Done!",color='yellow')

# 3.2 album support
def parse_old_albums():
setting_name = "db_upgrade_albums"
if get_maloja_info([setting_name]).get(setting_name):
pass
else:
pass
#set_maloja_info({setting_name:True})
@ -21,8 +21,7 @@
['setup','Server Setup'],
['settings','Settings'],
['apikeys','API Keys'],
['manual','Manual Scrobbling'],
['albumless','Tracks without Albums']
['manual','Manual Scrobbling']

] %}
{# ['import','Scrobble Import'],
@ -8,7 +8,6 @@

<title>{% block title %}{% endblock %}</title>
<meta name="description" content='Maloja is a self-hosted music scrobble server.' />
<link rel="icon" type="image/x-icon" href="/favicon.ico" />

<meta name="color-scheme" content="dark" />
<meta name="darkreader" content="wat" />
@ -23,24 +22,13 @@
<script src="/neopolitan.js"></script>
<script src="/upload.js"></script>
<script src="/notifications.js"></script>
<script src="/edit.js"></script>
<script>
const defaultpicks = {
topartists: '{{ settings["DEFAULT_RANGE_STARTPAGE"] }}',
toptracks: '{{ settings["DEFAULT_RANGE_STARTPAGE"] }}',
topalbums: '{{ settings["DEFAULT_RANGE_STARTPAGE"] }}',
pulse: '{{ settings["DEFAULT_RANGE_STARTPAGE"] }}',
pulseperformancecombined: '{{ settings["DEFAULT_RANGE_STARTPAGE"] }}',
featured: 'artist'
}
</script>

<link rel="preload" href="/static/ttf/Ubuntu-Regular.ttf" as="font" type="font/woff2" crossorigin />

{% block scripts %}{% endblock %}
</head>

<body class="{% block custombodyclasses %}{% endblock %}">
<body>

{% block content %}

@ -67,17 +55,22 @@

<div id="footer">
<div id="left-side">
<a href="/about">About</a>
</div>
<div id="notch">
<a href="/"><img style="display:block;" src="/favicon.png" /></a>
</div>
<div id="right-side">
<span><input id="searchinput" placeholder="Search for an album, artist or track..." oninput="search(this)" onblur="clearresults()" /></span>
</div>

<div class="footer">
<div>
<!--<span>Get your own charts on
<a target="_blank" rel="noopener noreferrer" href="https://github.com/krateng/maloja">GitHub</a>,
<a target="_blank" rel="noopener noreferrer" href="https://pypi.org/project/malojaserver/">PyPI</a> or
<a target="_blank" rel="noopener noreferrer" href="https://hub.docker.com/r/krateng/maloja">Dockerhub</a>
</span>-->
<span><a href="/about">About</a></span>
</div>
<div>
<a href="/"><span style="font-weight:bold;">Maloja {% if settings["DEV_MODE"] %}[Developer Mode]{% endif %}</span></a>
</div>
<div>
<span><input id="searchinput" placeholder="Search for an artist or track..." oninput="search(this)" onblur="clearresults()" /></span>
</div>

<div id="resultwrap" class="hide">
<div class="searchresults">
@ -86,11 +79,7 @@
</table>
<br/><br/>
<span>Tracks</span>
<table class="searchresults_tracks searchresults_extrainfo" id="searchresults_tracks">
</table>
<br/><br/>
<span>Albums</span>
<table class="searchresults_albums searchresults_extrainfo" id="searchresults_albums">
<table class="searchresults_tracks" id="searchresults_tracks">
</table>
</div>
</div>
@ -107,10 +96,5 @@
</div>

<!-- Load script as late as possible so DOM renders first -->
<script src="/lazyload17-8-2.min.js"></script>
<script>
var lazyLoadInstance = new LazyLoad({});
</script>
</body>
</html>
@ -1,14 +0,0 @@
{% set page ='admin_albumless' %}
{% extends "abstracts/admin.jinja" %}
{% block title %}Maloja - Albumless Tracks{% endblock %}

{% block maincontent %}
Here you can find tracks that currently have no album.<br/><br/>

{% with list = dbc.get_tracks_without_album() %}
You have {{list|length}} tracks with no album.<br/><br/>

{% include 'partials/list_tracks.jinja' %}
{% endwith %}

{% endblock %}
@ -15,7 +15,7 @@

var xhttp = new XMLHttpRequest();
xhttp.open("POST","/apis/mlj_1/newrule?", true);
xhttp.open("POST","/api/newrule?", true);
xhttp.send(keys);
e = arguments[0];
line = e.parentNode;
@ -25,7 +25,7 @@
function fullrebuild() {

var xhttp = new XMLHttpRequest();
xhttp.open("POST","/apis/mlj_1/rebuild", true);
xhttp.open("POST","/api/rebuild", true);
xhttp.send();
window.location = "/wait";
@ -4,14 +4,6 @@

{% block scripts %}
<script src="/manualscrobble.js"></script>
<style>
.tooltip {
cursor: help;
}
.tooltip:hover {
text-decoration: underline dotted;
}
</style>
{% endblock %}

@ -24,7 +16,7 @@
<td style="padding-right:7px;">
Artists:
</td><td id="artists_td">
<input placeholder='Separate with Enter' class='simpleinput' id='artists' onKeydown='keyDetect(event)' onblur='addEnteredArtist()' />
<input placeholder='Separate with Enter' class='simpleinput' id='artists' onKeydown='keyDetect(event)' />
</td>
</tr>
<tr>
@ -34,49 +26,14 @@
<input placeholder='Enter to scrobble' class='simpleinput' id='title' onKeydown='scrobbleIfEnter(event)' />
</td>
</tr>
<tr>
<td style="padding-right:7px;">
Album artists (Optional):
</td><td id="albumartists_td">
<input placeholder='Separate with Enter' class='simpleinput' id='albumartists' onKeydown='keyDetect2(event)' onblur='addEnteredAlbumartist()' />
</td>
</tr>
<tr>
<td style="padding-right:7px;">
Album (Optional):
</td><td>
<input placeholder='Enter to scrobble' class='simpleinput' id='album' onKeydown='scrobbleIfEnter(event)' />
</td>
</tr>
<tr>
<td>
<input type="checkbox" id="use_custom_time" />
Custom Time:
</td>
<td>
<input id="scrobble_datetime" type="datetime-local">
</td>
</tr>

</table>

<script>
const now = new Date();
const localDateTime = new Date(now.getTime() - now.getTimezoneOffset() * 60000).toISOString().slice(0, 16);
document.getElementById("scrobble_datetime").value = localDateTime;
</script>

<br/>

<input type="checkbox" id="use_track_artists_for_album" checked='true' />
<span class="tooltip" title="If this is unchecked, specifying no album artists will result in a compilation album ('Various Artists')">Use track artists as album artists fallback</span>

<br/><br/>

<button type="button" onclick="scrobbleNew(event)">Scrobble!</button>
<button type="button" onclick="repeatLast()">↻</button>

<br/>
@ -67,9 +67,9 @@
<li>manually scrobble from track pages</li>
<li>delete scrobbles</li>
<li>reparse scrobbles</li>
<li>edit tracks, albums and artists</li>
<li>merge tracks, albums and artists</li>
<li>upload artist, album and track art by dropping a file on the existing image on an artist or track page</li>
<li>edit tracks and artists</li>
<li>merge tracks and artists</li>
<li>upload artist and track art by dropping a file on the existing image on an artist or track page</li>
<li>see more detailed error pages</li>
</ul>
@ -24,7 +24,7 @@
keys = "filename=" + encodeURIComponent(filename);
console.log(keys);
var xhttp = new XMLHttpRequest();
xhttp.open("POST","/apis/mlj_1/importrules", true);
xhttp.open("POST","/api/importrules", true);
xhttp.send(keys);

e.innerHTML = e.innerHTML.replace("Add","Remove");
@ -36,7 +36,7 @@
keys = "remove&filename=" + encodeURIComponent(filename);

var xhttp = new XMLHttpRequest();
xhttp.open("POST","/apis/mlj_1/importrules", true);
xhttp.open("POST","/api/importrules", true);
xhttp.send(keys);

e.innerHTML = e.innerHTML.replace("Remove","Add");
@ -56,7 +56,7 @@

If you use a Chromium-based browser and listen to music on Plex, Spotify, Soundcloud, Bandcamp or YouTube Music, download the extension and simply enter the server URL as well as your API key in the relevant fields. They will turn green if the server is accessible.
<br/><br/>
You can also use any standard-compliant scrobbler. For GNUFM (audioscrobbler) scrobblers, enter <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/audioscrobbler</span> as your Gnukebox server and your API key as the password. For Listenbrainz scrobblers, use <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/listenbrainz</span> as the API URL (depending on the implementation, you might need to add a <span class="stats">/1</span> at the end) and your API key as token.
You can also use any standard-compliant scrobbler. For GNUFM (audioscrobbler) scrobblers, enter <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/audioscrobbler</span> as your Gnukebox server and your API key as the password. For Listenbrainz scrobblers, use <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/listenbrainz</span> as the API URL and your API key as token.
<br/><br/>
If you use another browser or another music player, you could try to code your own extension. The API is super simple! Just send a POST HTTP request to
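(The endpoint itself is cut off at the end of this hunk. Purely for illustration, assuming the native endpoint is the mlj_1 API's /apis/mlj_1/newscrobble, which matches the /apis/mlj_1/ paths used elsewhere in this diff, and assuming the field names below, such a request could look like:)

import json
import urllib.request

payload = json.dumps({
    "key": "your-api-key",       # the API key generated during setup
    "artists": ["Artist A"],     # assumed field name, for illustration
    "title": "Track Title",      # assumed field name, for illustration
}).encode("utf-8")

req = urllib.request.Request(
    "https://yourserver.tld/apis/mlj_1/newscrobble",  # assumed endpoint
    data=payload,
    headers={"Content-Type": "application/json"},
)
urllib.request.urlopen(req)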
||||
@ -90,7 +90,7 @@
|
||||
|
||||
<h2>Set up some rules</h2>
|
||||
|
||||
You can add some rules in your server's "rules" directory - just add your own .tsv file and read the instructions on how to declare a rule.
|
||||
After you've scrobbled for a bit, you might want to check the <a class="textlink" href="/admin_issues">Issues page</a> to see if you need to set up some rules. You can also manually add rules in your server's "rules" directory - just add your own .tsv file and read the instructions on how to declare a rule.
|
||||
<br/><br/>
|
||||
|
||||
You can also set up some predefined rulesets right away!
|
||||
@ -123,7 +123,7 @@
|
||||
<h2>Say thanks</h2>
|
||||
|
||||
Donations are never required, but always appreciated. If you really like Maloja, you can fund my next Buttergipfel via
|
||||
<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://paypal.me/krateng">PayPal</a> or <a class="textlink" href="bitcoin:1krat8JMniJBTiHftMfR1LtF3Y1w5DAxx">Bitcoin</a>.
|
||||
<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://paypal.me/krateng">PayPal</a>, <a class="textlink" href="bitcoin:1krat8JMniJBTiHftMfR1LtF3Y1w5DAxx">Bitcoin</a> or <a class="textlink" target="_blank" rel="noopener noreferrer" href="https://flattr.com/@Krateng">Flattr</a>.
|
||||
|
||||
<br/><br/>
|
||||
|
||||
|
@ -1,144 +0,0 @@
{% extends "abstracts/base.jinja" %}
{% block title %}Maloja - {{ info.album.albumtitle }}{% endblock %}

{% import 'snippets/links.jinja' as links %}

{% block scripts %}
<script src="/statselect.js"></script>
{% endblock %}

{% set album = filterkeys.album %}
{% set info = dbc.album_info({'album':album}) %}

{% set initialrange ='month' %}

{% set encodedalbum = mlj_uri.uriencode({'album':album}) %}

{% block custombodyclasses %}
{% if info.certification %}certified certified_{{ info.certification }}{% endif %}
{% endblock %}

{% block icon_bar %}
{% if adminmode %}
{% include 'icons/edit.jinja' %}

<div class="iconsubset mergeicons" data-entity_type="album" data-entity_id="{{ info.id }}" data-entity_name="{{ info.album.albumtitle }}">
{% include 'icons/merge.jinja' %}
{% include 'icons/merge_mark.jinja' %}
{% include 'icons/merge_unmark.jinja' %}
{% include 'icons/merge_cancel.jinja' %}
</div>

<div class="iconsubset associateicons" data-entity_type="album" data-entity_id="{{ info.id }}" data-entity_name="{{ info.album.albumtitle }}">
{% include 'icons/add_album.jinja' %}
<!-- no remove album since that is not a specified association - every track only has one album, so the removal should
be handled on the track page (or for now, not at all) -->
{% include 'icons/association_mark.jinja' %}
{% include 'icons/association_unmark.jinja' %}
{% include 'icons/association_cancel.jinja' %}
</div>

{% endif %}
{% endblock %}

{% block content %}

<script>
const entity_id = {{ info.id }};
const entity_type = 'album';
const entity_name = {{ album.albumtitle | tojson }};
</script>

{% import 'partials/awards_album.jinja' as awards %}

{% include 'partials/info_album.jinja' %}

<h2><a href='{{ mlj_uri.create_uri("/charts_tracks",filterkeys) }}'>Top Tracks</a></h2>

{% with amountkeys={"perpage":16,"page":0} %}
{% include 'partials/charts_tracks.jinja' %}
{% endwith %}

<br/>

<table class="twopart">

<tr>
<td>
<h2 class="headerwithextra"><a href='{{ mlj_uri.create_uri("/pulse",filterkeys) }}'>Pulse</a></h2>
<br/>
{% for r in xranges %}
<span
onclick="showStatsManual('pulseperformancecombined','{{ r.identifier }}')"
class="stat_selector_pulseperformancecombined selector_pulseperformancecombined_{{ r.identifier }}"
style="{{ 'opacity:0.5;' if initialrange==r.identifier else '' }}">
{{ r.localisation }}
</span>
{% if not loop.last %}|{% endif %}
{% endfor %}

<br/><br/>

{% for r in xranges %}

<span
class="stat_module_pulseperformancecombined pulseperformancecombined_{{ r.identifier }}"
style="{{ 'display:none;' if initialrange!=r.identifier else '' }}"
>

{% with limitkeys={"since":r.firstrange},delimitkeys={'step':r.identifier,'trail':1} %}
{% include 'partials/pulse.jinja' %}
{% endwith %}
</span>

{% endfor %}
</td>
<td>
<!-- We use the same classes / function calls here because we want it to switch together with pulse -->
<h2 class="headerwithextra"><a href='{{ mlj_uri.create_uri("/performance",filterkeys) }}'>Performance</a></h2>
<br/>
{% for r in xranges %}
<span
onclick="showStatsManual('pulseperformancecombined','{{ r.identifier }}')"
class="stat_selector_pulseperformancecombined selector_pulseperformancecombined_{{ r.identifier }}"
style="{{ 'opacity:0.5;' if initialrange==r.identifier else '' }}">
{{ r.localisation }}
</span>
{% if not loop.last %}|{% endif %}
{% endfor %}

<br/><br/>

{% for r in xranges %}

<span
class="stat_module_pulseperformancecombined pulseperformancecombined_{{ r.identifier }}"
style="{{ 'display:none;' if initialrange!=r.identifier else '' }}"
>

{% with limitkeys={"since":r.firstrange},delimitkeys={'step':r.identifier,'trail':1} %}
{% include 'partials/performance.jinja' %}
{% endwith %}
</span>

{% endfor %}

</td>
</tr>
</table>

<h2><a href='{{ mlj_uri.create_uri("/scrobbles",filterkeys) }}'>Last Scrobbles</a></h2>

{% with amountkeys = {"perpage":16,"page":0} %}
{% include 'partials/scrobbles.jinja' %}
{% endwith %}

{% endblock %}
@ -5,11 +5,12 @@
{% import 'partials/awards_artist.jinja' as awards %}

{% block scripts %}
<script src="/statselect.js"></script>
<script src="/rangeselect.js"></script>
<script src="/edit.js"></script>
{% endblock %}

{% set artist = filterkeys.artist %}
{% set info = dbc.artist_info({'artist':artist}) %}
{% set info = db.artist_info(artist=artist) %}

{% set credited = info.get('replace') %}
{% set included = info.get('associated') %}
@ -26,27 +27,13 @@

{% set encodedartist = mlj_uri.uriencode({'artist':artist}) %}

{% block custombodyclasses %}
{% if info.certification %}certified certified_{{ info.certification }}{% endif %}
{% endblock %}

{% block icon_bar %}
{% if adminmode %}
{% include 'icons/edit.jinja' %}

<div class="iconsubset mergeicons" data-entity_type="artist" data-entity_id="{{ info.id }}" data-entity_name="{{ info.artist }}">
{% include 'icons/merge.jinja' %}
{% include 'icons/merge_mark.jinja' %}
{% include 'icons/merge_unmark.jinja' %}
{% include 'icons/merge_cancel.jinja' %}
</div>

<div class="iconsubset associateicons" data-entity_type="artist" data-entity_id="{{ info.id }}" data-entity_name="{{ info.artist }}">
{% include 'icons/add_artist.jinja' %}
{% include 'icons/remove_artist.jinja' %}
{% include 'icons/association_cancel.jinja' %}
</div>

<script>showValidMergeIcons();</script>
{% endif %}
{% endblock %}

@ -60,36 +47,56 @@

{% include 'partials/info_artist.jinja' %}
<table class="top_info">
<tr>
<td class="image">
{% if adminmode %}
<div
class="changeable-image" data-uploader="b64=>upload('{{ encodedartist }}',b64)"
style="background-image:url('{{ images.get_artist_image(artist) }}');"
title="Drag & Drop to upload new image"
></div>
{% else %}
<div style="background-image:url('{{ images.get_artist_image(artist) }}');">
</div>
{% endif %}
</td>
<td class="text">
<h1 id="main_entity_name" class="headerwithextra">{{ info.artist }}</h1>
{% if competes %}<span class="rank"><a href="/charts_artists?max=100">#{{ info.position }}</a></span>{% endif %}
<br/>
{% if competes and included %}
<span>associated: {{ links.links(included) }}</span>
{% elif not competes %}
<span>Competing under {{ links.link(credited) }} (#{{ info.position }})</span>
{% endif %}

<p class="stats">
<a href="{{ mlj_uri.create_uri("/scrobbles",filterkeys) }}">{{ info['scrobbles'] }} Scrobbles</a>
</p>

{% set albums_info = dbc.get_albums_artist_appears_on(filterkeys,limitkeys) %}
{% set ownalbums = albums_info.own_albums %}
{% set otheralbums = albums_info.appears_on %}

{% if ownalbums or otheralbums %}

{% if settings['ALBUM_SHOWCASE'] %}
<h2><a href='{{ mlj_uri.create_uri("/charts_albums",filterkeys) }}'>Albums</a></h2>
{% include 'partials/album_showcase.jinja' %}
{% else %}
<h2><a href='{{ mlj_uri.create_uri("/charts_albums",filterkeys) }}'>Top Albums</a></h2>
{% if competes %}
{{ awards.medals(info) }}
{{ awards.topweeks(info) }}
{% endif %}
{{ awards.certs(artist) }}

{% with amountkeys={"perpage":16,"page":0} %}
{% include 'partials/charts_albums.jinja' %}
{% endwith %}
{% endif %}

{% endif %}
</td>
</tr>
</table>

{% if info['scrobbles']>0 %}
<h2><a href='{{ mlj_uri.create_uri("/charts_tracks",filterkeys) }}'>Top Tracks</a></h2>

{% with amountkeys={"perpage":16,"page":0} %}

{% with amountkeys={"perpage":15,"page":0} %}
{% include 'partials/charts_tracks.jinja' %}
{% endwith %}

<br/>

<table class="twopart">
@ -100,8 +107,8 @@
<br/>
{% for r in xranges %}
<span
onclick="showStatsManual('pulseperformancecombined','{{ r.identifier }}')"
class="stat_selector_pulseperformancecombined selector_pulseperformancecombined_{{ r.identifier }}"
onclick="showRangeManual('pulse','{{ r.identifier }}')"
class="stat_selector_pulse selector_pulse_{{ r.identifier }}"
style="{{ 'opacity:0.5;' if initialrange==r.identifier else '' }}">
{{ r.localisation }}
</span>
@ -113,7 +120,7 @@
{% for r in xranges %}

<span
class="stat_module_pulseperformancecombined pulseperformancecombined_{{ r.identifier }}"
class="stat_module_pulse pulse_{{ r.identifier }}"
style="{{ 'display:none;' if initialrange!=r.identifier else '' }}"
>

@ -133,8 +140,8 @@

{% for r in xranges %}
<span
onclick="showStatsManual('pulseperformancecombined','{{ r.identifier }}')"
class="stat_selector_pulseperformancecombined selector_pulseperformancecombined_{{ r.identifier }}"
onclick="showRangeManual('pulse','{{ r.identifier }}')"
class="stat_selector_pulse selector_pulse_{{ r.identifier }}"
style="{{ 'opacity:0.5;' if initialrange==r.identifier else '' }}">
{{ r.localisation }}
</span>
@ -146,7 +153,7 @@
{% for r in xranges %}

<span
class="stat_module_pulseperformancecombined pulseperformancecombined_{{ r.identifier }}"
class="stat_module_pulse pulse_{{ r.identifier }}"
style="{{ 'display:none;' if initialrange!=r.identifier else '' }}"
>

@ -164,9 +171,8 @@

<h2><a href='{{ mlj_uri.create_uri("/scrobbles",filterkeys) }}'>Last Scrobbles</a></h2>

{% with amountkeys = {"perpage":16,"page":0} %}
{% with amountkeys = {"perpage":15,"page":0} %}
{% include 'partials/scrobbles.jinja' %}
{% endwith %}
{% endif %}

{% endblock %}
@ -1,52 +0,0 @@
{% extends "abstracts/base.jinja" %}
{% block title %}Maloja - Album Charts{% endblock %}

{% import 'snippets/links.jinja' as links %}
{% import 'snippets/filterdescription.jinja' as filterdesc %}

{% block scripts %}
<script src="/datechange.js" async></script>
{% endblock %}

{% set charts = dbc.get_charts_albums(filterkeys,limitkeys,{'only_own_albums':False}) %}
{% set pages = math.ceil(charts.__len__() / amountkeys.perpage) %}
{% if charts[0] is defined %}
{% set topalbum = charts[0].album %}
{% set img = images.get_album_image(topalbum) %}
{% else %}
{% set img = "/favicon.png" %}
{% endif %}

{% block content %}

<table class="top_info">
<tr>
<td class="image">
<div style="background-image:url('{{ img }}')"></div>
</td>
<td class="text">
<h1>Album Charts</h1><a href="/top_albums"><span>View #1 Albums</span></a><br/>
{{ filterdesc.desc(filterkeys,limitkeys) }}
<br/><br/>
{% with delimitkeys = {} %}
{% include 'snippets/timeselection.jinja' %}
{% endwith %}

</td>
</tr>
</table>

{% if settings['CHARTS_DISPLAY_TILES'] %}
{% include 'partials/charts_albums_tiles.jinja' %}
<br/><br/>
{% endif %}

{% with compare=true %}
{% include 'partials/charts_albums.jinja' %}
{% endwith %}

{% import 'snippets/pagination.jinja' as pagination %}
{{ pagination.pagination(filterkeys,limitkeys,delimitkeys,amountkeys,pages) }}

{% endblock %}
@ -1,16 +1,11 @@
{% extends "abstracts/base.jinja" %}
{% block title %}Maloja - Artist Charts{% endblock %}

{% import 'snippets/filterdescription.jinja' as filterdesc %}

{% block scripts %}
<script src="/datechange.js" async></script>
{% endblock %}

{% set charts = dbc.get_charts_artists(filterkeys,limitkeys,specialkeys) %}

{% set charts = dbc.get_charts_artists(filterkeys,limitkeys) %}
{% set pages = math.ceil(charts.__len__() / amountkeys.perpage) %}
{% if charts[0] is defined %}
{% set topartist = charts[0].artist %}
@ -30,9 +25,9 @@
</td>
<td class="text">
<h1>Artist Charts</h1><a href="/top_artists"><span>View #1 Artists</span></a><br/>
{{ filterdesc.desc(filterkeys,limitkeys) }}
<span>{{ limitkeys.timerange.desc(prefix=True) }}</span>
<br/><br/>
{% with delimitkeys = {}, artistchart=True %}
{% with delimitkeys = {} %}
{% include 'snippets/timeselection.jinja' %}
{% endwith %}
@ -2,7 +2,6 @@
{% block title %}Maloja - Track Charts{% endblock %}

{% import 'snippets/links.jinja' as links %}
{% import 'snippets/filterdescription.jinja' as filterdesc %}

{% block scripts %}
<script src="/datechange.js" async></script>
@ -27,7 +26,8 @@
</td>
<td class="text">
<h1>Track Charts</h1><a href="/top_tracks"><span>View #1 Tracks</span></a><br/>
{{ filterdesc.desc(filterkeys,limitkeys) }}
{% if filterkeys.get('artist') is not none %}by {{ links.link(filterkeys.get('artist')) }}{% endif %}
<span>{{ limitkeys.timerange.desc(prefix=True) }}</span>
<br/><br/>
{% with delimitkeys = {} %}
{% include 'snippets/timeselection.jinja' %}
Some files were not shown because too many files have changed in this diff.