Mirror of https://github.com/krateng/maloja.git (synced 2025-04-20 18:47:37 +03:00)

Compare commits
116 commits, by SHA1:

3ba27ffc37, 300e2c1ff7, 5c343053d9, 7dab61e420, 5296960d68, e060241acb,
c571ffbf07, 767a6bca26, ffed0c29b0, ca65813619, 5926dc3307, 811bc16a3f,
a4ec29dd4c, a8293063a5, 126d155208, 7f774f03c4, cc64c894f0, 76c013e130,
9a6c51a36d, e3a578da2f, a851e36485, 0e928b4007, 63386b5ede, 9d21800eb9,
5a95d4e056, 968bea14d9, 5e62ccc254, 273713cdc4, f8b10ab68c, 922eae7b68,
cf0a856040, 26f26f36cb, 1462883ab5, a0b83be095, 2750241e61, a7dcd3df8a,
c6cf28896c, 9efdf90312, b35bfdc2e4, 88bf6d2337, 2c2d13e39c, 152e3948a1,
33ed2abdea, 915808a020, 163746c06e, 738f42d49f, f4a5c2fb3d, a99831d453,
efd7838b02, a816147e2e, ac5c58c919, c648b25d28, ed34992d8b, 386f3c4a41,
fbe10930a2, a95b2420b2, 16b977d874, 5ec8035cb5, 259e3b06bb, c75bd4fcc3,
a4ae92e642, a7dcf6d41d, 6b2f1892f8, f1c86973c9, b725c98fa5, 1f1a65840c,
436b40821a, d160078def, ea6e27de5c, 472281230c, 966739e677, 4c487232c0,
20d8a109d6, 1ce3119dda, 8e06c34323, fd4c99f888, f7a9df7446, 7d6753042f,
7ec5e88bc4, 3ff92759fb, 7ccde9cf91, bda134a7f7, febaff9722, 048dac3186,
4a02ee2ba5, 0d4e8dbc58, 12064f6d99, be6b796b20, 7dbd704c5d, 96558176e9,
04a8b0a231, 44f58e31bc, 215ff622bb, dd6bec478e, b7afe3b017, e328646104,
f244385e40, 63b8a49993, 6bb81ce589, 5495d6e38d, d0a20fecb2, 76691a5b0f,
139de02d9a, 7910ee3b6b, 6a3fd46219, 2a32ab38e1, d5f73dd497, 02a848c747,
504450fd84, dc05184552, 4389605907, de46335eba, 6c2c89c205, 54ba1bd5c6,
1a61f5d58f, f375ba7a31
.github/FUNDING.yml (vendored), 1 line added:

@@ -1 +1,2 @@
 custom: ["https://paypal.me/krateng"]
+patreon: krateng
.github/workflows/docker.yml (vendored), 3 lines changed:

@@ -4,6 +4,7 @@ on:
   push:
     tags:
       - 'v*'
+      - 'runaction-docker'
 
 jobs:
   push_to_registry:
@@ -54,7 +55,7 @@ jobs:
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          platforms: linux/amd64,linux/arm64 #,linux/arm/v7 #build this ourselves
+          platforms: linux/amd64,linux/arm64 #,linux/arm/v7 #build this ourselves GH: #229
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
.github/workflows/pypi.yml (vendored), 12 lines changed:

@@ -4,17 +4,20 @@ on:
   push:
     tags:
       - 'v*'
+      - 'runaction-pypi'
 
 jobs:
   publish_to_pypi:
     name: Push Package to PyPI
     runs-on: ubuntu-latest
+    permissions:
+      id-token: write
     steps:
       - name: Check out the repo
-        uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
 
       - name: Set up Python
-        uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a
+        uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
         with:
           python-version: '3.x'
 
@@ -25,7 +28,4 @@ jobs:
         run: python -m build
 
       - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@717ba43cfbb0387f6ce311b169a825772f54d295
-        with:
-          user: __token__
-          password: ${{ secrets.PYPI_API_TOKEN }}
+        uses: pypa/gh-action-pypi-publish@67339c736fd9354cd4f8cb0b744f2b82a74b5c70
.gitignore (vendored), 1 line removed:

@@ -3,7 +3,6 @@
 
 # environments / builds
 .venv/*
 testdata*
 /dist
 /build
 /*.egg-info
APKBUILD, 36 lines removed (file deleted):

@@ -1,36 +0,0 @@
-# Contributor: Johannes Krattenmacher <maloja@dev.krateng.ch>
-# Maintainer: Johannes Krattenmacher <maloja@dev.krateng.ch>
-pkgname=maloja
-pkgver=3.0.0-dev
-pkgrel=0
-pkgdesc="Self-hosted music scrobble database"
-url="https://github.com/krateng/maloja"
-arch="noarch"
-license="GPL-3.0"
-depends="python3 tzdata"
-pkgusers=$pkgname
-pkggroups=$pkgname
-depends_dev="gcc g++ python3-dev libxml2-dev libxslt-dev libffi-dev libc-dev py3-pip linux-headers"
-makedepends="$depends_dev"
-source="
-	$pkgname-$pkgver.tar.gz::https://github.com/krateng/maloja/archive/refs/tags/v$pkgver.tar.gz
-"
-builddir="$srcdir"/$pkgname-$pkgver
-
-
-build() {
-	cd $builddir
-	python3 -m build .
-	pip3 install dist/*.tar.gz
-}
-
-package() {
-	mkdir -p /etc/$pkgname || return 1
-	mkdir -p /var/lib/$pkgname || return 1
-	mkdir -p /var/cache/$pkgname || return 1
-	mkdir -p /var/logs/$pkgname || return 1
-}
-
-# TODO
-sha512sums="a674eaaaa248fc2b315514d79f9a7a0bac6aa1582fe29554d9176e8b551e8aa3aa75abeebdd7713e9e98cc987e7bd57dc7a5e9a2fb85af98b9c18cb54de47bf7 $pkgname-${pkgver}.tar.gz"
@@ -1,4 +1,4 @@
-FROM lsiobase/alpine:3.17 as base
+FROM lsiobase/alpine:3.21 AS base
 
 WORKDIR /usr/src/app
 
@@ -29,16 +29,19 @@ RUN \
 	apk add --no-cache \
 		python3 \
 		py3-lxml \
 		libmagic \
 		tzdata && \
 	echo "" && \
 	echo "**** install pip dependencies ****" && \
+	python3 -m venv /venv && \
+	. /venv/bin/activate && \
+	python3 -m ensurepip && \
-	pip3 install -U --no-cache-dir \
+	pip install -U --no-cache-dir \
 		pip \
 		wheel && \
 	echo "" && \
 	echo "**** install maloja requirements ****" && \
-	pip3 install --no-cache-dir -r requirements.txt && \
+	pip install --no-cache-dir -r requirements.txt && \
 	echo "" && \
 	echo "**** cleanup ****" && \
 	apk del --purge \
@@ -56,6 +59,8 @@ RUN \
 	echo "**** install maloja ****" && \
 	apk add --no-cache --virtual=install-deps \
 		py3-pip && \
+	python3 -m venv /venv && \
+	. /venv/bin/activate && \
 	pip3 install /usr/src/app && \
 	apk del --purge \
 		install-deps && \
@@ -70,6 +75,7 @@ COPY container/root/ /
 ENV \
 	# Docker-specific configuration
 	MALOJA_SKIP_SETUP=yes \
+	MALOJA_CONTAINER=yes \
 	PYTHONUNBUFFERED=1 \
 	# Prevents breaking change for previous container that ran maloja as root
 	# On linux hosts (non-podman rootless) these variables should be set to the
@@ -9,49 +9,14 @@ Clone the repository and enter it.
 
 ## Environment
 
-To avoid cluttering your system, consider using a [virtual environment](https://docs.python.org/3/tutorial/venv.html).
-
-Your system needs several packages installed. For supported distributions, this can be done with e.g.
-
-```console
-sh ./install/install_dependencies_alpine.sh
-```
-
-For other distros, try to find the equivalents of the packages listed or simply check your error output.
-
-Then install all Python dependencies with
-
-```console
-pip install -r requirements.txt
-```
+To avoid cluttering your system, consider using a [virtual environment](https://docs.python.org/3/tutorial/venv.html), or better yet run the included `docker-compose.yml` file.
+Your IDE should let you run the file directly; otherwise you can execute `docker compose -f dev/docker-compose.yml -p maloja up --force-recreate --build`.
 
 ## Running the server
 
-For development, you might not want to install maloja files all over your filesystem. Use the environment variable `MALOJA_DATA_DIRECTORY` to force all user files into one central directory - this way, you can also quickly change between multiple configurations.
+Use the environment variable `MALOJA_DATA_DIRECTORY` to force all user files into one central directory - this way, you can also quickly change between multiple configurations.
 
 You can quickly run the server with all your local changes with
 
 ```console
 python3 -m maloja run
 ```
 
 You can also build the package with
 
 ```console
 pip install .
 ```
 
-## Docker
-
-You can also always build and run the server with
-
-```console
-sh ./dev/run_docker.sh
-```
-
-This will use the directory `testdata`.
-
 ## Further help
README.md, 62 lines changed:

@@ -40,15 +40,8 @@ You can check [my own Maloja page](https://maloja.krateng.ch) as an example instance.
 
 ## How to install
 
-### Requirements
-
-Maloja should run on any x86 or ARM machine that runs Python.
-
-It is highly recommended to use **Docker** or **Podman**.
-
-Your CPU should have a single core passmark score of at the very least 1500. 500 MB RAM should give you a decent experience, but performance will benefit greatly from up to 2 GB.
-
-### Docker / Podman
+To avoid issues with version / dependency mismatches, Maloja should only be used in **Docker** or **Podman**, not on bare metal.
+I cannot offer any help for bare metal installations (but using venv should help).
 
 Pull the [latest image](https://hub.docker.com/r/krateng/maloja) or check out the repository and use the included Containerfile.
 
@@ -67,11 +60,7 @@ An example of a minimum run configuration to access maloja via `localhost:42010`:
 docker run -p 42010:42010 -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
 ```
 
-#### Linux Host
-
-**NOTE:** If you are using [rootless containers with Podman](https://developers.redhat.com/blog/2020/09/25/rootless-containers-with-podman-the-basics#why_podman_) this DOES NOT apply to you.
-
-If you are running Docker on a **Linux Host** you should specify `user:group` ids of the user who owns the folder on the host machine bound to `MALOJA_DATA_DIRECTORY` in order to avoid [docker file permission problems.](https://ikriv.com/blog/?p=4698) These can be specified using the [environmental variables **PUID** and **PGID**.](https://docs.linuxserver.io/general/understanding-puid-and-pgid)
+If you are using [rootless containers with Podman](https://developers.redhat.com/blog/2020/09/25/rootless-containers-with-podman-the-basics#why_podman_) the following DOES NOT apply to you, but if you are running **Docker** on a **Linux Host** you should specify `user:group` ids of the user who owns the folder on the host machine bound to `MALOJA_DATA_DIRECTORY` in order to avoid [docker file permission problems.](https://ikriv.com/blog/?p=4698) These can be specified using the [environmental variables **PUID** and **PGID**.](https://docs.linuxserver.io/general/understanding-puid-and-pgid)
 
 To get the UID and GID for the current user run these commands from a terminal:
 
@@ -84,33 +73,6 @@ The modified run command with these variables would look like:
 docker run -e PUID=1000 -e PGID=1001 -p 42010:42010 -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
 ```
 
-### PyPI
-
-You can install Maloja with
-
-```console
-pip install malojaserver
-```
-
-To make sure all dependencies are installed, you can also use one of the included scripts in the `install` folder.
-
-### From Source
-
-Clone this repository and enter the directory with
-
-```console
-git clone https://github.com/krateng/maloja
-cd maloja
-```
-
-Then install all the requirements and build the package, e.g.:
-
-```console
-sh ./install/install_dependencies_alpine.sh
-pip install -r requirements.txt
-pip install .
-```
-
 ### Extras
 
@@ -123,30 +85,18 @@ Then install all the requirements and build the package, e.g.:
 
 ### Basic control
 
-When not running in a container, you can run the application with `maloja run`. You can also run it in the background with
-`maloja start` and `maloja stop`, but this might not be supported in the future.
+When not running in a container, you can run the application with `maloja run`.
 
 ### Data
 
-If you would like to import your previous scrobbles, use the command `maloja import *filename*`. This works on:
+If you would like to import your previous scrobbles, copy them into the import folder in your data directory. This works on:
 
-* a Last.fm export generated by [benfoxall's website](https://benjaminbenben.com/lastfm-to-csv/) ([GitHub page](https://github.com/benfoxall/lastfm-to-csv))
+* a Last.fm export generated by [ghan64's website](https://lastfm.ghan.nl/export/)
 * an official [Spotify data export file](https://www.spotify.com/us/account/privacy/)
 * an official [ListenBrainz export file](https://listenbrainz.org/profile/export/)
 * the export of another Maloja instance
 
-⚠️ Never import your data while maloja is running. When you need to do import inside docker container start it in shell mode instead and perform import before starting the container as mentioned above.
-
-```console
-docker run -it --entrypoint sh -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
-cd /mljdata
-maloja import my_last_fm_export.csv
-```
-
 To backup your data, run `maloja backup`, optional with `--include_images`.
 
 ### Customization
 
 * Have a look at the [available settings](settings.md) and specifiy your choices in `/etc/maloja/settings.ini`. You can also set each of these settings as an environment variable with the prefix `MALOJA_` (e.g. `MALOJA_SKIP_SETUP`).
@@ -4,4 +4,4 @@
 
 echo -e "\nMaloja is starting!"
 exec \
-	s6-setuidgid abc python -m maloja run
+	s6-setuidgid abc /venv/bin/python -m maloja run
dev/clear_testdata.sh (new file), 3 lines:

@@ -0,0 +1,3 @@
+sudo rm -r ./testdata
+mkdir ./testdata
+chmod 777 ./testdata
dev/docker-compose.yml (new file), 13 lines:

@@ -0,0 +1,13 @@
+services:
+  maloja:
+    build:
+      context: ..
+      dockerfile: ./Containerfile
+    ports:
+      - "42010:42010"
+    volumes:
+      - "./testdata:/data"
+    environment:
+      - "MALOJA_DATA_DIRECTORY=/data"
+      - "PUID=1000"
+      - "PGID=1000"
@@ -1,21 +0,0 @@
-import toml
-import os
-
-with open("pyproject.toml") as filed:
-	data = toml.load(filed)
-
-info = {
-	'name':data['project']['name'],
-	'license':"GPLv3",
-	'version':data['project']['version'],
-	'architecture':'all',
-	'description':'"' + data['project']['description'] + '"',
-	'url':'"' + data['project']['urls']['homepage'] + '"',
-	'maintainer':f"\"{data['project']['authors'][0]['name']} <{data['project']['authors'][0]['email']}>\"",
-}
-
-
-for target in ["apk","deb"]:
-	lcmd = f"fpm {' '.join(f'--{key} {info[key]}' for key in info)} -s python -t {target} . "
-	print(lcmd)
-	os.system(lcmd)
@@ -1,6 +1,6 @@
 minor_release_name: "Nicole"
 3.2.0:
-  commit: "765f8b3401123ded9b8118fd00e26de2fe5ca981"
+  commit: "34d0a49eb8deae2fb95233289521bb817732c772"
   notes:
     - "[Architecture] Switched to linuxserver.io container base image"
     - "[Architecture] Reworked image handling"
@@ -18,3 +18,40 @@ minor_release_name: "Nicole"
     - "[Bugfix] Disabled DB maintenance while not running main server"
     - "[Bugfix] Removed some nonsensical ephemereal database entry creations"
     - "[Bugfix] Fixed API endpoint for track charts with no artist provided"
+    - "[Technical] Bumped Python and SQLAlchemy versions"
+    - "[Distribution] Removed build of arm/v7 image"
+3.2.1:
+  commit: "5495d6e38d95c0c2128e1de9a9553b55b6be945b"
+  notes:
+    - "[Feature] Added setting for custom week offset"
+    - "[Feature] Added Musicbrainz album art fetching"
+    - "[Bugfix] Fixed album entity rows being marked as track entity rows"
+    - "[Bugfix] Fixed scrobbling of tracks when all artists have been removed by server parsing"
+    - "[Bugfix] Fixed Spotify import of multiple files"
+    - "[Bugfix] Fixed process control on FreeBSD"
+    - "[Bugfix] Fixed Spotify authentication thread blocking the process from terminating"
+    - "[Technical] Upgraded all third party modules to use requests module and send User Agent"
+3.2.2:
+  commit: "febaff97228b37a192f2630aa331cac5e5c3e98e"
+  notes:
+    - "[Security] Fixed XSS vulnerability in error page (Disclosed by https://github.com/NULLYUKI)"
+    - "[Architecture] Reworked the default directory selection"
+    - "[Feature] Added option to show scrobbles on tile charts"
+    - "[Bugfix] Fixed Last.fm authentication"
+3.2.3:
+  commit: "a7dcd3df8a6b051a1f6d0b7d10cc5af83502445c"
+  notes:
+    - "[Architecture] Upgraded doreah, significant rework of authentication"
+    - "[Bugfix] Fixed initial permission check"
+    - "[Bugfix] Fixed and updated various texts"
+    - "[Bugfix] Fixed moving tracks to different album"
+3.2.4:
+  notes:
+    - "[Architecture] Removed daemonization capabilities"
+    - "[Architecture] Moved import to main server process"
+    - "[Feature] Implemented support for ghan's csv Last.fm export"
+    - "[Performance] Debounced search"
+    - "[Bugfix] Fixed stuck scrobbling from Navidrome"
+    - "[Bugfix] Fixed missing image mimetype"
+    - "[Technical] Pinned dependencies"
+    - "[Technical] Upgraded Python and Alpine"
@@ -1,2 +0,0 @@
-docker build -t maloja . -f Containerfile
-docker run --rm -p 42010:42010 -v $PWD/testdata:/mlj -e MALOJA_DATA_DIRECTORY=/mlj maloja
@@ -1,2 +0,0 @@
-podman build -t maloja . -f Containerfile
-podman run --rm -p 42010:42010 -v $PWD/testdata:/mlj -e MALOJA_DATA_DIRECTORY=/mlj maloja
@@ -1,36 +0,0 @@
-# Contributor: Johannes Krattenmacher <maloja@dev.krateng.ch>
-# Maintainer: Johannes Krattenmacher <maloja@dev.krateng.ch>
-pkgname={{ tool.flit.module.name }}
-pkgver={{ project.version }}
-pkgrel=0
-pkgdesc="{{ project.description }}"
-url="{{ project.urls.homepage }}"
-arch="noarch"
-license="GPL-3.0"
-depends="{{ tool.osreqs.alpine.run | join(' ') }}"
-pkgusers=$pkgname
-pkggroups=$pkgname
-depends_dev="{{ tool.osreqs.alpine.build | join(' ') }}"
-makedepends="$depends_dev"
-source="
-	$pkgname-$pkgver.tar.gz::{{ project.urls.repository }}/archive/refs/tags/v$pkgver.tar.gz
-"
-builddir="$srcdir"/$pkgname-$pkgver
-
-
-build() {
-	cd $builddir
-	python3 -m build .
-	pip3 install dist/*.tar.gz
-}
-
-package() {
-	mkdir -p /etc/$pkgname || return 1
-	mkdir -p /var/lib/$pkgname || return 1
-	mkdir -p /var/cache/$pkgname || return 1
-	mkdir -p /var/logs/$pkgname || return 1
-}
-
-# TODO
-sha512sums="a674eaaaa248fc2b315514d79f9a7a0bac6aa1582fe29554d9176e8b551e8aa3aa75abeebdd7713e9e98cc987e7bd57dc7a5e9a2fb85af98b9c18cb54de47bf7 $pkgname-${pkgver}.tar.gz"
@@ -1,40 +0,0 @@
-FROM alpine:3.15
-# Python image includes two Python versions, so use base Alpine
-
-# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>
-
-WORKDIR /usr/src/app
-
-# Install run dependencies first
-RUN apk add --no-cache {{ tool.osreqs.alpine.run | join(' ') }}
-
-# system pip could be removed after build, but apk then decides to also remove all its
-# python dependencies, even if they are explicitly installed as python packages
-# whut
-RUN \
-	apk add py3-pip && \
-	pip install wheel
-
-
-COPY ./requirements.txt ./requirements.txt
-
-RUN \
-	apk add --no-cache --virtual .build-deps {{ tool.osreqs.alpine.build | join(' ') }} && \
-	pip install --no-cache-dir -r requirements.txt && \
-	apk del .build-deps
-
-
-# no chance for caching below here
-
-COPY . .
-
-RUN pip install /usr/src/app
-
-# Docker-specific configuration
-# defaulting to IPv4 is no longer necessary (default host is dual stack)
-ENV MALOJA_SKIP_SETUP=yes
-ENV PYTHONUNBUFFERED=1
-
-EXPOSE 42010
-# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint
-ENTRYPOINT ["maloja", "run"]
@@ -1,4 +0,0 @@
-{% include 'install/install_dependencies_alpine.sh.jinja' %}
-apk add py3-pip
-pip install wheel
-pip install malojaserver
@@ -1,4 +0,0 @@
-{% include 'install/install_dependencies_debian.sh.jinja' %}
-apt install python3-pip
-pip install wheel
-pip install malojaserver
@@ -1,4 +0,0 @@
-#!/usr/bin/env sh
-apk update
-apk add \
-	{{ (tool.osreqs.alpine.build + tool.osreqs.alpine.run + tool.osreqs.alpine.opt) | join(' \\\n\t') }}
@@ -1,4 +0,0 @@
-#!/usr/bin/env sh
-apt update
-apt install \
-	{{ (tool.osreqs.debian.build + tool.osreqs.debian.run + tool.osreqs.debian.opt) | join(' \\\n\t') }}
@@ -1,17 +1,21 @@
+"""
+Create necessary files from sources of truth. Currently just the requirements.txt files.
+"""
+
 import toml
 import os
 import jinja2
 
 env = jinja2.Environment(
-	loader=jinja2.FileSystemLoader('dev/templates'),
+	loader=jinja2.FileSystemLoader('./templates'),
 	autoescape=jinja2.select_autoescape(['html', 'xml']),
 	keep_trailing_newline=True
 )
 
-with open("pyproject.toml") as filed:
+with open("../pyproject.toml") as filed:
 	data = toml.load(filed)
 
-templatedir = "./dev/templates"
+templatedir = "./templates"
 
 for root,dirs,files in os.walk(templatedir):
 
@@ -23,7 +27,7 @@ for root,dirs,files in os.walk(templatedir):
 		if not f.endswith('.jinja'): continue
 
 		srcfile = os.path.join(root,f)
-		trgfile = os.path.join(reldirpath,f.replace(".jinja",""))
+		trgfile = os.path.join("..", reldirpath,f.replace(".jinja",""))
 
 		template = env.get_template(relfilepath)
@@ -1,3 +1,7 @@
+"""
+Read the changelogs / version metadata and create all git tags
+"""
+
 import os
 import subprocess as sp
 import yaml
@@ -1,20 +0,0 @@
-#!/usr/bin/env sh
-apk update
-apk add \
-	gcc \
-	g++ \
-	python3-dev \
-	libxml2-dev \
-	libxslt-dev \
-	libffi-dev \
-	libc-dev \
-	py3-pip \
-	linux-headers \
-	python3 \
-	py3-lxml \
-	tzdata \
-	vips
-
-apk add py3-pip
-pip install wheel
-pip install malojaserver
@@ -1,9 +0,0 @@
-#!/usr/bin/env sh
-apt update
-apt install \
-	python3-pip \
-	python3
-
-apt install python3-pip
-pip install wheel
-pip install malojaserver
@@ -1,16 +0,0 @@
-#!/usr/bin/env sh
-apk update
-apk add \
-	gcc \
-	g++ \
-	python3-dev \
-	libxml2-dev \
-	libxslt-dev \
-	libffi-dev \
-	libc-dev \
-	py3-pip \
-	linux-headers \
-	python3 \
-	py3-lxml \
-	tzdata \
-	vips
@@ -1,15 +0,0 @@
-#!/usr/bin/env sh
-pacman -Syu
-pacman -S --needed \
-	gcc \
-	python3 \
-	libxml2 \
-	libxslt \
-	libffi \
-	glibc \
-	python-pip \
-	linux-headers \
-	python \
-	python-lxml \
-	tzdata \
-	libvips
@@ -1,5 +0,0 @@
-#!/usr/bin/env sh
-apt update
-apt install \
-	python3-pip \
-	python3
@@ -26,77 +26,6 @@ def print_header_info():
 	#print("#####")
 	print()
 
-def get_instance():
-	try:
-		return int(subprocess.check_output(["pgrep","-x","maloja"]))
-	except Exception:
-		return None
-
-def get_instance_supervisor():
-	try:
-		return int(subprocess.check_output(["pgrep","-x","maloja_supervisor"]))
-	except Exception:
-		return None
-
-def restart():
-	if stop():
-		start()
-	else:
-		print(col["red"]("Could not stop Maloja!"))
-
-def start():
-	if get_instance_supervisor() is not None:
-		print("Maloja is already running.")
-	else:
-		print_header_info()
-		setup()
-		try:
-			#p = subprocess.Popen(["python3","-m","maloja.server"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
-			sp = subprocess.Popen(["python3","-m","maloja","supervisor"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
-			print(col["green"]("Maloja started!"))
-
-			port = conf.malojaconfig["PORT"]
-
-			print("Visit your server address (Port " + str(port) + ") to see your web interface. Visit /admin_setup to get started.")
-			print("If you're installing this on your local machine, these links should get you there:")
-			print("\t" + col["blue"]("http://localhost:" + str(port)))
-			print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup"))
-			return True
-		except Exception:
-			print("Error while starting Maloja.")
-			return False
-
-def stop():
-
-	for attempt in [(signal.SIGTERM,2),(signal.SIGTERM,5),(signal.SIGKILL,3),(signal.SIGKILL,5)]:
-
-		pid_sv = get_instance_supervisor()
-		pid = get_instance()
-
-		if pid is None and pid_sv is None:
-			print("Maloja stopped!")
-			return True
-
-		if pid_sv is not None:
-			os.kill(pid_sv,attempt[0])
-		if pid is not None:
-			os.kill(pid,attempt[0])
-
-		time.sleep(attempt[1])
-
-	return False
-
-	print("Maloja stopped!")
-	return True
 
 def onlysetup():
 	print_header_info()
 	setup()
@@ -109,24 +38,6 @@ def run_server():
 	from . import server
 	server.run_server()
 
-def run_supervisor():
-	setproctitle("maloja_supervisor")
-	while True:
-		log("Maloja is not running, starting...",module="supervisor")
-		try:
-			process = subprocess.Popen(
-				["python3", "-m", "maloja","run"],
-				stdout=subprocess.DEVNULL,
-				stderr=subprocess.DEVNULL,
-			)
-		except Exception as e:
-			log("Error starting Maloja: " + str(e),module="supervisor")
-		else:
-			try:
-				process.wait()
-			except Exception as e:
-				log("Maloja crashed: " + str(e),module="supervisor")
 
 def debug():
 	os.environ["MALOJA_DEV_MODE"] = 'true'
 	conf.malojaconfig.load_environment()
@@ -135,30 +46,46 @@ def debug():
 
 def print_info():
 	print_header_info()
 	print(col['lightblue']("Configuration Directory:"),conf.dir_settings['config'])
-	print(col['lightblue']("Data Directory:         "),conf.dir_settings['state'])
+	print(col['lightblue']("State Directory:        "),conf.dir_settings['state'])
 	print(col['lightblue']("Log Directory:          "),conf.dir_settings['logs'])
 	print(col['lightblue']("Network:                "),f"Dual Stack, Port {conf.malojaconfig['port']}" if conf.malojaconfig['host'] == "*" else f"IPv{ip_address(conf.malojaconfig['host']).version}, Port {conf.malojaconfig['port']}")
 	print(col['lightblue']("Timezone:               "),f"UTC{conf.malojaconfig['timezone']:+d}")
 	print(col['lightblue']("Location Timezone:      "),conf.malojaconfig['location_timezone'])
 	print()
 	try:
 		from importlib.metadata import distribution
 		for pkg in ("sqlalchemy","waitress","bottle","doreah","jinja2"):
-			print(col['cyan'] (f"{pkg}:".ljust(13)),distribution(pkg).version)
-	except ImportError:
-		raise
+			print(col['cyan'](f"{pkg}:".ljust(13)),distribution(pkg).version)
+	except Exception:
+		print("Could not determine dependency versions.")
 	print()
+	try:
+		import platform
+		pyimpl = platform.python_implementation()
+		pyvers = '.'.join(platform.python_version_tuple())
+		print(col['magenta'](f"Python:".ljust(13)),pyimpl,pyvers)
+		osname = platform.system()
+		osvers = platform.release()
+		print(col['magenta'](f"OS:".ljust(13)),osname,osvers)
+		arch = platform.machine()
+		print(col['magenta'](f"Architecture:".ljust(13)),arch)
+	except Exception:
+		print("Could not determine system information.")
 
 
+def print_settings():
+	print_header_info()
+	maxlen = max(len(k) for k in conf.malojaconfig)
+	for k in conf.malojaconfig:
+		print(col['lightblue'](k.ljust(maxlen+2)),conf.malojaconfig[k])
 
 
 @mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images','prefer_existing'],shield=True)
 def main(*args,**kwargs):
 
 	actions = {
 		# server
-		"start":start,
-		"restart":restart,
-		"stop":stop,
 		"run":run_server,
-		"supervisor":run_supervisor,
 		"debug":debug,
 		"setup":onlysetup,
 		# admin scripts
@@ -169,7 +96,8 @@ def main(*args,**kwargs):
 		"apidebug":apidebug.run, # maloja apidebug
 		"parsealbums":tasks.parse_albums, # maloja parsealbums --strategy majority
 		# aux
-		"info":print_info
+		"info":print_info,
+		"settings":print_settings
 	}
 
 	if "version" in kwargs:
@@ -4,7 +4,7 @@
 # you know what f*ck it
 # this is hardcoded for now because of that damn project / package name discrepancy
 # i'll fix it one day
-VERSION = "3.2.0"
+VERSION = "3.2.4"
 HOMEPAGE = "https://github.com/krateng/maloja"
@@ -25,9 +25,20 @@ __logmodulename__ = "apis"
 
 cla = CleanerAgent()
 
 
+# wrapper method: calls handle. final net to catch exceptions and map them to the handlers proper json / xml response
+# handle method: finds the method for this path / query. can only raise InvalidMethodException
+# scrobble: NOT the exposed scrobble method - helper for all APIs to scrobble their results with self-identification
+
+
 class APIHandler:
+
+	__apiname__: str
+	errors: dict
 	# make these classes singletons
 	_instance = None
 
 	def __new__(cls, *args, **kwargs):
 		if not isinstance(cls._instance, cls):
 			cls._instance = object.__new__(cls, *args, **kwargs)
@@ -62,37 +73,33 @@ class APIHandler:
 
 		try:
 			response.status,result = self.handle(path,keys)
-		except Exception:
-			exceptiontype = sys.exc_info()[0]
-			if exceptiontype in self.errors:
-				response.status,result = self.errors[exceptiontype]
-				log(f"Error with {self.__apiname__} API: {exceptiontype} (Request: {path})")
+		except Exception as e:
+			for exc_type, exc_response in self.errors.items():
+				if isinstance(e, exc_type):
+					response.status, result = exc_response
+					log(f"Error with {self.__apiname__} API: {e} (Request: {path})")
+					break
 			else:
-				response.status,result = 500,{"status":"Unknown error","code":500}
-				log(f"Unhandled Exception with {self.__apiname__} API: {exceptiontype} (Request: {path})")
+				# THIS SHOULD NOT HAPPEN
+				response.status, result = 500, {"status": "Unknown error", "code": 500}
+				log(f"Unhandled Exception with {self.__apiname__} API: {e} (Request: {path})")
 
 		return result
 		#else:
 		#	result = {"error":"Invalid scrobble protocol"}
 		#	response.status = 500
 
 	def handle(self,path,keys):
 
 		try:
-			methodname = self.get_method(path,keys)
+			methodname = self.get_method(path, keys)
 			method = self.methods[methodname]
-		except Exception:
-			log("Could not find a handler for method " + str(methodname) + " in API " + self.__apiname__,module="debug")
-			log("Keys: " + str(keys),module="debug")
+		except KeyError:
+			log(f"Could not find a handler for method {methodname} in API {self.__apiname__}", module="debug")
+			log(f"Keys: {keys}", module="debug")
 			raise InvalidMethodException()
-		return method(path,keys)
+		return method(path, keys)
 
 	def scrobble(self,rawscrobble,client=None):
 
 		# fixing etc is handled by the main scrobble function
-		try:
-			return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
-		except Exception:
-			raise ScrobblingException()
+		return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
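The comment block introduced above spells out the handler contract: wrapper() catches everything and maps it through self.errors, handle() resolves the method name, and scrobble() forwards to the database tagged with the API's identity. A minimal sketch of a concrete handler, assuming only what these hunks show (ExampleAPI and its method names are invented for illustration; only APIHandler and InvalidMethodException are real names from this codebase):

class ExampleAPI(APIHandler):
	__apiname__ = "Example"

	def __init__(self):
		self.methods = {"example.submit": self.submit}
		# wrapper() walks this dict with isinstance(), so the Exception entry
		# acts as a catch-all and belongs last (dicts keep insertion order)
		self.errors = {
			InvalidMethodException: (200, {"error": 3, "message": "Invalid method"}),
			Exception: (500, {"error": 8, "message": "Operation failed"}),
		}

	def get_method(self, pathnodes, keys):
		# a name that misses self.methods raises InvalidMethodException
		# via the KeyError branch shown in handle() above
		return keys.get("method")

	def submit(self, pathnodes, keys):
		# handlers return (status, body); actual scrobbling would go through
		# self.scrobble(), which attaches __apiname__ as the client hint
		return 200, {"status": "ok"}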
@@ -3,4 +3,4 @@ class InvalidAuthException(Exception): pass
 class InvalidMethodException(Exception): pass
 class InvalidSessionKey(Exception): pass
 class MalformedJSONException(Exception): pass
-class ScrobblingException(Exception): pass
+
@@ -21,13 +21,22 @@ class Audioscrobbler(APIHandler):
 			"track.scrobble":self.submit_scrobble
 		}
 		self.errors = {
-			BadAuthException:(400,{"error":6,"message":"Requires authentication"}),
-			InvalidAuthException:(401,{"error":4,"message":"Invalid credentials"}),
-			InvalidMethodException:(200,{"error":3,"message":"Invalid method"}),
-			InvalidSessionKey:(403,{"error":9,"message":"Invalid session key"}),
-			ScrobblingException:(500,{"error":8,"message":"Operation failed"})
+			BadAuthException: (400, {"error": 6, "message": "Requires authentication"}),
+			InvalidAuthException: (401, {"error": 4, "message": "Invalid credentials"}),
+			InvalidMethodException: (200, {"error": 3, "message": "Invalid method"}),
+			InvalidSessionKey: (403, {"error": 9, "message": "Invalid session key"}),
+			Exception: (500, {"error": 8, "message": "Operation failed"})
 		}
 
+	# xml string escaping: https://stackoverflow.com/a/28703510
+	def xml_escape(self, str_xml: str):
+		str_xml = str_xml.replace("&", "&amp;")
+		str_xml = str_xml.replace("<", "&lt;")
+		str_xml = str_xml.replace(">", "&gt;")
+		str_xml = str_xml.replace("\"", "&quot;")
+		str_xml = str_xml.replace("'", "&apos;")
+		return str_xml
+
 	def get_method(self,pathnodes,keys):
 		return keys.get("method")
 
@@ -45,12 +54,22 @@ class Audioscrobbler(APIHandler):
 		token = keys.get("authToken")
 		user = keys.get("username")
 		password = keys.get("password")
+		format = keys.get("format") or "xml" # Audioscrobbler 2.0 uses XML by default
 		# either username and password
 		if user is not None and password is not None:
 			client = apikeystore.check_and_identify_key(password)
 			if client:
 				sessionkey = self.generate_key(client)
-				return 200,{"session":{"key":sessionkey}}
+				if format == "json":
+					return 200,{"session":{"key":sessionkey}}
+				else:
+					return 200,"""<lfm status="ok">
	<session>
		<name>%s</name>
		<key>%s</key>
		<subscriber>0</subscriber>
	</session>
</lfm>""" % (self.xml_escape(user), self.xml_escape(sessionkey))
 			else:
 				raise InvalidAuthException()
 		# or username and token (deprecated by lastfm)
@@ -59,7 +78,16 @@ class Audioscrobbler(APIHandler):
 			key = apikeystore[client]
 			if md5(user + md5(key)) == token:
 				sessionkey = self.generate_key(client)
-				return 200,{"session":{"key":sessionkey}}
+				if format == "json":
+					return 200,{"session":{"key":sessionkey}}
+				else:
+					return 200,"""<lfm status="ok">
	<session>
		<name>%s</name>
		<key>%s</key>
		<subscriber>0</subscriber>
	</session>
</lfm>""" % (self.xml_escape(user), self.xml_escape(sessionkey))
 			raise InvalidAuthException()
 		else:
 			raise BadAuthException()
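For context on the deprecated token branch above (`md5(user + md5(key)) == token`): the client derives the token from its username and API key. A hedged client-side sketch; the server's `md5` helper is not shown in this diff, so the hex-digest-of-UTF-8 behaviour here is an assumption:

import hashlib

def md5(s: str) -> str:
	# assumed to mirror the server-side helper: hex digest of UTF-8 input
	return hashlib.md5(s.encode("utf-8")).hexdigest()

def auth_token(username: str, api_key: str) -> str:
	# what a legacy Audioscrobbler client submits as authToken
	return md5(username + md5(api_key))

print(auth_token("alice", "my-api-key"))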
@@ -23,11 +23,11 @@ class AudioscrobblerLegacy(APIHandler):
 			"scrobble":self.submit_scrobble
 		}
 		self.errors = {
-			BadAuthException:(403,"BADAUTH\n"),
-			InvalidAuthException:(403,"BADAUTH\n"),
-			InvalidMethodException:(400,"FAILED\n"),
-			InvalidSessionKey:(403,"BADSESSION\n"),
-			ScrobblingException:(500,"FAILED\n")
+			BadAuthException: (403, "BADAUTH\n"),
+			InvalidAuthException: (403, "BADAUTH\n"),
+			InvalidMethodException: (400, "FAILED\n"),
+			InvalidSessionKey: (403, "BADSESSION\n"),
+			Exception: (500, "FAILED\n")
 		}
 
 	def get_method(self,pathnodes,keys):
@@ -3,6 +3,7 @@ from ._exceptions import *
 from .. import database
 import datetime
 from ._apikeys import apikeystore
+from ..database.exceptions import DuplicateScrobble, DuplicateTimestamp
 
 from ..pkg_global.conf import malojaconfig
 
@@ -21,11 +22,13 @@ class Listenbrainz(APIHandler):
 			"validate-token":self.validate_token
 		}
 		self.errors = {
-			BadAuthException:(401,{"code":401,"error":"You need to provide an Authorization header."}),
-			InvalidAuthException:(401,{"code":401,"error":"Incorrect Authorization"}),
-			InvalidMethodException:(200,{"code":200,"error":"Invalid Method"}),
-			MalformedJSONException:(400,{"code":400,"error":"Invalid JSON document submitted."}),
-			ScrobblingException:(500,{"code":500,"error":"Unspecified server error."})
+			BadAuthException: (401, {"code": 401, "error": "You need to provide an Authorization header."}),
+			InvalidAuthException: (401, {"code": 401, "error": "Incorrect Authorization"}),
+			InvalidMethodException: (200, {"code": 200, "error": "Invalid Method"}),
+			MalformedJSONException: (400, {"code": 400, "error": "Invalid JSON document submitted."}),
+			DuplicateScrobble: (200, {"status": "ok"}),
+			DuplicateTimestamp: (409, {"error": "Scrobble with the same timestamp already exists."}),
+			Exception: (500, {"code": 500, "error": "Unspecified server error."})
 		}
 
 	def get_method(self,pathnodes,keys):
@@ -7,7 +7,6 @@ from bottle import response, static_file, FormsDict
 from inspect import signature
 
 from doreah.logging import log
-from doreah.auth import authenticated_function
 
 # nimrodel API
 from nimrodel import EAPI as API
@@ -15,7 +14,7 @@ from nimrodel import Multi
 
 
 from .. import database
-from ..pkg_global.conf import malojaconfig, data_dir
+from ..pkg_global.conf import malojaconfig, data_dir, auth
 
 
@@ -82,6 +81,24 @@ errors = {
 		'desc':"This entity does not exist in the database."
 		}
 	}),
+	database.exceptions.DuplicateTimestamp: lambda e: (409,{
+		"status":"error",
+		"error":{
+			'type':'duplicate_timestamp',
+			'value':e.rejected_scrobble,
+			'desc':"A scrobble is already registered with this timestamp."
+		}
+	}),
+	database.exceptions.DuplicateScrobble: lambda e: (200,{
+		"status": "success",
+		"desc": "The scrobble is present in the database.",
+		"track": {},
+		"warnings": [{
+			'type': 'scrobble_exists',
+			'value': None,
+			'desc': 'This scrobble exists in the database (same timestamp and track). The submitted scrobble was not added.'
+		}]
+	}),
 	images.MalformedB64: lambda e: (400,{
 		"status":"failure",
 		"error":{
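Unlike the static (status, body) tuples in the protocol handlers, this errors map stores lambdas that receive the exception, letting a response embed details such as e.rejected_scrobble. The dispatch code lies outside these hunks, so the following is only a sketch of how such a map is typically applied:

def respond_to_error(e, errors):
	# illustrative only - the real catch_exceptions decorator is not shown in this diff
	for exc_type, handler in errors.items():
		if isinstance(e, exc_type):
			status, body = handler(e)  # the lambda builds the body from the exception
			return status, body
	return 500, {"status": "error"}  # fallback; an assumption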
@@ -474,7 +491,7 @@ def get_top_artists_external(k_filter, k_limit, k_delimit, k_amount):
 	:rtype: Dictionary"""
 
 	ckeys = {**k_limit, **k_delimit}
-	results = database.get_top_artists(**ckeys)
+	results = database.get_top_artists(**ckeys,compatibility=True)
 
 	return {
 		"status":"ok",
@@ -493,7 +510,7 @@ def get_top_tracks_external(k_filter, k_limit, k_delimit, k_amount):
 	:rtype: Dictionary"""
 
 	ckeys = {**k_limit, **k_delimit}
-	results = database.get_top_tracks(**ckeys)
+	results = database.get_top_tracks(**ckeys,compatibility=True)
 	# IMPLEMENT THIS FOR TOP TRACKS OF ARTIST/ALBUM AS WELL?
 
 	return {
@@ -513,7 +530,7 @@ def get_top_albums_external(k_filter, k_limit, k_delimit, k_amount):
 	:rtype: Dictionary"""
 
 	ckeys = {**k_limit, **k_delimit}
-	results = database.get_top_albums(**ckeys)
+	results = database.get_top_albums(**ckeys,compatibility=True)
 	# IMPLEMENT THIS FOR TOP ALBUMS OF ARTIST AS WELL?
 
 	return {
@@ -567,7 +584,7 @@ def album_info_external(k_filter, k_limit, k_delimit, k_amount):
 
 
 @api.post("newscrobble")
-@authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
+@auth.authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
 @catch_exceptions
 def post_scrobble(
 	artist:Multi=None,
@@ -647,7 +664,7 @@ def post_scrobble(
 
 
 @api.post("addpicture")
-@authenticated_function(alternate=api_key_correct,api=True)
+@auth.authenticated_function(alternate=api_key_correct,api=True)
 @catch_exceptions
 @convert_kwargs
 def add_picture(k_filter, k_limit, k_delimit, k_amount, k_special):
@@ -670,7 +687,7 @@ def add_picture(k_filter, k_limit, k_delimit, k_amount, k_special):
 
 
 @api.post("importrules")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def import_rulemodule(**keys):
 	"""Internal Use Only"""
@@ -689,7 +706,7 @@ def import_rulemodule(**keys):
 
 
 @api.post("rebuild")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def rebuild(**keys):
 	"""Internal Use Only"""
@@ -765,7 +782,7 @@ def search(**keys):
 
 
 @api.post("newrule")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def newrule(**keys):
 	"""Internal Use Only"""
@@ -776,21 +793,21 @@ def newrule(**keys):
 
 
 @api.post("settings")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def set_settings(**keys):
 	"""Internal Use Only"""
 	malojaconfig.update(keys)
 
 @api.post("apikeys")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def set_apikeys(**keys):
 	"""Internal Use Only"""
 	apikeystore.update(keys)
 
 @api.post("import")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def import_scrobbles(identifier):
 	"""Internal Use Only"""
@@ -798,7 +815,7 @@ def import_scrobbles(identifier):
 	import_scrobbles(identifier)
 
 @api.get("backup")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def get_backup(**keys):
 	"""Internal Use Only"""
@@ -811,7 +828,7 @@ def get_backup(**keys):
 	return static_file(os.path.basename(archivefile),root=tmpfolder)
 
 @api.get("export")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def get_export(**keys):
 	"""Internal Use Only"""
@@ -825,7 +842,7 @@ def get_export(**keys):
 
 
 @api.post("delete_scrobble")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def delete_scrobble(timestamp):
 	"""Internal Use Only"""
@@ -837,7 +854,7 @@ def delete_scrobble(timestamp):
 
 
 @api.post("edit_artist")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def edit_artist(id,name):
 	"""Internal Use Only"""
@@ -847,7 +864,7 @@ def edit_artist(id,name):
 	}
 
 @api.post("edit_track")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def edit_track(id,title):
 	"""Internal Use Only"""
@@ -857,7 +874,7 @@ def edit_track(id,title):
 	}
 
 @api.post("edit_album")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def edit_album(id,albumtitle):
 	"""Internal Use Only"""
@@ -868,7 +885,7 @@ def edit_album(id,albumtitle):
 
 
 @api.post("merge_tracks")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def merge_tracks(target_id,source_ids):
 	"""Internal Use Only"""
@@ -879,7 +896,7 @@ def merge_tracks(target_id,source_ids):
 	}
 
 @api.post("merge_artists")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def merge_artists(target_id,source_ids):
 	"""Internal Use Only"""
@@ -890,7 +907,7 @@ def merge_artists(target_id,source_ids):
 	}
 
 @api.post("merge_albums")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def merge_artists(target_id,source_ids):
 	"""Internal Use Only"""
@@ -901,7 +918,7 @@ def merge_artists(target_id,source_ids):
 	}
 
 @api.post("associate_albums_to_artist")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def associate_albums_to_artist(target_id,source_ids,remove=False):
 	result = database.associate_albums_to_artist(target_id,source_ids,remove=remove)
@@ -913,7 +930,7 @@ def associate_albums_to_artist(target_id,source_ids,remove=False):
 	}
 
 @api.post("associate_tracks_to_artist")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def associate_tracks_to_artist(target_id,source_ids,remove=False):
 	result = database.associate_tracks_to_artist(target_id,source_ids,remove=remove)
@@ -925,7 +942,7 @@ def associate_tracks_to_artist(target_id,source_ids,remove=False):
 	}
 
 @api.post("associate_tracks_to_album")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def associate_tracks_to_album(target_id,source_ids):
 	result = database.associate_tracks_to_album(target_id,source_ids)
@@ -937,7 +954,7 @@ def associate_tracks_to_album(target_id,source_ids):
 
 
 @api.post("reparse_scrobble")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def reparse_scrobble(timestamp):
 	"""Internal Use Only"""
@@ -15,13 +15,15 @@ class CleanerAgent:
 	def updateRules(self):
 
 		rawrules = []
-		for f in os.listdir(data_dir["rules"]()):
-			if f.split('.')[-1].lower() != 'tsv': continue
-			filepath = data_dir["rules"](f)
-			with open(filepath,'r') as filed:
-				reader = csv.reader(filed,delimiter="\t")
-				rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
+		try:
+			for f in os.listdir(data_dir["rules"]()):
+				if f.split('.')[-1].lower() != 'tsv': continue
+				filepath = data_dir["rules"](f)
+				with open(filepath,'r') as filed:
+					reader = csv.reader(filed,delimiter="\t")
+					rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
+		except FileNotFoundError:
+			pass
 
 		self.rules_belongtogether = [r[1] for r in rawrules if r[0]=="belongtogether"]
 		self.rules_notanartist = [r[1] for r in rawrules if r[0]=="notanartist"]
@@ -160,8 +160,8 @@ replaceartist	여자친구 GFriend	GFriend
 # Girl's Generation
 replaceartist	소녀시대	Girls' Generation
 replaceartist	SNSD	Girls' Generation
-replaceartist	Girls' Generation-TTS	TaeTiSeo
-countas	TaeTiSeo	Girls' Generation
+replaceartist	Girls' Generation-TTS	TaeTiSeo
+countas	TaeTiSeo	Girls' Generation
 
 # Apink
 replaceartist	A Pink	Apink
@@ -217,6 +217,8 @@ countas	Pristin V	Pristin
 
 # CLC
 countas	Sorn	CLC
+countas	Yeeun	CLC
+countas	Seungyeon	CLC
 
 # Popular Remixes
 artistintitle	Areia Remix	Areia
@@ -1,6 +1,8 @@
 # server
 from bottle import request, response, FormsDict
 
+from ..pkg_global import conf
+
 # decorator that makes sure this function is only run in normal operation,
 # not when we run a task that needs to access the database
@@ -27,7 +29,6 @@ from . import exceptions
 
 # doreah toolkit
 from doreah.logging import log
-from doreah.auth import authenticated_api, authenticated_api_with_alternate
 import doreah
 
@@ -42,6 +43,7 @@ from collections import namedtuple
 from threading import Lock
 import yaml, json
 import math
+from itertools import takewhile
 
 # url handling
 import urllib
@@ -110,15 +112,18 @@ def incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None):
 	scrobbledict = rawscrobble_to_scrobbledict(rawscrobble, fix, client)
 	albumupdate = (malojaconfig["ALBUM_INFORMATION_TRUST"] == 'last')
 
-	sqldb.add_scrobble(scrobbledict,update_album=albumupdate,dbconn=dbconn)
-	proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])
-
-	dbcache.invalidate_caches(scrobbledict['time'])
-
-	#return {"status":"success","scrobble":scrobbledict}
-	return scrobbledict
+	if scrobbledict:
+
+		sqldb.add_scrobble(scrobbledict,update_album=albumupdate,dbconn=dbconn)
+		proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])
+
+		dbcache.invalidate_caches(scrobbledict['time'])
+
+		#return {"status":"success","scrobble":scrobbledict}
+		return scrobbledict
+	else:
+		raise exceptions.MissingScrobbleParameters('artist')
 
 
 @waitfordb
@@ -131,14 +136,16 @@ def reparse_scrobble(timestamp):
 
 	newscrobble = rawscrobble_to_scrobbledict(scrobble['rawscrobble'])
 
-	track_id = sqldb.get_track_id(newscrobble['track'])
-
-	# check if id changed
-	if sqldb.get_track_id(scrobble['track']) != track_id:
-		sqldb.edit_scrobble(timestamp, {'track':newscrobble['track']})
-		dbcache.invalidate_entity_cache()
-		dbcache.invalidate_caches()
-		return sqldb.get_scrobble(timestamp=timestamp)
+	if newscrobble:
+
+		track_id = sqldb.get_track_id(newscrobble['track'])
+
+		# check if id changed
+		if sqldb.get_track_id(scrobble['track']) != track_id:
+			sqldb.edit_scrobble(timestamp, {'track':newscrobble['track']})
+			dbcache.invalidate_entity_cache()
+			dbcache.invalidate_caches()
+			return sqldb.get_scrobble(timestamp=timestamp)
 
 	return False
 
@@ -190,6 +197,11 @@ def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
 	if not scrobbledict["track"]["album"]["albumtitle"]:
 		del scrobbledict["track"]["album"]
 
+	# discard if invalid
+	if len(scrobbledict['track']['artists']) == 0:
+		return None
+	# TODO: other checks
+
 	return scrobbledict
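To illustrate the new guard: a scrobble whose artists were all stripped by server parsing now resolves to None instead of being written. The dict below is invented for illustration; only the `scrobbledict['track']['artists']` shape is taken from the hunk above:

# hypothetical parsed result for a scrobble whose artists were all removed
scrobbledict = {"track": {"artists": [], "title": "Some Title"}, "time": 1700000000}
# rawscrobble_to_scrobbledict() now returns None for such input, and
# incoming_scrobble() raises MissingScrobbleParameters('artist') instead of
# inserting an artistless scrobble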
@ -308,7 +320,7 @@ def associate_tracks_to_album(target_id,source_ids):
|
||||
if target_id:
|
||||
target = sqldb.get_album(target_id)
|
||||
log(f"Adding {sources} into {target}")
|
||||
sqldb.add_tracks_to_albums({src:target_id for src in source_ids})
|
||||
sqldb.add_tracks_to_albums({src:target_id for src in source_ids},replace=True)
|
||||
else:
|
||||
sqldb.remove_album(source_ids)
|
||||
result = {'sources':sources,'target':target}
|
||||
@ -434,10 +446,11 @@ def get_charts_albums(dbconn=None,resolve_ids=True,only_own_albums=False,**keys)
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
|
||||
if 'artist' in keys:
|
||||
result = sqldb.count_scrobbles_by_album_combined(since=since,to=to,artist=keys['artist'],associated=keys.get('associated',False),resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
artist = sqldb.get_artist(sqldb.get_artist_id(keys['artist']))
|
||||
result = sqldb.count_scrobbles_by_album_combined(since=since,to=to,artist=artist,associated=keys.get('associated',False),resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
if only_own_albums:
|
||||
# TODO: this doesnt take associated into account and doesnt change ranks
|
||||
result = [e for e in result if keys['artist'] in (e['album']['artists'] or [])]
|
||||
result = [e for e in result if artist in (e['album']['artists'] or [])]
|
||||
else:
|
||||
result = sqldb.count_scrobbles_by_album(since=since,to=to,resolve_ids=resolve_ids,dbconn=dbconn)
|
||||
return result
|
||||
@@ -560,7 +573,7 @@ def get_performance(dbconn=None,**keys):
	return results


@waitfordb
-def get_top_artists(dbconn=None,**keys):
+def get_top_artists(dbconn=None,compatibility=True,**keys):

	separate = keys.get('separate')

@@ -568,42 +581,73 @@ def get_top_artists(dbconn=None,**keys):
	results = []

	for rng in rngs:
-		try:
-			res = get_charts_artists(timerange=rng,separate=separate,dbconn=dbconn)[0]
-			results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"],"real_scrobbles":res["real_scrobbles"],"associated_artists":sqldb.get_associated_artists(res["artist"])})
-		except Exception:
-			results.append({"range":rng,"artist":None,"scrobbles":0,"real_scrobbles":0})
+		result = {'range':rng}
+		res = get_charts_artists(timerange=rng,separate=separate,dbconn=dbconn)
+
+		result['top'] = [
+			{'artist': r['artist'], 'scrobbles': r['scrobbles'], 'real_scrobbles':r['real_scrobbles'], 'associated_artists': sqldb.get_associated_artists(r['artist'])}
+			for r in takewhile(lambda x:x['rank']==1,res)
+		]
+		# for third party applications
+		if compatibility:
+			if result['top']:
+				result.update(result['top'][0])
+			else:
+				result.update({'artist':None,'scrobbles':0,'real_scrobbles':0})
+
+		results.append(result)

	return results


@waitfordb
-def get_top_tracks(dbconn=None,**keys):
+def get_top_tracks(dbconn=None,compatibility=True,**keys):

	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []

	for rng in rngs:
-		try:
-			res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
-			results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
-		except Exception:
-			results.append({"range":rng,"track":None,"scrobbles":0})
+		result = {'range':rng}
+		res = get_charts_tracks(timerange=rng,dbconn=dbconn)
+
+		result['top'] = [
+			{'track': r['track'], 'scrobbles': r['scrobbles']}
+			for r in takewhile(lambda x:x['rank']==1,res)
+		]
+		# for third party applications
+		if compatibility:
+			if result['top']:
+				result.update(result['top'][0])
+			else:
+				result.update({'track':None,'scrobbles':0})
+
+		results.append(result)

	return results


@waitfordb
-def get_top_albums(dbconn=None,**keys):
+def get_top_albums(dbconn=None,compatibility=True,**keys):

	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []

	for rng in rngs:
-		try:
-			res = get_charts_albums(timerange=rng,dbconn=dbconn)[0]
-			results.append({"range":rng,"album":res["album"],"scrobbles":res["scrobbles"]})
-		except Exception:
-			results.append({"range":rng,"album":None,"scrobbles":0})
+		result = {'range':rng}
+		res = get_charts_albums(timerange=rng,dbconn=dbconn)
+
+		result['top'] = [
+			{'album': r['album'], 'scrobbles': r['scrobbles']}
+			for r in takewhile(lambda x:x['rank']==1,res)
+		]
+		# for third party applications
+		if compatibility:
+			if result['top']:
+				result.update(result['top'][0])
+			else:
+				result.update({'album':None,'scrobbles':0})
+
+		results.append(result)

	return results
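The reworked get_top_* functions keep every entry tied for rank 1 rather than only the first chart row; because chart results arrive rank-ordered, itertools.takewhile stops at the first entry whose rank is not 1, and an empty chart simply yields an empty top list. A self-contained sketch of the pattern, with invented sample data:

    from itertools import takewhile

    res = [
        {'rank': 1, 'artist': 'A', 'scrobbles': 10},
        {'rank': 1, 'artist': 'B', 'scrobbles': 10},  # tied for first
        {'rank': 3, 'artist': 'C', 'scrobbles': 7},
    ]
    top = list(takewhile(lambda x: x['rank'] == 1, res))
    assert [r['artist'] for r in top] == ['A', 'B']
    assert list(takewhile(lambda x: x['rank'] == 1, [])) == []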
@@ -890,6 +934,9 @@ def get_predefined_rulesets(dbconn=None):


def start_db():

+	conf.AUX_MODE = True # that is, without a doubt, the worst python code you have ever seen
+
	# Upgrade database
	from .. import upgrade
	upgrade.upgrade_db(sqldb.add_scrobbles)
@@ -899,11 +946,19 @@ def start_db():
	from . import associated
	associated.load_associated_rules()

+	# import scrobbles
+	from ..proccontrol.tasks.import_scrobbles import import_scrobbles #lmao this codebase is so fucked
+	for f in os.listdir(data_dir['import']()):
+		if f != 'dummy':
+			import_scrobbles(data_dir['import'](f))
+
	dbstatus['healthy'] = True

+	conf.AUX_MODE = False # but you have seen it
+
	# inform time module about begin of scrobbling
	try:
-		firstscrobble = sqldb.get_scrobbles()[0]
+		firstscrobble = sqldb.get_scrobbles(limit=1)[0]
		register_scrobbletime(firstscrobble['time'])
	except IndexError:
		register_scrobbletime(int(datetime.datetime.now().timestamp()))
@@ -19,12 +19,16 @@ def load_associated_rules():

	# load from file
	rawrules = []
-	for f in os.listdir(data_dir["rules"]()):
-		if f.split('.')[-1].lower() != 'tsv': continue
-		filepath = data_dir["rules"](f)
-		with open(filepath,'r') as filed:
-			reader = csv.reader(filed,delimiter="\t")
-			rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
+	try:
+		for f in os.listdir(data_dir["rules"]()):
+			if f.split('.')[-1].lower() != 'tsv': continue
+			filepath = data_dir["rules"](f)
+			with open(filepath,'r') as filed:
+				reader = csv.reader(filed,delimiter="\t")
+				rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
+	except FileNotFoundError:
+		return

	rules = [{'source_artist':r[1],'target_artist':r[2]} for r in rawrules if r[0]=="countas"]

	#for rule in rules:
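For reference, the rule files this loader consumes are tab-separated, and the comprehension above maps column 0 to the rule type and columns 1 and 2 to source and target artist for countas rules. A hypothetical rule line and the dict it produces:

    # hypothetical line in rules/custom.tsv (columns are tab-separated):
    # countas	Irene & Seulgi	Red Velvet
    raw = ['countas', 'Irene & Seulgi', 'Red Velvet']
    rule = {'source_artist': raw[1], 'target_artist': raw[2]}
    assert rule == {'source_artist': 'Irene & Seulgi', 'target_artist': 'Red Velvet'}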
@@ -1,37 +1,57 @@
from bottle import HTTPError


class EntityExists(Exception):
-	def __init__(self,entitydict):
+	def __init__(self, entitydict):
		self.entitydict = entitydict


class TrackExists(EntityExists):
	pass


class ArtistExists(EntityExists):
	pass


+class AlbumExists(EntityExists):
+	pass
+
+
+# if the scrobbles dont match
+class DuplicateTimestamp(Exception):
+	def __init__(self, existing_scrobble, rejected_scrobble):
+		self.existing_scrobble = existing_scrobble
+		self.rejected_scrobble = rejected_scrobble
+
+
+# if it's the same scrobble
+class DuplicateScrobble(Exception):
+	def __init__(self, scrobble):
+		self.scrobble = scrobble
+
+
class DatabaseNotBuilt(HTTPError):
	def __init__(self):
		super().__init__(
			status=503,
			body="The Maloja Database is being upgraded to support new Maloja features. This could take a while.",
-			headers={"Retry-After":120}
+			headers={"Retry-After": 120}
		)


class MissingScrobbleParameters(Exception):
-	def __init__(self,params=[]):
+	def __init__(self, params=[]):
		self.params = params


class MissingEntityParameter(Exception):
	pass


class EntityDoesNotExist(HTTPError):
	entitytype = 'Entity'

	def __init__(self,entitydict):
		self.entitydict = entitydict
		super().__init__(
@@ -39,9 +59,14 @@ class EntityDoesNotExist(HTTPError):
			body=f"The {self.entitytype} '{self.entitydict}' does not exist in the database."
		)


class ArtistDoesNotExist(EntityDoesNotExist):
	entitytype = 'Artist'


+class AlbumDoesNotExist(EntityDoesNotExist):
+	entitytype = 'Album'
+
+
class TrackDoesNotExist(EntityDoesNotExist):
	entitytype = 'Track'
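A sketch of how a scrobble-ingestion caller might surface the two new duplicate exceptions; the handler shape and response dicts are illustrative, only the exception classes come from the module above:

    try:
        add_scrobble(scrobbledict)  # assumed entry point that raises the exceptions above
    except DuplicateScrobble:
        # identical scrobble resubmitted: harmless, report it as already known
        response = {'status': 'success', 'desc': 'scrobble already exists'}
    except DuplicateTimestamp as e:
        # a different track already owns this timestamp: reject with the conflict
        response = {'status': 'failure', 'existing': e.existing_scrobble}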
@@ -1,3 +1,5 @@
+from typing import TypedDict, Optional, cast
+
import sqlalchemy as sql
from sqlalchemy.dialects.sqlite import insert as sqliteinsert
import json
@@ -213,6 +215,25 @@ def set_maloja_info(info,dbconn=None):
# The last two fields are not returned under normal circumstances


+class AlbumDict(TypedDict):
+	albumtitle: str
+	artists: list[str]
+
+
+class TrackDict(TypedDict):
+	artists: list[str]
+	title: str
+	album: AlbumDict
+	length: int | None
+
+
+class ScrobbleDict(TypedDict):
+	time: int
+	track: TrackDict
+	duration: int
+	origin: str
+	extra: Optional[dict]
+	rawscrobble: Optional[dict]
+
+
##### Conversions between DB and dicts
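These TypedDicts give the dict-shaped entities static types without changing their runtime representation; cast() only informs the type checker and performs no validation or conversion. A standalone illustration with invented values:

    from typing import TypedDict, cast

    class AlbumDict(TypedDict):
        albumtitle: str
        artists: list[str]

    album: AlbumDict = {'albumtitle': 'Perfect Velvet', 'artists': ['Red Velvet']}
    also_album = cast(AlbumDict, dict(album))  # still a plain dict at runtime
    assert also_album == album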
@@ -222,140 +243,164 @@ def set_maloja_info(info,dbconn=None):


### DB -> DICT
-def scrobbles_db_to_dict(rows,include_internal=False,dbconn=None):
-	tracks = get_tracks_map(set(row.track_id for row in rows),dbconn=dbconn)
+def scrobbles_db_to_dict(rows, include_internal=False, dbconn=None) -> list[ScrobbleDict]:
+	tracks: list[TrackDict] = get_tracks_map(set(row.track_id for row in rows), dbconn=dbconn)
	return [
-		{
+		cast(ScrobbleDict, {
			**{
-				"time":row.timestamp,
-				"track":tracks[row.track_id],
-				"duration":row.duration,
-				"origin":row.origin,
+				"time": row.timestamp,
+				"track": tracks[row.track_id],
+				"duration": row.duration,
+				"origin": row.origin
			},
			**({
-				"extra":json.loads(row.extra or '{}'),
-				"rawscrobble":json.loads(row.rawscrobble or '{}')
+				"extra": json.loads(row.extra or '{}'),
+				"rawscrobble": json.loads(row.rawscrobble or '{}')
			} if include_internal else {})
-		}
+		})
		for row in rows
	]

-def scrobble_db_to_dict(row,dbconn=None):
-	return scrobbles_db_to_dict([row],dbconn=dbconn)[0]
-
-def tracks_db_to_dict(rows,dbconn=None):
-	artists = get_artists_of_tracks(set(row.id for row in rows),dbconn=dbconn)
-	albums = get_albums_map(set(row.album_id for row in rows),dbconn=dbconn)
+def scrobble_db_to_dict(row, dbconn=None) -> ScrobbleDict:
+	return scrobbles_db_to_dict([row], dbconn=dbconn)[0]
+
+
+def tracks_db_to_dict(rows, dbconn=None) -> list[TrackDict]:
+	artists = get_artists_of_tracks(set(row.id for row in rows), dbconn=dbconn)
+	albums = get_albums_map(set(row.album_id for row in rows), dbconn=dbconn)
	return [
-		{
+		cast(TrackDict, {
			"artists":artists[row.id],
			"title":row.title,
			"album":albums.get(row.album_id),
			"length":row.length
-		}
+		})
		for row in rows
	]

-def track_db_to_dict(row,dbconn=None):
-	return tracks_db_to_dict([row],dbconn=dbconn)[0]
-
-def artists_db_to_dict(rows,dbconn=None):
+def track_db_to_dict(row, dbconn=None) -> TrackDict:
+	return tracks_db_to_dict([row], dbconn=dbconn)[0]
+
+
+def artists_db_to_dict(rows, dbconn=None) -> list[str]:
	return [
		row.name
		for row in rows
	]

-def artist_db_to_dict(row,dbconn=None):
-	return artists_db_to_dict([row],dbconn=dbconn)[0]
-
-def albums_db_to_dict(rows,dbconn=None):
-	artists = get_artists_of_albums(set(row.id for row in rows),dbconn=dbconn)
+def artist_db_to_dict(row, dbconn=None) -> str:
+	return artists_db_to_dict([row], dbconn=dbconn)[0]
+
+
+def albums_db_to_dict(rows, dbconn=None) -> list[AlbumDict]:
+	artists = get_artists_of_albums(set(row.id for row in rows), dbconn=dbconn)
	return [
-		{
-			"artists":artists.get(row.id),
-			"albumtitle":row.albtitle,
-		}
+		cast(AlbumDict, {
+			"artists": artists.get(row.id),
+			"albumtitle": row.albtitle,
+		})
		for row in rows
	]

-def album_db_to_dict(row,dbconn=None):
-	return albums_db_to_dict([row],dbconn=dbconn)[0]
+
+def album_db_to_dict(row, dbconn=None) -> AlbumDict:
+	return albums_db_to_dict([row], dbconn=dbconn)[0]


### DICT -> DB
# These should return None when no data is in the dict so they can be used for update statements

-def scrobble_dict_to_db(info,update_album=False,dbconn=None):
+def scrobble_dict_to_db(info: ScrobbleDict, update_album=False, dbconn=None):
	return {
-		"timestamp":info.get('time'),
-		"origin":info.get('origin'),
-		"duration":info.get('duration'),
-		"track_id":get_track_id(info.get('track'),update_album=update_album,dbconn=dbconn),
-		"extra":json.dumps(info.get('extra')) if info.get('extra') else None,
-		"rawscrobble":json.dumps(info.get('rawscrobble')) if info.get('rawscrobble') else None
+		"timestamp": info.get('time'),
+		"origin": info.get('origin'),
+		"duration": info.get('duration'),
+		"track_id": get_track_id(info.get('track'), update_album=update_album, dbconn=dbconn),
+		"extra": json.dumps(info.get('extra')) if info.get('extra') else None,
+		"rawscrobble": json.dumps(info.get('rawscrobble')) if info.get('rawscrobble') else None
	}

-def track_dict_to_db(info,dbconn=None):
+
+def track_dict_to_db(info: TrackDict, dbconn=None):
	return {
-		"title":info.get('title'),
-		"title_normalized":normalize_name(info.get('title','')) or None,
-		"length":info.get('length')
+		"title": info.get('title'),
+		"title_normalized": normalize_name(info.get('title', '')) or None,
+		"length": info.get('length')
	}

-def artist_dict_to_db(info,dbconn=None):
+
+def artist_dict_to_db(info: str, dbconn=None):
	return {
		"name": info,
-		"name_normalized":normalize_name(info)
+		"name_normalized": normalize_name(info)
	}

-def album_dict_to_db(info,dbconn=None):
+
+def album_dict_to_db(info: AlbumDict, dbconn=None):
	return {
-		"albtitle":info.get('albumtitle'),
-		"albtitle_normalized":normalize_name(info.get('albumtitle'))
+		"albtitle": info.get('albumtitle'),
+		"albtitle_normalized": normalize_name(info.get('albumtitle'))
	}
##### Actual Database interactions

# TODO: remove all resolve_id args and do that logic outside the caching to improve hit chances
# TODO: maybe also factor out all intitial get entity funcs (some here, some in __init__) and throw exceptions

@connection_provider
-def add_scrobble(scrobbledict,update_album=False,dbconn=None):
-	add_scrobbles([scrobbledict],update_album=update_album,dbconn=dbconn)
+def add_scrobble(scrobbledict: ScrobbleDict, update_album=False, dbconn=None):
+	_, ex, er = add_scrobbles([scrobbledict], update_album=update_album, dbconn=dbconn)
+	if er > 0:
+		raise exc.DuplicateTimestamp(existing_scrobble=None, rejected_scrobble=scrobbledict)
+		# TODO: actually pass existing scrobble
+	elif ex > 0:
+		raise exc.DuplicateScrobble(scrobble=scrobbledict)


@connection_provider
-def add_scrobbles(scrobbleslist,update_album=False,dbconn=None):
+def add_scrobbles(scrobbleslist: list[ScrobbleDict], update_album=False, dbconn=None) -> tuple[int, int, int]:

	with SCROBBLE_LOCK:

-		ops = [
-			DB['scrobbles'].insert().values(
-				**scrobble_dict_to_db(s,update_album=update_album,dbconn=dbconn)
-			) for s in scrobbleslist
-		]
+		# ops = [
+		# 	DB['scrobbles'].insert().values(
+		# 		**scrobble_dict_to_db(s,update_album=update_album,dbconn=dbconn)
+		# 	) for s in scrobbleslist
+		# ]

-		success,errors = 0,0
-		for op in ops:
+		success, exists, errors = 0, 0, 0
+
+		for s in scrobbleslist:
+			scrobble_entry = scrobble_dict_to_db(s, update_album=update_album, dbconn=dbconn)
			try:
-				dbconn.execute(op)
+				dbconn.execute(DB['scrobbles'].insert().values(
+					**scrobble_entry
+				))
				success += 1
-			except sql.exc.IntegrityError as e:
-				errors += 1
-				# TODO check if actual duplicate
+			except sql.exc.IntegrityError:
+				# get existing scrobble
+				result = dbconn.execute(DB['scrobbles'].select().where(
+					DB['scrobbles'].c.timestamp == scrobble_entry['timestamp']
+				)).first()
+				if result.track_id == scrobble_entry['track_id']:
+					exists += 1
+				else:
+					errors += 1

-	if errors > 0: log(f"{errors} Scrobbles have not been written to database!",color='red')
-	return success,errors
+	if errors > 0: log(f"{errors} Scrobbles have not been written to database (duplicate timestamps)!", color='red')
+	if exists > 0: log(f"{exists} Scrobbles have not been written to database (already exist)", color='orange')
+	return success, exists, errors


@connection_provider
-def delete_scrobble(scrobble_id,dbconn=None):
+def delete_scrobble(scrobble_id: int, dbconn=None) -> bool:

	with SCROBBLE_LOCK:
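The rewritten add_scrobbles distinguishes "this exact scrobble already exists" from "a different track claims the same timestamp" by re-reading the conflicting row after the unique-constraint violation. A self-contained sketch of that insert-then-inspect pattern on a toy table (in-memory SQLite, SQLAlchemy 2.x commit-as-you-go style; the table layout is invented for illustration):

    import sqlalchemy as sql

    engine = sql.create_engine("sqlite:///:memory:")
    meta = sql.MetaData()
    scrobbles = sql.Table(
        "scrobbles", meta,
        sql.Column("timestamp", sql.Integer, primary_key=True),  # one scrobble per timestamp
        sql.Column("track_id", sql.Integer),
    )
    meta.create_all(engine)

    def insert_scrobble(conn, ts, track_id):
        try:
            conn.execute(scrobbles.insert().values(timestamp=ts, track_id=track_id))
            conn.commit()
            return "new"
        except sql.exc.IntegrityError:
            conn.rollback()
            row = conn.execute(scrobbles.select().where(scrobbles.c.timestamp == ts)).first()
            # same track at the same second: harmless resubmission; otherwise a real conflict
            return "exists" if row.track_id == track_id else "conflict"

    with engine.connect() as conn:
        assert insert_scrobble(conn, 1000, 1) == "new"
        assert insert_scrobble(conn, 1000, 1) == "exists"
        assert insert_scrobble(conn, 1000, 2) == "conflict"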
@@ -369,7 +414,7 @@ def delete_scrobble(scrobble_id,dbconn=None):


@connection_provider
-def add_track_to_album(track_id,album_id,replace=False,dbconn=None):
+def add_track_to_album(track_id: int, album_id: int, replace=False, dbconn=None) -> bool:

	conditions = [
		DB['tracks'].c.id == track_id
@@ -398,39 +443,39 @@ def add_track_to_album(track_id,album_id,replace=False,dbconn=None):
	# ALL OF RECORDED HISTORY in order to display top weeks
	# lmao
	# TODO: figure out something better


	return True


@connection_provider
-def add_tracks_to_albums(track_to_album_id_dict,replace=False,dbconn=None):
+def add_tracks_to_albums(track_to_album_id_dict: dict[int, int], replace=False, dbconn=None) -> bool:

	for track_id in track_to_album_id_dict:
-		add_track_to_album(track_id,track_to_album_id_dict[track_id],dbconn=dbconn)
+		add_track_to_album(track_id,track_to_album_id_dict[track_id], replace=replace, dbconn=dbconn)
	return True


@connection_provider
-def remove_album(*track_ids,dbconn=None):
+def remove_album(*track_ids: list[int], dbconn=None) -> bool:

	DB['tracks'].update().where(
		DB['tracks'].c.track_id.in_(track_ids)
	).values(
		album_id=None
	)
	return True


### these will 'get' the ID of an entity, creating it if necessary

@cached_wrapper
@connection_provider
-def get_track_id(trackdict,create_new=True,update_album=False,dbconn=None):
+def get_track_id(trackdict: TrackDict, create_new=True, update_album=False, dbconn=None) -> int | None:
	ntitle = normalize_name(trackdict['title'])
-	artist_ids = [get_artist_id(a,create_new=create_new,dbconn=dbconn) for a in trackdict['artists']]
+	artist_ids = [get_artist_id(a, create_new=create_new, dbconn=dbconn) for a in trackdict['artists']]
	artist_ids = list(set(artist_ids))

	op = DB['tracks'].select().where(
-		DB['tracks'].c.title_normalized==ntitle
+		DB['tracks'].c.title_normalized == ntitle
	)
	result = dbconn.execute(op).all()
	for row in result:
@@ -440,7 +485,7 @@ def get_track_id(trackdict,create_new=True,update_album=False,dbconn=None):
		op = DB['trackartists'].select(
			# DB['trackartists'].c.artist_id
		).where(
-			DB['trackartists'].c.track_id==row.id
+			DB['trackartists'].c.track_id == row.id
		)
		result = dbconn.execute(op).all()
		match_artist_ids = [r.artist_id for r in result]
@@ -456,14 +501,14 @@ def get_track_id(trackdict,create_new=True,update_album=False,dbconn=None):
			album_id = get_album_id(trackdict['album'],create_new=(update_album or not row.album_id),dbconn=dbconn)
			add_track_to_album(row.id,album_id,replace=update_album,dbconn=dbconn)

			return row.id

-	if not create_new: return None
+	if not create_new:
+		return None

	#print("Creating new track")
	op = DB['tracks'].insert().values(
-		**track_dict_to_db(trackdict,dbconn=dbconn)
+		**track_dict_to_db(trackdict, dbconn=dbconn)
	)
	result = dbconn.execute(op)
	track_id = result.inserted_primary_key[0]
@@ -478,24 +523,26 @@ def get_track_id(trackdict,create_new=True,update_album=False,dbconn=None):
	#print("Created",trackdict['title'],track_id)

	if trackdict.get('album'):
-		add_track_to_album(track_id,get_album_id(trackdict['album'],dbconn=dbconn),dbconn=dbconn)
+		add_track_to_album(track_id, get_album_id(trackdict['album'], dbconn=dbconn), dbconn=dbconn)
	return track_id


@cached_wrapper
@connection_provider
-def get_artist_id(artistname,create_new=True,dbconn=None):
+def get_artist_id(artistname: str, create_new=True, dbconn=None) -> int | None:
	nname = normalize_name(artistname)
	#print("looking for",nname)

	op = DB['artists'].select().where(
-		DB['artists'].c.name_normalized==nname
+		DB['artists'].c.name_normalized == nname
	)
	result = dbconn.execute(op).all()
	for row in result:
		#print("ID for",artistname,"was",row[0])
		return row.id

-	if not create_new: return None
+	if not create_new:
+		return None

	op = DB['artists'].insert().values(
		name=artistname,
@@ -508,15 +555,15 @@ def get_artist_id(artistname,create_new=True,dbconn=None):

@cached_wrapper
@connection_provider
-def get_album_id(albumdict,create_new=True,ignore_albumartists=False,dbconn=None):
+def get_album_id(albumdict: AlbumDict, create_new=True, ignore_albumartists=False, dbconn=None) -> int | None:
	ntitle = normalize_name(albumdict['albumtitle'])
-	artist_ids = [get_artist_id(a,dbconn=dbconn) for a in (albumdict.get('artists') or [])]
+	artist_ids = [get_artist_id(a, dbconn=dbconn) for a in (albumdict.get('artists') or [])]
	artist_ids = list(set(artist_ids))

	op = DB['albums'].select(
		# DB['albums'].c.id
	).where(
-		DB['albums'].c.albtitle_normalized==ntitle
+		DB['albums'].c.albtitle_normalized == ntitle
	)
	result = dbconn.execute(op).all()
	for row in result:
@@ -529,7 +576,7 @@ def get_album_id(albumdict,create_new=True,ignore_albumartists=False,dbconn=None
		op = DB['albumartists'].select(
			# DB['albumartists'].c.artist_id
		).where(
-			DB['albumartists'].c.album_id==row.id
+			DB['albumartists'].c.album_id == row.id
		)
		result = dbconn.execute(op).all()
		match_artist_ids = [r.artist_id for r in result]
@@ -538,11 +585,11 @@ def get_album_id(albumdict,create_new=True,ignore_albumartists=False,dbconn=None
			#print("ID for",albumdict['title'],"was",row[0])
			return row.id

-	if not create_new: return None
+	if not create_new:
+		return None

	op = DB['albums'].insert().values(
-		**album_dict_to_db(albumdict,dbconn=dbconn)
+		**album_dict_to_db(albumdict, dbconn=dbconn)
	)
	result = dbconn.execute(op)
	album_id = result.inserted_primary_key[0]
@@ -557,18 +604,15 @@ def get_album_id(albumdict,create_new=True,ignore_albumartists=False,dbconn=None
	return album_id




### Edit existing


@connection_provider
-def edit_scrobble(scrobble_id,scrobbleupdatedict,dbconn=None):
+def edit_scrobble(scrobble_id: int, scrobbleupdatedict: dict, dbconn=None) -> bool:

	dbentry = scrobble_dict_to_db(scrobbleupdatedict,dbconn=dbconn)
-	dbentry = {k:v for k,v in dbentry.items() if v}
+	dbentry = {k: v for k, v in dbentry.items() if v}

-	print("Updating scrobble",dbentry)
+	print("Updating scrobble", dbentry)

	with SCROBBLE_LOCK:
@@ -579,97 +623,97 @@ def edit_scrobble(scrobble_id,scrobbleupdatedict,dbconn=None):
	)

	dbconn.execute(op)
	return True


# edit function only for primary db information (not linked fields)
@connection_provider
-def edit_artist(id,artistupdatedict,dbconn=None):
+def edit_artist(artist_id: int, artistupdatedict: str, dbconn=None) -> bool:

-	artist = get_artist(id)
+	artist = get_artist(artist_id)
	changedartist = artistupdatedict # well

-	dbentry = artist_dict_to_db(artistupdatedict,dbconn=dbconn)
-	dbentry = {k:v for k,v in dbentry.items() if v}
+	dbentry = artist_dict_to_db(artistupdatedict, dbconn=dbconn)
+	dbentry = {k: v for k, v in dbentry.items() if v}

-	existing_artist_id = get_artist_id(changedartist,create_new=False,dbconn=dbconn)
-	if existing_artist_id not in (None,id):
+	existing_artist_id = get_artist_id(changedartist, create_new=False, dbconn=dbconn)
+	if existing_artist_id not in (None, artist_id):
		raise exc.ArtistExists(changedartist)

	op = DB['artists'].update().where(
-		DB['artists'].c.id==id
+		DB['artists'].c.id == artist_id
	).values(
		**dbentry
	)
	result = dbconn.execute(op)

	return True


# edit function only for primary db information (not linked fields)
@connection_provider
-def edit_track(id,trackupdatedict,dbconn=None):
+def edit_track(track_id: int, trackupdatedict: dict, dbconn=None) -> bool:

-	track = get_track(id,dbconn=dbconn)
-	changedtrack = {**track,**trackupdatedict}
+	track = get_track(track_id, dbconn=dbconn)
+	changedtrack: TrackDict = {**track, **trackupdatedict}

-	dbentry = track_dict_to_db(trackupdatedict,dbconn=dbconn)
-	dbentry = {k:v for k,v in dbentry.items() if v}
+	dbentry = track_dict_to_db(trackupdatedict, dbconn=dbconn)
+	dbentry = {k: v for k, v in dbentry.items() if v}

-	existing_track_id = get_track_id(changedtrack,create_new=False,dbconn=dbconn)
-	if existing_track_id not in (None,id):
+	existing_track_id = get_track_id(changedtrack, create_new=False, dbconn=dbconn)
+	if existing_track_id not in (None, track_id):
		raise exc.TrackExists(changedtrack)

	op = DB['tracks'].update().where(
-		DB['tracks'].c.id==id
+		DB['tracks'].c.id == track_id
	).values(
		**dbentry
	)
	result = dbconn.execute(op)

	return True


# edit function only for primary db information (not linked fields)
@connection_provider
-def edit_album(id,albumupdatedict,dbconn=None):
+def edit_album(album_id: int, albumupdatedict: dict, dbconn=None) -> bool:

-	album = get_album(id,dbconn=dbconn)
-	changedalbum = {**album,**albumupdatedict}
+	album = get_album(album_id, dbconn=dbconn)
+	changedalbum: AlbumDict = {**album, **albumupdatedict}

-	dbentry = album_dict_to_db(albumupdatedict,dbconn=dbconn)
-	dbentry = {k:v for k,v in dbentry.items() if v}
+	dbentry = album_dict_to_db(albumupdatedict, dbconn=dbconn)
+	dbentry = {k: v for k, v in dbentry.items() if v}

-	existing_album_id = get_album_id(changedalbum,create_new=False,dbconn=dbconn)
-	if existing_album_id not in (None,id):
+	existing_album_id = get_album_id(changedalbum, create_new=False, dbconn=dbconn)
+	if existing_album_id not in (None, album_id):
		raise exc.TrackExists(changedalbum)

	op = DB['albums'].update().where(
-		DB['albums'].c.id==id
+		DB['albums'].c.id == album_id
	).values(
		**dbentry
	)
	result = dbconn.execute(op)

	return True


### Edit associations

@connection_provider
-def add_artists_to_tracks(track_ids,artist_ids,dbconn=None):
+def add_artists_to_tracks(track_ids: list[int], artist_ids: list[int], dbconn=None) -> bool:

	op = DB['trackartists'].insert().values([
-		{'track_id':track_id,'artist_id':artist_id}
+		{'track_id': track_id, 'artist_id': artist_id}
		for track_id in track_ids for artist_id in artist_ids
	])

	result = dbconn.execute(op)

	# the resulting tracks could now be duplicates of existing ones
	# this also takes care of clean_db
	merge_duplicate_tracks(dbconn=dbconn)

	return True


@connection_provider
-def remove_artists_from_tracks(track_ids,artist_ids,dbconn=None):
+def remove_artists_from_tracks(track_ids: list[int], artist_ids: list[int], dbconn=None) -> bool:

	# only tracks that have at least one other artist
	subquery = DB['trackartists'].select().where(
@@ -687,16 +731,14 @@ def remove_artists_from_tracks(track_ids,artist_ids,dbconn=None):
	)

	result = dbconn.execute(op)

	# the resulting tracks could now be duplicates of existing ones
	# this also takes care of clean_db
	merge_duplicate_tracks(dbconn=dbconn)

	return True


@connection_provider
-def add_artists_to_albums(album_ids,artist_ids,dbconn=None):
+def add_artists_to_albums(album_ids: list[int], artist_ids: list[int], dbconn=None) -> bool:

	op = DB['albumartists'].insert().values([
		{'album_id':album_id,'artist_id':artist_id}
@@ -704,16 +746,14 @@ def add_artists_to_albums(album_ids,artist_ids,dbconn=None):
	])

	result = dbconn.execute(op)

	# the resulting albums could now be duplicates of existing ones
	# this also takes care of clean_db
	merge_duplicate_albums(dbconn=dbconn)

	return True


@connection_provider
-def remove_artists_from_albums(album_ids,artist_ids,dbconn=None):
+def remove_artists_from_albums(album_ids: list[int], artist_ids: list[int], dbconn=None) -> bool:

	# no check here, albums are allowed to have zero artists

@@ -725,17 +765,16 @@ def remove_artists_from_albums(album_ids,artist_ids,dbconn=None):
	)

	result = dbconn.execute(op)

	# the resulting albums could now be duplicates of existing ones
	# this also takes care of clean_db
	merge_duplicate_albums(dbconn=dbconn)

	return True


### Merge

@connection_provider
-def merge_tracks(target_id,source_ids,dbconn=None):
+def merge_tracks(target_id: int, source_ids: list[int], dbconn=None) -> bool:

	op = DB['scrobbles'].update().where(
		DB['scrobbles'].c.track_id.in_(source_ids)
@@ -744,11 +783,11 @@ def merge_tracks(target_id,source_ids,dbconn=None):
	)
	result = dbconn.execute(op)
	clean_db(dbconn=dbconn)

	return True


@connection_provider
-def merge_artists(target_id,source_ids,dbconn=None):
+def merge_artists(target_id: int, source_ids: list[int], dbconn=None) -> bool:

	# some tracks could already have multiple of the to be merged artists

@@ -776,7 +815,6 @@ def merge_artists(target_id,source_ids,dbconn=None):

	result = dbconn.execute(op)


	# same for albums
	op = DB['albumartists'].select().where(
		DB['albumartists'].c.artist_id.in_(source_ids + [target_id])
@@ -797,7 +835,6 @@ def merge_artists(target_id,source_ids,dbconn=None):

	result = dbconn.execute(op)


	# tracks_artists = {}
	# for row in result:
	# 	tracks_artists.setdefault(row.track_id,[]).append(row.artist_id)
@@ -814,15 +851,14 @@ def merge_artists(target_id,source_ids,dbconn=None):
	# result = dbconn.execute(op)

	# this could have created duplicate tracks and albums
-	merge_duplicate_tracks(artist_id=target_id,dbconn=dbconn)
-	merge_duplicate_albums(artist_id=target_id,dbconn=dbconn)
+	merge_duplicate_tracks(artist_id=target_id, dbconn=dbconn)
+	merge_duplicate_albums(artist_id=target_id, dbconn=dbconn)
	clean_db(dbconn=dbconn)

	return True


@connection_provider
-def merge_albums(target_id,source_ids,dbconn=None):
+def merge_albums(target_id: int, source_ids: list[int], dbconn=None) -> bool:

	op = DB['tracks'].update().where(
		DB['tracks'].c.album_id.in_(source_ids)
@@ -831,7 +867,6 @@ def merge_albums(target_id,source_ids,dbconn=None):
	)
	result = dbconn.execute(op)
	clean_db(dbconn=dbconn)

	return True
@@ -860,19 +895,24 @@ def get_scrobbles_of_artist(artist,since=None,to=None,resolve_references=True,li
		op = op.order_by(sql.desc('timestamp'))
	else:
		op = op.order_by(sql.asc('timestamp'))
-	if limit:
+	if limit and not associated:
+		# if we count associated we cant limit here because we remove stuff later!
		op = op.limit(limit)
	result = dbconn.execute(op).all()

	# remove duplicates (multiple associated artists in the song, e.g. Irene & Seulgi being both counted as Red Velvet)
	# distinct on doesn't seem to exist in sqlite
-	seen = set()
-	filtered_result = []
-	for row in result:
-		if row.timestamp not in seen:
-			filtered_result.append(row)
-			seen.add(row.timestamp)
-	result = filtered_result
+	if associated:
+		seen = set()
+		filtered_result = []
+		for row in result:
+			if row.timestamp not in seen:
+				filtered_result.append(row)
+				seen.add(row.timestamp)
+		result = filtered_result
+		if limit:
+			result = result[:limit]


	if resolve_references:
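The limit fix above matters because deduplication happens in Python after the query: applying LIMIT in SQL first could return a page that shrinks below the requested size once duplicate timestamps are dropped. A tiny illustration with invented timestamps:

    rows = [1000, 1000, 1001, 1002]  # 1000 appears twice via two associated artists

    # limiting to 3 rows before dedup would leave only [1000, 1001];
    # deduplicating first keeps the full page:
    seen, deduped = set(), []
    for ts in rows:
        if ts not in seen:
            deduped.append(ts)
            seen.add(ts)
    assert deduped[:3] == [1000, 1001, 1002]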
@@ -962,7 +1002,6 @@ def get_scrobbles(since=None,to=None,resolve_references=True,limit=None,reverse=
	result = scrobbles_db_to_dict(result,dbconn=dbconn)
	#result = [scrobble_db_to_dict(row,resolve_references=resolve_references) for i,row in enumerate(result) if i<max]


	return result


@@ -1072,7 +1111,7 @@ def count_scrobbles_by_artist(since,to,associated=True,resolve_ids=True,dbconn=N
		DB['scrobbles'].c.timestamp.between(since,to)
	).group_by(
		artistselect
-	).order_by(sql.desc('count'))
+	).order_by(sql.desc('count'),sql.desc('really_by_this_artist'))
	result = dbconn.execute(op).all()

	if resolve_ids:
@@ -1601,48 +1640,52 @@ def get_credited_artists(*artists,dbconn=None):

@cached_wrapper
@connection_provider
-def get_track(id,dbconn=None):
+def get_track(track_id: int, dbconn=None) -> TrackDict:
	op = DB['tracks'].select().where(
-		DB['tracks'].c.id==id
+		DB['tracks'].c.id == track_id
	)
	result = dbconn.execute(op).all()

	trackinfo = result[0]
-	return track_db_to_dict(trackinfo,dbconn=dbconn)
+	return track_db_to_dict(trackinfo, dbconn=dbconn)


@cached_wrapper
@connection_provider
-def get_artist(id,dbconn=None):
+def get_artist(artist_id: int, dbconn=None) -> str:
	op = DB['artists'].select().where(
-		DB['artists'].c.id==id
+		DB['artists'].c.id == artist_id
	)
	result = dbconn.execute(op).all()

	artistinfo = result[0]
-	return artist_db_to_dict(artistinfo,dbconn=dbconn)
+	return artist_db_to_dict(artistinfo, dbconn=dbconn)


@cached_wrapper
@connection_provider
-def get_album(id,dbconn=None):
+def get_album(album_id: int, dbconn=None) -> AlbumDict:
	op = DB['albums'].select().where(
-		DB['albums'].c.id==id
+		DB['albums'].c.id == album_id
	)
	result = dbconn.execute(op).all()

	albuminfo = result[0]
-	return album_db_to_dict(albuminfo,dbconn=dbconn)
+	return album_db_to_dict(albuminfo, dbconn=dbconn)


@cached_wrapper
@connection_provider
-def get_scrobble(timestamp, include_internal=False, dbconn=None):
+def get_scrobble(timestamp: int, include_internal=False, dbconn=None) -> ScrobbleDict:
	op = DB['scrobbles'].select().where(
-		DB['scrobbles'].c.timestamp==timestamp
+		DB['scrobbles'].c.timestamp == timestamp
	)
	result = dbconn.execute(op).all()

	scrobble = result[0]
	return scrobbles_db_to_dict(rows=[scrobble], include_internal=include_internal)[0]


@cached_wrapper
@connection_provider
def search_artist(searchterm,dbconn=None):
@@ -1684,6 +1727,11 @@ def clean_db(dbconn=None):
	log(f"Database Cleanup...")

	to_delete = [
		# NULL associations
+		"from albumartists where album_id is NULL",
+		"from albumartists where artist_id is NULL",
		"from trackartists where track_id is NULL",
		"from trackartists where artist_id is NULL",
+		# tracks with no scrobbles (trackartist entries first)
+		"from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))",
+		"from tracks where id not in (select track_id from scrobbles)",
@@ -1,9 +1,9 @@
import os

import cProfile, pstats
+import time

from doreah.logging import log
-from doreah.timing import Clock

from ..pkg_global.conf import data_dir

@@ -27,8 +27,7 @@ def profile(func):

	def newfunc(*args,**kwargs):

-		clock = Clock()
-		clock.start()
+		starttime = time.time()

		if FULL_PROFILE:
			benchmarkfolder = data_dir['logs']("benchmarks")
@@ -44,7 +43,7 @@ def profile(func):
		if FULL_PROFILE:
			localprofiler.disable()

-		seconds = clock.stop()
+		seconds = time.time() - starttime

		if not SINGLE_CALLS:
			times.setdefault(realfunc,[]).append(seconds)
@@ -284,6 +284,12 @@ def image_request(artist_id=None,track_id=None,album_id=None):
	if result is not None:
		# we got an entry, even if it's that there is no image (value None)
		if result['value'] is None:
+			# fallback to album regardless of setting (because we have no image)
+			if track_id:
+				track = database.sqldb.get_track(track_id)
+				if track.get("album"):
+					album_id = database.sqldb.get_album_id(track["album"])
+					return image_request(album_id=album_id)
			# use placeholder
			if malojaconfig["FANCY_PLACEHOLDER_ART"]:
				placeholder_url = "https://generative-placeholders.glitch.me/image?width=300&height=300&style="
@@ -1,16 +1,18 @@
from datetime import timezone, timedelta, date, time, datetime
from calendar import monthrange
import math
+import zoneinfo
from abc import ABC, abstractmethod

from .pkg_global.conf import malojaconfig


OFFSET = malojaconfig["TIMEZONE"]
-TIMEZONE = timezone(timedelta(hours=OFFSET))
+LOCATION_TIMEZONE = malojaconfig["LOCATION_TIMEZONE"]
+TIMEZONE = timezone(timedelta(hours=OFFSET)) if not LOCATION_TIMEZONE or LOCATION_TIMEZONE not in zoneinfo.available_timezones() else zoneinfo.ZoneInfo(LOCATION_TIMEZONE)
UTC = timezone.utc

-FIRST_SCROBBLE = int(datetime.utcnow().replace(tzinfo=UTC).timestamp())
+FIRST_SCROBBLE = int(datetime.now(UTC).timestamp())

def register_scrobbletime(timestamp):
	global FIRST_SCROBBLE
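The new LOCATION_TIMEZONE setting prefers a named IANA zone (which handles DST) and only falls back to the fixed UTC offset when the name is unset or unknown. A standalone sketch of that selection logic:

    from datetime import timezone, timedelta
    import zoneinfo

    def pick_timezone(offset_hours, location):
        # a valid named zone wins; otherwise use a fixed UTC offset
        if location and location in zoneinfo.available_timezones():
            return zoneinfo.ZoneInfo(location)
        return timezone(timedelta(hours=offset_hours))

    assert str(pick_timezone(2, "Europe/Vienna")) == "Europe/Vienna"
    assert pick_timezone(2, "Not/AZone").utcoffset(None) == timedelta(hours=2)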
@@ -63,7 +65,7 @@ class MTRangeGeneric(ABC):

	# whether we currently live or will ever again live in this range
	def active(self):
-		return (self.last_stamp() > datetime.utcnow().timestamp())
+		return (self.last_stamp() > datetime.now(timezone.utc).timestamp())

	def __contains__(self,timestamp):
		return timestamp >= self.first_stamp() and timestamp <= self.last_stamp()
@@ -111,7 +113,7 @@ class MTRangeGregorian(MTRangeSingular):
	# whether we currently live or will ever again live in this range
	# USE GENERIC SUPER METHOD INSTEAD
	# def active(self):
-	# 	tod = datetime.datetime.utcnow().date()
+	# 	tod = datetime.datetime.now(timezone.utc).date()
	# 	if tod.year > self.year: return False
	# 	if self.precision == 1: return True
	# 	if tod.year == self.year:
@@ -212,16 +214,15 @@ class MTRangeWeek(MTRangeSingular):
	def __init__(self,year=None,week=None):

		# do this so we can construct the week with overflow (eg 2020/-3)
-		thisisoyear_firstday = date.fromchrcalendar(year,1,1)
+		thisisoyear_firstday = date.fromisocalendar(year,1,1) + timedelta(days=malojaconfig['WEEK_OFFSET']-1)
		self.firstday = thisisoyear_firstday + timedelta(days=7*(week-1))
-		self.firstday = date(self.firstday.year,self.firstday.month,self.firstday.day)
-		# for compatibility with pre python3.8 (https://bugs.python.org/issue32417)

		self.lastday = self.firstday + timedelta(days=6)

		# now get the actual year and week number (in case of overflow)
-		self.year,self.week,_ = self.firstday.chrcalendar()
+		fakedate = self.firstday - timedelta(days=malojaconfig['WEEK_OFFSET']-1)
+		# fake date that gives the correct iso return for the real date considering our week offset
+		self.year,self.week,_ = fakedate.isocalendar()
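With WEEK_OFFSET the week no longer has to start on ISO Monday; the code shifts a "fake" date back by the offset so isocalendar() still yields a stable (year, week) pair for the shifted week. A runnable sketch of that trick, with WEEK_OFFSET as a local variable standing in for the config value:

    from datetime import date, timedelta

    WEEK_OFFSET = 0  # 0 = Sunday-first weeks, per the new setting's convention

    def week_of(d):
        # shift back so isocalendar() labels the offset week consistently
        fakedate = d - timedelta(days=WEEK_OFFSET - 1)
        y, w, _ = fakedate.isocalendar()
        return y, w

    # Sunday 2024-01-07 and the following Saturday land in the same offset week
    assert week_of(date(2024, 1, 7)) == week_of(date(2024, 1, 13))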
@@ -329,7 +330,7 @@ class MTRangeComposite(MTRangeGeneric):
		if self.since is None: return FIRST_SCROBBLE
		else: return self.since.first_stamp()
	def last_stamp(self):
-		#if self.to is None: return int(datetime.utcnow().replace(tzinfo=timezone.utc).timestamp())
+		#if self.to is None: return int(datetime.now(timezone.utc).timestamp())
		if self.to is None: return today().last_stamp()
		else: return self.to.last_stamp()

@@ -359,7 +360,9 @@ def today():
def thisweek():
	tod = datetime.now(tz=TIMEZONE)
	tod = date(tod.year,tod.month,tod.day)
-	y,w,_ = tod.chrcalendar()
+	fakedate = tod - timedelta(days=malojaconfig['WEEK_OFFSET']-1)
+	# fake date for correct iso representation
+	y,w,_ = fakedate.isocalendar()
	return MTRangeWeek(y,w)
def thismonth():
	tod = datetime.now(tz=TIMEZONE)
@@ -420,8 +423,8 @@ def get_last_instance(category,current,target,amount):

str_to_time_range = {
	**{s:callable for callable,strlist in currenttime_string_representations for s in strlist},
-	**{s:(lambda i=index:get_last_instance(thismonth,datetime.utcnow().month,i,12)) for index,strlist in enumerate(month_string_representations,1) for s in strlist},
-	**{s:(lambda i=index:get_last_instance(today,datetime.utcnow().isoweekday()+1%7,i,7)) for index,strlist in enumerate(weekday_string_representations,1) for s in strlist}
+	**{s:(lambda i=index:get_last_instance(thismonth,datetime.now(timezone.utc).month,i,12)) for index,strlist in enumerate(month_string_representations,1) for s in strlist},
+	**{s:(lambda i=index:get_last_instance(today,datetime.now(timezone.utc).isoweekday()+1%7,i,7)) for index,strlist in enumerate(weekday_string_representations,1) for s in strlist}
}


@@ -564,7 +567,9 @@ def year_from_timestamp(stamp):
def week_from_timestamp(stamp):
	dt = datetime.fromtimestamp(stamp,tz=TIMEZONE)
	d = date(dt.year,dt.month,dt.day)
-	y,w,_ = d.chrcalendar()
+	fakedate = d - timedelta(days=malojaconfig['WEEK_OFFSET']-1)
+	# fake date for correct iso representation
+	y,w,_ = fakedate.isocalendar()
	return MTRangeWeek(y,w)

def from_timestamp(stamp,unit):
@@ -29,6 +29,8 @@ def uri_to_internal(keys,accepted_entities=('artist','track','album'),forceTrack

	# 1
	filterkeys = {}
+	# this only takes care of the logic - what kind of entity we're dealing with
+	# it does not check with the database if it exists or what the canonical name is!!!
	if "track" in accepted_entities and "title" in keys:
		filterkeys.update({"track":{"artists":keys.getall("trackartist"),"title":keys.get("title")}})
	if "artist" in accepted_entities and "artist" in keys:
@@ -1,4 +1,7 @@
import os

+import doreah.auth
+import doreah.logging
from doreah.configuration import Configuration
from doreah.configuration import types as tp

@@ -17,9 +20,11 @@ AUX_MODE = True
# DIRECRORY_CONFIG, DIRECRORY_STATE, DIRECTORY_LOGS and DIRECTORY_CACHE
# config can only be determined by environment variable, the others can be loaded
# from the config files
-# explicit settings will always be respected, fallback to default
-
-# if default isn't usable, and config writable, find alternative and fix it in settings
+# we don't specify 'default' values in the normal sense of the config object
+# the default is none, meaning the app should figure it out (depending on environment)
+# the actual 'default' values of our folders are simply in code since they are dependent on environment (container?)
+# and we need to actually distinguish them from the user having specified something

# USEFUL FUNCS
pthj = os.path.join
@@ -27,9 +32,7 @@ pthj = os.path.join
def is_dir_usable(pth):
	try:
		os.makedirs(pth,exist_ok=True)
-		os.mknod(pthj(pth,".test"))
-		os.remove(pthj(pth,".test"))
-		return True
+		return os.access(pth,os.W_OK)
	except Exception:
		return False

@@ -40,7 +43,10 @@ def get_env_vars(key,pathsuffix=[]):

directory_info = {
	"config":{
-		"sentinel":"rules",
+		"sentinel":".maloja_config_sentinel",
+		"possible_folders_container":[
+			"/config/config"
+		],
		"possible_folders":[
			"/etc/maloja",
			os.path.expanduser("~/.local/share/maloja")
@@ -48,15 +54,22 @@ directory_info = {
		"setting":"directory_config"
	},
	"cache":{
-		"sentinel":"dummy",
+		"sentinel":".maloja_cache_sentinel",
+		"possible_folders_container":[
+			"/config/cache"
+		],
		"possible_folders":[
			"/var/cache/maloja",
-			os.path.expanduser("~/.local/share/maloja/cache")
+			os.path.expanduser("~/.local/share/maloja/cache"),
+			"/tmp/maloja"
		],
		"setting":"directory_cache"
	},
	"state":{
-		"sentinel":"scrobbles",
+		"sentinel":".maloja_state_sentinel",
+		"possible_folders_container":[
+			"/config/state"
+		],
		"possible_folders":[
			"/var/lib/maloja",
			os.path.expanduser("~/.local/share/maloja")
@@ -64,7 +77,10 @@ directory_info = {
		"setting":"directory_state"
	},
	"logs":{
-		"sentinel":"dbfix",
+		"sentinel":".maloja_logs_sentinel",
+		"possible_folders_container":[
+			"/config/logs"
+		],
		"possible_folders":[
			"/var/log/maloja",
			os.path.expanduser("~/.local/share/maloja/logs")
@@ -77,51 +93,51 @@ directory_info = {
# checks if one has been in use before and writes it to dict/config
# if not, determines which to use and writes it to dict/config
# returns determined folder
-def find_good_folder(datatype,configobject):
+def find_good_folder(datatype):
	info = directory_info[datatype]

+	possible_folders = info['possible_folders']
+	if os.environ.get("MALOJA_CONTAINER"):
+		possible_folders = info['possible_folders_container'] + possible_folders
+
	# check each possible folder if its used
-	for p in info['possible_folders']:
+	for p in possible_folders:
		if os.path.exists(pthj(p,info['sentinel'])):
-			#print(p,"has been determined as maloja's folder for",datatype)
-			configobject[info['setting']] = p
-			return p
+			if is_dir_usable(p):
+				#print(p,"was apparently used as maloja's folder for",datatype,"- fixing in settings")
+				return p
+			else:
+				raise PermissionError(f"Can no longer use previously used {datatype} folder {p}")

	#print("Could not find previous",datatype,"folder")
	# check which one we can use
-	for p in info['possible_folders']:
+	for p in possible_folders:
		if is_dir_usable(p):
			#print(p,"has been selected as maloja's folder for",datatype)
-			configobject[info['setting']] = p
			return p
	#print("No folder can be used for",datatype)
	#print("This should not happen!")
	raise PermissionError(f"No folder could be found for {datatype}")




### STEP 1 - find out where the settings file is
# environment variables

maloja_dir_config = os.environ.get("MALOJA_DATA_DIRECTORY") or os.environ.get("MALOJA_DIRECTORY_CONFIG")


if maloja_dir_config is None:
-	maloja_dir_config = find_good_folder('config',{})
-	found_new_config_dir = True
+	# if nothing is set, we set our own
+	maloja_dir_config = find_good_folder('config')
else:
-	found_new_config_dir = False
-	# remember whether we had to find our config dir or it was user-specified
+	# if there is an environment variable, this is 100% explicitly defined by the user, so we respect it
+	# the user might run more than one instances on the same machine, so we don't do any heuristics here
+	# if you define this, we believe it!
+	pass

os.makedirs(maloja_dir_config,exist_ok=True)

-oldsettingsfile = pthj(maloja_dir_config,"settings","settings.ini")
-newsettingsfile = pthj(maloja_dir_config,"settings.ini")
-
-
-if os.path.exists(oldsettingsfile):
-	os.rename(oldsettingsfile,newsettingsfile)
+settingsfile = pthj(maloja_dir_config,"settings.ini")


### STEP 2 - create settings object
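find_good_folder now works in two passes: first it looks for a sentinel file marking a previously used directory (and refuses to continue if that directory became unwritable), then it falls back to the first writable candidate. A compact sketch of that resolution order, with invented folder names:

    import os

    def resolve_folder(candidates, sentinel):
        # pass 1: a folder that was used before wins, but must still be writable
        for p in candidates:
            if os.path.exists(os.path.join(p, sentinel)):
                if os.access(p, os.W_OK):
                    return p
                raise PermissionError(f"previously used folder {p} is no longer writable")
        # pass 2: otherwise take the first folder we can create and write to
        for p in candidates:
            try:
                os.makedirs(p, exist_ok=True)
                if os.access(p, os.W_OK):
                    return p
            except OSError:
                continue
        raise PermissionError("no usable folder found")

    # e.g. resolve_folder(["/var/lib/myapp", "/tmp/myapp"], ".sentinel")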
@@ -131,10 +147,10 @@ malojaconfig = Configuration(
	settings={
		"Setup":{
			"data_directory":(tp.String(), "Data Directory", None, "Folder for all user data. Overwrites all choices for specific directories."),
-			"directory_config":(tp.String(), "Config Directory", "/etc/maloja", "Folder for config data. Only applied when global data directory is not set."),
-			"directory_state":(tp.String(), "State Directory", "/var/lib/maloja", "Folder for state data. Only applied when global data directory is not set."),
-			"directory_logs":(tp.String(), "Log Directory", "/var/log/maloja", "Folder for log data. Only applied when global data directory is not set."),
-			"directory_cache":(tp.String(), "Cache Directory", "/var/cache/maloja", "Folder for cache data. Only applied when global data directory is not set."),
+			"directory_config":(tp.String(), "Config Directory", None, "Folder for config data. Only applied when global data directory is not set."),
+			"directory_state":(tp.String(), "State Directory", None, "Folder for state data. Only applied when global data directory is not set."),
+			"directory_logs":(tp.String(), "Log Directory", None, "Folder for log data. Only applied when global data directory is not set."),
+			"directory_cache":(tp.String(), "Cache Directory", None, "Folder for cache data. Only applied when global data directory is not set."),
			"skip_setup":(tp.Boolean(), "Skip Setup", False, "Make server setup process non-interactive. Vital for Docker."),
			"force_password":(tp.String(), "Force Password", None, "On startup, overwrite admin password with this one. This should usually only be done via environment variable in Docker."),
			"clean_output":(tp.Boolean(), "Avoid Mutable Console Output", False, "Use if console output will be redirected e.g. to a web interface.")
@@ -164,7 +180,7 @@ malojaconfig = Configuration(
			"name":(tp.String(), "Name", "Generic Maloja User")
		},
		"Third Party Services":{
-			"metadata_providers":(tp.List(tp.String()), "Metadata Providers", ['lastfm','spotify','deezer','musicbrainz'], "Which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first."),
+			"metadata_providers":(tp.List(tp.String()), "Metadata Providers", ['lastfm','spotify','deezer','audiodb','musicbrainz'], "List of which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first."),
			"scrobble_lastfm":(tp.Boolean(), "Proxy-Scrobble to Last.fm", False),
			"lastfm_api_key":(tp.String(), "Last.fm API Key", None),
			"lastfm_api_secret":(tp.String(), "Last.fm API Secret", None),
@@ -189,7 +205,10 @@ malojaconfig = Configuration(
			"delimiters_informal":(tp.Set(tp.String()), "Informal Delimiters", ["vs.","vs","&"], "Delimiters in informal artist strings with spaces expected around them"),
			"delimiters_formal":(tp.Set(tp.String()), "Formal Delimiters", [";","/","|","␝","␞","␟"], "Delimiters used to tag multiple artists when only one tag field is available"),
			"filters_remix":(tp.Set(tp.String()), "Remix Filters", ["Remix", "Remix Edit", "Short Mix", "Extended Mix", "Soundtrack Version"], "Filters used to recognize the remix artists in the title"),
-			"parse_remix_artists":(tp.Boolean(), "Parse Remix Artists", False)
+			"parse_remix_artists":(tp.Boolean(), "Parse Remix Artists", False),
+			"week_offset":(tp.Integer(), "Week Begin Offset", 0, "Start of the week for the purpose of weekly statistics. 0 = Sunday, 6 = Saturday"),
+			"timezone":(tp.Integer(), "UTC Offset", 0),
+			"location_timezone":(tp.String(), "Location Timezone", None)
		},
		"Web Interface":{
			"default_range_startpage":(tp.Choice({'alltime':'All Time','year':'Year','month':"Month",'week':'Week'}), "Default Range for Startpage Stats", "year"),
@@ -199,27 +218,24 @@ malojaconfig = Configuration(
			"display_art_icons":(tp.Boolean(), "Display Album/Artist Icons", True),
			"default_album_artist":(tp.String(), "Default Albumartist", "Various Artists"),
			"use_album_artwork_for_tracks":(tp.Boolean(), "Use Album Artwork for tracks", True),
-			"fancy_placeholder_art":(tp.Boolean(), "Use fancy placeholder artwork",True),
+			"fancy_placeholder_art":(tp.Boolean(), "Use fancy placeholder artwork",False),
			"show_play_number_on_tiles":(tp.Boolean(), "Show amount of plays on tiles",False),
			"discourage_cpu_heavy_stats":(tp.Boolean(), "Discourage CPU-heavy stats", False, "Prevent visitors from mindlessly clicking on CPU-heavy options. Does not actually disable them for malicious actors!"),
			"use_local_images":(tp.Boolean(), "Use Local Images", True),
			#"local_image_rotate":(tp.Integer(), "Local Image Rotate", 3600),
-			"timezone":(tp.Integer(), "UTC Offset", 0),
			"time_format":(tp.String(), "Time Format", "%d. %b %Y %I:%M %p"),
			"theme":(tp.String(), "Theme", "maloja")
		}
	},
-	configfile=newsettingsfile,
+	configfile=settingsfile,
	save_endpoint="/apis/mlj_1/settings",
	env_prefix="MALOJA_",
	extra_files=["/run/secrets/maloja.yml","/run/secrets/maloja.ini"]

)

-if found_new_config_dir:
-	try:
-		malojaconfig["DIRECTORY_CONFIG"] = maloja_dir_config
-	except PermissionError as e:
-		pass
+if not malojaconfig.readonly:
+	malojaconfig["DIRECTORY_CONFIG"] = maloja_dir_config
	# this really doesn't matter because when are we gonna load info about where
	# the settings file is stored from the settings file
	# but oh well
@@ -241,17 +257,17 @@ except PermissionError as e:
	pass


-### STEP 3 - check all possible folders for files (old installation)
+### STEP 3 - now check the other directories


if not malojaconfig.readonly:
	for datatype in ("state","cache","logs"):
-		# obviously default values shouldn't trigger this
-		# if user has nothing specified, we need to use this
-		if malojaconfig.get_specified(directory_info[datatype]['setting']) is None and malojaconfig.get_specified('DATA_DIRECTORY') is None:
-			find_good_folder(datatype,malojaconfig)
+		# if the setting is specified in the file or via a user environment variable, we accept it (we'll check later if it's usable)
+		if malojaconfig[directory_info[datatype]['setting']] or malojaconfig['DATA_DIRECTORY']:
+			pass
+		# otherwise, find a good one
+		else:
+			malojaconfig[directory_info[datatype]['setting']] = find_good_folder(datatype)



@@ -279,11 +295,11 @@ else:
		"logs":pthj(malojaconfig['DATA_DIRECTORY'],"logs"),
	}


data_directories = {
	"auth":pthj(dir_settings['state'],"auth"),
	"backups":pthj(dir_settings['state'],"backups"),
	"images":pthj(dir_settings['state'],"images"),
+	"import":pthj(dir_settings['state'],"import"),
	"scrobbles":pthj(dir_settings['state']),
	"rules":pthj(dir_settings['config'],"rules"),
	"clients":pthj(dir_settings['config']),
@ -297,39 +313,51 @@ data_directories = {
|
||||
}
|
||||
|
||||
for identifier,path in data_directories.items():
|
||||
os.makedirs(path,exist_ok=True)
|
||||
if path is None:
|
||||
continue
|
||||
|
||||
if malojaconfig.readonly and (path == dir_settings['config'] or path.startswith(dir_settings['config']+'/')):
|
||||
continue
|
||||
|
||||
try:
|
||||
os.makedirs(path,exist_ok=True)
|
||||
if not is_dir_usable(path): raise PermissionError(f"Directory {path} is not usable!")
|
||||
except PermissionError:
|
||||
# special case: cache does not contain info that can't be refetched, so no need to require user intervention
|
||||
# just move to the next one
|
||||
if identifier in ['cache']:
|
||||
print("Cannot use",path,"for cache, finding new folder...")
|
||||
data_directories['cache'] = dir_settings['cache'] = malojaconfig['DIRECTORY_CACHE'] = find_good_folder('cache')
|
||||
else:
|
||||
print(f"Directory for {identifier} ({path}) is not writeable.")
|
||||
print("Please change permissions or settings!")
|
||||
print("Make sure Maloja has write and execute access to this directory.")
|
||||
raise
|
||||
|
||||
class DataDirs:
|
||||
def __init__(self, dirs):
|
||||
self.dirs = dirs
|
||||
|
||||
def __getitem__(self, key):
|
||||
return lambda *x, k=key: pthj(self.dirs[k], *x)
|
||||
|
||||
data_dir = DataDirs(data_directories)
|
||||
|
||||
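The `DataDirs` accessor above returns a path factory rather than a plain path. A minimal sketch of how it behaves, assuming `pthj` is an `os.path.join`-style helper (which is how it is used throughout this file):

```python
from os.path import join as pthj

data_dir = DataDirs({'logs': '/var/lib/maloja/logs'})  # hypothetical base directory
print(data_dir['logs']())              # -> /var/lib/maloja/logs
print(data_dir['logs']("maloja.log"))  # -> /var/lib/maloja/logs/maloja.log
```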
### DOREAH OBJECTS

auth = doreah.auth.AuthManager(singleuser=True,cookieprefix='maloja',stylesheets=("/maloja.css",),dbfile=data_dir['auth']("auth.sqlite"))

#logger = doreah.logging.Logger(logfolder=data_dir['logs']() if malojaconfig["LOGGING"] else None)
#log = logger.log

# this is not how its supposed to be done, but lets ease the transition
doreah.logging.defaultlogger.logfolder = data_dir['logs']() if malojaconfig["LOGGING"] else None

data_dir = {
    k:lambda *x,k=k: pthj(data_directories[k],*x) for k in data_directories
}

### DOREAH CONFIGURATION

from doreah import config

config(
    auth={
        "multiuser":False,
        "cookieprefix":"maloja",
        "stylesheets":["/maloja.css"],
        "dbfile":data_dir['auth']("auth.ddb")
    },
    logging={
        "logfolder": data_dir['logs']() if malojaconfig["LOGGING"] else None
    },
    regular={
        "offset": malojaconfig["TIMEZONE"]
    }
)

custom_css_files = [f for f in os.listdir(data_dir['css']()) if f.lower().endswith('.css')]
try:
    custom_css_files = [f for f in os.listdir(data_dir['css']()) if f.lower().endswith('.css')]
except FileNotFoundError:
    custom_css_files = []

from ..database.sqldb import set_maloja_info
set_maloja_info({'last_run_version':VERSION})

@@ -28,40 +28,3 @@ try:
    except Exception:
        pass

# proper sunday-first weeks
# damn iso heathens

from datetime import date, timedelta
import datetime

class expandeddate(date):

    def chrweekday(self):
        return self.isoweekday() % 7 + 1 # Sunday-first numbering: Sun=1 ... Sat=7

    def chrcalendar(self):
        tomorrow = self + timedelta(days=1)
        cal = tomorrow.isocalendar()
        return (cal[0],cal[1],cal[2])

    @classmethod
    def fromchrcalendar(cls,y,w,d):
        try:
            return datetime.date.fromisocalendar(y,w,d) - timedelta(days=1) #sunday instead of monday
        except Exception:
            # pre python3.8 compatibility
            firstdayofyear = datetime.date(y,1,1)
            wkday = firstdayofyear.isoweekday()
            if wkday <= 4: # day up to thursday -> this week belongs to the new year
                firstisodayofyear = firstdayofyear - timedelta(days=wkday) #this also shifts to sunday-first weeks
            else: # if not, still old year
                firstisodayofyear = firstdayofyear + timedelta(days=7-wkday) #same
            return firstisodayofyear + timedelta(days=(w-1)*7) + timedelta(days=d-1)

datetime.date = expandeddate
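A quick usage sketch of the class above (the dates are arbitrary; `expandeddate` and the imports are as defined in the preceding block):

```python
d = expandeddate(2024, 1, 3)   # a Wednesday
assert d.isoweekday() == 3     # ISO numbering is Monday-first
assert d.chrweekday() == 4     # Sunday-first numbering: Sun=1 ... Sat=7

# chr week 1 of 2024 starts one day before ISO week 1, on Sunday 2023-12-31:
assert expandeddate.fromchrcalendar(2024, 1, 1) == date(2023, 12, 31)
```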
@@ -12,11 +12,12 @@ def export(targetfolder=None):
        targetfolder = os.getcwd()

    timestr = time.strftime("%Y_%m_%d_%H_%M_%S")
    timestamp = int(time.time()) # ok this is technically a separate time get from above, but those ms are not gonna matter, and im too lazy to change it all to datetime
    filename = f"maloja_export_{timestr}.json"
    outputfile = os.path.join(targetfolder,filename)
    assert not os.path.exists(outputfile)

    data = {'scrobbles':get_scrobbles()}
    data = {'maloja':{'export_time': timestamp },'scrobbles':get_scrobbles()}
    with open(outputfile,'w') as outfd:
        json.dump(data,outfd,indent=3)
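The new `'maloja'` envelope key is what later allows an export to be recognized by content rather than by filename (see the JSON fallback in `import_scrobbles` below). A hedged reader sketch:

```python
import json

# hypothetical export file produced by the function above
with open("maloja_export_2024_01_01_12_00_00.json") as fd:
    data = json.load(fd)

if 'maloja' in data:  # identifies the file as a Maloja export
    print("exported at unix time", data['maloja']['export_time'])
    print(len(data['scrobbles']), "scrobbles contained")
```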
@@ -32,43 +32,62 @@ def import_scrobbles(inputf):
    }

    filename = os.path.basename(inputf)
    importfunc = None

    if re.match(r".*\.csv",filename):
        typeid,typedesc = "lastfm","Last.fm"
    if re.match(r"recenttracks-.*\.csv", filename):
        typeid, typedesc = "lastfm", "Last.fm (ghan CSV)"
        importfunc = parse_lastfm_ghan_csv

    elif re.match(r".*\.csv", filename):
        typeid,typedesc = "lastfm", "Last.fm (benjaminbenben CSV)"
        importfunc = parse_lastfm

    elif re.match(r"Streaming_History_Audio.+\.json",filename):
        typeid,typedesc = "spotify","Spotify"
    elif re.match(r"Streaming_History_Audio.+\.json", filename):
        typeid,typedesc = "spotify", "Spotify"
        importfunc = parse_spotify_lite

    elif re.match(r"endsong_[0-9]+\.json",filename):
        typeid,typedesc = "spotify","Spotify"
    elif re.match(r"endsong_[0-9]+\.json", filename):
        typeid,typedesc = "spotify", "Spotify"
        importfunc = parse_spotify

    elif re.match(r"StreamingHistory[0-9]+\.json",filename):
        typeid,typedesc = "spotify","Spotify"
    elif re.match(r"StreamingHistory[0-9]+\.json", filename):
        typeid,typedesc = "spotify", "Spotify"
        importfunc = parse_spotify_lite_legacy

    elif re.match(r"maloja_export[_0-9]*\.json",filename):
        typeid,typedesc = "maloja","Maloja"
    elif re.match(r"maloja_export[_0-9]*\.json", filename):
        typeid,typedesc = "maloja", "Maloja"
        importfunc = parse_maloja

    # username_lb-YYYY-MM-DD.json
    elif re.match(r".*_lb-[0-9-]+\.json",filename):
        typeid,typedesc = "listenbrainz","ListenBrainz"
    elif re.match(r".*_lb-[0-9-]+\.json", filename):
        typeid,typedesc = "listenbrainz", "ListenBrainz"
        importfunc = parse_listenbrainz

    elif re.match(r"\.scrobbler\.log",filename):
        typeid,typedesc = "rockbox","Rockbox"
    elif re.match(r"\.scrobbler\.log", filename):
        typeid,typedesc = "rockbox", "Rockbox"
        importfunc = parse_rockbox

    else:
    elif re.match(r"recenttracks-.*\.json", filename):
        typeid, typedesc = "lastfm", "Last.fm (ghan JSON)"
        importfunc = parse_lastfm_ghan_json

    elif re.match(r".*\.json",filename):
        try:
            with open(inputf,'r') as fd:
                data = json.load(fd)
            if 'maloja' in data:
                typeid,typedesc = "maloja","Maloja"
                importfunc = parse_maloja
        except Exception:
            pass

    if not importfunc:
        print("File",inputf,"could not be identified as a valid import source.")
        return result

    print(f"Parsing {col['yellow'](inputf)} as {col['cyan'](typedesc)} export")
    print("This could take a while...")
    print(f"Parsing {col['yellow'](inputf)} as {col['cyan'](typedesc)} export.")
    print(col['red']("Please double-check if this is correct - if the import fails, the file might have been interpreted as the wrong type."))

    timestamps = set()
    scrobblebuffer = []

@@ -131,24 +150,29 @@ def import_scrobbles(inputf):

    return result


def parse_spotify_lite_legacy(inputf):
    pth = os.path
    inputfolder = pth.relpath(pth.dirname(pth.abspath(inputf)))
    # use absolute paths internally for peace of mind. just change representation for console output
    inputf = pth.abspath(inputf)
    inputfolder = pth.dirname(inputf)
    filenames = re.compile(r'StreamingHistory[0-9]+\.json')
    inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    #inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    inputfiles = [inputf]

    if len(inputfiles) == 0:
        print("No files found!")
        return
    #if len(inputfiles) == 0:
    #	print("No files found!")
    #	return

    if inputfiles != [inputf]:
        print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
        if not ask("Import " + ", ".join(col['yellow'](i) for i in inputfiles) + "?",default=True):
            inputfiles = [inputf]
    #if inputfiles != [inputf]:
    #	print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
    #	if not ask("Import " + ", ".join(col['yellow'](pth.basename(i)) for i in inputfiles) + "?",default=True):
    #		inputfiles = [inputf]
    #		print("Only importing", col['yellow'](pth.basename(inputf)))

    for inputf in inputfiles:

        print("Importing",col['yellow'](inputf),"...")
        #print("Importing",col['yellow'](inputf),"...")
        with open(inputf,'r') as inputfd:
            data = json.load(inputfd)

@@ -183,22 +207,26 @@ def parse_spotify_lite_legacy(inputf):

def parse_spotify_lite(inputf):
    pth = os.path
    inputfolder = pth.relpath(pth.dirname(pth.abspath(inputf)))
    # use absolute paths internally for peace of mind. just change representation for console output
    inputf = pth.abspath(inputf)
    inputfolder = pth.dirname(inputf)
    filenames = re.compile(r'Streaming_History_Audio.+\.json')
    inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    #inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    inputfiles = [inputf]

    if len(inputfiles) == 0:
        print("No files found!")
        return
    #if len(inputfiles) == 0:
    #	print("No files found!")
    #	return

    if inputfiles != [inputf]:
        print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
        if not ask("Import " + ", ".join(col['yellow'](i) for i in inputfiles) + "?",default=True):
            inputfiles = [inputf]
    #if inputfiles != [inputf]:
    #	print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
    #	if not ask("Import " + ", ".join(col['yellow'](pth.basename(i)) for i in inputfiles) + "?",default=True):
    #		inputfiles = [inputf]
    #		print("Only importing", col['yellow'](pth.basename(inputf)))

    for inputf in inputfiles:

        print("Importing",col['yellow'](inputf),"...")
        #print("Importing",col['yellow'](inputf),"...")
        with open(inputf,'r') as inputfd:
            data = json.load(inputfd)

@@ -237,20 +265,25 @@ def parse_spotify_lite(inputf):

    print()


def parse_spotify(inputf):
    pth = os.path
    inputfolder = pth.relpath(pth.dirname(pth.abspath(inputf)))
    # use absolute paths internally for peace of mind. just change representation for console output
    inputf = pth.abspath(inputf)
    inputfolder = pth.dirname(inputf)
    filenames = re.compile(r'endsong_[0-9]+\.json')
    inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    #inputfiles = [os.path.join(inputfolder,f) for f in os.listdir(inputfolder) if filenames.match(f)]
    inputfiles = [inputf]

    if len(inputfiles) == 0:
        print("No files found!")
        return
    #if len(inputfiles) == 0:
    #	print("No files found!")
    #	return

    if inputfiles != [inputf]:
        print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
        if not ask("Import " + ", ".join(col['yellow'](i) for i in inputfiles) + "?",default=True):
            inputfiles = [inputf]
    #if inputfiles != [inputf]:
    #	print("Spotify files should all be imported together to identify duplicates across the whole dataset.")
    #	if not ask("Import " + ", ".join(col['yellow'](pth.basename(i)) for i in inputfiles) + "?",default=True):
    #		inputfiles = [inputf]
    #		print("Only importing", col['yellow'](pth.basename(inputf)))

    # we keep timestamps here as well to remove duplicates because spotify's export
    # is messy - this is specific to this import type and should not be mixed with
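The timestamp set mentioned in the comment above deduplicates entries within a single import run. An illustrative sketch of the idea, assuming entries carry a `'scrobble_time'` field as in the parsers below (the real buffering lives in `import_scrobbles`):

```python
seen_timestamps = set()
for entry in parsed_entries:  # hypothetical iterable of parsed scrobbles
    ts = entry['scrobble_time']
    if ts in seen_timestamps:
        continue  # duplicate entry in Spotify's messy export
    seen_timestamps.add(ts)
```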
@@ -261,7 +294,7 @@ def parse_spotify(inputf):

    for inputf in inputfiles:

        print("Importing",col['yellow'](inputf),"...")
        #print("Importing",col['yellow'](inputf),"...")
        with open(inputf,'r') as inputfd:
            data = json.load(inputfd)

@@ -345,6 +378,7 @@ def parse_spotify(inputf):

    print()


def parse_lastfm(inputf):

    with open(inputf,'r',newline='') as inputfd:

@@ -379,6 +413,44 @@ def parse_lastfm(inputf):
                yield ('FAIL',None,f"{row} (Line {line}) could not be parsed. Scrobble not imported. ({repr(e)})")
                continue


def parse_lastfm_ghan_json(inputf):
    with open(inputf, 'r') as inputfd:
        data = json.load(inputfd)

    for entry in data:
        for track in entry['track']:
            yield ('CONFIDENT_IMPORT', {
                'track_title': track['name'],
                'track_artists': track['artist']['#text'],
                'track_length': None,
                'album_name': track['album']['#text'],
                'scrobble_time': int(track['date']['uts']),
                'scrobble_duration': None
            }, '')


def parse_lastfm_ghan_csv(inputf):
    with open(inputf, 'r') as inputfd:
        reader = csv.DictReader(inputfd)

        for row in reader:
            yield ('CONFIDENT_IMPORT', {
                'track_title': row['track'],
                'track_artists': row['artist'],
                'track_length': None,
                'album_name': row['album'],
                'scrobble_time': int(row['uts']),
                'scrobble_duration': None
            }, '')


def parse_listenbrainz(inputf):

    with open(inputf,'r') as inputfd:
@@ -3,6 +3,7 @@ import os
from threading import Thread
from importlib import resources
import time
from magic import from_file


# server stuff
@@ -12,14 +13,13 @@ from jinja2.exceptions import TemplateNotFound

# doreah toolkit
from doreah.logging import log
from doreah import auth

# rest of the project
from . import database
from .database.jinjaview import JinjaDBConnection
from .images import image_request
from .malojauri import uri_to_internal, remove_identical
from .pkg_global.conf import malojaconfig, data_dir
from .pkg_global.conf import malojaconfig, data_dir, auth
from .pkg_global import conf
from .jinjaenv.context import jinja_environment
from .apis import init_apis, apikeystore

@@ -97,7 +97,7 @@ aliases = {

### API

auth.authapi.mount(server=webserver)
conf.auth.authapi.mount(server=webserver)
init_apis(webserver)

# redirects for backwards compatibility

@@ -155,7 +155,8 @@ def static_image(pth):

@webserver.route("/cacheimages/<uuid>")
def static_proxied_image(uuid):
    return static_file(uuid,root=data_dir['cache']('images'))
    mimetype = from_file(os.path.join(data_dir['cache']('images'),uuid),True)
    return static_file(uuid,root=data_dir['cache']('images'),mimetype=mimetype)

@webserver.route("/login")
def login():

@@ -166,16 +167,16 @@ def login():
@webserver.route("/media/<name>.<ext>")
def static(name,ext):
    assert ext in ["txt","ico","jpeg","jpg","png","less","js","ttf","css"]
    with resources.files('maloja') / 'web' / 'static' as staticfolder:
        response = static_file(ext + "/" + name + "." + ext,root=staticfolder)
    staticfolder = resources.files('maloja') / 'web' / 'static'
    response = static_file(ext + "/" + name + "." + ext,root=staticfolder)
    response.set_header("Cache-Control", "public, max-age=3600")
    return response

# new, direct reference
@webserver.route("/static/<path:path>")
def static(path):
    with resources.files('maloja') / 'web' / 'static' as staticfolder:
        response = static_file(path,root=staticfolder)
    staticfolder = resources.files('maloja') / 'web' / 'static'
    response = static_file(path,root=staticfolder)
    response.set_header("Cache-Control", "public, max-age=3600")
    return response

@@ -197,7 +198,7 @@ def jinja_page(name):
    if name in aliases: redirect(aliases[name])
    keys = remove_identical(FormsDict.decode(request.query))

    adminmode = request.cookies.get("adminmode") == "true" and auth.check(request)
    adminmode = request.cookies.get("adminmode") == "true" and auth.check_request(request)

    with JinjaDBConnection() as conn:

@@ -222,7 +223,7 @@ def jinja_page(name):
    return res

@webserver.route("/<name:re:admin.*>")
@auth.authenticated
@auth.authenticated_function()
def jinja_page_private(name):
    return jinja_page(name)
maloja/setup.py
@@ -1,14 +1,13 @@
import os
import shutil
import stat

from importlib import resources
try:
    from setuptools import distutils
except ImportError:
    import distutils
from doreah.io import col, ask, prompt
from doreah import auth
from pathlib import PosixPath

from .pkg_global.conf import data_dir, dir_settings, malojaconfig
from doreah.io import col, ask, prompt

from .pkg_global.conf import data_dir, dir_settings, malojaconfig, auth


@@ -23,60 +22,86 @@ ext_apikeys = [


def copy_initial_local_files():
    with resources.files("maloja") / 'data_files' as folder:
        for cat in dir_settings:
            distutils.dir_util.copy_tree(os.path.join(folder,cat),dir_settings[cat],update=False)
    data_file_source = resources.files("maloja") / 'data_files'
    for cat in dir_settings:
        if dir_settings[cat] is None:
            continue
        if cat == 'config' and malojaconfig.readonly:
            continue

        # to avoid permission problems with the root dir
        for subfolder in os.listdir(data_file_source / cat):
            src = data_file_source / cat / subfolder
            dst = PosixPath(dir_settings[cat]) / subfolder
            if os.path.isdir(src):
                shutil.copytree(src, dst, dirs_exist_ok=True)
                # fix permissions (u+w)
                for dirpath, _, filenames in os.walk(dst):
                    os.chmod(dirpath, os.stat(dirpath).st_mode | stat.S_IWUSR)
                    for filename in filenames:
                        filepath = os.path.join(dirpath, filename)
                        os.chmod(filepath, os.stat(filepath).st_mode | stat.S_IWUSR)


charset = list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") + list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
def randomstring(length=32):
    import random
    return "".join(str(random.choice(charset)) for _ in range(length))


def setup():

    copy_initial_local_files()
    SKIP = malojaconfig["SKIP_SETUP"]

    print("Various external services can be used to display images. If not enough of them are set up, only local images will be used.")
    for k in ext_apikeys:
        keyname = malojaconfig.get_setting_info(k)['name']
        key = malojaconfig[k]
        if key is False:
            print(f"\tCurrently not using a {col['red'](keyname)} for image display.")
        elif key is None or key == "ASK":
            promptmsg = f"\tPlease enter your {col['gold'](keyname)}. If you do not want to use one at this moment, simply leave this empty and press Enter."
            key = prompt(promptmsg,types=(str,),default=False,skip=SKIP)
            malojaconfig[k] = key
        else:
            print(f"\t{col['lawngreen'](keyname)} found.")


    # OWN API KEY
    from .apis import apikeystore
    if len(apikeystore) == 0:
        answer = ask("Do you want to set up a key to enable scrobbling? Your scrobble extension needs that key so that only you can scrobble tracks to your database.",default=True,skip=SKIP)
        if answer:
            key = apikeystore.generate_key('default')
            print("Your API Key: " + col["yellow"](key))

    # PASSWORD
    forcepassword = malojaconfig["FORCE_PASSWORD"]
    # this is mainly meant for docker, supply password via environment variable

    if forcepassword is not None:
        # user has specified to force the pw, nothing else matters
        auth.defaultuser.setpw(forcepassword)
        print("Password has been set.")
    elif auth.defaultuser.checkpw("admin"):
        # if the actual pw is admin, it means we've never set this up properly (eg first start after update)
        while True:
            newpw = prompt("Please set a password for web backend access. Leave this empty to generate a random password.",skip=SKIP,secret=True)
            if newpw is None:
                newpw = randomstring(32)
                print("Generated password:",col["yellow"](newpw))
                break
    try:
        print("Various external services can be used to display images. If not enough of them are set up, only local images will be used.")
        for k in ext_apikeys:
            keyname = malojaconfig.get_setting_info(k)['name']
            key = malojaconfig[k]
            if key is False:
                print(f"\tCurrently not using a {col['red'](keyname)} for image display.")
            elif key is None or key == "ASK":
                if malojaconfig.readonly:
                    print(f"\tCurrently not using a {col['red'](keyname)} for image display - config is read only.")
                else:
                    promptmsg = f"\tPlease enter your {col['gold'](keyname)}. If you do not want to use one at this moment, simply leave this empty and press Enter."
                    key = prompt(promptmsg,types=(str,),default=False,skip=SKIP)
                    malojaconfig[k] = key
            else:
                newpw_repeat = prompt("Please type again to confirm.",skip=SKIP,secret=True)
                if newpw != newpw_repeat: print("Passwords do not match!")
                else: break
            auth.defaultuser.setpw(newpw)
                print(f"\t{col['lawngreen'](keyname)} found.")


        # OWN API KEY
        from .apis import apikeystore
        if len(apikeystore) == 0:
            answer = ask("Do you want to set up a key to enable scrobbling? Your scrobble extension needs that key so that only you can scrobble tracks to your database.",default=True,skip=SKIP)
            if answer:
                key = apikeystore.generate_key('default')
                print("Your API Key: " + col["yellow"](key))

        # PASSWORD
        forcepassword = malojaconfig["FORCE_PASSWORD"]
        # this is mainly meant for docker, supply password via environment variable

        if forcepassword is not None:
            # user has specified to force the pw, nothing else matters
            auth.change_pw(password=forcepassword)
            print("Password has been set.")
        elif auth.still_has_factory_default_user():
            # this means we've never set this up properly (eg first start after update)
            while True:
                newpw = prompt("Please set a password for web backend access. Leave this empty to generate a random password.",skip=SKIP,secret=True)
                if newpw is None:
                    newpw = randomstring(32)
                    print("Generated password:",col["yellow"](newpw))
                    break
                else:
                    newpw_repeat = prompt("Please type again to confirm.",skip=SKIP,secret=True)
                    if newpw != newpw_repeat: print("Passwords do not match!")
                    else: break
            auth.change_pw(password=newpw)

    except EOFError:
        print("No user input possible. If you are running inside a container, set the environment variable",col['yellow']("MALOJA_SKIP_SETUP=yes"))
        raise SystemExit
maloja/thirdparty/__init__.py
@@ -7,15 +7,16 @@
# pls don't sue me

import xml.etree.ElementTree as ElementTree
import json
import urllib.parse, urllib.request
import requests
import urllib.parse
import base64
import time
from doreah.logging import log
from threading import BoundedSemaphore
from threading import BoundedSemaphore, Thread

from ..pkg_global.conf import malojaconfig
from .. import database
from ..__pkginfo__ import USER_AGENT


services = {

@@ -51,6 +52,7 @@ def proxy_scrobble_all(artists,title,timestamp):
def get_image_track_all(track):
    with thirdpartylock:
        for service in services["metadata"]:
            if "track" not in service.metadata["enabled_entity_types"]: continue
            try:
                res = service.get_image_track(track)
                if res:

@@ -63,6 +65,7 @@ def get_image_track_all(track):
def get_image_artist_all(artist):
    with thirdpartylock:
        for service in services["metadata"]:
            if "artist" not in service.metadata["enabled_entity_types"]: continue
            try:
                res = service.get_image_artist(artist)
                if res:

@@ -75,6 +78,7 @@ def get_image_artist_all(artist):
def get_image_album_all(album):
    with thirdpartylock:
        for service in services["metadata"]:
            if "album" not in service.metadata["enabled_entity_types"]: continue
            try:
                res = service.get_image_album(album)
                if res:

@@ -100,12 +104,17 @@ class GenericInterface:
    scrobbleimport = {}
    metadata = {}

    useragent = USER_AGENT

    def __init__(self):
        # populate from settings file once on creation
        # avoid constant disk access, restart on adding services is acceptable
        for key in self.settings:
            self.settings[key] = malojaconfig[self.settings[key]]
        self.authorize()
        t = Thread(target=self.authorize)
        t.daemon = True
        t.start()
        #self.authorize()

    # this makes sure that of every class we define, we immediately create an
    # instance (de facto singleton). then each instance checks if the requirements
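A hedged sketch of the auto-instantiation pattern the comment above describes. The `abstract=True` keyword visible in the class definitions below suggests `__init_subclass__` is involved; the actual registration details are not part of this changeset:

```python
class GenericInterface:
    # assumption: non-abstract subclasses instantiate themselves on definition
    def __init_subclass__(cls, abstract=False, **kwargs):
        super().__init_subclass__(**kwargs)
        if not abstract:
            cls.instance = cls()  # the de-facto singleton, created immediately
```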
@@ -127,16 +136,6 @@ class GenericInterface:
        return True
    # per default, no authorization is necessary

    # wrapper method
    def request(self,url,data,responsetype):
        response = urllib.request.urlopen(
            url,
            data=utf(data)
        )
        responsedata = response.read()
        if responsetype == "xml":
            data = ElementTree.fromstring(responsedata)
        return data

# proxy scrobbler
class ProxyScrobbleInterface(GenericInterface,abstract=True):

@@ -155,11 +154,15 @@ class ProxyScrobbleInterface(GenericInterface,abstract=True):
    )

    def scrobble(self,artists,title,timestamp):
        response = urllib.request.urlopen(
            self.proxyscrobble["scrobbleurl"],
            data=utf(self.proxyscrobble_postdata(artists,title,timestamp)))
        responsedata = response.read()
        response = requests.post(
            url=self.proxyscrobble["scrobbleurl"],
            data=self.proxyscrobble_postdata(artists,title,timestamp),
            headers={
                "User-Agent":self.useragent
            }
        )
        if self.proxyscrobble["response_type"] == "xml":
            responsedata = response.text
            data = ElementTree.fromstring(responsedata)
        return self.proxyscrobble_parse_response(data)

@@ -211,13 +214,15 @@ class MetadataInterface(GenericInterface,abstract=True):
        artists, title = track
        artiststring = urllib.parse.quote(", ".join(artists))
        titlestring = urllib.parse.quote(title)
        response = urllib.request.urlopen(
            self.metadata["trackurl"].format(artist=artiststring,title=titlestring,**self.settings)
        response = requests.get(
            self.metadata["trackurl"].format(artist=artiststring,title=titlestring,**self.settings),
            headers={
                "User-Agent":self.useragent
            }
        )

        responsedata = response.read()
        if self.metadata["response_type"] == "json":
            data = json.loads(responsedata)
            data = response.json()
            imgurl = self.metadata_parse_response_track(data)
        else:
            imgurl = None

@@ -227,13 +232,15 @@ class MetadataInterface(GenericInterface,abstract=True):

    def get_image_artist(self,artist):
        artiststring = urllib.parse.quote(artist)
        response = urllib.request.urlopen(
            self.metadata["artisturl"].format(artist=artiststring,**self.settings)
        response = requests.get(
            self.metadata["artisturl"].format(artist=artiststring,**self.settings),
            headers={
                "User-Agent":self.useragent
            }
        )

        responsedata = response.read()
        if self.metadata["response_type"] == "json":
            data = json.loads(responsedata)
            data = response.json()
            imgurl = self.metadata_parse_response_artist(data)
        else:
            imgurl = None

@@ -245,13 +252,15 @@ class MetadataInterface(GenericInterface,abstract=True):
        artists, title = album
        artiststring = urllib.parse.quote(", ".join(artists or []))
        titlestring = urllib.parse.quote(title)
        response = urllib.request.urlopen(
            self.metadata["albumurl"].format(artist=artiststring,title=titlestring,**self.settings)
        response = requests.get(
            self.metadata["albumurl"].format(artist=artiststring,title=titlestring,**self.settings),
            headers={
                "User-Agent":self.useragent
            }
        )

        responsedata = response.read()
        if self.metadata["response_type"] == "json":
            data = json.loads(responsedata)
            data = response.json()
            imgurl = self.metadata_parse_response_album(data)
        else:
            imgurl = None
maloja/thirdparty/audiodb.py
@@ -16,6 +16,7 @@ class AudioDB(MetadataInterface):
        #"response_parse_tree_track": ["tracks",0,"astrArtistThumb"],
        "response_parse_tree_artist": ["artists",0,"strArtistThumb"],
        "required_settings": ["api_key"],
        "enabled_entity_types": ["artist"]
    }

    def get_image_track(self,track):
maloja/thirdparty/deezer.py
@@ -17,6 +17,7 @@ class Deezer(MetadataInterface):
        "response_parse_tree_artist": ["data",0,"artist","picture_medium"],
        "response_parse_tree_album": ["data",0,"album","cover_medium"],
        "required_settings": [],
        "enabled_entity_types": ["artist","album"]
    }

    delay = 1
maloja/thirdparty/lastfm.py
@@ -1,6 +1,7 @@
from . import MetadataInterface, ProxyScrobbleInterface, utf
import hashlib
import urllib.parse, urllib.request
import requests
import xml.etree.ElementTree as ElementTree
from doreah.logging import log

class LastFM(MetadataInterface, ProxyScrobbleInterface):

@@ -31,6 +32,7 @@ class LastFM(MetadataInterface, ProxyScrobbleInterface):
        #"response_parse_tree_artist": ["artist","image",-1,"#text"],
        "response_parse_tree_album": ["album","image",-1,"#text"],
        "required_settings": ["apikey"],
        "enabled_entity_types": ["track","album"]
    }

    def get_image_artist(self,artist):

@@ -53,28 +55,39 @@ class LastFM(MetadataInterface, ProxyScrobbleInterface):
        })

    def authorize(self):
        try:
            result = self.request(
                self.proxyscrobble['scrobbleurl'],
                self.query_compose({
                    "method":"auth.getMobileSession",
                    "username":self.settings["username"],
                    "password":self.settings["password"],
                    "api_key":self.settings["apikey"]
                }),
                responsetype="xml"
            )
            self.settings["sk"] = result.find("session").findtext("key")
        except Exception as e:
            pass
            #log("Error while authenticating with LastFM: " + repr(e))
        if all(self.settings[key] not in [None,"ASK",False] for key in ["username","password","apikey","secret"]):
            try:
                response = requests.post(
                    url=self.proxyscrobble['scrobbleurl'],
                    params=self.query_compose({
                        "method":"auth.getMobileSession",
                        "username":self.settings["username"],
                        "password":self.settings["password"],
                        "api_key":self.settings["apikey"]
                    }),
                    headers={
                        "User-Agent":self.useragent
                    }
                )

                data = ElementTree.fromstring(response.text)
                self.settings["sk"] = data.find("session").findtext("key")
            except Exception as e:
                log("Error while authenticating with LastFM: " + repr(e))


    # creates signature and returns full query string
    # creates signature and returns full query
    def query_compose(self,parameters):
        m = hashlib.md5()
        keys = sorted(str(k) for k in parameters)
        m.update(utf("".join(str(k) + str(parameters[k]) for k in keys)))
        m.update(utf(self.settings["secret"]))
        sig = m.hexdigest()
        return urllib.parse.urlencode(parameters) + "&api_sig=" + sig
        return {**parameters,"api_sig":sig}

    def handle_json_result_error(self,result):
        if "track" in result and not result.get("track").get('album',{}):
            return True

        if "error" in result and result.get("error") == 6:
            return True
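The signing scheme in `query_compose` above, worked through once with placeholder values (this mirrors the code; it is not an additional API):

```python
import hashlib

params = {"method": "auth.getMobileSession", "api_key": "KEY",
          "username": "USER", "password": "PW"}   # placeholder values
secret = "SECRET"                                  # shared API secret

# concatenate key+value pairs sorted by key, append the secret, md5-hex
raw = "".join(k + str(params[k]) for k in sorted(params)) + secret
signed = {**params, "api_sig": hashlib.md5(raw.encode()).hexdigest()}
```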
maloja/thirdparty/maloja.py
@@ -1,5 +1,5 @@
from . import ProxyScrobbleInterface, ImportInterface
import urllib.request
import requests
from doreah.logging import log
import json

@@ -32,8 +32,8 @@ class OtherMalojaInstance(ProxyScrobbleInterface, ImportInterface):
    def get_remote_scrobbles(self):
        url = f"{self.settings['instance']}/apis/mlj_1/scrobbles"

        response = urllib.request.urlopen(url)
        data = json.loads(response.read().decode('utf-8'))
        response = requests.get(url)
        data = response.json()

        for scrobble in data['list']:
            yield scrobble
maloja/thirdparty/musicbrainz.py
@@ -1,9 +1,7 @@
from . import MetadataInterface
import urllib.parse, urllib.request
import json
import requests
import time
import threading
from ..__pkginfo__ import USER_AGENT

class MusicBrainz(MetadataInterface):
    name = "MusicBrainz"

@@ -11,15 +9,17 @@ class MusicBrainz(MetadataInterface):

    # musicbrainz is rate-limited
    lock = threading.Lock()
    useragent = USER_AGENT

    thumbnailsize_order = ['500','large','1200','250','small']

    settings = {
    }

    metadata = {
        "response_type":"json",
        "response_parse_tree_track": ["images",0,"thumbnails","500"],
        "required_settings": [],
        "enabled_entity_types": ["album","track"]
    }

    def get_image_artist(self,artist):

@@ -27,37 +27,105 @@ class MusicBrainz(MetadataInterface):
        # not supported

    def get_image_album(self,album):
        return None

    def get_image_track(self,track):
        self.lock.acquire()
        try:
            artists, title = track
            artiststring = ", ".join(artists) #Join artists collection into string
            titlestring = title
            querystr = urllib.parse.urlencode({
                "fmt":"json",
                "query":"{title} {artist}".format(artist=artiststring,title=titlestring)
            })
            req = urllib.request.Request(**{
                "url":"https://musicbrainz.org/ws/2/release?" + querystr,
                "method":"GET",
            artists, title = album
            searchstr = f'release:"{title}"'
            for artist in artists:
                searchstr += f' artist:"{artist}"'
            res = requests.get(**{
                "url":"https://musicbrainz.org/ws/2/release",
                "params":{
                    "fmt":"json",
                    "query":searchstr
                },
                "headers":{
                    "User-Agent":self.useragent
                }
            })
            response = urllib.request.urlopen(req)
            responsedata = response.read()
            data = json.loads(responsedata)
            mbid = data["releases"][0]["id"]
            response = urllib.request.urlopen(
                "https://coverartarchive.org/release/{mbid}?fmt=json".format(mbid=mbid)
            )
            responsedata = response.read()
            data = json.loads(responsedata)
            imgurl = self.metadata_parse_response_track(data)
            if imgurl is not None: imgurl = self.postprocess_url(imgurl)
            return imgurl
            data = res.json()
            entity = data["releases"][0]
            coverartendpoint = "release"
            while True:
                mbid = entity["id"]
                try:
                    response = requests.get(
                        f"https://coverartarchive.org/{coverartendpoint}/{mbid}",
                        params={
                            "fmt":"json"
                        },
                        headers={
                            "User-Agent":self.useragent
                        }
                    )
                    data = response.json()
                    thumbnails = data['images'][0]['thumbnails']
                    for size in self.thumbnailsize_order:
                        if thumbnails.get(size) is not None:
                            imgurl = thumbnails.get(size)
                            break # take the first size in preference order that exists
                except Exception:
                    imgurl = None
                if imgurl is None:
                    entity = entity["release-group"]
                    # this will raise an error so we don't stay in the while loop forever
                    coverartendpoint = "release-group"
                    continue

                imgurl = self.postprocess_url(imgurl)
                return imgurl

        except Exception:
            return None
        finally:
            time.sleep(2)
            self.lock.release()

    def get_image_track(self,track):
        self.lock.acquire()
        try:
            artists, title = track
            searchstr = f'recording:"{title}"'
            for artist in artists:
                searchstr += f' artist:"{artist}"'
            res = requests.get(**{
                "url":"https://musicbrainz.org/ws/2/recording",
                "params":{
                    "fmt":"json",
                    "query":searchstr
                },
                "headers":{
                    "User-Agent":self.useragent
                }
            })
            data = res.json()
            entity = data["recordings"][0]["releases"][0]
            coverartendpoint = "release"
            while True:
                mbid = entity["id"]
                try:
                    response = requests.get(
                        f"https://coverartarchive.org/{coverartendpoint}/{mbid}",
                        params={
                            "fmt":"json"
                        }
                    )
                    data = response.json()
                    thumbnails = data['images'][0]['thumbnails']
                    for size in self.thumbnailsize_order:
                        if thumbnails.get(size) is not None:
                            imgurl = thumbnails.get(size)
                            break # take the first size in preference order that exists
                except Exception:
                    imgurl = None
                if imgurl is None:
                    entity = entity["release-group"]
                    # this will raise an error so we don't stay in the while loop forever
                    coverartendpoint = "release-group"
                    continue

                imgurl = self.postprocess_url(imgurl)
                return imgurl

        except Exception:
            return None
        finally:
            time.sleep(2)
            self.lock.release()
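The serialized two-second cooldown above is MusicBrainz etiquette (their API asks for roughly one request per second; two is the conservative choice made here). The pattern in isolation, as a sketch:

```python
import threading, time

mb_lock = threading.Lock()

def rate_limited(fn, *args):
    # serialize calls and keep the cooldown inside the lock,
    # so concurrent callers cannot slip requests in between
    with mb_lock:
        try:
            return fn(*args)
        finally:
            time.sleep(2)
```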
maloja/thirdparty/spotify.py
@@ -1,6 +1,5 @@
from . import MetadataInterface, utf, b64
import urllib.parse, urllib.request
import json
import requests
from threading import Timer
from doreah.logging import log

@@ -22,6 +21,7 @@ class Spotify(MetadataInterface):
        "response_parse_tree_album": ["albums","items",0,"images",0,"url"],
        "response_parse_tree_artist": ["artists","items",0,"images",0,"url"],
        "required_settings": ["apiid","secret"],
        "enabled_entity_types": ["artist","album","track"]
    }

    def authorize(self):

@@ -31,15 +31,14 @@ class Spotify(MetadataInterface):
        try:
            keys = {
                "url":"https://accounts.spotify.com/api/token",
                "method":"POST",
                "headers":{
                    "Authorization":"Basic " + b64(utf(self.settings["apiid"] + ":" + self.settings["secret"])).decode("utf-8")
                    "Authorization":"Basic " + b64(utf(self.settings["apiid"] + ":" + self.settings["secret"])).decode("utf-8"),
                    "User-Agent": self.useragent
                },
                "data":bytes(urllib.parse.urlencode({"grant_type":"client_credentials"}),encoding="utf-8")
                "data":{"grant_type":"client_credentials"}
            }
            req = urllib.request.Request(**keys)
            response = urllib.request.urlopen(req)
            responsedata = json.loads(response.read())
            res = requests.post(**keys)
            responsedata = res.json()
            if "error" in responsedata:
                log("Error authenticating with Spotify: " + responsedata['error_description'])
                expire = 3600

@@ -47,6 +46,13 @@ class Spotify(MetadataInterface):
                expire = responsedata.get("expires_in",3600)
            self.settings["token"] = responsedata["access_token"]
            #log("Successfully authenticated with Spotify")
            Timer(expire,self.authorize).start()
            t = Timer(expire,self.authorize)
            t.daemon = True
            t.start()
        except Exception as e:
            log("Error while authenticating with Spotify: " + repr(e))

    def handle_json_result_error(self,result):
        result = result.get('tracks') or result.get('albums') or result.get('artists')
        if not result['items']:
            return True

@@ -75,7 +75,7 @@
<a href="/"><img style="display:block;" src="/favicon.png" /></a>
</div>
<div id="right-side">
	<span><input id="searchinput" placeholder="Search for an artist or track..." oninput="search(this)" onblur="clearresults()" /></span>
	<span><input id="searchinput" placeholder="Search for an album, artist or track..." oninput="search(this)" onblur="clearresults()" /></span>
</div>

@@ -6,6 +6,8 @@
Here you can find tracks that currently have no album.<br/><br/>

{% with list = dbc.get_tracks_without_album() %}
You have {{list|length}} tracks with no album.<br/><br/>

{% include 'partials/list_tracks.jinja' %}
{% endwith %}

@@ -15,7 +15,7 @@
	var xhttp = new XMLHttpRequest();
	xhttp.open("POST","/api/newrule?", true);
	xhttp.open("POST","/apis/mlj_1/newrule?", true);
	xhttp.send(keys);
	e = arguments[0];
	line = e.parentNode;

@@ -25,7 +25,7 @@
function fullrebuild() {
	var xhttp = new XMLHttpRequest();
	xhttp.open("POST","/api/rebuild", true);
	xhttp.open("POST","/apis/mlj_1/rebuild", true);
	xhttp.send();
	window.location = "/wait";

@@ -67,9 +67,9 @@
<li>manually scrobble from track pages</li>
<li>delete scrobbles</li>
<li>reparse scrobbles</li>
<li>edit tracks and artists</li>
<li>merge tracks and artists</li>
<li>upload artist and track art by dropping a file on the existing image on an artist or track page</li>
<li>edit tracks, albums and artists</li>
<li>merge tracks, albums and artists</li>
<li>upload artist, album and track art by dropping a file on the existing image on an artist or track page</li>
<li>see more detailed error pages</li>
</ul>

@@ -24,7 +24,7 @@
	keys = "filename=" + encodeURIComponent(filename);
	console.log(keys);
	var xhttp = new XMLHttpRequest();
	xhttp.open("POST","/api/importrules", true);
	xhttp.open("POST","/apis/mlj_1/importrules", true);
	xhttp.send(keys);

	e.innerHTML = e.innerHTML.replace("Add","Remove");

@@ -36,7 +36,7 @@
	keys = "remove&filename=" + encodeURIComponent(filename);

	var xhttp = new XMLHttpRequest();
	xhttp.open("POST","/api/importrules", true);
	xhttp.open("POST","/apis/mlj_1/importrules", true);
	xhttp.send(keys);

	e.innerHTML = e.innerHTML.replace("Remove","Add");

@@ -56,7 +56,7 @@
If you use a Chromium-based browser and listen to music on Plex, Spotify, Soundcloud, Bandcamp or YouTube Music, download the extension and simply enter the server URL as well as your API key in the relevant fields. They will turn green if the server is accessible.
<br/><br/>
You can also use any standard-compliant scrobbler. For GNUFM (audioscrobbler) scrobblers, enter <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/audioscrobbler</span> as your Gnukebox server and your API key as the password. For Listenbrainz scrobblers, use <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/listenbrainz</span> as the API URL and your API key as token.
You can also use any standard-compliant scrobbler. For GNUFM (audioscrobbler) scrobblers, enter <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/audioscrobbler</span> as your Gnukebox server and your API key as the password. For Listenbrainz scrobblers, use <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/listenbrainz</span> as the API URL (depending on the implementation, you might need to add a <span class="stats">/1</span> at the end) and your API key as token.
<br/><br/>
If you use another browser or another music player, you could try to code your own extension. The API is super simple! Just send a POST HTTP request to
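For a custom client, the native endpoint takes a JSON POST. A hedged sketch — the `newscrobble` endpoint name and payload fields are assumptions based on Maloja's native API conventions (the `/apis/mlj_1/` prefix appears throughout this changeset), not something this page spells out:

```python
import requests

requests.post(
    "https://yourserver.tld/apis/mlj_1/newscrobble",  # assumed endpoint name
    json={
        "key": "your-api-key",          # the API key generated during setup
        "artists": ["Example Artist"],  # hypothetical track data
        "title": "Example Track",
    },
)
```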
||||
@ -123,7 +123,7 @@
|
||||
<h2>Say thanks</h2>
|
||||
|
||||
Donations are never required, but always appreciated. If you really like Maloja, you can fund my next Buttergipfel via
|
||||
<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://paypal.me/krateng">PayPal</a>, <a class="textlink" href="bitcoin:1krat8JMniJBTiHftMfR1LtF3Y1w5DAxx">Bitcoin</a> or <a class="textlink" target="_blank" rel="noopener noreferrer" href="https://flattr.com/@Krateng">Flattr</a>.
|
||||
<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://paypal.me/krateng">PayPal</a> or <a class="textlink" href="bitcoin:1krat8JMniJBTiHftMfR1LtF3Y1w5DAxx">Bitcoin</a>.
|
||||
|
||||
<br/><br/>
|
||||
|
||||
|
@ -8,8 +8,8 @@
|
||||
<div style="background-image:url('/favicon.png')"></div>
|
||||
</td>
|
||||
<td class="text">
|
||||
<h1>{{ error_desc }}</h1><br/>
|
||||
{{ error_full_desc }}
|
||||
<h1>{{ error_desc | e }}</h1><br/>
|
||||
{{ error_full_desc | e }}
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
@ -29,7 +29,7 @@
|
||||
{% for entry in dbc.get_charts_albums(filterkeys,limitkeys,{'only_own_albums':False}) %}
|
||||
|
||||
|
||||
{% if artist not in (entry.album.artists or []) %}
|
||||
{% if info.artist not in (entry.album.artists or []) %}
|
||||
|
||||
{%- set cert = None -%}
|
||||
{%- if entry.scrobbles >= settings.scrobbles_gold_album -%}{% set cert = 'gold' %}{%- endif -%}
|
||||
|
@ -63,7 +63,7 @@
|
||||
{%- if e.scrobbles >= settings.scrobbles_diamond -%}{% set cert = 'diamond' %}{%- endif -%}
|
||||
|
||||
{%- if cert -%}
|
||||
<a href='{{ links.url(e.track) }}' class="hidelink certified certified_{{ cert }} smallcerticon" title="{{ e.track.title }} has reached {{ cert.capitalize() }} status">
|
||||
<a href='{{ links.url(e.track) }}' class="hidelink certified certified_{{ cert }} smallcerticon" title="{{ e.track.title | e }} has reached {{ cert.capitalize() }} status">
|
||||
{% include 'icons/cert_track.jinja' %}
|
||||
</a>
|
||||
{%- endif %}
|
||||
|
@ -72,7 +72,7 @@
|
||||
{%- if e.scrobbles >= settings.scrobbles_diamond_album -%}{% set cert = 'diamond' %}{%- endif -%}
|
||||
|
||||
{%- if cert -%}
|
||||
<a href='{{ links.url(e.album) }}' class="hidelink certified certified_{{ cert }} smallcerticon" title="{{ e.album.albumtitle }} has reached {{ cert.capitalize() }} status">
|
||||
<a href='{{ links.url(e.album) }}' class="hidelink certified certified_{{ cert }} smallcerticon" title="{{ e.album.albumtitle | e }} has reached {{ cert.capitalize() }} status">
|
||||
{% include 'icons/cert_album.jinja' %}
|
||||
</a>
|
||||
{%- endif %}
|
||||
@ -87,7 +87,7 @@
|
||||
{%- if e.scrobbles >= settings.scrobbles_diamond -%}{% set cert = 'diamond' %}{%- endif -%}
|
||||
|
||||
{%- if cert -%}
|
||||
<a href='{{ links.url(e.track) }}' class="hidelink certified certified_{{ cert }} smallcerticon" title="{{ e.track.title }} has reached {{ cert.capitalize() }} status">
|
||||
<a href='{{ links.url(e.track) }}' class="hidelink certified certified_{{ cert }} smallcerticon" title="{{ e.track.title | e }} has reached {{ cert.capitalize() }} status">
|
||||
{% include 'icons/cert_track.jinja' %}
|
||||
</a>
|
||||
{%- endif %}
|
||||
|
@ -16,10 +16,14 @@
|
||||
{% if entry is not none %}
|
||||
{% set album = entry.album %}
|
||||
{% set rank = entry.rank %}
|
||||
{% set scrobbles = entry.scrobbles %}
|
||||
<div class="tile">
|
||||
<a href="{{ links.url(album) }}">
|
||||
<div class="lazy" data-bg="{{ images.get_album_image(album) }}"'>
|
||||
<span class='stats'>#{{ rank }}</span> <span>{{ album.albumtitle }}</span>
|
||||
{% if settings['SHOW_PLAY_NUMBER_ON_TILES'] %}
|
||||
<p class="scrobbles"><span>{{ scrobbles }} {{ 'play' if scrobbles == 1 else 'plays' }}</span> </p>
|
||||
{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
|
@ -16,10 +16,14 @@
|
||||
{% if entry is not none %}
|
||||
{% set artist = entry.artist %}
|
||||
{% set rank = entry.rank %}
|
||||
{% set scrobbles = entry.scrobbles %}
|
||||
<div class="tile">
|
||||
<a href="{{ links.url(artist) }}">
|
||||
<div class="lazy" data-bg="{{ images.get_artist_image(artist) }}"'>
|
||||
<span class='stats'>#{{ rank }}</span> <span>{{ artist }}</span>
|
||||
<span class='stats'>#{{ rank }}</span> <span>{{ artist }}</span>
|
||||
{% if settings['SHOW_PLAY_NUMBER_ON_TILES'] %}
|
||||
<p class="scrobbles"><span>{{ scrobbles }} {{ 'play' if scrobbles == 1 else 'plays' }}</span> </p>
|
||||
{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
|
@ -16,10 +16,14 @@
|
||||
{% if entry is not none %}
|
||||
{% set track = entry.track %}
|
||||
{% set rank = entry.rank %}
|
||||
{% set scrobbles = entry.scrobbles %}
|
||||
<div class="tile">
|
||||
<a href="{{ links.url(track) }}">
|
||||
<div class="lazy" data-bg="{{ images.get_track_image(track) }}"'>
|
||||
<span class='stats'>#{{ rank }}</span> <span>{{ track.title }}</span>
|
||||
{% if settings['SHOW_PLAY_NUMBER_ON_TILES'] %}
|
||||
<p class="scrobbles"><span>{{ scrobbles }} {{ 'play' if scrobbles == 1 else 'plays' }}</span> </p>
|
||||
{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
|
@ -15,16 +15,16 @@
|
||||
<div class="lazy" data-bg="{{ img }}"></div>
|
||||
{% endif %}
|
||||
</td>
|
||||
{% if entity is mapping and 'artists' in entity %}
|
||||
{% if entity is mapping and 'title' in entity %}
|
||||
{% if settings['TRACK_SEARCH_PROVIDER'] %}
|
||||
<td class='searchProvider'>{{ links.link_search(entity) }}</td>
|
||||
{% endif %}
|
||||
<td class='track'>
|
||||
<span class='artist_in_trackcolumn'>{{ links.links(entity.artists) }}</span> – {{ links.link(entity) }}
|
||||
<span class='artist_in_trackcolumn'>{{ links.links(entity.artists, restrict_amount=True) }}</span> – {{ links.link(entity) }}
|
||||
</td>
|
||||
{% elif entity is mapping and 'albumtitle' in entity %}
|
||||
<td class='album'>
|
||||
<span class='artist_in_trackcolumn'>{{ links.links(entity.artists) }}</span> – {{ links.link(entity) }}
|
||||
<span class='artist_in_albumcolumn'>{{ links.links(entity.artists, restrict_amount=True) }}</span> – {{ links.link(entity) }}
|
||||
</td>
|
||||
{% else %}
|
||||
<td class='artist'>{{ links.link(entity) }}
|
||||
|
@ -8,9 +8,11 @@
|
||||
<a href="{{ url(entity) }}">{{ name | e }}</a>
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro links(entities) -%}
|
||||
{% macro links(entities, restrict_amount=False) -%}
|
||||
{% if entities is none or entities == [] %}
|
||||
{{ settings["DEFAULT_ALBUM_ARTIST"] }}
|
||||
{% elif entities.__len__() > 3 and restrict_amount %}
|
||||
{{ link(entities[0]) }} et al.
|
||||
{% else %}
|
||||
{% for entity in entities -%}
|
||||
{{ link(entity) }}{{ ", " if not loop.last }}
|
||||
|
@ -363,12 +363,14 @@ div#notification_area {
|
||||
right:20px;
|
||||
}
|
||||
div#notification_area div.notification {
|
||||
background-color:white;
|
||||
background-color:black;
|
||||
width:400px;
|
||||
min-height:50px;
|
||||
margin-bottom:7px;
|
||||
padding:9px;
|
||||
opacity:0.4;
|
||||
opacity:0.5;
|
||||
border-left: 8px solid var(--notification-color);
|
||||
border-radius: 3px;
|
||||
}
|
||||
div#notification_area div.notification:hover {
|
||||
opacity:0.95;
|
||||
@ -781,6 +783,9 @@ table.list td.artists,td.artist,td.title,td.track {
|
||||
table.list td.track span.artist_in_trackcolumn {
|
||||
color: var(--text-color-secondary);
|
||||
}
|
||||
table.list td.album span.artist_in_albumcolumn {
|
||||
color: var(--text-color-secondary);
|
||||
}
|
||||
|
||||
table.list td.searchProvider {
|
||||
width: 20px;
|
||||
@ -987,6 +992,7 @@ table.misc td {
|
||||
|
||||
|
||||
div.tiles {
|
||||
max-height: 600px;
|
||||
display: grid;
|
||||
grid-template-columns: repeat(18, calc(100% / 18));
|
||||
grid-template-rows: repeat(6, calc(100% / 6));
|
||||
@ -1069,6 +1075,12 @@ div.tiles span {
|
||||
overflow-wrap: anywhere;
|
||||
}
|
||||
|
||||
div.tiles p.scrobbles {
|
||||
margin: 0;
|
||||
top:100%;
|
||||
position: sticky;
|
||||
}
|
||||
|
||||
div.tiles a:hover {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
@ -22,8 +22,8 @@ div#startpage {
|
||||
|
||||
@media (min-width: 1401px) and (max-width: 2200px) {
|
||||
div#startpage {
|
||||
grid-template-columns: 45vw 45vw;
|
||||
grid-template-rows: 45vh 45vh 45vh;
|
||||
grid-template-columns: repeat(2, 45vw);
|
||||
grid-template-rows: repeat(3, 45vh);
|
||||
|
||||
grid-template-areas:
|
||||
"charts_artists lastscrobbles"
|
||||
|
@ -126,7 +126,7 @@ function scrobble(artists,title,albumartists,album,timestamp) {
|
||||
lastArtists = artists;
|
||||
lastTrack = title;
|
||||
lastAlbum = album;
|
||||
lastAlbumartists = albumartists;
|
||||
lastAlbumartists = albumartists || [];
|
||||
|
||||
var payload = {
|
||||
"artists":artists,
|
||||
@ -186,7 +186,7 @@ function search_manualscrobbling(searchfield) {
|
||||
else {
|
||||
xhttp = new XMLHttpRequest();
|
||||
xhttp.onreadystatechange = searchresult_manualscrobbling;
|
||||
xhttp.open("GET","/api/search?max=5&query=" + encodeURIComponent(txt), true);
|
||||
xhttp.open("GET","/apis/mlj_1/search?max=5&query=" + encodeURIComponent(txt), true);
|
||||
xhttp.send();
|
||||
}
|
||||
}
|
||||
|
@ -1,12 +1,14 @@
|
||||
// JS for feedback to the user whenever any XHTTP action is taken
|
||||
|
||||
const colors = {
|
||||
'warning':'red',
|
||||
'error': 'red',
|
||||
'warning':'#8ACC26',
|
||||
'info':'green'
|
||||
}
|
||||
|
||||
|
||||
const notification_template = info => `
|
||||
<div class="notification" style="background-color:${colors[info.notification_type]};">
|
||||
<div class="notification" style="--notification-color: ${colors[info.notification_type]};">
|
||||
<b>${info.title}</b><br/>
|
||||
<span>${info.body}</span>
|
||||
|
||||
@ -35,18 +37,24 @@ function notify(title,msg,notification_type='info',reload=false) {
|
||||
}
|
||||
|
||||
function notifyCallback(request) {
|
||||
var body = request.response;
|
||||
var response = request.response;
|
||||
var status = request.status;
|
||||
|
||||
if (status == 200) {
|
||||
var notification_type = 'info';
|
||||
if (response.hasOwnProperty('warnings') && response.warnings.length > 0) {
|
||||
var notification_type = 'warning';
|
||||
}
|
||||
else {
|
||||
var notification_type = 'info';
|
||||
}
|
||||
|
||||
var title = "Success!";
|
||||
var msg = body.desc || body;
|
||||
var msg = response.desc || response;
|
||||
}
|
||||
else {
|
||||
var notification_type = 'warning';
|
||||
var title = "Error: " + body.error.type;
|
||||
var msg = body.error.desc || "";
|
||||
var notification_type = 'error';
|
||||
var title = "Error: " + response.error.type;
|
||||
var msg = response.error.desc || "";
|
||||
}
|
||||
|
||||
|
||||
|
@@ -1,17 +1,23 @@
-var searches = []
+var searches = [];
+var debounceTimer;
 
 function search(searchfield) {
-	txt = searchfield.value;
-	if (txt == "") {
-		reallyclear()
-	}
-	else {
-		xhttp = new XMLHttpRequest();
-		searches.push(xhttp)
-		xhttp.onreadystatechange = searchresult
-		xhttp.open("GET","/api/search?max=5&query=" + encodeURIComponent(txt), true);
-		xhttp.send();
-	}
+	clearTimeout(debounceTimer);
+	debounceTimer = setTimeout(() => {
+		const txt = searchfield.value;
+		if (txt == "") {
+			reallyclear();
+		}
+		else {
+			const xhttp = new XMLHttpRequest();
+			searches.push(xhttp);
+			xhttp.onreadystatechange = searchresult
+			xhttp.open("GET","/apis/mlj_1/search?max=5&query=" + encodeURIComponent(txt), true);
+			xhttp.send();
+		}
+	}, 1000);
 
 }
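The rewrite wraps the lookup in a one-second debounce: each keystroke resets the timer, so only the final input after a pause fires a request. A standalone sketch of the pattern; the 1000 ms delay mirrors the diff, while the event wiring is illustrative:

// Standalone debounce sketch: repeated calls within the delay window
// cancel the pending one, so only the last call actually runs.
let debounceTimer;

function debounced(action, delay = 1000) {
	clearTimeout(debounceTimer);
	debounceTimer = setTimeout(action, delay);
}

// Illustrative wiring: logs once, one second after typing stops.
// document.querySelector('#searchinput')
// 	.addEventListener('input', e => debounced(() => console.log(e.target.value)));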
@@ -1,3 +1,3 @@
 function upload(encodedentity,b64) {
-	neo.xhttprequest("/api/addpicture?" + encodedentity,{"b64":b64},"POST")
+	neo.xhttprequest("/apis/mlj_1/addpicture?" + encodedentity,{"b64":b64},"POST")
 }
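`neo.xhttprequest` is Maloja's own helper, so here is a hedged plain-XHR equivalent of the renamed call; the `artist=...` query string is only a guess at what `encodedentity` might contain, and is not confirmed by this diff:

// Hedged plain-XHR equivalent; the query string content is assumed.
const xhttp = new XMLHttpRequest();
xhttp.open("POST", "/apis/mlj_1/addpicture?artist=" + encodeURIComponent("Example Artist"), true);
xhttp.setRequestHeader("Content-Type", "application/json");
xhttp.send(JSON.stringify({"b64": "<base64 image data>"}));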
@@ -1,10 +1,10 @@
 [project]
 name = "malojaserver"
-version = "3.2.0"
+version = "3.2.4"
 description = "Self-hosted music scrobble database"
-readme = "./README.md"
-requires-python = ">=3.10"
-license = { file="./LICENSE" }
+readme = "README.md"
+requires-python = "==3.12.*"
+license = { file="LICENSE" }
 authors = [ { name="Johannes Krattenmacher", email="maloja@dev.krateng.ch" } ]
 
 urls.repository = "https://github.com/krateng/maloja"
@@ -19,31 +19,32 @@ classifiers = [
 ]
 
 dependencies = [
-	"bottle>=0.12.16",
-	"waitress>=2.1.0",
-	"doreah>=1.9.4, <2",
-	"nimrodel>=0.8.0",
-	"setproctitle>=1.1.10",
-	#"pyvips>=2.1.16",
-	"jinja2>=3.0.0",
-	"lru-dict>=1.1.6",
-	"psutil>=5.8.0",
-	"sqlalchemy>=2.0",
-	"python-datauri>=1.1.0",
-	"requests>=2.27.1",
-	"setuptools>68.0.0"
+	"bottle==0.13.*",
+	"waitress==3.0.*",
+	"doreah==2.0.*",
+	"nimrodel==0.8.*",
+	"setproctitle==1.3.*",
+	"jinja2==3.1.*",
+	"lru-dict==1.3.*",
+	"psutil==5.9.*",
+	"sqlalchemy==2.0",
+	"python-datauri==3.0.*",
+	"python-magic==0.4.*",
+	"requests==2.32.*",
+	"toml==0.10.*",
+	"PyYAML==6.0.*"
 ]
 
 [project.optional-dependencies]
 full = [
-	"pyvips>=2.1"
+	"pyvips==2.2.*"
 ]
 
 [project.scripts]
 maloja = "maloja.__main__:main"
 
 [build-system]
-requires = ["flit_core >=3.2,<4"]
+requires = ["flit_core >=3.10,<4"]
 build-backend = "flit_core.buildapi"
 
 [tool.flit.module]
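The dependency list moves from open-ended `>=` ranges to `==X.Y.*` pins, which accept patch releases but lock the minor version. A hedged illustration of what such a pin matches; this is not how pip resolves it (PEP 440 is the real spec), just the semantics in miniature:

// Illustrative check of "==X.Y.*" pin semantics: same major.minor,
// any patch release.
function matchesMinorPin(pin, version) {
	const base = pin.replace(/^==/, "").replace(/\.\*$/, "");  // e.g. "0.13"
	return version === base || version.startsWith(base + ".");
}

console.assert(matchesMinorPin("==0.13.*", "0.13.2") === true);   // patch release accepted
console.assert(matchesMinorPin("==0.13.*", "0.14.0") === false);  // minor bump rejected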
@@ -64,7 +65,8 @@ build =[
 run = [
 	"python3",
 	"py3-lxml",
-	"tzdata"
+	"tzdata",
+	"libmagic"
 ]
 opt = [
 	"vips"
@@ -1,12 +1,15 @@
-bottle>=0.12.16
-waitress>=2.1.0
-doreah>=1.9.4, <2
-nimrodel>=0.8.0
-setproctitle>=1.1.10
-jinja2>=3.0.0
-lru-dict>=1.1.6
-psutil>=5.8.0
-sqlalchemy>=2.0
-python-datauri>=1.1.0
-requests>=2.27.1
-setuptools>68.0.0
+bottle==0.13.*
+waitress==3.0.*
+doreah==2.0.*
+nimrodel==0.8.*
+setproctitle==1.3.*
+jinja2==3.1.*
+lru-dict==1.3.*
+psutil==5.9.*
+sqlalchemy==2.0
+python-datauri==3.0.*
+python-magic==0.4.*
+requests==2.32.*
+toml==0.10.*
+PyYAML==6.0.*
@@ -1,2 +1,2 @@
-pyvips>=2.1
+pyvips==2.2.*
20 settings.md
@@ -32,14 +32,17 @@ Settings File | Environment Variable | Type | Description
 `cache_expire_negative` | `MALOJA_CACHE_EXPIRE_NEGATIVE` | Integer | Days until failed image fetches are reattempted
 `db_max_memory` | `MALOJA_DB_MAX_MEMORY` | Integer | RAM Usage in percent at which Maloja should no longer increase its database cache.
 `use_request_cache` | `MALOJA_USE_REQUEST_CACHE` | Boolean | Use request-local DB Cache
-`use_global_cache` | `MALOJA_USE_GLOBAL_CACHE` | Boolean | Use global DB Cache
+`use_global_cache` | `MALOJA_USE_GLOBAL_CACHE` | Boolean | This is vital for Maloja's performance. Do not disable this unless you have a strong reason to.
 **Fluff**
 `scrobbles_gold` | `MALOJA_SCROBBLES_GOLD` | Integer | How many scrobbles a track needs to be considered 'Gold' status
 `scrobbles_platinum` | `MALOJA_SCROBBLES_PLATINUM` | Integer | How many scrobbles a track needs to be considered 'Platinum' status
 `scrobbles_diamond` | `MALOJA_SCROBBLES_DIAMOND` | Integer | How many scrobbles a track needs to be considered 'Diamond' status
+`scrobbles_gold_album` | `MALOJA_SCROBBLES_GOLD_ALBUM` | Integer | How many scrobbles an album needs to be considered 'Gold' status
+`scrobbles_platinum_album` | `MALOJA_SCROBBLES_PLATINUM_ALBUM` | Integer | How many scrobbles an album needs to be considered 'Platinum' status
+`scrobbles_diamond_album` | `MALOJA_SCROBBLES_DIAMOND_ALBUM` | Integer | How many scrobbles an album needs to be considered 'Diamond' status
 `name` | `MALOJA_NAME` | String | Name
 **Third Party Services**
-`metadata_providers` | `MALOJA_METADATA_PROVIDERS` | List | Which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first.
+`metadata_providers` | `MALOJA_METADATA_PROVIDERS` | List | List of which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first.
 `scrobble_lastfm` | `MALOJA_SCROBBLE_LASTFM` | Boolean | Proxy-Scrobble to Last.fm
 `lastfm_api_key` | `MALOJA_LASTFM_API_KEY` | String | Last.fm API Key
 `lastfm_api_secret` | `MALOJA_LASTFM_API_SECRET` | String | Last.fm API Secret
@@ -55,6 +58,7 @@ Settings File | Environment Variable | Type | Description
 `send_stats` | `MALOJA_SEND_STATS` | Boolean | Send Statistics
+`proxy_images` | `MALOJA_PROXY_IMAGES` | Boolean | Whether third party images should be downloaded and served directly by Maloja (instead of just linking their URL)
 **Database**
 `album_information_trust` | `MALOJA_ALBUM_INFORMATION_TRUST` | Choice | Whether to trust the first album information that is sent with a track or update every time a different album is sent
 `invalid_artists` | `MALOJA_INVALID_ARTISTS` | Set | Artists that should be discarded immediately
 `remove_from_title` | `MALOJA_REMOVE_FROM_TITLE` | Set | Phrases that should be removed from song titles
 `delimiters_feat` | `MALOJA_DELIMITERS_FEAT` | Set | Delimiters used for extra artists, even when in the title field
@@ -62,14 +66,20 @@ Settings File | Environment Variable | Type | Description
 `delimiters_formal` | `MALOJA_DELIMITERS_FORMAL` | Set | Delimiters used to tag multiple artists when only one tag field is available
 `filters_remix` | `MALOJA_FILTERS_REMIX` | Set | Filters used to recognize the remix artists in the title
 `parse_remix_artists` | `MALOJA_PARSE_REMIX_ARTISTS` | Boolean | Parse Remix Artists
+`week_offset` | `MALOJA_WEEK_OFFSET` | Integer | Start of the week for the purpose of weekly statistics. 0 = Sunday, 6 = Saturday
+`timezone` | `MALOJA_TIMEZONE` | Integer | UTC Offset
+`location_timezone` | `MALOJA_LOCATION_TIMEZONE` | String | Location Timezone (overrides `timezone`)
 **Web Interface**
 `default_range_charts_artists` | `MALOJA_DEFAULT_RANGE_CHARTS_ARTISTS` | Choice | Default Range Artist Charts
 `default_range_charts_tracks` | `MALOJA_DEFAULT_RANGE_CHARTS_TRACKS` | Choice | Default Range Track Charts
 `default_range_startpage` | `MALOJA_DEFAULT_RANGE_STARTPAGE` | Choice | Default Range for Startpage Stats
 `default_step_pulse` | `MALOJA_DEFAULT_STEP_PULSE` | Choice | Default Pulse Step
 `charts_display_tiles` | `MALOJA_CHARTS_DISPLAY_TILES` | Boolean | Display Chart Tiles
+`album_showcase` | `MALOJA_ALBUM_SHOWCASE` | Boolean | Display a graphical album showcase for artist overview pages instead of a chart list
 `display_art_icons` | `MALOJA_DISPLAY_ART_ICONS` | Boolean | Display Album/Artist Icons
+`default_album_artist` | `MALOJA_DEFAULT_ALBUM_ARTIST` | String | Default Albumartist
+`use_album_artwork_for_tracks` | `MALOJA_USE_ALBUM_ARTWORK_FOR_TRACKS` | Boolean | Use Album Artwork for tracks
+`fancy_placeholder_art` | `MALOJA_FANCY_PLACEHOLDER_ART` | Boolean | Use fancy placeholder artwork
+`show_play_number_on_tiles` | `MALOJA_SHOW_PLAY_NUMBER_ON_TILES` | Boolean | Show amount of plays on tiles
 `discourage_cpu_heavy_stats` | `MALOJA_DISCOURAGE_CPU_HEAVY_STATS` | Boolean | Prevent visitors from mindlessly clicking on CPU-heavy options. Does not actually disable them for malicious actors!
 `use_local_images` | `MALOJA_USE_LOCAL_IMAGES` | Boolean | Use Local Images
-`timezone` | `MALOJA_TIMEZONE` | Integer | UTC Offset
 `time_format` | `MALOJA_TIME_FORMAT` | String | Time Format
 `theme` | `MALOJA_THEME` | String | Theme
40 setup.py
@@ -1,40 +0,0 @@
-import setuptools
-import toml
-
-
-with open("pyproject.toml") as fd:
-	pkgdata = toml.load(fd)
-projectdata = pkgdata['project']
-
-
-# extract info
-with open(projectdata['readme'], "r") as fh:
-	long_description = fh.read()
-
-setuptools.setup(
-	name=projectdata['name'],
-	version=projectdata['version'],
-	author=projectdata['authors'][0]['name'],
-	author_email=projectdata['authors'][0]['email'],
-	description=projectdata["description"],
-	license="GPLv3",
-	long_description=long_description,
-	long_description_content_type="text/markdown",
-	url=projectdata['urls']['repository'],
-	packages=setuptools.find_packages("."),
-	classifiers=[
-		"Programming Language :: Python :: 3",
-		"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
-		"Operating System :: OS Independent",
-	],
-	python_requires=projectdata['requires-python'],
-	install_requires=projectdata['dependencies'],
-	package_data={'': ['*','*/*','*/*/*','*/*/*/*','*/*/.*','*/*/*/.*']},
-	include_package_data=True,
-	entry_points = {
-		'console_scripts':[
-			k + '=' + projectdata['scripts'][k] for k in projectdata['scripts']
-		]
-
-	}
-)