Mirror of https://github.com/krateng/maloja.git (synced 2025-04-15 16:30:32 +03:00)
Commit 2cf785faae

.dockerignore (new file, 8 lines)
@@ -0,0 +1,8 @@
*
!maloja
!Containerfile
!requirements_pre.txt
!requirements.txt
!pyproject.toml
!README.md
!LICENSE

.github/FUNDING.yml (vendored, new file, 1 line)
@@ -0,0 +1 @@
custom: ["https://flattr.com/@Krateng", "https://paypal.me/krateng", "bitcoin:1krat8JMniJBTiHftMfR1LtF3Y1w5DAxx"]

.github/workflows/aux.library.pypi.yml (vendored, 6 changes)
@@ -13,10 +13,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2
        uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579

      - name: Set up Python
        uses: actions/setup-python@v2
        uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a
        with:
          python-version: '3.x'

@@ -30,7 +30,7 @@ jobs:
        run: python -m build

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        uses: pypa/gh-action-pypi-publish@717ba43cfbb0387f6ce311b169a825772f54d295
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}

.github/workflows/aux.scrobbler.upload.yml (vendored, 2 changes)
@@ -12,7 +12,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Push Extension to Web Store
        uses: Klemensas/chrome-extension-upload-action@$VERSION
        uses: Klemensas/chrome-extension-upload-action@1e8ede84548583abf1a2a495f4242c4c51539337
        with:
          refresh-token: '${{ secrets.GOOGLE_REFRESHTOKEN }}'
          client-id: '${{ secrets.GOOGLE_CLIENTID }}'

.github/workflows/dockerhub.yml (vendored, 23 changes)
@@ -11,7 +11,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2
        uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579

      - name: Log in to Docker Hub
        if: github.event_name != 'pull_request'
@@ -22,7 +22,7 @@ jobs:

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v3
        uses: docker/metadata-action@e5622373a38e60fb6d795a4421e56882f2d7a681
        with:
          images: krateng/maloja
          # generate Docker tags based on the following events/attributes
@@ -32,19 +32,20 @@ jobs:
            latest=true

      - name: Build and push Docker image
        uses: docker/build-push-action@v2
        uses: docker/build-push-action@7f9d37fa544684fb73bfe4835ed7214c255ce02b
        with:
          context: .
          file: Containerfile
          #platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      # skipping this until https://github.com/peter-evans/dockerhub-description/issues/10 is fixed
      # - name: Update Readme and short description
      #   uses: peter-evans/dockerhub-description@v2
      #   with:
      #     username: ${{ secrets.DOCKERHUB_USERNAME }}
      #     password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
      #     repository: krateng/maloja
      #     short-description: ${{ github.event.repository.description }}
      - name: Update Readme and short description
        uses: peter-evans/dockerhub-description@1cf9afbac3c5d2fdc66416a464c2c38260cb6f8d
        continue-on-error: true
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
          repository: krateng/maloja
          short-description: ${{ github.event.repository.description }}

.github/workflows/pypi.yml (vendored, 6 changes)
@@ -11,10 +11,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2
        uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579

      - name: Set up Python
        uses: actions/setup-python@v2
        uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a
        with:
          python-version: '3.x'

@@ -25,7 +25,7 @@ jobs:
        run: python -m build

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        uses: pypa/gh-action-pypi-publish@717ba43cfbb0387f6ce311b169a825772f54d295
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}

.gitignore (vendored, 23 changes)
@@ -1,21 +1,14 @@
# generic temporary / dev files
# temporary / generated files
*.pyc
*.note
*.xcf
/nohup.out
/*-old
/*.yml
/pylintrc
.venv/*
/testdata*

# build
# environments / builds
.venv/*
testdata*
/dist
/build
/*.egg-info

# currently not using
/screenshot*.png

# only for development, normally external
/doreah
# dev files
*.xcf
*.note
*-old

APKBUILD (new file, 36 lines)
@@ -0,0 +1,36 @@
# Contributor: Johannes Krattenmacher <maloja@dev.krateng.ch>
# Maintainer: Johannes Krattenmacher <maloja@dev.krateng.ch>
pkgname=maloja
pkgver=3.0.0-dev
pkgrel=0
pkgdesc="Self-hosted music scrobble database"
url="https://github.com/krateng/maloja"
arch="noarch"
license="GPL-3.0"
depends="python3 tzdata"
pkgusers=$pkgname
pkggroups=$pkgname
depends_dev="gcc g++ python3-dev libxml2-dev libxslt-dev libffi-dev libc-dev py3-pip linux-headers"
makedepends="$depends_dev"
source="
    $pkgname-$pkgver.tar.gz::https://github.com/krateng/maloja/archive/refs/tags/v$pkgver.tar.gz
"
builddir="$srcdir"/$pkgname-$pkgver


build() {
    cd $builddir
    python3 -m build .
    pip3 install dist/*.tar.gz
}

package() {
    mkdir -p /etc/$pkgname || return 1
    mkdir -p /var/lib/$pkgname || return 1
    mkdir -p /var/cache/$pkgname || return 1
    mkdir -p /var/logs/$pkgname || return 1
}

# TODO
sha512sums="a674eaaaa248fc2b315514d79f9a7a0bac6aa1582fe29554d9176e8b551e8aa3aa75abeebdd7713e9e98cc987e7bd57dc7a5e9a2fb85af98b9c18cb54de47bf7 $pkgname-${pkgver}.tar.gz"

@@ -1,29 +1,47 @@
FROM python:3-alpine
FROM alpine:3.15
# Python image includes two Python versions, so use base Alpine

# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>
# https://gitlab.com/Joniator/docker-maloja
# https://github.com/Joniator

WORKDIR /usr/src/app

# Install run dependencies first
RUN apk add --no-cache python3 tzdata

# Copy project into dir
COPY . .
# system pip could be removed after build, but apk then decides to also remove all its
# python dependencies, even if they are explicitly installed as python packages
# whut
RUN \
    apk add py3-pip && \
    pip install wheel

# these are more static than the real requirements, which means caching
COPY ./requirements_pre.txt ./requirements_pre.txt

RUN \
    # Build dependencies (This will pipe all packages from the file)
    sed 's/#.*//' ./install/dependencies_build.txt | xargs apk add --no-cache --virtual .build-deps && \
    # Runtime dependencies (Same)
    sed 's/#.*//' ./install/dependencies_run.txt | xargs apk add --no-cache && \
    # Python dependencies
    pip3 install --no-cache-dir -r requirements.txt && \
    # Local project install
    pip3 install /usr/src/app && \
    # Remove build dependencies
    apk del .build-deps
    apk add --no-cache --virtual .build-deps gcc g++ python3-dev libxml2-dev libxslt-dev libffi-dev libc-dev py3-pip linux-headers && \
    pip install --no-cache-dir -r requirements_pre.txt && \
    apk del .build-deps

# expected behavior for a default setup is for maloja to "just work"

# less likely to be cached
COPY ./requirements.txt ./requirements.txt

RUN \
    apk add --no-cache --virtual .build-deps gcc g++ python3-dev libxml2-dev libxslt-dev libffi-dev libc-dev py3-pip linux-headers && \
    pip install --no-cache-dir -r requirements.txt && \
    apk del .build-deps


# no chance for caching below here

COPY . .

RUN pip install /usr/src/app

# Docker-specific configuration and default to IPv4
ENV MALOJA_SKIP_SETUP=yes
ENV MALOJA_HOST=0.0.0.0

EXPOSE 42010
# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint

@@ -11,10 +11,10 @@ Clone the repository and enter it.

To avoid cluttering your system, consider using a [virtual environment](https://docs.python.org/3/tutorial/venv.html).

Your system needs several packages installed. On Alpine, this can be done with
Your system needs several packages installed. For supported distributions, this can be done with e.g.

```console
sh ./install/install_dependencies.sh
sh ./install/install_dependencies_alpine.sh
```

For other distros, try to find the equivalents of the packages listed or simply check your error output.
@@ -41,6 +41,18 @@ You can also build the package with
```console
pip install .
```

## Docker

You can also always build and run the server with

```console
sh ./dev/run_docker.sh
```

This will use the directory `testdata`.

## Further help

Feel free to [ask](https://github.com/krateng/maloja/discussions) if you need some help!

@@ -1,31 +0,0 @@
FROM python:3-alpine

# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>
# https://gitlab.com/Joniator/docker-maloja
# https://github.com/Joniator

ARG MALOJA_RELEASE
WORKDIR /usr/src/app

# Copy needed information
COPY ./install ./install

RUN \
    # Build dependencies (This will pipe all packages from the file)
    sed 's/#.*//' ./install/dependencies_build.txt | xargs apk add --no-cache --virtual .build-deps && \
    # Runtime dependencies (Same)
    sed 's/#.*//' ./install/dependencies_run.txt | xargs apk add --no-cache && \

    # PyPI install
    pip3 install malojaserver==$MALOJA_RELEASE && \
    # Remove build dependencies
    apk del .build-deps

# expected behavior for a default setup is for maloja to "just work"
ENV MALOJA_SKIP_SETUP=yes
ENV MAGICK_HOME=/usr

EXPOSE 42010
# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint
ENTRYPOINT ["maloja", "run"]

README.md (64 changes)
@@ -1,28 +1,26 @@
# Maloja

[](https://github.com/krateng/maloja)
[](https://pypi.org/project/malojaserver/)
[](https://hub.docker.com/r/krateng/maloja)
[](https://github.com/krateng/maloja)
[](https://pypi.org/project/malojaserver/)
[](https://hub.docker.com/r/krateng/maloja)

[](https://github.com/krateng/maloja/blob/master/LICENSE)
[](https://codeclimate.com/github/krateng/maloja)

Simple self-hosted music scrobble database to create personal listening statistics. No recommendations, no social network, no nonsense.

You can check [my own Maloja page](https://maloja.krateng.ch) to see what it looks like (it's down fairly often because I use it as a staging environment; that doesn't reflect the stability of the Maloja software!).

> If you're a new Maloja user, consider installing [Version 3 Beta](https://github.com/krateng/maloja/tree/v3) right away. This way, you avoid having to upgrade later and you might help catch some bugs.
>
> Simply clone this repository and change branches with `git checkout v3`, then follow the [Readme](https://github.com/krateng/maloja/blob/v3/README.md) of that branch to install from source or run the included Containerfile.
>
> Thank you for your help testing the new release!
You can check [my own Maloja page](https://maloja.krateng.ch) as an example instance.

## Table of Contents
* [Features](#features)
* [How to install](#how-to-install)
    * [LXC / VM / Bare Metal](#lxc--vm--bare-metal)
    * [Docker](#docker)
    * [Requirements](#requirements)
    * [PyPI](#pypi)
    * [From Source](#from-source)
    * [Docker / Podman](#docker-podman)
    * [Extras](#extras)
* [How to use](#how-to-use)
    * [Basic control](#basic-control)
@@ -49,27 +47,25 @@ You can check [my own Maloja page](https://maloja.krateng.ch) to see what it loo

## How to install

### LXC / VM / Bare Metal
### Requirements

Maloja should run on any x86 or ARM machine that runs Python.

I can support you with issues best if you use **Alpine Linux**.

#### From PyPI
Your CPU should have a single core passmark score of at the very least 1500. 500 MB RAM should give you a decent experience, but performance will benefit greatly from up to 2 GB.

You can download the included script `install_alpine.sh` and run it with
### PyPI

```console
sh install_alpine.sh
```

You can also simply call the install command
You can install Maloja with

```console
pip install malojaserver
```

directly (e.g. if you're not on Alpine) - make sure you have all the system packages installed.
To make sure all dependencies are installed, you can also use one of the included scripts in the `install` folder.

#### From Source
### From Source

Clone this repository and enter the directory with

@@ -78,15 +74,15 @@ Clone this repository and enter the directory with
```console
cd maloja
```

Then install all the requirements and build the package:
Then install all the requirements and build the package, e.g.:

```console
sh ./install/install_dependencies.sh
sh ./install/install_dependencies_alpine.sh
pip install -r requirements.txt
pip install .
```

### Docker
### Docker / Podman

Pull the [latest image](https://hub.docker.com/r/krateng/maloja) or check out the repository and use the included Containerfile.

@@ -95,21 +91,20 @@ Of note are these settings which should be passed as environmental variables to
* `MALOJA_DATA_DIRECTORY` -- Set the directory in the container where configuration folders/files should be located
    * Mount a [volume](https://docs.docker.com/engine/reference/builder/#volume) to the specified directory to access these files outside the container (and to make them persistent)
* `MALOJA_FORCE_PASSWORD` -- Set an admin password for maloja
* `MALOJA_HOST` (Optional) -- Maloja uses IPv6 by default for the host. Set this to `0.0.0.0` if you cannot initially access the webserver

You must also publish a port on your host machine to bind to the container's web port (default 42010)
You must publish a port on your host machine to bind to the container's web port (default 42010). The container uses IPv4 per default.

An example of a minimum run configuration when accessing maloja from an IPv4 address IE `localhost:42010`:
An example of a minimum run configuration to access maloja via `localhost:42010`:

```console
docker run -p 42010:42010 -e MALOJA_HOST=0.0.0.0 maloja
docker run -p 42010:42010 -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata maloja
```

### Extras

* If you'd like to display images, you will need API keys for [Last.fm](https://www.last.fm/api/account/create) and [Spotify](https://developer.spotify.com/dashboard/applications). These are free of charge!

* Put your server behind a reverse proxy for SSL encryption. Make sure that you're proxying to the IPv6 address unless you changed your settings to use IPv4.
* Put your server behind a reverse proxy for SSL encryption. Make sure that you're proxying to the IPv6 or IPv4 address according to your settings.

* You can set up a cronjob to start your server on system boot, and potentially restart it on a regular basis:

@@ -140,11 +135,13 @@ If you need to run the server in the foreground, use

### Data

* If you would like to import all your previous last.fm scrobbles, use [benfoxall's website](https://benjaminbenben.com/lastfm-to-csv/) ([GitHub page](https://github.com/benfoxall/lastfm-to-csv)). Use the command `maloja import *filename*` to import the downloaded file into Maloja.
If you would like to import your previous scrobbles, use the command `maloja import *filename*`. This works on:

* To backup your data, run `maloja backup` or, to only backup essential data (no artwork etc), `maloja backup -l minimal`.
* a Last.fm export generated by [benfoxall's website](https://benjaminbenben.com/lastfm-to-csv/) ([GitHub page](https://github.com/benfoxall/lastfm-to-csv))
* an official [Spotify data export file](https://www.spotify.com/us/account/privacy/)
* the export of another Maloja instance

* To fix your database (e.g. after you've added new rules), use `maloja fix`.
To backup your data, run `maloja backup`, optionally with `--include_images`.

### Customization

@@ -157,7 +154,7 @@ If you need to run the server in the foreground, use

## How to scrobble

You can set up any amount of API keys in the file `authenticated_machines.tsv` in the `/etc/maloja/clients` folder. It is recommended to define a different API key for every scrobbler you use.
You can set up any amount of API keys in the file `apikeys.yml` in your configuration folder (or via the web interface). It is recommended to define a different API key for every scrobbler you use.

### Native support

@@ -167,7 +164,6 @@ These solutions allow you to directly setup scrobbling to your Maloja server:
* [Multi Scrobbler](https://github.com/FoxxMD/multi-scrobbler) Desktop Application
* [Cmus-maloja-scrobbler](https://git.sr.ht/~xyank/cmus-maloja-scrobbler) Script
* [OngakuKiroku](https://github.com/Atelier-Shiori/OngakuKiroku) Desktop Application (Mac)
* [Albula](https://github.com/krateng/albula) Music Server
* [Maloja Scrobbler](https://chrome.google.com/webstore/detail/maloja-scrobbler/cfnbifdmgbnaalphodcbandoopgbfeeh) Chromium Extension (also included in the repository) for Plex Web, Spotify, Bandcamp, Soundcloud or Youtube Music

### Native API

@@ -6,6 +6,10 @@ chrome.runtime.onMessage.addListener(onInternalMessage);

tabManagers = {}


const ALWAYS_SCROBBLE_SECONDS = 60*3;
// Longer songs are always scrobbled when playing at least 3 minutes

pages = {
    "Plex Web":{
        "patterns":[
@@ -282,7 +286,7 @@ class Controller {

        //ONLY CASE 2: Playback ended
        if (artist != this.currentArtist || title != this.currentTitle) {
            if (this.alreadyPlayed > this.currentLength / 2) {
            if ((this.alreadyPlayed > this.currentLength / 2) || (this.alreadyPlayed > ALWAYS_SCROBBLE_SECONDS)) {
                scrobble(this.currentArtist,this.currentTitle,this.alreadyPlayed)
                this.alreadyPlayed = 0
            }

@@ -32,7 +32,7 @@ Node.prototype.xpath = getxpath;

bar = document.xpath(maloja_scrobbler_selector_playbar, XPathResult.FIRST_ORDERED_NODE_TYPE);
if (bar == null) {
    console.log("Nothing playing right now!");
    console.log("[Maloja Scrobbler] Nothing playing right now!");
    chrome.runtime.sendMessage({type:"stopPlayback",time:Math.floor(Date.now()),artist:"",title:""});
}
else {
@@ -78,12 +78,12 @@ else {
        label_paused = "Play"
    }
    if (control == label_paused) {
        console.log("Not playing right now");
        console.log("[Maloja Scrobbler] Not playing right now");
        chrome.runtime.sendMessage({type:"stopPlayback",time:Math.floor(Date.now()),artist:artist,title:title});
        //stopPlayback()
    }
    else if (control == label_playing) {
        console.log("Playing " + artist + " - " + title + " (" + durationSeconds + " sec)");
        console.log("[Maloja Scrobbler] Playing " + artist + " - " + title + " (" + durationSeconds + " sec)");
        chrome.runtime.sendMessage({type:"startPlayback",time:Math.floor(Date.now()),artist:artist,title:title,duration:durationSeconds});
        //startPlayback(artist,title,durationSeconds)
    }

@@ -21,7 +21,6 @@ dependencies = [
    "requests"
]

[project.scripts]

[build-system]
requires = ["flit_core >=3.2,<4"]

dev/run_docker.sh (new file, 2 lines)
@@ -0,0 +1,2 @@
docker build -t maloja . -f Containerfile
docker run --rm -p 42010:42010 -v $PWD/testdata:/mlj -e MALOJA_DATA_DIRECTORY=/mlj maloja

dev/run_podman.sh (new file, 2 lines)
@@ -0,0 +1,2 @@
podman build -t maloja . -f Containerfile
podman run --rm -p 42010:42010 -v $PWD/testdata:/mlj -e MALOJA_DATA_DIRECTORY=/mlj maloja

dev/templates/APKBUILD.jinja (new file, 36 lines)
@@ -0,0 +1,36 @@
# Contributor: Johannes Krattenmacher <maloja@dev.krateng.ch>
# Maintainer: Johannes Krattenmacher <maloja@dev.krateng.ch>
pkgname={{ tool.flit.module.name }}
pkgver={{ project.version }}
pkgrel=0
pkgdesc="{{ project.description }}"
url="{{ project.urls.homepage }}"
arch="noarch"
license="GPL-3.0"
depends="{{ tool.osreqs.alpine.run | join(' ') }}"
pkgusers=$pkgname
pkggroups=$pkgname
depends_dev="{{ tool.osreqs.alpine.build | join(' ') }}"
makedepends="$depends_dev"
source="
    $pkgname-$pkgver.tar.gz::{{ project.urls.repository }}/archive/refs/tags/v$pkgver.tar.gz
"
builddir="$srcdir"/$pkgname-$pkgver


build() {
    cd $builddir
    python3 -m build .
    pip3 install dist/*.tar.gz
}

package() {
    mkdir -p /etc/$pkgname || return 1
    mkdir -p /var/lib/$pkgname || return 1
    mkdir -p /var/cache/$pkgname || return 1
    mkdir -p /var/logs/$pkgname || return 1
}

# TODO
sha512sums="a674eaaaa248fc2b315514d79f9a7a0bac6aa1582fe29554d9176e8b551e8aa3aa75abeebdd7713e9e98cc987e7bd57dc7a5e9a2fb85af98b9c18cb54de47bf7 $pkgname-${pkgver}.tar.gz"

dev/templates/Containerfile.jinja (new file, 48 lines)
@@ -0,0 +1,48 @@
FROM alpine:3.15
# Python image includes two Python versions, so use base Alpine

# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>

WORKDIR /usr/src/app

# Install run dependencies first
RUN apk add --no-cache {{ tool.osreqs.alpine.run | join(' ') }}

# system pip could be removed after build, but apk then decides to also remove all its
# python dependencies, even if they are explicitly installed as python packages
# whut
RUN \
    apk add py3-pip && \
    pip install wheel

# these are more static than the real requirements, which means caching
COPY ./requirements_pre.txt ./requirements_pre.txt

RUN \
    apk add --no-cache --virtual .build-deps {{ tool.osreqs.alpine.build | join(' ') }} && \
    pip install --no-cache-dir -r requirements_pre.txt && \
    apk del .build-deps

# less likely to be cached
COPY ./requirements.txt ./requirements.txt

RUN \
    apk add --no-cache --virtual .build-deps {{ tool.osreqs.alpine.build | join(' ') }} && \
    pip install --no-cache-dir -r requirements.txt && \
    apk del .build-deps


# no chance for caching below here

COPY . .

RUN pip install /usr/src/app

# Docker-specific configuration and default to IPv4
ENV MALOJA_SKIP_SETUP=yes
ENV MALOJA_HOST=0.0.0.0

EXPOSE 42010
# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint
ENTRYPOINT ["maloja", "run"]

dev/templates/install/install_alpine.sh.jinja (new file, 4 lines)
@@ -0,0 +1,4 @@
{% include 'install/install_dependencies_alpine.sh.jinja' %}
apk add py3-pip
pip install wheel
pip install malojaserver

dev/templates/install/install_debian.sh.jinja (new file, 4 lines)
@@ -0,0 +1,4 @@
{% include 'install/install_dependencies_debian.sh.jinja' %}
apt install python3-pip
pip install wheel
pip install malojaserver

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
apk update
apk add \
{{ (tool.osreqs.alpine.build + tool.osreqs.alpine.run + tool.osreqs.alpine.opt) | join(' \\\n\t') }}

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
apt update
apt install \
{{ (tool.osreqs.debian.build + tool.osreqs.debian.run + tool.osreqs.debian.opt) | join(' \\\n\t') }}

dev/templates/requirements.txt.jinja (new file, 3 lines)
@@ -0,0 +1,3 @@
{% for dep in project.dependencies -%}
{{ dep }}
{% endfor %}

dev/templates/requirements_extra.txt.jinja (new file, 3 lines)
@@ -0,0 +1,3 @@
{% for dep in project['optional-dependencies'].full -%}
{{ dep }}
{% endfor %}

dev/update_dist_files.py (new file, 33 lines)
@@ -0,0 +1,33 @@
import toml
import os
import jinja2

env = jinja2.Environment(
    loader=jinja2.FileSystemLoader('dev/templates'),
    autoescape=jinja2.select_autoescape(['html', 'xml']),
    keep_trailing_newline=True
)

with open("pyproject.toml") as filed:
    data = toml.load(filed)

templatedir = "./dev/templates"

for root,dirs,files in os.walk(templatedir):

    reldirpath = os.path.relpath(root,start=templatedir)
    for f in files:

        relfilepath = os.path.join(reldirpath,f)

        if not f.endswith('.jinja'): continue

        srcfile = os.path.join(root,f)
        trgfile = os.path.join(reldirpath,f.replace(".jinja",""))

        template = env.get_template(relfilepath)
        result = template.render(**data)

        with open(trgfile,"w") as filed:
            filed.write(result)
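
This script walks `dev/templates` and renders every `*.jinja` file with the parsed `pyproject.toml` as context, writing the result to the matching path without the `.jinja` suffix. A minimal sketch of that render step on a single inline template (the template string here is illustrative, mirroring `requirements.txt.jinja` above; it is not a repo file):

```python
import toml
import jinja2

# Parse pyproject.toml into a dict, then render an inline template with it.
with open("pyproject.toml") as filed:
    data = toml.load(filed)

template = jinja2.Template(
    "{% for dep in project.dependencies -%}\n{{ dep }}\n{% endfor %}",
    keep_trailing_newline=True,
)
print(template.render(**data))  # one dependency per line, requirements.txt style
```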

example-compose.yml (new file, 20 lines)
@@ -0,0 +1,20 @@
services:
  maloja:
    # from dockerhub
    image: "krateng/maloja:latest"
    # or built locally
    #build:
    #  context: .
    #  dockerfile: ./Containerfile
    ports:
      - "42010:42010"
    # different directories for configuration, state and logs
    volumes:
      - "$PWD/config:/etc/maloja"
      - "$PWD/data:/var/lib/maloja"
      - "$PWD/logs:/var/log/maloja"
    #you can also have everything together instead:
    #volumes:
    #- "$PWD/data:/data"
    #environment:
    #- "MALOJA_DATA_DIRECTORY=/data"

@@ -1 +0,0 @@
python3

@@ -1,8 +0,0 @@
gcc
python3-dev
libxml2-dev
libxslt-dev
libffi-dev
libc-dev
py3-pip
linux-headers

@@ -1 +0,0 @@
tzdata

@@ -1 +0,0 @@
vips

install/install_alpine.sh (new file, 19 lines)
@@ -0,0 +1,19 @@
#!/usr/bin/env sh
apk update
apk add \
    gcc \
    g++ \
    python3-dev \
    libxml2-dev \
    libxslt-dev \
    libffi-dev \
    libc-dev \
    py3-pip \
    linux-headers \
    python3 \
    tzdata \
    vips

apk add py3-pip
pip install wheel
pip install malojaserver

install/install_debian.sh (new file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/env sh
apt update
apt install \
    python3-pip \
    python3

apt install python3-pip
pip install wheel
pip install malojaserver

@@ -1,4 +0,0 @@
sed 's/#.*//' ./install/dependencies_basic.txt | xargs apk add
sed 's/#.*//' ./install/dependencies_build.txt | xargs apk add
sed 's/#.*//' ./install/dependencies_run.txt | xargs apk add
sed 's/#.*//' ./install/dependencies_run_opt.txt | xargs apk add

@@ -1,15 +1,15 @@
#!/usr/bin/env bash
#!/usr/bin/env sh
apk update
apk add \
    python3 \
    python3-dev \
    gcc \
    g++ \
    python3-dev \
    libxml2-dev \
    libxslt-dev \
    libffi-dev \
    libc-dev \
    py3-pip \
    linux-headers \
    python3 \
    tzdata \
    vips
pip3 install wheel
pip3 install malojaserver

install/install_dependencies_debian.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env sh
apt update
apt install \
    python3-pip \
    python3

@@ -1,4 +0,0 @@
#!/usr/bin/env bash
apt update
apt install python3 python3-pip
pip3 install malojaserver

@@ -4,7 +4,7 @@
# you know what f*ck it
# this is hardcoded for now because of that damn project / package name discrepancy
# i'll fix it one day
VERSION = "2.14.10"
VERSION = "3.0.0-beta.6"
HOMEPAGE = "https://github.com/krateng/maloja"

@@ -1,22 +1,28 @@
from . import native_v1
from .audioscrobbler import Audioscrobbler
from .audioscrobbler_legacy import AudioscrobblerLegacy
from .listenbrainz import Listenbrainz
from ._apikeys import apikeystore


import copy
from bottle import redirect, request, response
from urllib.parse import urlencode

native_apis = [
    native_v1.api
]
standardized_apis = [
    Listenbrainz(),
    Audioscrobbler(),
    AudioscrobblerLegacy()
]


def init_apis(server):

    from . import native_v1
    from .audioscrobbler import Audioscrobbler
    from .audioscrobbler_legacy import AudioscrobblerLegacy
    from .listenbrainz import Listenbrainz

    native_apis = [
        native_v1.api
    ]
    standardized_apis = [
        Listenbrainz(),
        Audioscrobbler(),
        AudioscrobblerLegacy()
    ]

    for api in native_apis:
        api.mount(server=server,path="apis/"+api.__apipath__)

maloja/apis/_apikeys.py (new file, 29 lines)
@@ -0,0 +1,29 @@
### API KEYS
### symmetric keys are fine since we hopefully use HTTPS

from doreah.keystore import KeyStore
from doreah.logging import log

from ..globalconf import data_dir

apikeystore = KeyStore(file=data_dir['clients']("apikeys.yml"),save_endpoint="/apis/mlj_1/apikeys")


from .. import upgrade
upgrade.upgrade_apikeys()


# skip regular authentication if api key is present in request
# an api key now ONLY permits scrobbling tracks, no other admin tasks
def api_key_correct(request,args,kwargs):
    if "key" in kwargs:
        apikey = kwargs.pop("key")
    elif "apikey" in kwargs:
        apikey = kwargs.pop("apikey")
    else: return False

    client = apikeystore.check_and_identify_key(apikey)
    if client:
        return {'client':client}
    else:
        return False

@@ -58,7 +58,7 @@ class APIHandler:


    def wrapper(self,path:Multi=[],**keys):
        log("API request: " + str(path))# + " | Keys: " + str({k:keys.get(k) for k in keys}))
        log(f"{self.__apiname__} API request: {path}")# + " | Keys: " + str({k:keys.get(k) for k in keys}))

        try:
            response.status,result = self.handle(path,keys)
@@ -89,13 +89,10 @@ class APIHandler:
        return method(path,keys)


    def scrobble(self,artiststr,titlestr,time=None,duration=None,album=None):
        logmsg = "Incoming scrobble (API: {api}): ARTISTS: {artiststr}, TRACK: {titlestr}"
        log(logmsg.format(api=self.__apiname__,artiststr=artiststr,titlestr=titlestr))
        if time is None: time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
    def scrobble(self,rawscrobble,client=None):

        # fixing etc is handled by the main scrobble function
        try:
            (artists,title) = cla.fullclean(artiststr,titlestr)
            database.createScrobble(artists,title,time)
            database.sync()
            return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
        except:
            raise ScrobblingException()
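
The new handler-level `scrobble()` no longer cleans and stores the scrobble itself; it just forwards a raw dict to `database.incoming_scrobble`. For orientation, a sketch of the dict shape the standardized handlers in this commit build (the key names are taken from this diff; the values are invented):

```python
# Hypothetical example values; only the keys are what this commit actually uses.
rawscrobble = {
    'track_artists': ['Artist A', 'Artist B'],  # always a list, even for one artist
    'track_title': 'Some Song',
    'album_name': 'Some Album',        # optional
    'album_artists': ['Artist A'],     # optional
    'scrobble_time': 1650000000,       # UNIX timestamp, optional
    'scrobble_duration': 200,          # seconds actually listened, optional
    'track_length': 240,               # total track length in seconds, optional
}
```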

@@ -1,6 +1,7 @@
from ._base import APIHandler
from ._exceptions import *
from .. import database
from ._apikeys import apikeystore

class Audioscrobbler(APIHandler):
    __apiname__ = "Audioscrobbler"
@@ -14,7 +15,7 @@ class Audioscrobbler(APIHandler):
    def init(self):

        # no need to save these on disk, clients can always request a new session
        self.mobile_sessions = []
        self.mobile_sessions = {}
        self.methods = {
            "auth.getMobileSession":self.authmobile,
            "track.scrobble":self.submit_scrobble
@@ -30,29 +31,45 @@ class Audioscrobbler(APIHandler):
    def get_method(self,pathnodes,keys):
        return keys.get("method")

    def generate_key(self,client):
        key = "".join(
            str(
                random.choice(
                    list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") +
                    list("ABCDEFGHIJKLMNOPQRSTUVWXYZ"))) for _ in range(64))

        self.mobile_sessions[key] = client
        return key

    def authmobile(self,pathnodes,keys):
        token = keys.get("authToken")
        user = keys.get("username")
        password = keys.get("password")
        # either username and password
        if user is not None and password is not None:
            if password in database.allAPIkeys():
                sessionkey = generate_key(self.mobile_sessions)
            client = apikeystore.check_and_identify_key(password)
            if client:
                sessionkey = self.generate_key(client)
                return 200,{"session":{"key":sessionkey}}
            else:
                raise InvalidAuthException()
        # or username and token (deprecated by lastfm)
        elif user is not None and token is not None:
            for key in database.allAPIkeys():
            for client in apikeystore:
                key = apikeystore[client]
                if md5(user + md5(key)) == token:
                    sessionkey = generate_key(self.mobile_sessions)
                    sessionkey = self.generate_key(client)
                    return 200,{"session":{"key":sessionkey}}
            raise InvalidAuthException()
        else:
            raise BadAuthException()

    def submit_scrobble(self,pathnodes,keys):
        if keys.get("sk") is None or keys.get("sk") not in self.mobile_sessions:
        key = keys.get("sk")
        if key is None:
            raise InvalidSessionKey()
        client = self.mobile_sessions.get(key)
        if not client:
            raise InvalidSessionKey()
        if "track" in keys and "artist" in keys:
            artiststr,titlestr = keys["artist"], keys["track"]
@@ -62,7 +79,7 @@ class Audioscrobbler(APIHandler):
            except:
                timestamp = None
            #database.createScrobble(artists,title,timestamp)
            self.scrobble(artiststr,titlestr,time=timestamp)
            self.scrobble({'track_artists':[artiststr],'track_title':titlestr,'scrobble_time':timestamp},client=client)
        else:
            for num in range(50):
                if "track[" + str(num) + "]" in keys:
@@ -82,13 +99,3 @@ def md5(input):
    m = hashlib.md5()
    m.update(bytes(input,encoding="utf-8"))
    return m.hexdigest()

def generate_key(ls):
    key = "".join(
        str(
            random.choice(
                list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") +
                list("ABCDEFGHIJKLMNOPQRSTUVWXYZ"))) for _ in range(64))

    ls.append(key)
    return key

@@ -1,6 +1,7 @@
from ._base import APIHandler
from ._exceptions import *
from .. import database
from ._apikeys import apikeystore

from bottle import request

@@ -15,7 +16,7 @@ class AudioscrobblerLegacy(APIHandler):
    def init(self):

        # no need to save these on disk, clients can always request a new session
        self.mobile_sessions = []
        self.mobile_sessions = {}
        self.methods = {
            "handshake":self.handshake,
            "nowplaying":self.now_playing,
@@ -41,9 +42,10 @@ class AudioscrobblerLegacy(APIHandler):
        protocol = 'http' if (keys.get("u") == 'nossl') else request.urlparts.scheme

        if auth is not None:
            for key in database.allAPIkeys():
                if check_token(auth, key, timestamp):
                    sessionkey = generate_key(self.mobile_sessions)
            for client in apikeystore:
                key = apikeystore[client]
                if self.check_token(auth,key,timestamp):
                    sessionkey = self.generate_key(client)
                    return 200, (
                        "OK\n"
                        f"{sessionkey}\n"
@@ -65,8 +67,10 @@ class AudioscrobblerLegacy(APIHandler):
        return 200,"OK\n"

    def submit_scrobble(self,pathnodes,keys):
        if keys.get("s") is None or keys.get("s") not in self.mobile_sessions:
        key = keys.get("s")
        if key is None or key not in self.mobile_sessions:
            raise InvalidSessionKey()
        client = self.mobile_sessions.get(key)
        for count in range(50):
            artist_key = f"a[{count}]"
            track_key = f"t[{count}]"
@@ -79,10 +83,29 @@ class AudioscrobblerLegacy(APIHandler):
            except:
                timestamp = None
            #database.createScrobble(artists,title,timestamp)
            self.scrobble(artiststr,titlestr,time=timestamp)
            self.scrobble({
                'track_artists':[artiststr],
                'track_title':titlestr,
                'scrobble_time':timestamp
            },client=client)
        return 200,"OK\n"


    def check_token(self, received_token, expected_key, ts):
        expected_token = md5(md5(expected_key) + ts)
        return received_token == expected_token

    def generate_key(self,client):
        key = "".join(
            str(
                random.choice(
                    list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") +
                    list("ABCDEFGHIJKLMNOPQRSTUVWXYZ"))) for _ in range(64))

        self.mobile_sessions[key] = client
        return key


import hashlib
import random

@@ -90,20 +113,3 @@ def md5(input):
    m = hashlib.md5()
    m.update(bytes(input,encoding="utf-8"))
    return m.hexdigest()

def generate_key(ls):
    key = "".join(
        str(
            random.choice(
                list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") +
                list("ABCDEFGHIJKLMNOPQRSTUVWXYZ"))) for _ in range(64))

    ls.append(key)
    return key

def lastfm_token(password, ts):
    return md5(md5(password) + ts)

def check_token(received_token, expected_key, ts):
    expected_token = lastfm_token(expected_key, ts)
    return received_token == expected_token
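
The handshake authentication above is the standard Audioscrobbler 1.2 scheme: the client sends `md5(md5(password) + timestamp)` and the server recomputes that token for every known API key until one matches. A small self-contained sketch of the round trip (the key and timestamp values are made up):

```python
import hashlib

def md5(s):
    return hashlib.md5(s.encode("utf-8")).hexdigest()

api_key = "my-maloja-api-key"  # hypothetical key from apikeys.yml
timestamp = "1650000000"       # sent by the client in the handshake request

client_token = md5(md5(api_key) + timestamp)  # what the scrobbler submits
server_token = md5(md5(api_key) + timestamp)  # what check_token recomputes
assert client_token == server_token
```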

@@ -2,6 +2,7 @@ from ._base import APIHandler
from ._exceptions import *
from .. import database
import datetime
from ._apikeys import apikeystore

from ..globalconf import malojaconfig

@@ -36,7 +37,9 @@ class Listenbrainz(APIHandler):
        except:
            raise BadAuthException()

        if token not in database.allAPIkeys():
        client = apikeystore.check_and_identify_key(token)

        if not client:
            raise InvalidAuthException()

        try:
@@ -59,7 +62,11 @@ class Listenbrainz(APIHandler):
        except:
            raise MalformedJSONException()

        self.scrobble(artiststr,titlestr,timestamp)
        self.scrobble({
            'track_artists':[artiststr],
            'track_title':titlestr,
            'scrobble_time':timestamp
        },client=client)

        return 200,{"status":"ok"}

@@ -69,7 +76,7 @@ class Listenbrainz(APIHandler):
            token = self.get_token_from_request_keys(keys)
        except:
            raise BadAuthException()
        if token not in database.allAPIkeys():
        if not apikeystore.check_key(token):
            raise InvalidAuthException()
        else:
            return 200,{"code":200,"message":"Token valid.","valid":True,"user_name":malojaconfig["NAME"]}

@@ -1,20 +1,43 @@
from ..database import *
from ..globalconf import malojaconfig, apikeystore
from ..__pkginfo__ import VERSION
from ..malojauri import uri_to_internal
from .. import utilities
import os

from bottle import response, static_file
from bottle import response, static_file, request, FormsDict

from doreah.logging import log
from doreah.auth import authenticated_api, authenticated_api_with_alternate, authenticated_function

# nimrodel API
from nimrodel import EAPI as API
from nimrodel import Multi


from .. import database
from ..globalconf import malojaconfig, data_dir


from ..__pkginfo__ import VERSION
from ..malojauri import uri_to_internal, compose_querystring, internal_to_uri
from .. import images
from ._apikeys import apikeystore, api_key_correct


api = API(delay=True)
api.__apipath__ = "mlj_1"


@api.get("test")
def test_server(key=None):
    """Pings the server. If an API key is supplied, the server will respond with 200
@@ -24,7 +47,7 @@ def test_server(key=None):
    :param string key: An API key to be tested. Optional.
    """
    response.set_header("Access-Control-Allow-Origin","*")
    if key is not None and not (checkAPIkey(key)):
    if key is not None and not apikeystore.check_key(key):
        response.status = 403
        return {"error":"Wrong API key"}

@@ -44,7 +67,7 @@ def server_info():
        "name":malojaconfig["NAME"],
        "version":VERSION.split("."),
        "versionstring":VERSION,
        "db_status":dbstatus
        "db_status":database.dbstatus
    }

@@ -56,7 +79,7 @@ def get_scrobbles_external(**keys):
    k_filter, k_time, _, k_amount, _ = uri_to_internal(keys,api=True)
    ckeys = {**k_filter, **k_time, **k_amount}

    result = get_scrobbles(**ckeys)
    result = database.get_scrobbles(**ckeys)

    offset = (k_amount.get('page') * k_amount.get('perpage')) if k_amount.get('perpage') is not math.inf else 0
    result = result[offset:]
@@ -81,7 +104,7 @@ def get_scrobbles_num_external(**keys):
    k_filter, k_time, _, k_amount, _ = uri_to_internal(keys)
    ckeys = {**k_filter, **k_time, **k_amount}

    result = get_scrobbles_num(**ckeys)
    result = database.get_scrobbles_num(**ckeys)
    return {"amount":result}

@@ -91,14 +114,14 @@ def get_tracks_external(**keys):
    k_filter, _, _, _, _ = uri_to_internal(keys,forceArtist=True)
    ckeys = {**k_filter}

    result = get_tracks(**ckeys)
    result = database.get_tracks(**ckeys)
    return {"list":result}


@api.get("artists")
def get_artists_external():
    result = get_artists()
    result = database.get_artists()
    return {"list":result}

@@ -110,7 +133,7 @@ def get_charts_artists_external(**keys):
    _, k_time, _, _, _ = uri_to_internal(keys)
    ckeys = {**k_time}

    result = get_charts_artists(**ckeys)
    result = database.get_charts_artists(**ckeys)
    return {"list":result}

@@ -120,7 +143,7 @@ def get_charts_tracks_external(**keys):
    k_filter, k_time, _, _, _ = uri_to_internal(keys,forceArtist=True)
    ckeys = {**k_filter, **k_time}

    result = get_charts_tracks(**ckeys)
    result = database.get_charts_tracks(**ckeys)
    return {"list":result}

@@ -131,7 +154,7 @@ def get_pulse_external(**keys):
    k_filter, k_time, k_internal, k_amount, _ = uri_to_internal(keys)
    ckeys = {**k_filter, **k_time, **k_internal, **k_amount}

    results = get_pulse(**ckeys)
    results = database.get_pulse(**ckeys)
    return {"list":results}

@@ -142,7 +165,7 @@ def get_performance_external(**keys):
    k_filter, k_time, k_internal, k_amount, _ = uri_to_internal(keys)
    ckeys = {**k_filter, **k_time, **k_internal, **k_amount}

    results = get_performance(**ckeys)
    results = database.get_performance(**ckeys)
    return {"list":results}

@@ -153,7 +176,7 @@ def get_top_artists_external(**keys):
    _, k_time, k_internal, _, _ = uri_to_internal(keys)
    ckeys = {**k_time, **k_internal}

    results = get_top_artists(**ckeys)
    results = database.get_top_artists(**ckeys)
    return {"list":results}

@@ -166,23 +189,23 @@ def get_top_tracks_external(**keys):

    # IMPLEMENT THIS FOR TOP TRACKS OF ARTIST AS WELL?

    results = get_top_tracks(**ckeys)
    results = database.get_top_tracks(**ckeys)
    return {"list":results}


@api.get("artistinfo")
def artistInfo_external(**keys):
def artist_info_external(**keys):
    k_filter, _, _, _, _ = uri_to_internal(keys,forceArtist=True)
    ckeys = {**k_filter}

    return artistInfo(**ckeys)
    return database.artist_info(**ckeys)


@api.get("trackinfo")
def trackInfo_external(artist:Multi[str],**keys):
def track_info_external(artist:Multi[str],**keys):
    # transform into a multidict so we can use our normal uri_to_internal function
    keys = FormsDict(keys)
    for a in artist:
@@ -190,50 +213,60 @@ def trackInfo_external(artist:Multi[str],**keys):
    k_filter, _, _, _, _ = uri_to_internal(keys,forceTrack=True)
    ckeys = {**k_filter}

    return trackInfo(**ckeys)
    return database.track_info(**ckeys)

@api.get("compare")
def compare_external(**keys):
    return compare(keys["remote"])
    return database.compare(keys["remote"])


@api.get("newscrobble")
@authenticated_api_with_alternate(api_key_correct)
def get_post_scrobble(artist:Multi,**keys):
    """DEPRECATED. Use the equivalent POST method instead."""
    artists = artist
    title = keys.get("title")
    album = keys.get("album")
    duration = keys.get("seconds")
    time = keys.get("time")
    if time is not None: time = int(time)

    return incoming_scrobble(artists,title,album=album,duration=duration,time=time)

@api.post("newscrobble")
@authenticated_api_with_alternate(api_key_correct)
def post_scrobble(artist:Multi=None,**keys):
@authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
def post_scrobble(artist:Multi=None,auth_result=None,**keys):
    """Submit a new scrobble.

    :param string artist: Artist. Can be submitted multiple times as query argument for multiple artists.
    :param string artists: List of artists. Overwritten by artist parameter.
    :param string title: Title of the track.
    :param string album: Name of the album. Optional.
    :param string albumartists: Album artists. Optional.
    :param int duration: Actual listened duration of the scrobble in seconds. Optional.
    :param int length: Total length of the track in seconds. Optional.
    :param int time: UNIX timestamp of the scrobble. Optional, not needed if scrobble is at time of request.
    :param boolean nofix: Skip server-side metadata parsing. Optional.
    """
    #artists = "/".join(artist)
    artists = artist if artist is not None else keys.get("artists")
    title = keys.get("title")
    album = keys.get("album")
    duration = keys.get("seconds")
    time = keys.get("time")
    nofix = keys.get("nofix") is not None
    if time is not None: time = int(time)

    return incoming_scrobble(artists,title,album=album,duration=duration,time=time,fix=not nofix)
    rawscrobble = {
        'track_artists':artist if artist is not None else keys.get("artists"),
        'track_title':keys.get('title'),
        'album_name':keys.get('album'),
        'album_artists':keys.get('albumartists'),
        'scrobble_duration':keys.get('duration'),
        'track_length':keys.get('length'),
        'scrobble_time':int(keys.get('time')) if (keys.get('time') is not None) else None
    }

    # for logging purposes, don't pass values that we didn't actually supply
    rawscrobble = {k:rawscrobble[k] for k in rawscrobble if rawscrobble[k]}

    result = database.incoming_scrobble(
        rawscrobble,
        client='browser' if auth_result.get('doreah_native_auth_check') else auth_result.get('client'),
        api='native/v1',
        fix=(keys.get("nofix") is None)
    )

    if result:
        return {
            'status': 'success',
            'track': {
                'artists':result['track']['artists'],
                'title':result['track']['title']
            }
        }
    else:
        return {"status":"failure"}


@@ -259,15 +292,14 @@ def import_rulemodule(**keys):
@authenticated_api
def rebuild(**keys):
    log("Database rebuild initiated!")
    sync()
    database.sync()
    dbstatus['rebuildinprogress'] = True
    from ..proccontrol.tasks.fixexisting import fix
    fix()
    global cla, coa
    global cla
    cla = CleanerAgent()
    coa = CollectorAgent()
    build_db()
    invalidate_caches()
    database.build_db()
    database.invalidate_caches()


@@ -279,8 +311,8 @@ def search(**keys):
    if max_ is not None: max_ = int(max_)
    query = query.lower()

    artists = db_search(query,type="ARTIST")
    tracks = db_search(query,type="TRACK")
    artists = database.db_search(query,type="ARTIST")
    tracks = database.db_search(query,type="TRACK")


@@ -296,14 +328,14 @@ def search(**keys):
            'name': a,
            'link': "/artist?" + compose_querystring(internal_to_uri({"artist": a})),
        }
        result["image"] = "/image?" + compose_querystring(internal_to_uri({"artist":a}))
        result["image"] = images.get_artist_image(a)
        artists_result.append(result)

    tracks_result = []
    for t in tracks:
        result = t
        result["link"] = "/track?" + compose_querystring(internal_to_uri({"track":t}))
        result["image"] = "/image?" + compose_querystring(internal_to_uri({"track":t}))
        result["image"] = images.get_track_image(t)
        tracks_result.append(result)

    return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]}
@@ -318,13 +350,15 @@ def add_picture(b64,artist:Multi=[],title=None):
    if title is not None: keys.append("title",title)
    k_filter, _, _, _, _ = uri_to_internal(keys)
    if "track" in k_filter: k_filter = k_filter["track"]
    utilities.set_image(b64,**k_filter)
    images.set_image(b64,**k_filter)


@api.post("newrule")
@authenticated_api
def newrule(**keys):
    tsv.add_entry(data_dir['rules']("webmade.tsv"),[k for k in keys])
    pass
    # TODO after implementing new rule system
    #tsv.add_entry(data_dir['rules']("webmade.tsv"),[k for k in keys])
    #addEntry("rules/webmade.tsv",[k for k in keys])


@@ -354,3 +388,20 @@ def get_backup(**keys):
    archivefile = backup(tmpfolder)

    return static_file(os.path.basename(archivefile),root=tmpfolder)

@api.get("export")
@authenticated_api
def get_export(**keys):
    from ..proccontrol.tasks.export import export
    import tempfile

    tmpfolder = tempfile.gettempdir()
    resultfile = export(tmpfolder)

    return static_file(os.path.basename(resultfile),root=tmpfolder)


@api.post("delete_scrobble")
@authenticated_api
def delete_scrobble(timestamp):
    database.remove_scrobble(timestamp)
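
The rewritten `post_scrobble` above is the native scrobbling endpoint; with `api.__apipath__ = "mlj_1"` mounted under `apis/`, it is reachable at `/apis/mlj_1/newscrobble`. A minimal client sketch for it (the host assumes a local instance on the default port 42010 from this diff; the API key value is hypothetical):

```python
import requests

# Submit one scrobble to the native v1 API, authenticated with an API key.
response = requests.post(
    "http://localhost:42010/apis/mlj_1/newscrobble",
    data={
        "artist": "Some Artist",  # can be repeated for multiple artists
        "title": "Some Song",
        "key": "my-maloja-api-key",  # hypothetical key from apikeys.yml
    },
)
print(response.json())  # {'status': 'success', 'track': {...}} on success
```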
@ -1,8 +1,8 @@
import re
#from . import utilities
from doreah import tsv
import os
import csv

from .globalconf import data_dir, malojaconfig
import pkg_resources

# need to do this as a class so it can retain loaded settings from file
# apparently this is not true
@ -13,19 +13,29 @@ class CleanerAgent:
		self.updateRules()

	def updateRules(self):
		raw = tsv.parse_all(data_dir["rules"](),"string","string","string","string")
		self.rules_belongtogether = [b for [a,b,c,d] in raw if a=="belongtogether"]
		self.rules_notanartist = [b for [a,b,c,d] in raw if a=="notanartist"]
		self.rules_replacetitle = {b.lower():c for [a,b,c,d] in raw if a=="replacetitle"}
		self.rules_replaceartist = {b.lower():c for [a,b,c,d] in raw if a=="replaceartist"}
		self.rules_ignoreartist = [b.lower() for [a,b,c,d] in raw if a=="ignoreartist"]
		self.rules_addartists = {c.lower():(b.lower(),d) for [a,b,c,d] in raw if a=="addartists"}
		self.rules_fixartists = {c.lower():b for [a,b,c,d] in raw if a=="fixartists"}
		self.rules_artistintitle = {b.lower():c for [a,b,c,d] in raw if a=="artistintitle"}

		rawrules = []
		for f in os.listdir(data_dir["rules"]()):
			if f.split('.')[-1].lower() != 'tsv': continue
			filepath = data_dir["rules"](f)
			with open(filepath,'r') as filed:
				reader = csv.reader(filed,delimiter="\t")
				rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]


		self.rules_belongtogether = [r[1] for r in rawrules if r[0]=="belongtogether"]
		self.rules_notanartist = [r[1] for r in rawrules if r[0]=="notanartist"]
		self.rules_replacetitle = {r[1].lower():r[2] for r in rawrules if r[0]=="replacetitle"}
		self.rules_replaceartist = {r[1].lower():r[2] for r in rawrules if r[0]=="replaceartist"}
		self.rules_ignoreartist = [r[1].lower() for r in rawrules if r[0]=="ignoreartist"]
		self.rules_addartists = {r[2].lower():(r[1].lower(),r[3]) for r in rawrules if r[0]=="addartists"}
		self.rules_fixartists = {r[2].lower():r[1] for r in rawrules if r[0]=="fixartists"}
		self.rules_artistintitle = {r[1].lower():r[2] for r in rawrules if r[0]=="artistintitle"}
		#self.rules_regexartist = [[b,c] for [a,b,c,d] in raw if a=="regexartist"]
		#self.rules_regextitle = [[b,c] for [a,b,c,d] in raw if a=="regextitle"]



	def fullclean(self,artist,title):
		artists = self.parseArtists(self.removespecial(artist))
		title = self.parseTitle(self.removespecial(title))
@ -166,65 +176,6 @@ class CleanerAgent:



#this is for all the runtime changes (counting Trouble Maker as HyunA for charts etc)
class CollectorAgent:

	def __init__(self):
		self.updateRules()

	# rules_countas		dict: real artist -> credited artist
	# rules_countas_id	dict: real artist ID -> credited artist ID
	# rules_include		dict: credited artist -> all real artists

	def updateRules(self):
		raw = tsv.parse_all(data_dir["rules"](),"string","string","string")
		self.rules_countas = {b:c for [a,b,c] in raw if a=="countas"}
		self.rules_countas_id = {}
		self.rules_include = {} #Twice the memory, double the performance!
		# (Yes, we're saving redundant information here, but it's not inelegant if it's within a closed object!)
		for a in self.rules_countas:
			self.rules_include[self.rules_countas[a]] = self.rules_include.setdefault(self.rules_countas[a],[]) + [a]

	# this agent needs to be aware of the current id assignment in the main program
	# inelegant, but the best way I can think of
	def updateIDs(self,artistlist):
		self.rules_countas_id = {artistlist.index(a):artistlist.index(self.rules_countas[a]) for a in self.rules_countas if a in artistlist}
		#self.rules_include_id = {artistlist.index(a):artistlist.index(self.rules_include[a]) for a in self.rules_include}
		#this needs to take lists into account


	# get who is credited for this artist
	def getCredited(self,artist):
		if artist in self.rules_countas:
			return self.rules_countas[artist]
		if artist in self.rules_countas_id:
			return self.rules_countas_id[artist]

		else:
			return artist

	# get all credited artists for the artists given
	def getCreditedList(self,artists):
		updatedArtists = [self.getCredited(artist) for artist in artists]
		return list(set(updatedArtists))

	# get artists who the given artist is given credit for
	def getAllAssociated(self,artist):
		return self.rules_include.get(artist,[])

	# this function is there to check for artists that we should include in the
	# database even though they never have any scrobble.
	def getAllArtists(self):
		return list({self.rules_countas[a] for a in self.rules_countas})
		# artists that count can be nonexistent (counting HyunA as 4Minute even
		# though 4Minute has never been listened to)
		# but artists that are counted as someone else are only relevant if they
		# exist (so we can preemptively declare lots of rules just in case)
		#return list(set([a for a in self.rules_countas] + [self.rules_countas[a] for a in self.rules_countas]))
@ -16,3 +16,5 @@ countas	Sips	The Yogscast
countas	Sjin	The Yogscast
countas	Airi Suzuki	℃-ute
countas	CeeLo Green	Gnarls Barkley
countas	Amelia Watson	Hololive EN
countas	Gawr Gura	Hololive EN
1175
maloja/database.py
File diff suppressed because it is too large
420
maloja/database/__init__.py
Normal file
@ -0,0 +1,420 @@
# server
from bottle import request, response, FormsDict, HTTPError

# rest of the project
from ..cleanup import CleanerAgent
from .. import images
from ..malojatime import register_scrobbletime, time_stamps, ranges, alltime
from ..malojauri import uri_to_internal, internal_to_uri, compose_querystring
from ..thirdparty import proxy_scrobble_all
from ..globalconf import data_dir, malojaconfig
from ..apis import apikeystore
#db
from . import sqldb
from . import cached
from . import dbcache

# doreah toolkit
from doreah.logging import log
from doreah.auth import authenticated_api, authenticated_api_with_alternate
import doreah

# technical
import os
import datetime
import sys
import unicodedata
from collections import namedtuple
from threading import Lock
import yaml, json
import math

# url handling
import urllib


dbstatus = {
	"healthy":False,		# we can access the db
	"rebuildinprogress":False,
	"complete":False		# information is complete
}
class DatabaseNotBuilt(HTTPError):
	def __init__(self):
		super().__init__(
			status=503,
			body="The Maloja Database is being upgraded to Version 3. This could take several minutes.",
			headers={"Retry-After":120}
		)


def waitfordb(func):
	def newfunc(*args,**kwargs):
		if not dbstatus['healthy']: raise DatabaseNotBuilt()
		return func(*args,**kwargs)
	return newfunc
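# Usage sketch (illustrative, not part of this commit): any function guarded
# with @waitfordb raises the 503 DatabaseNotBuilt error above until start_db()
# further down has set dbstatus['healthy'].
#
#	@waitfordb
#	def example_stats(dbconn=None):		# hypothetical helper name
#		return get_scrobbles_num(timerange=alltime())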

ISSUES = {}

cla = CleanerAgent()


## this function accepts a flat dict - all info of the scrobble should be top level key
## but can contain a list as value
## the following keys are valid:
##		scrobble_duration	int
##		scrobble_time		int
##		track_title			str, mandatory
##		track_artists		list, mandatory
##		track_length		int
##		album_name			str
##		album_artists		list
##
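## e.g. a minimal valid scrobble, with illustrative values only (not from the commit):
##		{"track_title":"Example Song","track_artists":["Example Artist"],"scrobble_time":1600000000}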

def incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None):

	if (not "track_artists" in rawscrobble) or (len(rawscrobble['track_artists']) == 0) or (not "track_title" in rawscrobble):
		log(f"Invalid Scrobble [Client: {client} | API: {api}]: {rawscrobble} ",color='red')
		#return {"status":"failure"}
		return False

	log(f"Incoming scrobble [Client: {client} | API: {api}]: {rawscrobble}")

	# raw scrobble to processed info
	scrobbleinfo = {**rawscrobble}
	if fix:
		scrobbleinfo['track_artists'],scrobbleinfo['track_title'] = cla.fullclean(scrobbleinfo['track_artists'],scrobbleinfo['track_title'])
	scrobbleinfo['scrobble_time'] = scrobbleinfo.get('scrobble_time') or int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())

	# processed info to internal scrobble dict
	scrobbledict = {
		"time":scrobbleinfo.get('scrobble_time'),
		"track":{
			"artists":scrobbleinfo.get('track_artists'),
			"title":scrobbleinfo.get('track_title'),
			"album":{
				"name":scrobbleinfo.get('album_name'),
				"artists":scrobbleinfo.get('album_artists')
			},
			"length":scrobbleinfo.get('track_length')
		},
		"duration":scrobbleinfo.get('scrobble_duration'),
		"origin":f"client:{client}" if client else "generic",
		"extra":{
			k:scrobbleinfo[k] for k in scrobbleinfo if k not in
			['scrobble_time','track_artists','track_title','track_length','scrobble_duration','album_name','album_artists']
		},
		"rawscrobble":rawscrobble
	}

	sqldb.add_scrobble(scrobbledict,dbconn=dbconn)
	proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])

	dbcache.invalidate_caches(scrobbledict['time'])

	#return {"status":"success","scrobble":scrobbledict}
	return scrobbledict

@waitfordb
def remove_scrobble(timestamp):
	log(f"Deleting Scrobble {timestamp}")
	result = sqldb.delete_scrobble(timestamp)
	dbcache.invalidate_caches(timestamp)


@waitfordb
def get_scrobbles(dbconn=None,**keys):
	(since,to) = keys.get('timerange').timestamps()
	if 'artist' in keys:
		result = sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,dbconn=dbconn)
	elif 'track' in keys:
		result = sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,dbconn=dbconn)
	else:
		result = sqldb.get_scrobbles(since=since,to=to,dbconn=dbconn)
	#return result[keys['page']*keys['perpage']:(keys['page']+1)*keys['perpage']]
	return list(reversed(result))

@waitfordb
def get_scrobbles_num(dbconn=None,**keys):
	(since,to) = keys.get('timerange').timestamps()
	if 'artist' in keys:
		result = len(sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,resolve_references=False,dbconn=dbconn))
	elif 'track' in keys:
		result = len(sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,resolve_references=False,dbconn=dbconn))
	else:
		result = sqldb.get_scrobbles_num(since=since,to=to,dbconn=dbconn)
	return result


@waitfordb
def get_tracks(dbconn=None,**keys):
	if keys.get('artist') is None:
		result = sqldb.get_tracks(dbconn=dbconn)
	else:
		result = sqldb.get_tracks_of_artist(keys.get('artist'),dbconn=dbconn)
	return result

@waitfordb
def get_artists(dbconn=None):
	return sqldb.get_artists(dbconn=dbconn)


@waitfordb
def get_charts_artists(dbconn=None,**keys):
	(since,to) = keys.get('timerange').timestamps()
	result = sqldb.count_scrobbles_by_artist(since=since,to=to,dbconn=dbconn)
	return result

@waitfordb
def get_charts_tracks(dbconn=None,**keys):
	(since,to) = keys.get('timerange').timestamps()
	if 'artist' in keys:
		result = sqldb.count_scrobbles_by_track_of_artist(since=since,to=to,artist=keys['artist'],dbconn=dbconn)
	else:
		result = sqldb.count_scrobbles_by_track(since=since,to=to,dbconn=dbconn)
	return result

@waitfordb
def get_pulse(dbconn=None,**keys):

	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []
	for rng in rngs:
		res = get_scrobbles_num(timerange=rng,**{k:keys[k] for k in keys if k != 'timerange'},dbconn=dbconn)
		results.append({"range":rng,"scrobbles":res})

	return results

@waitfordb
def get_performance(dbconn=None,**keys):

	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []

	for rng in rngs:
		if "track" in keys:
			track = sqldb.get_track(sqldb.get_track_id(keys['track'],dbconn=dbconn),dbconn=dbconn)
			charts = get_charts_tracks(timerange=rng,dbconn=dbconn)
			rank = None
			for c in charts:
				if c["track"] == track:
					rank = c["rank"]
					break
		elif "artist" in keys:
			artist = sqldb.get_artist(sqldb.get_artist_id(keys['artist'],dbconn=dbconn),dbconn=dbconn)
			# ^this is the most useless line in programming history
			# but I like consistency
			charts = get_charts_artists(timerange=rng,dbconn=dbconn)
			rank = None
			for c in charts:
				if c["artist"] == artist:
					rank = c["rank"]
					break
		results.append({"range":rng,"rank":rank})

	return results

@waitfordb
def get_top_artists(dbconn=None,**keys):

	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []

	for rng in rngs:
		try:
			res = get_charts_artists(timerange=rng,dbconn=dbconn)[0]
			results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"]})
		except:
			results.append({"range":rng,"artist":None,"scrobbles":0})

	return results


@waitfordb
def get_top_tracks(dbconn=None,**keys):

	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []

	for rng in rngs:
		try:
			res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
			results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
		except:
			results.append({"range":rng,"track":None,"scrobbles":0})

	return results

@waitfordb
def artist_info(dbconn=None,**keys):

	artist = keys.get('artist')

	artist = sqldb.get_artist(sqldb.get_artist_id(artist,dbconn=dbconn),dbconn=dbconn)
	alltimecharts = get_charts_artists(timerange=alltime(),dbconn=dbconn)
	scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
	#we can't take the scrobble number from the charts because that includes all countas scrobbles
	try:
		c = [e for e in alltimecharts if e["artist"] == artist][0]
		others = sqldb.get_associated_artists(artist,dbconn=dbconn)
		position = c["rank"]
		return {
			"artist":artist,
			"scrobbles":scrobbles,
			"position":position,
			"associated":others,
			"medals":{
				"gold": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['gold']],
				"silver": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['silver']],
				"bronze": [year for year in cached.medals_artists if artist in cached.medals_artists[year]['bronze']],
			},
			"topweeks":len([e for e in cached.weekly_topartists if e == artist])
		}
	except:
		# if the artist isn't in the charts, they are not being credited and we
		# need to show information about the credited one
		replaceartist = sqldb.get_credited_artists(artist)[0]
		c = [e for e in alltimecharts if e["artist"] == replaceartist][0]
		position = c["rank"]
		return {"artist":artist,"replace":replaceartist,"scrobbles":scrobbles,"position":position}




@waitfordb
def track_info(dbconn=None,**keys):

	track = keys.get('track')

	track = sqldb.get_track(sqldb.get_track_id(track,dbconn=dbconn),dbconn=dbconn)
	alltimecharts = get_charts_tracks(timerange=alltime(),dbconn=dbconn)
	#scrobbles = get_scrobbles_num(track=track,timerange=alltime())

	c = [e for e in alltimecharts if e["track"] == track][0]
	scrobbles = c["scrobbles"]
	position = c["rank"]
	cert = None
	threshold_gold, threshold_platinum, threshold_diamond = malojaconfig["SCROBBLES_GOLD","SCROBBLES_PLATINUM","SCROBBLES_DIAMOND"]
	if scrobbles >= threshold_diamond: cert = "diamond"
	elif scrobbles >= threshold_platinum: cert = "platinum"
	elif scrobbles >= threshold_gold: cert = "gold"

	return {
		"track":track,
		"scrobbles":scrobbles,
		"position":position,
		"medals":{
			"gold": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['gold']],
			"silver": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['silver']],
			"bronze": [year for year in cached.medals_tracks if track in cached.medals_tracks[year]['bronze']],
		},
		"certification":cert,
		"topweeks":len([e for e in cached.weekly_toptracks if e == track])
	}




def get_predefined_rulesets(dbconn=None):
	validchars = "-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

	rulesets = []

	for f in os.listdir(data_dir['rules']("predefined")):
		if f.endswith(".tsv"):

			rawf = f.replace(".tsv","")
			valid = all(char in validchars for char in rawf)
			if not valid: continue
			if "_" not in rawf: continue

			try:
				with open(data_dir['rules']("predefined",f)) as tsvfile:
					line1 = tsvfile.readline()
					line2 = tsvfile.readline()

					if "# NAME: " in line1:
						name = line1.replace("# NAME: ","")
					else: name = rawf.split("_")[1]
					desc = line2.replace("# DESC: ","") if "# DESC: " in line2 else ""
					author = rawf.split("_")[0]
			except:
				continue

			ruleset = {"file":rawf}
			rulesets.append(ruleset)
			ruleset["active"] = bool(os.path.exists(data_dir['rules'](f)))
			ruleset["name"] = name
			ruleset["author"] = author
			ruleset["desc"] = desc

	return rulesets
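# Illustrative note (not part of this commit): the filename convention parsed
# above is "<author>_<name>.tsv", so a hypothetical file
# "krateng_examplerules.tsv" would yield author "krateng" and name "examplerules".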

####
## Server operation
####


def start_db():
	# Upgrade database
	from .. import upgrade
	upgrade.upgrade_db(sqldb.add_scrobbles)

	# Load temporary tables
	from . import associated
	associated.load_associated_rules()

	dbstatus['healthy'] = True

	# inform time module about the beginning of scrobbling
	try:
		firstscrobble = sqldb.get_scrobbles()[0]
		register_scrobbletime(firstscrobble['time'])
	except IndexError:
		register_scrobbletime(int(datetime.datetime.now().timestamp()))


	# create cached information
	cached.update_medals()
	cached.update_weekly()

	dbstatus['complete'] = True




# Search for strings
def db_search(query,type=None):
	results = []
	if type=="ARTIST":
		results = [a for a in sqldb.get_artists() if sqldb.normalize_name(query) in sqldb.normalize_name(a)]
	if type=="TRACK":
		results = [t for t in sqldb.get_tracks() if sqldb.normalize_name(query) in sqldb.normalize_name(t['title'])]
	return results
49
maloja/database/associated.py
Normal file
@ -0,0 +1,49 @@
## dealing with loading the associated artists rules into a database
## right now this is kind of absurd because we're storing it in a db while not
## actually using its permanence, but this makes it possible to use the information
## directly in sql


import csv
import os

from . import sqldb
from ..globalconf import data_dir


def load_associated_rules():
	# delete old
	with sqldb.engine.begin() as conn:
		op = sqldb.DB['associated_artists'].delete().where()
		conn.execute(op)

	# load from file
	rawrules = []
	for f in os.listdir(data_dir["rules"]()):
		if f.split('.')[-1].lower() != 'tsv': continue
		filepath = data_dir["rules"](f)
		with open(filepath,'r') as filed:
			reader = csv.reader(filed,delimiter="\t")
			rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
	rules = [{'source_artist':r[1],'target_artist':r[2]} for r in rawrules if r[0]=="countas"]

	#for rule in rules:
	#	print(f"Rule to replace {rule['source_artist']} with {rule['target_artist']}:")
	#	test = {k:sqldb.get_artist_id(rule[k],create_new=False) for k in rule}
	#	if test['source_artist'] is None: print("axed")

	#allartists = set([*[r['source_artist'] for r in rules],*[r['target_artist'] for r in rules]])

	# find ids
	rules = [{k:sqldb.get_artist_id(rule[k],create_new=False) for k in rule} for rule in rules]
	rules = [r for r in rules if r['source_artist'] is not None]

	# write to db
	ops = [
		sqldb.DB['associated_artists'].insert().values(**r).prefix_with('OR IGNORE')
		for r in rules
	]

	with sqldb.engine.begin() as conn:
		for op in ops:
			conn.execute(op)
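# Illustrative example (not part of this commit): a "countas" rule from any
# rules TSV, such as the line "countas	Sips	The Yogscast" in the predefined
# ruleset earlier in this diff, becomes
# {'source_artist':'Sips','target_artist':'The Yogscast'} and is then stored
# as a pair of artist IDs in the associated_artists table.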
69
maloja/database/cached.py
Normal file
@ -0,0 +1,69 @@
# for information that is not authoritative, but should be saved anyway because it
# changes infrequently and DB access is expensive

from doreah.regular import runyearly, rundaily
from .. import database
from .. import malojatime as mjt



medals_artists = {
	# year: {'gold':[],'silver':[],'bronze':[]}
}
medals_tracks = {
	# year: {'gold':[],'silver':[],'bronze':[]}
}

weekly_topartists = []
weekly_toptracks = []

@runyearly
def update_medals():

	global medals_artists, medals_tracks
	medals_artists.clear()
	medals_tracks.clear()

	for year in mjt.ranges(step="year"):
		if year == mjt.thisyear(): break

		charts_artists = database.get_charts_artists(timerange=year)
		charts_tracks = database.get_charts_tracks(timerange=year)

		entry_artists = {'gold':[],'silver':[],'bronze':[]}
		entry_tracks = {'gold':[],'silver':[],'bronze':[]}
		medals_artists[year.desc()] = entry_artists
		medals_tracks[year.desc()] = entry_tracks

		for entry in charts_artists:
			if entry['rank'] == 1: entry_artists['gold'].append(entry['artist'])
			elif entry['rank'] == 2: entry_artists['silver'].append(entry['artist'])
			elif entry['rank'] == 3: entry_artists['bronze'].append(entry['artist'])
			else: break
		for entry in charts_tracks:
			if entry['rank'] == 1: entry_tracks['gold'].append(entry['track'])
			elif entry['rank'] == 2: entry_tracks['silver'].append(entry['track'])
			elif entry['rank'] == 3: entry_tracks['bronze'].append(entry['track'])
			else: break



@rundaily
def update_weekly():

	global weekly_topartists, weekly_toptracks
	weekly_topartists.clear()
	weekly_toptracks.clear()

	for week in mjt.ranges(step="week"):
		if week == mjt.thisweek(): break

		charts_artists = database.get_charts_artists(timerange=week)
		charts_tracks = database.get_charts_tracks(timerange=week)

		for entry in charts_artists:
			if entry['rank'] == 1: weekly_topartists.append(entry['artist'])
			else: break
		for entry in charts_tracks:
			if entry['rank'] == 1: weekly_toptracks.append(entry['track'])
			else: break
143
maloja/database/dbcache.py
Normal file
@ -0,0 +1,143 @@
# the more generalized caching for DB queries
# mostly to avoid long loading times for pages that show lots of information
# that changes very infrequently or not at all

import lru
import psutil
import json
from doreah.regular import runhourly
from doreah.logging import log

from ..globalconf import malojaconfig

HIGH_NUMBER = 1000000
CACHE_SIZE = 10000
ENTITY_CACHE_SIZE = 1000000
CACHE_ADJUST_STEP = 100

cache = lru.LRU(CACHE_SIZE)
entitycache = lru.LRU(ENTITY_CACHE_SIZE)

hits, misses = 0, 0



@runhourly
def maintenance():
	if malojaconfig['USE_GLOBAL_CACHE']:
		print_stats()
		trim_cache()

def print_stats():
	log(f"Cache Size: {len(cache)} [{len(entitycache)} E], System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}")
	#print("Full rundown:")
	#import sys
	#for k in cache.keys():
	#	print(f"\t{k}\t{sys.getsizeof(cache[k])}")


def cached_wrapper(inner_func):

	if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func
	def outer_func(*args,**kwargs):
		if 'dbconn' in kwargs:
			conn = kwargs.pop('dbconn')
		else:
			conn = None
		global hits, misses
		key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))

		if key in cache:
			hits += 1
			return cache.get(key)

		else:
			misses += 1
			result = inner_func(*args,**kwargs,dbconn=conn)
			cache[key] = result
			return result

	return outer_func
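# Usage sketch (illustrative, not part of this commit): stacking the wrapper
# over a DB read so repeated calls with the same arguments hit the LRU cache;
# the dbconn kwarg is stripped from the key so cached results are shared
# across connections.
#
#	@cached_wrapper
#	def example_lookup(name,dbconn=None):	# hypothetical function
#		...					# expensive query here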

# cache for functions that call with a whole list of entity ids
# we don't want a new cache entry for every single combination, but keep a common
# cache that's aware of what we're calling
def cached_wrapper_individual(inner_func):

	if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func
	def outer_func(set_arg,**kwargs):

		if 'dbconn' in kwargs:
			conn = kwargs.pop('dbconn')
		else:
			conn = None

		#global hits, misses
		result = {}
		for id in set_arg:
			if (inner_func,id) in entitycache:
				result[id] = entitycache[(inner_func,id)]
				#hits += 1
			else:
				pass
				#misses += 1

		remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn)
		for id in remaining:
			entitycache[(inner_func,id)] = remaining[id]
			result[id] = remaining[id]

		return result

	return outer_func

def invalidate_caches(scrobbletime):
	if malojaconfig['USE_GLOBAL_CACHE']:
		cleared, kept = 0, 0
		for k in cache.keys():
			# VERY BIG TODO: differentiate between None as in 'unlimited timerange' and None as in 'time doesn't matter here'!
			if (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
				cleared += 1
				del cache[k]
			else:
				kept += 1
		log(f"Invalidated {cleared} of {cleared+kept} DB cache entries")


def invalidate_entity_cache():
	entitycache.clear()


def trim_cache():
	ramprct = psutil.virtual_memory().percent
	if ramprct > malojaconfig["DB_MAX_MEMORY"]:
		log(f"{ramprct}% RAM usage, clearing cache and adjusting size!")
		#ratio = 0.6
		#targetsize = max(int(len(cache) * ratio),50)
		#log(f"Reducing to {targetsize} entries")
		#cache.set_size(targetsize)
		#cache.set_size(HIGH_NUMBER)
		cache.clear()
		if cache.get_size() > CACHE_ADJUST_STEP:
			cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)

		#log(f"New RAM usage: {psutil.virtual_memory().percent}%")
		print_stats()



def serialize(obj):
	try:
		return serialize(obj.hashable())
	except:
		try:
			return json.dumps(obj)
		except:
			if isinstance(obj, (list, tuple, set)):
				return "[" + ",".join(serialize(o) for o in obj) + "]"
			elif isinstance(obj,dict):
				return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
			return json.dumps(obj.hashable())
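# Behavior sketch (illustrative): serialize prefers an object's own .hashable()
# representation, then plain JSON, then recurses into containers, e.g.
#	serialize({"a":[1,2]})  ->  '{"a": [1, 2]}'   (via json.dumps)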
50
maloja/database/jinjaview.py
Normal file
@ -0,0 +1,50 @@
from .. import database
from .sqldb import engine

from .dbcache import serialize

from ..globalconf import malojaconfig

from doreah.logging import log


# this is a wrapper object that provides a DB connection so that one jinja page
# (with all its included partials) can use it for all functions
# it also translates the non-unpacked calls to unpacked calls that the DB wants
# it also maintains a request-local cache since many webpages use the same stats
# several times
class JinjaDBConnection:
	def __init__(self):
		self.cache = {}
		self.hits = 0
		self.misses = 0
	def __enter__(self):
		self.conn = engine.connect()
		return self
	def __exit__(self, exc_type, exc_value, exc_traceback):
		self.conn.close()
		log(f"Generated page with {self.hits}/{self.hits+self.misses} local Cache hits",module="debug_performance")
		del self.cache
	def __getattr__(self,name):
		originalmethod = getattr(database,name)

		def packedmethod(*keys):
			kwargs = {}
			for k in keys:
				kwargs.update(k)
			if malojaconfig['USE_REQUEST_CACHE']:
				cachekey = serialize((id(originalmethod),kwargs))
				if cachekey in self.cache:
					self.hits += 1
					return self.cache[cachekey]
				else:
					self.misses += 1
					result = originalmethod(**kwargs,dbconn=self.conn)
					self.cache[cachekey] = result
					return result
			else:
				result = originalmethod(**kwargs,dbconn=self.conn)
				return result

		return packedmethod
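# Usage sketch (illustrative, not part of this commit): a jinja page gets one
# connection for the whole render and calls DB functions with packed dicts
# instead of **kwargs; alltime here is assumed to come from ..malojatime.
#
#	with JinjaDBConnection() as conn:
#		artists = conn.get_charts_artists({"timerange":alltime()})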
780
maloja/database/sqldb.py
Normal file
@ -0,0 +1,780 @@
import sqlalchemy as sql
import json
import unicodedata
import math
from datetime import datetime
from threading import Lock

from ..globalconf import data_dir
from .dbcache import cached_wrapper, cached_wrapper_individual

from doreah.logging import log
from doreah.regular import runhourly, runmonthly



##### DB Technical


DBTABLES = {
	# name - type - foreign key - kwargs
	'scrobbles':{
		'columns':[
			("timestamp", sql.Integer, {'primary_key':True}),
			("rawscrobble", sql.String, {}),
			("origin", sql.String, {}),
			("duration", sql.Integer, {}),
			("track_id", sql.Integer, sql.ForeignKey('tracks.id'), {}),
			("extra", sql.String, {})
		],
		'extraargs':(),'extrakwargs':{}
	},
	'tracks':{
		'columns':[
			("id", sql.Integer, {'primary_key':True}),
			("title", sql.String, {}),
			("title_normalized",sql.String, {}),
			("length", sql.Integer, {})
		],
		'extraargs':(),'extrakwargs':{'sqlite_autoincrement':True}
	},
	'artists':{
		'columns':[
			("id", sql.Integer, {'primary_key':True}),
			("name", sql.String, {}),
			("name_normalized", sql.String, {})
		],
		'extraargs':(),'extrakwargs':{'sqlite_autoincrement':True}
	},
	'trackartists':{
		'columns':[
			("id", sql.Integer, {'primary_key':True}),
			("artist_id", sql.Integer, sql.ForeignKey('artists.id'), {}),
			("track_id", sql.Integer, sql.ForeignKey('tracks.id'), {})
		],
		'extraargs':(sql.UniqueConstraint('artist_id', 'track_id'),),'extrakwargs':{}
	},
	'associated_artists':{
		'columns':[
			("source_artist", sql.Integer, sql.ForeignKey('artists.id'), {}),
			("target_artist", sql.Integer, sql.ForeignKey('artists.id'), {})
		],
		'extraargs':(sql.UniqueConstraint('source_artist', 'target_artist'),),'extrakwargs':{}
	}
}



DB = {}

engine = sql.create_engine(f"sqlite:///{data_dir['scrobbles']('malojadb.sqlite')}", echo = False)
meta = sql.MetaData()

# create table definitions
for tablename in DBTABLES:

	DB[tablename] = sql.Table(
		tablename, meta,
		*[sql.Column(colname,*args,**kwargs) for colname,*args,kwargs in DBTABLES[tablename]['columns']],
		*DBTABLES[tablename]['extraargs'],
		**DBTABLES[tablename]['extrakwargs']
	)

# actually create tables for new databases
meta.create_all(engine)

# upgrade old database with new columns
with engine.begin() as conn:
	for tablename in DBTABLES:
		info = DBTABLES[tablename]
		table = DB[tablename]

		for colname,datatype,*args,kwargs in info['columns']:
			try:
				statement = f"ALTER TABLE {tablename} ADD {colname} {datatype().compile()}"
				conn.execute(sql.text(statement))
				log(f"Column {colname} was added to table {tablename}!")
				# TODO figure out how to compile foreign key references!
			except sql.exc.OperationalError as e:
				pass


# adding a scrobble could consist of multiple write operations that sqlite doesn't
# see as belonging together
SCROBBLE_LOCK = Lock()


# decorator that passes either the provided dbconn, or creates a separate one
# just for this function call
def connection_provider(func):

	def wrapper(*args,**kwargs):
		if kwargs.get("dbconn") is not None:
			return func(*args,**kwargs)
		else:
			with engine.connect() as connection:
				kwargs['dbconn'] = connection
				return func(*args,**kwargs)
	return wrapper
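# Usage sketch (illustrative, not part of this commit): callers may pass a
# shared connection or none at all; the decorator opens one on demand.
#
#	@connection_provider
#	def example_read(dbconn=None):		# hypothetical function
#		return dbconn.execute(DB['artists'].select()).all()
#
#	example_read()				# opens and closes its own connection
#	with engine.connect() as conn:
#		example_read(dbconn=conn)	# reuses the caller's connection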

##### DB <-> Dict translations

## ATTENTION ALL ADVENTURERS
## this is what a scrobble dict will look like from now on
## this is the single canonical source of truth
## stop making different little dicts in every single function
## this is the schema that will definitely 100% stay like this and not
## randomly get changed two versions later
## here we go
#
# {
# 	"time":int,
# 	"track":{
# 		"artists":list,
# 		"title":string,
# 		"album":{
# 			"name":string,
# 			"artists":list
# 		},
# 		"length":None
# 	},
# 	"duration":int,
# 	"origin":string,
# 	"extra":{string-keyed mapping for all flags with the scrobble},
# 	"rawscrobble":{string-keyed mapping of the original scrobble received}
# }
#
# The last two fields are not returned under normal circumstances



##### Conversions between DB and dicts

# These should work on whole lists and collect all the references,
# then look them up once and fill them in


### DB -> DICT
def scrobbles_db_to_dict(rows,include_internal=False):
	tracks = get_tracks_map(set(row.track_id for row in rows))
	return [
		{
			**{
				"time":row.timestamp,
				"track":tracks[row.track_id],
				"duration":row.duration,
				"origin":row.origin,
			},
			**({
				"extra":json.loads(row.extra or '{}'),
				"rawscrobble":json.loads(row.rawscrobble or '{}')
			} if include_internal else {})
		}

		for row in rows
	]

def scrobble_db_to_dict(row):
	return scrobbles_db_to_dict([row])[0]

def tracks_db_to_dict(rows):
	artists = get_artists_of_tracks(set(row.id for row in rows))
	return [
		{
			"artists":artists[row.id],
			"title":row.title,
			#"album":
			"length":row.length
		}
		for row in rows
	]

def track_db_to_dict(row):
	return tracks_db_to_dict([row])[0]

def artists_db_to_dict(rows):
	return [
		row.name
		for row in rows
	]

def artist_db_to_dict(row):
	return artists_db_to_dict([row])[0]




### DICT -> DB

def scrobble_dict_to_db(info):
	return {
		"timestamp":info['time'],
		"origin":info['origin'],
		"duration":info['duration'],
		"track_id":get_track_id(info['track']),
		"extra":json.dumps(info.get('extra',{})),
		"rawscrobble":json.dumps(info.get('rawscrobble',{}))
	}

def track_dict_to_db(info):
	return {
		"title":info['title'],
		"title_normalized":normalize_name(info['title']),
		"length":info.get('length')
	}

def artist_dict_to_db(info):
	return {
		"name": info,
		"name_normalized":normalize_name(info)
	}




##### Actual Database interactions


@connection_provider
def add_scrobble(scrobbledict,dbconn=None):
	add_scrobbles([scrobbledict],dbconn=dbconn)

@connection_provider
def add_scrobbles(scrobbleslist,dbconn=None):

	with SCROBBLE_LOCK:

		ops = [
			DB['scrobbles'].insert().values(
				**scrobble_dict_to_db(s)
			) for s in scrobbleslist
		]

		success,errors = 0,0
		for op in ops:
			try:
				dbconn.execute(op)
				success += 1
			except sql.exc.IntegrityError as e:
				errors += 1

				# TODO check if actual duplicate

	if errors > 0: log(f"{errors} Scrobbles have not been written to database!",color='red')
	return success,errors

@connection_provider
def delete_scrobble(scrobble_id,dbconn=None):

	with SCROBBLE_LOCK:

		op = DB['scrobbles'].delete().where(
			DB['scrobbles'].c.timestamp == scrobble_id
		)

		dbconn.execute(op)


### these will 'get' the ID of an entity, creating it if necessary

@cached_wrapper
@connection_provider
def get_track_id(trackdict,dbconn=None):
	ntitle = normalize_name(trackdict['title'])
	artist_ids = [get_artist_id(a) for a in trackdict['artists']]

	op = DB['tracks'].select(
		DB['tracks'].c.id
	).where(
		DB['tracks'].c.title_normalized==ntitle
	)
	result = dbconn.execute(op).all()
	for row in result:
		# check if the artists are the same
		foundtrackartists = []

		op = DB['trackartists'].select(
			DB['trackartists'].c.artist_id
		).where(
			DB['trackartists'].c.track_id==row[0]
		)
		result = dbconn.execute(op).all()
		match_artist_ids = [r.artist_id for r in result]
		#print("required artists",artist_ids,"this match",match_artist_ids)
		if set(artist_ids) == set(match_artist_ids):
			#print("ID for",trackdict['title'],"was",row[0])
			return row.id

	op = DB['tracks'].insert().values(
		**track_dict_to_db(trackdict)
	)
	result = dbconn.execute(op)
	track_id = result.inserted_primary_key[0]

	for artist_id in artist_ids:
		op = DB['trackartists'].insert().values(
			track_id=track_id,
			artist_id=artist_id
		)
		result = dbconn.execute(op)
	#print("Created",trackdict['title'],track_id)
	return track_id

@cached_wrapper
@connection_provider
def get_artist_id(artistname,create_new=True,dbconn=None):
	nname = normalize_name(artistname)
	#print("looking for",nname)

	op = DB['artists'].select(
		DB['artists'].c.id
	).where(
		DB['artists'].c.name_normalized==nname
	)
	result = dbconn.execute(op).all()
	for row in result:
		#print("ID for",artistname,"was",row[0])
		return row.id

	if not create_new: return None

	op = DB['artists'].insert().values(
		name=artistname,
		name_normalized=nname
	)
	result = dbconn.execute(op)
	#print("Created",artistname,result.inserted_primary_key)
	return result.inserted_primary_key[0]




### Functions that get rows according to parameters

@cached_wrapper
@connection_provider
def get_scrobbles_of_artist(artist,since=None,to=None,resolve_references=True,dbconn=None):

	if since is None: since=0
	if to is None: to=now()

	artist_id = get_artist_id(artist)

	jointable = sql.join(DB['scrobbles'],DB['trackartists'],DB['scrobbles'].c.track_id == DB['trackartists'].c.track_id)

	op = jointable.select().where(
		DB['scrobbles'].c.timestamp<=to,
		DB['scrobbles'].c.timestamp>=since,
		DB['trackartists'].c.artist_id==artist_id
	).order_by(sql.asc('timestamp'))
	result = dbconn.execute(op).all()

	if resolve_references:
		result = scrobbles_db_to_dict(result)
	#result = [scrobble_db_to_dict(row,resolve_references=resolve_references) for row in result]
	return result

@cached_wrapper
@connection_provider
def get_scrobbles_of_track(track,since=None,to=None,resolve_references=True,dbconn=None):

	if since is None: since=0
	if to is None: to=now()

	track_id = get_track_id(track)

	op = DB['scrobbles'].select().where(
		DB['scrobbles'].c.timestamp<=to,
		DB['scrobbles'].c.timestamp>=since,
		DB['scrobbles'].c.track_id==track_id
	).order_by(sql.asc('timestamp'))
	result = dbconn.execute(op).all()

	if resolve_references:
		result = scrobbles_db_to_dict(result)
	#result = [scrobble_db_to_dict(row) for row in result]
	return result

@cached_wrapper
@connection_provider
def get_scrobbles(since=None,to=None,resolve_references=True,dbconn=None):

	if since is None: since=0
	if to is None: to=now()

	op = DB['scrobbles'].select().where(
		DB['scrobbles'].c.timestamp<=to,
		DB['scrobbles'].c.timestamp>=since,
	).order_by(sql.asc('timestamp'))
	result = dbconn.execute(op).all()

	if resolve_references:
		result = scrobbles_db_to_dict(result)
	#result = [scrobble_db_to_dict(row,resolve_references=resolve_references) for i,row in enumerate(result) if i<max]
	return result


# we can do that with above and resolve_references=False, but just testing speed
@cached_wrapper
@connection_provider
def get_scrobbles_num(since=None,to=None,dbconn=None):

	if since is None: since=0
	if to is None: to=now()

	op = sql.select(sql.func.count()).select_from(DB['scrobbles']).where(
		DB['scrobbles'].c.timestamp<=to,
		DB['scrobbles'].c.timestamp>=since,
	)
	result = dbconn.execute(op).all()

	return result[0][0]

@cached_wrapper
@connection_provider
def get_artists_of_track(track_id,resolve_references=True,dbconn=None):

	op = DB['trackartists'].select().where(
		DB['trackartists'].c.track_id==track_id
	)
	result = dbconn.execute(op).all()

	artists = [get_artist(row.artist_id) if resolve_references else row.artist_id for row in result]
	return artists


@cached_wrapper
@connection_provider
def get_tracks_of_artist(artist,dbconn=None):

	artist_id = get_artist_id(artist)

	op = sql.join(DB['tracks'],DB['trackartists']).select().where(
		DB['trackartists'].c.artist_id==artist_id
	)
	result = dbconn.execute(op).all()

	return tracks_db_to_dict(result)

@cached_wrapper
@connection_provider
def get_artists(dbconn=None):

	op = DB['artists'].select()
	result = dbconn.execute(op).all()

	return artists_db_to_dict(result)

@cached_wrapper
@connection_provider
def get_tracks(dbconn=None):

	op = DB['tracks'].select()
	result = dbconn.execute(op).all()

	return tracks_db_to_dict(result)

### functions that count rows for parameters

@cached_wrapper
@connection_provider
def count_scrobbles_by_artist(since,to,dbconn=None):
	jointable = sql.join(
		DB['scrobbles'],
		DB['trackartists'],
		DB['scrobbles'].c.track_id == DB['trackartists'].c.track_id
	)

	jointable2 = sql.join(
		jointable,
		DB['associated_artists'],
		DB['trackartists'].c.artist_id == DB['associated_artists'].c.source_artist,
		isouter=True
	)
	op = sql.select(
		sql.func.count(sql.func.distinct(DB['scrobbles'].c.timestamp)).label('count'),
		# only count distinct scrobbles - because of artist replacement, we could end up
		# with two artists of the same scrobble counting it twice for the same artist
		# e.g. Irene and Seulgi adding two scrobbles to Red Velvet for one real scrobble
		sql.func.coalesce(DB['associated_artists'].c.target_artist,DB['trackartists'].c.artist_id).label('artist_id')
		# use the replaced artist as artist to count if it exists, otherwise original one
	).select_from(jointable2).where(
		DB['scrobbles'].c.timestamp<=to,
		DB['scrobbles'].c.timestamp>=since
	).group_by(
		sql.func.coalesce(DB['associated_artists'].c.target_artist,DB['trackartists'].c.artist_id)
	).order_by(sql.desc('count'))
	result = dbconn.execute(op).all()


	counts = [row.count for row in result]
	artists = get_artists_map([row.artist_id for row in result])
	result = [{'scrobbles':row.count,'artist':artists[row.artist_id]} for row in result]
	result = rank(result,key='scrobbles')
	return result
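# Roughly the SQL built above (illustrative paraphrase, not generated output):
#
#	SELECT count(DISTINCT scrobbles.timestamp) AS count,
#	       coalesce(associated_artists.target_artist, trackartists.artist_id) AS artist_id
#	FROM scrobbles
#	JOIN trackartists ON scrobbles.track_id = trackartists.track_id
#	LEFT OUTER JOIN associated_artists
#	     ON trackartists.artist_id = associated_artists.source_artist
#	WHERE scrobbles.timestamp BETWEEN :since AND :to
#	GROUP BY artist_id ORDER BY count DESC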

@cached_wrapper
@connection_provider
def count_scrobbles_by_track(since,to,dbconn=None):

	op = sql.select(
		sql.func.count(sql.func.distinct(DB['scrobbles'].c.timestamp)).label('count'),
		DB['scrobbles'].c.track_id
	).select_from(DB['scrobbles']).where(
		DB['scrobbles'].c.timestamp<=to,
		DB['scrobbles'].c.timestamp>=since
	).group_by(DB['scrobbles'].c.track_id).order_by(sql.desc('count'))
	result = dbconn.execute(op).all()


	counts = [row.count for row in result]
	tracks = get_tracks_map([row.track_id for row in result])
	result = [{'scrobbles':row.count,'track':tracks[row.track_id]} for row in result]
	result = rank(result,key='scrobbles')
	return result

@cached_wrapper
@connection_provider
def count_scrobbles_by_track_of_artist(since,to,artist,dbconn=None):

	artist_id = get_artist_id(artist)

	jointable = sql.join(
		DB['scrobbles'],
		DB['trackartists'],
		DB['scrobbles'].c.track_id == DB['trackartists'].c.track_id
	)

	op = sql.select(
		sql.func.count(sql.func.distinct(DB['scrobbles'].c.timestamp)).label('count'),
		DB['scrobbles'].c.track_id
	).select_from(jointable).filter(
		DB['scrobbles'].c.timestamp<=to,
		DB['scrobbles'].c.timestamp>=since,
		DB['trackartists'].c.artist_id==artist_id
	).group_by(DB['scrobbles'].c.track_id).order_by(sql.desc('count'))
	result = dbconn.execute(op).all()


	counts = [row.count for row in result]
	tracks = get_tracks_map([row.track_id for row in result])
	result = [{'scrobbles':row.count,'track':tracks[row.track_id]} for row in result]
	result = rank(result,key='scrobbles')
	return result




### functions that get mappings for several entities -> rows

@cached_wrapper_individual
@connection_provider
def get_artists_of_tracks(track_ids,dbconn=None):
	op = sql.join(DB['trackartists'],DB['artists']).select().where(
		DB['trackartists'].c.track_id.in_(track_ids)
	)
	result = dbconn.execute(op).all()

	artists = {}
	for row in result:
		artists.setdefault(row.track_id,[]).append(artist_db_to_dict(row))
	return artists


@cached_wrapper_individual
@connection_provider
def get_tracks_map(track_ids,dbconn=None):
	op = DB['tracks'].select().where(
		DB['tracks'].c.id.in_(track_ids)
	)
	result = dbconn.execute(op).all()

	tracks = {}
	result = list(result)
	# this will get a list of trackdicts in the correct order of our rows
	trackdicts = tracks_db_to_dict(result)
	for row,trackdict in zip(result,trackdicts):
		tracks[row.id] = trackdict
	return tracks

@cached_wrapper_individual
@connection_provider
def get_artists_map(artist_ids,dbconn=None):

	op = DB['artists'].select().where(
		DB['artists'].c.id.in_(artist_ids)
	)
	result = dbconn.execute(op).all()

	artists = {}
	result = list(result)
	# this will get a list of artistdicts in the correct order of our rows
	artistdicts = artists_db_to_dict(result)
	for row,artistdict in zip(result,artistdicts):
		artists[row.id] = artistdict
	return artists


### associations

@cached_wrapper
@connection_provider
def get_associated_artists(*artists,dbconn=None):
	artist_ids = [get_artist_id(a) for a in artists]

	jointable = sql.join(
		DB['associated_artists'],
		DB['artists'],
		DB['associated_artists'].c.source_artist == DB['artists'].c.id
	)

	op = jointable.select().where(
		DB['associated_artists'].c.target_artist.in_(artist_ids)
	)
	result = dbconn.execute(op).all()

	artists = artists_db_to_dict(result)
	return artists

@cached_wrapper
@connection_provider
def get_credited_artists(*artists,dbconn=None):
	artist_ids = [get_artist_id(a) for a in artists]

	jointable = sql.join(
		DB['associated_artists'],
		DB['artists'],
		DB['associated_artists'].c.target_artist == DB['artists'].c.id
	)


	op = jointable.select().where(
		DB['associated_artists'].c.source_artist.in_(artist_ids)
	)
	result = dbconn.execute(op).all()

	artists = artists_db_to_dict(result)
	return artists


### get a specific entity by id

@cached_wrapper
@connection_provider
def get_track(id,dbconn=None):
	op = DB['tracks'].select().where(
		DB['tracks'].c.id==id
	)
	result = dbconn.execute(op).all()

	trackinfo = result[0]
	return track_db_to_dict(trackinfo)

@cached_wrapper
@connection_provider
def get_artist(id,dbconn=None):
	op = DB['artists'].select().where(
		DB['artists'].c.id==id
	)
	result = dbconn.execute(op).all()

	artistinfo = result[0]
	return artist_db_to_dict(artistinfo)





##### MAINTENANCE

@runhourly
def clean_db():

	with SCROBBLE_LOCK:
		with engine.begin() as conn:
			log(f"Database Cleanup...")

			to_delete = [
				# tracks with no scrobbles (trackartist entries first)
				"from trackartists where track_id in (select id from tracks where id not in (select track_id from scrobbles))",
				"from tracks where id not in (select track_id from scrobbles)",
				# artists with no tracks
				"from artists where id not in (select artist_id from trackartists) and id not in (select target_artist from associated_artists)",
				# tracks with no artists (scrobbles first)
				"from scrobbles where track_id in (select id from tracks where id not in (select track_id from trackartists))",
				"from tracks where id not in (select track_id from trackartists)"
			]

			for d in to_delete:
				selection = conn.execute(sql.text(f"select * {d}"))
				for row in selection.all():
					log(f"Deleting {row}")
				deletion = conn.execute(sql.text(f"delete {d}"))

			log("Database Cleanup complete!")



	#if a2+a1>0: log(f"Deleted {a2} tracks without scrobbles ({a1} track artist entries)")

	#if a3>0: log(f"Deleted {a3} artists without tracks")

	#if a5+a4>0: log(f"Deleted {a5} tracks without artists ({a4} scrobbles)")



@runmonthly
def renormalize_names():
	with SCROBBLE_LOCK:
		with engine.begin() as conn:
			rows = conn.execute(DB['artists'].select()).all()

		for row in rows:
			id = row.id
			name = row.name
			norm_actual = row.name_normalized
			norm_target = normalize_name(name)
			if norm_actual != norm_target:
				log(f"{name} should be normalized to {norm_target}, but is instead {norm_actual}, fixing...")

				with engine.begin() as conn:
					rows = conn.execute(DB['artists'].update().where(DB['artists'].c.id == id).values(name_normalized=norm_target))




##### AUX FUNCS


# function to turn the name into a representation that can be easily compared, ignoring minor differences
remove_symbols = ["'","`","’"]
replace_with_space = [" - ",": "]
def normalize_name(name):
	for r in replace_with_space:
		name = name.replace(r," ")
	name = "".join(char for char in unicodedata.normalize('NFD',name.lower())
		if char not in remove_symbols and unicodedata.category(char) != 'Mn')
	return name
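# Behavior sketch (illustrative): normalization lowercases, drops accents and
# the symbols above, and spaces out the listed separators, e.g.
#	normalize_name("Sunmi - Gashina’")  ->  "sunmi gashina"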

def now():
	return int(datetime.now().timestamp())

def rank(ls,key):
	for rnk in range(len(ls)):
		if rnk == 0 or ls[rnk][key] < ls[rnk-1][key]:
			ls[rnk]["rank"] = rnk + 1
		else:
			ls[rnk]["rank"] = ls[rnk-1]["rank"]
	return ls
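# Behavior sketch (illustrative): ties share a rank and the next distinct
# value skips ahead, e.g.
#	rank([{'s':10},{'s':10},{'s':7}],key='s')	# -> ranks 1, 1, 3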
|
@ -1,28 +0,0 @@
|
||||
from . import database
|
||||
|
||||
# this is simply an object to expose all database functions with their arguments packed into dicts
|
||||
# because jinja doesn't accept **kwargs
|
||||
class DB:
|
||||
def __getattr__(self,name):
|
||||
originalmethod = getattr(database,name)
|
||||
|
||||
def packedmethod(*keys):
|
||||
kwargs = {}
|
||||
for k in keys:
|
||||
kwargs.update(k)
|
||||
return originalmethod(**kwargs)
|
||||
|
||||
return packedmethod
|
||||
|
||||
|
||||
# class that is initialized with all the uri keys of the currently requested page and exposes curried db functions
|
||||
class View:
|
||||
def __init__(self,filterkeys,limitkeys,delimitkeys,amountkeys):
|
||||
self.filterkeys = filterkeys
|
||||
self.limitkeys = limitkeys
|
||||
self.delimitkeys = delimitkeys
|
||||
self.amountkeys = amountkeys
|
||||
|
||||
|
||||
def get_pulse(self):
|
||||
return database.get_pulse(**self.limitkeys,**self.delimitkeys,**self.filterkeys)
|
@ -1,12 +1,13 @@
|
||||
import os
|
||||
from doreah.configuration import Configuration
|
||||
from doreah.configuration import types as tp
|
||||
from doreah.keystore import KeyStore
|
||||
|
||||
|
||||
from .__pkginfo__ import VERSION
|
||||
|
||||
|
||||
|
||||
|
||||
# if DATA_DIRECTORY is specified, this is the directory to use for EVERYTHING, no matter what
|
||||
# but with asynnetrical structure, cache and logs in subfolders
|
||||
# otherwise, each directory is treated seperately
|
||||
@ -14,10 +15,9 @@ from .__pkginfo__ import VERSION
|
||||
# DIRECRORY_CONFIG, DIRECRORY_STATE, DIRECTORY_LOGS and DIRECTORY_CACHE
|
||||
# config can only be determined by environment variable, the others can be loaded
|
||||
# from the config files
|
||||
# explicit settings will always be respected. if there are none:
|
||||
# first check if there is any indication of one of the possibilities being populated already
|
||||
# if not, use the first we have permissions for
|
||||
# after we decide which to use, fix it in settings to avoid future heuristics
|
||||
# explicit settings will always be respected, fallback to default
|
||||
|
||||
# if default isn't usable, and config writable, find alternative and fix it in settings
|
||||
|
||||
# USEFUL FUNCS
|
||||
pthj = os.path.join
@ -146,13 +146,11 @@ malojaconfig = Configuration(
		"port":(tp.Integer(), "Port", 42010),
	},
	"Technical":{
		"cache_expire_positive":(tp.Integer(), "Image Cache Expiration", 300, "Days until images are refetched"),
		"cache_expire_negative":(tp.Integer(), "Image Cache Negative Expiration", 30, "Days until failed image fetches are reattempted"),
		"use_db_cache":(tp.Boolean(), "Use DB Cache", True),
		"cache_database_short":(tp.Boolean(), "Use volatile Database Cache", True),
		"cache_database_perm":(tp.Boolean(), "Use permanent Database Cache", True),
		"db_cache_entries":(tp.Integer(), "Maximal Cache entries", 10000),
		"db_max_memory":(tp.Integer(max=100,min=20), "RAM Percentage Theshold", 75, "Maximal percentage of RAM that should be used by whole system before Maloja discards cache entries. Use a higher number if your Maloja runs on a dedicated instance (e.g. a container)")
		"cache_expire_positive":(tp.Integer(), "Image Cache Expiration", 60, "Days until images are refetched"),
		"cache_expire_negative":(tp.Integer(), "Image Cache Negative Expiration", 5, "Days until failed image fetches are reattempted"),
		"db_max_memory":(tp.Integer(min=0,max=100), "RAM Percentage soft limit", 80, "RAM Usage in percent at which Maloja should no longer increase its database cache."),
		"use_request_cache":(tp.Boolean(), "Use request-local DB Cache", True),
		"use_global_cache":(tp.Boolean(), "Use global DB Cache", True)
	},
	"Fluff":{
		"scrobbles_gold":(tp.Integer(), "Scrobbles for Gold", 250, "How many scrobbles a track needs to be considered 'Gold' status"),
@ -175,6 +173,7 @@ malojaconfig = Configuration(
		"other_maloja_api_key":(tp.String(), "Other Maloja Instance API Key",None),
		"track_search_provider":(tp.String(), "Track Search Provider", None),
		"send_stats":(tp.Boolean(), "Send Statistics", None),
		"proxy_images":(tp.Boolean(), "Image Proxy", True, "Whether third party images should be downloaded and served directly by Maloja (instead of just linking their URL)")

	},
	"Database":{
@ -182,7 +181,7 @@ malojaconfig = Configuration(
		"remove_from_title":(tp.Set(tp.String()), "Remove from Title", ["(Original Mix)","(Radio Edit)","(Album Version)","(Explicit Version)","(Bonus Track)"], "Phrases that should be removed from song titles"),
		"delimiters_feat":(tp.Set(tp.String()), "Featuring Delimiters", ["ft.","ft","feat.","feat","featuring","Ft.","Ft","Feat.","Feat","Featuring"], "Delimiters used for extra artists, even when in the title field"),
		"delimiters_informal":(tp.Set(tp.String()), "Informal Delimiters", ["vs.","vs","&"], "Delimiters in informal artist strings with spaces expected around them"),
		"delimiters_formal":(tp.Set(tp.String()), "Formal Delimiters", [";","/"], "Delimiters used to tag multiple artists when only one tag field is available")
		"delimiters_formal":(tp.Set(tp.String()), "Formal Delimiters", [";","/","|","␝","␞","␟"], "Delimiters used to tag multiple artists when only one tag field is available")
	},
	"Web Interface":{
		"default_range_charts_artists":(tp.Choice({'alltime':'All Time','year':'Year','month':"Month",'week':'Week'}), "Default Range Artist Charts", "year"),
@ -191,43 +190,55 @@ malojaconfig = Configuration(
		"charts_display_tiles":(tp.Boolean(), "Display Chart Tiles", False),
		"discourage_cpu_heavy_stats":(tp.Boolean(), "Discourage CPU-heavy stats", False, "Prevent visitors from mindlessly clicking on CPU-heavy options. Does not actually disable them for malicious actors!"),
		"use_local_images":(tp.Boolean(), "Use Local Images", True),
		"local_image_rotate":(tp.Integer(), "Local Image Rotate", 3600),
		#"local_image_rotate":(tp.Integer(), "Local Image Rotate", 3600),
		"timezone":(tp.Integer(), "UTC Offset", 0),
		"time_format":(tp.String(), "Time Format", "%d. %b %Y %I:%M %p")
		"time_format":(tp.String(), "Time Format", "%d. %b %Y %I:%M %p"),
		"theme":(tp.String(), "Theme", "maloja")
	}
},
configfile=newsettingsfile,
save_endpoint="/apis/mlj_1/settings",
env_prefix="MALOJA_"
env_prefix="MALOJA_",
extra_files=["/run/secrets/maloja.yml","/run/secrets/maloja.ini"]

)

if found_new_config_dir:
	malojaconfig["DIRECTORY_CONFIG"] = maloja_dir_config
	try:
		malojaconfig["DIRECTORY_CONFIG"] = maloja_dir_config
	except PermissionError as e:
		pass
	# this really doesn't matter because when are we gonna load info about where
	# the settings file is stored from the settings file
	# but oh well

malojaconfig.render_help(pthj(maloja_dir_config,"settings.md"),
	top_text='''If you wish to adjust settings in the settings.ini file, do so while the server
is not running in order to avoid data being overwritten.
try:
	malojaconfig.render_help(pthj(maloja_dir_config,"settings.md"),
		top_text='''If you wish to adjust settings in the settings.ini file, do so while the server
is not running in order to avoid data being overwritten.

Technically, each setting can be set via environment variable or the settings
file - simply add the prefix `MALOJA_` for environment variables. It is recommended
to use the settings file where possible and not configure each aspect of your
server via environment variables!''')
Technically, each setting can be set via environment variable or the settings
file - simply add the prefix `MALOJA_` for environment variables. It is recommended
to use the settings file where possible and not configure each aspect of your
server via environment variables!

You also can specify additional settings in the files `/run/secrets/maloja.yml` or
`/run/secrets/maloja.ini`, as well as their values directly in files of the respective
name in `/run/secrets/` (e.g. `/run/secrets/lastfm_api_key`).''')
except PermissionError as e:
	pass
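Editor's note: the help text above documents the MALOJA_ prefix; a hypothetical illustration of that mapping, mirroring the env_prefix behaviour in spirit rather than doreah's actual code:

	import os

	os.environ["MALOJA_DB_MAX_MEMORY"] = "70"   # would override the db_max_memory setting

	def setting_from_env(key, default):
		# the setting key "db_max_memory" is read from MALOJA_DB_MAX_MEMORY if set
		return os.environ.get("MALOJA_" + key.upper(), default)

	print(setting_from_env("db_max_memory", 80))   # -> '70'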

### STEP 3 - check all possible folders for files (old installation)


for datatype in ("state","cache","logs"):
	# obviously default values shouldn't trigger this
	# if user has nothing specified, we need to use this
	if malojaconfig.get_specified(directory_info[datatype]['setting']) is None and malojaconfig.get_specified('DATA_DIRECTORY') is None:
		find_good_folder(datatype,malojaconfig)
if not malojaconfig.readonly:
	for datatype in ("state","cache","logs"):
		# obviously default values shouldn't trigger this
		# if user has nothing specified, we need to use this
		if malojaconfig.get_specified(directory_info[datatype]['setting']) is None and malojaconfig.get_specified('DATA_DIRECTORY') is None:
			find_good_folder(datatype,malojaconfig)


@ -261,15 +272,21 @@ data_directories = {
	"auth":pthj(dir_settings['state'],"auth"),
	"backups":pthj(dir_settings['state'],"backups"),
	"images":pthj(dir_settings['state'],"images"),
	"scrobbles":pthj(dir_settings['state'],"scrobbles"),
	"scrobbles":pthj(dir_settings['state']),
	"rules":pthj(dir_settings['config'],"rules"),
	"clients":pthj(dir_settings['config']),
	"settings":pthj(dir_settings['config']),
	"css":pthj(dir_settings['config'],"custom_css"),
	"logs":pthj(dir_settings['logs']),
	"cache":pthj(dir_settings['cache']),

	"config":dir_settings['config'],
	"state":dir_settings['state'],
	"logs":dir_settings['logs'],
	"cache":dir_settings['cache'],
}

for identifier,path in data_directories.items():
	os.makedirs(path,exist_ok=True)


data_dir = {
	k:lambda *x,k=k: pthj(data_directories[k],*x) for k in data_directories
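Editor's note: the k=k default argument in the comprehension above is deliberate. It freezes each key at definition time; without it every lambda would close over the loop variable and resolve to the last key. A standalone demonstration (not maloja code):

	paths = {'logs': '/var/logs', 'cache': '/var/cache'}

	broken = {k: (lambda *x: (k,) + x) for k in paths}       # late binding: k looked up at call time
	fixed  = {k: (lambda *x, k=k: (k,) + x) for k in paths}  # k=k captures the current key

	print(broken['logs']())  # ('cache',) - both lambdas see the final k
	print(fixed['logs']())   # ('logs',)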
@ -291,9 +308,6 @@ with open(pthj(dir_settings['state'],".lastmalojaversion"),"w") as filed:
from doreah import config

config(
	caching={
		"folder": data_dir['cache']()
	},
	auth={
		"multiuser":False,
		"cookieprefix":"maloja",
@ -304,7 +318,6 @@ config(
		"logfolder": data_dir['logs']() if malojaconfig["LOGGING"] else None
	},
	regular={
		"autostart": False,
		"offset": malojaconfig["TIMEZONE"]
	}
)
@ -312,23 +325,6 @@ config(


### API KEYS

### symmetric keys are fine for now since we hopefully use HTTPS
apikeystore = KeyStore(file=data_dir['clients']("apikeys.yml"),save_endpoint="/apis/mlj_1/apikeys")

oldfile = pthj(dir_settings['config'],"clients","authenticated_machines.tsv")
if os.path.exists(oldfile):
	try:
		from doreah import tsv
		clients = tsv.parse(oldfile,"string","string")
		for key,identifier in clients:
			apikeystore[identifier] = key
		os.remove(oldfile)
	except:
		pass

# what the fuck did i just write

300
maloja/images.py
Normal file
@ -0,0 +1,300 @@
from .globalconf import data_dir, malojaconfig
from . import thirdparty
from . import database

from doreah.logging import log

import itertools
import os
import urllib
import random
import base64
import requests
import datauri
import io
from threading import Thread, Timer, BoundedSemaphore
import re
import datetime

import sqlalchemy as sql



DB = {}
engine = sql.create_engine(f"sqlite:///{data_dir['cache']('images.sqlite')}", echo = False)
meta = sql.MetaData()

DB['artists'] = sql.Table(
	'artists', meta,
	sql.Column('id',sql.Integer,primary_key=True),
	sql.Column('url',sql.String),
	sql.Column('expire',sql.Integer),
	sql.Column('raw',sql.String)
)
DB['tracks'] = sql.Table(
	'tracks', meta,
	sql.Column('id',sql.Integer,primary_key=True),
	sql.Column('url',sql.String),
	sql.Column('expire',sql.Integer),
	sql.Column('raw',sql.String)
)

meta.create_all(engine)

def get_image_from_cache(id,table):
	now = int(datetime.datetime.now().timestamp())
	with engine.begin() as conn:
		op = DB[table].select().where(
			DB[table].c.id==id,
			DB[table].c.expire>now
		)
		result = conn.execute(op).all()
	for row in result:
		if row.raw is not None:
			return {'type':'raw','value':row.raw}
		else:
			return {'type':'url','value':row.url} # returns None as value if nonexistence cached
	return None # no cache entry

def set_image_in_cache(id,table,url):
	remove_image_from_cache(id,table)
	now = int(datetime.datetime.now().timestamp())
	if url is None:
		expire = now + (malojaconfig["CACHE_EXPIRE_NEGATIVE"] * 24 * 3600)
	else:
		expire = now + (malojaconfig["CACHE_EXPIRE_POSITIVE"] * 24 * 3600)

	raw = dl_image(url)

	with engine.begin() as conn:
		op = DB[table].insert().values(
			id=id,
			url=url,
			expire=expire,
			raw=raw
		)
		result = conn.execute(op)

def remove_image_from_cache(id,table):
	with engine.begin() as conn:
		op = DB[table].delete().where(
			DB[table].c.id==id,
		)
		result = conn.execute(op)

def dl_image(url):
	if not malojaconfig["PROXY_IMAGES"]: return None
	if url is None: return None
	if url.startswith("/"): return None #local image
	try:
		r = requests.get(url)
		mime = r.headers.get('content-type') or 'image/jpg'
		data = io.BytesIO(r.content).read()
		uri = datauri.DataURI.make(mime,charset='ascii',base64=True,data=data)
		log(f"Downloaded {url} for local caching")
		return uri
	except:
		log(f"Image {url} could not be downloaded for local caching")
		return None
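Editor's note: to make the cache contract above concrete, a hypothetical round trip under this schema (the id is made up; assumes the module-level engine, tables, and config from the snippet):

	set_image_in_cache(42, 'artists', "https://example.com/cover.jpg")
	hit = get_image_from_cache(42, 'artists')
	# -> {'type':'raw','value':'data:image/jpeg;base64,...'} if PROXY_IMAGES downloaded it,
	#    {'type':'url','value':'https://example.com/cover.jpg'} otherwise

	set_image_in_cache(42, 'artists', None)    # cache the *absence* of an image
	miss = get_image_from_cache(42, 'artists')
	# -> {'type':'url','value':None}: nonexistence stays cached until CACHE_EXPIRE_NEGATIVE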


### getting images for any website embedding now ALWAYS returns just the generic link
### even if we have already cached it, we will handle that on request
def get_track_image(track=None,track_id=None):
	if track_id is None:
		track_id = database.sqldb.get_track_id(track)

	return f"/image?type=track&id={track_id}"


def get_artist_image(artist=None,artist_id=None):
	if artist_id is None:
		artist_id = database.sqldb.get_artist_id(artist)

	return f"/image?type=artist&id={artist_id}"



resolve_semaphore = BoundedSemaphore(8)


def resolve_track_image(track_id):

	with resolve_semaphore:
		# check cache
		result = get_image_from_cache(track_id,'tracks')
		if result is not None:
			return result

		track = database.sqldb.get_track(track_id)

		# local image
		if malojaconfig["USE_LOCAL_IMAGES"]:
			images = local_files(artists=track['artists'],title=track['title'])
			if len(images) != 0:
				result = random.choice(images)
				result = urllib.parse.quote(result)
				result = {'type':'url','value':result}
				set_image_in_cache(track_id,'tracks',result['value'])
				return result

		# third party
		result = thirdparty.get_image_track_all((track['artists'],track['title']))
		result = {'type':'url','value':result}
		set_image_in_cache(track_id,'tracks',result['value'])

		return result


def resolve_artist_image(artist_id):

	with resolve_semaphore:
		# check cache
		result = get_image_from_cache(artist_id,'artists')
		if result is not None:
			return result

		artist = database.sqldb.get_artist(artist_id)

		# local image
		if malojaconfig["USE_LOCAL_IMAGES"]:
			images = local_files(artist=artist)
			if len(images) != 0:
				result = random.choice(images)
				result = urllib.parse.quote(result)
				result = {'type':'url','value':result}
				set_image_in_cache(artist_id,'artists',result['value'])
				return result

		# third party
		result = thirdparty.get_image_artist_all(artist)
		result = {'type':'url','value':result}
		set_image_in_cache(artist_id,'artists',result['value'])

		return result


# removes emojis and weird shit from names
def clean(name):
	return "".join(c for c in name if c.isalnum() or c in []).strip()

def get_all_possible_filenames(artist=None,artists=None,title=None):
	# check if we're dealing with a track or artist, then clean up names
	# (only remove non-alphanumeric, allow korean and stuff)

	if title is not None and artists is not None:
		track = True
		title, artists = clean(title), [clean(a) for a in artists]
	elif artist is not None:
		track = False
		artist = clean(artist)
	else: return []


	superfolder = "tracks/" if track else "artists/"

	filenames = []

	if track:
		#unsafeartists = [artist.translate(None,"-_./\\") for artist in artists]
		safeartists = [re.sub("[^a-zA-Z0-9]","",artist) for artist in artists]
		#unsafetitle = title.translate(None,"-_./\\")
		safetitle = re.sub("[^a-zA-Z0-9]","",title)

		if len(artists) < 4:
			unsafeperms = itertools.permutations(artists)
			safeperms = itertools.permutations(safeartists)
		else:
			unsafeperms = [sorted(artists)]
			safeperms = [sorted(safeartists)]


		for unsafeartistlist in unsafeperms:
			filename = "-".join(unsafeartistlist) + "_" + title
			if filename != "":
				filenames.append(filename)
				filenames.append(filename.lower())
		for safeartistlist in safeperms:
			filename = "-".join(safeartistlist) + "_" + safetitle
			if filename != "":
				filenames.append(filename)
				filenames.append(filename.lower())
		filenames = list(set(filenames))
		if len(filenames) == 0: filenames.append(str(hash((frozenset(artists),title))))
	else:
		#unsafeartist = artist.translate(None,"-_./\\")
		safeartist = re.sub("[^a-zA-Z0-9]","",artist)

		filename = artist
		if filename != "":
			filenames.append(filename)
			filenames.append(filename.lower())
		filename = safeartist
		if filename != "":
			filenames.append(filename)
			filenames.append(filename.lower())

		filenames = list(set(filenames))
		if len(filenames) == 0: filenames.append(str(hash(artist)))

	return [superfolder + name for name in filenames]
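Editor's note: the candidate space above grows factorially with the artist count, which is why the code caps permutations at fewer than four artists. A quick standalone illustration of what one permutation pass generates (the real function adds lowercase and "safe" variants on top):

	import itertools

	artists, title = ["Artist A", "Artist B"], "Song"
	candidates = {"-".join(p) + "_" + title for p in itertools.permutations(artists)}
	print(sorted(candidates))
	# ['Artist A-Artist B_Song', 'Artist B-Artist A_Song']
	# with 4+ artists the code falls back to a single sorted() order instead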

def local_files(artist=None,artists=None,title=None):

	filenames = get_all_possible_filenames(artist,artists,title)

	images = []

	for purename in filenames:
		# direct files
		for ext in ["png","jpg","jpeg","gif"]:
			#for num in [""] + [str(n) for n in range(0,10)]:
			if os.path.exists(data_dir['images'](purename + "." + ext)):
				images.append("/images/" + purename + "." + ext)

		# folder
		try:
			for f in os.listdir(data_dir['images'](purename)):
				if f.split(".")[-1] in ["png","jpg","jpeg","gif"]:
					images.append("/images/" + purename + "/" + f)
		except:
			pass

	return images



def set_image(b64,**keys):
	track = "title" in keys
	if track:
		entity = {'artists':keys['artists'],'title':keys['title']}
		id = database.sqldb.get_track_id(entity)
	else:
		entity = keys['artist']
		id = database.sqldb.get_artist_id(entity)

	log("Trying to set image, b64 string: " + str(b64[:30] + "..."),module="debug")

	regex = r"data:image/(\w+);base64,(.+)"
	type,b64 = re.fullmatch(regex,b64).groups()
	b64 = base64.b64decode(b64)
	filename = "webupload" + str(int(datetime.datetime.now().timestamp())) + "." + type
	for folder in get_all_possible_filenames(**keys):
		if os.path.exists(data_dir['images'](folder)):
			with open(data_dir['images'](folder,filename),"wb") as f:
				f.write(b64)
			break
	else:
		folder = get_all_possible_filenames(**keys)[0]
		os.makedirs(data_dir['images'](folder))
		with open(data_dir['images'](folder,filename),"wb") as f:
			f.write(b64)

	log("Saved image as " + data_dir['images'](folder,filename),module="debug")

	# set as current picture in rotation
	if track: set_image_in_cache(id,'tracks',os.path.join("/images",folder,filename))
	else: set_image_in_cache(id,'artists',os.path.join("/images",folder,filename))
@ -1,8 +1,8 @@
from .. import database_packed
from . import filters
from ..globalconf import malojaconfig

from .. import database, database_packed, malojatime, utilities, malojauri, thirdparty
from .. import database, malojatime, images, malojauri, thirdparty, __pkginfo__
from ..database import jinjaview
from doreah.regular import repeatdaily

import urllib
@ -11,7 +11,7 @@ import math
# templating
from jinja2 import Environment, PackageLoader, select_autoescape

dbp = database_packed.DB()
#dbp = jinjaview.JinjaDBConnection()

jinja_environment = Environment(
	loader=PackageLoader('maloja', "web/jinja"),
@ -25,13 +25,14 @@ def update_jinja_environment():

	JINJA_CONTEXT = {
		# maloja
		"db": database,
		"dbp":dbp,
		"db": database, #TODO: move these to connection manager as well
		#"dbp":dbp,
		"malojatime": malojatime,
		"utilities": utilities,
		"images": images,
		"mlj_uri": malojauri,
		"settings": malojaconfig,
		"thirdparty": thirdparty,
		"pkginfo": __pkginfo__,
		# external
		"urllib": urllib,
		"math":math,

@ -58,10 +58,16 @@ class MTRangeGeneric:
	def unlimited(self):
		return False

	def timestamps(self):
		return (self.first_stamp(),self.last_stamp())

	# whether we currently live or will ever again live in this range
	def active(self):
		return (self.last_stamp() > datetime.utcnow().timestamp())

	def __contains__(self,timestamp):
		return timestamp >= self.first_stamp() and timestamp <= self.last_stamp()
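Editor's note: the __contains__ method added above makes ranges usable with Python's in operator. A hypothetical use following the MTRangeGeneric API in this hunk (get_range_object is assumed from malojatime, as used elsewhere in this commit):

	week = get_range_object(within="2022/w01")   # assumed constructor
	if 1641150000 in week:                       # delegates to __contains__
		print("scrobble falls inside this week")
	if week.active():                            # range end still in the future?
		print("still collecting scrobbles")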

# Any range that has one defining base unit, whether week, year, etc.
class MTRangeSingular(MTRangeGeneric):
	def fromstr(self):

@ -36,7 +36,7 @@ def uri_to_internal(keys,forceTrack=False,forceArtist=False,api=False):
	limitkeys["timerange"] = get_range_object(since=since,to=to,within=within)

	#3
	delimitkeys = {"step":"month","stepn":1,"trail":1}
	delimitkeys = {"step":"year","stepn":1,"trail":1}
	if "step" in keys: [delimitkeys["step"],delimitkeys["stepn"]] = (keys["step"].split("-") + [1])[:2]
	if "stepn" in keys: delimitkeys["stepn"] = keys["stepn"] #overwrite if explicitly given
	if "stepn" in delimitkeys: delimitkeys["stepn"] = int(delimitkeys["stepn"]) #in both cases, convert it here
@ -12,12 +12,13 @@ from .. import __pkginfo__ as info
from .. import globalconf


def print_header_info():
	print()
	print("#####")
	print("Maloja v" + info.VERSION)
	#print("#####")
	print(col['yellow']("Maloja"),"v" + info.VERSION)
	print(info.HOMEPAGE)
	print("#####")
	#print("#####")
	print()


@ -79,12 +80,16 @@ def stop():
	print("Maloja stopped!")
	return True


def onlysetup():
	print_header_info()
	setup()
	print("Setup complete!")

def direct():
	print_header_info()
	setup()
	from .. import server
	server.run_server()

def debug():
	os.environ["MALOJA_DEV_MODE"] = 'true'
@ -93,29 +98,31 @@ def debug():

def print_info():
	print_header_info()
	print("Configuration Directory:",globalconf.dir_settings['config'])
	print("Data Directory:         ",globalconf.dir_settings['state'])
	print("Log Directory:          ",globalconf.dir_settings['logs'])
	print("Network:                ",f"IPv{ip_address(globalconf.malojaconfig['host']).version}, Port {globalconf.malojaconfig['port']}")
	print("Timezone:               ",f"UTC{globalconf.malojaconfig['timezone']:+d}")
	print(col['lightblue']("Configuration Directory:"),globalconf.dir_settings['config'])
	print(col['lightblue']("Data Directory:         "),globalconf.dir_settings['state'])
	print(col['lightblue']("Log Directory:          "),globalconf.dir_settings['logs'])
	print(col['lightblue']("Network:                "),f"IPv{ip_address(globalconf.malojaconfig['host']).version}, Port {globalconf.malojaconfig['port']}")
	print(col['lightblue']("Timezone:               "),f"UTC{globalconf.malojaconfig['timezone']:+d}")
	print()
	print("#####")
	print()

@mainfunction({"l":"level","v":"version","V":"version"},flags=['version'],shield=True)
@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images'],shield=True)
def main(*args,**kwargs):

	actions = {
		# server
		"start":start,
		"restart":restart,
		"stop":stop,
		"run":direct,
		"debug":debug,
		"import":tasks.loadexternal,
		"backup":tasks.backuphere,
		# "update":update,
		"fix":tasks.fixdb,
		"generate":tasks.generate_scrobbles,
		"setup":onlysetup,
		# admin scripts
		"import":tasks.import_scrobbles, # maloja import /x/y.csv
		"backup":tasks.backup, # maloja backup --targetfolder /x/y --include_images
		"generate":tasks.generate, # maloja generate 400
		"export":tasks.export, # maloja export
		# aux
		"info":print_info
	}

@ -124,8 +131,11 @@ def main(*args,**kwargs):
	else:
		try:
			action, *args = args
			actions[action](*args,**kwargs)
			action = actions[action]
		except (ValueError, KeyError):
			print("Valid commands: " + " ".join(a for a in actions))
			return

		return action(*args,**kwargs)

	return True

35
maloja/proccontrol/profiler.py
Normal file
@ -0,0 +1,35 @@
import os

import cProfile, pstats


from doreah.logging import log
from doreah.timing import Clock

from ..globalconf import data_dir


profiler = cProfile.Profile()

def profile(func):
	def newfunc(*args,**kwargs):

		benchmarkfolder = data_dir['logs']("benchmarks")
		os.makedirs(benchmarkfolder,exist_ok=True)

		clock = Clock()
		clock.start()

		profiler.enable()
		result = func(*args,**kwargs)
		profiler.disable()

		log(f"Executed {func.__name__} ({args}, {kwargs}) in {clock.stop():.2f}s",module="debug_performance")
		try:
			pstats.Stats(profiler).dump_stats(os.path.join(benchmarkfolder,f"{func.__name__}.stats"))
		except:
			pass

		return result

	return newfunc
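Editor's note: being an ordinary decorator, profile can wrap any function whose hot path you want captured. A hypothetical use (the wrapped function is illustrative, not maloja code):

	from maloja.proccontrol.profiler import profile

	@profile
	def build_charts(year):           # stand-in for an expensive function
		return sorted(range(year))

	build_charts(2022)
	# timing goes to the debug_performance log, cumulative stats land in
	# <logs>/benchmarks/build_charts.stats for inspection with pstats or snakeviz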
@ -1,10 +1,11 @@
import pkg_resources
from importlib import resources
from distutils import dir_util
from doreah.io import col, ask, prompt
from doreah import auth
import os

from ..globalconf import data_dir, dir_settings, malojaconfig, apikeystore
from ..globalconf import data_dir, dir_settings, malojaconfig


# EXTERNAL API KEYS
@ -18,9 +19,9 @@ ext_apikeys = [


def copy_initial_local_files():
	folder = pkg_resources.resource_filename("maloja","data_files")
	for cat in dir_settings:
		dir_util.copy_tree(os.path.join(folder,cat),dir_settings[cat],update=False)
	with resources.files("maloja") / 'data_files' as folder:
		for cat in dir_settings:
			dir_util.copy_tree(os.path.join(folder,cat),dir_settings[cat],update=False)

charset = list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") + list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
def randomstring(length=32):
@ -37,16 +38,17 @@ def setup():
		keyname = malojaconfig.get_setting_info(k)['name']
		key = malojaconfig[k]
		if key is False:
			print("\t" + "Currently not using a " + col['red'](keyname) + " for image display.")
			print(f"\tCurrently not using a {col['red'](keyname)} for image display.")
		elif key is None or key == "ASK":
			print("\t" + "Please enter your " + col['gold'](keyname) + ". If you do not want to use one at this moment, simply leave this empty and press Enter.")
			key = prompt("",types=(str,),default=False,skip=SKIP)
			promptmsg = f"\tPlease enter your {col['gold'](keyname)}. If you do not want to use one at this moment, simply leave this empty and press Enter."
			key = prompt(promptmsg,types=(str,),default=False,skip=SKIP)
			malojaconfig[k] = key
		else:
			print("\t" + col['lawngreen'](keyname) + " found.")
			print(f"\t{col['lawngreen'](keyname)} found.")


	# OWN API KEY
	from ..apis import apikeystore
	if len(apikeystore) == 0:
		answer = ask("Do you want to set up a key to enable scrobbling? Your scrobble extension needs that key so that only you can scrobble tracks to your database.",default=True,skip=SKIP)
		if answer:
@ -74,11 +76,3 @@ def setup():
			if newpw != newpw_repeat: print("Passwords do not match!")
			else: break
		auth.defaultuser.setpw(newpw)

	if malojaconfig["SEND_STATS"] is None:
		answer = ask("I would like to know how many people use Maloja. Would it be okay to send a daily ping to my server (this contains no data that isn't accessible via your web interface already)?",default=True,skip=SKIP)
		if answer:
			malojaconfig["SEND_STATS"] = True
			malojaconfig["PUBLIC_URL"] = None
		else:
			malojaconfig["SEND_STATS"] = False

@ -14,17 +14,10 @@ from .control import getInstance

setproctitle.setproctitle("maloja_supervisor")

def update():
	log("Updating...",module="supervisor")
	try:
		os.system("pip3 install maloja --upgrade --no-cache-dir")
	except:
		log("Could not update.",module="supervisor")

def start():
	try:
		return subprocess.Popen(
			["python3", "-m", "maloja.server"],
			["python3", "-m", "maloja","run"],
			stdout=subprocess.DEVNULL,
			stderr=subprocess.DEVNULL,
		)
@ -35,8 +28,6 @@ def start():

while True:
	log("Maloja is not running, starting...",module="supervisor")
	if malojaconfig["UPDATE_AFTER_CRASH"]:
		update()
	process = start()

	process.wait()

@ -1,49 +1,4 @@
import os
from doreah.io import ask,col

from ...globalconf import data_dir


def loadexternal(filename):

	if not os.path.exists(filename):
		print("File could not be found.")
		return

	from .import_scrobbles import import_scrobbles
	result = import_scrobbles(filename)

	msg = f"Successfully imported {result['CONFIDENT_IMPORT'] + result['UNCERTAIN_IMPORT']} scrobbles"
	if result['UNCERTAIN_IMPORT'] > 0:
		warningmsg = col['orange'](f"{result['UNCERTAIN_IMPORT']} Warning{'s' if result['UNCERTAIN_IMPORT'] != 1 else ''}!")
		msg += f" ({warningmsg})"
	print(msg)

	msg = f"Skipped {result['CONFIDENT_SKIP'] + result['UNCERTAIN_SKIP']} scrobbles"
	if result['UNCERTAIN_SKIP'] > 0:
		warningmsg = col['indianred'](f"{result['UNCERTAIN_SKIP']} Warning{'s' if result['UNCERTAIN_SKIP'] != 1 else ''}!")
		msg += f" ({warningmsg})"
	print(msg)

	if result['FAIL'] > 0:
		print(col['red'](f"{result['FAIL']} Error{'s' if result['FAIL'] != 1 else ''}!"))

def backuphere():
	from .backup import backup
	backup(folder=os.getcwd())

def update():
	os.system("pip3 install malojaserver --upgrade --no-cache-dir")
	from ..control import restart
	restart()

def fixdb():
	from .fixexisting import fix
	fix()

def generate_scrobbles():
	targetfile = data_dir['scrobbles']("randomgenerated.tsv")

	from .generate import generate
	generate(targetfile)
from .import_scrobbles import import_scrobbles
from .backup import backup
from .generate import generate
from .export import export # read that line out loud

@ -1,47 +1,51 @@
import tarfile
from datetime import datetime
import time
import glob
import os
from ...globalconf import data_dir
from ...globalconf import dir_settings
from pathlib import PurePath

from doreah.logging import log
from doreah.io import col


user_files = {
	"minimal":{
		"rules":["*.tsv"],
		"scrobbles":["*.tsv"]
	},
	"full":{
		"clients":["apikeys.yml"],
		"images":["artists","tracks"],
		"settings":["settings.ini"]
	}
}
basic_files = [
	('config', ['rules/*.tsv','settings.ini','apikeys.yml','custom_css/*.css']),
	('state', ['auth/auth.ddb','malojadb.sqlite'])
]
expanded_files = [
	('state', ['images'])
]
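Editor's note: the pattern lists above are expanded relative to each directory root with glob, roughly like this standalone sketch (the root paths are illustrative):

	import glob, os

	dir_settings = {'config': '/etc/maloja', 'state': '/var/lib/maloja'}  # illustrative roots
	basic_files = [
		('config', ['rules/*.tsv', 'settings.ini', 'apikeys.yml', 'custom_css/*.css']),
		('state', ['auth/auth.ddb', 'malojadb.sqlite']),
	]

	for category, patterns in basic_files:
		for pattern in patterns:
			# e.g. /etc/maloja/rules/*.tsv -> every rule file currently on disk
			print(glob.glob(os.path.join(dir_settings[category], pattern)))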

def backup(folder,level="full"):
def backup(targetfolder=None,include_images=False):

	selected_files = user_files["minimal"] if level == "minimal" else {**user_files["minimal"], **user_files["full"]}
	real_files = {cat:[] for cat in selected_files}
	for cat in selected_files:
		catfolder = data_dir[cat]
		for g in selected_files[cat]:
			real_files[cat] += glob.glob(catfolder(g))
	if targetfolder is None:
		targetfolder = os.getcwd()

	if include_images:
		file_patterns = basic_files + expanded_files
	else:
		file_patterns = basic_files

	real_files = {}
	for category,patterns in file_patterns:
		real_files.setdefault(category,[])
		for pattern in patterns:
			real_files[category] += glob.glob(os.path.join(dir_settings[category],pattern))

	log("Creating backup...")


	now = datetime.utcnow()
	timestr = now.strftime("%Y_%m_%d_%H_%M_%S")
	filename = "maloja_backup_" + timestr + ".tar.gz"
	archivefile = os.path.join(folder,filename)
	assert not os.path.exists(archivefile)
	with tarfile.open(name=archivefile,mode="x:gz") as archive:
		for cat, value in real_files.items():
			for f in value:
	timestr = time.strftime("%Y_%m_%d_%H_%M_%S")
	filename = f"maloja_backup_{timestr}.tar.gz"
	outputfile = os.path.join(targetfolder,filename)
	assert not os.path.exists(outputfile)

	with tarfile.open(name=outputfile,mode="x:gz") as archive:
		for category, filelist in real_files.items():
			for f in filelist:
				p = PurePath(f)
				r = p.relative_to(data_dir[cat]())
				archive.add(f,arcname=os.path.join(cat,r))
	log("Backup created!")
	return archivefile
				r = p.relative_to(dir_settings[category])
				archive.add(f,arcname=os.path.join(category,r))
	log("Backup created: " + col['yellow'](outputfile))
	return outputfile

24
maloja/proccontrol/tasks/export.py
Normal file
@ -0,0 +1,24 @@
import json
import os
import time

from doreah.io import col

def export(targetfolder=None):

	from ...database.sqldb import get_scrobbles

	if targetfolder is None:
		targetfolder = os.getcwd()

	timestr = time.strftime("%Y_%m_%d_%H_%M_%S")
	filename = f"maloja_export_{timestr}.json"
	outputfile = os.path.join(targetfolder,filename)
	assert not os.path.exists(outputfile)

	data = {'scrobbles':get_scrobbles()}
	with open(outputfile,'w') as outfd:
		json.dump(data,outfd,indent=3)

	print(f"Exported {len(data['scrobbles'])} Scrobbles to {col['yellow'](outputfile)}")
	return outputfile
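Editor's note: judging from the parse_maloja import function later in this commit (which reads track, time, and duration fields out of data['scrobbles']), the dict handed to json.dump presumably looks roughly like this; the values are made up:

	{
		"scrobbles": [
			{
				"time": 1641150000,
				"track": {"artists": ["Example Artist"], "title": "Example Song"},
				"duration": None
			}
		]
	}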
@ -1,70 +0,0 @@
import os
from ...globalconf import data_dir
import re
from ...cleanup import CleanerAgent
from doreah.logging import log
import difflib
import datetime
from .backup import backup

wendigo = CleanerAgent()

exp = r"([0-9]*)(\t+)([^\t]+?)(\t+)([^\t]+)([^\n]*)\n"
#     1        2     3        4     5      6
# groups:
# 1 - timestamp
# 2 - sep
# 3 - artists
# 4 - sep
# 5 - title
# 6 - rest


def fix():

	backup(level="minimal",folder=data_dir['backups']())

	now = datetime.datetime.utcnow()
	nowstr = now.strftime("%Y_%m_%d_%H_%M_%S")
	datestr = now.strftime("%Y/%m/%d")
	timestr = now.strftime("%H:%M:%S")

	patchfolder = data_dir['logs']("dbfix",nowstr)
	os.makedirs(patchfolder)


	log("Fixing database...")
	for filename in os.listdir(data_dir['scrobbles']()):
		if filename.endswith(".tsv"):
			log("Fix file " + filename)
			filename_new = filename + "_new"

			with open(data_dir['scrobbles'](filename_new),"w") as newfile:
				with open(data_dir['scrobbles'](filename),"r") as oldfile:

					for l in oldfile:

						a,t = re.sub(exp,r"\3",l), re.sub(exp,r"\5",l)
						r1,r2,r3 = re.sub(exp,r"\1\2",l),re.sub(exp,r"\4",l),re.sub(exp,r"\6",l)

						a = a.split("␟")

						(al,t) = wendigo.fullclean(a,t)
						a = "␟".join(al)
						newfile.write(r1 + a + r2 + t + r3 + "\n")


			#os.system("diff " + "scrobbles/" + fn + "_new" + " " + "scrobbles/" + fn)
			with open(data_dir['scrobbles'](filename_new),"r") as newfile, open(data_dir['scrobbles'](filename),"r") as oldfile:

				diff = difflib.unified_diff(oldfile.read().split("\n"),newfile.read().split("\n"),lineterm="",n=0)
				diff = list(diff)

				with open(os.path.join(patchfolder,filename + ".diff"),"w") as patchfile:
					patchfile.write("\n".join(diff))

			os.replace(data_dir['scrobbles'](filename_new),data_dir['scrobbles'](filename))


	log("Database fixed!")
@ -2,6 +2,7 @@ import random
import datetime
from doreah.io import ask


artists = [
	"Chou Tzuyu","Jennie Kim","Kim Seolhyun","Nancy McDonie","Park Junghwa","Hirai Momo","Rosé Park","Laura Brehm","HyunA",
	"Jeremy Soule","Jerry Goldsmith","Howard Shore","Tilman Sillescu","James Newton Howard","Bear McCreary","David Newman",
@ -65,15 +66,28 @@ def generate_track():



def generate(targetfile):
	if ask("Generate random scrobbles?",default=False):
		with open(targetfile,"a") as fd:
			for _ in range(200):
				track = generate_track()
				for _ in range(random.randint(1, 50)):
					timestamp = random.randint(1, int(datetime.datetime.now().timestamp()))
def generate(n=200):

					entry = "\t".join([str(timestamp),"␟".join(track['artists']),track['title'],"-"])
					fd.write(entry)
					fd.write("\n")
		print("Done!")
	from ...database.sqldb import add_scrobbles

	n = int(n)

	if ask("Generate random scrobbles?",default=False):
		scrobbles = []
		for _ in range(n):
			track = generate_track()
			print("Generated",track)
			for _ in range(random.randint(1, 50)):
				timestamp = random.randint(1, int(datetime.datetime.now().timestamp()))

				scrobbles.append({
					"time":timestamp,
					"track":{
						"artists":track['artists'],
						"title":track['title']
					},
					"duration":None,
					"origin":"generated"
				})

		add_scrobbles(scrobbles)

@ -1,9 +1,9 @@
import os, datetime, re

import json, csv

from ...cleanup import *
from doreah.io import col, ask, prompt

from ...cleanup import *
from ...globalconf import data_dir


@ -21,6 +21,8 @@ outputs = {

def import_scrobbles(inputf):

	from ...database.sqldb import add_scrobbles

	result = {
		"CONFIDENT_IMPORT": 0,
		"UNCERTAIN_IMPORT": 0,
@ -32,76 +34,85 @@ def import_scrobbles(inputf):
	filename = os.path.basename(inputf)

	if re.match(".*\.csv",filename):
		type = "Last.fm"
		outputf = data_dir['scrobbles']("lastfmimport.tsv")
		typeid,typedesc = "lastfm","Last.fm"
		importfunc = parse_lastfm

	elif re.match("endsong_[0-9]+\.json",filename):
		type = "Spotify"
		outputf = data_dir['scrobbles']("spotifyimport.tsv")
		typeid,typedesc = "spotify","Spotify"
		importfunc = parse_spotify_full

	elif re.match("StreamingHistory[0-9]+\.json",filename):
		type = "Spotify"
		outputf = data_dir['scrobbles']("spotifyimport.tsv")
		typeid,typedesc = "spotify","Spotify"
		importfunc = parse_spotify_lite

	elif re.match("maloja_export_[0-9]+\.json",filename):
		typeid,typedesc = "maloja","Maloja"
		importfunc = parse_maloja

	else:
		print("File",inputf,"could not be identified as a valid import source.")
		return result


	print(f"Parsing {col['yellow'](inputf)} as {col['cyan'](type)} export")
	print(f"Parsing {col['yellow'](inputf)} as {col['cyan'](typedesc)} export")
	print("This could take a while...")

	timestamps = set()
	scrobblebuffer = []

	if os.path.exists(outputf):
		while True:
			action = prompt(f"Already imported {type} data. [O]verwrite, [A]ppend or [C]ancel?",default='c').lower()[0]
			if action == 'c':
				return result
			elif action == 'a':
				mode = 'a'
				break
			elif action == 'o':
				mode = 'w'
				break
			else:
				print("Could not understand response.")
	else:
		mode = 'w'
	for status,scrobble,msg in importfunc(inputf):
		result[status] += 1
		outputs[status](msg)
		if status in ['CONFIDENT_IMPORT','UNCERTAIN_IMPORT']:

			# prevent duplicate timestamps
			while scrobble['scrobble_time'] in timestamps:
				scrobble['scrobble_time'] += 1
			timestamps.add(scrobble['scrobble_time'])

	with open(outputf,mode) as outputfd:
			# clean up
			(scrobble['track_artists'],scrobble['track_title']) = c.fullclean(scrobble['track_artists'],scrobble['track_title'])

		timestamps = set()
			# extra info
			extrainfo = {}
			if scrobble.get('album_name'): extrainfo['album_name'] = scrobble['album_name']
			# saving this in the scrobble instead of the track because for now it's not meant
			# to be authoritative information, just payload of the scrobble

		for status,scrobble,msg in importfunc(inputf):
			result[status] += 1
			outputs[status](msg)
			if status in ['CONFIDENT_IMPORT','UNCERTAIN_IMPORT']:
			scrobblebuffer.append({
				"time":scrobble['scrobble_time'],
				"track":{
					"artists":scrobble['track_artists'],
					"title":scrobble['track_title'],
					"length":None
				},
				"duration":scrobble['scrobble_duration'],
				"origin":"import:" + typeid,
				"extra":extrainfo
			})

				while scrobble['timestamp'] in timestamps:
					scrobble['timestamp'] += 1
				timestamps.add(scrobble['timestamp'])
			if (result['CONFIDENT_IMPORT'] + result['UNCERTAIN_IMPORT']) % 1000 == 0:
				print(f"Imported {result['CONFIDENT_IMPORT'] + result['UNCERTAIN_IMPORT']} scrobbles...")
				add_scrobbles(scrobblebuffer)
				scrobblebuffer = []

				# Format fields for tsv
				scrobble['timestamp'] = str(scrobble['timestamp'])
				scrobble['duration'] = str(scrobble['duration']) if scrobble['duration'] is not None else '-'
				scrobble['album'] = scrobble['album'] if scrobble['album'] is not None else '-'
				(artists,scrobble['title']) = c.fullclean(scrobble['artiststr'],scrobble['title'])
				scrobble['artiststr'] = "␟".join(artists)
	add_scrobbles(scrobblebuffer)

				outputline = "\t".join([
					scrobble['timestamp'],
					scrobble['artiststr'],
					scrobble['title'],
					scrobble['album'],
					scrobble['duration']
				])
				outputfd.write(outputline + '\n')
	msg = f"Successfully imported {result['CONFIDENT_IMPORT'] + result['UNCERTAIN_IMPORT']} scrobbles"
	if result['UNCERTAIN_IMPORT'] > 0:
		warningmsg = col['orange'](f"{result['UNCERTAIN_IMPORT']} Warning{'s' if result['UNCERTAIN_IMPORT'] != 1 else ''}!")
		msg += f" ({warningmsg})"
	print(msg)

	msg = f"Skipped {result['CONFIDENT_SKIP'] + result['UNCERTAIN_SKIP']} scrobbles"
	if result['UNCERTAIN_SKIP'] > 0:
		warningmsg = col['indianred'](f"{result['UNCERTAIN_SKIP']} Warning{'s' if result['UNCERTAIN_SKIP'] != 1 else ''}!")
		msg += f" ({warningmsg})"
	print(msg)

	if result['FAIL'] > 0:
		print(col['red'](f"{result['FAIL']} Error{'s' if result['FAIL'] != 1 else ''}!"))

				if (result['CONFIDENT_IMPORT'] + result['UNCERTAIN_IMPORT']) % 100 == 0:
					print(f"Imported {result['CONFIDENT_IMPORT'] + result['UNCERTAIN_IMPORT']} scrobbles...")

	return result
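Editor's note: every importfunc in this file follows the same generator contract: yield (status, scrobble_dict, message) tuples where the dict carries track_title, track_artists, album_name, scrobble_time, and scrobble_duration. A minimal hypothetical source following that contract (the tab-separated input format is made up):

	def parse_example(inputf):
		with open(inputf) as fd:
			for line in fd:
				try:
					artist, title, time = line.rstrip("\n").split("\t")
					yield ('CONFIDENT_IMPORT', {
						'track_title': title,
						'track_artists': artist,
						'album_name': None,
						'scrobble_time': int(time),
						'scrobble_duration': None
					}, '')
				except Exception as e:
					yield ('FAIL', None, f"{line} could not be parsed. ({repr(e)})")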

@ -136,11 +147,11 @@ def parse_spotify_lite(inputf):
				continue

			yield ("CONFIDENT_IMPORT",{
				'title':title,
				'artiststr': artist,
				'timestamp': timestamp,
				'duration':played,
				'album': None
				'track_title':title,
				'track_artists': artist,
				'scrobble_time': timestamp,
				'scrobble_duration':played,
				'album_name': None
			},'')
		except Exception as e:
			yield ('FAIL',None,f"{entry} could not be parsed. Scrobble not imported. ({repr(e)})")
@ -240,11 +251,11 @@ def parse_spotify_full(inputf):


			yield (status,{
				'title':title,
				'artiststr': artist,
				'album': album,
				'timestamp': timestamp,
				'duration':played
				'track_title':title,
				'track_artists': artist,
				'album_name': album,
				'scrobble_time': timestamp,
				'scrobble_duration':played
			},msg)
		except Exception as e:
			yield ('FAIL',None,f"{entry} could not be parsed. Scrobble not imported. ({repr(e)})")
@ -266,15 +277,36 @@ def parse_lastfm(inputf):

		try:
			yield ('CONFIDENT_IMPORT',{
				'title': title,
				'artiststr': artist,
				'album': album,
				'timestamp': int(datetime.datetime.strptime(
				'track_title': title,
				'track_artists': artist,
				'album_name': album,
				'scrobble_time': int(datetime.datetime.strptime(
					time + '+0000',
					"%d %b %Y %H:%M%z"
				).timestamp()),
				'duration':None
				'scrobble_duration':None
			},'')
		except Exception as e:
			yield ('FAIL',None,f"{entry} could not be parsed. Scrobble not imported. ({repr(e)})")
			continue


def parse_maloja(inputf):

	with open(inputf,'r') as inputfd:
		data = json.load(inputfd)

	scrobbles = data['scrobbles']

	for s in scrobbles:
		try:
			yield ('CONFIDENT_IMPORT',{
				'track_title': s['track']['title'],
				'track_artists': s['track']['artists'],
				'album_name': s['track'].get('album',{}).get('name',''),
				'scrobble_time': s['time'],
				'scrobble_duration': s['duration']
			},'')
		except Exception as e:
			yield ('FAIL',None,f"{s} could not be parsed. Scrobble not imported. ({repr(e)})")
			continue

143
maloja/server.py
@ -1,11 +1,11 @@
# technical
import sys
import signal
import os
from threading import Thread
import setproctitle
import pkg_resources
from importlib import resources
from css_html_js_minify import html_minify, css_minify
import datauri


# server stuff
@ -14,18 +14,19 @@ import waitress

# doreah toolkit
from doreah.logging import log
from doreah.timing import Clock
from doreah import auth

# rest of the project
from . import database
from .utilities import resolveImage
from .database.jinjaview import JinjaDBConnection
from .images import resolve_track_image, resolve_artist_image
from .malojauri import uri_to_internal, remove_identical
from .globalconf import malojaconfig, apikeystore, data_dir
from .globalconf import malojaconfig, data_dir
from .jinjaenv.context import jinja_environment
from .apis import init_apis
from .apis import init_apis, apikeystore


from .proccontrol.profiler import profile


######
@ -34,10 +35,10 @@ from .apis import init_apis

PORT = malojaconfig["PORT"]
HOST = malojaconfig["HOST"]
THREADS = 24
THREADS = 12
BaseRequest.MEMFILE_MAX = 15 * 1024 * 1024

STATICFOLDER = pkg_resources.resource_filename(__name__,"web/static")
#STATICFOLDER = importlib.resources.path(__name__,"web/static")

webserver = Bottle()

@ -52,9 +53,12 @@ setproctitle.setproctitle("Maloja")

def generate_css():
	cssstr = ""
	for file in os.listdir(os.path.join(STATICFOLDER,"css")):
		with open(os.path.join(STATICFOLDER,"css",file),"r") as filed:
			cssstr += filed.read()
	with resources.files('maloja') / 'web' / 'static' as staticfolder:

		for file in os.listdir(os.path.join(staticfolder,"css")):
			if file.endswith(".css"):
				with open(os.path.join(staticfolder,"css",file),"r") as filed:
					cssstr += filed.read()

	for file in os.listdir(data_dir['css']()):
		if file.endswith(".css"):
@ -158,10 +162,21 @@ def deprecated_api(pth):
@webserver.route("/image")
def dynamic_image():
	keys = FormsDict.decode(request.query)
	relevant, _, _, _, _ = uri_to_internal(keys)
	result = resolveImage(**relevant)
	if result == "": return ""
	redirect(result,307)
	if keys['type'] == 'track':
		result = resolve_track_image(keys['id'])
	elif keys['type'] == 'artist':
		result = resolve_artist_image(keys['id'])

	if result is None or result['value'] in [None,'']:
		return ""
	if result['type'] == 'raw':
		# data uris are directly served as image because a redirect to a data uri
		# doesn't work
		duri = datauri.DataURI(result['value'])
		response.content_type = duri.mimetype
		return duri.data
	if result['type'] == 'url':
		redirect(result['value'],307)

@webserver.route("/images/<pth:re:.*\\.jpeg>")
@webserver.route("/images/<pth:re:.*\\.jpg>")
@ -172,39 +187,48 @@ def static_image(pth):
	ext = pth.split(".")[-1]
	small_pth = pth + "-small"
	if os.path.exists(data_dir['images'](small_pth)):
		response = static_file(small_pth,root=data_dir['images']())
		resp = static_file(small_pth,root=data_dir['images']())
	else:
		try:
			from pyvips import Image
			thumb = Image.thumbnail(data_dir['images'](pth),300)
			thumb.webpsave(data_dir['images'](small_pth))
			response = static_file(small_pth,root=data_dir['images']())
			resp = static_file(small_pth,root=data_dir['images']())
		except Exception:
			response = static_file(pth,root=data_dir['images']())
			resp = static_file(pth,root=data_dir['images']())

	#response = static_file("images/" + pth,root="")
	response.set_header("Cache-Control", "public, max-age=86400")
	response.set_header("Content-Type", "image/" + ext)
	return response
	resp.set_header("Cache-Control", "public, max-age=86400")
	resp.set_header("Content-Type", "image/" + ext)
	return resp


@webserver.route("/style.css")
def get_css():
	response.content_type = 'text/css'
	global css
	if malojaconfig["DEV_MODE"]: css = generate_css()
	return css
	if malojaconfig["DEV_MODE"]: return generate_css()
	else: return css


@webserver.route("/login")
def login():
	return auth.get_login_page()

# old
@webserver.route("/<name>.<ext>")
@webserver.route("/media/<name>.<ext>")
def static(name,ext):
	assert ext in ["txt","ico","jpeg","jpg","png","less","js"]
	response = static_file(ext + "/" + name + "." + ext,root=STATICFOLDER)
	assert ext in ["txt","ico","jpeg","jpg","png","less","js","ttf"]
	with resources.files('maloja') / 'web' / 'static' as staticfolder:
		response = static_file(ext + "/" + name + "." + ext,root=staticfolder)
	response.set_header("Cache-Control", "public, max-age=3600")
	return response

# new, direct reference
@webserver.route("/static/<path:path>")
def static(path):
	with resources.files('maloja') / 'web' / 'static' as staticfolder:
		response = static_file(path,root=staticfolder)
	response.set_header("Cache-Control", "public, max-age=3600")
	return response

@ -212,47 +236,46 @@ def static(name,ext):

### DYNAMIC

def static_html(name):
def jinja_page(name):
	if name in aliases: redirect(aliases[name])
	keys = remove_identical(FormsDict.decode(request.query))

	adminmode = request.cookies.get("adminmode") == "true" and auth.check(request)

	clock = Clock()
	clock.start()
	with JinjaDBConnection() as conn:

	loc_context = {
		"adminmode":adminmode,
		"apikey":request.cookies.get("apikey") if adminmode else None,
		"apikeys":apikeystore,
		"_urikeys":keys, #temporary!
	}
	loc_context["filterkeys"], loc_context["limitkeys"], loc_context["delimitkeys"], loc_context["amountkeys"], loc_context["specialkeys"] = uri_to_internal(keys)
		loc_context = {
			"dbc":conn,
			"adminmode":adminmode,
			"apikey":request.cookies.get("apikey") if adminmode else None,
			"apikeys":apikeystore,
			"_urikeys":keys, #temporary!
		}
		loc_context["filterkeys"], loc_context["limitkeys"], loc_context["delimitkeys"], loc_context["amountkeys"], loc_context["specialkeys"] = uri_to_internal(keys)

	template = jinja_environment.get_template(name + '.jinja')
	try:
		res = template.render(**loc_context)
	except (ValueError, IndexError):
		abort(404,"This Artist or Track does not exist")
		template = jinja_environment.get_template(name + '.jinja')
		try:
			res = template.render(**loc_context)
		except (ValueError, IndexError):
			abort(404,"This Artist or Track does not exist")

	if malojaconfig["DEV_MODE"]: jinja_environment.cache.clear()

	log("Generated page {name} in {time:.5f}s".format(name=name,time=clock.stop()),module="debug_performance")
	return clean_html(res)

@webserver.route("/<name:re:admin.*>")
@auth.authenticated
def static_html_private(name):
	return static_html(name)
def jinja_page_private(name):
	return jinja_page(name)

@webserver.route("/<name>")
def static_html_public(name):
	return static_html(name)
def jinja_page_public(name):
	return jinja_page(name)

@webserver.route("")
@webserver.route("/")
def mainpage():
	return static_html("start")
	return jinja_page("start")


# Shortlinks
@ -265,27 +288,6 @@ def redirect_track(artists,title):
	redirect("/track?title=" + title + "&" + "&".join("artist=" + artist for artist in artists.split("/")))


######
### SHUTDOWN
#####


def graceful_exit(sig=None,frame=None):
	#urllib.request.urlopen("http://[::1]:" + str(DATABASE_PORT) + "/sync")
	log("Received signal to shutdown")
	try:
		database.sync()
	except Exception as e:
		log("Error while shutting down!",e)
	log("Server shutting down...")
	sys.exit(0)

#set graceful shutdown
signal.signal(signal.SIGINT, graceful_exit)
signal.signal(signal.SIGTERM, graceful_exit)



######
@ -302,11 +304,8 @@ def run_server():

	try:
		#run(webserver, host=HOST, port=MAIN_PORT, server='waitress')
		log(f"Listening on {HOST}:{PORT}")
		waitress.serve(webserver, host=HOST, port=PORT, threads=THREADS)
	except OSError:
		log("Error. Is another Maloja process already running?")
		raise


run_server()

56
maloja/thirdparty/__init__.py
vendored
@ -11,6 +11,7 @@ import json
import urllib.parse, urllib.request
import base64
from doreah.logging import log
from threading import BoundedSemaphore

from ..globalconf import malojaconfig
from .. import database
@ -22,6 +23,11 @@ services = {
	"metadata":[]
}

# have a limited number of worker threads so we don't completely hog the cpu with
# these requests. they are mostly network bound, so python will happily open up 200 new
# requests and then when all the responses come in we suddenly can't load pages anymore
thirdpartylock = BoundedSemaphore(4)
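Editor's note: this is the standard semaphore-throttle pattern; callers block in "with thirdpartylock:" until one of the four slots frees up. A standalone sketch of the same idea (the sleep stands in for a network request):

	from threading import BoundedSemaphore, Thread
	import time

	lock = BoundedSemaphore(4)   # at most 4 concurrent fetches, like thirdpartylock above

	def fetch(n):
		with lock:               # the fifth thread onwards waits here
			time.sleep(0.1)      # stand-in for a network request
			print("fetched", n)

	threads = [Thread(target=fetch, args=(i,)) for i in range(10)]
	for t in threads: t.start()
	for t in threads: t.join()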
def import_scrobbles(identifier):
	for service in services['import']:

@ -34,27 +40,29 @@ def proxy_scrobble_all(artists,title,timestamp):
		service.scrobble(artists,title,timestamp)

def get_image_track_all(track):
	for service in services["metadata"]:
		try:
			res = service.get_image_track(track)
			if res is not None:
				log("Got track image for " + str(track) + " from " + service.name)
				return res
			else:
				log("Could not get track image for " + str(track) + " from " + service.name)
		except Exception as e:
			log("Error getting track image from " + service.name + ": " + repr(e))
	with thirdpartylock:
		for service in services["metadata"]:
			try:
				res = service.get_image_track(track)
				if res is not None:
					log("Got track image for " + str(track) + " from " + service.name)
					return res
				else:
					log("Could not get track image for " + str(track) + " from " + service.name)
			except Exception as e:
				log("Error getting track image from " + service.name + ": " + repr(e))

def get_image_artist_all(artist):
	for service in services["metadata"]:
		try:
			res = service.get_image_artist(artist)
			if res is not None:
				log("Got artist image for " + str(artist) + " from " + service.name)
				return res
			else:
				log("Could not get artist image for " + str(artist) + " from " + service.name)
		except Exception as e:
			log("Error getting artist image from " + service.name + ": " + repr(e))
	with thirdpartylock:
		for service in services["metadata"]:
			try:
				res = service.get_image_artist(artist)
				if res is not None:
					log("Got artist image for " + str(artist) + " from " + service.name)
					return res
				else:
					log("Could not get artist image for " + str(artist) + " from " + service.name)
			except Exception as e:
				log("Error getting artist image from " + service.name + ": " + repr(e))



@ -87,13 +95,13 @@ class GenericInterface:
		s = cls()
		if s.active_proxyscrobble():
			services["proxyscrobble"].append(s)
			log(cls.name + " registered as proxy scrobble target")
			#log(cls.name + " registered as proxy scrobble target")
		if s.active_import():
			services["import"].append(s)
			log(cls.name + " registered as scrobble import source")
			#log(cls.name + " registered as scrobble import source")
		if s.active_metadata():
			services["metadata"].append(s)
			log(cls.name + " registered as metadata provider")
			#log(cls.name + " registered as metadata provider")

	def authorize(self):
		return True

@ -154,7 +162,7 @@ class ImportInterface(GenericInterface,abstract=True):

	def import_scrobbles(self):
		for scrobble in self.get_remote_scrobbles():
			database.createScrobble(
			database.incoming_scrobble(
				artists=scrobble['artists'],
				title=scrobble['title'],
				time=scrobble['time']
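For orientation, a hypothetical import service built on the interface above might look like the sketch below. Everything except ImportInterface, active_import, get_remote_scrobbles and the three scrobble keys is invented for illustration; the registration hook that instantiates subclasses is not shown in this diff:

# hypothetical example, not part of the diff
class MyCustomService(ImportInterface):
	name = "MyCustomService"  # used in the (now commented-out) registration log lines

	def active_import(self):
		# makes the registration code append this service to services['import']
		return True

	def get_remote_scrobbles(self):
		# import_scrobbles() above reads exactly these three keys
		yield {'artists': ['Example Artist'], 'title': 'Example Song', 'time': 1577836800}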
90
maloja/upgrade.py
Normal file
@ -0,0 +1,90 @@
# This module should take care of recognizing old install data and upgrading it before the actual server deals with it

import os
import re
import csv

from doreah.logging import log
from doreah.io import col

from .globalconf import data_dir, dir_settings
from .apis import _apikeys


# Dealing with old style tsv files - these should be phased out everywhere
def read_tsvs(path,types):
	result = []
	for f in os.listdir(path):
		if f.split('.')[-1].lower() != 'tsv': continue
		filepath = os.path.join(path,f)
		result += read_tsv(filepath,types)
	return result

def read_tsv(filename,types):
	with open(filename,'r') as filed:
		reader = csv.reader(filed,delimiter="\t")
		rawentries = [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
		converted_entries = [[coltype(col) for col,coltype in zip(entry,types)] for entry in rawentries]
		return converted_entries
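To illustrate what read_tsv produces (hypothetical file contents, using the same column types that upgrade_db passes further below):

# given old.tsv containing the tab-separated lines
#   # comment lines starting with '#' are skipped
#   1577836800	Artist A␟Artist B	Some Title	Some Album	183
# the call read_tsv("old.tsv", [int,str,str,str,str]) returns
#   [[1577836800, 'Artist A␟Artist B', 'Some Title', 'Some Album', '183']]
# empty columns are dropped first, then zip() pairs each remaining column
# with its type, so shorter rows simply convert fewer columns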
def upgrade_apikeys():

	oldfile = os.path.join(dir_settings['config'],"clients","authenticated_machines.tsv")
	if os.path.exists(oldfile):
		try:
			# each line holds two string columns: key and identifier
			entries = read_tsv(oldfile,[str,str])
			for key,identifier in entries:
				_apikeys.apikeystore[identifier] = key
			os.remove(oldfile)
		except:
			pass
def upgrade_db(callback_add_scrobbles):

	oldfolder = os.path.join(dir_settings['state'],"scrobbles")
	newfolder = os.path.join(dir_settings['state'],".oldscrobbles")
	os.makedirs(newfolder,exist_ok=True)
	if os.path.exists(oldfolder):
		scrobblefiles = [f for f in os.listdir(oldfolder) if f.endswith(".tsv")]
		if len(scrobblefiles) > 0:
			log("Upgrading v2 Database to v3 Database. This could take a while...",color='yellow')
			idx = 0
			for sf in scrobblefiles:
				idx += 1
				if re.match(r"[0-9]+_[0-9]+\.tsv",sf):
					origin = 'legacy'
				elif sf == "lastfmimport.tsv":
					origin = 'import:lastfm'
				elif sf == "spotifyimport.tsv":
					origin = 'import:spotify'
				else:
					origin = 'unknown'

				scrobbles = read_tsv(os.path.join(oldfolder,sf),[int,str,str,str,str])
				#scrobbles = tsv.parse(os.path.join(oldfolder,sf),"int","string","string","string","string",comments=False)
				scrobblelist = []
				log(f"\tImporting from {sf} ({idx}/{len(scrobblefiles)}) - {len(scrobbles)} Scrobbles")
				for scrobble in scrobbles:
					timestamp, artists, title, album, duration = scrobble
					if album in ('-',''): album = None
					if duration in ('-',''): duration = None
					scrobblelist.append({
						"time":int(timestamp),
						"track":{
							"artists":artists.split('␟'),
							"title":title,
							"length":None
						},
						"duration":duration,
						"origin":origin,
						"extra":{
							"album_name":album
							# saving this in the scrobble instead of the track because for now it's not meant
							# to be authoritative information, just payload of the scrobble
						}
					})
				callback_add_scrobbles(scrobblelist)
				os.rename(os.path.join(oldfolder,sf),os.path.join(newfolder,sf))
			log("Done!",color='yellow')
@ -1,3 +0,0 @@
from .images import *
from .maintenance import *
from .utils import *
@ -1,282 +0,0 @@
from ..globalconf import data_dir, malojaconfig
from .. import thirdparty

from doreah import caching
from doreah.logging import log

import itertools
import os
import urllib
import random
import base64
from threading import Thread, Timer
import re
import datetime



### Caches

cacheage = malojaconfig["CACHE_EXPIRE_POSITIVE"] * 24 * 3600
cacheage_neg = malojaconfig["CACHE_EXPIRE_NEGATIVE"] * 24 * 3600

artist_cache = caching.Cache(name="imgcache_artists",maxage=cacheage,maxage_negative=cacheage_neg,persistent=True)
track_cache = caching.Cache(name="imgcache_tracks",maxage=cacheage,maxage_negative=cacheage_neg,persistent=True)


# removes emojis and weird shit from names
def clean(name):
	return "".join(c for c in name if c.isalnum() or c in []).strip()

def get_all_possible_filenames(artist=None,artists=None,title=None):
	# check if we're dealing with a track or artist, then clean up names
	# (only remove non-alphanumeric, allow korean and stuff)

	if title is not None and artists is not None:
		track = True
		title, artists = clean(title), [clean(a) for a in artists]
	elif artist is not None:
		track = False
		artist = clean(artist)
	else: return []


	superfolder = "tracks/" if track else "artists/"

	filenames = []

	if track:
		#unsafeartists = [artist.translate(None,"-_./\\") for artist in artists]
		safeartists = [re.sub("[^a-zA-Z0-9]","",artist) for artist in artists]
		#unsafetitle = title.translate(None,"-_./\\")
		safetitle = re.sub("[^a-zA-Z0-9]","",title)

		if len(artists) < 4:
			unsafeperms = itertools.permutations(artists)
			safeperms = itertools.permutations(safeartists)
		else:
			unsafeperms = [sorted(artists)]
			safeperms = [sorted(safeartists)]

		for unsafeartistlist in unsafeperms:
			filename = "-".join(unsafeartistlist) + "_" + title
			if filename != "":
				filenames.append(filename)
				filenames.append(filename.lower())
		for safeartistlist in safeperms:
			filename = "-".join(safeartistlist) + "_" + safetitle
			if filename != "":
				filenames.append(filename)
				filenames.append(filename.lower())
		filenames = list(set(filenames))
		if len(filenames) == 0: filenames.append(str(hash((frozenset(artists),title))))
	else:
		#unsafeartist = artist.translate(None,"-_./\\")
		safeartist = re.sub("[^a-zA-Z0-9]","",artist)

		filename = artist
		if filename != "":
			filenames.append(filename)
			filenames.append(filename.lower())
		filename = safeartist
		if filename != "":
			filenames.append(filename)
			filenames.append(filename.lower())

		filenames = list(set(filenames))
		if len(filenames) == 0: filenames.append(str(hash(artist)))

	return [superfolder + name for name in filenames]
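To make the candidate generation concrete, two hypothetical calls (result order may vary, since list(set(...)) is unordered):

# artist lookup: "AC/DC" is cleaned to "ACDC", plus a lowercase variant
get_all_possible_filenames(artist="AC/DC")
# -> ["artists/ACDC", "artists/acdc"]

# track lookup: cleaned artist permutations joined with "-", the title appended after "_"
get_all_possible_filenames(artists=["Daft Punk"], title="One More Time")
# -> ["tracks/DaftPunk_OneMoreTime", "tracks/daftpunk_onemoretime"]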
def local_files(artist=None,artists=None,title=None):

	filenames = get_all_possible_filenames(artist,artists,title)

	images = []

	for purename in filenames:
		# direct files
		for ext in ["png","jpg","jpeg","gif"]:
			#for num in [""] + [str(n) for n in range(0,10)]:
			if os.path.exists(data_dir['images'](purename + "." + ext)):
				images.append("/images/" + purename + "." + ext)

		# folder
		try:
			for f in os.listdir(data_dir['images'](purename)):
				if f.split(".")[-1] in ["png","jpg","jpeg","gif"]:
					images.append("/images/" + purename + "/" + f)
		except:
			pass

	return images



# these caches are there so we don't check all files every time, but return the same one
local_cache_age = malojaconfig["LOCAL_IMAGE_ROTATE"]
local_artist_cache = caching.Cache(maxage=local_cache_age)
local_track_cache = caching.Cache(maxage=local_cache_age)

def getTrackImage(artists,title,fast=False):

	hashable_track = (frozenset(artists),title)

	# Prio 1: Local image
	if malojaconfig["USE_LOCAL_IMAGES"]:
		try:
			return local_track_cache.get(hashable_track)
		except:
			images = local_files(artists=artists,title=title)
			if len(images) != 0:
				res = random.choice(images)
				local_track_cache.add(hashable_track,res)
				return urllib.parse.quote(res)

	# Prio 2: Cached remote link
	try:
		result = track_cache.get(hashable_track)
		if result is not None: return result
		# if we have cached the nonexistence of that image, we immediately return
		# the redirect to the artist and let the resolver handle it
		# (even if we're not in a fast lookup right now)
		for a in artists:
			res = getArtistImage(artist=a,fast=True)
			if res != "": return res
		return ""
	except:
		pass

	# fast request will not go further than this, but now generate redirect link
	if fast:
		return ("/image?title=" + urllib.parse.quote(title) + "&" + "&".join(
			"artist=" + urllib.parse.quote(a) for a in artists))

	# Prio 3 (only non-fast lookup): actually call third parties
	result = thirdparty.get_image_track_all((artists,title))
	# cache results (even negative ones)
	track_cache.add(hashable_track,result)
	# return either result or redirect to artist
	if result is not None: return result
	for a in artists:
		res = getArtistImage(artist=a,fast=False)
		if res != "": return res
	return ""


def getArtistImage(artist,fast=False):

	# Prio 1: Local image
	if malojaconfig["USE_LOCAL_IMAGES"]:
		try:
			return local_artist_cache.get(artist)
		except:
			images = local_files(artist=artist)
			if len(images) != 0:
				res = random.choice(images)
				local_artist_cache.add(artist,res)
				return urllib.parse.quote(res)

	# Prio 2: Cached remote link
	try:
		result = artist_cache.get(artist)
		if result is not None: return result
		else: return ""
		# none means non-existence is cached, return empty
	except:
		pass
		# no cache entry, go on

	# fast request will not go further than this, but now generate redirect link
	if fast: return "/image?artist=" + urllib.parse.quote(artist)

	# Prio 3 (only non-fast lookup): actually call third parties
	result = thirdparty.get_image_artist_all(artist)
	# cache results (even negative ones)
	artist_cache.add(artist,result) #cache_artist(artist,result)
	if result is not None: return result
	else: return ""

def getTrackImages(trackobjectlist,fast=False):

	threads = []

	for track in trackobjectlist:
		t = Thread(target=getTrackImage,args=(track["artists"],track["title"],),kwargs={"fast":fast})
		t.start()
		threads.append(t)

	for t in threads:
		t.join()

	return [getTrackImage(t["artists"],t["title"]) for t in trackobjectlist]

def getArtistImages(artistlist,fast=False):

	threads = []

	for artist in artistlist:
		t = Thread(target=getArtistImage,args=(artist,),kwargs={"fast":fast})
		t.start()
		threads.append(t)

	for t in threads:
		t.join()

	# async calls only cached results, now we need to get them
	return [getArtistImage(a) for a in artistlist]



# new way of serving images
# instead always generate a link locally, but redirect that on the fly
# this way the page can load faster and images will trickle in without having to resort to XHTTP requests

def resolveImage(artist=None,track=None):
	if track is not None:
		return getTrackImage(track["artists"],track["title"])
	elif artist is not None:
		return getArtistImage(artist)




def set_image(b64,**keys):
	track = "title" in keys

	log("Trying to set image, b64 string: " + str(b64[:30] + "..."),module="debug")

	regex = r"data:image/(\w+);base64,(.+)"
	type,b64 = re.fullmatch(regex,b64).groups()
	b64 = base64.b64decode(b64)
	filename = "webupload" + str(int(datetime.datetime.now().timestamp())) + "." + type
	for folder in get_all_possible_filenames(**keys):
		if os.path.exists(data_dir['images'](folder)):
			with open(data_dir['images'](folder,filename),"wb") as f:
				f.write(b64)
			break
	else:
		folder = get_all_possible_filenames(**keys)[0]
		os.makedirs(data_dir['images'](folder))
		with open(data_dir['images'](folder,filename),"wb") as f:
			f.write(b64)

	log("Saved image as " + data_dir['images'](folder,filename),module="debug")

	# set as current picture in rotation
	if track: local_track_cache.add((frozenset(keys["artists"]),keys["title"]),os.path.join("/images",folder,filename))
	else: local_artist_cache.add(keys["artist"],os.path.join("/images",folder,filename))
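For reference, the regex in set_image splits a browser-style data URL into the image type and the base64 payload; a quick self-contained illustration (the sample string is made up):

import re, base64

b64 = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUg=="  # sample upload payload
imgtype, payload = re.fullmatch(r"data:image/(\w+);base64,(.+)", b64).groups()
# imgtype == "png"; payload is the raw base64 part
rawbytes = base64.b64decode(payload)  # bytes ready to be written as webupload<timestamp>.png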
@ -1,114 +0,0 @@
from ..__pkginfo__ import VERSION
from ..malojatime import ranges, thisweek, thisyear
from ..globalconf import malojaconfig

from doreah.regular import yearly, daily
from doreah.logging import log

import datetime
import json
import urllib
import itertools



get_track = lambda x:(frozenset(x["track"]["artists"]),x["track"]["title"])
get_artist = lambda x:x["artist"]

def group_by_attribute(sequence,attribute):
	grouped = itertools.groupby(sequence,key=lambda x:x[attribute])
	for attrvalue,members in grouped:
		yield attrvalue,list(members)

def collect_rankings(chart,identify,collection,iteration=None,count=True):
	grouped = group_by_attribute(chart,"rank")
	for rank, members in grouped:
		if not count and rank not in rankmedals: break
		if count and rank != 1: break

		for m in members:
			# get the actual object that we're counting
			entity = identify(m)

			# count no1 spots
			if count:
				collection[entity] = collection.setdefault(entity,0) + 1

			# collect instances of top3 spots
			else:
				medal = rankmedals[rank]
				collection.setdefault(entity,{}).setdefault(medal,[]).append(iteration)


rankmedals = {
	1:'gold',
	2:'silver',
	3:'bronze'
}

@yearly
def update_medals():

	from ..database import MEDALS_ARTISTS, MEDALS_TRACKS, STAMPS, get_charts_artists, get_charts_tracks

	MEDALS_ARTISTS.clear()
	MEDALS_TRACKS.clear()

	for year in ranges(step="year"):
		if year == thisyear(): break

		charts_artists = get_charts_artists(timerange=year)
		charts_tracks = get_charts_tracks(timerange=year)

		collect_rankings(charts_artists,get_artist,MEDALS_ARTISTS,iteration=year,count=False)
		collect_rankings(charts_tracks,get_track,MEDALS_TRACKS,iteration=year,count=False)


@daily
def update_weekly():

	from ..database import WEEKLY_TOPTRACKS, WEEKLY_TOPARTISTS, get_charts_artists, get_charts_tracks

	WEEKLY_TOPARTISTS.clear()
	WEEKLY_TOPTRACKS.clear()

	for week in ranges(step="week"):
		if week == thisweek(): break

		charts_artists = get_charts_artists(timerange=week)
		charts_tracks = get_charts_tracks(timerange=week)

		collect_rankings(charts_artists,get_artist,WEEKLY_TOPARTISTS)
		collect_rankings(charts_tracks,get_track,WEEKLY_TOPTRACKS)


@daily
def send_stats():
	if malojaconfig["SEND_STATS"]:

		log("Sending daily stats report...")

		from ..database import ARTISTS, TRACKS, SCROBBLES

		keys = {
			"url":"https://myrcella.krateng.ch/malojastats",
			"method":"POST",
			"headers":{"Content-Type": "application/json"},
			"data":json.dumps({
				"name":malojaconfig["NAME"],
				"url":malojaconfig["PUBLIC_URL"],
				"version":VERSION,
				"artists":len(ARTISTS),
				"tracks":len(TRACKS),
				"scrobbles":len(SCROBBLES)
			}).encode("utf-8")
		}
		try:
			req = urllib.request.Request(**keys)
			response = urllib.request.urlopen(req)
			log("Sent daily report!")
		except:
			log("Could not send daily report!")
@ -1,24 +0,0 @@
import json


#####
## SERIALIZE
#####

def serialize(obj):
	try:
		return serialize(obj.hashable())
	except:
		try:
			return json.dumps(obj)
		except:
			if isinstance(obj, (list, tuple)):
				return "[" + ",".join(serialize(o) for o in obj) + "]"
			elif isinstance(obj,dict):
				return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
			return json.dumps(obj.hashable())


#if isinstance(obj,list) or if isinstance(obj,tuple):
#	return "[" + ",".join(dumps(o) for o in obj) + "]"
#if isinstance(obj,str)
55
maloja/web/jinja/about.jinja
Normal file
@ -0,0 +1,55 @@
{% extends "abstracts/base.jinja" %}
{% block title %}Maloja - About{% endblock %}

{% block content %}
	<table class="top_info">
		<tr>
			<td class="image">
				<div style="background-image:url('/favicon.png');">
				</div>
			</td>
			<td class="text">
				<h1 class="headerwithextra">Maloja</h1>
				<br/>
				<span>Version {{ pkginfo.VERSION }}</span><br/>
				{# {% if adminmode %}
				<span>Python {{ platform.sys.version }}</span><br/>
				<span>{{ platform.system() }} {{ platform.release() }} ({{ platform.machine() }})</span><br/>
				<br/>
				{% set pid = psutil.os.getpid() %}
				{% set proc = psutil.Process(pid) %}
				CPU:
				{{ proc.cpu_percent() | int }}% Maloja,
				{{ (psutil.getloadavg()[2]/psutil.os.cpu_count() * 100) | int }}% System
				<br/>
				RAM:
				{{ (proc.memory_info().rss / (1024*1024)) | int }}MB Maloja ({{ proc.memory_percent() | int }}%),
				{{ (psutil.virtual_memory().used / (1024*1024)) | int }}MB System ({{ psutil.virtual_memory().percent | int }}%)
				{% endif %} #}

				<p class="stats">
				</p>

			</td>
		</tr>
	</table>

	Get your own Maloja server:
	<br/><br/>
	<a href="https://github.com/krateng/maloja" class="hidelink">
		<img src="https://img.shields.io/github/v/tag/krateng/maloja?label=GitHub&style=for-the-badge" />
	</a>
	<a href="https://pypi.org/project/malojaserver/" class="hidelink">
		<img src="https://img.shields.io/pypi/v/malojaserver?label=PyPI&style=for-the-badge" />
	</a>
	<a href="https://hub.docker.com/r/krateng/maloja" class="hidelink">
		<img src="https://img.shields.io/docker/v/krateng/maloja?label=Docker&style=for-the-badge" />
	</a>
	<br/><br/>
	Maloja is released under the <a class='textlink' href='https://github.com/krateng/maloja/blob/master/LICENSE'>GNU General Public License v3.0</a>.

{% endblock %}
@ -21,10 +21,12 @@
		['setup','Server Setup'],
		['settings','Settings'],
		['apikeys','API Keys'],
		['manual','Manual Scrobbling'],
		['issues','Database Maintenance']
		['manual','Manual Scrobbling']

	] %}
	{# ['import','Scrobble Import'] hide page for now #}
	{# ['import','Scrobble Import'],
	['issues','Database Maintenance']
	hide for now #}
	{% if page=='admin_' + tab_url %}
		<span style="opacity:0.5;">{{ tab_name }}</span>
	{% else %}
@ -11,11 +11,14 @@
	<meta name="darkreader" content="wat" />

	<link rel="stylesheet" href="/style.css" />
	<link rel="stylesheet" href="/static/css/themes/{{ settings.theme }}.css" />

	<script src="/search.js"></script>
	<script src="/neopolitan.js"></script>
	<script src="/upload.js"></script>

	<link rel="preload" href="/static/ttf/Ubuntu-Regular.ttf" as="font" type="font/woff2" crossorigin />

	{% block scripts %}{% endblock %}
</head>

@ -48,10 +51,12 @@

	<div class="footer">
		<div>
			<span>Get your own charts on
			<!--<span>Get your own charts on
				<a target="_blank" rel="noopener noreferrer" href="https://github.com/krateng/maloja">GitHub</a>,
				<a target="_blank" rel="noopener noreferrer" href="https://pypi.org/project/malojaserver/">PyPI</a> or
				<a target="_blank" rel="noopener noreferrer" href="https://hub.docker.com/r/krateng/maloja">Dockerhub</a>
			</span>
			</span>-->
			<span><a href="/about">About</a></span>
		</div>
		<div>
			<a href="/"><span style="font-weight:bold;">Maloja {% if settings["DEV_MODE"] %}[Developer Mode]{% endif %}</span></a>

@ -73,7 +78,7 @@
		</div>
	</div>

	<a href="/admin_overview"><div title="Server Administration" id="settingsicon">
	<a href="/admin_overview"><div title="Server Administration" id="settingsicon" class="clickable_icon">
		<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24"><path d="M17 12.645v-2.289c-1.17-.417-1.907-.533-2.28-1.431-.373-.9.07-1.512.6-2.625l-1.618-1.619c-1.105.525-1.723.974-2.626.6-.9-.374-1.017-1.117-1.431-2.281h-2.29c-.412 1.158-.53 1.907-1.431 2.28h-.001c-.9.374-1.51-.07-2.625-.6l-1.617 1.619c.527 1.11.973 1.724.6 2.625-.375.901-1.123 1.019-2.281 1.431v2.289c1.155.412 1.907.531 2.28 1.431.376.908-.081 1.534-.6 2.625l1.618 1.619c1.107-.525 1.724-.974 2.625-.6h.001c.9.373 1.018 1.118 1.431 2.28h2.289c.412-1.158.53-1.905 1.437-2.282h.001c.894-.372 1.501.071 2.619.602l1.618-1.619c-.525-1.107-.974-1.723-.601-2.625.374-.899 1.126-1.019 2.282-1.43zm-8.5 1.689c-1.564 0-2.833-1.269-2.833-2.834s1.269-2.834 2.833-2.834 2.833 1.269 2.833 2.834-1.269 2.834-2.833 2.834zm15.5 4.205v-1.077c-.55-.196-.897-.251-1.073-.673-.176-.424.033-.711.282-1.236l-.762-.762c-.52.248-.811.458-1.235.283-.424-.175-.479-.525-.674-1.073h-1.076c-.194.545-.25.897-.674 1.073-.424.176-.711-.033-1.235-.283l-.762.762c.248.523.458.812.282 1.236-.176.424-.528.479-1.073.673v1.077c.544.193.897.25 1.073.673.177.427-.038.722-.282 1.236l.762.762c.521-.248.812-.458 1.235-.283.424.175.479.526.674 1.073h1.076c.194-.545.25-.897.676-1.074h.001c.421-.175.706.034 1.232.284l.762-.762c-.247-.521-.458-.812-.282-1.235s.529-.481 1.073-.674zm-4 .794c-.736 0-1.333-.597-1.333-1.333s.597-1.333 1.333-1.333 1.333.597 1.333 1.333-.597 1.333-1.333 1.333z"/></svg>
	</div></a>
@ -33,7 +33,7 @@
	</script>
{% endblock %}

{% set issuedata = dbp.issues() %}
{% set issuedata = dbc.issues() %}

{% block maincontent %}
@ -77,10 +77,13 @@

	<h2>Download Data</h2>

	Download your data to have a backup.<br/><br/>
	Backup your data.<br/><br/>

	<a href="/apis/mlj_1/backup" download="maloja_backup.tar.gz">
		<button type="button">Download</button>
		<button type="button">Backup</button>
	</a>
	<a href="/apis/mlj_1/export" download="maloja_export.json">
		<button type="button">Export</button>
	</a>

@ -89,7 +92,9 @@

	<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://github.com/krateng/maloja/issues/new">Report Issue</a><br/>
	<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://github.com/krateng/maloja/blob/master/README.md">Readme</a><br/>
	<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://pypi.org/project/malojaserver/">PyPi</a><br/>
	<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://pypi.org/project/malojaserver/">PyPi</a> |
	<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://hub.docker.com/r/krateng/maloja">Dockerhub</a> |
	<a class="textlink" target="_blank" rel="noopener noreferrer" href="https://github.com/krateng/maloja">Github</a><br/>
@ -47,14 +47,14 @@
	</script>
{% endblock %}

{% set rulesets = dbp.get_predefined_rulesets() %}
{% set rulesets = dbc.get_predefined_rulesets() %}

{% block maincontent %}

	<h2>Start Scrobbling</h2>

	If you use Vivaldi, Brave, Iridium or any other Chromium-based browser and listen to music on Plex or YouTube Music, download the extension and simply enter the server URL as well as your API key in the relevant fields. They will turn green if the server is accessible.
	If you use a Chromium-based browser and listen to music on Plex, Spotify, Soundcloud, Bandcamp or YouTube Music, download the extension and simply enter the server URL as well as your API key in the relevant fields. They will turn green if the server is accessible.
	<br/><br/>
	You can also use any standard-compliant scrobbler. For GNUFM (audioscrobbler) scrobblers, enter <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/audioscrobbler</span> as your Gnukebox server and your API key as the password. For Listenbrainz scrobblers, use <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/listenbrainz</span> as the API URL and your API key as token.
	<br/><br/>
@ -62,15 +62,20 @@

	<span class="stats"><span name="serverurl">yourserver.tld</span>/apis/mlj_1/newscrobble</span>

	(make sure to use the public URL) with the key-value-pairs
	(make sure to use the public URL) with the following values encoded as JSON:
	<br/>
	<br/>
	<table class="misc">
		<tr> <td>artist</td> <td><i>Artist String</i></td> </tr>
		<tr> <td>title</td> <td><i>Title String</i></td> </tr>
		<tr> <td>key</td> <td><i>API Key</i></td> </tr>
		<tr> <td>time</td> <td><i>UNIX timestamp - optional, defaults to time of request</i></td> </tr>
		<tr> <td>seconds</td> <td><i>Duration of Scrobble - optional and currently not used</i></td> </tr>
		<tr> <td>artists</td> <td><i>List of artist names</i></td> </tr>
		<tr> <td>title</td> <td><i>Song title</i></td> </tr>
		<tr> <td>album</td> <td><i>Album title - optional</i></td> </tr>
		<tr> <td>albumartists</td> <td><i>List of album artists - optional</i></td> </tr>
		<tr> <td>duration</td> <td><i>Duration of play in seconds - optional</i></td> </tr>
		<tr> <td>length</td> <td><i>Full length of the track in seconds - optional</i></td> </tr>
		<tr> <td>time</td> <td><i>UNIX timestamp - optional, defaults to time of request</i></td> </tr>
		<tr> <td>fix</td> <td><i>Set this to false to skip server-side metadata fixing - optional</i></td> </tr>

		<tr> <td>key</td> <td><i>API Key, see <a class="textlink" href="/admin_apikeys">API Keys</a></i></td> </tr>
	</table>
	<br/><br/>
	Finally, you could always <a class="textlink" href="/admin_manual">manually scrobble</a>!
@ -79,7 +84,8 @@

	<h2>Import your Last.FM data</h2>

	Switching from Last.fm? <a class="textlink" href="https://benjaminbenben.com/lastfm-to-csv/">Download all your data</a> and run the command <span class="stats">maloja import <i>(the file you just downloaded)</i></span>.
	Switching from Last.fm? <a class="textlink" href="https://benjaminbenben.com/lastfm-to-csv/">Download all your data</a> and run the command <span class="stats">maloja import <i>(the file you just downloaded)</i></span>.<br/>
	You can also try out <a href="https://github.com/FoxxMD/multi-scrobbler">Multi-Scrobbler</a> to import scrobbles from a wider range of sources.
	<br/><br/>

	<h2>Set up some rules</h2>
@ -1,5 +1,5 @@
{% extends "abstracts/base.jinja" %}
{% block title %}Maloja - {{ artist }}{% endblock %}
{% block title %}Maloja - {{ info.artist }}{% endblock %}

{% import 'snippets/links.jinja' as links %}
{% import 'partials/awards_artist.jinja' as awards %}
@ -9,7 +9,7 @@
{% endblock %}

{% set artist = filterkeys.artist %}
{% set info = db.artistInfo(artist) %}
{% set info = db.artist_info(artist=artist) %}

{% set credited = info.get('replace') %}
{% set included = info.get('associated') %}
@ -39,15 +39,15 @@
	{% if adminmode %}
	<div
		class="changeable-image" data-uploader="b64=>upload('{{ encodedartist }}',b64)"
		style="background-image:url('{{ utilities.getArtistImage(artist=artist,fast=True) }}');"
		style="background-image:url('{{ images.get_artist_image(artist) }}');"
	></div>
	{% else %}
	<div style="background-image:url('{{ utilities.getArtistImage(artist=artist,fast=True) }}');">
	<div style="background-image:url('{{ images.get_artist_image(artist) }}');">
	</div>
	{% endif %}
	</td>
	<td class="text">
		<h1 class="headerwithextra">{{ artist }}</h1>
		<h1 class="headerwithextra">{{ info.artist }}</h1>
		{% if competes %}<span class="rank"><a href="/charts_artists?max=100">#{{ info.position }}</a></span>{% endif %}
		<br/>
		{% if competes and included %}
@ -5,11 +5,11 @@
	<script src="/datechange.js" async></script>
{% endblock %}

{% set charts = dbp.get_charts_artists(filterkeys,limitkeys) %}
{% set charts = dbc.get_charts_artists(filterkeys,limitkeys) %}
{% set pages = math.ceil(charts.__len__() / amountkeys.perpage) %}
{% if charts[0] is defined %}
	{% set topartist = charts[0].artist %}
	{% set img = utilities.getArtistImage(artist=topartist,fast=True) %}
	{% set img = images.get_artist_image(topartist) %}
{% else %}
	{% set img = "/favicon.png" %}
{% endif %}
@ -7,11 +7,11 @@
	<script src="/datechange.js" async></script>
{% endblock %}

{% set charts = dbp.get_charts_tracks(filterkeys,limitkeys) %}
{% set charts = dbc.get_charts_tracks(filterkeys,limitkeys) %}
{% set pages = math.ceil(charts.__len__() / amountkeys.perpage) %}
{% if charts[0] is defined %}
	{% set toptrack = charts[0].track %}
	{% set img = utilities.getTrackImage(artists=toptrack.artists,title=toptrack.title,fast=True) %}
	{% set img = images.get_track_image(toptrack) %}
{% else %}
	{% set img = "/favicon.png" %}
{% endif %}
@ -1,112 +0,0 @@
{% extends "abstracts/base.jinja" %}
{% block title %}Maloja - Compare{% endblock %}

{% import 'snippets/links.jinja' as links %}

{% block scripts %}
<style>
	.comparecircle {
		height:500px;
		width:500px;
		border-radius:250px;
		border: 1px solid rgba(245,245,220,0.3);
		margin:auto;
		margin-top:100px;
		text-align:center;
		line-height:500px;
		font-size:60px;
		color:black;
		/* background-image: linear-gradient(to right,KEY_CIRCLE_CSS); */
	}

	table tr td:first-child {
		text-align: left;
		padding:10px;
		width:33%;
	}
	table tr td {
		text-align: center;
		padding:10px;
	}

	table tr td:last-child {
		text-align: right;
		padding:10px;
		width:33%;
	}
</style>
{% endblock %}

{% set data = db.compare(specialkeys.remote) %}
{% set comparedata = data.result %}
{% set info = data.info %}
{% set bestartist = data.commonartist %}
<!--
{% set categories =
	{
		"unique_self":"rgba(255,255,255,0.2)",
		"more_self":"rgba(255,255,255,0.5)",
		"common":"white",
		"more_other":"rgba(255,255,255,0.5)",
		"unique_other":"rgba(255,255,255,0.2)"
	}
%}-->
<!--
{% set css = [] %}
{% set cumulative = 0 %}
{% for cat in categories %}
	{% set cumulative = cumulative + (comparedata[cat][1]*100) %}
	{% set _ = css.append(categories[cat] + " " + cumulative.__str__() + "%") %}
{% endfor %}-->

{% set fullmatch = comparedata.common[1]*100 %}
{% set partialmatch = comparedata.more_self[1]*100 + comparedata.more_other[1]*100 %}

{% set match = fullmatch + (partialmatch)/2 %}
{% set pixel_fullmatch = fullmatch * 2.5 %}
{% set pixel_partialmatch = (fullmatch+partialmatch) * 2.5 %}

{% set match = [match,100] | min %}

{% set r = [255*match/50,255] | min %}
{% set g = [255*match/50,255] | min %}
{% set b = [255*(match/50-1),0] | max %}
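A quick worked example of the match arithmetic above, mirrored in plain Python with made-up shares (common 45%, more_self 20%, more_other 10%):

common, more_self, more_other = 0.45, 0.20, 0.10

fullmatch = common * 100                               # 45.0
partialmatch = (more_self + more_other) * 100          # 30.0

match = min(fullmatch + partialmatch / 2, 100)         # 60.0, shown as "60.0%"
pixel_fullmatch = fullmatch * 2.5                      # 112.5px solid core of the circle
pixel_partialmatch = (fullmatch + partialmatch) * 2.5  # 187.5px fuzzy edge

r = min(255 * match / 50, 255)                         # 255
g = min(255 * match / 50, 255)                         # 255
b = max(255 * (match / 50 - 1), 0)                     # 51.0 -> a yellowish circle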
{% block content %}
	<table style="width:99%;">
		<tr>
			<td><h1>{{ info.ownname }}</h1></td>
			<td>
				<div class="comparecircle"
					style="background-image: radial-gradient(rgb({{ r }},{{ g }}, {{ b }}) {{ pixel_fullmatch }}px, transparent {{ pixel_partialmatch }}px);">
					{{ match | round(1) }}%
				</div>
			</td>
			<td><h1>{{ info.remotename }}</h1></td>
		</tr>
		<tr>
			<td></td>
			<td style="font-size:70%;color:grey;">
				The size of the circle shows matching music taste.
				The fuzziness of its border indicates differences in quantity.
			</td>
			<td></td>
		</tr>
		<tr>
			<td></td>
			<td>
				<span>Common Favorite</span>
				<h2 style="margin:7px;">{{ links.link(bestartist) }}</h2>
				<img src="{{ utilities.getArtistImage(bestartist) }}" style="width:80px;" />
			</td>
			<td></td>
		</tr>
	</table>

{% endblock %}
@ -55,13 +55,19 @@
{% macro certs(artist) %}

	<!-- CERTS -->
	{% for track in db.get_tracks(artist=artist) -%}
		{% set info = db.trackInfo(track) %}
		{% if info.certification is not none -%}
			<a href='{{ links.url(track) }}'><img class="certrecord_small"
				src="/media/record_{{ info.certification }}.png"
				title="{{ track.title }} has reached {{ info.certification.capitalize() }} status" /></a>

	{% set charts = db.get_charts_tracks(artist=artist,timerange=malojatime.alltime()) %}
	{% for e in charts -%}
		{%- if e.scrobbles >= settings.scrobbles_gold -%}{% set cert = 'gold' %}{%- endif -%}
		{%- if e.scrobbles >= settings.scrobbles_platinum -%}{% set cert = 'platinum' %}{%- endif -%}
		{%- if e.scrobbles >= settings.scrobbles_diamond -%}{% set cert = 'diamond' %}{%- endif -%}

		{%- if cert -%}
			<a href='{{ links.url(e.track) }}'><img class="certrecord_small"
				src="/media/record_{{ cert }}.png"
				title="{{ e.track.title }} has reached {{ cert.capitalize() }} status" /></a>
		{%- endif %}

	{%- endfor %}

{%- endmacro %}
@ -53,7 +53,7 @@

	<!-- CERTS -->

	{% set info = db.trackInfo(track) %}
	{% set info = db.track_info(track=track) %}
	{% if info.certification is not none %}
		<img class="certrecord"
			src="/media/record_{{ info.certification }}.png"
@ -2,7 +2,7 @@
{% import 'snippets/entityrow.jinja' as entityrow %}

{% if charts is undefined %}
	{% set charts = dbp.get_charts_artists(limitkeys) %}
	{% set charts = dbc.get_charts_artists(limitkeys) %}
{% endif %}

{% if compare %}
@ -11,7 +11,7 @@
	{% if compare is none %}{% set compare = False %}{% endif %}
{% endif %}
{% if compare %}
	{% set prevartists = dbp.get_charts_artists({'timerange':compare}) %}
	{% set prevartists = dbc.get_charts_artists({'timerange':compare}) %}

	{% set lastranks = {} %}
	{% for a in prevartists %}

@ -2,7 +2,7 @@


{% if charts is undefined %}
	{% set charts = dbp.get_charts_artists(limitkeys) %}
	{% set charts = dbc.get_charts_artists(limitkeys) %}
{% endif %}

{% set charts_14 = charts | fixlength(14) %}
@ -23,7 +23,7 @@
	{% set rank = entry.rank %}
	<td>
		<a href="{{ links.url(artist) }}">
			<div style='background-image:url("{{ utilities.getArtistImage(artist,fast=True) }}")'>
			<div style='background-image:url("{{ images.get_artist_image(artist) }}")'>
				<span class='stats'>#{{ rank }}</span> <span>{{ artist }}</span>
			</div>
		</a>

@ -2,7 +2,7 @@
{% import 'snippets/entityrow.jinja' as entityrow %}

{% if charts is undefined %}
	{% set charts = dbp.get_charts_tracks(filterkeys,limitkeys) %}
	{% set charts = dbc.get_charts_tracks(filterkeys,limitkeys) %}
{% endif %}
{% if compare %}
	{% if compare is true %}
@ -10,7 +10,7 @@
	{% if compare is none %}{% set compare = False %}{% endif %}
{% endif %}
{% if compare %}
	{% set prevtracks = dbp.get_charts_tracks(filterkeys,{'timerange':compare}) %}
	{% set prevtracks = dbc.get_charts_tracks(filterkeys,{'timerange':compare}) %}

	{% set lastranks = {} %}
	{% for t in prevtracks %}

@ -2,7 +2,7 @@


{% if charts is undefined %}
	{% set charts = dbp.get_charts_tracks(filterkeys,limitkeys) %}
	{% set charts = dbc.get_charts_tracks(filterkeys,limitkeys) %}
{% endif %}

{% set charts_14 = charts | fixlength(14) %}
@ -23,7 +23,7 @@
	{% set rank = entry.rank %}
	<td>
		<a href="{{ links.url(track) }}">
			<div style='background-image:url("{{ utilities.getTrackImage(track.artists,track.title,fast=True) }}")'>
			<div style='background-image:url("{{ images.get_track_image(track) }}")'>
				<span class='stats'>#{{ rank }}</span> <span>{{ track.title }}</span>
			</div>
		</a>

@ -1,6 +1,6 @@
{% import 'snippets/links.jinja' as links %}

{% set ranges = dbp.get_performance(filterkeys,limitkeys,delimitkeys) %}
{% set ranges = dbc.get_performance(filterkeys,limitkeys,delimitkeys) %}

{% set minrank = ranges|map(attribute="rank")|reject("none")|max|default(60) %}
{% set minrank = minrank + 20 %}

@ -1,6 +1,6 @@
{% import 'snippets/links.jinja' as links %}

{% set ranges = dbp.get_pulse(filterkeys,limitkeys,delimitkeys) %}
{% set ranges = dbc.get_pulse(filterkeys,limitkeys,delimitkeys) %}

{% set maxbar = ranges|map(attribute="scrobbles")|max|default(1) %}
{% if maxbar < 1 %}{% set maxbar = 1 %}{% endif %}

@ -1,4 +1,4 @@
{% set scrobbles = dbp.get_scrobbles(filterkeys,limitkeys,amountkeys) %}
{% set scrobbles = dbc.get_scrobbles(filterkeys,limitkeys,amountkeys) %}

{% set firstindex = amountkeys.page * amountkeys.perpage %}
{% set lastindex = firstindex + amountkeys.perpage %}
@ -6,12 +6,33 @@
{% import 'snippets/entityrow.jinja' as entityrow %}

<script src="/edit.js"></script>

<table class='list'>
	{% for s in scrobbles -%}
		{%- if loop.index0 >= firstindex and loop.index0 < lastindex -%}
		<tr>
			<td class='time'>{{ malojatime.timestamp_desc(s["time"],short=shortTimeDesc) }}</td>
			{{ entityrow.row(s) }}
			{{ entityrow.row(s.track) }}
			{% if adminmode %}
			<td class='delete_area'>
				<span class="confirmactions">
					<button class="smallbutton warning" onclick="deleteScrobble({{ s.time }},this)">Confirm</button>
					<button class="smallbutton" onclick="toggleDeleteConfirm(this)">Cancel</button>
				</span>

				<span class="initializeactions">
					<div class='deleteicon clickable_icon danger' onclick="toggleDeleteConfirm(this)">
						<svg style="width:14px;height:14px" viewBox="0 0 24 24">
							<path d="M19,4H15.5L14.5,3H9.5L8.5,4H5V6H19M6,19A2,2 0 0,0 8,21H16A2,2 0 0,0 18,19V7H6V19Z" />
						</svg>
					</div>
				</span>
			</td>
			{% endif %}
		</tr>
		{%- endif -%}
	{% endfor %}

@ -1,7 +1,7 @@
{% import 'snippets/links.jinja' as links %}
{% import 'snippets/entityrow.jinja' as entityrow %}

{% set ranges = dbp.get_top_artists(limitkeys,delimitkeys) %}
{% set ranges = dbc.get_top_artists(limitkeys,delimitkeys) %}

{% set maxbar = ranges|map(attribute="scrobbles")|max|default(1) %}
{% if maxbar < 1 %}{% set maxbar = 1 %}{% endif %}
Some files were not shown because too many files have changed in this diff