Compare commits

...

160 Commits

Author SHA1 Message Date
CDrummond
9d9117e8f7 Allow "export" !!! 2025-03-24 17:07:26 +00:00
CDrummond
417ac5f652 Add option to preserve file modification time when writing tags.
Issue #21
2025-03-24 07:17:51 +00:00
CDrummond
342440f04b Add action to export analysis results from DB to files.
Issue #20
2025-03-24 06:47:35 +00:00
CraigD
85a2e985bf
Merge pull request #19 from Polochon-street/add-rpi-feature
Add rpi feature
2025-03-20 17:46:51 +00:00
Polochon_street
c999297430 Add rpi feature 2025-03-20 18:13:03 +01:00
CDrummond
db9b12be3f Update 2025-03-17 18:16:39 +00:00
CDrummond
2468a83d4b Reduce code duplication 2025-03-17 17:55:24 +00:00
CDrummond
22149c2349 Document build steps for Pi 2025-03-16 17:04:07 +00:00
CDrummond
0dda8a17e7 bliss-rs 0.10.0 2025-03-16 08:17:48 +00:00
CDrummond
a2ff1aade1 Specify git rev until release made 2025-03-15 10:59:40 +00:00
CDrummond
471063b53d Copy docs and example config for mac builds! 2025-03-15 10:22:34 +00:00
CDrummond
94011d12ea _Slightly_ slower 2025-03-15 09:33:32 +00:00
CDrummond
2210c44614 Quiten symphonia log messages 2025-03-15 09:27:35 +00:00
CDrummond
00be6015f7 Symphonia is not actually that slow... 2025-03-15 09:24:52 +00:00
CDrummond
e4522cc683 Add symphonia builds 2025-03-14 12:17:52 +00:00
CDrummond
5c7b33a5d3 Use correct name 2025-03-09 15:22:34 +00:00
CDrummond
6fd78df3ef Fix build 2025-03-09 13:26:39 +00:00
CDrummond
f23202e956 Document tags option 2025-03-09 13:26:21 +00:00
CDrummond
7ea6c3befd Add excplcit 'ffmpeg' feature flag. 2025-03-09 08:02:37 +00:00
CDrummond
302609ed1b Update lofty version 2025-03-05 19:10:47 +00:00
CDrummond
5e5bd9841a ...and use ffprobe when updating tags. 2025-03-05 18:43:05 +00:00
CDrummond
32610f22b7 Use ffprobe to read meta-data if not compiled against libav
Closes #18
2025-03-05 18:39:54 +00:00
CDrummond
751dca7091 Enable DSF support for libav builds 2025-03-05 17:58:19 +00:00
CDrummond
ac87ebb66c When saving anlysis results to a comment tag, remove any previous
results.
2025-03-05 17:40:25 +00:00
CDrummond
98b9cd61e0 Only stop when found correct BLISS_ANALYSIS comment 2025-03-05 17:37:51 +00:00
CDrummond
786b7d2c2d If log level set to 'trace' then set this level for the Bliss library too.
Issue #17
2025-03-05 17:09:52 +00:00
CDrummond
1e00e9593a Spelling 2025-03-05 17:01:48 +00:00
CDrummond
0d877fe79e Save analysis in a new COMMENT tag.
Closes #4
2025-03-04 21:57:06 +00:00
CDrummond
6cc26c399e Add option to write analysis results to files, and use for future scans.
Issue #4
2025-03-04 20:04:37 +00:00
CDrummond
bff4ba18b4 Update (C) year 2025-03-04 19:17:24 +00:00
CDrummond
6fc9ac7f2e Fix output format 2025-03-04 19:15:12 +00:00
CDrummond
3a387be3bf If new files analysed and 'ignore' file exists then update DB's 'ignore' flags.
Closes #9
2025-03-04 19:06:59 +00:00
CDrummond
2cb7dc0fa0 Add ability to specify LMS JSONRPC port.
Closes #16
2025-03-04 19:01:20 +00:00
CDrummond
b91c2edafd Spelling 2025-03-04 18:06:33 +00:00
CDrummond
f559abb395 Mention statically linked variant. 2025-03-04 17:59:15 +00:00
CDrummond
cca929a13e Need to use libav with staticlibav 2025-03-04 17:40:02 +00:00
CDrummond
6dcd61d9bb Remove bliss-analyser-mac-libav 2025-03-04 13:51:08 +00:00
CDrummond
f8ea9947ac Nope, cant build ffmpeg for macOS :( 2025-03-04 13:46:13 +00:00
CDrummond
b275568000 Static libav failed on macOS, try dynamic? 2025-03-04 13:25:45 +00:00
CDrummond
0249bf8edb nasm for macOS 2025-03-04 13:10:11 +00:00
CDrummond
6d5b7c80ef Fix syntax 2025-03-04 12:48:44 +00:00
CDrummond
b078736224 staticlibav 2025-03-04 12:47:21 +00:00
CDrummond
a610dab7c0 ...and for other binary! 2025-03-03 22:09:42 +00:00
CDrummond
05fb19e2f9 update-aubio-bindings 3rd try... 2025-03-03 22:06:00 +00:00
CDrummond
375abe91d0 bliss-audio-aubio-sys/bindgen? 2025-03-03 22:01:39 +00:00
CDrummond
f9984e8ca0 bindgen? 2025-03-03 21:57:16 +00:00
CDrummond
73d4ae72cd Fix job IDs 2025-03-03 21:48:00 +00:00
CDrummond
92c2a90d25 Fix indentation 2025-03-03 21:45:09 +00:00
CDrummond
dbe36c35db Fat binaries for macOS 2025-03-02 22:56:15 +00:00
CDrummond
decbbd18e5 Update 2025-03-02 22:55:55 +00:00
CDrummond
36f44d6550 Mention libav/ffmpeg variants 2025-03-02 22:54:57 +00:00
CDrummond
01be5d107a Put libav/ffmpeg last 2025-03-02 22:44:48 +00:00
CDrummond
388e571571 Rename jobs, and artifacts, to match OS 2025-03-02 22:35:17 +00:00
CDrummond
5f247ef10f Fix build, as it was copied from ARM scripts. 2025-03-02 22:14:51 +00:00
CDrummond
eb7214644a Oops! Should not have been added! 2025-03-02 21:50:00 +00:00
CDrummond
25362b9635 Rename jobs 2025-03-03 21:38:21 +00:00
CDrummond
4a98282ce2 Fix names, again! 2025-03-03 21:37:01 +00:00
CDrummond
331d770d46 Fix target 2025-03-03 21:28:43 +00:00
CDrummond
0d1c33b131 Fix command 2025-03-03 21:23:01 +00:00
CDrummond
c9d72b3f71 Fix artifact name 2025-03-03 21:22:50 +00:00
CDrummond
bfd73ea0bc actions/upload-artifact@v4 2025-03-03 21:15:34 +00:00
CDrummond
b643272d1f Update versions 2025-03-03 21:13:51 +00:00
CDrummond
9321be0ad5 Build libav and ffmpeg variants 2025-03-03 18:29:06 +00:00
CDrummond
55f944fbdd Remove macOS deps 2025-03-03 18:09:30 +00:00
CDrummond
d7d7c87582 Re-enable mac builds, but use ffmpeg from commandline 2025-03-03 17:34:42 +00:00
CDrummond
4e6d522829 Update github workflow scripts 2025-03-03 17:33:12 +00:00
CDrummond
25f399f932 Document libav feature 2025-03-03 17:26:38 +00:00
CDrummond
a5db8f48ce Reduce duplication 2025-03-03 17:14:31 +00:00
CDrummond
136651ada7 Add cargo features flag to enable libav usage. 2025-03-03 17:06:09 +00:00
CDrummond
e5ef67f8c6 Sort directory items, and do limiting when collecting files to analyse. 2025-03-02 22:10:55 +00:00
CDrummond
6b9cb960a9 Add CUE support for commandline ffmpeg usage. 2025-03-02 20:57:59 +00:00
CDrummond
c6f9a7faf5 Only invoke "stty sane" if not Windows. 2025-03-02 20:56:47 +00:00
CDrummond
2ac4b9d17f Avoid pushing and reallocating all the time. 2025-03-02 17:29:17 +00:00
CDrummond
7071ea14e7 Reset terminal 2025-03-02 09:40:57 +00:00
CDrummond
e23aba88e3 Have ffmpeg output to a pipe, and read from that - saves creating temp
files.
2025-03-02 08:57:08 +00:00
CDrummond
5fa745975c Check ffmpeg is in PATH 2025-03-01 16:06:07 +00:00
CDrummond
22133436b9 Use 'ffmpeg' commandline to decode files, and not ffmpeg libraries. 2025-03-01 15:45:47 +00:00
CDrummond
68ed1fc3b0 Update version of bliss library. 2024-11-02 08:58:50 +00:00
CDrummond
3b465543d7 Update 2024-04-28 13:32:20 +01:00
CraigD
56ed0b9045
Merge pull request #13 from terual/wavpack
Add support for (DSD) WavPack
2024-04-28 12:30:47 +00:00
Bart Lauret
62c956bc78 Add support for (DSD) WavPack
- Added wv as valid extension
- lofty had incorrect duration scanning for DSD Wavpack in version 0.15.0, so version bumped to 0.16.0
2024-04-28 11:55:34 +02:00
CraigD
a880fff993
Merge pull request #12 from chincheta0815/docker_ubuntu
make ubuntu a docker build for ffmpeg5
2024-02-22 17:37:47 +00:00
chincheta0815
777d8e3c6c make ubuntu a docker build for ffmpeg5 2024-02-22 07:00:03 +01:00
CraigD
319f72d2a4
Merge pull request #11 from chincheta0815/bookworm_and_bullseye
add builds for bullseye which requires newer ffmpeg
2024-02-16 17:34:06 +00:00
chincheta0815
297ed3304e add builds for bullseye which requires newer ffmpeg 2024-02-16 15:13:06 +01:00
CDrummond
04baedbfc2 Don't try to download ffmpeg5 or macOS builds - currently broken 2023-08-11 17:16:41 +01:00
CDrummond
609b6faff9 So, ffmpeg5 ppa is now paywalled so cannot use. 2023-08-10 21:25:21 +01:00
CDrummond
fe65d35066 Revert ffmpeg apt steps, and disable macOS builds (failing!) 2023-08-10 21:07:23 +01:00
CDrummond
75aeee128f Use 22.04 for ffmpeg5? 2023-08-10 20:50:46 +01:00
CDrummond
65e40acce4 Update bliss and lofty libraries 2023-08-10 20:08:17 +01:00
CDrummond
a9f819f39b Update version of bliss library. 2023-02-28 15:48:16 +00:00
CDrummond
4704624b2f Add option to limit number of concurrent threads. 2023-02-28 15:41:01 +00:00
CDrummond
c3083d9a1a Update (C) year 2023-01-02 09:28:26 +00:00
CDrummond
90d4dfec23 Nope, revert back to bullseye as build fails on stretch 2022-12-20 21:50:30 +00:00
CDrummond
e89aee1f4e Build on stretch? 2022-12-20 21:40:58 +00:00
CDrummond
703e39508c Update lofty to 0.9.0 2022-12-20 09:25:45 +00:00
CDrummond
eb16d8379d Update bliss-rs 2022-10-22 11:49:39 +01:00
CDrummond
a220b4d0c5 Update bliss-rs and lofty-rs 2022-10-04 17:06:52 +01:00
CDrummond
6d61616dda Update lofty version 2022-08-21 08:55:23 +01:00
CDrummond
d85424d27c Update lofty to 0.7.3 2022-08-04 22:26:32 +01:00
CDrummond
144c8a7ffe Update version of tag reader library. 2022-07-03 20:17:36 +01:00
CDrummond
ab0adc96ca Show analysis results on a new line after progress. Just show
"Finished!" as last progress message.
Issue #5
2022-06-19 17:53:24 +01:00
CDrummond
7cf62c3ad0 Fix chmod call 2022-06-18 10:18:46 +01:00
CDrummond
d9841b3527 Copy files within docker 2022-06-18 10:00:10 +01:00
CDrummond
3b6872b2a0 Place ARM binaries in bin sub-folder 2022-06-18 09:05:00 +01:00
CDrummond
45b20902d6 Merge branch 'master' into linux-arm 2022-06-18 08:44:47 +01:00
CDrummond
9feb80bbd1 Download arm builds 2022-06-18 08:44:17 +01:00
CDrummond
b9b8fdabfc Add script to launch arm for correct arch 2022-06-18 08:40:17 +01:00
CDrummond
a818628706 Add other deps 2022-06-18 08:05:44 +01:00
CDrummond
370c533a49 Revery to using docker, and add changes from chincheta0815 - #3 2022-06-18 07:55:54 +01:00
CDrummond
9aae7825d0 No sudo on bullseye either... 2022-06-16 20:18:25 +01:00
CDrummond
ebddf8febe Use sudo for mount 2022-06-16 19:35:43 +01:00
CDrummond
143fb2bf62 bullseye? 2022-06-16 19:34:42 +01:00
CDrummond
323978cbeb tmpfs? 2022-06-16 19:26:22 +01:00
CDrummond
a749c03195 source $HOME/.cargo/env 2022-06-16 19:16:42 +01:00
CDrummond
bfd2a44e2e Download rust? #3 2022-06-16 19:05:46 +01:00
CDrummond
943227c16b Update run-on-arch-action version. #3 2022-06-16 18:32:39 +01:00
CDrummond
b7259cd0ae Install cargo? #3 2022-06-16 18:01:19 +01:00
CDrummond
f023faa336 Alternate method 2022-06-13 13:21:25 +01:00
CDrummond
074be07d18 Rust 2021? 2022-06-13 13:08:18 +01:00
CDrummond
a12971df17 Split docker runs? 2022-06-13 12:53:32 +01:00
CDrummond
8ba10a0f8e nightly? 2022-06-13 12:51:09 +01:00
CDrummond
cf0e6cf5fb edition2021 requires nightly 2022-06-13 12:47:57 +01:00
CDrummond
de2c2dfaa3 Add "cargo-features = ["edition2021"]" at top 2022-06-13 12:39:44 +01:00
CDrummond
1a45791e58 cargo-features = ["edition2021"] ???? 2022-06-13 12:24:23 +01:00
CDrummond
b5f35dcf36 Build for Linux ARM 2022-06-13 12:02:25 +01:00
CDrummond
cd12e805c3 More compact 2022-05-28 17:50:19 +01:00
CDrummond
fa87a7e135 Fix checking if CUE already analysed.
Issue #2
2022-05-28 17:24:09 +01:00
CDrummond
df17032377 Fix Job names 2022-05-26 12:08:52 +01:00
CDrummond
e47c2b477e Single run command 2022-05-26 12:07:18 +01:00
CDrummond
133e3e2204 Produce ffmpeg 4 and 5 builds for Linux 2022-05-26 12:04:15 +01:00
CraigD
3dec74d328
Mention password protected LMS access 2022-05-21 10:11:17 +01:00
CDrummond
cb323f3efe Increment version 2022-05-21 10:04:10 +01:00
CDrummond
69303d62c6 Update lofty 2022-05-21 09:30:09 +01:00
CDrummond
f05e11da2e Add Fedora deps instructions 2022-05-21 09:24:00 +01:00
Craig Drummond
37947564fc Remove mention of temporary files, as this is no longer relevant. 2022-04-24 08:51:07 +01:00
Craig Drummond
8a5d9bc276 Don't need ffmpeg.exe for Windows builds, as bliss-rs does cue splitting 2022-04-24 08:35:27 +01:00
Craig Drummond
467e341cfb Update version of bliss-rs, this now handles CUE processing internally. 2022-04-18 20:20:32 +01:00
Craig Drummond
2788759829 More 'cargo fmt' reverts 2022-04-15 09:03:47 +01:00
Craig Drummond
0653e365b8 Undo some 'cargo fmt' changes. 2022-04-13 21:32:44 +01:00
Craig Drummond
985902325e Minor formatting mods 2022-04-13 21:07:09 +01:00
Craig Drummond
f3903383c9 Update version of bliss-rs 2022-04-12 21:17:29 +01:00
Craig Drummond
f13601f1c2 loft-rs 0.6.1 2022-04-10 20:27:29 +01:00
CDrummond
dfb87881c9 Add colon 2022-04-06 13:23:41 +01:00
CDrummond
b1f89050e2 Mention that must run from terminal window 2022-04-06 13:22:47 +01:00
CDrummond
94ef2e2c1f Show error if can't open/create database 2022-04-06 13:22:30 +01:00
CDrummond
7fb2e66c58 Update version of tag reader library, should now support ID3v2 in FLAC. 2022-04-06 13:10:05 +01:00
CDrummond
9f8ea6e490 Fix indentation 2022-03-23 20:16:27 +00:00
CDrummond
0aa10918ae Tidy up code, thanks to Serial-ATA 2022-03-23 20:15:54 +00:00
CraigD
caa8f124a7
Merge pull request #1 from Serial-ATA/master
Cleanup
2022-03-23 07:34:44 +00:00
Serial
dabbfdb206 More typos
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 19:08:50 -04:00
Serial
182d980395 Typos
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 19:07:59 -04:00
Serial
c51de47696 Missing derive
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 18:05:39 -04:00
Serial
f6edb983d3 Cleanup upload module
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 17:49:27 -04:00
Serial
625248fbc6 Cleanup tags module
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 17:44:40 -04:00
Serial
37d9f627ad Cleanup db module
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 17:41:02 -04:00
Serial
b61aee0a6a Cleanup cue module
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 17:32:31 -04:00
Serial
51ce6cadc7 Cleanup analyse module
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 17:25:03 -04:00
Serial
b097f06b5f Add Metadata::is_empty
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 17:05:33 -04:00
Serial
e0a6759af5 fmt
Signed-off-by: Serial <69764315+Serial-ATA@users.noreply.github.com>
2022-03-22 17:02:08 -04:00
27 changed files with 2722 additions and 1066 deletions

View File

@ -7,19 +7,19 @@ env:
CARGO_TERM_COLOR: always
jobs:
Linux:
runs-on: ubuntu-20.04
Linux_ffmpeg_x86:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v1
uses: actions/checkout@v4
- name: Packages
run: sudo apt-get update && sudo apt-get install build-essential yasm libavutil-dev libavcodec-dev libavformat-dev libavfilter-dev libavfilter-dev libavdevice-dev libswresample-dev libfftw3-dev ffmpeg
- name: Build
run: sudo apt-get update && sudo apt-get install build-essential yasm -y
- name: Build x86 ffmpeg version
run: |
cargo build --release
cargo build --release --features=ffmpeg
strip target/release/bliss-analyser
mkdir releases
cp target/release/bliss-analyser releases/bliss-analyser
@ -28,14 +28,340 @@ jobs:
cp configs/linux.ini releases/config.ini
- name: Upload artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-linux
name: bliss-analyser-linux-x86-ffmpeg
path: releases/
macOS:
runs-on: macos-11.0
Linux_static_x86:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Packages
run: sudo apt-get update && sudo apt-get install build-essential yasm -y
- name: Build x86 static-libav version
run: |
cargo build --release --features=libav,staticlibav
strip target/release/bliss-analyser
mkdir releases
cp target/release/bliss-analyser releases/bliss-analyser
cp UserGuide.md releases/README.md
cp LICENSE releases/
cp configs/linux.ini releases/config.ini
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-linux-x86-static
path: releases/
Ubuntu_2204_libav_x86:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Packages
run: sudo apt-get update && sudo apt-get install build-essential yasm libavutil-dev libavcodec-dev libavformat-dev libavfilter-dev libavdevice-dev libswresample-dev libfftw3-dev ffmpeg -y
- name: Build 22.04 libav version
run: |
cargo build --release --features=libav
strip target/release/bliss-analyser
mkdir releases
cp target/release/bliss-analyser releases/bliss-analyser
cp UserGuide.md releases/README.md
cp LICENSE releases/
cp configs/linux.ini releases/config.ini
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-ubuntu-22.04-x86-libav
path: releases/
Ubuntu_2404_libav_x86:
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Packages
run: sudo apt-get update && sudo apt-get install build-essential yasm libavutil-dev libavcodec-dev libavformat-dev libavfilter-dev libavdevice-dev libswresample-dev libfftw3-dev ffmpeg -y
- name: Build 24.04 libav version
run: |
cargo build --release --features=libav
strip target/release/bliss-analyser
mkdir releases
cp target/release/bliss-analyser releases/bliss-analyser
cp UserGuide.md releases/README.md
cp LICENSE releases/
cp configs/linux.ini releases/config.ini
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-ubuntu-24.04-x86-libav
path: releases/
Linux_symphonia_x86:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Packages
run: sudo apt-get update && sudo apt-get install build-essential yasm -y
- name: Build x86 static-libav version
run: |
cargo build --release --features=symphonia
strip target/release/bliss-analyser
mkdir releases
cp target/release/bliss-analyser releases/bliss-analyser
cp UserGuide.md releases/README.md
cp LICENSE releases/
cp configs/linux.ini releases/config.ini
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-linux-x86-symphonia
path: releases/
Linux_ffmpeg_arm:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Build ARM ffmpeg on Debian
run: |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_ffmpeg
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-linux-arm-ffmpeg
path: releases/
Linux_static_arm:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Build ARM static-libav on Debian
run: |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_static
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-linux-arm-static
path: releases/
Debian_libav_arm:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Build ARM libav on Bullseye
run: |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_libav
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-debian-bullseye-arm-libav
path: releases/
- name: Build ARM libav on Bookworm
run : |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bookworm_libav
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-debian-bookworm-arm-libav
path: releases/
Linux_symphonia_arm:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Build ARM symphonia on Debian
run: |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_symphonia
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-linux-arm-symphonia
path: releases/
macOS_ffmpeg:
runs-on: macos-13
steps:
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
- name: Checkout
uses: actions/checkout@v2
- name: Install Rust support for ARM64 & prepare environment
run: |
rustup target add aarch64-apple-darwin
mkdir releases
- name: Build
run: |
cargo build --release --features ffmpeg,update-aubio-bindings
strip target/release/bliss-analyser
cp target/release/bliss-analyser releases/bliss-analyser-x86_64
cargo build --target=aarch64-apple-darwin --release --features ffmpeg,update-aubio-bindings
strip target/aarch64-apple-darwin/release/bliss-analyser
cp target/aarch64-apple-darwin/release/bliss-analyser releases/bliss-analyser-arm64
cp UserGuide.md releases/README.md
cp LICENSE releases/
cp configs/macos.ini releases/config.ini
- name: Build fat binary
run: |
lipo -create \
-arch x86_64 releases/bliss-analyser-x86_64 \
-arch arm64 releases/bliss-analyser-arm64 \
-output releases/bliss-analyser
- name: Remove unused binaries
run:
rm releases/bliss-analyser-x86_64 releases/bliss-analyser-arm64
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-mac-ffmpeg
path: releases/
# macOS_staticlibav:
# runs-on: macos-13
#
# steps:
# - name: Install Rust
# uses: actions-rs/toolchain@v1
# with:
# toolchain: stable
#
# - name: Checkout
# uses: actions/checkout@v2
#
# - name: Install deps
# run: |
# brew install pkg-config ffmpeg nasm
#
# - name: Install Rust support for ARM64 & prepare environment
# run: |
# rustup target add aarch64-apple-darwin
# mkdir releases
#
# - name: Build
# run: |
# cargo build --release --features update-aubio-bindings,libav,staticlibav
# strip target/release/bliss-analyser
# cp target/release/bliss-analyser releases/bliss-analyser-x86_64
# cargo build --target=aarch64-apple-darwin --release --features update-aubio-bindings,libav,staticlibav
# strip target/aarch64-apple-darwin/release/bliss-analyser
# cp target/aarch64-apple-darwin/release/bliss-analyser releases/bliss-analyser-arm64
# cp UserGuide.md releases/README.md
# cp LICENSE releases/
# cp configs/macos.ini releases/config.ini
#
# - name: Build fat binary
# run: |
# lipo -create \
# -arch x86_64 releases/bliss-analyser-x86_64 \
# -arch arm64 releases/bliss-analyser-arm64 \
# -output releases/bliss-analyser
#
# - name: Remove unused binaries
# run:
# rm releases/bliss-analyser-x86_64 releases/bliss-analyser-arm64
#
# - name: Upload artifacts
# uses: actions/upload-artifact@v4
# with:
# name: bliss-analyser-mac-static
# path: releases/
# macOS_libav:
# runs-on: macos-13
#
# steps:
# - name: Install Rust
# uses: actions-rs/toolchain@v1
# with:
# toolchain: stable
#
# - name: Checkout
# uses: actions/checkout@v2
#
# - name: Install deps
# run: |
# brew install pkg-config ffmpeg
#
# - name: Build
# run: |
# mkdir releases
# cargo build --release
# strip target/release/bliss-analyser
# cp target/release/bliss-analyser releases/bliss-analyser
# cp UserGuide.md releases/README.md
# cp LICENSE releases/
# cp configs/macos.ini releases/config.ini
#
# - name: Upload artifacts
# uses: actions/upload-artifact@v4
# with:
# name: bliss-analyser-mac-libav
# path: releases/
macOS_symphonia:
runs-on: macos-13
steps:
- name: Install Rust
@ -48,26 +374,44 @@ jobs:
- name: Install deps
run: |
brew install ffmpeg@5
brew install pkg-config
- name: Install Rust support for ARM64 & prepare environment
run: |
rustup target add aarch64-apple-darwin
mkdir releases
- name: Build
run: |
mkdir releases
cargo build --release
cargo build --release --features update-aubio-bindings,symphonia
strip target/release/bliss-analyser
cp target/release/bliss-analyser releases/bliss-analyser
cp target/release/bliss-analyser releases/bliss-analyser-x86_64
cargo build --target=aarch64-apple-darwin --release --features update-aubio-bindings,symphonia
strip target/aarch64-apple-darwin/release/bliss-analyser
cp target/aarch64-apple-darwin/release/bliss-analyser releases/bliss-analyser-arm64
cp UserGuide.md releases/README.md
cp LICENSE releases/
cp configs/macos.ini releases/config.ini
- name: Build fat binary
run: |
lipo -create \
-arch x86_64 releases/bliss-analyser-x86_64 \
-arch arm64 releases/bliss-analyser-arm64 \
-output releases/bliss-analyser
- name: Remove unused binaries
run:
rm releases/bliss-analyser-x86_64 releases/bliss-analyser-arm64
- name: Upload artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-mac
name: bliss-analyser-mac-symphonia
path: releases/
Windows:
Windows_libav:
runs-on: windows-2019
steps:
@ -94,18 +438,48 @@ jobs:
- name: Build
run: |
cargo build --release
cargo build --release --features=libav
mkdir releases
cp target/release/bliss-analyser.exe releases/bliss-analyser.exe
cp deps/bin/*.dll releases/
cp deps/bin/ffmpeg.exe releases/
cp UserGuide.md releases/README.md
cp LICENSE releases/
cp configs/windows.ini releases/config.ini
cp c:\Windows\system32\vcruntime140.dll releases
- name: Upload artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-windows
name: bliss-analyser-windows-libav
path: releases/
Windows_symphonia:
runs-on: windows-2019
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
components: rustfmt, clippy
- name: Build
run: |
cargo build --release --features=symphonia
mkdir releases
cp target/release/bliss-analyser.exe releases/bliss-analyser.exe
cp UserGuide.md releases/README.md
cp LICENSE releases/
cp configs/windows.ini releases/config.ini
cp c:\Windows\system32\vcruntime140.dll releases
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-windows-symphonia
path: releases/

1237
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,8 +1,8 @@
[package]
name = "bliss-analyser"
version = "0.1.0"
version = "0.4.0"
authors = ["Craig Drummond <craig.p.drummond@gmail.com>"]
edition = "2018"
edition = "2021"
license = "GPL-3.0-only"
description = "Analyse audio files with bliss-rs"
repository = "https://github.com/CDrummond/bliss-analyser"
@ -10,22 +10,36 @@ keywords = ["audio", "song", "similarity"]
readme = "README.md"
[dependencies]
bliss-audio = { git = "https://github.com/Polochon-street/bliss-rs", rev = "5f366b0" }
argparse = "0.2.2"
anyhow = "1.0.40"
rusqlite = { version = "0.25.0", features = ["bundled"] }
rusqlite = { version = "0.28.0", features = ["bundled"] }
log = "0.4.14"
env_logger = "0.8.4"
indicatif = "0.16.2"
lofty = { git = "https://github.com/Serial-ATA/lofty-rs", rev = "45182b6" }
lofty = "0.22.2"
dirs = "1"
chrono = "0.4.19"
chrono = "0.4.40"
regex = "1"
substring = "1.4.5"
ureq = "2.4.0"
configparser = "3.0.0"
rcue = { git = "https://github.com/gyng/rcue" }
hhmmss = "0.1.0"
if_chain = "1.0.2"
num_cpus = "1.13.0"
tempdir = "0.3.7"
subprocess = "0.2.8"
which = { version = "7.0.2", optional = true }
rcue = { version = "0.1.3", optional = true }
hhmmss = { version = "0.1.0", optional = true }
[features]
libav = ["bliss-audio/ffmpeg"]
update-aubio-bindings = ["bliss-audio/update-aubio-bindings"]
staticlibav = ["bliss-audio/build-ffmpeg", "bliss-audio/ffmpeg-static"]
ffmpeg = ["dep:which", "dep:rcue", "dep:hhmmss"]
symphonia = ["bliss-audio/symphonia-all", "bliss-audio/symphonia-aiff", "bliss-audio/symphonia-alac"]
rpi = ["bliss-audio/rpi"]
[dependencies.bliss-audio]
default-features = false
features = ["aubio-static"]
version = "0.10.0"
#git = "https://github.com/Polochon-street/bliss-rs.git"
#rev = "006927ac16752ff2e00bfe0d6b7756f67fa822c0"

View File

@ -1,3 +1,45 @@
0.4.0
-----
1. Add action to export results from database to files.
2. Add option to preserve file modification time when writing tags.
0.3.0
-----
1. Add support for (DSD) WavPack - thanks to Bart Lauret
2. Update version of tag reader library.
3. Update version of bliss library.
4. Allow builds to either use dynamic ffmpeg libraries, static ffmpeg
libraries, symphonia, or ffmpeg on commandline.
5. Add ability to specify LMS JSONRPC port.
6. If new files analysed and 'ignore' file exists then update DB's 'ignore'
flags.
7. Add option to write analysis results to files, and use for future scans.
8. If log level set to 'trace' then set this level for the bliss library too.
9. Enable support for '.dsf' files.
0.2.3
-----
1. Add option to limit number of concurrent threads.
2. Update version of tag reader library.
3. Update version of bliss library.
0.2.2
-----
1. Update version of tag reader library.
2. Update version of bliss library.
0.2.1
-----
1. Update version of tag reader library.
2. Fix checking if CUE already analysed.
0.2.0
-----
1. Tidy up code, thanks to Serial-ATA
2. Update version of tag reader library, should now support ID3v2 in FLAC.
3. Show error message if can't open, or create, database file.
4. Update version of bliss-rs, this now handles CUE processing internally.
0.1.0
-----
1. Add support for analysing CUE files.

106
README.md
View File

@ -7,7 +7,31 @@ bliss analysis. This is then intended to be used by [Bliss Mixer](https://github
# Building
clang, pkg-config, and ffmpeg are required to build, as well as
This application can be built in 4 variants:
1. Using `libavcodec`, etc, to decode files.
2. Using `libavcodec`, etc, to decode files, but statically linked to `libavcodec`, etc.
3. Using `symphonia` to decode files.
4. Using command-line `ffmpeg` to decode files.
`libavcodec` is the fastest (~15% faster than `symphonia`, ~50% faster than `ffmpeg`
commandline), but might have issues with library, versioning, etc., unless these
libraries are statically linked in. `libavcodec` statically linked may reduce supported
file formats, but is more portable.
`symphonia` also produces a more portable application, is only slightly slower to decode
files, but has more limited codec support, and does not produce identical analysis results.
Therefore, it is not advisable to mix files analysed with `ffmpeg` (any variant) and
`symphonia`.
Command-line `ffmpeg` whilst being the slowest, produces a more portable application, and
supports a wider range of codecs.
## Build for 'libavcodec' library usage
`clang`, `pkg-config`, and `ffmpeg` are required to build, as well as
[Rust](https://www.rust-lang.org/tools/install)
To install dependencies on a Debian system:
@ -16,7 +40,85 @@ To install dependencies on a Debian system:
apt install -y clang libavcodec-dev libavformat-dev libavutil-dev libavfilter-dev libavdevice-dev pkg-config
```
Build with `cargo build --release`
To install dependencies on a Fedora system:
```
dnf install ffmpeg-devel clang pkg-config
```
Build with `cargo build --release --features=libav`
If building on a Raspberry Pi, then `rpi` also needs to be passed to `--features`, e.g.
`cargo build --release --features=libav,rpi`
The resultant application will be less portable, due to dependencies on `libavcodec` libraries (and
their dependencies).
## Build for 'libavcodec' library usage, statically linked
`clang`, `pkg-config`, and `ffmpeg` are required to build, as well as
[Rust](https://www.rust-lang.org/tools/install)
To install dependencies on a Debian system:
```
apt install -y clang libavcodec-dev libavformat-dev libavutil-dev libavfilter-dev libavdevice-dev pkg-config
```
To install dependencies on a Fedora system:
```
dnf install ffmpeg-devel clang pkg-config
```
Build with `cargo build --release --features=libav,staticlibav`
If building on a Raspberry Pi, then `rpi` also needs to be passed to `--features`, e.g.
`cargo build --release --features=libav,staticlibav,rpi`
## Build for 'symphonia'
`clang`, and `pkg-config` are required to build, as well as
[Rust](https://www.rust-lang.org/tools/install)
To install dependencies on a Debian system:
```
apt install -y clang pkg-config
```
To install dependencies on a Fedora system:
```
dnf install clang pkg-config
```
Build with `cargo build --release --features=symphonia`
## Build for 'ffmpeg' command-line usage
`clang` and `pkg-config` are required to build, as well as
[Rust](https://www.rust-lang.org/tools/install)
To install dependencies on a Debian system:
```
apt install -y clang pkg-config
```
To install dependencies on a Fedora system:
```
dnf install clang pkg-config
```
Build with `cargo build --release --features=ffmpeg`
`ffmpeg` is then a run-time dependency, and should be installed on any system where this application
is to be run - it should also be in the users `$PATH`
# Usage

View File

@ -6,6 +6,36 @@ upload its database of music analysis to LMS. The `Bliss Mixer` LMS plugin can
then use this information to provide music mixes for LMS's `Don't Stop the Music`
feature.
**NOTE:** You must run this application from a terminal window (e.g. cmd.exe or
PowerShell for Windows), as there is no graphical user interface.
Variants
--------
`bliss-analyser` can be built to support using either the `ffmpeg` libraries
(`libavcodec`, etc.), `symphonia` library, or invoking the `ffmpeg` command
itself.
If the package used ended with `-libav` then `bliss-analyser` has been built
with the `ffmpeg` libraries. This allows faster decoding of files, but will
require the exact `ffmpeg` library versions to be on your system. (These
libraries are usually provided with the Windows build).
If the package used ended with `-static` then `bliss-analyser` has been built
with the `ffmpeg` libraries - but these have been statically linked. This
allows faster decoding of files, and a more portable binary - however, this
_may_ reduce the number of supported file formats.
If the package used ended with `-symphonia` then `bliss-analyser` has been built
with the `symphonia` libraries. This allows a more portable binary, but at a
slightly slower decoding speed (than `libav`) and produces analysis results that
are not the same as those produced by `ffmpeg`/`libav`.
If the package used ended with `-ffmpeg`, then `bliss-analyser` requires you
also have the `ffmpeg` application installed and in your `$PATH`. These
builds are roughly 50% slower at analysis, but are more portable as they can
use (almost) any `ffmpeg` version.
Quick guide
@ -13,9 +43,10 @@ Quick guide
1. Install the `Bliss Mixer` LMS plugin.
2. Install ffmpeg if using Linux or macOS.
2. Install `ffmpeg` if using Linux or macOS (and using `-libav` or `-ffmpeg`
package (see `Variants` (above))).
3. Edit the supplied `config.ini` in the current folder to set appropiate values
3. Edit the supplied `config.ini` in the current folder to set appropriate values
for `music` and `lms` - e.g.:
```
[Bliss]
@ -43,8 +74,10 @@ Installation
============
For Windows no extra installation steps are required, as all dependencies are
bundled within its ZIP file. However, both the Linux and macOS versions require
that `ffmpeg` be installed.
bundled within its ZIP file. However, if using a `-libav` or `-ffmpeg` package (see
`Variants` (above)), both the Linux and macOS versions require that `ffmpeg` be
installed - if using a `-static` or `-symphonia` package, then no additional
dependencies are used.
Linux
@ -98,6 +131,7 @@ music=/home/user/Music
db=bliss.db
lms=127.0.0.1
ignore=ignore.txt
tags=true
```
The following items are supported:
@ -107,10 +141,17 @@ music folders may be specified via `music_1`, `music_2`, `music_3`, and `music_4
* `db` specifies the name and location of the database file used to store the
analysis results. This will default to `bliss.db` in the current folder.
* `lms` specifies the hostname, or IP address, of your LMS server. This is used
when uploading the database file to LMS. This defaults to `127.0.0.1`
when uploading the database file to LMS. This defaults to `127.0.0.1` If your LMS is
password protected then use `user:pass@server` - e.g. `lms=pi:abc123@127.0.0.1`
* `json` specifies the JSONRPC port number of your LMS server. This will default to
9000.
* `ignore` specifies the name and location of a file containing items to ignore
in mixes. See the `Ignore` section later on for more details.
* `tags` specifies whether analysis results should be written to, and re-read from,
files. Set to `true` or `false`. If enabled, then results are stored in a `COMMENT`
tag that starts with `BLISS_ANALYSIS`
* `preserve` specifies whether file modification time should be preserved when
writing tags. Set to `true` or `false`.
Command-line parameters
@ -131,9 +172,12 @@ analysis will be performed, instead the logging will inform you how many new
tracks are to be analysed and how many old tracks are left in the database.
* `-i` / `--ignore` Name and location of the file containing items to ignore.
* `-L` / `--lms` Hostname, or IP address, of your LMS server.
* `-J` / `--json` JSONRPC port number of your LMS server.
* `-n` / `--numtracks` Specify maximum number of tracks to analyse.
* `-T` / `--tags` Write analysis results to file tags, and read from file tags.
* `-p` / `--preserve` Attempt to preserve file modification time when writing tags.
Equivalent items specied in the INI config file (detailed above) will override
Equivalent items specified in the INI config file (detailed above) will override
any specified on the commandline.
`bliss-analyser` requires one extra parameter, which is used to determine the
@ -146,6 +190,7 @@ required task. This takes the following values:
any changes.
* `ignore` Reads the `ignore` file and updates the database to flag tracks as
to be ignored for mixes.
* `export` Exports tags from DB and stores within the audio files.
@ -178,11 +223,9 @@ tracks/hour.
CUE files
---------
If the anlyser encounters an audio file with a matching CUE file (e.g.
If the analyser encounters an audio file with a matching CUE file (e.g.
`album.flac` and `album.cue` in same folder) then it will attempt to analyse the
individual tracks contained within. To do this the analyser uses `ffmpeg` to
create temporary files of each track, which are then analysed. (These temporary
files are removed afterwards).
individual tracks contained within.
Exclude folders
@ -301,7 +344,7 @@ The SQL LIKE lines do sub-string matching. So '%Dance%' will match any genre
string that contains 'Dance' - e.g. 'Classical Dance'. The 4 lines with 'Rock'
show how you can explicitly look for an exact match. The 1st line means 'Rock'
is the only genre, 2nd means 'Rock' is the first genre, 3rd means 'Rock' is the
last genre, and 4th means 'Rock' is amongs other genres.
last genre, and 4th means 'Rock' is amongst other genres.
Assuming `config.ini` is in the current folder and contains valid entries, this
is accomplished as follows:
@ -318,6 +361,27 @@ is accomplished as follows:
Exporting Analysis
==================
If you have analysis results stored within the SQLite DB, and not within the files
themselves, then you can use the `export` action to copy these analysis results from
the DB and into the files.
(Linux / macOS)
```
./bliss-analyser export
```
(Windows)
```
.\bliss-analyser.exe export
```
*NOTE* Exporting of analysis results is not implemented for CUE tracks.
Credits
=======

View File

@ -0,0 +1,34 @@
# Cross compilation environment for bliss-analyser
# Debian bookworm base targeting arm64 (aarch64) and armhf, for builds
# using the external `ffmpeg` command feature (no libav dev packages needed).
FROM debian:bookworm
RUN dpkg --add-architecture arm64 && \
dpkg --add-architecture armhf
RUN apt-get update
RUN apt-get install -y curl git yasm
RUN apt-get install -y build-essential clang
RUN apt-get install -y crossbuild-essential-armhf crossbuild-essential-arm64
# Per-architecture pkg-config so cross builds resolve foreign-arch libraries
RUN apt-get install -y pkg-config:arm64
RUN apt-get install -y pkg-config:armhf
# Install the Rust toolchain non-interactively
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin/:${PATH}"
RUN rustup target add aarch64-unknown-linux-gnu && \
rustup target add arm-unknown-linux-gnueabihf
# Point cargo at the cross linkers for each target
RUN mkdir /.cargo && \
echo '[target.aarch64-unknown-linux-gnu]\nlinker = "aarch64-linux-gnu-gcc"' > /.cargo/config && \
echo '[target.arm-unknown-linux-gnueabihf]\nlinker = "arm-linux-gnueabihf-gcc"' >> /.cargo/config
# Keep build artefacts and cargo cache outside the bind-mounted source tree
RUN mkdir /build
ENV CARGO_TARGET_DIR /build
ENV CARGO_HOME /build/cache
RUN mkdir /src
WORKDIR /src
CMD ["/src/docker/docker-build-arm-ffmpeg.sh"]

View File

@ -0,0 +1,38 @@
# Cross compilation environment for bliss-analyser
# Debian bookworm base targeting arm64 (aarch64) and armhf, linked against
# the distro's ffmpeg/libav development packages.
FROM debian:bookworm
RUN dpkg --add-architecture arm64 && \
dpkg --add-architecture armhf
RUN apt-get update
RUN apt-get install -y curl git yasm
RUN apt-get install -y build-essential clang
RUN apt-get install -y crossbuild-essential-armhf crossbuild-essential-arm64
# ffmpeg/libav dev packages for each foreign architecture
RUN apt-get install -y pkg-config:arm64 \
libavutil-dev:arm64 libavcodec-dev:arm64 libavformat-dev:arm64 \
libavfilter-dev:arm64 libavdevice-dev:arm64 libswresample-dev:arm64 libfftw3-dev:arm64
RUN apt-get install -y pkg-config:armhf \
libavutil-dev:armhf libavcodec-dev:armhf libavformat-dev:armhf \
libavfilter-dev:armhf libavdevice-dev:armhf libswresample-dev:armhf libfftw3-dev:armhf
# Install the Rust toolchain non-interactively
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin/:${PATH}"
RUN rustup target add aarch64-unknown-linux-gnu && \
rustup target add arm-unknown-linux-gnueabihf
# Point cargo at the cross linkers for each target
RUN mkdir /.cargo && \
echo '[target.aarch64-unknown-linux-gnu]\nlinker = "aarch64-linux-gnu-gcc"' > /.cargo/config && \
echo '[target.arm-unknown-linux-gnueabihf]\nlinker = "arm-linux-gnueabihf-gcc"' >> /.cargo/config
# Keep build artefacts and cargo cache outside the bind-mounted source tree
RUN mkdir /build
ENV CARGO_TARGET_DIR /build
ENV CARGO_HOME /build/cache
RUN mkdir /src
WORKDIR /src
CMD ["/src/docker/docker-build-arm-libav.sh"]

View File

@ -0,0 +1,30 @@
# Cross compilation environment for bliss-analyser
# Debian bullseye base targeting arm64 (aarch64) and armhf, for builds
# using the external `ffmpeg` command feature.
FROM debian:bullseye
RUN dpkg --add-architecture arm64 && \
dpkg --add-architecture armhf
RUN apt-get update
RUN apt-get install -y curl git pkg-config yasm
RUN apt-get install -y build-essential clang
RUN apt-get install -y crossbuild-essential-armhf crossbuild-essential-arm64
# Install the Rust toolchain non-interactively
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin/:${PATH}"
RUN rustup target add aarch64-unknown-linux-gnu && \
rustup target add arm-unknown-linux-gnueabihf
# Point cargo at the cross linkers for each target
RUN mkdir /.cargo && \
echo '[target.aarch64-unknown-linux-gnu]\nlinker = "aarch64-linux-gnu-gcc"' > /.cargo/config && \
echo '[target.arm-unknown-linux-gnueabihf]\nlinker = "arm-linux-gnueabihf-gcc"' >> /.cargo/config
# Keep build artefacts and cargo cache outside the bind-mounted source tree
RUN mkdir /build
ENV CARGO_TARGET_DIR /build
ENV CARGO_HOME /build/cache
RUN mkdir /src
WORKDIR /src
CMD ["/src/docker/docker-build-arm-ffmpeg.sh"]

View File

@ -0,0 +1,36 @@
# Cross compilation environment for bliss-analyser
# Debian bullseye base targeting arm64 (aarch64) and armhf, linked against
# the distro's ffmpeg/libav development packages.
FROM debian:bullseye
RUN dpkg --add-architecture arm64 && \
dpkg --add-architecture armhf
RUN apt-get update
RUN apt-get install -y curl git pkg-config yasm
RUN apt-get install -y build-essential clang
RUN apt-get install -y crossbuild-essential-armhf crossbuild-essential-arm64
# ffmpeg/libav dev packages for each foreign architecture
RUN apt-get install -y libavutil-dev:arm64 libavcodec-dev:arm64 libavformat-dev:arm64 \
libavfilter-dev:arm64 libavdevice-dev:arm64 libswresample-dev:arm64 libfftw3-dev:arm64
RUN apt-get install -y libavutil-dev:armhf libavcodec-dev:armhf libavformat-dev:armhf \
libavfilter-dev:armhf libavdevice-dev:armhf libswresample-dev:armhf libfftw3-dev:armhf
# Install the Rust toolchain non-interactively
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin/:${PATH}"
RUN rustup target add aarch64-unknown-linux-gnu && \
rustup target add arm-unknown-linux-gnueabihf
# Point cargo at the cross linkers for each target
RUN mkdir /.cargo && \
echo '[target.aarch64-unknown-linux-gnu]\nlinker = "aarch64-linux-gnu-gcc"' > /.cargo/config && \
echo '[target.arm-unknown-linux-gnueabihf]\nlinker = "arm-linux-gnueabihf-gcc"' >> /.cargo/config
# Keep build artefacts and cargo cache outside the bind-mounted source tree
RUN mkdir /build
ENV CARGO_TARGET_DIR /build
ENV CARGO_HOME /build/cache
RUN mkdir /src
WORKDIR /src
CMD ["/src/docker/docker-build-arm-libav.sh"]

View File

@ -0,0 +1,36 @@
# Cross compilation environment for bliss-analyser
# Debian bullseye base targeting arm64 (aarch64) and armhf; the libav dev
# packages are installed here but statically linked by the build script.
FROM debian:bullseye
RUN dpkg --add-architecture arm64 && \
dpkg --add-architecture armhf
RUN apt-get update
RUN apt-get install -y curl git pkg-config yasm
RUN apt-get install -y build-essential clang
RUN apt-get install -y crossbuild-essential-armhf crossbuild-essential-arm64
# ffmpeg/libav dev packages for each foreign architecture
RUN apt-get install -y libavutil-dev:arm64 libavcodec-dev:arm64 libavformat-dev:arm64 \
libavfilter-dev:arm64 libavdevice-dev:arm64 libswresample-dev:arm64 libfftw3-dev:arm64
RUN apt-get install -y libavutil-dev:armhf libavcodec-dev:armhf libavformat-dev:armhf \
libavfilter-dev:armhf libavdevice-dev:armhf libswresample-dev:armhf libfftw3-dev:armhf
# Install the Rust toolchain non-interactively
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin/:${PATH}"
RUN rustup target add aarch64-unknown-linux-gnu && \
rustup target add arm-unknown-linux-gnueabihf
# Point cargo at the cross linkers for each target
RUN mkdir /.cargo && \
echo '[target.aarch64-unknown-linux-gnu]\nlinker = "aarch64-linux-gnu-gcc"' > /.cargo/config && \
echo '[target.arm-unknown-linux-gnueabihf]\nlinker = "arm-linux-gnueabihf-gcc"' >> /.cargo/config
# Keep build artefacts and cargo cache outside the bind-mounted source tree
RUN mkdir /build
ENV CARGO_TARGET_DIR /build
ENV CARGO_HOME /build/cache
RUN mkdir /src
WORKDIR /src
CMD ["/src/docker/docker-build-arm-staticlibav.sh"]

View File

@ -0,0 +1,30 @@
# Cross compilation environment for bliss-analyser
# Debian bullseye base targeting arm64 (aarch64) and armhf, for the pure-Rust
# `symphonia` decoder feature (no ffmpeg/libav packages required).
FROM debian:bullseye
RUN dpkg --add-architecture arm64 && \
dpkg --add-architecture armhf
RUN apt-get update
RUN apt-get install -y curl git pkg-config yasm
RUN apt-get install -y build-essential clang
RUN apt-get install -y crossbuild-essential-armhf crossbuild-essential-arm64
# Install the Rust toolchain non-interactively
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin/:${PATH}"
RUN rustup target add aarch64-unknown-linux-gnu && \
rustup target add arm-unknown-linux-gnueabihf
# Point cargo at the cross linkers for each target
RUN mkdir /.cargo && \
echo '[target.aarch64-unknown-linux-gnu]\nlinker = "aarch64-linux-gnu-gcc"' > /.cargo/config && \
echo '[target.arm-unknown-linux-gnueabihf]\nlinker = "arm-linux-gnueabihf-gcc"' >> /.cargo/config
# Keep build artefacts and cargo cache outside the bind-mounted source tree
RUN mkdir /build
ENV CARGO_TARGET_DIR /build
ENV CARGO_HOME /build/cache
RUN mkdir /src
WORKDIR /src
CMD ["/src/docker/docker-build-arm-symphonia.sh"]

View File

@ -0,0 +1,25 @@
# Native x86 build environment for bliss-analyser
# Ubuntu 24.04 base, linked against the distro's ffmpeg/libav dev packages.
FROM ubuntu:24.04
RUN apt-get update
RUN apt-get install -y curl git pkg-config
RUN apt-get install -y build-essential yasm clang
# ffmpeg/libav dev packages plus the ffmpeg binary itself
RUN apt-get install -y libavutil-dev libavcodec-dev libavformat-dev \
libavfilter-dev libavdevice-dev libswresample-dev libfftw3-dev \
ffmpeg
# Install the Rust toolchain non-interactively
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin/:${PATH}"
# Keep build artefacts and cargo cache outside the bind-mounted source tree
RUN mkdir /build
ENV CARGO_TARGET_DIR /build
ENV CARGO_HOME /build/cache
RUN mkdir /src
WORKDIR /src
CMD ["/src/docker/docker-build-x86-libav.sh"]

View File

@ -0,0 +1,29 @@
#!/bin/bash
## #!/usr/bin/env bash
#
# Cross-build bliss-analyser (external `ffmpeg` feature) for armhf and
# aarch64, then assemble a release tree under /src/releases.
# Intended to run inside the matching Docker cross-build image.
#
set -eux
uname -a
DESTDIR=/src/releases
mkdir -p $DESTDIR/bin
rm -rf $DESTDIR/bin/*
# build <rust-target-triple> <strip-command> <dest-path-relative-to-DESTDIR>
# Skips compilation if the binary already exists in the cargo target dir,
# then strips it and copies it into the release tree.
function build {
echo Building for $1 to $3...
if [[ ! -f /build/$1/release/bliss-analyser ]]; then
export RUST_BACKTRACE=full
# Per-architecture pkg-config wrapper, e.g. arm-linux-gnueabihf-pkg-config
export PKG_CONFIG=${1//unknown-/}-pkg-config
BINDGEN_EXTRA_CLANG_ARGS="--sysroot /usr/${1//unknown-/}" cargo build --release --features=ffmpeg --target $1
fi
$2 /build/$1/release/bliss-analyser && cp /build/$1/release/bliss-analyser $DESTDIR/$3
}
build arm-unknown-linux-gnueabihf arm-linux-gnueabihf-strip bin/bliss-analyser-armhf
build aarch64-unknown-linux-gnu aarch64-linux-gnu-strip bin/bliss-analyser-aarch64
# Bundle docs, license, example config, and the arch-selecting launcher
cp UserGuide.md $DESTDIR/README.md
cp LICENSE $DESTDIR/
cp configs/linux.ini $DESTDIR/config.ini
cp scripts/bliss-analyser-arm $DESTDIR/bliss-analyser

View File

@ -0,0 +1,29 @@
#!/bin/bash
## #!/usr/bin/env bash
#
# Cross-build bliss-analyser (`libav` feature, dynamically linked) for armhf
# and aarch64, then assemble a release tree under /src/releases.
# Intended to run inside the matching Docker cross-build image.
#
set -eux
uname -a
DESTDIR=/src/releases
mkdir -p $DESTDIR/bin
rm -rf $DESTDIR/bin/*
# build <rust-target-triple> <strip-command> <dest-path-relative-to-DESTDIR>
# Skips compilation if the binary already exists in the cargo target dir,
# then strips it and copies it into the release tree.
function build {
echo Building for $1 to $3...
if [[ ! -f /build/$1/release/bliss-analyser ]]; then
export RUST_BACKTRACE=full
# Per-architecture pkg-config wrapper, e.g. arm-linux-gnueabihf-pkg-config
export PKG_CONFIG=${1//unknown-/}-pkg-config
BINDGEN_EXTRA_CLANG_ARGS="--sysroot /usr/${1//unknown-/}" cargo build --release --features=libav --target $1
fi
$2 /build/$1/release/bliss-analyser && cp /build/$1/release/bliss-analyser $DESTDIR/$3
}
build arm-unknown-linux-gnueabihf arm-linux-gnueabihf-strip bin/bliss-analyser-armhf
build aarch64-unknown-linux-gnu aarch64-linux-gnu-strip bin/bliss-analyser-aarch64
# Bundle docs, license, example config, and the arch-selecting launcher
cp UserGuide.md $DESTDIR/README.md
cp LICENSE $DESTDIR/
cp configs/linux.ini $DESTDIR/config.ini
cp scripts/bliss-analyser-arm $DESTDIR/bliss-analyser

View File

@ -0,0 +1,29 @@
#!/bin/bash
## #!/usr/bin/env bash
#
# Cross-build bliss-analyser (`libav` feature, statically linked via
# `staticlibav`) for armhf and aarch64, then assemble a release tree under
# /src/releases. Intended to run inside the matching Docker cross-build image.
#
set -eux
uname -a
DESTDIR=/src/releases
mkdir -p $DESTDIR/bin
rm -rf $DESTDIR/bin/*
# build <rust-target-triple> <strip-command> <dest-path-relative-to-DESTDIR>
# Skips compilation if the binary already exists in the cargo target dir,
# then strips it and copies it into the release tree.
function build {
echo Building for $1 to $3...
if [[ ! -f /build/$1/release/bliss-analyser ]]; then
export RUST_BACKTRACE=full
# Per-architecture pkg-config wrapper, e.g. arm-linux-gnueabihf-pkg-config
export PKG_CONFIG=${1//unknown-/}-pkg-config
BINDGEN_EXTRA_CLANG_ARGS="--sysroot /usr/${1//unknown-/}" cargo build --release --features=libav,staticlibav --target $1
fi
$2 /build/$1/release/bliss-analyser && cp /build/$1/release/bliss-analyser $DESTDIR/$3
}
build arm-unknown-linux-gnueabihf arm-linux-gnueabihf-strip bin/bliss-analyser-armhf
build aarch64-unknown-linux-gnu aarch64-linux-gnu-strip bin/bliss-analyser-aarch64
# Bundle docs, license, example config, and the arch-selecting launcher
cp UserGuide.md $DESTDIR/README.md
cp LICENSE $DESTDIR/
cp configs/linux.ini $DESTDIR/config.ini
cp scripts/bliss-analyser-arm $DESTDIR/bliss-analyser

View File

@ -0,0 +1,29 @@
#!/bin/bash
## #!/usr/bin/env bash
#
# Cross-build bliss-analyser (`symphonia` decoder feature) for armhf and
# aarch64, then assemble a release tree under /src/releases.
# Intended to run inside the matching Docker cross-build image.
#
set -eux
uname -a
DESTDIR=/src/releases
mkdir -p $DESTDIR/bin
rm -rf $DESTDIR/bin/*
# build <rust-target-triple> <strip-command> <dest-path-relative-to-DESTDIR>
# Skips compilation if the binary already exists in the cargo target dir,
# then strips it and copies it into the release tree.
function build {
echo Building for $1 to $3...
if [[ ! -f /build/$1/release/bliss-analyser ]]; then
export RUST_BACKTRACE=full
# Per-architecture pkg-config wrapper, e.g. arm-linux-gnueabihf-pkg-config
export PKG_CONFIG=${1//unknown-/}-pkg-config
BINDGEN_EXTRA_CLANG_ARGS="--sysroot /usr/${1//unknown-/}" cargo build --release --features=symphonia --target $1
fi
$2 /build/$1/release/bliss-analyser && cp /build/$1/release/bliss-analyser $DESTDIR/$3
}
build arm-unknown-linux-gnueabihf arm-linux-gnueabihf-strip bin/bliss-analyser-armhf
build aarch64-unknown-linux-gnu aarch64-linux-gnu-strip bin/bliss-analyser-aarch64
# Bundle docs, license, example config, and the arch-selecting launcher
cp UserGuide.md $DESTDIR/README.md
cp LICENSE $DESTDIR/
cp configs/linux.ini $DESTDIR/config.ini
cp scripts/bliss-analyser-arm $DESTDIR/bliss-analyser

View File

@ -0,0 +1,18 @@
#!/bin/bash
## #!/usr/bin/env bash
#
# Native x86 build of bliss-analyser (`libav` feature); assembles a release
# tree under /src/releases. Intended to run inside the x86 Docker image.
#
set -eux
uname -a
DESTDIR=/src/releases
# NOTE(review): bin/ is created but never populated in this script — looks
# carried over from the ARM variants; confirm whether it is still needed.
mkdir -p $DESTDIR/bin
rm -rf $DESTDIR/bin/*
export RUST_BACKTRACE=full
cargo build --release --features=libav
# Strip the binary and copy it plus docs, license and example config
strip /build/release/bliss-analyser && cp /build/release/bliss-analyser $DESTDIR/bliss-analyser
cp UserGuide.md $DESTDIR/README.md
cp LICENSE $DESTDIR/
cp configs/linux.ini $DESTDIR/config.ini

View File

@ -3,7 +3,7 @@
#
# LMS-BlissMixer
#
# Copyright (c) 2022 Craig Drummond <craig.p.drummond@gmail.com>
# Copyright (c) 2022-2025 Craig Drummond <craig.p.drummond@gmail.com>
# MIT license.
#
@ -11,7 +11,13 @@ import datetime, os, requests, shutil, subprocess, sys, tempfile, time
GITHUB_TOKEN_FILE = "%s/.config/github-token" % os.path.expanduser('~')
GITHUB_REPO = "CDrummond/bliss-analyser"
GITHUB_ARTIFACTS = ["bliss-analyser-linux", "bliss-analyser-mac", "bliss-analyser-windows"]
LINUX_ARM_ARTIFACTS = ["bliss-analyser-linux-arm-ffmpeg", "bliss-analyser-linux-arm-static", "bliss-analyser-debian-bullseye-arm-libav", "bliss-analyser-debian-bookworm-arm-libav", "bliss-analyser-linux-arm-symphonia"]
LINUX_X86_ARTIFACTS = ["bliss-analyser-linux-x86-ffmpeg", "bliss-analyser-linux-x86-static", "bliss-analyser-ubuntu-22.04-x86-libav", "bliss-analyser-ubuntu-24.04-x86-libav", "bliss-analyser-linux-x86-symphonia"]
MAC_ARTIFACTS = ["bliss-analyser-mac-ffmpeg", "bliss-analyser-mac-symphonia"]
WINDOWS_ARTIFACTS = ["bliss-analyser-windows-libav", "bliss-analyser-windows-symphonia"]
UNIX_ARTIFACTS = LINUX_ARM_ARTIFACTS + LINUX_X86_ARTIFACTS + MAC_ARTIFACTS
GITHUB_ARTIFACTS = UNIX_ARTIFACTS + WINDOWS_ARTIFACTS
def info(s):
print("INFO: %s" %s)
@ -64,12 +70,11 @@ def download_artifacts(version):
f.write(chunk)
if not os.path.exists(dest):
info("Failed to download %s" % item)
break
def make_executable(version):
cwd = os.getcwd()
for a in ["bliss-analyser-linux", "bliss-analyser-mac"]:
for a in UNIX_ARTIFACTS:
archive = "%s-%s.zip" % (a, version)
info("Making analyser executable in %s" % archive)
with tempfile.TemporaryDirectory() as td:
@ -77,6 +82,10 @@ def make_executable(version):
os.remove(archive)
os.chdir(td)
subprocess.call(["chmod", "a+x", "%s/bliss-analyser" % td], shell=False)
bindir = os.path.join(td, "bin")
if os.path.isdir(bindir):
for e in os.listdir(bindir):
subprocess.call(["chmod", "a+x", os.path.join(bindir, e)], shell=False)
shutil.make_archive("%s/%s-%s" % (cwd, a, version), "zip")
os.chdir(cwd)

10
scripts/bliss-analyser-arm Executable file
View File

@ -0,0 +1,10 @@
#!/usr/bin/env bash
#
# Launcher that runs the correct bliss-analyser binary for the current ARM
# architecture (aarch64 vs armhf), forwarding all command-line arguments.
#
ARCH=`arch`
SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]:-$0}"; )" &> /dev/null && pwd 2> /dev/null; )";
# Use exec so the binary replaces this shell (exit code and signals pass
# through), and quoted "$@" so arguments containing spaces or glob
# characters are forwarded intact (unquoted $* would word-split them).
if [ "$ARCH" = "aarch64" ] ; then
    exec "$SCRIPT_DIR/bin/bliss-analyser-aarch64" "$@"
else
    exec "$SCRIPT_DIR/bin/bliss-analyser-armhf" "$@"
fi

View File

@ -1,155 +1,148 @@
/**
* Analyse music with Bliss
*
* Copyright (c) 2022 Craig Drummond <craig.p.drummond@gmail.com>
* Copyright (c) 2022-2025 Craig Drummond <craig.p.drummond@gmail.com>
* GPLv3 license.
*
**/
use anyhow::{Result};
use bliss_audio::{library::analyze_paths_streaming, BlissResult, Song};
use hhmmss::Hhmmss;
use indicatif::{ProgressBar, ProgressStyle};
use std::convert::TryInto;
use std::fs;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
use std::time::Duration;
use std::sync::mpsc;
use std::sync::mpsc::{Receiver, Sender};
use std::thread;
use subprocess::{Exec, NullFile};
use tempdir::TempDir;
use num_cpus;
use crate::cue;
use crate::db;
#[cfg(feature = "ffmpeg")]
use crate::ffmpeg;
use crate::tags;
use anyhow::Result;
#[cfg(feature = "ffmpeg")]
use hhmmss::Hhmmss;
use if_chain::if_chain;
use indicatif::{ProgressBar, ProgressStyle};
#[cfg(not(feature = "ffmpeg"))]
use std::collections::HashSet;
use std::convert::TryInto;
use std::fs::{DirEntry, File};
use std::io::{BufRead, BufReader};
use std::num::NonZeroUsize;
use std::path::{Path, PathBuf};
#[cfg(feature = "ffmpeg")]
use std::sync::mpsc;
#[cfg(feature = "ffmpeg")]
use std::sync::mpsc::{Receiver, Sender};
#[cfg(feature = "ffmpeg")]
use std::thread;
#[cfg(feature = "ffmpeg")]
use std::time::Duration;
use num_cpus;
use bliss_audio::decoder::Decoder;
#[cfg(feature = "libav")]
use bliss_audio::decoder::ffmpeg::FFmpegDecoder as SongDecoder;
#[cfg(feature = "symphonia")]
use bliss_audio::decoder::symphonia::SymphoniaDecoder as SongDecoder;
#[cfg(feature = "ffmpeg")]
use bliss_audio::{BlissResult, Song};
const DONT_ANALYSE:&str = ".notmusic";
const MAX_ERRORS_TO_SHOW:usize = 100;
const MAX_TAG_ERRORS_TO_SHOW:usize = 50;
const DONT_ANALYSE: &str = ".notmusic";
const MAX_ERRORS_TO_SHOW: usize = 100;
const MAX_TAG_ERRORS_TO_SHOW: usize = 50;
const VALID_EXTENSIONS: [&str; 7] = ["m4a", "mp3", "ogg", "flac", "opus", "wv", "dsf"];
fn get_file_list(db:&mut db::Db, mpath:&PathBuf, path:&PathBuf, track_paths:&mut Vec<String>, cue_tracks:&mut Vec<cue::CueTrack>) {
if path.is_dir() {
match path.read_dir() {
Ok(items) => {
for item in items {
match item {
Ok(entry) => {
let pb = entry.path().to_path_buf();
if entry.path().is_dir() {
let mut check = pb.clone();
check.push(PathBuf::from(DONT_ANALYSE));
if check.exists() {
log::info!("Skipping '{}', found '{}'", pb.to_string_lossy(), DONT_ANALYSE);
} else {
get_file_list(db, mpath, &entry.path(), track_paths, cue_tracks);
}
} else if entry.path().is_file() {
let e = pb.extension();
if e.is_some() {
let ext = e.unwrap().to_string_lossy();
if ext=="m4a" || ext=="mp3" || ext=="ogg" || ext=="flac" || ext=="opus" {
match pb.strip_prefix(mpath) {
Ok(stripped) => {
let mut cue_file = pb.clone();
cue_file.set_extension("cue");
if cue_file.exists() {
// Found a CUE file, try to parse and then check if tracks exists in DB
let this_cue_tracks = cue::parse(&pb, &cue_file);
for track in this_cue_tracks {
match track.track_path.strip_prefix(mpath) {
Ok(tstripped) => {
let spb = tstripped.to_path_buf();
let sname = String::from(spb.to_string_lossy());
match db.get_rowid(&sname) {
Ok(id) => {
if id<=0 {
cue_tracks.push(track.clone());
}
},
Err(_) => { }
}
},
Err(_) => { }
}
}
} else {
let spb = stripped.to_path_buf();
let sname = String::from(spb.to_string_lossy());
match db.get_rowid(&sname) {
Ok(id) => {
if id<=0 {
track_paths.push(String::from(pb.to_string_lossy()));
}
},
Err(_) => { }
}
}
},
Err(_) => { }
}
}
}
}
},
Err(_) => { }
}
}
},
Err(_) => { }
fn get_file_list(db: &mut db::Db, mpath: &Path, path: &Path, track_paths: &mut Vec<String>, cue_tracks:&mut Vec<cue::CueTrack>, file_count:&mut usize, max_num_files: usize, use_tags: bool, tagged_file_count:&mut usize, dry_run: bool) {
if !path.is_dir() {
return;
}
let mut items: Vec<_> = path.read_dir().unwrap().map(|r| r.unwrap()).collect();
items.sort_by_key(|dir| dir.path());
for item in items {
check_dir_entry(db, mpath, item, track_paths, cue_tracks, file_count, max_num_files, use_tags, tagged_file_count, dry_run);
if max_num_files>0 && *file_count>=max_num_files {
break;
}
}
}
pub fn analyse_new_files(db:&db::Db, mpath: &PathBuf, track_paths:Vec<String>) -> Result<()> {
let total = track_paths.len();
let pb = ProgressBar::new(total.try_into().unwrap());
let style = ProgressStyle::default_bar()
.template("[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}")
.progress_chars("=> ");
pb.set_style(style);
fn check_dir_entry(db: &mut db::Db, mpath: &Path, entry: DirEntry, track_paths: &mut Vec<String>, cue_tracks:&mut Vec<cue::CueTrack>, file_count:&mut usize, max_num_files: usize, use_tags: bool, tagged_file_count:&mut usize, dry_run: bool) {
let pb = entry.path();
if pb.is_dir() {
let check = pb.join(DONT_ANALYSE);
if check.exists() {
log::info!("Skipping '{}', found '{}'", pb.to_string_lossy(), DONT_ANALYSE);
} else if max_num_files<=0 || *file_count<max_num_files {
get_file_list(db, mpath, &pb, track_paths, cue_tracks, file_count, max_num_files, use_tags, tagged_file_count, dry_run);
}
} else if pb.is_file() && (max_num_files<=0 || *file_count<max_num_files) {
if_chain! {
if let Some(ext) = pb.extension();
let ext = ext.to_string_lossy();
if VALID_EXTENSIONS.contains(&&*ext);
if let Ok(stripped) = pb.strip_prefix(mpath);
then {
let sname = String::from(stripped.to_string_lossy());
let mut cue_file = pb.clone();
cue_file.set_extension("cue");
if cue_file.exists() {
// For cue files, check if first track is in DB
let mut cue_track_path = pb.clone();
let ext = pb.extension().unwrap().to_string_lossy();
cue_track_path.set_extension(format!("{}{}1", ext, db::CUE_MARKER));
if let Ok(cue_track_stripped) = cue_track_path.strip_prefix(mpath) {
let cue_track_sname = String::from(cue_track_stripped.to_string_lossy());
if let Ok(id) = db.get_rowid(&cue_track_sname) {
let results = analyze_paths_streaming(track_paths)?;
let mut analysed = 0;
let mut failed:Vec<String> = Vec::new();
let mut tag_error:Vec<String> = Vec::new();
#[cfg(not(feature = "ffmpeg"))]
if id<=0 {
track_paths.push(String::from(cue_file.to_string_lossy()));
*file_count+=1;
}
log::info!("Analysing new tracks");
for (path, result) in results {
let pbuff = PathBuf::from(&path);
let stripped = pbuff.strip_prefix(mpath).unwrap();
let spbuff = stripped.to_path_buf();
let sname = String::from(spbuff.to_string_lossy());
pb.set_message(format!("{}", sname));
match result {
Ok(track) => {
let cpath = String::from(path);
let meta = tags::read(&cpath);
if meta.title.is_empty() && meta.artist.is_empty() && meta.album.is_empty() && meta.genre.is_empty() {
tag_error.push(sname.clone());
#[cfg(feature = "ffmpeg")]
if id<=0 {
let this_cue_tracks = cue::parse(&pb, &cue_file);
for track in this_cue_tracks {
cue_tracks.push(track.clone());
}
*file_count+=1;
}
}
}
} else {
if let Ok(id) = db.get_rowid(&sname) {
if id<=0 {
let mut tags_used = false;
if use_tags {
let meta = tags::read(&sname, true);
if !meta.is_empty() && !meta.analysis.is_none() {
if !dry_run {
db.add_track(&sname, &meta, &meta.analysis.unwrap());
}
*tagged_file_count+=1;
tags_used = true;
}
}
if !tags_used {
track_paths.push(String::from(pb.to_string_lossy()));
*file_count+=1;
}
}
}
}
db.add_track(&sname, &meta, &track.analysis);
analysed += 1;
},
Err(e) => {
failed.push(format!("{} - {}", sname, e));
}
};
pb.inc(1);
}
}
pb.finish_with_message(format!("{} Analysed. {} Failure(s).", analysed, failed.len()));
}
fn show_errors(failed: &mut Vec<String>, tag_error: &mut Vec<String>) {
if !failed.is_empty() {
let total = failed.len();
failed.truncate(MAX_ERRORS_TO_SHOW);
log::error!("Failed to analyse the folling track(s):");
log::error!("Failed to analyse the following file(s):");
for err in failed {
log::error!(" {}", err);
}
if total>MAX_ERRORS_TO_SHOW {
if total > MAX_ERRORS_TO_SHOW {
log::error!(" + {} other(s)", total - MAX_ERRORS_TO_SHOW);
}
}
@ -157,20 +150,176 @@ pub fn analyse_new_files(db:&db::Db, mpath: &PathBuf, track_paths:Vec<String>) -
let total = tag_error.len();
tag_error.truncate(MAX_TAG_ERRORS_TO_SHOW);
log::error!("Failed to read tags of the folling track(s):");
log::error!("Failed to read tags of the following file(s):");
for err in tag_error {
log::error!(" {}", err);
}
if total>MAX_TAG_ERRORS_TO_SHOW {
if total > MAX_TAG_ERRORS_TO_SHOW {
log::error!(" + {} other(s)", total - MAX_TAG_ERRORS_TO_SHOW);
}
}
}
#[cfg(not(feature = "ffmpeg"))]
fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, use_tags: bool, preserve_mod_times: bool) -> Result<()> {
let total = track_paths.len();
let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}")
.progress_chars("=> "),
);
let cpu_threads: NonZeroUsize = match max_threads {
0 => NonZeroUsize::new(num_cpus::get()).unwrap(),
_ => NonZeroUsize::new(max_threads).unwrap(),
};
let mut analysed = 0;
let mut failed: Vec<String> = Vec::new();
let mut tag_error: Vec<String> = Vec::new();
let mut reported_cue:HashSet<String> = HashSet::new();
log::info!("Analysing new files");
for (path, result) in SongDecoder::analyze_paths_with_cores(track_paths, cpu_threads) {
let stripped = path.strip_prefix(mpath).unwrap();
let spbuff = stripped.to_path_buf();
let sname = String::from(spbuff.to_string_lossy());
progress.set_message(format!("{}", sname));
let mut inc_progress = true; // Only want to increment progress once for cue tracks
match result {
Ok(track) => {
let cpath = String::from(path.to_string_lossy());
match track.cue_info {
Some(cue) => {
match track.track_number {
Some(track_num) => {
if reported_cue.contains(&cpath) {
inc_progress = false;
} else {
analysed += 1;
reported_cue.insert(cpath);
}
let meta = db::Metadata {
title: track.title.unwrap_or_default().to_string(),
artist: track.artist.unwrap_or_default().to_string(),
album: track.album.unwrap_or_default().to_string(),
album_artist: track.album_artist.unwrap_or_default().to_string(),
genre: track.genre.unwrap_or_default().to_string(),
duration: track.duration.as_secs() as u32,
analysis: None
};
// Remove prefix from audio_file_path
let pbuff = PathBuf::from(&cue.audio_file_path);
let stripped = pbuff.strip_prefix(mpath).unwrap();
let spbuff = stripped.to_path_buf();
let sname = String::from(spbuff.to_string_lossy());
let db_path = format!("{}{}{}", sname, db::CUE_MARKER, track_num);
db.add_track(&db_path, &meta, &track.analysis);
}
None => { failed.push(format!("{} - No track number?", sname)); }
}
}
None => {
// Use lofty to read tags here, and not bliss's, so that if update
// tags is ever used they are from the same source.
let mut meta = tags::read(&cpath, false);
if meta.is_empty() {
// Lofty failed? Try from bliss...
meta.title = track.title.unwrap_or_default().to_string();
meta.artist = track.artist.unwrap_or_default().to_string();
meta.album = track.album.unwrap_or_default().to_string();
meta.album_artist = track.album_artist.unwrap_or_default().to_string();
meta.genre = track.genre.unwrap_or_default().to_string();
meta.duration = track.duration.as_secs() as u32;
}
if meta.is_empty() {
tag_error.push(sname.clone());
}
if use_tags {
tags::write_analysis(&cpath, &track.analysis, preserve_mod_times);
}
db.add_track(&sname, &meta, &track.analysis);
}
}
analysed += 1;
}
Err(e) => { failed.push(format!("{} - {}", sname, e)); }
};
if inc_progress {
progress.inc(1);
}
}
progress.finish_with_message("Finished!");
log::info!("{} Analysed. {} Failure(s).", analysed, failed.len());
show_errors(&mut failed, &mut tag_error);
Ok(())
}
pub fn analyze_cue_streaming(tracks: Vec<cue::CueTrack>,) -> BlissResult<Receiver<(cue::CueTrack, BlissResult<Song>)>> {
/// Analyse every file in `track_paths` with the command-line ffmpeg decoder
/// and store the results in the database.
///
/// * `db` - open database to add results to.
/// * `mpath` - music root folder; DB paths are stored relative to this.
/// * `track_paths` - absolute paths of the files to analyse.
/// * `max_threads` - analysis thread count; 0 means use all CPU cores.
/// * `use_tags` - if true, also write analysis results into each file's tags.
/// * `preserve_mod_times` - keep file modification times when writing tags.
#[cfg(feature = "ffmpeg")]
fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, use_tags: bool, preserve_mod_times: bool) -> Result<()> {
    let total = track_paths.len();
    let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
        ProgressStyle::default_bar()
            .template("[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}")
            .progress_chars("=> "),
    );
    // 0 => auto-detect core count, otherwise honour the user's request.
    let cpu_threads: NonZeroUsize = match max_threads {
        0 => NonZeroUsize::new(num_cpus::get()).unwrap(),
        _ => NonZeroUsize::new(max_threads).unwrap(),
    };
    let mut analysed = 0;
    let mut failed: Vec<String> = Vec::new();
    let mut tag_error: Vec<String> = Vec::new();
    log::info!("Analysing new files");
    for (path, result) in <ffmpeg::FFmpegCmdDecoder as Decoder>::analyze_paths_with_cores(track_paths, cpu_threads) {
        // DB entries use paths relative to the music folder.
        let stripped = path.strip_prefix(mpath).unwrap();
        let spbuff = stripped.to_path_buf();
        let sname = String::from(spbuff.to_string_lossy());
        progress.set_message(format!("{}", sname));
        match result {
            Ok(track) => {
                let cpath = String::from(path.to_string_lossy());
                // Read tags via the tag library first; fall back to ffprobe
                // if that produced nothing.
                let mut meta = tags::read(&cpath, false);
                if meta.is_empty() {
                    meta = ffmpeg::read_tags(&cpath);
                }
                if meta.is_empty() {
                    tag_error.push(sname.clone());
                }
                if use_tags {
                    tags::write_analysis(&cpath, &track.analysis, preserve_mod_times);
                }
                db.add_track(&sname, &meta, &track.analysis);
                analysed += 1;
            }
            Err(e) => { failed.push(format!("{} - {}", sname, e)); }
        };
        progress.inc(1);
    }
    // Reset terminal, otherwise typed output does not show? Perhaps Linux only...
    if ! cfg!(windows) {
        match std::process::Command::new("stty").arg("sane").spawn() {
            Ok(_) => { },
            Err(_) => { },
        };
    }
    progress.finish_with_message("Finished!");
    log::info!("{} Analysed. {} Failure(s).", analysed, failed.len());
    show_errors(&mut failed, &mut tag_error);
    Ok(())
}
#[cfg(feature = "ffmpeg")]
fn analyze_cue_streaming(tracks: Vec<cue::CueTrack>,) -> BlissResult<Receiver<(cue::CueTrack, BlissResult<Song>)>> {
let num_cpus = num_cpus::get();
let last_track_duration = Duration::new(cue::LAST_TRACK_DURATION, 0);
#[allow(clippy::type_complexity)]
let (tx, rx): (
@ -193,63 +342,13 @@ pub fn analyze_cue_streaming(tracks: Vec<cue::CueTrack>,) -> BlissResult<Receive
let tx_thread = tx.clone();
let owned_chunk = chunk.to_owned();
let child = thread::spawn(move || {
let mut idx = 0;
match &TempDir::new("bliss") {
Ok(dir) => {
for cue_track in owned_chunk {
let audio_path = String::from(cue_track.audio_path.to_string_lossy());
let ext = cue_track.audio_path.extension();
let track_path = String::from(cue_track.track_path.to_string_lossy());
let mut tmp_file = PathBuf::from(dir.path());
if ext.is_some() {
tmp_file.push(format!("{}.{}", idx, ext.unwrap().to_string_lossy()));
} else {
tmp_file.push(format!("{}.flac", idx));
}
idx += 1;
for cue_track in owned_chunk {
let audio_path = format!("{}{}{}.00{}{}.00", cue_track.audio_path.to_string_lossy(), ffmpeg::TIME_SEP, cue_track.start.hhmmss(), ffmpeg::TIME_SEP, cue_track.duration.hhmmss());
let track_path = String::from(cue_track.track_path.to_string_lossy());
log::debug!("Extracting '{}'", track_path);
match Exec::cmd("ffmpeg").arg("-i").arg(&audio_path)
.arg("-ss").arg(&cue_track.start.hhmmss())
.arg("-t").arg(&cue_track.duration.hhmmss())
.arg("-c").arg("copy")
.arg(String::from(tmp_file.to_string_lossy()))
.stderr(NullFile)
.join() {
Ok(_) => { },
Err(e) => { log::error!("Failed to call ffmpeg. {}", e); }
}
if ! cfg!(windows) {
// ffmpeg seeks to break echo on terminal? 'stty echo' restores...
match Exec::cmd("stty").arg("echo").join() {
Ok(_) => { },
Err(_) => { }
}
}
if tmp_file.exists() {
log::debug!("Analyzing '{}'", track_path);
let song = Song::new(&tmp_file);
if cue_track.duration>=last_track_duration {
// Last track, so read duration from temp file
let mut cloned = cue_track.clone();
let meta = tags::read(&String::from(tmp_file.to_string_lossy()));
cloned.duration = Duration::new(meta.duration as u64, 0);
tx_thread.send((cloned, song)).unwrap();
} else {
tx_thread.send((cue_track, song)).unwrap();
}
match fs::remove_file(tmp_file) {
Ok(_) => { },
Err(_) => { }
}
} else {
log::error!("Failed to create temp file");
}
}
},
Err(e) => { log::error!("Failed to create temp folder. {}", e); }
log::debug!("Analyzing '{}'", track_path);
let song = <ffmpeg::FFmpegCmdDecoder as Decoder>::song_from_path(audio_path);
tx_thread.send((cue_track, song)).unwrap();
}
});
handles.push(child);
@ -258,24 +357,27 @@ pub fn analyze_cue_streaming(tracks: Vec<cue::CueTrack>,) -> BlissResult<Receive
Ok(rx)
}
pub fn analyse_new_cue_tracks(db:&db::Db, mpath: &PathBuf, cue_tracks:Vec<cue::CueTrack>) -> Result<()> {
#[cfg(feature = "ffmpeg")]
fn analyse_new_cue_tracks(db:&db::Db, mpath: &PathBuf, cue_tracks:Vec<cue::CueTrack>) -> Result<()> {
let total = cue_tracks.len();
let pb = ProgressBar::new(total.try_into().unwrap());
let style = ProgressStyle::default_bar()
.template("[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}")
.progress_chars("=> ");
pb.set_style(style);
let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}")
.progress_chars("=> "),
);
let results = analyze_cue_streaming(cue_tracks)?;
let mut analysed = 0;
let mut failed:Vec<String> = Vec::new();
let mut tag_error: Vec<String> = Vec::new();
let last_track_duration = Duration::new(cue::LAST_TRACK_DURATION, 0);
log::info!("Analysing new cue tracks");
for (track, result) in results {
let stripped = track.track_path.strip_prefix(mpath).unwrap();
let spbuff = stripped.to_path_buf();
let sname = String::from(spbuff.to_string_lossy());
pb.set_message(format!("{}", sname));
progress.set_message(format!("{}", sname));
match result {
Ok(song) => {
let meta = db::Metadata {
@ -284,7 +386,8 @@ pub fn analyse_new_cue_tracks(db:&db::Db, mpath: &PathBuf, cue_tracks:Vec<cue::C
album_artist:track.album_artist,
album:track.album,
genre:track.genre,
duration:track.duration.as_secs() as u32
duration:if track.duration>=last_track_duration { song.duration.as_secs() as u32 } else { track.duration.as_secs() as u32 },
analysis: None
};
db.add_track(&sname, &meta, &song.analysis);
@ -294,27 +397,16 @@ pub fn analyse_new_cue_tracks(db:&db::Db, mpath: &PathBuf, cue_tracks:Vec<cue::C
failed.push(format!("{} - {}", sname, e));
}
};
pb.inc(1);
}
pb.finish_with_message(format!("{} Analysed. {} Failure(s).", analysed, failed.len()));
if !failed.is_empty() {
let total = failed.len();
failed.truncate(MAX_ERRORS_TO_SHOW);
log::error!("Failed to analyse the folling track(s):");
for err in failed {
log::error!(" {}", err);
}
if total>MAX_ERRORS_TO_SHOW {
log::error!(" + {} other(s)", total - MAX_ERRORS_TO_SHOW);
}
progress.inc(1);
}
progress.finish_with_message("Finished!");
log::info!("{} Analysed. {} Failure(s).", analysed, failed.len());
show_errors(&mut failed, &mut tag_error);
Ok(())
}
pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run:bool, keep_old:bool, max_num_tracks:usize) {
pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run: bool, keep_old: bool, max_num_files: usize, max_threads: usize, ignore_path: &PathBuf, use_tags: bool, preserve_mod_times: bool) {
let mut db = db::Db::new(&String::from(db_path));
let mut track_count_left = max_num_tracks;
db.init();
@ -322,23 +414,30 @@ pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run:bool, keep_ol
db.remove_old(mpaths, dry_run);
}
let mut changes_made = false;
for path in mpaths {
let mpath = path.clone();
let cur = path.clone();
let mut track_paths:Vec<String> = Vec::new();
let mut track_paths: Vec<String> = Vec::new();
let mut cue_tracks:Vec<cue::CueTrack> = Vec::new();
let mut file_count:usize = 0;
let mut tagged_file_count:usize = 0;
if mpaths.len()>1 {
log::info!("Looking for new tracks in {}", mpath.to_string_lossy());
if mpaths.len() > 1 {
log::info!("Looking for new files in {}", mpath.to_string_lossy());
} else {
log::info!("Looking for new tracks");
log::info!("Looking for new files");
}
get_file_list(&mut db, &mpath, &cur, &mut track_paths, &mut cue_tracks);
get_file_list(&mut db, &mpath, &cur, &mut track_paths, &mut cue_tracks, &mut file_count, max_num_files, use_tags, &mut tagged_file_count, dry_run);
track_paths.sort();
log::info!("Num new tracks: {}", track_paths.len());
log::info!("Num new files: {}", track_paths.len());
if !cue_tracks.is_empty() {
log::info!("Num new cue tracks: {}", cue_tracks.len());
}
if use_tags {
log::info!("Num tagged files: {}", tagged_file_count);
}
if dry_run {
if !track_paths.is_empty() || !cue_tracks.is_empty() {
log::info!("The following need to be analysed:");
@ -350,49 +449,30 @@ pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run:bool, keep_ol
}
}
} else {
if max_num_tracks>0 {
if track_paths.len()>track_count_left {
log::info!("Only analysing {} tracks", track_count_left);
track_paths.truncate(track_count_left);
}
track_count_left -= track_paths.len();
}
if max_num_tracks>0 {
if track_count_left == 0 {
cue_tracks.clear();
} else {
if cue_tracks.len()>track_count_left {
log::info!("Only analysing {} cue tracks", track_count_left);
cue_tracks.truncate(track_count_left);
}
track_count_left -= track_paths.len();
}
}
if !track_paths.is_empty() {
match analyse_new_files(&db, &mpath, track_paths) {
Ok(_) => { },
match analyse_new_files(&db, &mpath, track_paths, max_threads, use_tags, preserve_mod_times) {
Ok(_) => { changes_made = true; }
Err(e) => { log::error!("Analysis returned error: {}", e); }
}
} else {
log::info!("No new tracks to analyse");
log::info!("No new files to analyse");
}
#[cfg(feature = "ffmpeg")]
if !cue_tracks.is_empty() {
match analyse_new_cue_tracks(&db, &mpath, cue_tracks) {
Ok(_) => { },
Ok(_) => { changes_made = true; },
Err(e) => { log::error!("Cue analysis returned error: {}", e); }
}
}
if max_num_tracks>0 && track_count_left<=0 {
log::info!("Track limit reached");
break;
}
}
}
db.close();
if changes_made && ignore_path.exists() && ignore_path.is_file() {
log::info!("Updating 'ignore' flags");
update_ignore(&db_path, &ignore_path);
}
}
pub fn read_tags(db_path: &str, mpaths: &Vec<PathBuf>) {
@ -402,6 +482,13 @@ pub fn read_tags(db_path: &str, mpaths: &Vec<PathBuf>) {
db.close();
}
/// Export analysis results from the database into the source files' tags.
pub fn export(db_path: &str, mpaths: &Vec<PathBuf>, preserve_mod_times: bool) {
    let location = String::from(db_path);
    let db = db::Db::new(&location);
    db.init();
    db.export(&mpaths, preserve_mod_times);
    db.close();
}
pub fn update_ignore(db_path: &str, ignore_path: &PathBuf) {
let file = File::open(ignore_path).unwrap();
let reader = BufReader::new(file);
@ -409,11 +496,12 @@ pub fn update_ignore(db_path: &str, ignore_path: &PathBuf) {
db.init();
db.clear_ignore();
for (_index, line) in reader.lines().enumerate() {
let line = line.unwrap();
let mut lines = reader.lines();
while let Some(Ok(line)) = lines.next() {
if !line.is_empty() && !line.starts_with("#") {
db.set_ignore(&line);
}
}
db.close();
}

View File

@ -1,21 +1,34 @@
/**
* Analyse music with Bliss
*
* Copyright (c) 2022 Craig Drummond <craig.p.drummond@gmail.com>
* Copyright (c) 2022-2025 Craig Drummond <craig.p.drummond@gmail.com>
* GPLv3 license.
*
**/
#[cfg(feature = "ffmpeg")]
extern crate rcue;
#[cfg(feature = "ffmpeg")]
use crate::db;
#[cfg(feature = "ffmpeg")]
use rcue::parser::parse_from_file;
use std::path::PathBuf;
#[cfg(feature = "ffmpeg")]
use std::time::Duration;
pub const MARKER:&str = ".CUE_TRACK.";
#[cfg(feature = "ffmpeg")]
pub const LAST_TRACK_DURATION:u64 = 60*60*24;
#[cfg(feature = "ffmpeg")]
const GENRE:&str = "GENRE";
#[cfg(not(feature = "ffmpeg"))]
#[derive(Clone)]
pub struct CueTrack {
pub track_path:PathBuf
}
#[cfg(feature = "ffmpeg")]
#[derive(Clone)]
pub struct CueTrack {
pub audio_path:PathBuf,
@ -29,6 +42,7 @@ pub struct CueTrack {
pub duration:Duration
}
#[cfg(feature = "ffmpeg")]
pub fn parse(audio_path:&PathBuf, cue_path:&PathBuf) -> Vec<CueTrack> {
let mut resp:Vec<CueTrack> = Vec::new();
@ -49,7 +63,7 @@ pub fn parse(audio_path:&PathBuf, cue_path:&PathBuf) -> Vec<CueTrack> {
Some((_, start)) => {
let mut track_path = audio_path.clone();
let ext = audio_path.extension().unwrap().to_string_lossy();
track_path.set_extension(format!("{}{}{}", ext, MARKER, resp.len()+1));
track_path.set_extension(format!("{}{}{}", ext, db::CUE_MARKER, resp.len()+1));
let mut ctrack = CueTrack {
audio_path: audio_path.clone(),
track_path: track_path,

305
src/db.rs
View File

@ -1,53 +1,81 @@
/**
* Analyse music with Bliss
*
* Copyright (c) 2022 Craig Drummond <craig.p.drummond@gmail.com>
* Copyright (c) 2022-2025 Craig Drummond <craig.p.drummond@gmail.com>
* GPLv3 license.
*
**/
#[cfg(feature = "ffmpeg")]
use crate::ffmpeg;
use crate::tags;
use bliss_audio::{Analysis, AnalysisIndex};
use indicatif::{ProgressBar, ProgressStyle};
use rusqlite::{Connection, params};
use rusqlite::{params, Connection};
use std::convert::TryInto;
use std::path::PathBuf;
use std::process;
use crate::cue;
use crate::tags;
pub const CUE_MARKER: &str = ".CUE_TRACK.";
pub struct FileMetadata {
pub rowid:usize,
pub file:String,
pub title:Option<String>,
pub artist:Option<String>,
pub album_artist:Option<String>,
pub album:Option<String>,
pub genre:Option<String>,
pub duration:u32
pub rowid: usize,
pub file: String,
pub title: Option<String>,
pub artist: Option<String>,
pub album_artist: Option<String>,
pub album: Option<String>,
pub genre: Option<String>,
pub duration: u32,
}
struct AnalysisResults {
pub file: String,
pub analysis: Analysis,
}
#[derive(Default, PartialEq)]
pub struct Metadata {
pub title:String,
pub artist:String,
pub album_artist:String,
pub album:String,
pub genre:String,
pub duration:u32
pub title: String,
pub artist: String,
pub album_artist: String,
pub album: String,
pub genre: String,
pub duration: u32,
pub analysis: Option<Analysis>,
}
impl Metadata {
    /// True when every textual tag field is unset (duration is not considered).
    pub fn is_empty(&self) -> bool {
        [
            &self.title,
            &self.artist,
            &self.album_artist,
            &self.album,
            &self.genre,
        ]
        .iter()
        .all(|field| field.is_empty())
    }
}
pub struct Db {
pub conn: Connection
pub conn: Connection,
}
impl Db {
pub fn new(path: &String) -> Self {
Self {
conn: Connection::open(path).unwrap(),
match Connection::open(path) {
Ok(conn) => {
Self {
conn: conn,
}
}
Err(e) => {
log::error!("Failed top open/create database. {}", e);
process::exit(-1);
}
}
}
pub fn init(&self) {
match self.conn.execute(
let cmd = self.conn.execute(
"CREATE TABLE IF NOT EXISTS Tracks (
File text primary key,
Title text,
@ -77,39 +105,35 @@ impl Db {
Chroma8 real,
Chroma9 real,
Chroma10 real
);",[]) {
Ok(_) => { },
Err(_) => {
log::error!("Failed to create DB table");
process::exit(-1);
}
);",
[],
);
if cmd.is_err() {
log::error!("Failed to create DB table");
process::exit(-1);
}
match self.conn.execute("CREATE UNIQUE INDEX IF NOT EXISTS Tracks_idx ON Tracks(File)", []) {
Ok(_) => { },
Err(_) => {
log::error!("Failed to create DB index");
process::exit(-1);
}
let cmd = self.conn.execute("CREATE UNIQUE INDEX IF NOT EXISTS Tracks_idx ON Tracks(File)", []);
if cmd.is_err() {
log::error!("Failed to create DB index");
process::exit(-1);
}
}
pub fn close(self) {
match self.conn.close() {
Ok(_) => { },
Err(_) => { }
}
let _ = self.conn.close();
}
pub fn get_rowid(&self, path: &String) -> Result<usize, rusqlite::Error> {
let mut db_path = path.clone();
pub fn get_rowid(&self, path: &str) -> Result<usize, rusqlite::Error> {
let mut db_path = path.to_string();
if cfg!(windows) {
db_path = db_path.replace("\\", "/");
}
let mut stmt = self.conn.prepare("SELECT rowid FROM Tracks WHERE File=:path;")?;
let track_iter = stmt.query_map(&[(":path", &db_path)], |row| {
Ok(row.get(0)?)
}).unwrap();
let mut rowid:usize = 0;
let track_iter = stmt.query_map(&[(":path", &db_path)], |row| Ok(row.get(0)?)).unwrap();
let mut rowid: usize = 0;
for tr in track_iter {
rowid = tr.unwrap();
break;
@ -117,21 +141,21 @@ impl Db {
Ok(rowid)
}
pub fn add_track(&self, path: &String, meta: &Metadata, analysis:&Analysis) {
pub fn add_track(&self, path: &String, meta: &Metadata, analysis: &Analysis) {
let mut db_path = path.clone();
if cfg!(windows) {
db_path = db_path.replace("\\", "/");
}
match self.get_rowid(&path) {
Ok(id) => {
if id<=0 {
if id <= 0 {
match self.conn.execute("INSERT INTO Tracks (File, Title, Artist, AlbumArtist, Album, Genre, Duration, Ignore, Tempo, Zcr, MeanSpectralCentroid, StdDevSpectralCentroid, MeanSpectralRolloff, StdDevSpectralRolloff, MeanSpectralFlatness, StdDevSpectralFlatness, MeanLoudness, StdDevLoudness, Chroma1, Chroma2, Chroma3, Chroma4, Chroma5, Chroma6, Chroma7, Chroma8, Chroma9, Chroma10) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
params![db_path, meta.title, meta.artist, meta.album_artist, meta.album, meta.genre, meta.duration, 0,
analysis[AnalysisIndex::Tempo], analysis[AnalysisIndex::Zcr], analysis[AnalysisIndex::MeanSpectralCentroid], analysis[AnalysisIndex::StdDeviationSpectralCentroid], analysis[AnalysisIndex::MeanSpectralRolloff],
analysis[AnalysisIndex::StdDeviationSpectralRolloff], analysis[AnalysisIndex::MeanSpectralFlatness], analysis[AnalysisIndex::StdDeviationSpectralFlatness], analysis[AnalysisIndex::MeanLoudness], analysis[AnalysisIndex::StdDeviationLoudness],
analysis[AnalysisIndex::Chroma1], analysis[AnalysisIndex::Chroma2], analysis[AnalysisIndex::Chroma3], analysis[AnalysisIndex::Chroma4], analysis[AnalysisIndex::Chroma5],
analysis[AnalysisIndex::Chroma6], analysis[AnalysisIndex::Chroma7], analysis[AnalysisIndex::Chroma8], analysis[AnalysisIndex::Chroma9], analysis[AnalysisIndex::Chroma10]]) {
Ok(_) => { },
Ok(_) => { }
Err(e) => { log::error!("Failed to insert '{}' into database. {}", path, e); }
}
} else {
@ -141,30 +165,28 @@ impl Db {
analysis[AnalysisIndex::StdDeviationSpectralRolloff], analysis[AnalysisIndex::MeanSpectralFlatness], analysis[AnalysisIndex::StdDeviationSpectralFlatness], analysis[AnalysisIndex::MeanLoudness], analysis[AnalysisIndex::StdDeviationLoudness],
analysis[AnalysisIndex::Chroma1], analysis[AnalysisIndex::Chroma2], analysis[AnalysisIndex::Chroma3], analysis[AnalysisIndex::Chroma4], analysis[AnalysisIndex::Chroma5],
analysis[AnalysisIndex::Chroma6], analysis[AnalysisIndex::Chroma7], analysis[AnalysisIndex::Chroma8], analysis[AnalysisIndex::Chroma9], analysis[AnalysisIndex::Chroma10], id]) {
Ok(_) => { },
Ok(_) => { }
Err(e) => { log::error!("Failed to update '{}' in database. {}", path, e); }
}
}
},
}
Err(_) => { }
}
}
pub fn remove_old(&self, mpaths: &Vec<PathBuf>, dry_run:bool) {
log::info!("Looking for non-existant tracks");
pub fn remove_old(&self, mpaths: &Vec<PathBuf>, dry_run: bool) {
log::info!("Looking for non-existent tracks");
let mut stmt = self.conn.prepare("SELECT File FROM Tracks;").unwrap();
let track_iter = stmt.query_map([], |row| {
Ok((row.get(0)?,))
}).unwrap();
let mut to_remove:Vec<String> = Vec::new();
let track_iter = stmt.query_map([], |row| Ok((row.get(0)?,))).unwrap();
let mut to_remove: Vec<String> = Vec::new();
for tr in track_iter {
let mut db_path:String = tr.unwrap().0;
let mut db_path: String = tr.unwrap().0;
let orig_path = db_path.clone();
match orig_path.find(cue::MARKER) {
match orig_path.find(CUE_MARKER) {
Some(s) => {
db_path.truncate(s);
},
None => { }
}
None => {}
}
if cfg!(windows) {
db_path = db_path.replace("/", "\\");
@ -187,8 +209,8 @@ impl Db {
}
let num_to_remove = to_remove.len();
log::info!("Num non-existant tracks: {}", num_to_remove);
if num_to_remove>0 {
log::info!("Num non-existent tracks: {}", num_to_remove);
if num_to_remove > 0 {
if dry_run {
log::info!("The following need to be removed from database:");
for t in to_remove {
@ -198,9 +220,10 @@ impl Db {
let count_before = self.get_track_count();
for t in to_remove {
//log::debug!("Remove '{}'", t);
match self.conn.execute("DELETE FROM Tracks WHERE File = ?;", params![t]) {
Ok(_) => { },
Err(e) => { log::error!("Failed to remove '{}' - {}", t, e) }
let cmd = self.conn.execute("DELETE FROM Tracks WHERE File = ?;", params![t]);
if let Err(e) = cmd {
log::error!("Failed to remove '{}' - {}", t, e)
}
}
let count_now = self.get_track_count();
@ -213,10 +236,8 @@ impl Db {
pub fn get_track_count(&self) -> usize {
let mut stmt = self.conn.prepare("SELECT COUNT(*) FROM Tracks;").unwrap();
let track_iter = stmt.query_map([], |row| {
Ok(row.get(0)?)
}).unwrap();
let mut count:usize = 0;
let track_iter = stmt.query_map([], |row| Ok(row.get(0)?)).unwrap();
let mut count: usize = 0;
for tr in track_iter {
count = tr.unwrap();
break;
@ -226,51 +247,64 @@ impl Db {
pub fn update_tags(&self, mpaths: &Vec<PathBuf>) {
let total = self.get_track_count();
if total>0 {
let pb = ProgressBar::new(total.try_into().unwrap());
let style = ProgressStyle::default_bar()
.template("[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}")
.progress_chars("=> ");
pb.set_style(style);
if total > 0 {
let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
ProgressStyle::default_bar()
.template(
"[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}",
)
.progress_chars("=> "),
);
let mut stmt = self.conn.prepare("SELECT rowid, File, Title, Artist, AlbumArtist, Album, Genre, Duration FROM Tracks ORDER BY File ASC;").unwrap();
let track_iter = stmt.query_map([], |row| {
Ok(FileMetadata {
rowid: row.get(0)?,
file: row.get(1)?,
title: row.get(2)?,
artist: row.get(3)?,
album_artist: row.get(4)?,
album: row.get(5)?,
genre: row.get(6)?,
duration: row.get(7)?,
let track_iter = stmt
.query_map([], |row| {
Ok(FileMetadata {
rowid: row.get(0)?,
file: row.get(1)?,
title: row.get(2)?,
artist: row.get(3)?,
album_artist: row.get(4)?,
album: row.get(5)?,
genre: row.get(6)?,
duration: row.get(7)?,
})
})
}).unwrap();
.unwrap();
let mut updated = 0;
for tr in track_iter {
let dbtags = tr.unwrap();
if !dbtags.file.contains(cue::MARKER) {
let dtags = Metadata{
title:dbtags.title.unwrap_or(String::new()),
artist:dbtags.artist.unwrap_or(String::new()),
album_artist:dbtags.album_artist.unwrap_or(String::new()),
album:dbtags.album.unwrap_or(String::new()),
genre:dbtags.genre.unwrap_or(String::new()),
duration:dbtags.duration
if !dbtags.file.contains(CUE_MARKER) {
let dtags = Metadata {
title: dbtags.title.unwrap_or_default(),
artist: dbtags.artist.unwrap_or_default(),
album_artist: dbtags.album_artist.unwrap_or_default(),
album: dbtags.album.unwrap_or_default(),
genre: dbtags.genre.unwrap_or_default(),
duration: dbtags.duration,
analysis: None,
};
pb.set_message(format!("{}", dbtags.file));
progress.set_message(format!("{}", dbtags.file));
for mpath in mpaths {
let track_path = mpath.join(&dbtags.file);
if track_path.exists() {
let path = String::from(track_path.to_string_lossy());
let ftags = tags::read(&path);
if ftags.title.is_empty() && ftags.artist.is_empty() && ftags.album_artist.is_empty() && ftags.album.is_empty() && ftags.genre.is_empty() {
#[allow(unused_mut)] // ftags is mutable if using ffmpeg on commandline
let mut ftags = tags::read(&path, false);
#[cfg(feature = "ffmpeg")]
if ftags.is_empty() {
ftags = ffmpeg::read_tags(&path);
}
if ftags.is_empty() {
log::error!("Failed to read tags of '{}'", dbtags.file);
} else if ftags.duration!=dtags.duration || ftags.title!=dtags.title || ftags.artist!=dtags.artist || ftags.album_artist!=dtags.album_artist || ftags.album!=dtags.album || ftags.genre!=dtags.genre {
} else if ftags != dtags {
match self.conn.execute("UPDATE Tracks SET Title=?, Artist=?, AlbumArtist=?, Album=?, Genre=?, Duration=? WHERE rowid=?;",
params![ftags.title, ftags.artist, ftags.album_artist, ftags.album, ftags.genre, ftags.duration, dbtags.rowid]) {
Ok(_) => { updated += 1; },
Ok(_) => { updated += 1; }
Err(e) => { log::error!("Failed to update tags of '{}'. {}", dbtags.file, e); }
}
}
@ -278,32 +312,81 @@ impl Db {
}
}
}
pb.inc(1);
progress.inc(1);
}
pb.finish_with_message(format!("{} Updated.", updated))
progress.finish_with_message(format!("{} Updated.", updated))
}
}
pub fn clear_ignore(&self) {
match self.conn.execute("UPDATE Tracks SET Ignore=0;", []) {
Ok(_) => { },
Err(e) => { log::error!("Failed clear Ignore column. {}", e); }
let cmd = self.conn.execute("UPDATE Tracks SET Ignore=0;", []);
if let Err(e) = cmd {
log::error!("Failed clear Ignore column. {}", e);
}
}
pub fn set_ignore(&self, line:&str) {
pub fn set_ignore(&self, line: &str) {
log::info!("Ignore: {}", line);
if line.starts_with("SQL:") {
let sql = &line[4..];
match self.conn.execute(&format!("UPDATE Tracks Set Ignore=1 WHERE {}", sql), []) {
Ok(_) => { },
Err(e) => { log::error!("Failed set Ignore column for '{}'. {}", line, e); }
let cmd = self.conn.execute(&format!("UPDATE Tracks Set Ignore=1 WHERE {}", sql), []);
if let Err(e) = cmd {
log::error!("Failed set Ignore column for '{}'. {}", line, e);
}
} else {
match self.conn.execute(&format!("UPDATE Tracks SET Ignore=1 WHERE File LIKE \"{}%\"", line), []) {
Ok(_) => { },
Err(e) => { log::error!("Failed set Ignore column for '{}'. {}", line, e); }
let cmd = self.conn.execute(&format!("UPDATE Tracks SET Ignore=1 WHERE File LIKE \"{}%\"", line), []);
if let Err(e) = cmd {
log::error!("Failed set Ignore column for '{}'. {}", line, e);
}
}
}
}
/// Write the analysis values stored in the database back into each track's
/// file tags.
///
/// * `mpaths` - music root folders; DB paths are relative, so each folder is
///   tried in turn until the file is found.
/// * `preserve_mod_times` - keep the file's modification time when writing.
pub fn export(&self, mpaths: &Vec<PathBuf>, preserve_mod_times: bool) {
    let total = self.get_track_count();
    if total > 0 {
        let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
            ProgressStyle::default_bar()
                .template(
                    "[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}",
                )
                .progress_chars("=> "),
        );
        let mut stmt = self.conn.prepare("SELECT File, Tempo, Zcr, MeanSpectralCentroid, StdDevSpectralCentroid, MeanSpectralRolloff, StdDevSpectralRolloff, MeanSpectralFlatness, StdDevSpectralFlatness, MeanLoudness, StdDevLoudness, Chroma1, Chroma2, Chroma3, Chroma4, Chroma5, Chroma6, Chroma7, Chroma8, Chroma9, Chroma10 FROM Tracks ORDER BY File ASC;").unwrap();
        let track_iter = stmt
            .query_map([], |row| {
                Ok(AnalysisResults {
                    file: row.get(0)?,
                    analysis: Analysis::new([row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?, row.get(5)?, row.get(6)?, row.get(7)?, row.get(8)?, row.get(9)?, row.get(10)?, row.get(11)?, row.get(12)?, row.get(13)?, row.get(14)?, row.get(15)?, row.get(16)?, row.get(17)?, row.get(18)?, row.get(19)?, row.get(20)?]),
                })
            })
            .unwrap();
        let mut updated = 0;
        for tr in track_iter {
            let dbtags = tr.unwrap();
            // CUE entries are virtual sub-tracks of one audio file, so there is
            // no individual file to write tags to - skip them.
            if !dbtags.file.contains(CUE_MARKER) {
                progress.set_message(format!("{}", dbtags.file));
                for mpath in mpaths {
                    let track_path = mpath.join(&dbtags.file);
                    if track_path.exists() {
                        let spath = String::from(track_path.to_string_lossy());
                        let meta = tags::read(&spath, true);
                        // Only rewrite when the file has no stored analysis, or
                        // its stored analysis differs from the DB values.
                        if meta.is_empty() || meta.analysis.is_none() || meta.analysis.unwrap()!=dbtags.analysis {
                            tags::write_analysis(&spath, &dbtags.analysis, preserve_mod_times);
                            updated+=1;
                        }
                        break;
                    }
                }
            }
            progress.inc(1);
        }
        progress.finish_with_message(format!("{} Updated.", updated))
    }
}
}

141
src/ffmpeg.rs Normal file
View File

@ -0,0 +1,141 @@
/**
* Analyse music with Bliss
*
* Copyright (c) 2022-2025 Craig Drummond <craig.p.drummond@gmail.com>
* GPLv3 license.
*
**/
use crate::db;
use bliss_audio::decoder::Decoder as DecoderTrait;
use bliss_audio::decoder::PreAnalyzedSong;
use bliss_audio::{BlissError, BlissResult};
use std::path::Path;
use std::process::{Child, Command, Stdio};
use std::io;
use std::io::Read;
use std::io::BufRead;
use std::io::BufReader;
use std::time::Duration;
pub const TIME_SEP:&str = "<TIME>";
pub struct FFmpegCmdDecoder;
/// Read the raw f32le/22050Hz/mono PCM that the spawned ffmpeg child writes
/// to its stdout, and convert it into a `PreAnalyzedSong`.
///
/// Fix: the child process is now reaped with `wait()` once stdout is drained
/// (on both success and error paths) - `Child`'s `Drop` does not wait, so the
/// previous code accumulated zombie processes on Unix.
fn handle_command(mut child: Child) -> BlissResult<PreAnalyzedSong> {
    let mut decoded_song = PreAnalyzedSong::default();
    let mut buffer: Vec<u8> = Vec::new();
    let read_result = {
        let stdout = child.stdout.as_mut().expect("Failed to capture stdout");
        io::BufReader::new(stdout).read_to_end(&mut buffer)
    };
    // Reap the child to avoid leaving a zombie; exit status itself is
    // irrelevant here as we only care about the PCM actually produced.
    let _ = child.wait();
    read_result.map_err(|e| {
        BlissError::DecodingError(format!("Could not read the decoded file into a buffer: {}", e))
    })?;
    // Reassemble little-endian f32 samples from the raw byte stream.
    decoded_song.sample_array = buffer
        .chunks_exact(4)
        .map(|x| {
            let mut a: [u8; 4] = [0; 4];
            a.copy_from_slice(x);
            f32::from_le_bytes(a)
        })
        .collect();
    // ffmpeg was asked for mono at 22050 Hz, so samples / rate = seconds.
    let duration_seconds = decoded_song.sample_array.len() as f32 / 22050_f32;
    decoded_song.duration = Duration::from_nanos((duration_seconds * 1e9_f32).round() as u64);
    Ok(decoded_song)
}
/// Return everything after the first `=` in `line` (empty string if there is
/// no `=`). Used to extract values from ffprobe's `key=value` output lines;
/// values may themselves contain `=`, hence only the first one splits.
///
/// Replaces the manual split/skip-first/re-join loop with the equivalent
/// idiomatic `splitn(2, '=')`.
fn get_val(line: String) -> String {
    line.splitn(2, '=').nth(1).unwrap_or("").to_string()
}
/// Read basic metadata (title/artist/album/album-artist/genre/duration) from
/// a file by parsing `ffprobe -show_entries format` output. Returns a default
/// (empty) `Metadata` if ffprobe cannot be spawned or emits nothing useful.
///
/// Fix: the ffprobe child is now reaped with `wait()` after its output has
/// been consumed - `Child`'s `Drop` does not wait, so the previous code left
/// a zombie process behind per call on Unix.
pub fn read_tags(path: &String) -> db::Metadata {
    let mut meta = db::Metadata {
        duration: 0,
        ..db::Metadata::default()
    };
    if let Ok(mut child) = Command::new("ffprobe")
        .arg("-hide_banner")
        .arg("-v").arg("quiet")
        .arg("-show_entries").arg("format")
        .arg(path)
        .stdout(Stdio::piped())
        .spawn() {
        if let Some(out) = child.stdout.take() {
            let lines = BufReader::new(out).lines().filter_map(|line| line.ok());
            for line in lines {
                if line.starts_with("duration=") {
                    // ffprobe reports duration as fractional seconds; store whole seconds.
                    let val = get_val(line);
                    if let Ok(v) = val.parse::<f32>() {
                        meta.duration = v as u32;
                    }
                } else if line.starts_with("TAG:title=") {
                    meta.title = get_val(line);
                } else if line.starts_with("TAG:artist=") {
                    meta.artist = get_val(line);
                } else if line.starts_with("TAG:album=") {
                    meta.album = get_val(line);
                } else if line.starts_with("TAG:album_artist=") {
                    meta.album_artist = get_val(line);
                } else if line.starts_with("TAG:genre=") {
                    meta.genre = get_val(line);
                }
            }
        }
        // Reap the child so it does not linger as a zombie.
        let _ = child.wait();
    }
    meta
}
impl DecoderTrait for FFmpegCmdDecoder {
    /// Decode `path` to raw f32le/22050Hz/mono PCM via the `ffmpeg` command.
    ///
    /// A CUE sub-track is encoded as `audio<TIME>start<TIME>duration` in the
    /// path, in which case `-ss`/`-t` are added to extract just that section.
    ///
    /// Fixes: the iterator is no longer cloned just to count its parts
    /// (collect once instead), and the duplicated ffmpeg command construction
    /// in the two branches is merged into one builder.
    fn decode(path: &Path) -> BlissResult<PreAnalyzedSong> {
        let binding = path.to_string_lossy();
        // First check if this is a CUE file track - which will have start and duration
        let parts: Vec<&str> = binding.split(TIME_SEP).collect();
        let mut cmd = Command::new("ffmpeg");
        cmd.arg("-hide_banner")
            .arg("-loglevel").arg("panic");
        if parts.len() == 3 {
            // CUE sub-track: seek to its start and limit to its duration.
            cmd.arg("-i").arg(parts[0])
                .arg("-ss").arg(parts[1])
                .arg("-t").arg(parts[2]);
        } else {
            cmd.arg("-i").arg(path);
        }
        // Bliss expects 22050 Hz mono f32le samples on stdout.
        cmd.arg("-ar").arg("22050")
            .arg("-ac").arg("1")
            .arg("-c:a")
            .arg("pcm_f32le")
            .arg("-f").arg("f32le")
            .arg("pipe:1")
            .stdout(Stdio::piped());
        match cmd.spawn() {
            Ok(child) => handle_command(child),
            Err(_) => Err(BlissError::DecodingError("ffmpeg command failed".to_string())),
        }
    }
}

View File

@ -1,10 +1,11 @@
/**
* Analyse music with Bliss
*
* Copyright (c) 2022 Craig Drummond <craig.p.drummond@gmail.com>
* Copyright (c) 2022-2025 Craig Drummond <craig.p.drummond@gmail.com>
* GPLv3 license.
*
**/
use argparse::{ArgumentParser, Store, StoreTrue};
use chrono::Local;
use configparser::ini::Ini;
@ -13,15 +14,18 @@ use log::LevelFilter;
use std::io::Write;
use std::path::PathBuf;
use std::process;
#[cfg(feature = "ffmpeg")]
use which::which;
mod analyse;
mod cue;
mod db;
#[cfg(feature = "ffmpeg")]
mod ffmpeg;
mod tags;
mod upload;
const VERSION: &'static str = env!("CARGO_PKG_VERSION");
const TOP_LEVEL_INI_TAG:&str = "Bliss";
const TOP_LEVEL_INI_TAG: &str = "Bliss";
fn main() {
let mut config_file = "config.ini".to_string();
@ -29,16 +33,22 @@ fn main() {
let mut logging = "info".to_string();
let mut music_path = ".".to_string();
let mut ignore_file = "ignore.txt".to_string();
let mut keep_old:bool = false;
let mut dry_run:bool = false;
let mut keep_old: bool = false;
let mut dry_run: bool = false;
let mut task = "".to_string();
let mut lms_host = "127.0.0.1".to_string();
let mut max_num_tracks:usize = 0;
let mut music_paths:Vec<PathBuf> = Vec::new();
let mut lms_json_port:u16 = 9000;
let mut max_num_files: usize = 0;
let mut music_paths: Vec<PathBuf> = Vec::new();
let mut max_threads: usize = 0;
let mut use_tags = false;
let mut preserve_mod_times = false;
match dirs::home_dir() {
Some(path) => { music_path = String::from(path.join("Music").to_string_lossy()); }
None => { }
Some(path) => {
music_path = String::from(path.join("Music").to_string_lossy());
}
None => {}
}
{
@ -46,8 +56,9 @@ fn main() {
let music_path_help = format!("Music folder (default: {})", &music_path);
let db_path_help = format!("Database location (default: {})", &db_path);
let logging_help = format!("Log level; trace, debug, info, warn, error. (default: {})", logging);
let ignore_file_help = format!("File containg items to mark as ignored. (default: {})", ignore_file);
let ignore_file_help = format!("File contains items to mark as ignored. (default: {})", ignore_file);
let lms_host_help = format!("LMS hostname or IP address (default: {})", &lms_host);
let lms_json_port_help = format!("LMS JSONRPC port (default: {})", &lms_json_port);
let description = format!("Bliss Analyser v{}", VERSION);
// arg_parse.refer 'borrows' db_path, etc, and can only have one
@ -58,33 +69,53 @@ fn main() {
arg_parse.refer(&mut music_path).add_option(&["-m", "--music"], Store, &music_path_help);
arg_parse.refer(&mut db_path).add_option(&["-d", "--db"], Store, &db_path_help);
arg_parse.refer(&mut logging).add_option(&["-l", "--logging"], Store, &logging_help);
arg_parse.refer(&mut keep_old).add_option(&["-k", "--keep-old"], StoreTrue, "Don't remove tracks from DB if they don't exist (used with analyse task)");
arg_parse.refer(&mut keep_old).add_option(&["-k", "--keep-old"], StoreTrue, "Don't remove files from DB if they don't exist (used with analyse task)");
arg_parse.refer(&mut dry_run).add_option(&["-r", "--dry-run"], StoreTrue, "Dry run, only show what needs to be done (used with analyse task)");
arg_parse.refer(&mut ignore_file).add_option(&["-i", "--ignore"], Store, &ignore_file_help);
arg_parse.refer(&mut lms_host).add_option(&["-L", "--lms"], Store, &lms_host_help);
arg_parse.refer(&mut max_num_tracks).add_option(&["-n", "--numtracks"], Store, "Maximum number of tracks to analyse");
arg_parse.refer(&mut task).add_argument("task", Store, "Task to perform; analyse, tags, ignore, upload, stopmixer.");
arg_parse.refer(&mut lms_json_port).add_option(&["-J", "--json"], Store, &lms_json_port_help);
arg_parse.refer(&mut max_num_files).add_option(&["-n", "--numfiles"], Store, "Maximum number of files to analyse");
arg_parse.refer(&mut max_threads).add_option(&["-t", "--threads"], Store, "Maximum number of threads to use for analysis");
arg_parse.refer(&mut use_tags).add_option(&["-T", "--tags"], StoreTrue, "Read/write analysis results from/to source files");
arg_parse.refer(&mut preserve_mod_times).add_option(&["-p", "--preserve"], StoreTrue, "Preserve modification time when writing tags to files");
arg_parse.refer(&mut task).add_argument("task", Store, "Task to perform; analyse, tags, ignore, upload, export, stopmixer.");
arg_parse.parse_args_or_exit();
}
if !(logging.eq_ignore_ascii_case("trace") || logging.eq_ignore_ascii_case("debug") || logging.eq_ignore_ascii_case("info") || logging.eq_ignore_ascii_case("warn") || logging.eq_ignore_ascii_case("error")) {
if !(logging.eq_ignore_ascii_case("trace") || logging.eq_ignore_ascii_case("debug") || logging.eq_ignore_ascii_case("info")
|| logging.eq_ignore_ascii_case("warn") || logging.eq_ignore_ascii_case("error")) {
logging = String::from("info");
}
let other_level = if logging.eq_ignore_ascii_case("trace") { LevelFilter::Trace } else { LevelFilter::Error };
let mut builder = env_logger::Builder::from_env(env_logger::Env::default().filter_or("XXXXXXXX", logging));
builder.filter(Some("bliss_audio"), LevelFilter::Error);
builder.format(|buf, record| writeln!(buf, "[{} {:.1}] {}", Local::now().format("%Y-%m-%d %H:%M:%S"), record.level(), record.args()));
builder.filter(Some("bliss_audio"), other_level);
builder.filter(Some("symphonia"), other_level);
builder.format(|buf, record| {
writeln!(buf, "[{} {:.1}] {}", Local::now().format("%Y-%m-%d %H:%M:%S"), record.level(), record.args())
});
builder.init();
if task.is_empty() {
log::error!("No task specified, please choose from; analyse, tags, ignore, upload");
if task.is_empty() {
log::error!("No task specified, please choose from; analyse, tags, ignore, upload, export, stopmixer");
process::exit(-1);
}
if !task.eq_ignore_ascii_case("analyse") && !task.eq_ignore_ascii_case("tags") && !task.eq_ignore_ascii_case("ignore") && !task.eq_ignore_ascii_case("upload") && !task.eq_ignore_ascii_case("stopmixer") {
if !task.eq_ignore_ascii_case("analyse") && !task.eq_ignore_ascii_case("tags") && !task.eq_ignore_ascii_case("ignore")
&& !task.eq_ignore_ascii_case("upload") && !task.eq_ignore_ascii_case("export") && !task.eq_ignore_ascii_case("stopmixer") {
log::error!("Invalid task ({}) supplied", task);
process::exit(-1);
}
// Ensure ffmpeg is in PATH...
#[cfg(feature = "ffmpeg")]
match which("ffmpeg") {
Ok(_) => { }
Err(_) => {
log::error!("'ffmpeg' was not found! Please ensure this in your PATH");
process::exit(-1);
},
}
if !config_file.is_empty() {
let path = PathBuf::from(&config_file);
if path.exists() && path.is_file() {
@ -94,23 +125,35 @@ fn main() {
let path_keys: [&str; 5] = ["music", "music_1", "music_2", "music_3", "music_4"];
for key in &path_keys {
match config.get(TOP_LEVEL_INI_TAG, key) {
Some(val) => { music_paths.push(PathBuf::from(&val)); },
Some(val) => { music_paths.push(PathBuf::from(&val)); }
None => { }
}
}
match config.get(TOP_LEVEL_INI_TAG, "db") {
Some(val) => { db_path = val; },
Some(val) => { db_path = val; }
None => { }
}
match config.get(TOP_LEVEL_INI_TAG, "lms") {
Some(val) => { lms_host = val; },
Some(val) => { lms_host = val; }
None => { }
}
match config.get(TOP_LEVEL_INI_TAG, "json") {
Some(val) => { lms_json_port = val.parse::<u16>().unwrap(); }
None => { }
}
match config.get(TOP_LEVEL_INI_TAG, "ignore") {
Some(val) => { ignore_file = val; },
Some(val) => { ignore_file = val; }
None => { }
}
},
match config.get(TOP_LEVEL_INI_TAG, "tags") {
Some(val) => { use_tags = val.eq("true"); }
None => { }
}
match config.get(TOP_LEVEL_INI_TAG, "preserve") {
Some(val) => { preserve_mod_times = val.eq("true"); }
None => { }
}
}
Err(e) => {
log::error!("Failed to load config file. {}", e);
process::exit(-1);
@ -124,7 +167,7 @@ fn main() {
}
if task.eq_ignore_ascii_case("stopmixer") {
upload::stop_mixer(&lms_host);
upload::stop_mixer(&lms_host, lms_json_port);
} else {
if db_path.len() < 3 {
log::error!("Invalid DB path ({}) supplied", db_path);
@ -139,7 +182,7 @@ fn main() {
if task.eq_ignore_ascii_case("upload") {
if path.exists() {
upload::upload_db(&db_path, &lms_host);
upload::upload_db(&db_path, &lms_host, lms_json_port);
} else {
log::error!("DB ({}) does not exist", db_path);
process::exit(-1);
@ -169,8 +212,11 @@ fn main() {
process::exit(-1);
}
analyse::update_ignore(&db_path, &ignore_path);
} else if task.eq_ignore_ascii_case("export") {
analyse::export(&db_path, &music_paths, preserve_mod_times);
} else {
analyse::analyse_files(&db_path, &music_paths, dry_run, keep_old, max_num_tracks);
let ignore_path = PathBuf::from(&ignore_file);
analyse::analyse_files(&db_path, &music_paths, dry_run, keep_old, max_num_files, max_threads, &ignore_path, use_tags, preserve_mod_times);
}
}
}

View File

@ -1,87 +1,189 @@
/**
* Analyse music with Bliss
*
* Copyright (c) 2022 Craig Drummond <craig.p.drummond@gmail.com>
* Copyright (c) 2022-2025 Craig Drummond <craig.p.drummond@gmail.com>
* GPLv3 license.
*
**/
use lofty::{Accessor, ItemKey, Probe};
use crate::db;
use lofty::config::WriteOptions;
use lofty::file::FileType;
use lofty::prelude::{Accessor, AudioFile, ItemKey, TagExt, TaggedFileExt};
use lofty::tag::{ItemValue, Tag, TagItem};
use regex::Regex;
use std::fs::File;
use std::fs;
use std::path::Path;
use substring::Substring;
use crate::db;
use std::time::SystemTime;
use bliss_audio::{Analysis, AnalysisIndex};
const MAX_GENRE_VAL:usize = 192;
const MAX_GENRE_VAL: usize = 192;
const NUM_ANALYSIS_VALS: usize = 20;
const ANALYSIS_TAG:ItemKey = ItemKey::Comment;
const ANALYSIS_TAG_START: &str = "BLISS_ANALYSIS";
const ANALYSIS_TAG_VER: u16 = 1;
pub fn read(track:&String) -> db::Metadata {
let mut meta = db::Metadata{
title:String::new(),
artist:String::new(),
album:String::new(),
album_artist:String::new(),
genre:String::new(),
duration:180
pub fn write_analysis(track: &String, analysis: &Analysis, preserve_mod_times: bool) {
let value = format!("{},{},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24}", ANALYSIS_TAG_START, ANALYSIS_TAG_VER,
analysis[AnalysisIndex::Tempo], analysis[AnalysisIndex::Zcr], analysis[AnalysisIndex::MeanSpectralCentroid], analysis[AnalysisIndex::StdDeviationSpectralCentroid], analysis[AnalysisIndex::MeanSpectralRolloff],
analysis[AnalysisIndex::StdDeviationSpectralRolloff], analysis[AnalysisIndex::MeanSpectralFlatness], analysis[AnalysisIndex::StdDeviationSpectralFlatness], analysis[AnalysisIndex::MeanLoudness], analysis[AnalysisIndex::StdDeviationLoudness],
analysis[AnalysisIndex::Chroma1], analysis[AnalysisIndex::Chroma2], analysis[AnalysisIndex::Chroma3], analysis[AnalysisIndex::Chroma4], analysis[AnalysisIndex::Chroma5],
analysis[AnalysisIndex::Chroma6], analysis[AnalysisIndex::Chroma7], analysis[AnalysisIndex::Chroma8], analysis[AnalysisIndex::Chroma9], analysis[AnalysisIndex::Chroma10]);
if let Ok(mut file) = lofty::read_from_path(Path::new(track)) {
let tag = match file.primary_tag_mut() {
Some(primary_tag) => primary_tag,
None => {
if let Some(first_tag) = file.first_tag_mut() {
first_tag
} else {
let tag_type = file.primary_tag_type();
file.insert_tag(Tag::new(tag_type));
file.primary_tag_mut().unwrap()
}
},
};
// Remove any existing analysis result tag
let entries = tag.get_strings(&ANALYSIS_TAG);
let mut keep: Vec<ItemValue> = Vec::new();
for entry in entries {
if !entry.starts_with(ANALYSIS_TAG_START) {
keep.push(ItemValue::Text(entry.to_string()));
}
}
tag.remove_key(&ANALYSIS_TAG);
for k in keep {
tag.push(TagItem::new(ANALYSIS_TAG, k));
}
// Store analysis results
tag.push(TagItem::new(ANALYSIS_TAG, ItemValue::Text(value)));
let now = SystemTime::now();
let mut mod_time = now;
if preserve_mod_times {
if let Ok(fmeta) = fs::metadata(track) {
if let Ok(time) = fmeta.modified() {
mod_time = time;
}
}
}
let _ = tag.save_to_path(Path::new(track), WriteOptions::default());
if preserve_mod_times {
if mod_time<now {
if let Ok(f) = File::open(track) {
let _ = f.set_modified(mod_time);
}
}
}
}
}
pub fn read(track: &String, read_analysis: bool) -> db::Metadata {
let mut meta = db::Metadata {
duration: 180,
..db::Metadata::default()
};
let path = Path::new(track);
match Probe::open(path) {
Ok(probe) => {
match probe.read(true) {
Ok(file) => {
let tag = match file.primary_tag() {
Some(primary_tag) => primary_tag,
None => file.first_tag().expect("Error: No tags found!"),
};
meta.title=tag.title().unwrap_or("").to_string();
meta.artist=tag.artist().unwrap_or("").to_string();
meta.album=tag.album().unwrap_or("").to_string();
meta.album_artist=tag.get_string(&ItemKey::AlbumArtist).unwrap_or("").to_string();
meta.genre=tag.genre().unwrap_or("").to_string();
// Check whether MP3 as numeric genre, and if so covert to text
if file.file_type().eq(&lofty::FileType::MP3) {
match tag.genre() {
Some(genre) => {
let test = &genre.parse::<u8>();
match test {
Ok(val) => {
let idx:usize = *val as usize;
if idx<MAX_GENRE_VAL {
meta.genre=lofty::id3::v1::GENRES[idx].to_string();
}
},
Err(_) => {
// Check for "(number)text"
let re = Regex::new(r"^\([0-9]+\)").unwrap();
if re.is_match(&genre) {
match genre.find(")") {
Some(end) => {
let test = &genre.to_string().substring(1, end).parse::<u8>();
match test {
Ok(val) => {
let idx:usize = *val as usize;
if idx<MAX_GENRE_VAL {
meta.genre=lofty::id3::v1::GENRES[idx].to_string();
}
},
Err(_) => { }
}
},
None => { }
if let Ok(file) = lofty::read_from_path(Path::new(track)) {
let tag = match file.primary_tag() {
Some(primary_tag) => primary_tag,
None => file.first_tag().expect("Error: No tags found!"),
};
meta.title = tag.title().unwrap_or_default().to_string();
meta.artist = tag.artist().unwrap_or_default().to_string();
meta.album = tag.album().unwrap_or_default().to_string();
meta.album_artist = tag.get_string(&ItemKey::AlbumArtist).unwrap_or_default().to_string();
meta.genre = tag.genre().unwrap_or_default().to_string();
// Check whether MP3 has numeric genre, and if so covert to text
if file.file_type().eq(&FileType::Mpeg) {
match tag.genre() {
Some(genre) => {
let test = genre.parse::<u8>();
match test {
Ok(val) => {
let idx: usize = val as usize;
if idx < MAX_GENRE_VAL {
meta.genre = lofty::id3::v1::GENRES[idx].to_string();
}
}
Err(_) => {
// Check for "(number)text"
let re = Regex::new(r"^\([0-9]+\)").unwrap();
if re.is_match(&genre) {
match genre.find(")") {
Some(end) => {
let test = genre.to_string().substring(1, end).parse::<u8>();
if let Ok(val) = test {
let idx: usize = val as usize;
if idx < MAX_GENRE_VAL {
meta.genre =
lofty::id3::v1::GENRES[idx].to_string();
}
}
}
None => { }
}
},
None => {}
}
}
}
meta.duration=file.properties().duration().as_secs() as u32;
},
Err(_) => { }
}
None => { }
}
},
Err(_) => { }
}
meta.duration = file.properties().duration().as_secs() as u32;
if read_analysis {
let entries = tag.get_strings(&ANALYSIS_TAG);
for entry in entries {
if entry.len()>(ANALYSIS_TAG_START.len()+(NUM_ANALYSIS_VALS*8)) && entry.starts_with(ANALYSIS_TAG_START) {
let parts = entry.split(",");
let mut index = 0;
let mut vals = [0.; NUM_ANALYSIS_VALS];
for part in parts {
if 0==index {
if part!=ANALYSIS_TAG_START {
break;
}
} else if 1==index {
match part.parse::<u16>() {
Ok(ver) => {
if ver!=ANALYSIS_TAG_VER {
break;
}
},
Err(_) => {
break;
}
}
} else if (index - 2) < NUM_ANALYSIS_VALS {
match part.parse::<f32>() {
Ok(val) => {
vals[index - 2] = val;
},
Err(_) => {
break;
}
}
} else {
break;
}
index += 1;
}
if index == (NUM_ANALYSIS_VALS+2) {
meta.analysis = Some(Analysis::new(vals));
break;
}
}
}
}
}
meta
}
}

View File

@ -1,7 +1,7 @@
/**
* Analyse music with Bliss
*
* Copyright (c) 2022 Craig Drummond <craig.p.drummond@gmail.com>
* Copyright (c) 2022-2025 Craig Drummond <craig.p.drummond@gmail.com>
* GPLv3 license.
*
**/
@ -12,93 +12,76 @@ use std::process;
use substring::Substring;
use ureq;
fn fail(msg:&str) {
fn fail(msg: &str) {
log::error!("{}", msg);
process::exit(-1);
}
pub fn stop_mixer(lms:&String) {
pub fn stop_mixer(lms_host: &String, json_port: u16) {
let stop_req = "{\"id\":1, \"method\":\"slim.request\",\"params\":[\"\",[\"blissmixer\",\"stop\"]]}";
log::info!("Asking plugin to stop mixer");
match ureq::post(&format!("http://{}:9000/jsonrpc.js", lms)).send_string(&stop_req) {
Ok(_) => { },
Err(e) => { log::error!("Failed to ask plugin to stop mixer. {}", e); }
let req = ureq::post(&format!("http://{}:{}/jsonrpc.js", lms_host, json_port)).send_string(&stop_req);
if let Err(e) = req {
log::error!("Failed to ask plugin to stop mixer. {}", e);
}
}
pub fn upload_db(db_path:&String, lms:&String) {
pub fn upload_db(db_path: &String, lms_host: &String, json_port: u16) {
// First tell LMS to restart the mixer in upload mode
let start_req = "{\"id\":1, \"method\":\"slim.request\",\"params\":[\"\",[\"blissmixer\",\"start-upload\"]]}";
let mut port:u16 = 0;
let mut port: u16 = 0;
log::info!("Requesting LMS plugin to allow uploads");
match ureq::post(&format!("http://{}:9000/jsonrpc.js", lms)).send_string(&start_req) {
Ok(resp) => {
match resp.into_string() {
Ok(text) => {
match text.find("\"port\":") {
Some(s) => {
let txt = text.to_string().substring(s+7, text.len()).to_string();
match txt.find("}") {
Some(e) => {
let p = txt.substring(0, e);
let test = p.parse::<u16>();
match test {
Ok(val) => {
port = val;
},
Err(_) => { fail("Could not parse resp (cast)"); }
}
},
None => { fail("Could not parse resp (closing)"); }
match ureq::post(&format!("http://{}:{}/jsonrpc.js", lms_host, json_port)).send_string(&start_req) {
Ok(resp) => match resp.into_string() {
Ok(text) => match text.find("\"port\":") {
Some(s) => {
let txt = text.to_string().substring(s + 7, text.len()).to_string();
match txt.find("}") {
Some(e) => {
let p = txt.substring(0, e);
let test = p.parse::<u16>();
match test {
Ok(val) => { port = val; }
Err(_) => { fail("Could not parse resp (cast)"); }
}
},
None => { fail("Could not parse resp (no port)"); }
}
None => { fail("Could not parse resp (closing)"); }
}
},
Err(_) => { fail("No text?")}
}
None => { fail("Could not parse resp (no port)"); }
}
},
Err(e) => {
fail(&format!("Failed to ask LMS plugin to allow upload. {}", e));
Err(_) => fail("No text?"),
}
Err(e) => { fail(&format!("Failed to ask LMS plugin to allow upload. {}", e)); }
}
if port<=0 {
if port == 0 {
fail("Invalid port");
}
// Now we have port number, do the actual upload...
log::info!("Uploading {}", db_path);
match File::open(db_path) {
Ok(file) => {
match file.metadata() {
Ok(meta) => {
let buffered_reader = BufReader::new(file);
log::info!("Length: {}", meta.len());
match ureq::put(&format!("http://{}:{}/upload", lms, port))
.set("Content-Length", &meta.len().to_string())
.set("Content-Type", "application/octet-stream")
.send(buffered_reader) {
Ok(_) => {
log::info!("Database uploaded");
stop_mixer(lms);
},
Err(e) => {
fail(&format!("Failed to upload database. {}", e));
}
Ok(file) => match file.metadata() {
Ok(meta) => {
let buffered_reader = BufReader::new(file);
log::info!("Length: {}", meta.len());
match ureq::put(&format!("http://{}:{}/upload", lms_host, port))
.set("Content-Length", &meta.len().to_string())
.set("Content-Type", "application/octet-stream")
.send(buffered_reader) {
Ok(_) => {
log::info!("Database uploaded");
stop_mixer(lms_host, json_port);
}
},
Err(e) => {
fail(&format!("Failed to open database. {}", e));
Err(e) => { fail(&format!("Failed to upload database. {}", e)); }
}
}
},
Err(e) => {
fail(&format!("Failed to open database. {}", e));
Err(e) => { fail(&format!("Failed to open database. {}", e)); }
}
Err(e) => { fail(&format!("Failed to open database. {}", e)); }
}
}
}