Compare commits

...

27 Commits

Author SHA1 Message Date
CraigD
e488903d7f
Merge pull request #32 from chincheta0815/docker_updates
change docker commands to get more images updates
2025-06-15 12:25:22 +00:00
CDrummond
2d2ddc7b17 Don't crash if file has no tags.
Closes #33
2025-06-15 13:21:49 +01:00
chincheta0815@nowhere.local
9e107c5fc2 change docker commands to get more images updates 2025-06-08 08:20:50 +02:00
CDrummond
4714f8f4ef Formatting 2025-05-26 17:57:25 +01:00
CDrummond
fd38256dc0 Handle both BLISS_ANALYSIS and bliss_analysis
Issue #31
2025-05-26 17:38:00 +01:00
CDrummond
b4eb7562cf Gracefully handle Ctrl-C. 2025-05-22 12:07:24 +01:00
CDrummond
c7d436c13b Spelling 2025-05-22 12:07:04 +01:00
CDrummond
e04b22ebcc Strip debug info from release binaries. 2025-05-20 12:10:00 +01:00
CDrummond
6055fa2af7 Consistent status messages 2025-05-20 12:09:40 +01:00
CDrummond
0a1b9d8790 Formatting 2025-05-20 12:09:25 +01:00
CDrummond
e395ff515b Consistency 2025-05-20 12:09:11 +01:00
CDrummond
4794a9b747 Remove --read-tags and --write-tags. Always read tags, so --tags just
controls writing.
Issue #29
2025-05-20 12:08:20 +01:00
CDrummond
b623c1939b No need to mention multi threaded export, as export itself is new to
this release.
2025-05-19 12:08:00 +01:00
CDrummond
de232d2a33 Use max of 16 significant digits (trailing zeros stripped) when writing analysis results to file tags.
Issue #26
2025-05-19 12:07:28 +01:00
CDrummond
4830ef92b3 Make constant name more meaningful 2025-05-19 12:06:36 +01:00
CDrummond
520190c52e When writing new BLISS_ANALYSIS tag remove any old comment tags.
Issue #27
2025-05-19 12:06:01 +01:00
CDrummond
d4f32d8b4d Next version is 0.4.0, not 0.5.0!!! 2025-05-15 13:53:21 +01:00
CDrummond
0250117b15 Add --read-tags and --write-tags commandline options.
Closes #25
2025-05-15 12:04:49 +01:00
CDrummond
d2b3db43f7 Move functions to where they belong. 2025-05-15 12:04:04 +01:00
CDrummond
2759f7f47c Multi-thread DB export.
Closes #24
2025-05-15 12:03:47 +01:00
CDrummond
a79712cb24 Check for multiple genre tags, and if found add as semi-colon separated list to DB. 2025-05-14 12:23:45 +01:00
CDrummond
5722b04c85 - Store analysis in BLISS_ANALYSIS tag, not in a COMMENT tag.
- Fix read of tags during analysis phase.
Closes #23
2025-05-13 12:25:50 +01:00
CDrummond
50a55ee61a ...and again... 2025-05-01 07:16:40 +01:00
CDrummond
b5c9356ef2 Fix formatting. 2025-05-01 07:15:39 +01:00
CDrummond
9d9117e8f7 Allow "export" !!! 2025-03-24 17:07:26 +00:00
CDrummond
417ac5f652 Add option to preserve file modification time when writing tags.
Issue #21
2025-03-24 07:17:51 +00:00
CDrummond
342440f04b Add action to export analysis results from DB to files.
Issue #20
2025-03-24 06:47:35 +00:00
9 changed files with 552 additions and 168 deletions

View File

@@ -149,15 +149,26 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Build ARM ffmpeg on Debian
- name: Build ARM ffmpeg on Bullseye
run: |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_ffmpeg
docker build --pull --no-cache -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_ffmpeg
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
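# Note: '--pull --no-cache' (applied to every docker build below) forces fresh base images instead of reusing stale local caches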
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-debian-bullseye-arm-ffmpeg
path: releases/
- name: Build ARM ffmpeg on Bookworm
run: |
docker build --pull --no-cache -t bliss-analyser-cross - < docker/Dockerfile_Bookworm_ffmpeg
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-linux-arm-ffmpeg
name: bliss-analyser-debian-bookworm-arm-ffmpeg
path: releases/
@@ -170,7 +181,7 @@ jobs:
- name: Build ARM static-libav on Debian
run: |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_static
docker build --pull --no-cache -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_static
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
@@ -189,7 +200,7 @@ jobs:
- name: Build ARM libav on Bullseye
run: |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_libav
docker build --pull --no-cache -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_libav
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
@@ -200,7 +211,7 @@ jobs:
- name: Build ARM libav on Bookworm
run : |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bookworm_libav
docker build --pull --no-cache -t bliss-analyser-cross - < docker/Dockerfile_Bookworm_libav
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
@@ -219,7 +230,7 @@ jobs:
- name: Build ARM symphonia on Debian
run: |
docker build -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_symphonia
docker build --pull --no-cache -t bliss-analyser-cross - < docker/Dockerfile_Bullseye_symphonia
docker run --rm -v $PWD/target:/build -v $PWD:/src bliss-analyser-cross
- name: Upload artifacts
@@ -482,4 +493,4 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: bliss-analyser-windows-symphonia
path: releases/
path: releases/

35
Cargo.lock generated
View File

@@ -180,13 +180,14 @@ dependencies = [
[[package]]
name = "bliss-analyser"
version = "0.3.0"
version = "0.4.1"
dependencies = [
"anyhow",
"argparse",
"bliss-audio",
"chrono",
"configparser",
"ctrlc",
"dirs",
"env_logger",
"hhmmss",
@@ -289,6 +290,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chrono"
version = "0.4.40"
@@ -410,6 +417,16 @@ dependencies = [
"lazy_static",
]
[[package]]
name = "ctrlc"
version = "3.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46f93780a459b7d656ef7f071fe699c4d3d2cb201c4b24d085b6ddc505276e73"
dependencies = [
"nix",
"windows-sys",
]
[[package]]
name = "data-encoding"
version = "2.8.0"
@@ -766,9 +783,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "libc"
version = "0.2.170"
version = "0.2.172"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828"
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "libloading"
@@ -909,6 +926,18 @@ dependencies = [
"rand",
]
[[package]]
name = "nix"
version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
dependencies = [
"bitflags 2.9.0",
"cfg-if",
"cfg_aliases",
"libc",
]
[[package]]
name = "noisy_float"
version = "0.2.0"

View File

@@ -1,6 +1,6 @@
[package]
name = "bliss-analyser"
version = "0.3.0"
version = "0.4.1"
authors = ["Craig Drummond <craig.p.drummond@gmail.com>"]
edition = "2021"
license = "GPL-3.0-only"
@@ -9,6 +9,9 @@ repository = "https://github.com/CDrummond/bliss-analyser"
keywords = ["audio", "song", "similarity"]
readme = "README.md"
[profile.release]
strip=true
[dependencies]
argparse = "0.2.2"
anyhow = "1.0.40"
@@ -28,6 +31,7 @@ num_cpus = "1.13.0"
which = { version = "7.0.2", optional = true }
rcue = { version = "0.1.3", optional = true }
hhmmss = { version = "0.1.0", optional = true }
ctrlc = "3.4"
[features]
libav = ["bliss-audio/ffmpeg"]

View File

@@ -1,3 +1,22 @@
0.4.1
-----
1. Don't crash if file has no tags.
0.4.0
-----
1. Add action to export results from DB to files.
2. Add option to preserve file modification time when writing tags.
3. Store analysis in BLISS_ANALYSIS tag, not in a COMMENT tag.
4. Fix reading of tags during analysis phase.
5. Check for multiple genre tags, and if found add as semi-colon separated
list to DB.
6. When analysing files, always read tags (if present), but only write tags
if '--tags' is specified on commandline.
7. Use max of 16 significant digits (trailing zeros stripped) when writing
analysis results to file tags.
8. Strip symbols from release binaries.
9. Gracefully handle Ctrl-C.
0.3.0
-----
1. Add support for (DSD) WavPack - thanks to Bart Lauret
@@ -32,7 +51,7 @@
-----
1. Tidy up code, thanks to Serial-ATA
2. Update version of tag reader library, should now support ID3v2 in FLAC.
3. Show error message if can't open, or create, database file.
3. Show error message if can't open, or create, DB file.
4. Update version of bliss-rs, this now handles CUE processing internally.
0.1.0

View File

@@ -143,13 +143,13 @@ analysis results. This will default to `bliss.db` in the current folder.
* `lms` specifies the hostname, or IP address, of your LMS server. This is used
when uploading the database file to LMS. This defaults to `127.0.0.1`. If your LMS is
password protected then use `user:pass@server` - e.g. `lms=pi:abc123@127.0.0.1`
* `json` specifies the JSONRPC port number of your LMS server. This will defaul to
9000.
* `json` specifies the JSONRPC port number of your LMS server. This will default
to `9000`.
* `ignore` specifies the name and location of a file containing items to ignore
in mixes. See the `Ignore` section later on for more details.
* `tags` specifies whether analysis results should be written to, and re-read from,
files. Set to `true` or `false`. If enabled, then results are stored in a `COMMENT`
tag that starts with `BLISS_ANALYSIS`
* `tags` specifies whether analysis results should be written to files when analysed.
* `preserve` specifies whether file modification time should be preserved when
writing tags. Set to `true` or `false`.
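For reference, a minimal INI configuration combining these options might look like
the following (the `[Bliss]` section name and all values are illustrative):
```
[Bliss]
db=bliss.db
lms=127.0.0.1
json=9000
tags=true
preserve=true
```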
Command-line parameters
@@ -172,21 +172,24 @@ tracks are to be analysed and how many old tracks are left in the database.
* `-L` / `--lms` Hostname, or IP address, of your LMS server.
* `-J` / `--json` JSONRPC port number of your LMS server.
* `-n` / `--numtracks` Specify maximum number of tracks to analyse.
* `-T` / `--tags` Write anlysis results to file tags, and read from file tags.
* `-T` / `--tags` When using the `analyse` task, write analysis results to files
within a `BLISS_ANALYSIS` tag.
* `-p` / `--preserve` Attempt to preserve file modification time when writing tags.
Equivalent items specified in the INI config file (detailed above) will override
any specified on the commandline.
any specified on the command-line.
`bliss-analyser` requires one extra parameter, which is used to determine the
required task. This takes the following values:
* `analyse` Performs analysis of tracks.
* `upload` Uploads the database to LMS.
* `stopmixer` Asks LMS plugin to stop it instance of `bliss-mixer`
* `stopmixer` Asks LMS plugin to stop its instance of `bliss-mixer`
* `tags` Re-reads tags from your music collection, and updates the database for
any changes.
* `ignore` Reads the `ignore` file and updates the database to flag tracks as
to be ignored for mixes.
* `export` Exports tags from database and stores within the audio files.
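For example, to analyse tracks, also store the results in the files themselves, and
keep their modification times (an illustrative invocation using the flags above):
```
./bliss-analyser analyse --tags --preserve
```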
@@ -215,6 +218,19 @@ is shown.
As a rough guide, a 2015-era i7 8-core laptop with SSD analyses around 14000
tracks/hour.
Tags
----
When analysing tracks, the analyser will first check for a `BLISS_ANALYSIS` tag
within the file, and if found this will be used instead of re-analysing the file.
If `--tags` is passed, the analysis results will be stored within a `BLISS_ANALYSIS`
tag in the file itself. Note, however, that only tracks that are not currently in
the database will be analysed - therefore any such tracks would not have the
`BLISS_ANALYSIS` tag updated. To export analysis results from the database to the
files themselves, use the `export` task.
*NOTE* Use of the `BLISS_ANALYSIS` tag is not supported for CUE files.
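For illustration, the stored tag value is the tag-format version followed by the 20
comma-separated analysis values (the numbers below are invented):
```
BLISS_ANALYSIS=1,122.0,0.1234567,0.25,...
```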
CUE files
---------
@@ -357,6 +373,27 @@ is accomplished as follows:
Exporting Analysis
==================
If you have analysis results stored within the SQLite database, and not within
the files themselves, then you can use the `export` action to copy these
analysis results from the database and into the files.
(Linux / macOS)
```
./bliss-analyser export
```
(Windows)
```
.\bliss-analyser.exe export
```
*NOTE* Exporting of analysis results is not implemented for CUE tracks.
Credits
=======

View File

@@ -19,8 +19,7 @@ use indicatif::{ProgressBar, ProgressStyle};
#[cfg(not(feature = "ffmpeg"))]
use std::collections::HashSet;
use std::convert::TryInto;
use std::fs::{DirEntry, File};
use std::io::{BufRead, BufReader};
use std::fs::DirEntry;
use std::num::NonZeroUsize;
use std::path::{Path, PathBuf};
#[cfg(feature = "ffmpeg")]
@@ -45,7 +44,21 @@ const MAX_ERRORS_TO_SHOW: usize = 100;
const MAX_TAG_ERRORS_TO_SHOW: usize = 50;
const VALID_EXTENSIONS: [&str; 7] = ["m4a", "mp3", "ogg", "flac", "opus", "wv", "dsf"];
fn get_file_list(db: &mut db::Db, mpath: &Path, path: &Path, track_paths: &mut Vec<String>, cue_tracks:&mut Vec<cue::CueTrack>, file_count:&mut usize, max_num_files: usize, use_tags: bool, tagged_file_count:&mut usize, dry_run: bool) {
static mut TERMINATE_ANALYSIS_FLAG: bool = false;
fn terminate_analysis() -> bool {
unsafe {
return TERMINATE_ANALYSIS_FLAG
}
}
fn handle_ctrl_c() {
unsafe {
TERMINATE_ANALYSIS_FLAG = true;
}
}
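// Design note: this flag is set once by the Ctrl-C handler thread and only polled
// afterwards; a std::sync::atomic::AtomicBool (store/load with Ordering::Relaxed)
// would express the same idea without the `unsafe` blocks.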
fn get_file_list(db: &mut db::Db, mpath: &Path, path: &Path, track_paths: &mut Vec<String>, cue_tracks:&mut Vec<cue::CueTrack>, file_count:&mut usize, max_num_files: usize, tagged_file_count:&mut usize, dry_run: bool) {
if !path.is_dir() {
return;
}
@@ -54,21 +67,21 @@ fn get_file_list(db: &mut db::Db, mpath: &Path, path: &Path, track_paths: &mut V
items.sort_by_key(|dir| dir.path());
for item in items {
check_dir_entry(db, mpath, item, track_paths, cue_tracks, file_count, max_num_files, use_tags, tagged_file_count, dry_run);
check_dir_entry(db, mpath, item, track_paths, cue_tracks, file_count, max_num_files, tagged_file_count, dry_run);
if max_num_files>0 && *file_count>=max_num_files {
break;
}
}
}
fn check_dir_entry(db: &mut db::Db, mpath: &Path, entry: DirEntry, track_paths: &mut Vec<String>, cue_tracks:&mut Vec<cue::CueTrack>, file_count:&mut usize, max_num_files: usize, use_tags: bool, tagged_file_count:&mut usize, dry_run: bool) {
fn check_dir_entry(db: &mut db::Db, mpath: &Path, entry: DirEntry, track_paths: &mut Vec<String>, cue_tracks:&mut Vec<cue::CueTrack>, file_count:&mut usize, max_num_files: usize, tagged_file_count:&mut usize, dry_run: bool) {
let pb = entry.path();
if pb.is_dir() {
let check = pb.join(DONT_ANALYSE);
if check.exists() {
log::info!("Skipping '{}', found '{}'", pb.to_string_lossy(), DONT_ANALYSE);
} else if max_num_files<=0 || *file_count<max_num_files {
get_file_list(db, mpath, &pb, track_paths, cue_tracks, file_count, max_num_files, use_tags, tagged_file_count, dry_run);
get_file_list(db, mpath, &pb, track_paths, cue_tracks, file_count, max_num_files, tagged_file_count, dry_run);
}
} else if pb.is_file() && (max_num_files<=0 || *file_count<max_num_files) {
if_chain! {
@@ -110,15 +123,13 @@ fn check_dir_entry(db: &mut db::Db, mpath: &Path, entry: DirEntry, track_paths:
if let Ok(id) = db.get_rowid(&sname) {
if id<=0 {
let mut tags_used = false;
if use_tags {
let meta = tags::read(&sname, true);
if !meta.is_empty() && !meta.analysis.is_none() {
if !dry_run {
db.add_track(&sname, &meta, &meta.analysis.unwrap());
}
*tagged_file_count+=1;
tags_used = true;
let meta = tags::read(&String::from(pb.to_string_lossy()), true);
if !meta.is_empty() && !meta.analysis.is_none() {
if !dry_run {
db.add_track(&sname, &meta, &meta.analysis.unwrap());
}
*tagged_file_count+=1;
tags_used = true;
}
if !tags_used {
@@ -161,7 +172,7 @@ fn show_errors(failed: &mut Vec<String>, tag_error: &mut Vec<String>) {
}
#[cfg(not(feature = "ffmpeg"))]
fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, use_tags: bool) -> Result<()> {
fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, write_tags: bool, preserve_mod_times: bool) -> Result<()> {
let total = track_paths.len();
let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
ProgressStyle::default_bar()
@@ -236,8 +247,8 @@ fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max
if meta.is_empty() {
tag_error.push(sname.clone());
}
if use_tags {
tags::write_analysis(&cpath, &track.analysis);
if write_tags {
tags::write_analysis(&cpath, &track.analysis, preserve_mod_times);
}
db.add_track(&sname, &meta, &track.analysis);
}
@@ -250,16 +261,23 @@ fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max
if inc_progress {
progress.inc(1);
}
if terminate_analysis() {
break
}
}
progress.finish_with_message("Finished!");
log::info!("{} Analysed. {} Failure(s).", analysed, failed.len());
if terminate_analysis() {
progress.abandon_with_message("Terminated!");
} else {
progress.finish_with_message("Finished!");
}
log::info!("{} Analysed. {} Failed.", analysed, failed.len());
show_errors(&mut failed, &mut tag_error);
Ok(())
}
#[cfg(feature = "ffmpeg")]
fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, use_tags: bool) -> Result<()> {
fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, write_tags: bool, preserve_mod_times: bool) -> Result<()> {
let total = track_paths.len();
let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
ProgressStyle::default_bar()
@@ -291,8 +309,8 @@ fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max
if meta.is_empty() {
tag_error.push(sname.clone());
}
if use_tags {
tags::write_analysis(&cpath, &track.analysis);
if write_tags {
tags::write_analysis(&cpath, &track.analysis, preserve_mod_times);
}
db.add_track(&sname, &meta, &track.analysis);
analysed += 1;
@@ -301,6 +319,9 @@ fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max
};
progress.inc(1);
if terminate_analysis() {
break
}
}
// Reset terminal, otherwise typed output does not show? Perhaps Linux only...
@@ -311,8 +332,12 @@ fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max
};
}
progress.finish_with_message("Finished!");
log::info!("{} Analysed. {} Failure(s).", analysed, failed.len());
if terminate_analysis() {
progress.abandon_with_message("Terminated!");
} else {
progress.finish_with_message("Finished!");
}
log::info!("{} Analysed. {} Failed.", analysed, failed.len());
show_errors(&mut failed, &mut tag_error);
Ok(())
}
@@ -398,16 +423,28 @@ fn analyse_new_cue_tracks(db:&db::Db, mpath: &PathBuf, cue_tracks:Vec<cue::CueTr
}
};
progress.inc(1);
if terminate_analysis() {
break
}
}
progress.finish_with_message("Finished!");
log::info!("{} Analysed. {} Failure(s).", analysed, failed.len());
if terminate_analysis() {
progress.abandon_with_message("Terminated!");
} else {
progress.finish_with_message("Finished!");
}
log::info!("{} Analysed. {} Failed.", analysed, failed.len());
show_errors(&mut failed, &mut tag_error);
Ok(())
}
pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run: bool, keep_old: bool, max_num_files: usize, max_threads: usize, ignore_path: &PathBuf, use_tags: bool) {
pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run: bool, keep_old: bool, max_num_files: usize, max_threads: usize, ignore_path: &PathBuf, write_tags: bool, preserve_mod_times: bool) {
let mut db = db::Db::new(&String::from(db_path));
ctrlc::set_handler(move || {
handle_ctrl_c();
}).expect("Error setting Ctrl-C handler");
db.init();
if !keep_old {
@@ -428,41 +465,41 @@ pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run: bool, keep_o
} else {
log::info!("Looking for new files");
}
get_file_list(&mut db, &mpath, &cur, &mut track_paths, &mut cue_tracks, &mut file_count, max_num_files, use_tags, &mut tagged_file_count, dry_run);
get_file_list(&mut db, &mpath, &cur, &mut track_paths, &mut cue_tracks, &mut file_count, max_num_files, &mut tagged_file_count, dry_run);
track_paths.sort();
log::info!("Num new files: {}", track_paths.len());
log::info!("New untagged files: {}", track_paths.len());
if !cue_tracks.is_empty() {
log::info!("Num new cue tracks: {}", cue_tracks.len());
}
if use_tags {
log::info!("Num tagged files: {}", tagged_file_count);
log::info!("New cue tracks: {}", cue_tracks.len());
}
log::info!("New tagged files: {}", tagged_file_count);
if dry_run {
if !track_paths.is_empty() || !cue_tracks.is_empty() {
log::info!("The following need to be analysed:");
for track in track_paths {
log::info!(" {}", track);
}
for track in cue_tracks {
log::info!(" {}", track.track_path.to_string_lossy());
}
}
} else {
if !track_paths.is_empty() {
match analyse_new_files(&db, &mpath, track_paths, max_threads, use_tags) {
Ok(_) => { changes_made = true; }
Err(e) => { log::error!("Analysis returned error: {}", e); }
if !terminate_analysis() {
if dry_run {
if !track_paths.is_empty() || !cue_tracks.is_empty() {
log::info!("The following need to be analysed:");
for track in track_paths {
log::info!(" {}", track);
}
for track in cue_tracks {
log::info!(" {}", track.track_path.to_string_lossy());
}
}
} else {
log::info!("No new files to analyse");
}
if !track_paths.is_empty() {
match analyse_new_files(&db, &mpath, track_paths, max_threads, write_tags, preserve_mod_times) {
Ok(_) => { changes_made = true; }
Err(e) => { log::error!("Analysis returned error: {}", e); }
}
} else {
log::info!("No new files to analyse");
}
#[cfg(feature = "ffmpeg")]
if !cue_tracks.is_empty() {
match analyse_new_cue_tracks(&db, &mpath, cue_tracks) {
Ok(_) => { changes_made = true; },
Err(e) => { log::error!("Cue analysis returned error: {}", e); }
#[cfg(feature = "ffmpeg")]
if !cue_tracks.is_empty() && !terminate_analysis() {
match analyse_new_cue_tracks(&db, &mpath, cue_tracks) {
Ok(_) => { changes_made = true; },
Err(e) => { log::error!("Cue analysis returned error: {}", e); }
}
}
}
}
@@ -471,30 +508,6 @@ pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run: bool, keep_o
db.close();
if changes_made && ignore_path.exists() && ignore_path.is_file() {
log::info!("Updating 'ignore' flags");
update_ignore(&db_path, &ignore_path);
db::update_ignore(&db_path, &ignore_path);
}
}
pub fn read_tags(db_path: &str, mpaths: &Vec<PathBuf>) {
let db = db::Db::new(&String::from(db_path));
db.init();
db.update_tags(&mpaths);
db.close();
}
pub fn update_ignore(db_path: &str, ignore_path: &PathBuf) {
let file = File::open(ignore_path).unwrap();
let reader = BufReader::new(file);
let db = db::Db::new(&String::from(db_path));
db.init();
db.clear_ignore();
let mut lines = reader.lines();
while let Some(Ok(line)) = lines.next() {
if !line.is_empty() && !line.starts_with("#") {
db.set_ignore(&line);
}
}
db.close();
}

166
src/db.rs
View File

@@ -13,8 +13,14 @@ use bliss_audio::{Analysis, AnalysisIndex};
use indicatif::{ProgressBar, ProgressStyle};
use rusqlite::{params, Connection};
use std::convert::TryInto;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::num::NonZeroUsize;
use std::path::PathBuf;
use std::process;
use std::thread;
use std::thread::JoinHandle;
use num_cpus;
pub const CUE_MARKER: &str = ".CUE_TRACK.";
@@ -29,6 +35,12 @@ pub struct FileMetadata {
pub duration: u32,
}
#[derive(Clone)]
struct AnalysisResults {
pub file: String,
pub analysis: Analysis,
}
#[derive(Default, PartialEq)]
pub struct Metadata {
pub title: String,
@@ -50,6 +62,20 @@ impl Metadata {
}
}
static mut TERMINATE_EXPORT_FLAG: bool = false;
fn terminate_export() -> bool {
unsafe {
return TERMINATE_EXPORT_FLAG
}
}
fn handle_ctrl_c() {
unsafe {
TERMINATE_EXPORT_FLAG = true;
}
}
pub struct Db {
pub conn: Connection,
}
@@ -338,4 +364,144 @@ impl Db {
}
}
}
pub fn export(&self, mpaths: &Vec<PathBuf>, max_threads: usize, preserve_mod_times: bool) {
ctrlc::set_handler(move || {
handle_ctrl_c();
}).expect("Error setting Ctrl-C handler");
log::info!("Querying database");
let mut tracks:Vec<AnalysisResults> = Vec::new();
let mut stmt = self.conn.prepare("SELECT File, Tempo, Zcr, MeanSpectralCentroid, StdDevSpectralCentroid, MeanSpectralRolloff, StdDevSpectralRolloff, MeanSpectralFlatness, StdDevSpectralFlatness, MeanLoudness, StdDevLoudness, Chroma1, Chroma2, Chroma3, Chroma4, Chroma5, Chroma6, Chroma7, Chroma8, Chroma9, Chroma10 FROM Tracks ORDER BY File ASC;").unwrap();
let track_iter = stmt
.query_map([], |row| {
Ok(AnalysisResults {
file: row.get(0)?,
analysis: Analysis::new([row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?, row.get(5)?, row.get(6)?, row.get(7)?, row.get(8)?, row.get(9)?, row.get(10)?, row.get(11)?, row.get(12)?, row.get(13)?, row.get(14)?, row.get(15)?, row.get(16)?, row.get(17)?, row.get(18)?, row.get(19)?, row.get(20)?]),
})
})
.unwrap();
for tr in track_iter {
let dbtags = tr.unwrap();
if !dbtags.file.contains(CUE_MARKER) {
for mpath in mpaths {
let track_path = mpath.join(dbtags.file.clone());
if track_path.exists() {
tracks.push(AnalysisResults{file:String::from(track_path.to_string_lossy()), analysis:dbtags.analysis});
}
}
}
}
let total = tracks.len();
if total <= 0 {
log::info!("Nothing to export");
return;
}
log::info!("Starting export");
let cpu_threads: NonZeroUsize = match max_threads {
0 => NonZeroUsize::new(num_cpus::get()).unwrap(),
_ => NonZeroUsize::new(max_threads).unwrap(),
}.into();
let num_threads = cpu_threads.into();
let chunk_size = total/cpu_threads;
let mut threads: Vec<JoinHandle<()>> = vec![];
let (sender, receiver) = std::sync::mpsc::channel();
let reporting_thread = std::thread::spawn(move || {
let mut processed = 0;
let mut had_tags = 0;
let mut failed_to_write = 0;
let mut exported = 0;
let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
ProgressStyle::default_bar()
.template(
"[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}",
)
.progress_chars("=> "),
);
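// Status codes sent by the worker threads: 0 = file already had matching analysis
// tags, 1 = tag write failed, 2 = exported.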
for resp in receiver {
progress.inc(1);
processed+=1;
if resp==0 {
had_tags+=1;
} else if resp==1 {
failed_to_write+=1;
} else {
exported+=1;
}
if processed == total {
break;
}
if terminate_export() {
break
}
}
if terminate_export() {
progress.abandon_with_message("Terminated!");
} else {
progress.finish_with_message(format!("Finished!"));
}
log::info!("{} Exported. {} Existing. {} Failed.", exported, had_tags, failed_to_write);
});
threads.push(reporting_thread);
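// Each worker thread processes one contiguous chunk of the track list; the final
// thread also takes any remainder left by the integer division above.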
for thread in 0..num_threads {
let tid:usize = thread;
let start = tid * chunk_size;
let end = if tid+1 == num_threads { total } else { start + chunk_size };
let sndr = sender.clone();
let trks = Vec::from_iter(tracks[start..end].iter().cloned());
threads.push(thread::spawn(move || {
for track in trks {
let mut updated = 0;
let meta = tags::read(&track.file, true);
if meta.is_empty() || meta.analysis.is_none() || meta.analysis.unwrap()!=track.analysis {
updated = 1;
if tags::write_analysis(&track.file, &track.analysis, preserve_mod_times) {
updated = 2;
}
}
sndr.send(updated).unwrap();
if terminate_export() {
break
}
}
}));
}
for thread in threads {
let _ = thread.join();
}
}
}
pub fn read_tags(db_path: &str, mpaths: &Vec<PathBuf>) {
let db = Db::new(&String::from(db_path));
db.init();
db.update_tags(&mpaths);
db.close();
}
pub fn export(db_path: &str, mpaths: &Vec<PathBuf>, max_threads: usize, preserve_mod_times: bool) {
let db = Db::new(&String::from(db_path));
db.init();
db.export(&mpaths, max_threads, preserve_mod_times);
db.close();
}
pub fn update_ignore(db_path: &str, ignore_path: &PathBuf) {
let file = File::open(ignore_path).unwrap();
let reader = BufReader::new(file);
let db = Db::new(&String::from(db_path));
db.init();
db.clear_ignore();
let mut lines = reader.lines();
while let Some(Ok(line)) = lines.next() {
if !line.is_empty() && !line.starts_with("#") {
db.set_ignore(&line);
}
}
db.close();
}

View File

@@ -41,7 +41,8 @@ fn main() {
let mut max_num_files: usize = 0;
let mut music_paths: Vec<PathBuf> = Vec::new();
let mut max_threads: usize = 0;
let mut use_tags = false;
let mut write_tags = false;
let mut preserve_mod_times = false;
match dirs::home_dir() {
Some(path) => {
@@ -75,8 +76,9 @@ fn main() {
arg_parse.refer(&mut lms_json_port).add_option(&["-J", "--json"], Store, &lms_json_port_help);
arg_parse.refer(&mut max_num_files).add_option(&["-n", "--numfiles"], Store, "Maximum number of files to analyse");
arg_parse.refer(&mut max_threads).add_option(&["-t", "--threads"], Store, "Maximum number of threads to use for analysis");
arg_parse.refer(&mut use_tags).add_option(&["-T", "--tags"], StoreTrue, "Read/write analysis results from/to source files");
arg_parse.refer(&mut task).add_argument("task", Store, "Task to perform; analyse, tags, ignore, upload, stopmixer.");
arg_parse.refer(&mut write_tags).add_option(&["-T", "--tags"], StoreTrue, "When analysing files, also store results within files themselves");
arg_parse.refer(&mut preserve_mod_times).add_option(&["-p", "--preserve"], StoreTrue, "Preserve modification time when writing results to files");
arg_parse.refer(&mut task).add_argument("task", Store, "Task to perform; analyse, tags, ignore, upload, export, stopmixer.");
arg_parse.parse_args_or_exit();
}
@@ -94,12 +96,12 @@ fn main() {
builder.init();
if task.is_empty() {
log::error!("No task specified, please choose from; analyse, tags, ignore, upload");
log::error!("No task specified, please choose from; analyse, tags, ignore, upload, export, stopmixer");
process::exit(-1);
}
if !task.eq_ignore_ascii_case("analyse") && !task.eq_ignore_ascii_case("tags") && !task.eq_ignore_ascii_case("ignore")
&& !task.eq_ignore_ascii_case("upload") && !task.eq_ignore_ascii_case("stopmixer") {
&& !task.eq_ignore_ascii_case("upload") && !task.eq_ignore_ascii_case("export") && !task.eq_ignore_ascii_case("stopmixer") {
log::error!("Invalid task ({}) supplied", task);
process::exit(-1);
}
@@ -144,7 +146,11 @@ fn main() {
None => { }
}
match config.get(TOP_LEVEL_INI_TAG, "tags") {
Some(val) => { use_tags = val.eq("true"); }
Some(val) => { write_tags = val.eq("true"); }
None => { }
}
match config.get(TOP_LEVEL_INI_TAG, "preserve") {
Some(val) => { preserve_mod_times = val.eq("true"); }
None => { }
}
}
@@ -194,7 +200,7 @@ fn main() {
}
if task.eq_ignore_ascii_case("tags") {
analyse::read_tags(&db_path, &music_paths);
db::read_tags(&db_path, &music_paths);
} else if task.eq_ignore_ascii_case("ignore") {
let ignore_path = PathBuf::from(&ignore_file);
if !ignore_path.exists() {
@@ -205,10 +211,12 @@ fn main() {
log::error!("Ignore file ({}) is not a file", ignore_file);
process::exit(-1);
}
analyse::update_ignore(&db_path, &ignore_path);
db::update_ignore(&db_path, &ignore_path);
} else if task.eq_ignore_ascii_case("export") {
db::export(&db_path, &music_paths, max_threads, preserve_mod_times);
} else {
let ignore_path = PathBuf::from(&ignore_file);
analyse::analyse_files(&db_path, &music_paths, dry_run, keep_old, max_num_files, max_threads, &ignore_path, use_tags);
analyse::analyse_files(&db_path, &music_paths, dry_run, keep_old, max_num_files, max_threads, &ignore_path, write_tags, preserve_mod_times);
}
}
}

View File

@@ -12,23 +12,30 @@ use lofty::file::FileType;
use lofty::prelude::{Accessor, AudioFile, ItemKey, TagExt, TaggedFileExt};
use lofty::tag::{ItemValue, Tag, TagItem};
use regex::Regex;
use std::fs::File;
use std::fs;
use std::path::Path;
use substring::Substring;
use std::time::SystemTime;
use bliss_audio::{Analysis, AnalysisIndex};
const MAX_GENRE_VAL: usize = 192;
const NUM_ANALYSIS_VALS: usize = 20;
const ANALYSIS_TAG:ItemKey = ItemKey::Comment;
const ANALYSIS_TAG_START: &str = "BLISS_ANALYSIS";
const ANALYSIS_TAG_VER: u16 = 1;
const ANALYSIS_TAG: &str = "BLISS_ANALYSIS";
const ANALYSIS_TAG_FORMAT_VER: u16 = 1;
pub fn write_analysis(track: &String, analysis: &Analysis) {
let value = format!("{},{},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24}", ANALYSIS_TAG_START, ANALYSIS_TAG_VER,
analysis[AnalysisIndex::Tempo], analysis[AnalysisIndex::Zcr], analysis[AnalysisIndex::MeanSpectralCentroid], analysis[AnalysisIndex::StdDeviationSpectralCentroid], analysis[AnalysisIndex::MeanSpectralRolloff],
analysis[AnalysisIndex::StdDeviationSpectralRolloff], analysis[AnalysisIndex::MeanSpectralFlatness], analysis[AnalysisIndex::StdDeviationSpectralFlatness], analysis[AnalysisIndex::MeanLoudness], analysis[AnalysisIndex::StdDeviationLoudness],
analysis[AnalysisIndex::Chroma1], analysis[AnalysisIndex::Chroma2], analysis[AnalysisIndex::Chroma3], analysis[AnalysisIndex::Chroma4], analysis[AnalysisIndex::Chroma5],
analysis[AnalysisIndex::Chroma6], analysis[AnalysisIndex::Chroma7], analysis[AnalysisIndex::Chroma8], analysis[AnalysisIndex::Chroma9], analysis[AnalysisIndex::Chroma10]);
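// fmt renders a value to at most 16 decimal places and trims trailing zeros,
// e.g. fmt(0.5) == "0.5" and fmt(2.0) == "2.".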
fn fmt(val: f32) -> String {
format!("{:.16}", val).trim_end_matches("0").to_string()
}
pub fn write_analysis(track: &String, analysis: &Analysis, preserve_mod_times: bool) -> bool {
let value = format!("{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}", ANALYSIS_TAG_FORMAT_VER,
fmt(analysis[AnalysisIndex::Tempo]), fmt(analysis[AnalysisIndex::Zcr]), fmt(analysis[AnalysisIndex::MeanSpectralCentroid]), fmt(analysis[AnalysisIndex::StdDeviationSpectralCentroid]), fmt(analysis[AnalysisIndex::MeanSpectralRolloff]),
fmt(analysis[AnalysisIndex::StdDeviationSpectralRolloff]), fmt(analysis[AnalysisIndex::MeanSpectralFlatness]), fmt(analysis[AnalysisIndex::StdDeviationSpectralFlatness]), fmt(analysis[AnalysisIndex::MeanLoudness]), fmt(analysis[AnalysisIndex::StdDeviationLoudness]),
fmt(analysis[AnalysisIndex::Chroma1]), fmt(analysis[AnalysisIndex::Chroma2]), fmt(analysis[AnalysisIndex::Chroma3]), fmt(analysis[AnalysisIndex::Chroma4]), fmt(analysis[AnalysisIndex::Chroma5]),
fmt(analysis[AnalysisIndex::Chroma6]), fmt(analysis[AnalysisIndex::Chroma7]), fmt(analysis[AnalysisIndex::Chroma8]), fmt(analysis[AnalysisIndex::Chroma9]), fmt(analysis[AnalysisIndex::Chroma10]));
let mut written = false;
if let Ok(mut file) = lofty::read_from_path(Path::new(track)) {
let tag = match file.primary_tag_mut() {
Some(primary_tag) => primary_tag,
@@ -43,23 +50,96 @@ pub fn write_analysis(track: &String, analysis: &Analysis) {
},
};
// Remove any existing analysis result tag
let entries = tag.get_strings(&ANALYSIS_TAG);
// Store analysis results
let tag_key = ItemKey::Unknown(ANALYSIS_TAG.to_string());
tag.remove_key(&tag_key);
let lower_tag_key = ItemKey::Unknown(ANALYSIS_TAG.to_lowercase().to_string());
tag.remove_key(&lower_tag_key);
tag.insert_unchecked(TagItem::new(tag_key, ItemValue::Text(value)));
// If we have any of the older analysis-in-comment tags, then remove these
let entries = tag.get_strings(&ItemKey::Comment);
let mut keep: Vec<ItemValue> = Vec::new();
let mut have_old = false;
for entry in entries {
if !entry.starts_with(ANALYSIS_TAG_START) {
if entry.starts_with(ANALYSIS_TAG) {
have_old = true;
} else {
keep.push(ItemValue::Text(entry.to_string()));
}
}
tag.remove_key(&ANALYSIS_TAG);
for k in keep {
tag.push(TagItem::new(ANALYSIS_TAG, k));
if have_old {
tag.remove_key(&ItemKey::Comment);
for k in keep {
tag.push(TagItem::new(ItemKey::Comment, k));
}
}
// Store analysis results
tag.push(TagItem::new(ANALYSIS_TAG, ItemValue::Text(value)));
let _ = tag.save_to_path(Path::new(track), WriteOptions::default());
let now = SystemTime::now();
let mut mod_time = now;
if preserve_mod_times {
if let Ok(fmeta) = fs::metadata(track) {
if let Ok(time) = fmeta.modified() {
mod_time = time;
}
}
}
if let Ok(_) = tag.save_to_path(Path::new(track), WriteOptions::default()) {
if preserve_mod_times {
if mod_time<now {
if let Ok(f) = File::open(track) {
let _ = f.set_modified(mod_time);
}
}
}
written = true;
}
}
written
}
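// Parses a stored analysis string into the 20 feature values. The position arguments
// select the layout: the BLISS_ANALYSIS tag value starts with the format version
// (version_pos == 0, start_tag_pos passed out of range), whereas the legacy COMMENT
// form starts with the tag name followed by the version (start_tag_pos == 0,
// version_pos == 1).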
fn read_analysis_string(tag_str: &str, start_tag_pos:usize, version_pos:usize) -> Option<Analysis> {
let parts = tag_str.split(",");
let mut index = 0;
let mut num_read_vals = 0;
let mut vals = [0.; NUM_ANALYSIS_VALS];
let val_start_pos = version_pos+1;
for part in parts {
if index==start_tag_pos && start_tag_pos<version_pos {
if part!=ANALYSIS_TAG {
break;
}
} else if index==version_pos {
match part.parse::<u16>() {
Ok(ver) => {
if ver!=ANALYSIS_TAG_FORMAT_VER {
break;
}
},
Err(_) => {
break;
}
}
} else if (index - val_start_pos) < NUM_ANALYSIS_VALS {
match part.parse::<f32>() {
Ok(val) => {
num_read_vals += 1;
vals[index - val_start_pos] = val;
},
Err(_) => {
break;
}
}
} else {
break;
}
index += 1;
}
if num_read_vals == NUM_ANALYSIS_VALS {
return Some(Analysis::new(vals));
}
None
}
pub fn read(track: &String, read_analysis: bool) -> db::Metadata {
@@ -71,14 +151,32 @@ pub fn read(track: &String, read_analysis: bool) -> db::Metadata {
if let Ok(file) = lofty::read_from_path(Path::new(track)) {
let tag = match file.primary_tag() {
Some(primary_tag) => primary_tag,
None => file.first_tag().expect("Error: No tags found!"),
None => {
if let Some(first_tag) = file.first_tag() {
first_tag
} else {
return meta;
}
}
};
meta.title = tag.title().unwrap_or_default().to_string();
meta.artist = tag.artist().unwrap_or_default().to_string();
meta.album = tag.album().unwrap_or_default().to_string();
meta.album_artist = tag.get_string(&ItemKey::AlbumArtist).unwrap_or_default().to_string();
meta.genre = tag.genre().unwrap_or_default().to_string();
// If file has multiple genre tags then read all.
let genres = tag.get_strings(&ItemKey::Genre);
let mut genre_list:Vec<String> = Vec::new();
for genre in genres {
genre_list.push(genre.to_string());
}
if genre_list.len()>1 {
meta.genre = genre_list.join(";");
} else {
meta.genre = tag.genre().unwrap_or_default().to_string();
}
// Check whether MP3 has numeric genre, and if so convert to text
if file.file_type().eq(&FileType::Mpeg) {
@@ -103,8 +201,7 @@ pub fn read(track: &String, read_analysis: bool) -> db::Metadata {
if let Ok(val) = test {
let idx: usize = val as usize;
if idx < MAX_GENRE_VAL {
meta.genre =
lofty::id3::v1::GENRES[idx].to_string();
meta.genre = lofty::id3::v1::GENRES[idx].to_string();
}
}
}
@@ -121,45 +218,45 @@ pub fn read(track: &String, read_analysis: bool) -> db::Metadata {
meta.duration = file.properties().duration().as_secs() as u32;
if read_analysis {
let entries = tag.get_strings(&ANALYSIS_TAG);
for entry in entries {
if entry.len()>(ANALYSIS_TAG_START.len()+(NUM_ANALYSIS_VALS*8)) && entry.starts_with(ANALYSIS_TAG_START) {
let parts = entry.split(",");
let mut index = 0;
let mut vals = [0.; NUM_ANALYSIS_VALS];
for part in parts {
if 0==index {
if part!=ANALYSIS_TAG_START {
match tag.get_string(&ItemKey::Unknown(ANALYSIS_TAG.to_string())) {
Some(tag_str) => {
match read_analysis_string(tag_str, 100, 0) {
Some(analysis) => {
meta.analysis = Some(analysis);
}
None => { }
}
}
None => { }
}
if meta.analysis.is_none() {
// Try lowercase
match tag.get_string(&ItemKey::Unknown(ANALYSIS_TAG.to_lowercase().to_string())) {
Some(tag_str) => {
match read_analysis_string(tag_str, 100, 0) {
Some(analysis) => {
meta.analysis = Some(analysis);
}
None => { }
}
}
None => { }
}
}
if meta.analysis.is_none() {
// Try old, stored in comment
let entries = tag.get_strings(&ItemKey::Comment);
for entry in entries {
if entry.len()>(ANALYSIS_TAG.len()+(NUM_ANALYSIS_VALS*8)) && entry.starts_with(ANALYSIS_TAG) {
match read_analysis_string(entry, 0, 1) {
Some(analysis) => {
meta.analysis = Some(analysis);
break;
}
} else if 1==index {
match part.parse::<u16>() {
Ok(ver) => {
if ver!=ANALYSIS_TAG_VER {
break;
}
},
Err(_) => {
break;
}
}
} else if (index - 2) < NUM_ANALYSIS_VALS {
match part.parse::<f32>() {
Ok(val) => {
vals[index - 2] = val;
},
Err(_) => {
break;
}
}
} else {
break;
None => { }
}
index += 1;
}
if index == (NUM_ANALYSIS_VALS+2) {
meta.analysis = Some(Analysis::new(vals));
break;
}
}
}