Add action to export analysis results from DB to files.

Issue #20
CDrummond 2025-03-24 06:47:22 +00:00
parent 85a2e985bf
commit 342440f04b
7 changed files with 92 additions and 6 deletions

Cargo.lock generated

@@ -180,7 +180,7 @@ dependencies = [
 [[package]]
 name = "bliss-analyser"
-version = "0.3.0"
+version = "0.4.0"
 dependencies = [
  "anyhow",
  "argparse",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "bliss-analyser"
-version = "0.3.0"
+version = "0.4.0"
 authors = ["Craig Drummond <craig.p.drummond@gmail.com>"]
 edition = "2021"
 license = "GPL-3.0-only"

ChangeLog

@@ -1,3 +1,7 @@
+0.4.0
+-----
+1. Add action to export results from database to files.
+
 0.3.0
 -----
 1. Add support for (DSD) WavPack - thanks to Bart Lauret

README.md

@@ -143,7 +143,7 @@ analysis results. This will default to `bliss.db` in the current folder.
 * `lms` specifies the hostname, or IP address, of your LMS server. This is used
 when uploading the database file to LMS. This defaults to `127.0.0.1` If your LMS is
 password protected then use `user:pass@server` - e.g. `lms=pi:abc123@127.0.0.1`
-* `json` specifies the JSONRPC port number of your LMS server. This will defaul to
+* `json` specifies the JSONRPC port number of your LMS server. This will default to
 9000.
 * `ignore` specifies the name and location of a file containing items to ignore
 in mixes. See the `Ignore` section later on for more details.
@@ -172,7 +172,7 @@ tracks are to be analysed and how many old tracks are left in the database.
 * `-L` / `--lms` Hostname, or IP address, of your LMS server.
 * `-J` / `--json` JSONRPC port number of your LMS server.
 * `-n` / `--numtracks` Specify maximum number of tracks to analyse.
-* `-T` / `--tags` Write anlysis results to file tags, and read from file tags.
+* `-T` / `--tags` Write analysis results to file tags, and read from file tags.
 
 Equivalent items specified in the INI config file (detailed above) will override
 any specified on the commandline.
@@ -187,6 +187,7 @@ required task. This takes the following values:
 any changes.
 * `ignore` Reads the `ignore` file and updates the database to flag tracks as
 to be ignored for mixes.
+* `export` Exports tags from DB and stores within the audio files.
@@ -357,6 +358,27 @@ is accomplished as follows:
+Exporting Analysis
+==================
+
+If you have analysis results stored within the SQLite DB, and not within the files
+themselves, then you can use the `export` action to copy these analysis results from
+the DB and into the files.
+
+(Linux / macOS)
+```
+./bliss-analyser export
+```
+
+(Windows)
+```
+.\bliss-analyser.exe export
+```
+
+*NOTE* Exporting of analysis results is not implemented for CUE tracks.
+
 Credits
 =======

src/analyse.rs

@@ -482,6 +482,13 @@ pub fn read_tags(db_path: &str, mpaths: &Vec<PathBuf>) {
     db.close();
 }
 
+pub fn export(db_path: &str, mpaths: &Vec<PathBuf>) {
+    let db = db::Db::new(&String::from(db_path));
+    db.init();
+    db.export(&mpaths);
+    db.close();
+}
+
 pub fn update_ignore(db_path: &str, ignore_path: &PathBuf) {
     let file = File::open(ignore_path).unwrap();
     let reader = BufReader::new(file);

src/db.rs

@@ -29,6 +29,11 @@ pub struct FileMetadata {
     pub duration: u32,
 }
 
+struct AnalysisResults {
+    pub file: String,
+    pub analysis: Analysis,
+}
+
 #[derive(Default, PartialEq)]
 pub struct Metadata {
     pub title: String,
@@ -338,4 +343,50 @@ impl Db {
             }
         }
     }
+
+    pub fn export(&self, mpaths: &Vec<PathBuf>) {
+        let total = self.get_track_count();
+        if total > 0 {
+            let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
+                ProgressStyle::default_bar()
+                    .template(
+                        "[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}",
+                    )
+                    .progress_chars("=> "),
+            );
+            let mut stmt = self.conn.prepare("SELECT File, Tempo, Zcr, MeanSpectralCentroid, StdDevSpectralCentroid, MeanSpectralRolloff, StdDevSpectralRolloff, MeanSpectralFlatness, StdDevSpectralFlatness, MeanLoudness, StdDevLoudness, Chroma1, Chroma2, Chroma3, Chroma4, Chroma5, Chroma6, Chroma7, Chroma8, Chroma9, Chroma10 FROM Tracks ORDER BY File ASC;").unwrap();
+            let track_iter = stmt
+                .query_map([], |row| {
+                    Ok(AnalysisResults {
+                        file: row.get(0)?,
+                        analysis: Analysis::new([row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?, row.get(5)?, row.get(6)?, row.get(7)?, row.get(8)?, row.get(9)?, row.get(10)?, row.get(11)?, row.get(12)?, row.get(13)?, row.get(14)?, row.get(15)?, row.get(16)?, row.get(17)?, row.get(18)?, row.get(19)?, row.get(20)?]),
+                    })
+                })
+                .unwrap();
+            let mut updated = 0;
+            for tr in track_iter {
+                let dbtags = tr.unwrap();
+                if !dbtags.file.contains(CUE_MARKER) {
+                    progress.set_message(format!("{}", dbtags.file));
+                    for mpath in mpaths {
+                        let track_path = mpath.join(&dbtags.file);
+                        if track_path.exists() {
+                            let spath = String::from(track_path.to_string_lossy());
+                            let meta = tags::read(&spath, true);
+                            if meta.is_empty() || meta.analysis.is_none() || meta.analysis.unwrap()!=dbtags.analysis {
+                                tags::write_analysis(&spath, &dbtags.analysis);
+                                updated+=1;
+                            }
+                            break;
+                        }
+                    }
+                }
+                progress.inc(1);
+            }
+            progress.finish_with_message(format!("{} Updated.", updated))
+        }
+    }
 }

src/main.rs

@@ -76,7 +76,7 @@ fn main() {
        arg_parse.refer(&mut max_num_files).add_option(&["-n", "--numfiles"], Store, "Maximum number of files to analyse");
        arg_parse.refer(&mut max_threads).add_option(&["-t", "--threads"], Store, "Maximum number of threads to use for analysis");
        arg_parse.refer(&mut use_tags).add_option(&["-T", "--tags"], StoreTrue, "Read/write analysis results from/to source files");
-       arg_parse.refer(&mut task).add_argument("task", Store, "Task to perform; analyse, tags, ignore, upload, stopmixer.");
+       arg_parse.refer(&mut task).add_argument("task", Store, "Task to perform; analyse, tags, ignore, upload, export, stopmixer.");
        arg_parse.parse_args_or_exit();
    }
@@ -94,7 +94,7 @@ fn main() {
    builder.init();
 
    if task.is_empty() {
-       log::error!("No task specified, please choose from; analyse, tags, ignore, upload");
+       log::error!("No task specified, please choose from; analyse, tags, ignore, upload, export, stopmixer");
        process::exit(-1);
    }
@@ -206,6 +206,8 @@ fn main() {
            process::exit(-1);
        }
        analyse::update_ignore(&db_path, &ignore_path);
+   } else if task.eq_ignore_ascii_case("export") {
+       analyse::export(&db_path, &music_paths);
    } else {
        let ignore_path = PathBuf::from(&ignore_file);
        analyse::analyse_files(&db_path, &music_paths, dry_run, keep_old, max_num_files, max_threads, &ignore_path, use_tags);