mirror of https://github.com/CDrummond/bliss-analyser.git
synced 2025-04-19 01:57:38 +03:00

Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 9d9117e8f7 |  |
|  | 417ac5f652 |  |
|  | 342440f04b |  |
Cargo.lock (2 changed lines, generated)

@@ -180,7 +180,7 @@ dependencies = [
 [[package]]
 name = "bliss-analyser"
-version = "0.3.0"
+version = "0.4.0"
 dependencies = [
  "anyhow",
  "argparse",
Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "bliss-analyser"
-version = "0.3.0"
+version = "0.4.0"
 authors = ["Craig Drummond <craig.p.drummond@gmail.com>"]
 edition = "2021"
 license = "GPL-3.0-only"
ChangeLog

@@ -1,3 +1,8 @@
+0.4.0
+-----
+1. Add action to export results from database to files.
+2. Add option to preserve file modification time when writing tags.
+
 0.3.0
 -----
 1. Add support for (DSD) WavPack - thanks to Bart Lauret
UserGuide.md (29 changed lines)
@@ -143,13 +143,15 @@ analysis results. This will default to `bliss.db` in the current folder.
 * `lms` specifies the hostname, or IP address, of your LMS server. This is used
 when uploading the database file to LMS. This defaults to `127.0.0.1` If your LMS is
 password protected then use `user:pass@server` - e.g. `lms=pi:abc123@127.0.0.1`
-* `json` specifies the JSONRPC port number of your LMS server. This will defaul to
+* `json` specifies the JSONRPC port number of your LMS server. This will default to
 9000.
 * `ignore` specifies the name and location of a file containing items to ignore
 in mixes. See the `Ignore` section later on for more details.
 * `tags` specifies whether analysis results should be written to, and re-read from,
 files. Set to `true` or `false`. If enabled, then results are stored in a `COMMENT`
 tag that starts with `BLISS_ANALYSIS`
+* `preserve` specifies whether file modification time should be preserved when
+writing tags. Set to `true` or `false`.
 
 
 Command-line parameters
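For illustration (this snippet is not part of the commit), the two keys described in the hunk above use the same `key=value` form as the existing `lms` example, so a config enabling both might contain:

```
tags=true
preserve=true
```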
@@ -172,7 +174,8 @@ tracks are to be analysed and how many old tracks are left in the database.
 * `-L` / `--lms` Hostname, or IP address, of your LMS server.
 * `-J` / `--json` JSONRPC port number of your LMS server.
 * `-n` / `--numtracks` Specify maximum number of tracks to analyse.
-* `-T` / `--tags` Write anlysis results to file tags, and read from file tags.
+* `-T` / `--tags` Write analysis results to file tags, and read from file tags.
+* `-p' / '--preserve` Attempt to preserve file modification time when writing tags.
 
 Equivalent items specified in the INI config file (detailed above) will override
 any specified on the commandline.
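As a usage sketch (again, not taken from the commit), the two flags above combine with the existing `analyse` task roughly as follows; music-folder and database options are omitted:

```
./bliss-analyser analyse --tags --preserve
```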
@@ -187,6 +190,7 @@ required task. This takes the following values:
 any changes.
 * `ignore` Reads the `ignore` file and updates the database to flag tracks as
 to be ignored for mixes.
+* `export` Exports tags from DB and stores within the audio files.
 
 
 
@@ -357,6 +361,27 @@ is accomplished as follows:
 
 
 
+Exporting Analysis
+==================
+
+If you have analysis results stored within the SQLite DB, and not within the files
+themselves, then you can use the `export` action to copy these analysis results from
+the DB and into the files.
+
+(Linux / macOS)
+```
+./bliss-analyser export
+```
+
+(Windows)
+```
+.\bliss-analyser.exe export
+```
+
+*NOTE* Exporting of analysis results is not implemented for CUE tracks.
+
+
+
 Credits
 =======
 
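Worth noting, although the guide does not spell it out: `main.rs` (below) passes `preserve_mod_times` through to `analyse::export`, so the `-p` / `--preserve` flag documented earlier should also apply when exporting, for example (illustrative only):

```
./bliss-analyser export -p
```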
src/analyse.rs

@@ -161,7 +161,7 @@ fn show_errors(failed: &mut Vec<String>, tag_error: &mut Vec<String>) {
 }
 
 #[cfg(not(feature = "ffmpeg"))]
-fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, use_tags: bool) -> Result<()> {
+fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, use_tags: bool, preserve_mod_times: bool) -> Result<()> {
     let total = track_paths.len();
     let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
         ProgressStyle::default_bar()
@@ -237,7 +237,7 @@ fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max
                 tag_error.push(sname.clone());
             }
             if use_tags {
-                tags::write_analysis(&cpath, &track.analysis);
+                tags::write_analysis(&cpath, &track.analysis, preserve_mod_times);
             }
             db.add_track(&sname, &meta, &track.analysis);
         }
@@ -259,7 +259,7 @@ fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max
 }
 
 #[cfg(feature = "ffmpeg")]
-fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, use_tags: bool) -> Result<()> {
+fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max_threads: usize, use_tags: bool, preserve_mod_times: bool) -> Result<()> {
     let total = track_paths.len();
     let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
         ProgressStyle::default_bar()
@@ -292,7 +292,7 @@ fn analyse_new_files(db: &db::Db, mpath: &PathBuf, track_paths: Vec<String>, max
                 tag_error.push(sname.clone());
             }
             if use_tags {
-                tags::write_analysis(&cpath, &track.analysis);
+                tags::write_analysis(&cpath, &track.analysis, preserve_mod_times);
             }
             db.add_track(&sname, &meta, &track.analysis);
             analysed += 1;
@@ -405,7 +405,7 @@ fn analyse_new_cue_tracks(db:&db::Db, mpath: &PathBuf, cue_tracks:Vec<cue::CueTr
     Ok(())
 }
 
-pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run: bool, keep_old: bool, max_num_files: usize, max_threads: usize, ignore_path: &PathBuf, use_tags: bool) {
+pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run: bool, keep_old: bool, max_num_files: usize, max_threads: usize, ignore_path: &PathBuf, use_tags: bool, preserve_mod_times: bool) {
     let mut db = db::Db::new(&String::from(db_path));
 
     db.init();
@@ -450,7 +450,7 @@ pub fn analyse_files(db_path: &str, mpaths: &Vec<PathBuf>, dry_run: bool, keep_o
             }
         } else {
             if !track_paths.is_empty() {
-                match analyse_new_files(&db, &mpath, track_paths, max_threads, use_tags) {
+                match analyse_new_files(&db, &mpath, track_paths, max_threads, use_tags, preserve_mod_times) {
                     Ok(_) => { changes_made = true; }
                     Err(e) => { log::error!("Analysis returned error: {}", e); }
                 }
@@ -482,6 +482,13 @@ pub fn read_tags(db_path: &str, mpaths: &Vec<PathBuf>) {
     db.close();
 }
 
+pub fn export(db_path: &str, mpaths: &Vec<PathBuf>, preserve_mod_times: bool) {
+    let db = db::Db::new(&String::from(db_path));
+    db.init();
+    db.export(&mpaths, preserve_mod_times);
+    db.close();
+}
+
 pub fn update_ignore(db_path: &str, ignore_path: &PathBuf) {
     let file = File::open(ignore_path).unwrap();
     let reader = BufReader::new(file);
src/db.rs (51 changed lines)
@@ -29,6 +29,11 @@ pub struct FileMetadata {
     pub duration: u32,
 }
 
+struct AnalysisResults {
+    pub file: String,
+    pub analysis: Analysis,
+}
+
 #[derive(Default, PartialEq)]
 pub struct Metadata {
     pub title: String,
@@ -338,4 +343,50 @@ impl Db {
             }
         }
     }
+
+    pub fn export(&self, mpaths: &Vec<PathBuf>, preserve_mod_times: bool) {
+        let total = self.get_track_count();
+        if total > 0 {
+            let progress = ProgressBar::new(total.try_into().unwrap()).with_style(
+                ProgressStyle::default_bar()
+                    .template(
+                        "[{elapsed_precise}] [{bar:25}] {percent:>3}% {pos:>6}/{len:6} {wide_msg}",
+                    )
+                    .progress_chars("=> "),
+            );
+
+            let mut stmt = self.conn.prepare("SELECT File, Tempo, Zcr, MeanSpectralCentroid, StdDevSpectralCentroid, MeanSpectralRolloff, StdDevSpectralRolloff, MeanSpectralFlatness, StdDevSpectralFlatness, MeanLoudness, StdDevLoudness, Chroma1, Chroma2, Chroma3, Chroma4, Chroma5, Chroma6, Chroma7, Chroma8, Chroma9, Chroma10 FROM Tracks ORDER BY File ASC;").unwrap();
+            let track_iter = stmt
+                .query_map([], |row| {
+                    Ok(AnalysisResults {
+                        file: row.get(0)?,
+                        analysis: Analysis::new([row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?, row.get(5)?, row.get(6)?, row.get(7)?, row.get(8)?, row.get(9)?, row.get(10)?, row.get(11)?, row.get(12)?, row.get(13)?, row.get(14)?, row.get(15)?, row.get(16)?, row.get(17)?, row.get(18)?, row.get(19)?, row.get(20)?]),
+                    })
+                })
+                .unwrap();
+
+            let mut updated = 0;
+            for tr in track_iter {
+                let dbtags = tr.unwrap();
+                if !dbtags.file.contains(CUE_MARKER) {
+                    progress.set_message(format!("{}", dbtags.file));
+
+                    for mpath in mpaths {
+                        let track_path = mpath.join(&dbtags.file);
+                        if track_path.exists() {
+                            let spath = String::from(track_path.to_string_lossy());
+                            let meta = tags::read(&spath, true);
+                            if meta.is_empty() || meta.analysis.is_none() || meta.analysis.unwrap()!=dbtags.analysis {
+                                tags::write_analysis(&spath, &dbtags.analysis, preserve_mod_times);
+                                updated+=1;
+                            }
+                            break;
+                        }
+                    }
+                }
+                progress.inc(1);
+            }
+            progress.finish_with_message(format!("{} Updated.", updated))
+        }
+    }
 }
src/main.rs (16 changed lines)
@@ -42,6 +42,7 @@ fn main() {
     let mut music_paths: Vec<PathBuf> = Vec::new();
     let mut max_threads: usize = 0;
     let mut use_tags = false;
+    let mut preserve_mod_times = false;
 
     match dirs::home_dir() {
         Some(path) => {
@@ -76,7 +77,8 @@ fn main() {
         arg_parse.refer(&mut max_num_files).add_option(&["-n", "--numfiles"], Store, "Maximum number of files to analyse");
         arg_parse.refer(&mut max_threads).add_option(&["-t", "--threads"], Store, "Maximum number of threads to use for analysis");
         arg_parse.refer(&mut use_tags).add_option(&["-T", "--tags"], StoreTrue, "Read/write analysis results from/to source files");
-        arg_parse.refer(&mut task).add_argument("task", Store, "Task to perform; analyse, tags, ignore, upload, stopmixer.");
+        arg_parse.refer(&mut preserve_mod_times).add_option(&["-p", "--preserve"], StoreTrue, "Preserve modification time when writing tags to files");
+        arg_parse.refer(&mut task).add_argument("task", Store, "Task to perform; analyse, tags, ignore, upload, export, stopmixer.");
         arg_parse.parse_args_or_exit();
     }
 
@@ -94,12 +96,12 @@ fn main() {
     builder.init();
 
     if task.is_empty() {
-        log::error!("No task specified, please choose from; analyse, tags, ignore, upload");
+        log::error!("No task specified, please choose from; analyse, tags, ignore, upload, export, stopmixer");
         process::exit(-1);
     }
 
     if !task.eq_ignore_ascii_case("analyse") && !task.eq_ignore_ascii_case("tags") && !task.eq_ignore_ascii_case("ignore")
-        && !task.eq_ignore_ascii_case("upload") && !task.eq_ignore_ascii_case("stopmixer") {
+        && !task.eq_ignore_ascii_case("upload") && !task.eq_ignore_ascii_case("export") && !task.eq_ignore_ascii_case("stopmixer") {
         log::error!("Invalid task ({}) supplied", task);
         process::exit(-1);
     }
@@ -147,6 +149,10 @@ fn main() {
                 Some(val) => { use_tags = val.eq("true"); }
                 None => { }
             }
+            match config.get(TOP_LEVEL_INI_TAG, "preserve") {
+                Some(val) => { preserve_mod_times = val.eq("true"); }
+                None => { }
+            }
         }
         Err(e) => {
             log::error!("Failed to load config file. {}", e);
@@ -206,9 +212,11 @@ fn main() {
                 process::exit(-1);
             }
             analyse::update_ignore(&db_path, &ignore_path);
+        } else if task.eq_ignore_ascii_case("export") {
+            analyse::export(&db_path, &music_paths, preserve_mod_times);
         } else {
             let ignore_path = PathBuf::from(&ignore_file);
-            analyse::analyse_files(&db_path, &music_paths, dry_run, keep_old, max_num_files, max_threads, &ignore_path, use_tags);
+            analyse::analyse_files(&db_path, &music_paths, dry_run, keep_old, max_num_files, max_threads, &ignore_path, use_tags, preserve_mod_times);
         }
     }
 }
src/tags.rs (21 changed lines)
@@ -12,8 +12,11 @@ use lofty::file::FileType;
 use lofty::prelude::{Accessor, AudioFile, ItemKey, TagExt, TaggedFileExt};
 use lofty::tag::{ItemValue, Tag, TagItem};
 use regex::Regex;
+use std::fs::File;
+use std::fs;
 use std::path::Path;
 use substring::Substring;
+use std::time::SystemTime;
 use bliss_audio::{Analysis, AnalysisIndex};
 
 const MAX_GENRE_VAL: usize = 192;
@@ -22,7 +25,7 @@ const ANALYSIS_TAG:ItemKey = ItemKey::Comment;
 const ANALYSIS_TAG_START: &str = "BLISS_ANALYSIS";
 const ANALYSIS_TAG_VER: u16 = 1;
 
-pub fn write_analysis(track: &String, analysis: &Analysis) {
+pub fn write_analysis(track: &String, analysis: &Analysis, preserve_mod_times: bool) {
     let value = format!("{},{},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24},{:.24}", ANALYSIS_TAG_START, ANALYSIS_TAG_VER,
         analysis[AnalysisIndex::Tempo], analysis[AnalysisIndex::Zcr], analysis[AnalysisIndex::MeanSpectralCentroid], analysis[AnalysisIndex::StdDeviationSpectralCentroid], analysis[AnalysisIndex::MeanSpectralRolloff],
         analysis[AnalysisIndex::StdDeviationSpectralRolloff], analysis[AnalysisIndex::MeanSpectralFlatness], analysis[AnalysisIndex::StdDeviationSpectralFlatness], analysis[AnalysisIndex::MeanLoudness], analysis[AnalysisIndex::StdDeviationLoudness],
@@ -58,7 +61,23 @@ pub fn write_analysis(track: &String, analysis: &Analysis) {
 
         // Store analysis results
         tag.push(TagItem::new(ANALYSIS_TAG, ItemValue::Text(value)));
+        let now = SystemTime::now();
+        let mut mod_time = now;
+        if preserve_mod_times {
+            if let Ok(fmeta) = fs::metadata(track) {
+                if let Ok(time) = fmeta.modified() {
+                    mod_time = time;
+                }
+            }
+        }
         let _ = tag.save_to_path(Path::new(track), WriteOptions::default());
+        if preserve_mod_times {
+            if mod_time<now {
+                if let Ok(f) = File::open(track) {
+                    let _ = f.set_modified(mod_time);
+                }
+            }
+        }
     }
 }
 
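The `preserve_mod_times` handling added to `write_analysis` above follows a capture-and-restore pattern: read the file's modification time before the tag write, then put it back once the write is done. A minimal standalone sketch of that pattern (an illustration, not the project's code; `rewrite` is a hypothetical stand-in for the `tag.save_to_path` step, and `File::set_modified` is available in std from Rust 1.75):

```
use std::fs::{self, File};
use std::io;
use std::path::Path;

// Capture the file's mtime, let `rewrite` modify the file (in bliss-analyser
// this would be the tag-saving step), then restore the original mtime so the
// file keeps its previous modification time.
fn rewrite_preserving_mtime<F: FnOnce(&Path) -> io::Result<()>>(path: &Path, rewrite: F) -> io::Result<()> {
    let old_mtime = fs::metadata(path)?.modified()?; // mtime before the write
    rewrite(path)?;                                  // e.g. tag.save_to_path(...)
    File::open(path)?.set_modified(old_mtime)?;      // put the old mtime back
    Ok(())
}
```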