Merge pull request #26 from Polochon-street/example-playlist-save-results
Playlist example: save analysis to a file
commit eee2bf612c
5 changed files with 69 additions and 9 deletions
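
The gist of the change: the playlist example now persists its analysis results to ./songs.json with serde_json and reloads them on the next run, so songs are not re-analyzed. Below is a minimal sketch of that save/load round trip, assuming bliss-audio is built with its `serde` feature; the helper names are illustrative and not part of the example, while the ./songs.json file name and the Vec<Song> round trip come straight from the diff.

// Sketch of the caching pattern this PR adds (helper names are illustrative,
// not part of the example): previously analyzed songs live in ./songs.json
// and are reloaded on the next run so they are not analyzed twice.
// Requires bliss-audio built with the `serde` feature.
use std::fs;
use std::io::BufReader;

use anyhow::Result;
use bliss_audio::Song;

// Load cached analyses if ./songs.json exists, otherwise start empty.
fn load_cached_songs() -> Vec<Song> {
    match fs::File::open("./songs.json") {
        Ok(f) => serde_json::from_reader(BufReader::new(f)).unwrap_or_default(),
        Err(_) => Vec::new(),
    }
}

// Persist the merged set of analyses for the next run.
fn save_songs(songs: &[Song]) -> Result<()> {
    let serialized = serde_json::to_string(songs)?;
    fs::write("./songs.json", serialized)?;
    Ok(())
}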
.github/workflows/rust.yml (vendored, 2 changes)

@@ -33,4 +33,4 @@ jobs:
       - name: Build benches
         run: cargo +nightly-2021-04-01 bench --verbose --features=bench --no-run
       - name: Build examples
-        run: cargo build --examples --verbose
+        run: cargo build --examples --verbose --features=serde
Cargo.lock (generated, 24 changes)

@@ -97,6 +97,7 @@ dependencies = [
  "ripemd160",
  "rustfft",
  "serde",
+ "serde_json",
  "strum",
  "strum_macros",
  "thiserror",

@@ -518,6 +519,12 @@ dependencies = [
  "either",
 ]
 
+[[package]]
+name = "itoa"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
+
 [[package]]
 name = "jobserver"
 version = "0.1.24"

@@ -1037,6 +1044,12 @@ dependencies = [
  "transpose",
 ]
 
+[[package]]
+name = "ryu"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
+
 [[package]]
 name = "scopeguard"
 version = "1.1.0"

@@ -1063,6 +1076,17 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "serde_json"
+version = "1.0.68"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
 [[package]]
 name = "sha-1"
 version = "0.8.2"
Cargo.toml

@@ -49,3 +49,4 @@ serde = { version = "1.0", optional = true, features = ["derive"] }
 mime_guess = "2.0.3"
 glob = "0.3.0"
 anyhow = "1.0.45"
+serde_json = "1.0.59"
examples/playlist.rs

@@ -3,16 +3,19 @@ use bliss_audio::distance::{closest_to_first_song, dedup_playlist, euclidean_distance};
 use bliss_audio::{library::analyze_paths_streaming, Song};
 use glob::glob;
 use mime_guess;
+use serde_json;
 use std::env;
 use std::fs;
-use std::path::Path;
+use std::io::BufReader;
+use std::path::{Path, PathBuf};
 
 /* Analyzes a folder recursively, and make a playlist out of the file
  * provided by the user. */
 // TODO still:
-// * Save the results somewhere to avoid analyzing stuff over and over
+// * Mention it in the README
 // * Make the output file configurable
 // * Allow to choose between outputing to stdout and a file
+#[cfg(feature = "serde")]
 fn main() -> Result<()> {
     let args: Vec<String> = env::args().skip(1).collect();
     if args.len() > 3 || args.len() < 2 {

@@ -27,7 +30,20 @@ fn main() -> Result<()> {
     let folder = &args[0];
     let file = fs::canonicalize(&args[1])?;
     let pattern = Path::new(folder).join("**").join("*");
-    let songs = glob(&pattern.to_string_lossy())?
+
+    let mut songs: Vec<Song> = Vec::new();
+    let analysis_file = fs::File::open("./songs.json");
+    if let Ok(f) = analysis_file {
+        let reader = BufReader::new(f);
+        songs = serde_json::from_reader(reader)?;
+    }
+
+    let analyzed_paths = songs
+        .iter()
+        .map(|s| s.path.to_owned())
+        .collect::<Vec<PathBuf>>();
+
+    let paths = glob(&pattern.to_string_lossy())?
         .map(|e| fs::canonicalize(e.unwrap()).unwrap())
         .filter(|e| match mime_guess::from_path(e).first() {
             Some(m) => m.type_() == "audio",

@@ -35,7 +51,14 @@ fn main() -> Result<()> {
         })
         .map(|x| x.to_string_lossy().to_string())
         .collect::<Vec<String>>();
-    let rx = analyze_paths_streaming(songs)?;
+
+    let rx = analyze_paths_streaming(
+        paths
+            .iter()
+            .filter(|p| !analyzed_paths.contains(&PathBuf::from(p)))
+            .map(|p| p.to_owned())
+            .collect(),
+    )?;
     let first_song = Song::new(file)?;
     let mut analyzed_songs = vec![first_song.to_owned()];
     for (path, result) in rx.iter() {

@@ -44,9 +67,16 @@ fn main() -> Result<()> {
             Err(e) => println!("error analyzing {}: {}", path, e),
         };
     }
-    closest_to_first_song(&first_song, &mut analyzed_songs, euclidean_distance);
-    dedup_playlist(&mut analyzed_songs, None);
-    let playlist = analyzed_songs
+    analyzed_songs.extend_from_slice(&songs);
+    let serialized = serde_json::to_string(&analyzed_songs).unwrap();
+    let mut songs_to_chose_from = analyzed_songs
+        .into_iter()
+        .filter(|x| x == &first_song || paths.contains(&x.path.to_string_lossy().to_string()))
+        .collect();
+    closest_to_first_song(&first_song, &mut songs_to_chose_from, euclidean_distance);
+    dedup_playlist(&mut songs_to_chose_from, None);
+    fs::write("./songs.json", serialized)?;
+    let playlist = songs_to_chose_from
         .iter()
         .map(|s| s.path.to_string_lossy().to_string())
         .collect::<Vec<String>>()

@@ -55,3 +85,8 @@ fn main() -> Result<()> {
     fs::write("./playlist.m3u", playlist)?;
     Ok(())
 }
+
+#[cfg(not(feature = "serde"))]
+fn main() {
+    println!("You need the serde feature enabled to run this file.");
+}
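
Once ./songs.json exists, the cached analyses can be reused without touching the audio files again. The helper below is a hypothetical companion, not part of this PR: it builds a playlist purely from the cache, using the same bliss-audio distance helpers the example imports; the function name and seed-lookup logic are assumptions.

// Hypothetical helper (not part of this PR): build a playlist from the cached
// analyses in ./songs.json, using the same bliss-audio distance helpers as the
// example. Requires the `serde` feature; the seed song must already be cached.
use std::fs;
use std::io::BufReader;

use anyhow::Result;
use bliss_audio::distance::{closest_to_first_song, dedup_playlist, euclidean_distance};
use bliss_audio::Song;

fn playlist_from_cache(seed_path: &str) -> Result<Vec<String>> {
    let reader = BufReader::new(fs::File::open("./songs.json")?);
    let mut songs: Vec<Song> = serde_json::from_reader(reader)?;

    // Find the seed song among the cached analyses instead of re-analyzing it.
    let first_song = songs
        .iter()
        .find(|s| s.path.to_string_lossy() == seed_path)
        .cloned()
        .ok_or_else(|| anyhow::anyhow!("seed song not found in ./songs.json"))?;

    // Sort by distance to the seed and drop duplicates, as the example does.
    closest_to_first_song(&first_song, &mut songs, euclidean_distance);
    dedup_playlist(&mut songs, None);
    Ok(songs
        .iter()
        .map(|s| s.path.to_string_lossy().to_string())
        .collect())
}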
src/lib.rs

@@ -63,7 +63,7 @@
 //! ```
 #![cfg_attr(feature = "bench", feature(test))]
 #![warn(missing_docs)]
-#![warn(missing_doc_code_examples)]
+#![warn(rustdoc::missing_doc_code_examples)]
 mod chroma;
 pub mod distance;
 pub mod library;