46 Commits

Author SHA1 Message Date
6f762742bd chore: bump version (v1.3.1) 2024-07-25 22:44:40 +02:00
1c8ff6d2e9 docs: fix required gstreamer version in readme 2024-07-25 22:42:07 +02:00
ec9188fbaf chore: use new edition 2024-07-25 22:41:27 +02:00
5c11600c64 chore: remove nix files due to lack of maintenance 2024-07-25 22:31:46 +02:00
05ad75e20d chore: update deps 2024-07-25 22:27:33 +02:00
a10345fbbe build: bump version (v1.3.0) 2022-04-07 21:12:25 +02:00
f3423eea53 fix: fix "from", "to" & "config" cli argument processing 2022-04-07 21:10:37 +02:00
80e3d02cb4 refactor: simplify config processing 2022-04-07 20:58:05 +02:00
b7a7abbf61 refactor: fmt 2022-04-07 19:36:49 +02:00
5abadc3131 build: update deps 2022-04-07 19:35:37 +02:00
9c86cdcc62 build: update deps 2022-02-24 21:45:52 +01:00
605fa5c15b build: update deps 2022-02-04 23:29:19 +01:00
9c1d39ba5f fix: allow multiple values for tag "musicbrainz-artistid" & "musicbrainz-albumartistid"
issue #2
2021-12-14 19:08:05 +01:00
c6c9da2f27 build: update deps 2021-12-14 18:49:01 +01:00
6872e7897b build: bump version (v1.2.2) 2021-08-25 17:11:23 +02:00
2d1497cb36 build: update deps 2021-08-25 17:09:33 +02:00
f1fb3506b5 build: update deps 2021-07-25 14:37:08 +02:00
f4050fe645 refactor: switch from space to tab indentation 2021-07-10 10:07:30 +02:00
b533f059d7 build: bump version (v1.2.1) 2021-07-05 20:02:49 +02:00
00a25e168d refactor: use more readable clamp method 2021-07-05 19:53:49 +02:00
b51c9939c1 build: upgrade deps 2021-07-05 19:46:06 +02:00
c22d45818e build: update deps 2021-06-18 11:46:48 +02:00
18cc852e6b build: update deps 2021-05-27 22:52:48 +02:00
65b4f398d9 build: bump version (v1.2.0) 2021-04-22 21:20:51 +02:00
7f40cb0581 build: update nix deps 2021-04-22 21:20:51 +02:00
bc15a4449d build: update cargo deps 2021-04-22 21:20:51 +02:00
1cf7cec8bd feat: add "jobs" cli argument 2021-04-22 21:20:51 +02:00
5cf98b3c17 feat: improve error messages 2021-04-21 00:57:23 +02:00
54e174eb0a build: include "README.md" & "CHANGELOG.md" to crate package 2021-04-21 00:26:28 +02:00
803860cce5 doc: add changelog 2021-04-21 00:26:21 +02:00
f2bfddd76e refactor: move actual transcoding into own fn 2021-04-17 01:53:25 +02:00
d073ef10b5 feat: add "copy" codec 2021-04-17 01:14:34 +02:00
399c4b8a2c build: update cargo deps 2021-04-17 00:21:47 +02:00
3188d074b7 build: update nix deps 2021-04-17 00:20:44 +02:00
5242aac566 bump version (v1.1.0) 2021-02-10 23:22:40 +01:00
49003c9983 add readme 2021-02-10 23:21:57 +01:00
530446bcd6 add flac dest format to example config 2021-02-10 23:18:24 +01:00
aa65b30873 update cargo deps 2021-02-10 22:22:06 +01:00
f1a71189c8 flake.lock: Update
Flake input changes:

* Updated 'nixpkgs': 'github:NixOS/nixpkgs/b881f100f65eacbbafdf65cff78c8d4104a1d04a' -> 'github:NixOS/nixpkgs/26f6af373ec1b867d751b56fb802f14010c8351b'
2021-02-10 22:15:31 +01:00
cc2ac9cb37 update nix deps 2021-02-02 13:27:15 +01:00
511f5d1237 update cargo deps 2021-02-02 13:26:33 +01:00
ff06358268 set resampling quality to highest "10" 2021-02-02 13:22:14 +01:00
183e34c217 add flac encoding format 2021-01-15 21:18:27 +01:00
755f5dbd1d update nix deps 2020-12-31 01:07:44 +01:00
762064efd6 clean up 2020-12-29 21:31:33 +01:00
9959a26e48 only include needed src files in cargo pkg/publish 2020-12-28 18:46:47 +01:00
13 changed files with 1851 additions and 1484 deletions

View File

@@ -1,7 +1,7 @@
root = true root = true
[*] [*]
indent_style = space indent_style = tab
indent_size = 4 indent_size = 4
charset = utf-8 charset = utf-8
trim_trailing_whitespace = true trim_trailing_whitespace = true

1
.rustfmt.toml Normal file
View File

@@ -0,0 +1 @@
hard_tabs = true

28
CHANGELOG.md Normal file
View File

@@ -0,0 +1,28 @@
# Changelog
## v1.3.1
* dependencies upgraded
## v1.3.0
* allow multiple values for the tags "musicbrainz-artistid" and "musicbrainz-albumartistid"
* fix "from", "to" & "config" cli argument processing
## v1.2.2
* dependencies upgraded
## v1.2.1
* dependencies upgraded
## v1.2.0
* "copy" encoding format added
* "jobs" cli argument added, that lets you set the number of concurrent transcodes
## v1.1.0
* "flac" encoding format added
* resampling quality set to highest/"10"

1093
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "audio-conv" name = "audio-conv"
version = "1.0.0" version = "1.3.1"
edition = "2018" edition = "2021"
description = "Copies directory structure and converts audio files in it" description = "Copies directory structure and converts audio files in it"
authors = ["Thomas Heck <t@b128.net>"] authors = ["Thomas Heck <t@b128.net>"]
repository = "https://gitlab.com/chpio/audio-conv" repository = "https://gitlab.com/chpio/audio-conv"
@@ -13,24 +13,30 @@ categories = [
"multimedia::encoding", "multimedia::encoding",
] ]
keywords = ["audio", "conversion", "opus", "flac"] keywords = ["audio", "conversion", "opus", "flac"]
include = [
"/src/**/*",
"/example.audio-conv.yaml",
"/README.md",
"/CHANGELOG.md",
]
[dependencies] [dependencies]
gstreamer-audio = { version = "0.16", features = ["v1_10"] } gstreamer = { version = "0.23", features = ["v1_16"] }
gstreamer = { version = "0.16", features = ["v1_10"] } gstreamer-base = { version = "0.23", features = ["v1_16"] }
gstreamer-base = { version = "0.16", features = ["v1_10"] } gstreamer-audio = { version = "0.23", features = ["v1_16"] }
glib = "0.10" glib = "0.20"
futures = "0.3" futures = "0.3"
num_cpus = "1" num_cpus = "1"
walkdir = "2" walkdir = "2"
libc = "0.2" libc = "0.2"
anyhow = "1" anyhow = "1"
clap = "2" clap = { version = "4", features = ["cargo"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_yaml = "0.8" serde_yaml = "0.9"
regex = "1" regex = "1"
globset = "0.4" globset = "0.4"
derive_more = "0.99" derive_more = "0.99"
tui = { version = "0.13", default-features = false, features = ["crossterm"] } tui = { version = "0.19", default-features = false, features = ["crossterm"] }
[dependencies.tokio] [dependencies.tokio]
version = "1" version = "1"

34
README.md Normal file
View File

@@ -0,0 +1,34 @@
# audio-conv
Takes two paths; all audio files encountered in the first path are transcoded and stored in the
second path. The directory structure of the first path is also copied to the second path.
## Dependencies
Requires *gstreamer* version 1.16 or higher with the *base* plugin.
The supported source audio formats (or other media containers that can hold audio) depend on
the installed *gstreamer* plugins.
## Installation via nix flakes
*audio-conv* can be easily installed via *nix flakes*:
```bash
$ nix profile install gitlab:chpio/audio-conv/release
```
## Generate example config
*audio-conv* is able to write an example config to your current directory:
```bash
$ audio-conv init
```
Now edit the generated *audio-conv.yaml* file. Then run *audio-conv* to convert your audio
files:
```bash
$ audio-conv
```

View File

@@ -13,9 +13,19 @@ matches:
bitrate: 160 bitrate: 160
bitrate_type: vbr # or cbr bitrate_type: vbr # or cbr
# for copy (copies file without transcoding it):
# to:
# codec: copy
# for mp3: # for mp3:
# to: # to:
# codec: mp3 # codec: mp3
# # one of: 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256 or 320 # # one of: 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256 or 320
# bitrate: 256 # bitrate: 256
# bitrate_type: vbr # or cbr # bitrate_type: vbr # or cbr
# for flac:
# to:
# codec: flac
# # effort spent for the compression. 0 (fastest compression) to 9 (highest compression)
# compression: 8

58
flake.lock generated
View File

@@ -1,58 +0,0 @@
{
"nodes": {
"flake-utils": {
"locked": {
"lastModified": 1605370193,
"narHash": "sha256-YyMTf3URDL/otKdKgtoMChu4vfVL3vCMkRqpGifhUn0=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "5021eac20303a61fafe17224c087f5519baed54d",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"import-cargo": {
"locked": {
"lastModified": 1594305518,
"narHash": "sha256-frtArgN42rSaEcEOYWg8sVPMUK+Zgch3c+wejcpX3DY=",
"owner": "edolstra",
"repo": "import-cargo",
"rev": "25d40be4a73d40a2572e0cc233b83253554f06c5",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "import-cargo",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1608633860,
"narHash": "sha256-AGJfdJCR5jfIt8PqGiENXRqhthrS3Gxy8Wzb3Z2GsS4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "da1b28ab8f361fbe14dc539cd69ce1bfd015fd68",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"import-cargo": "import-cargo",
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}

View File

@@ -1,63 +0,0 @@
{
description = "Converts audio files";
inputs = {
nixpkgs.url = github:NixOS/nixpkgs;
flake-utils.url = "github:numtide/flake-utils";
import-cargo.url = github:edolstra/import-cargo;
};
outputs = { self, flake-utils, nixpkgs, import-cargo }:
flake-utils.lib.eachDefaultSystem (system:
let
pkgs = import nixpkgs { inherit system; };
buildtimeDeps = with pkgs; [
cargo
rustc
pkg-config
];
runtimeDeps = with pkgs; [
gst_all_1.gstreamer
# needed for opus, resample, ...
gst_all_1.gst-plugins-base
# needed for flac
gst_all_1.gst-plugins-good
];
inherit (import-cargo.builders) importCargo;
in {
defaultPackage = pkgs.stdenv.mkDerivation {
name = "audio-conv";
src = self;
nativeBuildInputs = [
# setupHook which makes sure that a CARGO_HOME with vendored dependencies
# exists
(importCargo { lockFile = ./Cargo.lock; inherit pkgs; }).cargoHome
]
++ buildtimeDeps;
buildInputs = runtimeDeps;
buildPhase = ''
cargo build --release --offline
'';
installPhase = ''
install -Dm775 ./target/release/audio-conv $out/bin/audio-conv
'';
};
devShell = pkgs.stdenv.mkDerivation {
name = "audio-conv";
buildInputs = [ pkgs.rustfmt pkgs.rust-analyzer ]
++ buildtimeDeps
++ runtimeDeps;
};
}
);
}

View File

@@ -1,278 +1,322 @@
use anyhow::{Context, Error, Result}; use anyhow::{Context, Error, Result};
use clap::{builder::ValueParser, ArgAction};
use globset::GlobBuilder; use globset::GlobBuilder;
use regex::bytes::{Regex, RegexBuilder}; use regex::bytes::{Regex, RegexBuilder};
use serde::Deserialize; use serde::Deserialize;
use std::{ use std::{
io::Write, io::Write,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
#[derive(Debug)] #[derive(Debug)]
pub struct Config { pub struct Config {
pub from: PathBuf, pub from: PathBuf,
pub to: PathBuf, pub to: PathBuf,
pub matches: Vec<TranscodeMatch>, pub matches: Vec<TranscodeMatch>,
pub jobs: Option<usize>,
} }
#[derive(Debug)] #[derive(Debug)]
pub struct TranscodeMatch { pub struct TranscodeMatch {
pub regexes: Vec<Regex>, pub regexes: Vec<Regex>,
pub to: Transcode, pub to: Transcode,
} }
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
#[serde(tag = "codec")] #[serde(tag = "codec")]
pub enum Transcode { pub enum Transcode {
#[serde(rename = "opus")] #[serde(rename = "opus")]
Opus { Opus {
#[serde(default = "default_opus_bitrate")] #[serde(default = "default_opus_bitrate")]
bitrate: u16, bitrate: u16,
#[serde(default = "bitrate_type_vbr")] #[serde(default = "bitrate_type_vbr")]
bitrate_type: BitrateType, bitrate_type: BitrateType,
}, },
#[serde(rename = "mp3")] #[serde(rename = "flac")]
Mp3 { Flac {
#[serde(default = "default_mp3_bitrate")] #[serde(default = "default_flac_compression")]
bitrate: u16, compression: u8,
},
#[serde(default = "bitrate_type_vbr")] #[serde(rename = "mp3")]
bitrate_type: BitrateType, Mp3 {
}, #[serde(default = "default_mp3_bitrate")]
bitrate: u16,
#[serde(default = "bitrate_type_vbr")]
bitrate_type: BitrateType,
},
#[serde(rename = "copy")]
Copy,
} }
impl Transcode { impl Transcode {
pub fn extension(&self) -> &'static str { pub fn extension(&self) -> &'static str {
match self { match self {
Transcode::Opus { .. } => "opus", Transcode::Opus { .. } => "opus",
Transcode::Mp3 { .. } => "mp3", Transcode::Flac { .. } => "flac",
} Transcode::Mp3 { .. } => "mp3",
} Transcode::Copy => "",
}
}
} }
fn default_opus_bitrate() -> u16 { fn default_opus_bitrate() -> u16 {
160 160
}
fn default_flac_compression() -> u8 {
5
} }
fn bitrate_type_vbr() -> BitrateType { fn bitrate_type_vbr() -> BitrateType {
BitrateType::Vbr BitrateType::Vbr
} }
fn default_mp3_bitrate() -> u16 { fn default_mp3_bitrate() -> u16 {
256 256
} }
impl Default for Transcode { impl Default for Transcode {
fn default() -> Self { fn default() -> Self {
Transcode::Opus { Transcode::Opus {
bitrate: default_opus_bitrate(), bitrate: default_opus_bitrate(),
bitrate_type: bitrate_type_vbr(), bitrate_type: bitrate_type_vbr(),
} }
} }
} }
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
pub enum BitrateType { pub enum BitrateType {
#[serde(rename = "cbr")] #[serde(rename = "cbr")]
Cbr, Cbr,
#[serde(rename = "vbr")] #[serde(rename = "vbr")]
Vbr, Vbr,
} }
#[derive(Debug, Default, Deserialize)] #[derive(Debug, Default, Deserialize)]
struct ConfigFile { struct ConfigFile {
from: Option<PathBuf>, from: Option<PathBuf>,
to: Option<PathBuf>, to: Option<PathBuf>,
#[serde(default)] #[serde(default)]
matches: Vec<TranscodeMatchFile>, matches: Vec<TranscodeMatchFile>,
jobs: Option<usize>,
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
struct TranscodeMatchFile { struct TranscodeMatchFile {
glob: Option<String>, glob: Option<String>,
regex: Option<String>, regex: Option<String>,
#[serde(default)] #[serde(default)]
extensions: Vec<String>, extensions: Vec<String>,
to: Transcode, to: Transcode,
} }
pub fn config() -> Result<Config> { pub fn config() -> Result<Config> {
use clap::{App, Arg, SubCommand}; use clap::{Arg, Command};
let arg_matches = App::new("audio-conv") let arg_matches = Command::new("audio-conv")
.version(clap::crate_version!()) .version(clap::crate_version!())
.about("Converts audio files") .about("Converts audio files")
.arg( .arg(
Arg::with_name("config") Arg::new("config")
.short("c") .short('c')
.long("config") .long("config")
.required(false) .required(false)
.takes_value(true) .value_parser(ValueParser::path_buf())
.help("path to an audio-conv config file, defaults to \"audio-conv.yaml\""), .action(ArgAction::Set)
) .help("Path to an audio-conv config file, defaults to \"audio-conv.yaml\""),
.arg( )
Arg::with_name("from") .arg(
.short("f") Arg::new("from")
.long("from") .short('f')
.required(false) .long("from")
.takes_value(true) .required(false)
.help("from directory path"), .value_parser(ValueParser::path_buf())
) .action(ArgAction::Set)
.arg( .help("\"from\" directory path"),
Arg::with_name("to") )
.short("t") .arg(
.long("to") Arg::new("to")
.required(false) .short('t')
.takes_value(true) .long("to")
.help("to directory path"), .required(false)
) .value_parser(ValueParser::path_buf())
.subcommand(SubCommand::with_name("init").about("writes an example config")) .action(ArgAction::Set)
.get_matches(); .help("\"to\" directory path"),
)
.arg(
Arg::new("jobs")
.short('j')
.long("jobs")
.required(false)
.value_parser(clap::value_parser!(usize))
.action(ArgAction::Set)
.help("Allow N jobs/transcodes at once. Defaults to number of logical cores"),
)
.subcommand(Command::new("init").about("writes an example config"))
.get_matches();
let current_dir = std::env::current_dir().context("could not get current directory")?; let current_dir = std::env::current_dir().context("Could not get current directory")?;
let config_path = arg_matches.value_of_os("config"); let config_path = arg_matches.get_one::<PathBuf>("config");
let force_load = config_path.is_some(); let enforce_config_load = config_path.is_some();
let config_path = config_path let config_path = config_path
.map(AsRef::<Path>::as_ref) .map(AsRef::<Path>::as_ref)
.unwrap_or_else(|| AsRef::<Path>::as_ref("audio-conv.yaml")); .unwrap_or_else(|| AsRef::<Path>::as_ref("audio-conv.yaml"));
let config_path = current_dir.join(config_path); let config_path = current_dir.join(config_path);
if let Some("init") = arg_matches.subcommand_name() { if let Some("init") = arg_matches.subcommand_name() {
std::fs::OpenOptions::new() std::fs::OpenOptions::new()
.write(true) .write(true)
.create_new(true) .create_new(true)
.open(&config_path) .open(&config_path)
.and_then(|mut f| f.write_all(std::include_bytes!("../example.audio-conv.yaml"))) .and_then(|mut f| f.write_all(std::include_bytes!("../example.audio-conv.yaml")))
.with_context(|| format!("unable to write config file to {}", config_path.display()))?; .with_context(|| format!("Unable to write config file to {}", config_path.display()))?;
std::process::exit(0); std::process::exit(0);
} }
let config_dir = config_path let config_dir = config_path
.parent() .parent()
.context("could not get parent directory of the config file")?; .context("Could not get parent directory of the config file")?;
let config_file = load_config_file(&config_path) let config_file = load_config_file(&config_path)
.with_context(|| format!("failed loading config file \"{}\"", config_path.display()))?; .with_context(|| format!("Failed loading config file {}", config_path.display()))?;
if force_load && config_file.is_none() { if enforce_config_load && config_file.is_none() {
return Err(Error::msg(format!( return Err(Error::msg(format!(
"could not find config file \"{}\"", "could not find config file \"{}\"",
config_path.display() config_path.display()
))); )));
} }
let default_regex = RegexBuilder::new("\\.(flac|wav)$") let default_regex = RegexBuilder::new("\\.(flac|wav)$")
.case_insensitive(true) .case_insensitive(true)
.build() .build()
.expect("failed compiling default match regex"); .expect("Failed compiling default match regex");
let transcode_matches = config_file let transcode_matches = config_file
.as_ref() .as_ref()
.map(|config_file| { .map(|config_file| {
config_file config_file
.matches .matches
.iter() .iter()
.map(|m| { .map(|m| {
let glob = m.glob.iter().map(|glob| { let glob = m.glob.iter().map(|glob| {
let glob = GlobBuilder::new(glob) let glob = GlobBuilder::new(glob)
.case_insensitive(true) .case_insensitive(true)
.build() .build()
.context("failed building glob")?; .context("Failed building glob")?;
let regex = Regex::new(glob.regex()).context("failed compiling regex")?; let regex = Regex::new(glob.regex()).context("Failed compiling regex")?;
Ok(regex) Ok(regex)
}); });
let regex = m.regex.iter().map(|regex| { let regex = m.regex.iter().map(|regex| {
let regex = RegexBuilder::new(regex) let regex = RegexBuilder::new(regex)
.case_insensitive(true) .case_insensitive(true)
.build() .build()
.context("failed compiling regex")?; .context("Failed compiling regex")?;
Ok(regex) Ok(regex)
}); });
let extensions = m.extensions.iter().map(|ext| { let extensions = m.extensions.iter().map(|ext| {
let mut ext = regex::escape(ext); let mut ext = regex::escape(ext);
ext.insert_str(0, &"\\."); ext.insert_str(0, &"\\.");
ext.push_str("$"); ext.push_str("$");
let regex = RegexBuilder::new(&ext) let regex = RegexBuilder::new(&ext)
.case_insensitive(true) .case_insensitive(true)
.build() .build()
.context("failed compiling regex")?; .context("Failed compiling regex")?;
Ok(regex) Ok(regex)
}); });
let mut regexes = glob let mut regexes = glob
.chain(regex) .chain(regex)
.chain(extensions) .chain(extensions)
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()?;
if regexes.is_empty() { if regexes.is_empty() {
regexes.push(default_regex.clone()); regexes.push(default_regex.clone());
} }
Ok(TranscodeMatch { Ok(TranscodeMatch {
regexes, regexes,
to: m.to.clone(), to: m.to.clone(),
}) })
}) })
.collect::<Result<Vec<_>>>() .collect::<Result<Vec<_>>>()
}) })
.transpose()? .transpose()?
.filter(|matches| !matches.is_empty()) .filter(|matches| !matches.is_empty())
.unwrap_or_else(|| { .unwrap_or_else(|| {
vec![TranscodeMatch { vec![TranscodeMatch {
regexes: vec![default_regex], regexes: vec![default_regex],
to: Transcode::default(), to: Transcode::default(),
}] }]
}); });
Ok(Config { Ok(Config {
from: { from: {
arg_matches arg_matches
.value_of_os("from") .get_one::<PathBuf>("from")
.map(|p| current_dir.join(p)) .map(|p| current_dir.join(p))
.or_else(|| { .or_else(|| {
config_file config_file
.as_ref() .as_ref()
.map(|c| c.from.as_ref()) .map(|c| c.from.as_ref())
.flatten() .flatten()
.map(|p| config_dir.join(p)) .map(|p| config_dir.join(p))
}) })
.ok_or_else(|| Error::msg("\"from\" not configured"))? .ok_or_else(|| Error::msg("\"from\" not configured"))?
.canonicalize() .canonicalize()
.context("could not canonicalize \"from\" path")? .context("Could not canonicalize \"from\" path")?
}, },
to: arg_matches to: arg_matches
.value_of_os("to") .get_one::<PathBuf>("to")
.map(|p| current_dir.join(p)) .map(|p| current_dir.join(p))
.or_else(|| { .or_else(|| {
config_file config_file
.as_ref() .as_ref()
.map(|c| c.to.as_ref()) .map(|c| c.to.as_ref())
.flatten() .flatten()
.map(|p| config_dir.join(p)) .map(|p| config_dir.join(p))
}) })
.ok_or_else(|| Error::msg("\"to\" not configured"))? .ok_or_else(|| Error::msg("\"to\" not configured"))?
.canonicalize() .canonicalize()
.context("could not canonicalize \"to\" path")?, .context("Could not canonicalize \"to\" path")?,
matches: transcode_matches, matches: transcode_matches,
}) jobs: arg_matches
.get_one("jobs")
.copied()
.or_else(|| config_file.as_ref().map(|c| c.jobs).flatten()),
// .map(|jobs_str| {
// jobs_str.parse().with_context(|| {
// format!(
// "Could not parse \"jobs\" argument \"{}\" to a number",
// &jobs_str
// )
// })
// })
// .transpose()?,
})
} }
fn load_config_file(path: &Path) -> Result<Option<ConfigFile>> { fn load_config_file(path: &Path) -> Result<Option<ConfigFile>> {
let mut file = match std::fs::File::open(path) { let mut file = match std::fs::File::open(path) {
Ok(file) => file, Ok(file) => file,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None), Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(err) => return Err(Error::new(err)), Err(err) => return Err(Error::new(err)),
}; };
let config: ConfigFile = let config: ConfigFile =
serde_yaml::from_reader(&mut file).context("could not parse config file")?; serde_yaml::from_reader(&mut file).context("Could not parse config file")?;
Ok(Some(config)) Ok(Some(config))
} }

File diff suppressed because it is too large Load Diff

39
src/tag.rs Normal file
View File

@@ -0,0 +1,39 @@
use glib::{gstr, GStr, Value};
use gstreamer::{
tags::{merge_strings_with_comma, CustomTag},
Tag, TagFlag,
};
pub struct MbArtistId;
impl<'a> Tag<'a> for MbArtistId {
type TagType = &'a str;
const TAG_NAME: &'static GStr = gstr!("musicbrainz-artistid");
}
impl CustomTag<'_> for MbArtistId {
const FLAG: TagFlag = TagFlag::Meta;
const NICK: &'static GStr = gstr!("artist ID");
const DESCRIPTION: &'static GStr = gstr!("MusicBrainz artist ID");
fn merge_func(src: &Value) -> Value {
merge_strings_with_comma(src)
}
}
pub struct MbAlbumArtistId;
impl<'a> Tag<'a> for MbAlbumArtistId {
type TagType = &'a str;
const TAG_NAME: &'static GStr = gstr!("musicbrainz-albumartistid");
}
impl CustomTag<'_> for MbAlbumArtistId {
const FLAG: TagFlag = TagFlag::Meta;
const NICK: &'static GStr = gstr!("album artist ID");
const DESCRIPTION: &'static GStr = gstr!("MusicBrainz album artist ID");
fn merge_func(src: &Value) -> Value {
merge_strings_with_comma(src)
}
}

440
src/ui.rs
View File

@@ -2,8 +2,8 @@ use crate::ConversionArgs;
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use futures::Future; use futures::Future;
use std::{ use std::{
borrow::Cow, cell::RefCell, collections::HashMap, io, mem, path::PathBuf, rc::Rc, borrow::Cow, cell::RefCell, collections::HashMap, io, mem, path::PathBuf, rc::Rc,
time::Duration, time::Duration,
}; };
use tokio::{task, time::interval}; use tokio::{task, time::interval};
use tui::{backend::CrosstermBackend, Terminal}; use tui::{backend::CrosstermBackend, Terminal};
@@ -12,272 +12,272 @@ pub const UPDATE_INTERVAL_MILLIS: u64 = 100;
#[derive(Debug)] #[derive(Debug)]
pub enum Msg { pub enum Msg {
Init { task_len: usize, log_path: PathBuf }, Init { task_len: usize, log_path: PathBuf },
Exit, Exit,
TaskStart { id: usize, args: ConversionArgs }, TaskStart { id: usize, args: ConversionArgs },
TaskEnd { id: usize }, TaskEnd { id: usize },
TaskProgress { id: usize, ratio: f64 }, TaskProgress { id: usize, ratio: f64 },
TaskError { id: usize }, TaskError { id: usize },
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct MsgQueue { pub struct MsgQueue {
inner: Rc<RefCell<Vec<Msg>>>, inner: Rc<RefCell<Vec<Msg>>>,
} }
impl MsgQueue { impl MsgQueue {
fn new() -> MsgQueue { fn new() -> MsgQueue {
MsgQueue { MsgQueue {
inner: Rc::new(RefCell::new(Vec::new())), inner: Rc::new(RefCell::new(Vec::new())),
} }
} }
pub fn push(&self, msg: Msg) { pub fn push(&self, msg: Msg) {
self.inner.borrow_mut().push(msg); self.inner.borrow_mut().push(msg);
} }
fn swap_inner(&self, other: &mut Vec<Msg>) { fn swap_inner(&self, other: &mut Vec<Msg>) {
let mut inner = self.inner.borrow_mut(); let mut inner = self.inner.borrow_mut();
mem::swap(&mut *inner, other) mem::swap(&mut *inner, other)
} }
} }
struct State { struct State {
terminal: Terminal<CrosstermBackend<io::Stdout>>, terminal: Terminal<CrosstermBackend<io::Stdout>>,
log_path: Option<PathBuf>, log_path: Option<PathBuf>,
task_len: Option<usize>, task_len: Option<usize>,
ended_tasks: usize, ended_tasks: usize,
running_tasks: HashMap<usize, Task>, running_tasks: HashMap<usize, Task>,
has_rendered: bool, has_rendered: bool,
has_errored: bool, has_errored: bool,
} }
impl State { impl State {
fn new() -> Result<State> { fn new() -> Result<State> {
let terminal = Terminal::new(CrosstermBackend::new(io::stdout())) let terminal = Terminal::new(CrosstermBackend::new(io::stdout()))
.context("Unable to create ui terminal")?; .context("Unable to create ui terminal")?;
Ok(State { Ok(State {
terminal, terminal,
log_path: None, log_path: None,
task_len: None, task_len: None,
ended_tasks: 0, ended_tasks: 0,
running_tasks: HashMap::new(), running_tasks: HashMap::new(),
has_rendered: false, has_rendered: false,
has_errored: false, has_errored: false,
}) })
} }
fn process_msg(&mut self, msg: Msg) -> Result<bool> { fn process_msg(&mut self, msg: Msg) -> Result<bool> {
match msg { match msg {
Msg::Init { task_len, log_path } => { Msg::Init { task_len, log_path } => {
self.task_len = Some(task_len); self.task_len = Some(task_len);
self.log_path = Some(log_path); self.log_path = Some(log_path);
} }
Msg::Exit => return Ok(false), Msg::Exit => return Ok(false),
Msg::TaskStart { id, args } => { Msg::TaskStart { id, args } => {
self.running_tasks.insert( self.running_tasks.insert(
id, id,
Task { Task {
id, id,
ratio: None, ratio: None,
args, args,
}, },
); );
} }
Msg::TaskEnd { id } => { Msg::TaskEnd { id } => {
self.running_tasks self.running_tasks
.remove(&id) .remove(&id)
.context("unable to remove finished task; could't find task")?; .context("Unable to remove finished task; could't find task")?;
self.ended_tasks += 1; self.ended_tasks += 1;
} }
Msg::TaskProgress { id, ratio } => { Msg::TaskProgress { id, ratio } => {
let mut task = self let task = self
.running_tasks .running_tasks
.get_mut(&id) .get_mut(&id)
.context("Unable to update task progress; could't find task")?; .context("Unable to update task progress; could't find task")?;
task.ratio = Some(ratio); task.ratio = Some(ratio);
} }
Msg::TaskError { id } => { Msg::TaskError { id } => {
// TODO // TODO
self.running_tasks self.running_tasks
.remove(&id) .remove(&id)
.context("unable to remove errored task; could't find task")?; .context("Unable to remove errored task; could't find task")?;
self.ended_tasks += 1; self.ended_tasks += 1;
self.has_errored = true; self.has_errored = true;
} }
} }
Ok(true) Ok(true)
} }
fn render(&mut self) -> Result<()> { fn render(&mut self) -> Result<()> {
use tui::{ use tui::{
layout::{Constraint, Direction, Layout, Rect}, layout::{Constraint, Direction, Layout, Rect},
style::{Color, Modifier, Style}, style::{Color, Modifier, Style},
text::Text, text::Text,
widgets::{Block, Borders, Gauge, Paragraph}, widgets::{Block, Borders, Gauge, Paragraph},
}; };
let task_len = if let Some(task_len) = self.task_len { let task_len = if let Some(task_len) = self.task_len {
task_len task_len
} else { } else {
return Ok(()); return Ok(());
}; };
if task_len == 0 { if task_len == 0 {
return Ok(()); return Ok(());
} }
let tasks_ended = self.ended_tasks; let tasks_ended = self.ended_tasks;
let mut running_tasks: Vec<_> = self.running_tasks.values().cloned().collect(); let mut running_tasks: Vec<_> = self.running_tasks.values().cloned().collect();
running_tasks.sort_by_key(|task| task.id); running_tasks.sort_by_key(|task| task.id);
if !self.has_rendered { if !self.has_rendered {
self.terminal.clear().context("cleaning ui failed")?; self.terminal.clear().context("Clearing ui failed")?;
self.has_rendered = true; self.has_rendered = true;
} }
let error_text = match self.has_errored { let error_text = match self.has_errored {
true => { true => {
let text: Cow<'static, str> = self let text: Cow<'static, str> = self
.log_path .log_path
.as_ref() .as_ref()
.map(|lp| { .map(|lp| {
let text = format!("Error(s) occurred and were logged to {}", lp.display()); let text = format!("Error(s) occurred and were logged to {}", lp.display());
Cow::Owned(text) Cow::Owned(text)
}) })
.unwrap_or_else(|| Cow::Borrowed("Error(s) occurred")); .unwrap_or_else(|| Cow::Borrowed("Error(s) occurred"));
Some(text) Some(text)
} }
false => None, false => None,
}; };
self.terminal self.terminal
.draw(|f| { .draw(|f| {
let chunks = Layout::default() let chunks = Layout::default()
.direction(Direction::Vertical) .direction(Direction::Vertical)
.margin(1) .margin(1)
.constraints([Constraint::Percentage(90), Constraint::Percentage(10)].as_ref()) .constraints([Constraint::Percentage(90), Constraint::Percentage(10)].as_ref())
.split(f.size()); .split(f.size());
let mut task_rect = chunks[0]; let mut task_rect = chunks[0];
if error_text.is_some() { if error_text.is_some() {
task_rect.height -= 3; task_rect.height -= 3;
} }
for (row, task) in running_tasks for (row, task) in running_tasks
.into_iter() .into_iter()
.take(task_rect.height as usize / 2) .take(task_rect.height as usize / 2)
.enumerate() .enumerate()
{ {
f.render_widget( f.render_widget(
Gauge::default() Gauge::default()
.label(task.args.rel_from_path.to_string_lossy().as_ref()) .label(task.args.rel_from_path.to_string_lossy().as_ref())
.gauge_style( .gauge_style(
Style::default() Style::default()
.fg(Color::White) .fg(Color::White)
.bg(Color::Black) .bg(Color::Black)
.add_modifier(Modifier::ITALIC), .add_modifier(Modifier::ITALIC),
) )
.ratio(task.ratio.unwrap_or(0.0)), .ratio(task.ratio.unwrap_or(0.0)),
Rect::new( Rect::new(
task_rect.x, task_rect.x,
task_rect.y + row as u16 * 2, task_rect.y + row as u16 * 2,
task_rect.width, task_rect.width,
1, 1,
), ),
); );
} }
if let Some(error_text) = error_text { if let Some(error_text) = error_text {
f.render_widget( f.render_widget(
Paragraph::new(Text::raw(error_text)).style( Paragraph::new(Text::raw(error_text)).style(
Style::default() Style::default()
.fg(Color::Red) .fg(Color::Red)
.bg(Color::Black) .bg(Color::Black)
.add_modifier(Modifier::BOLD), .add_modifier(Modifier::BOLD),
), ),
Rect::new(task_rect.x, task_rect.height + 1, task_rect.width, 2), Rect::new(task_rect.x, task_rect.height + 1, task_rect.width, 2),
); );
} }
f.render_widget( f.render_widget(
Gauge::default() Gauge::default()
.block( .block(
Block::default() Block::default()
.borders(Borders::ALL) .borders(Borders::ALL)
.title("Overall Progress"), .title("Overall Progress"),
) )
.gauge_style( .gauge_style(
Style::default() Style::default()
.fg(Color::White) .fg(Color::White)
.bg(Color::Black) .bg(Color::Black)
.add_modifier(Modifier::ITALIC), .add_modifier(Modifier::ITALIC),
) )
.ratio(tasks_ended as f64 / task_len as f64), .ratio(tasks_ended as f64 / task_len as f64),
chunks[1], chunks[1],
); );
}) })
.context("rendering ui failed")?; .context("Rendering ui failed")?;
Ok(()) Ok(())
} }
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct Task { struct Task {
id: usize, id: usize,
ratio: Option<f64>, ratio: Option<f64>,
args: ConversionArgs, args: ConversionArgs,
} }
pub fn init() -> (MsgQueue, impl Future<Output = Result<()>>) { pub fn init() -> (MsgQueue, impl Future<Output = Result<()>>) {
let queue = MsgQueue::new(); let queue = MsgQueue::new();
let queue_clone = queue.clone(); let queue_clone = queue.clone();
let fut = async move { let fut = async move {
let mut interval = interval(Duration::from_millis(UPDATE_INTERVAL_MILLIS)); let mut interval = interval(Duration::from_millis(UPDATE_INTERVAL_MILLIS));
let mut wrapped = Some((Vec::new(), State::new()?)); let mut wrapped = Some((Vec::new(), State::new()?));
loop { loop {
interval.tick().await; interval.tick().await;
let (mut current_queue, mut state) = wrapped.take().context("`wrapped` is None")?; let (mut current_queue, mut state) = wrapped.take().context("`wrapped` is None")?;
queue_clone.swap_inner(&mut current_queue); queue_clone.swap_inner(&mut current_queue);
let render_res = task::spawn_blocking(move || -> Result<_> { let render_res = task::spawn_blocking(move || -> Result<_> {
let mut exit = false; let mut exit = false;
for msg in current_queue.drain(..) { for msg in current_queue.drain(..) {
if !state.process_msg(msg)? { if !state.process_msg(msg)? {
exit = true; exit = true;
} }
} }
state.render()?; state.render()?;
if exit { if exit {
Ok(None) Ok(None)
} else { } else {
Ok(Some((current_queue, state))) Ok(Some((current_queue, state)))
} }
}) })
.await .await
.context("ui update task failed")? .context("Ui update task failed")?
.context("ui update failed")?; .context("Ui update failed")?;
match render_res { match render_res {
Some(s) => wrapped = Some(s), Some(s) => wrapped = Some(s),
None => break, None => break,
} }
} }
Result::<_>::Ok(()) Result::<_>::Ok(())
}; };
(queue, fut) (queue, fut)
} }