29 Commits

Author SHA1 Message Date
a10345fbbe build: bump version (v1.3.0) 2022-04-07 21:12:25 +02:00
f3423eea53 fix: fix "from", "to" & "config" cli argument processing 2022-04-07 21:10:37 +02:00
80e3d02cb4 refactor: simplify config processing 2022-04-07 20:58:05 +02:00
b7a7abbf61 refactor: fmt 2022-04-07 19:36:49 +02:00
5abadc3131 build: update deps 2022-04-07 19:35:37 +02:00
9c86cdcc62 build: update deps 2022-02-24 21:45:52 +01:00
605fa5c15b build: update deps 2022-02-04 23:29:19 +01:00
9c1d39ba5f fix: allow multiple values for tag "musicbrainz-artistid" & "musicbrainz-albumartistid"
issue #2
2021-12-14 19:08:05 +01:00
c6c9da2f27 build: update deps 2021-12-14 18:49:01 +01:00
6872e7897b build: bump version (v1.2.2) 2021-08-25 17:11:23 +02:00
2d1497cb36 build: update deps 2021-08-25 17:09:33 +02:00
f1fb3506b5 build: update deps 2021-07-25 14:37:08 +02:00
f4050fe645 refactor: switch from space to tab indentation 2021-07-10 10:07:30 +02:00
b533f059d7 build: bump version (v1.2.1) 2021-07-05 20:02:49 +02:00
00a25e168d refactor: use more readable clamp method 2021-07-05 19:53:49 +02:00
b51c9939c1 build: upgrade deps 2021-07-05 19:46:06 +02:00
c22d45818e build: update deps 2021-06-18 11:46:48 +02:00
18cc852e6b build: update deps 2021-05-27 22:52:48 +02:00
65b4f398d9 build: bump version (v1.2.0) 2021-04-22 21:20:51 +02:00
7f40cb0581 build: update nix deps 2021-04-22 21:20:51 +02:00
bc15a4449d build: update cargo deps 2021-04-22 21:20:51 +02:00
1cf7cec8bd feat: add "jobs" cli argument 2021-04-22 21:20:51 +02:00
5cf98b3c17 feat: improve error messages 2021-04-21 00:57:23 +02:00
54e174eb0a build: include "README.md" & "CHANGELOG.md" to crate package 2021-04-21 00:26:28 +02:00
803860cce5 doc: add changelog 2021-04-21 00:26:21 +02:00
f2bfddd76e refactor: move actual transcoding into own fn 2021-04-17 01:53:25 +02:00
d073ef10b5 feat: add "copy" codec 2021-04-17 01:14:34 +02:00
399c4b8a2c build: update cargo deps 2021-04-17 00:21:47 +02:00
3188d074b7 build: update nix deps 2021-04-17 00:20:44 +02:00
11 changed files with 1342 additions and 1195 deletions

View File

@@ -1,7 +1,7 @@
root = true root = true
[*] [*]
indent_style = space indent_style = tab
indent_size = 4 indent_size = 4
charset = utf-8 charset = utf-8
trim_trailing_whitespace = true trim_trailing_whitespace = true

1
.rustfmt.toml Normal file
View File

@@ -0,0 +1 @@
hard_tabs = true

24
CHANGELOG.md Normal file
View File

@@ -0,0 +1,24 @@
# Changelog
## v1.3.0
* allow multiple values for the tags "musicbrainz-artistid" and "musicbrainz-albumartistid"
* fix "from", "to" & "config" cli argument processing
## v1.2.2
* dependencies upgraded
## v1.2.1
* dependencies upgraded
## v1.2.0
* "copy" encoding format added
* "jobs" cli argument added, that lets you set the number of concurrent transcodes
## v1.1.0
* "flac" encoding format added
* resampling quality set to highest/"10"

551
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "audio-conv" name = "audio-conv"
version = "1.1.0" version = "1.3.0"
edition = "2018" edition = "2018"
description = "Copies directory structure and converts audio files in it" description = "Copies directory structure and converts audio files in it"
authors = ["Thomas Heck <t@b128.net>"] authors = ["Thomas Heck <t@b128.net>"]
@@ -16,25 +16,27 @@ keywords = ["audio", "conversion", "opus", "flac"]
include = [ include = [
"/src/**/*", "/src/**/*",
"/example.audio-conv.yaml", "/example.audio-conv.yaml",
"/README.md",
"/CHANGELOG.md",
] ]
[dependencies] [dependencies]
gstreamer-audio = { version = "0.16", features = ["v1_10"] } gstreamer-audio = { version = "0.18", features = ["v1_10"] }
gstreamer = { version = "0.16", features = ["v1_10"] } gstreamer = { version = "0.18", features = ["v1_10"] }
gstreamer-base = { version = "0.16", features = ["v1_10"] } gstreamer-base = { version = "0.18", features = ["v1_10"] }
glib = "0.10" glib = "0.15"
futures = "0.3" futures = "0.3"
num_cpus = "1" num_cpus = "1"
walkdir = "2" walkdir = "2"
libc = "0.2" libc = "0.2"
anyhow = "1" anyhow = "1"
clap = "2" clap = { version = "3", features = ["cargo"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_yaml = "0.8" serde_yaml = "0.8"
regex = "1" regex = "1"
globset = "0.4" globset = "0.4"
derive_more = "0.99" derive_more = "0.99"
tui = { version = "0.14", default-features = false, features = ["crossterm"] } tui = { version = "0.17", default-features = false, features = ["crossterm"] }
[dependencies.tokio] [dependencies.tokio]
version = "1" version = "1"

View File

@@ -13,6 +13,10 @@ matches:
bitrate: 160 bitrate: 160
bitrate_type: vbr # or cbr bitrate_type: vbr # or cbr
# for copy (copies file without transcoding it):
# to:
# codec: copy
# for mp3: # for mp3:
# to: # to:
# codec: mp3 # codec: mp3

12
flake.lock generated
View File

@@ -2,11 +2,11 @@
"nodes": { "nodes": {
"flake-utils": { "flake-utils": {
"locked": { "locked": {
"lastModified": 1610051610, "lastModified": 1648297722,
"narHash": "sha256-U9rPz/usA1/Aohhk7Cmc2gBrEEKRzcW4nwPWMPwja4Y=", "narHash": "sha256-W+qlPsiZd8F3XkzXOzAoR+mpFqzm3ekQkJNa+PIh1BQ=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "3982c9903e93927c2164caa727cd3f6a0e6d14cc", "rev": "0f8662f1319ad6abf89b3380dd2722369fc51ade",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -32,11 +32,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1612988144, "lastModified": 1649352661,
"narHash": "sha256-X1IO9gtzE0dRVpDqknjF39IVDnuKuZsRis38WnLfHLo=", "narHash": "sha256-6IO5W02HKY6pj4uRgStJ2EjIENlpvbb99OlDBBzJMDQ=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "26f6af373ec1b867d751b56fb802f14010c8351b", "rev": "2bc410afc423de1fd7ce1d84da9f294eee866b3f",
"type": "github" "type": "github"
}, },
"original": { "original": {

View File

@@ -12,6 +12,7 @@ pub struct Config {
pub from: PathBuf, pub from: PathBuf,
pub to: PathBuf, pub to: PathBuf,
pub matches: Vec<TranscodeMatch>, pub matches: Vec<TranscodeMatch>,
pub jobs: Option<usize>,
} }
#[derive(Debug)] #[derive(Debug)]
@@ -46,6 +47,9 @@ pub enum Transcode {
#[serde(default = "bitrate_type_vbr")] #[serde(default = "bitrate_type_vbr")]
bitrate_type: BitrateType, bitrate_type: BitrateType,
}, },
#[serde(rename = "copy")]
Copy,
} }
impl Transcode { impl Transcode {
@@ -54,6 +58,7 @@ impl Transcode {
Transcode::Opus { .. } => "opus", Transcode::Opus { .. } => "opus",
Transcode::Flac { .. } => "flac", Transcode::Flac { .. } => "flac",
Transcode::Mp3 { .. } => "mp3", Transcode::Mp3 { .. } => "mp3",
Transcode::Copy => "",
} }
} }
} }
@@ -112,39 +117,50 @@ struct TranscodeMatchFile {
} }
pub fn config() -> Result<Config> { pub fn config() -> Result<Config> {
use clap::{App, Arg, SubCommand}; use clap::{Arg, Command};
let arg_matches = App::new("audio-conv") let arg_matches = Command::new("audio-conv")
.version(clap::crate_version!()) .version(clap::crate_version!())
.about("Converts audio files") .about("Converts audio files")
.arg( .arg(
Arg::with_name("config") Arg::new("config")
.short("c") .short('c')
.long("config") .long("config")
.allow_invalid_utf8(true)
.required(false) .required(false)
.takes_value(true) .takes_value(true)
.help("path to an audio-conv config file, defaults to \"audio-conv.yaml\""), .help("Path to an audio-conv config file, defaults to \"audio-conv.yaml\""),
) )
.arg( .arg(
Arg::with_name("from") Arg::new("from")
.short("f") .short('f')
.long("from") .long("from")
.allow_invalid_utf8(true)
.required(false) .required(false)
.takes_value(true) .takes_value(true)
.help("from directory path"), .help("\"from\" directory path"),
) )
.arg( .arg(
Arg::with_name("to") Arg::new("to")
.short("t") .short('t')
.long("to") .long("to")
.allow_invalid_utf8(true)
.required(false) .required(false)
.takes_value(true) .takes_value(true)
.help("to directory path"), .help("\"to\" directory path"),
) )
.subcommand(SubCommand::with_name("init").about("writes an example config")) .arg(
Arg::new("jobs")
.short('j')
.long("jobs")
.required(false)
.takes_value(true)
.help("Allow N jobs/transcodes at once. Defaults to number of logical cores"),
)
.subcommand(Command::new("init").about("writes an example config"))
.get_matches(); .get_matches();
let current_dir = std::env::current_dir().context("could not get current directory")?; let current_dir = std::env::current_dir().context("Could not get current directory")?;
let config_path = arg_matches.value_of_os("config"); let config_path = arg_matches.value_of_os("config");
let force_load = config_path.is_some(); let force_load = config_path.is_some();
@@ -159,17 +175,17 @@ pub fn config() -> Result<Config> {
.create_new(true) .create_new(true)
.open(&config_path) .open(&config_path)
.and_then(|mut f| f.write_all(std::include_bytes!("../example.audio-conv.yaml"))) .and_then(|mut f| f.write_all(std::include_bytes!("../example.audio-conv.yaml")))
.with_context(|| format!("unable to write config file to {}", config_path.display()))?; .with_context(|| format!("Unable to write config file to {}", config_path.display()))?;
std::process::exit(0); std::process::exit(0);
} }
let config_dir = config_path let config_dir = config_path
.parent() .parent()
.context("could not get parent directory of the config file")?; .context("Could not get parent directory of the config file")?;
let config_file = load_config_file(&config_path) let config_file = load_config_file(&config_path)
.with_context(|| format!("failed loading config file \"{}\"", config_path.display()))?; .with_context(|| format!("Failed loading config file {}", config_path.display()))?;
if force_load && config_file.is_none() { if force_load && config_file.is_none() {
return Err(Error::msg(format!( return Err(Error::msg(format!(
@@ -181,7 +197,7 @@ pub fn config() -> Result<Config> {
let default_regex = RegexBuilder::new("\\.(flac|wav)$") let default_regex = RegexBuilder::new("\\.(flac|wav)$")
.case_insensitive(true) .case_insensitive(true)
.build() .build()
.expect("failed compiling default match regex"); .expect("Failed compiling default match regex");
let transcode_matches = config_file let transcode_matches = config_file
.as_ref() .as_ref()
@@ -194,8 +210,8 @@ pub fn config() -> Result<Config> {
let glob = GlobBuilder::new(glob) let glob = GlobBuilder::new(glob)
.case_insensitive(true) .case_insensitive(true)
.build() .build()
.context("failed building glob")?; .context("Failed building glob")?;
let regex = Regex::new(glob.regex()).context("failed compiling regex")?; let regex = Regex::new(glob.regex()).context("Failed compiling regex")?;
Ok(regex) Ok(regex)
}); });
@@ -203,7 +219,7 @@ pub fn config() -> Result<Config> {
let regex = RegexBuilder::new(regex) let regex = RegexBuilder::new(regex)
.case_insensitive(true) .case_insensitive(true)
.build() .build()
.context("failed compiling regex")?; .context("Failed compiling regex")?;
Ok(regex) Ok(regex)
}); });
@@ -215,7 +231,7 @@ pub fn config() -> Result<Config> {
let regex = RegexBuilder::new(&ext) let regex = RegexBuilder::new(&ext)
.case_insensitive(true) .case_insensitive(true)
.build() .build()
.context("failed compiling regex")?; .context("Failed compiling regex")?;
Ok(regex) Ok(regex)
}); });
@@ -258,7 +274,7 @@ pub fn config() -> Result<Config> {
}) })
.ok_or_else(|| Error::msg("\"from\" not configured"))? .ok_or_else(|| Error::msg("\"from\" not configured"))?
.canonicalize() .canonicalize()
.context("could not canonicalize \"from\" path")? .context("Could not canonicalize \"from\" path")?
}, },
to: arg_matches to: arg_matches
.value_of_os("to") .value_of_os("to")
@@ -272,8 +288,19 @@ pub fn config() -> Result<Config> {
}) })
.ok_or_else(|| Error::msg("\"to\" not configured"))? .ok_or_else(|| Error::msg("\"to\" not configured"))?
.canonicalize() .canonicalize()
.context("could not canonicalize \"to\" path")?, .context("Could not canonicalize \"to\" path")?,
matches: transcode_matches, matches: transcode_matches,
jobs: arg_matches
.value_of("jobs")
.map(|jobs_str| {
jobs_str.parse().with_context(|| {
format!(
"Could not parse \"jobs\" argument \"{}\" to a number",
&jobs_str
)
})
})
.transpose()?,
}) })
} }
@@ -284,6 +311,6 @@ fn load_config_file(path: &Path) -> Result<Option<ConfigFile>> {
Err(err) => return Err(Error::new(err)), Err(err) => return Err(Error::new(err)),
}; };
let config: ConfigFile = let config: ConfigFile =
serde_yaml::from_reader(&mut file).context("could not parse config file")?; serde_yaml::from_reader(&mut file).context("Could not parse config file")?;
Ok(Some(config)) Ok(Some(config))
} }

View File

@@ -1,11 +1,12 @@
mod config; mod config;
mod tag;
mod ui; mod ui;
use crate::config::Config; use crate::config::{Config, Transcode};
use anyhow::{Context, Error, Result}; use anyhow::{Context, Error, Result};
use futures::{pin_mut, prelude::*}; use futures::{pin_mut, prelude::*};
use glib::{subclass::prelude::*, GBoxed, GString}; use glib::{Boxed, GString};
use gstreamer::{gst_element_error, prelude::*, Element}; use gstreamer::{element_error, prelude::*, Element};
use gstreamer_base::prelude::*; use gstreamer_base::prelude::*;
use std::{ use std::{
borrow::Cow, borrow::Cow,
@@ -19,8 +20,8 @@ use std::{
}; };
use tokio::{fs, io::AsyncWriteExt, task, time::interval}; use tokio::{fs, io::AsyncWriteExt, task, time::interval};
#[derive(Clone, Debug, GBoxed)] #[derive(Clone, Debug, Boxed)]
#[gboxed(type_name = "GBoxErrorWrapper")] #[boxed_type(name = "GBoxErrorWrapper")]
struct GBoxErrorWrapper(Arc<Error>); struct GBoxErrorWrapper(Arc<Error>);
impl GBoxErrorWrapper { impl GBoxErrorWrapper {
@@ -52,12 +53,12 @@ struct GErrorMessage {
fn gmake<T: IsA<Element>>(factory_name: &str) -> Result<T> { fn gmake<T: IsA<Element>>(factory_name: &str) -> Result<T> {
let res = gstreamer::ElementFactory::make(factory_name, None) let res = gstreamer::ElementFactory::make(factory_name, None)
.with_context(|| format!("could not make gstreamer Element \"{}\"", factory_name))? .with_context(|| format!("Could not make gstreamer Element \"{}\"", factory_name))?
.downcast() .downcast()
.ok() .ok()
.with_context(|| { .with_context(|| {
format!( format!(
"could not cast gstreamer Element \"{}\" into `{}`", "Could not cast gstreamer Element \"{}\" into `{}`",
factory_name, factory_name,
std::any::type_name::<T>() std::any::type_name::<T>()
) )
@@ -68,7 +69,7 @@ fn gmake<T: IsA<Element>>(factory_name: &str) -> Result<T> {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ConversionArgs { pub struct ConversionArgs {
rel_from_path: PathBuf, rel_from_path: PathBuf,
transcode: config::Transcode, transcode: Transcode,
} }
fn get_conversion_args(config: &Config) -> impl Iterator<Item = Result<ConversionArgs>> + '_ { fn get_conversion_args(config: &Config) -> impl Iterator<Item = Result<ConversionArgs>> + '_ {
@@ -97,7 +98,7 @@ fn get_conversion_args(config: &Config) -> impl Iterator<Item = Result<Conversio
let rel_path = e.path().strip_prefix(&config.from).with_context(|| { let rel_path = e.path().strip_prefix(&config.from).with_context(|| {
format!( format!(
"unable to get relative path for {} from {}", "Unable to get relative path for {} from {}",
e.path().display(), e.path().display(),
config.from.display() config.from.display()
) )
@@ -112,7 +113,10 @@ fn get_conversion_args(config: &Config) -> impl Iterator<Item = Result<Conversio
.map_err(Error::new) .map_err(Error::new)
.and_then(|md| md.modified().map_err(Error::new)) .and_then(|md| md.modified().map_err(Error::new))
.with_context(|| { .with_context(|| {
format!("unable to get mtime for from file {}", e.path().display()) format!(
"Unable to get mtime for \"from\" file {}",
e.path().display()
)
})?; })?;
let to_mtime = to.metadata().and_then(|md| md.modified()); let to_mtime = to.metadata().and_then(|md| md.modified());
match to_mtime { match to_mtime {
@@ -120,7 +124,7 @@ fn get_conversion_args(config: &Config) -> impl Iterator<Item = Result<Conversio
Err(err) if err.kind() == std::io::ErrorKind::NotFound => true, Err(err) if err.kind() == std::io::ErrorKind::NotFound => true,
Err(err) => { Err(err) => {
return Err(err).with_context(|| { return Err(err).with_context(|| {
format!("unable to get mtime for to file {}", to.display()) format!("Unable to get mtime for \"to\" file {}", to.display())
}) })
} }
} }
@@ -147,15 +151,15 @@ async fn main() -> Result<()> {
let main_handle = async move { let main_handle = async move {
let ok = task::spawn_local(main_loop(ui_queue)) let ok = task::spawn_local(main_loop(ui_queue))
.await .await
.context("main task failed")??; .context("Main task failed")??;
Result::<_>::Ok(ok) Result::<_>::Ok(ok)
}; };
let ui_handle = async move { let ui_handle = async move {
let ok = task::spawn_local(ui_fut) let ok = task::spawn_local(ui_fut)
.await .await
.context("ui task failed")? .context("Ui task failed")?
.context("ui failed")?; .context("Ui failed")?;
Result::<_>::Ok(ok) Result::<_>::Ok(ok)
}; };
@@ -168,20 +172,23 @@ async fn main() -> Result<()> {
async fn main_loop(ui_queue: ui::MsgQueue) -> Result<()> { async fn main_loop(ui_queue: ui::MsgQueue) -> Result<()> {
let (config, conv_args) = task::spawn_blocking(|| -> Result<_> { let (config, conv_args) = task::spawn_blocking(|| -> Result<_> {
gstreamer::init()?; gstreamer::init()?;
let config = config::config().context("could not get the config")?; gstreamer::tags::register::<tag::MbArtistId>();
gstreamer::tags::register::<tag::MbAlbumArtistId>();
let config = config::config().context("Could not get the config")?;
let conv_args = get_conversion_args(&config) let conv_args = get_conversion_args(&config)
.collect::<Result<Vec<_>>>() .collect::<Result<Vec<_>>>()
.context("failed loading dir structure")?; .context("Failed loading dir structure")?;
Ok((config, conv_args)) Ok((config, conv_args))
}) })
.await .await
.context("init task failed")??; .context("Init task failed")??;
let log_path = Path::new(".") let log_path = Path::new(".")
.canonicalize() .canonicalize()
.context("unable to canonicalize path to log file")? .context("Unable to canonicalize path to log file")?
.join("audio-conv.log"); .join("audio-conv.log");
ui_queue.push(ui::Msg::Init { ui_queue.push(ui::Msg::Init {
@@ -189,9 +196,11 @@ async fn main_loop(ui_queue: ui::MsgQueue) -> Result<()> {
log_path: log_path.clone(), log_path: log_path.clone(),
}); });
let concurrent_jobs = config.jobs.unwrap_or_else(|| num_cpus::get());
stream::iter(conv_args.into_iter().enumerate()) stream::iter(conv_args.into_iter().enumerate())
.map(Ok) .map(Ok)
.try_for_each_concurrent(num_cpus::get(), |(i, args)| { .try_for_each_concurrent(concurrent_jobs, |(i, args)| {
let config = &config; let config = &config;
let ui_queue = &ui_queue; let ui_queue = &ui_queue;
let log_path = &log_path; let log_path = &log_path;
@@ -206,7 +215,7 @@ async fn main_loop(ui_queue: ui::MsgQueue) -> Result<()> {
Ok(()) => ui_queue.push(ui::Msg::TaskEnd { id: i }), Ok(()) => ui_queue.push(ui::Msg::TaskEnd { id: i }),
Err(err) => { Err(err) => {
let err = err.context(format!( let err = err.context(format!(
"failed transcoding \"{}\"", "Transcoding failed for {}",
args.rel_from_path.display() args.rel_from_path.display()
)); ));
@@ -264,15 +273,64 @@ async fn transcode(
) -> Result<()> { ) -> Result<()> {
let from_path = config.from.join(&args.rel_from_path); let from_path = config.from.join(&args.rel_from_path);
let mut to_path = config.to.join(&args.rel_from_path); let mut to_path = config.to.join(&args.rel_from_path);
to_path.set_extension(args.transcode.extension());
let file_src: Element = gmake("filesrc")?; fs::create_dir_all(
file_src.set_property("location", &path_to_gstring(&from_path))?; to_path
.parent()
.with_context(|| format!("Could not get parent dir for {}", to_path.display()))?,
)
.await?;
// encode into a tmp file first, then rename to actual file name, that way we're writing // encode into a tmp file first, then rename to actual file name, that way we're writing
// "whole" files to the intended file path, ignoring partial files in the mtime check // "whole" files to the intended file path, ignoring partial files in the mtime check
let to_path_tmp = to_path.with_extension("tmp"); let to_path_tmp = to_path.with_extension("tmp");
rm_file_on_err(&to_path_tmp, async {
match args.transcode {
Transcode::Copy => {
fs::copy(&from_path, &to_path_tmp).await.with_context(|| {
format!(
"Could not copy file from {} to {}",
from_path.display(),
to_path_tmp.display()
)
})?;
}
_ => {
to_path.set_extension(args.transcode.extension());
transcode_gstreamer(
&from_path,
&to_path_tmp,
args.transcode.clone(),
task_id,
queue,
)
.await?
}
}
fs::rename(&to_path_tmp, &to_path).await.with_context(|| {
format!(
"Could not rename temporary file {} to {}",
to_path_tmp.display(),
to_path.display()
)
})
})
.await
}
async fn transcode_gstreamer(
from_path: &Path,
to_path: &Path,
transcode: Transcode,
task_id: usize,
queue: &ui::MsgQueue,
) -> Result<()> {
let file_src: Element = gmake("filesrc")?;
file_src.try_set_property("location", &path_to_gstring(&from_path))?;
let decodebin: Element = gmake("decodebin")?; let decodebin: Element = gmake("decodebin")?;
let src_elems: &[&Element] = &[&file_src, &decodebin]; let src_elems: &[&Element] = &[&file_src, &decodebin];
@@ -285,10 +343,7 @@ async fn transcode(
// downgrade pipeline RC to a weak RC to break the reference cycle // downgrade pipeline RC to a weak RC to break the reference cycle
let pipeline_weak = pipeline.downgrade(); let pipeline_weak = pipeline.downgrade();
let transcode_args = args.transcode.clone(); let to_path_clone = to_path.to_owned();
let to_path_tmp_clone = to_path_tmp.clone();
decodebin.connect_pad_added(move |decodebin, src_pad| { decodebin.connect_pad_added(move |decodebin, src_pad| {
let insert_sink = || -> Result<()> { let insert_sink = || -> Result<()> {
let pipeline = match pipeline_weak.upgrade() { let pipeline = match pipeline_weak.upgrade() {
@@ -299,9 +354,9 @@ async fn transcode(
} }
}; };
let is_audio = src_pad.get_current_caps().and_then(|caps| { let is_audio = src_pad.current_caps().and_then(|caps| {
caps.get_structure(0).map(|s| { caps.structure(0).map(|s| {
let name = s.get_name(); let name = s.name();
name.starts_with("audio/") name.starts_with("audio/")
}) })
}); });
@@ -309,7 +364,7 @@ async fn transcode(
None => { None => {
return Err(Error::msg(format!( return Err(Error::msg(format!(
"Failed to get media type from pad {}", "Failed to get media type from pad {}",
src_pad.get_name() src_pad.name()
))); )));
} }
Some(false) => { Some(false) => {
@@ -321,7 +376,7 @@ async fn transcode(
let resample: Element = gmake("audioresample")?; let resample: Element = gmake("audioresample")?;
// quality from 0 to 10 // quality from 0 to 10
resample.set_property("quality", &10)?; resample.try_set_property("quality", &10i32)?;
let mut dest_elems = vec![ let mut dest_elems = vec![
resample, resample,
@@ -329,17 +384,17 @@ async fn transcode(
gmake("audioconvert")?, gmake("audioconvert")?,
]; ];
match &transcode_args { match &transcode {
config::Transcode::Opus { Transcode::Opus {
bitrate, bitrate,
bitrate_type, bitrate_type,
} => { } => {
let encoder: Element = gmake("opusenc")?; let encoder: Element = gmake("opusenc")?;
encoder.set_property( encoder.try_set_property(
"bitrate", "bitrate",
&i32::from(*bitrate) &i32::from(*bitrate)
.checked_mul(1_000) .checked_mul(1_000)
.context("bitrate overflowed")?, .context("Bitrate overflowed")?,
)?; )?;
encoder.set_property_from_str( encoder.set_property_from_str(
"bitrate-type", "bitrate-type",
@@ -353,21 +408,21 @@ async fn transcode(
dest_elems.push(gmake("oggmux")?); dest_elems.push(gmake("oggmux")?);
} }
config::Transcode::Flac { compression } => { Transcode::Flac { compression } => {
let encoder: Element = gmake("flacenc")?; let encoder: Element = gmake("flacenc")?;
encoder.set_property_from_str("quality", &compression.to_string()); encoder.set_property_from_str("quality", &compression.to_string());
dest_elems.push(encoder); dest_elems.push(encoder);
} }
config::Transcode::Mp3 { Transcode::Mp3 {
bitrate, bitrate,
bitrate_type, bitrate_type,
} => { } => {
let encoder: Element = gmake("lamemp3enc")?; let encoder: Element = gmake("lamemp3enc")?;
// target: "1" = "bitrate" // target: "1" = "bitrate"
encoder.set_property_from_str("target", "1"); encoder.set_property_from_str("target", "1");
encoder.set_property("bitrate", &i32::from(*bitrate))?; encoder.try_set_property("bitrate", &i32::from(*bitrate))?;
encoder.set_property( encoder.try_set_property(
"cbr", "cbr",
match bitrate_type { match bitrate_type {
config::BitrateType::Vbr => &false, config::BitrateType::Vbr => &false,
@@ -378,10 +433,15 @@ async fn transcode(
dest_elems.push(encoder); dest_elems.push(encoder);
dest_elems.push(gmake("id3v2mux")?); dest_elems.push(gmake("id3v2mux")?);
} }
Transcode::Copy => {
// already handled outside this fn
unreachable!();
}
}; };
let file_dest: gstreamer_base::BaseSink = gmake("filesink")?; let file_dest: gstreamer_base::BaseSink = gmake("filesink")?;
file_dest.set_property("location", &path_to_gstring(&to_path_tmp_clone))?; file_dest.try_set_property("location", &path_to_gstring(&to_path_clone))?;
file_dest.set_sync(false); file_dest.set_sync(false);
dest_elems.push(file_dest.upcast()); dest_elems.push(file_dest.upcast());
@@ -396,7 +456,7 @@ async fn transcode(
let sink_pad = dest_elems let sink_pad = dest_elems
.get(0) .get(0)
.unwrap() .unwrap()
.get_static_pad("sink") .static_pad("sink")
.expect("1. dest element has no sinkpad"); .expect("1. dest element has no sinkpad");
src_pad.link(&sink_pad)?; src_pad.link(&sink_pad)?;
@@ -408,7 +468,7 @@ async fn transcode(
.field("error", &GBoxErrorWrapper::new(err)) .field("error", &GBoxErrorWrapper::new(err))
.build(); .build();
gst_element_error!( element_error!(
decodebin, decodebin,
gstreamer::LibraryError::Failed, gstreamer::LibraryError::Failed,
("Failed to insert sink"), ("Failed to insert sink"),
@@ -417,16 +477,8 @@ async fn transcode(
} }
}); });
let bus = pipeline.get_bus().context("pipe get bus")?; let bus = pipeline.bus().context("Could not get bus for pipeline")?;
fs::create_dir_all(
to_path
.parent()
.with_context(|| format!("could not get parent dir for {}", to_path.display()))?,
)
.await?;
rm_file_on_err(&to_path_tmp, async {
pipeline pipeline
.set_state(gstreamer::State::Playing) .set_state(gstreamer::State::Playing)
.context("Unable to set the pipeline to the `Playing` state")?; .context("Unable to set the pipeline to the `Playing` state")?;
@@ -446,37 +498,43 @@ async fn transcode(
Ok(false) Ok(false)
} }
MessageView::Error(err) => { MessageView::Error(err) => {
pipeline.set_state(gstreamer::State::Null).context( let pipe_stop_res = pipeline.set_state(gstreamer::State::Null);
"Unable to set the pipeline to the `Null` state, after error",
)?;
let err = err let err: Error = err
.get_details() .details()
.and_then(|details| { .and_then(|details| {
if details.get_name() != "error-details" { if details.name() != "error-details" {
return None; return None;
} }
let err = details let err = details
.get::<&GBoxErrorWrapper>("error") .get::<&GBoxErrorWrapper>("error")
.unwrap() .unwrap()
.map(|err| err.clone().into()) .clone()
.expect("error-details message without actual error"); .into();
Some(err) Some(err)
}) })
.unwrap_or_else(|| { .unwrap_or_else(|| {
GErrorMessage { GErrorMessage {
src: msg src: msg
.get_src() .src()
.map(|s| String::from(s.get_path_string())) .map(|s| String::from(s.path_string()))
.unwrap_or_else(|| String::from("None")), .unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(), error: err.error().to_string(),
debug: err.get_debug(), debug: err.debug(),
source: err.get_error(), source: err.error(),
} }
.into() .into()
}); });
if let Err(pipe_err) = pipe_stop_res {
let err = err.context(pipe_err).context(
"Unable to set the pipeline to the `Null` state, after error",
);
Err(err) Err(err)
} else {
Err(err)
}
} }
_ => Ok(true), _ => Ok(true),
} }
@@ -489,8 +547,7 @@ async fn transcode(
} }
}) })
.try_for_each(|_| futures::future::ready(Ok(()))) .try_for_each(|_| futures::future::ready(Ok(())))
.await .await?;
.context("failed converting")?;
Result::<_>::Ok(()) Result::<_>::Ok(())
}; };
@@ -505,7 +562,7 @@ async fn transcode(
let dur = decodebin let dur = decodebin
.query_duration::<ClockTime>() .query_duration::<ClockTime>()
.and_then(|time| time.nanoseconds()); .map(|time| time.nseconds());
let ratio = dur.and_then(|dur| { let ratio = dur.and_then(|dur| {
if dur == 0 { if dur == 0 {
@@ -514,11 +571,11 @@ async fn transcode(
let pos = decodebin let pos = decodebin
.query_position::<ClockTime>() .query_position::<ClockTime>()
.and_then(|time| time.nanoseconds()); .map(|time| time.nseconds());
pos.map(|pos| { pos.map(|pos| {
let ratio = pos as f64 / dur as f64; let ratio = pos as f64 / dur as f64;
ratio.max(0.0).min(1.0) ratio.clamp(0.0, 1.0)
}) })
}); });
@@ -540,11 +597,7 @@ async fn transcode(
.set_state(gstreamer::State::Null) .set_state(gstreamer::State::Null)
.context("Unable to set the pipeline to the `Null` state")?; .context("Unable to set the pipeline to the `Null` state")?;
fs::rename(&to_path_tmp, &to_path).await?;
Ok(()) Ok(())
})
.await
} }
async fn rm_file_on_err<F, T>(path: &Path, f: F) -> Result<T> async fn rm_file_on_err<F, T>(path: &Path, f: F) -> Result<T>
@@ -558,7 +611,7 @@ where
Err(fs_err) => { Err(fs_err) => {
let err = err let err = err
.context(fs_err) .context(fs_err)
.context(format!("removing {} failed", path.display())); .context(format!("Removing file {} failed", path.display()));
Err(err) Err(err)
} }
}, },

45
src/tag.rs Normal file
View File

@@ -0,0 +1,45 @@
use glib::Value;
use gstreamer::{
tags::{merge_strings_with_comma, CustomTag},
Tag, TagFlag,
};
pub struct MbArtistId;
impl<'a> Tag<'a> for MbArtistId {
type TagType = &'a str;
fn tag_name<'b>() -> &'b str {
"musicbrainz-artistid"
}
}
impl CustomTag<'_> for MbArtistId {
const FLAG: TagFlag = TagFlag::Meta;
const NICK: &'static str = "artist ID";
const DESCRIPTION: &'static str = "MusicBrainz artist ID";
fn merge_func(src: &Value) -> Value {
merge_strings_with_comma(src)
}
}
pub struct MbAlbumArtistId;
impl<'a> Tag<'a> for MbAlbumArtistId {
type TagType = &'a str;
fn tag_name<'b>() -> &'b str {
"musicbrainz-albumartistid"
}
}
impl CustomTag<'_> for MbAlbumArtistId {
const FLAG: TagFlag = TagFlag::Meta;
const NICK: &'static str = "album artist ID";
const DESCRIPTION: &'static str = "MusicBrainz album artist ID";
fn merge_func(src: &Value) -> Value {
merge_strings_with_comma(src)
}
}

View File

@@ -88,7 +88,7 @@ impl State {
Msg::TaskEnd { id } => { Msg::TaskEnd { id } => {
self.running_tasks self.running_tasks
.remove(&id) .remove(&id)
.context("unable to remove finished task; couldn't find task")?; .context("Unable to remove finished task; couldn't find task")?;
self.ended_tasks += 1; self.ended_tasks += 1;
} }
Msg::TaskProgress { id, ratio } => { Msg::TaskProgress { id, ratio } => {
@@ -102,7 +102,7 @@ impl State {
// TODO // TODO
self.running_tasks self.running_tasks
.remove(&id) .remove(&id)
.context("unable to remove errored task; couldn't find task")?; .context("Unable to remove errored task; couldn't find task")?;
self.ended_tasks += 1; self.ended_tasks += 1;
self.has_errored = true; self.has_errored = true;
} }
@@ -136,7 +136,7 @@ impl State {
running_tasks.sort_by_key(|task| task.id); running_tasks.sort_by_key(|task| task.id);
if !self.has_rendered { if !self.has_rendered {
self.terminal.clear().context("cleaning ui failed")?; self.terminal.clear().context("Clearing ui failed")?;
self.has_rendered = true; self.has_rendered = true;
} }
@@ -222,7 +222,7 @@ impl State {
chunks[1], chunks[1],
); );
}) })
.context("rendering ui failed")?; .context("Rendering ui failed")?;
Ok(()) Ok(())
} }
@@ -267,8 +267,8 @@ pub fn init() -> (MsgQueue, impl Future<Output = Result<()>>) {
} }
}) })
.await .await
.context("ui update task failed")? .context("Ui update task failed")?
.context("ui update failed")?; .context("Ui update failed")?;
match render_res { match render_res {
Some(s) => wrapped = Some(s), Some(s) => wrapped = Some(s),