More idiomatic Rust, add Cargo.lock to the repo (#313)

Previously, we represented quantizer values as owned `String`s when
constructing the various encoder commands, which is slower and less
robust than representing the quantizer as an integral value. Both `q`
and `n_threads` are now represented as a `usize` and only converted to
a `String` where formatting requires it.
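
A minimal sketch of the pattern, using illustrative names rather than the actual av1an types or flags:

```rust
// Illustrative only: these names are not the real av1an types or flags.
struct ProbeSettings {
    q: usize,
    n_threads: usize,
}

impl ProbeSettings {
    // The conversion to `String` happens only when the command line
    // is actually assembled.
    fn to_args(&self) -> Vec<String> {
        vec![
            "--crf".to_string(),
            self.q.to_string(),
            "--threads".to_string(),
            self.n_threads.to_string(),
        ]
    }
}
```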

Furthermore, several struct fields containing paths have been updated to
use a `PathBuf` or `&Path` instead of a `String`. These are the first
steps required to support non-UTF-8 filenames on Windows and Unix-based
operating systems.
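
A hedged before/after sketch of that field change; the struct and field names here are hypothetical and the real `Project` layout differs:

```rust
use std::path::{Path, PathBuf};

// Hypothetical before/after; the real struct has many more fields.
struct ProjectOld {
    input: String, // can only hold valid UTF-8, so some OS paths are unrepresentable
}

struct ProjectNew {
    input: PathBuf, // stores the platform's native path encoding unchanged
}

// A borrowed `&Path` goes straight to the OS with no UTF-8 round trip.
fn open_input(input: &Path) -> std::io::Result<std::fs::File> {
    std::fs::File::open(input)
}
```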

`run_vmaf_on_chunk`'s signature has been updated to take generic
arguments that satisfy `AsRef<Path>`, which saves the caller from
creating a new `Path` when the original argument is, for example, a
`String`.
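
A simplified sketch of that signature style, with most of the real parameters omitted:

```rust
use std::path::Path;

// Simplified sketch: the real `run_vmaf_on_chunk` takes several more arguments.
fn run_vmaf_on_chunk(encoded: impl AsRef<Path>, stat_file: impl AsRef<Path>) {
    let encoded: &Path = encoded.as_ref();
    let stat_file: &Path = stat_file.as_ref();
    // ... spawn ffmpeg/libvmaf with `encoded` and `stat_file` here ...
    let _ = (encoded, stat_file);
}

// Callers can pass a `String`, `&str`, `PathBuf`, or `&Path` directly,
// without wrapping the value in `Path::new` first.
fn example_caller() {
    let encoded = String::from("chunk_00001.ivf");
    run_vmaf_on_chunk(&encoded, "vmaf_00001.json");
}
```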

* Do not represent the concatenation method as a string
* Fix pedantic clippy warnings
* Remove Cargo.lock from .gitignore
* Fix startup_check for ivf concatenation
* Add doc comment to `Args` for better help generation in Clap
* Use `Display` impl instead of directly using `<&'static str>::from` for `Encoder` and `ConcatMethod` (see the sketch after this list)
* Do not unwrap the result of killing child processes
* Assert that VMAF calculation was successful
* Use more idiomatic Rust
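
For the `Display` bullet above, a rough sketch of the pattern (abbreviated; only the `ConcatMethod` impl appears verbatim in the diff below, and the real enum delegates to `strum::IntoStaticStr`):

```rust
use std::fmt::{self, Display};

// Abbreviated sketch: the real enum has more variants.
#[derive(Clone, Copy)]
enum Encoder {
    Aom,
    Rav1e,
}

impl Display for Encoder {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Encoder::Aom => "aom",
            Encoder::Rav1e => "rav1e",
        })
    }
}

// With `Display`, call sites can format the encoder directly instead of
// going through `<&'static str>::from(encoder)`:
fn output_name(stem: &str, encoder: Encoder) -> String {
    format!("{}_{}.mkv", stem, encoder)
}
```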
redzic 2021-07-24 12:09:21 -05:00 committed by GitHub
parent 772eddd103
commit 4ef9bdaa8a
12 changed files with 2347 additions and 489 deletions

.gitignore (vendored): 1 line changed

@ -150,6 +150,5 @@ Upload.sh
*.backup
*.lock
*.png
*.json

Cargo.lock (generated, new file): 2003 lines. File diff suppressed because it is too large.


@ -5,7 +5,7 @@ use std::path::PathBuf;
use av1an_core::{ChunkMethod, ConcatMethod, Encoder, SplitMethod};
// Cross-platform command-line AV1 / VP9 / HEVC / H264 encoding framework with per scene quality encoding
/// Cross-platform command-line AV1 / VP9 / HEVC / H264 encoding framework with per scene quality encoding
#[derive(Clap, Debug, Serialize, Deserialize)]
#[clap(name = "av1an", setting = ColoredHelp, version)]
pub struct Args {


@ -37,8 +37,7 @@ pub fn main() {
Vec::new()
},
temp,
mkvmerge: false,
output_ivf: false,
force: args.force,
passes: if let Some(passes) = args.passes {
passes
} else {
@ -55,8 +54,7 @@ pub fn main() {
format!(
"{}_{}.mkv",
args.input.file_stem().unwrap().to_str().unwrap(),
// TODO make Encoder implement Display
<&'static str>::from(args.encoder)
args.encoder
)
},
audio_params: if let Some(params) = args.audio_params {
@ -68,7 +66,7 @@ pub fn main() {
chunk_method: args
.chunk_method
.unwrap_or_else(|| vapoursynth::select_chunk_method().unwrap()),
concat: <&'static str>::from(args.concat).to_owned(),
concat: args.concat,
encoder: args.encoder,
extra_splits_len: Some(args.extra_split),
input: args.input.to_str().unwrap().to_owned(),
@ -97,9 +95,7 @@ pub fn main() {
},
vmaf: args.vmaf,
vmaf_filter: args.vmaf_filter,
vmaf_path: args
.vmaf_path
.map(|scenes| scenes.to_str().unwrap().to_owned()),
vmaf_path: args.vmaf_path,
vmaf_res: Some(args.vmaf_res),
webm: false,
workers: args.workers,


@ -2,8 +2,8 @@ use av_format::buffer::AccReader;
use av_format::demuxer::Context as DemuxerContext;
use av_format::demuxer::Event;
use av_format::muxer::Context as MuxerContext;
use av_ivf::demuxer::*;
use av_ivf::muxer::*;
use av_ivf::demuxer::IvfDemuxer;
use av_ivf::muxer::IvfMuxer;
use std::fs::{read_dir, File};
use std::path::{Path, PathBuf};
use std::process::Command;
@ -78,7 +78,7 @@ pub fn concat_ivf(input: &Path, out: &Path) -> anyhow::Result<()> {
muxer.write_header()?;
let mut pos_offset: usize = 0;
for file in files.iter() {
for file in &files {
let mut last_pos: usize = 0;
let input = std::fs::File::open(file)?;
@ -135,10 +135,10 @@ pub fn concatenate_mkvmerge(encode_folder: String, output: String) -> Result<(),
let mut files: Vec<_> = read_dir(&encode_folder)
.unwrap()
.map(|x| x.unwrap())
.map(Result::unwrap)
.collect();
files.sort_by_key(|x| x.path());
files.sort_by_key(std::fs::DirEntry::path);
let mut cmd = Command::new("mkvmerge");
cmd.args([


@ -7,14 +7,14 @@ use std::process::{Command, Stdio};
use crate::Encoder;
/// Get frame count. Direct counting of frame count using ffmpeg
pub fn ffmpeg_get_frame_count(source: &Path) -> usize {
let source_path = Path::new(&source);
pub fn ffmpeg_get_frame_count(source: impl AsRef<Path>) -> usize {
let source = source.as_ref();
let mut cmd = Command::new("ffmpeg");
cmd.args(&[
"-hide_banner",
"-i",
source_path.to_str().unwrap(),
source.to_str().unwrap(),
"-map",
"0:v:0",
"-c",
@ -40,7 +40,8 @@ pub fn ffmpeg_get_frame_count(source: &Path) -> usize {
}
/// Returns vec of all keyframes
pub fn get_keyframes(source: &Path) -> Vec<usize> {
pub fn get_keyframes<P: AsRef<Path>>(source: P) -> Vec<usize> {
let source = source.as_ref();
let mut cmd = Command::new("ffmpeg");
cmd.stdout(Stdio::piped());
@ -81,10 +82,10 @@ pub fn write_concat_file(temp_folder: &Path) {
let encode_folder = &temp_folder.join("encode");
let mut files: Vec<_> = read_dir(encode_folder)
.unwrap()
.map(|x| x.unwrap())
.map(Result::unwrap)
.collect();
files.sort_by_key(|x| x.path());
files.sort_by_key(std::fs::DirEntry::path);
let mut contents = String::new();
@ -114,7 +115,10 @@ pub fn have_audio(file: &Path) -> bool {
}
/// Extracting audio
pub fn extract_audio(input: &Path, temp: &Path, audio_params: &[String]) {
pub fn extract_audio(input: impl AsRef<Path>, temp: impl AsRef<Path>, audio_params: &[String]) {
let input = input.as_ref();
let temp = temp.as_ref();
let have_audio = have_audio(input);
if have_audio {
@ -158,11 +162,11 @@ pub fn concatenate_ffmpeg<P: AsRef<Path>>(temp: P, output: P, encoder: Encoder)
let audio_file = Path::new(&temp).join("audio.mkv");
let mut audio_cmd = vec![];
if audio_file.exists() && audio_file.metadata().unwrap().len() > 1000 {
audio_cmd = vec!["-i", audio_file.to_str().unwrap(), "-c", "copy"];
}
let audio_cmd = if audio_file.exists() && audio_file.metadata().unwrap().len() > 1000 {
vec!["-i", audio_file.to_str().unwrap(), "-c", "copy"]
} else {
Vec::with_capacity(0)
};
let mut cmd = Command::new("ffmpeg");


@ -1,4 +1,13 @@
#![warn(clippy::needless_pass_by_value)]
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::must_use_candidate)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::too_many_lines)]
#![allow(clippy::cast_possible_wrap)]
#[macro_use]
extern crate log;
@ -28,7 +37,6 @@ use std::path::PathBuf;
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Serialize, Deserialize, Debug, strum::EnumString, strum::IntoStaticStr)]
pub enum Encoder {
aom,
rav1e,
@ -56,41 +64,32 @@ macro_rules! into_vec {
}
impl Encoder {
pub fn compose_1_1_pass(&self, params: Vec<String>, output: String) -> Vec<String> {
pub fn compose_1_1_pass(self, params: Vec<String>, output: String) -> Vec<String> {
match &self {
// Aomenc
Self::aom => chain!(
into_vec!["aomenc", "--passes=1"],
params,
into_vec!["-o", output, "-"],
)
.collect(),
// Rav1e
Self::rav1e => chain!(
into_vec!["rav1e", "-", "-y"],
params,
into_vec!["--output", output]
)
.collect(),
// VPX
Self::vpx => chain!(
into_vec!["vpxenc", "--passes=1"],
params,
into_vec!["-o", output, "-"]
)
.collect(),
// SVT-AV1
Self::svt_av1 => chain!(
into_vec!["SvtAv1EncApp", "-i", "stdin", "--progress", "2",],
into_vec!["SvtAv1EncApp", "-i", "stdin", "--progress", "2"],
params,
into_vec!["-b", output,],
into_vec!["-b", output],
)
.collect(),
// x264
Self::x264 => chain!(
into_vec![
"x264",
@ -101,23 +100,20 @@ impl Encoder {
"y4m",
],
params,
into_vec!["-", "-o", output,]
into_vec!["-", "-o", output]
)
.collect(),
// x265
Self::x265 => chain!(
into_vec!["x265", "--y4m",],
into_vec!["x265", "--y4m"],
params,
into_vec!["-", "-o", output,]
into_vec!["-", "-o", output]
)
.collect(),
}
}
pub fn compose_1_2_pass(&self, params: Vec<String>, fpf: &str) -> Vec<String> {
pub fn compose_1_2_pass(self, params: Vec<String>, fpf: &str) -> Vec<String> {
match &self {
// Aomenc
Self::aom => chain!(
into_vec!["aomenc", "--passes=2", "--pass=1"],
params,
@ -129,8 +125,6 @@ impl Encoder {
],
)
.collect(),
// Rav1e
Self::rav1e => chain!(
into_vec!["rav1e", "-", "-y", "-q"],
params,
@ -142,8 +136,6 @@ impl Encoder {
]
)
.collect(),
// VPX
Self::vpx => chain!(
into_vec!["vpxenc", "--passes=2", "--pass=1"],
params,
@ -155,8 +147,6 @@ impl Encoder {
],
)
.collect(),
// SVT-AV1
Self::svt_av1 => chain!(
into_vec![
"SvtAv1EncApp",
@ -178,8 +168,6 @@ impl Encoder {
],
)
.collect(),
// x264
Self::x264 => chain!(
into_vec![
"x264",
@ -201,8 +189,6 @@ impl Encoder {
]
)
.collect(),
// x265
Self::x265 => chain!(
into_vec![
"x265",
@ -227,7 +213,7 @@ impl Encoder {
}
}
pub fn compose_2_2_pass(&self, params: Vec<String>, fpf: &str, output: String) -> Vec<String> {
pub fn compose_2_2_pass(self, params: Vec<String>, fpf: &str, output: String) -> Vec<String> {
match &self {
Self::aom => chain!(
into_vec!["aomenc", "--passes=2", "--pass=2"],
@ -238,7 +224,7 @@ impl Encoder {
Self::rav1e => chain!(
into_vec!["rav1e", "-", "-y", "-q"],
params,
into_vec!["--second-pass", format!("{}.stat", fpf), "--output", output,]
into_vec!["--second-pass", format!("{}.stat", fpf), "--output", output]
)
.collect(),
Self::vpx => chain!(
@ -280,7 +266,7 @@ impl Encoder {
"y4m",
],
params,
into_vec!["--stats", format!("{}.log", fpf), "-", "-o", output,]
into_vec!["--stats", format!("{}.log", fpf), "-", "-o", output]
)
.collect(),
Self::x265 => chain!(
@ -295,13 +281,13 @@ impl Encoder {
"y4m",
],
params,
into_vec!["--stats", format!("{}.log", fpf), "-", "-o", output,]
into_vec!["--stats", format!("{}.log", fpf), "-", "-o", output]
)
.collect(),
}
}
pub fn get_default_arguments(&self) -> Vec<String> {
pub fn get_default_arguments(self) -> Vec<String> {
match &self {
Encoder::aom => into_vec![
"--threads=8",
@ -339,30 +325,24 @@ impl Encoder {
}
}
pub fn get_default_pass(&self) -> u8 {
pub const fn get_default_pass(self) -> u8 {
match &self {
Self::aom => 2,
Self::rav1e => 1,
Self::vpx => 2,
Self::svt_av1 => 1,
Self::x264 => 1,
Self::x265 => 1,
Self::aom | Self::vpx => 2,
_ => 1,
}
}
/// Default quantizer range target quality mode
pub fn get_default_cq_range(&self) -> (usize, usize) {
pub const fn get_default_cq_range(self) -> (usize, usize) {
match &self {
Self::aom => (15, 55),
Self::aom | Self::vpx => (15, 55),
Self::rav1e => (50, 140),
Self::vpx => (15, 55),
Self::svt_av1 => (15, 50),
Self::x264 => (15, 35),
Self::x265 => (15, 35),
Self::x264 | Self::x265 => (15, 35),
}
}
pub fn help_command(&self) -> [&str; 2] {
pub const fn help_command(self) -> [&'static str; 2] {
match &self {
Self::aom => ["aomenc", "--help"],
Self::rav1e => ["rav1e", "--fullhelp"],
@ -374,7 +354,7 @@ impl Encoder {
}
/// Default quantizer range target quality mode
pub fn encoder_bin(&self) -> &str {
pub const fn encoder_bin(&self) -> &str {
match &self {
Self::aom => "aomenc",
Self::rav1e => "rav1e",
@ -385,40 +365,30 @@ impl Encoder {
}
}
pub fn output_extension(&self) -> &str {
pub const fn output_extension(&self) -> &str {
match &self {
Self::aom => "ivf",
Self::rav1e => "ivf",
Self::vpx => "ivf",
Self::svt_av1 => "ivf",
Self::x264 => "mkv",
Self::x265 => "mkv",
Self::aom | Self::rav1e | Self::vpx | Self::svt_av1 => "ivf",
Self::x264 | Self::x265 => "mkv",
}
}
fn q_regex_str(&self) -> &str {
const fn q_regex_str(&self) -> &str {
match &self {
Self::aom => r"--cq-level=.+",
Self::aom | Self::vpx => r"--cq-level=.+",
Self::rav1e => r"--quantizer",
Self::vpx => r"--cq-level=.+",
Self::svt_av1 => r"(--qp|-q|--crf)",
Self::x264 => r"--crf",
Self::x265 => r"--crf",
Self::x264 | Self::x265 => r"--crf",
}
}
fn replace_q(&self, index: usize, q: usize) -> (usize, String) {
fn replace_q(self, index: usize, q: usize) -> (usize, String) {
match &self {
Self::aom => (index, format!("--cq-level={}", q)),
Self::rav1e => (index + 1, q.to_string()),
Self::vpx => (index, format!("--cq-level={}", q)),
Self::svt_av1 => (index + 1, q.to_string()),
Self::x264 => (index + 1, q.to_string()),
Self::x265 => (index + 1, q.to_string()),
Self::aom | Self::vpx => (index, format!("--cq-level={}", q)),
Self::rav1e | Self::svt_av1 | Self::x265 | Self::x264 => (index + 1, q.to_string()),
}
}
pub fn man_command(&self, params: Vec<String>, q: usize) -> Vec<String> {
pub fn man_command(self, params: Vec<String>, q: usize) -> Vec<String> {
let index = list_index_of_regex(&params, self.q_regex_str()).unwrap();
let mut new_params = params;
@ -428,18 +398,17 @@ impl Encoder {
new_params
}
fn pipe_match(&self) -> &str {
const fn pipe_match(&self) -> &str {
match &self {
Self::aom => r".*Pass (?:1/1|2/2) .*frame.*?/([^ ]+?) ",
Self::aom | Self::vpx => r".*Pass (?:1/1|2/2) .*frame.*?/([^ ]+?) ",
Self::rav1e => r"encoded.*? ([^ ]+?) ",
Self::vpx => r".*Pass (?:1/1|2/2) .*frame.*?/([^ ]+?) ",
Self::svt_av1 => r"Encoding frame\s+(\d+)",
Self::x264 => r"^[^\d]*(\d+)",
Self::x265 => r"(\d+) frames",
}
}
pub fn match_line(&self, line: &str) -> Option<usize> {
pub fn match_line(self, line: &str) -> Option<usize> {
let encoder_regex = Regex::new(self.pipe_match()).unwrap();
if !encoder_regex.is_match(line) {
return Some(0);
@ -448,7 +417,11 @@ impl Encoder {
captures.parse::<usize>().ok()
}
pub fn construct_target_quality_command(&self, threads: String, q: String) -> Vec<Cow<str>> {
pub fn construct_target_quality_command(
self,
threads: usize,
q: usize,
) -> Vec<Cow<'static, str>> {
match &self {
Self::aom => into_vec![
"aomenc",
@ -494,11 +467,11 @@ impl Encoder {
"-s",
"10",
"--threads",
threads,
threads.to_string(),
"--tiles",
"16",
"--quantizer",
q,
q.to_string(),
"--low-latency",
"--rdo-lookahead-frames",
"5",
@ -523,13 +496,13 @@ impl Encoder {
"-i",
"stdin",
"--lp",
threads,
threads.to_string(),
"--preset",
"8",
"--keyint",
"240",
"--crf",
q,
q.to_string(),
"--tile-rows",
"1",
"--tile-columns",
@ -590,11 +563,11 @@ impl Encoder {
"-",
"--no-progress",
"--threads",
threads,
threads.to_string(),
"--preset",
"medium",
"--crf",
q,
q.to_string(),
],
Self::x265 => into_vec![
"x265",
@ -603,19 +576,19 @@ impl Encoder {
"--no-progress",
"--y4m",
"--frame-threads",
cmp::min(threads.parse().unwrap(), 16).to_string(),
cmp::min(threads, 16).to_string(),
"--preset",
"fast",
"--crf",
q,
q.to_string(),
],
}
}
pub fn construct_target_quality_command_probe_slow(&self, q: String) -> Vec<Cow<str>> {
pub fn construct_target_quality_command_probe_slow(self, q: usize) -> Vec<Cow<'static, str>> {
match &self {
Self::aom => into_vec!["aomenc", "--passes=1", format!("--cq-level={}", q),],
Self::rav1e => into_vec!["rav1e", "-y", "--quantizer", q,],
Self::aom => into_vec!["aomenc", "--passes=1", format!("--cq-level={}", q)],
Self::rav1e => into_vec!["rav1e", "-y", "--quantizer", q.to_string()],
Self::vpx => into_vec![
"vpxenc",
"--passes=1",
@ -624,7 +597,7 @@ impl Encoder {
"--end-usage=q",
format!("--cq-level={}", q),
],
Self::svt_av1 => into_vec!["SvtAv1EncApp", "-i", "stdin", "--crf", q,],
Self::svt_av1 => into_vec!["SvtAv1EncApp", "-i", "stdin", "--crf", q.to_string()],
Self::x264 => into_vec![
"x264",
"--log-level",
@ -634,7 +607,7 @@ impl Encoder {
"-",
"--no-progress",
"--crf",
q,
q.to_string(),
],
Self::x265 => into_vec![
"x265",
@ -643,13 +616,13 @@ impl Encoder {
"--no-progress",
"--y4m",
"--crf",
q,
q.to_string(),
],
}
}
// Function remove_patterns that takes in args and patterns and removes all instances of the patterns from the args.
pub fn remove_patterns(&self, args: Vec<String>, patterns: Vec<String>) -> Vec<String> {
pub fn remove_patterns(args: Vec<String>, patterns: Vec<String>) -> Vec<String> {
let mut out = args;
for pattern in patterns {
if let Some(index) = out.iter().position(|value| value.contains(&pattern)) {
@ -664,18 +637,21 @@ impl Encoder {
}
// Function unwrap cow strings that take in a vec of strings and returns a vec of strings.
pub fn decow_strings(&self, args: &[Cow<str>]) -> Vec<String> {
args.iter().map(|s| s.to_string()).collect::<Vec<String>>()
pub fn decow_strings(args: &[Cow<str>]) -> Vec<String> {
args
.iter()
.map(ToString::to_string)
.collect::<Vec<String>>()
}
pub fn probe_cmd(
&self,
self,
temp: String,
name: &str,
q: String,
q: usize,
ffmpeg_pipe: Vec<String>,
probing_rate: &str,
n_threads: String,
n_threads: usize,
video_params: Vec<String>,
probe_slow: bool,
) -> (Vec<String>, Vec<String>) {
@ -713,22 +689,20 @@ impl Encoder {
"--crf",
"--quantizer"
];
args = self.remove_patterns(args, patterns);
args = Self::remove_patterns(args, patterns);
let ps = self.construct_target_quality_command_probe_slow(q);
params = self.decow_strings(&ps);
params = Self::decow_strings(&ps);
params.append(&mut args)
} else {
let ps = self.construct_target_quality_command(n_threads, q);
params = self.decow_strings(&ps);
params = Self::decow_strings(&ps);
}
let output: Vec<String> = match &self {
Self::aom => chain!(params, into_vec!["-o", probe_path, "-"]).collect(),
Self::rav1e => chain!(params, into_vec!["-o", probe_path, "-"]).collect(),
Self::svt_av1 => chain!(params, into_vec!["-b", probe_path]).collect(),
Self::vpx => chain!(params, into_vec!["-o", probe_path, "-"]).collect(),
Self::x264 => chain!(params, into_vec!["-o", probe_path, "-"]).collect(),
Self::x265 => chain!(params, into_vec!["-o", probe_path, "-"]).collect(),
Self::aom | Self::rav1e | Self::vpx | Self::x264 | Self::x265 => {
chain!(params, into_vec!["-o", probe_path, "-"]).collect()
}
};
(pipe, output)
@ -767,7 +741,9 @@ pub fn list_index_of_regex(params: &[String], regex_str: &str) -> Option<usize>
panic!("No match found for params: {:#?}", params)
}
#[derive(Serialize, Deserialize, Debug, strum::EnumString, strum::IntoStaticStr)]
#[derive(
PartialEq, Eq, Copy, Clone, Serialize, Deserialize, Debug, strum::EnumString, strum::IntoStaticStr,
)]
pub enum ConcatMethod {
#[strum(serialize = "mkvmerge")]
MKVMerge,
@ -777,6 +753,12 @@ pub enum ConcatMethod {
Ivf,
}
impl Display for ConcatMethod {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(<&'static str>::from(self))
}
}
#[derive(Serialize, Deserialize, Debug, strum::EnumString, strum::IntoStaticStr)]
pub enum SplitMethod {
#[strum(serialize = "av-scenechange")]
@ -801,14 +783,14 @@ pub enum ChunkMethod {
LSMASH,
}
/// Check for FFmpeg
/// Check for `FFmpeg`
pub fn get_ffmpeg_info() -> String {
let mut cmd = Command::new("ffmpeg");
cmd.stderr(Stdio::piped());
String::from_utf8(cmd.output().unwrap().stderr).unwrap()
}
pub fn adapt_probing_rate(rate: usize) -> usize {
pub const fn adapt_probing_rate(rate: usize) -> usize {
match rate {
1..=4 => rate,
_ => 4,
@ -824,7 +806,7 @@ pub fn determine_workers(encoder: Encoder) -> u64 {
let cpu = num_cpus::get() as u64;
// available_memory returns kb, convert to gb
let ram_gb = system.available_memory() / 10u64.pow(6);
let ram_gb = system.available_memory() / 10_u64.pow(6);
std::cmp::max(
match encoder {
@ -871,11 +853,11 @@ struct Baz {
frames: Vec<Bar>,
}
pub fn read_file_to_string(file: &Path) -> Result<String, Error> {
Ok(fs::read_to_string(&file).unwrap_or_else(|_| panic!("Can't open file {:?}", file)))
pub fn read_file_to_string(file: impl AsRef<Path>) -> Result<String, Error> {
Ok(fs::read_to_string(&file).unwrap_or_else(|_| panic!("Can't open file {:?}", file.as_ref())))
}
pub fn read_vmaf_file(file: &Path) -> Result<Vec<f64>, serde_json::Error> {
pub fn read_vmaf_file(file: impl AsRef<Path>) -> Result<Vec<f64>, serde_json::Error> {
let json_str = read_file_to_string(file).unwrap();
let bazs = serde_json::from_str::<Baz>(&json_str)?;
let v = bazs
@ -887,7 +869,10 @@ pub fn read_vmaf_file(file: &Path) -> Result<Vec<f64>, serde_json::Error> {
Ok(v)
}
pub fn read_weighted_vmaf(file: &Path, percentile: f64) -> Result<f64, serde_json::Error> {
pub fn read_weighted_vmaf(
file: impl AsRef<Path>,
percentile: f64,
) -> Result<f64, serde_json::Error> {
let mut scores = read_vmaf_file(file).unwrap();
Ok(get_percentile(&mut scores, percentile))


@ -5,8 +5,11 @@ use std::io::prelude::*;
use std::io::BufReader;
use std::path::Path;
use std::process::{Command, Stdio};
use std::string::ToString;
pub fn segment(input: &Path, temp: &Path, segments: &[usize]) {
pub fn segment(input: impl AsRef<Path>, temp: impl AsRef<Path>, segments: &[usize]) {
let input = input.as_ref();
let temp = temp.as_ref();
let mut cmd = Command::new("ffmpeg");
cmd.stdout(Stdio::piped());
@ -28,10 +31,14 @@ pub fn segment(input: &Path, temp: &Path, segments: &[usize]) {
"0",
]);
if !segments.is_empty() {
if segments.is_empty() {
let split_path = Path::new(temp).join("split").join("0.mkv");
let split_str = split_path.to_str().unwrap();
cmd.arg(split_str);
} else {
let segments_to_string = segments
.iter()
.map(|x| x.to_string())
.map(ToString::to_string)
.collect::<Vec<String>>();
let segments_joined = segments_to_string.join(",");
@ -39,10 +46,6 @@ pub fn segment(input: &Path, temp: &Path, segments: &[usize]) {
let split_path = Path::new(temp).join("split").join("%05d.mkv");
let split_str = split_path.to_str().unwrap();
cmd.arg(split_str);
} else {
let split_path = Path::new(temp).join("split").join("0.mkv");
let split_str = split_path.to_str().unwrap();
cmd.arg(split_str);
}
let out = cmd.output().unwrap();
assert!(out.status.success());
@ -88,7 +91,7 @@ struct ScenesData {
pub fn write_scenes_to_file(
scenes: &[usize],
total_frames: usize,
scene_path: &Path,
scene_path: impl AsRef<Path>,
) -> std::io::Result<()> {
// Writes a list of scenes and frame count to the file
let data = ScenesData {


@ -10,7 +10,7 @@ pub fn weighted_search(num1: f64, vmaf1: f64, num2: f64, vmaf2: f64, target: f64
let tot = dif1 + dif2;
(num1 * (dif1 / tot) + (num2 * (dif2 / tot))).round() as usize
num1.mul_add(dif1 / tot, num2 * (dif2 / tot)).round() as usize
}
pub fn transform_vmaf(vmaf: f64) -> f64 {
@ -54,7 +54,7 @@ pub fn interpolate_target_vmaf(scores: Vec<(f64, u32)>, q: f64) -> Result<f64, E
let keys = sorted
.iter()
.map(|f| Key::new(f.1 as f64, f.0 as f64, Interpolation::Linear))
.map(|f| Key::new(f64::from(f.1), f.0 as f64, Interpolation::Linear))
.collect();
let spline = Spline::from_vec(keys);


@ -23,7 +23,7 @@ use std::time::Instant;
use self::num_rational::Ratio;
use self::vapoursynth::prelude::*;
use super::*;
use super::ChunkMethod;
enum OutputTarget {
File(File),
@ -252,7 +252,7 @@ fn print_y4m_header<W: Write>(writer: &mut W, node: &Node) -> Result<(), Error>
}
// Checks if the frame is completed, that is, we have the frame and, if needed, its alpha part.
fn is_completed(entry: &(Option<FrameRef>, Option<FrameRef>), have_alpha: bool) -> bool {
const fn is_completed(entry: &(Option<FrameRef>, Option<FrameRef>), have_alpha: bool) -> bool {
entry.0.is_some() && (!have_alpha || entry.1.is_some())
}
@ -327,20 +327,20 @@ fn frame_done_callback<'core>(
let mut state = shared_data.output_state.lock().unwrap();
// Increase the progress counter.
if !alpha {
if alpha {
state.callbacks_fired_alpha += 1;
} else {
state.callbacks_fired += 1;
if parameters.alpha_node.is_none() {
state.callbacks_fired_alpha += 1;
}
} else {
state.callbacks_fired_alpha += 1;
}
// Figure out the FPS.
if parameters.progress {
let current = Instant::now();
let elapsed = current.duration_since(state.last_fps_report_time);
let elapsed_seconds = elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 * 1e-9;
let elapsed_seconds = f64::from(elapsed.subsec_nanos()).mul_add(1e-9, elapsed.as_secs() as f64);
if elapsed.as_secs() > 10 {
state.fps =
@ -396,8 +396,9 @@ fn frame_done_callback<'core>(
while state
.reorder_map
.get(&state.next_output_frame)
.map(|entry| is_completed(entry, parameters.alpha_node.is_some()))
.unwrap_or(false)
.map_or(false, |entry| {
is_completed(entry, parameters.alpha_node.is_some())
})
{
let next_output_frame = state.next_output_frame;
let (frame, alpha_frame) = state.reorder_map.remove(&next_output_frame).unwrap();
@ -415,7 +416,7 @@ fn frame_done_callback<'core>(
}
if state.timecodes_file.is_some() && state.error.is_none() {
let timecode = (*state.current_timecode.numer() as f64 * 1000f64)
let timecode = (*state.current_timecode.numer() as f64 * 1000_f64)
/ *state.current_timecode.denom() as f64;
match writeln!(state.timecodes_file.as_mut().unwrap(), "{:.6}", timecode)
.context("Couldn't output the timecode")
@ -534,7 +535,7 @@ fn output(
}
let elapsed = start_time.elapsed();
let elapsed_seconds = elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 * 1e-9;
let elapsed_seconds = f64::from(elapsed.subsec_nanos()).mul_add(1e-9, elapsed.as_secs() as f64);
let mut state = shared_data.output_state.lock().unwrap();
eprintln!(
@ -739,8 +740,7 @@ fn run(args: &[&str]) -> Result<(), Error> {
// Get the output node.
let output_index = matches
.value_of("outputindex")
.map(str::parse)
.unwrap_or(Ok(0))
.map_or(Ok(0), str::parse)
.context("Couldn't convert the output index to an integer")?;
#[cfg(feature = "gte-vsscript-api-31")]
@ -797,13 +797,11 @@ fn run(args: &[&str]) -> Result<(), Error> {
let start_frame = matches
.value_of("start")
.map(str::parse::<i32>)
.unwrap_or(Ok(0))
.map_or(Ok(0), str::parse::<i32>)
.context("Couldn't convert the start frame to an integer")?;
let end_frame = matches
.value_of("end")
.map(str::parse::<i32>)
.unwrap_or_else(|| Ok(num_frames as i32 - 1))
.map_or_else(|| Ok(num_frames as i32 - 1), str::parse::<i32>)
.context("Couldn't convert the end frame to an integer")?;
// Check if the input start and end frames make sense.
@ -820,16 +818,14 @@ fn run(args: &[&str]) -> Result<(), Error> {
end_frame
.checked_sub(start_frame)
.and_then(|x| x.checked_add(1))
.map(|x| format!("{}", x))
.unwrap_or_else(|| "<overflow>".to_owned())
.map_or_else(|| "<overflow>".to_owned(), |x| format!("{}", x))
);
}
let requests = {
let requests = matches
.value_of("requests")
.map(str::parse::<usize>)
.unwrap_or(Ok(0))
.map_or(Ok(0), str::parse::<usize>)
.context("Couldn't convert the request count to an unsigned integer")?;
if requests == 0 {
@ -860,7 +856,7 @@ fn run(args: &[&str]) -> Result<(), Error> {
// This is still not a very valid comparison since vspipe does all argument validation
// before it starts the time.
let elapsed = start_time.elapsed();
let elapsed_seconds = elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 * 1e-9;
let elapsed_seconds = f64::from(elapsed.subsec_nanos()).mul_add(1e-9, elapsed.as_secs() as f64);
eprintln!("vspipe time: {:.2} seconds", elapsed_seconds);
}


@ -15,7 +15,7 @@ pub fn plot_vmaf_score_file(
scores_file: &Path,
plot_path: &Path,
) -> Result<(), Box<dyn std::error::Error>> {
let scores = read_vmaf_file(&scores_file).unwrap();
let scores = read_vmaf_file(scores_file).unwrap();
let plot_width = 2560 + (printable_base10_digits(scores.len()) as u32 * 200);
let plot_heigth = 1440;
@ -37,7 +37,7 @@ pub fn plot_vmaf_score_file(
.set_label_area_size(LabelAreaPosition::Left, (5).percent())
.set_label_area_size(LabelAreaPosition::Top, (5).percent())
.margin((1).percent())
.build_cartesian_2d(0u32..length, perc_1.floor()..100.0)?;
.build_cartesian_2d(0_u32..length, perc_1.floor()..100.0)?;
chart.configure_mesh().draw()?;
@ -114,12 +114,11 @@ pub fn validate_vmaf_test_run(model: &str) -> Result<(), Error> {
let out = cmd.output()?;
let stdr = String::from_utf8(out.stderr)?;
let stderr = String::from_utf8(out.stderr)?;
match out.status.success() {
true => Ok(()),
false => panic!("Test vmaf run failed : \n{:#?}", stdr),
}
assert!(out.status.success(), "Test VMAF run failed:\n{:?}", stderr);
Ok(())
}
pub fn validate_vmaf(vmaf_model: &str) -> Result<(), Error> {
@ -163,7 +162,10 @@ pub fn run_vmaf_on_files(source: &Path, output: &Path) -> Result<PathBuf, Error>
Ok(file_path)
}
pub fn plot_vmaf(source: &Path, output: &Path) -> Result<(), Error> {
pub fn plot_vmaf(source: impl AsRef<Path>, output: impl AsRef<Path>) -> Result<(), Error> {
let source = source.as_ref();
let output = output.as_ref();
println!("::VMAF Run..");
let json_file = run_vmaf_on_files(source, output)?;
@ -173,10 +175,10 @@ pub fn plot_vmaf(source: &Path, output: &Path) -> Result<(), Error> {
}
pub fn run_vmaf_on_chunk(
encoded: &Path,
encoded: impl AsRef<Path>,
pipe_cmd: &[String],
stat_file: &Path,
model: &str,
stat_file: impl AsRef<Path>,
model: Option<impl AsRef<Path>>,
res: &str,
sample_rate: usize,
vmaf_filter: &str,
@ -195,12 +197,21 @@ pub fn run_vmaf_on_chunk(
let distorted = format!("[0:v]scale={}:flags=bicubic:force_original_aspect_ratio=decrease,setpts=PTS-STARTPTS[distorted];", &res );
let reference = format!("[1:v]{}{}scale={}:flags=bicubic:force_original_aspect_ratio=decrease,setpts=PTS-STARTPTS[ref];", select, vmaf_filter, &res );
let vmaf = format!(
"[distorted][ref]libvmaf=log_fmt='json':eof_action=endall:log_path={}{}:n_threads={}",
stat_file.to_str().unwrap(),
&model,
threads
);
let vmaf = if let Some(model) = model {
format!(
"[distorted][ref]libvmaf=log_fmt='json':eof_action=endall:log_path={}:model_path={}:n_threads={}",
stat_file.as_ref().to_str().unwrap(),
&model.as_ref().to_str().unwrap(),
threads
)
} else {
format!(
"[distorted][ref]libvmaf=log_fmt='json':eof_action=endall:log_path={}:n_threads={}",
stat_file.as_ref().to_str().unwrap(),
threads
)
};
let vmaf_cmd = [
"-loglevel",
@ -213,7 +224,7 @@ pub fn run_vmaf_on_chunk(
"-r",
"60",
"-i",
encoded.to_str().unwrap(),
encoded.as_ref().to_str().unwrap(),
"-r",
"60",
"-i",
@ -245,10 +256,17 @@ pub fn run_vmaf_on_chunk(
cmd.args(cmd_out);
cmd.stderr(Stdio::piped());
cmd.stdout(Stdio::piped());
cmd
let output = cmd
.stdin(handle.stdout.unwrap())
.output()
.unwrap_or_else(|e| panic!("Failed to execute vmaf pipe: {}\ncommand: {:#?}", e, cmd));
assert!(
output.status.success(),
"VMAF calculation failed:\nCommand: {:?}\nOutput: {:?}",
cmd,
output
);
Ok(())
}


@ -1,5 +1,16 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::must_use_candidate)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::too_many_lines)]
#![allow(clippy::cast_possible_wrap)]
use av1an_core::{vapoursynth, SplitMethod};
use av1an_core::{ChunkMethod, Encoder};
use av1an_core::{ChunkMethod, ConcatMethod, Encoder};
use anyhow::anyhow;
use once_cell::sync::Lazy;
@ -9,10 +20,27 @@ use tokio::io::{AsyncBufReadExt, BufReader};
use itertools::Itertools;
use av1an_core::compose_ffmpeg_pipe;
use av1an_core::determine_workers;
use av1an_core::ffmpeg::extract_audio;
use av1an_core::ffmpeg::ffmpeg_get_frame_count;
use av1an_core::ffmpeg::get_keyframes;
use av1an_core::logger::{log, set_log};
use av1an_core::progress_bar::finish_progress_bar;
use av1an_core::progress_bar::init_progress_bar;
use av1an_core::progress_bar::update_bar;
use av1an_core::progress_bar::{
finish_multi_progress_bar, init_multi_progress_bar, update_mp_bar, update_mp_msg,
};
use av1an_core::split::extra_splits;
use av1an_core::split::segment;
use av1an_core::split::write_scenes_to_file;
use av1an_core::target_quality::log_probes;
use av1an_core::target_quality::vmaf_auto_threads;
use av1an_core::target_quality::weighted_search;
use av1an_core::vmaf::plot_vmaf;
use av1an_core::read_weighted_vmaf;
use std::cmp;
use std::cmp::{Ordering, Reverse};
use std::collections::HashMap;
@ -27,19 +55,14 @@ use std::io::Write;
use std::iter;
use std::path::Path;
use std::process::{Command, Stdio};
use std::string::ToString;
use std::time::Instant;
use std::{collections::hash_map::DefaultHasher, path::PathBuf};
fn adapt_probing_rate(rate: usize, _frames: usize) -> usize {
const fn adapt_probing_rate(rate: usize, _frames: usize) -> usize {
av1an_core::adapt_probing_rate(rate)
}
fn get_keyframes(source: &str) -> anyhow::Result<Vec<usize>> {
let pt = Path::new(source);
let kf = av1an_core::ffmpeg::get_keyframes(pt);
Ok(kf)
}
pub fn hash_path(path: &str) -> String {
let mut s = DefaultHasher::new();
path.hash(&mut s);
@ -96,15 +119,6 @@ core.{}({:?}, cachefile={:?}).set_output()",
Ok(load_script_path.to_string_lossy().to_string())
}
fn get_ffmpeg_info() -> String {
av1an_core::get_ffmpeg_info()
}
fn determine_workers(encoder: Encoder) -> anyhow::Result<u64> {
Ok(av1an_core::determine_workers(encoder))
}
// same as frame_probe, but you can call it without the python GIL
fn frame_probe(source: &str) -> usize {
if is_vapoursynth(source) {
av1an_core::vapoursynth::num_frames(Path::new(source)).unwrap()
@ -114,110 +128,6 @@ fn frame_probe(source: &str) -> usize {
}
}
fn extract_audio(input: &str, temp: &str, audio_params: Vec<String>) {
let input_path = Path::new(&input);
let temp_path = Path::new(&temp);
av1an_core::ffmpeg::extract_audio(input_path, temp_path, &audio_params);
}
fn ffmpeg_get_frame_count(source: &str) -> usize {
av1an_core::ffmpeg::ffmpeg_get_frame_count(Path::new(source))
}
fn segment(input: &str, temp: &str, segments: Vec<usize>) -> anyhow::Result<()> {
let input = Path::new(&input);
let temp = Path::new(&temp);
av1an_core::split::segment(input, temp, &segments);
Ok(())
}
fn write_scenes_to_file(
scenes: Vec<usize>,
frames: usize,
scenes_path_string: &str,
) -> anyhow::Result<()> {
let scene_path = PathBuf::from(scenes_path_string);
av1an_core::split::write_scenes_to_file(&scenes, frames, &scene_path).unwrap();
Ok(())
}
fn vmaf_auto_threads(workers: usize) -> usize {
av1an_core::target_quality::vmaf_auto_threads(workers)
}
fn set_log(file: &str) -> anyhow::Result<()> {
av1an_core::logger::set_log(file).unwrap();
Ok(())
}
fn log(msg: &str) -> anyhow::Result<()> {
av1an_core::logger::log(msg);
Ok(())
}
fn compose_ffmpeg_pipe(params: Vec<String>) -> anyhow::Result<Vec<String>> {
let res = av1an_core::compose_ffmpeg_pipe(params);
Ok(res)
}
fn weighted_search(
num1: f64,
vmaf1: f64,
num2: f64,
vmaf2: f64,
target: f64,
) -> anyhow::Result<usize> {
Ok(av1an_core::target_quality::weighted_search(
num1, vmaf1, num2, vmaf2, target,
))
}
pub fn get_percentile(scores: Vec<f64>, percent: f64) -> anyhow::Result<f64> {
// pyo3 doesn't seem to support `mut` in function declarations, so this is necessary
let mut scores = scores;
Ok(av1an_core::get_percentile(&mut scores, percent))
}
pub fn read_weighted_vmaf(fl: String, percentile: f64) -> anyhow::Result<f64> {
let file = PathBuf::from(fl);
let val = av1an_core::read_weighted_vmaf(&file, percentile).unwrap();
Ok(val)
}
pub fn init_progress_bar(len: u64) -> anyhow::Result<()> {
av1an_core::progress_bar::init_progress_bar(len).unwrap();
Ok(())
}
pub fn update_bar(inc: u64) -> anyhow::Result<()> {
av1an_core::progress_bar::update_bar(inc).unwrap();
Ok(())
}
pub fn finish_progress_bar() -> anyhow::Result<()> {
av1an_core::progress_bar::finish_progress_bar().unwrap();
Ok(())
}
pub fn plot_vmaf_score_file(scores_file_string: String, plot_path_string: String) {
let scores_file = PathBuf::from(scores_file_string);
let plot_path = PathBuf::from(plot_path_string);
av1an_core::vmaf::plot_vmaf_score_file(&scores_file, &plot_path).unwrap()
}
pub fn validate_vmaf(model: &str) -> anyhow::Result<()> {
av1an_core::vmaf::validate_vmaf(&model).unwrap();
Ok(())
}
pub fn plot_vmaf(source: &str, output: &str) -> anyhow::Result<()> {
let input = PathBuf::from(source);
let out = PathBuf::from(output);
av1an_core::vmaf::plot_vmaf(&input, &out).unwrap();
Ok(())
}
pub fn interpolate_target_q(scores: Vec<(f64, u32)>, target: f64) -> anyhow::Result<(f64, f64)> {
let q = av1an_core::target_quality::interpolate_target_q(scores.clone(), target).unwrap();
@ -226,31 +136,6 @@ pub fn interpolate_target_q(scores: Vec<(f64, u32)>, target: f64) -> anyhow::Res
Ok((q, vmaf))
}
pub fn interpolate_target_vmaf(scores: Vec<(f64, u32)>, target: f64) -> anyhow::Result<f64> {
Ok(av1an_core::target_quality::interpolate_target_vmaf(scores, target).unwrap())
}
pub fn log_probes(
vmaf_cq_scores: Vec<(f64, u32)>,
frames: u32,
probing_rate: u32,
name: String,
target_q: u32,
target_vmaf: f64,
skip: String,
) -> anyhow::Result<()> {
av1an_core::target_quality::log_probes(
vmaf_cq_scores,
frames,
probing_rate,
&name,
target_q,
target_vmaf,
&skip,
);
Ok(())
}
pub fn av_scenechange_detect(
input: &str,
total_frames: usize,
@ -294,11 +179,11 @@ pub fn is_vapoursynth(s: &str) -> bool {
struct Queue<'a> {
chunk_queue: Vec<Chunk>,
project: &'a Project,
target_quality: Option<TargetQuality>,
target_quality: Option<TargetQuality<'a>>,
}
impl<'a> Queue<'a> {
fn encoding_loop(self) -> Result<(), ()> {
fn encoding_loop(self) {
if !self.chunk_queue.is_empty() {
let (sender, receiver) = crossbeam_channel::bounded(self.chunk_queue.len());
@ -335,14 +220,12 @@ impl<'a> Queue<'a> {
finish_multi_progress_bar().unwrap();
}
}
Ok(())
}
fn encode_chunk(&self, chunk: &mut Chunk, worker_id: usize) -> Result<(), String> {
let st_time = Instant::now();
let _ = log(format!("Enc: {}, {} fr", chunk.index, chunk.frames).as_str());
log(format!("Enc: {}, {} fr", chunk.index, chunk.frames).as_str());
// Target Quality mode
if self.project.target_quality.is_some() {
@ -358,14 +241,12 @@ impl<'a> Queue<'a> {
// Run all passes for this chunk
const MAX_TRIES: usize = 3;
for current_pass in 1..=self.project.passes {
for _try in 1..=MAX_TRIES {
let res = self
.project
.create_pipes(chunk.clone(), current_pass, worker_id);
for r#try in 1..=MAX_TRIES {
let res = self.project.create_pipes(chunk, current_pass, worker_id);
if let Err(e) = res {
eprintln!("{}", e);
let _ = log(&e);
if _try == MAX_TRIES {
log(&e);
if r#try == MAX_TRIES {
eprintln!("Encoder crashed {} times, shutting down thread.", MAX_TRIES);
return Err(e);
}
@ -375,7 +256,7 @@ impl<'a> Queue<'a> {
}
}
let encoded_frames = self.frame_check_output(chunk, chunk.frames);
let encoded_frames = Self::frame_check_output(chunk, chunk.frames);
if encoded_frames == chunk.frames {
let progress_file = Path::new(&self.project.temp).join("done.json");
@ -390,14 +271,14 @@ impl<'a> Queue<'a> {
let enc_time = st_time.elapsed();
let _ = log(
log(
format!(
"Done: {} Fr: {}/{}",
chunk.index, encoded_frames, chunk.frames
)
.as_str(),
);
let _ = log(
log(
format!(
"Fps: {:.2} Time: {:?}",
encoded_frames as f64 / enc_time.as_secs_f64(),
@ -410,7 +291,7 @@ impl<'a> Queue<'a> {
Ok(())
}
fn frame_check_output(&self, chunk: &Chunk, expected_frames: usize) -> usize {
fn frame_check_output(chunk: &Chunk, expected_frames: usize) -> usize {
let actual_frames = frame_probe(&chunk.output_path());
if actual_frames != expected_frames {
@ -418,7 +299,7 @@ impl<'a> Queue<'a> {
"Chunk #{}: {}/{} fr",
chunk.index, actual_frames, expected_frames
);
let _ = log(&msg);
log(&msg);
println!(":: {}", msg);
}
@ -426,11 +307,12 @@ impl<'a> Queue<'a> {
}
}
struct TargetQuality {
struct TargetQuality<'a> {
vmaf_res: String,
vmaf_filter: String,
n_threads: usize,
model: String,
// model: Option<PathBuf>,
model: Option<&'a Path>,
probing_rate: usize,
probes: u32,
target: f32,
@ -444,8 +326,8 @@ struct TargetQuality {
probe_slow: bool,
}
impl TargetQuality {
fn new(project: &Project) -> Self {
impl<'a> TargetQuality<'a> {
fn new(project: &'a Project) -> Self {
Self {
vmaf_res: project
.vmaf_res
@ -456,10 +338,7 @@ impl TargetQuality {
.clone()
.unwrap_or_else(|| String::with_capacity(0)),
n_threads: project.n_threads.unwrap_or(0) as usize,
model: project
.vmaf_path
.clone()
.unwrap_or_else(|| String::with_capacity(0)),
model: project.vmaf_path.as_deref(),
probes: project.probes,
target: project.target_quality.unwrap(),
min_q: project.min_q.unwrap(),
@ -485,7 +364,7 @@ impl TargetQuality {
q_list.push(middle_point);
let last_q = middle_point;
let mut score = read_weighted_vmaf(self.vmaf_probe(chunk, last_q.to_string()), 0.25).unwrap();
let mut score = read_weighted_vmaf(self.vmaf_probe(chunk, last_q as usize), 0.25).unwrap();
vmaf_cq.push((score, last_q));
// Initialize search boundary
@ -495,7 +374,7 @@ impl TargetQuality {
let mut vmaf_cq_upper = last_q;
// Branch
let next_q = if score < self.target as f64 {
let next_q = if score < f64::from(self.target) {
self.min_q
} else {
self.max_q
@ -504,11 +383,11 @@ impl TargetQuality {
q_list.push(next_q);
// Edge case check
score = read_weighted_vmaf(self.vmaf_probe(chunk, next_q.to_string()), 0.25).unwrap();
score = read_weighted_vmaf(self.vmaf_probe(chunk, next_q as usize), 0.25).unwrap();
vmaf_cq.push((score, next_q));
if (next_q == self.min_q && score < self.target as f64)
|| (next_q == self.max_q && score > self.target as f64)
if (next_q == self.min_q && score < f64::from(self.target))
|| (next_q == self.max_q && score > f64::from(self.target))
{
av1an_core::target_quality::log_probes(
vmaf_cq,
@ -517,7 +396,7 @@ impl TargetQuality {
&chunk.name(),
next_q,
score,
if score < self.target as f64 {
if score < f64::from(self.target) {
"low"
} else {
"high"
@ -527,7 +406,7 @@ impl TargetQuality {
}
// Set boundary
if score < self.target as f64 {
if score < f64::from(self.target) {
vmaf_lower = score;
vmaf_cq_lower = next_q;
} else {
@ -538,13 +417,12 @@ impl TargetQuality {
// VMAF search
for _ in 0..self.probes - 2 {
let new_point = weighted_search(
vmaf_cq_lower as f64,
f64::from(vmaf_cq_lower),
vmaf_lower,
vmaf_cq_upper as f64,
f64::from(vmaf_cq_upper),
vmaf_upper,
self.target as f64,
)
.unwrap();
f64::from(self.target),
);
if vmaf_cq
.iter()
@ -555,11 +433,11 @@ impl TargetQuality {
}
q_list.push(new_point as u32);
score = read_weighted_vmaf(self.vmaf_probe(chunk, new_point.to_string()), 0.25).unwrap();
score = read_weighted_vmaf(self.vmaf_probe(chunk, new_point), 0.25).unwrap();
vmaf_cq.push((score, new_point as u32));
// Update boundary
if score < self.target as f64 {
if score < f64::from(self.target) {
vmaf_lower = score;
vmaf_cq_lower = new_point as u32;
} else {
@ -568,22 +446,21 @@ impl TargetQuality {
}
}
let (q, q_vmaf) = interpolate_target_q(vmaf_cq.clone(), self.target as f64).unwrap();
let (q, q_vmaf) = interpolate_target_q(vmaf_cq.clone(), f64::from(self.target)).unwrap();
log_probes(
vmaf_cq,
frames as u32,
self.probing_rate as u32,
chunk.name(),
&chunk.name(),
q as u32,
q_vmaf,
"".into(),
)
.unwrap();
"",
);
q as u32
}
fn vmaf_probe(&self, chunk: &Chunk, q: String) -> String {
fn vmaf_probe(&self, chunk: &Chunk, q: usize) -> String {
let n_threads = if self.n_threads == 0 {
vmaf_auto_threads(self.workers)
} else {
@ -593,10 +470,10 @@ impl TargetQuality {
let cmd = self.encoder.probe_cmd(
self.temp.clone(),
&chunk.name(),
q.clone(),
q,
self.ffmpeg_pipe.clone(),
&self.probing_rate.to_string(),
n_threads.to_string(),
n_threads,
self.video_params.clone(),
self.probe_slow,
);
@ -656,16 +533,17 @@ impl TargetQuality {
let fl_path = fl_path.to_str().unwrap().to_owned();
run_vmaf_on_chunk(
probe_name.to_str().unwrap().to_owned(),
chunk.ffmpeg_gen_cmd.clone(),
fl_path.clone(),
self.model.clone(),
self.vmaf_res.clone(),
av1an_core::vmaf::run_vmaf_on_chunk(
&probe_name,
&chunk.ffmpeg_gen_cmd,
&fl_path,
self.model.as_ref(),
&self.vmaf_res,
self.probing_rate,
self.vmaf_filter.clone(),
&self.vmaf_filter,
self.n_threads,
);
)
.unwrap();
fl_path
}
@ -689,7 +567,9 @@ async fn process_pipe(
}
for util in utility {
util.kill().await.unwrap();
// On Windows, killing the process can fail with a permission denied error, so we don't
// unwrap the result to prevent the program from crashing if killing the child process fails.
drop(util.kill().await);
}
let returncode = pipe.wait().await.unwrap();
@ -737,7 +617,7 @@ impl Chunk {
}
}
fn save_chunk_queue(temp: &str, chunk_queue: Vec<Chunk>) {
fn save_chunk_queue(temp: &str, chunk_queue: &[Chunk]) {
let mut file = File::create(Path::new(temp).join("chunks.json")).unwrap();
file
@ -759,8 +639,6 @@ pub struct Project {
pub input: String,
pub temp: String,
pub output_file: String,
pub mkvmerge: bool,
pub output_ivf: bool,
pub webm: bool,
pub chunk_method: ChunkMethod,
@ -784,12 +662,13 @@ pub struct Project {
pub logging: String,
pub resume: bool,
pub keep: bool,
pub force: bool,
pub vmaf: bool,
pub vmaf_path: Option<String>,
pub vmaf_path: Option<PathBuf>,
pub vmaf_res: Option<String>,
pub concat: String,
pub concat: ConcatMethod,
pub target_quality: Option<f32>,
pub target_quality_method: Option<String>,
@ -809,23 +688,26 @@ static HELP_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"\s+(-\w+|(?:--\w+(?:-
fn invalid_params(params: &[String], valid_options: &HashSet<String>) -> Vec<String> {
params
.iter()
.filter_map(|param| {
if valid_options.contains(param) {
None
} else {
Some(param)
}
})
.map(|s| s.to_string())
.filter(|param| !valid_options.contains(*param))
.map(ToString::to_string)
.collect()
}
fn suggest_fix(wrong_arg: &str, arg_dictionary: &HashSet<String>) -> Option<String> {
// Minimum threshold to consider a suggestion similar enough that it could be a typo
const MIN_THRESHOLD: f64 = 0.75;
arg_dictionary
.iter()
.map(|arg| (arg, strsim::jaro_winkler(arg, wrong_arg)))
.max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap_or(Ordering::Less))
.map(|(s, _)| (*s).to_owned())
.and_then(|(suggestion, score)| {
if score > MIN_THRESHOLD {
Some((*suggestion).clone())
} else {
None
}
})
}
fn read_chunk_queue(temp: &str) -> Vec<Chunk> {
@ -857,7 +739,7 @@ impl Project {
}
impl Project {
fn create_pipes(&self, c: Chunk, current_pass: u8, worker_id: usize) -> Result<(), String> {
fn create_pipes(&self, c: &Chunk, current_pass: u8, worker_id: usize) -> Result<(), String> {
let fpf_file = Path::new(&c.temp)
.join("split")
.join(format!("{}_fpf", c.name()));
@ -903,7 +785,7 @@ impl Project {
let ffmpeg_gen_pipe_stdout: Stdio =
ffmpeg_gen_pipe.stdout.take().unwrap().try_into().unwrap();
let ffmpeg_pipe = compose_ffmpeg_pipe(self.ffmpeg_pipe.clone()).unwrap();
let ffmpeg_pipe = compose_ffmpeg_pipe(self.ffmpeg_pipe.clone());
let mut ffmpeg_pipe = tokio::process::Command::new(&ffmpeg_pipe[0])
.args(&ffmpeg_pipe[1..])
.stdin(ffmpeg_gen_pipe_stdout)
@ -958,8 +840,8 @@ impl Project {
let returncode = pipe.wait_with_output().await.unwrap();
ffmpeg_gen_pipe.kill().await.unwrap();
ffmpeg_pipe.kill().await.unwrap();
drop(ffmpeg_gen_pipe.kill().await);
drop(ffmpeg_pipe.kill().await);
returncode.status
});
@ -1029,13 +911,17 @@ impl Project {
m.as_str()
.split_ascii_whitespace()
.next()
.map(|s| s.to_owned())
.map(ToString::to_string)
})
.collect::<HashSet<String>>()
}
// TODO remove all of these extra allocations
fn validate_inputs(&self) {
if self.force {
return;
}
let video_params: Vec<String> = self
.video_params
.as_slice()
@ -1047,32 +933,34 @@ impl Project {
None
}
})
.map(|s| s.to_owned())
.map(ToString::to_string)
.collect();
let valid_params = self.valid_encoder_params();
let mut invalid_param_found = false;
for wrong_param in invalid_params(video_params.as_slice(), &valid_params) {
if let Some(suggestion) = suggest_fix(&wrong_param, &valid_params) {
println!(
"'{}' isn't a valid parameter for {}. Did you mean '{}'?",
wrong_param, self.encoder, suggestion,
);
invalid_param_found = true;
let invalid_params = invalid_params(video_params.as_slice(), &valid_params);
for wrong_param in &invalid_params {
println!(
"'{}' isn't a valid parameter for {}.",
wrong_param, self.encoder,
);
if let Some(suggestion) = suggest_fix(wrong_param, &valid_params) {
println!("\tDid you mean '{}'?", suggestion)
}
}
if invalid_param_found {
panic!("To continue anyway, run Av1an with --force");
if !invalid_params.is_empty() {
println!("\nTo continue anyway, run av1an with '--force'.");
std::process::exit(1);
}
}
pub fn startup_check(&mut self) -> anyhow::Result<()> {
if matches!(
if !matches!(
self.encoder,
Encoder::rav1e | Encoder::aom | Encoder::svt_av1 | Encoder::vpx
) && self.output_ivf
) && self.concat == ConcatMethod::Ivf
{
panic!(".ivf only supports VP8, VP9, and AV1");
}
@ -1089,8 +977,6 @@ impl Project {
panic!("No FFmpeg");
}
let _ = log(&get_ffmpeg_info());
if let Some(ref vmaf_path) = self.vmaf_path {
assert!(Path::new(vmaf_path).exists());
}
@ -1146,7 +1032,7 @@ impl Project {
ChunkMethod::FFMS2 | ChunkMethod::LSMASH => self.create_video_queue_vs(splits),
ChunkMethod::Hybrid => self.create_video_queue_hybrid(splits),
ChunkMethod::Select => self.create_video_queue_select(splits),
ChunkMethod::Segment => self.create_video_queue_segment(splits),
ChunkMethod::Segment => self.create_video_queue_segment(&splits),
};
chunks.sort_unstable_by_key(|chunk| Reverse(chunk.size));
@ -1183,22 +1069,22 @@ impl Project {
} else {
self.calc_split_locations()
};
let _ = log(&format!("SC: Found {} scenes", scenes.len() + 1));
log(&format!("SC: Found {} scenes", scenes.len() + 1));
if let Some(split_len) = self.extra_splits_len {
let _ = log(&format!(
log(&format!(
"SC: Applying extra splits every {} frames",
split_len
));
scenes = extra_splits(scenes, self.frames, split_len);
let _ = log(&format!("SC: Now at {} scenes", scenes.len() + 1));
log(&format!("SC: Now at {} scenes", scenes.len() + 1));
}
self.write_scenes_to_file(scenes.clone(), scene_file.as_path().to_str().unwrap());
self.write_scenes_to_file(&scenes, scene_file.as_path().to_str().unwrap());
scenes
}
fn write_scenes_to_file(&self, scenes: Vec<usize>, path: &str) {
fn write_scenes_to_file(&self, scenes: &[usize], path: &str) {
write_scenes_to_file(scenes, self.frames, path).unwrap();
}
@ -1250,7 +1136,7 @@ impl Project {
output_ext,
size,
frames,
..Default::default()
..Chunk::default()
}
}
@ -1291,7 +1177,7 @@ impl Project {
// use the number of frames to prioritize which chunks encode first, since we don't have file size
size: frames,
frames,
..Default::default()
..Chunk::default()
}
}
@ -1349,10 +1235,10 @@ impl Project {
chunk_queue
}
fn create_video_queue_segment(&mut self, splits: Vec<usize>) -> Vec<Chunk> {
let _ = log("Split video");
segment(&self.input, &self.temp, splits).unwrap();
let _ = log("Split done");
fn create_video_queue_segment(&mut self, splits: &[usize]) -> Vec<Chunk> {
log("Split video");
segment(&self.input, &self.temp, splits);
log("Split done");
let source_path = Path::new(&self.temp).join("split");
let queue_files = Self::read_queue_files(&source_path);
@ -1372,7 +1258,7 @@ impl Project {
}
fn create_video_queue_hybrid(&mut self, split_locations: Vec<usize>) -> Vec<Chunk> {
let keyframes = get_keyframes(&self.input).unwrap();
let keyframes = get_keyframes(&self.input);
let mut splits = vec![0];
splits.extend(split_locations);
@ -1390,14 +1276,9 @@ impl Project {
.copied()
.collect();
let _ = log("Segmenting video");
segment(
&self.input,
&self.temp,
to_split[1..].iter().copied().collect(),
)
.unwrap();
let _ = log("Segment done");
log("Segmenting video");
segment(&self.input, &self.temp, &to_split[1..]);
log("Segment done");
let source_path = Path::new(&self.temp).join("split");
let queue_files = Self::read_queue_files(&source_path);
@ -1456,7 +1337,7 @@ impl Project {
output_ext,
index,
size: file_size as usize,
..Default::default()
..Chunk::default()
}
}
@ -1475,13 +1356,13 @@ impl Project {
chunks
} else {
let chunks = self.create_encoding_queue(splits);
save_chunk_queue(&self.temp, chunks.clone());
save_chunk_queue(&self.temp, &chunks);
chunks
}
}
pub fn encode_file(&mut self) {
let _ = log(format!("File hash: {}", hash_path(&self.input)).as_str());
log(format!("File hash: {}", hash_path(&self.input)).as_str());
let done_path = Path::new(&self.temp).join("done.json");
@ -1491,14 +1372,14 @@ impl Project {
fs::remove_dir_all(&self.temp).unwrap();
}
let _ = match fs::create_dir_all(Path::new(&self.temp).join("split")) {
match fs::create_dir_all(Path::new(&self.temp).join("split")) {
Ok(_) => {}
Err(e) => match e.kind() {
io::ErrorKind::AlreadyExists => {}
_ => panic!("{}", e),
},
};
let _ = match fs::create_dir_all(Path::new(&self.temp).join("encode")) {
match fs::create_dir_all(Path::new(&self.temp).join("encode")) {
Ok(_) => {}
Err(e) => match e.kind() {
io::ErrorKind::AlreadyExists => {}
@ -1517,11 +1398,11 @@ impl Project {
let mut initial_frames: usize = 0;
if self.resume && done_path.exists() {
let _ = log("Resuming...");
log("Resuming...");
let done: DoneJson = serde_json::from_str(&fs::read_to_string(&done_path).unwrap()).unwrap();
initial_frames = done.done.iter().map(|(_, frames)| frames).sum();
let _ = log(format!("Resmued with {} encoded clips done", done.done.len()).as_str());
log(format!("Resumed with {} encoded clips done", done.done.len()).as_str());
} else {
let total = self.get_frames();
let mut done_file = fs::File::create(&done_path).unwrap();
@ -1538,11 +1419,11 @@ impl Project {
}
if !self.resume {
extract_audio(&self.input, &self.temp, self.audio_params.clone());
extract_audio(&self.input, &self.temp, &self.audio_params);
}
if self.workers == 0 {
self.workers = determine_workers(self.encoder).unwrap() as usize;
self.workers = determine_workers(self.encoder) as usize;
}
self.workers = cmp::min(self.workers, chunk_queue.len());
println!(
@ -1560,7 +1441,7 @@ impl Project {
}
// hack to avoid borrow checker errors
let concat = self.concat.clone();
let concat = self.concat;
let temp = self.temp.clone();
let input = self.input.clone();
let output_file = self.output_file.clone();
@ -1570,31 +1451,30 @@ impl Project {
let queue = Queue {
chunk_queue,
project: &self,
project: self,
target_quality: if self.target_quality.is_some() {
Some(TargetQuality::new(&self))
Some(TargetQuality::new(self))
} else {
None
},
};
queue.encoding_loop().unwrap();
queue.encoding_loop();
let _ = log("Concatenating");
log("Concatenating");
// TODO refactor into Concatenate trait
match concat.as_str() {
"ivf" => {
match concat {
ConcatMethod::Ivf => {
av1an_core::concat::concat_ivf(&Path::new(&temp).join("encode"), Path::new(&output_file))
.unwrap();
}
"mkvmerge" => {
ConcatMethod::MKVMerge => {
av1an_core::concat::concatenate_mkvmerge(temp.clone(), output_file.clone()).unwrap()
}
"ffmpeg" => {
ConcatMethod::FFmpeg => {
av1an_core::ffmpeg::concatenate_ffmpeg(temp.clone(), output_file.clone(), encoder);
}
_ => unreachable!(),
}
if vmaf {
@ -1606,29 +1486,3 @@ impl Project {
}
}
}
fn run_vmaf_on_chunk(
encoded: String,
pipe_cmd: Vec<String>,
stat_file: String,
model: String,
res: String,
sample_rate: usize,
vmaf_filter: String,
threads: usize,
) {
let encoded = PathBuf::from(encoded);
let stat_file = PathBuf::from(stat_file);
av1an_core::vmaf::run_vmaf_on_chunk(
&encoded,
&pipe_cmd,
&stat_file,
&model,
&res,
sample_rate,
&vmaf_filter,
threads,
)
.unwrap()
}