fix clippy lints

parent 0d70be4b74
commit 960883b616
@@ -58,9 +58,9 @@ impl AdapterMeta {
             self.keep_fast_matchers_if_accurate,
             &self.slow_matchers,
         ) {
-            (true, false, Some(ref sm)) => Box::new(sm.iter().map(|e| Cow::Borrowed(e))),
+            (true, false, Some(ref sm)) => Box::new(sm.iter().map(Cow::Borrowed)),
             (true, true, Some(ref sm)) => Box::new(
-                sm.iter().map(|e| Cow::Borrowed(e)).chain(
+                sm.iter().map(Cow::Borrowed).chain(
                     self.fast_matchers
                         .iter()
                         .map(|e| Cow::Owned(FileMatcher::Fast(e.clone()))),
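
The two changed lines above fix clippy's redundant_closure lint: a closure that only forwards its argument can be replaced by the function it calls, and enum variant constructors such as Cow::Borrowed are themselves functions. A minimal standalone sketch of the pattern (names are illustrative, not from this commit):

use std::borrow::Cow;

fn main() {
    let items: Vec<String> = vec!["a".into(), "b".into()];

    // Before: the closure only forwards its argument (clippy::redundant_closure).
    #[allow(clippy::redundant_closure)]
    let before: Vec<Cow<String>> = items.iter().map(|e| Cow::Borrowed(e)).collect();

    // After: pass the variant constructor directly.
    let after: Vec<Cow<String>> = items.iter().map(Cow::Borrowed).collect();

    assert_eq!(before, after);
}
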
@@ -83,11 +83,7 @@ pub trait FileAdapter: GetMetadata + Send + Sync {
     /// adapt a file.
     ///
     /// detection_reason is the Matcher that was used to identify this file. Unless --rga-accurate was given, it is always a FastMatcher
-    fn adapt<'a>(
-        &self,
-        a: AdaptInfo,
-        detection_reason: &FileMatcher,
-    ) -> Result<AdaptedFilesIterBox>;
+    fn adapt(&self, a: AdaptInfo, detection_reason: &FileMatcher) -> Result<AdaptedFilesIterBox>;
 }
 
 pub struct AdaptInfo {
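
Here the lifetime parameter 'a was declared but never referenced in the signature, so it can simply be dropped (presumably clippy's extra_unused_lifetimes, or rustc's unused-lifetime warning); the shortened parameter list then fits on one line. A hedged sketch with placeholder types:

// Before: <'a> is declared but nothing in the signature mentions it.
trait Before {
    fn adapt<'a>(&self, input: &str) -> String;
}

// After: no lifetime parameter, single-line signature.
trait After {
    fn adapt(&self, input: &str) -> String;
}

struct Upper;

impl After for Upper {
    fn adapt(&self, input: &str) -> String {
        input.to_uppercase()
    }
}

fn main() {
    assert_eq!(Upper.adapt("rga"), "RGA");
}
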
@@ -118,7 +114,7 @@ pub fn get_all_adapters(custom_adapters: Option<Vec<CustomAdapterConfig>>) -> Ad
     }
 
     let internal_adapters: Vec<Arc<dyn FileAdapter>> = vec![
-        Arc::new(PostprocPageBreaks::new()),
+        Arc::new(PostprocPageBreaks::default()),
         //Rc::new(ffmpeg::FFmpegAdapter::new()),
         // Rc::new(zip::ZipAdapter::new()),
         //Rc::new(decompress::DecompressAdapter::new()),
@@ -154,7 +154,7 @@ fn proc_wait(mut child: Child) -> impl AsyncRead {
     };
     StreamReader::new(s)
 }
-pub fn pipe_output<'a>(
+pub fn pipe_output(
     _line_prefix: &str,
     mut cmd: Command,
     inp: ReadBox,
@@ -205,7 +205,7 @@ impl CustomSpawningFileAdapter {
         command.args(
             self.args
                 .iter()
-                .map(|arg| arg_replacer(arg, &filepath_hint))
+                .map(|arg| arg_replacer(arg, filepath_hint))
                 .collect::<Result<Vec<_>>>()?,
         );
         log::debug!("running command {:?}", command);
@@ -355,14 +355,14 @@ PREFIX:Page 2:
 with a long dead crew
 and a witch with the flu
 "#;
-        let input = format!("{0}{0}{0}{0}", input);
-        let input = format!("{0}{0}{0}{0}", input);
-        let input = format!("{0}{0}{0}{0}", input);
-        let input = format!("{0}{0}{0}{0}", input);
-        let input = format!("{0}{0}{0}{0}", input);
-        let input = format!("{0}{0}{0}{0}", input);
+        let input = format!("{input}{input}{input}{input}");
+        let input = format!("{input}{input}{input}{input}");
+        let input = format!("{input}{input}{input}{input}");
+        let input = format!("{input}{input}{input}{input}");
+        let input = format!("{input}{input}{input}{input}");
+        let input = format!("{input}{input}{input}{input}");
         let (a, d) = simple_adapt_info(
-            &Path::new("foo.txt"),
+            Path::new("foo.txt"),
             Box::pin(Cursor::new(Vec::from(input))),
         );
         let output = adapter.adapt(a, &d).unwrap();
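
Two lints in this test hunk: uninlined_format_args (inline the captured variable into the format string, available since Rust 2021) and needless_borrow (Path::new already returns &Path, so &Path::new(..) adds a reference only to have it dereferenced again). A small sketch of the format-args half:

fn main() {
    let input = String::from("na");

    // Before: positional index repeated four times (clippy::uninlined_format_args).
    let before = format!("{0}{0}{0}{0}", input);

    // After: capture the variable directly in the format string.
    let after = format!("{input}{input}{input}{input}");

    assert_eq!(before, after);
    assert_eq!(after, "nananana");
}
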
|
@@ -2,12 +2,10 @@
 
 //impl<T> FileAdapter for T where T: RunFnAdapter {}
 
-
 use anyhow::Result;
 use async_stream::stream;
 use bytes::Bytes;
 
-
 use std::ffi::OsStr;
 use std::io::Cursor;
 use std::path::PathBuf;
@@ -124,7 +122,7 @@ pub fn postproc_encoding(
 
 /// Adds the given prefix to each line in an `AsyncRead`.
 pub fn postproc_prefix(line_prefix: &str, inp: impl AsyncRead + Send) -> impl AsyncRead + Send {
-    let line_prefix_n = format!("\n{}", line_prefix); // clone since we need it later
+    let line_prefix_n = format!("\n{line_prefix}"); // clone since we need it later
     let line_prefix_o = Bytes::copy_from_slice(line_prefix.as_bytes());
     let regex = regex::bytes::Regex::new("\n").unwrap();
     let inp_stream = ReaderStream::new(inp);
@@ -146,12 +144,9 @@ pub fn postproc_prefix(line_prefix: &str, inp: impl AsyncRead + Send) -> impl As
     Box::pin(StreamReader::new(oup_stream))
 }
 
+#[derive(Default)]
 pub struct PostprocPageBreaks {}
-impl PostprocPageBreaks {
-    pub fn new() -> Self {
-        Self {}
-    }
-}
+
 impl GetMetadata for PostprocPageBreaks {
     fn metadata(&self) -> &super::AdapterMeta {
         lazy_static::lazy_static! {
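
This is clippy's new_without_default lint: a no-argument new() on a unit-like struct should be accompanied by (or, as here, replaced with) a Default implementation, which can simply be derived. A minimal sketch:

// Before: hand-written constructor, no Default (clippy::new_without_default).
pub struct Before {}

impl Before {
    pub fn new() -> Self {
        Self {}
    }
}

// After: derive Default; callers use Type::default().
#[derive(Default)]
pub struct After {}

fn main() {
    let _old = Before::new();
    let _new = After::default();
}
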
@@ -203,7 +198,7 @@ pub fn postproc_pagebreaks(
     let regex_linefeed = regex::bytes::Regex::new(r"\x0c").unwrap();
     let regex_newline = regex::bytes::Regex::new("\n").unwrap();
     let mut page_count: i32 = 1;
-    let mut page_prefix: String = format!("Page {}:{}", page_count, line_prefix_o);
+    let mut page_prefix: String = format!("Page {page_count}:{line_prefix_o}");
 
     let input_stream = ReaderStream::new(input);
     let output_stream = stream! {
@@ -216,7 +211,7 @@ pub fn postproc_pagebreaks(
                 // println!("{}", String::from_utf8_lossy(page_prefix.as_bytes()));
                 yield Ok(Bytes::copy_from_slice(page_prefix.as_bytes()));
                 page_prefix = format!("\nPage {}:{}", page_count, line_prefix_o);
-                yield Ok(Bytes::copy_from_slice(&regex_newline.replace_all(&sub_chunk, page_prefix.as_bytes())));
+                yield Ok(Bytes::copy_from_slice(&regex_newline.replace_all(sub_chunk, page_prefix.as_bytes())));
                 page_count += 1;
                 page_prefix = format!("\nPage {}:{}", page_count, line_prefix_o);
             }
@@ -292,7 +287,7 @@ mod tests {
         if b != c {
             anyhow::bail!(
                 "`{}`\nshould be\n`{}`\nbut is\n`{}`",
-                String::from_utf8_lossy(&a),
+                String::from_utf8_lossy(a),
                 b,
                 c
             );
|
@@ -9,7 +9,7 @@ use std::process::{Command, Stdio};
 fn main() -> anyhow::Result<()> {
     env_logger::init();
     let mut passthrough_args: Vec<String> = std::env::args().skip(1).collect();
-    let inx = passthrough_args.iter().position(|e| !e.starts_with("-"));
+    let inx = passthrough_args.iter().position(|e| !e.starts_with('-'));
     let initial_query = if let Some(inx) = inx {
         passthrough_args.remove(inx)
     } else {
@@ -27,22 +27,20 @@ fn main() -> anyhow::Result<()> {
         .context("rga-fzf-open executable is in non-unicode path")?;
 
     let rg_prefix = format!(
-        "{} --files-with-matches --rga-cache-max-blob-len=10M",
-        preproc_exe
+        "{preproc_exe} --files-with-matches --rga-cache-max-blob-len=10M"
     );
 
     let child = Command::new("fzf")
         .arg(format!(
-            "--preview={} --pretty --context 5 {{q}} --rga-fzf-path=_{{}}",
-            preproc_exe
+            "--preview={preproc_exe} --pretty --context 5 {{q}} --rga-fzf-path=_{{}}"
         ))
         .arg("--preview-window=70%:wrap")
         .arg("--phony")
         .arg("--query")
         .arg(&initial_query)
         .arg("--print-query")
-        .arg(format!("--bind=change:reload: {} {{q}}", rg_prefix))
+        .arg(format!("--bind=change:reload: {rg_prefix} {{q}}"))
-        .arg(format!("--bind=ctrl-m:execute:{} {{q}} {{}}", open_exe))
+        .arg(format!("--bind=ctrl-m:execute:{open_exe} {{q}} {{}}"))
         .env(
             "FZF_DEFAULT_COMMAND",
             format!("{} '{}'", rg_prefix, &initial_query),
@@ -58,7 +56,7 @@ fn main() -> anyhow::Result<()> {
         std::str::from_utf8(x.next().context("fzf output empty")?).context("fzf query not utf8")?;
     let selected_file = std::str::from_utf8(x.next().context("fzf output not two line")?)
         .context("fzf ofilename not utf8")?;
-    println!("query='{}', file='{}'", final_query, selected_file);
+    println!("query='{final_query}', file='{selected_file}'");
 
     Ok(())
 }
|
@@ -17,7 +17,7 @@ async fn main() -> anyhow::Result<()> {
     //clap::App::new("rga-preproc").arg(Arg::from_usage())
     let path = {
         let filepath = last;
-        std::env::current_dir()?.join(&filepath)
+        std::env::current_dir()?.join(filepath)
     };
 
     let i = File::open(&path)
|
@@ -12,7 +12,7 @@ use std::process::Command;
 use std::time::Instant;
 
 fn list_adapters(args: RgaConfig) -> Result<()> {
-    let (enabled_adapters, disabled_adapters) = get_all_adapters(args.custom_adapters.clone());
+    let (enabled_adapters, disabled_adapters) = get_all_adapters(args.custom_adapters);
 
     println!("Adapters:\n");
     let print = |adapter: std::sync::Arc<dyn FileAdapter>| {
@@ -21,7 +21,7 @@ fn list_adapters(args: RgaConfig) -> Result<()> {
             .fast_matchers
             .iter()
             .map(|m| match m {
-                FastFileMatcher::FileExtension(ext) => format!(".{}", ext),
+                FastFileMatcher::FileExtension(ext) => format!(".{ext}"),
             })
             .collect::<Vec<_>>()
             .join(", ");
@@ -31,7 +31,7 @@ fn list_adapters(args: RgaConfig) -> Result<()> {
             .unwrap_or(&vec![])
             .iter()
             .filter_map(|m| match m {
-                FileMatcher::MimeType(x) => Some(format!("{}", x)),
+                FileMatcher::MimeType(x) => Some(x.to_string()),
                 FileMatcher::Fast(_) => None,
             })
             .collect::<Vec<_>>()
@@ -39,16 +39,16 @@ fn list_adapters(args: RgaConfig) -> Result<()> {
         let mime_text = if slow_matchers.is_empty() {
            "".to_owned()
        } else {
-            format!("Mime Types: {}", slow_matchers)
+            format!("Mime Types: {slow_matchers}")
        };
        print!(
            " - **{name}**\n {desc} \n Extensions: {matchers} \n {mime} \n",
            name = meta.name,
-            desc = meta.description.replace("\n", "\n "),
+            desc = meta.description.replace('\n', "\n "),
            matchers = matchers,
            mime = mime_text
        );
-        println!("");
+        println!();
    };
    for adapter in enabled_adapters {
        print(adapter)
@@ -57,11 +57,11 @@ fn list_adapters(args: RgaConfig) -> Result<()> {
    for adapter in disabled_adapters {
        print(adapter)
    }
-    return Ok(());
+    Ok(())
 }
 fn main() -> anyhow::Result<()> {
     // set debugging as early as possible
-    if std::env::args().position(|e| e == "--debug").is_some() {
+    if std::env::args().any(|e| e == "--debug") {
         std::env::set_var("RUST_LOG", "debug");
     }
 
@@ -85,10 +85,10 @@ fn main() -> anyhow::Result<()> {
         passthrough_args.push(std::ffi::OsString::from(&path[1..]));
     }
 
-    if passthrough_args.len() == 0 {
+    if passthrough_args.is_empty() {
         // rg would show help. Show own help instead.
         RgaConfig::clap().print_help()?;
-        println!("");
+        println!();
         return Ok(());
     }
 
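
Three common lints show up in this file: needless_return (the last expression of a function is its return value), search_is_some (position(..).is_some() is just any(..)), and len_zero (prefer is_empty()). A compact sketch of the latter two:

fn main() {
    let args: Vec<String> = std::env::args().skip(1).collect();

    // Before: clippy::search_is_some — position() computes an index that is
    // immediately discarded.
    let debug_before = args.iter().position(|e| e == "--debug").is_some();

    // After: any() expresses the intent directly and short-circuits the same way.
    let debug_after = args.iter().any(|e| e == "--debug");
    assert_eq!(debug_before, debug_after);

    // clippy::len_zero — is_empty() instead of len() == 0.
    if args.is_empty() {
        println!("no arguments given");
    }
}
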
@@ -103,7 +103,7 @@ fn main() -> anyhow::Result<()> {
             })
             .collect::<Vec<_>>()
             .join(",");
-        format!("*.{{{}}}", extensions)
+        format!("*.{{{extensions}}}")
     } else {
         "*".to_owned()
     };
@@ -153,6 +153,6 @@ fn add_exe_to_path() -> Result<()> {
     // may be somewhat of a security issue if rga binary is in installed in unprivileged locations
     let paths = [&[exe.to_owned(), exe.join("lib")], &paths[..]].concat();
     let new_path = env::join_paths(paths)?;
-    env::set_var("PATH", &new_path);
+    env::set_var("PATH", new_path);
     Ok(())
 }
|
@@ -9,8 +9,7 @@ use tokio::io::{AsyncRead, AsyncWriteExt};
 use tokio_stream::StreamExt;
 use tokio_util::io::{ReaderStream, StreamReader};
 
-
-
+type FinishHandler = dyn FnOnce((u64, Option<Vec<u8>>)) -> Result<()> + Send;
 /**
  * wrap a AsyncRead so that it is passthrough,
  * but also the written data is compressed and written into a buffer,
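
Naming the boxed callback type with an alias is the standard fix for clippy's type_complexity lint; every later mention of Box<FinishHandler> stays readable. A self-contained sketch (run() is a placeholder, not the crate's API):

use anyhow::Result;

// One name for the whole trait-object type instead of repeating it inline.
type FinishHandler = dyn FnOnce((u64, Option<Vec<u8>>)) -> Result<()> + Send;

fn run(on_finish: Box<FinishHandler>) -> Result<()> {
    // Hand the handler a byte count and (here) no compressed buffer.
    on_finish((42, None))
}

fn main() -> Result<()> {
    run(Box::new(|(len, _compressed)| {
        println!("finished, {len} bytes");
        Ok(())
    }))
}
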
@@ -20,7 +19,7 @@ pub fn async_read_and_write_to_cache<'a>(
     inp: impl AsyncRead + Send + 'a,
     max_cache_size: usize,
     compression_level: i32,
-    on_finish: Box<dyn FnOnce((u64, Option<Vec<u8>>)) -> Result<()> + Send>,
+    on_finish: Box<FinishHandler>,
 ) -> Result<Pin<Box<dyn AsyncRead + Send + 'a>>> {
     let inp = Box::pin(inp);
     let mut zstd_writer = Some(ZstdEncoder::with_quality(
@@ -34,7 +33,7 @@ pub fn async_read_and_write_to_cache<'a>(
         while let Some(bytes) = stream.next().await {
             if let Ok(bytes) = &bytes {
                 if let Some(writer) = zstd_writer.as_mut() {
-                    writer.write_all(&bytes).await?;
+                    writer.write_all(bytes).await?;
                     bytes_written += bytes.len() as u64;
                     let compressed_len = writer.get_ref().len();
                     trace!("wrote {} to zstd, len now {}", bytes.len(), compressed_len);
|
@@ -78,16 +78,16 @@ impl FromStr for CacheMaxBlobLen {
         if let Some(suffix) = suffix {
             Ok(CacheMaxBlobLen(match suffix {
                 'k' | 'M' | 'G' => usize::from_str(s.trim_end_matches(suffix))
-                    .with_context(|| format!("Could not parse int"))
+                    .with_context(|| "Could not parse int".to_string())
                     .map(|e| {
                         e * match suffix {
                             'k' => 1000,
-                            'M' => 1000_000,
-                            'G' => 1000_000_000,
+                            'M' => 1_000_000,
+                            'G' => 1_000_000_000,
                             _ => panic!("impossible"),
                         }
                     }),
-                _ => usize::from_str(s).with_context(|| format!("Could not parse int")),
+                _ => usize::from_str(s).with_context(|| "Could not parse int".to_string()),
             }?))
         } else {
             Err(anyhow::format_err!("empty byte input"))
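
Two lints here: useless_format (format!() with no interpolation is just an allocation; a plain string works) and, most likely, inconsistent_digit_grouping (1000_000 groups digits misleadingly; underscores should fall on thousands boundaries). A sketch of the parsing pattern under those fixes (parse_k is a hypothetical helper, not this crate's API):

use anyhow::Context;
use std::str::FromStr;

// Parse a value like "10k" into a plain count.
fn parse_k(s: &str) -> anyhow::Result<usize> {
    let n = usize::from_str(s.trim_end_matches('k'))
        // A &str context avoids the useless format!() call entirely.
        .context("Could not parse int")?;
    // Idiomatic grouping: 1_000, 1_000_000, 1_000_000_000.
    Ok(n * 1_000)
}

fn main() -> anyhow::Result<()> {
    assert_eq!(parse_k("10k")?, 10_000);
    Ok(())
}
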
@@ -247,7 +247,7 @@ static RGA_CONFIG: &str = "RGA_CONFIG";
 use serde_json::Value;
 fn json_merge(a: &mut Value, b: &Value) {
     match (a, b) {
-        (&mut Value::Object(ref mut a), &Value::Object(ref b)) => {
+        (&mut Value::Object(ref mut a), Value::Object(b)) => {
             for (k, v) in b {
                 json_merge(a.entry(k.clone()).or_insert(Value::Null), v);
             }
@@ -263,19 +263,16 @@ fn read_config_file(path_override: Option<String>) -> Result<(String, Value)> {
     let config_dir = proj.config_dir();
     let config_filename = path_override
         .as_ref()
-        .map(|e| PathBuf::from(e))
+        .map(PathBuf::from)
         .unwrap_or(config_dir.join("config.jsonc"));
     let config_filename_str = config_filename.to_string_lossy().into_owned();
     if config_filename.exists() {
         let config_file_contents = std::fs::read_to_string(config_filename)
-            .with_context(|| format!("Could not read config file json {}", config_filename_str))?;
+            .with_context(|| format!("Could not read config file json {config_filename_str}"))?;
         {
             // just for error messages
             serde_json::from_str::<RgaConfig>(&config_file_contents).with_context(|| {
-                format!(
-                    "Error in config file {}: {}",
-                    config_filename_str, config_file_contents
-                )
+                format!("Error in config file {config_filename_str}: {config_file_contents}")
             })?;
         }
         let config_json: serde_json::Value =
@@ -288,10 +285,11 @@ fn read_config_file(path_override: Option<String>) -> Result<(String, Value)> {
         std::fs::create_dir_all(config_dir)?;
         let mut schemafile = File::create(config_dir.join("config.schema.json"))?;
 
-        schemafile
-            .write(serde_json::to_string_pretty(&schemars::schema_for!(RgaConfig))?.as_bytes())?;
+        schemafile.write_all(
+            serde_json::to_string_pretty(&schemars::schema_for!(RgaConfig))?.as_bytes(),
+        )?;
 
-        let mut config_json = serde_json::to_value(&RgaConfig::default())?;
+        let mut config_json = serde_json::to_value(RgaConfig::default())?;
         match &mut config_json {
             serde_json::Value::Object(o) => {
                 o.insert(
@@ -311,7 +309,7 @@ fn read_config_env() -> Result<Value> {
     if let Some(val) = val {
         serde_json::from_str(&val).context("could not parse config from env RGA_CONFIG")
     } else {
-        serde_json::to_value(&RgaConfig::default()).context("could not create default config")
+        serde_json::to_value(RgaConfig::default()).context("could not create default config")
     }
 }
 pub fn parse_args<I>(args: I, is_rga_preproc: bool) -> Result<RgaConfig>
@@ -349,14 +347,14 @@ where
                 serde_json::to_string_pretty(&merged_config)?
             );
             // pass to child processes
-            std::env::set_var(RGA_CONFIG, &merged_config.to_string());
+            std::env::set_var(RGA_CONFIG, merged_config.to_string());
             merged_config
         }
     };
 
     let mut res: RgaConfig = serde_json::from_value(merged_config.clone())
         .map_err(|e| {
-            println!("{:?}", e);
+            println!("{e:?}");
             e
         })
         .with_context(|| {
|
@@ -115,7 +115,7 @@ fn find_cap_ref(replacement: &[u8]) -> Option<CaptureRef> {
     // check with either unsafe or by parsing the number straight from &[u8].
     let cap = std::str::from_utf8(&rep[i..cap_end]).expect("valid UTF-8 capture name");
     Some(CaptureRef {
-        cap: &cap,
+        cap,
         end: cap_end,
     })
 }
@@ -137,7 +137,7 @@ fn find_cap_ref_braced(rep: &[u8], mut i: usize) -> Option<CaptureRef> {
         Ok(cap) => cap,
     };
     Some(CaptureRef {
-        cap: &cap,
+        cap,
         end: i + 1,
     })
 }
|
src/lib.rs (13 lines changed)
@@ -29,7 +29,8 @@ fn meh(float: f32, precision: usize) -> usize {
     let a = float.abs();
 
     // if abs value is greater than 1, then precision becomes less than "standard"
-    let precision = if a >= 1. {
+    if a >= 1. {
         // reduce by number of digits, minimum 0
         let n = (1. + a.log10().floor()) as usize;
         if n <= precision {
@@ -45,8 +46,7 @@ fn meh(float: f32, precision: usize) -> usize {
     // special case for 0
     } else {
         0
-    };
-    precision
+    }
 }
 
 pub fn print_dur(start: Instant) -> String {
@@ -58,15 +58,12 @@ pub fn print_dur(start: Instant) -> String {
     }
     let precision = meh(dur, 3);
     format!(
-        "{dur:.prec$}{suffix}s",
-        dur = dur,
-        prec = precision,
-        suffix = suffix
+        "{dur:.precision$}{suffix}s"
     )
 }
 
 pub fn print_bytes(bytes: impl Into<f64>) -> String {
-    return pretty_bytes::converter::convert(bytes.into());
+    pretty_bytes::converter::convert(bytes.into())
 }
 
 #[cfg(test)]
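
The meh() change is clippy's let_and_return lint (and print_bytes fixes needless_return): binding the result of the if/else only to return that binding on the next line is redundant; the block can be the function's tail expression. A tiny sketch:

// Before: clippy::let_and_return fires on the binding.
fn clamp_before(n: i32) -> i32 {
    let result = if n > 10 { 10 } else { n };
    result
}

// After: the if/else is itself the return value.
fn clamp_after(n: i32) -> i32 {
    if n > 10 {
        10
    } else {
        n
    }
}

fn main() {
    assert_eq!(clamp_before(42), clamp_after(42));
}
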
|
@@ -54,14 +54,14 @@ pub fn extension_to_regex(extension: &str) -> Regex {
 }
 
 pub fn adapter_matcher(
-    adapters: &Vec<Arc<dyn FileAdapter>>,
+    adapters: &[Arc<dyn FileAdapter>],
     slow: bool,
 ) -> Result<impl Fn(FileMeta) -> Option<(Arc<dyn FileAdapter>, FileMatcher)>> {
     // need order later
     let adapter_names: Vec<String> = adapters.iter().map(|e| e.metadata().name.clone()).collect();
     let mut fname_regexes = vec![];
     let mut mime_regexes = vec![];
-    for adapter in adapters.into_iter() {
+    for adapter in adapters.iter() {
         let metadata = adapter.metadata();
         use FileMatcher::*;
         for matcher in metadata.get_matchers(slow) {
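
Taking &[T] instead of &Vec<T> is clippy's ptr_arg lint: a slice parameter reads the same elements but also accepts arrays and other slice sources, and avoids a pointless double indirection. A sketch with illustrative names:

use std::sync::Arc;

// Before (clippy::ptr_arg):
// fn adapter_names(adapters: &Vec<Arc<String>>) -> Vec<String> { ... }

// After: accept a slice; a &Vec coerces to it at the call site.
fn adapter_names(adapters: &[Arc<String>]) -> Vec<String> {
    // iter() borrows each element; into_iter() on a reference does the same
    // thing but reads misleadingly, hence the related loop change above.
    adapters.iter().map(|a| a.to_string()).collect()
}

fn main() {
    let adapters = vec![Arc::new("ffmpeg".to_string()), Arc::new("zip".to_string())];
    assert_eq!(adapter_names(&adapters), vec!["ffmpeg", "zip"]);
}
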
@@ -86,7 +86,7 @@ pub fn adapter_matcher(
         .collect();
     let mime_matches: Vec<_> = if slow {
         mime_regex_set
-            .matches(&meta.mimetype.expect("No mimetype?"))
+            .matches(meta.mimetype.expect("No mimetype?"))
             .into_iter()
             .collect()
     } else {
|
@@ -1,4 +1,4 @@
-use crate::adapted_iter::{AdaptedFilesIterBox};
+use crate::adapted_iter::AdaptedFilesIterBox;
 use crate::adapters::*;
 use crate::caching_writer::async_read_and_write_to_cache;
 use crate::config::RgaConfig;
@@ -14,7 +14,6 @@ use async_stream::stream;
 use log::*;
 use path_clean::PathClean;
 use postproc::PostprocPrefix;
-use std::convert::TryInto;
 use std::io::Cursor;
 use std::path::Path;
 use std::sync::Arc;
@@ -131,7 +130,7 @@ fn compute_cache_key(
     active_adapters: ActiveAdapters,
 ) -> Result<Vec<u8>> {
     let clean_path = filepath_hint.to_owned().clean();
-    let meta = std::fs::metadata(&filepath_hint)
+    let meta = std::fs::metadata(filepath_hint)
         .with_context(|| format!("reading metadata for {}", filepath_hint.to_string_lossy()))?;
     let modified = meta.modified().expect("weird OS that can't into mtime");
 
@@ -194,8 +193,8 @@ async fn adapt_caching(
     let inp = concat_read_streams(inp);
     let inp = async_read_and_write_to_cache(
         inp,
-        cache_max_blob_len.0.try_into().unwrap(),
-        cache_compression_level.0.try_into().unwrap(),
+        cache_max_blob_len.0,
+        cache_compression_level.0,
         Box::new(move |(uncompressed_size, compressed)| {
             debug!(
                 "uncompressed output: {}",
|
@@ -59,7 +59,7 @@ impl LmdbCache {
         }
         let path = Path::new(&config.path.0);
         Ok(Some(LmdbCache {
-            db_arc: open_cache_db(&path)?,
+            db_arc: open_cache_db(path)?,
         }))
     }
 }
@@ -122,13 +122,13 @@ impl PreprocCache for LmdbCache {
             .map_err(RkvErrWrap)
             .with_context(|| format_err!("could not open write handle to cache"))?;
 
-        db.put(&mut writer, &key, &rkv::Value::Blob(&got))
+        db.put(&mut writer, &key, &rkv::Value::Blob(got))
             .map_err(RkvErrWrap)
             .with_context(|| format_err!("could not write to cache"))?;
         writer
             .commit()
             .map_err(RkvErrWrap)
-            .with_context(|| format!("could not write cache"))?;
+            .with_context(|| "could not write cache".to_string())?;
         debug!("writing to cache took {}", print_dur(start));
         Ok(())
     }
|
@@ -16,7 +16,7 @@ pub fn test_data_dir() -> PathBuf {
     d
 }
 
-pub fn simple_adapt_info<'a>(filepath: &Path, inp: ReadBox) -> (AdaptInfo, FileMatcher) {
+pub fn simple_adapt_info(filepath: &Path, inp: ReadBox) -> (AdaptInfo, FileMatcher) {
     (
         AdaptInfo {
             filepath_hint: filepath.to_owned(),
|