performance

phiresky 2019-06-07 01:17:55 +02:00
parent bdc9c0666d
commit b69f903b58
4 changed files with 17 additions and 4 deletions

View File

@@ -15,6 +15,8 @@ pub fn postproc_line_prefix(
     inp: &mut dyn Read,
     oup: &mut dyn Write,
 ) -> Fallible<()> {
+    //std::io::copy(inp, oup)?;
+    //return Ok(());
     let mut reader = BufReader::with_capacity(1 << 12, inp);
     let fourk = reader.fill_buf()?;
     if fourk.contains(&0u8) {
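
Note on the surrounding context: postproc_line_prefix peeks at the first 4 KiB through the BufReader (fill_buf does not consume the bytes) and treats a NUL byte as a sign of binary input; the commented-out std::io::copy lines read like a temporary pass-through baseline for the timing runs. A minimal, self-contained sketch of that peek-without-consume check (illustrative only, not rga's actual signature):

use std::io::{BufRead, BufReader, Read};

// Sketch: peek at the first 4 KiB without consuming it and call the input
// "probably binary" if it contains a NUL byte.
fn looks_binary(inp: &mut dyn Read) -> std::io::Result<bool> {
    let mut reader = BufReader::with_capacity(1 << 12, inp);
    let fourk = reader.fill_buf()?; // fills the buffer but leaves it unconsumed
    Ok(fourk.contains(&0u8))
}

In real code you would keep using `reader` afterwards, since the peeked bytes now live in its buffer rather than in `inp`.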

View File

@@ -4,6 +4,7 @@ use ::zip::read::ZipFile;
 use failure::*;
 use lazy_static::lazy_static;
 // todo:
 // maybe todo: read list of extensions from
 //ffmpeg -demuxers | tail -n+5 | awk '{print $2}' | while read demuxer; do echo MUX=$demuxer; ffmpeg -h demuxer=$demuxer | grep 'Common extensions'; done 2>/dev/null

View File

@@ -3,6 +3,7 @@ use rga::adapters::*;
 use rga::preproc::*;
 use std::env;
 use std::fs::File;
+use std::io::{BufReader};
 
 fn main() -> Result<(), Error> {
     let path = {
@@ -14,11 +15,13 @@ fn main() -> Result<(), Error> {
         std::env::current_dir()?.join(&filepath)
     };
+    let i = File::open(&path)?;
+    let mut o = std::io::stdout();
     let ai = AdaptInfo {
-        inp: &mut File::open(&path)?,
+        inp: &mut BufReader::new(i),
         filepath_hint: &path,
         is_real_file: true,
-        oup: &mut std::io::stdout(),
+        oup: &mut o,
         line_prefix: "",
     };
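
The change above is the performance-relevant part of this file: the input file is wrapped in a BufReader so the adapter's many small reads hit an in-memory buffer instead of issuing a syscall each, and stdout is hoisted into a local so the same handle is reused for every write. A rough sketch of the same idea (hypothetical helper, not the rga binary; the explicit lock() is an extra step the diff itself does not take):

use std::fs::File;
use std::io::{self, BufReader, Write};

// Hypothetical helper: buffered reads from a file, one long-lived stdout lock.
fn cat_buffered(path: &str) -> io::Result<()> {
    let mut inp = BufReader::new(File::open(path)?); // batches small reads
    let stdout = io::stdout();
    let mut oup = stdout.lock(); // acquire the stdout lock once, not per write
    io::copy(&mut inp, &mut oup)?;
    oup.flush()
}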

View File

@@ -2,6 +2,7 @@ use crate::adapters::*;
 use crate::CachingWriter;
 use failure::{format_err, Error};
 use path_clean::PathClean;
+use std::io::BufWriter;
 
 // longest compressed conversion output to save in cache
 const MAX_DB_BLOB_LEN: usize = 2_000_000;
@@ -102,7 +103,9 @@ pub fn rga_preproc<'a>(
         Some(_) => Err(format_err!("Integrity: value not blob")),
         None => {
             drop(reader);
-            let mut compbuf = CachingWriter::new(oup, MAX_DB_BLOB_LEN, ZSTD_LEVEL)?;
+            // wrapping BufWriter here gives ~10% perf boost
+            let mut compbuf =
+                BufWriter::new(CachingWriter::new(oup, MAX_DB_BLOB_LEN, ZSTD_LEVEL)?);
             eprintln!("adapting...");
             ad.adapt(AdaptInfo {
                 line_prefix,
@@ -111,7 +114,11 @@ pub fn rga_preproc<'a>(
                 inp,
                 oup: &mut compbuf,
             })?;
-            let compressed = compbuf.finish()?;
+            let compressed = compbuf
+                .into_inner()
+                .map_err(|_| "could not finish zstd")
+                .unwrap()
+                .finish()?;
             if let Some(cached) = compressed {
                 eprintln!("compressed len: {}", cached.len());