Mirror of https://github.com/FliegendeWurst/KIT-ILIAS-downloader.git (synced 2024-08-28 04:04:18 +00:00)
Don't create folders for ignored items (fix #21)
Parent: 3e0a2bf154
Commit: a6c0e97755
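The gist of the fix: `Object` gains an `is_ignored_by_option` predicate that checks the relevant command-line options, and `process` in src/main.rs returns early when it is true, so no directory is created for items that will never be downloaded. Below is a condensed, self-contained sketch of that idea; the real `Opt` and `Object` types in src/ilias.rs have many more fields and variants, so the stand-ins here are simplified, and the `process` stub and `main` are purely illustrative.

// Condensed sketch of the change (simplified stand-ins for the real types in src/ilias.rs).

struct Opt {
	forum: bool,      // download forum content?
	no_videos: bool,  // skip videos?
	skip_files: bool, // skip regular files?
}

enum Object {
	Forum { name: String },
	Video { name: String },
	File { name: String },
	Folder { name: String },
}

impl Object {
	// True if the command-line options say this object should not be downloaded.
	fn is_ignored_by_option(&self, opt: &Opt) -> bool {
		(matches!(self, Object::Forum { .. }) && !opt.forum)
			|| (matches!(self, Object::Video { .. }) && opt.no_videos)
			|| (matches!(self, Object::File { .. }) && opt.skip_files)
	}
}

fn process(opt: &Opt, obj: &Object) {
	// The fix: bail out *before* creating the output directory,
	// instead of leaving an empty folder behind for an ignored item.
	if obj.is_ignored_by_option(opt) {
		return;
	}
	println!("create directory / download object here");
}

fn main() {
	let opt = Opt { forum: false, no_videos: true, skip_files: false };
	process(&opt, &Object::Video { name: "Lecture 01".into() });  // ignored, prints nothing
	process(&opt, &Object::Folder { name: "Exercises".into() });  // proceeds as before
}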
src/ilias.rs | 21
@@ -55,7 +55,12 @@ fn error_is_http2(error: &reqwest::Error) -> bool {
 
 impl ILIAS {
 	// TODO: de-duplicate the logic below
-	pub async fn with_session(opt: Opt, session: Arc<CookieStoreMutex>, ignore: Gitignore, course_names: HashMap<String, String>) -> Result<Self> {
+	pub async fn with_session(
+		opt: Opt,
+		session: Arc<CookieStoreMutex>,
+		ignore: Gitignore,
+		course_names: HashMap<String, String>,
+	) -> Result<Self> {
 		let mut builder = Client::builder()
 			.cookie_provider(Arc::clone(&session))
 			.user_agent(concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")));
@@ -76,7 +81,13 @@ impl ILIAS {
 		})
 	}
 
-	pub async fn login(opt: Opt, user: &str, pass: &str, ignore: Gitignore, course_names: HashMap<String, String>) -> Result<Self> {
+	pub async fn login(
+		opt: Opt,
+		user: &str,
+		pass: &str,
+		ignore: Gitignore,
+		course_names: HashMap<String, String>,
+	) -> Result<Self> {
 		let cookie_store = CookieStore::default();
 		let cookie_store = reqwest_cookie_store::CookieStoreMutex::new(cookie_store);
 		let cookie_store = std::sync::Arc::new(cookie_store);
@@ -464,6 +475,12 @@ impl Object {
 			_ => Generic { name, url },
 		})
 	}
+
+	pub(crate) fn is_ignored_by_option(&self, opt: &Opt) -> bool {
+		(matches!(self, Object::Forum { .. }) && !opt.forum)
+			|| (matches!(self, Object::Video { .. }) && opt.no_videos)
+			|| (matches!(self, Object::File { .. }) && opt.skip_files)
+	}
 }
 
 #[allow(non_snake_case)]
@@ -22,7 +22,9 @@ pub async fn download(path: &Path, ilias: Arc<ILIAS>, url: &URL) -> Result<()> {
 	}
 	for item in content.0 {
 		let item = item?;
-		let path = path.join(file_escape(ilias.course_names.get(item.name()).map(|x| &**x).unwrap_or(item.name())));
+		let path = path.join(file_escape(
+			ilias.course_names.get(item.name()).map(|x| &**x).unwrap_or(item.name()),
+		));
 		let ilias = Arc::clone(&ilias);
 		spawn(process_gracefully(ilias, path, item));
 	}
@@ -1,4 +1,8 @@
-use std::{path::{Path, PathBuf}, process::{ExitStatus, Stdio}, sync::Arc};
+use std::{
+	path::{Path, PathBuf},
+	process::{ExitStatus, Stdio},
+	sync::Arc,
+};
 
 use anyhow::{Context, Result};
 use once_cell::sync::Lazy;
@@ -59,7 +63,7 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
 		}
 		arguments.push("-c".into());
 		arguments.push("copy".into());
-		for i in 0..(arguments.len() / 2)-1 {
+		for i in 0..(arguments.len() / 2) - 1 {
 			arguments.push("-map".into());
 			arguments.push(format!("{}", i));
 		}
@@ -70,7 +74,8 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
 			.stdout(Stdio::null())
 			.spawn()
 			.context("failed to start ffmpeg")?
-			.wait().await
+			.wait()
+			.await
 			.context("failed to wait for ffmpeg")?;
 		if !status.success() {
 			error!(format!("ffmpeg failed to merge video files into {}", path.display()));
@@ -82,7 +87,12 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
 	Ok(())
 }
 
-async fn download_all(path: &Path, streams: &Vec<serde_json::Value>, ilias: Arc<ILIAS>, relative_path: &Path) -> Result<Vec<PathBuf>> {
+async fn download_all(
+	path: &Path,
+	streams: &Vec<serde_json::Value>,
+	ilias: Arc<ILIAS>,
+	relative_path: &Path,
+) -> Result<Vec<PathBuf>> {
 	let mut paths = Vec::new();
 	for (i, stream) in streams.into_iter().enumerate() {
 		let url = stream
@@ -91,7 +101,13 @@ async fn download_all(path: &Path, streams: &Vec<serde_json::Value>, ilias: Arc<
 			.as_str()
 			.context("video src not string")?;
 		let new_path = path.join(format!("Stream{}.mp4", i + 1));
-		download_to_path(&ilias, &new_path, &relative_path.join(format!("Stream{}.mp4", i + 1)), url).await?;
+		download_to_path(
+			&ilias,
+			&new_path,
+			&relative_path.join(format!("Stream{}.mp4", i + 1)),
+			url,
+		)
+		.await?;
 		paths.push(new_path);
 	}
 	Ok(paths)
src/main.rs | 10
@@ -124,7 +124,12 @@ async fn real_main(mut opt: Opt) -> Result<()> {
 	let course_names_path = opt.output.join("course_names.toml");
 	let course_names = if fs::metadata(&course_names_path).await.is_ok() {
 		// file exists, try to read it
-		toml::from_str(&fs::read_to_string(course_names_path).await.context("accessing course_names.toml")?).context("processing course_names.toml")?
+		toml::from_str(
+			&fs::read_to_string(course_names_path)
+				.await
+				.context("accessing course_names.toml")?,
+		)
+		.context("processing course_names.toml")?
 	} else {
 		// If file doesn't exist, initialise course_names with empty HashMap
 		HashMap::new()
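For context on the hunk above: `course_names.toml` is deserialized into a `HashMap<String, String>` and consulted via `ilias.course_names.get(item.name())`, so it is a flat table mapping an item's original name (typically a course name) to the folder name used locally. A hypothetical example (the entries are made up):

# course_names.toml – keys are names as they appear in ILIAS,
# values are the folder names to use instead (hypothetical entries)
"Programmieren (Wintersemester 2021/22)" = "Programmieren"
"Höhere Mathematik I" = "HM1"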
@@ -230,6 +235,9 @@ async fn process(ilias: Arc<ILIAS>, path: PathBuf, obj: Object) -> Result<()> {
 	}
 	log!(1, "Syncing {} {}", obj.kind(), relative_path.to_string_lossy());
 	log!(2, " URL: {}", obj.url().url);
+	if obj.is_ignored_by_option(&ilias.opt) {
+		return Ok(());
+	}
 	if obj.is_dir() {
 		create_dir(&path).await?;
 	}