Compare commits

..

5 Commits

Author SHA1 Message Date
Arne Keller
19202327b9
Merge pull request #45 from Ma27/flake
Init nix flake, fix build, add clippy+rustfmt
2023-07-22 15:25:38 +02:00
Maximilian Bosch
4c90a6029a
flake: Init
* Uses crane to build deps and code independently. Allows fast iterations
  during development by running `nix run` to run the program.
* Implements formatting and linting as checks.
2023-07-09 11:54:42 +02:00
Maximilian Bosch
3f326713ba
Fix clippy hints
* https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_else_if
  (-D clippy::collapsible-else-if)
* https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow
  (-D clippy::needless-borrow)
* https://rust-lang.github.io/rust-clippy/master/index.html#into_iter_on_ref
  (-D clippy::into-iter-on-ref)
* https://rust-lang.github.io/rust-clippy/master/index.html#map_flatten
  (-D clippy::map-flatten)
* https://rust-lang.github.io/rust-clippy/master/index.html#len_zero
  (-D clippy::len-zero)
2023-07-09 11:53:08 +02:00
Maximilian Bosch
05abc5bdad
Run cargo fmt 2023-07-09 11:36:01 +02:00
Maximilian Bosch
0396dfc096
Fix build 2023-07-09 11:35:29 +02:00
10 changed files with 328 additions and 110 deletions

132
flake.lock Normal file
View File

@ -0,0 +1,132 @@
{
"nodes": {
"crane": {
"inputs": {
"flake-compat": [
"flake-compat"
],
"flake-utils": [
"flake-utils"
],
"nixpkgs": [
"nixpkgs"
],
"rust-overlay": [
"rust-overlay"
]
},
"locked": {
"lastModified": 1688772518,
"narHash": "sha256-ol7gZxwvgLnxNSZwFTDJJ49xVY5teaSvF7lzlo3YQfM=",
"owner": "ipetkov",
"repo": "crane",
"rev": "8b08e96c9af8c6e3a2b69af5a7fa168750fcf88e",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1673956053,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1687709756,
"narHash": "sha256-Y5wKlQSkgEK2weWdOu4J3riRd+kV/VCgHsqLNTTWQ/0=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "dbabf0ca0c0c4bce6ea5eaf65af5cb694d2082c7",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1688679045,
"narHash": "sha256-t3xGEfYIwhaLTPU8FLtN/pLPytNeDwbLI6a7XFFBlGo=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "3c7487575d9445185249a159046cc02ff364bff8",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"crane": "crane",
"flake-compat": "flake-compat",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": [
"flake-utils"
],
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1688870171,
"narHash": "sha256-8tD8fheWPa7TaJoxzcU3iHkCrQQpOpdMN+HYqgZ1N5A=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "5a932f10ac4bd59047d6e8b5780750ec76ea988a",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

86
flake.nix Normal file
View File

@ -0,0 +1,86 @@
{
# Flake packaging for the KIT ILIAS downloader: builds the Rust crate with
# crane and exposes formatting/linting as flake checks.
description = "Download content from ilias.studium.kit.edu";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
crane = {
url = "github:ipetkov/crane";
# Pin crane's transitive inputs to this flake's own pins to avoid
# duplicate copies of nixpkgs/flake-utils/etc. in the lock file.
inputs.nixpkgs.follows = "nixpkgs";
inputs.flake-utils.follows = "flake-utils";
inputs.flake-compat.follows = "flake-compat";
inputs.rust-overlay.follows = "rust-overlay";
};
# Import them even though we don't use them. Needed to allow overriding `rust-overlay`
# etc. in flakes consuming this flake.
# Temporary until https://github.com/NixOS/nix/issues/6986 is solved.
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
inputs.flake-utils.follows = "flake-utils";
};
flake-utils.url = "github:numtide/flake-utils";
flake-compat = {
url = "github:edolstra/flake-compat";
flake = false;
};
};
outputs = { self, nixpkgs, crane, ... }: let
# Systems this flake provides outputs for.
systems = [ "x86_64-linux" ];
inherit (nixpkgs) lib;
# Builds an attrset { <system> = f <system>; } over the supported systems.
forEachSystem = lib.genAttrs systems;
craneLib = forEachSystem (system: crane.lib.${system});
# Re-groups a per-system attrset ({ <system>.<name> = drv; }) into the
# { <name>.<system> = drv; } shape Hydra expects, dropping the "default"
# alias so each job appears only once.
toHydraJob = with lib; foldlAttrs
(jobset: system: attrs: recursiveUpdate jobset
(mapAttrs (const (drv: { ${system} = drv; }))
(filterAttrs (name: const (name != "default")) attrs)))
{ };
# All crane-based derivations, defined as a fixpoint (lib.fix) so each
# entry can reuse `common` and the shared `cargoArtifacts` build.
builds = forEachSystem (system: (lib.fix (final: {
# Attributes shared by every crane invocation below.
common = {
pname = "KIT-ILIAS-Downloader";
src = craneLib.${system}.cleanCargoSource self;
};
# Dependency-only build; compiled once and reused by clippy, cargoFmt
# and the main package build below.
cargoArtifacts = craneLib.${system}.buildDepsOnly (final.common // {
doCheck = false;
});
# Lint with clippy; warnings are errors except the two allowed lints.
clippy = craneLib.${system}.cargoClippy (final.common // {
inherit (final) cargoArtifacts;
cargoClippyExtraArgs = lib.escapeShellArgs [
"--all-targets"
"--"
"-D"
"warnings"
"-A"
"non-snake-case"
"-A"
"clippy::upper-case-acronyms"
];
});
# Formatting check via `cargo fmt`.
format = craneLib.${system}.cargoFmt (final.common // {
inherit (final) cargoArtifacts;
});
# The actual application build; tests are skipped (doCheck = false).
kit-ilias-downloader = craneLib.${system}.buildPackage (final.common // {
inherit (final) cargoArtifacts;
doCheck = false;
meta.license = lib.licenses.gpl3Plus;
meta.platforms = systems;
});
})));
in {
packages = forEachSystem (system: {
default = self.packages.${system}.kit-ilias-downloader;
inherit (builds.${system}) kit-ilias-downloader;
});
# Format and clippy run under `nix flake check`.
checks = forEachSystem (system: {
inherit (builds.${system}) format clippy;
});
# Flattened job sets for Hydra CI (see toHydraJob above).
hydraJobs = {
packages = toHydraJob self.packages;
checks = toHydraJob self.checks;
};
};
}

View File

@ -5,13 +5,12 @@ use std::{collections::HashMap, error::Error as _, io::Write, sync::Arc};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use cookie_store::CookieStore; use cookie_store::CookieStore;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex;
use reqwest::{Client, IntoUrl, Proxy, Url}; use reqwest::{Client, IntoUrl, Proxy, Url};
use reqwest_cookie_store::CookieStoreMutex; use reqwest_cookie_store::CookieStoreMutex;
use scraper::{ElementRef, Html, Selector}; use scraper::{ElementRef, Html, Selector};
use serde_json::json; use serde_json::json;
use crate::{cli::Opt, queue, util::wrap_html, ILIAS_URL, iliasignore::IliasIgnore}; use crate::{cli::Opt, iliasignore::IliasIgnore, queue, util::wrap_html, ILIAS_URL};
pub mod course; pub mod course;
pub mod exercise; pub mod exercise;
@ -46,12 +45,9 @@ pub struct ILIAS {
fn error_is_http2(error: &reqwest::Error) -> bool { fn error_is_http2(error: &reqwest::Error) -> bool {
error error
.source() // hyper::Error .source() // hyper::Error
.map(|x| x.source()) // h2::Error .and_then(|x| x.source()) // h2::Error
.flatten() .and_then(|x| x.downcast_ref::<h2::Error>())
.map(|x| x.downcast_ref::<h2::Error>()) .and_then(|x| x.reason())
.flatten()
.map(|x| x.reason())
.flatten()
.map(|x| x == h2::Reason::NO_ERROR) .map(|x| x == h2::Reason::NO_ERROR)
.unwrap_or(false) .unwrap_or(false)
} }
@ -222,8 +218,8 @@ impl ILIAS {
} }
unreachable!() unreachable!()
} }
pub async fn is_error_response(html: &Html) { pub fn is_error_response(html: &Html) -> bool {
html.select(&ALERT_DANGER).next().is_some() html.select(&ALERT_DANGER).next().is_some()
} }
@ -286,7 +282,13 @@ impl ILIAS {
} else { } else {
None None
}; };
Ok((ILIAS::get_items(&html), main_text, html.select(&LINKS).flat_map(|x| x.value().attr("href").map(|x| x.to_owned())).collect())) Ok((
ILIAS::get_items(&html),
main_text,
html.select(&LINKS)
.flat_map(|x| x.value().attr("href").map(|x| x.to_owned()))
.collect(),
))
} }
pub async fn get_course_content_tree(&self, ref_id: &str, cmd_node: &str) -> Result<Vec<Object>> { pub async fn get_course_content_tree(&self, ref_id: &str, cmd_node: &str) -> Result<Vec<Object>> {
@ -344,8 +346,8 @@ impl Object {
| Presentation { name, .. } | Presentation { name, .. }
| ExerciseHandler { name, .. } | ExerciseHandler { name, .. }
| PluginDispatch { name, .. } | PluginDispatch { name, .. }
| Generic { name, .. } => &name, | Generic { name, .. } => name,
Thread { url } => &url.thr_pk.as_ref().unwrap(), Thread { url } => url.thr_pk.as_ref().unwrap(),
Video { url } => &url.url, Video { url } => &url.url,
Dashboard { url } => &url.url, Dashboard { url } => &url.url,
} }
@ -366,7 +368,7 @@ impl Object {
| ExerciseHandler { url, .. } | ExerciseHandler { url, .. }
| PluginDispatch { url, .. } | PluginDispatch { url, .. }
| Video { url } | Video { url }
| Generic { url, .. } => &url, | Generic { url, .. } => url,
} }
} }

View File

@ -28,12 +28,12 @@ pub async fn download(path: PathBuf, ilias: Arc<ILIAS>, url: &URL, name: &str) -
return Ok(()); // ignore groups we are not in return Ok(()); // ignore groups we are not in
} }
warning!(name, "falling back to incomplete course content extractor!", e); warning!(name, "falling back to incomplete course content extractor!", e);
let (items, main_text, _) = ilias.get_course_content(&url).await?; let (items, main_text, _) = ilias.get_course_content(url).await?;
(items, main_text) (items, main_text)
}, },
} }
} else { } else {
let (items, main_text, _) = ilias.get_course_content(&url).await?; let (items, main_text, _) = ilias.get_course_content(url).await?;
(items, main_text) (items, main_text)
}; };
if ilias.opt.save_ilias_pages { if ilias.opt.save_ilias_pages {

View File

@ -17,6 +17,6 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
} }
let data = ilias.download(&url.url).await?; let data = ilias.download(&url.url).await?;
log!(0, "Writing {}", relative_path.to_string_lossy()); log!(0, "Writing {}", relative_path.to_string_lossy());
write_stream_to_file(&path, data.bytes_stream()).await?; write_stream_to_file(path, data.bytes_stream()).await?;
Ok(()) Ok(())
} }

View File

@ -17,7 +17,7 @@ static EXPAND_LINK: Lazy<Regex> = Lazy::new(|| Regex::new("expand=\\d").unwrap()
#[async_recursion] #[async_recursion]
pub async fn download(path: &Path, ilias: Arc<ILIAS>, url: &URL) -> Result<()> { pub async fn download(path: &Path, ilias: Arc<ILIAS>, url: &URL) -> Result<()> {
let content = ilias.get_course_content(&url).await?; let content = ilias.get_course_content(url).await?;
// expand all sessions // expand all sessions
for href in content.2 { for href in content.2 {

View File

@ -34,7 +34,7 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
let json = &json_capture.next().context("xoct player json not found")?[1]; let json = &json_capture.next().context("xoct player json not found")?[1];
log!(2, "{}", json); log!(2, "{}", json);
let json = json.split(",\n").next().context("invalid xoct player json")?; let json = json.split(",\n").next().context("invalid xoct player json")?;
serde_json::from_str(&json.trim())? serde_json::from_str(json.trim())?
}; };
log!(2, "{}", json); log!(2, "{}", json);
let streams = json let streams = json
@ -49,52 +49,50 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
.as_str() .as_str()
.context("video src not string")?; .context("video src not string")?;
download_to_path(&ilias, path, relative_path, url).await?; download_to_path(&ilias, path, relative_path, url).await?;
} else if !ilias.opt.combine_videos {
fs::create_dir(path).await.context("failed to create video directory")?;
download_all(path, streams, ilias, relative_path).await?;
} else { } else {
if !ilias.opt.combine_videos { let dir = tempdir()?;
fs::create_dir(path).await.context("failed to create video directory")?; // construct ffmpeg command to combine all files
download_all(path, streams, ilias, relative_path).await?; let mut arguments = vec![];
} else { for file in download_all(dir.path(), streams, ilias, relative_path).await? {
let dir = tempdir()?; arguments.push("-i".to_owned());
// construct ffmpeg command to combine all files arguments.push(file.to_str().context("invalid UTF8")?.into());
let mut arguments = vec![]; }
for file in download_all(dir.path(), streams, ilias, relative_path).await? { arguments.push("-c".into());
arguments.push("-i".to_owned()); arguments.push("copy".into());
arguments.push(file.to_str().context("invalid UTF8")?.into()); for i in 0..(arguments.len() / 2) - 1 {
} arguments.push("-map".into());
arguments.push("-c".into()); arguments.push(format!("{}", i));
arguments.push("copy".into()); }
for i in 0..(arguments.len() / 2) - 1 { arguments.push(path.to_str().context("invalid UTF8 in path")?.into());
arguments.push("-map".into()); let status = Command::new("ffmpeg")
arguments.push(format!("{}", i)); .args(&arguments)
} .stderr(Stdio::null())
arguments.push(path.to_str().context("invalid UTF8 in path")?.into()); .stdout(Stdio::null())
let status = Command::new("ffmpeg") .spawn()
.args(&arguments) .context("failed to start ffmpeg")?
.stderr(Stdio::null()) .wait()
.stdout(Stdio::null()) .await
.spawn() .context("failed to wait for ffmpeg")?;
.context("failed to start ffmpeg")? if !status.success() {
.wait() error!(format!("ffmpeg failed to merge video files into {}", path.display()));
.await error!(format!("check this directory: {}", dir.into_path().display()));
.context("failed to wait for ffmpeg")?; error!(format!("ffmpeg command: {}", arguments.join(" ")));
if !status.success() { }
error!(format!("ffmpeg failed to merge video files into {}", path.display()));
error!(format!("check this directory: {}", dir.into_path().display()));
error!(format!("ffmpeg command: {}", arguments.join(" ")));
}
};
} }
Ok(()) Ok(())
} }
async fn download_all( async fn download_all(
path: &Path, path: &Path,
streams: &Vec<serde_json::Value>, streams: &[serde_json::Value],
ilias: Arc<ILIAS>, ilias: Arc<ILIAS>,
relative_path: &Path, relative_path: &Path,
) -> Result<Vec<PathBuf>> { ) -> Result<Vec<PathBuf>> {
let mut paths = Vec::new(); let mut paths = Vec::new();
for (i, stream) in streams.into_iter().enumerate() { for (i, stream) in streams.iter().enumerate() {
let url = stream let url = stream
.pointer("/sources/mp4/0/src") .pointer("/sources/mp4/0/src")
.context("video src not found")? .context("video src not found")?
@ -126,9 +124,9 @@ async fn download_to_path(ilias: &ILIAS, path: &Path, relative_path: &Path, url:
} }
} }
} else { } else {
let resp = ilias.download(&url).await?; let resp = ilias.download(url).await?;
log!(0, "Writing {}", relative_path.to_string_lossy()); log!(0, "Writing {}", relative_path.to_string_lossy());
write_stream_to_file(&path, resp.bytes_stream()).await?; write_stream_to_file(path, resp.bytes_stream()).await?;
} }
Ok(()) Ok(())
} }

View File

@ -27,7 +27,7 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
if url.starts_with(ILIAS_URL) { if url.starts_with(ILIAS_URL) {
// is a link list // is a link list
if fs::metadata(&path).await.is_err() { if fs::metadata(&path).await.is_err() {
create_dir(&path).await?; create_dir(path).await?;
log!(0, "Writing {}", relative_path.to_string_lossy()); log!(0, "Writing {}", relative_path.to_string_lossy());
} }

View File

@ -1,66 +1,67 @@
use std::{path::{Path, PathBuf, Component}, ffi::OsString}; use std::{
ffi::OsString,
path::{Component, Path, PathBuf},
};
use anyhow::Result; use anyhow::Result;
use ignore::gitignore::Gitignore; use ignore::gitignore::Gitignore;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct IliasIgnore { pub struct IliasIgnore {
ignores: Vec<IgnoreFile> ignores: Vec<IgnoreFile>,
} }
impl IliasIgnore { impl IliasIgnore {
pub fn load(mut path: PathBuf) -> Result<Self> { pub fn load(mut path: PathBuf) -> Result<Self> {
let mut ignores = Vec::new(); let mut ignores = Vec::new();
let mut prefix = Vec::new(); let mut prefix = Vec::new();
// example scenario: // example scenario:
// path = /KIT/ILIAS/SS 23/Next Generation Internet // path = /KIT/ILIAS/SS 23/Next Generation Internet
// iliasignore in ILIAS/.iliasignore: prefix = SS 23/Next Generation Internet/ // iliasignore in ILIAS/.iliasignore: prefix = SS 23/Next Generation Internet/
// iliasignore in Next Generation Internet/.iliasignore: prefix = "" // iliasignore in Next Generation Internet/.iliasignore: prefix = ""
loop { loop {
let (ignore, error) = Gitignore::new(path.join(".iliasignore")); let (ignore, error) = Gitignore::new(path.join(".iliasignore"));
if let Some(err) = error { if let Some(err) = error {
warning!(err); warning!(err);
} }
if ignore.len() > 0 { if !ignore.is_empty() {
ignores.push(IgnoreFile { ignores.push(IgnoreFile {
ignore, ignore,
prefix: prefix.iter().fold(OsString::new(), |mut acc, el| { prefix: prefix.iter().fold(OsString::new(), |mut acc, el| {
acc.push(el); acc.push(el);
acc.push("/"); acc.push("/");
acc acc
}) }),
}); });
} }
if let Some(last) = path.components().last() { if let Some(last) = path.components().last() {
match last { match last {
Component::Normal(name) => prefix.insert(0, name.to_owned()), Component::Normal(name) => prefix.insert(0, name.to_owned()),
_ => break _ => break,
} }
} }
path.pop(); path.pop();
} }
Ok(IliasIgnore { Ok(IliasIgnore { ignores })
ignores }
})
}
pub fn should_ignore(&self, path: &Path, is_dir: bool) -> bool { pub fn should_ignore(&self, path: &Path, is_dir: bool) -> bool {
for ignore_file in &self.ignores { for ignore_file in &self.ignores {
let mut full_path = ignore_file.prefix.clone(); let mut full_path = ignore_file.prefix.clone();
full_path.push(path.as_os_str()); full_path.push(path.as_os_str());
let matched = ignore_file.ignore.matched(&full_path, is_dir); let matched = ignore_file.ignore.matched(&full_path, is_dir);
if matched.is_whitelist() { if matched.is_whitelist() {
return false; return false;
} else if matched.is_ignore() { } else if matched.is_ignore() {
return true; return true;
} }
} }
false false
} }
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct IgnoreFile { struct IgnoreFile {
ignore: Gitignore, ignore: Gitignore,
prefix: OsString prefix: OsString,
} }

View File

@ -3,7 +3,6 @@
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use futures::future::{self, Either}; use futures::future::{self, Either};
use futures::StreamExt; use futures::StreamExt;
use ignore::gitignore::Gitignore;
use indicatif::{ProgressDrawTarget, ProgressStyle}; use indicatif::{ProgressDrawTarget, ProgressStyle};
use structopt::StructOpt; use structopt::StructOpt;
use tokio::fs; use tokio::fs;