Mirror of https://github.com/FliegendeWurst/KIT-ILIAS-downloader.git (synced 2024-08-28 04:04:18 +00:00)

Merge pull request #45 from Ma27/flake

Init nix flake, fix build, add clippy+rustfmt

Commit 19202327b9

flake.lock (new file, 132 lines)
@@ -0,0 +1,132 @@
{
  "nodes": {
    "crane": {
      "inputs": {
        "flake-compat": [
          "flake-compat"
        ],
        "flake-utils": [
          "flake-utils"
        ],
        "nixpkgs": [
          "nixpkgs"
        ],
        "rust-overlay": [
          "rust-overlay"
        ]
      },
      "locked": {
        "lastModified": 1688772518,
        "narHash": "sha256-ol7gZxwvgLnxNSZwFTDJJ49xVY5teaSvF7lzlo3YQfM=",
        "owner": "ipetkov",
        "repo": "crane",
        "rev": "8b08e96c9af8c6e3a2b69af5a7fa168750fcf88e",
        "type": "github"
      },
      "original": {
        "owner": "ipetkov",
        "repo": "crane",
        "type": "github"
      }
    },
    "flake-compat": {
      "flake": false,
      "locked": {
        "lastModified": 1673956053,
        "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
        "owner": "edolstra",
        "repo": "flake-compat",
        "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
        "type": "github"
      },
      "original": {
        "owner": "edolstra",
        "repo": "flake-compat",
        "type": "github"
      }
    },
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1687709756,
        "narHash": "sha256-Y5wKlQSkgEK2weWdOu4J3riRd+kV/VCgHsqLNTTWQ/0=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "dbabf0ca0c0c4bce6ea5eaf65af5cb694d2082c7",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1688679045,
        "narHash": "sha256-t3xGEfYIwhaLTPU8FLtN/pLPytNeDwbLI6a7XFFBlGo=",
        "owner": "nixos",
        "repo": "nixpkgs",
        "rev": "3c7487575d9445185249a159046cc02ff364bff8",
        "type": "github"
      },
      "original": {
        "owner": "nixos",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "crane": "crane",
        "flake-compat": "flake-compat",
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs",
        "rust-overlay": "rust-overlay"
      }
    },
    "rust-overlay": {
      "inputs": {
        "flake-utils": [
          "flake-utils"
        ],
        "nixpkgs": [
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1688870171,
        "narHash": "sha256-8tD8fheWPa7TaJoxzcU3iHkCrQQpOpdMN+HYqgZ1N5A=",
        "owner": "oxalica",
        "repo": "rust-overlay",
        "rev": "5a932f10ac4bd59047d6e8b5780750ec76ea988a",
        "type": "github"
      },
      "original": {
        "owner": "oxalica",
        "repo": "rust-overlay",
        "type": "github"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix (new file, 86 lines)
@@ -0,0 +1,86 @@
{
  description = "Download content from ilias.studium.kit.edu";

  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";

    crane = {
      url = "github:ipetkov/crane";
      inputs.nixpkgs.follows = "nixpkgs";
      inputs.flake-utils.follows = "flake-utils";
      inputs.flake-compat.follows = "flake-compat";
      inputs.rust-overlay.follows = "rust-overlay";
    };

    # Import them even though we don't use them. Needed to allow overriding `rust-overlay`
    # etc. in flakes consuming this flake.
    # Temporary until https://github.com/NixOS/nix/issues/6986 is solved.
    rust-overlay = {
      url = "github:oxalica/rust-overlay";
      inputs.nixpkgs.follows = "nixpkgs";
      inputs.flake-utils.follows = "flake-utils";
    };
    flake-utils.url = "github:numtide/flake-utils";
    flake-compat = {
      url = "github:edolstra/flake-compat";
      flake = false;
    };
  };

  outputs = { self, nixpkgs, crane, ... }: let
    systems = [ "x86_64-linux" ];
    inherit (nixpkgs) lib;
    forEachSystem = lib.genAttrs systems;
    craneLib = forEachSystem (system: crane.lib.${system});

    toHydraJob = with lib; foldlAttrs
      (jobset: system: attrs: recursiveUpdate jobset
        (mapAttrs (const (drv: { ${system} = drv; }))
          (filterAttrs (name: const (name != "default")) attrs)))
      { };

    builds = forEachSystem (system: (lib.fix (final: {
      common = {
        pname = "KIT-ILIAS-Downloader";
        src = craneLib.${system}.cleanCargoSource self;
      };
      cargoArtifacts = craneLib.${system}.buildDepsOnly (final.common // {
        doCheck = false;
      });
      clippy = craneLib.${system}.cargoClippy (final.common // {
        inherit (final) cargoArtifacts;
        cargoClippyExtraArgs = lib.escapeShellArgs [
          "--all-targets"
          "--"
          "-D"
          "warnings"
          "-A"
          "non-snake-case"
          "-A"
          "clippy::upper-case-acronyms"
        ];
      });
      format = craneLib.${system}.cargoFmt (final.common // {
        inherit (final) cargoArtifacts;
      });
      kit-ilias-downloader = craneLib.${system}.buildPackage (final.common // {
        inherit (final) cargoArtifacts;
        doCheck = false;
        meta.license = lib.licenses.gpl3Plus;
        meta.platforms = systems;
      });
    })));
  in {
    packages = forEachSystem (system: {
      default = self.packages.${system}.kit-ilias-downloader;
      inherit (builds.${system}) kit-ilias-downloader;
    });
    checks = forEachSystem (system: {
      inherit (builds.${system}) format clippy;
    });
    hydraJobs = {
      packages = toHydraJob self.packages;
      checks = toHydraJob self.checks;
    };
  };
}
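
The comment on the `rust-overlay` input above is the reason the flake declares inputs it never references directly: because `nixpkgs`, `rust-overlay`, `flake-utils` and `flake-compat` are all top-level inputs here, a consuming flake can re-point them with `follows` instead of carrying duplicate copies. A minimal sketch of such a consumer, assuming the flake is fetched from the repository mirrored above (the input name `kit-ilias-downloader` and the chosen overrides are illustrative, not part of this change):

{
  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
    rust-overlay = {
      url = "github:oxalica/rust-overlay";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    kit-ilias-downloader = {
      url = "github:FliegendeWurst/KIT-ILIAS-downloader";
      # Re-pointing these works only because the flake above declares them as inputs.
      inputs.nixpkgs.follows = "nixpkgs";
      inputs.rust-overlay.follows = "rust-overlay";
    };
  };

  outputs = { nixpkgs, kit-ilias-downloader, ... }: {
    packages.x86_64-linux.default =
      kit-ilias-downloader.packages.x86_64-linux.kit-ilias-downloader;
  };
}

Locally, `nix build .#kit-ilias-downloader` should build the package and `nix flake check` should run the `format` and `clippy` checks defined above.
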

src/ilias.rs (28 changed lines)
@@ -5,13 +5,12 @@ use std::{collections::HashMap, error::Error as _, io::Write, sync::Arc};
 use anyhow::{anyhow, Context, Result};
 use cookie_store::CookieStore;
 use once_cell::sync::Lazy;
-use regex::Regex;
 use reqwest::{Client, IntoUrl, Proxy, Url};
 use reqwest_cookie_store::CookieStoreMutex;
 use scraper::{ElementRef, Html, Selector};
 use serde_json::json;
 
-use crate::{cli::Opt, queue, util::wrap_html, ILIAS_URL, iliasignore::IliasIgnore};
+use crate::{cli::Opt, iliasignore::IliasIgnore, queue, util::wrap_html, ILIAS_URL};
 
 pub mod course;
 pub mod exercise;
@@ -46,12 +45,9 @@ pub struct ILIAS {
 fn error_is_http2(error: &reqwest::Error) -> bool {
 	error
 		.source() // hyper::Error
-		.map(|x| x.source()) // h2::Error
-		.flatten()
-		.map(|x| x.downcast_ref::<h2::Error>())
-		.flatten()
-		.map(|x| x.reason())
-		.flatten()
+		.and_then(|x| x.source()) // h2::Error
+		.and_then(|x| x.downcast_ref::<h2::Error>())
+		.and_then(|x| x.reason())
 		.map(|x| x == h2::Reason::NO_ERROR)
 		.unwrap_or(false)
 }
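
The `error_is_http2` rewrite is a combinator cleanup: on `Option`, chaining `.map(f).flatten()` where `f` itself returns an `Option` is equivalent to a single `.and_then(f)`, which is what clippy's `map_flatten` lint suggests. A small standalone sketch of the equivalence (illustrative, not code from this repository):

fn main() {
    let message: Option<&str> = Some("h2 goaway frame");

    // map() wraps the inner Option again, so a flatten() is needed afterwards...
    let via_map: Option<usize> = message.map(|s| s.find("goaway")).flatten();
    // ...while and_then() does both steps in one call.
    let via_and_then: Option<usize> = message.and_then(|s| s.find("goaway"));

    assert_eq!(via_map, via_and_then);
    println!("{:?}", via_and_then); // Some(3)
}
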
@@ -223,7 +219,7 @@ impl ILIAS {
 		unreachable!()
 	}
 
-	pub async fn is_error_response(html: &Html) {
+	pub fn is_error_response(html: &Html) -> bool {
 		html.select(&ALERT_DANGER).next().is_some()
 	}
 
@@ -286,7 +282,13 @@ impl ILIAS {
 		} else {
 			None
 		};
-		Ok((ILIAS::get_items(&html), main_text, html.select(&LINKS).flat_map(|x| x.value().attr("href").map(|x| x.to_owned())).collect()))
+		Ok((
+			ILIAS::get_items(&html),
+			main_text,
+			html.select(&LINKS)
+				.flat_map(|x| x.value().attr("href").map(|x| x.to_owned()))
+				.collect(),
+		))
 	}
 
 	pub async fn get_course_content_tree(&self, ref_id: &str, cmd_node: &str) -> Result<Vec<Object>> {
@@ -344,8 +346,8 @@ impl Object {
 			| Presentation { name, .. }
 			| ExerciseHandler { name, .. }
 			| PluginDispatch { name, .. }
-			| Generic { name, .. } => &name,
-			Thread { url } => &url.thr_pk.as_ref().unwrap(),
+			| Generic { name, .. } => name,
+			Thread { url } => url.thr_pk.as_ref().unwrap(),
 			Video { url } => &url.url,
 			Dashboard { url } => &url.url,
 		}
@@ -366,7 +368,7 @@ impl Object {
 			| ExerciseHandler { url, .. }
 			| PluginDispatch { url, .. }
 			| Video { url }
-			| Generic { url, .. } => &url,
+			| Generic { url, .. } => url,
 		}
 	}
 
@@ -28,12 +28,12 @@ pub async fn download(path: PathBuf, ilias: Arc<ILIAS>, url: &URL, name: &str) -
 					return Ok(()); // ignore groups we are not in
 				}
 				warning!(name, "falling back to incomplete course content extractor!", e);
-				let (items, main_text, _) = ilias.get_course_content(&url).await?;
+				let (items, main_text, _) = ilias.get_course_content(url).await?;
 				(items, main_text)
 			},
 		}
 	} else {
-		let (items, main_text, _) = ilias.get_course_content(&url).await?;
+		let (items, main_text, _) = ilias.get_course_content(url).await?;
 		(items, main_text)
 	};
 	if ilias.opt.save_ilias_pages {
@@ -17,6 +17,6 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
 	}
 	let data = ilias.download(&url.url).await?;
 	log!(0, "Writing {}", relative_path.to_string_lossy());
-	write_stream_to_file(&path, data.bytes_stream()).await?;
+	write_stream_to_file(path, data.bytes_stream()).await?;
 	Ok(())
 }
@@ -17,7 +17,7 @@ static EXPAND_LINK: Lazy<Regex> = Lazy::new(|| Regex::new("expand=\\d").unwrap()
 
 #[async_recursion]
 pub async fn download(path: &Path, ilias: Arc<ILIAS>, url: &URL) -> Result<()> {
-	let content = ilias.get_course_content(&url).await?;
+	let content = ilias.get_course_content(url).await?;
 
 	// expand all sessions
 	for href in content.2 {
@@ -34,7 +34,7 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
 		let json = &json_capture.next().context("xoct player json not found")?[1];
 		log!(2, "{}", json);
 		let json = json.split(",\n").next().context("invalid xoct player json")?;
-		serde_json::from_str(&json.trim())?
+		serde_json::from_str(json.trim())?
 	};
 	log!(2, "{}", json);
 	let streams = json
@@ -49,8 +49,7 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
 			.as_str()
 			.context("video src not string")?;
 		download_to_path(&ilias, path, relative_path, url).await?;
-	} else {
-		if !ilias.opt.combine_videos {
+	} else if !ilias.opt.combine_videos {
 		fs::create_dir(path).await.context("failed to create video directory")?;
 		download_all(path, streams, ilias, relative_path).await?;
 	} else {
@@ -82,19 +81,18 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
 			error!(format!("check this directory: {}", dir.into_path().display()));
 			error!(format!("ffmpeg command: {}", arguments.join(" ")));
 		}
-	};
 	}
 	Ok(())
 }
 
 async fn download_all(
 	path: &Path,
-	streams: &Vec<serde_json::Value>,
+	streams: &[serde_json::Value],
 	ilias: Arc<ILIAS>,
 	relative_path: &Path,
 ) -> Result<Vec<PathBuf>> {
 	let mut paths = Vec::new();
-	for (i, stream) in streams.into_iter().enumerate() {
+	for (i, stream) in streams.iter().enumerate() {
 		let url = stream
 			.pointer("/sources/mp4/0/src")
 			.context("video src not found")?
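
The `download_all` signature change follows clippy's `ptr_arg` advice: a parameter of type `&[T]` accepts everything a `&Vec<T>` does (a `&Vec<T>` deref-coerces to `&[T]`) and additionally works with arrays and sub-slices. A small standalone sketch (illustrative, not code from this repository):

// Accepts any contiguous sequence of strings, not just a borrowed Vec.
fn total_len(streams: &[String]) -> usize {
    streams.iter().map(|s| s.len()).sum()
}

fn main() {
    let owned: Vec<String> = vec!["a".to_string(), "bc".to_string()];
    let fixed: [String; 1] = ["def".to_string()];

    assert_eq!(total_len(&owned), 3);      // &Vec<String> coerces to &[String]
    assert_eq!(total_len(&fixed), 3);      // arrays work too
    assert_eq!(total_len(&owned[..1]), 1); // as do sub-slices
}
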
@@ -126,9 +124,9 @@ async fn download_to_path(ilias: &ILIAS, path: &Path, relative_path: &Path, url:
 			}
 		}
 	} else {
-		let resp = ilias.download(&url).await?;
+		let resp = ilias.download(url).await?;
 		log!(0, "Writing {}", relative_path.to_string_lossy());
-		write_stream_to_file(&path, resp.bytes_stream()).await?;
+		write_stream_to_file(path, resp.bytes_stream()).await?;
 	}
 	Ok(())
 }
@@ -27,7 +27,7 @@ pub async fn download(path: &Path, relative_path: &Path, ilias: Arc<ILIAS>, url:
 	if url.starts_with(ILIAS_URL) {
 		// is a link list
 		if fs::metadata(&path).await.is_err() {
-			create_dir(&path).await?;
+			create_dir(path).await?;
 			log!(0, "Writing {}", relative_path.to_string_lossy());
 		}
 
@@ -1,11 +1,14 @@
-use std::{path::{Path, PathBuf, Component}, ffi::OsString};
+use std::{
+	ffi::OsString,
+	path::{Component, Path, PathBuf},
+};
 
 use anyhow::Result;
 use ignore::gitignore::Gitignore;
 
 #[derive(Clone, Debug)]
 pub struct IliasIgnore {
-	ignores: Vec<IgnoreFile>
+	ignores: Vec<IgnoreFile>,
 }
 
 impl IliasIgnore {
@@ -21,27 +24,25 @@ impl IliasIgnore {
 			if let Some(err) = error {
 				warning!(err);
 			}
-			if ignore.len() > 0 {
+			if !ignore.is_empty() {
 				ignores.push(IgnoreFile {
 					ignore,
 					prefix: prefix.iter().fold(OsString::new(), |mut acc, el| {
 						acc.push(el);
 						acc.push("/");
 						acc
-					})
+					}),
 				});
 			}
 			if let Some(last) = path.components().last() {
 				match last {
 					Component::Normal(name) => prefix.insert(0, name.to_owned()),
-					_ => break
+					_ => break,
 				}
 			}
 			path.pop();
 		}
-		Ok(IliasIgnore {
-			ignores
-		})
+		Ok(IliasIgnore { ignores })
 	}
 
 	pub fn should_ignore(&self, path: &Path, is_dir: bool) -> bool {
@@ -62,5 +63,5 @@ impl IliasIgnore {
 #[derive(Clone, Debug)]
 struct IgnoreFile {
 	ignore: Gitignore,
-	prefix: OsString
+	prefix: OsString,
 }
@@ -3,7 +3,6 @@
 use anyhow::{anyhow, Context, Result};
 use futures::future::{self, Either};
 use futures::StreamExt;
-use ignore::gitignore::Gitignore;
 use indicatif::{ProgressDrawTarget, ProgressStyle};
 use structopt::StructOpt;
 use tokio::fs;