Compare commits

...

40 Commits

Author SHA1 Message Date
southerntofu 34a5840aa0 Add support for --inbox/--inbox-folder 2022-01-12 19:20:32 +01:00
southerntofu 697173bb81 Check for submodule updates right after first clone 2022-01-11 16:07:46 +01:00
southerntofu 250e4372c4 A DVCS backend implements the Backend trait (allows 3rd party backends) 2022-01-10 15:02:10 +01:00
southerntofu dbd65cdc27 Get rid of Backend::Unknown variant 2022-01-09 16:29:23 +01:00
southerntofu ad61faaecf Make sure submodules added after first clone are initialized properly 2022-01-09 15:59:27 +01:00
southerntofu 38d85f954a Pass all tests successfully (for the moment) 2022-01-07 19:30:06 +01:00
southerntofu 70b0edaf89 Cleanup warnings 2022-01-06 18:12:54 +01:00
southerntofu b5c0d6aad3 Move CLI building to cli.rs 2022-01-06 18:08:51 +01:00
southerntofu a887b0afbc Fix broken error message 2022-01-06 17:46:56 +01:00
southerntofu 628b6372aa Don't panic on wrong argument, instead display error message 2022-01-06 17:46:25 +01:00
southerntofu 8fa9d7dca0 Load translations properly when forgebuild is not setup, just cloned 2022-01-05 18:38:54 +01:00
southerntofu 8f7bc18837 Support setuid mode 2021-01-04 16:25:47 +01:00
southerntofu c3a4ece084 Don't fail default locale ("C") and don't look for $HOME env variable 2021-01-04 15:34:09 +01:00
southerntofu 741af8c6f1 Don't use $HOME env variable because it doesn't always work 2021-01-04 15:30:34 +01:00
southerntofu ec10c2f6bd Load translations from specific file 2020-12-01 18:19:53 +01:00
southerntofu 5760794747 Reorder tasks before debug print, rename GITBUILDCONF to FORGEBUILDCONF 2020-12-01 18:19:31 +01:00
southerntofu ce6339a4f2 Rename CLI to forgebuild 2020-12-01 18:19:12 +01:00
southerntofu b639d1d82b Start JSON debug output for context 2020-11-28 20:36:11 +01:00
southerntofu 78b5ebcd64 Simplify logging system 2020-11-28 16:48:23 +01:00
southerntofu bd6e1d67f9 Load tasks from URLs 2020-11-28 15:03:04 +01:00
southerntofu 70f476dcc9 Debug output for task processing 2020-11-28 12:16:18 +01:00
southerntofu d7cded3297 Output messages 2020-11-28 12:00:02 +01:00
southerntofu b28c97fdb1 Refactor task database 2020-11-28 11:34:22 +01:00
southerntofu 9ae8eaf3c2 Start implementing output tests, don't panic on missing basedir 2020-11-27 19:38:11 +01:00
southerntofu ae043f5cd2 Fix some debug messages 2020-11-26 00:21:02 +01:00
southerntofu 6b6f30b80a (still) renaming to forgebuild 2020-11-26 00:05:23 +01:00
southerntofu 104302ede2 cargo fmt 2020-11-25 23:59:39 +01:00
southerntofu 6ed76928db Cleanup unused stuff 2020-11-25 23:55:33 +01:00
southerntofu 19e9b98302 Reorder tasks, start implementing git 2020-11-25 23:26:52 +01:00
southerntofu 5bee567e34 Expand relative basedir 2020-11-25 21:58:00 +01:00
southerntofu f86e3f6905 Setup environment vars for tasks 2020-11-25 18:49:02 +01:00
southerntofu f8f1f7fea5 Clone to basedir/.task and set working dir there 2020-11-25 18:44:54 +01:00
southerntofu e9bc8434a5 Load host config (not passed to task yet) and respect ignored tasks 2020-11-25 17:10:14 +01:00
southerntofu 02e6a5d827 Run sourceless tasks, respect opt-in task.hosts list of hostnames 2020-11-25 12:07:08 +01:00
southerntofu 25ea54143c Basedir can be configured with -b/--basedir 2020-11-23 20:07:38 +01:00
southerntofu 5b99ff9550 Rename ~/.git-build to ~/forgebuild 2020-11-23 19:45:53 +01:00
southerntofu 8d854c1d1a Find translations in several places 2020-11-23 19:45:35 +01:00
southerntofu 3e048747cd Add translations in submodule 2020-07-11 17:53:11 +02:00
southerntofu fb90de6aad Add README 2020-07-01 18:14:16 +02:00
southerntofu 896a3e6f5c Add some comments 2020-07-01 17:38:27 +02:00
14 changed files with 1099 additions and 310 deletions

6
.gitmodules vendored Normal file
View File

@ -0,0 +1,6 @@
[submodule "i18n"]
path = i18n
url = https://tildegit.org/southerntofu/git-build-i18n
[submodule "spec"]
path = spec
url = https://tildegit.org/forge/build

63
Cargo.lock generated
View File

@ -26,6 +26,12 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "clap"
version = "2.33.0"
@ -42,13 +48,16 @@ dependencies = [
]
[[package]]
name = "git-build-rs"
version = "0.1.0"
name = "forgebuild"
version = "0.2.0"
dependencies = [
"glob",
"hostname",
"lazy_static",
"serde",
"serde_json",
"structopt",
"users",
]
[[package]]
@ -75,6 +84,17 @@ dependencies = [
"libc",
]
[[package]]
name = "hostname"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867"
dependencies = [
"libc",
"match_cfg",
"winapi",
]
[[package]]
name = "itoa"
version = "0.4.5"
@ -93,6 +113,21 @@ version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99e85c08494b21a9054e7fe1374a732aeadaff3980b6990b94bfd3a70f690005"
[[package]]
name = "log"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
dependencies = [
"cfg-if",
]
[[package]]
name = "match_cfg"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4"
[[package]]
name = "proc-macro-error"
version = "1.0.2"
@ -148,6 +183,20 @@ name = "serde"
version = "1.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36df6ac6412072f67cf767ebbde4133a5b2e88e76dc6187fa7104cd16f783399"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e549e3abf4fb8621bd1609f11dfc9f5e50320802273b12f3811a67e6716ea6c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
@ -239,6 +288,16 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
[[package]]
name = "users"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032"
dependencies = [
"libc",
"log",
]
[[package]]
name = "vec_map"
version = "0.8.1"

View File

@ -1,6 +1,6 @@
[package]
name = "git-build-rs"
version = "0.1.0"
name = "forgebuild"
version = "0.2.0"
authors = ["southerntofu"]
edition = "2018"
@ -9,6 +9,12 @@ edition = "2018"
[dependencies]
glob = "0.3"
lazy_static = "1.4"
# Detect hostname
hostname = "0.3"
# Translations
serde_json = "1.0"
structopt = "0.3"
# Debug Context for translations
serde = { version = "1.0", features = ["derive"] }
# Users information
users = "0.11"

19
README.md Normal file
View File

@ -0,0 +1,19 @@
# git-build.rs
Rust reimplementation of git-build.sh
# Running
Requires the translation files to run. They are included in a submodule, so you need to clone with git's `--recursive` flag:
```
git clone --recursive https://tildegit.org/southerntofu/git-build.rs
```
```
LANG=fr LOG=debug I18N=~/git-build.sh/i18n/ cargo run --release
```
# Status
Experimental, not working (yet)

1
spec Submodule

@ -0,0 +1 @@
Subproject commit 891b8aabbf4da2e25fc66df503b85266485b8020

149
src/backend/git.rs Normal file
View File

@ -0,0 +1,149 @@
use std::boxed::Box;
use std::env::{set_current_dir as cd, current_dir as pwd};
use std::path::{Path, PathBuf};
use std::process::Command;
use crate::backend::{Backend, Repo};
/// Git implementation of the DVCS [`Backend`] trait.
/// Stateless: every method shells out to the `git` binary and assumes the
/// current working directory is the repository being operated on.
#[derive(Debug, Clone, Copy)]
pub struct Git;

impl Backend for Git {
    /// Clones `source` (including submodules, via `--recursive`) into `dest`.
    /// Returns true when the clone succeeded.
    fn download(&self, source: &str, dest: &Path) -> bool {
        Command::new("git")
            .arg("clone")
            .arg("--recursive")
            .arg(source)
            .arg(dest)
            .output() // To suppress (capture) output
            .expect("PROCESS ERROR!")
            .status
            .success()
    }

    /// Lists the submodule paths declared in `.gitmodules` by running
    /// `git config --file .gitmodules --get-regexp path`.
    /// Each output line looks like `submodule.NAME.path PATH`; we keep PATH.
    /// Returns an empty vector when the command fails (no submodules,
    /// including the case of a missing or empty `.gitmodules`).
    fn submodules(&self) -> Vec<PathBuf> {
        let cmd = Command::new("git")
            .arg("config")
            .arg("--file")
            .arg(".gitmodules")
            .arg("--get-regexp")
            .arg("path")
            .output()
            .expect("WTF");
        if cmd.status.success() {
            // Command succeeded, split by the first space character to find the path to the submodule
            let out = std::string::String::from_utf8(cmd.stdout).expect("Wrong unicode");
            let mut results = Vec::new();
            for line in out.lines() {
                results.push(PathBuf::from(line.split_once(" ").expect("Misformed .gitmodules").1));
            }
            return results;
        } else {
            // No submodules found (even if .gitmodules exist but is empty)
            return Vec::new();
        }
    }

    /// Returns the branch (or "HEAD" when detached) currently checked out,
    /// as reported by `git rev-parse --abbrev-ref HEAD`.
    /// Panics when git cannot answer (not a repository / corrupted repo).
    fn branch(&self) -> String {
        let output = Command::new("git")
            .arg("rev-parse")
            .arg("--abbrev-ref")
            .arg("HEAD")
            .output()
            .expect("WTF");
        if !output.status.success() {
            panic!("Corrupted git repository???");
        }
        // Trim the trailing newline git appends to its output
        String::from_utf8(output.stdout).unwrap().trim().to_string()
    }

    /// Checks out `target` (a branch/commit/tag). Returns true on success.
    /// Note: unlike the other methods, git's own output is not captured here.
    fn checkout(&self, target: &str) -> bool {
        let status = Command::new("git")
            .arg("checkout")
            .arg(target)
            .status()
            .expect("PROCESS ERROR!");
        status.success()
    }

    /// Fetches from `origin`, then compares the working tree against
    /// `remotes/origin/<current branch>` with `git diff --quiet`.
    /// Returns true when the remote has changes we don't have yet;
    /// false when up-to-date or when the fetch itself failed.
    fn has_updates(&self) -> bool {
        // Refresh remote
        if !Command::new("git")
            .arg("fetch")
            .arg("--quiet")
            .arg("origin")
            .status()
            .expect("WTF")
            .success()
        {
            // Fetch failed (e.g. no network); report "no updates" rather than aborting
            // TODO: This should be a forgebuild error message
            eprintln!("Fetching updates failed");
            return false;
        }
        let branch = self.branch();
        if Command::new("git")
            .arg("diff")
            .arg("--quiet")
            .arg(&format!("remotes/origin/{}", &branch))
            .status()
            .expect("WTF")
            .success()
        {
            // Command succeeded (empty diff), no updates
            return false;
        }
        // Updates
        return true;
    }

    /// Fast-forwards the current branch from `origin` (`git pull --ff-only`),
    /// then initializes any submodules newly added upstream.
    /// Returns true when the pull succeeded (submodule init failure only warns).
    fn update(&self) -> bool {
        if Command::new("git")
            .arg("pull")
            .arg("--ff-only")
            .arg("origin")
            .arg(self.branch())
            .status()
            .expect("WTF")
            .success()
        {
            // Main updates succeeded. If new submodules were added, initialize them
            if !Command::new("git")
                .arg("submodule")
                .arg("update")
                .arg("--init")
                .arg("--recursive")
                .status()
                .expect("WTF")
                .success()
            {
                // TODO: Should be forgebuild error message
                eprintln!("Failed to initialize submodules which were added to the repo");
            }
            return true;
        } else {
            // Main updates failed
            return false;
        }
    }

    /// Runs `update()` inside every submodule of the current repository,
    /// restoring the working directory after each one.
    /// Returns true when at least one submodule pulled updates.
    // TODO: Maybe this should be a generic implementation part of the trait?
    fn subupdate(&self) -> bool {
        let mut found_subupdates = false;
        let prev_dir = pwd().expect("failed to pwd");
        for subpath in self.submodules() {
            // Move into the submodule (subpath is relative to the repo root)
            cd(subpath).unwrap();
            // Generate a Repo instance for the submodule but don't enable subupdates
            // Just in case someone would trigger an infinite loop by accident
            let subrepo = Repo::new(Box::new(Git), "irrelevant", &pwd().expect("failed to pwd"), false);
            if subrepo.backend.update() {
                found_subupdates = true;
            }
            // Move back into main repo
            cd(&prev_dir).unwrap();
        }
        found_subupdates
    }
}

46
src/backend/mercurial.rs Normal file
View File

@ -0,0 +1,46 @@
use std::boxed::Box;
use std::env::{set_current_dir as cd, current_dir as pwd};
use std::path::{Path, PathBuf};
use std::process::Command;
use crate::backend::{Backend, Repo};
/// Mercurial backend stub. Declared so that `backend("mercurial")` resolves
/// to a concrete type, but none of the operations are implemented yet:
/// every method panics with `unimplemented!` when called.
///
/// Note: the previous `#[allow(dead_code)]` attributes were removed — the
/// dead_code lint never fires on methods required by an implemented trait,
/// so they were redundant.
#[derive(Debug, Clone, Copy)]
pub struct Mercurial;

impl Backend for Mercurial {
    /// Not implemented yet: panics when called.
    fn download(&self, _src: &str, _dest: &Path) -> bool {
        unimplemented!();
    }

    /// Not implemented yet: panics when called.
    fn submodules(&self) -> Vec<PathBuf> {
        unimplemented!();
    }

    /// Not implemented yet: panics when called.
    fn branch(&self) -> String {
        unimplemented!();
    }

    /// Not implemented yet: panics when called.
    fn checkout(&self, _branch: &str) -> bool {
        unimplemented!();
    }

    /// Not implemented yet: panics when called.
    fn has_updates(&self) -> bool {
        unimplemented!();
    }

    /// Not implemented yet: panics when called.
    fn update(&self) -> bool {
        unimplemented!();
    }

    /// Not implemented yet: panics when called.
    fn subupdate(&self) -> bool {
        unimplemented!();
    }
}

105
src/backend/mod.rs Normal file
View File

@ -0,0 +1,105 @@
use std::boxed::Box;
use std::path::{Path, PathBuf};
mod git;
mod mercurial;
pub use git::Git;
pub use mercurial::Mercurial;
/// Resolves an optional backend name into a boxed [`Backend`].
/// A missing setting falls back to Git; a recognized name ("git",
/// "mercurial") yields the matching backend; anything else is None.
/// If you want to implement your own backend outside of this crate,
/// you should override this method.
/// TODO: fallback backend should be customizable
pub fn backend(setting: Option<String>) -> Option<Box<dyn Backend>> {
    let name = match setting {
        // Git is the default setting until further notice
        None => return Some(Box::new(Git)),
        Some(name) => name,
    };
    match name.as_str() {
        "git" => Some(Box::new(Git)),
        "mercurial" => Some(Box::new(Mercurial)),
        _ => None,
    }
}
/// The trait implemented by DVCS backends (git, mercurial).
/// NOTE: This interface may evolve quickly. Currently no state is stored inside
/// the backend and it's assumed the current working dir is the repository we're operating on
pub trait Backend: std::fmt::Debug {
    /// Clone a src repository and all related submodules to dest.
    /// Returns true on success
    fn download(&self, src: &str, dest: &Path) -> bool;
    /// List all submodules in repo, as a list of PathBuf's
    /// Returns an empty vector when no submodules are declared
    fn submodules(&self) -> Vec<PathBuf>;
    /// Returns the current branch/commit/tag tracked by the repo
    fn branch(&self) -> String;
    /// Checks out a specific branch/commit/tag to track on the repo.
    /// Returns true on success
    fn checkout(&self, branch: &str) -> bool;
    /// Checks for updates in repo. Does not account for submodules
    fn has_updates(&self) -> bool;
    /// Applies updates on the main repo. Returns true if some updates were
    /// applied, false otherwise. (Submodule handling is the job of
    /// subupdate(); the Git backend additionally initializes submodules
    /// newly added upstream during update().)
    fn update(&self) -> bool;
    /// Applies submodule updates. Returns true if some updates were applied, false otherwise.
    fn subupdate(&self) -> bool;
}
/// A concrete repository: a DVCS backend paired with where the repo comes
/// from (source URL), where it lives on disk (dest) and whether submodule
/// updates should be applied alongside main-repo updates.
#[derive(Debug)]
pub struct Repo {
    pub backend: Box<dyn Backend>,
    pub source: String,
    pub dest: PathBuf,
    pub subupdates: bool,
}

impl Repo {
    /// Wraps a backend together with the repo's source, destination
    /// and the subupdates flag.
    pub fn new(backend: Box<dyn Backend>, source: &str, dest: &Path, subupdates: bool) -> Repo {
        Repo {
            backend,
            source: String::from(source),
            dest: dest.to_owned(),
            subupdates,
        }
    }

    /// Clones the repository. When the clone succeeds and subupdates is
    /// enabled, submodules are immediately checked for updates as well.
    pub fn download(&self) -> bool {
        let cloned = self.backend.download(&self.source, &self.dest);
        if cloned && self.subupdates {
            self.subupdate();
        }
        cloned
    }

    /// Checks out the given branch/commit/tag via the backend.
    pub fn checkout(&self, target: &str) -> bool {
        self.backend.checkout(target)
    }

    /// Asks the backend whether the main repo has pending updates.
    pub fn has_updates(&self) -> bool {
        self.backend.has_updates()
    }

    /// Updates submodules first (when enabled), then the main repo if it
    /// has pending updates. Returns true when anything was updated.
    pub fn update(&self) -> bool {
        // Short-circuit: subupdate() only runs when the flag is set
        let had_subupdates = self.subupdates && self.subupdate();
        if !self.has_updates() {
            return had_subupdates;
        }
        self.backend.update() || had_subupdates
    }

    /// Delegates submodule updates to the backend.
    pub fn subupdate(&self) -> bool {
        self.backend.subupdate()
    }
}

View File

@ -1,12 +1,86 @@
use std::path::PathBuf;
use structopt::StructOpt;
use structopt::clap::ErrorKind as ClapError;
// To get effective user id (EUID) so that setuid works
use users::{get_effective_uid,get_user_by_uid};
// For home directory
use users::os::unix::UserExt;
use crate::log;
use crate::log::Context;
#[derive(Debug, StructOpt)]
#[structopt(name = "git-build", about = "Update your repositories and trigger tasks")]
#[structopt(
name = "forgebuild",
about = "Update your repositories and trigger tasks"
)]
pub struct Cli {
    // Run tasks even when their source reports no updates
    #[structopt(short = "f", long = "force")]
    pub force: bool,
    // Base directory containing the tasks; when absent, basedir()
    // falls back to ~/.forgebuild of the effective user
    #[structopt(short = "b", long = "basedir")]
    pub basedir: Option<String>,
    // Only select tasks that have a matching TASK.inbox file in the basedir
    // (see task::Select::Inbox) — NOTE(review): semantics inferred from task.rs
    #[structopt(long = "inbox")]
    pub inbox: bool,
    // Like --inbox, but the TASK.inbox files live in this separate folder
    // (see task::Select::InboxDir)
    #[structopt(long = "inbox-folder")]
    pub inboxdir: Option<String>,
    //#[structopt(def)]
    // Positional task names to run; empty means "all tasks"
    pub tasks: Vec<String>,
}
impl Cli {
    /// Builds the command-line from passed arguments
    /// Returns the Cli instance alongside a PathBuf of the basedir.
    /// Exits the process (code 1) with a translated error message when the
    /// basedir does not exist or an unknown argument was given.
    pub fn build() -> (Self, PathBuf) {
        // We create a dedicated context so we don't have to pass it as argument
        let mut context = Context::new();
        // We don't want to use structopt's error handler for unknown argument as we have our own
        // error message for that case (unknown_arg). So we use from_iter_safe() not from_args()
        match Cli::from_iter_safe(std::env::args()) {
            Ok(cmd) => {
                // Parsing was successful, but we'd like to ensure requested basedir exists
                // (canonicalize also resolves relative paths and fails when missing)
                match cmd.basedir().canonicalize() {
                    Ok(p) => {
                        (cmd, p)
                    },
                    Err(_) => {
                        // Missing basedir: report it through the i18n error system and stop
                        context.insert("$i18n_basedir".to_string(), cmd.basedir().to_str().unwrap().to_string());
                        log::error("missing_basedir", &context);
                        std::process::exit(1);
                    }
                }
            },
            Err(e) => {
                match &e.kind {
                    ClapError::UnknownArgument => {
                        // clap stores the offending argument in e.info; surface it translated
                        context.insert("$i18n_arg".to_string(), e.info.unwrap().first().unwrap().to_string());
                        log::error("unknown_arg", &context);
                        std::process::exit(1);
                    },
                    // Any other parse error (e.g. --help/--version) keeps clap's behavior
                    _ => e.exit()
                }
            }
        }
    }

    /// Returns a PathBuf to the basedir. If it's a relative link,
    /// it's not expanded here! Panics if no basedir is provided and the
    /// effective user's profile cannot be resolved.
    pub fn basedir(&self) -> PathBuf {
        if let Some(basedir) = &self.basedir {
            // Returns an error when the path doesn't exist
            PathBuf::from(basedir)
        } else {
            // Use the effective UID (not $HOME) so setuid mode works
            let owner = get_effective_uid();
            let mut home_path = get_user_by_uid(owner).expect("Failed owner profile")
                .home_dir().to_path_buf();
            home_path.push(".forgebuild");
            home_path
        }
    }
}

134
src/db.rs
View File

@ -1,80 +1,6 @@
use std::path::{PathBuf,Path};
use std::fs;
use std::os::unix::fs::MetadataExt;
use std::ffi::OsString;
#[derive(Debug)]
pub enum Error {
EntryNotFound(String)
}
#[derive(Debug, Clone)]
pub struct Entry {
pub name: OsString,
pub path: PathBuf,
pub base_dir: PathBuf,
}
impl Entry {
pub fn new(path: PathBuf, name: OsString, base_dir: PathBuf) -> Entry {
Entry {
path,
name,
base_dir,
}
}
pub fn read_setting(&self, setting: &str) -> Option<String> {
let mut path = self.path.clone();
path.set_extension(setting);
read_or_none(&path)
}
}
/// Load single entry from folder
pub fn entry(basedir: &str, filter: impl Fn(&Path) -> bool, name: &str) -> Option<Entry> {
let basepath = PathBuf::from(&basedir);
let path = basepath.clone().join(name);
if !path.exists() || !filter(&path) {
return None;
}
Some(Entry::new(
path.clone(),
path.file_name().expect("Failed to read file name").to_os_string(),
basepath
))
}
/// Loads entire database from folder
pub fn from(path_name: &str, filter: impl Fn(&Path) -> bool) -> Result<Vec<Entry>, std::io::Error> {
let path = PathBuf::from(&path_name);
let mut entries = Vec::new();
for file in path.read_dir()? {
if file.is_err() {
// Dismiss individual errors (in case there's a permission problem)
// TODO: maybe print a warning? Actually a configurable error level
// (using enum variants) should be passed to the function to configure
// whether to continue silently or error out for individual files
continue;
}
let entry = file?.path();
if filter(&entry) {
entries.push(
Entry::new(
entry.clone(),
entry.file_name().expect("Failed to read file name").to_os_string(),
path.clone()
)
);
}
}
return Ok(entries);
}
use std::path::Path;
/// Reads the file and strips whitespace (including newlines)
/// Useful for file-based key-value store
@ -86,7 +12,7 @@ pub fn read_or_none(path: &Path) -> Option<String> {
Ok(content) => {
// Remove trailing space/newlines
Some(content.trim().to_string())
},
}
Err(e) => {
eprintln!("IO ERROR: {}", e);
None
@ -94,33 +20,14 @@ pub fn read_or_none(path: &Path) -> Option<String> {
}
}
fn list_files(path: &Path) -> Vec<PathBuf> {
let mut res = Vec::new();
match fs::read_dir(path) {
Ok(files) => {
for r in files {
match r {
Ok(entry) => {
let file = entry.path();
if file.is_file() {
res.push(file);
}
},
Err(e) => {
eprintln!("IOERROR: {}", e)
}
}
}
},
Err(e) => {
eprintln!("IOERROR: {}", e);
}
}
return res;
pub fn read_extension(path: &Path, setting: &str) -> Option<String> {
let mut path = path.to_path_buf();
path.set_extension(setting);
read_or_none(&path)
}
// If the file doesn't exist or fails, return false
/// Returns true when the file exists and has user exec
/// permission. Returns false otherwise.
pub fn is_executable(path: &Path) -> bool {
// Do not match directories
if !path.is_file() {
@ -135,33 +42,10 @@ pub fn is_executable(path: &Path) -> bool {
} else {
false
}
},
}
Err(e) => {
eprintln!("IO Error: {}", e);
false
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn can_load_db() {
let base_dir = "tests/success";
let entries = from(base_dir, is_executable).expect("Could not load db");
let entries_names: Vec<String> = entries.iter().map(|x| x.name.clone().into_string().unwrap()).collect();
let expected: Vec<String> = vec!("task", "symlink", "no_source").iter().map(|x| x.to_string()).collect();
assert_eq!(expected.len(), entries_names.len());
for entry in expected {
if !entries_names.contains(&entry.to_string()) {
panic!("Could not find {}", &entry);
}
}
}
}

View File

@ -1,39 +0,0 @@
use std::process::Command;
pub fn from_setting(setting: Option<String>) -> Backend {
// Git is the default setting until further notice
setting.map_or(Backend::Git, |name| match name.as_ref() {
"git" => Backend::Git,
"mercurial" => Backend::Mercurial,
_ => Backend::Unknown(name.to_string())
})
}
#[derive(Debug)]
pub enum Backend {
Git,
Mercurial,
Unknown(String)
}
impl Backend {
pub fn clone(&self, source: &str) -> bool {
match self {
Backend::Git => {
let status = Command::new("git")
.arg("clone")
.arg("--recursive")
.arg(source)
.status().expect("PROCESS ERROR!");
status.success()
},
Backend::Mercurial => {
unreachable!("Unimplemented");
},
Backend::Unknown(name) => {
eprintln!("Unknown DVCS: {}", name);
false
}
}
}
}

View File

@ -1,64 +1,166 @@
use std::env;
use std::collections::HashMap;
use std::path::PathBuf;
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
// To get effective user id (EUID) so that setuid works
use users::{get_effective_uid,get_user_by_uid};
// For home directory
use users::os::unix::UserExt;
use lazy_static::lazy_static;
lazy_static!{
lazy_static! {
static ref LOGLEVEL: LogLevel = LogLevel::from_env();
static ref LANG: String = lang_from_env();
static ref LANG: Lang = Lang::from_env();
static ref TRANSLATIONS: HashMap<String, String> = load_translations();
}
pub type Context<'a> = &'a HashMap<&'a str, &'a str>;
/// Lang configures how to deal with translations. It has three possible values:
/// None: translations return their own key name
/// Some(code): where code is the language code for the JSON translation file
/// JsonContext: debug output for Context as JSON
/// File(PathBuf): fixed translation file (instead of language code)
pub enum Lang {
None,
JsonContext,
Some(String),
File(PathBuf),
}
fn load_translations() -> HashMap<String, String> {
let folder = env::var("I18N").unwrap_or("./i18n/".to_string());
let mut path = PathBuf::from(folder);
if !path.is_dir() {
panic!("Could not find translations in {:?}", path);
impl Lang {
fn from_env() -> Lang {
let lang =
env::var("LANG").expect("$LANG not set in environment. Your machine is misconfigured!");
match lang.to_uppercase().as_str() {
"NONE" => Lang::None,
"JSON" => Lang::JsonContext,
"C" => {
// Special case: when no lang is specified, default to english
Lang::Some("en".to_string())
},
_ => {
let p = PathBuf::from(&lang);
if p.is_file() {
// LANG env variable contains a path to a full (JSON) file
Lang::File(p)
} else {
Lang::Some(lang[0..2].to_string())
}
}
}
}
path.push(format!("{}.json", *LANG));
}
if !path.is_file() {
panic!("Could not find translation file: {:?}", path);
pub type Context = HashMap<String, String>;
/// Finds the JSON translation files checking in order, relative to the program path:
/// - ../../../build/i18n
/// - ../../spec/i18n
/// - $HOME/.local/share/forgebuild/i18n
/// - /usr/share/forgebuild/i18n
/// If all of the above fails, but ../../spec/ folder exists (we are in a build.rs repo)
/// attempt to initialize the spec submodule which may have been forgotten on clone
fn find_translations() -> Option<PathBuf> {
    // argv[0] gives us the binary location; its parent is the search anchor
    let mut bindir = PathBuf::from(env::args().nth(0).expect("Argument 0 should contain the path to the program"));
    bindir.pop();
    let candidates = vec![
        bindir.join("../../../build/i18n"),
        bindir.join("../../spec/i18n"),
        // Effective UID (not $HOME) so setuid mode resolves the right profile
        get_user_by_uid(get_effective_uid()).expect("Failed to get info about $USER").home_dir().join(".local/share/forgebuild/i18n"),
        PathBuf::from("/usr/share/forgebuild/i18n"),
    ];
    // First existing directory wins
    if let Some(found) = candidates.into_iter().find(|dir| dir.is_dir()) {
        return Some(found);
    }
    // Maybe spec folder exists but hasn't been cloned
    if bindir.join("../../spec/").is_dir() {
        // TODO: Try to clone
        unimplemented!("TODO: The spec submodule has not been cloned. We should clone it here. In the meantime, you can do it manually from the build.rs repo using git submodule init && git submodule update");
    }
    None
}
fn load_translations_from_file(path: &Path) -> HashMap<String, String> {
match fs::read_to_string(&path) {
Ok(content) => {
//let trans: HashMap<String, String> = serde_json::from_str(&content).expect("Could not load translations");
//return trans
match serde_json::from_str(&content) {
Ok(trans) => trans,
Err(e) => panic!("JSON ERROR: {}", e)
Err(e) => panic!("JSON ERROR: {}", e),
}
},
}
Err(e) => {
panic!("IO ERROR: {}", e);
}
}
}
fn trans(key: &str) -> String {
match TRANSLATIONS.get(key) {
Some(t) => t.to_string(),
None => {
panic!("Unknown translation string in lang {}: {}", *LANG, key);
/// Loads the translation table according to the LANG setting:
/// a fixed file is read directly, a language code is resolved against the
/// discovered i18n directory, and the None/JsonContext modes need no table.
/// Panics when a requested translation file or folder cannot be found.
fn load_translations() -> HashMap<String, String> {
    match &*LANG {
        Lang::File(path) => load_translations_from_file(path),
        Lang::Some(lang) => {
            let i18ndir = match find_translations() {
                Some(dir) => dir,
                None => panic!("No translation folder found."),
            };
            let i18nfile = i18ndir.join(format!("{}.json", lang));
            if !i18nfile.is_file() {
                panic!("No such translation file: {:?}", i18nfile);
            }
            load_translations_from_file(&i18nfile)
        }
        // None / JsonContext: translations are never looked up
        _ => HashMap::new(),
    }
}
fn lang_from_env() -> String {
let lang = env::var("LANG").expect("$LANG not set in environment. Your machine is misconfigured!");
lang[0..2].to_string()
/// Translates a message key according to the LANG mode:
/// - File/Some: look the key up in the loaded table, panicking when missing
/// - JsonContext: always the empty string (context is serialized elsewhere)
/// - None: echo the key itself
fn trans(key: &str) -> String {
    match &*LANG {
        Lang::None => key.to_string(),
        Lang::JsonContext => String::new(),
        Lang::File(path) => TRANSLATIONS
            .get(key)
            .map(|t| t.to_string())
            .unwrap_or_else(|| {
                panic!("Missing translation for {} in translation file: {}", key, path.to_str().unwrap())
            }),
        Lang::Some(lang) => TRANSLATIONS
            .get(key)
            .map(|t| t.to_string())
            .unwrap_or_else(|| panic!("Missing translation for {} in lang {}", key, lang)),
    }
}
/// Translates a key and expands its context variables, except in
/// JsonContext mode where the context itself is dumped as JSON for debugging.
fn trans_context(key: &str, context: &Context) -> String {
    if let Lang::JsonContext = &*LANG {
        // Serialize the context to JSON for debugging
        serde_json::to_string(context).expect("Failed to serialize to JSON")
    } else {
        expand(&trans(key), context)
    }
}
struct LogLevel {
info: bool,
debug: bool,
error: bool
error: bool,
}
impl LogLevel {
@ -70,59 +172,62 @@ impl LogLevel {
let env_log = env::var("LOG").unwrap_or("info".to_string());
match env_log.to_lowercase().as_str() {
"info" => {},
"debug" => { debug = true; },
"error" => { info = false; }
"info" => {}
"debug" => {
debug = true;
}
"error" => {
info = false;
}
_ => {
// This happens before loglevel initialization
// so we can't use warn function
eprintln!("$LOG level is incorrect: {} (can be: debug, info, error", env_log);
eprintln!(
"$LOG level is incorrect: {} (can be: debug, info, error",
env_log
);
}
}
return LogLevel {
info,
debug,
error
}
return LogLevel { info, debug, error };
}
}
fn expand(msg: &str, vars: Option<Context>) -> String {
//let mut s = msg;
if vars.is_some() {
return vars.unwrap().iter().fold(msg.to_string(), |prev, (key, val)| {
prev.replace(key, val)
})
/// Expands variables from the vars Context. If the
/// context is empty, the string is returned untouched
fn expand(msg: &str, vars: &Context) -> String {
if vars.is_empty() {
return msg.to_string();
}
return msg.to_string();
return vars
.iter()
.fold(msg.to_string(), |prev, (key, val)| prev.replace(key, val));
}
pub fn info(msg: &str, vars: Option<Context>) {
#[allow(dead_code)]
pub fn info(msg: &str, vars: &Context) {
if LOGLEVEL.info {
let t_msg = expand(&trans(msg), vars);
println!("[git-build] {}", t_msg);
println!("{}{}", trans("info"), trans_context(msg, vars));
}
}
pub fn error(msg: &str, vars: Option<Context>) {
#[allow(dead_code)]
pub fn error(msg: &str, vars: &Context) {
if LOGLEVEL.error {
let t_msg = expand(&trans(msg), vars);
eprintln!("{}{}", trans("error"), t_msg);
eprintln!("{}{}", trans("error"), trans_context(msg, vars));
}
}
pub fn warn(msg: &str, vars: Option<Context>) {
#[allow(dead_code)]
pub fn warn(msg: &str, vars: &Context) {
if LOGLEVEL.error {
let t_msg = expand(&trans(msg), vars);
eprintln!("{}{}", trans("warning"), t_msg);
eprintln!("{}{}", trans("warning"), trans_context(msg, vars));
}
}
pub fn debug(msg: &str, vars: Option<Context>) {
#[allow(dead_code)]
pub fn debug(msg: &str, vars: &Context) {
if LOGLEVEL.debug {
let t_msg = expand(&trans(msg), vars);
println!("{}{}", trans("debug"), t_msg);
println!("{}{}", trans("debug"), trans_context(msg, vars));
}
}

View File

@ -1,47 +1,111 @@
use std::env;
use std::collections::HashMap;
use structopt::StructOpt;
// For UNIX extended metadata
use std::env::{set_current_dir as cd, set_var};
use std::process::exit;
mod log;
mod db;
mod dvcs;
mod cli;
mod db;
mod backend;
mod log;
mod task;
use log::Context;
use task::Select;
fn main() -> Result<(), std::io::Error> {
let mut context = Context::new();
let (cmd, basedir) = cli::Cli::build();
context.insert("$i18n_basedir".to_string(), basedir.to_str().unwrap().to_string());
let basedir_str = basedir.to_str().unwrap().to_string();
let home_dir = env::var("HOME").expect("$HOME not defined. WTF?");
let base_dir = format!("{}/.git-build", home_dir);
set_var("GITBUILDDIR", &basedir);
let cmd = cli::Cli::from_args();
if (cmd.force && cmd.inbox) || (cmd.inbox && cmd.inboxdir.is_some()) || (cmd.force && cmd.inboxdir.is_some()) {
println!("CONFLICTING COMMANDS: You can only run --inbox, --inbox-dir or --forge. These options cannot be combined");
exit(2);
}
let tasks = if cmd.tasks.is_empty() {
task::from_dir(&base_dir).expect("Could not load DB")
// Setup a filter for the tasks to load/run
let select = if cmd.inbox {
Select::Inbox(basedir.clone())
} else if let Some(inboxdir) = cmd.inboxdir {
Select::InboxDir(basedir.clone(), inboxdir)
} else if cmd.tasks.is_empty() {
log::info("no_task", &context);
Select::All(basedir.clone())
} else {
task::from_dir_and_list(&base_dir, cmd.tasks).expect("Could not load given tasks")
Select::List(basedir.clone(), cmd.tasks.clone())
};
for (task_name, task) in tasks.iter() {
let mut context = HashMap::new();
//context.insert("$i18n_task", task_name.to_str().expect("WTF"));
context.insert("$i18n_task", task_name.as_str());
log::debug("found_task", Some(&context));
if task.cloned == false {
// Maybe the task has a source we should clone?
if let Some(source) = &task.source {
if !task.dvcs.clone(source) {
context.insert("$i18n_source", &source);
log::error("clone_failed", Some(&context));
// Skip further processing
continue
}
}
// Otherwise, it's a sourceless task
continue
// Load requested tasks
let mut tasks = match select.apply(&context) {
Ok(t) => t,
Err(task::MissingTask(t)) => {
context.insert("$i18n_arg".to_string(), t);
log::error("unknown_arg", &context);
exit(1);
}
};
// Reorder tasks alphanumerically
tasks.sort_unstable_by_key(|t| t.name.clone());
// Remove duplicates, in case a task was called along
// the corresponding source URL (so we'd be tempted to call the task twice)
tasks.dedup_by_key(|t| t.name.clone());
for t in &tasks {
t.debug("found_task");
}
let (config_folder, ignored_tasks) = task::config(&basedir);
set_var("FORGEBUILDCONF", &config_folder);
context.insert("$i18n_config".to_string(), config_folder.to_str().unwrap().to_string());
log::info("config", &context);
for task in &tasks {
task.debug("start_proc");
if ignored_tasks.contains(&task.name) {
// Skip task which has CONFIG/task.ignore
continue;
}
task.info("process");
// Maybe the task has a source we should clone?
if let Some(repo) = &task.repo {
let source_dir = format!("{}/.{}", basedir_str, &task.name);
if task.cloned == false {
task.info("clone");
if !repo.download() {
task.error("clone_failed");
// Skip further processing
continue;
}
// New repo just cloned. Check for submodule updates
cd(&source_dir).expect("Failed to change working dir");
if task.subupdates {
let _had_subupdates = repo.subupdate();
}
// Checkout specific branch?
// TODO: To avoid submodule inconsistencies between branches, we should directly clone a specific branch
task.checkout();
task.run();
} else {
// So the cloned repo is already here maybe update?
// Let's say there was an update and run
//println!("Task {} already exists, run i t only if updates", task.name);
cd(&source_dir).expect("Failed to change working dir");
task.checkout();
task.update_and_run(&cmd.force);
//task.run();
}
} else {
// No source, chaneg working dir to basedir
cd(&basedir).expect("Failed to change working dir");
//println!("Taks {} doesn't have a source, run it", task.name);
task.run_once();
}
// So the cloned repo is here maybe update?
}
Ok(())
}

View File

@ -1,82 +1,392 @@
use std::path::PathBuf;
use std::ffi::OsString;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::process::Command;
//use crate::log;
use crate::dvcs;
use crate::db;
use crate::db::{Entry,is_executable};
use crate::db::{is_executable, read_extension, read_or_none};
use crate::backend::{Repo, backend};
use crate::log;
/// Defines how tasks are selected to be run
#[derive(Clone, Debug)]
pub enum Select {
/// Select all tasks (executable files) from a given basedir
All(PathBuf),
/// Select all tasks from a basedir that have a matching
/// TASK.inbox file in that same basedir
Inbox(PathBuf),
/// Select all tasks from a basedir with a matching TASK.inbox
/// in a separate inbox directory.
InboxDir(PathBuf, String),
/// Select all tasks from a basedir that are also in a list
/// (entries may be task names or source URLs, see from_dir_and_list)
List(PathBuf, Vec<String>),
}
impl Select {
/// Removes the inbox file for the given task
pub fn clean(&self, task: &str) {
match self {
Select::Inbox(basedir) => {
std::fs::remove_file(&basedir.join(&format!("{}.inbox", task))).unwrap();
},
Select::InboxDir(_basedir, inboxdir) => {
std::fs::remove_file(&PathBuf::from(inboxdir).join(&format!("{}.inbox", task))).unwrap()
},
_ => {},
}
}
/// Find tasks matched by a selection
pub fn apply(&self, context: &log::Context) -> Result<Vec<Task>, MissingTask> {
// TODO: Dedicated error type for specific IO errors:
// - missing basedir/select
// - permissions problem for basedir/select
match self {
// We load all executable entries from basedir
Select::All(basedir) => {
Ok(from_dir(basedir, self.clone(), context))
},
// We load all entries ending with .inbox from basedir
Select::Inbox(basedir) => {
let inbox_entries = basedir.read_dir().unwrap().filter_map(|f| {
if f.is_err() {
// Dismiss individual file errors
return None;
}
let f_string = f.unwrap().file_name().to_str().unwrap().to_string();
if ! f_string.ends_with(".inbox") || f_string.starts_with(".") {
// We're only looking for non-hidden *.inbox files
return None;
}
return Some(f_string.trim_end_matches(".inbox").to_string());
}).collect();
from_dir_and_list(basedir, inbox_entries, self.clone(), context)
},
// We load all entries ending with .inbox from inboxdir
Select::InboxDir(basedir, inboxdir) => {
let inbox_entries = PathBuf::from(inboxdir).read_dir().unwrap().filter_map(|f| {
if f.is_err() {
// Dismiss individual file errors
return None;
}
let f_string = f.unwrap().file_name().to_str().unwrap().to_string();
if ! f_string.ends_with(".inbox") || f_string.starts_with(".") {
// We're only looking for non-hidden *.inbox files
return None;
}
return Some(f_string.trim_end_matches(".inbox").to_string());
}).collect();
from_dir_and_list(basedir, inbox_entries, self.clone(), context)
},
// Load all entries from list
Select::List(basedir, list) => {
from_dir_and_list(basedir, list.clone(), self.clone(), context)
},
}
}
}
// TODO: Maybe all source/DVCS information should be moved to Repo
// so that task structure is simpler.
// NOTE(review): this span interleaves two revisions of the struct from a
// diff: `name: OsString`/`name: String` are duplicate field declarations,
// and the pre-refactor `source`/`dvcs`/`host` fields coexist with their
// replacements (`repo`, `hosts`). As written this cannot compile
// (field declared more than once); reconcile to the newer field set.
#[derive(Debug)]
pub struct Task {
// NOTE(review): pre-refactor field, superseded by `name: String` below.
pub name: OsString,
/// The filename for the task
pub name: String,
/// Full path to the task
pub bin: PathBuf,
/// Potentially, a source repository to track for updates
pub source: Option<String>,
// NOTE(review): pre-refactor field, superseded by `repo` below.
pub dvcs: dvcs::Backend,
/// The full Repo information
pub repo: Option<Repo>,
/// Variables for task config
pub config: HashMap<String, String>,
/// Potentially, a branch/commit to track
pub branch: Option<String>,
// NOTE(review): pre-refactor field, superseded by `hosts` below.
pub host: Option<String>,
/// List of hosts on which this task should run
pub hosts: Vec<String>,
/// Whether the source, if any, has been cloned already
pub cloned: bool,
/// Whether to track subrepository/submodule updates
pub subupdates: bool,
/// The context in which to store variables for translations
pub context: log::Context,
/// The selection context in which a task was created, so that running it can remove it from inbox
pub select: Select,
}
/// Returns the per-host configuration as a
/// `(settings directory, ignored task names)` tuple.
///
/// When `basedir/HOSTNAME` exists, every `NAME.ignore` file inside it
/// marks task NAME as ignored on this host. Otherwise the default
/// `basedir/config` directory is returned with an empty ignore list.
/// Panics if the host directory exists but cannot be read.
pub fn config(basedir: &Path) -> (PathBuf, Vec<String>) {
    // $HOSTNAME is a bashism: read $HOST, falling back to libc
    // (via the hostname crate) for the actual hostname.
    let hostname =
        std::env::var("HOST").unwrap_or_else(|_| hostname::get().unwrap().into_string().unwrap());
    let host_conf = basedir.join(hostname);
    if host_conf.is_dir() {
        let ignored = host_conf
            .read_dir()
            .unwrap()
            .filter_map(|entry| {
                // Skip unreadable entries and non-UTF8 names instead of panicking.
                let name = entry.ok()?.file_name().into_string().ok()?;
                // strip_suffix removes exactly one ".ignore" (trim_end_matches
                // would strip repeated suffixes) and filters non-matches.
                Some(name.strip_suffix(".ignore")?.to_string())
            })
            .collect();
        (host_conf, ignored)
    } else {
        // TODO: load .ignore in default config?
        (basedir.join("config"), Vec::new())
    }
}
impl Task {
// NOTE(review): diff residue — the head of the removed
// `from_entry(entry: &Entry)` constructor is interleaved here with its
// replacement `from_path`, and the `Task { ... }` literal below mixes
// initializers from both revisions. This span does not compile as-is;
// keep the `from_path` lines and drop the `from_entry` remnants.
pub fn from_entry(entry: &Entry) -> Task {
let source = entry.read_setting("source");
let cloned = source.clone().map_or(false, |_| {
let mut path = entry.base_dir.clone();
path.push(format!(".{}", entry.name.to_str().expect("WTF")));
path.is_dir()
});
Task {
name: entry.name.clone(),
bin: entry.path.clone(),
// Builds a Task from an executable file path; returns None when the path
// is not a runnable, visible task file or its dvcs setting is unrecognized.
pub fn from_path(path: &Path, select: Select, context: &log::Context) -> Option<Task> {
let name = path.file_name().unwrap().to_str().unwrap().to_string();
// We don't return a task if:
// - the path is not a file
// - the file is not executable (can't be run)
// - the file starts with . (hidden file)
if !path.is_file() || !is_executable(&path) || name.starts_with('.') {
return None;
}
let basedir = path.parent().unwrap(); // Calling a task in / (FS root) will panic
let source = read_extension(path, "source");
// Hidden clone directory (basedir/.NAME) for this task's source
let dest = source_dir_from_basedir(&basedir, &name);
let cloned = source.clone().map_or(false, |_| dest.is_dir());
let subupdates = read_extension(path, "subupdates").is_some();
let checkout = read_extension(path, "checkout");
// Copy the global context so we have a more local scope
let mut context = context.clone();
context.insert("$i18n_task".to_string(), name.clone());
if let Some(branch) = &checkout {
context.insert("$i18n_branch".to_string(), branch.to_string());
}
if let Some(source_url) = &source {
context.insert("$i18n_source".to_string(), source_url.clone());
}
let dvcs = if let Some(dvcs) = backend(read_extension(path, "dvcs")) { dvcs } else {
// TODO: forgebuild error message
eprintln!("Unrecognized dvcs for task {}, possible options are 'git' or 'mercurial'. Skipped", &name);
return None;
};
Some(Task {
name,
bin: path.to_path_buf(),
// None source = None repo
repo: source.as_ref().map(|s| {
Repo::new(
dvcs,
s,
&dest,
subupdates,
)
}),
source,
// NOTE(review): the `dvcs`, `branch: entry...` and `host` initializers
// below are pre-refactor remnants (`entry` is not in scope here).
dvcs: dvcs::from_setting(entry.read_setting("dvcs")),
config: HashMap::new(),
branch: entry.read_setting("branch"),
host: entry.read_setting("host"),
branch: checkout,
hosts: read_extension(path, "hosts").map_or(Vec::new(), |c| {
c.split("\n").map(|line| line.to_string()).collect()
}),
cloned,
subupdates: read_extension(path, "subupdates").is_some(),
context,
select,
})
}
/// Switches the task's repository to the configured branch/commit.
/// Does nothing unless both a branch and a repo are set.
pub fn checkout(&self) {
    if let (Some(branch), Some(repo)) = (&self.branch, &self.repo) {
        self.info("to_branch");
        self.debug("checkout");
        repo.checkout(branch);
    }
}
}
// Takes an already instanced database (ie Vec<Entry>)
// to turn into a dictionary of Tasks
// NOTE(review): diff residue — this is the head of the removed
// `from_entries` helper; its body is interleaved with `run_on_host`
// below and it has no matching closing brace here.
pub fn from_entries(db: Vec<Entry>) -> HashMap<String, Task> {
let mut res: HashMap<String, Task> = HashMap::new();
for entry in db {
let task = Task::from_entry(&entry);
res.insert(
task.name.clone().into_string().expect("Failed to convert"),
task
);
/// Tells whether this task should run on the current machine:
/// true when the task's host list is empty, or when it contains
/// the current hostname.
pub fn run_on_host(&self) -> bool {
    // An empty list means "run everywhere".
    if self.hosts.is_empty() {
        return true;
    }
    // $HOSTNAME env is a bashism, we need to call libc (through hostname crate)
    // to find out the actual hostname
    let current = std::env::var("HOST")
        .unwrap_or_else(|_| hostname::get().unwrap().into_string().unwrap());
    self.hosts.contains(&current)
}
// NOTE(review): diff residue — `return res;` is the tail of the removed
// `from_entries` helper above.
return res;
}
/// Returns a hashmap of tasks, or std::io::Error
/// Reads all entries in a directory
// NOTE(review): pre-refactor version, superseded by the `from_dir`
// free function at the end of this file.
pub fn from_dir(base_dir: &str) -> Result<HashMap<String, Task>, std::io::Error> {
Ok(from_entries(
db::from(base_dir, is_executable)?
))
}
/// Returns a hashmap of tasks, or std::io::Error
/// Reads entries in a given list from a directory, fails if a requested entry doesn't exist
/// (does not load the whole folder)
// NOTE(review): pre-refactor version, superseded by the `from_dir_and_list`
// free function at the end of this file; its body merges into
// `update_and_run` below.
pub fn from_dir_and_list(basedir: &str, list: Vec<String>) -> Result<HashMap<String, Task>, db::Error> {
let mut entries: HashMap<String, Task> = HashMap::new();
for item in list {
if let Some(entry) = db::entry(&basedir, is_executable, &item) {
entries.insert(item.clone(), Task::from_entry(&entry));
pub fn update_and_run(&self, force: &bool) {
if let Some(repo) = &self.repo {
if repo.update() {
self.run();
} else if *force {
self.debug("forcing");
self.run();
} else {
self.debug("no_update");
}
} else {
return Err(db::Error::EntryNotFound(item.clone()))
unreachable!("this function should never be called on a task whcih doesnt have a repo");
}
}
// NOTE(review): dangling return expression left over from the removed
// `from_dir_and_list` method above; drop once the diff residue is
// reconciled.
Ok(
entries
)
/// Runs the task through bash (passing the task name as first argument),
/// removes the task's inbox marker when the selection uses one, and
/// writes the command's stderr to a `TASK.log` file next to the task.
/// Does nothing when the task is not meant for this host.
pub fn run(&self) {
    if !self.run_on_host() {
        // TODO: Skip host debug?
        return;
    }
    self.info("run");
    // TODO: debug message for removing inbox
    self.select.clean(&self.name);
    let cmd_out = Command::new("bash") // TODO: no need to call bash?
        .arg(&self.bin)
        .arg(&self.name)
        .output()
        // unwrap_or_else avoids building the panic message on the happy
        // path (clippy::expect_fun_call); message text unchanged.
        .unwrap_or_else(|_| panic!("Failed to run {:?}", &self.bin));
    let log_path = self.bin.with_extension("log");
    // NOTE(review): only stderr is logged; stdout is discarded — confirm
    // this is intended.
    std::fs::write(&log_path, cmd_out.stderr)
        .unwrap_or_else(|_| panic!("Failed to write log to {:?}", &log_path));
}
/// Runs the task a single time, using a `TASK.done` marker file next to
/// the task binary to remember that it already ran.
pub fn run_once(&self) {
    if !self.run_on_host() {
        return;
    }
    let done_marker = self.bin.with_extension("done");
    if done_marker.exists() {
        // Already ran during a previous invocation: nothing to do.
        return;
    }
    self.run();
    std::fs::write(&done_marker, "").expect("Failed to register task as done");
}
#[allow(dead_code)]
/// Logs `message` at debug level with this task's translation context.
pub fn debug(&self, message: &str) {
log::debug(message, &self.context);
}
#[allow(dead_code)]
/// Logs `message` at info level with this task's translation context.
pub fn info(&self, message: &str) {
log::info(message, &self.context);
}
#[allow(dead_code)]
/// Logs `message` at warn level with this task's translation context.
pub fn warn(&self, message: &str) {
log::warn(message, &self.context);
}
#[allow(dead_code)]
/// Logs `message` at error level with this task's translation context.
pub fn error(&self, message: &str) {
log::error(message, &self.context);
}
}
/// Error returned when a requested task name (or source URL) matches no
/// task in the base directory; carries the unmatched request string.
pub struct MissingTask(pub String);
/// Contains a mapping of sources to their corresponding tasks
pub struct SourceSet {
// Maps a source URL to the names of the tasks that track it.
mapping: HashMap<String, Vec<String>>,
}
impl SourceSet {
    /// Loads a SourceSet from a basedir by scanning its `NAME.source`
    /// files (each containing the source URL for task NAME).
    ///
    /// # Errors
    /// Returns `std::io::Error` when the base directory cannot be read;
    /// individual unreadable entries are skipped.
    pub fn from(basedir: &Path) -> Result<SourceSet, std::io::Error> {
        let source_urls = basedir.read_dir()?.filter_map(|p| {
            let p = p.ok()?.path(); // Skip individual errors
            // Fixed: derive the task name from the *file name*, not the full
            // path — the previous full-path trim produced names like
            // "basedir/foo" which break the later basedir.join(name) lookup
            // whenever basedir is relative. Matches the inbox scan, which
            // also uses file_name().
            let file_name = p.file_name()?.to_str()?;
            let task_name = file_name.strip_suffix(".source")?.to_string();
            Some((task_name, read_or_none(&p).unwrap())) // (Task name, source URL)
        });
        let mut mapping: HashMap<String, Vec<String>> = HashMap::new();
        for (task, source) in source_urls {
            // Entry API: single lookup instead of get_mut + insert.
            mapping.entry(source).or_default().push(task);
        }
        Ok(SourceSet { mapping })
    }
    /// Returns the task names associated with a given source
    pub fn tasks_for(&self, source: &str) -> Option<Vec<String>> {
        self.mapping.get(source).cloned()
    }
}
/// Loads the tasks named in `list` from `basedir`. A list entry may be
/// either a task name or a task source URL (resolved through SourceSet).
/// Panics when basedir does not exist; errors when an entry matches
/// neither a task nor a source.
pub fn from_dir_and_list(basedir: &Path, list: Vec<String>, select: Select, context: &log::Context) -> Result<Vec<Task>, MissingTask> {
    // TODO: Write tests for permissions problems
    // If we're looking up specific tasks, maybe they're referenced by source
    // and not by name. SourceSet allows for a source->name mapping.
    let sourceset = SourceSet::from(basedir).unwrap();
    let mut tasks = Vec::new();
    for requested in list {
        match Task::from_path(&basedir.join(&requested), select.clone(), context) {
            Some(task) => tasks.push(task),
            // Not a task name: maybe it's a task URL?
            None => match sourceset.tasks_for(&requested) {
                Some(names) => {
                    // Hopefully safe unwrap (unless there's a source without a corresponding task?)
                    tasks.extend(names.iter().map(|t_name| {
                        Task::from_path(&basedir.join(&t_name), select.clone(), context).unwrap()
                    }));
                }
                None => return Err(MissingTask(requested)),
            },
        }
    }
    Ok(tasks)
}
/// Loads every executable, non-hidden task from the given base directory.
/// Panics when the directory itself cannot be read (fixed doc: the old
/// comment claimed an `std::io::Error` return, but the signature returns
/// a plain Vec). Unreadable individual entries are silently skipped.
pub fn from_dir(basedir: &Path, select: Select, context: &log::Context) -> Vec<Task> {
    basedir
        .read_dir()
        .unwrap()
        // `.ok()?` dismisses individual file errors, as before.
        .filter_map(|entry| Task::from_path(&entry.ok()?.path(), select.clone(), context))
        .collect()
}
/// Builds the hidden clone directory (`basedir/.TASK_NAME`) for a task.
/// Purely computes the path; does not check that the target exists.
pub fn source_dir_from_basedir(basedir: &Path, task_name: &str) -> PathBuf {
    let mut hidden = String::with_capacity(task_name.len() + 1);
    hidden.push('.');
    hidden.push_str(task_name);
    basedir.join(hidden)
}