Compare commits

..

5 Commits

14 changed files with 396 additions and 1101 deletions

6
.gitmodules vendored
View File

@@ -1,6 +0,0 @@
[submodule "i18n"]
path = i18n
url = https://tildegit.org/southerntofu/git-build-i18n
[submodule "spec"]
path = spec
url = https://tildegit.org/forge/build

63
Cargo.lock generated
View File

@@ -26,12 +26,6 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "clap"
version = "2.33.0"
@@ -48,16 +42,13 @@ dependencies = [
]
[[package]]
name = "forgebuild"
version = "0.2.0"
name = "git-build-rs"
version = "0.1.0"
dependencies = [
"glob",
"hostname",
"lazy_static",
"serde",
"serde_json",
"structopt",
"users",
]
[[package]]
@@ -84,17 +75,6 @@ dependencies = [
"libc",
]
[[package]]
name = "hostname"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867"
dependencies = [
"libc",
"match_cfg",
"winapi",
]
[[package]]
name = "itoa"
version = "0.4.5"
@@ -113,21 +93,6 @@ version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99e85c08494b21a9054e7fe1374a732aeadaff3980b6990b94bfd3a70f690005"
[[package]]
name = "log"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
dependencies = [
"cfg-if",
]
[[package]]
name = "match_cfg"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4"
[[package]]
name = "proc-macro-error"
version = "1.0.2"
@@ -183,20 +148,6 @@ name = "serde"
version = "1.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36df6ac6412072f67cf767ebbde4133a5b2e88e76dc6187fa7104cd16f783399"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e549e3abf4fb8621bd1609f11dfc9f5e50320802273b12f3811a67e6716ea6c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
@@ -288,16 +239,6 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
[[package]]
name = "users"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032"
dependencies = [
"libc",
"log",
]
[[package]]
name = "vec_map"
version = "0.8.1"

Cargo.toml
View File

@@ -1,6 +1,6 @@
[package]
name = "forgebuild"
version = "0.2.0"
name = "git-build-rs"
version = "0.1.0"
authors = ["southerntofu"]
edition = "2018"
@@ -9,12 +9,6 @@ edition = "2018"
[dependencies]
glob = "0.3"
lazy_static = "1.4"
# Detect hostname
hostname = "0.3"
# Translations
serde_json = "1.0"
structopt = "0.3"
# Debug Context for translations
serde = { version = "1.0", features = ["derive"] }
# Users information
users = "0.11"

README.md
View File

@@ -1,19 +0,0 @@
# git-build.rs
Rust reimplementation of git-build.sh
# Running
Requires the translation files to run. They are included in a submodule, so you need to clone with git's `--recursive` flag:
```
git clone --recursive https://tildegit.org/southerntofu/git-build.rs
```
```
LANG=fr LOG=debug I18N=~/git-build.sh/i18n/ cargo run --release
```
# Status
Experimental, not working (yet)

1
spec

@@ -1 +0,0 @@
Subproject commit 891b8aabbf4da2e25fc66df503b85266485b8020

src/backend/git.rs
View File

@@ -1,149 +0,0 @@
use std::boxed::Box;
use std::env::{set_current_dir as cd, current_dir as pwd};
use std::path::{Path, PathBuf};
use std::process::Command;
use crate::backend::{Backend, Repo};
#[derive(Debug, Clone, Copy)]
pub struct Git;
impl Backend for Git {
fn download(&self, source: &str, dest: &Path) -> bool {
Command::new("git")
.arg("clone")
.arg("--recursive")
.arg(source)
.arg(dest)
.output() // To suppress (capture) output
.expect("PROCESS ERROR!")
.status
.success()
}
fn submodules(&self) -> Vec<PathBuf> {
let cmd = Command::new("git")
.arg("config")
.arg("--file")
.arg(".gitmodules")
.arg("--get-regexp")
.arg("path")
.output()
.expect("WTF");
if cmd.status.success() {
// Command succeeded, split on the first space character to find the path to the submodule
let out = std::string::String::from_utf8(cmd.stdout).expect("Wrong unicode");
let mut results = Vec::new();
for line in out.lines() {
results.push(PathBuf::from(line.split_once(" ").expect("Misformed .gitmodules").1));
}
return results;
} else {
// No submodules found (even if .gitmodules exists but is empty)
return Vec::new();
}
}
fn branch(&self) -> String {
let output = Command::new("git")
.arg("rev-parse")
.arg("--abbrev-ref")
.arg("HEAD")
.output()
.expect("WTF");
if !output.status.success() {
panic!("Corrupted git repository???");
}
String::from_utf8(output.stdout).unwrap().trim().to_string()
}
fn checkout(&self, target: &str) -> bool {
let status = Command::new("git")
.arg("checkout")
.arg(target)
.status()
.expect("PROCESS ERROR!");
status.success()
}
fn has_updates(&self) -> bool {
// Refresh remote
if !Command::new("git")
.arg("fetch")
.arg("--quiet")
.arg("origin")
.status()
.expect("WTF")
.success()
{
// FAILED, no internet??
// TODO: This should be a forgebuild error message
eprintln!("Fetching updates failed");
return false;
}
let branch = self.branch();
if Command::new("git")
.arg("diff")
.arg("--quiet")
.arg(&format!("remotes/origin/{}", &branch))
.status()
.expect("WTF")
.success()
{
// Command succeeded, no updates
return false;
}
// Updates
return true;
}
fn update(&self) -> bool {
if Command::new("git")
.arg("pull")
.arg("--ff-only")
.arg("origin")
.arg(self.branch())
.status()
.expect("WTF")
.success()
{
// Main updates succeeded. If new submodules were added, initialize them
if !Command::new("git")
.arg("submodule")
.arg("update")
.arg("--init")
.arg("--recursive")
.status()
.expect("WTF")
.success()
{
// TODO: Should be forgebuild error message
eprintln!("Failed to initialize submodules which were added to the repo");
}
return true;
} else {
// Main updates failed
return false;
}
}
// TODO: Maybe this should be a generic implementation part of the trait?
fn subupdate(&self) -> bool {
let mut found_subupdates = false;
let prev_dir = pwd().expect("failed to pwd");
for subpath in self.submodules() {
// Move into the submodule
cd(subpath).unwrap();
// Generate a Repo instance for the submodule but don't enable subupdates
// Just in case someone would trigger an infinite loop by accident
let subrepo = Repo::new(Box::new(Git), "irrelevant", &pwd().expect("failed to pwd"), false);
if subrepo.backend.update() {
found_subupdates = true;
}
// Move back into main repo
cd(&prev_dir).unwrap();
}
found_subupdates
}
}

src/backend/mercurial.rs
View File

@@ -1,46 +0,0 @@
use std::boxed::Box;
use std::env::{set_current_dir as cd, current_dir as pwd};
use std::path::{Path, PathBuf};
use std::process::Command;
use crate::backend::{Backend, Repo};
#[derive(Debug, Clone, Copy)]
pub struct Mercurial;
impl Backend for Mercurial {
#[allow(dead_code)]
fn download(&self, _src: &str, _dest: &Path) -> bool {
unimplemented!();
}
#[allow(dead_code)]
fn submodules(&self) -> Vec<PathBuf> {
unimplemented!();
}
#[allow(dead_code)]
fn branch(&self) -> String {
unimplemented!();
}
#[allow(dead_code)]
fn checkout(&self, _branch: &str) -> bool {
unimplemented!();
}
#[allow(dead_code)]
fn has_updates(&self) -> bool {
unimplemented!();
}
#[allow(dead_code)]
fn update(&self) -> bool {
unimplemented!();
}
#[allow(dead_code)]
fn subupdate(&self) -> bool {
unimplemented!();
}
}

src/backend/mod.rs
View File

@@ -1,105 +0,0 @@
use std::boxed::Box;
use std::path::{Path, PathBuf};
mod git;
mod mercurial;
pub use git::Git;
pub use mercurial::Mercurial;
/// Generates Some(Backend) from an optional String, or defaults to Git,
/// or None when the backend is not recognized. If you want to implement your own
/// backend outside of this crate, you should override this method.
/// TODO: fallback backend should be customizable
pub fn backend(setting: Option<String>) -> Option<Box<dyn Backend>> {
// Git is the default setting until further notice
setting.map_or(Some(Box::new(Git)), |name| match name.as_ref() {
"git" => Some(Box::new(Git)),
"mercurial" => Some(Box::new(Mercurial)),
_ => None,
})
}
/// The trait implemented by DVCS backends (git, mercurial).
/// NOTE: This interface may evolve quickly. Currently no state is stored inside
/// the backend and it's assumed the current working dir is the repository we're operating on
pub trait Backend: std::fmt::Debug {
/// Clone a src repository and all related submodules to dest
fn download(&self, src: &str, dest: &Path) -> bool;
/// List all submodules in repo, as a list of PathBuf's
/// Returns an empty vector when no submodules are declared
fn submodules(&self) -> Vec<PathBuf>;
/// Returns the current branch/commit/tag tracked by the repo
fn branch(&self) -> String;
/// Checks out a specific branch/commit/tag to track on the repo
fn checkout(&self, branch: &str) -> bool;
/// Checks for updates in repo. Does not account for submodules
fn has_updates(&self) -> bool;
/// Applies updates on the main repo, and on submodules if subupdates is enabled. Returns true
/// if some updates were applied, false otherwise.
fn update(&self) -> bool;
/// Applies submodule updates. Returns true if some updates were applied, false otherwise.
fn subupdate(&self) -> bool;
}
#[derive(Debug)]
pub struct Repo {
pub backend: Box<dyn Backend>,
pub source: String,
pub dest: PathBuf,
pub subupdates: bool,
}
impl Repo {
pub fn new(backend: Box<dyn Backend>, source: &str, dest: &Path, subupdates: bool) -> Repo {
Repo {
backend,
source: source.to_string(),
dest: dest.to_path_buf(),
subupdates,
}
}
pub fn download(&self) -> bool {
let success = self.backend.download(&self.source, &self.dest);
// If the clone was successful and subupdates is enabled,
// List submodules and check them for updates
if success && self.subupdates {
self.subupdate();
}
return success;
}
pub fn checkout(&self, target: &str) -> bool {
self.backend.checkout(target)
}
pub fn has_updates(&self) -> bool {
self.backend.has_updates()
}
pub fn update(&self) -> bool {
// First try to run submodules updates
let had_subupdates = if self.subupdates { self.subupdate() } else { false };
// Now run main repo updates
if self.has_updates() {
self.backend.update() || had_subupdates
} else {
had_subupdates
}
}
pub fn subupdate(&self) -> bool {
self.backend.subupdate()
}
}
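For context, a minimal usage sketch of the `Backend`/`Repo` API defined in this (removed) module, assuming the `backend()` helper and `Repo::new()` signature shown above; the `sync_repo` wrapper is hypothetical and not part of the codebase:
```
use std::path::Path;
use crate::backend::{backend, Repo};

// Hypothetical helper: clone a source once, then apply updates
// (including submodules) on subsequent runs.
fn sync_repo(source: &str, dest: &Path) -> bool {
    // `None` falls back to the default Git backend.
    let dvcs = backend(None).expect("unrecognized backend");
    let repo = Repo::new(dvcs, source, dest, /* subupdates */ true);
    if dest.is_dir() {
        repo.update()   // already cloned: pull main repo and submodules
    } else {
        repo.download() // first run: recursive clone
    }
}
```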

src/cli.rs
View File

@@ -1,86 +1,12 @@
use std::path::PathBuf;
use structopt::StructOpt;
use structopt::clap::ErrorKind as ClapError;
// To get effective user id (EUID) so that setuid works
use users::{get_effective_uid,get_user_by_uid};
// For home directory
use users::os::unix::UserExt;
use crate::log;
use crate::log::Context;
#[derive(Debug, StructOpt)]
#[structopt(
name = "forgebuild",
about = "Update your repositories and trigger tasks"
)]
#[structopt(name = "git-build", about = "Update your repositories and trigger tasks")]
pub struct Cli {
#[structopt(short = "f", long = "force")]
pub force: bool,
#[structopt(short = "b", long = "basedir")]
pub basedir: Option<String>,
#[structopt(long = "inbox")]
pub inbox: bool,
#[structopt(long = "inbox-folder")]
pub inboxdir: Option<String>,
//#[structopt(def)]
pub tasks: Vec<String>,
}
impl Cli {
/// Builds the command-line from passed arguments
/// Returns the Cli instance alongside a PathBuf of the basedir
pub fn build() -> (Self, PathBuf) {
// We create a dedicated context so we don't have to pass it as argument
let mut context = Context::new();
// We don't want to use structopt's error handler for unknown argument as we have our own
// error message for that case (unknown_arg). So we use from_iter_safe() not from_args()
match Cli::from_iter_safe(std::env::args()) {
Ok(cmd) => {
// Parsing was successful, but we'd like to ensure requested basedir exists
match cmd.basedir().canonicalize() {
Ok(p) => {
(cmd, p)
},
Err(_) => {
// Missing basedir
context.insert("$i18n_basedir".to_string(), cmd.basedir().to_str().unwrap().to_string());
log::error("missing_basedir", &context);
std::process::exit(1);
}
}
},
Err(e) => {
match &e.kind {
ClapError::UnknownArgument => {
context.insert("$i18n_arg".to_string(), e.info.unwrap().first().unwrap().to_string());
log::error("unknown_arg", &context);
std::process::exit(1);
},
_ => e.exit()
}
}
}
}
/// Returns a PathBuf to the basedir. If it's a relative link,
/// it's not expanded here! Panics if no basedir is provided and $HOME isn't defined
pub fn basedir(&self) -> PathBuf {
if let Some(basedir) = &self.basedir {
// Returns an error when the path doesn't exist
PathBuf::from(basedir)
} else {
let owner = get_effective_uid();
let mut home_path = get_user_by_uid(owner).expect("Failed owner profile")
.home_dir().to_path_buf();
home_path.push(".forgebuild");
home_path
}
}
}
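A brief, non-authoritative sketch of how the CLI above is consumed, assuming the `Cli::build()` helper shown in this diff; the surrounding `run` function is illustrative only:
```
use crate::cli;

// Illustrative only: mirror what main() does with the parsed CLI.
fn run() {
    // Parses arguments via structopt, canonicalizes --basedir (defaulting
    // to ~/.forgebuild) and exits with a translated error when it is missing.
    let (cmd, basedir) = cli::Cli::build();
    if cmd.force {
        println!("forcing tasks under {:?}: {:?}", basedir, cmd.tasks);
    }
}
```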

134
src/db.rs
View File

@@ -1,6 +1,80 @@
use std::path::{PathBuf,Path};
use std::fs;
use std::os::unix::fs::MetadataExt;
use std::path::Path;
use std::ffi::OsString;
#[derive(Debug)]
pub enum Error {
EntryNotFound(String)
}
#[derive(Debug, Clone)]
pub struct Entry {
pub name: OsString,
pub path: PathBuf,
pub base_dir: PathBuf,
}
impl Entry {
pub fn new(path: PathBuf, name: OsString, base_dir: PathBuf) -> Entry {
Entry {
path,
name,
base_dir,
}
}
pub fn read_setting(&self, setting: &str) -> Option<String> {
let mut path = self.path.clone();
path.set_extension(setting);
read_or_none(&path)
}
}
/// Load single entry from folder
pub fn entry(basedir: &Path, filter: impl Fn(&Path) -> bool, name: &str) -> Option<Entry> {
let basepath = PathBuf::from(&basedir);
let path = basepath.clone().join(name);
if !path.exists() || !filter(&path) {
return None;
}
Some(Entry::new(
path.clone(),
path.file_name().expect("Failed to read file name").to_os_string(),
basepath
))
}
/// Loads entire database from folder
pub fn from(path_name: &Path, filter: impl Fn(&Path) -> bool) -> Result<Vec<Entry>, std::io::Error> {
let path = PathBuf::from(&path_name);
let mut entries = Vec::new();
for file in path.read_dir()? {
if file.is_err() {
// Dismiss individual errors (in case there's a permission problem)
// TODO: maybe print a warning? Actually a configurable error level
// (using enum variants) should be passed to the function to configure
// whether to continue silently or error out for individual files
continue;
}
let entry = file?.path();
if filter(&entry) {
entries.push(
Entry::new(
entry.clone(),
entry.file_name().expect("Failed to read file name").to_os_string(),
path.clone()
)
);
}
}
return Ok(entries);
}
/// Reads the file and strips whitespace (including newlines)
/// Useful for file-based key-value store
@@ -12,7 +86,7 @@ pub fn read_or_none(path: &Path) -> Option<String> {
Ok(content) => {
// Remove trailing space/newlines
Some(content.trim().to_string())
}
},
Err(e) => {
eprintln!("IO ERROR: {}", e);
None
@@ -20,14 +94,33 @@ pub fn read_or_none(path: &Path) -> Option<String> {
}
}
pub fn read_extension(path: &Path, setting: &str) -> Option<String> {
let mut path = path.to_path_buf();
path.set_extension(setting);
read_or_none(&path)
fn list_files(path: &Path) -> Vec<PathBuf> {
let mut res = Vec::new();
match fs::read_dir(path) {
Ok(files) => {
for r in files {
match r {
Ok(entry) => {
let file = entry.path();
if file.is_file() {
res.push(file);
}
},
Err(e) => {
eprintln!("IOERROR: {}", e)
}
}
}
},
Err(e) => {
eprintln!("IOERROR: {}", e);
}
}
return res;
}
/// Returns true when the file exists and has user exec
/// permission. Returns false otherwise.
// If the file doesn't exist or fails, return false
pub fn is_executable(path: &Path) -> bool {
// Do not match directories
if !path.is_file() {
@@ -42,10 +135,33 @@ pub fn is_executable(path: &Path) -> bool {
} else {
false
}
}
},
Err(e) => {
eprintln!("IO Error: {}", e);
false
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn can_load_db() {
let base_dir = "tests/success";
let entries = from(base_dir, is_executable).expect("Could not load db");
let entries_names: Vec<String> = entries.iter().map(|x| x.name.clone().into_string().unwrap()).collect();
let expected: Vec<String> = vec!("task", "symlink", "no_source").iter().map(|x| x.to_string()).collect();
assert_eq!(expected.len(), entries_names.len());
for entry in expected {
if !entries_names.contains(&entry.to_string()) {
panic!("Could not find {}", &entry);
}
}
}
}
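As an illustration of the file-per-setting store above, here is a hedged sketch built on the `db::entry` and `Entry::read_setting` helpers from this diff; `source_of` is a hypothetical wrapper:
```
use std::path::Path;
use crate::db::{entry, is_executable};

// Hypothetical wrapper: look up the executable task file "<basedir>/<task>"
// and read its clone URL from the sibling "<basedir>/<task>.source", if any.
fn source_of(basedir: &Path, task: &str) -> Option<String> {
    let e = entry(basedir, is_executable, task)?;
    e.read_setting("source") // trimmed content of the .source file
}
```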

58
src/dvcs.rs Normal file
View File

@@ -0,0 +1,58 @@
use std::process::Command;
pub fn from_setting(setting: Option<String>) -> Backend {
// Git is the default setting until further notice
setting.map_or(Backend::Git, |name| match name.as_ref() {
"git" => Backend::Git,
"mercurial" => Backend::Mercurial,
_ => Backend::Unknown(name.to_string())
})
}
#[derive(Debug)]
pub enum Backend {
Git,
Mercurial,
Unknown(String)
}
impl Backend {
pub fn clone(&self, source: &str) -> bool {
match self {
Backend::Git => {
let status = Command::new("git")
.arg("clone")
.arg("--recursive")
.arg(source)
.status().expect("PROCESS ERROR!");
status.success()
},
Backend::Mercurial => {
unreachable!("Unimplemented");
},
Backend::Unknown(name) => {
eprintln!("Unknown DVCS: {}", name);
false
}
}
}
pub fn update(&self) -> bool {
match self {
Backend::Git => {
let status = Command::new("git")
.arg("pull")
.status()
.expect("Failed to pull on git repo.");
status.success()
},
Backend::Mercurial => {
unreachable!("Unimplemented");
},
Backend::Unknown(name) => {
eprintln!("Unknown DVCS: {}", name);
false
}
}
}
}
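A minimal sketch of driving the new `dvcs::Backend` enum added above; the `fetch` helper is hypothetical and assumes the current working directory is where the clone or pull should happen:
```
use crate::dvcs;

// Hypothetical helper: clone on first run, `git pull` afterwards.
fn fetch(source: &str, already_cloned: bool) -> bool {
    let backend = dvcs::from_setting(Some("git".to_string()));
    if already_cloned {
        backend.update()       // git pull in the current directory
    } else {
        backend.clone(source)  // git clone --recursive <source>
    }
}
```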

src/log.rs
View File

@@ -1,166 +1,64 @@
use std::collections::HashMap;
use std::env;
use std::collections::HashMap;
use std::path::PathBuf;
use std::fs;
use std::path::{Path, PathBuf};
// To get effective user id (EUID) so that setuid works
use users::{get_effective_uid,get_user_by_uid};
// For home directory
use users::os::unix::UserExt;
use lazy_static::lazy_static;
lazy_static! {
lazy_static!{
static ref LOGLEVEL: LogLevel = LogLevel::from_env();
static ref LANG: Lang = Lang::from_env();
static ref LANG: String = lang_from_env();
static ref TRANSLATIONS: HashMap<String, String> = load_translations();
}
/// Lang configures how to deal with translations. It has four possible values:
/// None: translations return their own key name
/// Some(code): where code is the language code for the JSON translation file
/// JsonContext: debug output for Context as JSON
/// File(PathBuf): fixed translation file (instead of language code)
pub enum Lang {
None,
JsonContext,
Some(String),
File(PathBuf),
}
pub type Context<'a> = &'a HashMap<&'a str, &'a str>;
impl Lang {
fn from_env() -> Lang {
let lang =
env::var("LANG").expect("$LANG not set in environment. Your machine is misconfigured!");
match lang.to_uppercase().as_str() {
"NONE" => Lang::None,
"JSON" => Lang::JsonContext,
"C" => {
// Special case: when no lang is specified, default to english
Lang::Some("en".to_string())
},
_ => {
let p = PathBuf::from(&lang);
if p.is_file() {
// LANG env variable contains a path to a full (JSON) file
Lang::File(p)
} else {
Lang::Some(lang[0..2].to_string())
}
}
}
fn load_translations() -> HashMap<String, String> {
let folder = env::var("I18N").unwrap_or("./i18n/".to_string());
let mut path = PathBuf::from(folder);
if !path.is_dir() {
panic!("Could not find translations in {:?}", path);
}
}
path.push(format!("{}.json", *LANG));
pub type Context = HashMap<String, String>;
/// Finds the JSON translation files checking in order, relative to the program path:
/// - ../../../build/i18n
/// - ../../spec/i18n
/// - $HOME/.local/share/forgebuild/i18n
/// - /usr/share/forgebuild/i18n
/// If all of the above fail but the ../../spec/ folder exists (we are in a build.rs repo),
/// attempt to initialize the spec submodule, which may have been forgotten on clone
fn find_translations() -> Option<PathBuf> {
let mut bindir = PathBuf::from(env::args().nth(0).expect("Argument 0 should contain the path to the program"));
bindir.pop();
let options = [
bindir.join("../../../build/i18n"),
bindir.join("../../spec/i18n"),
get_user_by_uid(get_effective_uid()).expect("Failed to get info about $USER").home_dir().join(".local/share/forgebuild/i18n"),
PathBuf::from("/usr/share/forgebuild/i18n"),
];
for entry in options {
if entry.is_dir() {
// We found a matching entry
return Some(entry);
}
}
// Maybe spec folder exists but hasn't been cloned
if bindir.join("../../spec/").is_dir() {
// TODO: Try to clone
unimplemented!("TODO: The spec submodule has not been cloned. We should clone it here. In the meantime, you can do it manually from the build.rs repo using git submodule init && git submodule update");
if !path.is_file() {
panic!("Could not find translation file: {:?}", path);
}
return None;
}
fn load_translations_from_file(path: &Path) -> HashMap<String, String> {
match fs::read_to_string(&path) {
Ok(content) => {
//let trans: HashMap<String, String> = serde_json::from_str(&content).expect("Could not load translations");
//return trans
match serde_json::from_str(&content) {
Ok(trans) => trans,
Err(e) => panic!("JSON ERROR: {}", e),
Err(e) => panic!("JSON ERROR: {}", e)
}
}
},
Err(e) => {
panic!("IO ERROR: {}", e);
}
}
}
fn load_translations() -> HashMap<String, String> {
match &*LANG {
Lang::File(path) => {
load_translations_from_file(&path)
},
Lang::Some(lang) => {
if let Some(i18ndir) = find_translations() {
let i18nfile = i18ndir.join(format!("{}.json", lang));
if !i18nfile.is_file() {
panic!("No such translation file: {:?}", i18nfile);
}
return load_translations_from_file(&i18nfile)
} else {
panic!("No translation folder found.");
}
},
_ => {
HashMap::new()
fn trans(key: &str) -> String {
match TRANSLATIONS.get(key) {
Some(t) => t.to_string(),
None => {
panic!("Unknown translation string in lang {}: {}", *LANG, key);
}
}
}
fn trans(key: &str) -> String {
match &*LANG {
Lang::File(path) => {
if let Some(t) = TRANSLATIONS.get(key) {
t.to_string()
} else {
panic!("Missing translation for {} in translation file: {}", key, path.to_str().unwrap())
}
},
Lang::Some(lang) => {
if let Some(t) = TRANSLATIONS.get(key) {
t.to_string()
} else {
panic!("Missing translation for {} in lang {}", key, lang)
}
},
Lang::JsonContext => String::new(),
Lang::None => key.to_string(),
}
fn lang_from_env() -> String {
let lang = env::var("LANG").expect("$LANG not set in environment. Your machine is misconfigured!");
lang[0..2].to_string()
}
fn trans_context(key: &str, context: &Context) -> String {
match &*LANG {
Lang::JsonContext => {
// Serialize the context to JSON for debugging
serde_json::to_string(context).expect("Failed to serialize to JSON")
},
_ => expand(&trans(key), context)
}
}
struct LogLevel {
info: bool,
debug: bool,
error: bool,
error: bool
}
impl LogLevel {
@@ -172,62 +70,59 @@ impl LogLevel {
let env_log = env::var("LOG").unwrap_or("info".to_string());
match env_log.to_lowercase().as_str() {
"info" => {}
"debug" => {
debug = true;
}
"error" => {
info = false;
}
"info" => {},
"debug" => { debug = true; },
"error" => { info = false; }
_ => {
// This happens before loglevel initialization
// so we can't use warn function
eprintln!(
"$LOG level is incorrect: {} (can be: debug, info, error",
env_log
);
eprintln!("$LOG level is incorrect: {} (can be: debug, info, error", env_log);
}
}
return LogLevel { info, debug, error };
return LogLevel {
info,
debug,
error
}
}
}
/// Expands variables from the vars Context. If the
/// context is empty, the string is returned untouched
fn expand(msg: &str, vars: &Context) -> String {
if vars.is_empty() {
return msg.to_string();
fn expand(msg: &str, vars: Option<Context>) -> String {
//let mut s = msg;
if vars.is_some() {
return vars.unwrap().iter().fold(msg.to_string(), |prev, (key, val)| {
prev.replace(key, val)
})
}
return vars
.iter()
.fold(msg.to_string(), |prev, (key, val)| prev.replace(key, val));
return msg.to_string();
}
#[allow(dead_code)]
pub fn info(msg: &str, vars: &Context) {
pub fn info(msg: &str, vars: Option<Context>) {
if LOGLEVEL.info {
println!("{}{}", trans("info"), trans_context(msg, vars));
let t_msg = expand(&trans(msg), vars);
println!("[git-build] {}", t_msg);
}
}
#[allow(dead_code)]
pub fn error(msg: &str, vars: &Context) {
pub fn error(msg: &str, vars: Option<Context>) {
if LOGLEVEL.error {
eprintln!("{}{}", trans("error"), trans_context(msg, vars));
let t_msg = expand(&trans(msg), vars);
eprintln!("{}{}", trans("error"), t_msg);
}
}
#[allow(dead_code)]
pub fn warn(msg: &str, vars: &Context) {
pub fn warn(msg: &str, vars: Option<Context>) {
if LOGLEVEL.error {
eprintln!("{}{}", trans("warning"), trans_context(msg, vars));
let t_msg = expand(&trans(msg), vars);
eprintln!("{}{}", trans("warning"), t_msg);
}
}
#[allow(dead_code)]
pub fn debug(msg: &str, vars: &Context) {
pub fn debug(msg: &str, vars: Option<Context>) {
if LOGLEVEL.debug {
println!("{}{}", trans("debug"), trans_context(msg, vars));
let t_msg = expand(&trans(msg), vars);
println!("{}{}", trans("debug"), t_msg);
}
}
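To make the placeholder expansion above concrete, a small self-contained example of the fold-and-replace behaviour of `expand()`; `expand_demo` is illustrative only and does not call the module:
```
use std::collections::HashMap;

// Same substitution strategy as expand(): fold over the context map,
// replacing each "$i18n_*" key with its value in the translated string.
fn expand_demo() -> String {
    let mut vars = HashMap::new();
    vars.insert("$i18n_task".to_string(), "website".to_string());
    let msg = "found task $i18n_task";
    vars.iter()
        .fold(msg.to_string(), |prev, (key, val)| prev.replace(key, val))
    // => "found task website"
}
```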

src/main.rs
View File

@@ -1,111 +1,112 @@
use std::env::{set_current_dir as cd, set_var};
use std::process::exit;
use std::env;
use std::collections::HashMap;
use structopt::StructOpt;
use std::fs::{DirBuilder};
use std::path::{Path,PathBuf};
// For UNIX extended metadata
mod cli;
mod db;
mod backend;
mod log;
mod db;
mod dvcs;
mod cli;
mod task;
use log::Context;
use task::Select;
fn main() -> Result<(), std::io::Error> {
let mut context = Context::new();
let (cmd, basedir) = cli::Cli::build();
context.insert("$i18n_basedir".to_string(), basedir.to_str().unwrap().to_string());
let basedir_str = basedir.to_str().unwrap().to_string();
set_var("GITBUILDDIR", &basedir);
let default_folder;
if (cmd.force && cmd.inbox) || (cmd.inbox && cmd.inboxdir.is_some()) || (cmd.force && cmd.inboxdir.is_some()) {
println!("CONFLICTING COMMANDS: You can only run --inbox, --inbox-dir or --forge. These options cannot be combined");
exit(2);
}
// Setup a filter for the tasks to load/run
let select = if cmd.inbox {
Select::Inbox(basedir.clone())
} else if let Some(inboxdir) = cmd.inboxdir {
Select::InboxDir(basedir.clone(), inboxdir)
} else if cmd.tasks.is_empty() {
log::info("no_task", &context);
Select::All(basedir.clone())
} else {
Select::List(basedir.clone(), cmd.tasks.clone())
};
// Load requested tasks
let mut tasks = match select.apply(&context) {
Ok(t) => t,
Err(task::MissingTask(t)) => {
context.insert("$i18n_arg".to_string(), t);
log::error("unknown_arg", &context);
exit(1);
// Check if we defined a given folder in which to find the tasks
let base_dir = match env::var_os("GITBUILDDIR") {
Some(val) => {
default_folder = false;
PathBuf::from(val)
},
None => {
default_folder = true;
let home_dir = env::var("HOME").expect("$HOME not defined. WTF?");
PathBuf::from(&format!("{}/.git-build", home_dir).clone())
}
};
// Reorder tasks alphanumerically
tasks.sort_unstable_by_key(|t| t.name.clone());
// Remove duplicates, in case a task was requested alongside
// its corresponding source URL (so we'd be tempted to call the task twice)
tasks.dedup_by_key(|t| t.name.clone());
for t in &tasks {
t.debug("found_task");
}
let (config_folder, ignored_tasks) = task::config(&basedir);
set_var("FORGEBUILDCONF", &config_folder);
context.insert("$i18n_config".to_string(), config_folder.to_str().unwrap().to_string());
log::info("config", &context);
for task in &tasks {
task.debug("start_proc");
if ignored_tasks.contains(&task.name) {
// Skip task which has CONFIG/task.ignore
continue;
}
task.info("process");
// Maybe the task has a source we should clone?
if let Some(repo) = &task.repo {
let source_dir = format!("{}/.{}", basedir_str, &task.name);
if task.cloned == false {
task.info("clone");
if !repo.download() {
task.error("clone_failed");
// Skip further processing
continue;
}
// New repo just cloned. Check for submodule updates
cd(&source_dir).expect("Failed to change working dir");
if task.subupdates {
let _had_subupdates = repo.subupdate();
}
// Checkout specific branch?
// TODO: To avoid submodule inconsistencies between branches, we should directly clone a specific branch
task.checkout();
task.run();
} else {
// So the cloned repo is already here maybe update?
// Let's say there was an update and run
//println!("Task {} already exists, run i t only if updates", task.name);
cd(&source_dir).expect("Failed to change working dir");
task.checkout();
task.update_and_run(&cmd.force);
//task.run();
}
// If we're not using a GITBUILDDIR folder,
// we're going to check whether a .git-build folder already exists, and create one otherwise.
if !base_dir.exists() {
if default_folder == false {
let mut context = HashMap::new();
context.insert("$i18n_folder", base_dir.to_str().unwrap());
log::error("no_folder", Some(&context));
return Err(std::io::Error::new(std::io::ErrorKind::NotFound, "Folder not found."));
} else {
// No source, change working dir to basedir
cd(&basedir).expect("Failed to change working dir");
//println!("Taks {} doesn't have a source, run it", task.name);
task.run_once();
log::info("create_git_build_folder", Some(&HashMap::new()));
DirBuilder::new()
.recursive(true)
.create(&base_dir).unwrap();
}
}
let cmd = cli::Cli::from_args();
let tasks = if cmd.tasks.is_empty() {
task::from_dir(&base_dir).expect("Could not load DB")
} else {
task::from_dir_and_list(&base_dir, cmd.tasks).expect("Could not load given tasks")
};
// Change directory to the base_dir
// assert!(env::set_current_dir(&base_dir).is_ok());
// println!("Successfully changed working directory to {}!", base_dir.display());
for (task_name, task) in tasks.iter() {
let mut context = HashMap::new();
//context.insert("$i18n_task", task_name.to_str().expect("WTF"));
context.insert("$i18n_task", task_name.as_str());
log::debug("found_task", Some(&context));
if task.cloned == false {
// Maybe the task has a source we should clone?
if let Some(source) = &task.source {
// If we're not using the default folder, change it
let curr_dirr = env::current_dir().unwrap(); // TODO: Might fail because of a lack of permission to read current dir.
env::set_current_dir(&base_dir).unwrap();
let mut context = HashMap::new();
context.insert("$i18n_folder", base_dir.to_str().unwrap());
log::info("change_folder", Some(&context));
if !task.dvcs.clone(source) {
context.insert("$i18n_source", &source);
log::error("clone_failed", Some(&context));
// Change back to the parent folder.
// TODO: refactor this.
env::set_current_dir(curr_dirr).unwrap();
let mut context = HashMap::new();
context.insert("$i18n_folder", curr_dirr.to_str().unwrap().clone());
log::info("change_folder", Some(&context));
// Skip further processing
continue
}
// Change back to the parent folder.
// TODO: refactor this.
env::set_current_dir(curr_dirr).unwrap();
let mut context = HashMap::new();
context.insert("$i18n_folder", curr_dirr.clone().to_str().unwrap());
log::info("change_folder", Some(&context));
// TODO: in case the clone fails
}
// Otherwise, it's a sourceless task
continue
}
// The repo has been cloned already, we update it.
task.dvcs.update();
}
Ok(())
}

src/task.rs
View File

@@ -1,392 +1,82 @@
use std::path::{PathBuf, Path};
use std::ffi::OsString;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::process::Command;
use crate::db::{is_executable, read_extension, read_or_none};
use crate::backend::{Repo, backend};
use crate::log;
/// Defines how tasks are selected to be run
#[derive(Clone, Debug)]
pub enum Select {
/// Select all tasks (executable files) from a given basedir
All(PathBuf),
/// Select all tasks with a matching TASK.inbox from a basedir
Inbox(PathBuf),
/// Select all tasks from a basedir with a matching TASK.inbox
/// in a separate inbox directory.
InboxDir(PathBuf, String),
/// Select all tasks from a basedir that are also in a list
List(PathBuf, Vec<String>),
}
impl Select {
/// Removes the inbox file for the given task
pub fn clean(&self, task: &str) {
match self {
Select::Inbox(basedir) => {
std::fs::remove_file(&basedir.join(&format!("{}.inbox", task))).unwrap();
},
Select::InboxDir(_basedir, inboxdir) => {
std::fs::remove_file(&PathBuf::from(inboxdir).join(&format!("{}.inbox", task))).unwrap()
},
_ => {},
}
}
/// Find tasks matched by a selection
pub fn apply(&self, context: &log::Context) -> Result<Vec<Task>, MissingTask> {
// TODO: Dedicated error type for specific IO errors:
// - missing basedir/select
// - permissions problem for basedir/select
match self {
// We load all executable entries from basedir
Select::All(basedir) => {
Ok(from_dir(basedir, self.clone(), context))
},
// We load all entries ending with .inbox from basedir
Select::Inbox(basedir) => {
let inbox_entries = basedir.read_dir().unwrap().filter_map(|f| {
if f.is_err() {
// Dismiss individual file errors
return None;
}
let f_string = f.unwrap().file_name().to_str().unwrap().to_string();
if ! f_string.ends_with(".inbox") || f_string.starts_with(".") {
// We're only looking for non-hidden *.inbox files
return None;
}
return Some(f_string.trim_end_matches(".inbox").to_string());
}).collect();
from_dir_and_list(basedir, inbox_entries, self.clone(), context)
},
// We load all entries ending with .inbox from inboxdir
Select::InboxDir(basedir, inboxdir) => {
let inbox_entries = PathBuf::from(inboxdir).read_dir().unwrap().filter_map(|f| {
if f.is_err() {
// Dismiss individual file errors
return None;
}
let f_string = f.unwrap().file_name().to_str().unwrap().to_string();
if ! f_string.ends_with(".inbox") || f_string.starts_with(".") {
// We're only looking for non-hidden *.inbox files
return None;
}
return Some(f_string.trim_end_matches(".inbox").to_string());
}).collect();
from_dir_and_list(basedir, inbox_entries, self.clone(), context)
},
// Load all entries from list
Select::List(basedir, list) => {
from_dir_and_list(basedir, list.clone(), self.clone(), context)
},
}
}
}
// TODO: Maybe all source/DVCS information should be moved to Repo
// so that task structure is simpler.
//use crate::log;
use crate::dvcs;
use crate::db;
use crate::db::{Entry,is_executable};
#[derive(Debug)]
pub struct Task {
/// The filename for the task
pub name: String,
/// Full path to the task
pub name: OsString,
pub bin: PathBuf,
/// Potentially, a source repository to track for updates
pub source: Option<String>,
/// The full Repo information
pub repo: Option<Repo>,
/// Variables for task config
pub dvcs: dvcs::Backend,
pub config: HashMap<String, String>,
/// Potentially, a branch/commit to track
pub branch: Option<String>,
/// List of hosts on which this task should run
pub hosts: Vec<String>,
/// Whether the source, if any, has been cloned already
pub host: Option<String>,
pub cloned: bool,
/// Whether to track subrepository/submodule updates
pub subupdates: bool,
/// The context in which to store variables for translations
pub context: log::Context,
/// The selection context in which a task was created, so that running it can remove it from inbox
pub select: Select,
}
/// config returns an option of (settings directory, ignored tasks) as
/// (PathBuf, Vec<String>)
pub fn config(basedir: &Path) -> (PathBuf, Vec<String>) {
let hostname =
std::env::var("HOST").unwrap_or_else(|_| hostname::get().unwrap().into_string().unwrap());
let path = basedir.join(hostname);
if path.is_dir() {
let ignored = path
.read_dir()
.unwrap()
.filter_map(|x| {
if x.is_err() {
return None;
}
let name = x.unwrap().file_name().into_string().unwrap();
if name.ends_with(".ignore") {
return Some(name.trim_end_matches(".ignore").to_string());
}
return None;
})
.collect();
(path, ignored)
} else {
// TODO: load .ignore in default config?
(basedir.join("config"), Vec::new())
}
}
impl Task {
pub fn from_path(path: &Path, select: Select, context: &log::Context) -> Option<Task> {
let name = path.file_name().unwrap().to_str().unwrap().to_string();
// We don't return a task if:
// - the path is not a file
// - the file is not executable (can't be run)
// - the file starts with . (hidden file)
if !path.is_file() || !is_executable(&path) || name.starts_with('.') {
return None;
}
let basedir = path.parent().unwrap(); // Calling a task in / (FS root) will panic
let source = read_extension(path, "source");
let dest = source_dir_from_basedir(&basedir, &name);
let cloned = source.clone().map_or(false, |_| dest.is_dir());
let subupdates = read_extension(path, "subupdates").is_some();
let checkout = read_extension(path, "checkout");
// Copy the global context so we have a more local scope
let mut context = context.clone();
context.insert("$i18n_task".to_string(), name.clone());
if let Some(branch) = &checkout {
context.insert("$i18n_branch".to_string(), branch.to_string());
}
if let Some(source_url) = &source {
context.insert("$i18n_source".to_string(), source_url.clone());
}
let dvcs = if let Some(dvcs) = backend(read_extension(path, "dvcs")) { dvcs } else {
// TODO: forgebuild error message
eprintln!("Unrecognized dvcs for task {}, possible options are 'git' or 'mercurial'. Skipped", &name);
return None;
};
Some(Task {
name,
bin: path.to_path_buf(),
// None source = None repo
repo: source.as_ref().map(|s| {
Repo::new(
dvcs,
s,
&dest,
subupdates,
)
}),
source,
config: HashMap::new(),
branch: checkout,
hosts: read_extension(path, "hosts").map_or(Vec::new(), |c| {
c.split("\n").map(|line| line.to_string()).collect()
}),
cloned,
subupdates: read_extension(path, "subupdates").is_some(),
context,
select,
})
}
pub fn checkout(&self) {
if let Some(branch) = &self.branch {
if let Some(repo) = &self.repo {
self.info("to_branch");
self.debug("checkout");
repo.checkout(branch);
}
}
}
pub fn run_on_host(&self) -> bool {
if self.hosts.len() == 0 {
return true;
}
// $HOSTNAME env is a bashism, we need to call libc (through hostname crate)
// to find out the actual hostname
let hostname = std::env::var("HOST")
.unwrap_or_else(|_| hostname::get().unwrap().into_string().unwrap());
if self.hosts.contains(&hostname) {
return true;
}
return false;
}
pub fn update_and_run(&self, force: &bool) {
if let Some(repo) = &self.repo {
if repo.update() {
self.run();
} else if *force {
self.debug("forcing");
self.run();
} else {
self.debug("no_update");
}
} else {
unreachable!("this function should never be called on a task whcih doesnt have a repo");
}
}
pub fn run(&self) {
if !self.run_on_host() {
// TODO: Skip host debug?
return;
}
self.info("run");
// TODO: debug message for removing inbox
self.select.clean(&self.name);
let cmd_out = Command::new("bash") // TODO: no need to call bash?
.arg(&self.bin)
.arg(&self.name)
.output()
.expect(&format!("Failed to run {:?}", &self.bin));
let mut log_path = self.bin.clone();
log_path.set_extension("log");
std::fs::write(&log_path, cmd_out.stderr)
.expect(&format!("Failed to write log to {:?}", &log_path));
}
pub fn run_once(&self) {
if !self.run_on_host() {
return;
}
let mut done_path = self.bin.clone();
done_path.set_extension("done");
if !done_path.exists() {
self.run();
std::fs::write(&done_path, "").expect("Failed to register task as done");
}
}
#[allow(dead_code)]
pub fn debug(&self, message: &str) {
log::debug(message, &self.context);
}
#[allow(dead_code)]
pub fn info(&self, message: &str) {
log::info(message, &self.context);
}
#[allow(dead_code)]
pub fn warn(&self, message: &str) {
log::warn(message, &self.context);
}
#[allow(dead_code)]
pub fn error(&self, message: &str) {
log::error(message, &self.context);
}
}
pub struct MissingTask(pub String);
/// Contains a mapping of sources to their corresponding tasks
pub struct SourceSet {
mapping: HashMap<String, Vec<String>>,
}
impl SourceSet {
/// Loads a SourceSet from a basedir
pub fn from(basedir: &Path) -> Result<SourceSet, std::io::Error> {
let source_urls = basedir.read_dir()?.filter_map(|p| {
if p.is_err() { return None; } // Skip individual errors
let p = p.unwrap().path();
let path_str = p.to_str().unwrap();
if !path_str.ends_with(".source") {
// Filter out non-source files
return None;
}
return Some((
path_str.trim_end_matches(".source").to_string(), // Task name
read_or_none(&p).unwrap() // Source URL
));
pub fn from_entry(entry: &Entry) -> Task {
let source = entry.read_setting("source");
let cloned = source.clone().map_or(false, |_| {
let mut path = entry.base_dir.clone();
path.push(format!(".{}", entry.name.to_str().expect("WTF")));
path.is_dir()
});
Task {
name: entry.name.clone(),
bin: entry.path.clone(),
source,
dvcs: dvcs::from_setting(entry.read_setting("dvcs")),
config: HashMap::new(),
branch: entry.read_setting("branch"),
host: entry.read_setting("host"),
cloned,
let mut sources_map: HashMap<String, Vec<String>> = HashMap::new();
for (task, source) in source_urls {
if let Some(list) = sources_map.get_mut(&source) {
list.push(task.to_string());
} else {
sources_map.insert(source.clone(), vec!(task.to_string()));
}
}
Ok(SourceSet {
mapping: sources_map
})
}
/// Returns the task names associated with a given source
pub fn tasks_for(&self, source: &str) -> Option<Vec<String>> {
self.mapping.get(source).map(|x| x.clone())
}
}
/// Loads a task list from a given base directory, taking only tasks that are in requested list.
/// Given tasks can be either a task name or a task URL. This function will panic if the basedir
/// does not exist, or error if a requested task/source does not exist.
pub fn from_dir_and_list(basedir: &Path, list: Vec<String>, select: Select, context: &log::Context) -> Result<Vec<Task>, MissingTask> {
// TODO: Write tests for permissions problems
// Takes an already-instantiated database (i.e. Vec<Entry>)
// to turn into a dictionary of Tasks
pub fn from_entries(db: Vec<Entry>) -> HashMap<String, Task> {
let mut res: HashMap<String, Task> = HashMap::new();
for entry in db {
let task = Task::from_entry(&entry);
res.insert(
task.name.clone().into_string().expect("Failed to convert"),
task
);
}
return res;
}
// If we're looking up specific tasks, maybe they're referenced by source
// and not by name. SourceSet allows for a source->name mapping.
let sourceset = SourceSet::from(basedir).unwrap();
let mut tasks = Vec::new();
for t in list {
if let Some(task) = Task::from_path(&basedir.join(&t), select.clone(), context) {
tasks.push(task);
/// Returns a hashmap of tasks, or std::io::Error
/// Reads all entries in a directory
pub fn from_dir(base_dir: &Path) -> Result<HashMap<String, Task>, std::io::Error> {
Ok(from_entries(
db::from(base_dir, is_executable)?
))
}
/// Returns a hashmap of tasks, or std::io::Error
/// Reads entries in a given list from a directory, fails if a requested entry doesn't exist
/// (does not load the whole folder)
pub fn from_dir_and_list(basedir: &Path, list: Vec<String>) -> Result<HashMap<String, Task>, db::Error> {
let mut entries: HashMap<String, Task> = HashMap::new();
for item in list {
if let Some(entry) = db::entry(&basedir, is_executable, &item) {
entries.insert(item.clone(), Task::from_entry(&entry));
} else {
// Maybe it's not a task name, but a task URL?
if let Some(list) = sourceset.tasks_for(&t) {
// Hopefully safe unwrap (unless there's a source without a corresponding task?)
let task_list = list.iter().map(|t_name| Task::from_path(&basedir.join(&t_name), select.clone(), context).unwrap());
tasks.extend(task_list);
} else {
return Err(MissingTask(t));
}
return Err(db::Error::EntryNotFound(item.clone()))
}
}
Ok(tasks)
}
/// Loads a task list from a given base directory. Fails if the directory
/// is not readable with std::io::Error.
pub fn from_dir(basedir: &Path, select: Select, context: &log::Context) -> Vec<Task> {
basedir.read_dir().unwrap().filter_map(|f| {
if f.is_err() {
// Dismiss individual file errors
return None;
}
return Task::from_path(&f.unwrap().path(), select.clone(), context);
}).collect()
}
/// Takes a &Path to a basedir and a &str task_name, and returns
/// the corresponding source directory as a PathBuf. Does not check
/// if the target exists.
pub fn source_dir_from_basedir(basedir: &Path, task_name: &str) -> PathBuf {
basedir.join(&format!(".{}", task_name))
Ok(
entries
)
}
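Finally, a hedged sketch of how the task loaders above might be called (mirroring what main() does), assuming the single-argument `from_dir` and the `from_dir_and_list` variants shown in this diff; `load_tasks` is illustrative only:
```
use std::collections::HashMap;
use std::path::Path;
use crate::task::{self, Task};

// Illustrative only: load every executable task in basedir, or just the
// requested ones, and report which have already been cloned.
fn load_tasks(basedir: &Path, requested: Vec<String>) -> HashMap<String, Task> {
    let tasks = if requested.is_empty() {
        task::from_dir(basedir).expect("could not read base directory")
    } else {
        task::from_dir_and_list(basedir, requested).expect("unknown task requested")
    };
    for (name, t) in &tasks {
        println!("task {} (cloned: {})", name, t.cloned);
    }
    tasks
}
```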