Merge branch 'master' into libimagerror/integration
This merge resolved a _LOT_ of conflicts and was a rather complicated one, as parts of the conflict resolution involved rewriting about half of the affected code. This merge commit fixes things up so that `cargo check --all` succeeds, but I have not yet checked whether the tests run without failure.
commit c115215fa4
67 changed files with 2751 additions and 1354 deletions
@@ -29,6 +29,7 @@ members = [
     "lib/entry/libimagentrydatetime",
     "lib/entry/libimagentryedit",
     "lib/entry/libimagentryfilter",
+    "lib/entry/libimagentrygps",
     "lib/entry/libimagentrylink",
     "lib/entry/libimagentrylist",
     "lib/entry/libimagentrymarkdown",
@@ -33,3 +33,9 @@ path = "../../../lib/etc/libimagutil"
 default-features = false
 features = ["testing"]
+
+[dev-dependencies.libimagrt]
+version = "0.4.0"
+path = "../../../lib/core/libimagrt"
+default-features = false
+features = ["testing"]
@@ -108,17 +108,18 @@ fn handle_internal_linking(rt: &Runtime) {
         }
     }

-    match cmd.value_of("list") {
-        Some(list) => handle_internal_linking_list_call(rt, cmd, list),
-        None => {
     match cmd.subcommand_name() {
+        Some("list") => {
+            cmd.subcommand_matches("list")
+                .map(|matches| handle_internal_linking_list_call(rt, cmd, matches));
+        },
         Some("add") => {
             let (mut from, to) = get_from_to_entry(&rt, "add");
             for mut to_entry in to {
                 if let Err(e) = to_entry.add_internal_link(&mut from) {
                     trace_error_exit(&e, 1);
                 }
-            }
+            };
         },

         Some("remove") => {
@@ -127,21 +128,19 @@ fn handle_internal_linking(rt: &Runtime) {
                 if let Err(e) = to_entry.remove_internal_link(&mut from) {
                     trace_error_exit(&e, 1);
                 }
-            }
+            };
         },

         _ => unreachable!(),
-            };
-        }
     }
 }

 #[inline]
-fn handle_internal_linking_list_call(rt: &Runtime, cmd: &ArgMatches, list: &str) {
+fn handle_internal_linking_list_call(rt: &Runtime, cmd: &ArgMatches, list: &ArgMatches) {
     use libimagentrylink::external::is_external_link_storeid;

     debug!("List...");
-    for entry in list.split(',') {
+    for entry in list.values_of("entries").unwrap() { // clap has our back
         debug!("Listing for '{}'", entry);
         match get_entry_by_name(rt, entry) {
             Ok(Some(e)) => {
@@ -362,6 +361,7 @@ mod tests {
         with help "imag-link mocking app";
     }
     use self::mock::generate_test_runtime;
+    use self::mock::reset_test_runtime;
     use libimagutil::testing::DEFAULT_ENTRY;

     fn create_test_default_entry<'a, S: AsRef<OsStr>>(rt: &'a Runtime, name: S) -> StoreResult<StoreId> {
@@ -392,7 +392,7 @@ mod tests {

     #[test]
     fn test_link_modificates() {
-        let rt = generate_test_runtime(vec!["internal", "add", "--from", "test1", "--to", "test2"])
+        let rt = generate_test_runtime(vec!["internal", "add", "test1", "test2"])
             .unwrap();

         let test_id1 = create_test_default_entry(&rt, "test1").unwrap();
@@ -412,7 +412,7 @@ mod tests {

     #[test]
     fn test_linking_links() {
-        let rt = generate_test_runtime(vec!["internal", "add", "--from", "test1", "--to", "test2"])
+        let rt = generate_test_runtime(vec!["internal", "add", "test1", "test2"])
             .unwrap();

         let test_id1 = create_test_default_entry(&rt, "test1").unwrap();
@@ -432,7 +432,7 @@ mod tests {

     #[test]
     fn test_multilinking() {
-        let rt = generate_test_runtime(vec!["internal", "add", "--from", "test1", "--to", "test2"])
+        let rt = generate_test_runtime(vec!["internal", "add", "test1", "test2"])
             .unwrap();

         let test_id1 = create_test_default_entry(&rt, "test1").unwrap();
@@ -450,4 +450,87 @@ mod tests {
         assert_eq!(*test_links1, links_toml_value(vec!["test2"]));
         assert_eq!(*test_links2, links_toml_value(vec!["test1"]));
     }
+
+    #[test]
+    fn test_linking_more_than_two() {
+        let rt = generate_test_runtime(vec!["internal", "add", "test1", "test2", "test3"])
+            .unwrap();
+
+        let test_id1 = create_test_default_entry(&rt, "test1").unwrap();
+        let test_id2 = create_test_default_entry(&rt, "test2").unwrap();
+        let test_id3 = create_test_default_entry(&rt, "test3").unwrap();
+
+        handle_internal_linking(&rt);
+        handle_internal_linking(&rt);
+
+        let test_entry1 = rt.store().get(test_id1).unwrap().unwrap();
+        let test_links1 = get_entry_links(&test_entry1).unwrap();
+
+        let test_entry2 = rt.store().get(test_id2).unwrap().unwrap();
+        let test_links2 = get_entry_links(&test_entry2).unwrap();
+
+        let test_entry3 = rt.store().get(test_id3).unwrap().unwrap();
+        let test_links3 = get_entry_links(&test_entry3).unwrap();
+
+        assert_eq!(*test_links1, links_toml_value(vec!["test2", "test3"]));
+        assert_eq!(*test_links2, links_toml_value(vec!["test1"]));
+        assert_eq!(*test_links3, links_toml_value(vec!["test1"]));
+    }
+
+    // Remove tests
+
+    #[test]
+    fn test_linking_links_unlinking_removes_links() {
+        let rt = generate_test_runtime(vec!["internal", "add", "test1", "test2"])
+            .unwrap();
+
+        let test_id1 = create_test_default_entry(&rt, "test1").unwrap();
+        let test_id2 = create_test_default_entry(&rt, "test2").unwrap();
+
+        handle_internal_linking(&rt);
+
+        let rt = reset_test_runtime(vec!["internal", "remove", "test1", "test2"], rt)
+            .unwrap();
+
+        handle_internal_linking(&rt);
+
+        let test_entry1 = rt.store().get(test_id1).unwrap().unwrap();
+        let test_links1 = get_entry_links(&test_entry1).unwrap();
+
+        let test_entry2 = rt.store().get(test_id2).unwrap().unwrap();
+        let test_links2 = get_entry_links(&test_entry2).unwrap();
+
+        assert_eq!(*test_links1, links_toml_value(vec![]));
+        assert_eq!(*test_links2, links_toml_value(vec![]));
+    }
+
+    #[test]
+    fn test_linking_and_unlinking_more_than_two() {
+        let rt = generate_test_runtime(vec!["internal", "add", "test1", "test2", "test3"])
+            .unwrap();
+
+        let test_id1 = create_test_default_entry(&rt, "test1").unwrap();
+        let test_id2 = create_test_default_entry(&rt, "test2").unwrap();
+        let test_id3 = create_test_default_entry(&rt, "test3").unwrap();
+
+        handle_internal_linking(&rt);
+
+        let rt = reset_test_runtime(vec!["internal", "remove", "test1", "test2", "test3"], rt)
+            .unwrap();
+
+        handle_internal_linking(&rt);
+
+        let test_entry1 = rt.store().get(test_id1).unwrap().unwrap();
+        let test_links1 = get_entry_links(&test_entry1).unwrap();
+
+        let test_entry2 = rt.store().get(test_id2).unwrap().unwrap();
+        let test_links2 = get_entry_links(&test_entry2).unwrap();
+
+        let test_entry3 = rt.store().get(test_id3).unwrap().unwrap();
+        let test_links3 = get_entry_links(&test_entry3).unwrap();
+
+        assert_eq!(*test_links1, links_toml_value(vec![]));
+        assert_eq!(*test_links2, links_toml_value(vec![]));
+        assert_eq!(*test_links3, links_toml_value(vec![]));
+    }
 }
@@ -28,15 +28,14 @@ pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
                     .about("Add link from one entry to another (and vice-versa)")
                     .version("0.1")
                     .arg(Arg::with_name("from")
-                        .long("from")
-                        .short("f")
+                        .index(1)
                         .takes_value(true)
                         .required(true)
+                        .multiple(false)
                         .help("Link from this entry")
                         .value_name("ENTRY"))
                     .arg(Arg::with_name("to")
-                        .long("to")
-                        .short("t")
+                        .index(2)
                         .takes_value(true)
                         .required(true)
                         .multiple(true)
@@ -48,15 +47,14 @@ pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
                     .about("Remove a link between two or more entries")
                     .version("0.1")
                     .arg(Arg::with_name("from")
-                        .long("from")
-                        .short("f")
+                        .index(1)
                         .takes_value(true)
                         .required(true)
+                        .multiple(false)
                         .help("Remove Link from this entry")
                         .value_name("ENTRY"))
                     .arg(Arg::with_name("to")
-                        .long("to")
-                        .short("t")
+                        .index(2)
                         .takes_value(true)
                         .required(true)
                         .multiple(true)
@@ -64,20 +62,23 @@ pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
                         .value_name("ENTRIES"))
                 )

-                .arg(Arg::with_name("list")
-                    .long("list")
-                    .short("l")
+                .subcommand(SubCommand::with_name("list")
+                    .about("List links to this entry")
+                    .version("0.1")
+                    .arg(Arg::with_name("entries")
+                        .index(1)
                         .takes_value(true)
-                    .required(false)
-                    .help("List links to this entry")
-                    .value_name("ENTRY"))
+                        .multiple(true)
+                        .required(true)
+                        .help("List these entries, seperate by comma")
+                        .value_name("ENTRIES"))

                     .arg(Arg::with_name("list-externals-too")
                         .long("list-external")
                         .takes_value(false)
                         .required(false)
                         .help("If --list is provided, also list external links (debugging helper that might be removed at some point"))
+                )

                 .arg(Arg::with_name("check-consistency")
                     .long("check-consistency")
@@ -48,7 +48,7 @@ use ui::build_ui;

 use std::path::PathBuf;

-use libimagentryref::reference::Ref;
+use libimagentryref::refstore::RefStore;
 use libimagentryref::flags::RefFlags;
 use libimagerror::trace::trace_error;
 use libimagrt::setup::generate_runtime_setup;
@@ -82,7 +82,7 @@ fn add(rt: &Runtime) {
         .with_content_hashing(cmd.is_present("track-content"))
         .with_permission_tracking(cmd.is_present("track-permissions"));

-    match Ref::create(rt.store(), path, flags) {
+    match RefStore::create(rt.store(), path, flags) {
         Ok(r) => {
             debug!("Reference created: {:?}", r);
             info!("Ok");
@@ -102,7 +102,7 @@ fn remove(rt: &Runtime) {
     let yes = cmd.is_present("yes");

     if yes || ask_bool(&format!("Delete Ref with hash '{}'", hash)[..], None) {
-        match Ref::delete_by_hash(rt.store(), hash) {
+        match rt.store().delete_by_hash(hash) {
             Err(e) => trace_error(&e),
             Ok(_) => info!("Ok"),
         }
@@ -126,7 +126,7 @@ fn list(rt: &Runtime) {

     let iter = match rt.store().retrieve_for_module("ref") {
         Ok(iter) => iter.filter_map(|id| {
-            match Ref::get(rt.store(), id) {
+            match rt.store().get(id) {
                 Ok(r) => Some(r),
                 Err(e) => {
                     trace_error(&e);
@@ -145,7 +145,7 @@ fn list(rt: &Runtime) {
         .check_changed(do_check_changed)
         .check_changed_content(do_check_changed_content)
         .check_changed_permiss(do_check_changed_permiss)
-        .list(iter.map(|e| e.into()))
+        .list(iter.filter_map(Into::into))
         .ok();
 }
@@ -28,3 +28,18 @@ libimagutil = { version = "0.4.0", path = "../../../lib/etc/libimagutil" }
 [features]
 early-panic = [ "libimagstore/early-panic" ]
+
+[dev-dependencies.libimagutil]
+version = "0.4.0"
+path = "../../../lib/etc/libimagutil"
+default-features = false
+features = ["testing"]
+
+[dev-dependencies.libimagrt]
+version = "0.4.0"
+path = "../../../lib/core/libimagrt"
+default-features = false
+features = ["testing"]
+
+[dev-dependencies.toml-query]
+version = "0.3"
@@ -178,3 +178,39 @@ fn string_from_raw_src(raw_src: &str) -> String {
     }
     content
 }
+
+#[cfg(test)]
+mod tests {
+    use super::create;
+
+    use std::path::PathBuf;
+    use toml_query::read::TomlValueReadExt;
+    use toml::Value;
+
+    make_mock_app! {
+        app "imag-link";
+        modulename mock;
+        version "0.4.0";
+        with help "imag-link mocking app";
+    }
+    use self::mock::generate_test_runtime;
+
+    #[test]
+    fn test_create_simple() {
+        let test_name = "test_create_simple";
+        let rt = generate_test_runtime(vec!["create", "-p", "test_create_simple"]).unwrap();
+
+        create(&rt);
+
+        let e = rt.store().get(PathBuf::from(test_name));
+        assert!(e.is_ok());
+        let e = e.unwrap();
+        assert!(e.is_some());
+        let e = e.unwrap();
+
+        let version = e.get_header().read("imag.version").map(Option::unwrap).unwrap();
+        assert_eq!(Value::String(String::from("0.4.0")), *version);
+    }
+
+}
@@ -46,3 +46,38 @@ pub fn delete(rt: &Runtime) {
         .or_else(|| warn_exit("No subcommand 'delete'. Will exit now", 1));
 }
+
+#[cfg(test)]
+mod tests {
+    use create::create;
+    use super::delete;
+
+    use std::path::PathBuf;
+
+    make_mock_app! {
+        app "imag-link";
+        modulename mock;
+        version "0.4.0";
+        with help "imag-link mocking app";
+    }
+    use self::mock::generate_test_runtime;
+    use self::mock::reset_test_runtime;
+
+    #[test]
+    fn test_create_simple() {
+        let test_name = "test_create_simple";
+        let rt = generate_test_runtime(vec!["create", "-p", "test_create_simple"]).unwrap();
+
+        create(&rt);
+
+        let rt = reset_test_runtime(vec!["delete", "--id", "test_create_simple"], rt).unwrap();
+
+        delete(&rt);
+
+        let e = rt.store().get(PathBuf::from(test_name));
+        assert!(e.is_ok());
+        let e = e.unwrap();
+        assert!(e.is_none());
+    }
+
+}
@@ -35,14 +35,21 @@
 extern crate clap;
 #[macro_use] extern crate log;
 extern crate toml;
+#[cfg(test)] extern crate toml_query;
 #[macro_use] extern crate version;
 #[macro_use] extern crate error_chain;

 extern crate libimagrt;
 extern crate libimagstore;
-extern crate libimagutil;
 extern crate libimagerror;

+#[cfg(test)]
+#[macro_use]
+extern crate libimagutil;
+
+#[cfg(not(test))]
+extern crate libimagutil;
+
 use libimagrt::setup::generate_runtime_setup;

 mod create;
@@ -35,3 +35,9 @@ path = "../../../lib/etc/libimagutil"
 default-features = false
 features = ["testing"]
+
+[dev-dependencies.libimagrt]
+version = "0.4.0"
+path = "../../../lib/core/libimagrt"
+default-features = false
+features = ["testing"]
@@ -48,10 +48,11 @@ use libimagrt::setup::generate_runtime_setup;
 use libimagentrytag::tagable::Tagable;
 use libimagentrytag::tag::Tag;
 use libimagerror::trace::{trace_error, trace_error_exit};
-use libimagentrytag::ui::{get_add_tags, get_remove_tags};
 use libimagstore::storeid::StoreId;
 use libimagutil::warn_exit::warn_exit;

+use clap::ArgMatches;
+
 mod ui;

 use ui::build_ui;
@@ -181,6 +182,42 @@ fn list(id: PathBuf, rt: &Runtime) {
     }
 }

+/// Get the tags which should be added from the commandline
+///
+/// Returns none if the argument was not specified
+fn get_add_tags(matches: &ArgMatches) -> Option<Vec<Tag>> {
+    let a = "add-tags";
+    extract_tags(matches, a, '+')
+        .or_else(|| matches.values_of(a).map(|values| values.map(String::from).collect()))
+}
+
+/// Get the tags which should be removed from the commandline
+///
+/// Returns none if the argument was not specified
+fn get_remove_tags(matches: &ArgMatches) -> Option<Vec<Tag>> {
+    let r = "remove-tags";
+    extract_tags(matches, r, '+')
+        .or_else(|| matches.values_of(r).map(|values| values.map(String::from).collect()))
+}
+
+fn extract_tags(matches: &ArgMatches, specifier: &str, specchar: char) -> Option<Vec<Tag>> {
+    if let Some(submatch) = matches.subcommand_matches("tags") {
+        submatch.values_of(specifier)
+            .map(|values| values.map(String::from).collect())
+    } else {
+        matches.values_of("specify-tags")
+            .map(|argmatches| {
+                argmatches
+                    .map(String::from)
+                    .filter(|s| s.starts_with(specchar))
+                    .map(|s| {
+                        String::from(s.split_at(1).1)
+                    })
+                    .collect()
+            })
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use std::path::PathBuf;
@@ -190,12 +227,11 @@ mod tests {
     use toml_query::read::TomlValueReadExt;
     use toml_query::error::Result as TomlQueryResult;

-    use libimagentrytag::ui::{get_add_tags, get_remove_tags};
     use libimagrt::runtime::Runtime;
     use libimagstore::storeid::StoreId;
     use libimagstore::store::{Result as StoreResult, FileLockEntry};

-    use super::alter;
+    use super::*;

     make_mock_app! {
         app "imag-tag";
@@ -19,7 +19,7 @@

 use clap::{Arg, App, ArgGroup, SubCommand};

-use libimagentrytag::ui::{tag_add_arg, tag_remove_arg};
+use libimagentrytag::tag::is_tag;

 pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
     app.arg(Arg::with_name("id")
@@ -30,8 +30,22 @@ pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
         .help("Use this entry")
         .value_name("ID"))

-        .arg(tag_add_arg())
-        .arg(tag_remove_arg())
+        .arg(Arg::with_name("add-tags")
+            .short("a")
+            .long("add")
+            .takes_value(true)
+            .value_name("tags")
+            .multiple(true)
+            .validator(is_tag)
+            .help("Add tags, seperated by comma or by specifying multiple times"))
+        .arg(Arg::with_name("remove-tags")
+            .short("r")
+            .long("remove")
+            .takes_value(true)
+            .value_name("tags")
+            .multiple(true)
+            .validator(is_tag)
+            .help("Remove tags, seperated by comma or by specifying multiple times"))

         .subcommand(SubCommand::with_name("list")
             .about("List tags (default)")
@@ -21,5 +21,4 @@ version = "2.0.1"
 libimagrt = { version = "0.4.0", path = "../../../lib/core/libimagrt" }
 libimagerror = { version = "0.4.0", path = "../../../lib/core/libimagerror" }
 libimagbookmark = { version = "0.4.0", path = "../../../lib/domain/libimagbookmark" }
-libimagentrytag = { version = "0.4.0", path = "../../../lib/entry/libimagentrytag" }
 libimagutil = { version = "0.4.0", path = "../../../lib/etc/libimagutil" }
@@ -37,7 +37,6 @@ extern crate clap;
 #[macro_use] extern crate version;

 extern crate libimagbookmark;
-extern crate libimagentrytag;
 extern crate libimagrt;
 extern crate libimagerror;
 extern crate libimagutil;
@@ -19,7 +19,6 @@

 use clap::{Arg, App, SubCommand};

-use libimagentrytag::ui::tag_add_arg;
 use libimagutil::cli_validators::*;

 pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
@@ -44,7 +43,6 @@ pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
                 .value_name("URL")
                 .validator(is_url)
                 .help("Add this URL, multiple possible"))
-            .arg(tag_add_arg())
         )

         .subcommand(SubCommand::with_name("remove")
@@ -19,16 +19,18 @@

 use std::process::exit;

+use clap::ArgMatches;
+
 use libimagdiary::diary::Diary;
 use libimagdiary::diaryid::DiaryId;
 use libimagdiary::error::DiaryErrorKind as DEK;
 use libimagdiary::error::ResultExt;
 use libimagentryedit::edit::Edit;
 use libimagrt::runtime::Runtime;
-use libimagerror::trace::trace_error;
-use libimagdiary::entry::Entry;
-use libimagdiary::error::Result;
+use libimagerror::trace::trace_error_exit;
 use libimagutil::warn_exit::warn_exit;
+use libimagstore::store::FileLockEntry;
+use libimagstore::store::Store;

 use util::get_diary_name;
@@ -36,20 +38,49 @@ pub fn create(rt: &Runtime) {
     let diaryname = get_diary_name(rt)
         .unwrap_or_else( || warn_exit("No diary selected. Use either the configuration file or the commandline option", 1));

-    let prevent_edit = rt.cli().subcommand_matches("create").unwrap().is_present("no-edit");
+    let mut entry = create_entry(rt.store(), &diaryname, rt);

-    fn create_entry<'a>(diary: &'a Diary, rt: &Runtime) -> Result<Entry<'a>> {
+    let res = if rt.cli().subcommand_matches("create").unwrap().is_present("no-edit") {
+        debug!("Not editing new diary entry");
+        Ok(())
+    } else {
+        debug!("Editing new diary entry");
+        entry.edit_content(rt)
+            .chain_err(|| DEK::DiaryEditError)
+    };
+
+    if let Err(e) = res {
+        trace_error_exit(&e, 1);
+    } else {
+        info!("Ok!");
+    }
+}
+
+fn create_entry<'a>(diary: &'a Store, diaryname: &str, rt: &Runtime) -> FileLockEntry<'a> {
+    let create = rt.cli().subcommand_matches("create").unwrap();
+    let entry = if !create.is_present("timed") {
+        debug!("Creating non-timed entry");
+        diary.new_entry_today(diaryname)
+    } else {
+        let id = create_id_from_clispec(&create, &diaryname);
+        diary.retrieve(id).chain_err(|| DEK::StoreReadError)
+    };
+
+    match entry {
+        Err(e) => trace_error_exit(&e, 1),
+        Ok(e) => {
+            debug!("Created: {}", e.get_location());
+            e
+        }
+    }
+}
+
+fn create_id_from_clispec(create: &ArgMatches, diaryname: &str) -> DiaryId {
     use std::str::FromStr;

-    let create = rt.cli().subcommand_matches("create").unwrap();
-    if !create.is_present("timed") {
-        debug!("Creating non-timed entry");
-        diary.new_entry_today()
-    } else {
-        let id = match create.value_of("timed") {
-            Some("h") | Some("hourly") => {
-                debug!("Creating hourly-timed entry");
-                let time = DiaryId::now(String::from(diary.name()));
+    let get_hourly_id = |create: &ArgMatches| -> DiaryId {
+        let time = DiaryId::now(String::from(diaryname));
         let hr = create
             .value_of("hour")
             .map(|v| { debug!("Creating hourly entry with hour = {:?}", v); v })
@@ -60,22 +91,17 @@ pub fn create(rt: &Runtime) {
             })
             .unwrap_or(time.hour());

-        time.with_hour(hr).with_minute(0)
+        time.with_hour(hr)
+    };
+
+    match create.value_of("timed") {
+        Some("h") | Some("hourly") => {
+            debug!("Creating hourly-timed entry");
+            get_hourly_id(create)
         },

         Some("m") | Some("minutely") => {
-            debug!("Creating minutely-timed entry");
-            let time = DiaryId::now(String::from(diary.name()));
-            let hr = create
-                .value_of("hour")
-                .map(|h| { debug!("hour = {:?}", h); h })
-                .and_then(|s| {
-                    FromStr::from_str(s)
-                        .map_err(|_| warn!("Could not parse hour: '{}'", s))
-                        .ok()
-                })
-                .unwrap_or(time.hour());
+            let time = get_hourly_id(create);

             let min = create
                 .value_of("minute")
                 .map(|m| { debug!("minute = {:?}", m); m })
@@ -86,7 +112,7 @@ pub fn create(rt: &Runtime) {
                 })
                 .unwrap_or(time.minute());

-            time.with_hour(hr).with_minute(min)
+            time.with_minute(min)
         },

         Some(_) => {
@@ -96,28 +122,6 @@ pub fn create(rt: &Runtime) {
         },

         None => warn_exit("Unexpected error, cannot continue", 1)
-    };
-
-        diary.new_entry_by_id(id)
-    }
-    }
-
-    let diary = Diary::open(rt.store(), &diaryname[..]);
-    let res = create_entry(&diary, rt)
-        .and_then(|mut entry| {
-            if prevent_edit {
-                debug!("Not editing new diary entry");
-                Ok(())
-            } else {
-                debug!("Editing new diary entry");
-                entry.edit_content(rt).chain_err(|| DEK::DiaryEditError)
-            }
-        });
-
-    if let Err(e) = res {
-        trace_error(&e);
-    } else {
-        info!("Ok!");
     }
 }
@@ -17,15 +17,17 @@
 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 //

+use std::process::exit;
+
 use chrono::naive::NaiveDateTime;

-use libimagdiary::diary::Diary;
 use libimagdiary::diaryid::DiaryId;
 use libimagrt::runtime::Runtime;
 use libimagerror::trace::trace_error_exit;
 use libimagtimeui::datetime::DateTime;
 use libimagtimeui::parse::Parse;
 use libimagutil::warn_exit::warn_exit;
+use libimagstore::storeid::IntoStoreId;

 use util::get_diary_name;
@@ -35,36 +37,34 @@ pub fn delete(rt: &Runtime) {
     let diaryname = get_diary_name(rt)
         .unwrap_or_else(|| warn_exit("No diary selected. Use either the configuration file or the commandline option", 1));

-    let diary = Diary::open(rt.store(), &diaryname[..]);
-    debug!("Diary opened: {:?}", diary);
-
-    let datetime : Option<NaiveDateTime> = rt
+    let to_del_location = rt
         .cli()
         .subcommand_matches("delete")
         .unwrap()
         .value_of("datetime")
         .map(|dt| { debug!("DateTime = {:?}", dt); dt })
         .and_then(DateTime::parse)
-        .map(|dt| dt.into());
-
-    let to_del = match datetime {
-        Some(dt) => Some(diary.retrieve(DiaryId::from_datetime(diaryname.clone(), dt))),
-        None => diary.get_youngest_entry(),
-    };
-
-    let to_del = match to_del {
-        Some(Ok(e)) => e,
-        Some(Err(e)) => trace_error_exit(&e, 1),
-        None => warn_exit("No entry", 1)
-    };
-
-    if !ask_bool(&format!("Deleting {:?}", to_del.get_location())[..], Some(true)) {
+        .map(|dt| dt.into())
+        .ok_or_else(|| {
+            warn!("Not deleting entries, because missing date/time specification");
+            exit(1);
+        })
+        .and_then(|dt: NaiveDateTime| {
+            DiaryId::from_datetime(diaryname.clone(), dt)
+                .into_storeid()
+                .map(|id| rt.store().retrieve(id))
+                .unwrap_or_else(|e| trace_error_exit(&e, 1))
+        })
+        .unwrap_or_else(|e| trace_error_exit(&e, 1))
+        .get_location()
+        .clone();
+
+    if !ask_bool(&format!("Deleting {:?}", to_del_location), Some(true)) {
         info!("Aborting delete action");
         return;
     }

-    if let Err(e) = diary.delete_entry(to_del) {
+    if let Err(e) = rt.store().delete(to_del_location) {
         trace_error_exit(&e, 1)
     }
@@ -17,6 +17,8 @@
 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 //

+use std::process::exit;
+
 use chrono::naive::NaiveDateTime;

 use libimagdiary::diary::Diary;
|
@ -30,33 +32,39 @@ use libimagerror::trace::MapErrTrace;
|
||||||
use libimagtimeui::datetime::DateTime;
|
use libimagtimeui::datetime::DateTime;
|
||||||
use libimagtimeui::parse::Parse;
|
use libimagtimeui::parse::Parse;
|
||||||
use libimagutil::warn_exit::warn_exit;
|
use libimagutil::warn_exit::warn_exit;
|
||||||
|
use libimagerror::trace::trace_error_exit;
|
||||||
|
|
||||||
use util::get_diary_name;
|
use util::get_diary_name;
|
||||||
|
|
||||||
pub fn edit(rt: &Runtime) {
|
pub fn edit(rt: &Runtime) {
|
||||||
let diaryname = get_diary_name(rt).unwrap_or_else(|| warn_exit("No diary name", 1));
|
let diaryname = get_diary_name(rt).unwrap_or_else(|| warn_exit("No diary name", 1));
|
||||||
let diary = Diary::open(rt.store(), &diaryname[..]);
|
|
||||||
|
|
||||||
let datetime : Option<NaiveDateTime> = rt
|
rt.cli()
|
||||||
.cli()
|
|
||||||
.subcommand_matches("edit")
|
.subcommand_matches("edit")
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.value_of("datetime")
|
.value_of("datetime")
|
||||||
.and_then(DateTime::parse)
|
.and_then(DateTime::parse)
|
||||||
.map(|dt| dt.into());
|
.map(|dt| dt.into())
|
||||||
|
.map(|dt: NaiveDateTime| DiaryId::from_datetime(diaryname.clone(), dt))
|
||||||
let to_edit = match datetime {
|
.or_else(|| {
|
||||||
Some(dt) => Some(diary.retrieve(DiaryId::from_datetime(diaryname.clone(), dt))),
|
rt.store()
|
||||||
None => diary.get_youngest_entry(),
|
.get_youngest_entry_id(&diaryname)
|
||||||
};
|
.map(|optid| match optid {
|
||||||
|
Ok(id) => id,
|
||||||
match to_edit {
|
Err(e) => trace_error_exit(&e, 1),
|
||||||
Some(Ok(mut e)) => e.edit_content(rt).chain_err(|| DEK::IOError),
|
})
|
||||||
|
})
|
||||||
Some(Err(e)) => Err(e),
|
.ok_or_else(|| {
|
||||||
|
error!("No entries in diary. Aborting");
|
||||||
|
exit(1)
|
||||||
|
})
|
||||||
|
.and_then(|id| rt.store().get(id))
|
||||||
|
.map(|opte| match opte {
|
||||||
|
Some(mut e) => e.edit_content(rt).chain_err(|| DEK::IOError),
|
||||||
None => Err(DE::from_kind(DEK::EntryNotInDiary)),
|
None => Err(DE::from_kind(DEK::EntryNotInDiary)),
|
||||||
}
|
})
|
||||||
.map_err_trace().ok();
|
.map_err_trace()
|
||||||
|
.ok();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@@ -42,9 +42,7 @@ pub fn list(rt: &Runtime) {
             .unwrap_or(String::from("<<Path Parsing Error>>"))
     }

-    let diary = Diary::open(rt.store(), &diaryname[..]);
-    debug!("Diary opened: {:?}", diary);
-    diary.entries()
+    Diary::entries(rt.store(), &diaryname)
         .and_then(|es| {
             debug!("Iterator for listing: {:?}", es);
@@ -27,10 +27,9 @@ use util::get_diary_name;

 pub fn view(rt: &Runtime) {
     let diaryname = get_diary_name(rt).unwrap_or_else(|| warn_exit("No diary name", 1));
-    let diary = Diary::open(rt.store(), &diaryname[..]);
     let hdr = rt.cli().subcommand_matches("view").unwrap().is_present("show-header");

-    diary.entries()
+    Diary::entries(rt.store(), &diaryname)
         .and_then(|entries| DV::new(hdr).view_entries(entries.into_iter().filter_map(Result::ok)))
         .map_err_trace()
         .ok();
@@ -17,5 +17,4 @@ version = "2.0.1"
 libimagrt = { version = "0.4.0", path = "../../../lib/core/libimagrt" }
 libimagerror = { version = "0.4.0", path = "../../../lib/core/libimagerror" }
 libimagmail = { version = "0.4.0", path = "../../../lib/domain/libimagmail" }
-libimagentryref = { version = "0.4.0", path = "../../../lib/entry/libimagentryref" }
 libimagutil = { version = "0.4.0", path = "../../../lib/etc/libimagutil" }
@@ -25,11 +25,9 @@ extern crate libimagrt;
 extern crate libimagmail;
 extern crate libimagerror;
 extern crate libimagutil;
-extern crate libimagentryref;

 use libimagerror::trace::{MapErrTrace, trace_error, trace_error_exit};
 use libimagmail::mail::Mail;
-use libimagentryref::reference::Ref;
 use libimagrt::runtime::Runtime;
 use libimagrt::setup::generate_runtime_setup;
 use libimagutil::info_result::*;
|
@ -74,11 +72,11 @@ fn list(rt: &Runtime) {
|
||||||
|
|
||||||
let iter = match store.retrieve_for_module("ref") {
|
let iter = match store.retrieve_for_module("ref") {
|
||||||
Ok(iter) => iter.filter_map(|id| {
|
Ok(iter) => iter.filter_map(|id| {
|
||||||
Ref::get(store, id)
|
match store.get(id).chain_err(|| MEK::RefHandlingError).map_err_trace() {
|
||||||
.chain_err(|| MEK::RefHandlingError)
|
Ok(Some(fle)) => Mail::from_fle(fle).map_err_trace().ok(),
|
||||||
.and_then(|rf| Mail::from_ref(rf))
|
Ok(None) => None,
|
||||||
.map_err_trace()
|
Err(e) => trace_error_exit(&e, 1),
|
||||||
.ok()
|
}
|
||||||
}),
|
}),
|
||||||
Err(e) => trace_error_exit(&e, 1),
|
Err(e) => trace_error_exit(&e, 1),
|
||||||
};
|
};
|
||||||
|
|
|
@@ -23,5 +23,4 @@ libimagrt = { version = "0.4.0", path = "../../../lib/core/libimagrt" }
 libimagerror = { version = "0.4.0", path = "../../../lib/core/libimagerror" }
 libimagnotes = { version = "0.4.0", path = "../../../lib/domain/libimagnotes" }
 libimagentryedit = { version = "0.4.0", path = "../../../lib/entry/libimagentryedit" }
-libimagentrytag = { version = "0.4.0", path = "../../../lib/entry/libimagentrytag" }
 libimagutil = { version = "0.4.0", path = "../../../lib/etc/libimagutil" }
@@ -25,7 +25,6 @@ extern crate itertools;
 extern crate libimagnotes;
 extern crate libimagrt;
 extern crate libimagentryedit;
-extern crate libimagentrytag;
 extern crate libimagerror;
 extern crate libimagutil;
@@ -19,8 +19,6 @@

 use clap::{Arg, App, SubCommand};

-use libimagentrytag::ui::tag_argument;
-
 pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
     app
         .subcommand(SubCommand::with_name("create")
|
||||||
.required(true)
|
.required(true)
|
||||||
.help("Edit Note with this name")
|
.help("Edit Note with this name")
|
||||||
.value_name("NAME"))
|
.value_name("NAME"))
|
||||||
|
|
||||||
.arg(tag_argument())
|
|
||||||
)
|
)
|
||||||
|
|
||||||
.subcommand(SubCommand::with_name("list")
|
.subcommand(SubCommand::with_name("list")
|
||||||
|
|
|
@@ -35,7 +35,7 @@ use toml::Value;

 use libimagrt::runtime::Runtime;
 use libimagrt::setup::generate_runtime_setup;
-use libimagtodo::task::Task;
+use libimagtodo::taskstore::TaskStore;
 use libimagerror::trace::{MapErrTrace, trace_error, trace_error_exit};

 mod ui;
@@ -61,9 +61,11 @@ fn tw_hook(rt: &Runtime) {
     let subcmd = rt.cli().subcommand_matches("tw-hook").unwrap();
     if subcmd.is_present("add") {
         let stdin = stdin();
-        let stdin = stdin.lock(); // implements BufRead which is required for `Task::import()`

-        match Task::import(rt.store(), stdin) {
+        // implements BufRead which is required for `Store::import_task_from_reader()`
+        let stdin = stdin.lock();
+
+        match rt.store().import_task_from_reader(stdin) {
             Ok((_, line, uuid)) => println!("{}\nTask {} stored in imag", line, uuid),
             Err(e) => trace_error_exit(&e, 1),
         }
|
@ -71,7 +73,7 @@ fn tw_hook(rt: &Runtime) {
|
||||||
// The used hook is "on-modify". This hook gives two json-objects
|
// The used hook is "on-modify". This hook gives two json-objects
|
||||||
// per usage und wants one (the second one) back.
|
// per usage und wants one (the second one) back.
|
||||||
let stdin = stdin();
|
let stdin = stdin();
|
||||||
Task::delete_by_imports(rt.store(), stdin.lock()).map_err_trace().ok();
|
rt.store().delete_tasks_by_imports(stdin.lock()).map_err_trace().ok();
|
||||||
} else {
|
} else {
|
||||||
// Should not be possible, as one argument is required via
|
// Should not be possible, as one argument is required via
|
||||||
// ArgGroup
|
// ArgGroup
|
||||||
|
@@ -92,18 +94,21 @@ fn list(rt: &Runtime) {
         is_match!(e.kind(), &::toml_query::error::ErrorKind::IdentifierNotFoundInDocument(_))
     };

-    let res = Task::all(rt.store()) // get all tasks
+    let res = rt.store().all_tasks() // get all tasks
         .map(|iter| { // and if this succeeded
             // filter out the ones were we can read the uuid
-            let uuids : Vec<_> = iter.filter_map(|t| match t {
-                Ok(v) => match v.get_header().read(&String::from("todo.uuid")) {
+            let uuids : Vec<_> = iter.filter_map(|storeid| {
+                match rt.store().retrieve(storeid) {
+                    Ok(fle) => {
+                        match fle.get_header().read(&String::from("todo.uuid")) {
                             Ok(Some(&Value::String(ref u))) => Some(u.clone()),
                             Ok(Some(_)) => {
-                                warn!("Header type error");
+                                error!("Header type error, expected String at 'todo.uuid' in {}",
+                                       fle.get_location());
                                 None
                             },
                             Ok(None) => {
-                                warn!("Header missing field");
+                                error!("Header missing field in {}", fle.get_location());
                                 None
                             },
                             Err(e) => {
@@ -112,10 +117,12 @@ fn list(rt: &Runtime) {
                             }
                             None
                         }
+                    }
                 },
                 Err(e) => {
                     trace_error(&e);
                     None
+                },
             }
         })
         .collect();
@@ -25,6 +25,9 @@ This section contains the changelog from the last release to the next release.
 * The codebase was moved to a more tree-ish approach, where several
   subdirectories were introduced for different types of crates
 * The documentation got a major overhaul and was partly rewritten
+* The logger is now configurable via the config file.
+* New
+    * `libimagentrygps` was introduced
 * Fixed bugs
     * The config loading in `libimagrt`
       [was fixed](http://git.imag-pim.org/imag/commit/?id=9193d50f96bce099665d2eb716bcaa29a8d9b8ff).
imagrc.toml (29 changed lines)
@@ -1,6 +1,35 @@
 # This is a example configuration file for the imag suite.
 # It is written in TOML

+[imag.logging]
+level = "debug"
+destinations = [ "-" ]
+
+# Valid variables for logging:
+# * "level"
+# * "module_path"
+# * "file"
+# * "line"
+# * "target"
+# * "message"
+#
+# Valid functions to be applied:
+# * "black"
+# * "blue"
+# * "cyan"
+# * "green"
+# * "purple"
+# * "red"
+# * "white"
+# * "yellow"
+
+[imag.logging.format]
+trace = "[imag][{{red level}}][{{module_path}}]: {{message}}"
+debug = "[imag][{{cyan level}}]: {{message}}"
+info = "[imag]: {{message}}"
+warn = "[imag][{{bold level}}]: {{yellow message}}"
+error = "[imag][{{red level}}]: {{red message}}"
+
 #
 # Configuration options for the user interface
 #
@@ -24,7 +24,16 @@ ansi_term = "0.9"
 is-match = "0.1"
 toml-query = "0.3.0"
 error-chain = "0.10"
+handlebars = "0.29.0"

 libimagstore = { version = "0.4.0", path = "../../../lib/core/libimagstore" }
 libimagerror = { version = "0.4.0", path = "../../../lib/core/libimagerror" }
 libimagutil = { version = "0.4.0", path = "../../../lib/etc/libimagutil" }
+
+[features]
+default = []
+
+# Enable testing functionality. Used for building the libimagrt for testing CLI
+# apps. Do not use in production!
+testing = []
@@ -38,6 +38,71 @@ error_chain! {
             display("Process exited with failure")
         }
+
+        IOLogFileOpenError {
+            description("IO Error: Could not open logfile")
+            display("IO Error: Could not open logfile")
+        }
+
+        ConfigReadError {
+            description("Error while reading the configuration")
+            display("Error while reading the configuration")
+        }
+
+        ConfigTypeError {
+            description("Error while reading the configuration: Type Error")
+            display("Error while reading the configuration: Type Error")
+        }
+
+        GlobalLogLevelConfigMissing {
+            description("Global config 'imag.logging.level' missing")
+            display("Global config 'imag.logging.level' missing")
+        }
+
+        GlobalDestinationConfigMissing {
+            description("Global config 'imag.logging.destinations' missing")
+            display("Global config 'imag.logging.destinations' missing")
+        }
+
+        InvalidLogLevelSpec {
+            description("Invalid log level specification: Only 'trace', 'debug', 'info', 'warn', 'error' are allowed")
+            display("Invalid log level specification: Only 'trace', 'debug', 'info', 'warn', 'error' are allowed")
+        }
+
+        TomlReadError {
+            description("Error while reading in TOML document")
+            display("Error while reading in TOML document")
+        }
+
+        TemplateStringRegistrationError {
+            description("Error while registering logging template string")
+            display("Error while registering logging template string")
+        }
+
+        ConfigMissingLoggingFormatTrace {
+            description("Missing config for logging format for trace logging")
+            display("Missing config for logging format for trace logging")
+        }
+
+        ConfigMissingLoggingFormatDebug {
+            description("Missing config for logging format for debug logging")
+            display("Missing config for logging format for debug logging")
+        }
+
+        ConfigMissingLoggingFormatInfo {
+            description("Missing config for logging format for info logging")
+            display("Missing config for logging format for info logging")
+        }
+
+        ConfigMissingLoggingFormatWarn {
+            description("Missing config for logging format for warn logging")
+            display("Missing config for logging format for warn logging")
+        }
+
+        ConfigMissingLoggingFormatError {
+            description("Missing config for logging format for error logging")
+            display("Missing config for logging format for error logging")
+        }
+
     }
 }
@@ -41,6 +41,7 @@ extern crate itertools;
 #[cfg(unix)] extern crate xdg_basedir;
 extern crate env_logger;
 extern crate ansi_term;
+extern crate handlebars;

 extern crate clap;
 extern crate toml;
@@ -19,67 +19,114 @@
 use std::io::Write;
 use std::io::stderr;
+use std::collections::BTreeMap;
+
+use configuration::Configuration;
+use error::RuntimeErrorKind as EK;
+use error::RuntimeError as RE;
+use error::ResultExt;
+use runtime::Runtime;
+
+use clap::ArgMatches;
 use log::{Log, LogLevel, LogRecord, LogMetadata};
+use toml::Value;
+use toml_query::read::TomlValueReadExt;
+use handlebars::Handlebars;

-use ansi_term::Style;
-use ansi_term::Colour;
-use ansi_term::ANSIString;
+type ModuleName = String;
+type Result<T> = ::std::result::Result<T, RE>;
+
+enum LogDestination {
+    Stderr,
+    File(::std::fs::File),
+}
+
+impl Default for LogDestination {
+    fn default() -> LogDestination {
+        LogDestination::Stderr
+    }
+}
+
+struct ModuleSettings {
+    enabled: bool,
+    level: Option<LogLevel>,
+
+    #[allow(unused)]
+    destinations: Option<Vec<LogDestination>>,
+}

 /// Logger implementation for `log` crate.
 pub struct ImagLogger {
-    prefix: String,
-    dbg_fileline: bool,
-    lvl: LogLevel,
-    color_enabled: bool,
+    global_loglevel : LogLevel,
+
+    #[allow(unused)]
+    global_destinations : Vec<LogDestination>,
+    // global_format_trace : ,
+    // global_format_debug : ,
+    // global_format_info : ,
+    // global_format_warn : ,
+    // global_format_error : ,
+    module_settings : BTreeMap<ModuleName, ModuleSettings>,
+
+    handlebars: Handlebars,
 }

 impl ImagLogger {

     /// Create a new ImagLogger object with a certain level
-    pub fn new(lvl: LogLevel) -> ImagLogger {
-        ImagLogger {
-            prefix: "[imag]".to_owned(),
-            dbg_fileline: true,
-            lvl: lvl,
-            color_enabled: true
-        }
+    pub fn new(matches: &ArgMatches, config: Option<&Configuration>) -> Result<ImagLogger> {
+        let mut handlebars = Handlebars::new();
+
+        handlebars.register_helper("black" , Box::new(self::template_helpers::ColorizeBlackHelper));
+        handlebars.register_helper("blue" , Box::new(self::template_helpers::ColorizeBlueHelper));
+        handlebars.register_helper("cyan" , Box::new(self::template_helpers::ColorizeCyanHelper));
+        handlebars.register_helper("green" , Box::new(self::template_helpers::ColorizeGreenHelper));
+        handlebars.register_helper("purple" , Box::new(self::template_helpers::ColorizePurpleHelper));
+        handlebars.register_helper("red" , Box::new(self::template_helpers::ColorizeRedHelper));
+        handlebars.register_helper("white" , Box::new(self::template_helpers::ColorizeWhiteHelper));
+        handlebars.register_helper("yellow" , Box::new(self::template_helpers::ColorizeYellowHelper));
+
+        handlebars.register_helper("underline" , Box::new(self::template_helpers::UnderlineHelper));
+        handlebars.register_helper("bold" , Box::new(self::template_helpers::BoldHelper));
+        handlebars.register_helper("blink" , Box::new(self::template_helpers::BlinkHelper));
+        handlebars.register_helper("strikethrough" , Box::new(self::template_helpers::StrikethroughHelper));
+
+        {
+            let fmt = try!(aggregate_global_format_trace(matches, config));
+            try!(handlebars.register_template_string("TRACE", fmt) // name must be uppercase
+                 .chain_err(|| EK::TemplateStringRegistrationError));
+        }
+        {
+            let fmt = try!(aggregate_global_format_debug(matches, config));
+            try!(handlebars.register_template_string("DEBUG", fmt) // name must be uppercase
+                 .chain_err(|| EK::TemplateStringRegistrationError));
+        }
+        {
+            let fmt = try!(aggregate_global_format_info(matches, config));
+            try!(handlebars.register_template_string("INFO", fmt) // name must be uppercase
+                 .chain_err(|| EK::TemplateStringRegistrationError));
+        }
+        {
+            let fmt = try!(aggregate_global_format_warn(matches, config));
+            try!(handlebars.register_template_string("WARN", fmt) // name must be uppercase
+                 .chain_err(|| EK::TemplateStringRegistrationError));
+        }
+        {
+            let fmt = try!(aggregate_global_format_error(matches, config));
+            try!(handlebars.register_template_string("ERROR", fmt) // name must be uppercase
+                 .chain_err(|| EK::TemplateStringRegistrationError));
+        }
+
+        Ok(ImagLogger {
+            global_loglevel     : try!(aggregate_global_loglevel(matches, config)),
+            global_destinations : try!(aggregate_global_destinations(matches, config)),
+            module_settings     : try!(aggregate_module_settings(matches, config)),
+            handlebars          : handlebars,
+        })
     }

-    /// Set debugging to include file and line
-    pub fn with_dbg_file_and_line(mut self, b: bool) -> ImagLogger {
-        self.dbg_fileline = b;
-        self
-    }
-
-    /// Set debugging to include prefix
-    pub fn with_prefix(mut self, pref: String) -> ImagLogger {
-        self.prefix = pref;
-        self
-    }
-
-    /// Set debugging to have color
-    pub fn with_color(mut self, b: bool) -> ImagLogger {
-        self.color_enabled = b;
-        self
-    }
-
-    /// Helper function to colorize a string with a certain Style
-    fn style_or_not(&self, c: Style, s: String) -> ANSIString {
-        if self.color_enabled {
-            c.paint(s)
-        } else {
-            ANSIString::from(s)
-        }
-    }
-
-    /// Helper function to colorize a string with a certain Color
-    fn color_or_not(&self, c: Colour, s: String) -> ANSIString {
-        if self.color_enabled {
-            c.paint(s)
-        } else {
-            ANSIString::from(s)
-        }
-    }
+    pub fn global_loglevel(&self) -> LogLevel {
+        self.global_loglevel
+    }
 }

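The format names registered above ("TRACE" through "ERROR") are looked up by level name when a record is rendered (see the log() implementation in the next hunk). A minimal, hypothetical sketch of that template mechanism with a plain format string and without the colour helpers; the string and values are illustrative, not the project's defaults:

    extern crate handlebars;

    use std::collections::BTreeMap;
    use handlebars::Handlebars;

    fn main() {
        let mut hb = Handlebars::new();

        // "level" and "message" are among the keys the logger inserts before rendering.
        hb.register_template_string("WARN", "[imag][{{level}}]: {{message}}").unwrap();

        let mut data = BTreeMap::new();
        data.insert("level", String::from("WARN"));
        data.insert("message", String::from("something happened"));

        // Prints: [imag][WARN]: something happened
        println!("{}", hb.render("WARN", &data).unwrap());
    }
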
@@ -87,47 +134,432 @@ impl ImagLogger {
 impl Log for ImagLogger {

     fn enabled(&self, metadata: &LogMetadata) -> bool {
-        metadata.level() <= self.lvl
+        metadata.level() <= self.global_loglevel
     }

     fn log(&self, record: &LogRecord) {
-        use ansi_term::Colour::Red;
-        use ansi_term::Colour::Yellow;
-        use ansi_term::Colour::Cyan;
-
-        if self.enabled(record.metadata()) {
-            // TODO: This is just simple logging. Maybe we can enhance this lateron
-            let loc = record.location();
-            match record.metadata().level() {
-                LogLevel::Debug => {
-                    let lvl = self.color_or_not(Cyan, format!("{}", record.level()));
-                    let args = self.color_or_not(Cyan, format!("{}", record.args()));
-                    if self.dbg_fileline {
-                        let file = self.color_or_not(Cyan, format!("{}", loc.file()));
-                        let ln = self.color_or_not(Cyan, format!("{}", loc.line()));
-
-                        writeln!(stderr(), "{}[{: <5}][{}][{: >5}]: {}", self.prefix, lvl, file, ln, args).ok();
-                    } else {
-                        writeln!(stderr(), "{}[{: <5}]: {}", self.prefix, lvl, args).ok();
-                    }
-                },
-                LogLevel::Warn | LogLevel::Error => {
-                    let lvl = self.style_or_not(Red.blink(), format!("{}", record.level()));
-                    let args = self.color_or_not(Red, format!("{}", record.args()));
-
-                    writeln!(stderr(), "{}[{: <5}]: {}", self.prefix, lvl, args).ok();
-                },
-                LogLevel::Info => {
-                    let lvl = self.color_or_not(Yellow, format!("{}", record.level()));
-                    let args = self.color_or_not(Yellow, format!("{}", record.args()));
-
-                    writeln!(stderr(), "{}[{: <5}]: {}", self.prefix, lvl, args).ok();
+        if record.location().module_path().starts_with("handlebars") {
+            // This is a ugly, yet necessary hack. When logging, we use handlebars for templating.
+            // But as the handlebars library itselfs logs via a normal logging macro ("debug!()"),
+            // we have a recursion in our chain.
+            //
+            // To prevent this recursion, we return here.
+            //
+            // (As of handlebars 0.29.0 - please check whether you can update handlebars if you see
+            // this. Hopefully the next version has a compiletime flag to disable logging)
+            return;
+        }
+
+        let mut data = BTreeMap::new();
+
+        {
+            data.insert("level", format!("{}", record.level()));
+            data.insert("module_path", String::from(record.location().module_path()));
+            data.insert("file", String::from(record.location().file()));
+            data.insert("line", format!("{}", record.location().line()));
+            data.insert("target", String::from(record.target()));
+            data.insert("message", format!("{}", record.args()));
+        }
+
+        let logtext = self
+            .handlebars
+            .render(&format!("{}", record.level()), &data)
+            .unwrap_or_else(|e| format!("Failed rendering logging data: {:?}\n", e));
+
+        self.module_settings
+            .get(record.target())
+            .map(|module_setting| {
+                let set = module_setting.enabled &&
+                    module_setting.level.unwrap_or(self.global_loglevel) >= record.level();
+
+                if set {
+                    let _ = write!(stderr(), "{}\n", logtext);
+                }
+            })
+            .unwrap_or_else(|| {
+                if self.global_loglevel >= record.level() {
+                    // Yes, we log
+                    let _ = write!(stderr(), "{}\n", logtext);
+                }
+            });
+    }
+}
+
+fn match_log_level_str(s: &str) -> Result<LogLevel> {
+    match s {
+        "trace" => Ok(LogLevel::Trace),
+        "debug" => Ok(LogLevel::Debug),
+        "info" => Ok(LogLevel::Info),
+        "warn" => Ok(LogLevel::Warn),
+        "error" => Ok(LogLevel::Error),
+        _ => return Err(RE::from_kind(EK::InvalidLogLevelSpec)),
+    }
+}

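The aggregate_* helpers that follow read their settings from the imag.logging.* section of the configuration. A sketch of what such a section might look like and how one key is read back; the concrete values are made up, and the toml / toml-query calls are the ones this commit itself uses (assuming their versions behave as shown):

    extern crate toml;
    extern crate toml_query;

    use toml::Value;
    use toml_query::read::TomlValueReadExt;

    fn main() {
        // Illustrative configuration; only the key names follow the read() calls in this commit.
        let cfg: Value = r#"
            [imag.logging]
            level        = "debug"
            destinations = [ "-" ]

            [imag.logging.format]
            trace = "[imag][{{red level}}]: {{message}}"
            debug = "[imag][{{cyan level}}]: {{message}}"
            info  = "[imag]: {{message}}"
            warn  = "[imag][{{bold level}}]: {{message}}"
            error = "[imag][{{red level}}]: {{message}}"

            [imag.logging.modules.libimagstore]
            enabled = true
            level   = "warn"
        "#.parse().unwrap();

        // A destination of "-" means stderr, anything else is treated as a file path.
        match cfg.read("imag.logging.level") {
            Ok(Some(&Value::String(ref s))) => println!("global level: {}", s),
            _ => println!("no global level configured"),
        }
    }
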
fn aggregate_global_loglevel(matches: &ArgMatches, config: Option<&Configuration>)
|
||||||
|
-> Result<LogLevel>
|
||||||
|
{
|
||||||
|
match config {
|
||||||
|
Some(cfg) => match cfg
|
||||||
|
.read("imag.logging.level")
|
||||||
|
.chain_err(|| EK::ConfigReadError)
|
||||||
|
{
|
||||||
|
Ok(Some(&Value::String(ref s))) => match_log_level_str(s),
|
||||||
|
Ok(Some(_)) => Err(RE::from_kind(EK::ConfigTypeError)),
|
||||||
|
Ok(None) => Err(RE::from_kind(EK::GlobalLogLevelConfigMissing)),
|
||||||
|
Err(e) => Err(e)
|
||||||
},
|
},
|
||||||
_ => {
|
None => {
|
||||||
writeln!(stderr(), "{}[{: <5}]: {}", self.prefix, record.level(), record.args()).ok();
|
if matches.is_present(Runtime::arg_debugging_name()) {
|
||||||
|
return Ok(LogLevel::Debug)
|
||||||
|
}
|
||||||
|
|
||||||
|
matches
|
||||||
|
.value_of(Runtime::arg_verbosity_name())
|
||||||
|
.map(match_log_level_str)
|
||||||
|
.unwrap_or(Ok(LogLevel::Info))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn translate_destination(raw: &str) -> Result<LogDestination> {
|
||||||
|
use std::fs::OpenOptions;
|
||||||
|
|
||||||
|
match raw {
|
||||||
|
"-" => Ok(LogDestination::Stderr),
|
||||||
|
other => {
|
||||||
|
OpenOptions::new()
|
||||||
|
.append(true)
|
||||||
|
.create(true)
|
||||||
|
.open(other)
|
||||||
|
.map(LogDestination::File)
|
||||||
|
.chain_err(|| EK::IOLogFileOpenError)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
fn translate_destinations(raw: &Vec<Value>) -> Result<Vec<LogDestination>> {
|
||||||
|
raw.iter()
|
||||||
|
.fold(Ok(vec![]), |acc, val| {
|
||||||
|
acc.and_then(|mut v| {
|
||||||
|
let dest = match *val {
|
||||||
|
Value::String(ref s) => try!(translate_destination(s)),
|
||||||
|
_ => return Err(RE::from_kind(EK::ConfigTypeError)),
|
||||||
|
};
|
||||||
|
v.push(dest);
|
||||||
|
Ok(v)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn aggregate_global_destinations(matches: &ArgMatches, config: Option<&Configuration>)
|
||||||
|
-> Result<Vec<LogDestination>>
|
||||||
|
{
|
||||||
|
|
||||||
|
match config {
|
||||||
|
Some(cfg) => match cfg
|
||||||
|
.read("imag.logging.destinations")
|
||||||
|
.chain_err(|| EK::ConfigReadError)
|
||||||
|
{
|
||||||
|
Ok(Some(&Value::Array(ref a))) => translate_destinations(a),
|
||||||
|
Ok(Some(_)) => Err(RE::from_kind(EK::ConfigTypeError)),
|
||||||
|
Ok(None) => Err(RE::from_kind(EK::GlobalDestinationConfigMissing)),
|
||||||
|
Err(e) => Err(e)
|
||||||
},
|
},
|
||||||
|
None => {
|
||||||
|
if let Some(values) = matches.value_of(Runtime::arg_logdest_name()) {
|
||||||
|
// parse logdest specification from commandline
|
||||||
|
|
||||||
|
values.split(",")
|
||||||
|
.fold(Ok(vec![]), move |acc, dest| {
|
||||||
|
acc.and_then(|mut v| {
|
||||||
|
v.push(try!(translate_destination(dest)));
|
||||||
|
Ok(v)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Ok(vec![ LogDestination::default() ])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn aggregate_global_format(
|
||||||
|
read_str: &str,
|
||||||
|
cli_match_name: &str,
|
||||||
|
error_kind_if_missing: EK,
|
||||||
|
matches: &ArgMatches,
|
||||||
|
config: Option<&Configuration>
|
||||||
|
)
|
||||||
|
-> Result<String>
|
||||||
|
{
|
||||||
|
match config {
|
||||||
|
Some(cfg) => match cfg
|
||||||
|
.read(read_str)
|
||||||
|
.chain_err(|| EK::ConfigReadError)
|
||||||
|
{
|
||||||
|
Ok(Some(&Value::String(ref s))) => Ok(s.clone()),
|
||||||
|
Ok(Some(_)) => Err(RE::from_kind(EK::ConfigTypeError)),
|
||||||
|
Ok(None) => Err(RE::from_kind(error_kind_if_missing)),
|
||||||
|
Err(e) => Err(e)
|
||||||
|
},
|
||||||
|
None => match matches.value_of(cli_match_name).map(String::from) {
|
||||||
|
Some(s) => Ok(s),
|
||||||
|
None => Err(RE::from_kind(error_kind_if_missing))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn aggregate_global_format_trace(matches: &ArgMatches, config: Option<&Configuration>)
|
||||||
|
-> Result<String>
|
||||||
|
{
|
||||||
|
aggregate_global_format("imag.logging.format.trace",
|
||||||
|
Runtime::arg_override_trace_logging_format(),
|
||||||
|
EK::ConfigMissingLoggingFormatTrace,
|
||||||
|
matches,
|
||||||
|
config)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn aggregate_global_format_debug(matches: &ArgMatches, config: Option<&Configuration>)
|
||||||
|
-> Result<String>
|
||||||
|
{
|
||||||
|
aggregate_global_format("imag.logging.format.debug",
|
||||||
|
Runtime::arg_override_debug_logging_format(),
|
||||||
|
EK::ConfigMissingLoggingFormatDebug,
|
||||||
|
matches,
|
||||||
|
config)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn aggregate_global_format_info(matches: &ArgMatches, config: Option<&Configuration>)
|
||||||
|
-> Result<String>
|
||||||
|
{
|
||||||
|
aggregate_global_format("imag.logging.format.info",
|
||||||
|
Runtime::arg_override_info_logging_format(),
|
||||||
|
EK::ConfigMissingLoggingFormatInfo,
|
||||||
|
matches,
|
||||||
|
config)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn aggregate_global_format_warn(matches: &ArgMatches, config: Option<&Configuration>)
|
||||||
|
-> Result<String>
|
||||||
|
{
|
||||||
|
aggregate_global_format("imag.logging.format.warn",
|
||||||
|
Runtime::arg_override_warn_logging_format(),
|
||||||
|
EK::ConfigMissingLoggingFormatWarn,
|
||||||
|
matches,
|
||||||
|
config)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn aggregate_global_format_error(matches: &ArgMatches, config: Option<&Configuration>)
|
||||||
|
-> Result<String>
|
||||||
|
{
|
||||||
|
aggregate_global_format("imag.logging.format.error",
|
||||||
|
Runtime::arg_override_error_logging_format(),
|
||||||
|
EK::ConfigMissingLoggingFormatError,
|
||||||
|
matches,
|
||||||
|
config)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn aggregate_module_settings(_matches: &ArgMatches, config: Option<&Configuration>)
|
||||||
|
-> Result<BTreeMap<ModuleName, ModuleSettings>>
|
||||||
|
{
|
||||||
|
match config {
|
||||||
|
Some(cfg) => match cfg
|
||||||
|
.read("imag.logging.modules")
|
||||||
|
.chain_err(|| EK::ConfigReadError)
|
||||||
|
{
|
||||||
|
Ok(Some(&Value::Table(ref t))) => {
|
||||||
|
// translate the module settings from the table `t`
|
||||||
|
let mut settings = BTreeMap::new();
|
||||||
|
|
||||||
|
for (module_name, v) in t {
|
||||||
|
let destinations = try!(match v.read("destinations") {
|
||||||
|
Ok(Some(&Value::Array(ref a))) => translate_destinations(a).map(Some),
|
||||||
|
Ok(None) => Ok(None),
|
||||||
|
Ok(Some(_)) => Err(RE::from_kind(EK::ConfigTypeError)),
|
||||||
|
Err(e) => Err(e).chain_err(|| EK::TomlReadError),
|
||||||
|
});
|
||||||
|
|
||||||
|
let level = try!(match v.read("level") {
|
||||||
|
Ok(Some(&Value::String(ref s))) => match_log_level_str(s).map(Some),
|
||||||
|
Ok(None) => Ok(None),
|
||||||
|
Ok(Some(_)) => Err(RE::from_kind(EK::ConfigTypeError)),
|
||||||
|
Err(e) => Err(e).chain_err(|| EK::TomlReadError),
|
||||||
|
});
|
||||||
|
|
||||||
|
let enabled = try!(match v.read("enabled") {
|
||||||
|
Ok(Some(&Value::Boolean(b))) => Ok(b),
|
||||||
|
Ok(None) => Ok(false),
|
||||||
|
Ok(Some(_)) => Err(RE::from_kind(EK::ConfigTypeError)),
|
||||||
|
Err(e) => Err(e).chain_err(|| EK::TomlReadError),
|
||||||
|
});
|
||||||
|
|
||||||
|
let module_settings = ModuleSettings {
|
||||||
|
enabled: enabled,
|
||||||
|
level: level,
|
||||||
|
destinations: destinations,
|
||||||
|
};
|
||||||
|
|
||||||
|
// We don't care whether there was a value, we override it.
|
||||||
|
let _ = settings.insert(module_name.to_owned(), module_settings);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(settings)
|
||||||
|
},
|
||||||
|
Ok(Some(_)) => Err(RE::from_kind(EK::ConfigTypeError)),
|
||||||
|
Ok(None) => {
|
||||||
|
// No modules configured. This is okay!
|
||||||
|
Ok(BTreeMap::new())
|
||||||
|
},
|
||||||
|
Err(e) => Err(e),
|
||||||
|
},
|
||||||
|
None => {
|
||||||
|
write!(stderr(), "No Configuration.").ok();
|
||||||
|
write!(stderr(), "cannot find module-settings for logging.").ok();
|
||||||
|
write!(stderr(), "Will use global defaults").ok();
|
||||||
|
|
||||||
|
Ok(BTreeMap::new())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mod template_helpers {
|
||||||
|
use handlebars::{Handlebars, HelperDef, JsonRender, RenderError, RenderContext, Helper};
|
||||||
|
use ansi_term::Colour;
|
||||||
|
use ansi_term::Style;
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct ColorizeBlackHelper;
|
||||||
|
|
||||||
|
impl HelperDef for ColorizeBlackHelper {
|
||||||
|
fn call(&self, h: &Helper, hb: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
colorize(Colour::Black, h, hb, rc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct ColorizeBlueHelper;
|
||||||
|
|
||||||
|
impl HelperDef for ColorizeBlueHelper {
|
||||||
|
fn call(&self, h: &Helper, hb: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
colorize(Colour::Blue, h, hb, rc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct ColorizeCyanHelper;
|
||||||
|
|
||||||
|
impl HelperDef for ColorizeCyanHelper {
|
||||||
|
fn call(&self, h: &Helper, hb: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
colorize(Colour::Cyan, h, hb, rc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct ColorizeGreenHelper;
|
||||||
|
|
||||||
|
impl HelperDef for ColorizeGreenHelper {
|
||||||
|
fn call(&self, h: &Helper, hb: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
colorize(Colour::Green, h, hb, rc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct ColorizePurpleHelper;
|
||||||
|
|
||||||
|
impl HelperDef for ColorizePurpleHelper {
|
||||||
|
fn call(&self, h: &Helper, hb: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
colorize(Colour::Purple, h, hb, rc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct ColorizeRedHelper;
|
||||||
|
|
||||||
|
impl HelperDef for ColorizeRedHelper {
|
||||||
|
fn call(&self, h: &Helper, hb: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
colorize(Colour::Red, h, hb, rc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct ColorizeWhiteHelper;
|
||||||
|
|
||||||
|
impl HelperDef for ColorizeWhiteHelper {
|
||||||
|
fn call(&self, h: &Helper, hb: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
colorize(Colour::White, h, hb, rc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct ColorizeYellowHelper;
|
||||||
|
|
||||||
|
impl HelperDef for ColorizeYellowHelper {
|
||||||
|
fn call(&self, h: &Helper, hb: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
colorize(Colour::Yellow, h, hb, rc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn colorize(color: Colour, h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
|
||||||
|
let p = try!(h.param(0).ok_or(RenderError::new("Too few arguments")));
|
||||||
|
|
||||||
|
try!(write!(rc.writer(), "{}", color.paint(p.value().render())));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct UnderlineHelper;
|
||||||
|
|
||||||
|
impl HelperDef for UnderlineHelper {
|
||||||
|
fn call(&self, h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> Result<(),
|
||||||
|
RenderError> {
|
||||||
|
let p = try!(h.param(0).ok_or(RenderError::new("Too few arguments")));
|
||||||
|
let s = Style::new().underline();
|
||||||
|
try!(write!(rc.writer(), "{}", s.paint(p.value().render())));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct BoldHelper;
|
||||||
|
|
||||||
|
impl HelperDef for BoldHelper {
|
||||||
|
fn call(&self, h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> Result<(),
|
||||||
|
RenderError> {
|
||||||
|
let p = try!(h.param(0).ok_or(RenderError::new("Too few arguments")));
|
||||||
|
let s = Style::new().bold();
|
||||||
|
try!(write!(rc.writer(), "{}", s.paint(p.value().render())));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct BlinkHelper;
|
||||||
|
|
||||||
|
impl HelperDef for BlinkHelper {
|
||||||
|
fn call(&self, h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> Result<(),
|
||||||
|
RenderError> {
|
||||||
|
let p = try!(h.param(0).ok_or(RenderError::new("Too few arguments")));
|
||||||
|
let s = Style::new().blink();
|
||||||
|
try!(write!(rc.writer(), "{}", s.paint(p.value().render())));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct StrikethroughHelper;
|
||||||
|
|
||||||
|
impl HelperDef for StrikethroughHelper {
|
||||||
|
fn call(&self, h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> Result<(),
|
||||||
|
RenderError> {
|
||||||
|
let p = try!(h.param(0).ok_or(RenderError::new("Too few arguments")));
|
||||||
|
let s = Style::new().strikethrough();
|
||||||
|
try!(write!(rc.writer(), "{}", s.paint(p.value().render())));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
|
@@ -20,14 +20,12 @@
 use std::path::PathBuf;
 use std::process::Command;
 use std::env;
-use std::io::stderr;
-use std::io::Write;
+use std::process::exit;

 pub use clap::App;

 use clap::{Arg, ArgMatches};
 use log;
-use log::LogLevelFilter;

 use configuration::{Configuration, InternalConfiguration};
 use error::RuntimeError;
@@ -35,6 +33,7 @@ use error::RuntimeErrorKind;
 use error::ResultExt;
 use logger::ImagLogger;

+use libimagerror::trace::*;
 use libimagstore::store::Store;
 use libimagstore::file_abstraction::InMemoryFileAbstraction;
 use spec::CliSpec;
@@ -77,8 +76,8 @@ impl<'a> Runtime<'a> {
             Err(e) => if !is_match!(e.kind(), &ConfigErrorKind::NoConfigFileFound) {
                 return Err(e).chain_err(|| RuntimeErrorKind::Instantiate);
             } else {
-                warn!("No config file found.");
-                warn!("Continuing without configuration file");
+                println!("No config file found.");
+                println!("Continuing without configuration file");
                 None
             },

|
||||||
where C: Clone + CliSpec<'a> + InternalConfiguration
|
where C: Clone + CliSpec<'a> + InternalConfiguration
|
||||||
{
|
{
|
||||||
use std::io::stdout;
|
use std::io::stdout;
|
||||||
|
|
||||||
use clap::Shell;
|
use clap::Shell;
|
||||||
|
|
||||||
let is_debugging = matches.is_present(Runtime::arg_debugging_name());
|
|
||||||
|
|
||||||
if cli_app.enable_logging() {
|
if cli_app.enable_logging() {
|
||||||
let is_verbose = matches.is_present(Runtime::arg_verbosity_name());
|
Runtime::init_logger(&matches, config.as_ref())
|
||||||
let colored = !matches.is_present(Runtime::arg_no_color_output_name());
|
|
||||||
|
|
||||||
Runtime::init_logger(is_debugging, is_verbose, colored);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
match matches.value_of(Runtime::arg_generate_compl()) {
|
match matches.value_of(Runtime::arg_generate_compl()) {
|
||||||
|
@ -150,9 +143,9 @@ impl<'a> Runtime<'a> {
|
||||||
None => None,
|
None => None,
|
||||||
};
|
};
|
||||||
|
|
||||||
if is_debugging {
|
if matches.is_present(Runtime::arg_debugging_name()) {
|
||||||
write!(stderr(), "Config: {:?}\n", config).ok();
|
debug!("Config: {:?}\n", config);
|
||||||
write!(stderr(), "Store-config: {:?}\n", store_config).ok();
|
debug!("Store-config: {:?}\n", store_config);
|
||||||
}
|
}
|
||||||
|
|
||||||
let store_result = if cli_app.use_inmemory_fs() {
|
let store_result = if cli_app.use_inmemory_fs() {
|
||||||
|
@ -199,9 +192,11 @@ impl<'a> Runtime<'a> {
|
||||||
.arg(Arg::with_name(Runtime::arg_verbosity_name())
|
.arg(Arg::with_name(Runtime::arg_verbosity_name())
|
||||||
.short("v")
|
.short("v")
|
||||||
.long("verbose")
|
.long("verbose")
|
||||||
.help("Enables verbosity")
|
.help("Enables verbosity, can be used to set log level to one of 'trace', 'debug', 'info', 'warn' or 'error'")
|
||||||
.required(false)
|
.required(false)
|
||||||
.takes_value(false))
|
.takes_value(true)
|
||||||
|
.possible_values(&["trace", "debug", "info", "warn", "error"])
|
||||||
|
.value_name("LOGLEVEL"))
|
||||||
|
|
||||||
.arg(Arg::with_name(Runtime::arg_debugging_name())
|
.arg(Arg::with_name(Runtime::arg_debugging_name())
|
||||||
.long("debug")
|
.long("debug")
|
||||||
|
@ -253,6 +248,61 @@ impl<'a> Runtime<'a> {
|
||||||
.value_name("SHELL")
|
.value_name("SHELL")
|
||||||
.possible_values(&["bash", "fish", "zsh"]))
|
.possible_values(&["bash", "fish", "zsh"]))
|
||||||
|
|
||||||
|
.arg(Arg::with_name(Runtime::arg_logdest_name())
|
||||||
|
.long(Runtime::arg_logdest_name())
|
||||||
|
.help("Override the logging destinations from the configuration: values can be seperated by ',', a value of '-' marks the stderr output, everything else is expected to be a path")
|
||||||
|
.required(false)
|
||||||
|
.takes_value(true)
|
||||||
|
.value_name("LOGDESTS"))
|
||||||
|
|
||||||
|
.arg(Arg::with_name(Runtime::arg_override_module_logging_setting_name())
|
||||||
|
.long(Runtime::arg_override_module_logging_setting_name())
|
||||||
|
.help("Override a module logging setting for one module. Format: <modulename>=<setting>=<value>, whereas <setting> is either 'enabled', 'level' or 'destinations' - This commandline argument is CURRENTLY NOT IMPLEMENTED")
|
||||||
|
.multiple(true)
|
||||||
|
.required(false)
|
||||||
|
.takes_value(true)
|
||||||
|
.value_name("SPEC"))
|
||||||
|
|
||||||
|
.arg(Arg::with_name(Runtime::arg_override_trace_logging_format())
|
||||||
|
.long(Runtime::arg_override_trace_logging_format())
|
||||||
|
.help("Override the logging format for the trace logging")
|
||||||
|
.multiple(false)
|
||||||
|
.required(false)
|
||||||
|
.takes_value(true)
|
||||||
|
.value_name("FMT"))
|
||||||
|
|
||||||
|
.arg(Arg::with_name(Runtime::arg_override_debug_logging_format())
|
||||||
|
.long(Runtime::arg_override_debug_logging_format())
|
||||||
|
.help("Override the logging format for the debug logging")
|
||||||
|
.multiple(false)
|
||||||
|
.required(false)
|
||||||
|
.takes_value(true)
|
||||||
|
.value_name("FMT"))
|
||||||
|
|
||||||
|
.arg(Arg::with_name(Runtime::arg_override_info_logging_format())
|
||||||
|
.long(Runtime::arg_override_info_logging_format())
|
||||||
|
.help("Override the logging format for the info logging")
|
||||||
|
.multiple(false)
|
||||||
|
.required(false)
|
||||||
|
.takes_value(true)
|
||||||
|
.value_name("FMT"))
|
||||||
|
|
||||||
|
.arg(Arg::with_name(Runtime::arg_override_warn_logging_format())
|
||||||
|
.long(Runtime::arg_override_warn_logging_format())
|
||||||
|
.help("Override the logging format for the warn logging")
|
||||||
|
.multiple(false)
|
||||||
|
.required(false)
|
||||||
|
.takes_value(true)
|
||||||
|
.value_name("FMT"))
|
||||||
|
|
||||||
|
.arg(Arg::with_name(Runtime::arg_override_error_logging_format())
|
||||||
|
.long(Runtime::arg_override_error_logging_format())
|
||||||
|
.help("Override the logging format for the error logging")
|
||||||
|
.multiple(false)
|
||||||
|
.required(false)
|
||||||
|
.takes_value(true)
|
||||||
|
.value_name("FMT"))
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the argument names of the Runtime which are available
|
/// Get the argument names of the Runtime which are available
|
||||||
|
@ -314,26 +364,73 @@ impl<'a> Runtime<'a> {
|
||||||
"generate-completion"
|
"generate-completion"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Extract the Store object from the Runtime object, destroying the Runtime object
|
||||||
|
///
|
||||||
|
/// # Warning
|
||||||
|
///
|
||||||
|
/// This function is for testing _only_! It can be used to re-build a Runtime object with an
|
||||||
|
/// alternative Store.
|
||||||
|
#[cfg(feature = "testing")]
|
||||||
|
pub fn extract_store(self) -> Store {
|
||||||
|
self.store
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Re-set the Store object within
|
||||||
|
///
|
||||||
|
/// # Warning
|
||||||
|
///
|
||||||
|
/// This function is for testing _only_! It can be used to re-build a Runtime object with an
|
||||||
|
/// alternative Store.
|
||||||
|
#[cfg(feature = "testing")]
|
||||||
|
pub fn with_store(mut self, s: Store) -> Self {
|
||||||
|
self.store = s;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the argument name for the logging destination
|
||||||
|
pub fn arg_logdest_name() -> &'static str {
|
||||||
|
"logging-destinations"
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn arg_override_module_logging_setting_name() -> &'static str {
|
||||||
|
"override-module-log-setting"
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn arg_override_trace_logging_format() -> &'static str {
|
||||||
|
"override-logging-format-trace"
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn arg_override_debug_logging_format() -> &'static str {
|
||||||
|
"override-logging-format-debug"
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn arg_override_info_logging_format() -> &'static str {
|
||||||
|
"override-logging-format-info"
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn arg_override_warn_logging_format() -> &'static str {
|
||||||
|
"override-logging-format-warn"
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn arg_override_error_logging_format() -> &'static str {
|
||||||
|
"override-logging-format-error"
|
||||||
|
}
|
||||||
|
|
||||||
     /// Initialize the internal logger
-    fn init_logger(is_debugging: bool, is_verbose: bool, colored: bool) {
+    fn init_logger(matches: &ArgMatches, config: Option<&Configuration>) {
         use std::env::var as env_var;
         use env_logger;

         if env_var("IMAG_LOG_ENV").is_ok() {
             env_logger::init().unwrap();
         } else {
-            let lvl = if is_debugging {
-                LogLevelFilter::Debug
-            } else if is_verbose {
-                LogLevelFilter::Info
-            } else {
-                LogLevelFilter::Warn
-            };
-
             log::set_logger(|max_log_lvl| {
-                max_log_lvl.set(lvl);
-                debug!("Init logger with {}", lvl);
-                Box::new(ImagLogger::new(lvl.to_log_level().unwrap()).with_color(colored))
+                let logger = ImagLogger::new(matches, config)
+                    .map_err_trace()
+                    .unwrap_or_else(|_| exit(1));
+
+                max_log_lvl.set(logger.global_loglevel().to_log_level_filter());
+                debug!("Init logger with {}", logger.global_loglevel());
+                Box::new(logger)
             })
             .map_err(|e| panic!("Could not setup logger: {:?}", e))
             .ok();

@ -19,85 +19,84 @@
|
||||||
|
|
||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
|
|
||||||
|
use libimagstore::store::FileLockEntry;
|
||||||
use libimagstore::store::Store;
|
use libimagstore::store::Store;
|
||||||
use libimagstore::storeid::IntoStoreId;
|
|
||||||
use libimagerror::trace::trace_error;
|
use libimagerror::trace::trace_error;
|
||||||
|
|
||||||
use chrono::offset::Local;
|
use chrono::offset::Local;
|
||||||
use chrono::Datelike;
|
use chrono::Datelike;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use chrono::naive::NaiveDateTime;
|
use chrono::naive::NaiveDateTime;
|
||||||
|
use chrono::Timelike;
|
||||||
|
|
||||||
use entry::Entry;
|
use entry::DiaryEntry;
|
||||||
use diaryid::DiaryId;
|
use diaryid::DiaryId;
|
||||||
use error::DiaryError as DE;
|
|
||||||
use error::DiaryErrorKind as DEK;
|
use error::DiaryErrorKind as DEK;
|
||||||
use error::ResultExt;
|
use error::ResultExt;
|
||||||
use error::Result;
|
use error::Result;
|
||||||
use iter::DiaryEntryIterator;
|
use iter::DiaryEntryIterator;
|
||||||
use is_in_diary::IsInDiary;
|
use iter::DiaryNameIterator;
|
||||||
|
|
||||||
#[derive(Debug)]
|
pub trait Diary {
|
||||||
pub struct Diary<'a> {
|
|
||||||
store: &'a Store,
|
|
||||||
name: &'a str,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Diary<'a> {
|
|
||||||
|
|
||||||
pub fn open(store: &'a Store, name: &'a str) -> Diary<'a> {
|
|
||||||
Diary {
|
|
||||||
store: store,
|
|
||||||
name: name,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// create or get a new entry for today
|
// create or get a new entry for today
|
||||||
pub fn new_entry_today(&self) -> Result<Entry> {
|
fn new_entry_today(&self, diary_name: &str) -> Result<FileLockEntry>;
|
||||||
|
|
||||||
|
// create or get a new entry for now
|
||||||
|
fn new_entry_now(&self, diary_name: &str) -> Result<FileLockEntry>;
|
||||||
|
|
||||||
|
// Get an iterator for iterating over all entries of a Diary
|
||||||
|
fn entries(&self, diary_name: &str) -> Result<DiaryEntryIterator>;
|
||||||
|
|
||||||
|
fn get_youngest_entry_id(&self, diary_name: &str) -> Option<Result<DiaryId>>;
|
||||||
|
|
||||||
|
/// Get all diary names
|
||||||
|
fn diary_names(&self) -> Result<DiaryNameIterator>;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Diary for Store {
|
||||||
|
|
||||||
|
// create or get a new entry for today
|
||||||
|
fn new_entry_today(&self, diary_name: &str) -> Result<FileLockEntry> {
|
||||||
let dt = Local::now();
|
let dt = Local::now();
|
||||||
let ndt = dt.naive_local();
|
let ndt = dt.naive_local();
|
||||||
let id = DiaryId::new(String::from(self.name), ndt.year(), ndt.month(), ndt.day(), 0, 0);
|
let id = DiaryId::new(String::from(diary_name), ndt.year(), ndt.month(), ndt.day(), 0, 0);
|
||||||
self.new_entry_by_id(id)
|
|
||||||
|
self.retrieve(id).chain_err(|| DEK::StoreReadError)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new_entry_by_id(&self, id: DiaryId) -> Result<Entry> {
|
fn new_entry_now(&self, diary_name: &str) -> Result<FileLockEntry> {
|
||||||
self.retrieve(id.with_diary_name(String::from(self.name)))
|
let dt = Local::now();
|
||||||
}
|
let ndt = dt.naive_local();
|
||||||
|
let id = DiaryId::new(String::from(diary_name),
|
||||||
|
ndt.year(),
|
||||||
|
ndt.month(),
|
||||||
|
ndt.day(),
|
||||||
|
ndt.hour(),
|
||||||
|
ndt.minute());
|
||||||
|
|
||||||
pub fn retrieve(&self, id: DiaryId) -> Result<Entry> {
|
self.retrieve(id).chain_err(|| DEK::StoreReadError)
|
||||||
id.into_storeid()
|
|
||||||
.and_then(|id| self.store.retrieve(id))
|
|
||||||
.map(|fle| Entry::new(fle))
|
|
||||||
.chain_err(|| DEK::StoreWriteError)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get an iterator for iterating over all entries
|
// Get an iterator for iterating over all entries
|
||||||
pub fn entries(&self) -> Result<DiaryEntryIterator<'a>> {
|
fn entries(&self, diary_name: &str) -> Result<DiaryEntryIterator> {
|
||||||
self.store
|
self.retrieve_for_module("diary")
|
||||||
.retrieve_for_module("diary")
|
.map(|iter| DiaryEntryIterator::new(self, String::from(diary_name), iter))
|
||||||
.map(|iter| DiaryEntryIterator::new(self.name, self.store, iter))
|
|
||||||
.chain_err(|| DEK::StoreReadError)
|
.chain_err(|| DEK::StoreReadError)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn delete_entry(&self, entry: Entry) -> Result<()> {
|
fn get_youngest_entry_id(&self, diary_name: &str) -> Option<Result<DiaryId>> {
|
||||||
if !entry.is_in_diary(self.name) {
|
match Diary::entries(self, diary_name) {
|
||||||
return Err(DE::from_kind(DEK::EntryNotInDiary));
|
|
||||||
}
|
|
||||||
let id = entry.get_location().clone();
|
|
||||||
drop(entry);
|
|
||||||
|
|
||||||
self.store.delete(id).chain_err(|| DEK::StoreWriteError)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_youngest_entry(&self) -> Option<Result<Entry>> {
|
|
||||||
match self.entries() {
|
|
||||||
Err(e) => Some(Err(e)),
|
Err(e) => Some(Err(e)),
|
||||||
Ok(entries) => {
|
Ok(entries) => {
|
||||||
entries.sorted_by(|a, b| {
|
entries
|
||||||
|
.map(|e| e.and_then(|e| e.diary_id()))
|
||||||
|
.sorted_by(|a, b| {
|
||||||
match (a, b) {
|
match (a, b) {
|
||||||
(&Ok(ref a), &Ok(ref b)) => {
|
(&Ok(ref a), &Ok(ref b)) => {
|
||||||
let a : NaiveDateTime = a.diary_id().into();
|
let a : NaiveDateTime = a.clone().into();
|
||||||
let b : NaiveDateTime = b.diary_id().into();
|
let b : NaiveDateTime = b.clone().into();
|
||||||
|
|
||||||
a.cmp(&b)
|
a.cmp(&b)
|
||||||
},
|
},
|
||||||
|
@ -116,13 +115,20 @@ impl<'a> Diary<'a> {
|
||||||
Ordering::Equal
|
Ordering::Equal
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}).into_iter().next()
|
})
|
||||||
|
.into_iter()
|
||||||
|
//.map(|sidres| sidres.map(|sid| DiaryId::from_storeid(&sid)))
|
||||||
|
.next()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn name(&self) -> &'a str {
|
/// Get all diary names
|
||||||
&self.name
|
fn diary_names(&self) -> Result<DiaryNameIterator> {
|
||||||
|
self.retrieve_for_module("diary")
|
||||||
|
.chain_err(|| DEK::StoreReadError)
|
||||||
|
.map(DiaryNameIterator::new)
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -17,74 +17,24 @@
|
||||||
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||||
//
|
//
|
||||||
|
|
||||||
use std::ops::Deref;
|
use libimagstore::store::Entry;
|
||||||
use std::ops::DerefMut;
|
|
||||||
|
|
||||||
use libimagstore::store::FileLockEntry;
|
|
||||||
use libimagentryedit::edit::Edit;
|
|
||||||
use libimagentryedit::error::Result as EditResult;
|
|
||||||
use libimagrt::runtime::Runtime;
|
|
||||||
|
|
||||||
use diaryid::DiaryId;
|
use diaryid::DiaryId;
|
||||||
use diaryid::FromStoreId;
|
use diaryid::FromStoreId;
|
||||||
|
use error::Result;
|
||||||
|
|
||||||
#[derive(Debug)]
|
pub trait DiaryEntry {
|
||||||
pub struct Entry<'a>(FileLockEntry<'a>);
|
fn diary_id(&self) -> Result<DiaryId>;
|
||||||
|
|
||||||
impl<'a> Deref for Entry<'a> {
|
|
||||||
type Target = FileLockEntry<'a>;
|
|
||||||
|
|
||||||
fn deref(&self) -> &FileLockEntry<'a> {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> DerefMut for Entry<'a> {
|
impl DiaryEntry for Entry {
|
||||||
|
|
||||||
fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {
|
|
||||||
&mut self.0
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Entry<'a> {
|
|
||||||
|
|
||||||
pub fn new(fle: FileLockEntry<'a>) -> Entry<'a> {
|
|
||||||
Entry(fle)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the diary id for this entry.
|
/// Get the diary id for this entry.
|
||||||
///
|
///
|
||||||
/// TODO: calls Option::unwrap() as it assumes that an existing Entry has an ID that is parsable
|
/// TODO: calls Option::unwrap() as it assumes that an existing Entry has an ID that is parsable
|
||||||
pub fn diary_id(&self) -> DiaryId {
|
fn diary_id(&self) -> Result<DiaryId> {
|
||||||
DiaryId::from_storeid(&self.0.get_location().clone()).unwrap()
|
DiaryId::from_storeid(&self.get_location().clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Into<FileLockEntry<'a>> for Entry<'a> {
|
|
||||||
|
|
||||||
fn into(self) -> FileLockEntry<'a> {
|
|
||||||
self.0
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> From<FileLockEntry<'a>> for Entry<'a> {
|
|
||||||
|
|
||||||
fn from(fle: FileLockEntry<'a>) -> Entry<'a> {
|
|
||||||
Entry::new(fle)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Edit for Entry<'a> {
|
|
||||||
|
|
||||||
fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {
|
|
||||||
self.0.edit_content(rt)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@@ -73,6 +73,11 @@ error_chain! {
             display("Error while parsing ID")
         }
+
+        DiaryNameFindingError {
+            description("Error while finding a diary name")
+            display("Error while finding a diary name")
+        }
     }
 }

@@ -24,6 +24,8 @@ pub trait IsInDiary {

     fn is_in_diary(&self, name: &str) -> bool;

+    fn is_a_diary_entry(&self) -> bool;
+
 }

 impl IsInDiary for Entry {
@@ -32,6 +34,10 @@ impl IsInDiary for Entry {
         self.get_location().clone().is_in_diary(name)
     }

+    fn is_a_diary_entry(&self) -> bool {
+        self.get_location().clone().is_a_diary_entry()
+    }
+
 }

 impl IsInDiary for StoreId {
@@ -40,5 +46,9 @@ impl IsInDiary for StoreId {
         self.local().starts_with(format!("diary/{}", name))
     }

+    fn is_a_diary_entry(&self) -> bool {
+        self.local().starts_with("diary")
+    }
+
 }

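A small usage sketch for the two trait methods, assuming an Entry already obtained from the store; the diary name "work" and the helper function are illustrative:

    // crate-internal path as used elsewhere in this commit
    use libimagstore::store::Entry;
    use is_in_diary::IsInDiary;

    fn describe(entry: &Entry) {
        if entry.is_a_diary_entry() {
            // is_in_diary() narrows the check down to one specific diary
            println!("diary entry (in 'work'? {})", entry.is_in_diary("work"));
        } else {
            println!("not a diary entry");
        }
    }
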
@ -21,21 +21,22 @@ use std::fmt::{Debug, Formatter, Error as FmtError};
|
||||||
use std::result::Result as RResult;
|
use std::result::Result as RResult;
|
||||||
|
|
||||||
use libimagstore::store::Store;
|
use libimagstore::store::Store;
|
||||||
|
use libimagstore::store::FileLockEntry;
|
||||||
use libimagstore::storeid::StoreIdIterator;
|
use libimagstore::storeid::StoreIdIterator;
|
||||||
|
use libimagerror::trace::trace_error;
|
||||||
|
|
||||||
use diaryid::DiaryId;
|
use diaryid::DiaryId;
|
||||||
use diaryid::FromStoreId;
|
use diaryid::FromStoreId;
|
||||||
use is_in_diary::IsInDiary;
|
use is_in_diary::IsInDiary;
|
||||||
use entry::Entry as DiaryEntry;
|
|
||||||
use error::DiaryErrorKind as DEK;
|
use error::DiaryErrorKind as DEK;
|
||||||
|
use error::DiaryError as DE;
|
||||||
use error::ResultExt;
|
use error::ResultExt;
|
||||||
use error::Result;
|
use error::Result;
|
||||||
use libimagerror::trace::trace_error;
|
|
||||||
|
|
||||||
/// A iterator for iterating over diary entries
|
/// A iterator for iterating over diary entries
|
||||||
pub struct DiaryEntryIterator<'a> {
|
pub struct DiaryEntryIterator<'a> {
|
||||||
store: &'a Store,
|
store: &'a Store,
|
||||||
name: &'a str,
|
name: String,
|
||||||
iter: StoreIdIterator,
|
iter: StoreIdIterator,
|
||||||
|
|
||||||
year: Option<i32>,
|
year: Option<i32>,
|
||||||
|
@ -54,7 +55,7 @@ impl<'a> Debug for DiaryEntryIterator<'a> {
|
||||||
|
|
||||||
impl<'a> DiaryEntryIterator<'a> {
|
impl<'a> DiaryEntryIterator<'a> {
|
||||||
|
|
||||||
pub fn new(diaryname: &'a str, store: &'a Store, iter: StoreIdIterator) -> DiaryEntryIterator<'a> {
|
pub fn new(store: &'a Store, diaryname: String, iter: StoreIdIterator) -> DiaryEntryIterator<'a> {
|
||||||
DiaryEntryIterator {
|
DiaryEntryIterator {
|
||||||
store: store,
|
store: store,
|
||||||
name: diaryname,
|
name: diaryname,
|
||||||
|
@ -87,9 +88,9 @@ impl<'a> DiaryEntryIterator<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Iterator for DiaryEntryIterator<'a> {
|
impl<'a> Iterator for DiaryEntryIterator<'a> {
|
||||||
type Item = Result<DiaryEntry<'a>>;
|
type Item = Result<FileLockEntry<'a>>;
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Result<DiaryEntry<'a>>> {
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
loop {
|
loop {
|
||||||
let next = match self.iter.next() {
|
let next = match self.iter.next() {
|
||||||
Some(s) => s,
|
Some(s) => s,
|
||||||
|
@ -97,7 +98,7 @@ impl<'a> Iterator for DiaryEntryIterator<'a> {
|
||||||
};
|
};
|
||||||
debug!("Next element: {:?}", next);
|
debug!("Next element: {:?}", next);
|
||||||
|
|
||||||
if next.is_in_diary(self.name) {
|
if next.is_in_diary(&self.name) {
|
||||||
debug!("Seems to be in diary: {:?}", next);
|
debug!("Seems to be in diary: {:?}", next);
|
||||||
let id = match DiaryId::from_storeid(&next) {
|
let id = match DiaryId::from_storeid(&next) {
|
||||||
Ok(i) => i,
|
Ok(i) => i,
|
||||||
|
@ -118,9 +119,7 @@ impl<'a> Iterator for DiaryEntryIterator<'a> {
|
||||||
return Some(self
|
return Some(self
|
||||||
.store
|
.store
|
||||||
.retrieve(next)
|
.retrieve(next)
|
||||||
.map(|fle| DiaryEntry::new(fle))
|
.chain_err(|| DEK::StoreReadError));
|
||||||
.chain_err(|| DEK::StoreReadError)
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
debug!("Not in the requested diary ({}): {:?}", self.name, next);
|
debug!("Not in the requested diary ({}): {:?}", self.name, next);
|
||||||
|
@ -130,3 +129,37 @@ impl<'a> Iterator for DiaryEntryIterator<'a> {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Get diary names.
|
||||||
|
///
|
||||||
|
/// # Warning
|
||||||
|
///
|
||||||
|
/// Does _not_ run a `unique` on the iterator!
|
||||||
|
pub struct DiaryNameIterator(StoreIdIterator);
|
||||||
|
|
||||||
|
impl DiaryNameIterator {
|
||||||
|
pub fn new(s: StoreIdIterator) -> DiaryNameIterator {
|
||||||
|
DiaryNameIterator(s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Iterator for DiaryNameIterator {
|
||||||
|
type Item = Result<String>;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
self.0
|
||||||
|
.next()
|
||||||
|
.map(|s| {
|
||||||
|
s.to_str()
|
||||||
|
.chain_err(|| DEK::DiaryNameFindingError)
|
||||||
|
.and_then(|s| {
|
||||||
|
s.split("diary/")
|
||||||
|
.nth(1)
|
||||||
|
.and_then(|n| n.split("/").nth(0).map(String::from))
|
||||||
|
.ok_or(DE::from_kind(DEK::DiaryNameFindingError))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
|
@@ -19,13 +19,15 @@

 //! A diary viewer built on libimagentryview.

-use entry::Entry;
+use entry::DiaryEntry;
 use error::DiaryErrorKind as DEK;
 use error::ResultExt;
 use error::Result;

+use libimagstore::store::FileLockEntry;
 use libimagentryview::viewer::Viewer;
 use libimagentryview::builtin::plain::PlainViewer;
+use libimagerror::trace::trace_error;

 /// This viewer does _not_ implement libimagentryview::viewer::Viewer because we need to be able to
 /// call some diary-type specific functions on the entries passed to this.
@@ -46,10 +48,12 @@ impl DiaryViewer {

     /// View all entries from the iterator, or stop immediately if an error occurs, returning that
     /// error.
-    pub fn view_entries<'a, I: Iterator<Item = Entry<'a>>>(&self, entries: I) -> Result<()> {
+    pub fn view_entries<'a, I: Iterator<Item = FileLockEntry<'a>>>(&self, entries: I) -> Result<()> {
         for entry in entries {
-            let id = entry.diary_id();
-            println!("{} :\n", id);
+            match entry.diary_id() {
+                Ok(id) => println!("{} :\n", id),
+                Err(e) => trace_error(&e),
+            }
             let _ = try!(self.0
                 .view_entry(&entry)
                 .chain_err(|| DEK::ViewError)

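Since Diary::entries() now yields Result<FileLockEntry>, a caller feeding this viewer has to unwrap or drop failed entries first. A hedged sketch; the module paths, the diary name and the pre-built viewer argument are assumptions, not part of this commit:

    use libimagstore::store::Store;
    use libimagerror::trace::trace_error;

    use diary::Diary;
    use viewer::DiaryViewer;

    fn view_all(store: &Store, viewer: &DiaryViewer) {
        match store.entries("work") {
            Ok(iter) => {
                // Drop entries that failed to load, tracing the error instead.
                let ok_entries = iter.filter_map(|r| r.map_err(|e| trace_error(&e)).ok());
                if let Err(e) = viewer.view_entries(ok_entries) {
                    trace_error(&e);
                }
            },
            Err(e) => trace_error(&e),
        }
    }
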
@ -27,16 +27,16 @@
|
||||||
use mail::Mail;
|
use mail::Mail;
|
||||||
use error::Result;
|
use error::Result;
|
||||||
|
|
||||||
use libimagentryref::reference::Ref;
|
use libimagstore::store::FileLockEntry;
|
||||||
|
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
|
|
||||||
pub struct MailIter<'a, I: 'a + Iterator<Item = Ref<'a>>> {
|
pub struct MailIter<'a, I: Iterator<Item = FileLockEntry<'a>>> {
|
||||||
_marker: PhantomData<&'a I>,
|
_marker: PhantomData<I>,
|
||||||
i: I,
|
i: I,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, I: Iterator<Item = Ref<'a>>> MailIter<'a, I> {
|
impl<'a, I: Iterator<Item = FileLockEntry<'a>>> MailIter<'a, I> {
|
||||||
|
|
||||||
pub fn new(i: I) -> MailIter<'a, I> {
|
pub fn new(i: I) -> MailIter<'a, I> {
|
||||||
MailIter { _marker: PhantomData, i: i }
|
MailIter { _marker: PhantomData, i: i }
|
||||||
|
@ -44,12 +44,11 @@ impl<'a, I: Iterator<Item = Ref<'a>>> MailIter<'a, I> {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, I: Iterator<Item = Ref<'a>>> Iterator for MailIter<'a, I> {
|
impl<'a, I: Iterator<Item = FileLockEntry<'a>>> Iterator for MailIter<'a, I> {
|
||||||
|
|
||||||
type Item = Result<Mail<'a>>;
|
type Item = Result<Mail<'a>>;
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Result<Mail<'a>>> {
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
self.i.next().map(Mail::from_ref)
|
self.i.next().map(Mail::from_fle)
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -23,8 +23,10 @@ use std::fs::File;
|
||||||
use std::io::Read;
|
use std::io::Read;
|
||||||
|
|
||||||
use libimagstore::store::Store;
|
use libimagstore::store::Store;
|
||||||
|
use libimagstore::store::FileLockEntry;
|
||||||
use libimagentryref::reference::Ref;
|
use libimagentryref::reference::Ref;
|
||||||
use libimagentryref::flags::RefFlags;
|
use libimagentryref::flags::RefFlags;
|
||||||
|
use libimagentryref::refstore::RefStore;
|
||||||
|
|
||||||
use email::MimeMessage;
|
use email::MimeMessage;
|
||||||
use email::results::ParsingResult as EmailParsingResult;
|
use email::results::ParsingResult as EmailParsingResult;
|
||||||
|
@ -47,7 +49,7 @@ impl From<String> for Buffer {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Mail<'a>(Ref<'a>, Buffer);
|
pub struct Mail<'a>(FileLockEntry<'a>, Buffer);
|
||||||
|
|
||||||
impl<'a> Mail<'a> {
|
impl<'a> Mail<'a> {
|
||||||
|
|
||||||
|
@ -58,7 +60,7 @@ impl<'a> Mail<'a> {
|
||||||
let f = RefFlags::default().with_content_hashing(true).with_permission_tracking(false);
|
let f = RefFlags::default().with_content_hashing(true).with_permission_tracking(false);
|
||||||
let p = PathBuf::from(p.as_ref());
|
let p = PathBuf::from(p.as_ref());
|
||||||
|
|
||||||
Ref::create_with_hasher(store, p, f, h)
|
store.create_with_hasher(p, f, h)
|
||||||
.chain_err(|| MEK::RefCreationError)
|
.chain_err(|| MEK::RefCreationError)
|
||||||
.and_then(|reference| {
|
.and_then(|reference| {
|
||||||
debug!("Build reference file: {:?}", reference);
|
debug!("Build reference file: {:?}", reference);
|
||||||
|
@ -79,20 +81,19 @@ impl<'a> Mail<'a> {
|
||||||
/// Opens a mail by the passed hash
|
/// Opens a mail by the passed hash
|
||||||
pub fn open<S: AsRef<str>>(store: &Store, hash: S) -> Result<Option<Mail>> {
|
pub fn open<S: AsRef<str>>(store: &Store, hash: S) -> Result<Option<Mail>> {
|
||||||
debug!("Opening Mail by Hash");
|
debug!("Opening Mail by Hash");
|
||||||
Ref::get_by_hash(store, String::from(hash.as_ref()))
|
store.get_by_hash(String::from(hash.as_ref()))
|
||||||
.chain_err(|| MEK::FetchByHashError)
|
.chain_err(|| MEK::FetchByHashError)
|
||||||
.chain_err(|| MEK::FetchError)
|
.chain_err(|| MEK::FetchError)
|
||||||
.and_then(|o| match o {
|
.and_then(|o| match o {
|
||||||
Some(r) => Mail::from_ref(r).map(Some),
|
Some(r) => Mail::from_fle(r).map(Some),
|
||||||
None => Ok(None),
|
None => Ok(None),
|
||||||
})
|
})
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implement me as TryFrom as soon as it is stable
|
/// Implement me as TryFrom as soon as it is stable
|
||||||
pub fn from_ref(r: Ref<'a>) -> Result<Mail> {
|
pub fn from_fle(fle: FileLockEntry<'a>) -> Result<Mail<'a>> {
|
||||||
debug!("Building Mail object from Ref: {:?}", r);
|
fle.fs_file()
|
||||||
r.fs_file()
|
|
||||||
.chain_err(|| MEK::RefHandlingError)
|
.chain_err(|| MEK::RefHandlingError)
|
||||||
.and_then(|path| File::open(path).chain_err(|| MEK::IOError))
|
.and_then(|path| File::open(path).chain_err(|| MEK::IOError))
|
||||||
.and_then(|mut file| {
|
.and_then(|mut file| {
|
||||||
|
@ -102,7 +103,7 @@ impl<'a> Mail<'a> {
|
||||||
.chain_err(|| MEK::IOError)
|
.chain_err(|| MEK::IOError)
|
||||||
})
|
})
|
||||||
.map(Buffer::from)
|
.map(Buffer::from)
|
||||||
.map(|buffer| Mail(r, buffer))
|
.map(|buffer| Mail(fle, buffer))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_field(&self, field: &str) -> Result<Option<String>> {
|
pub fn get_field(&self, field: &str) -> Result<Option<String>> {
|
||||||
|
|
|
@@ -48,6 +48,20 @@ error_chain! {
             display("Encountered non-UTF8 characters while reading input")
         }
+
+        HeaderFieldMissing {
+            description("Header field missing")
+            display("Header field missing")
+        }
+
+        HeaderTypeError {
+            description("Header field type error")
+            display("Header field type error")
+        }
+
+        UuidParserError {
+            description("Uuid parser error")
+            display("Uuid parser error")
+        }
     }
 }

@@ -50,4 +50,5 @@ module_entry_path_mod!("todo");

 pub mod error;
 pub mod task;
+pub mod taskstore;

@@ -17,272 +17,31 @@
 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 //
-use std::collections::BTreeMap;
-use std::ops::{Deref, DerefMut};
-use std::io::BufRead;
-use std::result::Result as RResult;
-
-use task_hookrs::task::Task as TTask;
-use task_hookrs::import::{import_task, import_tasks};
-
-use libimagstore::store::{FileLockEntry, Store};
-use libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};
-use module_path::ModuleEntryPath;
-
-use error::{TodoErrorKind as TEK, ResultExt};
 use error::TodoError as TE;
+use error::TodoErrorKind as TEK;
+use error::ResultExt;
 use error::Result;
+
+use libimagstore::store::Entry;
 
 use uuid::Uuid;
 use toml::Value;
+use toml_query::read::TomlValueReadExt;

(removed in this hunk: the whole FileLockEntry-based `Task<'a>` wrapper, that is `Task::new`, `import`, `get_from_import`, `get_from_string`, `get_from_uuid`, `retrieve_from_import`, `retrieve_from_string`, `delete_by_imports`, `delete_by_uuid`, `all_as_ids` and `all`, the `Deref`/`DerefMut` impls, the `IntoTask` and `FromStoreId` traits and the `TaskIterator`; the store-side functionality reappears nearly unchanged as the `TaskStore` trait in the new taskstore.rs below)

+pub trait Task {
+    fn get_uuid(&self) -> Result<Uuid>;
+}
+
+impl Task for Entry {
+    fn get_uuid(&self) -> Result<Uuid> {
+        match self.get_header().read("todo.uuid") {
+            Ok(Some(&Value::String(ref uuid))) => {
+                Uuid::parse_str(uuid).chain_err(|| TEK::UuidParserError)
+            },
+            Ok(Some(_)) => Err(TE::from_kind(TEK::HeaderTypeError)),
+            Ok(None) => Err(TE::from_kind(TEK::HeaderFieldMissing)),
+            Err(e) => Err(e).chain_err(|| TEK::StoreError),
+        }
+    }
+}
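To keep the direction of the refactoring visible, here is a hedged sketch of reading the UUID back off an entry through the new trait; the crate path and the `print_uuid` helper are assumptions, only `Task::get_uuid()` itself comes from the hunk above.

// Illustrative only: `entry` is any store entry that carries a `todo.uuid`
// header field, as written by `TaskStore::new_from_twtask()` further below.
use libimagtodo::task::Task;          // path assumed
use libimagstore::store::Entry;

fn print_uuid(entry: &Entry) {
    match entry.get_uuid() {
        Ok(uuid) => println!("todo entry for task {}", uuid),
        Err(e)   => println!("not a todo entry: {:?}", e),
    }
}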
lib/domain/libimagtodo/src/taskstore.rs (new file)
@@ -0,0 +1,207 @@
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//

use std::collections::BTreeMap;
use std::io::BufRead;
use std::result::Result as RResult;

use toml::Value;
use uuid::Uuid;

use task_hookrs::task::Task as TTask;
use task_hookrs::import::{import_task, import_tasks};

use libimagstore::store::{FileLockEntry, Store};
use libimagstore::storeid::{IntoStoreId, StoreIdIterator};
use module_path::ModuleEntryPath;

use error::TodoErrorKind as TEK;
use error::TodoError as TE;
use error::Result;
use error::ResultExt;

/// Task struct containing a `FileLockEntry`
pub trait TaskStore<'a> {
    fn import_task_from_reader<R: BufRead>(&'a self, r: R) -> Result<(FileLockEntry<'a>, String, Uuid)>;
    fn get_task_from_import<R: BufRead>(&'a self, r: R) -> Result<RResult<FileLockEntry<'a>, String>>;
    fn get_task_from_string(&'a self, s: String) -> Result<RResult<FileLockEntry<'a>, String>>;
    fn get_task_from_uuid(&'a self, uuid: Uuid) -> Result<Option<FileLockEntry<'a>>>;
    fn retrieve_task_from_import<R: BufRead>(&'a self, r: R) -> Result<FileLockEntry<'a>>;
    fn retrieve_task_from_string(&'a self, s: String) -> Result<FileLockEntry<'a>>;
    fn delete_tasks_by_imports<R: BufRead>(&self, r: R) -> Result<()>;
    fn delete_task_by_uuid(&self, uuid: Uuid) -> Result<()>;
    fn all_tasks(&self) -> Result<StoreIdIterator>;
    fn new_from_twtask(&'a self, task: TTask) -> Result<FileLockEntry<'a>>;
}

impl<'a> TaskStore<'a> for Store {

    fn import_task_from_reader<R: BufRead>(&'a self, mut r: R) -> Result<(FileLockEntry<'a>, String, Uuid)> {
        let mut line = String::new();
        try!(r.read_line(&mut line).map_err(|_| TE::from_kind(TEK::UTF8Error)));
        import_task(&line.as_str())
            .map_err(|_| TE::from_kind(TEK::ImportError))
            .and_then(|t| {
                let uuid = t.uuid().clone();
                self.new_from_twtask(t).map(|t| (t, line, uuid))
            })
    }

    /// Get a task from an import string. That is: read the imported string, get the UUID from it
    /// and try to load this UUID from store.
    ///
    /// Possible return values are:
    ///
    /// * Ok(Ok(Task))
    /// * Ok(Err(String)) - where the String is the String read from the `r` parameter
    /// * Err(_) - where the error is an error that happened during evaluation
    ///
    fn get_task_from_import<R: BufRead>(&'a self, mut r: R) -> Result<RResult<FileLockEntry<'a>, String>> {
        let mut line = String::new();
        try!(r.read_line(&mut line).chain_err(|| TEK::UTF8Error));
        self.get_task_from_string(line)
    }

    /// Get a task from a String. The String is expected to contain the JSON-representation of the
    /// Task to get from the store (only the UUID really matters in this case)
    ///
    /// For an explanation on the return values see `Task::get_from_import()`.
    fn get_task_from_string(&'a self, s: String) -> Result<RResult<FileLockEntry<'a>, String>> {
        import_task(s.as_str())
            .map_err(|_| TE::from_kind(TEK::ImportError))
            .map(|t| t.uuid().clone())
            .and_then(|uuid| self.get_task_from_uuid(uuid))
            .and_then(|o| match o {
                None => Ok(Err(s)),
                Some(t) => Ok(Ok(t)),
            })
    }

    /// Get a task from an UUID.
    ///
    /// If there is no task with this UUID, this returns `Ok(None)`.
    fn get_task_from_uuid(&'a self, uuid: Uuid) -> Result<Option<FileLockEntry<'a>>> {
        ModuleEntryPath::new(format!("taskwarrior/{}", uuid))
            .into_storeid()
            .and_then(|store_id| self.get(store_id))
            .chain_err(|| TEK::StoreError)
    }

    /// Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to
    /// implicitely create the task if it does not exist.
    fn retrieve_task_from_import<R: BufRead>(&'a self, mut r: R) -> Result<FileLockEntry<'a>> {
        let mut line = String::new();
        try!(r.read_line(&mut line).chain_err(|| TEK::UTF8Error));
        self.retrieve_task_from_string(line)
    }

    /// Retrieve a task from a String. The String is expected to contain the JSON-representation of
    /// the Task to retrieve from the store (only the UUID really matters in this case)
    fn retrieve_task_from_string(&'a self, s: String) -> Result<FileLockEntry<'a>> {
        self.get_task_from_string(s)
            .and_then(|opt| match opt {
                Ok(task) => Ok(task),
                Err(string) => import_task(string.as_str())
                    .map_err(|_| TE::from_kind(TEK::ImportError))
                    .and_then(|t| self.new_from_twtask(t)),
            })
    }

    fn delete_tasks_by_imports<R: BufRead>(&self, r: R) -> Result<()> {
        use serde_json::ser::to_string as serde_to_string;
        use task_hookrs::status::TaskStatus;

        for (counter, res_ttask) in import_tasks(r).into_iter().enumerate() {
            match res_ttask {
                Ok(ttask) => {
                    if counter % 2 == 1 {
                        // Only every second task is needed, the first one is the
                        // task before the change, and the second one after
                        // the change. The (maybe modified) second one is
                        // expected by taskwarrior.
                        match serde_to_string(&ttask).chain_err(|| TEK::ImportError) {
                            // use println!() here, as we talk with TW
                            Ok(val) => println!("{}", val),
                            Err(e) => return Err(e),
                        }

                        // Taskwarrior does not have the concept of deleted tasks, but only modified
                        // ones.
                        //
                        // Here we check if the status of a task is deleted and if yes, we delete it
                        // from the store.
                        if *ttask.status() == TaskStatus::Deleted {
                            match self.delete_task_by_uuid(*ttask.uuid()) {
                                Ok(_) => info!("Deleted task {}", *ttask.uuid()),
                                Err(e) => return Err(e),
                            }
                        }
                    } // end if c % 2
                },
                Err(e) => return Err(TE::from_kind(TEK::ImportError)),
            }
        }
        Ok(())
    }

    fn delete_task_by_uuid(&self, uuid: Uuid) -> Result<()> {
        ModuleEntryPath::new(format!("taskwarrior/{}", uuid))
            .into_storeid()
            .and_then(|id| self.delete(id))
            .chain_err(|| TEK::StoreError)
    }

    fn all_tasks(&self) -> Result<StoreIdIterator> {
        self.retrieve_for_module("todo/taskwarrior")
            .chain_err(|| TEK::StoreError)
    }

    fn new_from_twtask(&'a self, task: TTask) -> Result<FileLockEntry<'a>> {
        use toml_query::read::TomlValueReadExt;
        use toml_query::set::TomlValueSetExt;

        let uuid = task.uuid();
        ModuleEntryPath::new(format!("taskwarrior/{}", uuid))
            .into_storeid()
            .chain_err(|| TEK::StoreIdError)
            .and_then(|id| {
                self.retrieve(id)
                    .chain_err(|| TEK::StoreError)
                    .and_then(|mut fle| {
                        {
                            let hdr = fle.get_header_mut();
                            if try!(hdr.read("todo").chain_err(|| TEK::StoreError)).is_none() {
                                try!(hdr
                                     .set("todo", Value::Table(BTreeMap::new()))
                                     .chain_err(|| TEK::StoreError));
                            }

                            try!(hdr.set("todo.uuid", Value::String(format!("{}",uuid)))
                                 .chain_err(|| TEK::StoreError));
                        }

                        // If none of the errors above have returned the function, everything is fine
                        Ok(fle)
                    })
            })
    }

}
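A hedged sketch of how a frontend might drive the new `TaskStore` trait from a taskwarrior on-add style hook; the crate path, the `on_add` helper and the stdin handling are assumptions, the trait method and its return tuple are the ones defined above.

// Illustrative only: import one task line from stdin and echo it back,
// assuming a configured `store: Store`.
use std::io::{stdin, BufReader};
use libimagtodo::taskstore::TaskStore;   // path assumed
use libimagstore::store::Store;

fn on_add(store: &Store) {
    match store.import_task_from_reader(BufReader::new(stdin())) {
        Ok((_fle, line, uuid)) => {
            // taskwarrior expects the (possibly modified) JSON line back on stdout
            println!("{}", line);
            println!("imported task {}", uuid);
        },
        Err(e) => println!("import failed: {:?}", e),
    }
}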
lib/entry/libimagentrygps/Cargo.toml (new file)
@@ -0,0 +1,24 @@
[package]
name = "libimagentrygps"
version = "0.4.0"
authors = ["Matthias Beyer <mail@beyermatthias.de>"]

description = "Library for the imag core distribution"

keywords = ["imag", "PIM", "personal", "information", "management"]
readme = "../README.md"
license = "LGPL-2.1"

[dependencies]
toml = "^0.4"
toml-query = "0.3.0"
serde_derive = "1"
serde = "1"
error-chain = "0.10"

libimagstore = { version = "0.4.0", path = "../../../lib/core/libimagstore" }
libimagerror = { version = "0.4.0", path = "../../../lib/core/libimagerror" }

[dev-dependencies]
env_logger = "0.3"
lib/entry/libimagentrygps/src/entry.rs (new file)
@@ -0,0 +1,123 @@
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//

use error::Result;
use error::GPSErrorKind as GPSEK;
use error::ResultExt;
use types::*;

use libimagstore::store::Entry;

use toml_query::read::TomlValueReadExt;
use toml_query::insert::TomlValueInsertExt;

pub trait GPSEntry {

    fn set_coordinates(&mut self, c: Coordinates) -> Result<()>;
    fn get_coordinates(&self) -> Result<Option<Coordinates>>;

}

impl GPSEntry for Entry {

    fn set_coordinates(&mut self, c: Coordinates) -> Result<()> {
        self.get_header_mut()
            .insert("gps.coordinates", c.into())
            .map(|_| ())
            .chain_err(|| GPSEK::HeaderWriteError)
    }

    fn get_coordinates(&self) -> Result<Option<Coordinates>> {
        // reading the header, so map failures to the read error kind
        match self.get_header().read("gps.coordinates").chain_err(|| GPSEK::HeaderReadError) {
            Ok(Some(hdr)) => Coordinates::from_value(hdr).map(Some),
            Ok(None) => Ok(None),
            Err(e) => Err(e),
        }
    }

}

#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use libimagstore::store::Store;

    use entry::*;

    fn setup_logging() {
        use env_logger;
        let _ = env_logger::init().unwrap_or(());
    }

    fn get_store() -> Store {
        Store::new(PathBuf::from("/"), None).unwrap()
    }

    #[test]
    fn test_set_gps() {
        setup_logging();

        let store = get_store();

        let mut entry = store.create(PathBuf::from("test_set_gps")).unwrap();

        let coordinates = Coordinates {
            latitude: GPSValue::new(0, 0, 0),
            longitude: GPSValue::new(0, 0, 0),
        };

        let res = entry.set_coordinates(coordinates);

        assert!(res.is_ok());
    }

    #[test]
    fn test_setget_gps() {
        setup_logging();

        let store = get_store();

        let mut entry = store.create(PathBuf::from("test_setget_gps")).unwrap();

        let coordinates = Coordinates {
            latitude: GPSValue::new(0, 0, 0),
            longitude: GPSValue::new(0, 0, 0),
        };

        let res = entry.set_coordinates(coordinates);
        assert!(res.is_ok());

        let coordinates = entry.get_coordinates();

        assert!(coordinates.is_ok());
        let coordinates = coordinates.unwrap();

        assert!(coordinates.is_some());
        let coordinates = coordinates.unwrap();

        assert_eq!(0, coordinates.longitude.degree);
        assert_eq!(0, coordinates.longitude.minutes);
        assert_eq!(0, coordinates.longitude.seconds);
        assert_eq!(0, coordinates.latitude.degree);
        assert_eq!(0, coordinates.latitude.minutes);
        assert_eq!(0, coordinates.latitude.seconds);
    }
}
lib/entry/libimagentrygps/src/error.rs (new file)
@@ -0,0 +1,87 @@
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//

error_chain! {
    types {
        GPSError, GPSErrorKind, ResultExt, Result;
    }

    errors {
        StoreReadError {
            description("Store read error")
            display("Store read error")
        }

        StoreWriteError {
            description("Store write error")
            display("Store write error")
        }

        HeaderWriteError {
            description("Couldn't write Header for annotation")
            display("Couldn't write Header for annotation")
        }

        HeaderReadError {
            description("Couldn't read Header of Entry")
            display("Couldn't read Header of Entry")
        }

        HeaderTypeError {
            description("Header field has unexpected type")
            display("Header field has unexpected type")
        }

        TypeError {
            description("Type Error")
            display("Type Error")
        }

        DegreeMissing {
            description("'degree' value missing")
            display("'degree' value missing")
        }

        MinutesMissing {
            description("'minutes' value missing")
            display("'minutes' value missing")
        }

        SecondsMissing {
            description("'seconds' value missing")
            display("'seconds' value missing")
        }

        LongitudeMissing {
            description("'longitude' value missing")
            display("'longitude' value missing")
        }

        LatitudeMissing {
            description("'latitude' value missing")
            display("'latitude' value missing")
        }

        NumberConversionError {
            description("Cannot convert number to fit into variable")
            display("Cannot convert number to fit into variable")
        }
    }
}
@@ -17,30 +17,18 @@
 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 //
-use clap::ArgMatches;
-
-use libimagstore::store::FileLockEntry;
-
-use error::Result;
-use tagable::*;
-use ui::{get_add_tags, get_remove_tags};
-
-pub fn exec_cli_for_entry(matches: &ArgMatches, entry: &mut FileLockEntry) -> Result<()> {
-    if let Some(ts) = get_add_tags(matches) {
-        for t in ts {
-            if let Err(e) = entry.add_tag(t) {
-                return Err(e);
-            }
-        }
-    }
-
-    if let Some(ts) = get_remove_tags(matches) {
-        for t in ts {
-            if let Err(e) = entry.remove_tag(t) {
-                return Err(e);
-            }
-        }
-    }
-
-    Ok(())
-}
+extern crate toml;
+extern crate toml_query;
+#[macro_use] extern crate serde_derive;
+#[macro_use] extern crate error_chain;
+
+extern crate libimagstore;
+#[macro_use] extern crate libimagerror;
+
+#[cfg(test)]
+extern crate env_logger;
+
+pub mod entry;
+pub mod error;
+pub mod types;
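For orientation, a hedged sketch of how downstream code might wire the new crate together; the crate name follows the Cargo.toml above, everything else (the helper, the entry path, the coordinate values) is illustrative.

// Illustrative only: store a position on an existing entry.
extern crate libimagentrygps;
extern crate libimagstore;

use std::path::PathBuf;
use libimagentrygps::entry::GPSEntry;
use libimagentrygps::types::{Coordinates, GPSValue};
use libimagentrygps::error::Result;
use libimagstore::store::Store;

fn tag_with_location(store: &Store) -> Result<()> {
    let mut entry = store.create(PathBuf::from("some/entry")).unwrap(); // store setup elided
    // Coordinates::new(longitude, latitude), each value as degree/minutes/seconds
    entry.set_coordinates(Coordinates::new(GPSValue::new(6, 0, 0), GPSValue::new(51, 0, 0)))
}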
lib/entry/libimagentrygps/src/types.rs (new file)
@@ -0,0 +1,156 @@
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//

use std::collections::BTreeMap;

use toml::Value;

use error::GPSErrorKind as GPSEK;
use error::GPSError as GPSE;
use error::Result;
use error::ResultExt;

pub trait FromValue : Sized {
    fn from_value(v: &Value) -> Result<Self>;
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct GPSValue {
    pub degree: i8,
    pub minutes: i8,
    pub seconds: i8
}

impl GPSValue {

    pub fn new(d: i8, m: i8, s: i8) -> GPSValue {
        GPSValue {
            degree: d,
            minutes: m,
            seconds: s
        }
    }
}

impl Into<Value> for GPSValue {

    fn into(self) -> Value {
        let mut map = BTreeMap::new();
        let _ = map.insert("degree".to_owned(),  Value::Integer(self.degree as i64));
        let _ = map.insert("minutes".to_owned(), Value::Integer(self.minutes as i64));
        let _ = map.insert("seconds".to_owned(), Value::Integer(self.seconds as i64));
        Value::Table(map)
    }

}

impl FromValue for GPSValue {
    fn from_value(v: &Value) -> Result<Self> {
        match *v {
            Value::Table(ref map) => {
                Ok(GPSValue::new(
                    map.get("degree")
                        .ok_or_else(|| GPSE::from_kind(GPSEK::DegreeMissing))
                        .and_then(|v| match *v {
                            Value::Integer(i) => i64_to_i8(i),
                            _ => Err(GPSE::from_kind(GPSEK::HeaderTypeError)),
                        })?,

                    map
                        .get("minutes")
                        .ok_or_else(|| GPSE::from_kind(GPSEK::MinutesMissing))
                        .and_then(|v| match *v {
                            Value::Integer(i) => i64_to_i8(i),
                            _ => Err(GPSE::from_kind(GPSEK::HeaderTypeError)),
                        })?,

                    map
                        .get("seconds")
                        .ok_or_else(|| GPSE::from_kind(GPSEK::SecondsMissing))
                        .and_then(|v| match *v {
                            Value::Integer(i) => i64_to_i8(i),
                            _ => Err(GPSE::from_kind(GPSEK::HeaderTypeError)),
                        })?
                ))
            }
            _ => Err(GPSE::from_kind(GPSEK::TypeError))
        }
    }

}

/// Data-transfer type for transfering longitude-latitude-pairs
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct Coordinates {
    pub longitude: GPSValue,
    pub latitude: GPSValue,
}

impl Coordinates {
    pub fn new(long: GPSValue, lat: GPSValue) -> Coordinates {
        Coordinates {
            longitude: long,
            latitude: lat,
        }
    }
}

impl Into<Value> for Coordinates {

    fn into(self) -> Value {
        let mut map = BTreeMap::new();
        let _ = map.insert("longitude".to_owned(), self.longitude.into());
        let _ = map.insert("latitude".to_owned(),  self.latitude.into());
        Value::Table(map)
    }

}

impl FromValue for Coordinates {
    fn from_value(v: &Value) -> Result<Self> {
        match *v {
            Value::Table(ref map) => {
                Ok(Coordinates::new(
                    match map.get("longitude") {
                        Some(v) => GPSValue::from_value(v),
                        None => Err(GPSE::from_kind(GPSEK::LongitudeMissing)),
                    }?,

                    match map.get("latitude") {
                        Some(v) => GPSValue::from_value(v),
                        None => Err(GPSE::from_kind(GPSEK::LatitudeMissing)),
                    }?
                ))
            }
            _ => Err(GPSE::from_kind(GPSEK::TypeError))
        }
    }

}

/// Helper to convert a i64 to i8 or return an error if this doesn't work.
fn i64_to_i8(i: i64) -> Result<i8> {
    if i > (<i8>::max_value() as i64) {
        Err(GPSE::from_kind(GPSEK::NumberConversionError))
    } else {
        Ok(i as i8)
    }
}
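A small sketch of the conversion pair defined in types.rs, round-tripping a value through `toml::Value`; purely illustrative, the helper function is not part of the crate.

// Illustrative only: Into<Value> and FromValue should round-trip a GPSValue.
use toml::Value;
use libimagentrygps::types::{FromValue, GPSValue};

fn roundtrip() {
    let original = GPSValue::new(6, 30, 15);
    let as_toml: Value = original.clone().into();
    let back = GPSValue::from_value(&as_toml).unwrap();
    assert_eq!(original, back);
}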
@@ -35,7 +35,6 @@ use std::collections::BTreeMap;
 use std::fmt::Debug;
 
 use libimagstore::store::Entry;
-use libimagstore::store::FileLockEntry;
 use libimagstore::store::Store;
 use libimagstore::storeid::StoreId;
 use libimagstore::storeid::IntoStoreId;
@@ -58,37 +57,32 @@ use url::Url;
 use crypto::sha1::Sha1;
 use crypto::digest::Digest;
 
-/// "Link" Type, just an abstraction over `FileLockEntry` to have some convenience internally.
-pub struct Link<'a> {
-    link: FileLockEntry<'a>
+pub trait Link {
+
+    fn get_link_uri_from_filelockentry(&self) -> Result<Option<Url>>;
+
+    fn get_url(&self) -> Result<Option<Url>>;
+
 }
 
-impl<'a> Link<'a> {
+impl Link for Entry {
 
-    pub fn new(fle: FileLockEntry<'a>) -> Link<'a> {
-        Link { link: fle }
-    }
-
-    /// Get a link Url object from a `FileLockEntry`, ignore errors.
-    fn get_link_uri_from_filelockentry(file: &FileLockEntry<'a>) -> Option<Url> {
-        file.get_header()
+    fn get_link_uri_from_filelockentry(&self) -> Result<Option<Url>> {
+        self.get_header()
             .read("imag.content.url")
-            .ok()
+            .chain_err(|| LEK::EntryHeaderReadError)
             .and_then(|opt| match opt {
                 Some(&Value::String(ref s)) => {
                     debug!("Found url, parsing: {:?}", s);
-                    Url::parse(&s[..]).ok()
+                    Url::parse(&s[..]).chain_err(|| LEK::InvalidUri).map(Some)
                 },
-                _ => None
+                Some(_) => Err(LE::from_kind(LEK::LinkParserFieldTypeError)),
+                None => Ok(None),
             })
     }
 
-    pub fn get_url(&self) -> Result<Option<Url>> {
-        let opt = self.link
-            .get_header()
-            .read("imag.content.url");
-
-        match opt {
+    fn get_url(&self) -> Result<Option<Url>> {
+        match self.get_header().read("imag.content.url") {
             Ok(Some(&Value::String(ref s))) => {
                 Url::parse(&s[..])
                     .map(Some)
@@ -261,9 +255,10 @@ pub mod iter {
         type Item = Result<Url>;
 
         fn next(&mut self) -> Option<Self::Item> {
-            use super::get_external_link_from_file;
+            use external::Link;
 
-            self.0
+            loop {
+                let next = self.0
                 .next()
                 .map(|id| {
                     debug!("Retrieving entry for id: '{:?}'", id);
@@ -274,10 +269,18 @@ pub mod iter {
                     .and_then(|f| {
                         debug!("Store::retrieve({:?}) succeeded", id);
                         debug!("getting external link from file now");
-                        get_external_link_from_file(&f)
+                        f.get_link_uri_from_filelockentry()
                             .map_dbg_err(|e| format!("URL -> Err = {:?}", e))
                     })
-                })
+                });
+
+                match next {
+                    Some(Ok(Some(link))) => return Some(Ok(link)),
+                    Some(Ok(None)) => continue,
+                    Some(Err(e)) => return Some(Err(e)),
+                    None => return None
+                }
+            }
         }
 
     }
@@ -291,11 +294,6 @@ pub fn is_external_link_storeid<A: AsRef<StoreId> + Debug>(id: A) -> bool {
     id.as_ref().local().starts_with("links/external")
 }
 
-fn get_external_link_from_file(entry: &FileLockEntry) -> Result<Url> {
-    Link::get_link_uri_from_filelockentry(entry) // TODO: Do not hide error by using this function
-        .ok_or(LE::from_kind(LEK::StoreReadError))
-}
-
 /// Implement `ExternalLinker` for `Entry`, hiding the fact that there is no such thing as an external
 /// link in an entry, but internal links to other entries which serve as external links, as one
 /// entry in the store can only have one external link.
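A hedged sketch of the new `Link` trait from the consumer side; the `show_url` helper is illustrative, only `get_url()` and its error behaviour are taken from the diff above.

// Illustrative only: read the external URL off an entry, if it has one.
use libimagentrylink::external::Link;
use libimagstore::store::Entry;

fn show_url(entry: &Entry) {
    match entry.get_url() {
        Ok(Some(url)) => println!("external link: {}", url),
        Ok(None)      => println!("no `imag.content.url` header on this entry"),
        Err(e)        => println!("header malformed: {:?}", e),
    }
}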
@@ -18,9 +18,9 @@ itertools = "0.5"
 log = "0.3"
 rust-crypto = "0.2"
 toml = "^0.4"
-walkdir = "1.0.*"
 toml-query = "0.3.0"
 error-chain = "0.10"
+walkdir = "1.0.*"
 
 libimagstore = { version = "0.4.0", path = "../../../lib/core/libimagstore" }
 libimagerror = { version = "0.4.0", path = "../../../lib/core/libimagerror" }
@@ -52,15 +52,13 @@ impl Hasher for NBytesHasher {
     }
 
     fn create_hash<R: Read>(&mut self, _: &PathBuf, contents: &mut R) -> Result<String> {
-        let s = contents
+        let s = try!(contents
             .bytes()
             .take(self.n)
             .collect::<RResult<Vec<u8>, _>>()
             .chain_err(|| REK::IOError)
-            .and_then(|v| String::from_utf8(v).chain_err(|| REK::IOError))
-            .chain_err(|| REK::UTF8Error)
-            .chain_err(|| REK::IOError);
-        self.hasher.input_str(&try!(s)[..]);
+            .and_then(|v| String::from_utf8(v).chain_err(|| REK::UTF8Error)));
+        self.hasher.input_str(&s[..]);
         Ok(self.hasher.result_str())
     }
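A quick sketch of exercising the corrected `create_hash()` against an in-memory reader; the `NBytesHasher::new()` constructor is an assumption about the surrounding module, only the `Hasher::create_hash()` signature is taken from the hunk above.

// Illustrative only: hash the first n bytes of a reader.
// Assumed to live next to NBytesHasher, so no extra `use` for the type.
use std::io::Cursor;
use std::path::PathBuf;

fn demo() {
    let mut hasher = NBytesHasher::new(5);                    // constructor assumed
    let mut data = Cursor::new(b"hello world".to_vec());
    let hash = hasher.create_hash(&PathBuf::from("ignored"), &mut data)
        .expect("hashing the in-memory buffer");
    println!("hash of the first 5 bytes: {}", hash);
}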
@@ -55,3 +55,5 @@ pub mod hasher;
 pub mod hashers;
 pub mod lister;
 pub mod reference;
+pub mod refstore;
+mod util;
@@ -20,13 +20,14 @@
 use std::default::Default;
 use std::io::stdout;
 use std::io::Write;
+use std::ops::Deref;
 
 use libimagentrylist::lister::Lister;
 use libimagentrylist::error::Result;
-use libimagentrylist::error::ResultExt;
 use libimagerror::trace::trace_error;
 use libimagstore::store::FileLockEntry;
 use libimagentrylist::error::ListErrorKind as LEK;
+use libimagentrylist::error as lerror;
 
 use reference::Ref;
 
@@ -86,13 +87,45 @@ impl Lister for RefLister {
                 debug!("fold({:?}, {:?})", accu, entry);
                 let r = accu.and_then(|_| {
                     debug!("Listing Entry: {:?}", entry);
-                    lister_fn(entry,
-                              self.check_dead,
-                              self.check_changed,
-                              self.check_changed_content,
-                              self.check_changed_permiss)
+                    {
+                        let is_dead = if self.check_dead {
+                            if try!(lerror::ResultExt::chain_err(entry.fs_link_exists(), || LEK::FormatError)) {
+                                "dead"
+                            } else {
+                                "alive"
+                            }
+                        } else {
+                            "not checked"
+                        };
+
+                        let is_changed = if self.check_changed {
+                            if check_changed(entry.deref()) { "changed" } else { "unchanged" }
+                        } else {
+                            "not checked"
+                        };
+
+                        let is_changed_content = if self.check_changed_content {
+                            if check_changed_content(entry.deref()) { "changed" } else { "unchanged" }
+                        } else {
+                            "not checked"
+                        };
+
+                        let is_changed_permiss = if self.check_changed_permiss {
+                            if check_changed_permiss(entry.deref()) { "changed" } else { "unchanged" }
+                        } else {
+                            "not checked"
+                        };
+
+                        Ok(format!("{} | {} | {} | {} | {} | {}",
+                                   is_dead,
+                                   is_changed,
+                                   is_changed_content,
+                                   is_changed_permiss,
+                                   entry.get_path_hash().unwrap_or_else(|_| String::from("Cannot get hash")),
+                                   entry.get_location()))
+                    }
                     .and_then(|s| {
-                        write!(stdout(), "{}\n", s).chain_err(|| LEK::IOError)
+                        lerror::ResultExt::chain_err(write!(stdout(), "{}\n", s), || LEK::FormatError)
                     })
                 })
                 .map(|_| ());
@@ -104,68 +137,11 @@ impl Lister for RefLister {
 
 }
 
-fn lister_fn(fle: FileLockEntry,
-             do_check_dead: bool,
-             do_check_changed: bool,
-             do_check_changed_content: bool,
-             do_check_changed_permiss: bool) -> Result<String>
-{
-    Ref::from_filelockentry(fle)
-        .map(|r| {

(removed here: the same dead / changed / changed-content / changed-permissions formatting that now lives inline in `Lister::list()` above, built on `check_dead()`, `check_changed*()`, `r.get_path_hash()` and `r.get_location()`)

-        })
-        .chain_err(|| LEK::FormatError)
-}
-
-fn check_dead(r: &Ref) -> bool {
-    match r.fs_link_exists() {
-        Ok(b) => b,
-        Err(e) => {
-            warn!("Could not check whether the ref {} exists on the FS:", r);
-            trace_error(&e);
-
-            // We continue here and tell the callee that this reference is dead, what is kind of
-            // true actually, as we might not have access to it right now
-            true
-        },
-    }
-}
-
-fn check_changed(r: &Ref) -> bool {
+fn check_changed<R: Ref>(r: &R) -> bool {
     check_changed_content(r) && check_changed_permiss(r)
 }
 
-fn check_changed_content(r: &Ref) -> bool {
+fn check_changed_content<R: Ref>(r: &R) -> bool {
     let eq = r.get_current_hash()
         .and_then(|hash| r.get_stored_hash().map(|stored| (hash, stored)))
         .map(|(hash, stored)| hash == stored);
@@ -173,7 +149,7 @@ fn check_changed_content(r: &Ref) -> bool {
     match eq {
         Ok(eq) => eq,
         Err(e) => {
-            warn!("Could not check whether the ref {} changed on the FS:", r);
+            warn!("Could not check whether the ref changed on the FS");
             trace_error(&e);
 
             // We continue here and tell the callee that this reference is unchanged
@@ -182,7 +158,7 @@ fn check_changed_content(r: &Ref) -> bool {
         }
     }
 }
 
-fn check_changed_permiss(_: &Ref) -> bool {
+fn check_changed_permiss<R: Ref>(_: &R) -> bool {
     warn!("Permission changes tracking not supported yet.");
     false
 }
|
||||||
//! files outside of the imag store.
|
//! files outside of the imag store.
|
||||||
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::ops::Deref;
|
|
||||||
use std::ops::DerefMut;
|
|
||||||
use std::collections::BTreeMap;
|
|
||||||
use std::fs::File;
|
use std::fs::File;
|
||||||
use std::fmt::{Display, Error as FmtError, Formatter};
|
|
||||||
use std::fs::Permissions;
|
use std::fs::Permissions;
|
||||||
use std::result::Result as RResult;
|
|
||||||
|
|
||||||
use libimagstore::store::FileLockEntry;
|
use libimagstore::store::Entry;
|
||||||
use libimagstore::storeid::StoreId;
|
|
||||||
use libimagstore::storeid::IntoStoreId;
|
|
||||||
use libimagstore::store::Store;
|
|
||||||
|
|
||||||
use toml::Value;
|
use toml::Value;
|
||||||
use toml_query::read::TomlValueReadExt;
|
use toml_query::read::TomlValueReadExt;
|
||||||
use toml_query::set::TomlValueSetExt;
|
use toml_query::set::TomlValueSetExt;
|
||||||
use toml_query::insert::TomlValueInsertExt;
|
|
||||||
|
|
||||||
use error::RefErrorKind as REK;
|
use error::RefErrorKind as REK;
|
||||||
use error::RefError as RE;
|
use error::RefError as RE;
|
||||||
use error::ResultExt;
|
use error::ResultExt;
|
||||||
use flags::RefFlags;
|
|
||||||
use error::Result;
|
use error::Result;
|
||||||
use hasher::*;
|
use hasher::*;
|
||||||
use module_path::ModuleEntryPath;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
pub trait Ref {
|
||||||
pub struct Ref<'a>(FileLockEntry<'a>);
|
|
||||||
|
|
||||||
impl<'a> Ref<'a> {
|
|
||||||
|
|
||||||
/// Try to build a Ref object based on an existing FileLockEntry object
|
|
||||||
pub fn from_filelockentry(fle: FileLockEntry<'a>) -> Result<Ref<'a>> {
|
|
||||||
Ref::read_reference(&fle).map(|_| Ref(fle))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Try to get `si` as Ref object from the store
|
|
||||||
pub fn get(store: &'a Store, si: StoreId) -> Result<Ref<'a>> {
|
|
||||||
match store.get(si) {
|
|
||||||
Err(e) => return Err(e).chain_err(|| REK::StoreReadError),
|
|
||||||
Ok(None) => return Err(RE::from_kind(REK::RefNotInStore)),
|
|
||||||
Ok(Some(fle)) => Ref::from_filelockentry(fle),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a Ref object from the store by hash.
|
|
||||||
///
|
|
||||||
/// Returns None if the hash cannot be found.
|
|
||||||
pub fn get_by_hash(store: &'a Store, hash: String) -> Result<Option<Ref<'a>>> {
|
|
||||||
ModuleEntryPath::new(hash)
|
|
||||||
.into_storeid()
|
|
||||||
.and_then(|id| store.get(id))
|
|
||||||
.map(|opt_fle| opt_fle.map(|fle| Ref(fle)))
|
|
||||||
.chain_err(|| REK::StoreReadError)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Delete a ref by hash
|
|
||||||
///
|
|
||||||
/// If the returned Result contains an error, the ref might not be deleted.
|
|
||||||
pub fn delete_by_hash(store: &'a Store, hash: String) -> Result<()> {
|
|
||||||
ModuleEntryPath::new(hash)
|
|
||||||
.into_storeid()
|
|
||||||
.and_then(|id| store.delete(id))
|
|
||||||
.chain_err(|| REK::StoreWriteError)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_reference(fle: &FileLockEntry<'a>) -> Result<PathBuf> {
|
|
||||||
match fle.get_header().read("ref.path") {
|
|
||||||
Ok(Some(&Value::String(ref s))) => Ok(PathBuf::from(s)),
|
|
||||||
Ok(Some(_)) => Err(RE::from_kind(REK::HeaderTypeError)),
|
|
||||||
Ok(None) => Err(RE::from_kind(REK::HeaderFieldMissingError)),
|
|
||||||
Err(e) => Err(e).chain_err(|| REK::StoreReadError),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn create_with_hasher<H: Hasher>(store: &'a Store, pb: PathBuf, flags: RefFlags, mut h: H)
|
|
||||||
-> Result<Ref<'a>>
|
|
||||||
{
|
|
||||||
if !pb.exists() {
|
|
||||||
return Err(RE::from_kind(REK::RefTargetDoesNotExist));
|
|
||||||
}
|
|
||||||
if flags.get_content_hashing() && pb.is_dir() {
|
|
||||||
return Err(RE::from_kind(REK::RefTargetCannotBeHashed));
|
|
||||||
}
|
|
||||||
|
|
||||||
let (mut fle, content_hash, permissions, canonical_path) = { // scope to be able to fold
|
|
||||||
try!(File::open(pb.clone())
|
|
||||||
.chain_err(|| REK::RefTargetFileCannotBeOpened)
|
|
||||||
|
|
||||||
// If we were able to open this file,
|
|
||||||
// we hash the contents of the file and return (file, hash)
|
|
||||||
.and_then(|mut file| {
|
|
||||||
let opt_contenthash = if flags.get_content_hashing() {
|
|
||||||
Some(try!(h.create_hash(&pb, &mut file)))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok((file, opt_contenthash))
|
|
||||||
})
|
|
||||||
|
|
||||||
// and then we get the permissions if we have to
|
|
||||||
// and return (file, content hash, permissions)
|
|
||||||
.and_then(|(file, opt_contenthash)| {
|
|
||||||
let opt_permissions = if flags.get_permission_tracking() {
|
|
||||||
Some(try!(file
|
|
||||||
.metadata()
|
|
||||||
.map(|md| md.permissions())
|
|
||||||
.chain_err(|| REK::RefTargetCannotReadPermissions)
|
|
||||||
))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok((opt_contenthash, opt_permissions))
|
|
||||||
})
|
|
||||||
|
|
||||||
// and then we try to canonicalize the PathBuf, because we want to store a
|
|
||||||
// canonicalized path
|
|
||||||
// and return (file, content hash, permissions, canonicalized path)
|
|
||||||
.and_then(|(opt_contenthash, opt_permissions)| {
|
-                pb.canonicalize()
-                    .map(|can| (opt_contenthash, opt_permissions, can))
-                    // if PathBuf::canonicalize() failed, build an error from the return value
-                    .chain_err(|| REK::PathCanonicalizationError)
-            })
-
-            // and then we hash the canonicalized path
-            // and return (file, content hash, permissions, canonicalized path, path hash)
-            .and_then(|(opt_contenthash, opt_permissions, can)| {
-                let path_hash = try!(Ref::hash_path(&can)
-                    .chain_err(|| REK::PathHashingError)
-                );
-
-                Ok((opt_contenthash, opt_permissions, can, path_hash))
-            })
-
-            // and then we convert the PathBuf of the canonicalized path to a String to be able
-            // to save it in the Ref FileLockEntry obj
-            // and return
-            // (file, content hash, permissions, canonicalized path as String, path hash)
-            .and_then(|(opt_conhash, opt_perm, can, path_hash)| {
-                match can.to_str().map(String::from) {
-                    // UTF convert error in PathBuf::to_str(),
-                    None => Err(RE::from_kind(REK::PathUTF8Error)),
-                    Some(can) => Ok((opt_conhash, opt_perm, can, path_hash))
-                }
-            })
-
-            // and then we create the FileLockEntry in the Store
-            // and return (filelockentry, content hash, permissions, canonicalized path)
-            .and_then(|(opt_conhash, opt_perm, can, path_hash)| {
-                let fle = try!(store
-                    .create(ModuleEntryPath::new(path_hash))
-                    .chain_err(|| REK::StoreWriteError)
-                );
-
-                Ok((fle, opt_conhash, opt_perm, can))
-            })
-            )
-        };
-
-        for tpl in [
-            Some((String::from("ref"), Value::Table(BTreeMap::new()))),
-            Some((String::from("ref.permissions"), Value::Table(BTreeMap::new()))),
-            Some((String::from("ref.path"), Value::String(canonical_path))),
-            Some((String::from("ref.content_hash"), Value::Table(BTreeMap::new()))),
-
-            content_hash.map(|hash| {
-                (format!("ref.content_hash.{}", h.hash_name()), Value::String(hash))
-            }),
-            permissions.map(|p| {
-                (String::from("ref.permissions.ro"), Value::Boolean(p.readonly()))
-            }),
-        ].into_iter()
-        {
-            match tpl {
-                &Some((ref s, ref v)) => {
-                    match fle.get_header_mut().insert(s, v.clone()) {
-                        Ok(None) => {
-                            debug!("Header insert worked");
-                        }
-                        Ok(Some(val)) => {
-                            debug!("Overwrote: {}, which was: {:?}", s, val);
-                        },
-                        Err(e) => {
-                            return Err(e).chain_err(|| REK::HeaderFieldWriteError);
-                        },
-                    }
-                }
-                &None => {
-                    debug!("Not going to insert.");
-                }
-            }
-        }
-
-        Ok(Ref(fle))
-    }
-
-    /// Create a Ref object which refers to `pb`
-    pub fn create(store: &'a Store, pb: PathBuf, flags: RefFlags) -> Result<Ref<'a>> {
-        Ref::create_with_hasher(store, pb, flags, DefaultHasher::new())
-    }
-
-    /// Creates a Hash from a PathBuf by making the PathBuf absolute and then running a hash
-    /// algorithm on it
-    fn hash_path(pb: &PathBuf) -> Result<String> {
-        use crypto::sha1::Sha1;
-        use crypto::digest::Digest;
-
-        match pb.to_str() {
-            Some(s) => {
-                let mut hasher = Sha1::new();
-                hasher.input_str(s);
-                Ok(hasher.result_str())
-            },
-            None => return Err(RE::from_kind(REK::PathUTF8Error)),
-        }
-    }
     /// Get the hash from the path of the ref
-    pub fn get_path_hash(&self) -> Result<String> {
-        self.0
-            .get_location()
+    fn get_path_hash(&self) -> Result<String>;
+
+    /// Get the hash of the link target which is stored in the ref object
+    fn get_stored_hash(&self) -> Result<String>;
+
+    /// Get the hahs of the link target which is stored in the ref object, which is hashed with a
+    /// custom Hasher instance.
+    fn get_stored_hash_with_hasher<H: Hasher>(&self, h: &H) -> Result<String>;
+
+    /// Get the hash of the link target by reading the link target and hashing the contents
+    fn get_current_hash(&self) -> Result<String>;
+
+    /// Get the hash of the link target by reading the link target and hashing the contents with the
+    /// custom hasher
+    fn get_current_hash_with_hasher<H: Hasher>(&self, h: H) -> Result<String>;
+
+    /// check whether the pointer the Ref represents still points to a file which exists
+    fn fs_link_exists(&self) -> Result<bool>;
+
+    /// Alias for `r.fs_link_exists() && r.deref().is_file()`
+    fn is_ref_to_file(&self) -> Result<bool>;
+
+    /// Alias for `r.fs_link_exists() && r.deref().is_dir()`
+    fn is_ref_to_dir(&self) -> Result<bool>;
+
+    /// Alias for `!Ref::fs_link_exists()`
+    fn is_dangling(&self) -> Result<bool>;
+
+    /// check whether the pointer the Ref represents is valid
+    /// This includes:
+    /// - Hashsum of the file is still the same as stored in the Ref
+    /// - file permissions are still valid
+    fn fs_link_valid(&self) -> Result<bool>;
+
+    /// Check whether the file permissions of the referenced file are equal to the stored
+    /// permissions
+    fn fs_link_valid_permissions(&self) -> Result<bool>;
+
+    /// Check whether the Hashsum of the referenced file is equal to the stored hashsum
+    fn fs_link_valid_hash(&self) -> Result<bool>;
+
+    /// Update the Ref by re-checking the file from FS
+    /// This errors if the file is not present or cannot be read()
+    fn update_ref(&mut self) -> Result<()>;
+
+    /// Update the Ref by re-checking the file from FS using the passed Hasher instance
+    /// This errors if the file is not present or cannot be read()
+    fn update_ref_with_hasher<H: Hasher>(&mut self, h: &H) -> Result<()>;
+
+    /// Get the path of the file which is reffered to by this Ref
+    fn fs_file(&self) -> Result<PathBuf>;
+
+    /// Re-find a referenced file
+    ///
+    /// This function tries to re-find a ref by searching all directories in `search_roots` recursively
+    /// for a file which matches the hash of the Ref.
+    ///
+    /// If `search_roots` is `None`, it starts at the filesystem root `/`.
+    ///
+    /// If the target cannot be found, this yields a RefTargetDoesNotExist error kind.
+    ///
+    /// # Warning
+    ///
+    /// This option causes heavy I/O as it recursively searches the Filesystem.
+    fn refind(&self, search_roots: Option<Vec<PathBuf>>) -> Result<PathBuf>;
+
+    /// See documentation of `Ref::refind()`
+    fn refind_with_hasher<H: Hasher>(&self, search_roots: Option<Vec<PathBuf>>, h: H)
+        -> Result<PathBuf>;
+
+    /// Get the permissions of the file which are present
+    fn get_current_permissions(&self) -> Result<Permissions>;
+}
+
+
+impl Ref for Entry {
+
+    /// Get the hash from the path of the ref
+    fn get_path_hash(&self) -> Result<String> {
+        self.get_location()
             .clone()
             .into_pathbuf()
             .chain_err(|| REK::StoreIdError)
@@ -258,14 +132,14 @@ impl<'a> Ref<'a> {
     }
 
     /// Get the hash of the link target which is stored in the ref object
-    pub fn get_stored_hash(&self) -> Result<String> {
+    fn get_stored_hash(&self) -> Result<String> {
         self.get_stored_hash_with_hasher(&DefaultHasher::new())
     }
 
     /// Get the hahs of the link target which is stored in the ref object, which is hashed with a
     /// custom Hasher instance.
-    pub fn get_stored_hash_with_hasher<H: Hasher>(&self, h: &H) -> Result<String> {
-        match self.0.get_header().read(&format!("ref.content_hash.{}", h.hash_name())[..]) {
+    fn get_stored_hash_with_hasher<H: Hasher>(&self, h: &H) -> Result<String> {
+        match self.get_header().read(&format!("ref.content_hash.{}", h.hash_name())[..]) {
             // content hash stored...
             Ok(Some(&Value::String(ref s))) => Ok(s.clone()),
 
@@ -281,13 +155,13 @@ impl<'a> Ref<'a> {
     }
 
     /// Get the hash of the link target by reading the link target and hashing the contents
-    pub fn get_current_hash(&self) -> Result<String> {
+    fn get_current_hash(&self) -> Result<String> {
         self.get_current_hash_with_hasher(DefaultHasher::new())
     }
 
     /// Get the hash of the link target by reading the link target and hashing the contents with the
     /// custom hasher
-    pub fn get_current_hash_with_hasher<H: Hasher>(&self, mut h: H) -> Result<String> {
+    fn get_current_hash_with_hasher<H: Hasher>(&self, mut h: H) -> Result<String> {
         self.fs_file()
             .and_then(|pb| {
                 File::open(pb.clone())
@@ -297,38 +171,23 @@ impl<'a> Ref<'a> {
             .and_then(|(path, mut file)| h.create_hash(&path, &mut file))
     }
 
-    /// Get the permissions of the file which are present
-    fn get_current_permissions(&self) -> Result<Permissions> {
-        self.fs_file()
-            .and_then(|pb| {
-                File::open(pb)
-                    .chain_err(|| REK::HeaderFieldReadError)
-            })
-            .and_then(|file| {
-                file
-                    .metadata()
-                    .map(|md| md.permissions())
-                    .chain_err(|| REK::RefTargetCannotReadPermissions)
-            })
-    }
-
     /// check whether the pointer the Ref represents still points to a file which exists
-    pub fn fs_link_exists(&self) -> Result<bool> {
+    fn fs_link_exists(&self) -> Result<bool> {
         self.fs_file().map(|pathbuf| pathbuf.exists())
     }
 
     /// Alias for `r.fs_link_exists() && r.deref().is_file()`
-    pub fn is_ref_to_file(&self) -> Result<bool> {
+    fn is_ref_to_file(&self) -> Result<bool> {
         self.fs_file().map(|pathbuf| pathbuf.is_file())
     }
 
     /// Alias for `r.fs_link_exists() && r.deref().is_dir()`
-    pub fn is_ref_to_dir(&self) -> Result<bool> {
+    fn is_ref_to_dir(&self) -> Result<bool> {
         self.fs_file().map(|pathbuf| pathbuf.is_dir())
     }
 
     /// Alias for `!Ref::fs_link_exists()`
-    pub fn is_dangling(&self) -> Result<bool> {
+    fn is_dangling(&self) -> Result<bool> {
         self.fs_link_exists().map(|b| !b)
     }
 
@@ -336,7 +195,7 @@ impl<'a> Ref<'a> {
     /// This includes:
     /// - Hashsum of the file is still the same as stored in the Ref
     /// - file permissions are still valid
-    pub fn fs_link_valid(&self) -> Result<bool> {
+    fn fs_link_valid(&self) -> Result<bool> {
         match (self.fs_link_valid_permissions(), self.fs_link_valid_hash()) {
             (Ok(true) , Ok(true)) => Ok(true),
             (Ok(_) , Ok(_)) => Ok(false),
@@ -347,8 +206,8 @@ impl<'a> Ref<'a> {
 
     /// Check whether the file permissions of the referenced file are equal to the stored
     /// permissions
-    pub fn fs_link_valid_permissions(&self) -> Result<bool> {
-        self.0
+    fn fs_link_valid_permissions(&self) -> Result<bool> {
+        self
             .get_header()
             .read("ref.permissions.ro")
             .chain_err(|| REK::HeaderFieldReadError)
@@ -364,7 +223,7 @@ impl<'a> Ref<'a> {
     }
 
     /// Check whether the Hashsum of the referenced file is equal to the stored hashsum
-    pub fn fs_link_valid_hash(&self) -> Result<bool> {
+    fn fs_link_valid_hash(&self) -> Result<bool> {
         let stored_hash = try!(self.get_stored_hash());
         let current_hash = try!(self.get_current_hash());
         Ok(stored_hash == current_hash)
@@ -372,23 +231,23 @@ impl<'a> Ref<'a> {
 
     /// Update the Ref by re-checking the file from FS
     /// This errors if the file is not present or cannot be read()
-    pub fn update_ref(&mut self) -> Result<()> {
+    fn update_ref(&mut self) -> Result<()> {
         self.update_ref_with_hasher(&DefaultHasher::new())
     }
 
     /// Update the Ref by re-checking the file from FS using the passed Hasher instance
     /// This errors if the file is not present or cannot be read()
-    pub fn update_ref_with_hasher<H: Hasher>(&mut self, h: &H) -> Result<()> {
+    fn update_ref_with_hasher<H: Hasher>(&mut self, h: &H) -> Result<()> {
         let current_hash = try!(self.get_current_hash()); // uses the default hasher
         let current_perm = try!(self.get_current_permissions());
 
-        try!(self.0
+        try!(self
             .get_header_mut()
             .set("ref.permissions.ro", Value::Boolean(current_perm.readonly()))
             .chain_err(|| REK::StoreWriteError)
         );
 
-        try!(self.0
+        try!(self
            .get_header_mut()
            .set(&format!("ref.content_hash.{}", h.hash_name())[..], Value::String(current_hash))
            .chain_err(|| REK::StoreWriteError)
@@ -398,8 +257,8 @@ impl<'a> Ref<'a> {
     }
 
     /// Get the path of the file which is reffered to by this Ref
-    pub fn fs_file(&self) -> Result<PathBuf> {
-        match self.0.get_header().read("ref.path") {
+    fn fs_file(&self) -> Result<PathBuf> {
+        match self.get_header().read("ref.path") {
             Ok(Some(&Value::String(ref s))) => Ok(PathBuf::from(s)),
             Ok(Some(_)) => Err(RE::from_kind(REK::HeaderTypeError)),
             Ok(None) => Err(RE::from_kind(REK::HeaderFieldMissingError)),
@@ -407,53 +266,6 @@ impl<'a> Ref<'a> {
         }
     }
 
-    /// Check whether there is a reference to the file at `pb`
-    pub fn exists(store: &Store, pb: PathBuf) -> Result<bool> {
-        pb.canonicalize()
-            .chain_err(|| REK::PathCanonicalizationError)
-            .and_then(|can| {
-                Ref::hash_path(&can)
-                    .chain_err(|| REK::PathHashingError)
-            })
-            .and_then(|hash| {
-                store.retrieve_for_module("ref").map(|iter| (hash, iter))
-                    .chain_err(|| REK::StoreReadError)
-            })
-            .and_then(|(hash, possible_refs)| {
-                // This is kind of a manual Iterator::filter() call what we do here, but with the
-                // actual ::filter method we cannot return the error in a nice way, so we do it
-                // manually here. If you can come up with a better version of this, feel free to
-                // take this note as a todo.
-                for r in possible_refs {
-                    let contains_hash = try!(r.to_str()
-                        .chain_err(|| REK::TypeConversionError)
-                        .map(|s| s.contains(&hash[..])));
-
-                    if !contains_hash {
-                        continue;
-                    }
-
-                    match store.get(r) {
-                        Ok(Some(fle)) => {
-                            if Ref::read_reference(&fle).map(|path| path == pb).unwrap_or(false) {
-                                return Ok(true)
-                            }
-                        },
-
-                        Ok(None) => { // Something weird just happened
-                            return Err(RE::from_kind(REK::StoreReadError));
-                        },
-
-                        Err(e) => {
-                            return Err(e).chain_err(|| REK::StoreReadError);
-                        },
-                    }
-                }
-
-                Ok(false)
-            })
-    }
-
     /// Re-find a referenced file
     ///
     /// This function tries to re-find a ref by searching all directories in `search_roots` recursively
@@ -466,11 +278,12 @@ impl<'a> Ref<'a> {
     /// # Warning
     ///
     /// This option causes heavy I/O as it recursively searches the Filesystem.
-    pub fn refind(&self, search_roots: Option<Vec<PathBuf>>) -> Result<PathBuf> {
+    fn refind(&self, search_roots: Option<Vec<PathBuf>>) -> Result<PathBuf> {
         self.refind_with_hasher(search_roots, DefaultHasher::new())
     }
 
-    pub fn refind_with_hasher<H: Hasher>(&self, search_roots: Option<Vec<PathBuf>>, mut h: H)
+    /// See documentation of `Ref::refind()`
+    fn refind_with_hasher<H: Hasher>(&self, search_roots: Option<Vec<PathBuf>>, mut h: H)
         -> Result<PathBuf>
     {
         use itertools::Itertools;
@@ -514,43 +327,19 @@ impl<'a> Ref<'a> {
             })
     }
 
-}
-
-impl<'a> Deref for Ref<'a> {
-    type Target = FileLockEntry<'a>;
-
-    fn deref(&self) -> &FileLockEntry<'a> {
-        &self.0
-    }
-
-}
-
-impl<'a> DerefMut for Ref<'a> {
-
-    fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {
-        &mut self.0
-    }
-
-}
-
-impl<'a> Display for Ref<'a> {
-
-    fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {
-        let path = self.fs_file()
-            .map(|pb| String::from(pb.to_str().unwrap_or("<UTF8-Error>")))
-            .unwrap_or(String::from("Could not read Path from reference object"));
-
-        let hash = self.get_stored_hash().unwrap_or(String::from("<could not read hash>"));
-
-        write!(fmt, "Ref({} -> {})", hash, path)
-    }
-
-}
-
-impl<'a> Into<FileLockEntry<'a>> for Ref<'a> {
-
-    fn into(self) -> FileLockEntry<'a> {
-        self.0
+    /// Get the permissions of the file which are present
+    fn get_current_permissions(&self) -> Result<Permissions> {
+        self.fs_file()
+            .and_then(|pb| {
+                File::open(pb)
+                    .chain_err(|| REK::HeaderFieldReadError)
+            })
+            .and_then(|file| {
+                file
+                    .metadata()
+                    .map(|md| md.permissions())
+                    .chain_err(|| REK::RefTargetCannotReadPermissions)
+            })
     }
 
 }
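For orientation after this rewrite, here is a minimal, hypothetical usage sketch (not part of the diff): it assumes the new `Ref` trait above is in scope, that `Entry` is `libimagstore::store::Entry`, and that the entry was created through the `RefStore` interface introduced below, so its `ref.*` header fields are populated.

    // Hypothetical sketch, assuming the `Ref` trait and the crate's `Result` are in scope.
    fn is_still_valid(entry: &Entry) -> Result<bool> {
        // "ref.path" header -> PathBuf of the referenced file
        let path = try!(entry.fs_file());
        if !path.exists() {
            // same information as entry.is_dangling()
            return Ok(false);
        }
        // do the stored permissions and content hash still match the file on disk?
        entry.fs_link_valid()
    }

The point of the change is visible here: the entry itself now is the ref, so the `Ref<'a>` wrapper and its `Deref`/`DerefMut`/`Into` plumbing are no longer needed at the call site.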
lib/entry/libimagentryref/src/refstore.rs (new file, 268 lines)
@@ -0,0 +1,268 @@
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//

use std::path::PathBuf;
use std::collections::BTreeMap;
use std::fs::File;

use libimagstore::store::FileLockEntry;
use libimagstore::storeid::StoreId;
use libimagstore::storeid::IntoStoreId;
use libimagstore::store::Store;

use toml::Value;

use error::RefErrorKind as REK;
use error::RefError as RE;
use error::ResultExt;
use error::Result;
use flags::RefFlags;
use hasher::*;
use module_path::ModuleEntryPath;
use util::*;

pub trait RefStore {

    /// Check whether there is a reference to the file at `pb`
    fn exists(&self, pb: PathBuf) -> Result<bool>;

    /// Try to get `si` as Ref object from the store
    fn get<'a>(&'a self, si: StoreId) -> Result<FileLockEntry<'a>>;

    /// Get a Ref object from the store by hash.
    ///
    /// Returns None if the hash cannot be found.
    fn get_by_hash<'a>(&'a self, hash: String) -> Result<Option<FileLockEntry<'a>>>;

    /// Delete a ref by hash
    ///
    /// If the returned Result contains an error, the ref might not be deleted.
    fn delete_by_hash(&self, hash: String) -> Result<()>;

    /// Create a Ref object which refers to `pb`
    fn create<'a>(&'a self, pb: PathBuf, flags: RefFlags) -> Result<FileLockEntry<'a>>;

    fn create_with_hasher<'a, H: Hasher>(&'a self, pb: PathBuf, flags: RefFlags, h: H)
        -> Result<FileLockEntry<'a>>;

}

impl RefStore for Store {

    /// Check whether there is a reference to the file at `pb`
    fn exists(&self, pb: PathBuf) -> Result<bool> {
        pb.canonicalize()
            .chain_err(|| REK::PathCanonicalizationError)
            .and_then(|c| hash_path(&c))
            .chain_err(|| REK::PathHashingError)
            .and_then(|hash| {
                self.retrieve_for_module("ref")
                    .map(|iter| (hash, iter))
                    .chain_err(|| REK::StoreReadError)
            })
            .and_then(|(hash, possible_refs)| {
                // This is kind of a manual Iterator::filter() call what we do here, but with the
                // actual ::filter method we cannot return the error in a nice way, so we do it
                // manually here. If you can come up with a better version of this, feel free to
                // take this note as a todo.
                for r in possible_refs {
                    let contains_hash = try!(r.to_str()
                        .chain_err(|| REK::TypeConversionError)
                        .map(|s| s.contains(&hash[..]))
                    );

                    if !contains_hash {
                        continue;
                    }

                    match self.get(r) {
                        Ok(Some(fle)) => {
                            if read_reference(&fle).map(|path| path == pb).unwrap_or(false) {
                                return Ok(true)
                            }
                        },

                        Ok(None) => return Err(RE::from_kind(REK::StoreReadError)),
                        Err(e) => return Err(e).chain_err(|| REK::StoreReadError)
                    }
                }

                Ok(false)
            })
    }

    /// Try to get `si` as Ref object from the store
    fn get<'a>(&'a self, si: StoreId) -> Result<FileLockEntry<'a>> {
        match self.get(si) {
            Err(e) => return Err(e).chain_err(|| REK::StoreReadError),
            Ok(None) => return Err(RE::from_kind(REK::RefNotInStore)),
            Ok(Some(fle)) => Ok(fle),
        }
    }

    /// Get a Ref object from the store by hash.
    ///
    /// Returns None if the hash cannot be found.
    fn get_by_hash<'a>(&'a self, hash: String) -> Result<Option<FileLockEntry<'a>>> {
        ModuleEntryPath::new(hash)
            .into_storeid()
            .and_then(|id| self.get(id))
            .chain_err(|| REK::StoreReadError)
    }

    /// Delete a ref by hash
    ///
    /// If the returned Result contains an error, the ref might not be deleted.
    fn delete_by_hash(&self, hash: String) -> Result<()> {
        ModuleEntryPath::new(hash)
            .into_storeid()
            .and_then(|id| self.delete(id))
            .chain_err(|| REK::StoreWriteError)
    }

    /// Create a Ref object which refers to `pb`
    fn create<'a>(&'a self, pb: PathBuf, flags: RefFlags) -> Result<FileLockEntry<'a>> {
        self.create_with_hasher(pb, flags, DefaultHasher::new())
    }

    fn create_with_hasher<'a, H: Hasher>(&'a self, pb: PathBuf, flags: RefFlags, mut h: H)
        -> Result<FileLockEntry<'a>>
    {
        use toml_query::insert::TomlValueInsertExt;

        if !pb.exists() {
            return Err(RE::from_kind(REK::RefTargetDoesNotExist));
        }
        if flags.get_content_hashing() && pb.is_dir() {
            return Err(RE::from_kind(REK::RefTargetCannotBeHashed));
        }

        let (mut fle, content_hash, permissions, canonical_path) = { // scope to be able to fold
            try!(File::open(pb.clone())
                .chain_err(|| REK::RefTargetFileCannotBeOpened)

                // If we were able to open this file,
                // we hash the contents of the file and return (file, hash)
                .and_then(|mut file| {
                    let opt_contenthash = if flags.get_content_hashing() {
                        Some(try!(h.create_hash(&pb, &mut file)))
                    } else {
                        None
                    };

                    Ok((file, opt_contenthash))
                })

                // and then we get the permissions if we have to
                // and return (file, content hash, permissions)
                .and_then(|(file, opt_contenthash)| {
                    let opt_permissions = if flags.get_permission_tracking() {
                        Some(try!(file
                            .metadata()
                            .map(|md| md.permissions())
                            .chain_err(|| REK::RefTargetCannotReadPermissions)
                        ))
                    } else {
                        None
                    };

                    Ok((opt_contenthash, opt_permissions))
                })

                // and then we try to canonicalize the PathBuf, because we want to store a
                // canonicalized path
                // and return (file, content hash, permissions, canonicalized path)
                .and_then(|(opt_contenthash, opt_permissions)| {
                    pb.canonicalize()
                        .map(|can| (opt_contenthash, opt_permissions, can))
                        // if PathBuf::canonicalize() failed, build an error from the return value
                        .chain_err(|| REK::PathCanonicalizationError)
                })

                // and then we hash the canonicalized path
                // and return (file, content hash, permissions, canonicalized path, path hash)
                .and_then(|(opt_contenthash, opt_permissions, can)| {
                    let path_hash = try!(hash_path(&can).chain_err(|| REK::PathHashingError));

                    Ok((opt_contenthash, opt_permissions, can, path_hash))
                })

                // and then we convert the PathBuf of the canonicalized path to a String to be able
                // to save it in the Ref FileLockEntry obj
                // and return
                // (file, content hash, permissions, canonicalized path as String, path hash)
                .and_then(|(opt_conhash, opt_perm, can, path_hash)| {
                    match can.to_str().map(String::from) {
                        // UTF convert error in PathBuf::to_str(),
                        None => Err(RE::from_kind(REK::PathUTF8Error)),
                        Some(can) => Ok((opt_conhash, opt_perm, can, path_hash))
                    }
                })

                // and then we create the FileLockEntry in the Store
                // and return (filelockentry, content hash, permissions, canonicalized path)
                .and_then(|(opt_conhash, opt_perm, can, path_hash)| {
                    let fle = try!(self
                        .create(ModuleEntryPath::new(path_hash))
                        .chain_err(|| REK::StoreWriteError)
                    );

                    Ok((fle, opt_conhash, opt_perm, can))
                })
            )
        };

        for tpl in [
            Some((String::from("ref"), Value::Table(BTreeMap::new()))),
            Some((String::from("ref.permissions"), Value::Table(BTreeMap::new()))),
            Some((String::from("ref.path"), Value::String(canonical_path))),
            Some((String::from("ref.content_hash"), Value::Table(BTreeMap::new()))),

            content_hash.map(|hash| {
                (format!("ref.content_hash.{}", h.hash_name()), Value::String(hash))
            }),
            permissions.map(|p| {
                (String::from("ref.permissions.ro"), Value::Boolean(p.readonly()))
            }),
        ].into_iter()
        {
            match tpl {
                &Some((ref s, ref v)) => {
                    match fle.get_header_mut().insert(s, v.clone()) {
                        Ok(Some(_)) => {
                            let e = RE::from_kind(REK::HeaderFieldAlreadyExistsError);
                            return Err(e).chain_err(|| REK::HeaderFieldWriteError);
                        },
                        Ok(None) => {
                            // Okay, we just inserted a new header value...
                        },
                        Err(e) => return Err(e).chain_err(|| REK::HeaderFieldWriteError),
                    }
                }
                &None => {
                    debug!("Not going to insert.");
                }
            }
        }

        Ok(fle)
    }

}
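A hypothetical caller of the new `RefStore` trait (not part of the diff) could look like the sketch below. It assumes a `Store` handle, a `RefFlags` value, and that `RefStore` plus the crate's `Result` are in scope; the fully qualified calls are used only to disambiguate from the store's inherent `get`/`create` methods, which take different arguments.

    // Hypothetical sketch only; the helper function and its names are illustrative.
    fn track_file(store: &Store, file: PathBuf, flags: RefFlags) -> Result<()> {
        if try!(RefStore::exists(store, file.clone())) {
            return Ok(()); // already referenced, nothing to do
        }
        // canonicalizes the path, hashes it and writes the "ref.*" header fields
        let _fle = try!(RefStore::create(store, file, flags));
        Ok(())
    }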
lib/entry/libimagentryref/src/util.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//

use std::path::PathBuf;

use error::RefErrorKind as REK;
use error::RefError as RE;
use error::Result;
use error::ResultExt;

use libimagstore::store::Entry;

use toml::Value;
use toml_query::read::TomlValueReadExt;

/// Creates a Hash from a PathBuf by making the PathBuf absolute and then running a hash
/// algorithm on it
pub fn hash_path(pb: &PathBuf) -> Result<String> {
    use crypto::sha1::Sha1;
    use crypto::digest::Digest;

    match pb.to_str() {
        Some(s) => {
            let mut hasher = Sha1::new();
            hasher.input_str(s);
            Ok(hasher.result_str())
        },
        None => return Err(RE::from_kind(REK::PathUTF8Error)),
    }
}

/// Read the reference from a file
pub fn read_reference(refentry: &Entry) -> Result<PathBuf> {
    match refentry.get_header().read("ref.path") {
        Ok(Some(&Value::String(ref s))) => Ok(PathBuf::from(s)),
        Ok(Some(_)) => Err(RE::from_kind(REK::HeaderTypeError)),
        Ok(None) => Err(RE::from_kind(REK::HeaderFieldMissingError)),
        Err(e) => Err(e).chain_err(|| REK::StoreReadError),
    }
}
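Since the store id of a ref entry is derived from `hash_path()` of the canonicalized target, an existing ref can be located without scanning the whole store. A hypothetical helper (not part of the diff), assuming the imports of `util.rs` above:

    // Hypothetical sketch: returns the hex-encoded SHA-1 of the canonical path string,
    // which is what the ref's store id is built from.
    fn store_hash_for(target: &PathBuf) -> Result<String> {
        let canonical = try!(target.canonicalize()
            .chain_err(|| REK::PathCanonicalizationError));
        hash_path(&canonical)
    }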
@@ -49,8 +49,6 @@ extern crate libimagstore;
 extern crate libimagerror;
 
 pub mod error;
-pub mod exec;
 pub mod tag;
 pub mod tagable;
-pub mod ui;
 
(deleted file, 125 lines)
@@ -1,125 +0,0 @@
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//

use clap::{Arg, ArgMatches, App, SubCommand};

use tag::Tag;
use tag::is_tag;

/// Generates a `clap::SubCommand` to be integrated in the commandline-ui builder for building a
/// "tags --add foo --remove bar" subcommand to do tagging action.
pub fn tag_subcommand<'a, 'b>() -> App<'a, 'b> {
    SubCommand::with_name(tag_subcommand_name())
        .author("Matthias Beyer <mail@beyermatthias.de>")
        .version("0.1")
        .about("Add or remove tags")
        .arg(tag_add_arg())
        .arg(tag_remove_arg())
}

pub fn tag_add_arg<'a, 'b>() -> Arg<'a, 'b> {
    Arg::with_name(tag_subcommand_add_arg_name())
        .short("a")
        .long("add")
        .takes_value(true)
        .value_name("tags")
        .multiple(true)
        .validator(is_tag)
        .help("Add tags, seperated by comma or by specifying multiple times")
}

pub fn tag_remove_arg<'a, 'b>() -> Arg<'a, 'b> {
    Arg::with_name(tag_subcommand_remove_arg_name())
        .short("r")
        .long("remove")
        .takes_value(true)
        .value_name("tags")
        .multiple(true)
        .validator(is_tag)
        .help("Remove tags, seperated by comma or by specifying multiple times")
}

pub fn tag_subcommand_name() -> &'static str {
    "tags"
}

pub fn tag_subcommand_add_arg_name() -> &'static str {
    "add-tags"
}

pub fn tag_subcommand_remove_arg_name() -> &'static str {
    "remove-tags"
}

pub fn tag_subcommand_names() -> Vec<&'static str> {
    vec![tag_subcommand_add_arg_name(), tag_subcommand_remove_arg_name()]
}

/// Generates a `clap::Arg` which can be integrated into the commandline-ui builder for building a
/// "-t" or "--tags" argument which takes values for tagging actions (add, remove)
pub fn tag_argument<'a, 'b>() -> Arg<'a, 'b> {
    Arg::with_name(tag_argument_name())
        .short("t")
        .long("tags")
        .takes_value(true)
        .multiple(true)
        .validator(is_tag)
        .help("Add or remove tags, prefixed by '+' (for adding) or '-' (for removing)")
}

pub fn tag_argument_name() -> &'static str {
    "specify-tags"
}

/// Get the tags which should be added from the commandline
///
/// Returns none if the argument was not specified
pub fn get_add_tags(matches: &ArgMatches) -> Option<Vec<Tag>> {
    let add = tag_subcommand_add_arg_name();
    extract_tags(matches, add, '+')
        .or_else(|| matches.values_of(add).map(|values| values.map(String::from).collect()))
}

/// Get the tags which should be removed from the commandline
///
/// Returns none if the argument was not specified
pub fn get_remove_tags(matches: &ArgMatches) -> Option<Vec<Tag>> {
    let rem = tag_subcommand_remove_arg_name();
    extract_tags(matches, rem, '+')
        .or_else(|| matches.values_of(rem).map(|values| values.map(String::from).collect()))
}

fn extract_tags(matches: &ArgMatches, specifier: &str, specchar: char) -> Option<Vec<Tag>> {
    if let Some(submatch) = matches.subcommand_matches("tags") {
        submatch.values_of(specifier)
            .map(|values| values.map(String::from).collect())
    } else {
        matches.values_of("specify-tags")
            .map(|argmatches| {
                argmatches
                    .map(String::from)
                    .filter(|s| s.starts_with(specchar))
                    .map(|s| {
                        String::from(s.split_at(1).1)
                    })
                    .collect()
            })
    }
}
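For context, the removed helpers were meant to be wired into a clap-based command roughly as in the hypothetical sketch below (not part of the diff; it assumes `clap::App` and the functions above are in scope):

    // Hypothetical illustration of how the old helpers were used.
    fn build_cli<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> {
        app.subcommand(tag_subcommand()) // provides "tags --add foo --remove bar"
           .arg(tag_argument())          // provides "-t +foo -t -bar" style tagging
    }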
@@ -19,7 +19,7 @@
 use error::InteractionError as IE;
 use error::InteractionErrorKind as IEK;
-use error::MapErrInto;
+use error::ResultExt;
 
 use toml::Value;
 
@@ -46,36 +46,36 @@ impl Readline {
         let histfile = try!(match histfile {
             Value::String(s) => PathBuf::from(s),
             _ => Err(IE::from_kind(IEK::ConfigTypeError))
-                .map_err_into(IEK::ConfigError)
-                .map_err_into(IEK::ReadlineError)
+                .chain_err(|| IEK::ConfigError)
+                .chain_err(|| IEK::ReadlineError)
         });
 
         let histsize = try!(match histsize {
             Value::Integer(i) => i,
             _ => Err(IE::from_kind(IEK::ConfigTypeError))
-                .map_err_into(IEK::ConfigError)
-                .map_err_into(IEK::ReadlineError)
+                .chain_err(|| IEK::ConfigError)
+                .chain_err(|| IEK::ReadlineError)
         });
 
         let histigndups = try!(match histigndups {
             Value::Boolean(b) => b,
             _ => Err(IE::from_kind(IEK::ConfigTypeError))
-                .map_err_into(IEK::ConfigError)
-                .map_err_into(IEK::ReadlineError)
+                .chain_err(|| IEK::ConfigError)
+                .chain_err(|| IEK::ReadlineError)
        });
 
         let histignspace = try!(match histignspace {
             Value::Boolean(b) => b,
             _ => Err(IE::from_kind(IEK::ConfigTypeError))
-                .map_err_into(IEK::ConfigError)
-                .map_err_into(IEK::ReadlineError)
+                .chain_err(|| IEK::ConfigError)
+                .chain_err(|| IEK::ReadlineError)
         });
 
         let prompt = try!(match prompt {
             Value::String(s) => s,
             _ => Err(IE::from_kind(IEK::ConfigTypeError))
-                .map_err_into(IEK::ConfigError)
-                .map_err_into(IEK::ReadlineError)
+                .chain_err(|| IEK::ConfigError)
+                .chain_err(|| IEK::ReadlineError)
         });
 
         let config = Config::builder().
@@ -88,10 +88,10 @@ impl Readline {
 
         if !histfile.exists() {
             let _ = try!(File::create(histfile.clone())
-                .map_err_into(IEK::ReadlineHistoryFileCreationError));
+                .chain_err(|| IEK::ReadlineHistoryFileCreationError));
         }
 
-        let _ = try!(editor.load_history(&histfile).map_err_into(ReadlineError));
+        let _ = try!(editor.load_history(&histfile).chain_err(|| ReadlineError));
 
         Ok(Readline {
             editor: editor,
@@ -95,19 +95,34 @@ macro_rules! make_mock_app {
             }
         }
 
+        #[allow(unused)]
         pub fn generate_minimal_test_config() -> Option<Configuration> { ::toml::de::from_str("[store]\nimplicit-create=true")
             .map(Configuration::with_value)
            .ok()
         }
 
+        #[allow(unused)]
         pub fn generate_test_runtime<'a>(mut args: Vec<&'static str>) -> Result<Runtime<'a>, RuntimeError> {
-            let mut cli_args = vec!["imag-link", "--rtp", "/tmp"];
+            let mut cli_args = vec![$appname, "--rtp", "/tmp"];
 
             cli_args.append(&mut args);
 
             let cli_app = MockLinkApp::new(cli_args);
             Runtime::with_configuration(cli_app, generate_minimal_test_config())
         }
+
+        #[allow(unused)]
+        pub fn reset_test_runtime<'a>(mut args: Vec<&'static str>, old_runtime: Runtime)
+            -> Result<Runtime<'a>, RuntimeError>
+        {
+            let mut cli_args = vec![$appname, "--rtp", "/tmp"];
+
+            cli_args.append(&mut args);
+
+            let cli_app = MockLinkApp::new(cli_args);
+            Runtime::with_configuration(cli_app, generate_minimal_test_config())
+                .map(|rt| rt.with_store(old_runtime.extract_store()))
+        }
     }
 };
scripts/hooks/applypatch-msg.check-signed-off.sh (new file, 33 lines)
@@ -0,0 +1,33 @@
#!/usr/bin/env bash
#
# An hook script to check the commit log message taken by
# applypatch from an e-mail message for proper "Signed-off-by" line(s).
#
# To enable this hook, copy this file to ".git/hooks/applypatch-msg" and make it
# executable.

#
# This hook is used when applying patches which are send via mail, to verify the
# Signed-off-by line is in the commit message.
#

. git-sh-setup

RED='\e[0;31m'    # Red
YELLOW='\e[0;33m' # Yellow
NORMAL='\e[0m'    # Text Reset

warn() {
    echo -e >&2 "${YELLOW}$*${DEFAULT}"
}

abort() {
    echo -e >&2 "${RED}$*${DEFAULT}"
    exit 1
}

headline=$(head -n 1 $1 | wc -c)
[[ $headline -gt 50 ]] && warn "Headline of patch longer than 50 chars"

grep "^Signed-off-by" $1 >/dev/null 2>/dev/null && abort "No Signed-off-by line"

scripts/hooks/pre-commit.signoffby-missing-warn.sh (new file, 17 lines)
@@ -0,0 +1,17 @@
#!/usr/bin/env bash

#
# The following snippet can be used to _WARN_ if a Signed-off-by line is missing
# in the commit message
#

RED='\e[0;31m'    # Red
NORMAL='\e[0m'    # Text Reset

if [ "1" != "$(grep -c '^Signed-off-by: ' "$1")" ]; then
    printf >&2 "%sMissing Signed-off-by line.%s\n" "$RED" "$NORMAL"

    # To not only warn, but abort the commit, uncomment the next line
    # exit 1
fi

scripts/hooks/pre-push.fixup-warn.sh (new file, 58 lines)
@@ -0,0 +1,58 @@
#!/usr/bin/env bash

#
# The following snippet can be used to WARN about "!fixup" / "WIP" / "TMP"
# commits when pushing
#
# Aborting the push is possible
#

remote="$1"
url="$2"

z40=0000000000000000000000000000000000000000

while read local_ref local_sha remote_ref remote_sha
do
    if [ "$local_sha" = $z40 ]
    then
        # Branch is deleted, nothing to check here, move along.
    else
        if [ "$remote_sha" = $z40 ]
        then
            # New branch, examine all commits
            range="$local_sha"
        else
            # Update to existing branch, examine new commits
            range="$remote_sha..$local_sha"
        fi

        # Check for WIP commit
        commit=$(git rev-list -n 1 --grep '^WIP|^TMP|!fixup' "$range")
        if [ -n "$commit" ]
        then
            echo >&2 "Found WIP commit in $local_ref, not pushing"

            # TO NOT ONLY WARN BUT ABORT UNCOMMENT THE NEXT LINE
            # exit 1
        fi

        # Check for commits without sign-off
        if [ "$remote_sha" = $z40 ]; then
            # New branch is pushed, we only want to check commits that are not
            # on master.
            range="$(git merge-base master "$local_sha")..$local_sha"
        fi
        while read ref; do
            msg=$(git log -n 1 --format=%B "$ref")
            if ! grep -q '^Signed-off-by: ' <<<"$msg"; then
                echo >&2 "Unsigned commit $ref"
                exit 1
            fi
        done < <(git rev-list "$range")
        # The process substitution above is a hack to make sure loop runs in
        # the same shell and can actually exit the whole script.
    fi
done

exit 0

scripts/hooks/pre-push.signoffby-missing-warn.sh (new file, 50 lines)
@@ -0,0 +1,50 @@
#!/usr/bin/env bash

#
# The following snippet can be used to WARN about a missing signed-off-by line
# in commits when pushing
#
# Aborting the push is possible
#

remote="$1"
url="$2"

z40=0000000000000000000000000000000000000000

while read local_ref local_sha remote_ref remote_sha
do
    if [ "$local_sha" = $z40 ]
    then
        # Branch is deleted, nothing to check here, move along.
    else
        if [ "$remote_sha" = $z40 ]
        then
            # New branch, examine all commits
            range="$local_sha"
        else
            # Update to existing branch, examine new commits
            range="$remote_sha..$local_sha"
        fi

        if [ "$remote_sha" = $z40 ]; then
            # New branch is pushed, we only want to check commits that are not
            # on master.
            range="$(git merge-base master "$local_sha")..$local_sha"
        fi
        while read ref; do
            msg=$(git log -n 1 --format=%B "$ref")
            if ! grep -q '^Signed-off-by: ' <<<"$msg"; then
                echo >&2 "Unsigned commit $ref"

                # TO NOT ONLY WARN BUT ABORT UNCOMMENT THE NEXT LINE
                # exit 1
            fi
        done < <(git rev-list "$range")
        # The process substitution above is a hack to make sure loop runs in
        # the same shell and can actually exit the whole script.
    fi
done

exit 0

scripts/signed-off-by-in-branch.sh (new file, 17 lines)
@@ -0,0 +1,17 @@
#!/usr/bin/env bash

# Checks whether all commit between $1 and $2 have a signed-off-by line

RED='\e[0;31m'    # Red
NORMAL='\e[0m'    # Text Reset

faulty=$(git rev-list --grep "Signed-off-by" --invert-grep $1..$2 | wc -l)

if [[ $faulty -eq 0 ]]
then
    echo >&2 "All good"
else
    echo -en >&2 "${RED}Got $faulty non Signed-off-by commits${NORMAL}"
    echo -e >&2 "${RED}between $1 and $2${NORMAL}"
fi