Auto merge of #88 - matthiasbeyer:init-rewrite, r=matthiasbeyer
Init rewrite Start the rewrite. @TheNeikos tell me what you think about the structure I create in this PR.
This commit is contained in:
commit
fab165d72f
53 changed files with 85 additions and 4700 deletions
20
.travis.yml
20
.travis.yml
|
@ -16,14 +16,24 @@ before_script:
|
|||
|
||||
script:
|
||||
- |
|
||||
if [[ "doc" == $(git diff --name-only $TRAVIS_BRANCH..$TRAVIS_COMMIT | cut -d "/" -f 1 | uniq) ]]; then
|
||||
echo "There are only changes in the ./doc directory... not doing anything"
|
||||
else
|
||||
changes_in() {
|
||||
[[ $(git diff --name-only $TRAVIS_BRANCH..$TRAVIS_COMMIT | \
|
||||
cut -d "/" -f 1 | \
|
||||
grep "$n") ]] > /dev/null
|
||||
}
|
||||
|
||||
travis_cargo_run_in() {
|
||||
[[ -d "$1" ]] &&
|
||||
cd "$1" &&
|
||||
travis-cargo build &&
|
||||
travis-cargo test &&
|
||||
travis-cargo bench &&
|
||||
travis-cargo --only stable doc
|
||||
fi
|
||||
travis-cargo --only stable doc &&
|
||||
cd -
|
||||
}
|
||||
|
||||
[[ $(changes_in "doc") ]] && echo "Changes in ./doc are not build by CI"
|
||||
travis_cargo_run_in "."
|
||||
|
||||
addons:
|
||||
apt:
|
||||
|
|
296
Cargo.lock
generated
296
Cargo.lock
generated
|
@ -2,297 +2,25 @@
|
|||
name = "imag"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ansi_term 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"chrono 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 1.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"config 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glob 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"hoedown 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"open 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"prettytable-rs 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"term_grid 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 0.2.37 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"yaml-rust 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libimagmodule 0.1.0",
|
||||
"libimagrt 0.1.0",
|
||||
"libimagstore 0.1.0",
|
||||
"libimagutil 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "advapi32-sys"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
name = "libimagmodule"
|
||||
version = "0.1.0"
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"memchr 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
name = "libimagrt"
|
||||
version = "0.1.0"
|
||||
|
||||
[[package]]
|
||||
name = "ansi_term"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
name = "libimagstore"
|
||||
version = "0.1.0"
|
||||
|
||||
[[package]]
|
||||
name = "ansi_term"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "1.4.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"ansi_term 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"strsim 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"yaml-rust 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "config"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"nom 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gcc"
|
||||
version = "0.3.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.2.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "hoedown"
|
||||
version = "3.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"bitflags 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"gcc 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.4.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "kernel32-sys"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matches"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "0.3.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "num"
|
||||
version = "0.1.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "open"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "prettytable-rs"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"term 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-width 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.3.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "0.1.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"aho-corasick 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex-syntax 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "rustc-serialize"
|
||||
version = "0.3.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "term"
|
||||
version = "0.2.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "term_grid"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"unicode-width 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "time"
|
||||
version = "0.1.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-width"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "0.2.37"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "0.1.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-build"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "yaml-rust"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "yaml-rust"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
name = "libimagutil"
|
||||
version = "0.1.0"
|
||||
|
||||
|
|
28
Cargo.toml
28
Cargo.toml
|
@ -10,27 +10,15 @@ readme = "README.md"
|
|||
description = "CLI PIM suite with nice API, so you can use an MUA, Editor, etc. of your choice"
|
||||
keywords = ["PIM", "personal", "information", "management", "cli", "vcard", "ical", "wiki", "bookmark", "todo", "rss" ]
|
||||
|
||||
[dependencies]
|
||||
log = "0.3.2"
|
||||
regex = "0.1.41"
|
||||
url = "0.2.37"
|
||||
uuid = "0.1.18"
|
||||
glob = "0.2.10"
|
||||
[dependencies.libimagmodule]
|
||||
path = "./libimagmodule"
|
||||
|
||||
config = "0.1.2"
|
||||
[dependencies.libimagrt]
|
||||
path = "./libimagstore"
|
||||
|
||||
chrono = "0.2.16"
|
||||
[dependencies.libimagstore]
|
||||
path = "./libimagstore"
|
||||
|
||||
serde = "0.6.1"
|
||||
serde_json = "0.6.0"
|
||||
|
||||
clap = { version = "1.4.5", features = ["yaml"] }
|
||||
term_grid = "0.1.2"
|
||||
prettytable-rs = "0.5.1"
|
||||
open = "1.1.0"
|
||||
itertools = "0.4.5"
|
||||
hoedown = "3.0.3"
|
||||
ansi_term = "0.7.1"
|
||||
rand = "0.3"
|
||||
yaml-rust = "0.3.0"
|
||||
[dependencies.libimagutil]
|
||||
path = "./libimagutil"
|
||||
|
||||
|
|
1
libimagmodule/.gitignore
vendored
Normal file
1
libimagmodule/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
target
|
4
libimagmodule/Cargo.lock
generated
Normal file
4
libimagmodule/Cargo.lock
generated
Normal file
|
@ -0,0 +1,4 @@
|
|||
[root]
|
||||
name = "libimagmodule"
|
||||
version = "0.1.0"
|
||||
|
4
libimagmodule/Cargo.toml
Normal file
4
libimagmodule/Cargo.toml
Normal file
|
@ -0,0 +1,4 @@
|
|||
[package]
|
||||
name = "libimagmodule"
|
||||
version = "0.1.0"
|
||||
authors = ["Matthias Beyer <mail@beyermatthias.de>"]
|
3
libimagmodule/src/lib.rs
Normal file
3
libimagmodule/src/lib.rs
Normal file
|
@ -0,0 +1,3 @@
|
|||
#[test]
|
||||
fn it_works() {
|
||||
}
|
1
libimagrt/.gitignore
vendored
Normal file
1
libimagrt/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
target
|
4
libimagrt/Cargo.lock
generated
Normal file
4
libimagrt/Cargo.lock
generated
Normal file
|
@ -0,0 +1,4 @@
|
|||
[root]
|
||||
name = "libimagrt"
|
||||
version = "0.1.0"
|
||||
|
4
libimagrt/Cargo.toml
Normal file
4
libimagrt/Cargo.toml
Normal file
|
@ -0,0 +1,4 @@
|
|||
[package]
|
||||
name = "libimagrt"
|
||||
version = "0.1.0"
|
||||
authors = ["Matthias Beyer <mail@beyermatthias.de>"]
|
3
libimagrt/src/lib.rs
Normal file
3
libimagrt/src/lib.rs
Normal file
|
@ -0,0 +1,3 @@
|
|||
#[test]
|
||||
fn it_works() {
|
||||
}
|
1
libimagstore/.gitignore
vendored
Normal file
1
libimagstore/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
target
|
4
libimagstore/Cargo.lock
generated
Normal file
4
libimagstore/Cargo.lock
generated
Normal file
|
@ -0,0 +1,4 @@
|
|||
[root]
|
||||
name = "libimagstore"
|
||||
version = "0.1.0"
|
||||
|
4
libimagstore/Cargo.toml
Normal file
4
libimagstore/Cargo.toml
Normal file
|
@ -0,0 +1,4 @@
|
|||
[package]
|
||||
name = "libimagstore"
|
||||
version = "0.1.0"
|
||||
authors = ["Matthias Beyer <mail@beyermatthias.de>"]
|
3
libimagstore/src/lib.rs
Normal file
3
libimagstore/src/lib.rs
Normal file
|
@ -0,0 +1,3 @@
|
|||
#[test]
|
||||
fn it_works() {
|
||||
}
|
1
libimagutil/.gitignore
vendored
Normal file
1
libimagutil/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
target
|
4
libimagutil/Cargo.lock
generated
Normal file
4
libimagutil/Cargo.lock
generated
Normal file
|
@ -0,0 +1,4 @@
|
|||
[root]
|
||||
name = "libimagutil"
|
||||
version = "0.1.0"
|
||||
|
4
libimagutil/Cargo.toml
Normal file
4
libimagutil/Cargo.toml
Normal file
|
@ -0,0 +1,4 @@
|
|||
[package]
|
||||
name = "libimagutil"
|
||||
version = "0.1.0"
|
||||
authors = ["Matthias Beyer <mail@beyermatthias.de>"]
|
3
libimagutil/src/lib.rs
Normal file
3
libimagutil/src/lib.rs
Normal file
|
@ -0,0 +1,3 @@
|
|||
#[test]
|
||||
fn it_works() {
|
||||
}
|
93
src/cli.rs
93
src/cli.rs
|
@ -1,93 +0,0 @@
|
|||
use std::fmt::{Debug, Formatter, Error};
|
||||
|
||||
extern crate clap;
|
||||
use clap::{App, ArgMatches};
|
||||
|
||||
pub struct ModuleConfig {
|
||||
pub load : bool,
|
||||
}
|
||||
|
||||
impl ModuleConfig {
|
||||
pub fn new() -> ModuleConfig {
|
||||
ModuleConfig {
|
||||
load: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CliConfig<'a> {
|
||||
pub module_configs : Vec<ModuleConfig>,
|
||||
pub cli_matches : ArgMatches<'a, 'a>,
|
||||
}
|
||||
|
||||
impl<'a> CliConfig<'a> {
|
||||
pub fn new(app : clap::App<'a, 'a, 'a, 'a, 'a, 'a>) -> CliConfig<'a> {
|
||||
CliConfig {
|
||||
module_configs: vec![],
|
||||
cli_matches: app.get_matches(),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the CLI says we should run verbose
|
||||
*/
|
||||
pub fn is_verbose(&self) -> bool {
|
||||
self.cli_matches.is_present("verbose") || self.is_debugging()
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the CLI says we should run in debugging
|
||||
*/
|
||||
pub fn is_debugging(&self) -> bool {
|
||||
self.cli_matches.is_present("debug")
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the CLI says we should run with reporting
|
||||
*/
|
||||
pub fn report_exit(&self) -> bool {
|
||||
self.cli_matches.is_present("report")
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the runtime path the CLI configured
|
||||
*/
|
||||
pub fn get_rtp(&self) -> Option<String> {
|
||||
self.cli_matches.value_of("rtp").and_then(|s| Some(String::from(s)))
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the store path the CLI configured
|
||||
*
|
||||
* TODO: Implement properly. Not working by now.
|
||||
*/
|
||||
pub fn store_path(&self) -> Option<String> {
|
||||
self.get_rtp().and_then(|rtp| {
|
||||
self.cli_matches
|
||||
.value_of("storepath")
|
||||
.and_then(|s| Some(rtp + s))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn editor(&self) -> Option<String> {
|
||||
self.cli_matches.value_of("editor").and_then(|s| Some(String::from(s)))
|
||||
}
|
||||
|
||||
pub fn editor_opts(&self) -> String {
|
||||
self.cli_matches
|
||||
.value_of("editor_opts")
|
||||
.map(|s| String::from(s))
|
||||
.unwrap_or(String::from(""))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Debug for CliConfig<'a> {
|
||||
|
||||
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
|
||||
write!(f, "CliConfig (verbose: {}, debugging: {}, rtp: {})",
|
||||
self.is_verbose(),
|
||||
self.is_debugging(),
|
||||
self.get_rtp().or(Some(String::from("NONE"))).unwrap())
|
||||
}
|
||||
|
||||
}
|
|
@ -1,124 +0,0 @@
|
|||
use std::fmt::{Debug, Formatter, Error};
|
||||
use std::path::Path;
|
||||
|
||||
use config::reader::from_file;
|
||||
use config::types::Config as Cfg;
|
||||
use cli::CliConfig;
|
||||
|
||||
/**
|
||||
* Configuration object which represents the configuration file.
|
||||
*
|
||||
* It gets passed a CliConfig object on ::new(), retreives some data from this one which is then
|
||||
* provided as default value to the callee if there is no value for it in the configuration.
|
||||
*
|
||||
* TODO: Setup is kinda ugly, as we re-use data from the CLI, which is the job of the Runtime
|
||||
* object later.
|
||||
*/
|
||||
pub struct Configuration {
|
||||
pub rtp : String,
|
||||
pub store_sub : String,
|
||||
pub editor : Option<String>,
|
||||
pub editor_opts : String,
|
||||
pub report_exit : bool,
|
||||
}
|
||||
|
||||
impl Configuration {
|
||||
|
||||
pub fn new(config: &CliConfig) -> Configuration {
|
||||
let rtp = rtp_path(config).or(default_path()).unwrap_or(String::from("/tmp/"));
|
||||
|
||||
|
||||
let cfg = fetch_config(&rtp);
|
||||
|
||||
let store_sub = String::from(cfg.lookup_str("store").unwrap_or("/store"));
|
||||
let editor = cfg.lookup_str("editor").map(String::from);
|
||||
let editor_opts = String::from(cfg.lookup_str("editor-opts").unwrap_or(""));
|
||||
let report_exit = cfg.lookup_boolean("report-exit").unwrap_or(false);
|
||||
|
||||
debug!("Building configuration");
|
||||
debug!(" - store sub : {}", store_sub);
|
||||
debug!(" - runtimepath: {}", rtp);
|
||||
debug!(" - editor : {:?}", editor);
|
||||
debug!(" - editor-opts: {}", editor_opts);
|
||||
debug!(" - report exit: {}", report_exit);
|
||||
|
||||
Configuration {
|
||||
store_sub: store_sub,
|
||||
rtp: rtp,
|
||||
editor: editor,
|
||||
editor_opts: editor_opts,
|
||||
report_exit: report_exit,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the store path the configuration configured
|
||||
*/
|
||||
pub fn store_path(&self) -> String {
|
||||
format!("{}{}", self.rtp, self.store_sub)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the runtime path the configuration configured
|
||||
*/
|
||||
pub fn get_rtp(&self) -> String {
|
||||
self.rtp.clone()
|
||||
}
|
||||
|
||||
pub fn editor(&self) -> Option<String> {
|
||||
self.editor.clone()
|
||||
}
|
||||
|
||||
pub fn editor_opts(&self) -> String {
|
||||
self.editor_opts.clone()
|
||||
}
|
||||
|
||||
pub fn report_exit(&self) -> bool {
|
||||
self.report_exit
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to get the runtimepath from the CLI
|
||||
*/
|
||||
fn rtp_path(config: &CliConfig) -> Option<String> {
|
||||
config.cli_matches.value_of("rtp")
|
||||
.and_then(|s| Some(String::from(s)))
|
||||
}
|
||||
|
||||
fn fetch_config(rtp: &String) -> Cfg {
|
||||
use std::process::exit;
|
||||
|
||||
let configpath = format!("{}{}", rtp, "/config");
|
||||
from_file(Path::new(&configpath)).map_err(|e| {
|
||||
println!("Error loading config at '{}' -> {:?}", configpath, e);
|
||||
println!("Exiting now.");
|
||||
exit(1)
|
||||
}).unwrap()
|
||||
}
|
||||
|
||||
/**
|
||||
* Default runtime path, if available.
|
||||
*/
|
||||
fn default_path() -> Option<String> {
|
||||
use std::env::home_dir;
|
||||
|
||||
home_dir().and_then(|mut buf| {
|
||||
buf.push("/.imag");
|
||||
buf.to_str().map(|s| String::from(s))
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
impl Debug for Configuration {
|
||||
|
||||
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
|
||||
write!(f, "Configuration (rtp: {}, store path: {})",
|
||||
self.get_rtp(),
|
||||
self.store_path()
|
||||
)
|
||||
}
|
||||
|
||||
}
|
||||
|
80
src/main.rs
80
src/main.rs
|
@ -1,80 +1,4 @@
|
|||
#![deny(unused_imports)]
|
||||
#![deny(unused_variables)]
|
||||
#![deny(unused_must_use)]
|
||||
#![deny(unused_mut)]
|
||||
#![deny(non_shorthand_field_patterns)]
|
||||
|
||||
#[macro_use] extern crate clap;
|
||||
#[macro_use] extern crate log;
|
||||
#[macro_use] extern crate serde;
|
||||
#[macro_use] extern crate serde_json;
|
||||
#[macro_use] extern crate glob;
|
||||
#[macro_use] extern crate uuid;
|
||||
#[macro_use] extern crate regex;
|
||||
#[macro_use] extern crate prettytable;
|
||||
extern crate hoedown;
|
||||
extern crate url;
|
||||
extern crate config;
|
||||
extern crate open;
|
||||
extern crate itertools;
|
||||
extern crate ansi_term;
|
||||
extern crate rand;
|
||||
extern crate yaml_rust;
|
||||
|
||||
pub use cli::CliConfig;
|
||||
pub use configuration::Configuration;
|
||||
pub use runtime::{ImagLogger, Runtime};
|
||||
pub use clap::App;
|
||||
pub use module::Module;
|
||||
|
||||
pub mod cli;
|
||||
pub mod configuration;
|
||||
pub mod runtime;
|
||||
pub mod module;
|
||||
pub mod storage;
|
||||
pub mod ui;
|
||||
pub mod util;
|
||||
|
||||
pub use module::bm::BM;
|
||||
pub use module::notes::Notes;
|
||||
|
||||
fn main() {
|
||||
use std::process::exit;
|
||||
use ansi_term::Colour::Yellow;
|
||||
|
||||
let yaml = load_yaml!("../etc/cli.yml");
|
||||
let app = App::from_yaml(yaml);
|
||||
let config = CliConfig::new(app);
|
||||
|
||||
ImagLogger::init(&config).map_err(|e| {
|
||||
error!("Could not initialize logger");
|
||||
debug!("Could not initialize logger: {:?}", e);
|
||||
exit(1);
|
||||
}).ok();
|
||||
|
||||
let configuration = Configuration::new(&config);
|
||||
|
||||
debug!("Logger created!");
|
||||
debug!("CliConfig : {:?}", &config);
|
||||
debug!("Configuration: {:?}", &configuration);
|
||||
|
||||
let rt = Runtime::new(configuration, config);
|
||||
|
||||
debug!("Runtime : {:?}", &rt);
|
||||
|
||||
let res = match rt.config.cli_matches.subcommand_name() {
|
||||
Some("bm") => BM::new(&rt).exec(rt.config.cli_matches.subcommand_matches("bm").unwrap()),
|
||||
Some("notes") => Notes::new(&rt).exec(rt.config.cli_matches.subcommand_matches("notes").unwrap()),
|
||||
_ => false,
|
||||
};
|
||||
|
||||
info!("{}", Yellow.paint(format!("Module execution ended with {}", res)));
|
||||
|
||||
if rt.report_exit() {
|
||||
if res {
|
||||
println!("Ok");
|
||||
} else {
|
||||
println!("Error");
|
||||
}
|
||||
}
|
||||
println!("It works");
|
||||
}
|
||||
|
||||
|
|
|
@ -1,40 +0,0 @@
|
|||
use module::helpers::header as headerhelpers;
|
||||
use storage::file::header::data::FileHeaderData as FHD;
|
||||
use storage::file::header::spec::FileHeaderSpec as FHS;
|
||||
|
||||
pub fn get_spec() -> FHS {
|
||||
FHS::Map {
|
||||
keys: vec![
|
||||
headerhelpers::tags::spec::url_key(),
|
||||
headerhelpers::tags::spec::tags_key(),
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_header(url: String, tags: Vec<String>) -> FHD {
|
||||
FHD::Map {
|
||||
keys: vec![
|
||||
FHD::Key {
|
||||
name: String::from("URL"),
|
||||
value: Box::new(FHD::Text(url.clone()))
|
||||
},
|
||||
FHD::Key {
|
||||
name: String::from("TAGS"),
|
||||
value: Box::new(headerhelpers::tags::data::build_tag_array(tags))
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_tags_from_header(header: &FHD) -> Vec<String> {
|
||||
headerhelpers::tags::data::get_tags_from_header(header)
|
||||
}
|
||||
|
||||
pub fn get_url_from_header(header: &FHD) -> Option<String> {
|
||||
headerhelpers::data::get_url_from_header(header)
|
||||
}
|
||||
|
||||
pub fn rebuild_header_with_tags(header: &FHD, tags: Vec<String>) -> Option<FHD> {
|
||||
get_url_from_header(header).map(|url| build_header(url, tags))
|
||||
}
|
||||
|
|
@ -1,343 +0,0 @@
|
|||
use std::fmt::{Debug, Formatter};
|
||||
use std::fmt;
|
||||
use std::ops::Deref;
|
||||
|
||||
use clap::ArgMatches;
|
||||
|
||||
use runtime::Runtime;
|
||||
use module::Module;
|
||||
|
||||
use storage::parser::Parser;
|
||||
use storage::json::parser::JsonHeaderParser;
|
||||
use module::helpers::cli::create_tag_filter;
|
||||
use module::helpers::cli::create_hash_filter;
|
||||
use module::helpers::cli::create_text_header_field_grep_filter;
|
||||
use module::helpers::cli::CliFileFilter;
|
||||
|
||||
mod header;
|
||||
|
||||
use self::header::get_url_from_header;
|
||||
use self::header::get_tags_from_header;
|
||||
|
||||
pub struct BM<'a> {
|
||||
rt: &'a Runtime<'a>,
|
||||
parser: Parser<JsonHeaderParser>,
|
||||
}
|
||||
|
||||
impl<'a> BM<'a> {
|
||||
|
||||
pub fn new(rt: &'a Runtime<'a>) -> BM<'a> {
|
||||
BM {
|
||||
rt: rt,
|
||||
parser: Parser::new(JsonHeaderParser::new(None)),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Subcommand: add
|
||||
*/
|
||||
fn command_add(&self, matches: &ArgMatches) -> bool {
|
||||
use ansi_term::Colour::{Green, Yellow, Red};
|
||||
use std::process::exit;
|
||||
use self::header::build_header;
|
||||
|
||||
let url = matches.value_of("url").map(String::from).unwrap(); // clap ensures this is present
|
||||
|
||||
if !self.validate_url(&url) {
|
||||
error!("URL validation failed, exiting.");
|
||||
exit(1);
|
||||
} else {
|
||||
debug!("Verification succeeded");
|
||||
}
|
||||
|
||||
let tags = matches.value_of("tags").and_then(|s| {
|
||||
Some(s.split(",").map(String::from).collect())
|
||||
}).unwrap_or(vec![]);
|
||||
|
||||
debug!("Building header with");
|
||||
debug!(" url = '{:?}'", url);
|
||||
debug!(" tags = '{:?}'", tags);
|
||||
let header = build_header(url, tags);
|
||||
|
||||
let fileid = self.rt
|
||||
.store()
|
||||
.new_file_with_header(self, header);
|
||||
|
||||
let result = self.rt
|
||||
.store()
|
||||
.load(self, &self.parser, &fileid)
|
||||
.map(|file| {
|
||||
info!("{}", Yellow.paint(format!("Created file in memory: {}", fileid)));
|
||||
self.rt
|
||||
.store()
|
||||
.persist(&self.parser, file)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
|
||||
if result {
|
||||
info!("{}", Red.paint("Adding worked"));
|
||||
} else {
|
||||
info!("{}", Green.paint("Adding failed"));
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn validate_url(&self, url: &String) -> bool
|
||||
{
|
||||
use util::is_url;
|
||||
|
||||
if !is_url(url) {
|
||||
error!("Url '{}' is not a valid URL. Will not store.", url);
|
||||
return false;
|
||||
}
|
||||
|
||||
let is_in_store = self.rt
|
||||
.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.iter()
|
||||
.any(|file| {
|
||||
let f = file.deref().borrow();
|
||||
get_url_from_header(f.header()).map(|url_in_store| {
|
||||
&url_in_store == url
|
||||
}).unwrap_or(false)
|
||||
});
|
||||
|
||||
if is_in_store {
|
||||
error!("URL '{}' seems to be in the store already", url);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Subcommand: list
|
||||
*/
|
||||
fn command_list(&self, matches: &ArgMatches) -> bool {
|
||||
use ui::file::{FilePrinter, TablePrinter};
|
||||
use std::ops::Deref;
|
||||
|
||||
let filter = {
|
||||
let hash_filter = create_hash_filter(matches, "id", true);
|
||||
let text_filter = create_text_header_field_grep_filter(matches, "match", "URL", true);
|
||||
let tags_filter = create_tag_filter(matches, "tags", true);
|
||||
hash_filter.and(Box::new(text_filter)).and(Box::new(tags_filter))
|
||||
};
|
||||
|
||||
let files = self.rt
|
||||
.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.into_iter()
|
||||
.filter(|file| filter.filter_file(file));
|
||||
let pretty = matches.is_present("pretty");
|
||||
let printer = TablePrinter::new(self.rt.is_verbose(), self.rt.is_debugging(), pretty);
|
||||
|
||||
printer.print_files_custom(files,
|
||||
&|file| {
|
||||
let fl = file.deref().borrow();
|
||||
let hdr = fl.header();
|
||||
let url = get_url_from_header(hdr).unwrap_or(String::from("Parser error"));
|
||||
let tags = get_tags_from_header(hdr);
|
||||
|
||||
debug!("Custom printer field: url = '{:?}'", url);
|
||||
debug!("Custom printer field: tags = '{:?}'", tags);
|
||||
|
||||
vec![url, tags.join(", ")]
|
||||
}
|
||||
);
|
||||
true
|
||||
}
|
||||
|
||||
/**
|
||||
* Subcommand: open
|
||||
*/
|
||||
fn command_open(&self, matches: &ArgMatches) -> bool {
|
||||
use ansi_term::Colour::{Green, Red};
|
||||
use open;
|
||||
|
||||
let filter : Box<CliFileFilter> = {
|
||||
let hash_filter = create_hash_filter(matches, "id", true);
|
||||
let text_filter = create_text_header_field_grep_filter(matches, "match", "URL", true);
|
||||
let tags_filter = create_tag_filter(matches, "tags", true);
|
||||
Box::new(hash_filter.and(Box::new(text_filter)).and(Box::new(tags_filter)))
|
||||
};
|
||||
let result = self.rt
|
||||
.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.iter()
|
||||
.filter(|file| filter.filter_file(file))
|
||||
.map(|file| {
|
||||
debug!("File loaded, can open now: {:?}", file);
|
||||
let f = file.deref().borrow();
|
||||
get_url_from_header(f.header()).map(|url| {
|
||||
if open::that(&url[..]).is_ok() {
|
||||
info!("{}", Green.paint(format!("open({})", url)));
|
||||
true
|
||||
} else {
|
||||
info!("{}", Red.paint(format!("could not open({})", url)));
|
||||
false
|
||||
}
|
||||
})
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.fold((0, 0), |acc, succeeded| {
|
||||
let (worked, failed) = acc;
|
||||
if succeeded {
|
||||
(worked + 1, failed)
|
||||
} else {
|
||||
(worked, failed + 1)
|
||||
}
|
||||
});
|
||||
|
||||
let (succ, fail) = result;
|
||||
info!("{}", Green.paint(format!("open() succeeded for {} files", succ)));
|
||||
info!("{}", Red.paint(format!( "open() failed for {} files", fail)));
|
||||
return fail == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Subcommand: remove
|
||||
*/
|
||||
fn command_remove(&self, matches: &ArgMatches) -> bool {
|
||||
use ansi_term::Colour::{Green, Red};
|
||||
|
||||
let filter = {
|
||||
let hash_filter = create_hash_filter(matches, "id", false);
|
||||
let text_filter = create_text_header_field_grep_filter(matches, "match", "URL", false);
|
||||
let tags_filter = create_tag_filter(matches, "tags", false);
|
||||
hash_filter.or(Box::new(text_filter)).or(Box::new(tags_filter))
|
||||
};
|
||||
|
||||
let result = self.rt
|
||||
.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.iter()
|
||||
.filter(|file| filter.filter_file(file))
|
||||
.map(|file| {
|
||||
debug!("File loaded, can remove now: {:?}", file);
|
||||
let f = file.deref().borrow();
|
||||
self.rt.store().remove(f.id().clone())
|
||||
})
|
||||
.fold((0, 0), |acc, succeeded| {
|
||||
let (worked, failed) = acc;
|
||||
if succeeded {
|
||||
(worked + 1, failed)
|
||||
} else {
|
||||
(worked, failed + 1)
|
||||
}
|
||||
});
|
||||
|
||||
let (worked, failed) = result;
|
||||
|
||||
info!("{}", Green.paint(format!("Removing succeeded for {} files", worked)));
|
||||
info!("{}", Red.paint(format!( "Removing failed for {} files", failed)));
|
||||
|
||||
return failed == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Subcommand: add_tags
|
||||
*/
|
||||
fn command_add_tags(&self, matches: &ArgMatches) -> bool {
|
||||
use module::helpers::header::tags::data::alter_tags_in_files;
|
||||
use self::header::rebuild_header_with_tags;
|
||||
|
||||
alter_tags_in_files(self, matches, &self.parser, |old_tags, cli_tags| {
|
||||
let mut new_tags = old_tags.clone();
|
||||
new_tags.append(&mut cli_tags.clone());
|
||||
new_tags
|
||||
}, rebuild_header_with_tags)
|
||||
}
|
||||
|
||||
/**
|
||||
* Subcommand: rm_tags
|
||||
*/
|
||||
fn command_rm_tags(&self, matches: &ArgMatches) -> bool {
|
||||
use module::helpers::header::tags::data::alter_tags_in_files;
|
||||
use self::header::rebuild_header_with_tags;
|
||||
|
||||
alter_tags_in_files(self, matches, &self.parser, |old_tags, cli_tags| {
|
||||
old_tags.clone()
|
||||
.into_iter()
|
||||
.filter(|tag| !cli_tags.contains(tag))
|
||||
.collect()
|
||||
}, rebuild_header_with_tags)
|
||||
}
|
||||
|
||||
/**
|
||||
* Subcommand: set_tags
|
||||
*/
|
||||
fn command_set_tags(&self, matches: &ArgMatches) -> bool {
|
||||
use module::helpers::header::tags::data::alter_tags_in_files;
|
||||
use self::header::rebuild_header_with_tags;
|
||||
|
||||
alter_tags_in_files(self, matches, &self.parser, |_, cli_tags| {
|
||||
cli_tags.clone()
|
||||
}, rebuild_header_with_tags)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Trait implementation for BM module
|
||||
*/
|
||||
impl<'a> Module<'a> for BM<'a> {
|
||||
|
||||
fn exec(&self, matches: &ArgMatches) -> bool {
|
||||
use ansi_term::Colour::Red;
|
||||
|
||||
match matches.subcommand_name() {
|
||||
Some("add") => {
|
||||
self.command_add(matches.subcommand_matches("add").unwrap())
|
||||
},
|
||||
|
||||
Some("list") => {
|
||||
self.command_list(matches.subcommand_matches("list").unwrap())
|
||||
},
|
||||
|
||||
Some("open") => {
|
||||
self.command_open(matches.subcommand_matches("open").unwrap())
|
||||
},
|
||||
|
||||
Some("remove") => {
|
||||
self.command_remove(matches.subcommand_matches("remove").unwrap())
|
||||
},
|
||||
|
||||
Some("add_tags") => {
|
||||
self.command_add_tags(matches.subcommand_matches("add_tags").unwrap())
|
||||
},
|
||||
|
||||
Some("rm_tags") => {
|
||||
self.command_rm_tags(matches.subcommand_matches("rm_tags").unwrap())
|
||||
},
|
||||
|
||||
Some("set_tags") => {
|
||||
self.command_set_tags(matches.subcommand_matches("set_tags").unwrap())
|
||||
},
|
||||
|
||||
Some(_) | None => {
|
||||
info!("{}", Red.bold().paint("No command given, doing nothing"));
|
||||
false
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"bookmark"
|
||||
}
|
||||
|
||||
fn runtime(&self) -> &Runtime {
|
||||
self.rt
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Debug for BM<'a> {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "BM"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,224 +0,0 @@
|
|||
use std::rc::Rc;
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Deref;
|
||||
use std::process::exit;
|
||||
|
||||
use clap::ArgMatches;
|
||||
use regex::Regex;
|
||||
|
||||
use storage::file::File;
|
||||
use storage::file::hash::FileHash;
|
||||
|
||||
pub trait CliFileFilter {
|
||||
|
||||
fn filter_file(&self, &Rc<RefCell<File>>) -> bool;
|
||||
|
||||
fn not(self) -> CliFileFilterNot
|
||||
where Self: Sized + 'static
|
||||
{
|
||||
CliFileFilterNot {
|
||||
a: Box::new(self),
|
||||
}
|
||||
}
|
||||
|
||||
fn or(self, other: Box<CliFileFilter>) -> CliFileFilterOr
|
||||
where Self: Sized + 'static
|
||||
{
|
||||
CliFileFilterOr {
|
||||
a: Box::new(self),
|
||||
b: other
|
||||
}
|
||||
}
|
||||
|
||||
fn and(self, other: Box<CliFileFilter>) -> CliFileFilterAnd
|
||||
where Self: Sized + 'static
|
||||
{
|
||||
CliFileFilterAnd {
|
||||
a: Box::new(self),
|
||||
b: other
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub struct CliFileFilterNot {
|
||||
a: Box<CliFileFilter>,
|
||||
}
|
||||
|
||||
impl CliFileFilter for CliFileFilterNot {
|
||||
|
||||
fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {
|
||||
!self.a.filter_file(f)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub struct CliFileFilterOr {
|
||||
a: Box<CliFileFilter>,
|
||||
b: Box<CliFileFilter>
|
||||
}
|
||||
|
||||
impl CliFileFilter for CliFileFilterOr {
|
||||
|
||||
fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {
|
||||
self.a.filter_file(f) || self.b.filter_file(f)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub struct CliFileFilterAnd {
|
||||
a: Box<CliFileFilter>,
|
||||
b: Box<CliFileFilter>
|
||||
}
|
||||
|
||||
impl CliFileFilter for CliFileFilterAnd {
|
||||
|
||||
fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {
|
||||
self.a.filter_file(f) && self.b.filter_file(f)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub struct CliFileFilterByHash {
|
||||
default: bool,
|
||||
hash: Option<FileHash>,
|
||||
}
|
||||
|
||||
impl CliFileFilter for CliFileFilterByHash {
|
||||
|
||||
fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {
|
||||
self.hash.clone().map(|h| {
|
||||
debug!("Filtering file with hash = {}", h);
|
||||
let f = file.deref().borrow();
|
||||
f.id().get_id() == h
|
||||
})
|
||||
.unwrap_or(self.default)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub struct CliFileFilterByDataRegex {
|
||||
default: bool,
|
||||
regex: Option<Regex>,
|
||||
}
|
||||
|
||||
impl CliFileFilter for CliFileFilterByDataRegex {
|
||||
|
||||
fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {
|
||||
self.regex.clone().map(|r| {
|
||||
debug!("Filtering file with regex = {:?}", r);
|
||||
let f = file.deref().borrow();
|
||||
r.is_match(&f.data()[..])
|
||||
})
|
||||
.unwrap_or(self.default)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub struct CliFileFilterByHeaderRegex {
|
||||
default: bool,
|
||||
header_field_name: &'static str,
|
||||
regex: Option<Regex>,
|
||||
}
|
||||
|
||||
impl CliFileFilter for CliFileFilterByHeaderRegex {
|
||||
|
||||
fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {
|
||||
use module::helpers::header::data::get_named_text_from_header;
|
||||
|
||||
self.regex.clone().map(|r| {
|
||||
debug!("Filtering file (header field = {}) with regex = {:?}", self.header_field_name, r);
|
||||
|
||||
let f = file.deref().borrow();
|
||||
get_named_text_from_header(self.header_field_name, f.header())
|
||||
.map(|headerfield| r.is_match(&headerfield[..]))
|
||||
.unwrap_or(self.default)
|
||||
})
|
||||
.unwrap_or(self.default)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub struct CliFileFilterByTags {
|
||||
default: bool,
|
||||
tags: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
impl CliFileFilter for CliFileFilterByTags {
|
||||
|
||||
fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {
|
||||
use module::helpers::header::tags::data::get_tags_from_header;
|
||||
|
||||
self.tags.clone().map(|ts| {
|
||||
debug!("Filtering file with tags = {:?}", ts);
|
||||
|
||||
let f = file.deref().borrow();
|
||||
get_tags_from_header(f.header())
|
||||
.iter()
|
||||
.any(|tag| ts.iter().any(|remtag| remtag == tag))
|
||||
})
|
||||
.unwrap_or(self.default)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
*
|
||||
*
|
||||
* Functions to generate filters
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
pub fn create_hash_filter(matches: &ArgMatches, id_key: &'static str, default: bool) -> CliFileFilterByHash {
|
||||
CliFileFilterByHash {
|
||||
hash: matches.value_of(id_key).map(FileHash::from),
|
||||
default: default
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_content_grep_filter(matches: &ArgMatches, match_key: &'static str, default: bool) -> CliFileFilterByDataRegex {
|
||||
use std::process::exit;
|
||||
|
||||
CliFileFilterByDataRegex {
|
||||
regex: matches.value_of(match_key).map(|m| {
|
||||
Regex::new(&m[..]).unwrap_or_else(|e| {
|
||||
error!("Regex compiler error: {}", e);
|
||||
exit(1);
|
||||
})
|
||||
}),
|
||||
default: default,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_text_header_field_grep_filter(matches: &ArgMatches,
|
||||
match_key: &'static str,
|
||||
header_field_name: &'static str,
|
||||
default: bool)
|
||||
-> CliFileFilterByHeaderRegex
|
||||
{
|
||||
CliFileFilterByHeaderRegex {
|
||||
default: default,
|
||||
header_field_name: header_field_name,
|
||||
regex: matches.value_of(match_key)
|
||||
.map(|m| {
|
||||
Regex::new(&m[..]).unwrap_or_else(|e| {
|
||||
error!("Regex compiler error: {}", e);
|
||||
exit(1);
|
||||
})
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_tag_filter(matches: &ArgMatches, tag_key: &'static str, default: bool) -> CliFileFilterByTags {
|
||||
|
||||
CliFileFilterByTags {
|
||||
default: default,
|
||||
tags: matches.value_of(tag_key)
|
||||
.map(|m| m.split(",")
|
||||
.map(String::from)
|
||||
.collect::<Vec<String>>()
|
||||
),
|
||||
}
|
||||
}
|
||||
|
|
@ -1,97 +0,0 @@
|
|||
pub mod markdown {
|
||||
use hoedown::renderer::Render;
|
||||
use hoedown::Buffer;
|
||||
use hoedown::Markdown;
|
||||
|
||||
pub type LinkTitle = String;
|
||||
pub type LinkURL = String;
|
||||
|
||||
pub struct Link {
|
||||
pub title: LinkTitle,
|
||||
pub url: LinkURL,
|
||||
}
|
||||
|
||||
struct LinkExtractRenderer {
|
||||
links : Vec<Link>
|
||||
}
|
||||
|
||||
impl LinkExtractRenderer {
|
||||
|
||||
fn new() -> LinkExtractRenderer {
|
||||
LinkExtractRenderer {
|
||||
links: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
fn extract(self) -> Vec<Link> {
|
||||
self.links
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Render for LinkExtractRenderer {
|
||||
|
||||
fn link(&mut self,
|
||||
_: &mut Buffer,
|
||||
_: &Buffer,
|
||||
link: &Buffer,
|
||||
title: &Buffer) -> bool {
|
||||
|
||||
let l = String::from(link.to_str().unwrap_or("<<UTF8 Error>>"));
|
||||
let t = String::from(title.to_str().unwrap_or("<<UTF8 Error>>"));
|
||||
|
||||
debug!("[Markdown] Push link: '{}' -> '{}'", t, l);
|
||||
self.links.push(Link {
|
||||
title: t,
|
||||
url: l,
|
||||
});
|
||||
true
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub struct MarkdownParser {
|
||||
text: Markdown,
|
||||
}
|
||||
|
||||
impl MarkdownParser {
|
||||
|
||||
pub fn new(s: &String) -> MarkdownParser {
|
||||
MarkdownParser {
|
||||
text: Markdown::new(&s[..])
|
||||
}
|
||||
}
|
||||
|
||||
pub fn links(&self) -> Vec<Link> {
|
||||
let mut renderer = LinkExtractRenderer::new();
|
||||
renderer.render(&self.text);
|
||||
renderer.extract()
|
||||
}
|
||||
|
||||
pub fn to_html(self) -> String {
|
||||
use hoedown::renderer::html::Html;
|
||||
use hoedown::renderer::html;
|
||||
|
||||
String::from(
|
||||
Html::new(html::Flags::empty(), 0)
|
||||
.render(&self.text)
|
||||
.to_str()
|
||||
.unwrap_or("UTF8Error"))
|
||||
}
|
||||
|
||||
pub fn to_html_page(self) -> String {
|
||||
let header = "
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv='content-type' content='text/html; charset=utf-8'>
|
||||
</head>
|
||||
<body>
|
||||
";
|
||||
let content = self.to_html();
|
||||
let footer = "</body></html>";
|
||||
format!("{}{}{}", header, content, footer)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -1,81 +0,0 @@
|
|||
/*!
|
||||
* Helpers for headers
|
||||
*/
|
||||
|
||||
pub mod tags;
|
||||
|
||||
/**
|
||||
* Utility helpers for header data
|
||||
*/
|
||||
pub mod data {
|
||||
use std::ops::Deref;
|
||||
use storage::file::header::data::FileHeaderData as FHD;
|
||||
|
||||
/**
|
||||
* Get an URL from a header, whereas the header has to have the following format:
|
||||
*
|
||||
* { ..., "URL": "<URL>", ... }
|
||||
*
|
||||
* Does no spec verification.
|
||||
*/
|
||||
pub fn get_url_from_header(header: &FHD) -> Option<String> {
|
||||
get_named_text_from_header("URL", header)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an NAME from a header, whereas the header has to have the following format:
|
||||
*
|
||||
* { ..., "NAME": "<NAME>", ... }
|
||||
*
|
||||
* Does no spec verification.
|
||||
*/
|
||||
pub fn get_name_from_header(header: &FHD) -> Option<String> {
|
||||
get_named_text_from_header("NAME", header)
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get a named field from the header, which has to be of this format
|
||||
*
|
||||
* { ..., "<name of field>": "<content as string>", ... }
|
||||
*
|
||||
* Does no spec verification.
|
||||
*/
|
||||
pub fn get_named_text_from_header(name: &'static str, header: &FHD) -> Option<String> {
|
||||
match header {
|
||||
&FHD::Map{keys: ref ks} => {
|
||||
ks.clone()
|
||||
.iter()
|
||||
.find(|k| {
|
||||
match k.deref() {
|
||||
&FHD::Key{name: ref n, value: _} => n == name,
|
||||
_ => false
|
||||
}
|
||||
}).and_then(|urlkey| {
|
||||
match urlkey.deref().clone() {
|
||||
FHD::Key{name: ref n, value: ref v} => {
|
||||
match v.deref().clone() {
|
||||
FHD::Text(s) => Some(s),
|
||||
_ => {
|
||||
warn!("Malformed Header Data: Expected Text, found non-Text");
|
||||
debug!(" in {}", n);
|
||||
None
|
||||
},
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
warn!("Malformed Header Data: Expected Text, found non-Text");
|
||||
None
|
||||
},
|
||||
}
|
||||
})
|
||||
},
|
||||
_ => {
|
||||
warn!("Malformed Header Data: Expected Map, found non-Map");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,177 +0,0 @@
|
|||
/*!
|
||||
* Helpers for headers - Tags
|
||||
*/
|
||||
|
||||
/**
|
||||
* Spec helpers for header-tags
|
||||
*/
|
||||
pub mod spec {
|
||||
use storage::file::header::spec::FileHeaderSpec as FHS;
|
||||
use module::helpers::spec::{named_text, named_text_array};
|
||||
|
||||
/**
|
||||
* helper for a Header spec for
|
||||
*
|
||||
* { "URL": "<Text>" }
|
||||
*/
|
||||
pub fn url_key() -> FHS {
|
||||
named_text("URL")
|
||||
}
|
||||
|
||||
/**
|
||||
* helper for a Header spec for
|
||||
*
|
||||
* { "TAGS": [ "<Text>", ... ] }
|
||||
*/
|
||||
pub fn tags_key() -> FHS {
|
||||
named_text_array("TAGS")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Data helpers for header-tags
|
||||
*/
|
||||
pub mod data {
|
||||
use std::ops::Deref;
|
||||
use storage::file::header::data::FileHeaderData as FHD;
|
||||
use module::Module;
|
||||
use clap::ArgMatches;
|
||||
use storage::parser::Parser;
|
||||
use storage::parser::FileHeaderParser;
|
||||
|
||||
/**
|
||||
* Use a Vec<String> to build a Tag-Array:
|
||||
*
|
||||
* [ "<Text>", ... ]
|
||||
*/
|
||||
pub fn build_tag_array(tags: Vec<String>) -> FHD {
|
||||
let texttags = tags.into_iter().map(|t| FHD::Text(t.clone())).collect();
|
||||
FHD::Array { values: Box::new(texttags) }
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch tags from a header, whereas the header looks like this:
|
||||
*
|
||||
* { ...,
|
||||
* "TAGS": [ "<Text>", ... ],
|
||||
* ...
|
||||
* }
|
||||
*
|
||||
* Does no spec verification.
|
||||
*/
|
||||
pub fn get_tags_from_header(header: &FHD) -> Vec<String> {
|
||||
let tags : Vec<String> = vec![];
|
||||
|
||||
fn match_array(a: &Box<FHD>) -> Vec<String> {
|
||||
let mut tags : Vec<String> = vec![];
|
||||
|
||||
match a.deref() {
|
||||
&FHD::Array{values: ref vs} => {
|
||||
let values : Vec<FHD> = vs.deref().clone();
|
||||
for value in values {
|
||||
match value {
|
||||
FHD::Text(t) => tags.push(t),
|
||||
_ => warn!("Malformed Header Data: Expected Text, found non-Text"),
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => warn!("Malformed Header Data: Expected Array, found non-Array"),
|
||||
}
|
||||
|
||||
tags
|
||||
}
|
||||
|
||||
match header {
|
||||
&FHD::Map{keys: ref ks} => {
|
||||
let keys : Vec<FHD> = ks.clone();
|
||||
for key in keys {
|
||||
match key {
|
||||
FHD::Key{ref name, value: ref v} => {
|
||||
if name == "TAGS" {
|
||||
return match_array(v)
|
||||
}
|
||||
},
|
||||
_ => warn!("Malformed Header Data: Expected Key, found non-Key"),
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => warn!("Malformed Header Data: Expected Map, found non-Map"),
|
||||
}
|
||||
|
||||
tags
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to alter the tags in a file
|
||||
*/
|
||||
pub fn alter_tags_in_files<HP, F, R>(m: &Module,
|
||||
matches: &ArgMatches,
|
||||
parser: &Parser<HP>,
|
||||
generate_new_tags: F,
|
||||
rebuild_header: R) -> bool
|
||||
where HP: FileHeaderParser,
|
||||
F: Fn(Vec<String>, &Vec<String>) -> Vec<String>,
|
||||
R: Fn(&FHD, Vec<String>) -> Option<FHD>
|
||||
{
|
||||
use std::process::exit;
|
||||
use module::helpers::cli::create_tag_filter;
|
||||
use module::helpers::cli::create_hash_filter;
|
||||
use module::helpers::cli::create_text_header_field_grep_filter;
|
||||
use module::helpers::cli::CliFileFilter;
|
||||
|
||||
let cli_tags = matches.value_of("tags")
|
||||
.map(|ts| {
|
||||
ts.split(",")
|
||||
.map(String::from)
|
||||
.collect::<Vec<String>>()
|
||||
})
|
||||
.unwrap_or(vec![]);
|
||||
|
||||
let filter = {
|
||||
let hash_filter = create_hash_filter(matches, "with:id", false);
|
||||
let text_filter = create_text_header_field_grep_filter(matches, "with_match", "URL", false);
|
||||
let tags_filter = create_tag_filter(matches, "with_tags", false);
|
||||
hash_filter.or(Box::new(text_filter)).or(Box::new(tags_filter))
|
||||
};
|
||||
|
||||
m.runtime()
|
||||
.store()
|
||||
.load_for_module(m, &parser)
|
||||
.into_iter()
|
||||
.filter(|file| filter.filter_file(file))
|
||||
.map(|file| {
|
||||
debug!("Alter tags in file: {:?}", file);
|
||||
|
||||
let hdr = {
|
||||
let f = file.deref().borrow();
|
||||
f.header().clone()
|
||||
};
|
||||
|
||||
debug!("Tags:...");
|
||||
let old_tags = get_tags_from_header(&hdr);
|
||||
debug!(" old_tags = {:?}", &old_tags);
|
||||
debug!(" cli_tags = {:?}", &cli_tags);
|
||||
|
||||
let new_tags = generate_new_tags(old_tags, &cli_tags);
|
||||
debug!(" new_tags = {:?}", &new_tags);
|
||||
|
||||
let new_header = rebuild_header(&hdr, new_tags)
|
||||
.unwrap_or_else(|| {
|
||||
error!("Could not rebuild header for file");
|
||||
exit(1);
|
||||
});
|
||||
{
|
||||
let mut f_mut = file.deref().borrow_mut();
|
||||
f_mut.set_header(new_header);
|
||||
}
|
||||
|
||||
m.runtime().store().persist(&parser, file);
|
||||
true
|
||||
})
|
||||
.all(|x| x)
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
/*!
|
||||
* Utility helpers for modules
|
||||
*/
|
||||
|
||||
pub mod cli;
|
||||
pub mod header;
|
||||
pub mod utils;
|
||||
pub mod content;
|
||||
|
||||
/**
|
||||
* Helpers for header specs
|
||||
*/
|
||||
pub mod spec {
|
||||
use storage::file::header::spec::FileHeaderSpec as FHS;
|
||||
|
||||
/**
|
||||
* Helper to get a spec for a Key-Value for a named text:
|
||||
*
|
||||
* { '<name>': "<Text>" }
|
||||
*/
|
||||
pub fn named_text(name: &str) -> FHS {
|
||||
FHS::Key { name: String::from(name), value_type: Box::new(FHS::Text) }
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to get a spec for a Key-Value for a named array:
|
||||
*
|
||||
* { '<name>': [ "<Text>", ...] }
|
||||
*/
|
||||
pub fn named_text_array(name: &str) -> FHS {
|
||||
FHS::Key { name: String::from(name), value_type: Box::new(text_array()) }
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to get a spec for Array<Text>:
|
||||
*
|
||||
* [ "<Text>", ...]
|
||||
*/
|
||||
pub fn text_array() -> FHS {
|
||||
FHS::Array { allowed_types: vec![FHS::Text] }
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
/**
|
||||
* Utility helpers for CLI
|
||||
*/
|
||||
pub mod cli {
|
||||
use clap::ArgMatches;
|
||||
|
||||
/**
|
||||
* Get a commandline option "tags" and split the argument by "," to be able to provide a
|
||||
* Vec<String> with the argument as array.
|
||||
*/
|
||||
pub fn get_tags<'a>(sub: &ArgMatches<'a, 'a>) -> Vec<String> {
|
||||
|
||||
fn reject_if_with_spaces(e: &String) -> bool {
|
||||
if e.contains(" ") {
|
||||
warn!("Tag contains spaces: '{}'", e);
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Fetching tags from commandline");
|
||||
sub.value_of("tags").and_then(|tags| {
|
||||
Some(tags.split(",")
|
||||
.into_iter()
|
||||
.map(|s| s.to_string())
|
||||
.filter(|e| reject_if_with_spaces(e))
|
||||
.collect()
|
||||
)
|
||||
}).or(Some(vec![])).unwrap()
|
||||
}
|
||||
|
||||
}
|
|
@ -1,20 +0,0 @@
|
|||
use std::fmt::Debug;
|
||||
|
||||
use clap::ArgMatches;
|
||||
|
||||
use runtime::Runtime;
|
||||
|
||||
pub mod bm;
|
||||
pub mod helpers;
|
||||
pub mod notes;
|
||||
|
||||
/**
|
||||
* Module interface, each module has to implement this.
|
||||
*/
|
||||
pub trait Module<'a> : Debug {
|
||||
fn exec(&self, matches: &ArgMatches) -> bool;
|
||||
fn name(&self) -> &'static str;
|
||||
|
||||
fn runtime(&self) -> &Runtime;
|
||||
}
|
||||
|
|
@ -1,64 +0,0 @@
|
|||
/*
|
||||
* Lets talk about header data first.
|
||||
* We need:
|
||||
*
|
||||
* - tags
|
||||
* - name (not unique)
|
||||
*
|
||||
* So an header could look like this:
|
||||
*
|
||||
* ```json
|
||||
* {
|
||||
* 'name': "kittennotes",
|
||||
* 'tags': ['foo', 'bar', 'baz'],
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* Nothing more is required for the header, I guess
|
||||
*
|
||||
*/
|
||||
|
||||
use module::helpers;
|
||||
use module::helpers::header as headerhelpers;
|
||||
use storage::file::header::data::FileHeaderData as FHD;
|
||||
use storage::file::header::spec::FileHeaderSpec as FHS;
|
||||
|
||||
pub fn get_spec() -> FHS {
|
||||
FHS::Map { keys: vec![ helpers::spec::named_text("NAME"),
|
||||
headerhelpers::tags::spec::tags_key() ] }
|
||||
}
|
||||
|
||||
|
||||
pub fn build_header(name: String, tags: Vec<String>) -> FHD {
|
||||
FHD::Map {
|
||||
keys: vec![
|
||||
FHD::Key {
|
||||
name: String::from("NAME"),
|
||||
value: Box::new(FHD::Text(name.clone()))
|
||||
},
|
||||
FHD::Key {
|
||||
name: String::from("TAGS"),
|
||||
value: Box::new(headerhelpers::tags::data::build_tag_array(tags))
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_tags_from_header(header: &FHD) -> Vec<String> {
|
||||
headerhelpers::tags::data::get_tags_from_header(header)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the name from the Header
|
||||
*
|
||||
* Returns empty string if there is no NAME field
|
||||
*/
|
||||
pub fn get_name_from_header(header: &FHD) -> String {
|
||||
headerhelpers::data::get_name_from_header(header).unwrap_or(String::from(""))
|
||||
}
|
||||
|
||||
pub fn rebuild_header_with_tags(header: &FHD, tags: Vec<String>) -> Option<FHD> {
|
||||
let name = get_name_from_header(header);
|
||||
Some(build_header(name, tags))
|
||||
}
|
||||
|
|
@ -1,545 +0,0 @@
|
|||
use std::fmt::{Debug, Formatter};
|
||||
use std::fmt::Result as FMTResult;
|
||||
use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
use std::cell::RefCell;
|
||||
|
||||
use clap::ArgMatches;
|
||||
|
||||
mod header;
|
||||
|
||||
use module::Module;
|
||||
use runtime::Runtime;
|
||||
use storage::file::File;
|
||||
use storage::parser::Parser;
|
||||
use storage::yaml::parser::YamlHeaderParser;
|
||||
use module::helpers::cli::create_tag_filter;
|
||||
use module::helpers::cli::create_hash_filter;
|
||||
use module::helpers::cli::create_text_header_field_grep_filter;
|
||||
use module::helpers::cli::create_content_grep_filter;
|
||||
use module::helpers::cli::CliFileFilter;
|
||||
|
||||
pub struct Notes<'a> {
|
||||
rt: &'a Runtime<'a>,
|
||||
parser: Parser<YamlHeaderParser>,
|
||||
}
|
||||
|
||||
impl<'a> Notes<'a> {
|
||||
|
||||
pub fn new(rt: &'a Runtime<'a>) -> Notes<'a> {
|
||||
Notes {
|
||||
rt: rt,
|
||||
parser: Parser::new(YamlHeaderParser::new(None)),
|
||||
}
|
||||
}
|
||||
|
||||
fn command_add(&self, matches: &ArgMatches) -> bool {
|
||||
use ansi_term::Colour::Yellow;
|
||||
use self::header::build_header;
|
||||
use ui::external::editor::let_user_provide_content;
|
||||
|
||||
let name = matches.value_of("name")
|
||||
.map(String::from)
|
||||
.unwrap_or(String::from(""));
|
||||
let tags = matches.value_of("tags")
|
||||
.and_then(|s| Some(s.split(",").map(String::from).collect()))
|
||||
.unwrap_or(vec![]);
|
||||
|
||||
debug!("Building header with");
|
||||
debug!(" name = '{:?}'", name);
|
||||
debug!(" tags = '{:?}'", tags);
|
||||
let header = build_header(name, tags);
|
||||
|
||||
let content = let_user_provide_content(self.runtime()).unwrap_or(String::from(""));
|
||||
|
||||
let fileid = self.rt.store().new_file_with_content(self, header, content);
|
||||
self.rt
|
||||
.store()
|
||||
.load(self, &self.parser, &fileid)
|
||||
.and_then(|file| {
|
||||
info!("{}", Yellow.paint(format!("Created file in memory: {}", fileid)));
|
||||
Some(self.rt.store().persist(&self.parser, file))
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn command_edit(&self, matches: &ArgMatches) -> bool {
|
||||
use ansi_term::Colour::{Red, Green};
|
||||
use ui::external::editor::edit_content;
|
||||
|
||||
let filter = {
|
||||
let hash_filter = create_hash_filter(matches, "id", false);
|
||||
let head_filter = create_text_header_field_grep_filter(matches, "namematch", "NAME", false);
|
||||
let text_filter = create_content_grep_filter(matches, "match", false);
|
||||
let tags_filter = create_tag_filter(matches, "tags", false);
|
||||
hash_filter.or(Box::new(head_filter)).or(Box::new(text_filter)).or(Box::new(tags_filter))
|
||||
};
|
||||
|
||||
let result = self.rt
|
||||
.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.into_iter()
|
||||
.filter(|f| filter.filter_file(f))
|
||||
.map(|file| {
|
||||
debug!("File loaded, can edit now: {:?}", file);
|
||||
|
||||
let old_content = {
|
||||
let f = file.deref().borrow();
|
||||
f.data().clone()
|
||||
};
|
||||
|
||||
debug!("Editing content now...");
|
||||
let (new_content, editing_worked) = edit_content(self.runtime(), old_content);
|
||||
debug!("... ready with editing");
|
||||
|
||||
if editing_worked {
|
||||
debug!("Editing worked");
|
||||
{
|
||||
let mut f = file.deref().borrow_mut();
|
||||
f.set_data(new_content);
|
||||
}
|
||||
self.runtime().store().persist(&self.parser, file)
|
||||
} else {
|
||||
debug!("Editing didn't work");
|
||||
false
|
||||
}
|
||||
})
|
||||
.fold((0, 0), |acc, succeeded| {
|
||||
let (worked, failed) = acc;
|
||||
if succeeded {
|
||||
(worked + 1, failed)
|
||||
} else {
|
||||
(worked, failed + 1)
|
||||
}
|
||||
});
|
||||
|
||||
let (worked, failed) = result;
|
||||
|
||||
info!("{}", Green.paint(format!("Editing succeeded for {} files", worked)));
|
||||
info!("{}", Red.paint(format!( "Editing failed for {} files", failed)));
|
||||
|
||||
return failed == 0;
|
||||
}
|
||||
|
||||
fn command_show(&self, matches: &ArgMatches) -> bool {
|
||||
use self::header::get_name_from_header;
|
||||
use self::header::get_tags_from_header;
|
||||
|
||||
let filter = {
|
||||
let hash_filter = create_hash_filter(matches, "id", true);
|
||||
let head_filter = create_text_header_field_grep_filter(matches, "match", "NAME", true);
|
||||
let text_filter = create_content_grep_filter(matches, "match", true);
|
||||
let tags_filter = create_tag_filter(matches, "tags", true);
|
||||
hash_filter.and(Box::new(head_filter)).and(Box::new(text_filter)).and(Box::new(tags_filter))
|
||||
};
|
||||
|
||||
self.rt
|
||||
.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.into_iter()
|
||||
.filter(|file| {
|
||||
let res = filter.filter_file(file);
|
||||
debug!("Filter: {} -> {}", file.deref().borrow().id(), res);
|
||||
res
|
||||
})
|
||||
.map(|file| {
|
||||
let content = file.deref().borrow().data().clone();
|
||||
|
||||
let text = if matches.is_present("plain") {
|
||||
self.parser.write((file.deref().borrow().header(), &content))
|
||||
.unwrap_or(format!("Parser error for file: {}", file.deref().borrow().id()))
|
||||
} else {
|
||||
let tags = get_tags_from_header(file.deref().borrow().header());
|
||||
let name = get_name_from_header(file.deref().borrow().header());
|
||||
format!("Name = '{}'\nTags = '{}'\n\n{}\n\n",
|
||||
name, tags.join(", "), content)
|
||||
};
|
||||
|
||||
println!("{:-<79}", "-");
|
||||
println!("{}", text);
|
||||
true
|
||||
})
|
||||
.all(|x| x)
|
||||
}
|
||||
|
||||
fn command_open(&self, matches: &ArgMatches) -> bool {
|
||||
let filter = {
|
||||
let hash_filter = create_hash_filter(matches, "id", true);
|
||||
let head_filter = create_text_header_field_grep_filter(matches, "match", "NAME", true);
|
||||
let text_filter = create_content_grep_filter(matches, "match", true);
|
||||
let tags_filter = create_tag_filter(matches, "tags", true);
|
||||
hash_filter.and(Box::new(head_filter)).and(Box::new(text_filter)).and(Box::new(tags_filter))
|
||||
};
|
||||
|
||||
let files = self.rt
|
||||
.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.into_iter()
|
||||
.filter(|file| {
|
||||
let res = filter.filter_file(file);
|
||||
debug!("Filter: {} -> {}", file.deref().borrow().id(), res);
|
||||
res
|
||||
});
|
||||
|
||||
if matches.is_present("onepage") {
|
||||
let tmpcontent = files.fold(String::new(), |acc, file| {
|
||||
let content = self.preprocess_file_for_markdown(file);
|
||||
format!("{}\n\n{}", acc, content)
|
||||
});
|
||||
self.open_tmpcontent(tmpcontent)
|
||||
} else {
|
||||
let result = files.map(|file| {
|
||||
self.open_tmpcontent(self.preprocess_file_for_markdown(file))
|
||||
})
|
||||
.fold((0, 0), |acc, succeeded| {
|
||||
let (worked, failed) = acc;
|
||||
if succeeded {
|
||||
(worked + 1, failed)
|
||||
} else {
|
||||
(worked, failed + 1)
|
||||
}
|
||||
});
|
||||
|
||||
let (worked, failed) = result;
|
||||
|
||||
info!("Opening as HTML page succeeded for {} files", worked);
|
||||
info!("Opening as HTML page failed for {} files", failed);
|
||||
|
||||
failed == 0
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
fn preprocess_file_for_markdown(&self, file: Rc<RefCell<File>>) -> String {
|
||||
use self::header::get_name_from_header;
|
||||
use self::header::get_tags_from_header;
|
||||
|
||||
let tagsstr = {
|
||||
let tags = get_tags_from_header(file.deref().borrow().header());
|
||||
if tags.len() != 0 {
|
||||
format!(" <small>(<i>{}</i>)</small>", tags.join(", "))
|
||||
} else {
|
||||
format!(" <small>(No Tags)</small>")
|
||||
}
|
||||
};
|
||||
|
||||
let (name, id) = {
|
||||
let notename = get_name_from_header(file.deref().borrow().header());
|
||||
if notename.len() == 0 {
|
||||
(format!("{}", file.deref().borrow().id()), String::new())
|
||||
} else {
|
||||
(notename, format!("{}", file.deref().borrow().id()))
|
||||
}
|
||||
};
|
||||
|
||||
format!("<h1>{}</h1><small>{}</small>{}\n\n{}", name, id, tagsstr,
|
||||
file.deref().borrow().data())
|
||||
}
|
||||
|
||||
fn open_tmpcontent(&self, s: String) -> bool {
|
||||
use std::process::exit;
|
||||
use std::io::Write;
|
||||
use open;
|
||||
use ui::external::get_tempfile;
|
||||
use module::helpers::content::markdown::MarkdownParser;
|
||||
|
||||
let (temppath, mut tempfile) = match get_tempfile("html") {
|
||||
Some(tpl) => tpl,
|
||||
None => {
|
||||
error!("Could not create tempfile");
|
||||
exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
tempfile.write_all(MarkdownParser::new(&s).to_html_page().as_ref())
|
||||
.map_err(|e| {
|
||||
error!("Could not write HTML to file: {}", temppath);
|
||||
debug!("Could not write HTML to file: {:?}", e);
|
||||
})
|
||||
.ok();
|
||||
open::that(&temppath[..]).is_ok()
|
||||
}
|
||||
|
||||
fn command_list(&self, matches: &ArgMatches) -> bool {
|
||||
use ui::file::{FilePrinter, TablePrinter};
|
||||
use self::header::get_name_from_header;
|
||||
use self::header::get_tags_from_header;
|
||||
use module::helpers::cli::CliFileFilter;
|
||||
|
||||
let filter = {
|
||||
let hash_filter = create_hash_filter(matches, "id", true);
|
||||
let head_filter = create_text_header_field_grep_filter(matches, "match", "NAME", true);
|
||||
let text_filter = create_content_grep_filter(matches, "match", true);
|
||||
let tags_filter = create_tag_filter(matches, "tags", true);
|
||||
hash_filter.or(Box::new(head_filter)).and(Box::new(text_filter)).and(Box::new(tags_filter))
|
||||
};
|
||||
|
||||
let pretty = matches.is_present("pretty");
|
||||
debug!("Printing pretty table = {}", pretty);
|
||||
let printer = TablePrinter::new(self.rt.is_verbose(), self.rt.is_debugging(), pretty);
|
||||
|
||||
printer.print_files_custom(
|
||||
self.rt.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.into_iter()
|
||||
.filter(|f| filter.filter_file(f)),
|
||||
&|file| {
|
||||
let fl = file.deref().borrow();
|
||||
let hdr = fl.header();
|
||||
let name = get_name_from_header(hdr);
|
||||
let tags = get_tags_from_header(hdr);
|
||||
|
||||
debug!("Custom printer field: name = '{:?}'", name);
|
||||
debug!("Custom printer field: tags = '{:?}'", tags);
|
||||
|
||||
vec![name, tags.join(", ")]
|
||||
}
|
||||
);
|
||||
true
|
||||
}
|
||||
|
||||
/// List the markdown links contained in all matching notes in a table.
///
/// `--internal` / `--external` restrict the listing to store-internal or web
/// links; without either flag every link is listed.
fn command_links(&self, matches: &ArgMatches) -> bool {
    use ansi_term::Colour::{Red, Green};
    use module::helpers::content::markdown::MarkdownParser;
    use util::is_url;
    use prettytable::Table;
    use prettytable::row::Row;
    use prettytable::cell::Cell;
    use itertools::Itertools;

    debug!("Going to list links in files...");

    let list_intern = matches.is_present("internal");
    let list_extern = matches.is_present("external");
    debug!("list internal links = {}", list_intern);
    debug!("list external links = {}", list_extern);

    let titles = row!["#", "Text", "Link", "Direction"];
    let mut table = Table::new();
    table.set_titles(titles);
    debug!("Table setup finished");

    // Non-defaulting filters: a file is considered when ANY given criterion
    // matches (id, URL header field, or tags).
    let filter = {
        let hash_filter = create_hash_filter(matches, "id", false);
        let text_filter = create_text_header_field_grep_filter(matches, "match", "URL", false);
        let tags_filter = create_tag_filter(matches, "tags", false);
        hash_filter.or(Box::new(text_filter)).or(Box::new(tags_filter))
    };

    // Walk all files, extract their markdown links, keep those matching the
    // intern/extern selection and add one table row per link. The final fold
    // counts rows; the map closure always yields true, so `failed` stays 0.
    let result = self.rt
        .store()
        .load_for_module(self, &self.parser)
        .iter()
        .filter(|file| {
            let res = filter.filter_file(file);
            debug!("Filter: {} -> {}", file.deref().borrow().id(), res);
            res
        })
        .map(|file| {
            debug!("File loaded, can parse for links now: {}", file.deref().borrow().id());
            // Clone the content out so the RefCell borrow ends before the
            // markdown parser runs.
            let data = {
                let f = file.deref().borrow();
                debug!("Parsing markdown in file = {:?}", f);
                f.data().clone()
            };
            let links = MarkdownParser::new(&data).links();
            debug!("Retreived {} links from {}", links.len(), file.deref().borrow().id());
            links
        })
        .flatten()
        .filter(|link| {
            // Without a direction flag, list everything; otherwise a link is
            // kept only when its direction was requested.
            let url = &link.url;
            let is_extern = is_url(&url);
            debug!("Is external URL {} -> {}", url, is_extern);
            debug!("List external URLs -> {}", list_extern);
            debug!("List internal URLs -> {}", list_intern);
            ((!list_intern && !list_extern) ||
             (is_extern && list_extern) ||
             (!is_extern && list_intern))
        })
        .enumerate()
        .map(|(i_link, link)| {
            let title = &link.title;
            let url = &link.url;
            let is_url = is_url(&url);
            debug!("Listing: {} -> {}", title, url);

            let linkno_cell = Cell::new(&format!("{}", i_link)[..]);
            let title_cell = Cell::new(&format!("{}", title)[..]);
            let url_cell = Cell::new(&format!("{}", url)[..]);
            let dir_cell = Cell::new(if is_url { "extern" } else { "intern" });

            let r = Row::new(vec![linkno_cell,
                                  title_cell,
                                  url_cell,
                                  dir_cell]);
            table.add_row(r);
            true
        })
        .fold((0, 0), |acc, succeeded| {
            let (worked, failed) = acc;
            if succeeded {
                (worked + 1, failed)
            } else {
                (worked, failed + 1)
            }
        });

    let (worked, failed) = result;

    // Only print the table when there is at least one row in it.
    if worked != 0 {
        debug!("Printing table entries");
        table.printstd();
    } else {
        debug!("Not printing table as there wouldn't be any entries in it");
    }

    info!("{}", Green.paint(format!("Listing links succeeded for {} files", worked)));
    info!("{}", Red.paint(  format!("Listing links failed for {} files", failed)));

    return failed == 0;
}
|
||||
|
||||
fn command_remove(&self, matches: &ArgMatches) -> bool {
|
||||
use ansi_term::Colour::{Red, Green};
|
||||
|
||||
let filter = {
|
||||
let hash_filter = create_hash_filter(matches, "id", false);
|
||||
let text_filter = create_text_header_field_grep_filter(matches, "match", "URL", false);
|
||||
let tags_filter = create_tag_filter(matches, "tags", false);
|
||||
hash_filter.or(Box::new(text_filter)).or(Box::new(tags_filter))
|
||||
};
|
||||
|
||||
let result = self.rt
|
||||
.store()
|
||||
.load_for_module(self, &self.parser)
|
||||
.iter()
|
||||
.filter(|file| filter.filter_file(file))
|
||||
.map(|file| {
|
||||
debug!("File loaded, can remove now: {:?}", file);
|
||||
let f = file.deref().borrow();
|
||||
self.rt.store().remove(f.id().clone())
|
||||
})
|
||||
.fold((0, 0), |acc, succeeded| {
|
||||
let (worked, failed) = acc;
|
||||
if succeeded {
|
||||
(worked + 1, failed)
|
||||
} else {
|
||||
(worked, failed + 1)
|
||||
}
|
||||
});
|
||||
|
||||
let (worked, failed) = result;
|
||||
|
||||
|
||||
info!("{}", Green.paint(format!("Removing succeeded for {} files", worked)));
|
||||
info!("{}", Red.paint( format!("Removing failed for {} files", failed)));
|
||||
|
||||
return failed == 0;
|
||||
}
|
||||
|
||||
fn command_add_tags(&self, matches: &ArgMatches) -> bool {
|
||||
use module::helpers::header::tags::data::alter_tags_in_files;
|
||||
use self::header::rebuild_header_with_tags;
|
||||
|
||||
alter_tags_in_files(self, matches, &self.parser, |old_tags, cli_tags| {
|
||||
let mut new_tags = old_tags.clone();
|
||||
new_tags.append(&mut cli_tags.clone());
|
||||
new_tags
|
||||
}, rebuild_header_with_tags)
|
||||
}
|
||||
|
||||
fn command_rm_tags(&self, matches: &ArgMatches) -> bool {
|
||||
use module::helpers::header::tags::data::alter_tags_in_files;
|
||||
use self::header::rebuild_header_with_tags;
|
||||
|
||||
alter_tags_in_files(self, matches, &self.parser, |old_tags, cli_tags| {
|
||||
old_tags.clone()
|
||||
.into_iter()
|
||||
.filter(|tag| !cli_tags.contains(tag))
|
||||
.collect()
|
||||
}, rebuild_header_with_tags)
|
||||
}
|
||||
|
||||
fn command_set_tags(&self, matches: &ArgMatches) -> bool {
|
||||
use module::helpers::header::tags::data::alter_tags_in_files;
|
||||
use self::header::rebuild_header_with_tags;
|
||||
|
||||
alter_tags_in_files(self, matches, &self.parser, |_, cli_tags| {
|
||||
cli_tags.clone()
|
||||
}, rebuild_header_with_tags)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<'a> Module<'a> for Notes<'a> {
|
||||
|
||||
fn exec(&self, matches: &ArgMatches) -> bool {
|
||||
use ansi_term::Colour::Red;
|
||||
|
||||
match matches.subcommand_name() {
|
||||
Some("add") => {
|
||||
self.command_add(matches.subcommand_matches("add").unwrap())
|
||||
},
|
||||
|
||||
Some("edit") => {
|
||||
self.command_edit(matches.subcommand_matches("edit").unwrap())
|
||||
},
|
||||
|
||||
Some("show") => {
|
||||
self.command_show(matches.subcommand_matches("show").unwrap())
|
||||
},
|
||||
|
||||
Some("open") => {
|
||||
self.command_open(matches.subcommand_matches("open").unwrap())
|
||||
},
|
||||
|
||||
Some("list") => {
|
||||
self.command_list(matches.subcommand_matches("list").unwrap())
|
||||
},
|
||||
|
||||
Some("links") => {
|
||||
self.command_links(matches.subcommand_matches("links").unwrap())
|
||||
},
|
||||
|
||||
Some("remove") => {
|
||||
self.command_remove(matches.subcommand_matches("remove").unwrap())
|
||||
},
|
||||
|
||||
Some("add_tags") => {
|
||||
self.command_add_tags(matches.subcommand_matches("add_tags").unwrap())
|
||||
},
|
||||
|
||||
Some("rm_tags") => {
|
||||
self.command_rm_tags(matches.subcommand_matches("rm_tags").unwrap())
|
||||
},
|
||||
|
||||
Some("set_tags") => {
|
||||
self.command_set_tags(matches.subcommand_matches("set_tags").unwrap())
|
||||
},
|
||||
|
||||
Some(_) | None => {
|
||||
info!("{}", Red.bold().paint("No command given, doing nothing"));
|
||||
false
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn name(&self) -> &'static str{
|
||||
"notes"
|
||||
}
|
||||
|
||||
fn runtime(&self) -> &Runtime {
|
||||
self.rt
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<'a> Debug for Notes<'a> {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> FMTResult {
|
||||
try!(write!(fmt, "[Module][Notes]"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
154
src/runtime.rs
154
src/runtime.rs
|
@ -1,154 +0,0 @@
|
|||
use std::fmt::{Debug, Formatter, Error};
|
||||
use std::process::Command;
|
||||
|
||||
extern crate log;
|
||||
use log::{LogRecord, LogLevel, LogLevelFilter, LogMetadata, SetLoggerError};
|
||||
|
||||
pub use cli::CliConfig;
|
||||
pub use configuration::Configuration as Cfg;
|
||||
|
||||
use storage::Store;
|
||||
|
||||
/// Minimal logger backend for the `log` crate which prints every enabled
/// record to stdout.
pub struct ImagLogger {
    // Maximum level that is actually printed; records above it are dropped.
    lvl: LogLevel,
}
|
||||
|
||||
impl ImagLogger {
|
||||
|
||||
pub fn new(lvl: LogLevel) -> ImagLogger {
|
||||
ImagLogger {
|
||||
lvl: lvl,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(config: &CliConfig) -> Result<(), SetLoggerError> {
|
||||
let lvl = if config.is_debugging() {
|
||||
LogLevelFilter::Debug
|
||||
} else if config.is_verbose() {
|
||||
LogLevelFilter::Info
|
||||
} else {
|
||||
LogLevelFilter::Error
|
||||
};
|
||||
|
||||
log::set_logger(|max_log_lvl| {
|
||||
max_log_lvl.set(lvl);
|
||||
debug!("Init logger with: {}", lvl);
|
||||
Box::new(ImagLogger::new(lvl.to_log_level().unwrap()))
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl log::Log for ImagLogger {
|
||||
|
||||
fn enabled(&self, metadata: &LogMetadata) -> bool {
|
||||
metadata.level() <= self.lvl
|
||||
}
|
||||
|
||||
fn log(&self, record: &LogRecord) {
|
||||
if self.enabled(record.metadata()) {
|
||||
println!("[{}]: {}", record.level(), record.args());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Runtime object, represents a single interface to both the CLI configuration and the
 * configuration file. Also carries the store object around and is basically an object which
 * contains everything which is required to run a command/module.
 */
pub struct Runtime<'a> {
    /// Parsed command line configuration; takes precedence over the file.
    pub config : CliConfig<'a>,
    /// Configuration read from the configuration file.
    pub configuration : Cfg,
    /// The file store all modules operate on.
    pub store : Store,
}
|
||||
|
||||
impl<'a> Runtime<'a> {
|
||||
|
||||
pub fn new(cfg: Cfg, config : CliConfig<'a>) -> Runtime<'a> {
|
||||
let sp = config.store_path().unwrap_or(cfg.store_path());
|
||||
Runtime {
|
||||
config: config,
|
||||
configuration: cfg,
|
||||
store: Store::new(sp),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether we run verbose
|
||||
*/
|
||||
pub fn is_verbose(&self) -> bool {
|
||||
self.config.is_verbose()
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether we run in debugging
|
||||
*/
|
||||
pub fn is_debugging(&self) -> bool {
|
||||
self.config.is_debugging()
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the store path we are currently using
|
||||
*/
|
||||
pub fn store_path(&self) -> String {
|
||||
self.config.store_path().unwrap_or(self.configuration.store_path())
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the store object
|
||||
*/
|
||||
pub fn store(&self) -> &Store {
|
||||
&self.store
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the runtime path we are currently using
|
||||
*/
|
||||
pub fn get_rtp(&self) -> String {
|
||||
if let Some(rtp) = self.config.get_rtp() {
|
||||
rtp
|
||||
} else {
|
||||
self.configuration.get_rtp()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn editor(&self) -> Command {
|
||||
use std::env::var;
|
||||
|
||||
let (editor, args) : (String, String) = {
|
||||
if let Some(editor) = self.config.editor() {
|
||||
(editor, self.config.editor_opts())
|
||||
} else if let Some(editor) = self.configuration.editor() {
|
||||
(editor, self.configuration.editor_opts())
|
||||
} else if let Ok(editor) = var("EDITOR") {
|
||||
(editor, String::from(""))
|
||||
} else {
|
||||
(String::from("vim"), String::from(""))
|
||||
}
|
||||
};
|
||||
|
||||
let mut e = Command::new(editor);
|
||||
for arg in args.split(" ") {
|
||||
e.arg(arg);
|
||||
}
|
||||
e
|
||||
}
|
||||
|
||||
pub fn report_exit(&self) -> bool {
|
||||
self.config.report_exit() || self.configuration.report_exit()
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<'a> Debug for Runtime<'a> {
|
||||
|
||||
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
|
||||
write!(f, "Runtime (verbose: {}, debugging: {}, rtp: {})",
|
||||
self.is_verbose(),
|
||||
self.is_debugging(),
|
||||
self.get_rtp())
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
use std::convert::{From, Into};
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::fmt;
|
||||
use std::hash::Hash;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Clone)]
#[derive(Debug)]
#[derive(PartialEq)]
#[derive(Eq)]
#[derive(Hash)]
/**
 * FileHash type
 *
 * Simple abstraction over String by now.
 */
pub struct FileHash {
    // The raw hash value; no validation is performed on it.
    hash: String,
}
|
||||
|
||||
impl From<String> for FileHash {
|
||||
|
||||
fn from(s: String) -> FileHash {
|
||||
FileHash { hash: s }
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<'a> From<&'a String> for FileHash {
|
||||
|
||||
fn from(s: &'a String) -> FileHash {
|
||||
FileHash::from(s.clone())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl From<Uuid> for FileHash {
|
||||
|
||||
fn from(u: Uuid) -> FileHash {
|
||||
FileHash::from(u.to_hyphenated_string())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<'a> From<&'a str> for FileHash {
|
||||
|
||||
fn from(s: &str) -> FileHash {
|
||||
FileHash::from(String::from(s))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Into<String> for FileHash {
|
||||
|
||||
fn into(self) -> String {
|
||||
self.hash
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for FileHash {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "{}", self.hash));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
|
@ -1,37 +0,0 @@
|
|||
use regex::Regex;
|
||||
|
||||
#[derive(Debug)]
#[derive(Clone)]
/// Dynamically typed header data of a store file, mirroring the primitives
/// and containers a header markup language (JSON, ...) can express.
pub enum FileHeaderData {
    Null,
    Bool(bool),
    Integer(i64),
    UInteger(u64),
    Float(f64),
    Text(String),
    /// A named key carrying an arbitrary value.
    Key { name: String, value: Box<FileHeaderData> },
    /// A mapping, represented as a list of `Key` entries.
    Map { keys: Vec<FileHeaderData> },
    /// A list of arbitrary values.
    Array { values: Box<Vec<FileHeaderData>> },
}
|
||||
|
||||
impl FileHeaderData {
|
||||
|
||||
pub fn matches_with(&self, r: &Regex) -> bool {
|
||||
match self {
|
||||
&FileHeaderData::Text(ref t) => r.is_match(&t[..]),
|
||||
&FileHeaderData::Key{name: ref n, value: ref val} => {
|
||||
r.is_match(n) || val.matches_with(r)
|
||||
},
|
||||
|
||||
&FileHeaderData::Map{keys: ref dks} => {
|
||||
dks.iter().any(|x| x.matches_with(r))
|
||||
},
|
||||
|
||||
&FileHeaderData::Array{values: ref vs} => {
|
||||
vs.iter().any(|x| x.matches_with(r))
|
||||
}
|
||||
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,126 +0,0 @@
|
|||
use std::error::Error;
|
||||
use std::fmt::{Debug, Display, Formatter};
|
||||
use std::fmt;
|
||||
|
||||
pub mod spec;
|
||||
pub mod data;
|
||||
|
||||
use self::data::*;
|
||||
use self::spec::*;
|
||||
|
||||
/// Error describing a mismatch between a header specification and the header
/// data that was actually found.
pub struct MatchError<'a> {
    // Human-readable description of the mismatch.
    summary: String,
    // The specification the data was checked against.
    expected: &'a FileHeaderSpec,
    // The data that failed to match.
    found: &'a FileHeaderData
}
|
||||
|
||||
impl<'a> MatchError<'a> {
|
||||
|
||||
pub fn new(s: String,
|
||||
ex: &'a FileHeaderSpec,
|
||||
found: &'a FileHeaderData) -> MatchError<'a> {
|
||||
MatchError {
|
||||
summary: s,
|
||||
expected: ex,
|
||||
found: found,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format(&self) -> String {
|
||||
format!("MatchError: {:?}\nExpected: {:?}\nFound: {:?}\n",
|
||||
self.summary, self.expected, self.found)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Error for MatchError<'a> {
|
||||
|
||||
fn description(&self) -> &str {
|
||||
&self.summary[..]
|
||||
}
|
||||
|
||||
fn cause(&self) -> Option<&Error> {
|
||||
None
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<'a> Debug for MatchError<'a> {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "{}", self.format()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<'a> Display for MatchError<'a> {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "{}", self.format()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/// Check header data against a header spec.
///
/// Returns None when the data matches the spec, or Some(MatchError)
/// describing the first mismatch found. Map entries and array elements are
/// compared pairwise in order; extra entries on either side are not checked
/// (zip stops at the shorter sequence).
pub fn match_header_spec<'a>(spec: &'a FileHeaderSpec, data: &'a FileHeaderData)
    -> Option<MatchError<'a>>
{
    debug!("Start matching:\n'{:?}'\non\n{:?}", spec, data);
    match (spec, data) {
        // Primitive types: the kinds simply have to line up.
        (&FileHeaderSpec::Null, &FileHeaderData::Null) => { }
        (&FileHeaderSpec::Bool, &FileHeaderData::Bool(_)) => { }
        (&FileHeaderSpec::Integer, &FileHeaderData::Integer(_)) => { }
        (&FileHeaderSpec::UInteger, &FileHeaderData::UInteger(_)) => { }
        (&FileHeaderSpec::Float, &FileHeaderData::Float(_)) => { }
        (&FileHeaderSpec::Text, &FileHeaderData::Text(_)) => { }

        (
            &FileHeaderSpec::Key{name: ref kname, value_type: ref vtype},
            &FileHeaderData::Key{name: ref n, value: ref val}
        ) => {
            debug!("Matching Key: '{:?}' == '{:?}', Value: '{:?}' == '{:?}'",
                   kname, n,
                   vtype, val);
            if kname != n {
                // TODO(review): a key-name mismatch is not reported as a
                // MatchError yet — it panics via unimplemented!().
                debug!("Keys not matching");
                unimplemented!();
            }
            // Key names agree; recurse into the value.
            return match_header_spec(&*vtype, &*val);
        }

        (
            &FileHeaderSpec::Map{keys: ref sks},
            &FileHeaderData::Map{keys: ref dks}
        ) => {
            debug!("Matching Map: '{:?}' == '{:?}'", sks, dks);

            // Compare entries pairwise; the first mismatch wins.
            for (s, d) in sks.iter().zip(dks.iter()) {
                let res = match_header_spec(s, d);
                if res.is_some() {
                    return res;
                }
            }
        }

        (
            &FileHeaderSpec::Array{allowed_types: ref vtypes},
            &FileHeaderData::Array{values: ref vs}
        ) => {
            debug!("Matching Array: '{:?}' == '{:?}'", vtypes, vs);
            // Compare elements pairwise against the allowed types.
            for (t, v) in vtypes.iter().zip(vs.iter()) {
                let res = match_header_spec(t, v);
                if res.is_some() {
                    return res;
                }
            }
        }

        // The spec kind and the data kind differ -> report the mismatch.
        (k, v) => {
            return Some(MatchError::new(String::from("Expected type does not match found type"),
                                        k, v
            ))
        }
    }
    None
}
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
use std::fmt::{Display, Formatter};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug)]
#[derive(Clone)]
/// Specification a file header can be validated against; mirrors the shape
/// of `FileHeaderData` without carrying values.
pub enum FileHeaderSpec {
    Null,
    Bool,
    Integer,
    UInteger,
    Float,
    Text,
    /// A key with the given name whose value must match `value_type`.
    Key { name: String, value_type: Box<FileHeaderSpec> },
    /// A mapping whose entries are matched pairwise against `keys`.
    Map { keys: Vec<FileHeaderSpec> },
    /// An array whose elements are matched pairwise against `allowed_types`.
    Array { allowed_types: Vec<FileHeaderSpec> },
}
|
||||
|
||||
impl Display for FileHeaderSpec {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
match self {
|
||||
&FileHeaderSpec::Null => write!(fmt, "NULL"),
|
||||
&FileHeaderSpec::Bool => write!(fmt, "Bool"),
|
||||
&FileHeaderSpec::Integer => write!(fmt, "Integer"),
|
||||
&FileHeaderSpec::UInteger => write!(fmt, "UInteger"),
|
||||
&FileHeaderSpec::Float => write!(fmt, "Float"),
|
||||
&FileHeaderSpec::Text => write!(fmt, "Text"),
|
||||
&FileHeaderSpec::Key{name: ref n, value_type: ref vt} => {
|
||||
write!(fmt, "Key({:?}) -> {:?}", n, vt)
|
||||
}
|
||||
&FileHeaderSpec::Map{keys: ref ks} => {
|
||||
write!(fmt, "Map -> {:?}", ks)
|
||||
}
|
||||
&FileHeaderSpec::Array{allowed_types: ref at} => {
|
||||
write!(fmt, "Array({:?})", at)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,182 +0,0 @@
|
|||
use std::convert::{From, Into};
|
||||
use std::fmt::{Debug, Display, Formatter};
|
||||
use std::fmt;
|
||||
use std::result::Result;
|
||||
use std::str::FromStr;
|
||||
|
||||
use regex::Regex;
|
||||
|
||||
use storage::file::id_type::FileIDType;
|
||||
use storage::file::hash::FileHash;
|
||||
|
||||
#[derive(Clone)]
#[derive(Hash)]
#[derive(Eq)]
#[derive(PartialEq)]
/**
 * FileID
 *
 * A FileID contains of two parts: The ID type and the Hash. For example the FileID
 *
 *  UUID-235-1215-1212
 *
 * has a type ("UUID") and a Hash ("235-1215-1212").
 */
pub struct FileID {
    // The hash part, e.g. "235-1215-1212".
    id: FileHash,
    // The type part, e.g. UUID.
    id_type: FileIDType,
}
|
||||
|
||||
impl FileID {
|
||||
|
||||
pub fn new(id_type: FileIDType, id: FileHash) -> FileID {
|
||||
FileID {
|
||||
id: id,
|
||||
id_type: id_type,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the type of the FileID
|
||||
*/
|
||||
pub fn get_type(&self) -> FileIDType {
|
||||
self.id_type.clone()
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the Hash of the FileID
|
||||
*/
|
||||
pub fn get_id(&self) -> FileHash {
|
||||
self.id.clone()
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a String into a FileID, if possible
|
||||
*/
|
||||
pub fn parse(string: &String) -> Option<FileID> {
|
||||
// we assume that it is an path
|
||||
let regex = Regex::new(r"([:alnum:]*)-([:upper:]*)-([A-Za-z0-9-_]*)\.(.*)").unwrap();
|
||||
let s = string.split("/").last().unwrap_or("");
|
||||
|
||||
debug!("Regex build: {:?}", regex);
|
||||
debug!("Matching string: '{}'", s);
|
||||
regex.captures(s).and_then(|capts| {
|
||||
// first one is the whole string, index 1-N are the matches.
|
||||
if capts.len() != 5 {
|
||||
debug!("Matches, but not expected number of groups");
|
||||
return None;
|
||||
}
|
||||
debug!("Matches: {}", capts.len());
|
||||
|
||||
let modname = capts.at(1).unwrap();
|
||||
let hashname = capts.at(2).unwrap();
|
||||
let hash = capts.at(3).unwrap();
|
||||
|
||||
debug!("Destructure FilePath to ID:");
|
||||
debug!(" FilePath: {:?}", s);
|
||||
debug!(" Module Name: {:?}", modname);
|
||||
debug!(" Hash Name: {:?}", hashname);
|
||||
debug!(" Hash: {:?}", hash);
|
||||
|
||||
FileIDType::from_str(hashname).map(|idtype| {
|
||||
debug!("ID type = {:?}", idtype);
|
||||
Some(FileID {
|
||||
id: FileHash::from(hash),
|
||||
id_type: idtype,
|
||||
})
|
||||
}).ok()
|
||||
}).unwrap_or({
|
||||
debug!("Did not match");
|
||||
debug!("It is no path, actually. So we assume it is an ID already");
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Debug for FileID {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "FileID[{:?}]: {:?}",
|
||||
self.id_type,
|
||||
self.id));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Display for FileID {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "{}-{}", self.id_type, self.id));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Into<String> for FileID {
|
||||
|
||||
fn into(self) -> String {
|
||||
let typestr : String = self.id_type.into();
|
||||
let idstr : String = self.id.into();
|
||||
typestr + "-" + &idstr[..]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {

    use storage::file::id::FileID;
    use storage::file::id_type::FileIDType;

    /// Parsing well-formed store paths must yield UUID ids with hash
    /// "some-id"; an unknown id type name ("NOHASH") must yield None.
    #[test]
    fn file_id_from_string() {
        setup_logger();

        let s1 = String::from("/home/user/testmodule-UUID-some-id.imag");
        let s2 = String::from("/home/user/testmodule-UUID-some-id.extension.imag");
        let s3 = String::from("/home/user/testmodule-NOHASH-some-id.imag");

        let id1 = FileID::parse(&s1).unwrap();
        let id2 = FileID::parse(&s2).unwrap();
        assert!(FileID::parse(&s3).is_none());

        println!("Id 1 : {:?}", id1);
        println!("Id 2 : {:?}", id2);

        assert_eq!(FileIDType::UUID, id1.get_type());
        assert_eq!(FileIDType::UUID, id2.get_type());

        let h1 : String = id1.get_id().into();
        let h2 : String = id2.get_id().into();

        assert_eq!(String::from("some-id"), h1);
        assert_eq!(String::from("some-id"), h2);

        let f1 : String = id1.into();
        let f2 : String = id2.into();

        assert_eq!(String::from("UUID-some-id"), f1);
        assert_eq!(String::from("UUID-some-id"), f2);
    }

    /// Install a debug-level logger so test runs show the parser's debug
    /// output. Errors from installing twice are swallowed on purpose (the
    /// global logger can only be set once per process).
    fn setup_logger() {
        extern crate log;
        use log::{LogLevelFilter, set_logger};
        use runtime::ImagLogger;

        log::set_logger(|max_log_lvl| {
            let lvl = LogLevelFilter::Debug;
            max_log_lvl.set(lvl);
            Box::new(ImagLogger::new(lvl.to_log_level().unwrap()))
        })
        .map_err(|e| {
            println!("Error setting logger: {:?}", e);
            assert!(false);
        })
        .ok();
        debug!("Init logger for test");
    }

}
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
use std::fmt::{Display, Formatter};
|
||||
use std::fmt;
|
||||
use std::convert::{From, Into};
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Debug)]
#[derive(Clone)]
#[derive(PartialEq)]
#[derive(Eq)]
#[derive(Hash)]
/**
 * File ID type
 *
 * Currently only UUID is available. Maybe this will be the only type available at all.
 */
pub enum FileIDType {
    UUID,
}
|
||||
|
||||
pub enum FileIDTypeParseError {
|
||||
UnknownType
|
||||
}
|
||||
|
||||
impl FromStr for FileIDType {
|
||||
type Err = FileIDTypeParseError;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"UUID" => Ok(FileIDType::UUID),
|
||||
_ => Err(FileIDTypeParseError::UnknownType)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<String> for FileIDType {
|
||||
|
||||
fn into(self) -> String {
|
||||
match self {
|
||||
FileIDType::UUID => String::from("UUID"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for FileIDType {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
match self {
|
||||
&FileIDType::UUID => try!(write!(fmt, "UUID")),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
@ -1,159 +0,0 @@
|
|||
use std::fmt::{Debug, Display, Formatter};
|
||||
use std::fmt;
|
||||
|
||||
use regex::Regex;
|
||||
|
||||
pub mod id;
|
||||
pub mod id_type;
|
||||
pub mod header;
|
||||
pub mod hash;
|
||||
|
||||
use storage::file::id::*;
|
||||
use self::header::data::*;
|
||||
|
||||
/**
 * Internal abstract view on a file. Does not neccessarily exist on the FS and is just kept
 * internally until it is written to disk.
 */
pub struct File {
    /// The name of the module which owns this file
    pub owning_module_name : &'static str,

    /// The header of the file
    pub header : FileHeaderData,

    /// The content part of the file
    pub data : String,

    /// The ID of the file
    pub id : FileID,
}
|
||||
|
||||
impl File {

    /**
     * Get the owner module name of the file
     */
    pub fn owner_name(&self) -> &'static str {
        self.owning_module_name
    }

    /**
     * Get the header of the file
     */
    pub fn header(&self) -> &FileHeaderData {
        &self.header
    }

    /**
     * Set the header of the file
     */
    pub fn set_header(&mut self, new_header: FileHeaderData) {
        self.header = new_header;
    }

    /**
     * Get the data of the file
     */
    pub fn data(&self) -> &String {
        &self.data
    }

    /**
     * Set the data
     */
    pub fn set_data(&mut self, new_data: String) {
        self.data = new_data;
    }

    /**
     * Get the (header, data) pair of the file
     */
    pub fn contents(&self) -> (&FileHeaderData, &String) {
        (self.header(), self.data())
    }

    /**
     * Get the id of the file
     */
    pub fn id(&self) -> &FileID {
        &self.id
    }

    /**
     * Check whether the header or the data of the file match some regex
     */
    pub fn matches_with(&self, r: &Regex) -> bool {
        r.is_match(&self.data[..]) || self.header.matches_with(r)
    }

}
|
||||
|
||||
impl Display for File {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt,
|
||||
"[File] Owner : '{:?}'
|
||||
FileID: '{:?}'
|
||||
Header: '{:?}'
|
||||
Data : '{:?}'",
|
||||
self.owning_module_name,
|
||||
self.header,
|
||||
self.data,
|
||||
self.id));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Debug for File {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt,
|
||||
"[File] Owner : '{:?}'
|
||||
FileID: '{:?}'
|
||||
Header: '{:?}'
|
||||
Data : '{:?}'",
|
||||
self.owning_module_name,
|
||||
self.id,
|
||||
self.header,
|
||||
self.data));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    // we use the JSON parser here, so we can generate FileHeaderData
    use storage::json::parser::JsonHeaderParser;
    use storage::file::header::match_header_spec;
    use storage::parser::FileHeaderParser;
    use storage::file::header::spec::FileHeaderSpec as FHS;

    /// A header parsed from JSON must match the spec it was parsed against:
    /// "a" is non-negative (UInteger), "b" negative (Integer).
    #[test]
    fn test_spec_matching() {
        let text = String::from("{\"a\": 1, \"b\": -2}");
        let spec = FHS::Map {
            keys: vec![
                FHS::Key {
                    name: String::from("a"),
                    value_type: Box::new(FHS::UInteger)
                },
                FHS::Key {
                    name: String::from("b"),
                    value_type: Box::new(FHS::Integer)
                }
            ]
        };

        let parser = JsonHeaderParser::new(Some(spec.clone()));
        let datares = parser.read(Some(text.clone()));
        assert!(datares.is_ok(), "Text could not be parsed: '{}'", text);
        let data = datares.unwrap();

        // None means "no mismatch found".
        let matchres = match_header_spec(&spec, &data);
        assert!(matchres.is_none(), "Matching returns error: {:?}", matchres);
    }
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
pub mod parser;
|
|
@ -1,281 +0,0 @@
|
|||
use std::collections::HashMap;
|
||||
use std::error::Error;
|
||||
use std::fmt::{Debug, Display, Formatter};
|
||||
use std::fmt;
|
||||
|
||||
use serde_json::{Value, from_str};
|
||||
use serde_json::error::Result as R;
|
||||
use serde_json::Serializer;
|
||||
use serde::ser::Serialize;
|
||||
use serde::ser::Serializer as Ser;
|
||||
|
||||
use storage::parser::{FileHeaderParser, ParserError};
|
||||
use storage::file::header::spec::FileHeaderSpec;
|
||||
use storage::file::header::data::FileHeaderData;
|
||||
|
||||
/// Header parser reading/writing `FileHeaderData` from/to JSON text, with an
/// optional spec the header is supposed to match.
pub struct JsonHeaderParser {
    // Optional specification; None means "accept anything".
    spec: Option<FileHeaderSpec>,
}

impl JsonHeaderParser {

    /// Create a parser; the spec is stored for Debug output and later
    /// validation.
    pub fn new(spec: Option<FileHeaderSpec>) -> JsonHeaderParser {
        JsonHeaderParser {
            spec: spec
        }
    }

}
|
||||
|
||||
impl Display for JsonHeaderParser {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "JsonHeaderParser"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Debug for JsonHeaderParser {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "JsonHeaderParser, Spec: {:?}", self.spec));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl FileHeaderParser for JsonHeaderParser {
|
||||
|
||||
fn read(&self, string: Option<String>)
|
||||
-> Result<FileHeaderData, ParserError>
|
||||
{
|
||||
if string.is_some() {
|
||||
let s = string.unwrap();
|
||||
debug!("Deserializing: {}", s);
|
||||
let fromstr : R<Value> = from_str(&s[..]);
|
||||
if let Ok(ref content) = fromstr {
|
||||
return Ok(visit_json(&content))
|
||||
}
|
||||
let oe = fromstr.err().unwrap();
|
||||
let s = format!("JSON parser error: {}", oe.description());
|
||||
let e = ParserError::short(&s[..], s.clone(), 0);
|
||||
Err(e)
|
||||
} else {
|
||||
Ok(FileHeaderData::Null)
|
||||
}
|
||||
}
|
||||
|
||||
fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {
|
||||
let mut s = Vec::<u8>::new();
|
||||
{
|
||||
let mut ser = Serializer::pretty(&mut s);
|
||||
data.serialize(&mut ser).map_err(|e| {
|
||||
debug!("Serializer error: {:?}", e);
|
||||
}).ok();
|
||||
}
|
||||
|
||||
String::from_utf8(s).or(
|
||||
Err(ParserError::short("Cannot parse utf8 bytes",
|
||||
String::from("<not printable>"),
|
||||
0)))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// TODO: This function must be able to return a parser error
|
||||
fn visit_json(v: &Value) -> FileHeaderData {
|
||||
match v {
|
||||
&Value::Null => FileHeaderData::Null,
|
||||
&Value::Bool(b) => FileHeaderData::Bool(b),
|
||||
&Value::I64(i) => FileHeaderData::Integer(i),
|
||||
&Value::U64(u) => FileHeaderData::UInteger(u),
|
||||
&Value::F64(f) => FileHeaderData::Float(f),
|
||||
&Value::String(ref s) => FileHeaderData::Text(s.clone()),
|
||||
&Value::Array(ref vec) => {
|
||||
FileHeaderData::Array {
|
||||
values: Box::new(vec.clone().into_iter().map(|i| visit_json(&i)).collect())
|
||||
}
|
||||
},
|
||||
&Value::Object(ref btree) => {
|
||||
let btree = btree.clone();
|
||||
FileHeaderData::Map{
|
||||
keys: btree.into_iter().map(|(k, v)|
|
||||
FileHeaderData::Key {
|
||||
name: k,
|
||||
value: Box::new(visit_json(&v)),
|
||||
}
|
||||
).collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for FileHeaderData {
|
||||
|
||||
fn serialize<S>(&self, ser: &mut S) -> Result<(), S::Error>
|
||||
where S: Ser
|
||||
{
|
||||
match self {
|
||||
&FileHeaderData::Null => {
|
||||
let o : Option<bool> = None;
|
||||
o.serialize(ser)
|
||||
},
|
||||
&FileHeaderData::Bool(ref b) => b.serialize(ser),
|
||||
&FileHeaderData::Integer(ref i) => i.serialize(ser),
|
||||
&FileHeaderData::UInteger(ref u) => u.serialize(ser),
|
||||
&FileHeaderData::Float(ref f) => f.serialize(ser),
|
||||
&FileHeaderData::Text(ref s) => (&s[..]).serialize(ser),
|
||||
&FileHeaderData::Array{values: ref vs} => vs.serialize(ser),
|
||||
&FileHeaderData::Map{keys: ref ks} => {
|
||||
let mut hm = HashMap::new();
|
||||
|
||||
for key in ks {
|
||||
if let &FileHeaderData::Key{name: ref n, value: ref v} = key {
|
||||
hm.insert(n, v);
|
||||
} else {
|
||||
panic!("Not a key: {:?}", key);
|
||||
}
|
||||
}
|
||||
|
||||
hm.serialize(ser)
|
||||
},
|
||||
&FileHeaderData::Key{name: _, value: _} => unreachable!(),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
|
||||
use std::ops::Deref;
|
||||
|
||||
use super::JsonHeaderParser;
|
||||
use storage::parser::FileHeaderParser;
|
||||
use storage::file::header::data::FileHeaderData as FHD;
|
||||
use storage::file::header::spec::FileHeaderSpec as FHS;
|
||||
|
||||
#[test]
|
||||
fn test_deserialization() {
|
||||
let text = String::from("{\"a\": 1, \"b\": -2}");
|
||||
let spec = FHS::Map {
|
||||
keys: vec![
|
||||
FHS::Key {
|
||||
name: String::from("a"),
|
||||
value_type: Box::new(FHS::UInteger)
|
||||
},
|
||||
FHS::Key {
|
||||
name: String::from("b"),
|
||||
value_type: Box::new(FHS::Integer)
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
let parser = JsonHeaderParser::new(Some(spec));
|
||||
let parsed = parser.read(Some(text));
|
||||
assert!(parsed.is_ok(), "Parsed is not ok: {:?}", parsed);
|
||||
|
||||
match parsed.ok() {
|
||||
Some(FHD::Map{keys}) => {
|
||||
for k in keys {
|
||||
match k {
|
||||
FHD::Key{name, value} => {
|
||||
assert!(name == "a" || name == "b", "Key unknown");
|
||||
match value.deref() {
|
||||
&FHD::UInteger(u) => assert_eq!(u, 1),
|
||||
&FHD::Integer(i) => assert_eq!(i, -2),
|
||||
_ => assert!(false, "Integers are not here"),
|
||||
}
|
||||
},
|
||||
_ => assert!(false, "Key is not a Key"),
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
_ => assert!(false, "Parsed is not a map"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_deserialization_without_spec() {
|
||||
let text = String::from("{\"a\": [1], \"b\": {\"c\": -2}}");
|
||||
let parser = JsonHeaderParser::new(None);
|
||||
let parsed = parser.read(Some(text));
|
||||
|
||||
assert!(parsed.is_ok(), "Parsed is not ok: {:?}", parsed);
|
||||
|
||||
match parsed.ok() {
|
||||
Some(FHD::Map{keys}) => {
|
||||
for k in keys {
|
||||
match_key(&k);
|
||||
}
|
||||
},
|
||||
|
||||
_ => assert!(false, "Parsed is not a map"),
|
||||
}
|
||||
}
|
||||
|
||||
fn match_key(k: &FHD) {
|
||||
use std::ops::Deref;
|
||||
|
||||
match k {
|
||||
&FHD::Key{ref name, ref value} => {
|
||||
assert!(name == "a" || name == "b", "Key unknown");
|
||||
match value.deref() {
|
||||
&FHD::Array{ref values} => {
|
||||
for value in values.iter() {
|
||||
match value {
|
||||
&FHD::UInteger(u) => assert_eq!(u, 1),
|
||||
_ => assert!(false, "UInt is not an UInt"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&FHD::Map{ref keys} => {
|
||||
for key in keys.iter() {
|
||||
match key {
|
||||
&FHD::Key{ref name, ref value} => {
|
||||
match value.deref() {
|
||||
&FHD::Integer(i) => {
|
||||
assert_eq!(i, -2);
|
||||
assert_eq!(name, "c");
|
||||
},
|
||||
_ => assert!(false, "Int is not an Int"),
|
||||
};
|
||||
},
|
||||
_ => assert!(false, "Key is not a Key"),
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => assert!(false, "Integers are not here"),
|
||||
}
|
||||
},
|
||||
_ => assert!(false, "Key in main Map is not a Key"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_desser() {
|
||||
use serde_json::error::Result as R;
|
||||
use serde_json::{Value, from_str};
|
||||
|
||||
let text = String::from("{\"a\": [1], \"b\": {\"c\": -2}}");
|
||||
let parser = JsonHeaderParser::new(None);
|
||||
|
||||
let des = parser.read(Some(text.clone()));
|
||||
assert!(des.is_ok(), "Deserializing failed");
|
||||
|
||||
let ser = parser.write(&des.unwrap());
|
||||
assert!(ser.is_ok(), "Parser error when serializing deserialized text");
|
||||
|
||||
let json_text : R<Value> = from_str(&text[..]);
|
||||
let json_ser : R<Value> = from_str(&ser.unwrap()[..]);
|
||||
|
||||
assert!(json_text.is_ok(), "Could not use serde to serialize text for comparison");
|
||||
assert!(json_ser.is_ok(), "Could not use serde to serialize serialized-deserialized text for comparison");
|
||||
assert_eq!(json_text.unwrap(), json_ser.unwrap());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,372 +0,0 @@
|
|||
use std::rc::Rc;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashMap;
|
||||
use std::fs::File as FSFile;
|
||||
use std::ops::Deref;
|
||||
use std::io::Write;
|
||||
use std::io::Read;
|
||||
|
||||
pub mod path;
|
||||
pub mod file;
|
||||
pub mod parser;
|
||||
pub mod json;
|
||||
pub mod yaml;
|
||||
|
||||
use module::Module;
|
||||
use storage::file::File;
|
||||
use storage::file::id::FileID;
|
||||
use storage::file::id_type::FileIDType;
|
||||
use storage::file::hash::FileHash;
|
||||
use storage::parser::{FileHeaderParser, Parser};
|
||||
use storage::file::header::data::FileHeaderData;
|
||||
|
||||
type Cache = HashMap<FileID, Rc<RefCell<File>>>;
|
||||
|
||||
pub struct Store {
|
||||
storepath: String,
|
||||
cache : RefCell<Cache>,
|
||||
}
|
||||
|
||||
/**
|
||||
* Store object
|
||||
*
|
||||
* This object is an abstraction layer over FS and an interface to the object store of this
|
||||
* software.
|
||||
*/
|
||||
impl Store {
|
||||
|
||||
pub fn new(storepath: String) -> Store {
|
||||
Store {
|
||||
storepath: storepath,
|
||||
cache: RefCell::new(HashMap::new()),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Put a file into the cache
|
||||
*/
|
||||
fn put_in_cache(&self, f: File) -> FileID {
|
||||
let res = f.id().clone();
|
||||
self.cache.borrow_mut().insert(f.id().clone(), Rc::new(RefCell::new(f)));
|
||||
res
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a new file for a module.
|
||||
*
|
||||
* Returns the new FileID object then
|
||||
*/
|
||||
pub fn new_file(&self, module: &Module)
|
||||
-> FileID
|
||||
{
|
||||
let f = File {
|
||||
owning_module_name: module.name(),
|
||||
header: FileHeaderData::Null,
|
||||
data: String::from(""),
|
||||
id: self.get_new_file_id(),
|
||||
};
|
||||
|
||||
debug!("Create new File object: {:?}", &f);
|
||||
self.put_in_cache(f)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a new file from a parser result.
|
||||
*
|
||||
* @deprecated This function shouldn't be needed anymore
|
||||
*/
|
||||
pub fn new_file_from_parser_result(&self,
|
||||
module: &Module,
|
||||
id: FileID,
|
||||
header: FileHeaderData,
|
||||
data: String)
|
||||
-> FileID
|
||||
{
|
||||
let f = File {
|
||||
owning_module_name: module.name(),
|
||||
header: header,
|
||||
data: data,
|
||||
id: id,
|
||||
};
|
||||
debug!("Create new File object from parser result: {:?}", f);
|
||||
self.put_in_cache(f)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a new file for a module, providing some header data
|
||||
*
|
||||
* Returns the new FileID object then
|
||||
*/
|
||||
pub fn new_file_with_header(&self,
|
||||
module: &Module,
|
||||
h: FileHeaderData)
|
||||
-> FileID
|
||||
{
|
||||
let f = File {
|
||||
owning_module_name: module.name(),
|
||||
header: h,
|
||||
data: String::from(""),
|
||||
id: self.get_new_file_id(),
|
||||
};
|
||||
debug!("Create new File object with header: {:?}", f);
|
||||
self.put_in_cache(f)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a new file for a module, providing some initial data
|
||||
*
|
||||
* Returns the new FileID object then
|
||||
*/
|
||||
pub fn new_file_with_data(&self, module: &Module, d: String)
|
||||
-> FileID
|
||||
{
|
||||
let f = File {
|
||||
owning_module_name: module.name(),
|
||||
header: FileHeaderData::Null,
|
||||
data: d,
|
||||
id: self.get_new_file_id(),
|
||||
};
|
||||
debug!("Create new File object with data: {:?}", f);
|
||||
self.put_in_cache(f)
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Generate a new file for a module, providing some initial data and some header
|
||||
*
|
||||
* Returns the new FileID object then
|
||||
*/
|
||||
pub fn new_file_with_content(&self,
|
||||
module: &Module,
|
||||
h: FileHeaderData,
|
||||
d: String)
|
||||
-> FileID
|
||||
{
|
||||
let f = File {
|
||||
owning_module_name: module.name(),
|
||||
header: h,
|
||||
data: d,
|
||||
id: self.get_new_file_id(),
|
||||
};
|
||||
debug!("Create new File object with content: {:?}", f);
|
||||
self.put_in_cache(f)
|
||||
}
|
||||
|
||||
/**
|
||||
* Persist a File on the filesystem
|
||||
*
|
||||
* Returns true if this worked
|
||||
*/
|
||||
pub fn persist<HP>(&self,
|
||||
p: &Parser<HP>,
|
||||
f: Rc<RefCell<File>>) -> bool
|
||||
where HP: FileHeaderParser
|
||||
{
|
||||
let file = f.deref().borrow();
|
||||
let text = p.write(file.contents());
|
||||
if text.is_err() {
|
||||
error!("Error: {}", text.err().unwrap());
|
||||
return false;
|
||||
}
|
||||
|
||||
let path = {
|
||||
let ids : String = file.id().clone().into();
|
||||
format!("{}/{}-{}.imag", self.storepath, file.owning_module_name, ids)
|
||||
};
|
||||
|
||||
self.ensure_store_path_exists();
|
||||
|
||||
FSFile::create(&path).map(|mut fsfile| {
|
||||
fsfile.write_all(&text.unwrap().clone().into_bytes()[..])
|
||||
}).map_err(|writeerr| {
|
||||
debug!("Could not create file at '{}'", path);
|
||||
debug!(" error: {:?}", writeerr);
|
||||
}).and(Ok(true)).unwrap()
|
||||
|
||||
// TODO: Is this unwrap() save?
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to generate the store path
|
||||
*
|
||||
* Kills the program if it fails
|
||||
*/
|
||||
fn ensure_store_path_exists(&self) {
|
||||
use std::fs::create_dir_all;
|
||||
use std::process::exit;
|
||||
|
||||
create_dir_all(&self.storepath).unwrap_or_else(|e| {
|
||||
error!("Could not create store: '{}'", self.storepath);
|
||||
error!("Error : '{}'", e);
|
||||
error!("Killing myself now");
|
||||
exit(1);
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a file by ID into the cache and return it afterwards
|
||||
*
|
||||
* Returns None if the file could be loaded from the Filesystem
|
||||
*/
|
||||
fn load_into_cache<HP>(&self, m: &Module, parser: &Parser<HP>, id: &FileID)
|
||||
-> bool
|
||||
where HP: FileHeaderParser
|
||||
{
|
||||
let idstr : String = id.clone().into();
|
||||
let path = format!("{}/{}-{}.imag", self.storepath, m.name(), idstr);
|
||||
debug!("Loading path = '{}'", path);
|
||||
let mut string = String::new();
|
||||
|
||||
FSFile::open(&path).map(|mut file| {
|
||||
file.read_to_string(&mut string)
|
||||
.map_err(|e| {
|
||||
error!("Failed reading file: '{}'", path);
|
||||
debug!(" error {}", e);
|
||||
})
|
||||
.is_ok();
|
||||
})
|
||||
.map_err(|e| {
|
||||
error!("Error opening file: {}", path);
|
||||
debug!("Error opening file: {:?}", e);
|
||||
}).ok();
|
||||
|
||||
parser.read(string).map(|(header, data)| {
|
||||
self.new_file_from_parser_result(m, id.clone(), header, data);
|
||||
true
|
||||
}).unwrap_or(false)
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a file from the cache by FileID
|
||||
*
|
||||
* TODO: Semantics: This function should load from FS if the file is not in the cache yet or
|
||||
* fail if the file is not available.
|
||||
*/
|
||||
pub fn load<HP>(&self, m: &Module, parser: &Parser<HP>, id: &FileID)
|
||||
-> Option<Rc<RefCell<File>>>
|
||||
where HP: FileHeaderParser
|
||||
{
|
||||
if !self.cache.borrow().contains_key(id) {
|
||||
self.load_into_cache(m, parser, id);
|
||||
}
|
||||
debug!("Loading '{:?}'", id);
|
||||
self.cache.borrow().get(id).cloned()
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a file from the filesystem/cache by a FileHash
|
||||
*/
|
||||
pub fn load_by_hash<HP>(&self,
|
||||
m: &Module,
|
||||
parser: &Parser<HP>,
|
||||
hash: FileHash)
|
||||
-> Option<Rc<RefCell<File>>>
|
||||
where HP: FileHeaderParser
|
||||
{
|
||||
macro_rules! try_some {
|
||||
($expr:expr) => (match $expr {
|
||||
::std::option::Option::Some(val) => val,
|
||||
::std::option::Option::None => return ::std::option::Option::None,
|
||||
});
|
||||
|
||||
($expr:expr => return) => (match $expr {
|
||||
::std::option::Option::Some(val) => val,
|
||||
::std::option::Option::None => return,
|
||||
})
|
||||
}
|
||||
|
||||
use glob::glob;
|
||||
|
||||
let hashstr : String = hash.into();
|
||||
let globstr = format!("{}/*-{}.imag", self.storepath, hashstr);
|
||||
debug!("glob({})", globstr);
|
||||
|
||||
let globs = glob(&globstr[..]);
|
||||
if globs.is_err() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let path = globs.unwrap().last();
|
||||
debug!("path = {:?}", path);
|
||||
|
||||
let pathbuf = try_some!(path);
|
||||
if pathbuf.is_err() { return None; }
|
||||
|
||||
let pathbuf_un = pathbuf.unwrap();
|
||||
let filename = pathbuf_un.file_name();
|
||||
let s = try_some!(filename).to_str();
|
||||
let string = String::from(try_some!(s));
|
||||
let id = try_some!(FileID::parse(&string));
|
||||
|
||||
debug!("Loaded ID = '{:?}'", id);
|
||||
|
||||
self.load(m, parser, &id)
|
||||
}
|
||||
|
||||
/**
|
||||
* Load all files for a module
|
||||
*/
|
||||
pub fn load_for_module<HP>(&self, m: &Module, parser: &Parser<HP>)
|
||||
-> Vec<Rc<RefCell<File>>>
|
||||
where HP: FileHeaderParser
|
||||
{
|
||||
use glob::glob;
|
||||
|
||||
let globstr = format!("{}/{}-*.imag", self.storepath, m.name());
|
||||
let mut res = vec![];
|
||||
|
||||
glob(&globstr[..]).map(|paths| {
|
||||
for path in paths {
|
||||
if let Ok(pathbuf) = path {
|
||||
let fname = pathbuf.file_name().and_then(|s| s.to_str());
|
||||
fname.map(|s| {
|
||||
FileID::parse(&String::from(s)).map(|id| {
|
||||
self.load(m, parser, &id).map(|file| {
|
||||
res.push(file);
|
||||
})
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
.map_err(|e| {
|
||||
error!("Could not glob: '{}'", globstr);
|
||||
debug!("Could not glob(): {:?}", e);
|
||||
})
|
||||
.ok();
|
||||
res
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a file from the filesystem by FileID
|
||||
*
|
||||
* Returns true if this works.
|
||||
*/
|
||||
pub fn remove(&self, id: FileID) -> bool {
|
||||
use std::fs::remove_file;
|
||||
|
||||
self.cache
|
||||
.borrow_mut()
|
||||
.remove(&id)
|
||||
.map(|file| {
|
||||
let idstr : String = id.into();
|
||||
let path = format!("{}/{}-{}.imag",
|
||||
self.storepath,
|
||||
file.deref().borrow().owner_name(),
|
||||
idstr);
|
||||
debug!("Removing file NOW: '{}'", path);
|
||||
remove_file(path).is_ok()
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to generate a new FileID object
|
||||
*/
|
||||
fn get_new_file_id(&self) -> FileID {
|
||||
use uuid::Uuid;
|
||||
let hash = FileHash::from(Uuid::new_v4().to_hyphenated_string());
|
||||
FileID::new(FileIDType::UUID, hash)
|
||||
}
|
||||
|
||||
}
|
|
@ -1,202 +0,0 @@
|
|||
use std::error::Error;
|
||||
use std::fmt::{Debug, Display, Formatter};
|
||||
use std::fmt;
|
||||
|
||||
use regex::Regex;
|
||||
|
||||
use super::file::header::data::FileHeaderData;
|
||||
|
||||
pub struct ParserError {
|
||||
summary: String,
|
||||
parsertext: String,
|
||||
index: i32,
|
||||
explanation: Option<String>,
|
||||
caused_by: Option<Box<Error>>,
|
||||
}
|
||||
|
||||
impl ParserError {
|
||||
pub fn new(sum: &'static str, text: String, idx: i32, expl: &'static str) -> ParserError {
|
||||
ParserError {
|
||||
summary: String::from(sum),
|
||||
parsertext: text,
|
||||
index: idx,
|
||||
explanation: Some(String::from(expl)),
|
||||
caused_by: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn short(sum: &str, text: String, idx: i32) -> ParserError {
|
||||
ParserError {
|
||||
summary: String::from(sum),
|
||||
parsertext: text,
|
||||
index: idx,
|
||||
explanation: None,
|
||||
caused_by: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_cause(mut self, e: Box<Error>) -> ParserError {
|
||||
self.caused_by = Some(e);
|
||||
self
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Error for ParserError {
|
||||
|
||||
fn description(&self) -> &str {
|
||||
&self.summary[..]
|
||||
}
|
||||
|
||||
fn cause(&self) -> Option<&Error> {
|
||||
self.caused_by.as_ref().map(|e| &**e)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Debug for ParserError {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "ParserError: {}\n\n", self.summary));
|
||||
|
||||
if let Some(ref e) = self.explanation {
|
||||
try!(write!(fmt, "{}\n\n", e));
|
||||
}
|
||||
|
||||
try!(write!(fmt, "On position {}\nin\n{}", self.index, self.parsertext));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Display for ParserError {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "ParserError: {}", self.summary));
|
||||
|
||||
if let Some(ref e) = self.explanation {
|
||||
try!(write!(fmt, "\n\n{}", e));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Trait for a header parser.
|
||||
*
|
||||
* This parser type has to provide two functions:
|
||||
* - read(), which reads an String into a FileHeaderData structure
|
||||
* - write(), which parses a FileHeaderData structure into a String
|
||||
*
|
||||
* TODO: Use Write/Read traits?
|
||||
*/
|
||||
pub trait FileHeaderParser : Sized + Debug + Display {
|
||||
fn read(&self, string: Option<String>) -> Result<FileHeaderData, ParserError>;
|
||||
fn write(&self, data: &FileHeaderData) -> Result<String, ParserError>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parser
|
||||
*
|
||||
* This Parser object is an abstraction which uses the FileHeaderParser to parse the whole contents
|
||||
* of a file into a header (FileHeaderData) structure and the content (String).
|
||||
*/
|
||||
pub struct Parser<HP> {
|
||||
headerp : HP,
|
||||
}
|
||||
|
||||
impl<HP: FileHeaderParser> Parser<HP> {
|
||||
|
||||
pub fn new(headerp: HP) -> Parser<HP> {
|
||||
Parser {
|
||||
headerp: headerp,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the String which is the contents of a file into a (FileHeaderData, String) tuple, which
|
||||
* is the header and the content of the file.
|
||||
*/
|
||||
pub fn read(&self, s: String) -> Result<(FileHeaderData, String), ParserError> {
|
||||
debug!("Reading into internal datastructure: '{}'", s);
|
||||
let divided = self.divide_text(&s);
|
||||
|
||||
if divided.is_err() {
|
||||
debug!("Error reading into internal datastructure");
|
||||
let p = ParserError::new("Dividing text failed", s, 0,
|
||||
"Dividing text with divide_text() failed");
|
||||
return Err(p.with_cause(Box::new(divided.err().unwrap())));
|
||||
}
|
||||
|
||||
let (header, data) = divided.ok().unwrap();
|
||||
debug!("Header = '{:?}'", header);
|
||||
debug!("Data = '{:?}'", data);
|
||||
|
||||
let h_parseres = try!(self.headerp.read(header));
|
||||
debug!("Success parsing header");
|
||||
|
||||
Ok((h_parseres, data.unwrap_or(String::new())))
|
||||
}
|
||||
|
||||
/**
|
||||
* Write the FileHeaderData and String (header and content) of the tuple into a String, which
|
||||
* can then simply be written into the store as a file.
|
||||
*/
|
||||
pub fn write(&self, tpl : (&FileHeaderData, &String)) -> Result<String, ParserError> {
|
||||
debug!("Parsing internal datastructure to String");
|
||||
let (header, data) = tpl;
|
||||
let h_text = try!(self.headerp.write(&header));
|
||||
debug!("Success translating header");
|
||||
|
||||
let text = format!("---\n{}\n---\n{}", h_text, data);
|
||||
Ok(text)
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to parse the full-text of a file into a header part (String) and a content part
|
||||
* (String)
|
||||
*/
|
||||
fn divide_text(&self, text: &String) -> Result<(Option<String>, Option<String>), ParserError> {
|
||||
let re = Regex::new(r"(?sm)^---$(.*)^---$(.*)").unwrap();
|
||||
|
||||
debug!("Splitting: '{}'", text);
|
||||
debug!(" regex = {:?}", re);
|
||||
|
||||
re.captures(text).map(|captures| {
|
||||
|
||||
if captures.len() != 3 {
|
||||
debug!("Unexpected amount of captures");
|
||||
return Err(ParserError::new("Unexpected Regex output",
|
||||
text.clone(), 0,
|
||||
"The regex to divide text into header and content had an unexpected output."))
|
||||
}
|
||||
|
||||
let header = captures.at(1).map(|s| String::from(s));
|
||||
let content = captures.at(2).map(|s| String::from(s));
|
||||
|
||||
debug!("Splitted, Header = '{:?}'", header.clone().unwrap_or("NONE".into()));
|
||||
debug!("Splitted, Data = '{:?}'", content.clone().unwrap_or("NONE".into()));
|
||||
Ok((header, content))
|
||||
}).or_else(|| {
|
||||
debug!("Cannot capture from text");
|
||||
let e = ParserError::new("Cannot run regex on text",
|
||||
text.clone(), 0,
|
||||
"Cannot run regex on text to divide it into header and content.");
|
||||
Some(Err(e))
|
||||
}).unwrap()
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<HP> Debug for Parser<HP>
|
||||
where HP: FileHeaderParser
|
||||
{
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "Parser<{:?}>", self.headerp));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
|
@ -1,108 +0,0 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use glob::glob;
|
||||
use glob::Paths;
|
||||
use glob::PatternError;
|
||||
|
||||
use storage::file::id::FileID;
|
||||
use storage::file::id_type::FileIDType;
|
||||
use storage::file::hash::FileHash;
|
||||
use module::Module;
|
||||
|
||||
/*
|
||||
* A path represents either a GLOB ("/tmp/store/module-*-*.imag" for example) or a full path
|
||||
*
|
||||
* It can be used to generate a File or iterate over some files
|
||||
*
|
||||
*/
|
||||
struct Path<'a> {
|
||||
|
||||
/*
|
||||
* The base part ("/tmp/")
|
||||
*/
|
||||
base: PathBuf,
|
||||
|
||||
/*
|
||||
* The store part ("/store/")
|
||||
*/
|
||||
store: PathBuf,
|
||||
|
||||
/*
|
||||
* The module
|
||||
*/
|
||||
module: &'a Module<'a>,
|
||||
|
||||
/*
|
||||
* The ID
|
||||
*/
|
||||
idtype: Option<FileIDType>,
|
||||
idhash: Option<FileHash>,
|
||||
id: Option<FileID>,
|
||||
|
||||
}
|
||||
|
||||
impl<'a> Path<'a> {
|
||||
|
||||
fn new(base: PathBuf, store: PathBuf, m: &'a Module<'a>, id: FileID) -> Path<'a> {
|
||||
Path {
|
||||
base: base,
|
||||
store: store,
|
||||
module: m,
|
||||
idtype: Some(id.get_type()),
|
||||
idhash: Some(id.get_id()),
|
||||
id: Some(id),
|
||||
}
|
||||
}
|
||||
|
||||
fn new_with_idtype(base: PathBuf, store: PathBuf, m: &'a Module<'a>, id: FileIDType) -> Path<'a> {
|
||||
Path {
|
||||
base: base,
|
||||
store: store,
|
||||
module: m,
|
||||
idtype: Some(id),
|
||||
idhash: None,
|
||||
id: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn new_with_idhash(base: PathBuf, store: PathBuf, m: &'a Module<'a>, id: FileHash) -> Path<'a> {
|
||||
Path {
|
||||
base: base,
|
||||
store: store,
|
||||
module: m,
|
||||
idtype: None,
|
||||
idhash: Some(id),
|
||||
id: None,
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* Transform Path into str, so we can call glob() on it
|
||||
*/
|
||||
impl<'a> Into<String> for Path<'a> {
|
||||
|
||||
fn into(self) -> String {
|
||||
let mut s = self.base.clone();
|
||||
s.push(self.store.clone());
|
||||
s.push(self.module.name());
|
||||
if self.id.is_some() {
|
||||
let idstr : String = self.id.unwrap().into();
|
||||
s.push(idstr);
|
||||
} else {
|
||||
s.push("*");
|
||||
}
|
||||
s.set_extension("imag");
|
||||
s.to_str().unwrap_or("").into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Into<Result<Paths, PatternError>> for Path<'a> {
|
||||
|
||||
fn into(self) -> Result<Paths, PatternError> {
|
||||
let s : String = self.into();
|
||||
glob(&s[..])
|
||||
}
|
||||
|
||||
}
|
|
@ -1 +0,0 @@
|
|||
pub mod parser;
|
|
@ -1,256 +0,0 @@
|
|||
use std::fmt::{Debug, Display, Formatter};
|
||||
use std::fmt;
|
||||
|
||||
use yaml_rust::Yaml;
|
||||
|
||||
use storage::parser::{FileHeaderParser, ParserError};
|
||||
use storage::file::header::spec::FileHeaderSpec;
|
||||
use storage::file::header::data::FileHeaderData;
|
||||
|
||||
pub struct YamlHeaderParser {
|
||||
spec: Option<FileHeaderSpec>,
|
||||
}
|
||||
|
||||
impl YamlHeaderParser {
|
||||
|
||||
pub fn new(spec: Option<FileHeaderSpec>) -> YamlHeaderParser {
|
||||
YamlHeaderParser {
|
||||
spec: spec
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Display for YamlHeaderParser {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "YamlHeaderParser"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Debug for YamlHeaderParser {
|
||||
|
||||
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
|
||||
try!(write!(fmt, "YamlHeaderParser, Spec: {:?}", self.spec));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl FileHeaderParser for YamlHeaderParser {
|
||||
|
||||
fn read(&self, string: Option<String>) -> Result<FileHeaderData, ParserError> {
|
||||
use yaml_rust::YamlLoader;
|
||||
if string.is_some() {
|
||||
let s = string.unwrap();
|
||||
YamlLoader::load_from_str(&s[..])
|
||||
.map(|mut vec_yaml| {
|
||||
vec_yaml.pop().map(|f| {
|
||||
visit_yaml(f)
|
||||
}).unwrap()
|
||||
})
|
||||
.map_err(|e| {
|
||||
debug!("YAML parser error: {:?}", e);
|
||||
ParserError::short(&s[..], s.clone(), 0)
|
||||
})
|
||||
} else {
|
||||
Ok(FileHeaderData::Null)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {
|
||||
use yaml_rust::YamlEmitter;
|
||||
|
||||
let mut buffer = String::new();
|
||||
let result = {
|
||||
let mut emitter = YamlEmitter::new(&mut buffer);
|
||||
emitter.dump(&visit_header(data))
|
||||
};
|
||||
result
|
||||
.map_err(|e| {
|
||||
error!("Error emitting YAML.");
|
||||
debug!("YAML parser error: {:?}", e);
|
||||
ParserError::short(&buffer[..], buffer.clone(), 0)
|
||||
})
|
||||
.map(|_| buffer)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
fn visit_yaml(v: Yaml) -> FileHeaderData {
|
||||
use std::process::exit;
|
||||
|
||||
match v {
|
||||
Yaml::Real(_) => FileHeaderData::Float(v.as_f64().unwrap()),
|
||||
Yaml::Integer(i) => {
|
||||
if i > 0 {
|
||||
debug!("Castring {} : i64 -> u64", i);
|
||||
FileHeaderData::UInteger(i as u64)
|
||||
} else {
|
||||
FileHeaderData::Integer(i)
|
||||
}
|
||||
},
|
||||
Yaml::String(s) => FileHeaderData::Text(s),
|
||||
Yaml::Boolean(b) => FileHeaderData::Bool(b),
|
||||
|
||||
Yaml::Array(vec) => {
|
||||
FileHeaderData::Array {
|
||||
values: Box::new(vec.clone().into_iter().map(|i| visit_yaml(i)).collect())
|
||||
}
|
||||
},
|
||||
|
||||
Yaml::Hash(btree) => {
|
||||
let btree = btree.clone();
|
||||
FileHeaderData::Map{
|
||||
keys: btree.into_iter().map(|(k, v)|
|
||||
FileHeaderData::Key {
|
||||
name: String::from(k.as_str().unwrap()),
|
||||
value: Box::new(visit_yaml(v)),
|
||||
}
|
||||
).collect()
|
||||
}
|
||||
},
|
||||
|
||||
Yaml::Alias(_) => {
|
||||
warn!("YAML::ALIAS is not yet fully supported by rust-yaml");
|
||||
FileHeaderData::Null
|
||||
},
|
||||
|
||||
Yaml::Null => FileHeaderData::Null,
|
||||
|
||||
Yaml::BadValue => {
|
||||
warn!("YAML parsing error");
|
||||
exit(1);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_header(h: &FileHeaderData) -> Yaml {
|
||||
use std::ops::Deref;
|
||||
use std::collections::BTreeMap;
|
||||
use std::process::exit;
|
||||
|
||||
match h {
|
||||
&FileHeaderData::Null => Yaml::Null,
|
||||
&FileHeaderData::Float(f) => Yaml::Real(format!("{}", f)),
|
||||
&FileHeaderData::Integer(i) => Yaml::Integer(i),
|
||||
&FileHeaderData::UInteger(u) => {
|
||||
debug!("Might be losing data now: u64 -> i64 cast");
|
||||
Yaml::Integer(u as i64)
|
||||
},
|
||||
&FileHeaderData::Text(ref s) => Yaml::String(s.clone()),
|
||||
&FileHeaderData::Bool(b) => Yaml::Boolean(b),
|
||||
|
||||
&FileHeaderData::Array{values: ref a} => {
|
||||
Yaml::Array(a.deref().into_iter().map(|e| visit_header(e)).collect())
|
||||
},
|
||||
|
||||
&FileHeaderData::Key{name: _, value: _} => {
|
||||
error!("Something went terribly wrong when trying to emit YAML");
|
||||
exit(1);
|
||||
},
|
||||
|
||||
&FileHeaderData::Map{ref keys} => {
|
||||
let mut map : BTreeMap<Yaml, Yaml> = BTreeMap::new();
|
||||
|
||||
let failed = keys.into_iter().map(|key| {
|
||||
match key {
|
||||
&FileHeaderData::Key{ref name, ref value} => {
|
||||
let k = Yaml::String(name.clone());
|
||||
let v = visit_header(value.deref());
|
||||
|
||||
map.insert(k, v).is_none()
|
||||
},
|
||||
|
||||
_ => {
|
||||
error!("Something went terribly wrong when trying to emit YAML");
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
})
|
||||
.fold(0, |acc, succeeded : bool| {
|
||||
if !succeeded { acc + 1 } else { acc }
|
||||
});
|
||||
|
||||
debug!("Failed to insert {} keys", failed);
|
||||
Yaml::Hash(map)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use std::ops::Deref;

    use super::YamlHeaderParser;
    use storage::parser::FileHeaderParser;
    use storage::file::header::data::FileHeaderData as FHD;
    use storage::file::header::spec::FileHeaderSpec as FHS;

    /// Parsing a two-key YAML document against a spec must yield a Map of
    /// UInteger values.
    #[test]
    fn test_deserialization() {
        let text = String::from("a: 1\nb: 2");
        let spec = FHS::Array { allowed_types: vec![
            FHS::Map {
                keys: vec![
                    FHS::Key {
                        name: String::from("a"),
                        value_type: Box::new(FHS::UInteger)
                    },
                ]
            }
        ]
        };

        let parser = YamlHeaderParser::new(Some(spec));
        let parsed = parser.read(Some(text));
        assert!(parsed.is_ok(), "Parsed is not ok: {:?}", parsed);
        debug!("Parsed: {:?}", parsed);

        match parsed.ok() {
            Some(FHD::Map{ref keys}) => {
                // A plain `for` loop instead of the former lazily evaluated
                // `map(..).all(|x| x == ())` chain, which abused iterator
                // adaptors purely for their side effects.
                for k in keys.into_iter() {
                    match k {
                        &FHD::Key{ref name, ref value} => {
                            assert!(name == "a" || name == "b", "Key unknown");
                            match value.deref() {
                                &FHD::UInteger(u) => assert!(u == 1 || u == 2),
                                &FHD::Integer(_) => assert!(false, "Found Integer, expected UInteger"),
                                _ => assert!(false, "Integers are not here"),
                            };
                        },
                        _ => assert!(false, "Key is not a Key"),
                    };
                }
            },
            _ => assert!(false, "Map is not a Map"),
        }
    }

    /// Round-trip: read then write must produce YAML semantically equal to
    /// the input (compared via yaml_rust, not textually).
    #[test]
    fn test_desser() {
        use yaml_rust::YamlLoader;

        let text   = String::from("a: [1, 32, 42]\nb: -2");
        let parser = YamlHeaderParser::new(None);

        let des = parser.read(Some(text.clone()));
        assert!(des.is_ok(), "Deserializing failed");

        let ser = parser.write(&des.unwrap());
        assert!(ser.is_ok(), "Parser error when serializing deserialized text");

        let yaml_text = YamlLoader::load_from_str(&text[..]);
        let yaml_ser  = YamlLoader::load_from_str(&ser.unwrap()[..]);

        assert!(yaml_text.is_ok(), "Could not use yaml_rust to serialize text for comparison");
        assert!(yaml_ser.is_ok(), "Could not use yaml_rust to serialize serialized-deserialized text for comparison");
        assert_eq!(yaml_text.unwrap(), yaml_ser.unwrap());
    }

}
|
||||
|
122
src/ui/external/editor.rs
vendored
122
src/ui/external/editor.rs
vendored
|
@ -1,122 +0,0 @@
|
|||
use runtime::Runtime;
|
||||
|
||||
/**
|
||||
* A function which lets the user provide content by editing a temp files which gets removed after
|
||||
* the function got the content from it.
|
||||
*/
|
||||
pub fn let_user_provide_content(rt: &Runtime) -> Option<String> {
|
||||
use std::io::Read;
|
||||
use std::fs::File;
|
||||
use std::process::exit;
|
||||
|
||||
let filepath = "/tmp/imag-tmp.md";
|
||||
let file_created = File::create(filepath)
|
||||
.map(|_| true)
|
||||
.unwrap_or(false);
|
||||
|
||||
if !file_created {
|
||||
warn!("Could not create temporary file for user input!");
|
||||
return None;
|
||||
}
|
||||
|
||||
let output = {
|
||||
let mut cmd = rt.editor();
|
||||
cmd.arg(filepath);
|
||||
debug!("cmd = {:?}", cmd);
|
||||
cmd.spawn()
|
||||
.and_then(|child| {
|
||||
child.wait_with_output()
|
||||
})
|
||||
};
|
||||
|
||||
let process_out = output.map_err(|e| {
|
||||
error!("Editor call failed");
|
||||
debug!("Editor call failed: {:?}", e);
|
||||
return None as Option<String>;
|
||||
}).unwrap();
|
||||
|
||||
if !process_out.status.success() {
|
||||
error!("Editor call failed");
|
||||
debug!("status = {:?}", process_out.status);
|
||||
debug!("stdout = {:?}", String::from_utf8(process_out.stdout));
|
||||
debug!("stderr = {:?}", String::from_utf8(process_out.stderr));
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut contents = String::new();
|
||||
File::open(filepath).map(|mut file| {
|
||||
file.read_to_string(&mut contents)
|
||||
.map_err(|e| {
|
||||
error!("Error reading content: {}", e);
|
||||
debug!("Error reading content: {:?}", e);
|
||||
exit(1);
|
||||
})
|
||||
.is_ok();
|
||||
Some(contents)
|
||||
}).unwrap_or(None)
|
||||
}
|
||||
|
||||
/**
|
||||
* Edit some content in a temporary file. If anything failes within this routine, it returns the
|
||||
* old content and false.
|
||||
* If the editing succeeded, it returns the new content and true
|
||||
*/
|
||||
pub fn edit_content(rt: &Runtime, old_content: String) -> (String, bool) {
|
||||
use std::io::Read;
|
||||
use std::io::Write;
|
||||
use std::fs::File;
|
||||
use std::process::exit;
|
||||
|
||||
let filepath = "/tmp/imag-tmp.md";
|
||||
{
|
||||
let mut file = match File::create(filepath) {
|
||||
Ok(f) => f,
|
||||
Err(e) => {
|
||||
error!("Error creating file {}", filepath);
|
||||
debug!("Error creating file at '{}', error = {}", filepath, e);
|
||||
exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
file.write(old_content.as_ref())
|
||||
.map_err(|e| {
|
||||
error!("Error writing content: {}", e);
|
||||
debug!("Error writing content: {:?}", e);
|
||||
exit(1);
|
||||
}).is_ok();
|
||||
}
|
||||
debug!("Ready with putting old content into the file");
|
||||
|
||||
let output = {
|
||||
let mut cmd = rt.editor();
|
||||
cmd.arg(filepath);
|
||||
debug!("cmd = {:?}", cmd);
|
||||
cmd.spawn()
|
||||
.and_then(|child| child.wait_with_output())
|
||||
};
|
||||
|
||||
let process_out = output.map_err(|e| {
|
||||
error!("Editor call failed");
|
||||
debug!("Editor call failed: {:?}", e);
|
||||
return None as Option<String>;
|
||||
}).unwrap();
|
||||
|
||||
if !process_out.status.success() {
|
||||
error!("Editor call failed");
|
||||
debug!("status = {:?}", process_out.status);
|
||||
debug!("stdout = {:?}", String::from_utf8(process_out.stdout));
|
||||
debug!("stderr = {:?}", String::from_utf8(process_out.stderr));
|
||||
return (old_content, false);
|
||||
}
|
||||
|
||||
let mut contents = String::new();
|
||||
File::open(filepath).map(|mut file| {
|
||||
file.read_to_string(&mut contents).map_err(|e| {
|
||||
error!("Error reading content: {}", e);
|
||||
debug!("Error reading content: {:?}", e);
|
||||
exit(1);
|
||||
}).is_ok();
|
||||
(contents, true)
|
||||
}).unwrap_or((old_content, false))
|
||||
}
|
||||
|
14
src/ui/external/mod.rs
vendored
14
src/ui/external/mod.rs
vendored
|
@ -1,14 +0,0 @@
|
|||
use std::fs::File;
|
||||
|
||||
pub mod editor;
|
||||
|
||||
pub fn get_tempfile(ext: &str) -> Option<(String, File)> {
|
||||
use rand::random;
|
||||
|
||||
let randomname = format!("/tmp/imag-{}.{}", random::<u64>(), ext);
|
||||
debug!("Attempting to create tempfile at {}", randomname);
|
||||
File::create(randomname.clone())
|
||||
.map_err(|e| debug!(" Error -> {}", e))
|
||||
.ok()
|
||||
.map(|f| (randomname, f))
|
||||
}
|
242
src/ui/file.rs
242
src/ui/file.rs
|
@ -1,242 +0,0 @@
|
|||
use std::cell::RefCell;
|
||||
use std::iter::Iterator;
|
||||
use std::rc::Rc;
|
||||
use std::ops::Deref;
|
||||
|
||||
use storage::file::File;
|
||||
|
||||
/**
|
||||
* Trait for a printer which can be used to print data from files
|
||||
*/
|
||||
pub trait FilePrinter {
|
||||
|
||||
/*
|
||||
* Print a single file
|
||||
*/
|
||||
fn print_file(&self, Rc<RefCell<File>>);
|
||||
|
||||
/*
|
||||
* Print a list of files
|
||||
*/
|
||||
fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {
|
||||
for file in files {
|
||||
self.print_file(file);
|
||||
}
|
||||
}
|
||||
|
||||
fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)
|
||||
where F: Fn(Rc<RefCell<File>>) -> Vec<String>
|
||||
{
|
||||
info!("{}", f(file).join(" "));
|
||||
}
|
||||
|
||||
fn print_files_custom<F, I>(&self, files: I, f: &F)
|
||||
where I: Iterator<Item = Rc<RefCell<File>>>,
|
||||
F: Fn(Rc<RefCell<File>>) -> Vec<String>
|
||||
{
|
||||
for file in files {
|
||||
self.print_file_custom(file, f);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Printer which prints in debug mode if enabled
|
||||
*/
|
||||
struct DebugPrinter {
|
||||
debug: bool,
|
||||
}
|
||||
|
||||
impl DebugPrinter {
|
||||
|
||||
pub fn new(debug: bool) -> DebugPrinter {
|
||||
DebugPrinter {
|
||||
debug: debug,
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl FilePrinter for DebugPrinter {
|
||||
|
||||
fn print_file(&self, f: Rc<RefCell<File>>) {
|
||||
if self.debug {
|
||||
debug!("[DebugPrinter] ->\n{:?}", f);
|
||||
}
|
||||
}
|
||||
|
||||
fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)
|
||||
where F: Fn(Rc<RefCell<File>>) -> Vec<String>
|
||||
{
|
||||
if self.debug {
|
||||
debug!("[DebugPrinter] ->\n{:?}", f(file).join(" "));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple printer, which just uses the info!() macro or debug!() macro if in debug mode.
|
||||
*/
|
||||
struct SimplePrinter {
|
||||
verbose: bool,
|
||||
debug: bool,
|
||||
}
|
||||
|
||||
impl SimplePrinter {
|
||||
|
||||
pub fn new(verbose: bool, debug: bool) -> SimplePrinter {
|
||||
SimplePrinter {
|
||||
debug: debug,
|
||||
verbose: verbose,
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl FilePrinter for SimplePrinter {
|
||||
|
||||
fn print_file(&self, f: Rc<RefCell<File>>) {
|
||||
use ansi_term::Colour::Cyan;
|
||||
|
||||
if self.debug {
|
||||
debug!("{:?}", f);
|
||||
} else if self.verbose {
|
||||
info!("{}", &*f.deref().borrow());
|
||||
} else {
|
||||
info!("{}: {}", Cyan.paint("[File]"), f.deref().borrow().id());
|
||||
}
|
||||
}
|
||||
|
||||
fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)
|
||||
where F: Fn(Rc<RefCell<File>>) -> Vec<String>
|
||||
{
|
||||
use ansi_term::Colour::Cyan;
|
||||
|
||||
let s = f(file).join(" ");
|
||||
if self.debug {
|
||||
debug!("{:?}", s);
|
||||
} else if self.verbose {
|
||||
info!("{}", s);
|
||||
} else {
|
||||
info!("{}: {}", Cyan.paint("[File]"), s);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Table printer to print file information in a nice ASCII-table
|
||||
*/
|
||||
pub struct TablePrinter {
|
||||
sp: SimplePrinter,
|
||||
pretty: bool,
|
||||
}
|
||||
|
||||
impl TablePrinter {
|
||||
|
||||
pub fn new(verbose: bool, debug: bool, pretty: bool) -> TablePrinter {
|
||||
TablePrinter {
|
||||
sp: SimplePrinter::new(verbose, debug),
|
||||
pretty: pretty,
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl FilePrinter for TablePrinter {
|
||||
|
||||
fn print_file(&self, f: Rc<RefCell<File>>) {
|
||||
self.sp.print_file(f);
|
||||
}
|
||||
|
||||
fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {
|
||||
use prettytable::Table;
|
||||
use prettytable::format::TableFormat;
|
||||
use prettytable::row::Row;
|
||||
use prettytable::cell::Cell;
|
||||
|
||||
let titles = row!["File#", "Owner", "ID"];
|
||||
|
||||
let mut tab = Table::new();
|
||||
|
||||
if !self.pretty {
|
||||
let plain_format = TableFormat::new(None, None, None);
|
||||
debug!("Setting plain format for table");
|
||||
tab.set_format(plain_format);
|
||||
}
|
||||
|
||||
tab.set_titles(titles);
|
||||
|
||||
let mut i = 0;
|
||||
for file in files {
|
||||
debug!("Printing file: {:?}", file);
|
||||
i += 1;
|
||||
let cell_i = Cell::new(&format!("{}", i)[..]);
|
||||
let cell_o = Cell::new(&format!("{}", file.deref().borrow().owner_name())[..]);
|
||||
|
||||
let id : String = file.deref().borrow().id().clone().into();
|
||||
let cell_id = Cell::new(&id[..]);
|
||||
let row = Row::new(vec![cell_i, cell_o, cell_id]);
|
||||
tab.add_row(row);
|
||||
}
|
||||
|
||||
if i != 0 {
|
||||
debug!("Printing {} table entries", i);
|
||||
tab.printstd();
|
||||
} else {
|
||||
debug!("Not printing table because there are zero entries");
|
||||
}
|
||||
}
|
||||
|
||||
fn print_files_custom<F, I>(&self, files: I, f: &F)
|
||||
where I: Iterator<Item = Rc<RefCell<File>>>,
|
||||
F: Fn(Rc<RefCell<File>>) -> Vec<String>
|
||||
{
|
||||
use prettytable::Table;
|
||||
use prettytable::format::TableFormat;
|
||||
use prettytable::row::Row;
|
||||
use prettytable::cell::Cell;
|
||||
|
||||
let titles = row!["#", "Module", "ID", "..."];
|
||||
|
||||
let mut tab = Table::new();
|
||||
|
||||
if !self.pretty {
|
||||
let plain_format = TableFormat::new(None, None, None);
|
||||
debug!("Setting plain format for table");
|
||||
tab.set_format(plain_format);
|
||||
}
|
||||
|
||||
tab.set_titles(titles);
|
||||
|
||||
let mut i = 0;
|
||||
for file in files {
|
||||
debug!("Printing file: {:?}", file);
|
||||
i += 1;
|
||||
let cell_i = Cell::new(&format!("{}", i)[..]);
|
||||
let cell_o = Cell::new(&format!("{}", file.deref().borrow().owner_name())[..]);
|
||||
|
||||
let id : String = file.deref().borrow().id().clone().into();
|
||||
let cell_id = Cell::new(&id[..]);
|
||||
|
||||
let mut row = Row::new(vec![cell_i, cell_o, cell_id]);
|
||||
|
||||
for cell in f(file).iter() {
|
||||
debug!("Adding custom cell: {:?}", cell);
|
||||
row.add_cell(Cell::new(&cell[..]))
|
||||
}
|
||||
|
||||
tab.add_row(row);
|
||||
}
|
||||
|
||||
if i != 0 {
|
||||
debug!("Printing {} table entries", i);
|
||||
tab.printstd();
|
||||
} else {
|
||||
debug!("Not printing table because there are zero entries");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,2 +0,0 @@
|
|||
pub mod file;
|
||||
pub mod external;
|
|
@ -1,9 +0,0 @@
|
|||
use url::Url;
|
||||
|
||||
/**
|
||||
* Util: Check wether a String can be parsed as an URL
|
||||
*/
|
||||
pub fn is_url(url: &String) -> bool {
|
||||
Url::parse(&url[..]).is_ok()
|
||||
}
|
||||
|
Loading…
Reference in a new issue