Merge branch 'refactor'

Matthias Beyer 2015-12-05 15:20:10 +01:00
commit 566a2c2cce
15 changed files with 170 additions and 208 deletions


@@ -1,7 +1,7 @@
{ pkgs ? (import <nixpkgs> {}) }:
let
env = with pkgs.rustUnstable; [
env = with pkgs.rustStable; [
rustc
cargo
pkgs.llvmPackages.lldb


@@ -1,10 +1,8 @@
use std::fmt::{Debug, Formatter, Error};
extern crate clap;
use clap::{App, ArgMatches};
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fmt::Error;
pub struct ModuleConfig {
pub load : bool,
}


@@ -1,14 +1,9 @@
extern crate clap;
use cli::CliConfig;
use std::fmt::{Debug, Formatter, Error};
use std::path::Path;
use config::reader::from_file;
use config::types::Config as Cfg;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fmt::Error;
use cli::CliConfig;
pub struct Configuration {
pub rtp : String,
@@ -20,8 +15,6 @@ pub struct Configuration {
impl Configuration {
pub fn new(config: &CliConfig) -> Configuration {
use std::env::home_dir;
let rtp = rtp_path(config).or(default_path());
let mut verbose = false;


@@ -1,5 +1,3 @@
#![feature(box_patterns)]
#[macro_use] extern crate clap;
#[macro_use] extern crate log;
#[macro_use] extern crate serde;
@@ -11,6 +9,8 @@
extern crate url;
extern crate config;
use std::process::exit;
use cli::CliConfig;
use configuration::Configuration;
use runtime::{ImagLogger, Runtime};
@@ -28,8 +28,6 @@ mod module;
mod storage;
mod ui;
use std::process::exit;
fn main() {
let yaml = load_yaml!("../etc/cli.yml");
let app = App::from_yaml(yaml);


@@ -1,22 +1,17 @@
use runtime::Runtime;
use storage::backend::{StorageBackendError, StorageBackend};
use module::Module;
use module::ModuleError;
use module::CommandResult;
use module::CommandEnv;
use module::bm::header::build_header;
use module::bm::header::get_tags_from_header;
use storage::json::parser::JsonHeaderParser;
use storage::parser::{Parser, FileHeaderParser};
use storage::file::File;
use ui::file::{FilePrinter, TablePrinter};
use std::vec::IntoIter;
use clap::ArgMatches;
use regex::Regex;
use module::{CommandEnv, CommandResult, Module, ModuleError};
use module::bm::header::{build_header, get_tags_from_header};
use runtime::Runtime;
use storage::backend::StorageBackendError;
use storage::file::File;
use storage::json::parser::JsonHeaderParser;
use storage::parser::Parser;
use ui::file::{FilePrinter, TablePrinter};
pub fn add_command(module: &Module, env: CommandEnv) -> CommandResult {
use url::Url;
@@ -54,11 +49,7 @@ pub fn list_command(module: &Module, env: CommandEnv) -> CommandResult {
}
pub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {
let checked : bool = run_removal_checking(&env);
debug!("Checked mode: {}", checked);
if let Some(id) = get_id(env.rt, env.matches) {
debug!("Remove by id: {}", id);
fn remove_by_id(module: &Module, env: CommandEnv, id: String, checked: bool) -> CommandResult {
let parser = Parser::new(JsonHeaderParser::new(None));
let file = env.bk
.get_file_by_id(module, &id.into(), &parser)
@@ -78,9 +69,9 @@ pub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {
info!("Remove worked");
Ok(())
}
} else {
debug!("Remove more than one file");
}
fn remove_by_filtering(module: &Module, env: CommandEnv, checked: bool) -> CommandResult {
get_filtered_files_from_backend(module, &env).and_then(|files| {
let nfiles = files.len();
info!("Removing {} Files", nfiles);
@@ -97,10 +88,8 @@ pub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {
})
.collect::<Vec<StorageBackendError>>();
let nerrs = errs.len();
if nerrs != 0 {
warn!("{} Errors occured while removing {} files", nerrs, nfiles);
if errs.len() != 0 {
warn!("{} Errors occured while removing {} files", errs.len(), nfiles);
let moderr = ModuleError::new("File removal failed");
// TODO : Collect StorageBackendErrors
@@ -111,6 +100,17 @@ pub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {
}
})
}
let checked : bool = run_removal_checking(&env);
debug!("Checked mode: {}", checked);
if let Some(id) = get_id(env.rt, env.matches) {
debug!("Remove by id: {}", id);
remove_by_id(module, env, id, checked)
} else {
debug!("Remove more than one file");
remove_by_filtering(module, env, checked)
}
}
/*
@@ -123,27 +123,34 @@ fn get_filtered_files_from_backend<'a>(module: &'a Module,
env: &CommandEnv)
-> Result<IntoIter<File<'a>>, ModuleError>
{
fn check_tags(tags: &Vec<String>, file: &File) -> bool {
if tags.len() != 0 {
debug!("Checking tags of: {:?}", file.id());
get_tags_from_header(&file.header())
.iter()
.any(|t| tags.contains(t))
} else {
true
}
}
let parser = Parser::new(JsonHeaderParser::new(None));
let tags = get_tags(env.rt, env.matches);
debug!("Tags: {:?}", tags);
env.bk.iter_files(module, &parser)
env.bk
.iter_files(module, &parser)
.map(|files| {
let f = files.filter(|file| {
files.filter(|file| {
debug!("Backend returns file: {:?}", file);
if tags.len() != 0 {
debug!("Checking tags of: {:?}", file.id());
get_tags_from_header(&file.header()).iter()
.any(|t| tags.contains(t))
} else {
true
}
check_tags(&tags, file)
}).filter(|file| {
debug!("Checking matches of: {:?}", file.id());
get_matcher(env.rt, env.matches)
.and_then(|r| Some(file.matches_with(&r)))
.map(|r| file.matches_with(&r))
.unwrap_or(true)
}).collect::<Vec<File>>();
f.into_iter()
})
.collect::<Vec<File>>()
.into_iter()
}).map_err(|e| {
debug!("Error from Backend: {:?}", e);
let mut merr = ModuleError::new("Could not filter files");
@@ -153,21 +160,25 @@ fn get_filtered_files_from_backend<'a>(module: &'a Module,
}
fn get_tags<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Vec<String> {
debug!("Fetching tags from commandline");
sub.value_of("tags").and_then(|tags|
Some(tags.split(",")
.into_iter()
.map(|s| s.to_string())
.filter(|e|
if e.contains(" ") {
warn!("Tag contains spaces: '{}'", e);
false
} else {
true
}).collect()
)
).or(Some(vec![])).unwrap()
fn reject_if_with_spaces(e: &String) -> bool {
if e.contains(" ") {
warn!("Tag contains spaces: '{}'", e);
false
} else {
true
}
}
debug!("Fetching tags from commandline");
sub.value_of("tags").and_then(|tags| {
Some(tags.split(",")
.into_iter()
.map(|s| s.to_string())
.filter(|e| reject_if_with_spaces(e))
.collect()
)
}).or(Some(vec![])).unwrap()
}
fn get_matcher<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<Regex> {

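The tag handling above splits the comma-separated value of the `tags` argument and drops any entry containing a space, now via the extracted `reject_if_with_spaces` helper. Below is a minimal standalone sketch of that behaviour, not taken from the commit: the `split_tags` name, the plain `eprintln!` in place of the `warn!` macro, and the example inputs are illustrative assumptions.

// Sketch only: mirrors the comma-split + space-rejection logic of
// get_tags()/reject_if_with_spaces(); names and inputs are illustrative.
fn split_tags(raw: Option<&str>) -> Vec<String> {
    raw.map(|tags| {
        tags.split(',')
            .map(|s| s.to_string())
            .filter(|e| {
                if e.contains(' ') {
                    // the real code uses warn!() from the log crate here
                    eprintln!("Tag contains spaces: '{}'", e);
                    false
                } else {
                    true
                }
            })
            .collect()
    })
    .unwrap_or_else(Vec::new)
}

fn main() {
    // "work item" is rejected because it contains a space
    assert_eq!(split_tags(Some("rust,imag,work item")),
               vec!["rust".to_string(), "imag".to_string()]);
    assert!(split_tags(None).is_empty());
}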

@@ -1,7 +1,8 @@
use storage::file::FileHeaderSpec as FHS;
use storage::file::FileHeaderData as FHD;
use std::ops::Deref;
use storage::file::FileHeaderData as FHD;
use storage::file::FileHeaderSpec as FHS;
pub fn get_spec() -> FHS {
FHS::Map { keys: vec![ url_key(), tags_key() ] }
}


@@ -1,23 +1,11 @@
use runtime::Runtime;
use module::Module;
use module::CommandMap;
use module::ModuleResult;
use module::ModuleError;
use std::path::Path;
use std::result::Result;
use std::fmt::Result as FMTResult;
use std::fmt::Formatter;
use std::fmt::Debug;
use clap::ArgMatches;
use regex::Regex;
mod header;
mod commands;
use self::header::build_header;
use storage::json::parser::JsonHeaderParser;
use storage::parser::FileHeaderParser;
use std::fmt::{Debug, Formatter};
use std::fmt::Result as FMTResult;
use module::{CommandMap, Module, ModuleResult};
use runtime::Runtime;
use self::commands::*;
pub struct BMModule {


@@ -1,16 +1,13 @@
use runtime::Runtime;
use std::error::Error;
use std::fmt::Formatter;
use std::fmt::Result as FMTResult;
use std::fmt::Display;
use std::fmt::Debug;
use std::path::Path;
use std::result::Result;
use std::collections::HashMap;
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::fmt::Result as FMTResult;
use std::result::Result;
use clap::{App, ArgMatches};
use clap::ArgMatches;
use storage::backend::{StorageBackend, StorageBackendError};
use runtime::Runtime;
use storage::backend::StorageBackend;
pub mod bm;


@@ -1,14 +1,11 @@
use std::fmt::{Debug, Formatter, Error};
extern crate log;
use log::{LogRecord, LogLevel, LogLevelFilter, LogMetadata, SetLoggerError};
pub use cli::CliConfig;
pub use configuration::Configuration as Cfg;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fmt::Error;
use log::{LogRecord, LogLevel, LogLevelFilter, LogMetadata, SetLoggerError};
pub struct ImagLogger {
lvl: LogLevel,
}


@@ -1,26 +1,19 @@
use std::error::Error;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt::{Display, Formatter};
use std::fmt::Result as FMTResult;
use std::path::Path;
use std::path::PathBuf;
use std::vec::Vec;
use std::fs::File as FSFile;
use std::fs::create_dir_all;
use std::fs::remove_file;
use std::io::Read;
use std::io::Write;
use std::vec::IntoIter;
use std::fs::{create_dir_all, remove_file};
use std::io::{Read, Write};
use std::vec::{Vec, IntoIter};
use glob::glob;
use glob::Paths;
use storage::file::File;
use storage::file_id::*;
use storage::parser::{FileHeaderParser, Parser, ParserError};
use module::Module;
use runtime::Runtime;
use storage::file::File;
use storage::file_id::*;
use storage::parser::{FileHeaderParser, Parser};
pub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;
@@ -32,6 +25,8 @@ pub struct StorageBackend {
impl StorageBackend {
pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {
use self::StorageBackendError as SBE;
let storepath = rt.get_rtp() + "/store/";
debug!("Trying to create {}", storepath);
create_dir_all(&storepath).and_then(|_| {
@@ -42,18 +37,15 @@ impl StorageBackend {
})
}).or_else(|e| {
debug!("Creating failed, constructing error instance");
let mut serr = StorageBackendError::new(
"create_dir_all()",
"Could not create store directories",
Some(storepath)
);
serr.caused_by = Some(Box::new(e));
Err(serr)
Err(SBE::new("create_dir_all()", "Could not create store directories",
Some(storepath), Some(Box::new(e))))
})
}
pub fn iter_ids(&self, m: &Module) -> Result<IntoIter<FileID>, StorageBackendError>
{
use self::StorageBackendError as SBE;
let globstr = self.prefix_of_files_for_module(m) + "*.imag";
debug!("Globstring = {}", globstr);
glob(&globstr[..])
@@ -63,13 +55,7 @@ impl StorageBackend {
})
.map_err(|e| {
debug!("glob() returned error: {:?}", e);
let serr = StorageBackendError::new(
"iter_ids()",
"Cannot iter on file ids",
None);
// Why the hack is Error not implemented for glob::PatternError
// serr.caused_by = Some(Box::new(e));
serr
SBE::new("iter_ids()", "Cannot iter on file ids", None, None)
})
}
@@ -77,21 +63,18 @@ impl StorageBackend {
-> Result<IntoIter<File<'a>>, StorageBackendError>
where HP: FileHeaderParser
{
use self::StorageBackendError as SBE;
self.iter_ids(m)
.and_then(|ids| {
debug!("Iterating ids and building files from them");
debug!(" number of ids = {}", ids.len());
Ok(ids.filter_map(|id| self.get_file_by_id(m, &id, p))
.collect::<Vec<File>>()
.into_iter())
Ok(self.filter_map_ids_to_files(m, p, ids).into_iter())
})
.map_err(|e| {
debug!("StorageBackend::iter_ids() returned error = {:?}", e);
let mut serr = StorageBackendError::new("iter_files()",
"Cannot iter on files",
None);
serr.caused_by = Some(Box::new(e));
serr
SBE::new("iter_files()", "Cannot iter on files", None,
Some(Box::new(e)))
})
}
@@ -103,6 +86,8 @@ impl StorageBackend {
pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult
where HP: FileHeaderParser
{
use self::StorageBackendError as SBE;
let written = write_with_parser(&f, p);
if written.is_err() { return Err(written.err().unwrap()); }
let string = written.unwrap();
@@ -116,23 +101,14 @@ impl StorageBackend {
file.write_all(&string.clone().into_bytes())
.map_err(|ioerr| {
debug!("Could not write file");
let mut err = StorageBackendError::new(
"File::write_all()",
"Could not write out File contents",
None
);
err.caused_by = Some(Box::new(ioerr));
err
SBE::new("File::write_all()",
"Could not write out File contents",
None, Some(Box::new(ioerr)))
})
}).map_err(|writeerr| {
debug!("Could not create file at '{}'", path);
let mut err = StorageBackendError::new(
"File::create()",
"Creating file on disk failed",
None
);
err.caused_by = Some(Box::new(writeerr));
err
SBE::new("File::create()", "Creating file on disk failed", None,
Some(Box::new(writeerr)))
}).and(Ok(()))
}
@@ -143,6 +119,8 @@ impl StorageBackend {
pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult
where HP: FileHeaderParser
{
use self::StorageBackendError as SBE;
let contents = write_with_parser(&f, p);
if contents.is_err() { return Err(contents.err().unwrap()); }
let string = contents.unwrap();
@@ -156,23 +134,15 @@ impl StorageBackend {
file.write_all(&string.clone().into_bytes())
.map_err(|ioerr| {
debug!("Could not write file");
let mut err = StorageBackendError::new(
"File::write()",
"Tried to write contents of this file, though operation did not succeed",
Some(string)
);
err.caused_by = Some(Box::new(ioerr));
err
SBE::new("File::write()",
"Tried to write contents of this file, though operation did not succeed",
Some(string), Some(Box::new(ioerr)))
})
}).map_err(|writeerr| {
debug!("Could not write file at '{}'", path);
let mut err = StorageBackendError::new(
"File::open()",
"Tried to update contents of this file, though file doesn't exist",
None
);
err.caused_by = Some(Box::new(writeerr));
err
SBE::new("File::open()",
"Tried to update contents of this file, though file doesn't exist",
None, Some(Box::new(writeerr)))
}).and(Ok(()))
}
@@ -186,8 +156,6 @@ impl StorageBackend {
pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>
where HP: FileHeaderParser
{
use std::ops::Index;
debug!("Searching for file with id '{}'", id);
if id.get_type() == FileIDType::NONE {
@@ -198,9 +166,8 @@ impl StorageBackend {
let globstr = self.prefix_of_files_for_module(m) + "*" + &id_str[..] + ".imag";
debug!("Globbing with globstr = '{}'", globstr);
glob(&globstr[..]).map(|globlist| {
let mut vec = globlist_to_file_id_vec(globlist).into_iter()
.filter_map(|id| self.get_file_by_id(m, &id, p))
.collect::<Vec<File>>();
let idvec = globlist_to_file_id_vec(globlist).into_iter();
let mut vec = self.filter_map_ids_to_files(m, p, idvec);
vec.reverse();
vec.pop()
}).unwrap_or({
@@ -211,12 +178,16 @@ impl StorageBackend {
// The (hash)type is already in the FileID object, so we can just
// build a path from the information we already have
debug!("We know FileIDType, so we build the path directly now");
if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {
let filepath = self.build_filepath_with_id(m, id.clone());
if let Ok(mut fs) = FSFile::open(filepath) {
let mut s = String::new();
fs.read_to_string(&mut s);
debug!("Success opening file with id '{}'", id);
debug!("Parsing to internal structure now");
p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()
p.read(s).and_then(|(h, d)| {
Ok(File::from_parser_result(m, id.clone(), h, d))
}).ok()
} else {
debug!("No file with id '{}'", id);
None
@@ -225,6 +196,8 @@ impl StorageBackend {
}
pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {
use self::StorageBackendError as SBE;
if checked {
error!("Checked remove not implemented yet. I will crash now");
unimplemented!()
@@ -235,13 +208,8 @@ impl StorageBackend {
let fp = self.build_filepath(&file);
remove_file(fp).map_err(|e| {
let mut serr = StorageBackendError::new(
"remove_file()",
"File removal failed",
Some(format!("{}", file))
);
serr.caused_by = Some(Box::new(e));
serr
SBE::new("remove_file()", "File removal failed",
Some(format!("{}", file)), Some(Box::new(e)))
})
}
@@ -270,6 +238,17 @@ impl StorageBackend {
self.storepath.clone() + m.name()
}
fn filter_map_ids_to_files<'a, HP>(&self,
m: &'a Module,
p: &Parser<HP>,
ids: IntoIter<FileID>)
-> Vec<File<'a>>
where HP: FileHeaderParser
{
ids.filter_map(|id| self.get_file_by_id(m, &id, p))
.collect::<Vec<File>>()
}
}
#[derive(Debug)]
@@ -282,7 +261,11 @@ pub struct StorageBackendError {
impl StorageBackendError {
fn new<S>(action: S, desc: S, data: Option<String>) -> StorageBackendError
fn new<S>(action: S,
desc: S,
data: Option<String>,
cause: Option<Box<Error>>)
-> StorageBackendError
where S: Into<String>
{
StorageBackendError {
@@ -318,15 +301,13 @@ impl<'a> Display for StorageBackendError {
fn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>
where HP: FileHeaderParser
{
use self::StorageBackendError as SBE;
p.write(f.contents())
.or_else(|err| {
let mut serr = StorageBackendError::new(
"Parser::write()",
"Cannot translate internal representation of file contents into on-disk representation",
None
);
serr.caused_by = Some(Box::new(err));
Err(serr)
Err(SBE::new("Parser::write()",
"Cannot translate internal representation of file contents into on-disk representation",
None, Some(Box::new(err))))
})
}
@@ -335,3 +316,4 @@ fn globlist_to_file_id_vec(globlist: Paths) -> Vec<FileID> {
.map(|pbuf| FileID::from(&pbuf))
.collect::<Vec<FileID>>()
}
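The StorageBackendError changes above replace the repeated "construct, then assign `caused_by`, then return" pattern with a single constructor call by adding a fourth `cause` parameter to `StorageBackendError::new()`. A minimal sketch of what the refactored type might look like follows; it is not taken from the commit: only `caused_by` and the `new()` signature appear in the diff, the remaining field names and the example in `main()` are assumptions, and it spells the trait object `Box<dyn Error>` where the 2015-era code above writes `Box<Error>`.

use std::error::Error;

// Sketch only: field names other than `caused_by` are assumed for illustration.
#[derive(Debug)]
pub struct StorageBackendError {
    pub action: String,                    // assumed: name of the failing action
    pub desc: String,                      // assumed: human-readable description
    pub data_dump: Option<String>,         // assumed: optional dumped data
    pub caused_by: Option<Box<dyn Error>>, // referenced as `caused_by` in the diff
}

impl StorageBackendError {
    fn new<S>(action: S, desc: S, data: Option<String>, cause: Option<Box<dyn Error>>)
        -> StorageBackendError
        where S: Into<String>
    {
        StorageBackendError {
            action: action.into(),
            desc: desc.into(),
            data_dump: data,
            caused_by: cause,
        }
    }
}

fn main() {
    use std::io::{Error as IoError, ErrorKind};

    // Call sites collapse into one expression, roughly as in the diff:
    let io = IoError::new(ErrorKind::Other, "disk unavailable");
    let e = StorageBackendError::new("create_dir_all()",
                                     "Could not create store directories",
                                     Some("/tmp/store/".to_string()),
                                     Some(Box::new(io)));
    println!("{:?}", e);
}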


@@ -2,12 +2,12 @@ use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::fmt;
use module::Module;
use super::parser::{FileHeaderParser, Parser, ParserError};
use storage::file_id::*;
use regex::Regex;
use module::Module;
use storage::file_id::*;
use super::parser::{FileHeaderParser, Parser, ParserError};
#[derive(Debug)]
#[derive(Clone)]
pub enum FileHeaderSpec {


@@ -1,10 +1,9 @@
use std::convert::{From, Into};
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::fmt;
use std::path::PathBuf;
use std::result::Result;
use std::path::{Path, PathBuf};
use std::convert::From;
use std::convert::Into;
use regex::Regex;


@@ -1,16 +1,14 @@
use std::collections::HashMap;
use std::error::Error;
use serde_json::{Value, from_str};
use serde_json::error::Result as R;
use serde_json::Serializer;
use serde::ser::Serialize;
use serde::ser::Serializer as Ser;
use std::collections::HashMap;
use std::io::stdout;
use std::error::Error;
use super::super::parser::{FileHeaderParser, ParserError};
use super::super::file::{FileHeaderSpec, FileHeaderData};
use storage::parser::{FileHeaderParser, ParserError};
use storage::file::{FileHeaderSpec, FileHeaderData};
pub struct JsonHeaderParser {
spec: Option<FileHeaderSpec>,


@@ -1,9 +1,10 @@
use regex::Regex;
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::fmt;
use super::file::{FileHeaderSpec, FileHeaderData};
use regex::Regex;
use super::file::FileHeaderData;
pub struct ParserError {
summary: String,


@@ -1,6 +1,5 @@
use std::iter::Iterator;
use runtime::Runtime;
use storage::file::File;
pub trait FilePrinter {
@@ -29,7 +28,7 @@ struct DebugPrinter {
impl FilePrinter for DebugPrinter {
fn new(verbose: bool, debug: bool) -> DebugPrinter {
fn new(_: bool, debug: bool) -> DebugPrinter {
DebugPrinter {
debug: debug,
}