Merge branch 'refactor'
commit 566a2c2cce
15 changed files with 170 additions and 208 deletions
@@ -1,7 +1,7 @@
 { pkgs ? (import <nixpkgs> {}) }:
 
 let
-  env = with pkgs.rustUnstable; [
+  env = with pkgs.rustStable; [
     rustc
     cargo
     pkgs.llvmPackages.lldb
@@ -1,10 +1,8 @@
+use std::fmt::{Debug, Formatter, Error};
+
 extern crate clap;
 use clap::{App, ArgMatches};
 
-use std::fmt::Debug;
-use std::fmt::Formatter;
-use std::fmt::Error;
-
 pub struct ModuleConfig {
     pub load : bool,
 }
@@ -1,14 +1,9 @@
-extern crate clap;
+use std::fmt::{Debug, Formatter, Error};
 
-use cli::CliConfig;
-
 use std::path::Path;
 
 use config::reader::from_file;
 use config::types::Config as Cfg;
-use std::fmt::Debug;
-use std::fmt::Formatter;
-use std::fmt::Error;
-
+use cli::CliConfig;
 pub struct Configuration {
     pub rtp : String,
@@ -20,8 +15,6 @@ pub struct Configuration {
 impl Configuration {
 
     pub fn new(config: &CliConfig) -> Configuration {
-        use std::env::home_dir;
-
         let rtp = rtp_path(config).or(default_path());
 
         let mut verbose = false;
@@ -1,5 +1,3 @@
-#![feature(box_patterns)]
-
 #[macro_use] extern crate clap;
 #[macro_use] extern crate log;
 #[macro_use] extern crate serde;
@@ -11,6 +9,8 @@
 extern crate url;
 extern crate config;
 
+use std::process::exit;
+
 use cli::CliConfig;
 use configuration::Configuration;
 use runtime::{ImagLogger, Runtime};
@@ -28,8 +28,6 @@ mod module;
 mod storage;
 mod ui;
 
-use std::process::exit;
-
 fn main() {
     let yaml = load_yaml!("../etc/cli.yml");
     let app = App::from_yaml(yaml);
@@ -1,22 +1,17 @@
-use runtime::Runtime;
-use storage::backend::{StorageBackendError, StorageBackend};
-
-use module::Module;
-use module::ModuleError;
-use module::CommandResult;
-use module::CommandEnv;
-
-use module::bm::header::build_header;
-use module::bm::header::get_tags_from_header;
-use storage::json::parser::JsonHeaderParser;
-use storage::parser::{Parser, FileHeaderParser};
-use storage::file::File;
-use ui::file::{FilePrinter, TablePrinter};
 use std::vec::IntoIter;
 
 use clap::ArgMatches;
 use regex::Regex;
 
+use module::{CommandEnv, CommandResult, Module, ModuleError};
+use module::bm::header::{build_header, get_tags_from_header};
+use runtime::Runtime;
+use storage::backend::StorageBackendError;
+use storage::file::File;
+use storage::json::parser::JsonHeaderParser;
+use storage::parser::Parser;
+use ui::file::{FilePrinter, TablePrinter};
+
 pub fn add_command(module: &Module, env: CommandEnv) -> CommandResult {
     use url::Url;
 
@@ -54,11 +49,7 @@ pub fn list_command(module: &Module, env: CommandEnv) -> CommandResult {
 }
 
 pub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {
-    let checked : bool = run_removal_checking(&env);
-    debug!("Checked mode: {}", checked);
-    if let Some(id) = get_id(env.rt, env.matches) {
-        debug!("Remove by id: {}", id);
-
+    fn remove_by_id(module: &Module, env: CommandEnv, id: String, checked: bool) -> CommandResult {
         let parser = Parser::new(JsonHeaderParser::new(None));
         let file = env.bk
             .get_file_by_id(module, &id.into(), &parser)
@@ -78,9 +69,9 @@ pub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {
                 info!("Remove worked");
                 Ok(())
             }
-        } else {
-            debug!("Remove more than one file");
+    }
 
+    fn remove_by_filtering(module: &Module, env: CommandEnv, checked: bool) -> CommandResult {
         get_filtered_files_from_backend(module, &env).and_then(|files| {
             let nfiles = files.len();
             info!("Removing {} Files", nfiles);
@@ -97,10 +88,8 @@ pub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {
                 })
                 .collect::<Vec<StorageBackendError>>();
 
-            let nerrs = errs.len();
-
-            if nerrs != 0 {
-                warn!("{} Errors occured while removing {} files", nerrs, nfiles);
+            if errs.len() != 0 {
+                warn!("{} Errors occured while removing {} files", errs.len(), nfiles);
                 let moderr = ModuleError::new("File removal failed");
 
                 // TODO : Collect StorageBackendErrors
@@ -111,6 +100,17 @@ pub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {
             }
         })
     }
+
+    let checked : bool = run_removal_checking(&env);
+    debug!("Checked mode: {}", checked);
+
+    if let Some(id) = get_id(env.rt, env.matches) {
+        debug!("Remove by id: {}", id);
+        remove_by_id(module, env, id, checked)
+    } else {
+        debug!("Remove more than one file");
+        remove_by_filtering(module, env, checked)
+    }
 }
 
 /*
@@ -123,27 +123,34 @@ fn get_filtered_files_from_backend<'a>(module: &'a Module,
                                        env: &CommandEnv)
     -> Result<IntoIter<File<'a>>, ModuleError>
 {
+    fn check_tags(tags: &Vec<String>, file: &File) -> bool {
+        if tags.len() != 0 {
+            debug!("Checking tags of: {:?}", file.id());
+            get_tags_from_header(&file.header())
+                .iter()
+                .any(|t| tags.contains(t))
+        } else {
+            true
+        }
+    }
+
     let parser = Parser::new(JsonHeaderParser::new(None));
     let tags = get_tags(env.rt, env.matches);
     debug!("Tags: {:?}", tags);
-    env.bk.iter_files(module, &parser)
+    env.bk
+        .iter_files(module, &parser)
         .map(|files| {
-            let f = files.filter(|file| {
+            files.filter(|file| {
                 debug!("Backend returns file: {:?}", file);
-                if tags.len() != 0 {
-                    debug!("Checking tags of: {:?}", file.id());
-                    get_tags_from_header(&file.header()).iter()
-                        .any(|t| tags.contains(t))
-                } else {
-                    true
-                }
+                check_tags(&tags, file)
             }).filter(|file| {
                 debug!("Checking matches of: {:?}", file.id());
                 get_matcher(env.rt, env.matches)
-                    .and_then(|r| Some(file.matches_with(&r)))
+                    .map(|r| file.matches_with(&r))
                     .unwrap_or(true)
-            }).collect::<Vec<File>>();
-            f.into_iter()
+            })
+            .collect::<Vec<File>>()
+            .into_iter()
         }).map_err(|e| {
             debug!("Error from Backend: {:?}", e);
             let mut merr = ModuleError::new("Could not filter files");
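One small change in this hunk swaps .and_then(|r| Some(file.matches_with(&r))) for .map(|r| file.matches_with(&r)). For any Option, and_then with a closure that always wraps its result in Some is equivalent to map; a standalone illustration:

fn main() {
    let matcher: Option<u32> = Some(3);

    // The old and the refactored spelling produce the same value ...
    let via_and_then = matcher.and_then(|r| Some(r + 1));
    let via_map = matcher.map(|r| r + 1);
    assert_eq!(via_and_then, via_map);

    // ... including in the None case, where neither closure runs.
    let missing: Option<u32> = None;
    assert_eq!(missing.and_then(|r| Some(r + 1)), missing.map(|r| r + 1));
}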
@@ -153,21 +160,25 @@ fn get_filtered_files_from_backend<'a>(module: &'a Module,
 }
 
 fn get_tags<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Vec<String> {
-    debug!("Fetching tags from commandline");
-    sub.value_of("tags").and_then(|tags|
-        Some(tags.split(",")
-             .into_iter()
-             .map(|s| s.to_string())
-             .filter(|e|
-                if e.contains(" ") {
-                    warn!("Tag contains spaces: '{}'", e);
-                    false
-                } else {
-                    true
-                }).collect()
-            )
-        ).or(Some(vec![])).unwrap()
+
+    fn reject_if_with_spaces(e: &String) -> bool {
+        if e.contains(" ") {
+            warn!("Tag contains spaces: '{}'", e);
+            false
+        } else {
+            true
+        }
+    }
+
+    debug!("Fetching tags from commandline");
+    sub.value_of("tags").and_then(|tags| {
+        Some(tags.split(",")
+             .into_iter()
+             .map(|s| s.to_string())
+             .filter(|e| reject_if_with_spaces(e))
+             .collect()
+            )
+    }).or(Some(vec![])).unwrap()
 }
 
 fn get_matcher<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<Regex> {
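The get_tags change above pulls the space-checking closure out into the named helper reject_if_with_spaces, so the iterator chain stays short. A standalone sketch of the same pattern, with illustrative names only:

fn has_no_spaces(tag: &String) -> bool {
    !tag.contains(' ')
}

fn main() {
    let tags: Vec<String> = "rust,needs work,imag"
        .split(',')
        .map(|s| s.to_string())
        .filter(|t| has_no_spaces(t))
        .collect();

    // "needs work" contains a space and is rejected by the named predicate.
    assert_eq!(tags, vec!["rust".to_string(), "imag".to_string()]);
}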
@@ -1,7 +1,8 @@
-use storage::file::FileHeaderSpec as FHS;
-use storage::file::FileHeaderData as FHD;
 use std::ops::Deref;
 
+use storage::file::FileHeaderData as FHD;
+use storage::file::FileHeaderSpec as FHS;
+
 pub fn get_spec() -> FHS {
     FHS::Map { keys: vec![ url_key(), tags_key() ] }
 }
@@ -1,23 +1,11 @@
-use runtime::Runtime;
-use module::Module;
-use module::CommandMap;
-use module::ModuleResult;
-use module::ModuleError;
-use std::path::Path;
-use std::result::Result;
-use std::fmt::Result as FMTResult;
-use std::fmt::Formatter;
-use std::fmt::Debug;
-use clap::ArgMatches;
-use regex::Regex;
-
 mod header;
 mod commands;
 
-use self::header::build_header;
-use storage::json::parser::JsonHeaderParser;
-use storage::parser::FileHeaderParser;
+use std::fmt::{Debug, Formatter};
+use std::fmt::Result as FMTResult;
 
+use module::{CommandMap, Module, ModuleResult};
+use runtime::Runtime;
 use self::commands::*;
 
 pub struct BMModule {
@@ -1,16 +1,13 @@
-use runtime::Runtime;
-use std::error::Error;
-use std::fmt::Formatter;
-use std::fmt::Result as FMTResult;
-use std::fmt::Display;
-use std::fmt::Debug;
-use std::path::Path;
-use std::result::Result;
 use std::collections::HashMap;
+use std::error::Error;
+use std::fmt::{Debug, Display, Formatter};
+use std::fmt::Result as FMTResult;
+use std::result::Result;
 
-use clap::{App, ArgMatches};
+use clap::ArgMatches;
 
-use storage::backend::{StorageBackend, StorageBackendError};
+use runtime::Runtime;
+use storage::backend::StorageBackend;
 
 pub mod bm;
 
@@ -1,14 +1,11 @@
+use std::fmt::{Debug, Formatter, Error};
+
 extern crate log;
+use log::{LogRecord, LogLevel, LogLevelFilter, LogMetadata, SetLoggerError};
 
 pub use cli::CliConfig;
 pub use configuration::Configuration as Cfg;
 
-use std::fmt::Debug;
-use std::fmt::Formatter;
-use std::fmt::Error;
-
-use log::{LogRecord, LogLevel, LogLevelFilter, LogMetadata, SetLoggerError};
-
 pub struct ImagLogger {
     lvl: LogLevel,
 }
@@ -1,26 +1,19 @@
 use std::error::Error;
-use std::fmt::Display;
-use std::fmt::Formatter;
+use std::fmt::{Display, Formatter};
 use std::fmt::Result as FMTResult;
-use std::path::Path;
-use std::path::PathBuf;
-use std::vec::Vec;
 use std::fs::File as FSFile;
-use std::fs::create_dir_all;
-use std::fs::remove_file;
-use std::io::Read;
-use std::io::Write;
-use std::vec::IntoIter;
+use std::fs::{create_dir_all, remove_file};
+use std::io::{Read, Write};
+use std::vec::{Vec, IntoIter};
 
 use glob::glob;
 use glob::Paths;
 
-use storage::file::File;
-use storage::file_id::*;
-use storage::parser::{FileHeaderParser, Parser, ParserError};
-
 use module::Module;
 use runtime::Runtime;
+use storage::file::File;
+use storage::file_id::*;
+use storage::parser::{FileHeaderParser, Parser};
 
 pub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;
 
@@ -32,6 +25,8 @@ pub struct StorageBackend {
 impl StorageBackend {
 
     pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {
+        use self::StorageBackendError as SBE;
+
         let storepath = rt.get_rtp() + "/store/";
         debug!("Trying to create {}", storepath);
         create_dir_all(&storepath).and_then(|_| {
@@ -42,18 +37,15 @@ impl StorageBackend {
                 })
             }).or_else(|e| {
                 debug!("Creating failed, constructing error instance");
-                let mut serr = StorageBackendError::new(
-                    "create_dir_all()",
-                    "Could not create store directories",
-                    Some(storepath)
-                );
-                serr.caused_by = Some(Box::new(e));
-                Err(serr)
+                Err(SBE::new("create_dir_all()", "Could not create store directories",
+                             Some(storepath), Some(Box::new(e))))
             })
     }
 
     pub fn iter_ids(&self, m: &Module) -> Result<IntoIter<FileID>, StorageBackendError>
     {
+        use self::StorageBackendError as SBE;
+
         let globstr = self.prefix_of_files_for_module(m) + "*.imag";
         debug!("Globstring = {}", globstr);
         glob(&globstr[..])
@@ -63,13 +55,7 @@ impl StorageBackend {
             })
             .map_err(|e| {
                 debug!("glob() returned error: {:?}", e);
-                let serr = StorageBackendError::new(
-                    "iter_ids()",
-                    "Cannot iter on file ids",
-                    None);
-                // Why the hack is Error not implemented for glob::PatternError
-                // serr.caused_by = Some(Box::new(e));
-                serr
+                SBE::new("iter_ids()", "Cannot iter on file ids", None, None)
             })
     }
 
@@ -77,21 +63,18 @@ impl StorageBackend {
         -> Result<IntoIter<File<'a>>, StorageBackendError>
         where HP: FileHeaderParser
     {
+        use self::StorageBackendError as SBE;
+
         self.iter_ids(m)
             .and_then(|ids| {
                 debug!("Iterating ids and building files from them");
                 debug!(" number of ids = {}", ids.len());
-                Ok(ids.filter_map(|id| self.get_file_by_id(m, &id, p))
-                   .collect::<Vec<File>>()
-                   .into_iter())
+                Ok(self.filter_map_ids_to_files(m, p, ids).into_iter())
             })
             .map_err(|e| {
                 debug!("StorageBackend::iter_ids() returned error = {:?}", e);
-                let mut serr = StorageBackendError::new("iter_files()",
-                                                        "Cannot iter on files",
-                                                        None);
-                serr.caused_by = Some(Box::new(e));
-                serr
+                SBE::new("iter_files()", "Cannot iter on files", None,
+                         Some(Box::new(e)))
             })
     }
 
@@ -103,6 +86,8 @@ impl StorageBackend {
     pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult
         where HP: FileHeaderParser
     {
+        use self::StorageBackendError as SBE;
+
         let written = write_with_parser(&f, p);
         if written.is_err() { return Err(written.err().unwrap()); }
         let string = written.unwrap();
@@ -116,23 +101,14 @@ impl StorageBackend {
             file.write_all(&string.clone().into_bytes())
                 .map_err(|ioerr| {
                     debug!("Could not write file");
-                    let mut err = StorageBackendError::new(
-                        "File::write_all()",
-                        "Could not write out File contents",
-                        None
-                    );
-                    err.caused_by = Some(Box::new(ioerr));
-                    err
+                    SBE::new("File::write_all()",
+                             "Could not write out File contents",
+                             None, Some(Box::new(ioerr)))
                 })
         }).map_err(|writeerr| {
             debug!("Could not create file at '{}'", path);
-            let mut err = StorageBackendError::new(
-                "File::create()",
-                "Creating file on disk failed",
-                None
-            );
-            err.caused_by = Some(Box::new(writeerr));
-            err
+            SBE::new("File::create()", "Creating file on disk failed", None,
+                     Some(Box::new(writeerr)))
         }).and(Ok(()))
     }
 
@@ -143,6 +119,8 @@ impl StorageBackend {
     pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult
         where HP: FileHeaderParser
     {
+        use self::StorageBackendError as SBE;
+
         let contents = write_with_parser(&f, p);
         if contents.is_err() { return Err(contents.err().unwrap()); }
         let string = contents.unwrap();
@@ -156,23 +134,15 @@ impl StorageBackend {
             file.write_all(&string.clone().into_bytes())
                 .map_err(|ioerr| {
                     debug!("Could not write file");
-                    let mut err = StorageBackendError::new(
-                        "File::write()",
-                        "Tried to write contents of this file, though operation did not succeed",
-                        Some(string)
-                    );
-                    err.caused_by = Some(Box::new(ioerr));
-                    err
+                    SBE::new("File::write()",
+                             "Tried to write contents of this file, though operation did not succeed",
+                             Some(string), Some(Box::new(ioerr)))
                 })
         }).map_err(|writeerr| {
             debug!("Could not write file at '{}'", path);
-            let mut err = StorageBackendError::new(
-                "File::open()",
-                "Tried to update contents of this file, though file doesn't exist",
-                None
-            );
-            err.caused_by = Some(Box::new(writeerr));
-            err
+            SBE::new("File::open()",
+                     "Tried to update contents of this file, though file doesn't exist",
+                     None, Some(Box::new(writeerr)))
         }).and(Ok(()))
     }
 
@@ -186,8 +156,6 @@ impl StorageBackend {
     pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>
         where HP: FileHeaderParser
     {
-        use std::ops::Index;
-
         debug!("Searching for file with id '{}'", id);
 
         if id.get_type() == FileIDType::NONE {
@@ -198,9 +166,8 @@ impl StorageBackend {
             let globstr = self.prefix_of_files_for_module(m) + "*" + &id_str[..] + ".imag";
             debug!("Globbing with globstr = '{}'", globstr);
             glob(&globstr[..]).map(|globlist| {
-                let mut vec = globlist_to_file_id_vec(globlist).into_iter()
-                    .filter_map(|id| self.get_file_by_id(m, &id, p))
-                    .collect::<Vec<File>>();
+                let idvec = globlist_to_file_id_vec(globlist).into_iter();
+                let mut vec = self.filter_map_ids_to_files(m, p, idvec);
                 vec.reverse();
                 vec.pop()
             }).unwrap_or({
@@ -211,12 +178,16 @@ impl StorageBackend {
             // The (hash)type is already in the FileID object, so we can just
             // build a path from the information we already have
             debug!("We know FileIDType, so we build the path directly now");
-            if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {
+            let filepath = self.build_filepath_with_id(m, id.clone());
+            if let Ok(mut fs) = FSFile::open(filepath) {
                 let mut s = String::new();
                 fs.read_to_string(&mut s);
+
                 debug!("Success opening file with id '{}'", id);
                 debug!("Parsing to internal structure now");
-                p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()
+                p.read(s).and_then(|(h, d)| {
+                    Ok(File::from_parser_result(m, id.clone(), h, d))
+                }).ok()
             } else {
                 debug!("No file with id '{}'", id);
                 None
@@ -225,6 +196,8 @@ impl StorageBackend {
     }
 
     pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {
+        use self::StorageBackendError as SBE;
+
         if checked {
             error!("Checked remove not implemented yet. I will crash now");
             unimplemented!()
@@ -235,13 +208,8 @@ impl StorageBackend {
 
         let fp = self.build_filepath(&file);
         remove_file(fp).map_err(|e| {
-            let mut serr = StorageBackendError::new(
-                "remove_file()",
-                "File removal failed",
-                Some(format!("{}", file))
-            );
-            serr.caused_by = Some(Box::new(e));
-            serr
+            SBE::new("remove_file()", "File removal failed",
+                     Some(format!("{}", file)), Some(Box::new(e)))
         })
     }
 
@@ -270,6 +238,17 @@ impl StorageBackend {
         self.storepath.clone() + m.name()
     }
 
+    fn filter_map_ids_to_files<'a, HP>(&self,
+                                       m: &'a Module,
+                                       p: &Parser<HP>,
+                                       ids: IntoIter<FileID>)
+        -> Vec<File<'a>>
+        where HP: FileHeaderParser
+    {
+        ids.filter_map(|id| self.get_file_by_id(m, &id, p))
+           .collect::<Vec<File>>()
+    }
+
 }
 
 #[derive(Debug)]
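The new filter_map_ids_to_files helper centralizes the "look each id up, keep only the hits" step that previously appeared in both iter_files and get_file_by_id. A standalone sketch of the filter_map idea, using a plain HashMap as a stand-in for the backend lookup:

use std::collections::HashMap;

fn main() {
    // Stand-in for the store: id -> file contents.
    let store: HashMap<u32, &str> = [(1, "one.imag"), (3, "three.imag")]
        .iter()
        .cloned()
        .collect();

    let ids = vec![1, 2, 3];

    // filter_map drops ids whose lookup returns None and unwraps the rest,
    // which mirrors what the helper does with get_file_by_id.
    let files: Vec<&str> = ids.into_iter()
        .filter_map(|id| store.get(&id).cloned())
        .collect();

    assert_eq!(files, vec!["one.imag", "three.imag"]);
}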
@@ -282,7 +261,11 @@ pub struct StorageBackendError {
 
 impl StorageBackendError {
 
-    fn new<S>(action: S, desc: S, data: Option<String>) -> StorageBackendError
+    fn new<S>(action: S,
+              desc: S,
+              data: Option<String>,
+              cause: Option<Box<Error>>)
+        -> StorageBackendError
         where S: Into<String>
     {
         StorageBackendError {
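With the extra cause parameter above, call sites can hand the underlying error to the constructor instead of building the value and then assigning caused_by, which is what the earlier backend hunks replace throughout. A minimal standalone sketch of that constructor shape; the type and field names here are illustrative, not the crate's own:

use std::error::Error;
use std::io;

#[derive(Debug)]
struct BackendError {
    action: String,
    desc: String,
    data: Option<String>,
    caused_by: Option<Box<dyn Error>>,
}

impl BackendError {
    fn new<S>(action: S,
              desc: S,
              data: Option<String>,
              cause: Option<Box<dyn Error>>) -> BackendError
        where S: Into<String>
    {
        BackendError {
            action: action.into(),
            desc: desc.into(),
            data,
            caused_by: cause,
        }
    }
}

fn main() {
    let io_err = io::Error::new(io::ErrorKind::Other, "disk full");

    // One expression instead of "construct, then mutate caused_by":
    let err = BackendError::new("File::create()",
                                "Creating file on disk failed",
                                None,
                                Some(Box::new(io_err)));
    println!("{:?}", err);
}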
@@ -318,15 +301,13 @@ impl<'a> Display for StorageBackendError {
 fn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>
     where HP: FileHeaderParser
 {
+    use self::StorageBackendError as SBE;
+
     p.write(f.contents())
         .or_else(|err| {
-            let mut serr = StorageBackendError::new(
-                "Parser::write()",
-                "Cannot translate internal representation of file contents into on-disk representation",
-                None
-            );
-            serr.caused_by = Some(Box::new(err));
-            Err(serr)
+            Err(SBE::new("Parser::write()",
+                         "Cannot translate internal representation of file contents into on-disk representation",
+                         None, Some(Box::new(err))))
         })
 }
 
@@ -335,3 +316,4 @@ fn globlist_to_file_id_vec(globlist: Paths) -> Vec<FileID> {
         .map(|pbuf| FileID::from(&pbuf))
         .collect::<Vec<FileID>>()
 }
+
@@ -2,12 +2,12 @@ use std::error::Error;
 use std::fmt::{Debug, Display, Formatter};
 use std::fmt;
 
-use module::Module;
-use super::parser::{FileHeaderParser, Parser, ParserError};
-use storage::file_id::*;
-
 use regex::Regex;
 
+use module::Module;
+use storage::file_id::*;
+use super::parser::{FileHeaderParser, Parser, ParserError};
+
 #[derive(Debug)]
 #[derive(Clone)]
 pub enum FileHeaderSpec {
@@ -1,10 +1,9 @@
+use std::convert::{From, Into};
 use std::error::Error;
 use std::fmt::{Debug, Display, Formatter};
 use std::fmt;
+use std::path::PathBuf;
 use std::result::Result;
-use std::path::{Path, PathBuf};
-use std::convert::From;
-use std::convert::Into;
 
 use regex::Regex;
 
@@ -1,16 +1,14 @@
+use std::collections::HashMap;
+use std::error::Error;
+
 use serde_json::{Value, from_str};
 use serde_json::error::Result as R;
 use serde_json::Serializer;
 use serde::ser::Serialize;
 use serde::ser::Serializer as Ser;
 
-use std::collections::HashMap;
-use std::io::stdout;
-use std::error::Error;
-
-use super::super::parser::{FileHeaderParser, ParserError};
-use super::super::file::{FileHeaderSpec, FileHeaderData};
+use storage::parser::{FileHeaderParser, ParserError};
+use storage::file::{FileHeaderSpec, FileHeaderData};
 
-
 pub struct JsonHeaderParser {
     spec: Option<FileHeaderSpec>,
@@ -1,9 +1,10 @@
-use regex::Regex;
 use std::error::Error;
 use std::fmt::{Debug, Display, Formatter};
 use std::fmt;
 
-use super::file::{FileHeaderSpec, FileHeaderData};
+use regex::Regex;
+
+use super::file::FileHeaderData;
 
 pub struct ParserError {
     summary: String,
@@ -1,6 +1,5 @@
 use std::iter::Iterator;
 
-use runtime::Runtime;
 use storage::file::File;
 
 pub trait FilePrinter {
@@ -29,7 +28,7 @@ struct DebugPrinter {
 
 impl FilePrinter for DebugPrinter {
 
-    fn new(verbose: bool, debug: bool) -> DebugPrinter {
+    fn new(_: bool, debug: bool) -> DebugPrinter {
         DebugPrinter {
             debug: debug,
         }