Auto merge of #61 - matthiasbeyer:deny, r=matthiasbeyer

Turn warnings into hard errors and fix them

Finally, I cleaned up a bit.

The `dead_code` warning is still disabled, though I might enable it soon.
This commit is contained in:
Homu 2016-01-06 01:39:47 +09:00
commit fc7f2c0bb1
16 changed files with 102 additions and 66 deletions

View file

@ -1,3 +1,9 @@
#![deny(unused_imports)]
#![deny(unused_variables)]
#![deny(unused_must_use)]
#![deny(unused_mut)]
#![deny(non_shorthand_field_patterns)]
#[macro_use] extern crate clap;
#[macro_use] extern crate log;
#[macro_use] extern crate serde;
@ -32,13 +38,18 @@ pub use module::bm::BM;
pub use module::notes::Notes;
fn main() {
use std::process::exit;
use ansi_term::Colour::Yellow;
let yaml = load_yaml!("../etc/cli.yml");
let app = App::from_yaml(yaml);
let config = CliConfig::new(app);
ImagLogger::init(&config);
ImagLogger::init(&config).map_err(|e| {
error!("Could not initialize logger");
debug!("Could not initialize logger: {:?}", e);
exit(1);
}).ok();
let configuration = Configuration::new(&config);

View file

@ -1,7 +1,6 @@
use std::fmt::{Debug, Formatter};
use std::fmt;
use std::ops::Deref;
use std::process::exit;
use clap::ArgMatches;
@ -14,7 +13,6 @@ use storage::json::parser::JsonHeaderParser;
use module::helpers::cli::create_tag_filter;
use module::helpers::cli::create_hash_filter;
use module::helpers::cli::create_text_header_field_grep_filter;
use module::helpers::cli::create_content_grep_filter;
use module::helpers::cli::CliFileFilter;
mod header;
@ -157,7 +155,7 @@ impl<'a> BM<'a> {
* Subcommand: open
*/
fn command_open(&self, matches: &ArgMatches) -> bool {
use ansi_term::Colour::{Green, Yellow, Red};
use ansi_term::Colour::{Green, Red};
use open;
let parser = Parser::new(JsonHeaderParser::new(None));
@ -205,7 +203,7 @@ impl<'a> BM<'a> {
* Subcommand: remove
*/
fn command_remove(&self, matches: &ArgMatches) -> bool {
use ansi_term::Colour::{Green, Yellow, Red};
use ansi_term::Colour::{Green, Red};
let parser = Parser::new(JsonHeaderParser::new(None));
@ -282,7 +280,7 @@ impl<'a> BM<'a> {
use self::header::rebuild_header_with_tags;
let parser = Parser::new(JsonHeaderParser::new(None));
alter_tags_in_files(self, matches, &parser, |old_tags, cli_tags| {
alter_tags_in_files(self, matches, &parser, |_, cli_tags| {
cli_tags.clone()
}, rebuild_header_with_tags)
}
@ -295,7 +293,7 @@ impl<'a> BM<'a> {
impl<'a> Module<'a> for BM<'a> {
fn exec(&self, matches: &ArgMatches) -> bool {
use ansi_term::Colour::{Green, Yellow, Red};
use ansi_term::Colour::Red;
match matches.subcommand_name() {
Some("add") => {

View file

@ -8,9 +8,6 @@ use regex::Regex;
use storage::file::File;
use storage::file::hash::FileHash;
use storage::json::parser::JsonHeaderParser;
use storage::parser::FileHeaderParser;
use storage::parser::Parser;
pub trait CliFileFilter {

View file

@ -32,8 +32,8 @@ pub mod markdown {
impl Render for LinkExtractRenderer {
fn link(&mut self,
output: &mut Buffer,
content: &Buffer,
_: &mut Buffer,
_: &Buffer,
link: &Buffer,
title: &Buffer) -> bool {

View file

@ -44,12 +44,13 @@ pub mod data {
pub fn get_named_text_from_header(name: &'static str, header: &FHD) -> Option<String> {
match header {
&FHD::Map{keys: ref ks} => {
let mut keys : Vec<FHD> = ks.clone();
keys.iter().find(|k| {
match k.deref() {
&FHD::Key{name: ref n, value: ref v} => n == name,
_ => false
}
ks.clone()
.iter()
.find(|k| {
match k.deref() {
&FHD::Key{name: ref n, value: _} => n == name,
_ => false
}
}).and_then(|urlkey| {
match urlkey.deref().clone() {
FHD::Key{name: ref n, value: ref v} => {

View file

@ -61,7 +61,7 @@ pub mod data {
* Does no spec verification.
*/
pub fn get_tags_from_header(header: &FHD) -> Vec<String> {
let mut tags : Vec<String> = vec![];
let tags : Vec<String> = vec![];
fn match_array(a: &Box<FHD>) -> Vec<String> {
let mut tags : Vec<String> = vec![];
@ -87,7 +87,7 @@ pub mod data {
let keys : Vec<FHD> = ks.clone();
for key in keys {
match key {
FHD::Key{name: ref name, value: ref v} => {
FHD::Key{ref name, value: ref v} => {
if name == "TAGS" {
return match_array(v)
}
@ -118,7 +118,6 @@ pub mod data {
use module::helpers::cli::create_tag_filter;
use module::helpers::cli::create_hash_filter;
use module::helpers::cli::create_text_header_field_grep_filter;
use module::helpers::cli::create_content_grep_filter;
use module::helpers::cli::CliFileFilter;
let cli_tags = matches.value_of("tags")

View file

@ -4,13 +4,11 @@
pub mod cli {
use clap::ArgMatches;
use runtime::Runtime;
/**
* Get a commandline option "tags" and split the argument by "," to be able to provide a
* Vec<String> with the argument as array.
*/
pub fn get_tags<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Vec<String> {
pub fn get_tags<'a>(sub: &ArgMatches<'a, 'a>) -> Vec<String> {
fn reject_if_with_spaces(e: &String) -> bool {
if e.contains(" ") {

View file

@ -1,11 +1,10 @@
use std::fmt::{Debug, Formatter};
use std::fmt::Result as FMTResult;
use std::ops::Deref;
use std::rc::Rc;
use std::cell::RefCell;
use std::ops::Deref;
use clap::ArgMatches;
use regex::Regex;
mod header;
@ -33,7 +32,6 @@ impl<'a> Notes<'a> {
}
fn command_add(&self, matches: &ArgMatches) -> bool {
use std::process::exit;
use ansi_term::Colour::Yellow;
use self::header::build_header;
use ui::external::editor::let_user_provide_content;
@ -215,16 +213,19 @@ impl<'a> Notes<'a> {
}
};
tempfile.write_all(MarkdownParser::new(&s).to_html_page().as_ref());
tempfile.write_all(MarkdownParser::new(&s).to_html_page().as_ref())
.map_err(|e| {
error!("Could not write HTML to file: {}", temppath);
debug!("Could not write HTML to file: {:?}", e);
})
.ok();
open::that(&temppath[..]).is_ok()
}
fn command_list(&self, matches: &ArgMatches) -> bool {
use ansi_term::Colour::{Red, Green};
use ui::file::{FilePrinter, TablePrinter};
use self::header::get_name_from_header;
use self::header::get_tags_from_header;
use std::process::exit;
use module::helpers::cli::CliFileFilter;
let parser = Parser::new(JsonHeaderParser::new(None));
@ -262,7 +263,7 @@ impl<'a> Notes<'a> {
fn command_links(&self, matches: &ArgMatches) -> bool {
use ansi_term::Colour::{Red, Green};
use module::helpers::content::markdown::MarkdownParser;
use ui::file::{FilePrinter, TablePrinter};
use ui::file::FilePrinter;
use util::is_url;
use prettytable::Table;
use prettytable::row::Row;
@ -276,7 +277,6 @@ impl<'a> Notes<'a> {
debug!("list internal links = {}", list_intern);
debug!("list external links = {}", list_extern);
let printer = TablePrinter::new(self.rt.is_verbose(), self.rt.is_debugging());
let titles = row!["#", "Text", "Link", "Direction"];
let mut table = Table::new();
table.set_titles(titles);
@ -312,7 +312,6 @@ impl<'a> Notes<'a> {
})
.flatten()
.filter(|link| {
let title = &link.title;
let url = &link.url;
let is_extern = is_url(&url);
debug!("Is external URL {} -> {}", url, is_extern);
@ -435,7 +434,7 @@ impl<'a> Notes<'a> {
use self::header::rebuild_header_with_tags;
let parser = Parser::new(JsonHeaderParser::new(None));
alter_tags_in_files(self, matches, &parser, |old_tags, cli_tags| {
alter_tags_in_files(self, matches, &parser, |_, cli_tags| {
cli_tags.clone()
}, rebuild_header_with_tags)
}
@ -504,7 +503,7 @@ impl<'a> Module<'a> for Notes<'a> {
impl<'a> Debug for Notes<'a> {
fn fmt(&self, fmt: &mut Formatter) -> FMTResult {
write!(fmt, "[Module][Notes]");
try!(write!(fmt, "[Module][Notes]"));
Ok(())
}

View file

@ -60,7 +60,7 @@ impl Into<String> for FileHash {
impl Display for FileHash {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
write!(fmt, "{}", self.hash);
try!(write!(fmt, "{}", self.hash));
Ok(())
}

View file

@ -68,9 +68,9 @@ impl FileID {
}
debug!("Matches: {}", capts.len());
let modname = capts.at(1).unwrap();
let hashname = capts.at(2).unwrap();
let mut hash = capts.at(3).unwrap();
let modname = capts.at(1).unwrap();
let hashname = capts.at(2).unwrap();
let hash = capts.at(3).unwrap();
debug!("Destructure FilePath to ID:");
debug!(" FilePath: {:?}", s);
@ -169,7 +169,12 @@ mod test {
let lvl = LogLevelFilter::Debug;
max_log_lvl.set(lvl);
Box::new(ImagLogger::new(lvl.to_log_level().unwrap()))
});
})
.map_err(|e| {
println!("Error setting logger: {:?}", e);
assert!(false);
})
.ok();
debug!("Init logger for test");
}

View file

@ -1,4 +1,4 @@
use std::fmt::{Debug, Display, Formatter};
use std::fmt::{Display, Formatter};
use std::fmt;
use std::convert::{From, Into};
use std::str::FromStr;

View file

@ -128,8 +128,7 @@ mod test {
// we use the JSON parser here, so we can generate FileHeaderData
use storage::json::parser::JsonHeaderParser;
use storage::file::header::match_header_spec;
use storage::parser::{FileHeaderParser, ParserError};
use storage::file::header::data::FileHeaderData as FHD;
use storage::parser::FileHeaderParser;
use storage::file::header::spec::FileHeaderSpec as FHS;
#[test]

View file

@ -70,7 +70,9 @@ impl FileHeaderParser for JsonHeaderParser {
let mut s = Vec::<u8>::new();
{
let mut ser = Serializer::pretty(&mut s);
data.serialize(&mut ser);
data.serialize(&mut ser).map_err(|e| {
debug!("Serializer error: {:?}", e);
}).ok();
}
String::from_utf8(s).or(
@ -138,7 +140,7 @@ impl Serialize for FileHeaderData {
hm.serialize(ser)
},
&FileHeaderData::Key{name: ref n, value: ref v} => unreachable!(),
&FileHeaderData::Key{name: _, value: _} => unreachable!(),
}
}
@ -151,7 +153,7 @@ mod test {
use std::ops::Deref;
use super::JsonHeaderParser;
use storage::parser::{FileHeaderParser, ParserError};
use storage::parser::FileHeaderParser;
use storage::file::header::data::FileHeaderData as FHD;
use storage::file::header::spec::FileHeaderSpec as FHS;
@ -176,10 +178,10 @@ mod test {
assert!(parsed.is_ok(), "Parsed is not ok: {:?}", parsed);
match parsed.ok() {
Some(FHD::Map{keys: keys}) => {
Some(FHD::Map{keys}) => {
for k in keys {
match k {
FHD::Key{name: name, value: value} => {
FHD::Key{name, value} => {
assert!(name == "a" || name == "b", "Key unknown");
match value.deref() {
&FHD::UInteger(u) => assert_eq!(u, 1),
@ -205,7 +207,7 @@ mod test {
assert!(parsed.is_ok(), "Parsed is not ok: {:?}", parsed);
match parsed.ok() {
Some(FHD::Map{keys: keys}) => {
Some(FHD::Map{keys}) => {
for k in keys {
match_key(&k);
}
@ -219,11 +221,11 @@ mod test {
use std::ops::Deref;
match k {
&FHD::Key{name: ref name, value: ref value} => {
&FHD::Key{ref name, ref value} => {
assert!(name == "a" || name == "b", "Key unknown");
match value.deref() {
&FHD::Array{values: ref vs} => {
for value in vs.iter() {
&FHD::Array{ref values} => {
for value in values.iter() {
match value {
&FHD::UInteger(u) => assert_eq!(u, 1),
_ => assert!(false, "UInt is not an UInt"),
@ -231,10 +233,10 @@ mod test {
}
}
&FHD::Map{keys: ref ks} => {
for key in ks.iter() {
&FHD::Map{ref keys} => {
for key in keys.iter() {
match key {
&FHD::Key{name: ref name, value: ref value} => {
&FHD::Key{ref name, ref value} => {
match value.deref() {
&FHD::Integer(i) => {
assert_eq!(i, -2);

View file

@ -179,6 +179,7 @@ impl Store {
fsfile.write_all(&text.unwrap().clone().into_bytes()[..])
}).map_err(|writeerr| {
debug!("Could not create file at '{}'", path);
debug!(" error: {:?}", writeerr);
}).and(Ok(true)).unwrap()
// TODO: Is this unwrap() save?
@ -217,8 +218,16 @@ impl Store {
FSFile::open(&path).map(|mut file| {
file.read_to_string(&mut string)
.map_err(|e| error!("Failed reading file: '{}'", path));
});
.map_err(|e| {
error!("Failed reading file: '{}'", path);
debug!(" error {}", e);
})
.is_ok();
})
.map_err(|e| {
error!("Error opening file: {}", path);
debug!("Error opening file: {:?}", e);
}).ok();
parser.read(string).map(|(header, data)| {
self.new_file_from_parser_result(m, id.clone(), header, data);
@ -318,7 +327,12 @@ impl Store {
});
}
}
});
})
.map_err(|e| {
error!("Could not glob: '{}'", globstr);
debug!("Could not glob(): {:?}", e);
})
.ok();
res
}

View file

@ -125,9 +125,8 @@ impl<HP: FileHeaderParser> Parser<HP> {
if divided.is_err() {
debug!("Error reading into internal datastructure");
let mut p = ParserError::new("Dividing text failed",
s, 0,
"Dividing text with divide_text() failed");
let p = ParserError::new("Dividing text failed", s, 0,
"Dividing text with divide_text() failed");
return Err(p.with_cause(Box::new(divided.err().unwrap())));
}

View file

@ -7,7 +7,7 @@ use runtime::Runtime;
pub fn let_user_provide_content(rt: &Runtime) -> Option<String> {
use std::io::Read;
use std::fs::File;
use std::process::Command;
use std::process::exit;
let filepath = "/tmp/imag-tmp.md";
let file_created = File::create(filepath)
@ -45,7 +45,13 @@ pub fn let_user_provide_content(rt: &Runtime) -> Option<String> {
let mut contents = String::new();
File::open(filepath).map(|mut file| {
file.read_to_string(&mut contents);
file.read_to_string(&mut contents)
.map_err(|e| {
error!("Error reading content: {}", e);
debug!("Error reading content: {:?}", e);
exit(1);
})
.is_ok();
Some(contents)
}).unwrap_or(None)
}
@ -59,7 +65,6 @@ pub fn edit_content(rt: &Runtime, old_content: String) -> (String, bool) {
use std::io::Read;
use std::io::Write;
use std::fs::File;
use std::process::Command;
use std::process::exit;
let filepath = "/tmp/imag-tmp.md";
@ -73,7 +78,12 @@ pub fn edit_content(rt: &Runtime, old_content: String) -> (String, bool) {
}
};
file.write(old_content.as_ref());
file.write(old_content.as_ref())
.map_err(|e| {
error!("Error writing content: {}", e);
debug!("Error writing content: {:?}", e);
exit(1);
}).is_ok();
}
debug!("Ready with putting old content into the file");
@ -101,7 +111,11 @@ pub fn edit_content(rt: &Runtime, old_content: String) -> (String, bool) {
let mut contents = String::new();
File::open(filepath).map(|mut file| {
file.read_to_string(&mut contents);
file.read_to_string(&mut contents).map_err(|e| {
error!("Error reading content: {}", e);
debug!("Error reading content: {:?}", e);
exit(1);
}).is_ok();
(contents, true)
}).unwrap_or((old_content, false))
}