Merge pull request #995 from matthiasbeyer/update-toml-query
Update toml query
commit afc5d1f929
14 changed files with 66 additions and 50 deletions
@@ -21,7 +21,7 @@ semver = "0.5.1"
 serde_json = "0.8.3"
 task-hookrs = "0.2.2"
 toml = "0.4.*"
-toml-query = "0.2.*"
+toml-query = "0.3.*"
 is-match = "0.1.*"
 version = "2.0.1"
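The common thread in the hunks below is the toml-query 0.2 to 0.3 upgrade: where the 0.2-style `read()` handed back `Result<&Value>` and reported a missing key as an error, the shape this PR migrates to is `Result<Option<&Value>>`, so every match gains an explicit `Ok(None)` (or `None`) arm. A minimal, self-contained sketch of that pattern, with a hypothetical `lookup()` standing in for the header read (it is not part of toml-query or imag):

```rust
extern crate toml;

use toml::Value;

// Hypothetical helper: looks a key up in a TOML table and reports a missing
// key as Ok(None), mirroring the assumed toml-query 0.3 shape.
fn lookup<'a>(table: &'a Value, key: &str) -> Result<Option<&'a Value>, String> {
    match *table {
        Value::Table(ref t) => Ok(t.get(key)),
        _ => Err(String::from("not a table")),
    }
}

fn main() {
    let header: Value = "uuid = \"abc-123\"".parse().unwrap();

    // With the 0.2-style shape there were only Ok(..) and Err(..); the Option
    // adds a third, explicit "field not present" case.
    match lookup(&header, "uuid") {
        Ok(Some(&Value::String(ref u))) => println!("uuid: {}", u),
        Ok(Some(_)) => println!("header type error"),
        Ok(None) => println!("header field missing"),
        Err(e) => println!("read error: {}", e),
    }
}
```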
@@ -103,11 +103,15 @@ fn list(rt: &Runtime) {
         // filter out the ones were we can read the uuid
         let uuids : Vec<_> = iter.filter_map(|t| match t {
             Ok(v) => match v.get_header().read(&String::from("todo.uuid")) {
-                Ok(&Value::String(ref u)) => Some(u.clone()),
-                Ok(_) => {
+                Ok(Some(&Value::String(ref u))) => Some(u.clone()),
+                Ok(Some(_)) => {
                     warn!("Header type error");
                     None
                 },
+                Ok(None) => {
+                    warn!("Header missing field");
+                    None
+                },
                 Err(e) => {
                     if !no_identifier(&e) {
                         trace_error(&e);
@@ -16,7 +16,7 @@ homepage = "http://imag-pim.org"
 [dependencies]
 log = "0.3"
 toml = "0.4.*"
-toml-query = "0.2.*"
+toml-query = "0.3.*"
 semver = "0.5"

 [dependencies.libimagstore]
@@ -117,7 +117,7 @@ impl<'a> Counter<'a> {
         let mut header = self.fle.deref_mut().get_header_mut();
         let query = String::from("counter.value");
         match try!(header.read(&query).map_err_into(CEK::StoreReadError)) {
-            &Value::Integer(i) => {
+            Some(&Value::Integer(i)) => {
                 header.set(&query, Value::Integer(i + 1))
                     .map_err_into(CEK::StoreWriteError)
                     .map(|_| ())
@@ -130,7 +130,7 @@ impl<'a> Counter<'a> {
         let mut header = self.fle.deref_mut().get_header_mut();
         let query = String::from("counter.value");
         match try!(header.read(&query).map_err_into(CEK::StoreReadError)) {
-            &Value::Integer(i) => {
+            Some(&Value::Integer(i)) => {
                 header.set(&query, Value::Integer(i - 1))
                     .map_err_into(CEK::StoreWriteError)
                     .map(|_| ())
@@ -152,15 +152,17 @@ impl<'a> Counter<'a> {

     pub fn name(&self) -> Result<CounterName> {
         self.read_header_at("counter.name", |v| match v {
-            &Value::String(ref s) => Ok(s.clone()),
-            _ => Err(CEK::HeaderTypeError.into_error()),
+            Some(&Value::String(ref s)) => Ok(s.clone()),
+            Some(_) => Err(CEK::HeaderTypeError.into_error()),
+            _ => Err(CEK::StoreReadError.into_error()),
         })
     }

     pub fn value(&self) -> Result<i64> {
         self.read_header_at("counter.value", |v| match v {
-            &Value::Integer(i) => Ok(i),
-            _ => Err(CEK::HeaderTypeError.into_error()),
+            Some(&Value::Integer(i)) => Ok(i),
+            Some(_) => Err(CEK::HeaderTypeError.into_error()),
+            _ => Err(CEK::StoreReadError.into_error()),
         })
     }

@@ -170,13 +172,14 @@ impl<'a> Counter<'a> {

     pub fn read_unit(&self) -> Result<Option<CounterUnit>> {
         self.read_header_at("counter.unit", |s| match s {
-            &Value::String(ref s) => Ok(Some(CounterUnit::new(s.clone()))),
-            _ => Err(CEK::HeaderTypeError.into_error()),
+            Some(&Value::String(ref s)) => Ok(Some(CounterUnit::new(s.clone()))),
+            Some(_) => Err(CEK::HeaderTypeError.into_error()),
+            _ => Err(CEK::StoreReadError.into_error()),
         })
     }

     fn read_header_at<T, F>(&self, name: &str, f: F) -> Result<T>
-        where F: FnOnce(&Value) -> Result<T>
+        where F: FnOnce(Option<&Value>) -> Result<T>
     {

         self.fle
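The `read_header_at` hunk above widens the helper's closure from `FnOnce(&Value)` to `FnOnce(Option<&Value>)`, so each caller decides what a missing key means. A hedged sketch of that shape; `read_at` and `CounterError` are illustrative stand-ins, not the crate's types:

```rust
extern crate toml;

use toml::Value;

#[derive(Debug)]
enum CounterError { StoreRead, HeaderType }

// Hypothetical helper mirroring the widened read_header_at: the closure now
// receives Option<&Value> and decides how to treat a missing key.
fn read_at<T, F>(header: &Value, key: &str, f: F) -> Result<T, CounterError>
    where F: FnOnce(Option<&Value>) -> Result<T, CounterError>
{
    match *header {
        Value::Table(ref t) => f(t.get(key)),
        _ => Err(CounterError::StoreRead),
    }
}

fn main() {
    let header: Value = "value = 5".parse().unwrap();

    let value = read_at(&header, "value", |v| match v {
        Some(&Value::Integer(i)) => Ok(i),
        Some(_) => Err(CounterError::HeaderType),
        None => Err(CounterError::StoreRead),
    });

    println!("{:?}", value); // prints Ok(5)
}
```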
@@ -18,7 +18,7 @@ chrono = "0.2"
 log = "0.3"
 semver = "0.5"
 toml = "0.4.*"
-toml-query = "0.2.*"
+toml-query = "0.3.*"
 regex = "0.1"
 itertools = "0.5"
@@ -27,7 +27,7 @@ pub fn get_default_diary_name(rt: &Runtime) -> Option<String> {
     get_diary_config_section(rt)
         .and_then(|config| {
             match config.read(&String::from("default_diary")) {
-                Ok(&Value::String(ref s)) => Some(s.clone()),
+                Ok(Some(&Value::String(ref s))) => Some(s.clone()),
                 _ => None,
             }
         })
@@ -36,5 +36,8 @@ pub fn get_default_diary_name(rt: &Runtime) -> Option<String> {
 pub fn get_diary_config_section<'a>(rt: &'a Runtime) -> Option<&'a Value> {
     rt.config()
         .map(|config| config.config())
-        .and_then(|config| config.read(&String::from("diary")).ok())
+        .and_then(|config| match config.read(&String::from("diary")) {
+            Ok(x) => x,
+            Err(_) => None,
+        })
 }
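In `get_diary_config_section` the old `.ok()` shortcut no longer fits: if `read()` yields `Result<Option<&Value>>`, then `.ok()` produces a nested `Option<Option<&Value>>` that still needs flattening, which is what the explicit match does. A small sketch of the equivalence, using a local `read()` stand-in rather than the real store API:

```rust
extern crate toml;

use toml::Value;

// Local stand-in with the assumed Result<Option<&Value>> shape of toml-query 0.3.
fn read<'a>(cfg: &'a Value, key: &str) -> Result<Option<&'a Value>, String> {
    match *cfg {
        Value::Table(ref t) => Ok(t.get(key)),
        _ => Err(String::from("not a table")),
    }
}

fn main() {
    let cfg: Value = "[diary]\ndefault_diary = \"work\"".parse().unwrap();

    // Explicit match, as in the hunk above:
    let section = match read(&cfg, "diary") {
        Ok(x) => x,
        Err(_) => None,
    };

    // Equivalent flattening with combinators: .ok() alone would yield
    // Option<Option<&Value>>, so it needs an and_then to collapse one layer.
    let section_alt = read(&cfg, "diary").ok().and_then(|v| v);

    assert_eq!(section.is_some(), section_alt.is_some());
    println!("diary section found: {}", section.is_some());
}
```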
@@ -16,7 +16,7 @@ homepage = "http://imag-pim.org"
 [dependencies]
 log = "0.3"
 toml = "0.4"
-toml-query = "0.2"
+toml-query = "0.3"
 is-match = "0.1"

 [dev-dependencies]
@@ -88,23 +88,16 @@ impl EntryCategory for Entry {
             }
             .map_err_into(CEK::HeaderReadError),

-            Ok(&Value::String(ref s)) => Ok(Some(s.clone().into())),
+            Ok(Some(&Value::String(ref s))) => Ok(Some(s.clone().into())),
+            Ok(None) => Err(CEK::StoreReadError.into_error()).map_err_into(CEK::HeaderReadError),
             Ok(_) => Err(CEK::TypeError.into_error()).map_err_into(CEK::HeaderReadError),
         }
     }

     fn has_category(&self) -> Result<bool> {
-        let res = self.get_header().read(&String::from("category.value"));
-        if res.is_err() {
-            let res = res.unwrap_err();
-            match res.kind() {
-                &TQEK::IdentifierNotFoundInDocument(_) => Ok(false),
-                _ => Err(res),
-            }
-        } else {
-            Ok(true)
-        }
+        self.get_header().read(&String::from("category.value"))
+            .map_err_into(CEK::HeaderReadError)
+            .map(|e| e.is_some())
     }

 }
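The `has_category` rewrite above drops the error-kind inspection: once a missing key arrives as `Ok(None)` instead of an `IdentifierNotFoundInDocument` error, presence collapses to `is_some()` on the Ok value. A compact sketch of that check, again with a stand-in `read()` and a flat key instead of the real `category.value` path:

```rust
extern crate toml;

use toml::Value;

// Local stand-in with the assumed Result<Option<&Value>> shape.
fn read<'a>(header: &'a Value, key: &str) -> Result<Option<&'a Value>, String> {
    match *header {
        Value::Table(ref t) => Ok(t.get(key)),
        _ => Err(String::from("not a table")),
    }
}

// Presence check in the new style: a missing key is Ok(None), so the whole
// "is there a category?" question is just is_some() on the Ok value.
fn has_category(header: &Value) -> Result<bool, String> {
    read(header, "category").map(|v| v.is_some())
}

fn main() {
    let with: Value = "category = \"work\"".parse().unwrap();
    let without: Value = "other = 1".parse().unwrap();

    assert_eq!(has_category(&with), Ok(true));
    assert_eq!(has_category(&without), Ok(false));
    println!("presence checks passed");
}
```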
@@ -198,9 +198,10 @@ mod tests {
         assert!(header_field.is_ok(), format!("Expected Ok(_), got: {:?}", header_field));
         let header_field = header_field.unwrap();

-        match *header_field {
-            Value::String(ref s) => assert_eq!(category_name, s),
-            _ => assert!(false, "Header field has wrong type"),
+        match header_field {
+            Some(&Value::String(ref s)) => assert_eq!(category_name, s),
+            Some(_) => assert!(false, "Header field has wrong type"),
+            None => assert!(false, "Header field not present"),
         }
     }
 }
@@ -228,7 +229,7 @@ fn represents_category(store: &Store, sid: StoreId, name: &str) -> Result<bool>
         .read(&String::from(CATEGORY_REGISTER_NAME_FIELD_PATH))
         .map_err_into(CEK::HeaderReadError)
     {
-        Ok(&Value::String(ref s)) => Ok(s == name),
+        Ok(Some(&Value::String(ref s))) => Ok(s == name),
         Ok(_) => Err(CEK::TypeError.into_error()),
         Err(e) => Err(e).map_err_into(CEK::HeaderReadError),
     }
@@ -279,7 +280,7 @@ impl<'a> Iterator for CategoryNameIter<'a> {
             .map_err_into(CEK::StoreReadError)
             .and_then(|fle| fle.ok_or(CEK::StoreReadError.into_error()))
             .and_then(|fle| match fle.get_header().read(&query) {
-                Ok(&Value::String(ref s)) => Ok(Category::from(s.clone())),
+                Ok(Some(&Value::String(ref s))) => Ok(Category::from(s.clone())),
                 Ok(_) => Err(CEK::TypeError.into_error()),
                 Err(e) => Err(e).map_err_into(CEK::HeaderReadError),
             })
@@ -15,7 +15,7 @@ homepage = "http://imag-pim.org"

 [dependencies]
 chrono = "0.3"
-toml-query = "0.2"
+toml-query = "0.3"
 lazy_static = "0.2"
 toml = "0.4"
@@ -65,9 +65,10 @@ impl EntryDate for Entry {
             .map_err_into(DEK::ReadDateError)
             .and_then(|v| {
                 match v {
-                    &Value::String(ref s) => s.parse::<NaiveDateTime>()
+                    Some(&Value::String(ref s)) => s.parse::<NaiveDateTime>()
                         .map_err_into(DEK::DateTimeParsingError),
-                    _ => Err(DEK::DateHeaderFieldTypeError.into_error()),
+                    Some(_) => Err(DEK::DateHeaderFieldTypeError.into_error()),
+                    _ => Err(DEK::ReadDateError.into_error()),
                 }
             })
     }
@@ -131,9 +132,10 @@ impl EntryDate for Entry {
             .map_err_into(DEK::ReadDateTimeRangeError)
             .and_then(|v| {
                 match v {
-                    &Value::String(ref s) => s.parse::<NaiveDateTime>()
+                    Some(&Value::String(ref s)) => s.parse::<NaiveDateTime>()
                         .map_err_into(DEK::DateTimeParsingError),
-                    _ => Err(DEK::DateHeaderFieldTypeError.into_error()),
+                    Some(_) => Err(DEK::DateHeaderFieldTypeError.into_error()),
+                    _ => Err(DEK::ReadDateError.into_error()),
                 }
             }));

@@ -143,9 +145,10 @@ impl EntryDate for Entry {
             .map_err_into(DEK::ReadDateTimeRangeError)
             .and_then(|v| {
                 match v {
-                    &Value::String(ref s) => s.parse::<NaiveDateTime>()
+                    Some(&Value::String(ref s)) => s.parse::<NaiveDateTime>()
                         .map_err_into(DEK::DateTimeParsingError),
-                    _ => Err(DEK::DateHeaderFieldTypeError.into_error()),
+                    Some(_) => Err(DEK::DateHeaderFieldTypeError.into_error()),
+                    _ => Err(DEK::ReadDateError.into_error()),
                 }
             }));

@@ -250,6 +253,9 @@ mod tests {
         assert!(hdr_field.is_ok());
         let hdr_field = hdr_field.unwrap();

+        assert!(hdr_field.is_some());
+        let hdr_field = hdr_field.unwrap();
+
         match *hdr_field {
             Value::String(ref s) => assert_eq!("2000-01-02T03:04:05", s),
             _ => assert!(false, "Wrong header type"),
@@ -315,7 +321,10 @@ mod tests {

         let hdr_field = entry.get_header().read(&DATE_HEADER_LOCATION);

-        assert!(hdr_field.is_err(), format!("Expected Err(_), got: {:?}", hdr_field));
+        assert!(hdr_field.is_ok());
+        let hdr_field = hdr_field.unwrap();
+
+        assert!(hdr_field.is_none());
     }
 }

@@ -17,7 +17,7 @@ homepage = "http://imag-pim.org"
 filters = "0.1"
 chrono = "0.3"
 toml = "0.4"
-toml-query = "0.2"
+toml-query = "0.3"
 lazy_static = "0.2"

 [dependencies.libimagerror]
@@ -43,13 +43,13 @@ pub trait TimeTracking {

     fn set_start_datetime(&mut self, dt: NaiveDateTime) -> Result<()>;

-    fn get_start_datetime(&self) -> Result<NaiveDateTime>;
+    fn get_start_datetime(&self) -> Result<Option<NaiveDateTime>>;

     fn delete_start_datetime(&mut self) -> Result<()>;

     fn set_end_datetime(&mut self, dt: NaiveDateTime) -> Result<()>;

-    fn get_end_datetime(&self) -> Result<NaiveDateTime>;
+    fn get_end_datetime(&self) -> Result<Option<NaiveDateTime>>;

     fn delete_end_datetime(&mut self) -> Result<()>;

@@ -68,7 +68,7 @@ impl TimeTracking for Entry {
             .map(|_| ())
     }

-    fn get_start_datetime(&self) -> Result<NaiveDateTime> {
+    fn get_start_datetime(&self) -> Result<Option<NaiveDateTime>> {
         self.get_header()
             .read(DATE_TIME_START_HEADER_PATH)
             .map_err_into(TTEK::HeaderReadError)
@@ -91,7 +91,7 @@ impl TimeTracking for Entry {
             .map(|_| ())
     }

-    fn get_end_datetime(&self) -> Result<NaiveDateTime> {
+    fn get_end_datetime(&self) -> Result<Option<NaiveDateTime>> {
         self.get_header()
             .read(DATE_TIME_END_HEADER_PATH)
             .map_err_into(TTEK::HeaderReadError)
@@ -123,13 +123,16 @@ impl TimeTracking for Entry {

 }

-fn header_value_to_dt(val: &Value) -> Result<NaiveDateTime> {
+fn header_value_to_dt(val: Option<&Value>) -> Result<Option<NaiveDateTime>> {
     match val {
-        &Value::String(ref s) => {
+        Some(&Value::String(ref s)) => {
             NaiveDateTime::parse_from_str(s, DATE_TIME_FORMAT)
                 .map_err_into(TTEK::DateTimeParserError)
+                .map(Some)
         },
-        _ => Err(TTEK::HeaderFieldTypeError.into_error())
+        Some(_) => Err(TTEK::HeaderFieldTypeError.into_error()),
+        None => Ok(None),
     }
 }

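The timetracking hunks change `get_start_datetime`, `get_end_datetime`, and `header_value_to_dt` to return `Result<Option<NaiveDateTime>>`, so an entry without a timestamp is a regular value rather than an error. A sketch of what callers likely have to do with that shape; the `get_end_datetime` below is a local stand-in mimicking the new signature, not the trait method itself, and the format string is illustrative rather than the crate's `DATE_TIME_FORMAT` constant:

```rust
extern crate chrono;

use chrono::NaiveDateTime;

// Stand-in mimicking the assumed Result<Option<NaiveDateTime>> signature: the
// raw header value is simulated by an Option<&str>.
fn get_end_datetime(raw: Option<&str>) -> Result<Option<NaiveDateTime>, String> {
    match raw {
        Some(s) => NaiveDateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%S")
            .map(Some)
            .map_err(|e| e.to_string()),
        None => Ok(None),
    }
}

fn main() {
    // A tracking that is still running simply has no end timestamp now,
    // instead of surfacing as an error the caller must inspect by kind.
    match get_end_datetime(None) {
        Ok(Some(end)) => println!("ended at {}", end),
        Ok(None) => println!("still running"),
        Err(e) => println!("broken entry: {}", e),
    }
}
```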
@@ -18,7 +18,7 @@ semver = "0.2"
 task-hookrs = "0.2.2"
 uuid = "0.3"
 toml = "0.4.*"
-toml-query = "0.2.*"
+toml-query = "0.3.*"
 is-match = "0.1.*"
 log = "0.3"
 serde_json = "0.8"