Merge pull request #973 from matthiasbeyer/libimagstore/io-backend-knows-format
Libimagstore/io backend knows format
Commit e75c37fbb2

8 changed files with 113 additions and 131 deletions
@@ -201,12 +201,6 @@ The `InMemoryFileAbstractionInstance` implementation corresponds to
the `InMemoryFileAbstraction` implementation - for the in-memory
"filesystem".

The implementation of the `get_file_content()` function had to be
changed to return a `String` rather than a `&mut Read` because of
lifetime issues.
This change is store-internal and the API of the store itself was not
affected.

## The StdIo backend {#sec:thestore:backends:stdio}

Sidenote: The name is "StdIo" because its main purpose is Stdin/Stdio, but it
@@ -231,42 +225,29 @@ implementation are possible.

The following section assumes a JSON mapper.

The backends themselves do not know "header" and "content" - they know only
blobs which live in paths.
Indeed, this "backend" code does not serve "content" or "header" to the `Store`
implementation, but only a blob of bytes.
Anyway, the JSON protocol for passing a store around _does_ know about content
and header (see @sec:thestore:backends:stdio:json for the JSON format).

So the mapper reads the JSON, parses it (thanks serde!) and translates it to
TOML, because TOML is the Store representation of a header.
But because the backend does not serve header and content, but only a blob,
this TOML is then translated (with the content of the respective file) to a
blob.
The mapper reads the JSON, parses it (thanks serde!) and translates it to
an `Entry`, which is the in-memory representation of the files.
The `Entry` contains a `Header` part and a `Content` part.

This is then made available to the store codebase.
This is complex and probably slow, we know.

To summarize what we do right now, let's have a look at the awesome ASCII art
below:

```
                                libimag*
                                   |
                                   v
 IO     Mapper          FS       Store      FS        Mapper     IO
+--+-------------+---------+--------+---------+--------------+--+
|  |             |         |        |         |              |  |
JSON  ->  TOML   ->  String  ->  Entry  ->  String  ->  TOML  ->  JSON
             + TOML
             + Content

                 libimag*
                    |
                    v
 IO     Mapper        Store      Mapper   IO
+--+---------+----------------+--------+--+
|  |         |                |        |  |
JSON   ->  Entry            ->   JSON
         + Header
         + Content
```

This is what gets translated where for one imag call with a stdio store backend.

The rationale behind this implementation is that this is the best implementation
we can have in a relatively short amount of time.

### The JSON Mapper {#sec:thestore:backends:stdio:json}

The JSON mapper maps JSON which is read from a source into a HashMap which
@@ -278,7 +259,7 @@ The structure is as follows:

{
    "version": "0.3.0",
    "store": {
        "/example": {
        "example": {
            "header": {
                "imag": {
                    "version": "0.3.0",
@@ -292,24 +273,14 @@ The structure is as follows:

### TODO {#sec:thestore:backends:todo}

Of course, the above is not optimal.
The TODO here is almost visible: Implement a proper backend where we do not need
to translate between types all the time.
If you look at the version history of this file you will see that this
implementation has grown from something complex and probably slow to what we
have today.

The first goal would be to reduce the above figure to:

```
           libimag*
              |
              v
 IO    Mapper   Store  Mapper  IO
+--+------+--------+-------+--+
|  |      |        |       |  |
JSON  ->  Entry  ->  JSON
       + TOML
       + Content
```

and the second step would be to abstract all the things away so the `libimag*`
crates handle the header without knowing whether it is JSON or TOML.
Still, there's one improvement we could make: abstract all the things away so
the `libimag*` crates handle the header without knowing whether it is JSON or
TOML.
With this, we would not even have to translate JSON to TOML anymore.
We should measure whether this would actually have any performance impact before
implementing it.
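To make the JSON interchange format above concrete, here is a small, self-contained sketch of the read direction. It mirrors the `Document`/`BackendEntry` shapes that appear in the JSON mapper changes further down, but it is simplified (plain `String` keys, no version check, a hypothetical `main`), not imag's actual code; it assumes the `serde`, `serde_json` and `toml` crates are available.

```rust
use std::collections::HashMap;

#[derive(serde::Deserialize)]
struct BackendEntry {
    header: serde_json::Value,
    content: String,
}

#[derive(serde::Deserialize)]
struct Document {
    version: String,
    store: HashMap<String, BackendEntry>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let json = r#"{
        "version": "0.3.0",
        "store": {
            "example": {
                "header": { "imag": { "version": "0.3.0" } },
                "content": "hi there!"
            }
        }
    }"#;

    // Parse the whole interchange document in one go (thanks serde!).
    let doc: Document = serde_json::from_str(json)?;
    println!("store format version: {}", doc.version);

    for (path, entry) in &doc.store {
        // serde_json::Value implements Serialize, so the JSON header can be
        // re-emitted as a TOML table directly.
        let header_as_toml = toml::to_string(&entry.header)?;
        // "---" framed header plus content, the entry text shape used by the
        // store's tests further down.
        let entry_text = format!("---\n{}---\n{}", header_as_toml, entry.content);
        println!("{}:\n{}", path, entry_text);
    }
    Ok(())
}
```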
@@ -25,6 +25,8 @@ use error::{MapErrInto, StoreError as SE, StoreErrorKind as SEK};

use super::FileAbstraction;
use super::FileAbstractionInstance;
use store::Entry;
use storeid::StoreId;

#[derive(Debug)]
pub enum FSFileAbstractionInstance {
@@ -37,7 +39,7 @@ impl FileAbstractionInstance for FSFileAbstractionInstance {
    /**
     * Get the content behind this file
     */
    fn get_file_content(&mut self) -> Result<String, SE> {
    fn get_file_content(&mut self, id: StoreId) -> Result<Entry, SE> {
        debug!("Getting lazy file: {:?}", self);
        let (file, path) = match *self {
            FSFileAbstractionInstance::File(ref mut f, _) => return {
@@ -50,6 +52,7 @@ impl FileAbstractionInstance for FSFileAbstractionInstance {
                f.read_to_string(&mut s)
                    .map_err_into(SEK::IoError)
                    .map(|_| s)
                    .and_then(|s| Entry::from_str(id, &s))
            },
            FSFileAbstractionInstance::Absent(ref p) =>
                (try!(open_file(p).map_err_into(SEK::FileNotFound)), p.clone()),
@@ -60,6 +63,7 @@ impl FileAbstractionInstance for FSFileAbstractionInstance {
            f.read_to_string(&mut s)
                .map_err_into(SEK::IoError)
                .map(|_| s)
                .and_then(|s| Entry::from_str(id, &s))
        } else {
            unreachable!()
        }
@@ -68,22 +72,25 @@ impl FileAbstractionInstance for FSFileAbstractionInstance {
    /**
     * Write the content of this file
     */
    fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {
    fn write_file_content(&mut self, buf: &Entry) -> Result<(), SE> {
        use std::io::Write;

        let buf = buf.to_str().into_bytes();

        let (file, path) = match *self {
            FSFileAbstractionInstance::File(ref mut f, _) => return {
                // We seek to the beginning of the file since we expect each
                // access to the file to be in a different context
                try!(f.seek(SeekFrom::Start(0))
                    .map_err_into(SEK::FileNotCreated));
                f.write_all(buf).map_err_into(SEK::FileNotWritten)
                f.write_all(&buf).map_err_into(SEK::FileNotWritten)
            },
            FSFileAbstractionInstance::Absent(ref p) =>
                (try!(create_file(p).map_err_into(SEK::FileNotCreated)), p.clone()),
        };
        *self = FSFileAbstractionInstance::File(file, path);
        if let FSFileAbstractionInstance::File(ref mut f, _) = *self {
            return f.write_all(buf).map_err_into(SEK::FileNotWritten);
            return f.write_all(&buf).map_err_into(SEK::FileNotWritten);
        }
        unreachable!();
    }
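The hunks above keep an already-open file handle around and seek back to the start before every write. A rough, standard-library-only sketch of that pattern follows (simplified, not imag's types; the `/tmp` path is arbitrary and the `set_len(0)` truncation is an extra precaution that the diff above does not include).

```rust
use std::fs::{File, OpenOptions};
use std::io::{Read, Seek, SeekFrom, Write};

fn write_entry_text(file: &mut File, entry_text: &str) -> std::io::Result<()> {
    // Rewind first: the handle may already have been used in another context.
    file.seek(SeekFrom::Start(0))?;
    // Not in the diff above: truncate so a shorter entry fully replaces a longer one.
    file.set_len(0)?;
    file.write_all(entry_text.as_bytes())
}

fn read_entry_text(file: &mut File) -> std::io::Result<String> {
    file.seek(SeekFrom::Start(0))?;
    let mut s = String::new();
    file.read_to_string(&mut s)?;
    Ok(s)
}

fn main() -> std::io::Result<()> {
    let mut f = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .open("/tmp/imag-sketch-entry")?;
    write_entry_text(&mut f, "---\n[imag]\nversion = \"0.3.0\"\n---\nHello World\n")?;
    println!("{}", read_entry_text(&mut f)?);
    Ok(())
}
```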
@@ -19,8 +19,6 @@

use error::StoreError as SE;
use error::StoreErrorKind as SEK;
use std::io::Read;
use std::io::Cursor;
use std::path::PathBuf;
use std::collections::HashMap;
use std::sync::Mutex;
@@ -31,9 +29,10 @@ use libimagerror::into::IntoError;

use super::FileAbstraction;
use super::FileAbstractionInstance;
use error::MapErrInto;
use store::Entry;
use storeid::StoreId;

type Backend = Arc<Mutex<RefCell<HashMap<PathBuf, Cursor<Vec<u8>>>>>>;
type Backend = Arc<Mutex<RefCell<HashMap<PathBuf, Entry>>>>;

/// `FileAbstraction` type, this is the Test version!
///
@@ -60,41 +59,28 @@ impl FileAbstractionInstance for InMemoryFileAbstractionInstance {
    /**
     * Get the mutable file behind a InMemoryFileAbstraction object
     */
    fn get_file_content(&mut self) -> Result<String, SE> {
    fn get_file_content(&mut self, _: StoreId) -> Result<Entry, SE> {
        debug!("Getting lazy file: {:?}", self);

        let p = self.absent_path.clone();
        match self.fs_abstraction.lock() {
            Ok(mut mtx) => {
                mtx.get_mut()
                    .get_mut(&p)
                    .get(&p)
                    .cloned()
                    .ok_or(SEK::FileNotFound.into_error())
                    .and_then(|t| {
                        let mut s = String::new();
                        t.read_to_string(&mut s)
                            .map_err_into(SEK::IoError)
                            .map(|_| s)
                    })
            }

            Err(_) => Err(SEK::LockError.into_error())
        }
    }

    fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {
    fn write_file_content(&mut self, buf: &Entry) -> Result<(), SE> {
        match *self {
            InMemoryFileAbstractionInstance { ref absent_path, .. } => {
                let mut mtx = self.fs_abstraction.lock().expect("Locking Mutex failed");
                let mut backend = mtx.get_mut();

                if let Some(ref mut cur) = backend.get_mut(absent_path) {
                    let mut vec = cur.get_mut();
                    vec.clear();
                    vec.extend_from_slice(buf);
                    return Ok(());
                }
                let vec = Vec::from(buf);
                backend.insert(absent_path.clone(), Cursor::new(vec));
                let _ = backend.insert(absent_path.clone(), buf.clone());
                return Ok(());
            },
        };
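For the in-memory backend, the essential idea is a map of path to entry behind `Arc<Mutex<RefCell<..>>>`, where reads hand out clones and writes insert clones. A minimal stand-alone sketch of that pattern with a simplified `Entry` type (not imag's):

```rust
use std::cell::RefCell;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};

#[derive(Clone, Debug, PartialEq)]
struct Entry {
    header: String,
    content: String,
}

// Same shape as the Backend alias in the diff above, with a simplified value type.
type Backend = Arc<Mutex<RefCell<HashMap<PathBuf, Entry>>>>;

fn get(backend: &Backend, path: &PathBuf) -> Option<Entry> {
    let guard = backend.lock().ok()?;
    let map = guard.borrow();
    // Reads hand out an owned clone, like `.get(&p).cloned()` above.
    map.get(path).cloned()
}

fn put(backend: &Backend, path: PathBuf, entry: &Entry) {
    let guard = backend.lock().expect("Locking Mutex failed");
    guard.borrow_mut().insert(path, entry.clone());
}

fn main() {
    let backend: Backend = Arc::new(Mutex::new(RefCell::new(HashMap::new())));
    let e = Entry { header: "[imag]".into(), content: "Hello World".into() };
    put(&backend, PathBuf::from("tests/test1"), &e);
    assert_eq!(get(&backend, &PathBuf::from("tests/test1")), Some(e));
    println!("in-memory backend round-trip ok");
}
```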
@@ -21,7 +21,8 @@ use std::path::PathBuf;
use std::fmt::Debug;

use error::StoreError as SE;

use store::Entry;
use storeid::StoreId;

mod fs;
mod inmemory;
@@ -44,27 +45,43 @@ pub trait FileAbstraction : Debug {

/// An abstraction trait over actions on files
pub trait FileAbstractionInstance : Debug {
    fn get_file_content(&mut self) -> Result<String, SE>;
    fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE>;

    /// Get the contents of the FileAbstractionInstance, as Entry object.
    ///
    /// The `StoreId` is passed because the backend does not know where the Entry lives, but the
    /// Entry type itself must be constructed with the id.
    fn get_file_content(&mut self, id: StoreId) -> Result<Entry, SE>;
    fn write_file_content(&mut self, buf: &Entry) -> Result<(), SE>;
}

#[cfg(test)]
mod test {
    use std::path::PathBuf;

    use super::FileAbstractionInstance;
    use super::inmemory::InMemoryFileAbstraction;
    use super::inmemory::InMemoryFileAbstractionInstance;
    use std::path::PathBuf;
    use storeid::StoreId;
    use store::Entry;

    #[test]
    fn lazy_file() {
        let fs = InMemoryFileAbstraction::new();

        let mut path = PathBuf::from("/tests");
        let mut path = PathBuf::from("tests");
        path.set_file_name("test1");
        let mut lf = InMemoryFileAbstractionInstance::new(fs.backend().clone(), path);
        lf.write_file_content(b"Hello World").unwrap();
        let bah = lf.get_file_content().unwrap();
        assert_eq!(bah, "Hello World");
        let mut lf = InMemoryFileAbstractionInstance::new(fs.backend().clone(), path.clone());

        let loca = StoreId::new_baseless(path).unwrap();
        let file = Entry::from_str(loca.clone(), r#"---
[imag]
version = "0.3.0"
---
Hello World"#).unwrap();

        lf.write_file_content(&file).unwrap();
        let bah = lf.get_file_content(loca).unwrap();
        assert_eq!(bah.get_content(), "Hello World");
    }

}
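The test above round-trips an entry whose textual form is a TOML header framed by `---` lines, followed by the content. As a rough illustration of what an `Entry::from_str`-style constructor has to do with that text, here is a simplified, assumption-laden parser (imag's real constructor also validates the header; this sketch only splits the two parts):

```rust
// Split "---\n<toml header>\n---\n<content>" into its header and content parts.
fn split_entry_text(input: &str) -> Option<(String, String)> {
    let rest = input.strip_prefix("---\n")?;
    let (header, content) = rest.split_once("\n---\n")?;
    Some((header.to_string(), content.to_string()))
}

fn main() {
    let text = "---\n[imag]\nversion = \"0.3.0\"\n---\nHello World";
    let (header, content) = split_entry_text(text).expect("malformed entry text");
    assert_eq!(content, "Hello World");
    assert!(header.contains("version = \"0.3.0\""));
    println!("header:\n{}\ncontent:\n{}", header, content);
}
```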
@@ -18,7 +18,6 @@
//

use std::collections::HashMap;
use std::io::Cursor;
use std::io::{Read, Write};
use std::path::PathBuf;

@@ -29,16 +28,18 @@ use error::StoreErrorKind as SEK;
use error::MapErrInto;
use super::Mapper;
use store::Result;
use store::Entry;
use storeid::StoreId;

use libimagerror::into::IntoError;

#[derive(Deserialize, Serialize)]
struct Entry {
struct BackendEntry {
    header: serde_json::Value,
    content: String,
}

impl Entry {
impl BackendEntry {

    fn to_string(self) -> Result<String> {
        toml::to_string(&self.header)
@@ -55,7 +56,7 @@ impl Entry {
#[derive(Deserialize, Serialize)]
struct Document {
    version: String,
    store: HashMap<PathBuf, Entry>,
    store: HashMap<PathBuf, BackendEntry>,
}

pub struct JsonMapper;
|
|||
}
|
||||
|
||||
impl Mapper for JsonMapper {
|
||||
fn read_to_fs<R: Read>(&self, r: &mut R, hm: &mut HashMap<PathBuf, Cursor<Vec<u8>>>) -> Result<()> {
|
||||
fn read_to_fs<R: Read>(&self, r: &mut R, hm: &mut HashMap<PathBuf, Entry>) -> Result<()> {
|
||||
let mut document = {
|
||||
let mut s = String::new();
|
||||
try!(r.read_to_string(&mut s).map_err_into(SEK::IoError));
|
||||
|
@@ -93,7 +94,11 @@ impl Mapper for JsonMapper {
        for (key, val) in document.store.drain() {
            let res = val
                .to_string()
                .map(|vals| hm.insert(key, Cursor::new(vals.into_bytes())))
                .and_then(|vals| {
                    StoreId::new_baseless(key.clone())
                        .and_then(|id| Entry::from_str(id, &vals))
                        .map(|entry| hm.insert(key, entry))
                })
                .map(|_| ());

            let _ = try!(res);
@@ -102,43 +107,38 @@ impl Mapper for JsonMapper {
        Ok(())
    }

    fn fs_to_write<W: Write>(&self, hm: &mut HashMap<PathBuf, Cursor<Vec<u8>>>, out: &mut W) -> Result<()> {
        use util::entry_buffer_to_header_content;

        #[derive(Serialize, Deserialize)]
        struct Entry {
    fn fs_to_write<W: Write>(&self, hm: &mut HashMap<PathBuf, Entry>, out: &mut W) -> Result<()> {
        #[derive(Serialize)]
        struct BackendEntry {
            header: ::toml::Value,
            content: String,
        }

        impl BackendEntry {
            fn construct_from(e: Entry) -> BackendEntry {
                BackendEntry {
                    header: e.get_header().clone(),
                    content: e.get_content().clone(),
                }
            }
        }

        #[derive(Serialize)]
        struct OutDocument {
            version: String,
            store: HashMap<PathBuf, Entry>,
            store: HashMap<PathBuf, BackendEntry>,
        }

        let mut doc = OutDocument {
            version: String::from(version!()),
            store: HashMap::new(),
        };

        let mut store = HashMap::new();
        for (key, value) in hm.drain() {
            let res = String::from_utf8(value.into_inner())
                .map_err_into(SEK::IoError)
                .and_then(|buf| entry_buffer_to_header_content(&buf))
                .map(|(header, content)| {
                    let entry = Entry {
                        header: header,
                        content: content
                    };

                    doc.store.insert(key, entry);
                })
                .map(|_| ());

            let _ = try!(res);
            store.insert(key, BackendEntry::construct_from(value));
        }

        let doc = OutDocument {
            version: String::from(version!()),
            store: store,
        };

        serde_json::to_string(&doc)
            .map_err_into(SEK::IoError)
            .and_then(|json| out.write(&json.into_bytes()).map_err_into(SEK::IoError))
@@ -158,7 +158,7 @@ mod test {
        let json = r#"
{ "version": "0.3.0",
  "store": {
    "/example": {
    "example": {
        "header": {
            "imag": {
                "version": "0.3.0"
@@ -191,7 +191,10 @@ mod test {
version = "0.3.0"
---
hi there!"#;
        hm.insert(PathBuf::from("/example"), Cursor::new(String::from(content).into_bytes()));

        let id = PathBuf::from("example");
        let entry = Entry::from_str(id.clone(), content).unwrap();
        hm.insert(id, entry);
        hm
    };
@@ -202,7 +205,7 @@ hi there!"#;
{
    "version": "0.3.0",
    "store": {
        "/example": {
        "example": {
            "header": {
                "imag": {
                    "version": "0.3.0"
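The output direction of the mapper collects one serializable record per entry and emits a single JSON document. A compact sketch of that idea follows, reusing the `BackendEntry`/`OutDocument` names from the diff but otherwise simplified (String keys, hard-coded data, hypothetical `main`); it assumes the `serde`, `serde_json` and `toml` crates.

```rust
use std::collections::HashMap;

#[derive(serde::Serialize)]
struct BackendEntry {
    header: toml::Value,
    content: String,
}

#[derive(serde::Serialize)]
struct OutDocument {
    version: String,
    store: HashMap<String, BackendEntry>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // In memory the header of an entry is TOML ...
    let header: toml::Value = toml::from_str("[imag]\nversion = \"0.3.0\"\n")?;

    let mut store = HashMap::new();
    store.insert(
        "example".to_string(),
        BackendEntry { header, content: "hi there!".to_string() },
    );

    // ... and the whole store is written out as one JSON document, e.g. to stdout.
    let doc = OutDocument { version: "0.3.0".to_string(), store };
    println!("{}", serde_json::to_string(&doc)?);
    Ok(())
}
```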
@@ -18,14 +18,14 @@
//

use std::collections::HashMap;
use std::io::Cursor;
use std::io::{Read, Write};
use std::path::PathBuf;
use store::Result;
use store::Entry;

pub trait Mapper {
    fn read_to_fs<R: Read>(&self, &mut R, &mut HashMap<PathBuf, Cursor<Vec<u8>>>) -> Result<()>;
    fn fs_to_write<W: Write>(&self, &mut HashMap<PathBuf, Cursor<Vec<u8>>>, &mut W) -> Result<()>;
    fn read_to_fs<R: Read>(&self, &mut R, &mut HashMap<PathBuf, Entry>) -> Result<()>;
    fn fs_to_write<W: Write>(&self, &mut HashMap<PathBuf, Entry>, &mut W) -> Result<()>;
}

pub mod json;
@@ -23,7 +23,6 @@ use std::collections::HashMap;
use std::fmt::Debug;
use std::fmt::Error as FmtError;
use std::fmt::Formatter;
use std::io::Cursor;
use std::io::{Read, Write};
use std::path::PathBuf;
use std::sync::Arc;
@@ -37,12 +36,13 @@ use error::StoreError as SE;
use super::FileAbstraction;
use super::FileAbstractionInstance;
use super::InMemoryFileAbstraction;
use store::Entry;

pub mod mapper;
use self::mapper::Mapper;

// Because this is not exported in super::inmemory;
type Backend = Arc<Mutex<RefCell<HashMap<PathBuf, Cursor<Vec<u8>>>>>>;
type Backend = Arc<Mutex<RefCell<HashMap<PathBuf, Entry>>>>;

pub struct StdIoFileAbstraction<W: Write, M: Mapper> {
    mapper: M,
@@ -83,7 +83,7 @@ impl<W, M> StdIoFileAbstraction<W, M>
    }

    pub fn backend(&self) -> &Backend {
        &self.mem.backend()
        self.mem.backend()
    }

}
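The `StdIoFileAbstraction` above pairs an in-memory backend with a `Mapper` that fills the map from a reader once and writes it back to a writer at the end. The stand-alone sketch below shows that control flow with a toy line-based mapper; the trait shape is modelled on the `Mapper` trait in this PR, but everything else (types, wire format, `LineMapper`) is made up for illustration.

```rust
use std::collections::HashMap;
use std::io::{Read, Write};
use std::path::PathBuf;

type Map = HashMap<PathBuf, String>;

trait Mapper {
    fn read_to_fs<R: Read>(&self, r: &mut R, map: &mut Map) -> std::io::Result<()>;
    fn fs_to_write<W: Write>(&self, map: &mut Map, w: &mut W) -> std::io::Result<()>;
}

/// Toy mapper: one entry per line, "path<TAB>content".
struct LineMapper;

impl Mapper for LineMapper {
    fn read_to_fs<R: Read>(&self, r: &mut R, map: &mut Map) -> std::io::Result<()> {
        let mut s = String::new();
        r.read_to_string(&mut s)?;
        for line in s.lines() {
            if let Some((path, content)) = line.split_once('\t') {
                map.insert(PathBuf::from(path), content.to_string());
            }
        }
        Ok(())
    }

    fn fs_to_write<W: Write>(&self, map: &mut Map, w: &mut W) -> std::io::Result<()> {
        for (path, content) in map.iter() {
            writeln!(w, "{}\t{}", path.display(), content)?;
        }
        Ok(())
    }
}

fn main() -> std::io::Result<()> {
    let mapper = LineMapper;
    let mut map = Map::new();

    // "stdin": everything is read and mapped into memory up front.
    let mut input: &[u8] = b"example\thi there!\n";
    mapper.read_to_fs(&mut input, &mut map)?;

    // ... the store would now operate on `map` ...

    // "stdout": the whole map is written back out at the end.
    mapper.fs_to_write(&mut map, &mut std::io::stdout())?;
    Ok(())
}
```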
@@ -158,13 +158,11 @@ impl StoreEntry {
    }

    fn get_entry(&mut self) -> Result<Entry> {
        let id = &self.id.clone();
        if !self.is_borrowed() {
            self.file
                .get_file_content()
                .and_then(|content| Entry::from_str(id.clone(), &content))
                .get_file_content(self.id.clone())
                .or_else(|err| if err.err_type() == SEK::FileNotFound {
                    Ok(Entry::new(id.clone()))
                    Ok(Entry::new(self.id.clone()))
                } else {
                    Err(err)
                })
@@ -176,7 +174,7 @@ impl StoreEntry {
    fn write_entry(&mut self, entry: &Entry) -> Result<()> {
        if self.is_borrowed() {
            assert_eq!(self.id, entry.location);
            self.file.write_file_content(entry.to_str().as_bytes())
            self.file.write_file_content(entry)
                .map_err_into(SEK::FileError)
                .map(|_| ())
        } else {
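Finally, `get_entry` above treats a missing file as "create a fresh, empty entry for this id" rather than as an error. A small sketch of that fallback pattern, with plain `std::io` errors standing in for the store's error types:

```rust
use std::io::{Error, ErrorKind};

#[derive(Debug)]
struct Entry {
    id: String,
    content: String,
}

impl Entry {
    fn new(id: String) -> Entry {
        Entry { id, content: String::new() }
    }
}

fn load_from_backend(id: &str) -> Result<Entry, Error> {
    // Stand-in for FileAbstractionInstance::get_file_content(id).
    Err(Error::new(ErrorKind::NotFound, format!("{} does not exist yet", id)))
}

fn get_entry(id: &str) -> Result<Entry, Error> {
    load_from_backend(id).or_else(|err| {
        if err.kind() == ErrorKind::NotFound {
            // A missing file is not an error here: hand back a new, empty entry.
            Ok(Entry::new(id.to_string()))
        } else {
            Err(err)
        }
    })
}

fn main() {
    let entry = get_entry("example").expect("only NotFound is recovered from");
    println!("got entry {:?}, content empty: {}", entry.id, entry.content.is_empty());
}
```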