use std::collections::HashMap;
use Error;
use crypto;
use decode::*;
use encode;
use document::parse_schema_hash;
use validator::{query_check, ValidObj, Validator, ValidReader, ValidatorChecklist};
use {MAX_DOC_SIZE, MAX_ENTRY_SIZE, Value, Entry, Hash, Document, Query, MarkerType, CompressType};
use checklist::{ChecklistItem, Checklist};
use super::zstd_help;
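/// A parsed and compiled schema, ready to validate, encode, and decode documents,
/// entries, and queries. Holds the validators parsed from the schema document,
/// along with reusable zstd compression and decompression contexts.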
pub struct Schema {
hash: Hash,
object: ValidObj,
object_valid: bool,
entries: Vec<(String, usize)>,
types: Vec<Validator>,
compressor: zstd_safe::CCtx<'static>,
decompressor: zstd_safe::DCtx<'static>,
doc_compress: Compression,
entries_compress: Vec<(String, Compression)>,
}
impl Schema {
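/// Compile a schema from a fog-pack document. The document's hash becomes the
/// schema's hash.
///
/// # Example
/// A minimal sketch, mirroring the tests at the bottom of this file and assuming
/// the `fogpack!` macro is in scope:
///
/// ```ignore
/// let schema_doc = Document::new(fogpack!({
///     "req": { "title": { "type": "Str" } }
/// })).unwrap();
/// let mut schema = Schema::from_doc(schema_doc).unwrap();
/// let doc = Document::new(fogpack!({
///     "": Value::from(schema.hash().clone()),
///     "title": "A Test"
/// })).unwrap();
/// let encoded = schema.encode_doc(doc).unwrap();
/// ```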
pub fn from_doc(doc: Document) -> crate::Result<Self> {
Self::from_raw(&mut doc.raw_doc(), Some(doc.hash().clone()))
}
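/// Parse a schema from raw bytes. If no hash is provided, one is computed from
/// the raw bytes. Recognized top-level fields are parsed into validators and
/// compression settings; any unrecognized field fails validation.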
fn from_raw(raw: &mut &[u8], hash: Option<Hash>) -> crate::Result<Self> {
let hash = if let Some(hash) = hash {
hash
}
else {
let raw_for_hash: &[u8] = raw;
Hash::new(raw_for_hash)
};
let mut entries = Vec::new();
let mut types = Vec::with_capacity(2);
let mut type_names = HashMap::new();
let mut object = ValidObj::new_schema();
let mut object_valid = true;
let mut doc_compress = Default::default();
let mut entries_compress = Vec::new();
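// Indices 0 and 1 are reserved: 0 is the always-failing validator, 1 the always-passing one.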
types.push(Validator::Invalid);
types.push(Validator::Valid);
let mut reader = ValidReader::new(false, &mut types, &mut type_names, &hash);
let num_fields = match read_marker(raw)? {
MarkerType::Object(len) => len,
_ => return Err(Error::BadEncode(raw.len(), "Schema document doesn't contain an Object")),
};
object_iterate(raw, num_fields, |field, raw| {
match field {
"" => {
read_hash(raw)?;
},
"doc_compress" => {
doc_compress = Compression::read_raw(raw)?;
}
"description" => {
read_str(raw)?;
},
"name" => {
read_str(raw)?;
},
"version" => {
read_integer(raw)?;
},
"entries" => {
if let MarkerType::Object(len) = read_marker(raw)? {
object_iterate(raw, len, |field, raw| {
let v = Validator::read_validator(raw, &mut reader)?;
entries.push((field.to_string(), v));
Ok(())
})?;
}
else {
return Err(Error::FailValidate(raw.len(), "`entries` field doesn't contain an Object"));
}
},
"entries_compress" => {
if let MarkerType::Object(len) = read_marker(raw)? {
object_iterate(raw, len, |field, raw| {
let c = Compression::read_raw(raw)?;
entries_compress.push((field.to_string(), c));
Ok(())
})?;
}
else {
return Err(Error::FailValidate(raw.len(), "`entries_compress` field doesn't contain an Object"));
}
},
"field_type" | "max_fields" | "min_fields" | "req" | "opt" | "ban" | "unknown_ok" => {
let valid = object.update(field, raw, &mut reader)?;
object_valid = object_valid && valid;
},
"types" => {
if let MarkerType::Object(len) = read_marker(raw)? {
object_iterate(raw, len, |field, raw| {
let v = Validator::read_validator(raw, &mut reader)?;
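// If the parsed validator was appended at the end of the list, pop it and,
// unless the field is a base type name (which can't be overridden), store it
// in the slot reserved for this named type.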
if v == (reader.types.len() - 1) {
let v = reader.types.pop();
match field {
"Null" | "Bool" | "Int" | "Str" | "F32" | "F64" | "Bin" |
"Array" | "Obj" | "Hash" | "Ident" | "Lock" | "Time" | "Multi" => (),
_ => {
if let Some(index) = reader.type_names.get(field) {
reader.types[*index] = v.unwrap();
}
}
}
}
Ok(())
})?;
}
else {
return Err(Error::FailValidate(raw.len(), "`types` field doesn't contain an Object"));
}
}
_ => {
return Err(Error::FailValidate(raw.len(), "Unrecognized field in schema document"));
}
}
Ok(())
})?;
object_valid = object_valid && object.finalize();
drop(reader);
Ok(Schema {
hash,
object,
object_valid,
entries,
types,
compressor: zstd_safe::create_cctx(),
decompressor: zstd_safe::create_dctx(),
doc_compress,
entries_compress,
})
}
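/// Get the hash of the schema document this was compiled from.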
pub fn hash(&self) -> &Hash {
&self.hash
}
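/// Validate a document against this schema and encode it, compressing according
/// to the document's own compression override if set, otherwise the schema's
/// `doc_compress` setting. Fails if the document doesn't list this schema or
/// doesn't pass validation.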
pub fn encode_doc(&mut self, doc: Document) -> crate::Result<Vec<u8>> {
let mut buf = Vec::new();
let len = doc.len();
let mut raw: &[u8] = doc.raw_doc();
assert!(len <= MAX_DOC_SIZE,
"Document was larger than maximum size! Document implementation should've made this impossible!");
if let Some(doc_schema) = doc.schema_hash() {
if doc_schema != self.hash() { return Err(Error::SchemaMismatch); }
}
else {
return Err(Error::SchemaMismatch);
}
if self.object_valid {
if !doc.validated() {
let mut checklist = ValidatorChecklist::new();
self.object.validate(&mut doc.raw_doc(), &self.types, &mut checklist, true)?;
}
}
else {
return Err(Error::FailValidate(doc.raw_doc().len(), "This schema cannot pass any documents"));
}
if doc.override_compression() {
if let Some(level) = doc.compression() {
CompressType::Compressed.encode(&mut buf);
let _ = parse_schema_hash(&mut raw)
.expect("Document has invalid data!")
.expect("Document doesn't have a schema hash!");
let header_len = doc.raw_doc().len() - raw.len();
buf.extend_from_slice(&doc.raw_doc()[..header_len]);
zstd_help::compress(&mut self.compressor, level, raw, &mut buf);
}
else {
CompressType::Uncompressed.encode(&mut buf);
buf.extend_from_slice(raw);
}
}
else {
match self.doc_compress {
Compression::NoCompress => {
CompressType::Uncompressed.encode(&mut buf);
}
Compression::Compress(_) => {
CompressType::Compressed.encode(&mut buf);
let _ = parse_schema_hash(&mut raw)
.expect("Document has invalid data!")
.expect("Document doesn't have a schema hash!");
let header_len = doc.raw_doc().len() - raw.len();
buf.extend_from_slice(&doc.raw_doc()[..header_len]);
}
Compression::DictCompress(_) => {
CompressType::DictCompressed.encode(&mut buf);
let _ = parse_schema_hash(&mut raw)
.expect("Document has invalid data!")
.expect("Document doesn't have a schema hash!");
let header_len = doc.raw_doc().len() - raw.len();
buf.extend_from_slice(&doc.raw_doc()[..header_len]);
}
}
self.doc_compress.compress(&mut self.compressor, raw, &mut buf);
}
Ok(buf)
}
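/// Decode a document that is trusted to be valid, skipping validation and
/// signature verification. If a hash is provided, hash computation is skipped
/// as well.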
pub fn trusted_decode_doc(&mut self, buf: &mut &[u8], hash: Option<Hash>) -> crate::Result<Document> {
let mut buf_ptr: &[u8] = buf;
let compress_type = CompressType::decode(&mut buf_ptr)?;
if let CompressType::CompressedNoSchema = compress_type {
return Err(Error::SchemaMismatch);
}
let mut doc = Vec::new();
let mut buf_post_hash: &[u8] = buf_ptr;
parse_schema_hash(&mut buf_post_hash)?;
doc.extend_from_slice(&buf_ptr[..(buf_ptr.len()-buf_post_hash.len())]);
self.doc_compress.decompress(&mut self.decompressor, MAX_DOC_SIZE, compress_type, &mut buf_post_hash, &mut doc)?;
let doc_len = verify_value(&mut &doc[..])?;
let (hash_state, doc_hash, hash) = if let Some(hash) = hash {
(None, None, hash)
}
else {
let mut hash_state = crypto::HashState::new();
hash_state.update(&doc[..doc_len]);
let doc_hash = hash_state.get_hash();
let hash = if doc.len() > doc_len {
hash_state.update(&doc[doc_len..]);
hash_state.get_hash()
}
else {
doc_hash.clone()
};
(Some(hash_state), Some(doc_hash), hash)
};
let mut signed_by = Vec::new();
let mut index: &[u8] = &doc[doc_len..];
while !index.is_empty() {
let signature = crypto::Signature::decode(&mut index)?;
signed_by.push(signature.signed_by().clone());
}
let override_compression = false;
let compression = None;
let compressed = None;
Ok(Document::from_parts(
hash_state,
doc_hash,
hash,
doc_len,
doc,
compressed,
override_compression,
compression,
signed_by,
None
))
}
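/// Decode a document, fully validating it against this schema and verifying any
/// attached signatures.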
pub fn decode_doc(&mut self, buf: &mut &[u8]) -> crate::Result<Document> {
let mut buf_ptr: &[u8] = buf;
let mut doc = Vec::new();
let compress_type = CompressType::decode(&mut buf_ptr)?;
if let CompressType::CompressedNoSchema = compress_type {
return Err(Error::SchemaMismatch);
}
let mut buf_post_hash: &[u8] = buf_ptr;
parse_schema_hash(&mut buf_post_hash)?;
doc.extend_from_slice(&buf_ptr[..(buf_ptr.len()-buf_post_hash.len())]);
self.doc_compress.decompress(&mut self.decompressor, MAX_DOC_SIZE, compress_type, &mut buf_post_hash, &mut doc)?;
let mut doc_ptr: &[u8] = &doc[..];
if self.object_valid {
let mut checklist = ValidatorChecklist::new();
self.object.validate(&mut doc_ptr, &self.types, &mut checklist, true)?;
}
else {
return Err(Error::FailValidate(doc.len(), "This schema cannot pass any documents"));
}
let doc_len = doc.len() - doc_ptr.len();
let mut hash_state = crypto::HashState::new();
hash_state.update(&doc[..doc_len]);
let doc_hash = hash_state.get_hash();
let hash = if doc.len() > doc_len {
hash_state.update(&doc[doc_len..]);
hash_state.get_hash()
}
else {
doc_hash.clone()
};
let mut signed_by = Vec::new();
while !doc_ptr.is_empty() {
let signature = crypto::Signature::decode(&mut doc_ptr)?;
if !signature.verify(&doc_hash) {
return Err(Error::BadSignature);
}
signed_by.push(signature.signed_by().clone());
}
let override_compression = false;
let compression = None;
let compressed = None;
Ok(Document::from_parts(
Some(hash_state),
Some(doc_hash),
hash,
doc_len,
doc,
compressed,
override_compression,
compression,
signed_by,
None
))
}
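/// Validate an entry against this schema's `entries` validators and encode it,
/// compressing according to the entry's override or the schema's
/// `entries_compress` settings. Returns the encoded bytes wrapped in a checklist
/// of hash-linked documents that must still be checked before the entry is fully
/// validated.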
pub fn encode_entry(&mut self, entry: Entry) -> crate::Result<Checklist<Vec<u8>>> {
let mut buf = Vec::new();
let len = entry.len();
let raw: &[u8] = entry.raw_entry();
assert!(len <= MAX_ENTRY_SIZE,
"Entry was larger than maximum size! Entry implementation should've made this impossible!");
let checklist = if !entry.validated() {
let mut checklist = ValidatorChecklist::new();
let index = self.entries.binary_search_by(|x| x.0.as_str().cmp(entry.field()));
if let Ok(index) = index {
let validator = self.entries[index].1;
self.types[validator].validate(&mut entry.raw_entry(), &self.types, 0, &mut checklist)?;
}
else {
return Err(Error::FailValidate(len, "Entry field is not in schema"));
}
checklist
}
else {
ValidatorChecklist::new()
};
if entry.override_compression() {
if let Some(level) = entry.compression() {
CompressType::CompressedNoSchema.encode(&mut buf);
zstd_help::compress(&mut self.compressor, level, raw, &mut buf);
}
else {
CompressType::Uncompressed.encode(&mut buf);
buf.extend_from_slice(raw);
}
}
else {
let compress = self.entries_compress.binary_search_by(|x| x.0.as_str().cmp(entry.field()));
if let Ok(compress_index) = compress {
let compress = &self.entries_compress[compress_index].1;
match compress {
Compression::NoCompress => {
CompressType::Uncompressed.encode(&mut buf);
}
Compression::Compress(_) => {
CompressType::CompressedNoSchema.encode(&mut buf);
}
Compression::DictCompress(_) => {
CompressType::DictCompressed.encode(&mut buf);
}
}
compress.compress(&mut self.compressor, raw, &mut buf);
}
else {
CompressType::CompressedNoSchema.encode(&mut buf);
zstd_help::compress(&mut self.compressor, zstd_safe::CLEVEL_DEFAULT, raw, &mut buf);
}
}
Ok(Checklist::new(checklist, buf))
}
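/// Decode an entry that is trusted to be valid, skipping validation and
/// signature verification. If no hash is provided, the entry's hash state is
/// populated after construction.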
pub fn trusted_decode_entry(&mut self, buf: &mut &[u8], doc: Hash, field: String, hash: Option<Hash>) -> crate::Result<Entry> {
let mut buf_ptr: &[u8] = buf;
let mut entry = Vec::new();
let compress_type = CompressType::decode(&mut buf_ptr)?;
match compress_type {
CompressType::Compressed => {
return Err(Error::FailValidate(buf_ptr.len(), "Entries don't allow a compression header with the schema included!"));
},
CompressType::Uncompressed => {
entry.extend_from_slice(buf_ptr);
},
CompressType::CompressedNoSchema => {
zstd_help::decompress(&mut self.decompressor, MAX_ENTRY_SIZE, &mut buf_ptr, &mut entry)?;
},
CompressType::DictCompressed => {
let compress = self.entries_compress.binary_search_by(|x| x.0.as_str().cmp(&field))
.map_err(|_| Error::FailDecompress)?;
let compress = &self.entries_compress[compress].1;
compress.decompress(&mut self.decompressor, MAX_ENTRY_SIZE, compress_type, &mut buf_ptr, &mut entry)?;
}
}
let entry_len = verify_value(&mut &entry[..])?;
let hash_provided = hash.is_some();
let hash = hash.unwrap_or_else(Hash::new_empty);
let mut signed_by = Vec::new();
let mut index: &[u8] = &entry[entry_len..];
while !index.is_empty() {
let signature = crypto::Signature::decode(&mut index)?;
signed_by.push(signature.signed_by().clone());
}
let override_compression = false;
let compression = None;
let compressed = None;
let mut entry = Entry::from_parts(
None,
None,
hash,
doc,
field,
entry_len,
entry,
signed_by,
compressed,
override_compression,
compression,
);
if !hash_provided {
entry.populate_hash_state();
}
Ok(entry)
}
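/// Decode an entry, fully validating it against the schema's validator for its
/// field and verifying any attached signatures. Returns the entry wrapped in a
/// checklist of remaining hash-link checks.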
pub fn decode_entry(&mut self, buf: &mut &[u8], doc: Hash, field: String) -> crate::Result<Checklist<Entry>> {
let mut buf_ptr: &[u8] = buf;
let mut entry = Vec::new();
let validator = self.entries.binary_search_by(|x| x.0.as_str().cmp(&field))
.map_err(|_| Error::FailValidate(buf.len(), "Entry field is not in schema"))?;
let validator = self.entries[validator].1;
let compress_type = CompressType::decode(&mut buf_ptr)?;
match compress_type {
CompressType::Compressed => {
return Err(Error::FailValidate(buf_ptr.len(), "Entries don't allow a compression header with the schema included!"));
},
CompressType::Uncompressed => {
entry.extend_from_slice(buf_ptr);
},
CompressType::CompressedNoSchema => {
zstd_help::decompress(&mut self.decompressor, MAX_ENTRY_SIZE, &mut buf_ptr, &mut entry)?;
},
CompressType::DictCompressed => {
let compress = self.entries_compress.binary_search_by(|x| x.0.as_str().cmp(&field))
.map_err(|_| Error::FailDecompress)?;
let compress = &self.entries_compress[compress].1;
compress.decompress(&mut self.decompressor, MAX_ENTRY_SIZE, compress_type, &mut buf_ptr, &mut entry)?;
}
}
let mut entry_ptr: &[u8] = &entry[..];
let mut checklist = ValidatorChecklist::new();
self.types[validator].validate(&mut entry_ptr, &self.types, 0, &mut checklist)?;
let entry_len = entry.len() - entry_ptr.len();
let mut temp = Vec::new();
let mut hash_state = crypto::HashState::new();
doc.encode(&mut temp);
hash_state.update(&temp[..]);
temp.clear();
encode::write_value(&mut temp, &Value::from(field.clone()));
hash_state.update(&temp[..]);
hash_state.update(&entry[..entry_len]);
let entry_hash = hash_state.get_hash();
let hash = if entry.len() > entry_len {
hash_state.update(&entry[entry_len..]);
hash_state.get_hash()
}
else {
entry_hash.clone()
};
let mut signed_by = Vec::new();
while !entry_ptr.is_empty() {
let signature = crypto::Signature::decode(&mut entry_ptr)?;
if !signature.verify(&entry_hash) {
return Err(Error::BadSignature);
}
signed_by.push(signature.signed_by().clone());
}
let override_compression = false;
let compression = None;
let compressed = None;
let entry = Entry::from_parts(
Some(hash_state),
Some(entry_hash),
hash,
doc,
field,
entry_len,
entry,
signed_by,
compressed,
override_compression,
compression,
);
Ok(Checklist::new(checklist, entry))
}
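/// Check a document against a checklist item produced during entry validation.
/// Every index in the item must refer to a Hash validator; the document must use
/// a permitted schema and pass any linked object validator.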
pub fn check_item(&self, doc: &Document, item: &mut ChecklistItem) -> crate::Result<()> {
for index in item.iter() {
if let Validator::Hash(ref v) = self.types[*index] {
if v.schema_required() {
if let Some(hash) = doc.schema_hash() {
if !v.schema_in_set(&hash) {
return Err(Error::FailValidate(doc.len(), "Document uses unrecognized schema"));
}
}
else {
return Err(Error::FailValidate(doc.len(), "Document doesn't have schema, but needs one"));
}
}
if let Some(link) = v.link() {
let mut checklist = ValidatorChecklist::new();
if let Validator::Object(ref v) = self.types[link] {
v.validate(&mut doc.raw_doc(), &self.types, &mut checklist, true)?;
}
else {
return Err(Error::FailValidate(doc.len(), "Can't validate a document against a non-object validator"));
}
}
}
else {
return Err(Error::FailValidate(doc.len(), "Can't validate against non-hash validator"));
}
};
item.mark_done();
Ok(())
}
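/// Decode a query, confirming that the schema permits it for the queried entry
/// field and verifying any attached signatures.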
pub fn decode_query(&self, buf: &mut &[u8]) -> crate::Result<Query> {
let mut buf_ptr: &[u8] = buf;
let doc_hash = Hash::decode(&mut buf_ptr)?;
let field = read_string(&mut buf_ptr)?;
let content = Vec::from(buf_ptr);
let self_validator = self.entries.binary_search_by(|x| x.0.as_str().cmp(&field))
.map_err(|_| Error::FailValidate(buf.len(), "Query field is not in schema"))?;
let self_validator = self.entries[self_validator].1;
let mut content_ptr: &[u8] = &content[..];
let mut types = Vec::with_capacity(3);
let mut type_names = HashMap::new();
types.push(Validator::Invalid);
types.push(Validator::Valid);
let empty_hash = Hash::new_empty();
let valid = {
let mut reader = ValidReader::new(false, &mut types, &mut type_names, &empty_hash);
Validator::read_validator(&mut content_ptr, &mut reader)?
};
if !query_check(self_validator, valid, &self.types, &types) {
return Err(Error::FailValidate(content.len(), "Query is not allowed by schema"));
}
let content_len = content.len() - content_ptr.len();
let mut temp = Vec::new();
let mut hash_state = crypto::HashState::new();
doc_hash.encode(&mut temp);
hash_state.update(&temp[..]);
temp.clear();
encode::write_value(&mut temp, &Value::from(field.clone()));
hash_state.update(&temp[..]);
hash_state.update(&content[..content_len]);
let content_hash = hash_state.get_hash();
let hash = if content.len() > content_len {
hash_state.update(&content[content_len..]);
hash_state.get_hash()
}
else {
content_hash.clone()
};
let mut signed_by = Vec::new();
while !content_ptr.is_empty() {
let signature = crypto::Signature::decode(&mut content_ptr)?;
if !signature.verify(&content_hash) {
return Err(Error::BadSignature);
}
signed_by.push(signature.signed_by().clone());
}
Ok(Query::from_parts(
valid,
types.into_boxed_slice(),
hash,
doc_hash,
field,
content,
signed_by,
))
}
}
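/// Compression settings for documents and entries: none, zstd at a given level,
/// or zstd with a pre-shared dictionary.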
enum Compression {
NoCompress,
Compress(i32),
DictCompress((zstd_safe::CDict<'static>, zstd_safe::DDict<'static>))
}
impl Default for Compression {
fn default() -> Self {
Compression::Compress(zstd_safe::CLEVEL_DEFAULT)
}
}
impl Compression {
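/// Parse a compression spec object with optional `format` and `level` fields and
/// a required `setting` field, where `setting` is either a boolean (compression
/// on/off) or binary data (a zstd dictionary).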
fn read_raw(raw: &mut &[u8]) -> crate::Result<Compression> {
let mut setting_seen = false;
let mut format_seen = false;
let mut level_seen = false;
let mut level = zstd_safe::CLEVEL_DEFAULT;
let mut format = 0;
let mut setting = None;
let mut setting_bool = false;
let num_fields = match read_marker(raw)? {
MarkerType::Object(len) => len,
_ => return Err(Error::FailValidate(raw.len(), "Compress spec wasn't an object")),
};
object_iterate(raw, num_fields, |field, raw| {
match field {
"format" => {
format_seen = true;
if let Some(i) = read_integer(raw)?.as_u64() {
if i > 31 {
Err(Error::FailValidate(raw.len(),
"Compress `format` field didn't contain integer between 0 and 31"))
}
else {
format = i;
Ok(())
}
}
else {
Err(Error::FailValidate(raw.len(),
"Compress `format` field didn't contain integer between 0 and 31"))
}
},
"level" => {
level_seen = true;
if let Some(i) = read_integer(raw)?.as_u64() {
if i > 255 {
Err(Error::FailValidate(raw.len(),
"Compress `level` field didn't contain integer between 0 and 255"))
}
else {
level = i as i32;
let max = zstd_safe::max_c_level();
if level > max {
level = max;
}
Ok(())
}
}
else {
Err(Error::FailValidate(raw.len(),
"Compress `level` field didn't contain integer between 0 and 255"))
}
},
"setting" => {
setting_seen = true;
match read_marker(raw)? {
MarkerType::Boolean(v) => {
setting_bool = v;
Ok(())
},
MarkerType::Binary(len) => {
let v = read_raw_bin(raw, len)?;
setting = Some(v.to_vec());
setting_bool = true;
Ok(())
},
_ => {
Err(Error::FailValidate(raw.len(),
"Compress `setting` field didn't contain boolean or binary data"))
}
}
},
_ => {
Err(Error::FailValidate(raw.len(), "Compress contains unrecognized field"))
}
}
})?;
if !setting_seen {
return Err(Error::FailValidate(raw.len(), "Compress spec didn't have setting field"));
}
if !setting_bool && (format_seen || level_seen) {
return Err(Error::FailValidate(raw.len(), "Compress spec had false setting field, but other fields were also present"));
}
if !format_seen && setting_bool {
return Err(Error::FailValidate(raw.len(), "Compress spec had setting field not set to false, but no format field"));
}
Ok(
if !setting_bool {
Compression::NoCompress
}
else if format > 0 {
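// Unrecognized format: fall back to default zstd compression.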
Compression::Compress(zstd_safe::CLEVEL_DEFAULT)
}
else if let Some(bin) = setting {
Compression::DictCompress((
zstd_safe::create_cdict(&bin[..], level),
zstd_safe::create_ddict(&bin[..])
))
}
else {
Compression::Compress(level)
}
)
}
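/// Compress `raw` into `buf` according to this setting.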
fn compress(&self, compressor: &mut zstd_safe::CCtx, raw: &[u8], buf: &mut Vec<u8>) {
match self {
Compression::NoCompress => {
buf.extend_from_slice(raw);
},
Compression::Compress(level) => {
zstd_help::compress(compressor, *level, raw, buf);
},
Compression::DictCompress((dict, _)) => {
zstd_help::dict_compress(compressor, dict, raw, buf);
},
}
}
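/// Decompress `buf` into `decode` according to the compression type marker,
/// enforcing `max_size`. Dictionary decompression requires that this setting
/// holds a dictionary.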
fn decompress(
&self,
decompressor: &mut zstd_safe::DCtx,
max_size: usize,
compress_type: CompressType,
buf: &mut &[u8],
decode: &mut Vec<u8>
)
-> crate::Result<()>
{
match compress_type {
CompressType::Uncompressed => {
if (buf.len() + decode.len()) > max_size {
return Err(Error::BadSize);
}
decode.extend_from_slice(buf);
Ok(())
},
CompressType::Compressed | CompressType::CompressedNoSchema => {
zstd_help::decompress(decompressor, max_size, buf, decode)
},
CompressType::DictCompressed => {
if let Compression::DictCompress((_,dict)) = &self {
zstd_help::dict_decompress(decompressor, dict, max_size, buf, decode)
}
else {
Err(Error::FailDecompress)
}
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use super::super::Value;
use NoSchema;
fn simple_schema() -> Document {
let schema: Value = fogpack!({
"req": {
"title": { "type": "Str", "max_len": 200 },
"text": { "type": "Str" }
},
"entries": {
"rel": {
"type": "Obj",
"req": {
"name": { "type": "Str" },
"link": { "type": "Hash" }
}
}
},
"doc_compress": {
"format": 0,
"level": 3,
"setting": true
},
"entries_compress": {
"rel": { "setting": false }
}
});
Document::new(schema).expect("Should've been able to encode as a document")
}
fn simple_doc(schema: &Hash) -> Document {
let doc: Value = fogpack!({
"": Value::from(schema.clone()),
"title": "A Test",
"text": "This is a test of a schema document"
});
Document::new(doc).expect("Should've been able to encode as document")
}
fn simple_bad_doc(schema: &Hash) -> Document {
let doc: Value = fogpack!({
"": Value::from(schema.clone()),
"title": "A Test",
"text": 0
});
Document::new(doc).expect("Should've been able to encode as document")
}
fn simple_entry(doc: &Hash) -> Entry {
let test: Value = fogpack!({
"name": "test_entry",
"link": Hash::new(b"fake hash")
});
Entry::new(doc.clone(), String::from("rel"), test).expect("Should've been able to encode as an entry")
}
#[test]
fn use_simple_schema() {
let schema = simple_schema();
let mut schema = Schema::from_doc(schema).unwrap();
let mut no_schema = NoSchema::new();
let test = simple_doc(schema.hash());
let enc = schema.encode_doc(test.clone()).unwrap();
let dec = schema.decode_doc(&mut &enc[..]).unwrap();
assert!(test == dec, "Document didn't stay the same through enc/dec");
let test = simple_bad_doc(schema.hash());
assert!(schema.encode_doc(test.clone()).is_err());
assert!(no_schema.encode_doc(test.clone()).is_err());
let mut enc = Vec::new();
CompressType::Uncompressed.encode(&mut enc);
enc.extend_from_slice(&test.raw_doc()[..]);
assert!(schema.decode_doc(&mut &enc[..]).is_err());
}
}