writer: finish migration to custom error type

This commit is contained in:
Matt Bilker 2019-11-09 01:21:37 +00:00
parent 9b64f976ee
commit 8e37feff51
No known key found for this signature in database
GPG Key ID: 69ADF8AEB6C8B5D1
3 changed files with 280 additions and 124 deletions

View File

@ -6,9 +6,8 @@ use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use bytes::Bytes;
use snafu::{ResultExt, Snafu};
use crate::encoding_type::EncodingType;
use crate::error::KbinError;
use crate::node_types::KbinType;
use crate::encoding_type::{EncodingError, EncodingType};
use crate::node_types::{KbinType, StandardType};
#[derive(Debug, Snafu)]
pub enum ByteBufferError {
@ -46,9 +45,24 @@ pub enum ByteBufferError {
#[snafu(display("Failed to write data block to data buffer"))]
WriteDataBlock { source: io::Error },
#[snafu(display("Failed to encode string"))]
StringEncode { source: EncodingError },
#[snafu(display("Failed to write padding {} byte(s) to data buffer", size))]
WritePadding { size: usize, source: io::Error },
#[snafu(display(
"Mismatched size for {} node data (expected: {}, actual: {})",
node_type,
expected,
actual
))]
WriteSizeMismatch {
node_type: StandardType,
expected: usize,
actual: usize,
},
#[snafu(display("Failed to seek to {} in data buffer", offset))]
SeekOffset { offset: usize, source: io::Error },
@ -285,20 +299,24 @@ impl ByteBufferWrite {
Ok(())
}
pub fn write_str(&mut self, encoding: EncodingType, data: &str) -> Result<(), KbinError> {
pub fn write_str(&mut self, encoding: EncodingType, data: &str) -> Result<(), ByteBufferError> {
trace!(
"write_str => input: {}, data: 0x{:02x?}",
data,
data.as_bytes()
);
let bytes = encoding.encode_bytes(data)?;
let bytes = encoding.encode_bytes(data).context(StringEncode)?;
self.buf_write(&bytes)?;
Ok(())
}
pub fn write_aligned(&mut self, data_type: KbinType, data: &[u8]) -> Result<(), KbinError> {
pub fn write_aligned(
&mut self,
node_type: StandardType,
data: &[u8],
) -> Result<(), ByteBufferError> {
if self.offset_1 % 4 == 0 {
self.offset_1 = self.data_buf_offset();
}
@ -307,7 +325,7 @@ impl ByteBufferWrite {
}
let old_pos = self.data_buf_offset();
let size = data_type.size * data_type.count;
let size = node_type.size * node_type.count;
trace!(
"write_aligned => old_pos: {}, size: {}, data: 0x{:02x?}",
old_pos,
@ -316,8 +334,8 @@ impl ByteBufferWrite {
);
if size != data.len() {
return Err(KbinError::SizeMismatch {
node_type: data_type.name,
return Err(ByteBufferError::WriteSizeMismatch {
node_type,
expected: size,
actual: data.len(),
});

View File

@ -16,6 +16,7 @@ use crate::node_types::{StandardType, UnknownKbinType};
use crate::reader::ReaderError;
use crate::sixbit::SixbitError;
use crate::value::Value;
use crate::writer::WriterError;
pub type Result<T> = StdResult<T, KbinError>;
@ -28,18 +29,6 @@ pub enum KbinError {
source: io::Error,
},
#[snafu(display("Invalid byte value for {} header field", field))]
HeaderValue { field: &'static str },
#[snafu(display("Unable to read {} bytes from data buffer", size))]
DataRead { size: usize, source: io::Error },
#[snafu(display("Unable to write a {} to data buffer", node_type))]
DataWrite {
node_type: &'static str,
source: io::Error,
},
#[snafu(display("Unable to read bytes or not enough data read"))]
DataConvert { source: io::Error },
@ -131,6 +120,12 @@ pub enum KbinError {
source: ReaderError,
},
#[snafu(display("Failed to write binary XML"))]
Writer {
#[snafu(backtrace)]
source: WriterError,
},
#[snafu(display("Failed to handle sixbit string operation"))]
Sixbit {
#[snafu(backtrace)]
@ -176,6 +171,13 @@ impl From<ReaderError> for KbinError {
}
}
impl From<WriterError> for KbinError {
#[inline]
fn from(source: WriterError) -> Self {
KbinError::Writer { source }
}
}
impl From<SixbitError> for KbinError {
#[inline]
fn from(source: SixbitError) -> Self {

View File

@ -1,69 +1,178 @@
use std::io::{Cursor, Write};
use std::io::{self, Cursor, Write};
use byteorder::{BigEndian, WriteBytesExt};
use snafu::ResultExt;
use snafu::{ResultExt, Snafu};
use crate::byte_buffer::ByteBufferWrite;
use crate::byte_buffer::{ByteBufferError, ByteBufferWrite};
use crate::compression_type::CompressionType;
use crate::error::*;
use crate::encoding_type::{EncodingError, EncodingType};
use crate::node::{Node, NodeCollection};
use crate::node_types::StandardType;
use crate::options::Options;
use crate::sixbit::Sixbit;
use crate::sixbit::{Sixbit, SixbitError};
use crate::value::Value;
use super::{ARRAY_MASK, SIGNATURE};
/// Errors produced while serializing a node tree to binary XML (kbin).
///
/// Each variant is a snafu context selector used at the failure site via
/// `ResultExt::context`; the `#[snafu(display(...))]` attribute on each
/// variant is the user-facing message.
#[derive(Debug, Snafu)]
pub enum WriterError {
// --- header writing ---
#[snafu(display("Failed to write signature to header"))]
Signature { source: io::Error },
#[snafu(display("Failed to write compression type to header"))]
Compression { source: io::Error },
#[snafu(display("Failed to write encoding type to header"))]
Encoding { source: io::Error },
#[snafu(display("Failed to write encoding type inverted value to header"))]
EncodingNegate { source: io::Error },
#[snafu(display("Failed to write node buffer length"))]
NodeBufferLength { source: io::Error },
#[snafu(display("Failed to write data buffer length"))]
DataBufferLength { source: io::Error },
// --- node name / node data writing ---
#[snafu(display(
"Failed to write node size ({} byte(s)) for node type {}",
size,
node_type
))]
NodeSize {
node_type: StandardType,
size: u32,
source: io::Error,
},
#[snafu(display("Failed to write sixbit node name"))]
NodeSixbitName { source: SixbitError },
#[snafu(display("Failed to encode uncompressed node name to {:?}", encoding))]
NodeUncompressedNameEncode {
encoding: EncodingType,
source: EncodingError,
},
#[snafu(display("Failed to write uncompressed node name length"))]
NodeUncompressedNameLength { source: io::Error },
#[snafu(display("Failed to write uncompressed node name data"))]
NodeUncompressedNameData { source: io::Error },
#[snafu(display("Failed to write node data for node type {}", node_type))]
DataWrite {
node_type: StandardType,
source: io::Error,
},
#[snafu(display("Failed to write node type {} to node buffer", node_type))]
NodeType {
node_type: StandardType,
source: io::Error,
},
// --- buffer-level failures, wrapping ByteBufferError ---
#[snafu(display("Failed to handle data buffer operation for node type {}", node_type))]
DataBuffer {
node_type: StandardType,
source: ByteBufferError,
},
#[snafu(display("Failed to handle node buffer operation for node type {}", node_type))]
NodeBuffer {
node_type: StandardType,
source: ByteBufferError,
},
// The three variants below wrap the crate-wide `KbinError`; the
// `source(from(...))` attribute boxes it at construction time,
// presumably to keep this enum's size small — TODO confirm.
// TODO: remove when better error type is made
#[snafu(display("Failed to encode value to bytes for node type {}", node_type))]
ValueEncode {
node_type: StandardType,
#[snafu(source(from(crate::KbinError, Box::new)))]
source: Box<crate::KbinError>,
},
// TODO: remove when better error type is made
#[snafu(display("Failed to get key from definition for node type {}", node_type))]
DefinitionKey {
node_type: StandardType,
#[snafu(source(from(crate::KbinError, Box::new)))]
source: Box<crate::KbinError>,
},
// TODO: remove when better error type is made
#[snafu(display("Failed to get value from definition for node type {}", node_type))]
DefinitionValue {
node_type: StandardType,
#[snafu(source(from(crate::KbinError, Box::new)))]
source: Box<crate::KbinError>,
},
// --- structural invariant violations (no underlying source error) ---
#[snafu(display("Attempted to write node definition without key data"))]
NoNodeKey,
#[snafu(display("Attempted to write node definition without value data"))]
NoNodeValue,
}
fn write_value(
options: &Options,
data_buf: &mut ByteBufferWrite,
node_type: StandardType,
is_array: bool,
value: &Value,
) -> Result<()> {
) -> Result<(), WriterError> {
match value {
Value::Binary(data) => {
trace!("data: 0x{:02x?}", data);
let size = (data.len() as u32) * (node_type.size as u32);
data_buf.write_u32::<BigEndian>(size).context(DataWrite {
node_type: "binary node size",
})?;
data_buf.write_all(&data).context(DataWrite {
node_type: "binary",
})?;
data_buf.realign_writes(None)?;
// TODO: add overflow check
let size = (data.len() * node_type.size) as u32;
data_buf
.write_u32::<BigEndian>(size)
.context(NodeSize { node_type, size })?;
data_buf.write_all(&data).context(DataWrite { node_type })?;
data_buf
.realign_writes(None)
.context(DataBuffer { node_type })?;
},
Value::String(text) => {
data_buf.write_str(options.encoding, &text)?;
data_buf
.write_str(options.encoding, &text)
.context(DataBuffer { node_type })?;
},
Value::Array(values) => {
if !is_array {
return Err(KbinError::InvalidState);
panic!("Attempted to write value array but was not marked as array");
}
let total_size = values.len() * node_type.count * node_type.size;
let mut data = Vec::with_capacity(total_size);
values.to_bytes_into(&mut data)?;
values
.to_bytes_into(&mut data)
.context(ValueEncode { node_type })?;
data_buf
.write_u32::<BigEndian>(total_size as u32)
.context(DataWrite {
node_type: "node size",
.context(NodeSize {
node_type,
size: total_size as u32,
})?;
data_buf.write_all(&data).context(DataWrite {
node_type: node_type.name,
})?;
data_buf.realign_writes(None)?;
data_buf.write_all(&data).context(DataWrite { node_type })?;
data_buf
.realign_writes(None)
.context(DataBuffer { node_type })?;
},
value => {
if is_array {
return Err(KbinError::InvalidState);
} else {
let data = value.to_bytes()?;
data_buf.write_aligned(*node_type, &data)?;
panic!("Attempted to write non-array value but was marked as array");
}
let data = value.to_bytes().context(ValueEncode { node_type })?;
data_buf
.write_aligned(node_type, &data)
.context(DataBuffer { node_type })?;
},
};
@ -76,7 +185,7 @@ pub trait Writeable {
options: &Options,
node_buf: &mut ByteBufferWrite,
data_buf: &mut ByteBufferWrite,
) -> Result<()>;
) -> Result<(), WriterError>;
}
impl Writeable for NodeCollection {
@ -85,10 +194,14 @@ impl Writeable for NodeCollection {
options: &Options,
node_buf: &mut ByteBufferWrite,
data_buf: &mut ByteBufferWrite,
) -> Result<()> {
) -> Result<(), WriterError> {
let (node_type, is_array) = self.base().node_type_tuple();
let array_mask = if is_array { ARRAY_MASK } else { 0 };
let name = self.base().key()?.ok_or(KbinError::InvalidState)?;
let name = self
.base()
.key()
.context(DefinitionValue { node_type })?
.ok_or(WriterError::NoNodeKey)?;
debug!("NodeCollection write_node => name: {}, type: {:?}, type_size: {}, type_count: {}, is_array: {}",
name,
@ -99,31 +212,42 @@ impl Writeable for NodeCollection {
node_buf
.write_u8(node_type as u8 | array_mask)
.context(DataWrite {
node_type: node_type.name,
})?;
.context(DataWrite { node_type })?;
match options.compression {
CompressionType::Compressed => Sixbit::pack(&mut **node_buf, &name)?,
CompressionType::Compressed => {
Sixbit::pack(&mut **node_buf, &name).context(NodeSixbitName)?
},
CompressionType::Uncompressed => {
let data = options.encoding.encode_bytes(&name)?;
let data =
options
.encoding
.encode_bytes(&name)
.context(NodeUncompressedNameEncode {
encoding: options.encoding,
})?;
let len = (data.len() - 1) as u8;
node_buf.write_u8(len | ARRAY_MASK).context(DataWrite {
node_type: "node name length",
})?;
node_buf.write_all(&data).context(DataWrite {
node_type: "node name bytes",
})?;
node_buf
.write_u8(len | ARRAY_MASK)
.context(NodeUncompressedNameLength)?;
node_buf
.write_all(&data)
.context(NodeUncompressedNameData)?;
},
};
if node_type != StandardType::NodeStart {
let value = self.base().value()?;
let value = self.base().value().context(DefinitionValue { node_type })?;
write_value(options, data_buf, node_type, is_array, &value)?;
}
for attr in self.attributes() {
let key = attr.key()?.ok_or(KbinError::InvalidState)?;
let value = attr.value_bytes().ok_or(KbinError::InvalidState)?;
let node_type = StandardType::Attribute;
let key = attr
.key()
.context(DefinitionKey { node_type })?
.ok_or(WriterError::NoNodeKey)?;
let value = attr.value_bytes().ok_or(WriterError::NoNodeValue)?;
trace!(
"NodeCollection write_node => attr: {}, value: 0x{:02x?}",
@ -131,24 +255,31 @@ impl Writeable for NodeCollection {
value
);
data_buf.buf_write(value)?;
data_buf
.buf_write(value)
.context(DataBuffer { node_type })?;
node_buf
.write_u8(StandardType::Attribute as u8)
.context(DataWrite {
node_type: StandardType::Attribute.name,
})?;
.context(DataWrite { node_type })?;
match options.compression {
CompressionType::Compressed => Sixbit::pack(&mut **node_buf, &key)?,
CompressionType::Compressed => {
Sixbit::pack(&mut **node_buf, &key).context(NodeSixbitName)?
},
CompressionType::Uncompressed => {
let data = options.encoding.encode_bytes(&key)?;
let data = options.encoding.encode_bytes(&key).context(
NodeUncompressedNameEncode {
encoding: options.encoding,
},
)?;
let len = (data.len() - 1) as u8;
node_buf.write_u8(len | ARRAY_MASK).context(DataWrite {
node_type: "attribute name length",
})?;
node_buf.write_all(&data).context(DataWrite {
node_type: "node name bytes",
})?;
node_buf
.write_u8(len | ARRAY_MASK)
.context(NodeUncompressedNameLength)?;
node_buf
.write_all(&data)
.context(NodeUncompressedNameData)?;
},
};
}
@ -160,8 +291,8 @@ impl Writeable for NodeCollection {
// node end always has the array bit set
node_buf
.write_u8(StandardType::NodeEnd as u8 | ARRAY_MASK)
.context(DataWrite {
node_type: "node end",
.context(NodeType {
node_type: StandardType::NodeEnd,
})?;
Ok(())
@ -174,7 +305,7 @@ impl Writeable for Node {
options: &Options,
node_buf: &mut ByteBufferWrite,
data_buf: &mut ByteBufferWrite,
) -> Result<()> {
) -> Result<(), WriterError> {
let (node_type, is_array) = match self.value() {
Some(Value::Array(ref values)) => (values.standard_type(), true),
Some(ref value) => (value.standard_type(), false),
@ -183,7 +314,7 @@ impl Writeable for Node {
let array_mask = if is_array { ARRAY_MASK } else { 0 };
debug!(
"Node write_node => name: {}, type: {:?}, type_size: {}, type_count: {}, is_array: {}",
"Node::write_node => name: {}, type: {:?}, type_size: {}, type_count: {}, is_array: {}",
self.key(),
node_type,
node_type.size,
@ -194,19 +325,25 @@ impl Writeable for Node {
node_buf
.write_u8(node_type as u8 | array_mask)
.context(DataWrite {
node_type: node_type.name,
node_type: node_type,
})?;
match options.compression {
CompressionType::Compressed => Sixbit::pack(&mut **node_buf, &self.key())?,
CompressionType::Compressed => {
Sixbit::pack(&mut **node_buf, &self.key()).context(NodeSixbitName)?
},
CompressionType::Uncompressed => {
let data = options.encoding.encode_bytes(&self.key())?;
let data = options.encoding.encode_bytes(&self.key()).context(
NodeUncompressedNameEncode {
encoding: options.encoding,
},
)?;
let len = (data.len() - 1) as u8;
node_buf.write_u8(len | ARRAY_MASK).context(DataWrite {
node_type: "node name length",
})?;
node_buf.write_all(&data).context(DataWrite {
node_type: "node name bytes",
})?;
node_buf
.write_u8(len | ARRAY_MASK)
.context(NodeUncompressedNameLength)?;
node_buf
.write_all(&data)
.context(NodeUncompressedNameData)?;
},
};
@ -218,24 +355,33 @@ impl Writeable for Node {
for (key, value) in attributes {
trace!("Node write_node => attr: {}, value: {}", key, value);
data_buf.write_str(options.encoding, value)?;
data_buf
.write_str(options.encoding, value)
.context(DataBuffer { node_type })?;
node_buf
.write_u8(StandardType::Attribute as u8)
.context(DataWrite {
node_type: StandardType::Attribute.name,
node_type: StandardType::Attribute,
})?;
match options.compression {
CompressionType::Compressed => Sixbit::pack(&mut **node_buf, &key)?,
CompressionType::Compressed => {
Sixbit::pack(&mut **node_buf, &key).context(NodeSixbitName)?
},
CompressionType::Uncompressed => {
let data = options.encoding.encode_bytes(&key)?;
let data = options.encoding.encode_bytes(&key).context(
NodeUncompressedNameEncode {
encoding: options.encoding,
},
)?;
let len = (data.len() - 1) as u8;
node_buf.write_u8(len | ARRAY_MASK).context(DataWrite {
node_type: "attribute name length",
})?;
node_buf.write_all(&data).context(DataWrite {
node_type: "node name bytes",
})?;
node_buf
.write_u8(len | ARRAY_MASK)
.context(NodeUncompressedNameLength)?;
node_buf
.write_all(&data)
.context(NodeUncompressedNameData)?;
},
};
}
@ -250,8 +396,8 @@ impl Writeable for Node {
// node end always has the array bit set
node_buf
.write_u8(StandardType::NodeEnd as u8 | ARRAY_MASK)
.context(DataWrite {
node_type: "node end",
.context(NodeType {
node_type: StandardType::NodeEnd,
})?;
Ok(())
@ -273,27 +419,19 @@ impl Writer {
Self { options }
}
pub fn to_binary<T>(&mut self, input: &T) -> Result<Vec<u8>>
pub fn to_binary<T>(&mut self, input: &T) -> Result<Vec<u8>, WriterError>
where
T: Writeable,
{
let mut header = Cursor::new(Vec::with_capacity(8));
header
.write_u8(SIGNATURE)
.context(HeaderWrite { field: "signature" })?;
header.write_u8(SIGNATURE).context(Signature)?;
let compression = self.options.compression.to_byte();
header.write_u8(compression).context(HeaderWrite {
field: "compression",
})?;
header.write_u8(compression).context(Compression)?;
let encoding = self.options.encoding.to_byte();
header
.write_u8(encoding)
.context(HeaderWrite { field: "encoding" })?;
header.write_u8(0xFF ^ encoding).context(HeaderWrite {
field: "encoding negation",
})?;
header.write_u8(encoding).context(Encoding)?;
header.write_u8(0xFF ^ encoding).context(EncodingNegate)?;
let mut node_buf = ByteBufferWrite::new(Vec::new());
let mut data_buf = ByteBufferWrite::new(Vec::new());
@ -302,10 +440,12 @@ impl Writer {
node_buf
.write_u8(StandardType::FileEnd as u8 | ARRAY_MASK)
.context(DataWrite {
node_type: "file end",
.context(NodeType {
node_type: StandardType::FileEnd,
})?;
node_buf.realign_writes(None)?;
node_buf.realign_writes(None).context(NodeBuffer {
node_type: StandardType::FileEnd,
})?;
let mut output = header.into_inner();
@ -316,9 +456,7 @@ impl Writer {
);
output
.write_u32::<BigEndian>(node_buf.len() as u32)
.context(HeaderWrite {
field: "node buffer length",
})?;
.context(NodeBufferLength)?;
output.extend_from_slice(&node_buf);
let data_buf = data_buf.into_inner();
@ -328,9 +466,7 @@ impl Writer {
);
output
.write_u32::<BigEndian>(data_buf.len() as u32)
.context(HeaderWrite {
field: "data buffer length",
})?;
.context(DataBufferLength)?;
output.extend_from_slice(&data_buf);
Ok(output)