author     Joey Sacchini <joey@sacchini.net>   2020-12-03 13:28:10 -0500
committer  Joey Sacchini <joey@sacchini.net>   2020-12-03 13:28:10 -0500
commit     606f3f5cce6b91895af7cb941118d59af88bf9fe (patch)
tree       f6cf814ae401c92c76affaa313aaebaea9206188
parent     0888fc50a6fb379a378193a5cbaecdaa1ac03d17 (diff)
download   craftio-rs-606f3f5cce6b91895af7cb941118d59af88bf9fe.tar.gz
           craftio-rs-606f3f5cce6b91895af7cb941118d59af88bf9fe.tar.bz2
           craftio-rs-606f3f5cce6b91895af7cb941118d59af88bf9fe.zip
implement fixes for broken reader code
-rw-r--r--  Cargo.lock     |   4
-rw-r--r--  Cargo.toml     |   8
-rw-r--r--  src/lib.rs     |   6
-rw-r--r--  src/reader.rs  | 154
-rw-r--r--  src/writer.rs  | 100
5 files changed, 198 insertions(+), 74 deletions(-)
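
In outline, the reader changes below replace the old two-step read (a fixed VAR_INT_BUF_SIZE-byte read for the length prefix, then a follow-up read sized from the decoded VarInt) with a single growable buffer plus two counters: raw_offset marks the first unconsumed byte and raw_ready counts bytes that were read but not yet consumed, so anything read past the current packet is kept for the next call instead of being lost. A minimal sketch of that invariant, using the counter names from the diff but a hypothetical wrapper type invented for illustration:

    // Hypothetical struct; only the bookkeeping idea mirrors the diff.
    struct BufState {
        raw_buf: Vec<u8>,
        raw_offset: usize, // index of the first byte not yet consumed
        raw_ready: usize,  // bytes at raw_offset that were read but not consumed
    }

    impl BufState {
        /// Mark `n` ready bytes as consumed and hand back the slice that held them.
        fn consume(&mut self, n: usize) -> &[u8] {
            assert!(n <= self.raw_ready, "not enough data is ready");
            let start = self.raw_offset;
            self.raw_offset += n;
            self.raw_ready -= n;
            &self.raw_buf[start..start + n]
        }
    }
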
diff --git a/Cargo.lock b/Cargo.lock
index a47a4cf..f2a02f5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -436,9 +436,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.59"
+version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcac07dbffa1c65e7f816ab9eba78eb142c6d44410f4eeba1e26e4f5dfa56b95"
+checksum = "1500e84d27fe482ed1dc791a56eddc2f230046a040fa908c08bda1d9fb615779"
dependencies = [
"itoa",
"ryu",
diff --git a/Cargo.toml b/Cargo.toml
index d8e1c24..6124744 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,12 +7,14 @@ license = "APACHE 2.0"
[dependencies]
mcproto-rs = { git = "https://github.com/Twister915/mcproto-rs", branch = "master", default-features = false, features = ["std", "v1_15_2", "v1_16_3"] }
-futures = { version = "0.3.8", optional = true }
-async-trait = { version = "0.1.42", optional = true }
aes = "0.6.0"
-thiserror = "1.0"
flate2 = { version = "1.0", features = ["zlib"] }
+thiserror = "1.0"
+
+futures = { version = "0.3.8", optional = true }
+async-trait = { version = "0.1.42", optional = true }
+
[features]
default = ["async"]
diff --git a/src/lib.rs b/src/lib.rs
index 8fafec0..16d26a7 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,5 @@
+#![feature(backtrace)]
+
mod cfb8;
mod connection;
mod reader;
@@ -6,9 +8,9 @@ mod util;
mod wrapper;
mod writer;
+pub use cfb8::CipherError;
pub use connection::CraftConnection;
pub use reader::*;
-pub use writer::*;
pub use tcp::*;
-pub use cfb8::CipherError;
pub use wrapper::*;
+pub use writer::*;
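
The new #![feature(backtrace)] in lib.rs ties the crate to a nightly toolchain (std::backtrace was still unstable at the time of this commit); it is what lets the thiserror-derived errors in reader.rs and writer.rs carry a captured Backtrace, since thiserror fills a field named backtrace when it generates the From impl for a #[from] variant. A hedged, self-contained sketch of that pattern — ExampleError and read_file are illustrative names, not part of the crate:

    // Crate root of a nightly build; std::backtrace was unstable in 2020.
    #![feature(backtrace)]

    use std::backtrace::Backtrace;
    use thiserror::Error;

    #[derive(Debug, Error)]
    enum ExampleError {
        #[error("i/o failure during read")]
        Io {
            #[from]
            err: std::io::Error,
            backtrace: Backtrace, // captured by the derived From impl
        },
    }

    fn read_file(path: &str) -> Result<Vec<u8>, ExampleError> {
        // `?` goes through the derived From, which calls Backtrace::capture();
        // frames are only recorded when RUST_BACKTRACE/RUST_LIB_BACKTRACE is set.
        Ok(std::fs::read(path)?)
    }
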
diff --git a/src/reader.rs b/src/reader.rs
index 4920e69..dd27440 100644
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -5,6 +5,8 @@ use flate2::{DecompressError, FlushDecompress, Status};
use mcproto_rs::protocol::{Id, PacketDirection, RawPacket, State};
use mcproto_rs::types::VarInt;
use mcproto_rs::{Deserialize, Deserialized};
+use std::backtrace::Backtrace;
+use std::io;
use thiserror::Error;
#[cfg(feature = "async")]
@@ -13,13 +15,29 @@ use {async_trait::async_trait, futures::AsyncReadExt};
#[derive(Debug, Error)]
pub enum ReadError {
#[error("i/o failure during read")]
- IoFailure(#[from] std::io::Error),
+ IoFailure {
+ #[from]
+ err: io::Error,
+ backtrace: Backtrace,
+ },
#[error("failed to read header VarInt")]
- PacketHeaderErr(#[from] mcproto_rs::DeserializeErr),
+ PacketHeaderErr {
+ #[from]
+ err: mcproto_rs::DeserializeErr,
+ backtrace: Backtrace,
+ },
#[error("failed to read packet")]
- PacketErr(#[from] mcproto_rs::protocol::PacketErr),
+ PacketErr {
+ #[from]
+ err: mcproto_rs::protocol::PacketErr,
+ backtrace: Backtrace,
+ },
#[error("failed to decompress packet")]
- DecompressFailed(#[from] DecompressErr),
+ DecompressFailed {
+ #[from]
+ err: DecompressErr,
+ backtrace: Backtrace,
+ },
}
#[derive(Debug, Error)]
@@ -63,6 +81,8 @@ pub trait CraftSyncReader {
pub struct CraftReader<R> {
inner: R,
raw_buf: Option<Vec<u8>>,
+ raw_ready: usize,
+ raw_offset: usize,
decompress_buf: Option<Vec<u8>>,
compression_threshold: Option<i32>,
state: State,
@@ -107,26 +127,23 @@ macro_rules! check_unexpected_eof {
return Ok(None);
}
- return Err(ReadError::IoFailure(err));
+ return Err(err.into());
}
};
}
impl<R> CraftSyncReader for CraftReader<R>
where
- R: std::io::Read,
+ R: io::Read,
{
fn read_raw_packet<'a, P>(&'a mut self) -> ReadResult<P>
where
P: RawPacket<'a>,
{
- let (primary_packet_len, len_bytes) = rr_unwrap!(self.read_one_varint_sync());
- let primary_packet_len = primary_packet_len.0 as usize;
- rr_unwrap!(self.read_n(
- VAR_INT_BUF_SIZE,
- primary_packet_len - VAR_INT_BUF_SIZE + len_bytes
- ));
- self.read_packet_in_buf::<'a, P>(len_bytes, primary_packet_len)
+ self.move_ready_data_to_front();
+ let primary_packet_len = rr_unwrap!(self.read_packet_len_sync()).0 as usize;
+ self.ensure_n_ready_sync(primary_packet_len)?;
+ self.read_packet_in_buf(primary_packet_len)
}
}
@@ -136,35 +153,40 @@ impl<R> CraftAsyncReader for CraftReader<R>
where
R: futures::AsyncRead + Unpin + Sync + Send,
{
- async fn read_raw_packet<'a, P>(&'a mut self) -> Result<Option<P>, ReadError>
+ async fn read_raw_packet<'a, P>(&'a mut self) -> ReadResult<P>
where
P: RawPacket<'a>,
{
- let (primary_packet_len, len_bytes) = rr_unwrap!(self.read_one_varint_async().await);
- let primary_packet_len = primary_packet_len.0 as usize;
- rr_unwrap!(
- self.read_n_async(
- VAR_INT_BUF_SIZE,
- primary_packet_len - VAR_INT_BUF_SIZE + len_bytes
- )
- .await
- );
- self.read_packet_in_buf::<P>(len_bytes, primary_packet_len)
+ self.move_ready_data_to_front();
+ let primary_packet_len = rr_unwrap!(self.read_packet_len_async().await).0 as usize;
+ self.ensure_n_ready_async(primary_packet_len).await?;
+ self.read_packet_in_buf(primary_packet_len)
}
}
impl<R> CraftReader<R>
where
- R: std::io::Read,
+ R: io::Read,
{
- fn read_one_varint_sync(&mut self) -> ReadResult<(VarInt, usize)> {
- deserialize_varint(rr_unwrap!(self.read_n(0, VAR_INT_BUF_SIZE)))
+ fn read_packet_len_sync(&mut self) -> ReadResult<VarInt> {
+ let buf = rr_unwrap!(self.ensure_n_ready_sync(VAR_INT_BUF_SIZE));
+ let (v, size) = rr_unwrap!(deserialize_varint(buf));
+ self.raw_ready -= size;
+ self.raw_offset += size;
+ Ok(Some(v))
}
- fn read_n(&mut self, offset: usize, n: usize) -> ReadResult<&mut [u8]> {
- let buf = get_sized_buf(&mut self.raw_buf, offset, n);
- check_unexpected_eof!(self.inner.read_exact(buf));
- Ok(Some(buf))
+ fn ensure_n_ready_sync(&mut self, n: usize) -> ReadResult<&[u8]> {
+ if self.raw_ready < n {
+ let to_read = n - self.raw_ready;
+ let target =
+ get_sized_buf(&mut self.raw_buf, self.raw_offset + self.raw_ready, to_read);
+ check_unexpected_eof!(self.inner.read_exact(target));
+ self.raw_ready = n;
+ }
+
+ let ready = get_sized_buf(&mut self.raw_buf, self.raw_offset, n);
+ Ok(Some(ready))
}
}
@@ -173,14 +195,26 @@ impl<R> CraftReader<R>
where
R: futures::io::AsyncRead + Unpin + Sync + Send,
{
- async fn read_one_varint_async(&mut self) -> ReadResult<(VarInt, usize)> {
- deserialize_varint(rr_unwrap!(self.read_n_async(0, VAR_INT_BUF_SIZE).await))
+ async fn read_packet_len_async(&mut self) -> ReadResult<VarInt> {
+ self.move_ready_data_to_front();
+ let buf = rr_unwrap!(self.ensure_n_ready_async(VAR_INT_BUF_SIZE).await);
+ let (v, size) = rr_unwrap!(deserialize_varint(buf));
+ self.raw_ready -= size;
+ self.raw_offset += size;
+ Ok(Some(v))
}
- async fn read_n_async(&mut self, offset: usize, n: usize) -> ReadResult<&mut [u8]> {
- let buf = get_sized_buf(&mut self.raw_buf, offset, n);
- check_unexpected_eof!(self.inner.read_exact(buf).await);
- Ok(Some(buf))
+ async fn ensure_n_ready_async(&mut self, n: usize) -> ReadResult<&[u8]> {
+ if self.raw_ready < n {
+ let to_read = n - self.raw_ready;
+ let target =
+ get_sized_buf(&mut self.raw_buf, self.raw_offset + self.raw_ready, to_read);
+ check_unexpected_eof!(self.inner.read_exact(target).await);
+ self.raw_ready = n;
+ }
+
+ let ready = get_sized_buf(&mut self.raw_buf, self.raw_offset, n);
+ Ok(Some(ready))
}
}
@@ -192,7 +226,7 @@ macro_rules! dsz_unwrap {
data: rest,
}) => (val, rest),
Err(err) => {
- return Err(ReadError::PacketHeaderErr(err));
+ return Err(err.into());
}
};
};
@@ -207,6 +241,8 @@ impl<R> CraftReader<R> {
Self {
inner,
raw_buf: None,
+ raw_ready: 0,
+ raw_offset: 0,
decompress_buf: None,
compression_threshold: None,
state,
@@ -215,11 +251,17 @@ impl<R> CraftReader<R> {
}
}
- fn read_packet_in_buf<'a, P>(&'a mut self, offset: usize, size: usize) -> ReadResult<P>
+ fn read_packet_in_buf<'a, P>(&'a mut self, size: usize) -> ReadResult<P>
where
P: RawPacket<'a>,
{
// find data in buf
+ let offset = self.raw_offset;
+ if self.raw_ready < size {
+ panic!("not enough data is ready!");
+ }
+ self.raw_ready -= size;
+ self.raw_offset += size;
let buf =
&mut self.raw_buf.as_mut().expect("should exist right now")[offset..offset + size];
// decrypt the packet if encryption is enabled
@@ -258,9 +300,28 @@ impl<R> CraftReader<R> {
match P::create(id, body_buf) {
Ok(raw) => Ok(Some(raw)),
- Err(err) => Err(ReadError::PacketErr(err)),
+ Err(err) => Err(err.into()),
}
}
+
+ fn move_ready_data_to_front(&mut self) {
+ // if there's data that's ready which isn't at the front of the buf, move it to the front
+ if self.raw_ready > 0 && self.raw_offset > 0 {
+ let raw_buf = self
+ .raw_buf
+ .as_mut()
+ .expect("if raw_ready > 0 and raw_offset > 0 then a raw_buf should exist!");
+
+ unsafe {
+ let dest = raw_buf.as_mut_ptr();
+ let src = dest.offset(self.raw_offset as isize);
+ let n_copy = self.raw_ready;
+ std::ptr::copy(src, dest, n_copy);
+ }
+ }
+
+ self.raw_offset = 0;
+ }
}
fn deserialize_raw_packet<'a, P>(raw: ReadResult<P>) -> ReadResult<P::Packet>
@@ -270,7 +331,7 @@ where
match raw {
Ok(Some(raw)) => match raw.deserialize() {
Ok(deserialized) => Ok(Some(deserialized)),
- Err(err) => Err(ReadError::PacketErr(err)),
+ Err(err) => Err(err.into()),
},
Ok(None) => Ok(None),
Err(err) => Err(err),
@@ -280,7 +341,7 @@ where
fn deserialize_varint(buf: &[u8]) -> ReadResult<(VarInt, usize)> {
match VarInt::mc_deserialize(buf) {
Ok(v) => Ok(Some((v.value, buf.len() - v.data.len()))),
- Err(err) => Err(ReadError::PacketHeaderErr(err)),
+ Err(err) => Err(err.into()),
}
}
@@ -295,12 +356,11 @@ fn decompress<'a>(
match decompress.decompress(src, decompress_buf, FlushDecompress::Finish) {
Ok(Status::StreamEnd) => break,
Ok(Status::Ok) => {}
- Ok(Status::BufError) => {
- return Err(ReadError::DecompressFailed(DecompressErr::BufError))
- }
- Err(err) => return Err(ReadError::DecompressFailed(DecompressErr::Failure(err))),
+ Ok(Status::BufError) => return Err(DecompressErr::BufError.into()),
+ Err(err) => return Err(DecompressErr::Failure(err).into()),
}
}
- Ok(&mut decompress_buf[..(decompress.total_out() as usize)])
+ let decompressed_size = decompress.total_out() as usize;
+ Ok(&mut decompress_buf[..decompressed_size])
}
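
Aside on the move_ready_data_to_front hunk above: it uses unsafe std::ptr::copy to slide the ready bytes back to the start of the buffer. A safe equivalent exists in slice::copy_within (stable since Rust 1.37), which likewise allows overlapping source and destination ranges. A free-function sketch for illustration only, with field names taken from the diff:

    // Safe alternative to the unsafe ptr::copy in the hunk above.
    fn move_ready_data_to_front(raw_buf: &mut Vec<u8>, raw_offset: &mut usize, raw_ready: usize) {
        if raw_ready > 0 && *raw_offset > 0 {
            let start = *raw_offset;
            raw_buf.copy_within(start..start + raw_ready, 0);
        }
        *raw_offset = 0;
    }
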
diff --git a/src/writer.rs b/src/writer.rs
index 4c2de69..d4e6027 100644
--- a/src/writer.rs
+++ b/src/writer.rs
@@ -5,6 +5,8 @@ use flate2::{CompressError, Compression, FlushCompress, Status};
use mcproto_rs::protocol::{Id, Packet, PacketDirection, RawPacket, State};
use mcproto_rs::types::VarInt;
use mcproto_rs::{Serialize, SerializeErr, SerializeResult, Serializer};
+use std::backtrace::Backtrace;
+use std::ops::{Deref, DerefMut};
use thiserror::Error;
#[cfg(feature = "async")]
@@ -12,23 +14,78 @@ use {async_trait::async_trait, futures::AsyncWriteExt};
#[derive(Debug, Error)]
pub enum WriteError {
- #[error("serialization of header data failed")]
- HeaderSerializeFail(SerializeErr),
- #[error("packet body serialization failed")]
- BodySerializeFail(SerializeErr),
+ #[error("packet serialization error")]
+ Serialize {
+ #[from]
+ err: PacketSerializeFail,
+ backtrace: Backtrace,
+ },
#[error("failed to compress packet")]
- CompressFail(CompressError),
+ CompressFail {
+ #[from]
+ err: CompressError,
+ backtrace: Backtrace,
+ },
#[error("compression gave buf error")]
- CompressBufError,
+ CompressBufError { backtrace: Backtrace },
#[error("io error while writing data")]
- IoFail(#[from] std::io::Error),
+ IoFail {
+ #[from]
+ err: std::io::Error,
+ backtrace: Backtrace,
+ },
#[error("bad direction")]
BadDirection {
attempted: PacketDirection,
expected: PacketDirection,
+ backtrace: Backtrace,
},
#[error("bad state")]
- BadState { attempted: State, expected: State },
+ BadState {
+ attempted: State,
+ expected: State,
+ backtrace: Backtrace,
+ },
+}
+
+#[derive(Debug, Error)]
+pub enum PacketSerializeFail {
+ #[error("failed to serialize packet header")]
+ Header(#[source] SerializeErr),
+ #[error("failed to serialize packet contents")]
+ Body(#[source] SerializeErr),
+}
+
+impl Deref for PacketSerializeFail {
+ type Target = SerializeErr;
+
+ fn deref(&self) -> &Self::Target {
+ use PacketSerializeFail::*;
+ match self {
+ Header(err) => err,
+ Body(err) => err,
+ }
+ }
+}
+
+impl DerefMut for PacketSerializeFail {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ use PacketSerializeFail::*;
+ match self {
+ Header(err) => err,
+ Body(err) => err,
+ }
+ }
+}
+
+impl Into<SerializeErr> for PacketSerializeFail {
+ fn into(self) -> SerializeErr {
+ use PacketSerializeFail::*;
+ match self {
+ Header(err) => err,
+ Body(err) => err,
+ }
+ }
}
pub type WriteResult<P> = Result<P, WriteError>;
@@ -195,14 +252,14 @@ impl<W> CraftWriter<W> {
let mut data_len_serializer = SliceSerializer::create(data_len_target);
VarInt(body_size as i32)
.mc_serialize(&mut data_len_serializer)
- .map_err(move |err| WriteError::HeaderSerializeFail(err))?;
+ .map_err(move |err| PacketSerializeFail::Header(err))?;
let data_len_bytes = data_len_serializer.finish().len();
let packet_len_target = &mut compress_buf[..VAR_INT_BUF_SIZE];
let mut packet_len_serializer = SliceSerializer::create(packet_len_target);
VarInt((compressed_size + data_len_bytes) as i32)
.mc_serialize(&mut packet_len_serializer)
- .map_err(move |err| WriteError::HeaderSerializeFail(err))?;
+ .map_err(move |err| PacketSerializeFail::Header(err))?;
let packet_len_bytes = packet_len_serializer.finish().len();
let n_shift_packet_len = VAR_INT_BUF_SIZE - packet_len_bytes;
@@ -226,7 +283,7 @@ impl<W> CraftWriter<W> {
let mut packet_len_serializer = SliceSerializer::create(packet_len_target);
VarInt((body_size + 1) as i32)
.mc_serialize(&mut packet_len_serializer)
- .map_err(move |err| WriteError::HeaderSerializeFail(err))?;
+ .map_err(move |err| PacketSerializeFail::Header(err))?;
let packet_len_bytes = packet_len_serializer.finish().len();
let n_shift_packet_len = VAR_INT_BUF_SIZE - packet_len_bytes;
@@ -238,7 +295,7 @@ impl<W> CraftWriter<W> {
let start_offset = packet_len_start_at + n_shift_packet_len;
let end_at = start_offset + packet_len_bytes + 1 + body_size;
- buf[start_offset+packet_len_bytes] = 0; // data_len = 0
+ buf[start_offset + packet_len_bytes] = 0; // data_len = 0
&mut buf[start_offset..end_at]
}
} else {
@@ -246,7 +303,7 @@ impl<W> CraftWriter<W> {
let mut packet_len_serializer = SliceSerializer::create(packet_len_target);
VarInt(body_size as i32)
.mc_serialize(&mut packet_len_serializer)
- .map_err(move |err| WriteError::HeaderSerializeFail(err))?;
+ .map_err(move |err| PacketSerializeFail::Header(err))?;
let packet_len_bytes = packet_len_serializer.finish().len();
let n_shift_packet_len = VAR_INT_BUF_SIZE - packet_len_bytes;
move_data_rightwards(
@@ -274,7 +331,7 @@ impl<W> CraftWriter<W> {
let data_size = self.serialize_to_buf(HEADER_OFFSET + id_size, move |serializer| {
packet
.mc_serialize_body(serializer)
- .map_err(move |err| WriteError::BodySerializeFail(err))
+ .map_err(move |err| PacketSerializeFail::Body(err).into())
})?;
Ok(PreparedPacketHandle { id_size, data_size })
@@ -299,6 +356,7 @@ impl<W> CraftWriter<W> {
return Err(WriteError::BadDirection {
expected: self.direction,
attempted: id.direction,
+ backtrace: Backtrace::capture(),
});
}
@@ -306,12 +364,13 @@ impl<W> CraftWriter<W> {
return Err(WriteError::BadState {
expected: self.state,
attempted: id.state,
+ backtrace: Backtrace::capture(),
});
}
self.serialize_to_buf(HEADER_OFFSET, move |serializer| {
id.mc_serialize(serializer)
- .map_err(move |err| WriteError::HeaderSerializeFail(err))
+ .map_err(move |err| PacketSerializeFail::Header(err).into())
})
}
@@ -408,12 +467,13 @@ fn compress<'a, 'b>(
FlushCompress::None
};
- match compressor
- .compress(input, output, flush)
- .map_err(move |err| WriteError::CompressFail(err))?
- {
+ match compressor.compress(input, output, flush)? {
Status::Ok => {}
- Status::BufError => return Err(WriteError::CompressBufError),
+ Status::BufError => {
+ return Err(WriteError::CompressBufError {
+ backtrace: Backtrace::capture(),
+ })
+ }
Status::StreamEnd => break,
}
}
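
One small note on the writer.rs changes: the impl Into<SerializeErr> for PacketSerializeFail block would more idiomatically be written as a From impl, since the standard library's blanket impl<T, U> Into<U> for T where U: From<T> then supplies Into for free, whereas a hand-written Into never supplies From. A sketch of the equivalent impl (same behavior, only the declared conversion direction differs):

    impl From<PacketSerializeFail> for SerializeErr {
        fn from(e: PacketSerializeFail) -> Self {
            match e {
                PacketSerializeFail::Header(err) | PacketSerializeFail::Body(err) => err,
            }
        }
    }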