author     Joey Sacchini <joey@sacchini.net>  2021-01-07 13:52:33 -0500
committer  Joey Sacchini <joey@sacchini.net>  2021-01-07 13:52:33 -0500
commit     3451193c972e0d6c8d145658d8b56fba1aa6cab5 (patch)
tree       45158f888109e052659e632cab4021a2f86e166e
parent     6ec44bd5b6a354c55f3f5175f55d26534ee5b886 (diff)
download   craftio-rs-3451193c972e0d6c8d145658d8b56fba1aa6cab5.tar.gz (also .tar.bz2, .zip)
add max packet size as a configurable option for any CraftIo
-rw-r--r--   src/connection.rs    5
-rw-r--r--   src/reader.rs       41
-rw-r--r--   src/wrapper.rs      13
-rw-r--r--   src/writer.rs       63

4 files changed, 107 insertions(+), 15 deletions(-)
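
For context, a minimal usage sketch of the API this commit adds. The import path craftio_rs::CraftIo and the 2 MiB figure are illustrative assumptions; only CraftIo::set_max_packet_size and its debug_assert!(max_size > 5) guard come from this change.

    use craftio_rs::CraftIo; // assumes the trait is exported at the crate root

    /// Illustrative helper: caps packets on any CraftIo implementor
    /// (reader, writer, or a full connection, which delegates to both halves).
    fn apply_packet_limit<C: CraftIo>(io: &mut C) {
        // Must be greater than 5, presumably to leave room for the
        // up-to-5-byte VarInt length prefix (see the debug_assert below).
        io.set_max_packet_size(2 * 1024 * 1024);
    }

Unless overridden this way, the limit stays at DEFAULT_MAX_PACKET_SIZE (32 * 1000 * 1000 bytes), as set in the constructors below.
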
diff --git a/src/connection.rs b/src/connection.rs
index 486eed8..c5e0cf7 100644
--- a/src/connection.rs
+++ b/src/connection.rs
@@ -41,6 +41,11 @@ impl<R, W> CraftIo for CraftConnection<R, W> {
self.writer.enable_encryption(key, iv)?;
Ok(())
}
+
+ fn set_max_packet_size(&mut self, max_size: usize) {
+ self.reader.set_max_packet_size(max_size);
+ self.writer.set_max_packet_size(max_size);
+ }
}
impl<R, W> CraftSyncReader for CraftConnection<R, W>
diff --git a/src/reader.rs b/src/reader.rs
index 7f2cd68..bba028e 100644
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -16,7 +16,7 @@ use thiserror::Error;
#[cfg(any(feature = "futures-io", feature = "tokio-io"))]
use async_trait::async_trait;
-pub const MAX_PACKET_SIZE: usize = 32 * 1000 * 1000;
+pub const DEFAULT_MAX_PACKET_SIZE: usize = 32 * 1000 * 1000; // 32 MB
#[derive(Debug, Error)]
pub enum ReadError {
@@ -49,6 +49,13 @@ pub enum ReadError {
#[cfg(feature = "backtrace")]
backtrace: Backtrace,
},
+ #[error("{size} exceeds max size of {max_size}")]
+ PacketTooLarge {
+ size: usize,
+ max_size: usize,
+ #[cfg(feature = "backtrace")]
+ backtrace: Backtrace,
+ }
}
#[cfg(feature = "compression")]
@@ -129,6 +136,7 @@ pub struct CraftReader<R> {
raw_buf: Option<Vec<u8>>,
raw_ready: usize,
raw_offset: usize,
+ max_packet_size: usize,
#[cfg(feature = "compression")]
decompress_buf: Option<Vec<u8>>,
#[cfg(feature = "compression")]
@@ -159,6 +167,11 @@ impl<R> CraftIo for CraftReader<R> {
fn enable_encryption(&mut self, key: &[u8], iv: &[u8]) -> Result<(), CipherError> {
setup_craft_cipher(&mut self.encryption, key, iv)
}
+
+ fn set_max_packet_size(&mut self, max_size: usize) {
+ debug_assert!(max_size > 5);
+ self.max_packet_size = max_size;
+ }
}
macro_rules! rr_unwrap {
@@ -261,8 +274,13 @@ where
fn read_raw_inner(&mut self) -> ReadResult<usize> {
self.move_ready_data_to_front();
let primary_packet_len = rr_unwrap!(self.read_packet_len_sync()).0 as usize;
- if primary_packet_len > MAX_PACKET_SIZE {
- return Ok(None);
+ if primary_packet_len > self.max_packet_size {
+ return Err(ReadError::PacketTooLarge {
+ size: primary_packet_len,
+ max_size: self.max_packet_size,
+ #[cfg(feature = "backtrace")]
+ backtrace: Backtrace::capture(),
+ });
}
if self.ensure_n_ready_sync(primary_packet_len)?.is_none() {
@@ -321,8 +339,13 @@ where
async fn read_raw_inner_async(&mut self) -> ReadResult<usize> {
self.move_ready_data_to_front();
let primary_packet_len = rr_unwrap!(self.read_packet_len_async().await).0 as usize;
- if primary_packet_len > MAX_PACKET_SIZE {
- return Ok(None);
+ if primary_packet_len > self.max_packet_size {
+ return Err(ReadError::PacketTooLarge {
+ size: primary_packet_len,
+ max_size: self.max_packet_size,
+ #[cfg(feature = "backtrace")]
+ backtrace: Backtrace::capture(),
+ });
}
if self.ensure_n_ready_async(primary_packet_len).await?.is_none() {
@@ -450,6 +473,7 @@ impl<R> CraftReader<R> {
direction,
#[cfg(feature = "encryption")]
encryption: None,
+ max_packet_size: DEFAULT_MAX_PACKET_SIZE,
}
}
@@ -483,6 +507,13 @@ impl<R> CraftReader<R> {
let data_len = data_len.0 as usize;
if data_len == 0 {
rest
+ } else if data_len >= self.max_packet_size {
+ return Err(ReadError::PacketTooLarge {
+ size: data_len,
+ max_size: self.max_packet_size,
+ #[cfg(feature = "backtrace")]
+ backtrace: Backtrace::capture()
+ })
} else {
decompress(rest, &mut self.decompress_buf, data_len)?
}
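
The reader now enforces the cap twice: on the VarInt-framed packet length before buffering, and, when compression is enabled, on the declared decompressed length before inflating. A condensed, crate-independent restatement of that pair of checks (the function is illustrative, not part of the crate):

    /// Returns true if a packet passes both size checks added above:
    /// `>` rejects on the framed length, `>=` on the declared decompressed
    /// length, mirroring the comparisons in the hunks above.
    fn packet_fits(framed_len: usize, declared_decompressed_len: Option<usize>, max: usize) -> bool {
        framed_len <= max && declared_decompressed_len.map_or(true, |n| n < max)
    }
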
diff --git a/src/wrapper.rs b/src/wrapper.rs
index 992988c..19ec840 100644
--- a/src/wrapper.rs
+++ b/src/wrapper.rs
@@ -47,4 +47,17 @@ pub trait CraftIo {
/// error is returned and nothing in the underlying state is changed.
///
fn enable_encryption(&mut self, key: &[u8], iv: &[u8]) -> Result<(), CipherError>;
+
+ ///
+ /// Sets the max packet size which this I/O wrapper will decode or transmit.
+ ///
+ /// This limit exists to ensure a connection never allocates gigantic buffers, so it
+ /// applies to the representation of a packet in memory. This means that a reader cannot
+ /// read a compressed packet above this threshold, nor decompress to a packet which is
+ /// above this threshold. A writer cannot write a packet which exceeds this size (when
+ /// serialized), even if compression is enabled.
+ ///
+ /// TODO: split the compressed vs uncompressed limits?
+ ///
+ fn set_max_packet_size(&mut self, max_size: usize);
}
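
On the read side, exceeding the limit now surfaces as ReadError::PacketTooLarge instead of a silent Ok(None). A sketch of how calling code might report it, assuming ReadError is reachable from the crate root (the helper itself is hypothetical):

    use craftio_rs::ReadError;

    // Hypothetical helper: turn a read failure into a log-friendly message.
    fn describe_read_failure(err: &ReadError) -> String {
        match err {
            // Fields introduced in this commit; `..` skips the optional backtrace.
            ReadError::PacketTooLarge { size, max_size, .. } => {
                format!("peer sent a {} byte packet, limit is {} bytes", size, max_size)
            }
            other => format!("read failed: {}", other),
        }
    }

The write side gets a matching WriteError::PacketTooLarge, shown in writer.rs below.
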
diff --git a/src/writer.rs b/src/writer.rs
index 44086e6..8d0fa58 100644
--- a/src/writer.rs
+++ b/src/writer.rs
@@ -2,6 +2,7 @@
use crate::cfb8::{setup_craft_cipher, CipherError, CraftCipher};
use crate::util::{get_sized_buf, move_data_rightwards, VAR_INT_BUF_SIZE};
use crate::wrapper::{CraftIo, CraftWrapper};
+use crate::DEFAULT_MAX_PACKET_SIZE;
#[cfg(feature = "compression")]
use flate2::{CompressError, Compression, FlushCompress, Status};
use mcproto_rs::protocol::{Id, Packet, PacketDirection, RawPacket, State};
@@ -58,6 +59,13 @@ pub enum WriteError {
#[cfg(feature = "backtrace")]
backtrace: Backtrace,
},
+ #[error("packet size {size} exceeds maximum size {max_size}")]
+ PacketTooLarge {
+ size: usize,
+ max_size: usize,
+ #[cfg(feature = "backtrace")]
+ backtrace: Backtrace,
+ }
}
#[derive(Debug, Error)]
@@ -180,6 +188,7 @@ pub struct CraftWriter<W> {
direction: PacketDirection,
#[cfg(feature = "encryption")]
encryption: Option<CraftCipher>,
+ max_packet_size: usize,
}
impl<W> CraftWrapper<W> for CraftWriter<W> {
@@ -202,6 +211,11 @@ impl<W> CraftIo for CraftWriter<W> {
fn enable_encryption(&mut self, key: &[u8], iv: &[u8]) -> Result<(), CipherError> {
setup_craft_cipher(&mut self.encryption, key, iv)
}
+
+ fn set_max_packet_size(&mut self, max_size: usize) {
+ debug_assert!(max_size > 5);
+ self.max_packet_size = max_size;
+ }
}
impl<W> CraftSyncWriter for CraftWriter<W>
@@ -370,6 +384,7 @@ impl<W> CraftWriter<W> {
direction,
#[cfg(feature = "encryption")]
encryption: None,
+ max_packet_size: DEFAULT_MAX_PACKET_SIZE,
}
}
@@ -423,6 +438,14 @@ impl<W> CraftWriter<W> {
let id_size = self.serialize_id_to_buf(packet.id())?;
let packet_data = packet.data();
let data_size = packet_data.len();
+ if data_size > self.max_packet_size {
+ return Err(WriteError::PacketTooLarge {
+ size: data_size,
+ max_size: self.max_packet_size,
+ #[cfg(feature = "backtrace")]
+ backtrace: Backtrace::capture()
+ })
+ }
let buf = get_sized_buf(&mut self.raw_buf, HEADER_OFFSET, id_size + data_size);
(&mut buf[id_size..]).copy_from_slice(packet_data);
@@ -459,9 +482,19 @@ impl<W> CraftWriter<W> {
where
F: FnOnce(&mut GrowVecSerializer<'a>) -> Result<(), WriteError>,
{
- let mut serializer = GrowVecSerializer::create(&mut self.raw_buf, offset);
+ let mut serializer = GrowVecSerializer::create(&mut self.raw_buf, offset, self.max_packet_size);
f(&mut serializer)?;
- Ok(serializer.finish().map(move |b| b.len()).unwrap_or(0))
+ let packet_size = serializer.written_data_len();
+ if serializer.exceeded_max_size {
+ Err(WriteError::PacketTooLarge {
+ size: packet_size,
+ max_size: self.max_packet_size,
+ #[cfg(feature = "backtrace")]
+ backtrace: Backtrace::capture(),
+ })
+ } else {
+ Ok(packet_size)
+ }
}
}
@@ -567,31 +600,41 @@ struct GrowVecSerializer<'a> {
target: &'a mut Option<Vec<u8>>,
at: usize,
offset: usize,
+ max_size: usize,
+ exceeded_max_size: bool,
}
impl<'a> Serializer for GrowVecSerializer<'a> {
fn serialize_bytes(&mut self, data: &[u8]) -> SerializeResult {
- get_sized_buf(self.target, self.at + self.offset, data.len()).copy_from_slice(data);
+ if !self.exceeded_max_size {
+ let cur_len = self.at;
+ let new_len = cur_len + data.len();
+ if new_len > self.max_size {
+ self.exceeded_max_size = true;
+ } else {
+ get_sized_buf(self.target, self.at + self.offset, data.len()).copy_from_slice(data);
+ }
+ }
+
self.at += data.len();
+
Ok(())
}
}
impl<'a> GrowVecSerializer<'a> {
- fn create(target: &'a mut Option<Vec<u8>>, offset: usize) -> Self {
+ fn create(target: &'a mut Option<Vec<u8>>, offset: usize, max_size: usize) -> Self {
Self {
target,
at: 0,
offset,
+ max_size,
+ exceeded_max_size: false,
}
}
- fn finish(self) -> Option<&'a mut [u8]> {
- if let Some(buf) = self.target {
- Some(&mut buf[self.offset..self.offset + self.at])
- } else {
- None
- }
+ fn written_data_len(&self) -> usize {
+ self.at
}
}
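
Finally, a stripped-down illustration of the bookkeeping the reworked GrowVecSerializer performs, independent of the crate's private types (all names here are illustrative): it keeps counting requested bytes so the final size can be reported in the error, but stops growing the buffer once the cap is crossed.

    /// Minimal sketch of a size-capped byte sink.
    struct CappedSink {
        buf: Vec<u8>,
        written: usize, // total bytes requested, including any past the cap
        max_size: usize,
        exceeded_max_size: bool,
    }

    impl CappedSink {
        fn push(&mut self, data: &[u8]) {
            if !self.exceeded_max_size {
                if self.written + data.len() > self.max_size {
                    // Stop buffering; the caller turns this flag into a
                    // PacketTooLarge-style error.
                    self.exceeded_max_size = true;
                } else {
                    self.buf.extend_from_slice(data);
                }
            }
            self.written += data.len();
        }
    }

Counting without allocating lets the eventual error report the full would-be serialized size while never holding more than max_size bytes of packet data in memory.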