use std::io::{self, BufRead, Read, Write};
use crate::ascii::HttpChar;
use crate::bytes::ByteSearch;
use crate::error::{BodyErrorKind, Error};
/// How a request body is framed on the wire.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BodyKind {
    /// No message body is present.
    None,
    /// Body is exactly this many bytes (from a `Content-Length` header).
    ContentLength(u64),
    /// Body uses `Transfer-Encoding: chunked` framing.
    Chunked,
}
#[inline]
fn map_io_err(e: io::Error) -> Error {
if e.kind() == io::ErrorKind::UnexpectedEof {
Error::from(BodyErrorKind::UnexpectedEof)
} else {
e.into()
}
}
/// A fully-buffered request body borrowed from the connection's read buffer.
#[derive(Debug)]
pub struct RequestBody<'buf> {
    // Decoded body bytes; for chunked bodies this is the in-place decoded prefix.
    data: &'buf [u8],
}
impl<'buf> RequestBody<'buf> {
    /// A body containing no bytes.
    pub const EMPTY: Self = Self { data: &[] };

    /// Interpret `data` according to `kind` and borrow the body bytes.
    ///
    /// - `None`: returns [`Self::EMPTY`].
    /// - `ContentLength(len)`: borrows the first `len` bytes of `data`.
    /// - `Chunked`: decodes the chunked framing in place via
    ///   [`decode_chunked`] and borrows the decoded prefix.
    ///
    /// # Errors
    ///
    /// `BodyTooLarge` when `len` exceeds the buffer (or does not fit in
    /// `usize`); chunk-decoding errors propagate from [`decode_chunked`].
    pub fn parse(data: &'buf mut [u8], kind: BodyKind) -> Result<Self, Error> {
        match kind {
            BodyKind::None => Ok(Self::EMPTY),
            BodyKind::ContentLength(len) => {
                // Reject lengths that cannot be represented on this platform.
                let len =
                    usize::try_from(len).map_err(|_| Error::from(BodyErrorKind::BodyTooLarge))?;
                if len > data.len() {
                    return Err(Error::from(BodyErrorKind::BodyTooLarge));
                }
                Ok(Self { data: &data[..len] })
            }
            BodyKind::Chunked => {
                // In-place decode; `decoded` is the length of the decoded prefix.
                let decoded = decode_chunked(data)?;
                Ok(Self {
                    data: &data[..decoded],
                })
            }
        }
    }

    /// Raw body bytes.
    #[inline]
    #[must_use]
    pub const fn data(&self) -> &'buf [u8] {
        self.data
    }

    /// Body bytes as UTF-8, if valid.
    #[inline]
    pub const fn as_str(&self) -> Result<&'buf str, std::str::Utf8Error> {
        std::str::from_utf8(self.data)
    }

    /// Number of body bytes.
    #[inline]
    #[must_use]
    pub const fn len(&self) -> usize {
        self.data.len()
    }

    /// Whether the body has zero bytes.
    #[inline]
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
}
/// Default cap on decoded body size: 8 MiB.
pub const DEFAULT_MAX_BODY_SIZE: u64 = 8 * 1024 * 1024;
/// Sentinel for "no limit" (`u64::MAX`).
pub const UNLIMITED_BODY_SIZE: u64 = u64::MAX;
#[inline]
fn read_byte(reader: &mut impl BufRead) -> Result<u8, Error> {
let buf = reader.fill_buf().map_err(map_io_err)?;
if buf.is_empty() {
return Err(Error::from(BodyErrorKind::UnexpectedEof));
}
let b = buf[0];
reader.consume(1);
Ok(b)
}
/// Default stack-buffer size (and thus length limit) for one chunk-size line.
pub const DEFAULT_CHUNK_LINE_BUF: usize = 512;
/// Read one chunk-size line (`<hex-size>[;ext]\r\n`) from `reader` into a
/// fixed `BUF`-byte stack buffer and parse the size.
///
/// # Errors
///
/// `InvalidChunkSize` when the line exceeds `BUF` bytes or is not valid hex;
/// `MalformedChunkedEncoding` when the LF is not preceded by CR;
/// `UnexpectedEof` when the stream ends mid-line.
fn read_chunk_size_line<const BUF: usize>(reader: &mut impl BufRead) -> Result<u64, Error> {
    let mut line = [0u8; BUF];
    let mut len = 0;
    loop {
        let buf = reader.fill_buf().map_err(map_io_err)?;
        if buf.is_empty() {
            // Connection closed before the line terminator arrived.
            return Err(Error::from(BodyErrorKind::UnexpectedEof));
        }
        if let Some(pos) = buf.iter().position(|&b| b == HttpChar::LineFeed) {
            // LF found: copy up to and including it, then validate CRLF.
            let take = pos + 1;
            if len + take > line.len() {
                return Err(Error::from(BodyErrorKind::InvalidChunkSize));
            }
            line[len..len + take].copy_from_slice(&buf[..take]);
            len += take;
            reader.consume(take);
            if len >= 2 && line[len - 2] == HttpChar::CarriageReturn {
                // Strip the trailing CRLF before parsing the hex size.
                return parse_chunk_size(&line[..len - 2]);
            }
            // A bare LF without a preceding CR is rejected.
            return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
        }
        // No LF yet: stash everything available and read more.
        let avail = buf.len();
        if len + avail > line.len() {
            return Err(Error::from(BodyErrorKind::InvalidChunkSize));
        }
        line[len..len + avail].copy_from_slice(buf);
        len += avail;
        reader.consume(avail);
    }
}
/// Default cap on the total trailer-section size after the final chunk.
pub const DEFAULT_MAX_TRAILER_SIZE: usize = 8192;
/// Stack-buffer size (and length limit) for a single trailer line.
const MAX_TRAILER_LINE: usize = 1024;
/// Consume and validate the trailer section that follows the final chunk,
/// up to and including the blank CRLF line, without retaining any of it.
///
/// Each line must end in CRLF and (unless it is the terminating blank line)
/// pass `scan_header_line` validation. Total consumed bytes are capped by
/// `max_trailer_size` and a single line by [`MAX_TRAILER_LINE`].
fn skip_trailers(reader: &mut impl BufRead, max_trailer_size: usize) -> Result<(), Error> {
    let mut line = [0u8; MAX_TRAILER_LINE];
    let mut total_consumed: usize = 0;
    loop {
        // Inner loop: accumulate one complete line into `line`.
        let mut len = 0usize;
        loop {
            let buf = reader.fill_buf().map_err(map_io_err)?;
            if buf.is_empty() {
                return Err(Error::from(BodyErrorKind::UnexpectedEof));
            }
            if let Some(pos) = buf.iter().position(|&b| b == HttpChar::LineFeed) {
                let take = pos + 1;
                // Per-line and total-size limits checked before copying.
                if len + take > line.len() {
                    return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
                }
                if total_consumed + take > max_trailer_size {
                    return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
                }
                line[len..len + take].copy_from_slice(&buf[..take]);
                len += take;
                total_consumed += take;
                reader.consume(take);
                break;
            }
            // No LF in this window; buffer what is available and continue.
            let avail = buf.len();
            if len + avail > line.len() {
                return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
            }
            if total_consumed + avail > max_trailer_size {
                return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
            }
            line[len..len + avail].copy_from_slice(buf);
            len += avail;
            total_consumed += avail;
            reader.consume(avail);
        }
        // Every line must terminate with CRLF, not a bare LF.
        if len < 2 || line[len - 2] != HttpChar::CarriageReturn {
            return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
        }
        // A bare CRLF ends the trailer section.
        if len == 2 {
            return Ok(());
        }
        // Otherwise the line must look like a valid header field.
        crate::scan::scan_header_line(&line[..len])
            .map_err(|_| Error::from(BodyErrorKind::MalformedChunkedEncoding))?;
    }
}
/// Decode a chunked body from `source`, writing payload bytes to `writer`.
/// Returns the total number of decoded bytes.
fn stream_chunked<const CHUNK_LINE_BUF: usize>(
    source: &mut impl BufRead,
    writer: &mut impl Write,
    max_body_size: u64,
    max_trailer_size: usize,
) -> Result<u64, Error> {
    let mut total = 0u64;
    loop {
        let chunk_size = read_chunk_size_line::<CHUNK_LINE_BUF>(source)?;
        if chunk_size == 0 {
            // Zero-size chunk ends the body; trailers follow.
            skip_trailers(source, max_trailer_size)?;
            break;
        }
        // Enforce the size cap before transferring anything so a huge
        // declared chunk is rejected up front (overflow-checked).
        let next_total = total
            .checked_add(chunk_size)
            .ok_or_else(|| Error::from(BodyErrorKind::BodyTooLarge))?;
        if next_total > max_body_size {
            return Err(Error::from(BodyErrorKind::BodyTooLarge));
        }
        // Copy the chunk payload window-by-window from the BufRead buffer.
        let mut remaining = chunk_size;
        while remaining > 0 {
            let buf = source.fill_buf().map_err(map_io_err)?;
            if buf.is_empty() {
                return Err(Error::from(BodyErrorKind::UnexpectedEof));
            }
            // Truncation is safe: `n` is bounded by `buf.len()`.
            #[allow(clippy::cast_possible_truncation)]
            let n = (buf.len() as u64).min(remaining) as usize;
            writer.write_all(&buf[..n]).map_err(map_io_err)?;
            source.consume(n);
            remaining -= n as u64;
        }
        total = next_total;
        // Each chunk payload must be followed by exactly CRLF.
        let cr = read_byte(source)?;
        let lf = read_byte(source)?;
        if cr != HttpChar::CarriageReturn || lf != HttpChar::LineFeed {
            return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
        }
    }
    Ok(total)
}
/// Stream a request body into `writer`, sourcing bytes first from
/// `prefetch` (data already read past the headers) and then from `reader`.
/// Uses the default chunk-line buffer and trailer-size limit.
///
/// Returns the number of decoded body bytes written.
///
/// # Errors
///
/// Propagates I/O errors, malformed chunked framing, and `BodyTooLarge`
/// when the decoded body would exceed `max_body_size`.
pub fn stream_body(
    reader: &mut impl BufRead,
    writer: &mut impl Write,
    prefetch: &[u8],
    kind: BodyKind,
    max_body_size: u64,
) -> Result<u64, Error> {
    let trailer_limit = DEFAULT_MAX_TRAILER_SIZE;
    stream_body_impl::<DEFAULT_CHUNK_LINE_BUF>(
        reader, writer, prefetch, kind, max_body_size, trailer_limit,
    )
}
/// Like [`stream_body`], but with the chunk-line buffer size, body-size cap,
/// and trailer-size cap supplied as const generics.
///
/// # Errors
///
/// Same failure modes as [`stream_body`], with `MAX_BODY_SIZE` and
/// `MAX_TRAILER_SIZE` as the enforced limits.
pub fn stream_body_with<
    const CHUNK_LINE_BUF: usize,
    const MAX_BODY_SIZE: u64,
    const MAX_TRAILER_SIZE: usize,
>(
    reader: &mut impl BufRead,
    writer: &mut impl Write,
    prefetch: &[u8],
    kind: BodyKind,
) -> Result<u64, Error> {
    stream_body_impl::<CHUNK_LINE_BUF>(
        reader, writer, prefetch, kind, MAX_BODY_SIZE, MAX_TRAILER_SIZE,
    )
}
/// Shared implementation behind [`stream_body`] and [`stream_body_with`].
fn stream_body_impl<const CHUNK_LINE_BUF: usize>(
    reader: &mut impl BufRead,
    writer: &mut impl Write,
    prefetch: &[u8],
    kind: BodyKind,
    max_body_size: u64,
    max_trailer_size: usize,
) -> Result<u64, Error> {
    match kind {
        BodyKind::None => Ok(0),
        BodyKind::ContentLength(len) => {
            if len > max_body_size {
                return Err(Error::from(BodyErrorKind::BodyTooLarge));
            }
            // Only the first `len` prefetched bytes belong to this body;
            // anything beyond is left for the caller (e.g. pipelining).
            let prefetch_len = u64::try_from(prefetch.len()).unwrap_or(u64::MAX);
            let pre_len = prefetch_len.min(len);
            #[allow(clippy::cast_possible_truncation)]
            let pre_bytes = pre_len as usize;
            if pre_bytes > 0 {
                writer
                    .write_all(&prefetch[..pre_bytes])
                    .map_err(map_io_err)?;
            }
            let remaining = len - pre_len;
            if remaining == 0 {
                return Ok(len);
            }
            // Copy the remainder straight from the reader; a short copy
            // means the peer closed before the declared length arrived.
            let copied = io::copy(&mut reader.take(remaining), writer).map_err(map_io_err)?;
            if copied < remaining {
                return Err(Error::from(BodyErrorKind::UnexpectedEof));
            }
            Ok(len)
        }
        BodyKind::Chunked => {
            // Present prefetch + reader as one contiguous chunked source.
            let cursor = io::Cursor::new(prefetch);
            let mut source = cursor.chain(reader);
            stream_chunked::<CHUNK_LINE_BUF>(&mut source, writer, max_body_size, max_trailer_size)
        }
    }
}
/// Progress of a [`BodyReader`].
enum BodyReaderState {
    /// Body fully consumed (or no body present).
    Done,
    /// Fixed-length body; `remaining` bytes still to deliver.
    ContentLength { remaining: u64 },
    /// Chunked body; `remaining` bytes left in the current chunk
    /// (0 means the next chunk-size line must be read).
    Chunked { remaining: u64 },
}
/// Streaming body decoder implementing [`io::Read`].
///
/// Decodes a fixed-length or chunked body sourced from `prefetch` bytes
/// followed by the underlying reader, enforcing a total decoded-size cap.
pub struct BodyReader<'p, R: BufRead> {
    // Prefetched bytes chained in front of the live reader.
    source: io::Chain<io::Cursor<&'p [u8]>, R>,
    // Decoding progress.
    state: BodyReaderState,
    // Total decoded bytes delivered so far.
    decoded: u64,
    // Cap on `decoded`; exceeding it is an error.
    max_body_size: u64,
    // Line-length limit for chunk-size lines.
    chunk_line_buf: usize,
    // Cap on the total trailer-section size.
    max_trailer_size: usize,
}
impl<'p, R: BufRead> BodyReader<'p, R> {
    /// Build a reader decoding `kind` from `prefetch` followed by `reader`,
    /// rejecting bodies larger than `max_body_size` decoded bytes.
    /// Chunk-line and trailer limits start at their defaults; adjust via
    /// [`Self::with_chunk_line_buf`] / [`Self::with_max_trailer_size`].
    #[must_use]
    pub fn new(reader: R, prefetch: &'p [u8], kind: BodyKind, max_body_size: u64) -> Self {
        Self {
            source: io::Cursor::new(prefetch).chain(reader),
            state: match kind {
                BodyKind::None => BodyReaderState::Done,
                BodyKind::ContentLength(len) => BodyReaderState::ContentLength { remaining: len },
                BodyKind::Chunked => BodyReaderState::Chunked { remaining: 0 },
            },
            decoded: 0,
            max_body_size,
            chunk_line_buf: DEFAULT_CHUNK_LINE_BUF,
            max_trailer_size: DEFAULT_MAX_TRAILER_SIZE,
        }
    }

    /// Override the maximum chunk-size-line length.
    #[must_use]
    pub const fn with_chunk_line_buf(mut self, buf: usize) -> Self {
        self.chunk_line_buf = buf;
        self
    }

    /// Override the maximum total trailer-section size.
    #[must_use]
    pub const fn with_max_trailer_size(mut self, n: usize) -> Self {
        self.max_trailer_size = n;
        self
    }
}
impl<R: BufRead> io::Read for BodyReader<'_, R> {
    /// Deliver decoded body bytes. `Ok(0)` is returned only when the body
    /// is fully consumed (or `buf` is empty); a premature upstream EOF is
    /// surfaced as an error rather than a short read.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if buf.is_empty() {
            return Ok(0);
        }
        match self.state {
            BodyReaderState::Done => Ok(0),
            BodyReaderState::ContentLength { remaining } => {
                if remaining == 0 {
                    self.state = BodyReaderState::Done;
                    return Ok(0);
                }
                // Never read past the declared content length.
                #[allow(clippy::cast_possible_truncation)]
                let want = (remaining.min(buf.len() as u64)) as usize;
                let n = self.source.read(&mut buf[..want])?;
                if n == 0 {
                    // Source dried up before `remaining` reached zero.
                    return Err(io::Error::from(Error::from(BodyErrorKind::UnexpectedEof)));
                }
                // Track total decoded bytes against the configured cap.
                self.decoded = self
                    .decoded
                    .checked_add(n as u64)
                    .ok_or_else(|| io::Error::from(Error::from(BodyErrorKind::BodyTooLarge)))?;
                if self.decoded > self.max_body_size {
                    return Err(io::Error::from(Error::from(BodyErrorKind::BodyTooLarge)));
                }
                self.state = BodyReaderState::ContentLength {
                    remaining: remaining - n as u64,
                };
                // Transition straight to Done once the body is complete.
                if remaining - n as u64 == 0 {
                    self.state = BodyReaderState::Done;
                }
                Ok(n)
            }
            BodyReaderState::Chunked { remaining } => self.read_chunked(buf, remaining),
        }
    }
}
impl<R: BufRead> BodyReader<'_, R> {
    /// One `read` step for a chunked body. `remaining` is how many payload
    /// bytes are left in the current chunk (0 = a new chunk-size line must
    /// be read first).
    fn read_chunked(&mut self, buf: &mut [u8], mut remaining: u64) -> io::Result<usize> {
        if remaining == 0 {
            // Start of a new chunk: read its size line.
            let size = read_chunk_size_line_dyn(&mut self.source, self.chunk_line_buf)?;
            if size == 0 {
                // Final chunk: consume trailers, then signal EOF.
                skip_trailers(&mut self.source, self.max_trailer_size)?;
                self.state = BodyReaderState::Done;
                return Ok(0);
            }
            // Reject the chunk before reading any payload if it would
            // push the decoded total past the cap (overflow-checked).
            if self
                .decoded
                .checked_add(size)
                .is_none_or(|t| t > self.max_body_size)
            {
                return Err(io::Error::from(Error::from(BodyErrorKind::BodyTooLarge)));
            }
            remaining = size;
        }
        // Deliver at most the rest of the current chunk.
        #[allow(clippy::cast_possible_truncation)]
        let want = remaining.min(buf.len() as u64) as usize;
        let n = self.source.read(&mut buf[..want])?;
        if n == 0 {
            return Err(io::Error::from(Error::from(BodyErrorKind::UnexpectedEof)));
        }
        self.decoded = self
            .decoded
            .checked_add(n as u64)
            .ok_or_else(|| io::Error::from(Error::from(BodyErrorKind::BodyTooLarge)))?;
        let new_remaining = remaining - n as u64;
        if new_remaining == 0 {
            // Chunk payload finished: its trailing CRLF must follow.
            let cr = read_byte(&mut self.source).map_err(io::Error::from)?;
            let lf = read_byte(&mut self.source).map_err(io::Error::from)?;
            if cr != HttpChar::CarriageReturn || lf != HttpChar::LineFeed {
                return Err(io::Error::from(Error::from(
                    BodyErrorKind::MalformedChunkedEncoding,
                )));
            }
        }
        self.state = BodyReaderState::Chunked {
            remaining: new_remaining,
        };
        Ok(n)
    }
}
/// Read and parse one chunk-size line with the line-length limit supplied at
/// runtime (used by [`BodyReader`], whose limit is configurable).
///
/// Fast path: when the entire line is already inside the `BufRead` window it
/// is parsed directly from that buffer with no copy. Otherwise the line is
/// accumulated into a heap buffer.
///
/// # Errors
///
/// `InvalidChunkSize` when the line exceeds `max` bytes or is not valid hex;
/// `MalformedChunkedEncoding` when the LF is not preceded by CR;
/// `UnexpectedEof` when the stream ends mid-line.
fn read_chunk_size_line_dyn(reader: &mut impl BufRead, max: usize) -> Result<u64, Error> {
    {
        let buf = reader.fill_buf().map_err(map_io_err)?;
        if buf.is_empty() {
            return Err(Error::from(BodyErrorKind::UnexpectedEof));
        }
        if let Some(lf) = buf.iter().position(|&b| b == HttpChar::LineFeed) {
            if lf + 1 > max {
                return Err(Error::from(BodyErrorKind::InvalidChunkSize));
            }
            if lf < 1 || buf[lf - 1] != HttpChar::CarriageReturn {
                return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
            }
            let size = parse_chunk_size(&buf[..lf - 1])?;
            reader.consume(lf + 1);
            return Ok(size);
        }
    }
    // Slow path: the LF was not in the first buffered window. Nothing was
    // consumed above, so accumulation restarts from the same bytes.
    // Pre-size the buffer (bounded so a huge configured `max` does not
    // trigger a large up-front allocation) to avoid repeated reallocation
    // while the line grows.
    let mut line: Vec<u8> = Vec::with_capacity(max.min(512));
    loop {
        let buf = reader.fill_buf().map_err(map_io_err)?;
        if buf.is_empty() {
            return Err(Error::from(BodyErrorKind::UnexpectedEof));
        }
        if let Some(pos) = buf.iter().position(|&b| b == HttpChar::LineFeed) {
            let take = pos + 1;
            if line.len() + take > max {
                return Err(Error::from(BodyErrorKind::InvalidChunkSize));
            }
            line.extend_from_slice(&buf[..take]);
            reader.consume(take);
            if line.len() >= 2 && line[line.len() - 2] == HttpChar::CarriageReturn {
                return parse_chunk_size(&line[..line.len() - 2]);
            }
            // Bare LF without a preceding CR.
            return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
        }
        let avail = buf.len();
        if line.len() + avail > max {
            return Err(Error::from(BodyErrorKind::InvalidChunkSize));
        }
        line.extend_from_slice(buf);
        reader.consume(avail);
    }
}
/// Parse the hexadecimal chunk size from a chunk-size line whose CRLF has
/// already been stripped. Everything from the first `;` onward is a chunk
/// extension and is ignored. At least one hex digit is required and the
/// value must fit in `u64`.
#[inline]
fn parse_chunk_size(bytes: &[u8]) -> Result<u64, Error> {
    // Split off any chunk extension before validating digits.
    let hex = match bytes.iter().position(|&b| b == b';') {
        Some(semi) => &bytes[..semi],
        None => bytes,
    };
    if hex.is_empty() {
        return Err(Error::from(BodyErrorKind::InvalidChunkSize));
    }
    let mut value: u64 = 0;
    for &byte in hex {
        let digit = match byte {
            b'0'..=b'9' => byte - b'0',
            b'a'..=b'f' => byte - b'a' + 10,
            b'A'..=b'F' => byte - b'A' + 10,
            _ => return Err(Error::from(BodyErrorKind::InvalidChunkSize)),
        };
        // Overflow-checked accumulate: value = value * 16 + digit.
        value = value
            .checked_mul(16)
            .and_then(|v| v.checked_add(u64::from(digit)))
            .ok_or_else(|| Error::from(BodyErrorKind::InvalidChunkSize))?;
    }
    Ok(value)
}
/// Decode a chunked-encoded body in place, compacting chunk payloads to the
/// front of `data`. Returns the decoded length (the valid prefix of `data`).
///
/// Decoding stops at the zero-size chunk; any trailer section after it is
/// left unexamined. `find_crlf` comes from [`ByteSearch`] — presumably it
/// returns the offset of the next CRLF pair from the given start; verify
/// against `crate::bytes`.
///
/// # Errors
///
/// `MalformedChunkedEncoding` on missing CRLF framing or truncated chunks;
/// `InvalidChunkSize` on bad hex or sizes exceeding `usize`.
#[inline]
pub fn decode_chunked(data: &mut [u8]) -> Result<usize, Error> {
    let len = data.len();
    let mut read_pos = 0; // next encoded input byte
    let mut write_pos = 0; // end of the decoded output prefix
    loop {
        // Locate the CRLF terminating the chunk-size line.
        let crlf = data[read_pos..]
            .find_crlf(0)
            .ok_or_else(|| Error::from(BodyErrorKind::MalformedChunkedEncoding))?;
        let size = usize::try_from(parse_chunk_size(&data[read_pos..read_pos + crlf])?)
            .map_err(|_| Error::from(BodyErrorKind::InvalidChunkSize))?;
        read_pos += crlf + 2;
        if size == 0 {
            // Final chunk; trailers (if any) are ignored.
            break;
        }
        // Payload plus its trailing CRLF must fit; checked arithmetic
        // guards against overflow on adversarial sizes.
        let chunk_end = read_pos
            .checked_add(size)
            .and_then(|s| s.checked_add(2))
            .ok_or_else(|| Error::from(BodyErrorKind::MalformedChunkedEncoding))?;
        if chunk_end > len {
            return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
        }
        // Shift the payload down over the already-consumed framing bytes.
        data.copy_within(read_pos..read_pos + size, write_pos);
        write_pos += size;
        read_pos += size;
        // The payload must be followed immediately by CRLF.
        if data[read_pos..].find_crlf(0) != Some(0) {
            return Err(Error::from(BodyErrorKind::MalformedChunkedEncoding));
        }
        read_pos += 2;
    }
    Ok(write_pos)
}
// Unit tests: chunk-size parsing, in-place decoding, streaming (with limits,
// extensions, and trailers), the `BodyReader` adapter, and error plumbing.
#[cfg(test)]
mod tests {
    use crate::error::{BodyErrorKind, Error};
    use super::*;

    // ---- parse_chunk_size ----

    #[test]
    fn chunk_size_simple() {
        assert_eq!(parse_chunk_size(b"0").unwrap(), 0);
        assert_eq!(parse_chunk_size(b"5").unwrap(), 5);
        assert_eq!(parse_chunk_size(b"a").unwrap(), 10);
        assert_eq!(parse_chunk_size(b"F").unwrap(), 15);
        assert_eq!(parse_chunk_size(b"1a").unwrap(), 26);
        assert_eq!(parse_chunk_size(b"ff").unwrap(), 255);
        assert_eq!(parse_chunk_size(b"FF").unwrap(), 255);
    }

    #[test]
    fn chunk_size_with_extension() {
        assert_eq!(parse_chunk_size(b"a;ext=val").unwrap(), 10);
        assert_eq!(parse_chunk_size(b"0;ext").unwrap(), 0);
    }

    #[test]
    fn chunk_size_empty() {
        assert!(parse_chunk_size(b"").is_err());
    }

    #[test]
    fn chunk_size_invalid_hex() {
        assert!(parse_chunk_size(b"zz").is_err());
    }

    #[test]
    fn chunk_size_overflow() {
        assert!(parse_chunk_size(b"ffffffffffffffff0").is_err());
    }

    // ---- decode_chunked ----

    #[test]
    fn decode_single_chunk() {
        let mut data = *b"5\r\nhello\r\n0\r\n\r\n";
        let n = decode_chunked(&mut data).unwrap();
        assert_eq!(&data[..n], b"hello");
    }

    #[test]
    fn decode_multiple_chunks() {
        let mut data = b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n".to_vec();
        let n = decode_chunked(&mut data).unwrap();
        assert_eq!(&data[..n], b"hello world");
    }

    #[test]
    fn decode_empty_body() {
        let mut data = *b"0\r\n\r\n";
        let n = decode_chunked(&mut data).unwrap();
        assert_eq!(n, 0);
    }

    #[test]
    fn decode_with_chunk_extension() {
        let mut data = b"5;name=val\r\nhello\r\n0\r\n\r\n".to_vec();
        let n = decode_chunked(&mut data).unwrap();
        assert_eq!(&data[..n], b"hello");
    }

    #[test]
    fn decode_truncated_chunk() {
        let mut data = *b"5\r\nhel";
        assert!(decode_chunked(&mut data).is_err());
    }

    #[test]
    fn decode_missing_crlf_after_chunk() {
        let mut data = *b"5\r\nhelloXX0\r\n\r\n";
        assert!(decode_chunked(&mut data).is_err());
    }

    // ---- RequestBody::parse ----

    #[test]
    fn parse_none() {
        let mut data = [];
        let body = RequestBody::parse(&mut data, BodyKind::None).unwrap();
        assert!(body.is_empty());
        assert_eq!(body.len(), 0);
    }

    #[test]
    fn parse_content_length() {
        let mut data = *b"hello world";
        let body = RequestBody::parse(&mut data, BodyKind::ContentLength(5)).unwrap();
        assert_eq!(body.data(), b"hello");
        assert_eq!(body.as_str().unwrap(), "hello");
        assert_eq!(body.len(), 5);
        assert!(!body.is_empty());
    }

    #[test]
    fn parse_content_length_too_large() {
        let mut data = *b"hi";
        let err = RequestBody::parse(&mut data, BodyKind::ContentLength(100)).unwrap_err();
        assert!(matches!(err, Error::Body(BodyErrorKind::BodyTooLarge)));
    }

    #[test]
    fn parse_chunked() {
        let mut data = *b"5\r\nhello\r\n0\r\n\r\n";
        let body = RequestBody::parse(&mut data, BodyKind::Chunked).unwrap();
        assert_eq!(body.data(), b"hello");
    }

    // ---- stream_body ----

    #[test]
    fn stream_none() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b""[..],
            &mut out,
            b"",
            BodyKind::None,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 0);
        assert!(out.is_empty());
    }

    #[test]
    fn stream_content_length_all_prefetch() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b""[..],
            &mut out,
            b"hello",
            BodyKind::ContentLength(5),
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    #[test]
    fn stream_content_length_no_prefetch() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b"hello"[..],
            &mut out,
            b"",
            BodyKind::ContentLength(5),
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    #[test]
    fn stream_content_length_partial_prefetch() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b" world"[..],
            &mut out,
            b"hello",
            BodyKind::ContentLength(11),
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 11);
        assert_eq!(out, b"hello world");
    }

    #[test]
    fn stream_content_length_eof() {
        let mut out = Vec::new();
        let err = stream_body(
            &mut &b"hi"[..],
            &mut out,
            b"",
            BodyKind::ContentLength(100),
            UNLIMITED_BODY_SIZE,
        )
        .unwrap_err();
        assert!(matches!(err, Error::Body(BodyErrorKind::UnexpectedEof)));
    }

    #[test]
    fn stream_chunked_single() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b"5\r\nhello\r\n0\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    #[test]
    fn stream_chunked_multiple() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 11);
        assert_eq!(out, b"hello world");
    }

    #[test]
    fn stream_chunked_with_prefetch() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b"llo\r\n0\r\n\r\n"[..],
            &mut out,
            b"5\r\nhe",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    #[test]
    fn stream_chunked_empty() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b"0\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 0);
        assert!(out.is_empty());
    }

    #[test]
    fn stream_chunked_with_extension() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b"5;ext=val\r\nhello\r\n0\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    // ---- trailer handling ----

    #[test]
    fn stream_chunked_with_trailers() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b"5\r\nhello\r\n0\r\nTrailer: val\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    #[test]
    fn stream_chunked_accepts_well_formed_trailer() {
        let mut out = Vec::new();
        let n = stream_body(
            &mut &b"5\r\nhello\r\n0\r\nX-One: a\r\nX-Two: b\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    #[test]
    fn stream_chunked_rejects_trailer_with_embedded_cr() {
        let mut out = Vec::new();
        let err = stream_body(
            &mut &b"5\r\nhello\r\n0\r\nTrailer: va\rlue\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap_err();
        assert!(matches!(
            err,
            Error::Body(BodyErrorKind::MalformedChunkedEncoding)
        ));
    }

    #[test]
    fn stream_chunked_rejects_trailer_without_colon() {
        let mut out = Vec::new();
        let err = stream_body(
            &mut &b"5\r\nhello\r\n0\r\nNotAHeader\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap_err();
        assert!(matches!(
            err,
            Error::Body(BodyErrorKind::MalformedChunkedEncoding)
        ));
    }

    // ---- malformed / oversized input ----

    #[test]
    fn stream_chunked_eof() {
        let mut out = Vec::new();
        let err = stream_body(
            &mut &b"5\r\nhe"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap_err();
        assert!(matches!(err, Error::Body(BodyErrorKind::UnexpectedEof)));
    }

    #[test]
    fn stream_chunked_bad_crlf_after_data() {
        let mut out = Vec::new();
        let err = stream_body(
            &mut &b"5\r\nhelloXX"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap_err();
        assert!(matches!(
            err,
            Error::Body(BodyErrorKind::MalformedChunkedEncoding)
        ));
    }

    #[test]
    fn stream_chunked_huge_chunk_size_does_not_overflow_limit() {
        let huge = format!("{:x}\r\n", u64::MAX - 1);
        let mut input = huge.into_bytes();
        input.extend_from_slice(b"data\r\n0\r\n\r\n");
        let mut out = Vec::new();
        let err = stream_body(
            &mut input.as_slice(),
            &mut out,
            b"",
            BodyKind::Chunked,
            1024,
        )
        .unwrap_err();
        assert!(matches!(err, Error::Body(BodyErrorKind::BodyTooLarge)));
    }

    #[test]
    fn decode_chunked_huge_size_does_not_overflow() {
        let huge = format!("{:x}\r\n", usize::MAX - 1);
        let mut input = huge.into_bytes();
        input.extend_from_slice(b"x");
        assert!(decode_chunked(&mut input).is_err());
    }

    #[test]
    fn stream_content_length_too_large() {
        let mut out = Vec::new();
        let err = stream_body(
            &mut &b"hello"[..],
            &mut out,
            b"",
            BodyKind::ContentLength(5),
            4,
        )
        .unwrap_err();
        assert!(matches!(err, Error::Body(BodyErrorKind::BodyTooLarge)));
    }

    #[test]
    fn stream_chunked_too_large() {
        let mut out = Vec::new();
        let err = stream_body(
            &mut &b"5\r\nhello\r\n0\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
            4,
        )
        .unwrap_err();
        assert!(matches!(err, Error::Body(BodyErrorKind::BodyTooLarge)));
    }

    // ---- error type plumbing ----

    #[test]
    fn error_display() {
        assert_eq!(
            BodyErrorKind::BodyTooLarge.to_string(),
            "body exceeds buffer size"
        );
        assert_eq!(
            BodyErrorKind::UnexpectedEof.to_string(),
            "connection closed before body complete"
        );
        assert_eq!(
            BodyErrorKind::InvalidChunkSize.to_string(),
            "invalid chunk size"
        );
        assert_eq!(
            BodyErrorKind::MalformedChunkedEncoding.to_string(),
            "malformed chunked encoding"
        );
    }

    #[test]
    fn error_converts_to_io_error() {
        let err = Error::Body(BodyErrorKind::InvalidChunkSize);
        let io_err: io::Error = err.into();
        assert_eq!(io_err.kind(), io::ErrorKind::InvalidData);
    }

    #[test]
    fn error_from_io() {
        let io_err = io::Error::new(io::ErrorKind::BrokenPipe, "pipe");
        let err = Error::from(io_err);
        assert!(matches!(err, Error::Io(_)));
        assert!(std::error::Error::source(&err).is_some());
    }

    #[test]
    fn error_io_roundtrip() {
        let io_err = io::Error::new(io::ErrorKind::BrokenPipe, "pipe");
        let err = Error::from(io_err);
        let back: io::Error = err.into();
        assert_eq!(back.kind(), io::ErrorKind::BrokenPipe);
    }

    // ---- trailer size limits ----

    #[test]
    fn stream_chunked_trailers_within_limit() {
        let mut trailer = Vec::new();
        for i in 0..5 {
            trailer.extend_from_slice(format!("X-Trailer-{i}: value-{i}\r\n").as_bytes());
        }
        trailer.extend_from_slice(b"\r\n");
        let mut input = b"5\r\nhello\r\n0\r\n".to_vec();
        input.extend_from_slice(&trailer);
        let mut out = Vec::new();
        let n = stream_body(
            &mut input.as_slice(),
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    #[test]
    fn stream_chunked_trailers_exceed_limit() {
        let mut trailer = Vec::new();
        for i in 0..500 {
            trailer.extend_from_slice(format!("X-T-{i}: {}\r\n", "x".repeat(20)).as_bytes());
        }
        let mut input = b"5\r\nhello\r\n0\r\n".to_vec();
        input.extend_from_slice(&trailer);
        let mut out = Vec::new();
        let err = stream_body(
            &mut input.as_slice(),
            &mut out,
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        )
        .unwrap_err();
        assert!(matches!(
            err,
            Error::Body(BodyErrorKind::MalformedChunkedEncoding)
        ));
    }

    // ---- const-generic configuration ----

    #[test]
    fn stream_body_with_custom_chunk_buf() {
        let mut out = Vec::new();
        let n = stream_body_with::<64, { UNLIMITED_BODY_SIZE }, { DEFAULT_MAX_TRAILER_SIZE }>(
            &mut &b"5\r\nhello\r\n0\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
        )
        .unwrap();
        assert_eq!(n, 5);
        assert_eq!(out, b"hello");
    }

    #[test]
    fn stream_body_with_tiny_chunk_buf_rejects_long_extension() {
        let mut out = Vec::new();
        let err = stream_body_with::<16, { UNLIMITED_BODY_SIZE }, { DEFAULT_MAX_TRAILER_SIZE }>(
            &mut &b"5;extension=very-long-value\r\nhello\r\n0\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
        )
        .unwrap_err();
        assert!(matches!(err, Error::Body(BodyErrorKind::InvalidChunkSize)));
    }

    #[test]
    fn stream_chunked_custom_max_trailer_size() {
        let mut out = Vec::new();
        let err = stream_body_with::<{ DEFAULT_CHUNK_LINE_BUF }, { UNLIMITED_BODY_SIZE }, 16>(
            &mut &b"5\r\nhello\r\n0\r\nX-Trailer: this-value-is-way-too-long\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
        )
        .unwrap_err();
        assert!(matches!(
            err,
            Error::Body(BodyErrorKind::MalformedChunkedEncoding)
        ));
    }

    // ---- BodyReader ----

    use std::io::Read;

    #[test]
    fn body_reader_none_returns_zero() {
        let mut r = BodyReader::new(&b""[..], b"", BodyKind::None, UNLIMITED_BODY_SIZE);
        let mut out = [0u8; 8];
        assert_eq!(r.read(&mut out).unwrap(), 0);
    }

    #[test]
    fn body_reader_content_length_all_prefetch() {
        let mut r = BodyReader::new(
            &b""[..],
            b"hello",
            BodyKind::ContentLength(5),
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        r.read_to_end(&mut out).unwrap();
        assert_eq!(out, b"hello");
    }

    #[test]
    fn body_reader_content_length_split() {
        let mut r = BodyReader::new(
            &b" world"[..],
            b"hello",
            BodyKind::ContentLength(11),
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        r.read_to_end(&mut out).unwrap();
        assert_eq!(out, b"hello world");
    }

    #[test]
    fn body_reader_content_length_eof() {
        let mut r = BodyReader::new(
            &b"hi"[..],
            b"",
            BodyKind::ContentLength(100),
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        let err = r.read_to_end(&mut out).unwrap_err();
        assert_eq!(err.kind(), io::ErrorKind::InvalidData);
    }

    #[test]
    fn body_reader_chunked_single() {
        let mut r = BodyReader::new(
            &b"5\r\nhello\r\n0\r\n\r\n"[..],
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        r.read_to_end(&mut out).unwrap();
        assert_eq!(out, b"hello");
    }

    #[test]
    fn body_reader_chunked_multiple() {
        let mut r = BodyReader::new(
            &b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"[..],
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        r.read_to_end(&mut out).unwrap();
        assert_eq!(out, b"hello world");
    }

    #[test]
    fn body_reader_chunked_with_prefetch() {
        let mut r = BodyReader::new(
            &b"llo\r\n0\r\n\r\n"[..],
            b"5\r\nhe",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        r.read_to_end(&mut out).unwrap();
        assert_eq!(out, b"hello");
    }

    #[test]
    fn body_reader_chunked_with_trailers() {
        let mut r = BodyReader::new(
            &b"5\r\nhello\r\n0\r\nTrailer: val\r\n\r\n"[..],
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        r.read_to_end(&mut out).unwrap();
        assert_eq!(out, b"hello");
    }

    #[test]
    fn body_reader_chunked_too_large() {
        let mut r = BodyReader::new(&b"5\r\nhello\r\n0\r\n\r\n"[..], b"", BodyKind::Chunked, 4);
        let mut out = Vec::new();
        let err = r.read_to_end(&mut out).unwrap_err();
        assert_eq!(err.kind(), io::ErrorKind::InvalidData);
    }

    #[test]
    fn body_reader_content_length_too_large() {
        let mut r = BodyReader::new(&b"hello"[..], b"", BodyKind::ContentLength(5), 4);
        let mut out = Vec::new();
        let err = r.read_to_end(&mut out).unwrap_err();
        assert_eq!(err.kind(), io::ErrorKind::InvalidData);
    }

    #[test]
    fn body_reader_small_buffer_drains_chunked() {
        let mut r = BodyReader::new(
            &b"b\r\nhello world\r\n0\r\n\r\n"[..],
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        let mut tmp = [0u8; 3];
        loop {
            let n = r.read(&mut tmp).unwrap();
            if n == 0 {
                break;
            }
            out.extend_from_slice(&tmp[..n]);
        }
        assert_eq!(out, b"hello world");
    }

    #[test]
    fn body_reader_chunked_eof_mid_chunk() {
        let mut r = BodyReader::new(&b"5\r\nhe"[..], b"", BodyKind::Chunked, UNLIMITED_BODY_SIZE);
        let mut out = Vec::new();
        let err = r.read_to_end(&mut out).unwrap_err();
        assert_eq!(err.kind(), io::ErrorKind::InvalidData);
    }

    #[test]
    fn body_reader_chunked_bad_terminator() {
        let mut r = BodyReader::new(
            &b"5\r\nhelloXX"[..],
            b"",
            BodyKind::Chunked,
            UNLIMITED_BODY_SIZE,
        );
        let mut out = Vec::new();
        let err = r.read_to_end(&mut out).unwrap_err();
        assert_eq!(err.kind(), io::ErrorKind::InvalidData);
    }

    #[test]
    fn stream_body_with_const_max_body_size() {
        let mut out = Vec::new();
        let err = stream_body_with::<{ DEFAULT_CHUNK_LINE_BUF }, 4, { DEFAULT_MAX_TRAILER_SIZE }>(
            &mut &b"5\r\nhello\r\n0\r\n\r\n"[..],
            &mut out,
            b"",
            BodyKind::Chunked,
        )
        .unwrap_err();
        assert!(matches!(err, Error::Body(BodyErrorKind::BodyTooLarge)));
    }

    // A source that always reports zero bytes available, simulating a
    // reader that yields Ok(0) without a proper EOF in the framing.
    struct AlwaysZero;

    impl io::Read for AlwaysZero {
        fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> {
            Ok(0)
        }
    }

    impl io::BufRead for AlwaysZero {
        fn fill_buf(&mut self) -> io::Result<&[u8]> {
            Ok(&[])
        }
        fn consume(&mut self, _amt: usize) {}
    }

    #[test]
    #[allow(clippy::never_loop)]
    fn body_reader_content_length_returns_eof_on_zero_read() {
        let mut r = BodyReader::new(
            AlwaysZero,
            b"",
            BodyKind::ContentLength(10),
            UNLIMITED_BODY_SIZE,
        );
        let mut buf = [0u8; 8];
        let mut errored = false;
        for _ in 0..16 {
            match r.read(&mut buf) {
                Ok(0) => panic!("BodyReader returned Ok(0) instead of EOF error"),
                Ok(n) => panic!("BodyReader returned Ok({n}) from a zero-yielding source"),
                Err(e) => {
                    assert_eq!(e.kind(), io::ErrorKind::InvalidData);
                    errored = true;
                    break;
                }
            }
        }
        assert!(
            errored,
            "BodyReader::read did not error within the iteration cap"
        );
    }

    #[test]
    #[allow(clippy::never_loop)]
    fn body_reader_chunked_returns_eof_on_zero_read() {
        let mut r = BodyReader::new(AlwaysZero, b"5\r\n", BodyKind::Chunked, UNLIMITED_BODY_SIZE);
        let mut buf = [0u8; 8];
        let mut errored = false;
        for _ in 0..16 {
            match r.read(&mut buf) {
                Ok(0) => panic!("BodyReader returned Ok(0) instead of EOF error"),
                Ok(n) => panic!("BodyReader returned Ok({n}) from a zero-yielding source"),
                Err(e) => {
                    assert_eq!(e.kind(), io::ErrorKind::InvalidData);
                    errored = true;
                    break;
                }
            }
        }
        assert!(
            errored,
            "BodyReader::read did not error within the iteration cap"
        );
    }

    #[test]
    #[allow(clippy::never_loop)]
    fn body_reader_chunked_size_line_eof() {
        let mut r = BodyReader::new(AlwaysZero, b"", BodyKind::Chunked, UNLIMITED_BODY_SIZE);
        let mut buf = [0u8; 8];
        let mut errored = false;
        for _ in 0..16 {
            match r.read(&mut buf) {
                Ok(0) => panic!("BodyReader returned Ok(0) instead of EOF error"),
                Ok(n) => panic!("BodyReader returned Ok({n}) from a zero-yielding source"),
                Err(e) => {
                    assert_eq!(e.kind(), io::ErrorKind::InvalidData);
                    errored = true;
                    break;
                }
            }
        }
        assert!(
            errored,
            "BodyReader::read did not error within the iteration cap"
        );
    }
}