use crate::head::read_http_head;
use crate::http_error::HttpError;
use crate::{AsciiString, ContentType, HeaderList, RequestBody, Response};
use fixed_buffer::FixedBuf;
use futures_io::AsyncRead;
use std::collections::HashMap;
use std::fmt::Debug;
use std::net::SocketAddr;
use url::Url;
/// A parsed HTTP request.
///
/// Produced by [`read_http_request`] after the request head has been read;
/// the body may still be unread (pending) at that point.
#[derive(Clone, Eq, PartialEq)]
pub struct Request {
    /// Address of the peer that sent the request.
    pub remote_addr: SocketAddr,
    /// HTTP method as received, e.g. `"GET"` or `"POST"`.
    pub method: String,
    /// The request target parsed as a URL.
    pub url: Url,
    /// Remaining request headers.  Note: `read_http_request` removes
    /// `content-type`, `expect`, and `transfer-encoding` from this list
    /// while parsing, so they appear only in the dedicated fields below.
    pub headers: HeaderList,
    /// Cookies parsed from `Cookie` headers, keyed by cookie name.
    /// Duplicate names keep the last value seen.
    pub cookies: HashMap<String, String>,
    /// Parsed `Content-Type` header, or `ContentType::None` when absent.
    pub content_type: ContentType,
    /// True when the client sent `Expect: 100-continue`.
    pub expect_continue: bool,
    /// True when `Transfer-Encoding` includes `chunked`.
    pub chunked: bool,
    /// True when `Transfer-Encoding` includes `gzip`.
    pub gzip: bool,
    /// Value of the `Content-Length` header, when present and valid.
    pub content_length: Option<u64>,
    /// The request body, which may be already-received, empty, or pending.
    pub body: RequestBody,
}
impl Request {
    /// Returns the request body.
    #[must_use]
    pub fn body(&self) -> &RequestBody {
        &self.body
    }

    /// Returns the parsed `Content-Type` of the request.
    #[must_use]
    pub fn content_type(&self) -> &ContentType {
        &self.content_type
    }

    /// Returns the HTTP method, e.g. `"GET"`.
    #[must_use]
    pub fn method(&self) -> &str {
        &self.method
    }

    /// Returns the request URL.
    #[must_use]
    pub fn url(&self) -> &Url {
        &self.url
    }

    /// Ensures the request body has been received.
    ///
    /// # Errors
    /// - 413 Payload Too Large when the body exceeds `max_len`.
    /// - A response that tells the server to read the body (up to `max_len`)
    ///   and reprocess the request, when the body is still pending.
    pub fn recv_body(self, max_len: u64) -> Result<Request, Response> {
        if max_len < self.body.len() {
            return Err(Response::payload_too_large_413());
        }
        if self.body().is_pending() {
            return Err(Response::get_body_and_reprocess(max_len));
        }
        Ok(self)
    }

    /// Deserializes an `application/x-www-form-urlencoded` body into `T`.
    ///
    /// # Errors
    /// - 400 when the content type is not form-urlencoded or the form data
    ///   does not deserialize into `T`.
    /// - 413 when the body is pending with a known length.
    /// - 411 when the body is pending with an unknown length.
    ///
    /// # Panics
    /// Panics when reading the received body fails.
    #[cfg(feature = "urlencoded")]
    pub fn urlencoded<T: serde::de::DeserializeOwned>(&self) -> Result<T, Response> {
        use crate::util::escape_and_elide;
        use std::io::Read;
        if self.content_type != ContentType::FormUrlEncoded {
            return Err(Response::text(
                400,
                "expected x-www-form-urlencoded request body",
            ));
        }
        if self.body.is_pending() {
            return Err(if self.body.length_is_known() {
                Response::payload_too_large_413()
            } else {
                Response::length_required_411()
            });
        }
        let mut bytes = Vec::new();
        if let Err(e) = self.body.reader()?.read_to_end(&mut bytes) {
            panic!("error reading body: {}", e);
        }
        serde_urlencoded::from_bytes(&bytes).map_err(|e| {
            Response::text(
                400,
                format!(
                    "error processing form data: {}",
                    escape_and_elide(e.to_string().as_bytes(), 100)
                ),
            )
        })
    }

    /// Deserializes an `application/json` body into `T`.
    ///
    /// # Errors
    /// - 400 when the content type is not JSON or the JSON is truncated,
    ///   malformed, or does not match `T`.
    /// - 413 when the body is pending with a known length.
    /// - 411 when the body is pending with an unknown length.
    ///
    /// # Panics
    /// Panics when reading the received body fails.
    #[cfg(feature = "json")]
    pub fn json<T: serde::de::DeserializeOwned>(&self) -> Result<T, Response> {
        use serde_json::error::Category;
        if self.content_type != ContentType::Json {
            return Err(Response::text(400, "expected json request body"));
        }
        if self.body.is_pending() {
            return Err(if self.body.length_is_known() {
                Response::payload_too_large_413()
            } else {
                Response::length_required_411()
            });
        }
        let reader = self.body.reader()?;
        serde_json::from_reader(reader).map_err(|e| match e.classify() {
            Category::Eof => Response::text(400, "truncated json"),
            Category::Io => panic!("error reading body: {}", e),
            Category::Syntax => Response::text(400, format!("malformed json: {}", e)),
            Category::Data => Response::text(400, format!("unexpected json: {}", e)),
        })
    }
}
impl Debug for Request {
    /// Formats the request for diagnostics.  Cookies are sorted so the
    /// output is deterministic despite `HashMap` iteration order.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
        let mut sorted_cookies = Vec::with_capacity(self.cookies.len());
        for (name, value) in &self.cookies {
            sorted_cookies.push(format!("{}={}", name, value));
        }
        sorted_cookies.sort();
        let length_note = match self.content_length {
            Some(len) => format!(", {}", len),
            None => String::new(),
        };
        write!(
            f,
            "Request{{{}, method={}, path={:?}, headers={:?}, cookies={:?}, {:?}{}{}{}{}, {:?}}}",
            self.remote_addr,
            self.method(),
            self.url().path(),
            self.headers,
            sorted_cookies,
            self.content_type(),
            if self.expect_continue { ", expect" } else { "" },
            if self.chunked { ", chunked" } else { "" },
            if self.gzip { ", gzip" } else { "" },
            length_note,
            self.body
        )
    }
}
/// Reads an HTTP request head from `reader` and parses it into a [`Request`].
///
/// Shifts `buf` first so previously-consumed bytes are discarded, then reads
/// and parses the head.  The body is NOT read; `Request::body` is set to a
/// pending or empty placeholder based on `Transfer-Encoding`,
/// `Content-Length`, and the method.
///
/// # Errors
/// - Whatever `read_http_head` returns for a malformed or oversized head.
/// - [`HttpError::UnsupportedTransferEncoding`] for transfer-encoding
///   combinations other than `chunked`, `gzip`, or `gzip, chunked`.
/// - [`HttpError::MalformedCookieHeader`] for a cookie without `=`.
/// - [`HttpError::InvalidContentLength`] for an unparseable `Content-Length`.
#[allow(clippy::module_name_repetitions)]
pub async fn read_http_request<const BUF_SIZE: usize>(
    remote_addr: SocketAddr,
    buf: &mut FixedBuf<BUF_SIZE>,
    reader: impl AsyncRead + Unpin,
) -> Result<Request, HttpError> {
    buf.shift();
    let mut head = read_http_head(buf, reader).await?;
    let content_type = head
        .headers
        .remove_only("content-type")
        .map_or(ContentType::None, |s| ContentType::parse(s.as_str()));
    // RFC 9110 section 10.1.1: the "100-continue" expectation is
    // case-insensitive, so "100-Continue" must also be accepted.
    let expect_continue = head
        .headers
        .remove_only("expect")
        .map_or(false, |s| s.as_str().eq_ignore_ascii_case("100-continue"));
    let (gzip, chunked) = {
        let opt_ascii_string = head.headers.remove_only("transfer-encoding");
        // RFC 9112 section 7: transfer-coding names are case-insensitive.
        // Normalize each comma-separated token to lowercase before matching,
        // so "Chunked" or "GZIP" are not rejected as unsupported.
        let mut iter = opt_ascii_string
            .as_ref()
            .map(AsciiString::as_str)
            .unwrap_or_default()
            .split(',')
            .map(str::trim)
            .filter(|s| !s.is_empty())
            .map(str::to_ascii_lowercase);
        let (first, second, third) = (iter.next(), iter.next(), iter.next());
        match (first.as_deref(), second.as_deref(), third.as_deref()) {
            (Some("gzip"), Some("chunked"), None) => (true, true),
            (Some("gzip"), None, None) => (true, false),
            (Some("chunked"), None, None) => (false, true),
            (None, None, None) => (false, false),
            // Any other coding, ordering, or more than two codings.
            _ => return Err(HttpError::UnsupportedTransferEncoding),
        }
    };
    let mut cookies = HashMap::new();
    for header_value in head.headers.get_all("cookie") {
        for cookie_str in header_value
            .split(';')
            .map(str::trim)
            .filter(|s| !s.is_empty())
        {
            // Split on the first '=' only; cookie values may contain '='.
            let mut parts = cookie_str.splitn(2, '=');
            match (parts.next(), parts.next()) {
                (Some(name), Some(value)) => {
                    cookies.insert(name.to_string(), value.to_string());
                }
                _ => return Err(HttpError::MalformedCookieHeader),
            }
        }
    }
    let content_length = if let Some(s) = head.headers.get_only("content-length") {
        Some(s.parse().map_err(|_| HttpError::InvalidContentLength)?)
    } else {
        None
    };
    // Decide how the body will be obtained.  Chunked bodies and bodies with a
    // known nonzero length are pending; bodies of unknown length are assumed
    // present for POST/PUT and for requests that signal a body via
    // `Expect: 100-continue` or a gzip transfer-encoding.
    #[allow(clippy::match_same_arms)]
    let body = match (chunked, &content_length, head.method.as_str()) {
        (true, _, _) => RequestBody::PendingUnknown,
        (false, Some(0), _) => RequestBody::empty(),
        (false, Some(len), _) => RequestBody::PendingKnown(*len),
        (false, None, "POST" | "PUT") => RequestBody::PendingUnknown,
        (false, None, _) if expect_continue || gzip => RequestBody::PendingUnknown,
        (false, None, _) => RequestBody::empty(),
    };
    Ok(Request {
        remote_addr,
        method: head.method,
        url: head.url,
        headers: head.headers,
        cookies,
        content_type,
        expect_continue,
        chunked,
        gzip,
        content_length,
        body,
    })
}