use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use syn::{
spanned::Spanned, FnArg, GenericArgument, ItemFn, PathArguments, ReturnType, Type, TypePath,
};
/// Attribute values inferred from a handler function's signature by [`infer`].
#[derive(Default)]
pub struct InferredAttrs {
    /// Inner type of the first `Json<T>` found in the arguments (request body).
    pub request_body: Option<Type>,
    /// Types of all non-`Header` arguments, in declaration order.
    pub arg_types: Vec<Type>,
    /// Return type, with `impl Trait` occurrences replaced by `()`.
    pub return_type: Option<Type>,
    /// Error type `E` when the return type is `Result<T, E>` / `ApiResult<T, E>`.
    pub error_type: Option<Type>,
    /// Inner marker types `M` of `Header<M>` arguments.
    pub header_marker_types: Vec<Type>,
}
pub fn infer(item_fn: &ItemFn) -> InferredAttrs {
let mut inferred = InferredAttrs::default();
for input in &item_fn.sig.inputs {
let FnArg::Typed(pat_type) = input else {
continue; };
if !matches!(wrapper_and_inner(&pat_type.ty), Some(("Header", _))) {
inferred.arg_types.push((*pat_type.ty).clone());
}
if inferred.request_body.is_none() {
if let Some(body_ty) = find_json_inner(&pat_type.ty) {
inferred.request_body = Some(body_ty.clone());
}
}
if let Some(("Header", inner)) = wrapper_and_inner(&pat_type.ty) {
inferred.header_marker_types.push(inner.clone());
}
}
if let ReturnType::Type(_, ret) = &item_fn.sig.output {
inferred.return_type = Some(strip_impl_trait((**ret).clone()));
let (_success_inner, error) = unwrap_result(ret);
inferred.error_type = error;
}
inferred
}
/// Recursively replaces every `impl Trait` occurrence in `ty` with the unit
/// type `()`, descending through paths, tuples, arrays, references, pointers,
/// parens, and groups. All other types are returned unchanged.
fn strip_impl_trait(ty: Type) -> Type {
    match ty {
        // Bare `impl Trait` has no nameable type; substitute `()`.
        Type::ImplTrait(_) => Type::Tuple(syn::TypeTuple {
            paren_token: Default::default(),
            elems: Default::default(),
        }),
        // Rewrite every angle-bracketed generic type argument in place.
        Type::Path(mut tp) => {
            for seg in &mut tp.path.segments {
                let PathArguments::AngleBracketed(args) = &mut seg.arguments else {
                    continue;
                };
                for arg in &mut args.args {
                    if let GenericArgument::Type(inner) = arg {
                        *inner = strip_impl_trait(inner.clone());
                    }
                }
            }
            Type::Path(tp)
        }
        Type::Tuple(mut tt) => {
            for elem in &mut tt.elems {
                *elem = strip_impl_trait(elem.clone());
            }
            Type::Tuple(tt)
        }
        Type::Array(mut ta) => {
            ta.elem = strip_impl_trait(*ta.elem).into();
            Type::Array(ta)
        }
        Type::Reference(mut tr) => {
            tr.elem = strip_impl_trait(*tr.elem).into();
            Type::Reference(tr)
        }
        Type::Ptr(mut tp) => {
            tp.elem = strip_impl_trait(*tp.elem).into();
            Type::Ptr(tp)
        }
        Type::Paren(mut tp) => {
            tp.elem = strip_impl_trait(*tp.elem).into();
            Type::Paren(tp)
        }
        Type::Group(mut tg) => {
            tg.elem = strip_impl_trait(*tg.elem).into();
            Type::Group(tg)
        }
        // Everything else (slices, trait objects, never, ...) passes through.
        other => other,
    }
}
/// Collects into `out` every type that appears as a generic argument, tuple
/// element, or array element anywhere inside `ty`, recursing depth-first.
/// References, pointers, parens, and groups are traversed transparently
/// without being recorded themselves.
fn collect_nested_type_args(ty: &Type, out: &mut Vec<Type>) {
    match ty {
        Type::Path(tp) => {
            // Flatten all angle-bracketed argument lists across segments.
            let generic_args = tp
                .path
                .segments
                .iter()
                .filter_map(|seg| match &seg.arguments {
                    PathArguments::AngleBracketed(ab) => Some(ab.args.iter()),
                    _ => None,
                })
                .flatten();
            for arg in generic_args {
                if let GenericArgument::Type(inner) = arg {
                    out.push(inner.clone());
                    collect_nested_type_args(inner, out);
                }
            }
        }
        Type::Tuple(tt) => {
            for elem in &tt.elems {
                out.push(elem.clone());
                collect_nested_type_args(elem, out);
            }
        }
        Type::Array(ta) => {
            out.push((*ta.elem).clone());
            collect_nested_type_args(&ta.elem, out);
        }
        // Transparent wrappers: descend without recording the wrapper.
        Type::Reference(tr) => collect_nested_type_args(&tr.elem, out),
        Type::Ptr(tp) => collect_nested_type_args(&tp.elem, out),
        Type::Paren(tp) => collect_nested_type_args(&tp.elem, out),
        Type::Group(tg) => collect_nested_type_args(&tg.elem, out),
        _ => {}
    }
}
/// If `ty` is one of the known extractor wrappers (`Json`, `Path`, `Query`,
/// `Header`) applied to a generic argument, returns the wrapper's name and a
/// reference to its first generic type argument.
///
/// Only the last path segment is inspected, so fully-qualified paths such as
/// `axum::Json<T>` match as well.
fn wrapper_and_inner(ty: &Type) -> Option<(&'static str, &Type)> {
    const WRAPPERS: [&str; 4] = ["Json", "Path", "Query", "Header"];
    let Type::Path(TypePath { path, .. }) = ty else {
        return None;
    };
    let last = path.segments.last()?;
    let ident = last.ident.to_string();
    let wrapper = WRAPPERS.into_iter().find(|w| *w == ident)?;
    let args = match &last.arguments {
        PathArguments::AngleBracketed(ab) => &ab.args,
        _ => return None,
    };
    // Skip lifetimes/consts; take the first *type* argument.
    let inner = args.iter().find_map(|arg| {
        if let GenericArgument::Type(t) = arg {
            Some(t)
        } else {
            None
        }
    })?;
    Some((wrapper, inner))
}
/// Searches `ty` for a `Json<T>` wrapper and returns `T` on the first hit.
///
/// The type itself is checked first; otherwise the generic arguments of the
/// last path segment are searched recursively, depth-first.
fn find_json_inner(ty: &Type) -> Option<&Type> {
    if let Some(("Json", inner)) = wrapper_and_inner(ty) {
        return Some(inner);
    }
    let Type::Path(TypePath { path, .. }) = ty else {
        return None;
    };
    let last = path.segments.last()?;
    match &last.arguments {
        PathArguments::AngleBracketed(args) => args.args.iter().find_map(|arg| match arg {
            GenericArgument::Type(inner_ty) => find_json_inner(inner_ty),
            _ => None,
        }),
        _ => None,
    }
}
/// Extracts `{name}` placeholder names from a route path, in order of
/// appearance.
///
/// Empty placeholders (`{}`) and wildcard placeholders (`{*rest}`) are
/// skipped. An opening brace with no closing brace anywhere after it ends the
/// scan, since no later placeholder could be terminated either.
pub fn parse_path_names(path: &str) -> Vec<String> {
    let mut out = Vec::new();
    // Walk the string by slicing past each parsed placeholder instead of
    // tracking byte indices by hand.
    let mut rest = path;
    while let Some(open) = rest.find('{') {
        let after_open = &rest[open + 1..];
        let Some(close) = after_open.find('}') else {
            // Unterminated `{`: nothing after it can close, so stop scanning.
            break;
        };
        let name = &after_open[..close];
        if !name.is_empty() && !name.starts_with('*') {
            out.push(name.to_string());
        }
        rest = &after_open[close + 1..];
    }
    out
}
/// Splits a `Result<T, E>` (or `ApiResult<T, E>`) type into its success and
/// error type arguments.
///
/// Any type that is not recognizably a result is treated as a bare success
/// type: `(Some(ty), None)`.
fn unwrap_result(ty: &Type) -> (Option<Type>, Option<Type>) {
    // Fallback used whenever `ty` does not look like a result type.
    let passthrough = (Some(ty.clone()), None);
    let Type::Path(TypePath { path, .. }) = ty else {
        return passthrough;
    };
    let last = match path.segments.last() {
        Some(seg) if seg.ident == "Result" || seg.ident == "ApiResult" => seg,
        _ => return passthrough,
    };
    let PathArguments::AngleBracketed(args) = &last.arguments else {
        return passthrough;
    };
    // Skip lifetimes/consts; the first two *type* arguments are T and E.
    let mut type_args = args.args.iter().filter_map(|arg| match arg {
        GenericArgument::Type(t) => Some(t.clone()),
        _ => None,
    });
    let success = type_args.next();
    let error = type_args.next();
    (success, error)
}
/// Token streams produced by [`InferredAttrs::into_tokens`].
pub struct InferredTokens {
    /// Items (trait impls) emitted before the annotated handler function.
    pub pre_items: TokenStream,
    /// Extra `, key = value` / `, key(...)` pairs appended to the path attribute.
    pub attr_additions: TokenStream,
}
impl InferredAttrs {
    /// Expands the inferred attributes into generated tokens.
    ///
    /// Produces the items to emit before the handler (`pre_items`) plus extra
    /// key/value pairs to append to the path attribute (`attr_additions`).
    /// Keys the user supplied explicitly (listed in `user_keys`) are never
    /// overridden by inferred values.
    ///
    /// `fn_ident` is the handler's name (used to address the generated
    /// `__path_<fn>` struct); `path_param_names` are the `{name}` placeholders
    /// parsed from the route path.
    pub fn into_tokens(
        self,
        fn_ident: &syn::Ident,
        path_param_names: &[String],
        user_keys: &[&str],
    ) -> InferredTokens {
        let mut attr_additions = TokenStream::new();
        let mut pre_items = TokenStream::new();
        // Inferred request body, unless the user wrote `request_body = ...`.
        if !user_keys.contains(&"request_body") {
            if let Some(ty) = &self.request_body {
                attr_additions.extend(quote! { , request_body = #ty });
            }
        }
        // The generated impls below attach to the `__path_<fn>` struct
        // (presumably generated by the path-attribute macro — the struct
        // itself is not visible in this file).
        let path_struct_ident = syn::Ident::new(&format!("__path_{}", fn_ident), fn_ident.span());
        let arg_types_for_schemas = self.arg_types.clone();
        // Schema-collection snippet for the inferred error type, if any.
        let error_schemas_tt = if let Some(err_ty) = &self.error_type {
            quote! {
                {
                    #[allow(unused_imports)]
                    use ::doxa::__private::{
                        BareSchemaImplementedAdhoc as _,
                        BareSchemaMissingAdhoc as _,
                    };
                    ::doxa::__private::BareSchemaContribution::<
                        #err_ty,
                    >::new()
                    .__collect(__out);
                }
            }
        } else {
            quote! {}
        };
        // One schema probe per type argument nested inside the return type.
        let generic_arg_schemas_tt = if let Some(ret_ty) = &self.return_type {
            let mut nested: Vec<Type> = Vec::new();
            collect_nested_type_args(ret_ty, &mut nested);
            let probes = nested.iter().map(|ty| {
                quote! {
                    {
                        #[allow(unused_imports)]
                        use ::doxa::__private::{
                            GenericArgSchemaImplementedAdhoc as _,
                            GenericArgSchemaMissingAdhoc as _,
                        };
                        ::doxa::__private::GenericArgSchemaContribution::<
                            #ty,
                        >::new()
                        .__collect(__out);
                    }
                }
            });
            quote! { #(#probes)* }
        } else {
            quote! {}
        };
        // Two snippets for the return type: one describing the response on an
        // operation (ops side), and one collecting only the schemas — the
        // latter feeds a throwaway operation because `__describe` needs one.
        let (response_body_ops_tt, response_body_schemas_tt) = match &self.return_type {
            Some(ret_ty) => (
                quote! {
                    {
                        #[allow(unused_imports)]
                        use ::doxa::__private::{
                            ResponseBodyImplementedAdhoc as _,
                            ResponseBodyMissingAdhoc as _,
                        };
                        ::doxa::__private::ResponseBodyContribution::<
                            #ret_ty,
                        >::new()
                        .__describe(__op, &mut __schemas);
                    }
                },
                quote! {
                    {
                        #[allow(unused_imports)]
                        use ::doxa::__private::{
                            ResponseBodyImplementedAdhoc as _,
                            ResponseBodyMissingAdhoc as _,
                        };
                        let mut __throwaway_op =
                            ::utoipa::openapi::path::OperationBuilder::new().build();
                        ::doxa::__private::ResponseBodyContribution::<
                            #ret_ty,
                        >::new()
                        .__describe(&mut __throwaway_op, __out);
                    }
                },
            ),
            None => (quote! {}, quote! {}),
        };
        let arg_types_for_ops = self.arg_types.clone();
        // ApidocHandlerOps: mutate the already-registered operation(s) for this
        // path — per-argument security contributions plus the response body.
        pre_items.extend(quote! {
            impl ::doxa::ApidocHandlerOps for #path_struct_ident {
                fn augment(
                    __paths: &mut ::utoipa::openapi::path::Paths,
                ) {
                    let __path_str = <Self as ::utoipa::Path>::path();
                    let __methods = <Self as ::utoipa::Path>::methods();
                    let __item = match __paths.paths.get_mut(&__path_str) {
                        ::core::option::Option::Some(item) => item,
                        ::core::option::Option::None => return,
                    };
                    for __method in __methods {
                        let __op = match ::doxa::operation_for_method_mut(
                            __item, __method,
                        ) {
                            ::core::option::Option::Some(op) => op,
                            ::core::option::Option::None => continue,
                        };
                        #(
                            {
                                #[allow(unused_imports)]
                                use ::doxa::__private::{
                                    OpSecurityImplementedAdhoc as _,
                                    OpSecurityMissingAdhoc as _,
                                };
                                ::doxa::__private::OpSecurityContribution::<
                                    #arg_types_for_ops,
                                >::new()
                                .__describe(__op);
                            }
                        )*
                        // Schemas gathered here are discarded: this impl only
                        // updates the operation; schema collection happens in
                        // `ApidocHandlerSchemas::collect` below.
                        let mut __schemas: ::std::vec::Vec<(
                            ::std::string::String,
                            ::utoipa::openapi::RefOr<::utoipa::openapi::schema::Schema>,
                        )> = ::std::vec::Vec::new();
                        #response_body_ops_tt
                        drop(__schemas);
                    }
                }
            }
        });
        // ApidocHandlerSchemas: collect schemas for argument types, the error
        // type, the response body, and nested generic arguments.
        pre_items.extend(quote! {
            impl ::doxa::ApidocHandlerSchemas for #path_struct_ident {
                fn collect(
                    __out: &mut ::std::vec::Vec<(
                        ::std::string::String,
                        ::utoipa::openapi::RefOr<::utoipa::openapi::schema::Schema>,
                    )>,
                ) {
                    // Keep `__out` "used" even when every snippet below is empty.
                    let _ = &__out;
                    #(
                        {
                            #[allow(unused_imports)]
                            use ::doxa::__private::{
                                InnerSchemaImplementedAdhoc as _,
                                InnerSchemaMissingAdhoc as _,
                            };
                            ::doxa::__private::InnerSchemaContribution::<
                                #arg_types_for_schemas,
                            >::new()
                            .__collect(__out);
                        }
                    )*
                    #error_schemas_tt
                    #response_body_schemas_tt
                    #generic_arg_schemas_tt
                }
            }
        });
        // Inferred `params(...)`, unless the user wrote their own.
        if !user_keys.contains(&"params") {
            let mut entries: Vec<TokenStream> = Vec::new();
            for ty in &self.header_marker_types {
                entries.push(quote! { ::doxa::DocHeaderEntry<#ty> });
            }
            if !self.arg_types.is_empty() {
                let arg_types = &self.arg_types;
                let names_lits: Vec<_> = path_param_names.iter().map(|s| s.as_str()).collect();
                // IntoParams: per-argument query/path/path-scalar/header
                // parameter contributions, gated by the adhoc-trait pattern.
                pre_items.extend(quote! {
                    impl ::utoipa::IntoParams for #path_struct_ident {
                        fn into_params(
                            _: impl ::core::ops::Fn()
                                -> ::core::option::Option<::utoipa::openapi::path::ParameterIn>,
                        ) -> ::std::vec::Vec<::utoipa::openapi::path::Parameter> {
                            const __PATH_NAMES: &[&'static str] = &[#(#names_lits),*];
                            let mut __out: ::std::vec::Vec<::utoipa::openapi::path::Parameter> =
                                ::std::vec::Vec::new();
                            #(
                                {
                                    #[allow(unused_imports)]
                                    use ::doxa::__private::{
                                        QueryParamsImplementedAdhoc as _,
                                        QueryParamsMissingAdhoc as _,
                                    };
                                    __out.extend(
                                        ::doxa::__private::QueryParamContribution::<
                                            #arg_types,
                                        >::new()
                                        .__collect(),
                                    );
                                }
                                {
                                    #[allow(unused_imports)]
                                    use ::doxa::__private::{
                                        PathParamsImplementedAdhoc as _,
                                        PathParamsMissingAdhoc as _,
                                    };
                                    __out.extend(
                                        ::doxa::__private::PathParamContribution::<
                                            #arg_types,
                                        >::new()
                                        .__collect(__PATH_NAMES),
                                    );
                                }
                                {
                                    #[allow(unused_imports)]
                                    use ::doxa::__private::{
                                        PathScalarImplementedAdhoc as _,
                                        PathScalarMissingAdhoc as _,
                                    };
                                    __out.extend(
                                        ::doxa::__private::PathScalarContribution::<
                                            #arg_types,
                                        >::new()
                                        .__collect(__PATH_NAMES),
                                    );
                                }
                                {
                                    #[allow(unused_imports)]
                                    use ::doxa::__private::{
                                        HeaderParamsImplementedAdhoc as _,
                                        HeaderParamsMissingAdhoc as _,
                                    };
                                    __out.extend(
                                        ::doxa::__private::HeaderParamContribution::<
                                            #arg_types,
                                        >::new()
                                        .__collect(),
                                    );
                                }
                            )*
                            __out
                        }
                    }
                });
                entries.push(quote! { #path_struct_ident });
            }
            if !entries.is_empty() {
                attr_additions.extend(quote! { , params(#(#entries),*) });
            }
        }
        // Inferred `responses(...)` from the error type, unless user-supplied.
        if !user_keys.contains(&"responses") {
            if let Some(error) = &self.error_type {
                attr_additions.extend(quote! { , responses(#error) });
            }
        }
        InferredTokens {
            pre_items,
            attr_additions,
        }
    }
}
/// Returns the canonical names of the recognized attribute keys the user
/// supplied in `extra`, in order of appearance.
///
/// `tags` is canonicalized to `tag`; unrecognized keys are ignored.
pub fn collect_user_keys(extra: &[syn::Meta]) -> Vec<&'static str> {
    // (attribute ident, canonical key) pairs.
    const ALIASES: &[(&str, &'static str)] = &[
        ("request_body", "request_body"),
        ("params", "params"),
        ("responses", "responses"),
        ("security", "security"),
        ("tag", "tag"),
        ("tags", "tag"),
        ("operation_id", "operation_id"),
        ("description", "description"),
        ("summary", "summary"),
    ];
    let mut keys = Vec::new();
    for meta in extra {
        let path = meta.path();
        if let Some((_, canonical)) = ALIASES.iter().find(|(ident, _)| path.is_ident(ident)) {
            keys.push(*canonical);
        }
    }
    keys
}
/// Dead-code anchor that keeps the `Spanned` and `ToTokens` trait imports
/// referenced so they are not reported as unused.
#[allow(dead_code)]
fn _trait_anchors(t: &Type, _t2: TokenStream) {
    let _span = t.span();
    let _tokens = t.to_token_stream();
}