// typhoon_handler_macro/lib.rs
1use {
2    proc_macro::TokenStream,
3    quote::{quote, ToTokens},
4    syn::{parse::Parse, parse_macro_input, punctuated::Punctuated, Path, Token},
5};
6
7#[proc_macro]
8pub fn handlers(item: TokenStream) -> TokenStream {
9    parse_macro_input!(item as Handlers)
10        .to_token_stream()
11        .into()
12}
13
/// Parsed input of the `handlers!` macro: a comma-separated list of paths,
/// each naming an instruction-handler function. List order is significant —
/// the i-th path is wired to discriminator byte `i`.
struct Handlers {
    instructions: Punctuated<Path, Token![,]>,
}
17
18impl Parse for Handlers {
19    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
20        let instructions = Punctuated::<Path, Token![,]>::parse_terminated(input)?;
21
22        Ok(Handlers { instructions })
23    }
24}
25
26impl ToTokens for Handlers {
27    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
28        let instructions = self.instructions.iter().enumerate().map(|(i, val)| {
29            let i = i as u8;
30            quote! {
31                #i => handle(accounts, data, #val),
32            }
33        });
34
35        let expanded = quote! {
36            entrypoint!(process_instruction);
37            nostd_panic_handler!();
38
39            pub fn process_instruction(
40                program_id: &Pubkey,
41                accounts: &[AccountInfo],
42                instruction_data: &[u8],
43            ) -> Result<(), ProgramError> {
44                if program_id != &crate::ID {
45                    return Err(ProgramError::IncorrectProgramId);
46                }
47
48                let (discriminator, data) = instruction_data.split_first().ok_or(ProgramError::InvalidInstructionData)?;
49                let result = match discriminator {
50                    #(#instructions)*
51                    _ => Err(ProgramError::InvalidInstructionData.into()),
52                };
53
54                #[cfg(feature = "logging")]
55                result.inspect_err(log_error)?;
56
57                #[cfg(not(feature = "logging"))]
58                result?;
59
60                Ok(())
61            }
62        };
63
64        expanded.to_tokens(tokens);
65    }
66}