1#![doc = include_str!("../README.md")]
2#![warn(missing_docs)]
3
4use std::env;
5use std::fs;
6use std::path::{Path, PathBuf};
7
8use proc_macro::TokenStream;
9use proc_macro_crate::{FoundCrate, crate_name};
10use proc_macro2::{Span, TokenStream as TokenStream2};
11use quote::{format_ident, quote};
12use syn::parse::{Parse, ParseStream};
13use syn::{Expr, Ident, LitStr, Token, parse_macro_input};
14
15#[proc_macro]
23pub fn code(input: TokenStream) -> TokenStream {
24 let input = parse_macro_input!(input as CodeInput);
25
26 match expand_code(input) {
27 Ok(tokens) => tokens.into(),
28 Err(error) => error.to_compile_error().into(),
29 }
30}
31
/// Parsed arguments of a `code!(...)` invocation.
struct CodeInput {
    // Path to the file to highlight; restricted by `eval_path_expr` to a
    // string literal, `concat!`, or `env!` expression.
    path: Expr,
    // Optional explicit `language = "..."` tag; when `None`, the language
    // is detected from the path via `arborium::detect_language`.
    language: Option<LitStr>,
}
36
37impl Parse for CodeInput {
38 fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
39 let path = input.parse()?;
40 let mut language = None;
41
42 if input.peek(Token![,]) {
43 input.parse::<Token![,]>()?;
44 if !input.is_empty() {
45 let ident = input.parse::<Ident>()?;
46 if ident != "language" {
47 return Err(syn::Error::new(
48 ident.span(),
49 "expected `language = \"...\"`",
50 ));
51 }
52 input.parse::<Token![=]>()?;
53 language = Some(input.parse()?);
54 }
55 }
56
57 Ok(Self { path, language })
58 }
59}
60
61fn expand_code(input: CodeInput) -> syn::Result<TokenStream2> {
62 let manifest_dir = env::var("CARGO_MANIFEST_DIR")
63 .map_err(|error| syn::Error::new(Span::call_site(), error.to_string()))?;
64 let manifest_dir = PathBuf::from(manifest_dir);
65 let macro_path = eval_path_expr(&input.path)?;
66 let absolute_path = resolve_manifest_path(&manifest_dir, ¯o_path);
67
68 let source = fs::read_to_string(&absolute_path).map_err(|error| {
69 syn::Error::new(
70 Span::call_site(),
71 format!("failed to read `{}`: {error}", absolute_path.display()),
72 )
73 })?;
74
75 let language = input
76 .language
77 .as_ref()
78 .map(LitStr::value)
79 .or_else(|| arborium::detect_language(¯o_path).map(str::to_string))
80 .ok_or_else(|| {
81 syn::Error::new(
82 Span::call_site(),
83 format!("could not detect language for `{macro_path}`; pass `language = \"...\"`"),
84 )
85 })?;
86
87 let mut highlighter = arborium::Highlighter::new();
88 let spans = highlighter
89 .highlight_spans(&language, &source)
90 .map_err(|error| syn::Error::new(Span::call_site(), error.to_string()))?;
91 let crate_path = dioxus_code_crate_path()?;
92
93 let language_lit = LitStr::new(&language, Span::call_site());
94 let absolute_lit = LitStr::new(&absolute_path.to_string_lossy(), Span::call_site());
95 let spans = normalize_spans(spans).into_iter().map(|span| {
96 let start = span.start;
97 let end = span.end;
98 let tag = LitStr::new(span.tag, Span::call_site());
99
100 quote! {
101 #crate_path::StaticSpan {
102 start: #start,
103 end: #end,
104 tag: #tag,
105 }
106 }
107 });
108
109 Ok(quote! {{
110 const SOURCE: &str = include_str!(#absolute_lit);
111 static SPANS: &[#crate_path::StaticSpan] = &[#(#spans),*];
112 #crate_path::CodeTree::from_static_parts(SOURCE, #language_lit, SPANS)
113 }})
114}
115
/// A highlight span that survived deduplication and tag resolution;
/// its fields are spliced into a generated `StaticSpan` literal.
struct NormalizedSpan {
    // Start of the highlighted range (units as reported by
    // `arborium::advanced::Span` — presumably byte offsets; not
    // verifiable from this file).
    start: u32,
    // Exclusive end of the highlighted range, same units as `start`.
    end: u32,
    tag: &'static str,
}
121
/// Intermediate span used during deduplication in `normalize_spans`:
/// retains the possibly-missing theme tag and the query pattern index so
/// ties between spans covering the same range can be broken.
struct RawSpan {
    start: u32,
    end: u32,
    // `None` when the capture has no theme tag; such spans are dropped
    // after deduplication.
    tag: Option<&'static str>,
    // Index of the tree-sitter query pattern that produced the capture;
    // higher indices win ties for the same range.
    pattern_index: u32,
}
128
129fn normalize_spans(spans: Vec<arborium::advanced::Span>) -> Vec<NormalizedSpan> {
130 use std::collections::HashMap;
131
132 let mut deduped: HashMap<(u32, u32), RawSpan> = HashMap::new();
133 for span in spans {
134 let span = RawSpan {
135 start: span.start,
136 end: span.end,
137 tag: arborium_theme::tag_for_capture(&span.capture),
138 pattern_index: span.pattern_index,
139 };
140 let key = (span.start, span.end);
141
142 if let Some(existing) = deduped.get(&key) {
143 let should_replace = match (span.tag.is_some(), existing.tag.is_some()) {
144 (true, false) => true,
145 (false, true) => false,
146 _ => span.pattern_index >= existing.pattern_index,
147 };
148 if should_replace {
149 deduped.insert(key, span);
150 }
151 } else {
152 deduped.insert(key, span);
153 }
154 }
155
156 let mut spans: Vec<_> = deduped
157 .into_values()
158 .filter_map(|span| {
159 Some(NormalizedSpan {
160 start: span.start,
161 end: span.end,
162 tag: span.tag?,
163 })
164 })
165 .collect();
166
167 spans.sort_by_key(|span| (span.start, span.end));
168
169 let mut coalesced: Vec<NormalizedSpan> = Vec::with_capacity(spans.len());
170 for span in spans {
171 if let Some(last) = coalesced.last_mut()
172 && span.tag == last.tag
173 && span.start <= last.end
174 {
175 last.end = last.end.max(span.end);
176 continue;
177 }
178 coalesced.push(span);
179 }
180
181 coalesced
182}
183
184fn dioxus_code_crate_path() -> syn::Result<TokenStream2> {
185 match crate_name("dioxus-code") {
186 Ok(FoundCrate::Itself) => Ok(quote!(crate)),
187 Ok(FoundCrate::Name(name)) => {
188 let ident = format_ident!("{}", name);
189 Ok(quote!(::#ident))
190 }
191 Err(error) => Err(syn::Error::new(Span::call_site(), error.to_string())),
192 }
193}
194
/// Resolves a macro-supplied path against the caller's manifest
/// directory. A leading `/` is treated as "relative to the manifest
/// root", not as a filesystem-absolute path.
fn resolve_manifest_path(manifest_dir: &Path, path: &str) -> PathBuf {
    // Stripping the slash first keeps `join` from discarding
    // `manifest_dir` (joining an absolute path would replace the base).
    let relative = path.strip_prefix('/').unwrap_or(path);
    manifest_dir.join(relative)
}
202
203fn eval_path_expr(expr: &Expr) -> syn::Result<String> {
204 match expr {
205 Expr::Lit(expr_lit) => {
206 if let syn::Lit::Str(lit) = &expr_lit.lit {
207 Ok(lit.value())
208 } else {
209 Err(syn::Error::new_spanned(
210 expr,
211 "path must be a string literal",
212 ))
213 }
214 }
215 Expr::Macro(expr_macro) => {
216 let Some(ident) = expr_macro.mac.path.get_ident() else {
217 return Err(syn::Error::new_spanned(
218 expr,
219 "only string literals, concat!, and env! are supported",
220 ));
221 };
222
223 match ident.to_string().as_str() {
224 "concat" => eval_concat(expr_macro.mac.tokens.clone()),
225 "env" => eval_env(expr_macro.mac.tokens.clone()),
226 _ => Err(syn::Error::new_spanned(
227 expr,
228 "only string literals, concat!, and env! are supported",
229 )),
230 }
231 }
232 _ => Err(syn::Error::new_spanned(
233 expr,
234 "only string literals, concat!, and env! are supported",
235 )),
236 }
237}
238
239fn eval_concat(tokens: TokenStream2) -> syn::Result<String> {
240 struct Args {
241 exprs: syn::punctuated::Punctuated<Expr, Token![,]>,
242 }
243
244 impl Parse for Args {
245 fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
246 Ok(Self {
247 exprs: syn::punctuated::Punctuated::parse_terminated(input)?,
248 })
249 }
250 }
251
252 let args = syn::parse2::<Args>(tokens)?;
253 let mut value = String::new();
254 for expr in args.exprs {
255 value.push_str(&eval_path_expr(&expr)?);
256 }
257 Ok(value)
258}
259
260fn eval_env(tokens: TokenStream2) -> syn::Result<String> {
261 let lit = syn::parse2::<LitStr>(tokens)?;
262 env::var(lit.value()).map_err(|error| syn::Error::new(lit.span(), error.to_string()))
263}