use proc_macro2::TokenStream;
/// One registered code-emitting tool, discovered at link time via `inventory`.
pub struct EmitEntry {
    /// Tool name used for lookup by `dispatch_emit`.
    pub tool: &'static str,
    /// Name of the crate that registered this entry (its `CARGO_PKG_NAME`).
    pub crate_name: &'static str,
    /// Builds the emitter from its JSON parameters; `Err` carries a message.
    pub constructor: fn(serde_json::Value) -> Result<Box<dyn EmitCode>, String>,
}
// Gather every `EmitEntry` submitted across all linked crates.
inventory::collect!(EmitEntry);
/// Look up `tool` among all registered emitters and run its constructor
/// on `params`.
///
/// Returns an error string when no entry matches the tool name, or when
/// the matching constructor rejects `params`. If several crates register
/// the same tool name, the first entry found wins.
pub fn dispatch_emit(tool: &str, params: serde_json::Value) -> Result<Box<dyn EmitCode>, String> {
    for entry in inventory::iter::<EmitEntry>() {
        if entry.tool == tool {
            return (entry.constructor)(params);
        }
    }
    Err(format!("unknown emit tool: '{tool}'"))
}
/// Like `dispatch_emit`, but disambiguates the tool by the crate that
/// registered it, so two crates may expose the same tool name.
///
/// Returns an error string when no `(crate, tool)` pair matches, or when
/// the matching constructor rejects `params`.
pub fn dispatch_emit_from(
    tool: &str,
    crate_name: &str,
    params: serde_json::Value,
) -> Result<Box<dyn EmitCode>, String> {
    for entry in inventory::iter::<EmitEntry>() {
        if entry.tool == tool && entry.crate_name == crate_name {
            return (entry.constructor)(params);
        }
    }
    Err(format!("unknown emit tool: '{crate_name}::{tool}'"))
}
/// Register `$T` (a `Deserialize + EmitCode` type) under the tool name
/// `$tool` for discovery through `dispatch_emit`/`dispatch_emit_from`.
///
/// Expands to a hidden constructor that deserializes the JSON params into
/// `$T` and boxes it as `dyn EmitCode`, plus an `inventory::submit!` of the
/// resulting `EmitEntry`. All paths are routed through the `elicitation`
/// crate's re-exports so the macro works from downstream crates without
/// extra imports.
#[macro_export]
macro_rules! register_emit {
    ($tool:literal, $T:ty) => {
        // `const _: ()` gives the generated items a fresh, collision-free scope,
        // so the macro can be invoked multiple times in one module.
        const _: () = {
            fn __emit_constructor(
                v: elicitation::serde_json::Value,
            ) -> ::std::result::Result<
                ::std::boxed::Box<dyn elicitation::emit_code::EmitCode>,
                ::std::string::String,
            > {
                elicitation::serde_json::from_value::<$T>(v)
                    .map(|p| {
                        ::std::boxed::Box::new(p)
                            as ::std::boxed::Box<dyn elicitation::emit_code::EmitCode>
                    })
                    .map_err(|e| e.to_string())
            }
            elicitation::inventory::submit! {
                elicitation::emit_code::EmitEntry {
                    tool: $tool,
                    // Records which crate performed the registration, enabling
                    // crate-qualified dispatch.
                    crate_name: env!("CARGO_PKG_NAME"),
                    constructor: __emit_constructor,
                }
            }
        };
    };
}
/// Types that can render themselves as a Rust expression which, when
/// compiled, evaluates back to the current value.
pub trait ToCodeLiteral {
    /// Tokens for an expression reproducing this value.
    fn to_code_literal(&self) -> TokenStream;
    /// Tokens naming this type (used e.g. for `None::<T>` turbofish).
    /// Defaults to `_`, leaving the type to inference at the use site.
    fn type_tokens() -> TokenStream
    where
        Self: Sized,
    {
        quote::quote! { _ }
    }
}
/// Hook for emitting custom code from a parameter value of type `P`.
pub trait CustomEmit<P> {
    /// Tokens generated from `params`.
    fn emit_code(params: &P) -> TokenStream;
}
/// A workflow step (or value) that can emit the Rust code executing it.
pub trait EmitCode {
    /// Tokens for this step's code.
    fn emit_code(&self) -> TokenStream;
    /// Extra crates the emitted code depends on (none by default).
    fn crate_deps(&self) -> Vec<CrateDep> {
        vec![]
    }
    /// When `true`, the emitted code runs in the surrounding scope instead
    /// of its own `{ }` block, so its bindings stay visible to later steps
    /// (see `BinaryScaffold::render`).
    fn shared_scope(&self) -> bool {
        false
    }
}
/// A pre-rendered piece of Rust source kept as a plain string.
#[derive(Debug, Clone)]
pub struct RawFragment(pub String);
impl EmitCode for RawFragment {
    /// Parse the stored source text into a token stream.
    ///
    /// Invalid fragments are silently replaced with an empty stream rather
    /// than panicking — emission is best-effort by design.
    fn emit_code(&self) -> TokenStream {
        self.0
            .parse()
            .unwrap_or_else(|_| quote::quote!())
    }
    // NOTE: the former explicit `crate_deps` override returned `vec![]`,
    // which is exactly the trait's default — the default is relied on now.
}
/// A Cargo dependency required by emitted code.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CrateDep {
    /// Crate name as it appears under `[dependencies]`.
    pub name: &'static str,
    /// Version requirement string.
    pub version: &'static str,
    /// Cargo features to enable (may be empty).
    pub features: &'static [&'static str],
}
impl CrateDep {
    /// Dependency on `name` at `version` with no extra features enabled.
    pub const fn new(name: &'static str, version: &'static str) -> Self {
        Self { name, version, features: &[] }
    }

    /// Dependency on `name` at `version` with the given cargo features.
    pub const fn with_features(
        name: &'static str,
        version: &'static str,
        features: &'static [&'static str],
    ) -> Self {
        Self { name, version, features }
    }

    /// Render this dependency as one `[dependencies]` line in Cargo.toml
    /// syntax, using the inline-table form only when features are present.
    pub fn to_toml_line(&self) -> String {
        if self.features.is_empty() {
            return format!(r#"{} = "{}""#, self.name, self.version);
        }
        let quoted: Vec<String> = self
            .features
            .iter()
            .map(|feature| format!(r#""{}""#, feature))
            .collect();
        format!(
            r#"{} = {{ version = "{}", features = [{}] }}"#,
            self.name,
            self.version,
            quoted.join(", ")
        )
    }
}
// For types that already implement `quote::ToTokens`, emitting code is just
// rendering the value's own tokens (e.g. `42i32`, `"abc"`, `true`).
macro_rules! impl_emit_totokens {
    ($($T:ty),+ $(,)?) => {
        $(
            impl EmitCode for $T {
                fn emit_code(&self) -> TokenStream {
                    let mut ts = TokenStream::new();
                    quote::ToTokens::to_tokens(self, &mut ts);
                    ts
                }
            }
        )+
    };
}
impl_emit_totokens!(
    bool, i8, i16, i32, i64, i128, u8, u16, u32, u64, u128, usize, isize, f32, f64, char, String,
);
/// Assembles a standalone binary crate (`main.rs` + `Cargo.toml`) from a
/// sequence of emit-capable workflow steps.
pub struct BinaryScaffold {
    // Ordered steps; each contributes a statement or block to `main`.
    steps: Vec<Box<dyn EmitCode>>,
    // Whether the generated `main` initializes `tracing_subscriber`.
    with_tracing: bool,
    // When set, local `elicit*` crates become `path` dependencies under
    // `<root>/crates/<name>` in the generated Cargo.toml.
    workspace_root: Option<std::path::PathBuf>,
}
impl BinaryScaffold {
    /// Create a scaffold from ordered workflow steps.
    ///
    /// `with_tracing` controls whether the generated `main` installs a
    /// `tracing_subscriber` before the steps run.
    pub fn new(steps: Vec<Box<dyn EmitCode>>, with_tracing: bool) -> Self {
        Self {
            steps,
            with_tracing,
            workspace_root: None,
        }
    }

    /// Set the workspace root used to rewrite local `elicit*` crates into
    /// `path = "..."` dependencies in the generated Cargo.toml.
    pub fn with_workspace_root(mut self, root: impl Into<std::path::PathBuf>) -> Self {
        self.workspace_root = Some(root.into());
        self
    }

    /// Explicit root if set, otherwise the `ELICIT_WORKSPACE_ROOT` env var.
    fn resolved_workspace_root(&self) -> Option<std::path::PathBuf> {
        self.workspace_root
            .clone()
            .or_else(|| std::env::var("ELICIT_WORKSPACE_ROOT").ok().map(Into::into))
    }

    /// All crate dependencies: the scaffold's own (tokio/tracing) first,
    /// then each step's, deduplicated by crate name — the first occurrence
    /// wins and insertion order is preserved.
    pub fn all_deps(&self) -> Vec<CrateDep> {
        let mut seen = std::collections::HashSet::new();
        let mut deps = Vec::new();
        let scaffold_deps = [
            CrateDep::with_features("tokio", "1", &["full"]),
            CrateDep::new("tracing-subscriber", "0.3"),
            CrateDep::new("tracing", "0.1"),
        ];
        for dep in scaffold_deps {
            if seen.insert(dep.name) {
                deps.push(dep);
            }
        }
        for step in &self.steps {
            for dep in step.crate_deps() {
                if seen.insert(dep.name) {
                    deps.push(dep);
                }
            }
        }
        deps
    }

    /// Render the complete `main.rs` token stream.
    ///
    /// Each step becomes either an isolating `{ ... }` block or, when the
    /// step asks for `shared_scope`, a bare `...;` statement so its bindings
    /// stay visible to later steps.
    pub fn render(&self) -> TokenStream {
        let step_tokens: Vec<TokenStream> = self
            .steps
            .iter()
            .map(|s| {
                let code = s.emit_code();
                if s.shared_scope() {
                    quote::quote! { #code ; }
                } else {
                    quote::quote! { { #code } }
                }
            })
            .collect();
        let tracing_init = if self.with_tracing {
            quote::quote! { tracing_subscriber::fmt::init(); }
        } else {
            TokenStream::new()
        };
        // Computed once — this was previously recomputed three times below,
        // walking every step's `crate_deps()` on each call.
        let deps = self.all_deps();
        let mut use_stmts: Vec<TokenStream> = deps
            .iter()
            .filter(|d| d.name.starts_with("elicit"))
            .map(|d| {
                let krate: TokenStream = d.name.parse().expect("valid ident");
                if d.name == "elicitation" {
                    quote::quote! { use #krate::contracts::*; }
                } else {
                    quote::quote! { use #krate::*; }
                }
            })
            .collect();
        if deps.iter().any(|d| d.name == "reqwest") {
            use_stmts.push(quote::quote! { use reqwest::header::HeaderMap; });
        }
        if deps.iter().any(|d| d.name == "elicit_reqwest") {
            use_stmts.push(quote::quote! { use std::collections::HashMap; });
        }
        quote::quote! {
            #( #use_stmts )*
            #[tokio::main]
            async fn main() -> Result<(), Box<dyn std::error::Error>> {
                #tracing_init
                #( #step_tokens )*
                Ok(())
            }
        }
    }

    /// Pretty-print the rendered tokens as formatted Rust source.
    ///
    /// # Errors
    /// Returns the `syn` error when the tokens do not form a valid file.
    pub fn to_source(&self) -> Result<String, syn::Error> {
        let tokens = self.render();
        let file: syn::File = syn::parse2(tokens)?;
        Ok(prettyplease::unparse(&file))
    }

    /// Build the Cargo.toml text for a standalone package named
    /// `package_name`. Local `elicit*` crates become `path` dependencies
    /// when a workspace root is known; everything else uses the version
    /// from `CrateDep::to_toml_line`.
    pub fn to_cargo_toml(&self, package_name: &str) -> String {
        let ws_root = self.resolved_workspace_root();
        let deps = self.all_deps();
        let dep_lines: String = deps
            .iter()
            .map(|d| {
                let line = if let Some(ref root) = ws_root {
                    if d.name == "elicitation"
                        || d.name.starts_with("elicit_")
                        || d.name.starts_with("elicitation_")
                    {
                        let path = root.join("crates").join(d.name);
                        // Forward slashes keep the TOML portable on Windows.
                        let path_str = path.to_string_lossy().replace('\\', "/");
                        format!(r#"{} = {{ path = "{}" }}"#, d.name, path_str)
                    } else {
                        d.to_toml_line()
                    }
                } else {
                    d.to_toml_line()
                };
                format!("{line}\n")
            })
            .collect();
        format!(
            r#"[package]
name = "{}"
version = "0.1.0"
edition = "2021"
# Prevent cargo from treating this as a member of any parent workspace.
[workspace]
[dependencies]
{}
"#,
            package_name, dep_lines
        )
    }

    /// Write `src/main.rs` and `Cargo.toml` under `output_dir`, returning
    /// the path to the written `main.rs`.
    ///
    /// # Errors
    /// `EmitError::Syntax` when rendering produces invalid Rust,
    /// `EmitError::Io` on any filesystem failure.
    pub fn emit_to_disk(
        &self,
        output_dir: &std::path::Path,
        package_name: &str,
    ) -> Result<std::path::PathBuf, EmitError> {
        let src_dir = output_dir.join("src");
        std::fs::create_dir_all(&src_dir)?;
        let source = self.to_source().map_err(EmitError::Syntax)?;
        let main_rs = src_dir.join("main.rs");
        std::fs::write(&main_rs, &source)?;
        let cargo_toml = output_dir.join("Cargo.toml");
        std::fs::write(&cargo_toml, self.to_cargo_toml(package_name))?;
        Ok(main_rs)
    }
}
/// Compile the generated project in release mode and return the path to the
/// produced binary.
///
/// The binary name is assumed to match the project directory name (cargo's
/// default for a package named after its directory); callers that rename
/// the package must locate the binary themselves.
///
/// # Errors
/// `CompileError::Io` when cargo cannot be launched, `CompileError::CargoFailed`
/// (carrying captured stderr) when the build fails.
pub fn compile(project_dir: &std::path::Path) -> Result<std::path::PathBuf, CompileError> {
    let output = std::process::Command::new("cargo")
        .args(["build", "--release"])
        .current_dir(project_dir)
        .output()
        .map_err(|e| CompileError::Io(e.to_string()))?;
    if !output.status.success() {
        return Err(CompileError::CargoFailed(
            String::from_utf8_lossy(&output.stderr).into_owned(),
        ));
    }
    let name = project_dir
        .file_name()
        .unwrap_or(std::ffi::OsStr::new("generated_workflow"));
    let mut binary = project_dir.join("target/release").join(name);
    // Cargo appends `.exe` on Windows; without this the returned path
    // would never exist there.
    if cfg!(windows) {
        binary.set_extension("exe");
    }
    Ok(binary)
}
/// Errors from writing a scaffold to disk.
#[derive(Debug, derive_more::Display, derive_more::Error)]
pub enum EmitError {
    /// The rendered tokens failed to parse as a Rust file.
    #[display("Syntax error in emitted code: {}", _0)]
    Syntax(#[error(not(source))] syn::Error),
    /// Filesystem failure while creating or writing output files.
    #[display("IO error: {}", _0)]
    Io(#[error(not(source))] std::io::Error),
}
// Lets `?` convert filesystem errors directly inside `emit_to_disk`.
impl From<std::io::Error> for EmitError {
    fn from(e: std::io::Error) -> Self {
        EmitError::Io(e)
    }
}
/// Errors from invoking `cargo build` on a generated project.
#[derive(Debug, derive_more::Display, derive_more::Error)]
pub enum CompileError {
    /// Cargo ran but reported failure; payload is captured stderr.
    #[display("Compilation failed:\n{}", _0)]
    CargoFailed(#[error(not(source))] String),
    /// Cargo could not be launched (message from the spawn error).
    #[display("Could not launch cargo: {}", _0)]
    Io(#[error(not(source))] String),
}
impl<T: EmitCode> EmitCode for Vec<T> {
    /// Emit a `vec![...]` literal built from each element's emitted code.
    fn emit_code(&self) -> TokenStream {
        let mut elems = Vec::with_capacity(self.len());
        for item in self {
            elems.push(item.emit_code());
        }
        quote::quote! { vec![ #( #elems ),* ] }
    }
}
impl<T: EmitCode> EmitCode for Option<T> {
    /// Emit `Some(<inner code>)` or `None`, mirroring the runtime value.
    fn emit_code(&self) -> TokenStream {
        if let Some(value) = self {
            let value_ts = value.emit_code();
            quote::quote! { Some(#value_ts) }
        } else {
            quote::quote! { None }
        }
    }
}
impl EmitCode for std::path::PathBuf {
    /// Emit `PathBuf::from("<path>")`; non-UTF-8 segments are replaced with
    /// U+FFFD by `to_string_lossy`.
    fn emit_code(&self) -> TokenStream {
        let lossy = self.to_string_lossy();
        let path_text: &str = &lossy;
        quote::quote! { std::path::PathBuf::from(#path_text) }
    }
}
impl EmitCode for std::time::Duration {
    /// Emit a constructor that reproduces this duration exactly.
    ///
    /// Uses whole seconds plus subsecond nanos. The previous
    /// `self.as_nanos() as u64` silently truncated any duration longer than
    /// `u64::MAX` nanoseconds (~584 years); `Duration::new(secs, nanos)`
    /// covers the full representable range losslessly.
    fn emit_code(&self) -> TokenStream {
        let secs = self.as_secs();
        let nanos = self.subsec_nanos();
        quote::quote! { std::time::Duration::new(#secs, #nanos) }
    }
}
// Tuple support: each element emits independently; `paste` derives distinct
// local bindings (`a_val`, `b_val`, …) from the lowercased type-parameter
// names so the inner `quote!` has one interpolation identifier per element.
macro_rules! impl_emit_tuple {
    ( $( $T:ident ),+ ; $( $idx:tt ),+ ) => {
        impl< $( $T: EmitCode ),+ > EmitCode for ( $( $T, )+ ) {
            fn emit_code(&self) -> TokenStream {
                paste::paste! {
                    $( let [<$T:lower _val>] = self.$idx.emit_code(); )+
                    quote::quote! { ( $( #[<$T:lower _val>] ),+ ) }
                }
            }
        }
    };
}
impl_emit_tuple!(A, B; 0, 1);
impl_emit_tuple!(A, B, C; 0, 1, 2);
impl_emit_tuple!(A, B, C, D; 0, 1, 2, 3);
#[cfg(feature = "serde_json")]
impl EmitCode for serde_json::Value {
    /// Embed the value as its serialized JSON text, parsed back at runtime
    /// in the generated code.
    fn emit_code(&self) -> TokenStream {
        let json = self.to_string();
        quote::quote! {
            serde_json::from_str(#json).expect("valid json literal")
        }
    }
}
#[cfg(feature = "url")]
impl EmitCode for url::Url {
    // Round-trips through the URL's string form; `expect` in the generated
    // code is safe because the text came from an already-parsed `Url`.
    fn emit_code(&self) -> TokenStream {
        let s = self.as_str();
        quote::quote! { url::Url::parse(#s).expect("valid URL") }
    }
}
#[cfg(feature = "uuid")]
impl EmitCode for uuid::Uuid {
    // Emits a parse of the canonical hyphenated string form.
    fn emit_code(&self) -> TokenStream {
        let s = self.to_string();
        quote::quote! { uuid::Uuid::parse_str(#s).expect("valid UUID") }
    }
}
// IP addresses round-trip through their canonical `Display` form and a
// turbofished `parse` in the generated code.
impl EmitCode for std::net::IpAddr {
    fn emit_code(&self) -> TokenStream {
        let s = self.to_string();
        quote::quote! { #s.parse::<std::net::IpAddr>().expect("valid IP") }
    }
}
impl EmitCode for std::net::Ipv4Addr {
    fn emit_code(&self) -> TokenStream {
        let s = self.to_string();
        quote::quote! { #s.parse::<std::net::Ipv4Addr>().expect("valid IPv4") }
    }
}
impl EmitCode for std::net::Ipv6Addr {
    fn emit_code(&self) -> TokenStream {
        let s = self.to_string();
        quote::quote! { #s.parse::<std::net::Ipv6Addr>().expect("valid IPv6") }
    }
}
#[cfg(feature = "chrono")]
impl EmitCode for chrono::DateTime<chrono::Utc> {
    // Round-trips through RFC3339; the generated code re-parses into a
    // fixed-offset datetime and normalizes back to UTC.
    fn emit_code(&self) -> TokenStream {
        let s = self.to_rfc3339();
        quote::quote! {
            chrono::DateTime::parse_from_rfc3339(#s)
                .expect("valid RFC3339 datetime")
                .with_timezone(&chrono::Utc)
        }
    }
}
#[cfg(feature = "chrono")]
impl EmitCode for chrono::NaiveDateTime {
    // One explicit format string serves both directions, so the emitted
    // parse always matches the formatted text (fractional seconds included).
    fn emit_code(&self) -> TokenStream {
        let s = self.format("%Y-%m-%dT%H:%M:%S%.f").to_string();
        quote::quote! {
            chrono::NaiveDateTime::parse_from_str(#s, "%Y-%m-%dT%H:%M:%S%.f")
                .expect("valid NaiveDateTime")
        }
    }
}
#[cfg(feature = "time")]
impl EmitCode for time::OffsetDateTime {
    /// Emit a runtime RFC3339 parse that reconstructs this datetime.
    ///
    /// # Panics
    /// Panics at emit time if the value cannot be formatted as RFC3339.
    /// The previous `.unwrap_or_default()` swallowed that error and emitted
    /// code parsing an empty string — a guaranteed, hard-to-diagnose panic
    /// inside the *generated* binary instead.
    fn emit_code(&self) -> TokenStream {
        let s = self
            .format(&time::format_description::well_known::Rfc3339)
            .expect("OffsetDateTime should be representable as RFC3339");
        quote::quote! {
            time::OffsetDateTime::parse(#s, &time::format_description::well_known::Rfc3339)
                .expect("valid OffsetDateTime")
        }
    }
}
#[cfg(feature = "jiff")]
impl EmitCode for jiff::Timestamp {
    // `Display` for `Timestamp` is its parseable textual form, so the
    // generated code can round-trip it with `parse`.
    fn emit_code(&self) -> TokenStream {
        let s = self.to_string();
        quote::quote! {
            #s.parse::<jiff::Timestamp>().expect("valid Timestamp")
        }
    }
}
#[cfg(feature = "reqwest")]
impl EmitCode for reqwest::StatusCode {
    // Rebuilt from the numeric code; `expect` in the generated code is safe
    // because the number came from an existing `StatusCode`.
    fn emit_code(&self) -> TokenStream {
        let n = self.as_u16();
        quote::quote! {
            reqwest::StatusCode::from_u16(#n).expect("valid status code")
        }
    }
}
// `ToCodeLiteral` for primitives: the literal is the value's own tokens and
// the type tokens come straight from the macro argument (`$T` is substituted
// by macro_rules before `quote!` expands, so it appears verbatim).
macro_rules! impl_to_code_literal_totokens {
    ($($T:ty),+ $(,)?) => {
        $(
            impl ToCodeLiteral for $T {
                fn to_code_literal(&self) -> TokenStream {
                    let mut ts = TokenStream::new();
                    quote::ToTokens::to_tokens(self, &mut ts);
                    ts
                }
                fn type_tokens() -> TokenStream {
                    quote::quote! { $T }
                }
            }
        )+
    };
}
// `String` is intentionally absent: it gets a dedicated impl below that
// appends `.to_string()` to the emitted literal.
impl_to_code_literal_totokens!(
    bool, i8, i16, i32, i64, i128, u8, u16, u32, u64, u128, usize, isize, f32, f64, char,
);
impl ToCodeLiteral for String {
    /// Emit `"<contents>".to_string()` so the expression owns its data.
    fn to_code_literal(&self) -> TokenStream {
        let contents: &str = self;
        quote::quote! { #contents.to_string() }
    }

    /// The `String` type name for turbofish positions.
    fn type_tokens() -> TokenStream {
        quote::quote! { String }
    }
}
impl<T: ToCodeLiteral> ToCodeLiteral for Option<T> {
    /// Emit `Some(<inner>)` or a turbofished `None::<T>`.
    fn to_code_literal(&self) -> TokenStream {
        match self {
            Some(v) => {
                let inner = v.to_code_literal();
                quote::quote! { ::std::option::Option::Some(#inner) }
            }
            None => {
                // Turbofish keeps `None` inferable when nothing else pins `T`.
                let t = <T as ToCodeLiteral>::type_tokens();
                quote::quote! { None::<#t> }
            }
        }
    }

    /// Previously missing: without this override the default `_` leaked into
    /// containers, so e.g. `Vec<Option<T>>::type_tokens()` rendered its
    /// element type as `_` instead of the real `Option<T>`.
    fn type_tokens() -> TokenStream {
        let t = <T as ToCodeLiteral>::type_tokens();
        quote::quote! { ::std::option::Option<#t> }
    }
}
impl<T: ToCodeLiteral> ToCodeLiteral for Vec<T> {
    /// `::std::vec::Vec<elem>` using the element type's own tokens.
    fn type_tokens() -> TokenStream {
        let elem = <T as ToCodeLiteral>::type_tokens();
        quote::quote! { ::std::vec::Vec<#elem> }
    }

    /// Emit `::std::vec![...]` over each element's literal.
    fn to_code_literal(&self) -> TokenStream {
        let mut items = Vec::with_capacity(self.len());
        for value in self {
            items.push(value.to_code_literal());
        }
        quote::quote! { ::std::vec![#(#items),*] }
    }
}
impl<T: ToCodeLiteral, const N: usize> ToCodeLiteral for [T; N] {
fn type_tokens() -> TokenStream {
let t = <T as ToCodeLiteral>::type_tokens();
let n = proc_macro2::Literal::usize_suffixed(N);
quote::quote! { [#t; #n] }
}
fn to_code_literal(&self) -> TokenStream {
let elements: Vec<_> = self.iter().map(|e| e.to_code_literal()).collect();
quote::quote! { [#(#elements),*] }
}
}
impl<V: ToCodeLiteral> ToCodeLiteral for std::collections::HashMap<String, V> {
    /// `HashMap<String, V>` with the value type's own tokens.
    fn type_tokens() -> TokenStream {
        let v = <V as ToCodeLiteral>::type_tokens();
        quote::quote! { ::std::collections::HashMap<::std::string::String, #v> }
    }

    /// Emit a collected `[(k, v), ...]` array literal for the map.
    ///
    /// Entries are sorted by key so the emitted source is deterministic.
    /// Iterating the `HashMap` directly produced a different token order on
    /// every run (random hash seed), defeating reproducible codegen; the
    /// runtime map built by the generated code is identical either way.
    fn to_code_literal(&self) -> TokenStream {
        let mut pairs: Vec<(&String, &V)> = self.iter().collect();
        pairs.sort_by(|a, b| a.0.cmp(b.0));
        let entries: Vec<_> = pairs
            .into_iter()
            .map(|(k, v)| {
                let v_ts = v.to_code_literal();
                quote::quote! { (#k.to_string(), #v_ts) }
            })
            .collect();
        quote::quote! {
            [#(#entries),*].into_iter().collect::<::std::collections::HashMap<_, _>>()
        }
    }
}
impl<T: ToCodeLiteral> ToCodeLiteral for Box<T> {
    /// `::std::boxed::Box<inner>` using the inner type's tokens.
    fn type_tokens() -> TokenStream {
        let boxed = <T as ToCodeLiteral>::type_tokens();
        quote::quote! { ::std::boxed::Box<#boxed> }
    }

    /// Emit `Box::new(<inner literal>)`.
    fn to_code_literal(&self) -> TokenStream {
        let value = self.as_ref().to_code_literal();
        quote::quote! { ::std::boxed::Box::new(#value) }
    }
}
impl<A: ToCodeLiteral, B: ToCodeLiteral> ToCodeLiteral for (A, B) {
    /// `(A, B)` pair type tokens.
    fn type_tokens() -> TokenStream {
        let first = <A as ToCodeLiteral>::type_tokens();
        let second = <B as ToCodeLiteral>::type_tokens();
        quote::quote! { (#first, #second) }
    }

    /// Emit a two-element tuple literal.
    fn to_code_literal(&self) -> TokenStream {
        let (a, b) = self;
        let first = a.to_code_literal();
        let second = b.to_code_literal();
        quote::quote! { (#first, #second) }
    }
}
// The impls below delegate `to_code_literal` to the corresponding
// `EmitCode` implementations above: for these types both traits emit the
// same reconstruction expression. `type_tokens` is left at its `_` default.
impl ToCodeLiteral for std::net::IpAddr {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
impl ToCodeLiteral for std::net::Ipv4Addr {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
impl ToCodeLiteral for std::net::Ipv6Addr {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
impl ToCodeLiteral for std::path::PathBuf {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
impl ToCodeLiteral for std::time::Duration {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
#[cfg(feature = "serde_json")]
impl ToCodeLiteral for serde_json::Value {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
#[cfg(feature = "url")]
impl ToCodeLiteral for url::Url {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
#[cfg(feature = "uuid")]
impl ToCodeLiteral for uuid::Uuid {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
#[cfg(feature = "chrono")]
impl ToCodeLiteral for chrono::DateTime<chrono::Utc> {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
#[cfg(feature = "chrono")]
impl ToCodeLiteral for chrono::NaiveDateTime {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
#[cfg(feature = "time")]
impl ToCodeLiteral for time::OffsetDateTime {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
#[cfg(feature = "jiff")]
impl ToCodeLiteral for jiff::Timestamp {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
#[cfg(feature = "reqwest")]
impl ToCodeLiteral for reqwest::StatusCode {
    fn to_code_literal(&self) -> TokenStream {
        EmitCode::emit_code(self)
    }
}
// Atomics: snapshot the current value with a `SeqCst` load and emit
// `Atomic*::new(<value>)`. The fully-qualified type path is assembled as a
// string from the macro argument and re-parsed into tokens.
macro_rules! impl_atomic_to_code_literal {
    ($($atomic:ident => $prim:ty),+ $(,)?) => {
        $(
            impl ToCodeLiteral for ::std::sync::atomic::$atomic {
                fn to_code_literal(&self) -> TokenStream {
                    use ::std::sync::atomic::Ordering;
                    let val = self.load(Ordering::SeqCst);
                    // Delegate to the matching primitive impl for the literal.
                    let val_lit = <$prim as ToCodeLiteral>::to_code_literal(&val);
                    let ty: TokenStream =
                        concat!("::std::sync::atomic::", stringify!($atomic))
                            .parse()
                            .expect("valid atomic type path");
                    quote::quote! { #ty::new(#val_lit) }
                }
                fn type_tokens() -> TokenStream {
                    concat!("::std::sync::atomic::", stringify!($atomic))
                        .parse()
                        .expect("valid atomic type path")
                }
            }
        )+
    };
}
impl_atomic_to_code_literal!(
    AtomicBool => bool,
    AtomicI8 => i8,
    AtomicI16 => i16,
    AtomicI32 => i32,
    AtomicI64 => i64,
    AtomicIsize => isize,
    AtomicU8 => u8,
    AtomicU16 => u16,
    AtomicU32 => u32,
    AtomicU64 => u64,
    AtomicUsize => usize,
);