#![allow(clippy::doc_markdown)]
use super::Transpiler;
use anyhow::Result;
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
impl Transpiler {
    /// Transpiles iterator-style methods (`map`, `filter`, `reduce`) on a
    /// collection expression.
    ///
    /// When `obj_tokens` already ends in an `.iter()` / `.into_iter()` call
    /// (detected heuristically from the token stream's string form), the
    /// adaptor is chained directly; otherwise an iterator is created first.
    /// `map` and `filter` eagerly collect into `Vec<_>`; `reduce` yields the
    /// iterator's `Option` result unchanged.
    ///
    /// # Errors
    /// Returns an error if `filter` is called with a wrong argument count.
    ///
    /// # Panics
    /// `unreachable!` for any other `method` — dispatch upstream is expected
    /// to route only iterator methods here.
    pub fn transpile_iterator_methods(
        &self,
        obj_tokens: &TokenStream,
        method: &str,
        arg_tokens: &[TokenStream],
    ) -> Result<TokenStream> {
        let obj_str = obj_tokens.to_string();
        // Heuristic on proc-macro2's spaced rendering, e.g. "vec . iter ()".
        // NOTE(review): the ". iter ( )" contains-variant assumes a spacing
        // (space inside empty parens) that may never be produced — confirm.
        let already_iter = obj_str.ends_with(". iter ()")
            || obj_str.ends_with(". into_iter ()")
            || obj_str.contains(". iter ( )");
        match method {
            "map" => {
                if already_iter {
                    Ok(quote! { #obj_tokens.map(#(#arg_tokens),*).collect::<Vec<_>>() })
                } else {
                    Ok(quote! { #obj_tokens.iter().map(#(#arg_tokens),*).collect::<Vec<_>>() })
                }
            }
            "filter" => {
                if already_iter {
                    Ok(quote! { #obj_tokens.filter(#(#arg_tokens),*).collect::<Vec<_>>() })
                } else {
                    // Validate arity before indexing so a malformed call
                    // surfaces as a transpile error, not an OOB panic —
                    // consistent with the other arity-checked methods below.
                    Self::require_exact_args("filter", arg_tokens, 1)?;
                    let user_closure = &arg_tokens[0];
                    // `Iterator::filter` hands the closure `&T`; adapt the
                    // user closure (written over values) by dereferencing.
                    Ok(
                        quote! { #obj_tokens.into_iter().filter(|__x| { let __f = #user_closure; __f(*__x) }).collect::<Vec<_>>() },
                    )
                }
            }
            "reduce" => {
                if already_iter {
                    Ok(quote! { #obj_tokens.reduce(#(#arg_tokens),*) })
                } else {
                    Ok(quote! { #obj_tokens.into_iter().reduce(#(#arg_tokens),*) })
                }
            }
            _ => unreachable!("Non-iterator method passed to transpile_iterator_methods"),
        }
    }

    /// Transpiles HashMap/HashSet-style methods.
    ///
    /// `contains_key` / `keys` / `values` / `entry` / `contains` pass straight
    /// through to the identically-named Rust method (via `method_ident`);
    /// Python-style `items` becomes a cloning key/value iterator; Python-style
    /// `update` becomes `extend`.
    ///
    /// # Panics
    /// `unreachable!` for any other `method`.
    pub fn transpile_map_set_methods(
        &self,
        obj_tokens: &TokenStream,
        method_ident: &proc_macro2::Ident,
        method: &str,
        arg_tokens: &[TokenStream],
    ) -> Result<TokenStream> {
        match method {
            // Direct pass-through: the Rust method has the same name.
            "contains_key" | "keys" | "values" | "entry" | "contains" => {
                Ok(quote! { #obj_tokens.#method_ident(#(#arg_tokens),*) })
            }
            // `items()`: iterate owned (cloned) (key, value) pairs.
            "items" => {
                Ok(quote! { #obj_tokens.iter().map(|(k, v)| (k.clone(), v.clone())) })
            }
            // `update(other)`: merge another collection in place.
            "update" => {
                Ok(quote! { #obj_tokens.extend(#(#arg_tokens),*) })
            }
            _ => unreachable!(
                "Non-map/set method {} passed to transpile_map_set_methods",
                method
            ),
        }
    }

    /// Transpiles a binary set operation (`union`, `intersection`,
    /// `difference`, …) into a call on `obj_tokens` that clones the borrowed
    /// elements and collects them into a fresh `HashSet`.
    ///
    /// # Errors
    /// Returns an error unless exactly one argument (the other set) is given.
    pub fn transpile_set_operations(
        &self,
        obj_tokens: &TokenStream,
        method: &str,
        arg_tokens: &[TokenStream],
    ) -> Result<TokenStream> {
        Self::require_exact_args(method, arg_tokens, 1)?;
        let other = &arg_tokens[0];
        let method_ident = format_ident!("{}", method);
        Ok(quote! {
            {
                // Block scope so the HashSet import does not leak.
                use std::collections::HashSet;
                #obj_tokens.#method_ident(&#other).cloned().collect::<HashSet<_>>()
            }
        })
    }

    /// Transpiles Ruby/Python-flavoured string methods to their Rust
    /// `str`/`String` equivalents.
    ///
    /// # Errors
    /// Returns an error if `substring` is not given exactly two arguments.
    ///
    /// # Panics
    /// `unreachable!` for any other `method`.
    pub fn transpile_string_methods(
        &self,
        obj_tokens: &TokenStream,
        method: &str,
        arg_tokens: &[TokenStream],
    ) -> Result<TokenStream> {
        match method {
            "to_s" | "to_string" => {
                Ok(quote! { #obj_tokens.to_string() })
            }
            "to_upper" | "upper" => {
                Ok(quote! { #obj_tokens.to_uppercase(#(#arg_tokens),*) })
            }
            "to_lower" | "lower" => {
                Ok(quote! { #obj_tokens.to_lowercase(#(#arg_tokens),*) })
            }
            "strip" => Ok(quote! { #obj_tokens.trim().to_string() }),
            // NOTE(review): unlike `strip`, these return a borrowed `&str`
            // (no `.to_string()`); confirm the asymmetry is intentional.
            "lstrip" => Ok(quote! { #obj_tokens.trim_start() }),
            "rstrip" => Ok(quote! { #obj_tokens.trim_end() }),
            "startswith" => Ok(quote! { #obj_tokens.starts_with(#(#arg_tokens),*) }),
            "endswith" => Ok(quote! { #obj_tokens.ends_with(#(#arg_tokens),*) }),
            "split" => {
                // Collect owned Strings so the result outlives the source.
                Ok(
                    quote! { #obj_tokens.split(#(#arg_tokens),*).map(|s| s.to_string()).collect::<Vec<String>>() },
                )
            }
            "replace" => Ok(quote! { #obj_tokens.replace(#(#arg_tokens),*) }),
            "length" => {
                Ok(quote! { #obj_tokens.len(#(#arg_tokens),*) })
            }
            "substring" => {
                Self::require_exact_args("substring", arg_tokens, 2)?;
                let start = &arg_tokens[0];
                let end = &arg_tokens[1];
                // Char-based (not byte-based) slicing, so multi-byte UTF-8
                // input cannot panic; saturating_sub guards end < start.
                Ok(quote! {
                    #obj_tokens.chars()
                    .skip(#start as usize)
                    .take((#end as usize).saturating_sub(#start as usize))
                    .collect::<String>()
                })
            }
            _ => unreachable!(
                "Non-string method {} passed to transpile_string_methods",
                method
            ),
        }
    }

    /// Transpiles advanced collection helpers: `slice`, `concat`, `flatten`,
    /// `unique`, `join`.
    ///
    /// # Errors
    /// Returns an error on a wrong argument count for any of these methods.
    ///
    /// # Panics
    /// `unreachable!` for any other `method`.
    pub fn transpile_advanced_collection_methods(
        &self,
        obj_tokens: &TokenStream,
        method: &str,
        arg_tokens: &[TokenStream],
    ) -> Result<TokenStream> {
        match method {
            "slice" => {
                Self::require_exact_args("slice", arg_tokens, 2)?;
                let start = &arg_tokens[0];
                let end = &arg_tokens[1];
                // Direct range indexing: generated code panics at runtime on
                // out-of-bounds ranges, matching Rust slice semantics.
                Ok(quote! { #obj_tokens[#start as usize..#end as usize].to_vec() })
            }
            "concat" => {
                Self::require_exact_args("concat", arg_tokens, 1)?;
                let other = &arg_tokens[0];
                Ok(quote! { [#obj_tokens, #other].concat() })
            }
            "flatten" => {
                Self::require_no_args("flatten", arg_tokens)?;
                Ok(quote! { #obj_tokens.into_iter().flatten().collect::<Vec<_>>() })
            }
            "unique" => {
                Self::require_no_args("unique", arg_tokens)?;
                // Dedupes via HashSet; note this does NOT preserve the
                // original element order.
                Ok(quote! {
                    {
                        use std::collections::HashSet;
                        #obj_tokens.into_iter().collect::<HashSet<_>>().into_iter().collect::<Vec<_>>()
                    }
                })
            }
            "join" => {
                Self::require_exact_args("join", arg_tokens, 1)?;
                let separator = &arg_tokens[0];
                Ok(quote! { #obj_tokens.join(&#separator) })
            }
            _ => unreachable!(
                "Non-advanced-collection method passed to transpile_advanced_collection_methods"
            ),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Transpiles an iterator method and returns the generated code as text.
    fn iter_out(obj: TokenStream, method: &str, args: &[TokenStream]) -> String {
        Transpiler::new()
            .transpile_iterator_methods(&obj, method, args)
            .expect("iterator method should transpile")
            .to_string()
    }

    /// Transpiles a map/set method on the receiver `map`, deriving the
    /// method ident from `method`, and returns the generated code as text.
    fn map_set_out(method: &str, args: &[TokenStream]) -> String {
        Transpiler::new()
            .transpile_map_set_methods(&quote! { map }, &format_ident!("{}", method), method, args)
            .expect("map/set method should transpile")
            .to_string()
    }

    /// Transpiles a binary set operation between `set_a` and `set_b`.
    fn set_out(method: &str) -> String {
        Transpiler::new()
            .transpile_set_operations(&quote! { set_a }, method, &[quote! { set_b }])
            .expect("set operation should transpile")
            .to_string()
    }

    /// Transpiles a string method and returns the generated code as text.
    fn string_out(obj: TokenStream, method: &str, args: &[TokenStream]) -> String {
        Transpiler::new()
            .transpile_string_methods(&obj, method, args)
            .expect("string method should transpile")
            .to_string()
    }

    /// Transpiles an advanced collection method and returns the code as text.
    fn collection_out(obj: TokenStream, method: &str, args: &[TokenStream]) -> String {
        Transpiler::new()
            .transpile_advanced_collection_methods(&obj, method, args)
            .expect("collection method should transpile")
            .to_string()
    }

    #[test]
    fn test_iterator_map() {
        let out = iter_out(quote! { vec }, "map", &[quote! { |x| x * 2 }]);
        assert!(out.contains("iter"));
        assert!(out.contains("map"));
        assert!(out.contains("collect"));
    }

    #[test]
    fn test_iterator_map_already_iter() {
        let out = iter_out(quote! { vec . iter () }, "map", &[quote! { |x| x * 2 }]);
        assert!(out.contains("map"));
    }

    #[test]
    fn test_iterator_filter() {
        let out = iter_out(quote! { vec }, "filter", &[quote! { |x| x > 0 }]);
        assert!(out.contains("into_iter"));
        assert!(out.contains("filter"));
    }

    #[test]
    fn test_iterator_reduce() {
        let out = iter_out(quote! { vec }, "reduce", &[quote! { |a, b| a + b }]);
        assert!(out.contains("into_iter"));
        assert!(out.contains("reduce"));
    }

    #[test]
    fn test_map_contains_key() {
        assert!(map_set_out("contains_key", &[quote! { "key" }]).contains("contains_key"));
    }

    #[test]
    fn test_map_items() {
        let out = map_set_out("items", &[]);
        assert!(out.contains("iter"));
        assert!(out.contains("clone"));
    }

    #[test]
    fn test_map_update() {
        assert!(map_set_out("update", &[quote! { other }]).contains("extend"));
    }

    #[test]
    fn test_map_keys() {
        assert!(map_set_out("keys", &[]).contains("keys"));
    }

    #[test]
    fn test_map_values() {
        assert!(map_set_out("values", &[]).contains("values"));
    }

    #[test]
    fn test_set_union() {
        let out = set_out("union");
        assert!(out.contains("union"));
        assert!(out.contains("HashSet"));
    }

    #[test]
    fn test_set_intersection() {
        assert!(set_out("intersection").contains("intersection"));
    }

    #[test]
    fn test_set_difference() {
        assert!(set_out("difference").contains("difference"));
    }

    #[test]
    fn test_string_to_s() {
        assert!(string_out(quote! { value }, "to_s", &[]).contains("to_string"));
    }

    #[test]
    fn test_string_to_upper() {
        assert!(string_out(quote! { s }, "to_upper", &[]).contains("to_uppercase"));
    }

    #[test]
    fn test_string_to_lower() {
        assert!(string_out(quote! { s }, "to_lower", &[]).contains("to_lowercase"));
    }

    #[test]
    fn test_string_strip() {
        assert!(string_out(quote! { s }, "strip", &[]).contains("trim"));
    }

    #[test]
    fn test_string_split() {
        let out = string_out(quote! { s }, "split", &[quote! { "," }]);
        assert!(out.contains("split"));
        assert!(out.contains("collect"));
    }

    #[test]
    fn test_string_startswith() {
        let out = string_out(quote! { s }, "startswith", &[quote! { "prefix" }]);
        assert!(out.contains("starts_with"));
    }

    #[test]
    fn test_string_endswith() {
        let out = string_out(quote! { s }, "endswith", &[quote! { "suffix" }]);
        assert!(out.contains("ends_with"));
    }

    #[test]
    fn test_string_replace() {
        let args = [quote! { "old" }, quote! { "new" }];
        assert!(string_out(quote! { s }, "replace", &args).contains("replace"));
    }

    #[test]
    fn test_string_length() {
        assert!(string_out(quote! { s }, "length", &[]).contains("len"));
    }

    #[test]
    fn test_string_substring() {
        let out = string_out(quote! { s }, "substring", &[quote! { 0 }, quote! { 5 }]);
        assert!(out.contains("chars"));
        assert!(out.contains("skip"));
        assert!(out.contains("take"));
    }

    #[test]
    fn test_collection_slice() {
        let out = collection_out(quote! { vec }, "slice", &[quote! { 0 }, quote! { 2 }]);
        assert!(out.contains("to_vec"));
    }

    #[test]
    fn test_collection_concat() {
        let out = collection_out(quote! { vec1 }, "concat", &[quote! { vec2 }]);
        assert!(out.contains("concat"));
    }

    #[test]
    fn test_collection_flatten() {
        assert!(collection_out(quote! { nested }, "flatten", &[]).contains("flatten"));
    }

    #[test]
    fn test_collection_unique() {
        assert!(collection_out(quote! { vec }, "unique", &[]).contains("HashSet"));
    }

    #[test]
    fn test_collection_join() {
        assert!(collection_out(quote! { vec }, "join", &[quote! { ", " }]).contains("join"));
    }
}