use std::{
collections::{BTreeMap, BTreeSet, VecDeque},
fmt::{Debug, Display},
path::{Path, PathBuf},
};
use log::*;
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use simple_error::{bail, SimpleError as Error};
use utils::Package;
mod gen;
pub use gen::CodegenOptions;
use gen::*;
mod parse;
use parse::*;
pub mod utils;
use utils::RosVersion;
mod ros2_hashing;
use ros2_hashing::*;
mod ros2_builtin_interfaces;
pub mod integral_types;
pub use integral_types::*;
pub mod serde_rosmsg_bytes;
pub use ::serde;
pub use serde::{de::DeserializeOwned, Deserialize, Serialize};
pub use serde_big_array::BigArray; pub use serde_bytes;
pub use smart_default::SmartDefault;
/// The raw 32 bytes of a ROS 2 type hash.
///
/// Rendered in text form as `RIHS01_<hex>` (see [`Ros2Hash::to_hash_string`]).
#[derive(Clone, Debug, Default)]
pub struct Ros2Hash([u8; 32]);
impl Ros2Hash {
    /// Renders the hash in ROS 2's standard `RIHS01_<hex>` text form.
    pub fn to_hash_string(&self) -> String {
        format!("RIHS01_{}", hex::encode(self.0))
    }
    /// Parses a hash from its `RIHS01_<hex>` text form (the prefix is optional).
    ///
    /// # Panics
    /// Panics if the hex payload is malformed or does not decode to exactly 32 bytes.
    pub fn from_string(hash_str: &str) -> Self {
        // strip_prefix removes the "RIHS01_" marker at most once; the previous
        // trim_start_matches would strip it repeatedly, silently accepting
        // malformed inputs like "RIHS01_RIHS01_<hex>".
        let hex_str = hash_str.strip_prefix("RIHS01_").unwrap_or(hash_str);
        let mut bytes = [0u8; 32];
        hex::decode_to_slice(hex_str, &mut bytes).expect("Invalid hex string");
        Ros2Hash(bytes)
    }
}
impl ToTokens for Ros2Hash {
    /// Emits the 32 bytes as a rust array literal of hex byte literals,
    /// e.g. `[0x12, 0x34, ...]`, for embedding in generated code.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let byte_literals: Vec<syn::LitInt> = self
            .0
            .iter()
            .map(|byte| syn::LitInt::new(&format!("0x{byte:02x}"), proc_macro2::Span::call_site()))
            .collect();
        tokens.extend(quote! { [ #(#byte_literals,)* ] });
    }
}
/// A fully-resolved ROS message: the parsed source plus metadata derived from
/// its (transitively resolved) dependencies.
#[derive(Clone, Debug)]
pub struct MessageFile {
    /// Parsed representation of the message source file.
    pub parsed: ParsedMessageFile,
    /// ROS 1 style md5sum, computed over this message and its dependencies.
    pub md5sum: String,
    /// ROS 2 `RIHS01` type hash.
    pub ros2_hash: Ros2Hash,
    /// Full definition text: this message's source followed by the
    /// definitions of every referenced message type.
    pub definition: String,
    /// True if every serialized instance has the same byte length, i.e. no
    /// strings and no variable-length arrays anywhere in the type tree.
    pub is_fixed_encoding_length: bool,
}
impl MessageFile {
    /// Resolves a parsed message against the already-resolved dependencies in
    /// `graph`, computing its md5sum, ROS 2 type hash, full definition, and
    /// fixed-length flag.
    ///
    /// Returns None (after logging an error) if any computation fails, which
    /// generally means a referenced type is missing from `graph`.
    fn resolve(parsed: ParsedMessageFile, graph: &BTreeMap<String, MessageFile>) -> Option<Self> {
        let md5sum = Self::compute_md5sum(&parsed, graph).or_else(|| {
            log::error!("Failed to calculate md5sum for message: {parsed:#?}");
            None
        })?;
        let ros2_hash = calculate_ros2_hash(&parsed, graph);
        let definition = Self::compute_full_definition(&parsed, graph).or_else(|| {
            log::error!("Failed to calculate full definition for message: {parsed:#?}");
            None
        })?;
        let is_fixed_length = Self::determine_if_fixed_length(&parsed, graph).or_else(|| {
            log::error!("Failed to determine if message is fixed length: {parsed:#?}");
            None
        })?;
        Some(MessageFile {
            parsed,
            md5sum,
            ros2_hash,
            definition,
            is_fixed_encoding_length: is_fixed_length,
        })
    }
    /// Package the message belongs to.
    pub fn get_package_name(&self) -> String {
        self.parsed.package.clone()
    }
    /// Message name without the package qualifier.
    pub fn get_short_name(&self) -> String {
        self.parsed.name.clone()
    }
    /// Fully qualified "package/name".
    pub fn get_full_name(&self) -> String {
        format!("{}/{}", self.parsed.package, self.parsed.name)
    }
    /// ROS 1 style md5sum of the message.
    pub fn get_md5sum(&self) -> &str {
        self.md5sum.as_str()
    }
    /// Fields declared by the message.
    pub fn get_fields(&self) -> &[FieldInfo] {
        &self.parsed.fields
    }
    /// Constants declared by the message.
    pub fn get_constants(&self) -> &[ConstantInfo] {
        &self.parsed.constants
    }
    /// True if every serialized instance of this message has the same length.
    pub fn is_fixed_length(&self) -> bool {
        self.is_fixed_encoding_length
    }
    /// Full definition text, including all referenced message definitions.
    pub fn get_definition(&self) -> &str {
        &self.definition
    }
    /// Computes the md5sum: the digest of the md5-text with trailing
    /// whitespace trimmed.
    fn compute_md5sum(
        parsed: &ParsedMessageFile,
        graph: &BTreeMap<String, MessageFile>,
    ) -> Option<String> {
        let md5sum_content = Self::_compute_md5sum(parsed, graph)?;
        let md5sum = md5::compute(md5sum_content.trim_end().as_bytes());
        log::trace!(
            "Message type: {} calculated with md5sum: {md5sum:x}",
            parsed.get_full_name()
        );
        Some(format!("{md5sum:x}"))
    }
    /// Builds the text that is hashed for the md5sum: constants first, then
    /// fields, with each non-primitive field type replaced by the md5sum of
    /// the referenced message. Returns None if a referenced type is not in
    /// `graph`.
    fn _compute_md5sum(
        parsed: &ParsedMessageFile,
        graph: &BTreeMap<String, MessageFile>,
    ) -> Option<String> {
        let mut md5sum_content = String::new();
        for constant in &parsed.constants {
            md5sum_content.push_str(&format!(
                "{} {}={}\n",
                constant.constant_type, constant.constant_name, constant.constant_value
            ));
        }
        for field in &parsed.fields {
            let field_type = field.field_type.field_type.as_str();
            if is_intrinsic_type(parsed.version.unwrap_or(RosVersion::ROS1), field_type) {
                md5sum_content.push_str(&format!("{} {}\n", field.field_type, field.field_name));
            } else {
                let field_package = field
                    .field_type
                    .package_name
                    .as_ref()
                    .unwrap_or_else(|| panic!("Expected package name for field {field:#?}"));
                let field_full_name = format!("{field_package}/{field_type}");
                let sub_message = graph.get(field_full_name.as_str())?;
                // Use the md5sum cached on the resolved dependency instead of
                // recomputing it recursively. Dependencies always resolve
                // before their dependents, so the cached value is identical,
                // and this avoids exponential re-hashing on deep dependency
                // chains (mirrors how compute_full_definition reuses
                // get_definition()).
                md5sum_content.push_str(&format!(
                    "{} {}\n",
                    sub_message.get_md5sum(),
                    field.field_name
                ));
            }
        }
        Some(md5sum_content)
    }
    /// Recursively collects the fully-qualified names of every non-primitive
    /// type referenced by this message, directly or transitively.
    fn get_unique_field_types(
        parsed: &ParsedMessageFile,
        graph: &BTreeMap<String, MessageFile>,
    ) -> Option<BTreeSet<String>> {
        let mut unique_field_types = BTreeSet::new();
        for field in &parsed.fields {
            let field_type = field.field_type.field_type.as_str();
            if is_intrinsic_type(parsed.version.unwrap_or(RosVersion::ROS1), field_type) {
                continue;
            }
            let sub_message = graph.get(field.get_full_type_name().as_str())?;
            unique_field_types.insert(field.get_full_type_name());
            let mut sub_deps = Self::get_unique_field_types(&sub_message.parsed, graph)?;
            unique_field_types.append(&mut sub_deps);
        }
        Some(unique_field_types)
    }
    /// Computes the full definition: this message's source followed by the
    /// definition of every referenced type, separated by the long '=' line.
    fn compute_full_definition(
        parsed: &ParsedMessageFile,
        graph: &BTreeMap<String, MessageFile>,
    ) -> Option<String> {
        let mut definition_content = String::new();
        definition_content.push_str(&format!("{}\n", parsed.source.trim()));
        let sep: &str =
            "================================================================================\n";
        for field in Self::get_unique_field_types(parsed, graph)? {
            let Some(sub_message) = graph.get(&field) else {
                log::error!(
                    "Unable to find message type: {field:?}, while computing full definition of {}",
                    parsed.get_full_name()
                );
                return None;
            };
            definition_content.push_str(sep);
            definition_content.push_str(&format!("MSG: {}\n", sub_message.get_full_name()));
            definition_content.push_str(&format!("{}\n", sub_message.get_definition().trim()));
        }
        // Drop the trailing newline added by the final push above.
        definition_content.pop();
        Some(definition_content)
    }
    /// Determines whether the message always serializes to the same number of
    /// bytes: false if it (or any nested message) contains a string or a
    /// variable-length (unbounded or bounded) array.
    fn determine_if_fixed_length(
        parsed: &ParsedMessageFile,
        graph: &BTreeMap<String, MessageFile>,
    ) -> Option<bool> {
        for field in &parsed.fields {
            // Any variable-length array makes the encoding variable.
            match field.field_type.array_info {
                ArrayType::Unbounded | ArrayType::Bounded(_) => return Some(false),
                _ => {}
            }
            if field.field_type.package_name.is_none() {
                if field.field_type.field_type == "string" {
                    return Some(false);
                }
            } else {
                let field_msg = graph.get(field.get_full_type_name().as_str())?;
                // Reuse the flag cached when the dependency was resolved
                // rather than re-walking its entire subtree.
                if !field_msg.is_fixed_length() {
                    return Some(false);
                }
            }
        }
        Some(true)
    }
}
/// A fully-resolved ROS service: the parsed source plus the resolved request
/// and response messages and the service-level hashes.
#[derive(Clone, Debug)]
pub struct ServiceFile {
    /// Parsed representation of the service source file.
    pub(crate) parsed: ParsedServiceFile,
    /// Resolved request message type.
    pub(crate) request: MessageFile,
    /// Resolved response message type.
    pub(crate) response: MessageFile,
    /// ROS 1 style md5sum over request + response.
    pub(crate) md5sum: String,
    /// ROS 2 `RIHS01` type hash for the service.
    pub(crate) ros2_hash: Ros2Hash,
}
impl ServiceFile {
    /// Resolves a parsed service: resolves its request and response messages
    /// against `graph`, then computes the service md5sum and ROS 2 hash.
    /// Returns None (after logging) if either side fails to resolve.
    fn resolve(parsed: ParsedServiceFile, graph: &BTreeMap<String, MessageFile>) -> Option<Self> {
        let request = MessageFile::resolve(parsed.request_type.clone(), graph);
        let response = MessageFile::resolve(parsed.response_type.clone(), graph);
        let (Some(request), Some(response)) = (request, response) else {
            log::error!("Unable to resolve dependencies in service: {parsed:#?}");
            return None;
        };
        let md5sum = Self::compute_md5sum(&parsed, graph)?;
        let ros2_hash = calculate_ros2_srv_hash(&parsed, graph);
        Some(ServiceFile {
            parsed,
            request,
            response,
            md5sum,
            ros2_hash,
        })
    }
    /// Fully qualified "package/name" of the service.
    pub fn get_full_name(&self) -> String {
        format!("{}/{}", self.parsed.package, self.parsed.name)
    }
    /// Service name without the package qualifier.
    pub fn get_short_name(&self) -> String {
        self.parsed.name.clone()
    }
    /// Package the service belongs to.
    pub fn get_package_name(&self) -> String {
        self.parsed.package.clone()
    }
    /// Resolved request message.
    pub fn request(&self) -> &MessageFile {
        &self.request
    }
    /// Resolved response message.
    pub fn response(&self) -> &MessageFile {
        &self.response
    }
    /// ROS 1 style md5sum of the service.
    pub fn get_md5sum(&self) -> String {
        self.md5sum.clone()
    }
    /// ROS 2 `RIHS01` hash of the service.
    pub fn get_ros2_hash(&self) -> &Ros2Hash {
        &self.ros2_hash
    }
    /// Computes the service md5sum: a single MD5 over the request's md5-text
    /// immediately followed by the response's md5-text (each trimmed).
    fn compute_md5sum(
        parsed: &ParsedServiceFile,
        graph: &BTreeMap<String, MessageFile>,
    ) -> Option<String> {
        let request_content = MessageFile::_compute_md5sum(&parsed.request_type, graph)?;
        let response_content = MessageFile::_compute_md5sum(&parsed.response_type, graph)?;
        let mut md5sum_context = md5::Context::new();
        md5sum_context.consume(request_content.trim_end().as_bytes());
        md5sum_context.consume(response_content.trim_end().as_bytes());
        let md5sum = md5sum_context.compute();
        log::trace!(
            "Message type: {} calculated with md5sum: {md5sum:x}",
            parsed.get_full_name()
        );
        Some(format!("{md5sum:x}"))
    }
}
/// A parsed action file paired with the ROS 2 type hashes of the generated
/// types it expands to (SendGoal / GetResult / FeedbackMessage), loaded from
/// sidecar JSON metadata.
pub struct ActionWithHashes {
    /// Parsed representation of the action source file.
    pub parsed: ParsedActionFile,
    /// Hash for the `..._SendGoal` type.
    pub send_goal_hash: Ros2Hash,
    /// Hash for the `..._GetResult` type.
    pub get_result_hash: Ros2Hash,
    /// Hash for the `..._FeedbackMessage` type.
    pub feedback_message_hash: Ros2Hash,
}
impl ActionWithHashes {
    /// Loads the action's type hashes from a JSON metadata file containing a
    /// "type_hashes" array of `{type_name, hash_string}` entries.
    ///
    /// Returns None if the file is missing/unreadable, the JSON is malformed,
    /// or any of the three expected hashes is absent.
    pub fn from_json_metadata(parsed: ParsedActionFile, json_path: &Path) -> Option<Self> {
        let json_content = std::fs::read_to_string(json_path).ok()?;
        let json: serde_json::Value = serde_json::from_str(&json_content).ok()?;
        let type_hashes = json.get("type_hashes")?.as_array()?;
        // Finds the first entry whose type_name ends with `suffix`, skipping
        // entries that are missing either field.
        let find_hash = |suffix: &str| -> Option<Ros2Hash> {
            for type_hash in type_hashes {
                let Some(type_name) = type_hash.get("type_name").and_then(|v| v.as_str()) else {
                    continue;
                };
                let Some(hash_string) = type_hash.get("hash_string").and_then(|v| v.as_str())
                else {
                    continue;
                };
                if type_name.ends_with(suffix) {
                    return Some(Ros2Hash::from_string(hash_string));
                }
            }
            None
        };
        Some(ActionWithHashes {
            parsed,
            send_goal_hash: find_hash("_SendGoal")?,
            get_result_hash: find_hash("_GetResult")?,
            feedback_message_hash: find_hash("_FeedbackMessage")?,
        })
    }
    /// Package the action belongs to.
    pub fn get_package_name(&self) -> String {
        self.parsed.package.clone()
    }
    /// Action name without the package qualifier.
    pub fn get_short_name(&self) -> String {
        self.parsed.name.clone()
    }
}
/// A literal value captured verbatim from a ROS interface file, e.g. a
/// constant value or a field default.
#[derive(Clone, Debug)]
pub struct RosLiteral {
    pub inner: String,
}

impl Display for RosLiteral {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // f.pad applies the formatter's width/alignment flags, exactly like
        // String's own Display implementation does.
        f.pad(&self.inner)
    }
}

impl From<String> for RosLiteral {
    fn from(inner: String) -> Self {
        RosLiteral { inner }
    }
}
/// Array decoration of a field as written in the interface file.
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
pub enum ArrayType {
    /// Plain scalar field.
    NotArray,
    /// `type[N]` — exactly N elements.
    FixedLength(usize),
    /// `type[<=N]` — at most N elements.
    Bounded(usize),
    /// `type[]` — any number of elements.
    Unbounded,
}

/// The declared type of a single field, including package qualification,
/// array decoration, and optional string capacity bound.
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
pub struct FieldType {
    /// Explicit package qualifier, if any; resolution falls back to
    /// `source_package` when this is None.
    pub package_name: Option<String>,
    /// Package of the file in which the field was declared.
    pub source_package: String,
    /// Bare type name, without package or array decoration.
    pub field_type: String,
    /// Array decoration, if any.
    pub array_info: ArrayType,
    /// Capacity bound for bounded strings, if one was declared.
    pub string_capacity: Option<usize>,
}

impl std::fmt::Display for FieldType {
    /// Renders the type with its array decoration, e.g. `int32[4]`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let base = &self.field_type;
        match self.array_info {
            ArrayType::NotArray => write!(f, "{base}"),
            ArrayType::FixedLength(len) => write!(f, "{base}[{len}]"),
            ArrayType::Bounded(len) => write!(f, "{base}[<={len}]"),
            ArrayType::Unbounded => write!(f, "{base}[]"),
        }
    }
}
impl FieldType {
    /// True if this type is one of the ROS built-in primitive types.
    pub fn is_primitive(&self) -> bool {
        let name = self.field_type.as_str();
        crate::parse::ROS_PRIMITIVE_TYPE_LIST
            .iter()
            .any(|primitive| *primitive == name)
    }
}
/// A single field declared in a message, service, or action file.
#[derive(Clone, Debug)]
pub struct FieldInfo {
    /// Declared type of the field, including package and array information.
    pub field_type: FieldType,
    /// Name of the field as written in the source file.
    pub field_name: String,
    /// Default value for the field, if one was declared.
    pub default: Option<RosLiteral>,
}
impl PartialEq for FieldInfo {
    /// Equality considers only the type and name; `default` is ignored.
    fn eq(&self, other: &Self) -> bool {
        (&self.field_type, &self.field_name) == (&other.field_type, &other.field_name)
    }
}
impl FieldInfo {
    /// Package this field's type resolves against: the explicit package
    /// qualifier when present, otherwise the declaring file's package.
    /// (Shared by both full-name accessors below to avoid duplicating the
    /// fallback logic.)
    fn resolved_package(&self) -> &str {
        self.field_type
            .package_name
            .as_deref()
            .unwrap_or(self.field_type.source_package.as_str())
    }
    /// Fully-qualified ROS 1 style type name, e.g. "std_msgs/Header".
    pub fn get_full_type_name(&self) -> String {
        format!("{}/{}", self.resolved_package(), self.field_type.field_type)
    }
    /// Fully-qualified ROS 2 style type name, e.g. "std_msgs/msg/Header".
    pub fn get_ros2_full_type_name(&self) -> String {
        format!("{}/msg/{}", self.resolved_package(), self.field_type.field_type)
    }
}
/// A single constant declared in a message, service, or action file.
#[derive(Clone, Debug)]
pub struct ConstantInfo {
    /// Declared type of the constant.
    pub constant_type: String,
    /// Name of the constant as written in the source file.
    pub constant_name: String,
    /// Literal value of the constant, captured verbatim.
    pub constant_value: RosLiteral,
}
impl PartialEq for ConstantInfo {
    /// Equality considers only the type and name; the value is ignored.
    fn eq(&self, other: &Self) -> bool {
        (&self.constant_type, &self.constant_name) == (&other.constant_type, &other.constant_name)
    }
}
/// Searches the standard ROS package paths plus `additional_search_paths` for
/// interface files and generates rust source for everything found.
pub fn find_and_generate_ros_messages(
    additional_search_paths: Vec<PathBuf>,
) -> Result<(TokenStream, Vec<PathBuf>), Error> {
    // Caller-supplied paths are appended after the standard search paths.
    let search_paths: Vec<PathBuf> = utils::get_search_paths()
        .into_iter()
        .chain(additional_search_paths)
        .collect();
    find_and_generate_ros_messages_without_ros_package_path(search_paths)
}
/// Like [`find_and_generate_ros_messages`], but searches only the given paths
/// without consulting the standard ROS package path environment.
pub fn find_and_generate_ros_messages_without_ros_package_path(
    search_paths: Vec<PathBuf>,
) -> Result<(TokenStream, Vec<PathBuf>), Error> {
    let (messages, services, actions) = find_and_parse_ros_messages(&search_paths)?;
    // Finding nothing at all is treated as an error: the caller explicitly
    // asked for codegen over these paths.
    let nothing_found = messages.is_empty() && services.is_empty();
    if nothing_found {
        bail!("Failed to find any services or messages while generating ROS message definitions, paths searched: {search_paths:?}");
    }
    tokenize_messages_and_services(messages, services, actions)
}
/// Resolves the dependency graph and generates rust source, also returning the
/// list of on-disk files the generated code depends on.
fn tokenize_messages_and_services(
    messages: Vec<ParsedMessageFile>,
    services: Vec<ParsedServiceFile>,
    actions: Vec<ParsedActionFile>,
) -> Result<(TokenStream, Vec<PathBuf>), Error> {
    let (messages, services) = resolve_dependency_graph(messages, services)?;
    // Gather every source path that contributed to the generated output.
    let mut dependent_paths: Vec<PathBuf> = Vec::new();
    dependent_paths.extend(messages.iter().map(|m| m.parsed.path.clone()));
    dependent_paths.extend(services.iter().map(|s| s.parsed.path.clone()));
    dependent_paths.extend(actions.iter().map(|a| a.path.clone()));
    // Entries under this marker path correspond to the bundled builtin
    // interfaces (see ros2_builtin_interfaces) rather than user files.
    dependent_paths.retain(|p| !p.starts_with("/tmp/roslibrust_builtin/"));
    let source =
        generate_rust_ros_message_definitions(messages, services, &CodegenOptions::default())?;
    Ok((source, dependent_paths))
}
pub fn generate_ros_messages_for_packages(
packages: Vec<Package>,
) -> Result<(TokenStream, Vec<PathBuf>), Error> {
let msg_paths = packages
.iter()
.flat_map(|package| {
utils::get_message_files(package).map(|msgs| {
msgs.into_iter()
.map(|msg| (package.clone(), msg))
.collect::<Vec<_>>()
})
})
.flatten()
.collect();
let (messages, services, actions) = parse_ros_files(msg_paths)?;
if messages.is_empty() && services.is_empty() {
bail!("Failed to find any services or messages while generating ROS message definitions, packages searched: {packages:?}")
}
tokenize_messages_and_services(messages, services, actions)
}
#[allow(clippy::type_complexity)]
pub fn find_and_parse_ros_messages(
search_paths: &[PathBuf],
) -> Result<
(
Vec<ParsedMessageFile>,
Vec<ParsedServiceFile>,
Vec<ParsedActionFile>,
),
Error,
> {
let search_paths = search_paths
.iter()
.map(|path| {
path.canonicalize().map_err(
|e| {
Error::with(format!("Codegen was instructed to search a path that could not be canonicalized relative to {:?}: {path:?}", std::env::current_dir().unwrap()).as_str(), e)
})
})
.collect::<Result<Vec<_>, Error>>()?;
debug!(
"Codegen is looking in following paths for files: {:?}",
&search_paths
);
let packages = utils::crawl(&search_paths);
let packages = utils::deduplicate_packages(packages);
if packages.is_empty() {
bail!(
"No ROS packages found while searching in: {search_paths:?}, relative to {:?}",
std::env::current_dir().unwrap()
);
}
debug!("After deduplication {:?} packages remain.", packages.len());
let message_files = packages
.iter()
.flat_map(|pkg| {
let files = utils::get_message_files(pkg).map_err(|err| {
Error::with(
format!("Unable to get paths to message files for {pkg:?}:").as_str(),
err,
)
});
match files {
Ok(files) => {
debug!(
"Found {:?} interface files in package: {:?}",
files.len(),
pkg.name
);
files
.into_iter()
.map(|path| Ok((pkg.clone(), path)))
.collect()
}
Err(e) => vec![Err(e)],
}
})
.collect::<Result<Vec<(Package, PathBuf)>, Error>>()?;
parse_ros_files(message_files)
}
/// Generates the rust source for the given resolved messages and services,
/// grouped into one module per package.
pub fn generate_rust_ros_message_definitions(
    messages: Vec<MessageFile>,
    services: Vec<ServiceFile>,
    options: &CodegenOptions,
) -> Result<TokenStream, Error> {
    // Bucket generated definitions by package; BTreeMap keeps module order
    // deterministic.
    let mut modules_to_struct_definitions: BTreeMap<String, Vec<TokenStream>> = BTreeMap::new();
    for message in messages {
        let pkg_name = message.parsed.package.clone();
        let definition = generate_struct(message, Some(options))?;
        // entry() does a single map lookup; the previous get_mut/insert pair
        // looked the key up twice.
        modules_to_struct_definitions
            .entry(pkg_name)
            .or_default()
            .push(definition);
    }
    for service in services {
        let pkg_name = service.parsed.package.clone();
        let definition = generate_service(service, Some(options))?;
        modules_to_struct_definitions
            .entry(pkg_name)
            .or_default()
            .push(definition);
    }
    // Each module is generated with knowledge of all sibling packages.
    let all_pkgs = modules_to_struct_definitions
        .keys()
        .cloned()
        .collect::<Vec<String>>();
    let module_definitions = modules_to_struct_definitions
        .into_iter()
        .map(|(pkg, struct_defs)| generate_mod(pkg, struct_defs, &all_pkgs[..]))
        .collect::<Vec<TokenStream>>();
    Ok(quote! {
        #(#module_definitions)*
    })
}
/// Work-queue bookkeeping used while resolving the message dependency graph.
struct MessageMetadata {
    // The parsed-but-not-yet-resolved message.
    msg: ParsedMessageFile,
    // Number of times this message has been popped and re-queued because a
    // dependency was still unresolved; used to detect unresolvable graphs.
    seen_count: u32,
}
/// Resolves the dependency graph between all parsed messages and services.
///
/// Messages cycle through a work queue: each is resolved once every
/// non-primitive field type it references is already in the resolved set,
/// which is pre-seeded with the ROS 2 builtin interfaces. Services are
/// resolved last, against the completed message graph.
///
/// Errors if the graph cannot be fully resolved (e.g. a referenced type was
/// never parsed, which also covers dependency cycles).
pub fn resolve_dependency_graph(
    messages: Vec<ParsedMessageFile>,
    services: Vec<ParsedServiceFile>,
) -> Result<(Vec<MessageFile>, Vec<ServiceFile>), Error> {
    // How many times a single message may be re-queued before giving up and
    // reporting whatever is still unresolved.
    const MAX_PARSE_ITER_LIMIT: u32 = 2048;
    let mut unresolved_messages = messages
        .into_iter()
        .map(|msg| MessageMetadata { msg, seen_count: 0 })
        .collect::<VecDeque<_>>();
    // Seed with the bundled builtin ROS 2 interfaces so messages referencing
    // them can resolve without those files being on disk.
    let mut resolved_messages = ros2_builtin_interfaces::get_builtin_interfaces();
    while let Some(MessageMetadata { msg, seen_count }) = unresolved_messages.pop_front() {
        // A message is ready once every non-primitive field type it uses has
        // already been resolved.
        let fully_resolved = msg.fields.iter().all(|field| {
            let is_primitive = field.field_type.is_primitive();
            if !is_primitive {
                let is_resolved =
                    resolved_messages.contains_key(field.get_full_type_name().as_str());
                is_resolved
            } else {
                true
            }
        });
        if fully_resolved {
            let debug_name = msg.get_full_name();
            let msg_file = MessageFile::resolve(msg, &resolved_messages).ok_or(
                Error::new(format!("Failed to correctly resolve message {debug_name:?}, either md5sum could not be calculated, or fixed length was indeterminate"))
            )?;
            resolved_messages.insert(msg_file.get_full_name(), msg_file);
        } else {
            // Not ready yet: requeue and bump the retry count.
            unresolved_messages.push_back(MessageMetadata {
                seen_count: seen_count + 1,
                msg,
            });
        }
        if seen_count > MAX_PARSE_ITER_LIMIT {
            // Give up and build a report of which field types are missing and
            // which messages they are blocking.
            let msg_names = unresolved_messages
                .iter()
                .map(|item| format!("{}/{}", item.msg.package, item.msg.name))
                .collect::<Vec<_>>();
            let mut unresolved_fields = unresolved_messages
                .iter()
                .flat_map(|item| {
                    item.msg
                        .fields
                        .iter()
                        .filter_map(|field| {
                            if !field.field_type.is_primitive() {
                                if resolved_messages
                                    .contains_key(field.get_full_type_name().as_str())
                                {
                                    None
                                } else {
                                    Some(field.get_full_type_name())
                                }
                            } else {
                                None
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .collect::<Vec<_>>();
            unresolved_fields.sort();
            unresolved_fields.dedup();
            // Types that are themselves waiting in the queue are blocked, not
            // missing — keep only the truly unknown types in the report.
            let unresolved_fields = unresolved_fields
                .into_iter()
                .filter(|f| !msg_names.contains(f))
                .collect::<Vec<_>>();
            bail!(
                "Unable to resolve ROS message dependencies after reaching search limit.\n\
                The following types are still unresolved:\n{unresolved_fields:#?}\n
                This is preventing full resolution for the following messages:\n{msg_names:#?}"
            );
        }
    }
    // Services can only be resolved once the full message graph exists.
    let mut resolved_services: Vec<_> = services
        .into_iter()
        .map(|srv| {
            let name = srv.path.clone();
            ServiceFile::resolve(srv, &resolved_messages).ok_or(Error::new(format!(
                "Failed to correctly resolve service: {:?}",
                &name
            )))
        })
        .collect::<Result<Vec<_>, Error>>()?;
    // Sort services by name for deterministic codegen output.
    resolved_services.sort_by(|a: &ServiceFile, b: &ServiceFile| a.parsed.name.cmp(&b.parsed.name));
    Ok((resolved_messages.into_values().collect(), resolved_services))
}
/// Parses each given (package, path) pair into its intermediate
/// representation, dispatching on the file extension (.msg / .srv / .action).
/// Files with an unrecognized extension are logged and skipped.
#[allow(clippy::type_complexity)]
pub(crate) fn parse_ros_files(
    msg_paths: Vec<(Package, PathBuf)>,
) -> Result<
    (
        Vec<ParsedMessageFile>,
        Vec<ParsedServiceFile>,
        Vec<ParsedActionFile>,
    ),
    Error,
> {
    let mut parsed_messages = Vec::new();
    let mut parsed_services = Vec::new();
    let mut parsed_actions = Vec::new();
    for (pkg, path) in msg_paths {
        let contents = std::fs::read_to_string(&path).map_err(|e| {
            Error::with(
                format!("Codegen failed while attempting to read file {path:?} from disk:")
                    .as_str(),
                e,
            )
        })?;
        let name = path
            .file_stem()
            .ok_or(Error::new(format!(
                "Failed to extract valid file stem for file at {path:?}"
            )))?
            .to_str()
            .ok_or(Error::new(format!(
                "File stem for file at path {path:?} was not valid unicode?"
            )))?;
        // and_then(to_str) instead of unwrap(): a path with a missing or
        // non-UTF-8 extension now falls through to the error log below
        // instead of panicking.
        match path.extension().and_then(|ext| ext.to_str()) {
            Some("srv") => {
                let srv_file = parse_ros_service_file(&contents, name, &pkg, &path)?;
                parsed_services.push(srv_file);
            }
            Some("msg") => {
                let msg = parse_ros_message_file(&contents, name, &pkg, &path)?;
                parsed_messages.push(msg);
            }
            Some("action") => {
                let action = parse_ros_action_file(&contents, name, &pkg, &path)?;
                // Keep the full action, then register the message types it
                // expands to so they join the dependency graph.
                parsed_actions.push(action.clone());
                parsed_messages.push(action.action_type);
                parsed_messages.push(action.action_goal_type);
                parsed_messages.push(action.goal_type);
                parsed_messages.push(action.action_result_type);
                parsed_messages.push(action.result_type);
                parsed_messages.push(action.action_feedback_type);
                parsed_messages.push(action.feedback_type);
            }
            _ => {
                log::error!("File extension not recognized as a ROS file: {path:?}");
            }
        }
    }
    Ok((parsed_messages, parsed_services, parsed_actions))
}
/// Resolves ROS 2 type hashes for each parsed action from its sidecar JSON
/// metadata file. Actions whose hashes cannot be resolved are logged and
/// omitted from the result.
pub fn resolve_action_hashes(parsed_actions: Vec<ParsedActionFile>) -> Vec<ActionWithHashes> {
    let mut resolved = Vec::with_capacity(parsed_actions.len());
    for parsed_action in parsed_actions {
        // The hash metadata lives next to the .action file, with a .json extension.
        let json_path = parsed_action.path.with_extension("json");
        match ActionWithHashes::from_json_metadata(parsed_action.clone(), &json_path) {
            Some(action) => resolved.push(action),
            None => {
                log::warn!(
                    "Failed to resolve action hashes for {}/{}",
                    parsed_action.package,
                    parsed_action.name
                );
            }
        }
    }
    resolved
}
#[cfg(test)]
mod test {
    use crate::find_and_generate_ros_messages;
    use std::path::PathBuf;

    /// Shared body of the smoke tests below: runs codegen over the given
    /// asset directories and asserts it produced output and tracked at least
    /// one dependent file.
    fn assert_codegen_ok(search_paths: Vec<PathBuf>) {
        let (source, dependent_paths) = find_and_generate_ros_messages(search_paths).unwrap();
        assert!(!source.is_empty());
        assert!(!dependent_paths.is_empty());
    }

    #[test_log::test]
    fn generate_ok_on_ros1() {
        let assets_path = concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/../assets/ros1_common_interfaces"
        );
        assert_codegen_ok(vec![assets_path.into()]);
    }

    #[test_log::test]
    fn generate_ok_on_ros2() {
        let assets_path = concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/../assets/ros2_common_interfaces"
        );
        // ROS 2 codegen additionally needs builtin_interfaces on the path.
        let required_path = concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/../assets/ros2_required_msgs/rcl_interfaces/builtin_interfaces"
        );
        assert_codegen_ok(vec![assets_path.into(), required_path.into()]);
    }

    #[test_log::test]
    fn generate_ok_on_ros1_test_msgs() {
        let assets_path = concat!(env!("CARGO_MANIFEST_DIR"), "/../assets/ros1_test_msgs");
        let std_msgs = concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/../assets/ros1_common_interfaces/std_msgs"
        );
        assert_codegen_ok(vec![assets_path.into(), std_msgs.into()]);
    }

    #[test_log::test]
    fn generate_ok_on_ros2_test_msgs() {
        let assets_path = concat!(env!("CARGO_MANIFEST_DIR"), "/../assets/ros2_test_msgs");
        let required_path = concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/../assets/ros2_required_msgs/rcl_interfaces/builtin_interfaces"
        );
        assert_codegen_ok(vec![assets_path.into(), required_path.into()]);
    }
}