use mecha10_core::behavior_interrupt::BehaviorInterruptConfig;
use serde::{Deserialize, Serialize};
/// Configuration for the OpenAI-backed reasoning node.
///
/// Every field carries a serde default, so a partial (or empty) config
/// document deserializes successfully.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIReasoningConfig {
    /// LLM provider identifier (defaults to "openai").
    #[serde(default = "default_provider")]
    pub provider: String,
    /// Model name sent to the provider (defaults to "gpt-4o-mini").
    #[serde(default = "default_model")]
    pub llm_model: String,
    /// Optional custom system prompt. An absent field deserializes to `None`.
    /// NOTE(review): `Default::default()` instead fills in the built-in
    /// prompt (`Some(default_system_prompt())`) — confirm that consumers
    /// treat `None` as "use the built-in prompt" so the two paths agree.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system_prompt: Option<String>,
    /// Sampling temperature (defaults to 0.7).
    #[serde(default = "default_temperature")]
    pub temperature: f32,
    /// Maximum completion tokens per request (defaults to 500).
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
    /// Whether vision queries are enabled.
    ///
    /// BUGFIX: this was `#[serde(default)]`, which deserialized a missing
    /// field as `false`, contradicting the hand-written `Default` impl
    /// which sets `true`. Deserialization now defaults to `true` as well.
    #[serde(default = "default_vision_enabled")]
    pub vision_enabled: bool,
    /// Pub/sub topic names used by the node.
    #[serde(default)]
    pub topics: TopicConfig,
    /// Behavior-interrupt settings (declared in `mecha10_core`).
    #[serde(default)]
    pub behavior_interrupt: BehaviorInterruptConfig,
}

/// Serde default for `vision_enabled`: vision is on unless explicitly
/// disabled in the config.
fn default_vision_enabled() -> bool {
    true
}
impl Default for OpenAIReasoningConfig {
fn default() -> Self {
Self {
provider: default_provider(),
llm_model: default_model(),
system_prompt: Some(default_system_prompt()),
temperature: default_temperature(),
max_tokens: default_max_tokens(),
vision_enabled: true, topics: TopicConfig::default(),
behavior_interrupt: BehaviorInterruptConfig::default(),
}
}
}
/// Pub/sub topic names the reasoning node communicates over.
///
/// Every field has a serde default, so any subset may be overridden in the
/// config file. By convention here, `*_in` fields are inputs the node reads
/// and `*_out` fields are outputs it writes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TopicConfig {
/// Incoming user commands (defaults to "/ai/command").
#[serde(default = "default_command_topic")]
pub command_in: String,
/// Outgoing AI responses (defaults to "/ai/response").
#[serde(default = "default_response_topic")]
pub response_out: String,
/// Incoming camera frames (defaults to "/robot/sensors/camera/rgb").
#[serde(default = "default_camera_topic")]
pub camera_in: String,
/// Incoming object detections (defaults to "/vision/object/detections").
#[serde(default = "default_detections_topic")]
pub detections_in: String,
/// Outgoing navigation goals (defaults to "/nav/goal").
#[serde(default = "default_nav_goal_topic")]
pub nav_goal_out: String,
/// Outgoing motor velocity commands (defaults to "/motor/cmd_vel").
#[serde(default = "default_motor_cmd_topic")]
pub motor_cmd_out: String,
/// Outgoing behavior activation requests (defaults to "/behavior/execute").
#[serde(default = "default_behavior_topic")]
pub behavior_out: String,
}
impl Default for TopicConfig {
fn default() -> Self {
Self {
command_in: default_command_topic(),
response_out: default_response_topic(),
camera_in: default_camera_topic(),
detections_in: default_detections_topic(),
nav_goal_out: default_nav_goal_topic(),
motor_cmd_out: default_motor_cmd_topic(),
behavior_out: default_behavior_topic(),
}
}
}
/// Serde default for `provider`.
fn default_provider() -> String {
    String::from("openai")
}
/// Serde default for `llm_model`.
fn default_model() -> String {
    "gpt-4o-mini".to_owned()
}
/// Built-in system prompt used when the config supplies no custom one.
/// Instructs the model to emit structured JSON for navigation, motor, and
/// behavior commands, and natural language for vision/general queries.
fn default_system_prompt() -> String {
    String::from(
        r#"You are a helpful robot assistant. Parse user commands and respond with structured actions.
For navigation commands (e.g., "go to the door", "move to coordinates"), extract the goal and respond with JSON:
{"action": "navigate", "goal": {"x": 5.0, "y": 3.0, "theta": 0.0}}
For motor commands (e.g., "move forward", "turn left", "stop"), respond with JSON:
{"action": "motor", "linear": 0.5, "angular": 0.0}
For motor commands with duration (e.g., "drive forward for 2 seconds", "turn left for 1 second"), include duration_secs:
{"action": "motor", "linear": 0.5, "angular": 0.0, "duration_secs": 2.0}
For behavior commands (e.g., "follow that person", "patrol the area"), respond with JSON:
{"action": "behavior", "name": "follow_person"}
For vision queries (e.g., "what do you see?", "is there a person?", "how many cars?"):
- You will receive current object detections from the robot's vision system
- Analyze the detections and provide a natural language response
- If no detections are available, mention that the vision system is not active
- Be specific about what objects are detected and their confidence levels
For general questions, respond conversationally."#,
    )
}
/// Serde default for `temperature` (sampling temperature).
fn default_temperature() -> f32 {
    0.7
}
/// Serde default for `max_tokens` (completion-token cap per request).
fn default_max_tokens() -> u32 {
    500
}
/// Default topic for incoming user commands.
fn default_command_topic() -> String {
    String::from("/ai/command")
}
/// Default topic for outgoing AI responses.
fn default_response_topic() -> String {
    "/ai/response".to_owned()
}
/// Default topic for incoming RGB camera frames.
fn default_camera_topic() -> String {
    String::from("/robot/sensors/camera/rgb")
}
/// Default topic for incoming object detections.
fn default_detections_topic() -> String {
    "/vision/object/detections".to_owned()
}
/// Default topic for outgoing navigation goals.
fn default_nav_goal_topic() -> String {
    String::from("/nav/goal")
}
/// Default topic for outgoing motor velocity commands.
fn default_motor_cmd_topic() -> String {
    "/motor/cmd_vel".to_owned()
}
/// Default topic for outgoing behavior activation requests.
fn default_behavior_topic() -> String {
    String::from("/behavior/execute")
}