// greentic-flow-builder 0.3.1
//
// Greentic Flow Builder — orchestrator that powers Adaptive Card design
// via the adaptive-card-mcp toolkit. Module documentation follows below.
//! Flow Builder web UI — Axum server.
//!
//! This module boots an HTTP server that serves the embedded HTML/JS/CSS
//! bundle and exposes a JSON API around the LLM orchestrator (`/api/chat`,
//! `/api/validate`, `/api/examples`, ...). The real handler code lives in
//! `routes/`.

mod assets;
pub mod openai;
pub mod prompt_builder;
pub mod routes;
pub mod state;
pub mod tool_bridge;

use std::sync::Arc;

use anyhow::Result;

use crate::cli::UiArgs;
use state::AppState;

/// Start the Flow Builder HTTP server and block until it exits.
/// Boot the Flow Builder HTTP server and block until it shuts down.
///
/// Builds the shared [`AppState`] (embedded knowledge base, rendered system
/// prompt, OpenAI credentials, and an empty pack-job table), binds a TCP
/// listener on loopback (port 0 ⇒ the OS picks a free port when none was
/// given), prints the resulting URL, best-effort opens the user's browser,
/// and then serves the Axum router until the server exits.
///
/// # Errors
///
/// Returns an error if the listener cannot be bound, the bound address
/// cannot be read back, or the server loop fails.
pub async fn launch(args: UiArgs) -> Result<()> {
    let kb = Arc::new(crate::knowledge::Knowledge::embedded());

    let shared = Arc::new(AppState {
        openai_api_key: args.openai_api_key,
        model: args.model,
        // No persona override at startup, hence `None`.
        system_prompt: prompt_builder::build_system_prompt(&kb, None),
        knowledge_base: kb,
        pack_jobs: tokio::sync::Mutex::new(std::collections::HashMap::new()),
    });

    // Bind first so we can learn the OS-assigned port before building the URL.
    let bind_addr = format!("127.0.0.1:{}", args.port.unwrap_or(0));
    let tcp = tokio::net::TcpListener::bind(&bind_addr).await?;
    let bound_port = tcp.local_addr()?.port();
    let ui_url = format!("http://127.0.0.1:{bound_port}");

    eprintln!("Flow Builder UI started at: {ui_url}");
    // Opening the browser is best-effort; a failure here is not fatal.
    let _ = open::that(&ui_url);

    let app = routes::build(shared);
    axum::serve(tcp, app).await?;

    Ok(())
}