# llmprogram 0.1.0
#
# A Rust library that provides a structured and powerful way to create and run
# programs that use Large Language Models (LLMs). It uses a YAML-based
# configuration to define the behavior of your LLM programs, making them easy
# to create, manage, and share.
#
# Documentation: https://docs.rs/llmprogram
[package]
# Keys ordered per Cargo convention: name, version, then alphabetical,
# with the (long) description last.
name = "llmprogram"
version = "0.1.0"
authors = ["Dipankar Sarkar <me@dipankar.name>"]
categories = ["science", "text-processing"]
documentation = "https://docs.rs/llmprogram"
edition = "2021"
homepage = "https://github.com/skelf-research/llmprogram-rs"
keywords = ["llm", "openai", "gpt", "ai", "machine-learning"]
license = "MIT"
repository = "https://github.com/skelf-research/llmprogram-rs"
description = "A Rust library that provides a structured and powerful way to create and run programs that use Large Language Models (LLMs). It uses a YAML-based configuration to define the behavior of your LLM programs, making them easy to create, manage, and share."

[dependencies]
# Sorted alphabetically per Cargo convention; each entry annotated with its role.
anyhow = "1.0"                                                 # ergonomic error handling
clap = { version = "4.0", features = ["derive"] }              # CLI argument parsing
dotenv = "0.15"                                                # .env loading (NOTE(review): crate is unmaintained; consider dotenvy)
env_logger = "0.10"                                            # logger backend for the `log` facade
futures-util = "0.3"                                           # stream combinators for streamed responses
jsonschema = "0.17"                                            # JSON Schema validation
log = "0.4"                                                    # logging facade
redis = { version = "0.23", features = ["tokio-comp"] }        # Redis client (tokio integration)
reqwest = { version = "0.11", features = ["json", "stream"] }  # HTTP client for the OpenAI API
rusqlite = { version = "0.29", features = ["bundled"] }        # SQLite, bundled so no system lib is needed
serde = { version = "1.0", features = ["derive"] }             # (de)serialization framework
serde_json = "1.0"                                             # JSON handling
serde_yaml = "0.9"                                             # YAML program definitions
tera = "0.11"                                                  # Jinja2-like template engine
tokio = { version = "1.0", features = ["full"] }               # async runtime
uuid = { version = "1.0", features = ["v4"] }                  # random (v4) UUID generation

# CLI entry point.
# NOTE(review): both values match Cargo's defaults (a binary named after the
# package built from src/main.rs), so this section is redundant but harmless;
# kept for explicitness.
[[bin]]
name = "llmprogram"
path = "src/main.rs"

# Library target.
# NOTE(review): both values match Cargo's defaults (a library named after the
# package built from src/lib.rs), so this section is redundant but harmless.
[lib]
name = "llmprogram"
path = "src/lib.rs"

[workspace]
# A Cargo.toml that contains both [package] and [workspace] makes this
# package the workspace root, and the root package is implicitly a member —
# so listing "." here was redundant and has been removed.
members = [
    "examples",
]