mod context;
mod partials;
mod schemas;
mod tier1;
mod tier2;
mod tier3;
mod tier4;
mod tier5;
mod tier6;
pub use context::*;
pub use partials::*;
pub use schemas::*;
pub use tier1::*;
pub use tier2::*;
pub use tier3::*;
pub use tier4::*;
pub use tier5::*;
pub use tier6::*;
/// One embedded example workflow, shipped as static data in the binary.
///
/// All fields are `&'static str`, so the type is trivially `Copy`.
#[derive(Debug, Clone, Copy)]
pub struct WorkflowTemplate {
    /// File name of the workflow, e.g. `01-exec-basics.nika.yaml`.
    pub filename: &'static str,
    /// Tier directory the workflow belongs to, e.g. `tier-1-no-deps`.
    pub tier_dir: &'static str,
    /// Full embedded YAML content of the workflow.
    pub content: &'static str,
}
/// One embedded auxiliary file (context, schema, or partial) shipped as
/// static data in the binary.
///
/// All fields are `&'static str`, so the type is trivially `Copy`.
#[derive(Debug, Clone, Copy)]
pub struct ContextFile {
    /// File name, e.g. `output.schema.json`.
    pub filename: &'static str,
    /// Directory the file is associated with.
    pub dir: &'static str,
    /// Full embedded content of the file.
    pub content: &'static str,
}
/// Collects every bundled workflow template.
///
/// Tiers are concatenated in ascending order (1 through 6), preserving the
/// order each `get_tierN_workflows()` returns its entries in.
pub fn get_all_workflows() -> Vec<WorkflowTemplate> {
    tier1::get_tier1_workflows()
        .into_iter()
        .chain(tier2::get_tier2_workflows())
        .chain(tier3::get_tier3_workflows())
        .chain(tier4::get_tier4_workflows())
        .chain(tier5::get_tier5_workflows())
        .chain(tier6::get_tier6_workflows())
        .collect()
}
/// Returns all bundled context files; delegates to `context::get_context_files()`.
pub fn get_all_context_files() -> Vec<ContextFile> {
context::get_context_files()
}
/// Returns all bundled JSON schema files; delegates to `schemas::get_schema_files()`.
pub fn get_all_schemas() -> Vec<ContextFile> {
schemas::get_schema_files()
}
/// Returns all bundled partial (reusable YAML fragment) files; delegates to
/// `partials::get_partial_files()`.
pub fn get_all_partials() -> Vec<ContextFile> {
partials::get_partial_files()
}
/// README text installed alongside the example workflows.
///
/// Content is otherwise unchanged; the lightning-bolt bullet previously
/// contained mojibake (`âš¡`, the UTF-8 bytes of ⚡ mis-decoded as Latin-1)
/// and has been repaired.
pub const WORKFLOWS_README: &str = r#"# Nika Example Workflows
> 30 progressive workflows from zero-dependency to full MCP integration.
## Quick Start
```bash
# Tier 1: Works immediately (no API keys needed)
nika run workflows/tier-1-no-deps/01-exec-basics.nika.yaml
# Tier 2: Setup LLM provider first
nika provider set anthropic # or: openai, mistral, groq, deepseek, gemini
nika run workflows/tier-2-llm/04-infer-basics.nika.yaml
# Tier 3: Agent with file tools
nika run workflows/tier-3-agent/08-agent-basic.nika.yaml
# Tier 4: MCP integration (requires NovaNet)
nika mcp add novanet
nika run workflows/tier-4-mcp/10-mcp-novanet.nika.yaml
# Tier 5-6: Complex use cases
nika run workflows/tier-5-dev/11-code-review-pipeline.nika.yaml
nika run workflows/tier-6-magic/21-morning-briefing.nika.yaml
```
## Tiers Overview
| Tier | Folder | Workflows | Prerequisites | Features |
|------|--------|-----------|---------------|----------|
| 1 | `tier-1-no-deps/` | 01-03 | None | exec, fetch, builtins |
| 2 | `tier-2-llm/` | 04-07 | `nika provider set <name>` | infer, DAG, for_each, context |
| 3 | `tier-3-agent/` | 08-09 | LLM provider | agent, file tools, artifacts |
| 4 | `tier-4-mcp/` | 10 | `nika mcp add novanet` | MCP, NovaNet, invoke |
| 5 | `tier-5-dev/` | 11-20 | LLM + optional MCP | Complex DAGs, real dev workflows |
| 6 | `tier-6-magic/` | 21-30 | LLM | Everyday automation, marketing, fun |
## Provider Setup
```bash
# Check available providers
nika provider list
# Set your preferred provider (pick one)
nika provider set anthropic # Claude (recommended)
nika provider set openai # GPT-4
nika provider set mistral # Mistral Large
nika provider set groq # Groq (fast, free tier)
nika provider set deepseek # DeepSeek
nika provider set gemini # Google Gemini
```
## Local Models (Native Inference)
Run LLMs locally without API keys using Nika's native mistral.rs runtime:
```bash
# Download a GGUF model (e.g., from HuggingFace)
nika model pull llama3.2:1b # Small, fast
nika model pull mistral:7b-instruct # Good balance
nika model pull qwen2.5:7b # Multilingual
# List installed models
nika model list
```
Use in workflows with `provider: native`:
```yaml
tasks:
- id: local_infer
infer:
provider: native
model: ~/.cache/huggingface/models/llama3.2-1b-q4.gguf
prompt: "Explain quantum computing in simple terms"
temperature: 0.7
```
**Benefits:**
- 🔒 Complete privacy (no data leaves your machine)
- 💰 No API costs
- 🔌 Works offline
- ⚡ Fast inference with Metal (macOS) or CUDA (Linux)
## MCP Setup (Tier 4+)
```bash
# Add NovaNet MCP server
nika mcp add novanet
# Verify MCP connection
nika mcp test novanet
```
## Workflow Index
### Tier 1: Zero Dependencies
- `01-exec-basics` - Shell commands, env vars, timeouts
- `02-fetch-http` - HTTP requests, headers, JSON parsing
- `03-builtins-core` - nika:log, nika:sleep, nika:emit, nika:assert
### Tier 2: LLM Required
- `04-infer-basics` - LLM prompts, temperature, system prompts
- `05-dag-patterns` - Diamond DAG, fan-out/fan-in
- `06-parallel-foreach` - for_each with concurrency
- `07-context-include` - File loading, DAG fusion, skills
### Tier 3: Agent + Tools
- `08-agent-basic` - Agent with file tools, stop conditions
- `09-structured-output` - JSON Schema validation, artifacts
### Tier 4: MCP Integration
- `10-mcp-novanet` - NovaNet queries, knowledge graph
### Tier 5: Developer Use Cases
- `11-code-review-pipeline` - Multi-file code analysis
- `12-content-localization` - Multi-locale content generation
- `13-seo-content-generator` - SEO-optimized articles
- `14-documentation-generator` - Auto-generate docs from code
- `15-data-etl-pipeline` - Extract, transform, load
- `16-research-assistant` - Nested agents, deep research
- `17-pr-review-bot` - Git diff analysis
- `18-meeting-processor` - Extract action items
- `19-api-health-checker` - Endpoint monitoring
- `20-knowledge-extractor` - Entity extraction to graph
### Tier 6: Everyday Magic
- `21-morning-briefing` - Daily summary (weather, news, calendar)
- `22-social-media-planner` - Week of posts in 2 minutes
- `23-competitor-spy` - Competitive intelligence
- `24-email-composer` - Perfect emails in any tone
- `25-recipe-meal-planner` - Weekly menu + grocery list
- `26-travel-planner` - Complete trip itinerary
- `27-birthday-party-planner` - Party planning kit
- `28-podcast-show-notes` - Transcript to content package
- `29-product-review-analyzer` - "Should I buy this?"
- `30-newsletter-curator` - Auto-curate newsletters
## Learn More
- [Nika Documentation](https://github.com/supernovae-studio/nika)
- [Nika CLI](https://github.com/supernovae-studio/nika)
- [NovaNet](https://github.com/supernovae-studio/novanet)
"#;
#[cfg(test)]
mod tests {
    use super::*;

    /// Counts workflows whose `tier_dir` matches `dir`.
    fn tier_count(all: &[WorkflowTemplate], dir: &str) -> usize {
        all.iter().filter(|w| w.tier_dir == dir).count()
    }

    #[test]
    fn test_all_workflows_exist() {
        assert_eq!(
            get_all_workflows().len(),
            30,
            "Should have exactly 30 workflows"
        );
    }

    #[test]
    fn test_workflow_tiers() {
        let all = get_all_workflows();
        assert_eq!(tier_count(&all, "tier-1-no-deps"), 3, "Tier 1 should have 3 workflows");
        assert_eq!(tier_count(&all, "tier-2-llm"), 4, "Tier 2 should have 4 workflows");
        assert_eq!(tier_count(&all, "tier-3-agent"), 2, "Tier 3 should have 2 workflows");
        assert_eq!(tier_count(&all, "tier-4-mcp"), 1, "Tier 4 should have 1 workflow");
        assert_eq!(tier_count(&all, "tier-5-dev"), 10, "Tier 5 should have 10 workflows");
        assert_eq!(tier_count(&all, "tier-6-magic"), 10, "Tier 6 should have 10 workflows");
    }

    #[test]
    fn test_workflow_filenames_unique() {
        let workflows = get_all_workflows();
        let total = workflows.len();
        let mut names: Vec<&str> = workflows.iter().map(|w| w.filename).collect();
        names.sort_unstable();
        names.dedup();
        assert_eq!(names.len(), total, "All workflow filenames should be unique");
    }

    #[test]
    fn test_workflow_filenames_format() {
        for wf in get_all_workflows() {
            assert!(
                wf.filename.ends_with(".nika.yaml"),
                "Workflow {} should end with .nika.yaml",
                wf.filename
            );
            assert!(
                wf.filename.starts_with(char::is_numeric),
                "Workflow {} should start with a number",
                wf.filename
            );
        }
    }

    #[test]
    fn test_workflows_have_schema() {
        // The schema value may be double-quoted, single-quoted, or bare.
        let accepted = [
            "schema: \"nika/workflow@0.12\"",
            "schema: 'nika/workflow@0.12'",
            "schema: nika/workflow@0.12",
        ];
        for wf in get_all_workflows() {
            assert!(
                accepted.iter().any(|form| wf.content.contains(form)),
                "Workflow {} should have schema declaration",
                wf.filename
            );
        }
    }

    #[test]
    fn test_workflows_have_workflow_name() {
        for wf in get_all_workflows() {
            assert!(
                wf.content.contains("workflow:"),
                "Workflow {} should have workflow: declaration",
                wf.filename
            );
        }
    }

    #[test]
    fn test_workflows_have_tasks() {
        for wf in get_all_workflows() {
            assert!(
                wf.content.contains("tasks:"),
                "Workflow {} should have tasks: section",
                wf.filename
            );
        }
    }

    #[test]
    fn test_context_files_exist() {
        assert!(
            get_all_context_files().len() >= 5,
            "Should have at least 5 context files"
        );
    }

    #[test]
    fn test_schema_files_exist() {
        assert!(
            get_all_schemas().len() >= 5,
            "Should have at least 5 schema files"
        );
    }

    #[test]
    fn test_schema_files_valid_json() {
        for schema in get_all_schemas() {
            assert!(
                schema.filename.ends_with(".schema.json"),
                "Schema {} should end with .schema.json",
                schema.filename
            );
            let parsed: Result<serde_json::Value, _> = serde_json::from_str(schema.content);
            assert!(
                parsed.is_ok(),
                "Schema {} should be valid JSON: {:?}",
                schema.filename,
                parsed.err()
            );
        }
    }

    #[test]
    fn test_partial_files_exist() {
        assert!(
            get_all_partials().len() >= 5,
            "Should have at least 5 partial files"
        );
    }

    #[test]
    fn test_readme_exists() {
        assert!(!WORKFLOWS_README.is_empty(), "README should not be empty");
        assert!(
            WORKFLOWS_README.contains("Nika Example Workflows"),
            "README should have title"
        );
        assert!(
            WORKFLOWS_README.contains("Quick Start"),
            "README should have Quick Start section"
        );
    }

    #[test]
    fn test_workflows_valid_yaml() {
        for wf in get_all_workflows() {
            let parsed: Result<serde_json::Value, _> = crate::serde_yaml::from_str(wf.content);
            assert!(
                parsed.is_ok(),
                "Workflow {} should be valid YAML: {:?}",
                wf.filename,
                parsed.err()
            );
        }
    }

    #[test]
    fn test_partials_valid_yaml() {
        for partial in get_all_partials() {
            let parsed: Result<serde_json::Value, _> = crate::serde_yaml::from_str(partial.content);
            assert!(
                parsed.is_ok(),
                "Partial {} should be valid YAML: {:?}",
                partial.filename,
                parsed.err()
            );
        }
    }
}