pub mod error;
mod context;
pub mod course;
mod minimal;
mod schemas;
mod showcase_advanced;
mod showcase_fetch;
mod showcase_infra;
mod showcase_patterns;
pub use context::*;
pub use minimal::*;
pub use schemas::*;
pub use showcase_advanced::*;
pub use showcase_fetch::*;
pub use showcase_infra::*;
pub use showcase_patterns::*;
/// A bundled workflow template shipped with the binary.
///
/// All fields are `&'static str` because templates are embedded at
/// compile time (see the `minimal`/`showcase_*` modules).
pub struct WorkflowTemplate {
/// File name the template is written out as (e.g. `01-exec.nika.yaml`).
pub filename: &'static str,
/// Tier subdirectory the file belongs under (e.g. `minimal`).
pub tier_dir: &'static str,
/// Full YAML content of the workflow.
pub content: &'static str,
}
/// A bundled auxiliary file (context document or JSON schema)
/// shipped alongside the workflow templates.
pub struct ContextFile {
/// File name to write (schemas end with `.schema.json` — see tests).
pub filename: &'static str,
/// Target directory for the file.
pub dir: &'static str,
/// Full embedded file content.
pub content: &'static str,
}
/// Collect every bundled workflow template across all tiers.
///
/// Order is stable: minimal starters first, then the showcase tiers
/// (patterns, advanced, infra, fetch) — callers and tests rely on the
/// combined set, not the order, but we keep it deterministic anyway.
pub fn get_all_workflows() -> Vec<WorkflowTemplate> {
    minimal::get_minimal_workflows()
        .into_iter()
        .chain(showcase_patterns::get_showcase_workflows())
        .chain(showcase_advanced::get_showcase_advanced_workflows())
        .chain(showcase_infra::get_showcase_infra_workflows())
        .chain(showcase_fetch::get_showcase_fetch_workflows())
        .collect()
}
/// All bundled context files (delegates to the private `context` module).
pub fn get_all_context_files() -> Vec<ContextFile> {
context::get_context_files()
}
/// All bundled JSON schema files (delegates to the private `schemas` module).
pub fn get_all_schemas() -> Vec<ContextFile> {
schemas::get_schema_files()
}
/// README written into the generated `workflows/` directory.
///
/// NOTE: the markdown below is emitted verbatim at runtime — keep the
/// "Nika Workflows" title and "Quick Start" heading; `test_readme_exists`
/// asserts both are present.
pub const WORKFLOWS_README: &str = r#"# Nika Workflows
> 5 minimal starter workflows + interactive course with 12 levels.
## Quick Start
```bash
# 1. Run immediately (no API key needed)
nika run workflows/minimal/01-exec.nika.yaml
nika run workflows/minimal/02-fetch.nika.yaml
# 2. Setup LLM provider, then run LLM workflows
nika provider set anthropic # or: openai, mistral, groq, deepseek, gemini
nika run workflows/minimal/03-infer.nika.yaml
nika run workflows/minimal/04-invoke.nika.yaml
nika run workflows/minimal/05-agent.nika.yaml
```
## Minimal Starters (5 workflows)
| # | File | Verb | Prerequisites |
|---|------|------|---------------|
| 01 | `01-exec.nika.yaml` | `exec:` | None |
| 02 | `02-fetch.nika.yaml` | `fetch:` | None |
| 03 | `03-infer.nika.yaml` | `infer:` | LLM provider |
| 04 | `04-invoke.nika.yaml` | `invoke:` | None (builtins) |
| 05 | `05-agent.nika.yaml` | `agent:` | LLM provider |
## Interactive Course (12 levels)
Start the course to learn Nika from zero to production:
```bash
nika course next
```
| # | Level | Exercises | Theme |
|---|-------|-----------|-------|
| 01 | Jailbreak | 5 | Break free — exec: basics |
| 02 | Hot Wire | 4 | Network — fetch: HTTP |
| 03 | Fork Bomb | 4 | Multiply — DAG patterns |
| 04 | Root Access | 3 | Unlock LLM — infer: |
| 05 | Shapeshifter | 3 | Transform — with: bindings |
| 06 | Pay-Per-Dream | 3 | Structured output |
| 07 | Swiss Knife | 3 | Builtin tools — invoke: |
| 08 | Gone Rogue | 3 | Autonomous — agent: |
| 09 | Data Heist | 4 | Extraction — fetch: extract |
| 10 | Open Protocol | 3 | MCP integration |
| 11 | Pixel Pirate | 4 | Media pipeline |
| 12 | SuperNovae | 5 | Boss — full orchestration |
## Provider Setup
```bash
nika provider list # Check available providers
nika provider set anthropic # Claude (recommended)
nika provider set openai # GPT-4
nika provider set mistral # Mistral Large
nika provider set groq # Groq (fast, free tier)
nika provider set deepseek # DeepSeek
nika provider set gemini # Google Gemini
```
## Learn More
- [Nika Documentation](https://github.com/supernovae-st/nika)
- [NovaNet](https://github.com/supernovae-st/novanet)
"#;
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashSet;

    /// 5 minimal + 15 patterns + 15 advanced + 15 infra + 15 fetch = 65.
    #[test]
    fn test_all_workflows_count() {
        let workflows = get_all_workflows();
        assert_eq!(
            workflows.len(),
            65,
            "Should have 5 minimal + 15 patterns + 15 advanced + 15 infra + 15 fetch"
        );
    }

    /// Filenames must be unique across all tiers; on failure, name the
    /// offending duplicate instead of just reporting a length mismatch.
    #[test]
    fn test_workflow_filenames_unique() {
        let workflows = get_all_workflows();
        let mut seen = HashSet::new();
        for w in &workflows {
            assert!(
                seen.insert(w.filename),
                "Duplicate workflow filename: {}",
                w.filename
            );
        }
    }

    /// Every workflow must pin the schema version it was written for.
    #[test]
    fn test_workflows_have_schema() {
        let workflows = get_all_workflows();
        for w in &workflows {
            assert!(
                w.content.contains("schema: \"nika/workflow@0.12\""),
                "Workflow {} should have schema declaration",
                w.filename
            );
        }
    }

    #[test]
    fn test_workflows_have_workflow_name() {
        let workflows = get_all_workflows();
        for w in &workflows {
            assert!(
                w.content.contains("workflow:"),
                "Workflow {} should have workflow: declaration",
                w.filename
            );
        }
    }

    /// Same diagnostic style as the sibling tests: name the failing file.
    #[test]
    fn test_workflows_have_tasks() {
        for w in get_all_workflows() {
            assert!(
                w.content.contains("tasks:"),
                "Workflow {} should have tasks: declaration",
                w.filename
            );
        }
    }

    #[test]
    fn test_context_files_exist() {
        assert!(get_all_context_files().len() >= 2);
    }

    /// Schemas must use the `.schema.json` suffix and parse as JSON.
    #[test]
    fn test_schema_files_valid_json() {
        let files = get_all_schemas();
        for f in &files {
            assert!(
                f.filename.ends_with(".schema.json"),
                "Schema {} should end with .schema.json",
                f.filename
            );
            let parsed: Result<serde_json::Value, _> = serde_json::from_str(f.content);
            assert!(
                parsed.is_ok(),
                "Schema {} should be valid JSON: {:?}",
                f.filename,
                parsed.err()
            );
        }
    }

    #[test]
    fn test_readme_exists() {
        assert!(!WORKFLOWS_README.is_empty(), "README should not be empty");
        assert!(
            WORKFLOWS_README.contains("Nika Workflows"),
            "README should have title"
        );
        assert!(
            WORKFLOWS_README.contains("Quick Start"),
            "README should have Quick Start section"
        );
    }

    /// Templated workflows ({{PROVIDER}}/{{MODEL}} placeholders) are not
    /// valid YAML until rendered, so they are skipped here.
    #[test]
    fn test_workflows_valid_yaml() {
        let workflows = get_all_workflows();
        for w in &workflows {
            if w.content.contains("{{PROVIDER}}") || w.content.contains("{{MODEL}}") {
                continue;
            }
            let parsed: Result<serde_json::Value, _> = serde_saphyr::from_str(w.content);
            assert!(
                parsed.is_ok(),
                "Workflow {} should be valid YAML: {:?}",
                w.filename,
                parsed.err()
            );
        }
    }

    #[test]
    fn test_course_levels_exist() {
        assert_eq!(
            course::levels::LEVELS.len(),
            12,
            "Course should have 12 levels"
        );
    }

    /// 5+4+4+3+3+3+3+3+4+3+4+5 = 44 exercises across the 12 levels.
    #[test]
    fn test_course_total_exercises() {
        assert_eq!(
            course::levels::total_exercises(),
            44,
            "Course should have 44 total exercises"
        );
    }
}