use napi::bindgen_prelude::*;
use napi::Task;
use napi_derive::napi;
use std::collections::HashMap;
use ox_content_allocator::Allocator;
use ox_content_ast::{Document, Heading, Node};
use ox_content_parser::{Parser, ParserOptions};
use ox_content_renderer::{HtmlRenderer, HtmlRendererOptions};
use ox_content_search::{DocumentIndexer, SearchIndex, SearchIndexBuilder, SearchOptions};
/// Result of [`parse`]: the serialized AST plus any parse errors.
#[napi(object)]
pub struct ParseResult {
    /// JSON-serialized AST. NOTE(review): currently a hard-coded empty-document
    /// placeholder — see `parse`.
    pub ast: String,
    /// Parse error messages; empty on success.
    pub errors: Vec<String>,
}
/// Result of an HTML rendering call: the HTML output plus any errors.
#[napi(object)]
pub struct RenderResult {
    /// Rendered HTML; empty string when parsing failed.
    pub html: String,
    /// Error messages; empty on success.
    pub errors: Vec<String>,
}
/// One table-of-contents entry extracted from a heading.
#[napi(object)]
#[derive(Clone)]
pub struct TocEntry {
    /// Heading level (1 = `<h1>`, …).
    pub depth: u8,
    /// Plain text of the heading (inline markup flattened).
    pub text: String,
    /// URL-safe slug derived from `text` via `slugify`.
    pub slug: String,
}
/// Result of [`transform`]: HTML, frontmatter, TOC, and any errors.
#[napi(object)]
pub struct TransformResult {
    /// Rendered HTML body (frontmatter stripped).
    pub html: String,
    /// Frontmatter serialized as a JSON object string (`"{}"` when absent).
    pub frontmatter: String,
    /// Table of contents built from headings up to the configured depth.
    pub toc: Vec<TocEntry>,
    /// Error messages; empty on success.
    pub errors: Vec<String>,
}
/// JS-facing options for [`transform`]. Unset fields fall back to the parser /
/// renderer defaults (with `gfm` selecting the GFM preset first).
#[napi(object)]
#[derive(Default, Clone)]
pub struct JsTransformOptions {
    /// Enable the GitHub-Flavored-Markdown preset as the baseline.
    pub gfm: Option<bool>,
    pub footnotes: Option<bool>,
    pub task_lists: Option<bool>,
    pub tables: Option<bool>,
    pub strikethrough: Option<bool>,
    pub autolinks: Option<bool>,
    /// Maximum heading depth included in the TOC (default 3).
    pub toc_max_depth: Option<u8>,
    /// Rewrite `.md` links in output HTML (renderer option).
    pub convert_md_links: Option<bool>,
    /// Base URL prepended by the renderer when resolving links.
    pub base_url: Option<String>,
}
/// JS-facing parser options. `gfm` picks the preset; every other flag, when
/// set, overrides the preset's value (see the `From` impl below).
#[napi(object)]
#[derive(Default)]
pub struct JsParserOptions {
    pub gfm: Option<bool>,
    pub footnotes: Option<bool>,
    pub task_lists: Option<bool>,
    pub tables: Option<bool>,
    pub strikethrough: Option<bool>,
    pub autolinks: Option<bool>,
}
impl From<JsParserOptions> for ParserOptions {
fn from(opts: JsParserOptions) -> Self {
let mut options =
if opts.gfm.unwrap_or(false) { ParserOptions::gfm() } else { ParserOptions::default() };
if let Some(v) = opts.footnotes {
options.footnotes = v;
}
if let Some(v) = opts.task_lists {
options.task_lists = v;
}
if let Some(v) = opts.tables {
options.tables = v;
}
if let Some(v) = opts.strikethrough {
options.strikethrough = v;
}
if let Some(v) = opts.autolinks {
options.autolinks = v;
}
options
}
}
/// Parses `source` and returns the AST as JSON plus any parse errors.
///
/// NOTE(review): AST JSON serialization is not implemented yet — on success
/// this returns a hard-coded empty-document placeholder, not the parsed tree.
#[napi]
pub fn parse(source: String, options: Option<JsParserOptions>) -> ParseResult {
    // Arena allocator owns the AST; it only needs to live for this call.
    let allocator = Allocator::new();
    let parser_options = options.map(ParserOptions::from).unwrap_or_default();
    let parser = Parser::with_options(&allocator, &source, parser_options);
    let result = parser.parse();
    match result {
        Ok(_doc) => {
            // Placeholder until AST-to-JSON serialization lands.
            let ast = "{\"type\":\"document\",\"children\":[]}".to_string();
            ParseResult { ast, errors: vec![] }
        }
        Err(e) => ParseResult { ast: String::new(), errors: vec![e.to_string()] },
    }
}
/// Parses `source` and renders it straight to HTML in a single call.
///
/// On parse failure, `html` is empty and `errors` carries the message.
#[napi]
pub fn parse_and_render(source: String, options: Option<JsParserOptions>) -> RenderResult {
    let allocator = Allocator::new();
    let opts = options.map_or_else(ParserOptions::default, ParserOptions::from);
    match Parser::with_options(&allocator, &source, opts).parse() {
        Ok(doc) => {
            let mut renderer = HtmlRenderer::new();
            let html = renderer.render(&doc);
            RenderResult { html, errors: Vec::new() }
        }
        Err(err) => RenderResult { html: String::new(), errors: vec![err.to_string()] },
    }
}
/// Renders a previously serialized AST (JSON) to HTML.
///
/// Not yet implemented: always returns empty HTML plus an explanatory error.
#[napi]
pub fn render(_ast_json: String) -> RenderResult {
    RenderResult {
        html: String::new(),
        errors: vec!["render from JSON not yet implemented".to_string()],
    }
}
/// Returns this crate's version, baked in at compile time.
#[napi]
pub fn version() -> String {
    String::from(env!("CARGO_PKG_VERSION"))
}
/// Full pipeline: strip frontmatter, parse, extract a TOC, and render to HTML.
///
/// On parse failure the result carries empty HTML, `"{}"` frontmatter, an
/// empty TOC, and the error message.
#[napi]
pub fn transform(source: String, options: Option<JsTransformOptions>) -> TransformResult {
    let opts = options.unwrap_or_default();
    let max_depth = opts.toc_max_depth.unwrap_or(3);
    let (content, frontmatter) = parse_frontmatter(&source);
    let allocator = Allocator::new();
    let parser =
        Parser::with_options(&allocator, &content, transform_options_to_parser_options(&opts));
    match parser.parse() {
        Err(err) => TransformResult {
            html: String::new(),
            frontmatter: "{}".to_string(),
            toc: Vec::new(),
            errors: vec![err.to_string()],
        },
        Ok(doc) => {
            let toc = extract_toc(&doc, max_depth);
            let mut renderer =
                HtmlRenderer::with_options(transform_options_to_renderer_options(&opts));
            let html = renderer.render(&doc);
            // Frontmatter is shipped to JS as a JSON object string.
            let frontmatter =
                serde_json::to_string(&frontmatter).unwrap_or_else(|_| "{}".to_string());
            TransformResult { html, frontmatter, toc, errors: Vec::new() }
        }
    }
}
/// Splits `source` into (content, frontmatter map).
///
/// Frontmatter is a leading block delimited by lines that are exactly `---`.
/// Values are parsed as bool / i64 / f64, falling back to a (de-quoted)
/// string; lines without a `:` and `#`-comment lines are skipped.
///
/// Fix: the old code accepted any `---` *prefix* as a fence, so a `----`
/// thematic break at the top of a document opened a bogus frontmatter block
/// (silently dropping content), and any `\n---…` (e.g. `\n----`, `\n---abc`)
/// closed one, corrupting the split. Both fences must now be exact `---` lines.
fn parse_frontmatter(source: &str) -> (String, HashMap<String, serde_json::Value>) {
    let mut frontmatter = HashMap::new();
    // Opening fence: the document must start with a line that is exactly `---`.
    let opens = source.starts_with("---")
        && matches!(source.as_bytes().get(3), None | Some(b'\n') | Some(b'\r'));
    if !opens {
        return (source.to_string(), frontmatter);
    }
    let rest = &source[3..];
    // Closing fence: `\n---` followed by a newline or end of input; anything
    // else (`\n----`, `\n---abc`) is content, so keep scanning.
    let mut close = None;
    let mut from = 0;
    while let Some(pos) = rest[from..].find("\n---") {
        let at = from + pos;
        match rest.as_bytes().get(at + 4) {
            None | Some(b'\n') | Some(b'\r') => {
                close = Some(at);
                break;
            }
            _ => from = at + 1,
        }
    }
    let Some(end_pos) = close else {
        return (source.to_string(), frontmatter);
    };
    let frontmatter_str = rest[..end_pos].trim_start_matches('\n');
    let content = rest[end_pos + 4..].trim_start_matches('\n');
    for line in frontmatter_str.lines() {
        let line = line.trim();
        if line.is_empty() || line.starts_with('#') {
            continue;
        }
        if let Some(colon_pos) = line.find(':') {
            let key = line[..colon_pos].trim().to_string();
            let value_str = line[colon_pos + 1..].trim();
            // Typed parsing: bool, then integer, then float, then string.
            let value = if value_str == "true" {
                serde_json::Value::Bool(true)
            } else if value_str == "false" {
                serde_json::Value::Bool(false)
            } else if let Ok(n) = value_str.parse::<i64>() {
                serde_json::Value::Number(n.into())
            } else if let Ok(n) = value_str.parse::<f64>() {
                // NaN/inf have no JSON representation; fall back to the string.
                serde_json::Number::from_f64(n).map_or_else(
                    || serde_json::Value::String(value_str.to_string()),
                    serde_json::Value::Number,
                )
            } else {
                // Strip surrounding single or double quotes.
                let s = value_str.trim_matches('"').trim_matches('\'');
                serde_json::Value::String(s.to_string())
            };
            frontmatter.insert(key, value);
        }
    }
    (content.to_string(), frontmatter)
}
/// Collects a TOC entry for every top-level heading with depth <= `max_depth`.
/// Only direct children of the document are considered (headings nested inside
/// other nodes are not visited).
fn extract_toc(doc: &Document, max_depth: u8) -> Vec<TocEntry> {
    doc.children
        .iter()
        .filter_map(|node| match node {
            Node::Heading(heading) if heading.depth <= max_depth => {
                let text = extract_heading_text(heading);
                let slug = slugify(&text);
                Some(TocEntry { depth: heading.depth, text, slug })
            }
            _ => None,
        })
        .collect()
}
/// Flattens a heading's inline children into their plain-text concatenation.
fn extract_heading_text(heading: &Heading) -> String {
    let mut out = String::new();
    heading.children.iter().for_each(|child| collect_text(child, &mut out));
    out
}
/// Recursively appends the plain text of `node` to `text`.
///
/// Text and inline-code contribute their literal value; emphasis, strong,
/// strikethrough, and link nodes contribute their children's text; every other
/// node kind is ignored.
fn collect_text(node: &Node, text: &mut String) {
    let children = match node {
        Node::Text(t) => {
            text.push_str(t.value);
            return;
        }
        Node::InlineCode(c) => {
            text.push_str(c.value);
            return;
        }
        Node::Emphasis(e) => &e.children,
        Node::Strong(s) => &s.children,
        Node::Delete(d) => &d.children,
        Node::Link(l) => &l.children,
        _ => return,
    };
    for child in children {
        collect_text(child, text);
    }
}
fn slugify(text: &str) -> String {
text.to_lowercase()
.chars()
.map(|c| if c.is_alphanumeric() || c == ' ' || c == '-' { c } else { ' ' })
.collect::<String>()
.split_whitespace()
.collect::<Vec<_>>()
.join("-")
}
fn transform_options_to_parser_options(opts: &JsTransformOptions) -> ParserOptions {
let mut options =
if opts.gfm.unwrap_or(false) { ParserOptions::gfm() } else { ParserOptions::default() };
if let Some(v) = opts.footnotes {
options.footnotes = v;
}
if let Some(v) = opts.task_lists {
options.task_lists = v;
}
if let Some(v) = opts.tables {
options.tables = v;
}
if let Some(v) = opts.strikethrough {
options.strikethrough = v;
}
if let Some(v) = opts.autolinks {
options.autolinks = v;
}
options
}
/// Maps the renderer-relevant subset of transform options (md-link conversion
/// and base URL) onto defaulted `HtmlRendererOptions`.
fn transform_options_to_renderer_options(opts: &JsTransformOptions) -> HtmlRendererOptions {
    let mut out = HtmlRendererOptions::new();
    if let Some(convert) = opts.convert_md_links {
        out.convert_md_links = convert;
    }
    if let Some(base) = opts.base_url.as_ref() {
        // clone_from reuses the existing allocation when it can.
        out.base_url.clone_from(base);
    }
    out
}
/// Background task for [`parse_and_render_async`]: carries the source and the
/// already-converted parser options onto the libuv thread pool.
pub struct ParseAndRenderTask {
    source: String,
    options: ParserOptions,
}
impl Task for ParseAndRenderTask {
    type Output = RenderResult;
    type JsValue = RenderResult;

    /// Parses and renders on a worker thread; never returns `Err` — parse
    /// failures are reported inside `RenderResult::errors`.
    fn compute(&mut self) -> Result<Self::Output> {
        let allocator = Allocator::new();
        let parser = Parser::with_options(&allocator, &self.source, self.options.clone());
        Ok(match parser.parse() {
            Ok(doc) => {
                let mut renderer = HtmlRenderer::new();
                RenderResult { html: renderer.render(&doc), errors: Vec::new() }
            }
            Err(err) => RenderResult { html: String::new(), errors: vec![err.to_string()] },
        })
    }

    /// The output is already a napi object; hand it to JS unchanged.
    fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {
        Ok(output)
    }
}
/// Async variant of [`parse_and_render`]: converts options up front and queues
/// the work on the libuv thread pool, returning a JS Promise.
#[napi]
pub fn parse_and_render_async(
    source: String,
    options: Option<JsParserOptions>,
) -> AsyncTask<ParseAndRenderTask> {
    let options = options.map_or_else(ParserOptions::default, ParserOptions::from);
    AsyncTask::new(ParseAndRenderTask { source, options })
}
/// Background task for [`transform_async`]: carries the source and the raw
/// JS options onto the libuv thread pool.
pub struct TransformTask {
    source: String,
    options: JsTransformOptions,
}
impl Task for TransformTask {
type Output = TransformResult;
type JsValue = TransformResult;
fn compute(&mut self) -> Result<Self::Output> {
let toc_max_depth = self.options.toc_max_depth.unwrap_or(3);
let (content, frontmatter) = parse_frontmatter(&self.source);
let allocator = Allocator::new();
let parser_options = transform_options_to_parser_options(&self.options);
let parser = Parser::with_options(&allocator, &content, parser_options);
let result = match parser.parse() {
Ok(doc) => {
let toc = extract_toc(&doc, toc_max_depth);
let renderer_options = transform_options_to_renderer_options(&self.options);
let mut renderer = HtmlRenderer::with_options(renderer_options);
let html = renderer.render(&doc);
TransformResult {
html,
frontmatter: serde_json::to_string(&frontmatter)
.unwrap_or_else(|_| "{}".to_string()),
toc,
errors: vec![],
}
}
Err(e) => TransformResult {
html: String::new(),
frontmatter: "{}".to_string(),
toc: vec![],
errors: vec![e.to_string()],
},
};
Ok(result)
}
fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {
Ok(output)
}
}
/// Async variant of [`transform`]: queues the full pipeline on the libuv
/// thread pool and returns a JS Promise.
#[napi]
pub fn transform_async(
    source: String,
    options: Option<JsTransformOptions>,
) -> AsyncTask<TransformTask> {
    AsyncTask::new(TransformTask { source, options: options.unwrap_or_default() })
}
/// JS-facing OG-image configuration; unset fields use `OgImageConfig` defaults.
#[napi(object)]
#[derive(Default, Clone)]
pub struct JsOgImageConfig {
    /// Image width in pixels.
    pub width: Option<u32>,
    /// Image height in pixels.
    pub height: Option<u32>,
    /// CSS-style background color string.
    pub background_color: Option<String>,
    /// CSS-style text color string.
    pub text_color: Option<String>,
    pub title_font_size: Option<u32>,
    pub description_font_size: Option<u32>,
}
/// JS-facing content for an OG image. `date` and `tags` are not exposed here
/// and are always empty in the generated image (see `generate_og_image_svg`).
#[napi(object)]
pub struct JsOgImageData {
    pub title: String,
    pub description: Option<String>,
    pub site_name: Option<String>,
    pub author: Option<String>,
}
#[napi]
pub fn generate_og_image_svg(data: JsOgImageData, config: Option<JsOgImageConfig>) -> String {
use ox_content_og_image::{OgImageConfig, OgImageData, OgImageGenerator};
let cfg = config.unwrap_or_default();
let mut og_config = OgImageConfig::default();
if let Some(w) = cfg.width {
og_config.width = w;
}
if let Some(h) = cfg.height {
og_config.height = h;
}
if let Some(ref bg) = cfg.background_color {
og_config.background_color.clone_from(bg);
}
if let Some(ref tc) = cfg.text_color {
og_config.text_color.clone_from(tc);
}
if let Some(ts) = cfg.title_font_size {
og_config.title_font_size = ts;
}
if let Some(ds) = cfg.description_font_size {
og_config.description_font_size = ds;
}
let og_data = OgImageData {
title: data.title,
description: data.description,
site_name: data.site_name,
author: data.author,
date: None,
tags: vec![],
};
let generator = OgImageGenerator::new(og_config);
generator.generate_svg(&og_data)
}
/// JS-facing searchable document, mirroring `ox_content_search::SearchDocument`.
#[napi(object)]
#[derive(Clone)]
pub struct JsSearchDocument {
    /// Unique document id used to reference results.
    pub id: String,
    pub title: String,
    pub url: String,
    /// Plain-text body used for full-text matching.
    pub body: String,
    /// Heading texts, indexed separately from the body.
    pub headings: Vec<String>,
    /// Code snippets, indexed separately from the body.
    pub code: Vec<String>,
}
/// JS-facing search hit returned by [`search_index`].
#[napi(object)]
pub struct JsSearchResult {
    pub id: String,
    pub title: String,
    pub url: String,
    /// Relevance score from the underlying search engine.
    pub score: f64,
    /// Terms that matched the query.
    pub matches: Vec<String>,
    /// Contextual excerpt around the match.
    pub snippet: String,
}
/// JS-facing search options. Defaults (applied in the `From` impl below):
/// limit 10, prefix matching on, fuzzy matching off, threshold 0.0.
#[napi(object)]
#[derive(Default, Clone)]
pub struct JsSearchOptions {
    pub limit: Option<u32>,
    pub prefix: Option<bool>,
    pub fuzzy: Option<bool>,
    pub threshold: Option<f64>,
}
impl From<JsSearchOptions> for SearchOptions {
fn from(opts: JsSearchOptions) -> Self {
Self {
limit: opts.limit.unwrap_or(10) as usize,
prefix: opts.prefix.unwrap_or(true),
fuzzy: opts.fuzzy.unwrap_or(false),
threshold: opts.threshold.unwrap_or(0.0),
}
}
}
#[napi]
pub fn build_search_index(documents: Vec<JsSearchDocument>) -> String {
let mut builder = SearchIndexBuilder::new();
for doc in documents {
builder.add_document(ox_content_search::SearchDocument {
id: doc.id,
title: doc.title,
url: doc.url,
body: doc.body,
headings: doc.headings,
code: doc.code,
});
}
let index = builder.build();
index.to_json()
}
/// Runs `query` against a JSON-serialized index (from [`build_search_index`]).
///
/// Returns an empty list when the index JSON fails to deserialize.
#[napi]
pub fn search_index(
    index_json: String,
    query: String,
    options: Option<JsSearchOptions>,
) -> Vec<JsSearchResult> {
    let index = match SearchIndex::from_json(&index_json) {
        Ok(index) => index,
        Err(_) => return Vec::new(),
    };
    let opts = options.map_or_else(SearchOptions::default, SearchOptions::from);
    index
        .search(&query, &opts)
        .into_iter()
        .map(|hit| JsSearchResult {
            id: hit.id,
            title: hit.title,
            url: hit.url,
            score: hit.score,
            matches: hit.matches,
            snippet: hit.snippet,
        })
        .collect()
}
/// JS-facing navigation link, mirroring `ox_content_ssg::NavItem`.
#[napi(object)]
#[derive(Clone)]
pub struct JsSsgNavItem {
    pub title: String,
    /// Source path of the page.
    pub path: String,
    /// Rendered link target.
    pub href: String,
}
/// JS-facing navigation section: a titled group of links.
#[napi(object)]
#[derive(Clone)]
pub struct JsSsgNavGroup {
    pub title: String,
    pub items: Vec<JsSsgNavItem>,
}
/// JS-facing page payload for SSG rendering, mirroring `ox_content_ssg::PageData`.
#[napi(object)]
pub struct JsSsgPageData {
    pub title: String,
    pub description: Option<String>,
    /// Pre-rendered HTML body of the page.
    pub content: String,
    pub toc: Vec<TocEntry>,
    pub path: String,
}
/// JS-facing site-wide SSG configuration, mirroring `ox_content_ssg::SsgConfig`.
#[napi(object)]
#[derive(Clone)]
pub struct JsSsgConfig {
    pub site_name: String,
    /// Base path/URL the site is served under.
    pub base: String,
    /// Optional default Open Graph image URL.
    pub og_image: Option<String>,
}
/// Renders a complete static-site HTML page: converts the JS-facing structs
/// into their `ox_content_ssg` counterparts and delegates to `generate_html`.
#[napi]
pub fn generate_ssg_html(
    page_data: JsSsgPageData,
    nav_groups: Vec<JsSsgNavGroup>,
    config: JsSsgConfig,
) -> String {
    let toc: Vec<ox_content_ssg::TocEntry> = page_data
        .toc
        .into_iter()
        .map(|entry| ox_content_ssg::TocEntry {
            depth: entry.depth,
            text: entry.text,
            slug: entry.slug,
        })
        .collect();
    let page = ox_content_ssg::PageData {
        title: page_data.title,
        description: page_data.description,
        content: page_data.content,
        toc,
        path: page_data.path,
    };
    let groups: Vec<ox_content_ssg::NavGroup> = nav_groups
        .into_iter()
        .map(|group| {
            let items = group
                .items
                .into_iter()
                .map(|item| ox_content_ssg::NavItem {
                    title: item.title,
                    path: item.path,
                    href: item.href,
                })
                .collect();
            ox_content_ssg::NavGroup { title: group.title, items }
        })
        .collect();
    let cfg = ox_content_ssg::SsgConfig {
        site_name: config.site_name,
        base: config.base,
        og_image: config.og_image,
    };
    ox_content_ssg::generate_html(&page, &groups, &cfg)
}
/// Builds a search document from raw markdown: strips frontmatter, parses, and
/// extracts title/body/headings/code via `DocumentIndexer`.
///
/// A frontmatter `title` wins over the first heading; on parse failure the
/// textual fields are empty (the frontmatter title, if any, is still used).
#[napi]
pub fn extract_search_content(
    source: String,
    id: String,
    url: String,
    options: Option<JsParserOptions>,
) -> JsSearchDocument {
    let parser_options = options.map(ParserOptions::from).unwrap_or_default();
    let (content, frontmatter) = parse_frontmatter(&source);
    let fm_title = frontmatter.get("title").and_then(|v| v.as_str()).map(String::from);
    let allocator = Allocator::new();
    let parser = Parser::with_options(&allocator, &content, parser_options);
    match parser.parse() {
        Ok(doc) => {
            let mut indexer = DocumentIndexer::new();
            indexer.extract(&doc);
            // Frontmatter title takes precedence over the indexed title.
            let title = fm_title
                .unwrap_or_else(|| indexer.title().map(String::from).unwrap_or_default());
            JsSearchDocument {
                id,
                title,
                url,
                body: indexer.body().to_string(),
                headings: indexer.headings().to_vec(),
                code: indexer.code().to_vec(),
            }
        }
        Err(_) => JsSearchDocument {
            id,
            title: fm_title.unwrap_or_default(),
            url,
            body: String::new(),
            headings: Vec::new(),
            code: Vec::new(),
        },
    }
}