// tensorlogic_compiler/export/mod.rs
//! Export compiled TensorLogic graphs to various formats.
//!
//! This module provides functionality to export `EinsumGraph` instances to different
//! interchange formats for execution on various backends.
//!
//! # Supported Formats
//!
//! - **ONNX** (`onnx` feature): Export to ONNX format for use with ONNX Runtime,
//!   PyTorch, TensorFlow, and other ONNX-compatible frameworks.
//! - **TensorFlow GraphDef** (`tensorflow` feature): Export to TensorFlow GraphDef format
//!   for execution within TensorFlow runtime and SavedModel workflows.
//! - **PyTorch Code** (`pytorch` feature): Generate PyTorch nn.Module Python code
//!   for integration with PyTorch workflows and TorchScript compilation.
//!
//! # Example
//!
//! ```rust,ignore
//! use tensorlogic_compiler::compile_to_einsum;
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let expr = TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]);
//! let graph = compile_to_einsum(&expr)?;
//!
//! // Export to ONNX
//! #[cfg(feature = "onnx")]
//! {
//!     use tensorlogic_compiler::export::onnx::export_to_onnx;
//!     let onnx_bytes = export_to_onnx(&graph, "logic_model")?;
//!     std::fs::write("model.onnx", onnx_bytes)?;
//! }
//!
//! // Export to TensorFlow
//! #[cfg(feature = "tensorflow")]
//! {
//!     use tensorlogic_compiler::export::tensorflow::export_to_tensorflow;
//!     let tf_bytes = export_to_tensorflow(&graph, "logic_model")?;
//!     std::fs::write("model.pb", tf_bytes)?;
//! }
//!
//! // Export to PyTorch
//! #[cfg(feature = "pytorch")]
//! {
//!     use tensorlogic_compiler::export::pytorch::export_to_pytorch;
//!     let pytorch_code = export_to_pytorch(&graph, "LogicModel")?;
//!     std::fs::write("model.py", pytorch_code)?;
//! }
//! ```

// ONNX export backend; compiled only when the `onnx` Cargo feature is enabled.
#[cfg(feature = "onnx")]
pub mod onnx;

// PyTorch nn.Module code generation; compiled only with the `pytorch` feature.
#[cfg(feature = "pytorch")]
pub mod pytorch;

// TensorFlow GraphDef export; compiled only with the `tensorflow` feature.
#[cfg(feature = "tensorflow")]
pub mod tensorflow;