use super::import_session::ImportOptions;
use super::{
MessageRole, SessionImporter, SessionMessage, SessionProvider, SessionSummary, ShareError,
SharedSession,
};
use serde_json::Value;
#[cfg(test)]
use std::path::Path;
use std::path::PathBuf;
// Identifier shared by both the provider and importer halves of this module.
const PROVIDER: &str = "generic-openai";
/// Reads OpenAI-style conversation JSON files from a directory.
///
/// The directory comes from `root_override` when set, otherwise from the
/// `ISELF_GENERIC_DIR` environment variable (see `root()`).
#[derive(Default)]
pub struct GenericOpenAIProvider {
    // Explicit root directory; takes precedence over the env var.
    root_override: Option<PathBuf>,
}
impl GenericOpenAIProvider {
    /// Builds a provider pinned to an explicit session directory.
    pub fn with_root(root: PathBuf) -> Self {
        Self {
            root_override: Some(root),
        }
    }

    /// Resolves the session directory: the explicit override wins, then the
    /// `ISELF_GENERIC_DIR` environment variable; `None` when neither is set.
    fn root(&self) -> Option<PathBuf> {
        if let Some(dir) = &self.root_override {
            return Some(dir.clone());
        }
        match std::env::var("ISELF_GENERIC_DIR") {
            Ok(val) => Some(PathBuf::from(val)),
            Err(_) => None,
        }
    }
}
impl SessionProvider for GenericOpenAIProvider {
    fn name(&self) -> &str {
        PROVIDER
    }

    /// Lists one summary per conversation found under the configured root.
    ///
    /// A `*.json` file holding a single conversation object yields the file
    /// stem as the session id; a file holding an array of conversations
    /// yields one `"<stem>-<index>"` id per element. An unconfigured or
    /// missing root is not an error: it simply yields no sessions.
    fn list_sessions(&self) -> Result<Vec<SessionSummary>, ShareError> {
        let root = match self.root() {
            Some(r) if r.is_dir() => r,
            // Unconfigured or absent directory: nothing to list.
            _ => return Ok(Vec::new()),
        };
        let mut out = Vec::new();
        for entry in std::fs::read_dir(&root)?.filter_map(|e| e.ok()) {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            let id = match path.file_stem().and_then(|s| s.to_str()) {
                Some(stem) if !stem.is_empty() => stem.to_string(),
                _ => continue,
            };
            // Unreadable or malformed files are skipped rather than failing
            // the whole listing.
            let content = match std::fs::read_to_string(&path) {
                Ok(c) => c,
                Err(_) => continue,
            };
            let v: Value = match serde_json::from_str(&content) {
                Ok(v) => v,
                Err(_) => continue,
            };
            if let Some(arr) = v.as_array() {
                for (idx, item) in arr.iter().enumerate() {
                    let sub_id = format!("{}-{}", id, idx);
                    if let Some(s) = summarize_conversation(item, &sub_id) {
                        out.push(s);
                    }
                }
            } else if let Some(s) = summarize_conversation(&v, &id) {
                out.push(s);
            }
        }
        Ok(out)
    }

    /// Loads a session by id.
    ///
    /// A trailing `-<number>` selects that element of an array file named
    /// `<stem>.json` — unless a file literally named `<id>.json` exists,
    /// which takes precedence.
    fn load_session(&self, id: &str) -> Result<SharedSession, ShareError> {
        let root = self
            .root()
            .ok_or_else(|| ShareError::NotFound(id.to_string()))?;
        // Split a trailing "-<number>" suffix into (file stem, array index).
        // The suffix is parsed exactly once (the old code parsed it twice:
        // once for the guard, once for the value).
        let (file_id, conv_index) = match id.rsplit_once('-') {
            Some((stem, suffix)) => match suffix.parse::<usize>() {
                Ok(i) => (stem.to_string(), Some(i)),
                Err(_) => (id.to_string(), None),
            },
            None => (id.to_string(), None),
        };
        let direct = root.join(format!("{}.json", id));
        let path = if direct.exists() {
            direct
        } else {
            root.join(format!("{}.json", file_id))
        };
        if !path.exists() {
            return Err(ShareError::NotFound(id.to_string()));
        }
        let content = std::fs::read_to_string(&path)?;
        let v: Value = serde_json::from_str(&content)
            .map_err(|e| ShareError::Parse(format!("{}: {}", path.display(), e)))?;
        // Array files: pick the indexed conversation (or the first when no
        // index was given); object files: the value itself is the payload.
        let payload = match (v.as_array(), conv_index) {
            (Some(arr), Some(i)) => arr.get(i).cloned().unwrap_or(Value::Null),
            (Some(arr), None) => arr.first().cloned().unwrap_or(Value::Null),
            _ => v,
        };
        Ok(decode_conversation(&payload, id))
    }
}
fn summarize_conversation(v: &Value, id: &str) -> Option<SessionSummary> {
let messages = v.get("messages").and_then(|m| m.as_array())?;
let title = v
.get("title")
.and_then(|t| t.as_str())
.map(|s| s.chars().take(80).collect::<String>());
let started_at = v
.get("created")
.or_else(|| v.get("create_time"))
.or_else(|| v.get("createdAt"))
.and_then(parse_generic_timestamp);
Some(SessionSummary {
provider: PROVIDER.to_string(),
id: id.to_string(),
project_path: None,
started_at,
message_count: messages.len(),
title_hint: title,
imported: false,
})
}
/// Decodes one conversation payload into a `SharedSession`.
///
/// Two layouts are supported: a ChatGPT export (`mapping` of nodes keyed by
/// node id, ordered here by each node's `create_time`) and a plain OpenAI
/// chat-completions object (`messages` array, kept in order).
fn decode_conversation(v: &Value, id: &str) -> SharedSession {
    let started_at = v
        .get("created")
        .or_else(|| v.get("create_time"))
        .or_else(|| v.get("createdAt"))
        .and_then(parse_generic_timestamp);
    let mut messages = Vec::new();
    if let Some(mapping) = v.get("mapping").and_then(|m| m.as_object()) {
        let mut entries: Vec<&Value> = mapping
            .values()
            .filter_map(|n| n.get("message"))
            .filter(|m| !m.is_null())
            .collect();
        // Sort key for a mapping node; nodes without a timestamp sort last.
        fn node_time(m: &Value) -> f64 {
            m.get("create_time")
                .and_then(|c| c.as_f64())
                .unwrap_or(f64::INFINITY)
        }
        // Compare the raw f64 so sub-second ordering survives. The previous
        // `as i64` cast truncated fractional seconds, collapsing every
        // same-second message into an arbitrary relative order.
        entries.sort_by(|a, b| {
            node_time(a)
                .partial_cmp(&node_time(b))
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        for entry in entries {
            if let Some(m) = decode_chatgpt_message(entry) {
                messages.push(m);
            }
        }
    } else if let Some(arr) = v.get("messages").and_then(|m| m.as_array()) {
        // Borrow the array directly instead of cloning every message value.
        for entry in arr {
            if let Some(m) = decode_openai_message(entry) {
                messages.push(m);
            }
        }
    }
    SharedSession {
        provider: PROVIDER.to_string(),
        id: id.to_string(),
        project_path: None,
        started_at,
        messages,
    }
}
/// Decodes one plain OpenAI chat message (`{"role": ..., "content": ...}`).
///
/// Returns `None` for unknown roles and for messages that carry neither
/// content nor `tool_calls`.
fn decode_openai_message(v: &Value) -> Option<SessionMessage> {
    let role_str = v.get("role").and_then(|r| r.as_str())?;
    let role = match role_str {
        "user" => MessageRole::User,
        "assistant" => MessageRole::Assistant,
        "system" => MessageRole::System,
        "tool" | "function" => MessageRole::ToolResult,
        _ => return None,
    };
    let content = match v.get("content") {
        Some(Value::String(s)) => s.clone(),
        // Structured content: concatenate each part's `text` field. (The
        // old code had a second `or_else` lookup of the same `text` field
        // gated on part type; it could never succeed once the first lookup
        // failed, so it was dead code and has been removed.)
        Some(Value::Array(arr)) => arr
            .iter()
            .filter_map(|p| p.get("text").and_then(|t| t.as_str()))
            .collect::<Vec<_>>()
            .join("\n"),
        Some(Value::Null) | None => String::new(),
        // Any other shape is preserved verbatim as JSON text.
        Some(other) => serde_json::to_string(other).unwrap_or_default(),
    };
    let mut metadata = std::collections::HashMap::new();
    if let Some(model) = v.get("model").and_then(|m| m.as_str()) {
        metadata.insert("model".to_string(), model.to_string());
    }
    // Keep tool-call-only assistant messages even though content is empty.
    if content.is_empty() && v.get("tool_calls").is_none() {
        return None;
    }
    Some(SessionMessage {
        role,
        content,
        timestamp: v
            .get("create_time")
            .or_else(|| v.get("created"))
            .and_then(parse_generic_timestamp),
        metadata,
    })
}
/// Decodes one node from a ChatGPT export `mapping`, or `None` when the node
/// has an unknown author role or no non-blank textual content.
fn decode_chatgpt_message(v: &Value) -> Option<SessionMessage> {
    let role = match v
        .get("author")
        .and_then(|a| a.get("role"))
        .and_then(|r| r.as_str())
        .unwrap_or("")
    {
        "user" => MessageRole::User,
        "assistant" => MessageRole::Assistant,
        "system" => MessageRole::System,
        "tool" => MessageRole::ToolResult,
        _ => return None,
    };
    let content_obj = v.get("content")?;
    // Parts are either bare strings or objects carrying a `text` field;
    // anything else contributes nothing.
    let mut pieces: Vec<String> = Vec::new();
    if let Some(parts) = content_obj.get("parts").and_then(|p| p.as_array()) {
        for part in parts {
            match part {
                Value::String(s) => pieces.push(s.clone()),
                other => {
                    if let Some(t) = other.get("text").and_then(|t| t.as_str()) {
                        pieces.push(t.to_string());
                    }
                }
            }
        }
    }
    let content = pieces.join("\n");
    if content.trim().is_empty() {
        return None;
    }
    Some(SessionMessage {
        role,
        content,
        timestamp: v.get("create_time").and_then(parse_generic_timestamp),
        metadata: Default::default(),
    })
}
/// Parses a timestamp in any of the shapes these exports use: an RFC 3339
/// string, an integer (epoch seconds, or epoch milliseconds when larger
/// than 1e12), or a float (epoch seconds with a fractional part).
fn parse_generic_timestamp(v: &Value) -> Option<chrono::DateTime<chrono::Utc>> {
    if let Some(s) = v.as_str() {
        if let Ok(d) = chrono::DateTime::parse_from_rfc3339(s) {
            return Some(d.with_timezone(&chrono::Utc));
        }
    }
    // Check integers BEFORE floats: serde_json's `as_f64` also succeeds for
    // integer numbers, which previously made this branch — and its
    // milliseconds heuristic — unreachable, so an integer millisecond
    // timestamp (e.g. 1715000000000) was misread as seconds.
    if let Some(i) = v.as_i64() {
        if i > 1_000_000_000_000 {
            return chrono::DateTime::from_timestamp_millis(i);
        }
        return chrono::DateTime::from_timestamp(i, 0);
    }
    if let Some(f) = v.as_f64() {
        // Fractional seconds (ChatGPT exports): split into secs + nanos.
        return chrono::DateTime::from_timestamp(f as i64, ((f.fract()) * 1e9) as u32);
    }
    None
}
/// Writes a `SharedSession` out as a generic OpenAI Chat Completions JSON
/// file so other tooling that consumes that format can pick it up.
#[derive(Default)]
pub struct GenericOpenAIImporter;
impl SessionImporter for GenericOpenAIImporter {
    fn name(&self) -> &str {
        PROVIDER
    }

    /// Writes `session` as a new `<uuid>.json` chat-completions file under
    /// the destination root and returns a human-readable success message.
    ///
    /// Destination resolution order: `opts.dest_root_override`, then the
    /// `ISELF_GENERIC_DIR` environment variable, then `~/.i-self/generic`.
    fn import(&self, session: &SharedSession, opts: &ImportOptions) -> Result<String, ShareError> {
        let root = opts
            .dest_root_override
            .clone()
            .or_else(|| std::env::var("ISELF_GENERIC_DIR").ok().map(PathBuf::from))
            .or_else(|| dirs::home_dir().map(|h| h.join(".i-self").join("generic")))
            .ok_or_else(|| ShareError::Parse("no home directory".into()))?;
        std::fs::create_dir_all(&root).map_err(ShareError::Io)?;
        let new_id = uuid::Uuid::new_v4().to_string();
        let path = root.join(format!("{}.json", new_id));
        // The first message is a synthetic provenance banner so a human (or
        // the receiving model) can see where the transcript came from.
        let mut messages_out: Vec<Value> = Vec::with_capacity(session.messages.len() + 1);
        messages_out.push(serde_json::json!({
            "role": "user",
            "content": format!(
                "[i-self import] Continued from {} session {}. {} prior messages follow.",
                session.provider, session.id, session.messages.len()
            ),
        }));
        for msg in &session.messages {
            // Collapse the richer role set onto the three chat-completions
            // roles: tool results read as user turns, tool calls as
            // assistant turns.
            let role = match msg.role {
                MessageRole::User | MessageRole::ToolResult => "user",
                MessageRole::Assistant | MessageRole::ToolUse => "assistant",
                MessageRole::System => "system",
            };
            let mut content = msg.content.clone();
            if msg.role == MessageRole::ToolUse {
                let name = msg
                    .metadata
                    .get("tool_name")
                    .map(|s| s.as_str())
                    .unwrap_or("tool");
                // Render tool invocations as fenced blocks tagged with the
                // tool name so they stay readable as plain text.
                content = format!("```{}\n{}\n```", name, content);
            }
            messages_out.push(serde_json::json!({"role": role, "content": content}));
        }
        let payload = serde_json::json!({
            "id": new_id,
            "title": format!("[i-self import] from {}", session.provider),
            "created": chrono::Utc::now().timestamp(),
            "messages": messages_out,
            "imported_from": {"provider": session.provider, "id": session.id},
        });
        // Propagate serialization failure instead of panicking (previously
        // `.unwrap()`): a failed export should surface as an error, not
        // bring the process down.
        let body = serde_json::to_string_pretty(&payload)
            .map_err(|e| ShareError::Parse(format!("serialize import payload: {}", e)))?;
        std::fs::write(&path, body).map_err(ShareError::Io)?;
        Ok(format!(
            "Wrote {} messages to {}. This is a generic OpenAI Chat Completions JSON — \
             feed it to mods, fabric, or any OpenAI SDK consumer.",
            session.messages.len() + 1,
            path.display()
        ))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Test helper: ensure `p`'s parent directory exists, then write `content`.
    fn write(p: &Path, content: &str) {
        std::fs::create_dir_all(p.parent().unwrap()).unwrap();
        std::fs::write(p, content).unwrap();
    }

    // A single conversation object in plain chat-completions shape loads by
    // its file stem, and a per-message `model` field lands in metadata.
    #[test]
    fn parses_canonical_openai_shape() {
        let tmp = tempfile::tempdir().unwrap();
        write(
            &tmp.path().join("conv.json"),
            r#"{
"id": "conv-1",
"title": "test",
"created": 1715000000,
"messages": [
{"role": "user", "content": "hi"},
{"role": "assistant", "content": "hello", "model": "gpt-4o"}
]
}"#,
        );
        let p = GenericOpenAIProvider::with_root(tmp.path().to_path_buf());
        let s = p.load_session("conv").unwrap();
        assert_eq!(s.messages.len(), 2);
        assert_eq!(s.messages[1].metadata.get("model").map(|x| x.as_str()), Some("gpt-4o"));
    }

    // ChatGPT export layout: messages live under `mapping` keyed by node id
    // and come out ordered by each node's `create_time`.
    #[test]
    fn parses_chatgpt_export_mapping_layout() {
        let tmp = tempfile::tempdir().unwrap();
        write(
            &tmp.path().join("chatgpt.json"),
            r#"{
"title": "from chatgpt",
"create_time": 1715000000.5,
"mapping": {
"node-1": {"message": {"author": {"role": "user"}, "content": {"parts": ["hi from chatgpt"]}, "create_time": 1715000001}},
"node-2": {"message": {"author": {"role": "assistant"}, "content": {"parts": ["hello from chatgpt"]}, "create_time": 1715000002}}
}
}"#,
        );
        let p = GenericOpenAIProvider::with_root(tmp.path().to_path_buf());
        let s = p.load_session("chatgpt").unwrap();
        assert_eq!(s.messages.len(), 2);
        assert_eq!(s.messages[0].content, "hi from chatgpt");
    }

    // A file holding an array of conversations lists as "<stem>-<index>"
    // ids, and each element is individually loadable by that id.
    #[test]
    fn parses_array_of_conversations() {
        let tmp = tempfile::tempdir().unwrap();
        write(
            &tmp.path().join("multi.json"),
            r#"[
{"title":"a","messages":[{"role":"user","content":"q1"}]},
{"title":"b","messages":[{"role":"user","content":"q2"}]}
]"#,
        );
        let p = GenericOpenAIProvider::with_root(tmp.path().to_path_buf());
        let sessions = p.list_sessions().unwrap();
        assert_eq!(sessions.len(), 2);
        assert!(sessions.iter().any(|s| s.id.ends_with("-0")));
        assert!(sessions.iter().any(|s| s.id.ends_with("-1")));
        let one = p.load_session("multi-1").unwrap();
        assert_eq!(one.messages.len(), 1);
        assert_eq!(one.messages[0].content, "q2");
    }

    // With no override and no env var, listing is an empty Ok, not an error.
    // NOTE(review): this mutates process-global env state and could race
    // with other tests that set ISELF_GENERIC_DIR when the harness runs
    // tests in parallel — confirm or serialize if that env var is ever set
    // elsewhere in the suite.
    #[test]
    fn list_returns_empty_when_unconfigured() {
        let p = GenericOpenAIProvider::default();
        std::env::remove_var("ISELF_GENERIC_DIR");
        assert!(p.list_sessions().unwrap().is_empty());
    }

    // Round trip: export through the importer, then reload the written file
    // with the provider. 2 original messages + 1 provenance banner = 3.
    #[test]
    fn round_trip_through_importer() {
        let src_dir = tempfile::tempdir().unwrap();
        write(
            &src_dir.path().join("src.json"),
            r#"{"messages":[{"role":"user","content":"refactor"},{"role":"assistant","content":"yes"}]}"#,
        );
        let session = GenericOpenAIProvider::with_root(src_dir.path().to_path_buf())
            .load_session("src")
            .unwrap();
        let dest_dir = tempfile::tempdir().unwrap();
        let opts = ImportOptions {
            dest_root_override: Some(dest_dir.path().to_path_buf()),
            ..ImportOptions::default()
        };
        GenericOpenAIImporter.import(&session, &opts).unwrap();
        // The importer names the output file with a fresh uuid; find it.
        let written: Vec<_> = std::fs::read_dir(dest_dir.path())
            .unwrap()
            .filter_map(|e| e.ok())
            .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
            .collect();
        assert_eq!(written.len(), 1);
        let new_id = written[0].path().file_stem().unwrap().to_str().unwrap().to_string();
        let reloaded = GenericOpenAIProvider::with_root(dest_dir.path().to_path_buf())
            .load_session(&new_id)
            .unwrap();
        assert_eq!(reloaded.messages.len(), 3);
    }
}