from __future__ import annotations
from pathlib import Path
import duckdb
# Base directory for all on-disk state.
HOME = Path.home()
# SQLite database holding metadata tables; queried below as
# meta.messages, meta.crashes, and meta.ticks once attached.
META_DB = HOME / ".netsky" / "meta.db"
# Glob patterns for agent session JSONL logs (stored as Path objects;
# only ever rendered via as_posix() for read_json_auto, so the embedded
# "*" segments are never resolved by pathlib itself).
# NOTE(review): assumes these layouts match the actual CLI session
# directories on disk — confirm against a real install.
CLAUDE_SESSIONS = HOME / ".claude" / "projects" / "*" / "sessions" / "*.jsonl"
CODEX_SESSIONS = HOME / ".codex" / "sessions" / "*.jsonl"
def attach_meta(con: duckdb.DuckDBPyConnection) -> None:
    """Attach the SQLite metadata database to *con* as schema ``meta``.

    No-op when the database file does not exist, so callers can still
    run the session-glob scans without any metadata present.
    """
    if META_DB.exists():
        # DuckDB's ATTACH does not take bound parameters for the path, so
        # escape single quotes manually: a home directory containing "'"
        # would otherwise break out of the SQL string literal.
        path = str(META_DB).replace("'", "''")
        con.execute(f"ATTACH '{path}' AS meta (TYPE SQLITE)")
def maybe_count_jsonl(con: duckdb.DuckDBPyConnection, glob: Path) -> int:
    """Count rows across all JSONL files matching *glob*, or 0 on any failure.

    Best-effort by design: a missing directory, an unreadable file, or a
    query error all yield 0 rather than raising, because session logs may
    simply not exist on this machine.
    """
    # Escape single quotes so the glob text cannot break out of the SQL
    # string literal (read_json_auto takes the pattern as a literal, not
    # as a bound parameter).
    pattern = glob.as_posix().replace("'", "''")
    sql = f"""
    SELECT COUNT(*)
    FROM read_json_auto('{pattern}', union_by_name=true, ignore_errors=true, filename=true)
    """
    try:
        row = con.execute(sql).fetchone()
        # COUNT(*) always yields one row, but guard against a None result
        # rather than crash on the index.
        return int(row[0]) if row is not None else 0
    except Exception:
        # Deliberately broad: this scan is opportunistic and any failure
        # (no files, bad JSON, driver error) means "nothing countable".
        return 0
def main() -> None:
    """Scan agent session logs and print a small markdown-style report.

    Counts JSONL rows for the Claude and Codex session globs, and — when
    the metadata database exists — prints up to 20 summary rows covering
    messages/day, crashes/agent, and tick gaps over 5 minutes.
    """
    con = duckdb.connect(":memory:")
    try:
        attach_meta(con)
        claude_rows = maybe_count_jsonl(con, CLAUDE_SESSIONS)
        codex_rows = maybe_count_jsonl(con, CODEX_SESSIONS)
        if META_DB.exists():
            summary = con.execute(
                """
                SELECT
                    metric,
                    bucket,
                    value,
                    COALESCE(max_gap_min, 0) AS max_gap_min
                FROM (
                    SELECT
                        'messages/day' AS metric,
                        date(ts_utc::TIMESTAMPTZ)::VARCHAR AS bucket,
                        COUNT(*)::BIGINT AS value,
                        NULL::BIGINT AS max_gap_min
                    FROM meta.messages
                    GROUP BY 1, 2
                    UNION ALL
                    SELECT
                        'crashes/agent' AS metric,
                        agent AS bucket,
                        COUNT(*)::BIGINT AS value,
                        NULL::BIGINT AS max_gap_min
                    FROM meta.crashes
                    GROUP BY 1, 2
                    UNION ALL
                    SELECT
                        'tick gaps >5m' AS metric,
                        source AS bucket,
                        COUNT(*) FILTER (WHERE gap_min > 5)::BIGINT AS value,
                        MAX(gap_min)::BIGINT AS max_gap_min
                    FROM (
                        SELECT
                            source,
                            date_diff('minute', LAG(ts_utc::TIMESTAMPTZ) OVER (
                                PARTITION BY source ORDER BY ts_utc::TIMESTAMPTZ
                            ), ts_utc::TIMESTAMPTZ) AS gap_min
                        FROM meta.ticks
                    )
                    GROUP BY 1, 2
                )
                ORDER BY metric, bucket DESC
                LIMIT 20
                """
            ).fetchall()
        else:
            summary = []
    finally:
        # The in-memory DB has nothing to persist, but closing releases the
        # attached SQLite file handle promptly instead of leaking it.
        con.close()
    print("scanned session rows")
    print(f"claude: {claude_rows}")
    print(f"codex: {codex_rows}")
    print()
    print("metric | bucket | value | max gap min")
    print("--- | --- | ---: | ---:")
    for metric, bucket, value, max_gap in summary:
        print(f"{metric} | {bucket} | {value} | {max_gap}")
# Allow importing this module (e.g. to reuse maybe_count_jsonl) without
# running the report.
if __name__ == "__main__":
    main()