# telemetry-server 0.1.0
#
# Simple receiver of telemetry over HTTP POST/WS to sqlite3, Postgres, DuckDB or JSON files
# Send a sample telemetry payload to the local receiver.
# 4318 is the conventional OTLP/HTTP port and the body is JSON, so declare the
# content type explicitly — curl would otherwise send
# application/x-www-form-urlencoded, which JSON-expecting receivers reject.
submit-sample:
    curl http://localhost:4318 -v -H 'Content-Type: application/json' --data-binary @sample.json

json-files-dir := "json_files"

# Export the sqlite telemetry database as zstd-compressed JSON-lines files
# (one JSON object per row) into {{ json-files-dir }}.
sqlite-to-json-files:
    mkdir -p {{ json-files-dir }}
    # streams: build one json_object per row inline and compress on the fly.
    sqlite3 telemetry.db "select json_object('stream_id', stream_id, 'start_datetime', start_datetime, 'headers', headers) from streams" | zstd > {{ json-files-dir / "streams.sqlite.json.zst" }}
    # events: query lives in an external script.
    # NOTE(review): unlike the duckdb scripts below, this path has no .sql
    # extension — confirm the file is really named sql/sqlite-events-to-json-files.
    sqlite3 telemetry.db < sql/sqlite-events-to-json-files | zstd > {{ json-files-dir / "events.sqlite.json.zst" }}

# Rebuild the duckdb database file from the exported JSON files:
# drop any previous database, recreate the tables, then load the data.
duckdb-tables-from-json-files:
    # rm -f is silent and succeeds when the file does not exist; the previous
    # `-rm` (just's ignore-errors prefix) still printed an error on first run.
    rm -f duckdb-json-files-tables
    duckdb duckdb-json-files-tables < sql/duckdb-json-files-tables.sql
    duckdb duckdb-json-files-tables < sql/json-files-to-duckdb-tables.sql

merge-json-files: (merge-json-files-table "events") (merge-json-files-table "streams")

# Concatenate every json_files/<table>.file.*.json.zst into one freshly named
# .merged.json.zst file, reporting the line count and sizes. The source files
# are left in place; uncomment the trash line to remove them after a merge.
merge-json-files-table table:
    #!/usr/bin/env bash
    # pipefail: without it a failing `zstd -dcf` is masked by tee/wc and the
    # recipe "succeeds" with a truncated merge.
    set -euo pipefail
    declare -a old=(json_files/{{table}}.file.*.json.zst)
    # Under default bash an unmatched glob stays literal — fail early instead
    # of handing zstd a nonexistent path.
    [[ -e "${old[0]}" ]] || { echo "no json_files/{{table}}.file.*.json.zst files to merge" >&2; exit 1; }
    new="$(mktemp json_files/{{table}}.merged.json.zst.XXXXXX)"
    # tee splits the decompressed stream: one copy recompressed into $new,
    # the other counted. $new is quoted so the redirection target is one word.
    lines=$(zstd -dcf "${old[@]}" | tee >(zstd > "$new") | wc -l)
    echo merged $lines lines from ${#old[@]} files into $new
    du -hs "${old[@]}"
    du -hs "$new"
    # trash -v "${old[@]}"

# Print the line count of each table's file-level exports, prefixed with the
# table name on the same line.
count-file-lines:
    # printf instead of `echo -n`: just runs recipe lines with `sh`, and
    # POSIX sh echo does not portably honour -n (it may print "-n" literally).
    printf '%s' events && just count-file-lines-table events
    printf '%s' streams && just count-file-lines-table streams

# Count the decompressed lines across all file-level exports for `table`.
count-file-lines-table table:
    zstd --decompress --stdout json_files/{{table}}.file.*.json.zst | wc -l