<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Otelite Dashboard</title>
<!-- Description aids search/share previews for the locally served dashboard -->
<meta name="description" content="Otelite dashboard for browsing OpenTelemetry logs, traces, metrics, and GenAI usage.">
<link rel="stylesheet" href="/css/styles.css">
</head>
<body>
<div id="app">
<!-- Header: app title, view switcher, and server connection status -->
<header class="header">
<div class="header-content">
<h1 class="logo">Otelite Dashboard</h1>
<nav class="nav" aria-label="Views">
<!-- Explicit type="button": the default button type is "submit", which would
     submit an enclosing form if this header is ever rendered inside one -->
<button class="nav-btn active" data-view="logs" type="button">Logs</button>
<button class="nav-btn" data-view="traces" type="button">Traces</button>
<button class="nav-btn" data-view="metrics" type="button">Metrics</button>
<button class="nav-btn" data-view="usage" type="button">Usage</button>
<button class="nav-btn" data-view="setup" type="button">Setup</button>
</nav>
<div class="connection-status">
<!-- NOTE(review): role="button" promises Enter/Space keyboard activation —
     confirm app.js binds a keydown handler on #status-wrapper, or replace
     this div with a native <button type="button"> and restyle. -->
<div class="status-wrapper" id="status-wrapper" role="button" tabindex="0" aria-label="Server status">
<span class="status-indicator" id="status-indicator"></span>
<span class="status-text" id="status-text">Connected</span>
</div>
<!-- Popover content presumably injected by app.js — confirm -->
<div class="status-popover" id="status-popover"></div>
</div>
</div>
</header>
<!-- Main Content Area -->
<main class="main-content">
<!-- Logs View: search box, severity filter, export action, and log stream container -->
<div id="logs-view" class="view active">
<div class="view-header">
<h2>Logs</h2>
<div class="controls">
<!-- aria-label gives each control an accessible name; a placeholder alone
     is not a label (disappears on input, unreliable with screen readers) -->
<input type="text" id="log-search" class="search-input" placeholder="Search logs..." aria-label="Search logs">
<select id="severity-filter" class="filter-select" aria-label="Filter by severity">
<option value="">All Severities</option>
<option value="ERROR">Error</option>
<option value="WARN">Warning</option>
<option value="INFO">Info</option>
<option value="DEBUG">Debug</option>
<option value="TRACE">Trace</option>
</select>
<!-- type="button": default "submit" would submit an enclosing form -->
<button id="export-logs" class="btn-secondary" type="button">Export</button>
</div>
</div>
<div id="logs-container" class="logs-container">
<div class="empty-state">
<p>No logs yet</p>
<p class="empty-state-hint">Logs will appear here as they are received. See the <strong>Setup</strong> tab for configuration instructions.</p>
</div>
</div>
</div>
<!-- Traces View: status filter and trace list container -->
<div id="traces-view" class="view">
<div class="view-header">
<h2>Traces</h2>
<div class="controls">
<!-- aria-label provides an accessible name; the select has no visible <label> -->
<select id="trace-status-filter" class="filter-select" aria-label="Filter traces by status">
<option value="">All Statuses</option>
<option value="ok">OK</option>
<option value="error">Error</option>
</select>
</div>
</div>
<div id="traces-container" class="traces-container">
<div class="empty-state">
<p>No traces available</p>
<p class="empty-state-hint">Traces will appear here as they are received</p>
</div>
</div>
</div>
<!-- Metrics View: time-range selector plus a list/graph split layout -->
<div id="metrics-view" class="view">
<div class="view-header">
<h2>Metrics</h2>
<div class="controls">
<!-- aria-label provides an accessible name; the select has no visible <label>.
     Option values are the range in seconds. -->
<select id="time-range" class="filter-select" aria-label="Time range">
<option value="300">Last 5 minutes</option>
<option value="3600">Last 1 hour</option>
<option value="21600">Last 6 hours</option>
<option value="86400">Last 24 hours</option>
</select>
</div>
</div>
<div class="metrics-layout">
<div id="metrics-list" class="metrics-list">
<div class="empty-state">
<p>No metrics available</p>
<p class="empty-state-hint">Metrics will appear here as they are received</p>
</div>
</div>
<div id="metrics-graph" class="metrics-graph">
<div class="empty-state">
<p>Select a metric to view</p>
</div>
</div>
</div>
</div>
<!-- Usage View: empty shell — content presumably rendered at runtime by /js/usage.js; confirm -->
<div id="usage-view" class="view">
<div class="view-header">
<h2>GenAI Usage</h2>
</div>
<div id="usage-container"></div>
</div>
<!-- Setup View -->
<div id="setup-view" class="view">
<div class="view-header">
<h2>Setup</h2>
</div>
<div class="setup-container">
<div class="onboarding-section">
<h3>Otelite is listening on</h3>
<ul class="endpoint-list">
<li><strong>OTLP gRPC:</strong> <code>localhost:4317</code></li>
<li><strong>OTLP HTTP:</strong> <code>http://localhost:4318</code></li>
</ul>
</div>
<div class="onboarding-section">
<h3>Configuration examples</h3>
<details class="config-example">
<summary><strong>Claude Code (Environment Variables)</strong></summary>
<pre><code># Set these environment variables before running Claude Code.
# Telemetry is off by default — CLAUDE_CODE_ENABLE_TELEMETRY=1 is required.
export CLAUDE_CODE_ENABLE_TELEMETRY=1
export OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318
export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
export OTEL_SERVICE_NAME=claude-code
export OTEL_LOGS_EXPORTER=otlp
export OTEL_METRICS_EXPORTER=otlp
# Optional: distributed traces (beta) — interaction / llm_request / tool spans
export CLAUDE_CODE_ENHANCED_TELEMETRY_BETA=1
export OTEL_TRACES_EXPORTER=otlp
# Optional: include prompt / tool input content in events (off by default)
# export OTEL_LOG_USER_PROMPTS=1
# export OTEL_LOG_TOOL_DETAILS=1</code></pre>
<p class="config-doc-link">Official docs: <a href="https://docs.claude.com/en/docs/claude-code/monitoring-usage" target="_blank" rel="noopener">docs.claude.com › Claude Code › Monitoring usage</a> — authoritative list of env vars, metric names, event names, and attributes.</p>
</details>
<details class="config-example">
<summary><strong>Claude Agent SDK</strong></summary>
<pre><code># The Agent SDK runs the Claude Code CLI as a child process,
# so the same env vars apply. Set them before launching your agent:
export CLAUDE_CODE_ENABLE_TELEMETRY=1
export OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318
export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
export OTEL_LOGS_EXPORTER=otlp
export OTEL_METRICS_EXPORTER=otlp
# Note: do NOT use OTEL_*_EXPORTER=console under the SDK
# (console output collides with the SDK's message channel).</code></pre>
<p class="config-doc-link">Official docs: <a href="https://code.claude.com/docs/en/agent-sdk/observability" target="_blank" rel="noopener">code.claude.com › Agent SDK › Observability with OpenTelemetry</a>.</p>
</details>
<details class="config-example">
<summary><strong>Python (OpenTelemetry SDK)</strong></summary>
<pre><code>from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
exporter = OTLPSpanExporter(endpoint="http://localhost:4318/v1/traces")
provider = TracerProvider()
provider.add_span_processor(BatchSpanProcessor(exporter))
trace.set_tracer_provider(provider)
tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("my-operation"):
print("Hello from Python!")</code></pre>
<p class="config-doc-link">Official docs: <a href="https://opentelemetry.io/docs/languages/python/" target="_blank" rel="noopener">opentelemetry.io › Python</a>.</p>
</details>
<details class="config-example">
<summary><strong>Mellea (IBM generative programs library)</strong></summary>
<pre><code># Install the telemetry extra:
# pip install "mellea[telemetry]"
# Mellea uses the gRPC OTLP exporter → point at otelite's :4317.
export OTEL_SERVICE_NAME=mellea
export OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
# Tracing — enable application (m.instruct / m.chat / @generative /
# sampling / requirement validation) and backend (raw LLM calls) scopes.
export MELLEA_TRACE_APPLICATION=true
export MELLEA_TRACE_BACKEND=true
# Metrics — mellea.llm.tokens.input / .output counters
export MELLEA_METRICS_ENABLED=true
export MELLEA_METRICS_OTLP=true
# Optional: also export logs via OTLP
export MELLEA_LOGS_OTLP=true
# Optional: console mirror for debugging
# export MELLEA_TRACE_CONSOLE=true
# Then run your program. Backend spans follow OTel GenAI semconv, so
# token counts, cost, finish-reasons, and chat bubbles all work on the
# Usage page and in the trace detail without extra config.</code></pre>
<p class="config-doc-link">Official docs: <a href="https://docs.mellea.ai/evaluation-and-observability/telemetry" target="_blank" rel="noopener">docs.mellea.ai › Evaluation &amp; Observability › Telemetry</a> and <a href="https://docs.mellea.ai/evaluation-and-observability/tracing" target="_blank" rel="noopener">Tracing</a>. Source: <a href="https://github.com/generative-computing/mellea" target="_blank" rel="noopener">generative-computing/mellea</a>.</p>
</details>
<details class="config-example">
<summary><strong>Python (OpenAI SDK — OpenLLMetry / Traceloop)</strong></summary>
<pre><code># pip install opentelemetry-instrumentation-openai
from opentelemetry import trace
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
import openai
exporter = OTLPSpanExporter(endpoint="http://localhost:4318/v1/traces")
provider = TracerProvider()
provider.add_span_processor(BatchSpanProcessor(exporter))
# Register globally so the instrumentor's spans reach the OTLP exporter
trace.set_tracer_provider(provider)
OpenAIInstrumentor().instrument()
client = openai.OpenAI()
response = client.chat.completions.create(
model="gpt-4",
messages=[{"role": "user", "content": "Hello!"}]
)
# Default: prompts/completions are captured on span attributes.
# Disable with: export TRACELOOP_TRACE_CONTENT=false</code></pre>
<p class="config-doc-link">Official docs: <a href="https://github.com/traceloop/openllmetry/tree/main/packages/opentelemetry-instrumentation-openai" target="_blank" rel="noopener">traceloop/openllmetry › opentelemetry-instrumentation-openai</a>.</p>
</details>
<details class="config-example">
<summary><strong>Python (Anthropic SDK — OpenLLMetry / Traceloop)</strong></summary>
<pre><code># pip install opentelemetry-instrumentation-anthropic
from opentelemetry import trace
from opentelemetry.instrumentation.anthropic import AnthropicInstrumentor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
import anthropic
exporter = OTLPSpanExporter(endpoint="http://localhost:4318/v1/traces")
provider = TracerProvider()
provider.add_span_processor(BatchSpanProcessor(exporter))
# Register globally so the instrumentor's spans reach the OTLP exporter
trace.set_tracer_provider(provider)
AnthropicInstrumentor().instrument()
client = anthropic.Anthropic()
message = client.messages.create(
model="claude-sonnet-4-5",
max_tokens=1024,
messages=[{"role": "user", "content": "Hello!"}],
)</code></pre>
<p class="config-doc-link">Official docs: <a href="https://github.com/traceloop/openllmetry/tree/main/packages/opentelemetry-instrumentation-anthropic" target="_blank" rel="noopener">traceloop/openllmetry › opentelemetry-instrumentation-anthropic</a>. OpenLLMetry also covers Bedrock, Cohere, Gemini, Groq, Mistral, Ollama, VertexAI — same pattern; see the <a href="https://github.com/traceloop/openllmetry#%EF%B8%8F-what-do-we-instrument" target="_blank" rel="noopener">full list</a>.</p>
</details>
<details class="config-example">
<summary><strong>Python (OpenAI / Anthropic — OpenInference / Arize)</strong></summary>
<pre><code># pip install openinference-instrumentation-openai opentelemetry-sdk \
# opentelemetry-exporter-otlp
from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
import openai
provider = TracerProvider()
provider.add_span_processor(BatchSpanProcessor(
OTLPSpanExporter(endpoint="http://localhost:4318/v1/traces")))
OpenAIInstrumentor().instrument(tracer_provider=provider)
# Swap OpenAIInstrumentor for AnthropicInstrumentor (from
# openinference.instrumentation.anthropic) for Claude apps, or for
# LangChainInstrumentor, LlamaIndexInstrumentor, etc.</code></pre>
<p class="config-doc-link">Official docs: <a href="https://github.com/Arize-ai/openinference" target="_blank" rel="noopener">Arize-ai/openinference</a> — covers OpenAI, Anthropic, Bedrock, Mistral, LangChain, LlamaIndex, DSPy, CrewAI, LiteLLM, and more.</p>
</details>
<details class="config-example">
<summary><strong>Node.js (OpenTelemetry SDK)</strong></summary>
<pre><code>// SDK 2.x: span processors are passed via the constructor
// (TracerProvider.addSpanProcessor was removed in OpenTelemetry JS SDK 2.0)
const { NodeTracerProvider } = require('@opentelemetry/sdk-trace-node');
const { OTLPTraceExporter } = require('@opentelemetry/exporter-trace-otlp-http');
const { BatchSpanProcessor } = require('@opentelemetry/sdk-trace-base');
const provider = new NodeTracerProvider({
  spanProcessors: [new BatchSpanProcessor(new OTLPTraceExporter({
    url: 'http://localhost:4318/v1/traces'
  }))]
});
provider.register();</code></pre>
</details>
<details class="config-example">
<summary><strong>Go (OpenTelemetry SDK)</strong></summary>
<pre><code>exporter, _ := otlptracehttp.New(context.Background(),
otlptracehttp.WithEndpoint("localhost:4318"),
otlptracehttp.WithInsecure(),
)
provider := trace.NewTracerProvider(trace.WithBatcher(exporter))
otel.SetTracerProvider(provider)</code></pre>
</details>
<details class="config-example">
<summary><strong>Test with otel-cli</strong></summary>
<pre><code># Install: https://github.com/equinix-labs/otel-cli
otel-cli exec --endpoint http://localhost:4318 --service my-test -- echo "Hello Otelite!"</code></pre>
</details>
</div>
<div class="onboarding-section">
<h3>CLI commands</h3>
<ul>
<li><code>otelite logs list</code> — view recent logs</li>
<li><code>otelite traces list</code> — view recent traces</li>
<li><code>otelite metrics list</code> — view recent metrics</li>
<li><code>otelite tui</code> — launch terminal UI</li>
</ul>
</div>
</div>
</div>
</main>
<!-- Page footer: project repository link (opens in a new tab) -->
<footer class="footer">
<a href="https://github.com/planetf1/otelite" target="_blank" rel="noopener">github.com/planetf1/otelite</a>
</footer>
</div>
<!-- Loading Overlay: hidden by default; visibility presumably toggled by app.js — confirm -->
<div id="loading-overlay" class="loading-overlay hidden">
<div class="spinner"></div>
</div>
<!-- Scripts -->
<script type="module" src="/js/api.js"></script>
<script type="module" src="/js/logs.js"></script>
<script type="module" src="/js/traces.js"></script>
<script type="module" src="/js/metrics.js"></script>
<script type="module" src="/js/usage.js"></script>
<script type="module" src="/js/app.js"></script>
</body>
</html>