Observability

The [observability] tables configure LLM logging, application logging, metrics, and tracing.

LLM

[observability.llm]
prompt_log = true
response_log = true
token_cost = true

Logging

[observability.logging]
level = "info"
format = "json"

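With format = "json", each log record is written as a single JSON object per line. The exact fields depend on the implementation; the record below is illustrative only, not taken from this project's schema:

{"level": "info", "timestamp": "2024-01-01T00:00:00Z", "message": "server started"}
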
Metrics

[observability.metrics]
enabled = true
host = "localhost"
port = 9090
path = "/metrics"

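To check that the metrics endpoint is reachable, fetch the URL assembled from host, port, and path above. A minimal sketch in Python, assuming the endpoint serves a plain-text payload (Prometheus-style exposition is a common convention for port 9090 and /metrics, though this config does not confirm it):

# Hypothetical smoke test; host, port, and path mirror the
# [observability.metrics] table above.
from urllib.request import urlopen

host, port, path = "localhost", 9090, "/metrics"
with urlopen(f"http://{host}:{port}{path}") as resp:
    payload = resp.read().decode("utf-8")

# Print the first few lines of whatever the endpoint returned.
for line in payload.splitlines()[:10]:
    print(line)
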
Tracing

[observability.tracing]
