Maintained by Optible
This is Langfuse's unofficial Go client, designed to enable you to use Langfuse's services easily from your own applications.
Langfuse provides traces, evals, prompt management and metrics to debug and improve your LLM application.
- Full Ingestion API Support: Traces, Generations, Spans, Events, and Scores
- Prompt Management: Fetch and cache prompts with version control and labels
- Smart Caching: Built-in caching with configurable TTL for prompts
- Fallback Support: Graceful degradation with fallback prompts
- Batch Processing: Efficient batching of ingestion events
- Type Safety: Strongly typed models for all API interactions
- Context Support: Full Go context support for cancellation and timeouts
| Feature | Status | Description |
|---|---|---|
| Trace | ✅ | Create and manage execution traces |
| Generation | ✅ | Track LLM generations with metadata |
| Span | ✅ | Measure execution spans within traces |
| Event | ✅ | Log custom events in traces |
| Score | ✅ | Add evaluations and scores to traces/sessions |
| DeleteScore | ✅ | Delete scores by ID |
| GetPrompt | ✅ | Fetch prompts with caching, versioning, and labels |
You can load langfuse-go into your project by using:
go get github.com/optible/langfuse-go
Just like the official Python SDK, these three environment variables will be used to configure the Langfuse client:
- LANGFUSE_HOST: The host of the Langfuse service.
- LANGFUSE_PUBLIC_KEY: Your public key for the Langfuse service.
- LANGFUSE_SECRET_KEY: Your secret key for the Langfuse service.
Please refer to the examples folder to see how to use the SDK.
Here's a simple example showing how to create traces, spans, generations, events, and scores:
package main
import (
"context"
"github.com/optible/langfuse-go"
"github.com/optible/langfuse-go/model"
)
func main() {
ctx := context.Background()
l := langfuse.New(ctx)
// Create a trace
trace, err := l.Trace(&model.Trace{
Name: "my-llm-app",
SessionID: "user-session-123",
})
if err != nil {
panic(err)
}
// Create a span within the trace
span, err := l.Span(&model.Span{
Name: "data-processing",
TraceID: trace.ID,
}, nil)
if err != nil {
panic(err)
}
// Track an LLM generation
generation, err := l.Generation(
&model.Generation{
TraceID: trace.ID,
Name: "chat-completion",
Model: "gpt-3.5-turbo",
ModelParameters: model.M{
"maxTokens": "1000",
"temperature": "0.9",
},
Input: []model.M{
{
"role": "system",
"content": "You are a helpful assistant.",
},
{
"role": "user",
"content": "Please generate a summary of the following documents...",
},
},
Metadata: model.M{
"environment": "production",
},
},
&span.ID,
)
if err != nil {
panic(err)
}
// Log an event
_, err = l.Event(
&model.Event{
Name: "user-feedback",
TraceID: trace.ID,
Input: model.M{
"feedback": "positive",
},
},
&generation.ID,
)
if err != nil {
panic(err)
}
// Update generation with output
generation.Output = model.M{
"completion": "Here is the summary...",
}
_, err = l.GenerationEnd(generation)
if err != nil {
panic(err)
}
// Add a score
score, err := l.Score(
&model.Score{
TraceID: trace.ID,
Name: "quality-score",
Value: 0.95,
Comment: "High quality response",
},
)
if err != nil {
panic(err)
}
// Delete a score (optional)
// err = l.DeleteScore(ctx, score.ID)
// if err != nil {
// panic(err)
// }
// End the span
_, err = l.SpanEnd(span)
if err != nil {
panic(err)
}
// Flush all pending events
l.Flush(ctx)
}The SDK supports deleting scores once they've been created. This is useful for removing incorrect or outdated scores:
package main
import (
"context"
"github.com/optible/langfuse-go"
"github.com/optible/langfuse-go/model"
)
func main() {
ctx := context.Background()
l := langfuse.New(ctx)
// Create a trace
trace, err := l.Trace(&model.Trace{
Name: "my-llm-app",
})
if err != nil {
panic(err)
}
// Add a score and capture its ID
score, err := l.Score(&model.Score{
TraceID: trace.ID,
Name: "quality-score",
Value: 0.95,
Comment: "Initial quality assessment",
})
if err != nil {
panic(err)
}
// Delete the score if needed (e.g., if it was incorrect)
err = l.DeleteScore(ctx, score.ID)
if err != nil {
panic(err)
}
// Flush all pending events
l.Flush(ctx)
}Note: Score deletion is asynchronous on the Langfuse backend. The score may remain visible for a short time after deletion.
The SDK includes powerful prompt management capabilities with caching, versioning, and fallback support:
package main
import (
"context"
"fmt"
"time"
"github.com/optible/langfuse-go"
"github.com/optible/langfuse-go/model"
)
func main() {
	ctx := context.Background()

	// Create a client with a custom prompt-cache TTL.
	l := langfuse.New(ctx).WithPromptCacheTTL(10 * time.Minute)

	// Fetch a prompt (defaults to the "production" label).
	prompt, err := l.GetPrompt(ctx, "movie-critic", nil)
	if err != nil {
		panic(err)
	}
	fmt.Printf("Fetched prompt: %s (version %d)\n", prompt.GetName(), prompt.GetVersion())

	// Use text prompts: substitute {{variables}} into the template.
	if prompt.IsText() {
		compiled := prompt.TextPrompt.Compile(map[string]string{
			"movie": "The Matrix",
			"style": "technical",
		})
		fmt.Printf("Compiled prompt: %s\n", compiled)
	}

	// Use chat prompts: compile each message in the conversation template.
	if prompt.IsChat() {
		messages := prompt.ChatPrompt.Compile(map[string]string{
			"movie": "The Matrix",
			"style": "technical",
		})
		for i, msg := range messages {
			fmt.Printf("Message %d [%s]: %s\n", i+1, msg.Role, msg.Content)
		}
	}

	// Fetch a specific version
	version := 2
	promptV2, err := l.GetPrompt(ctx, "movie-critic", &langfuse.GetPromptOptions{
		Version: &version,
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("Fetched pinned version: %d\n", promptV2.GetVersion())

	// Fetch by label (e.g., "staging")
	label := "staging"
	promptStaging, err := l.GetPrompt(ctx, "movie-critic", &langfuse.GetPromptOptions{
		Label: &label,
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("Fetched %q prompt: %s\n", label, promptStaging.GetName())

	// Use a fallback prompt for high availability.
	promptWithFallback, err := l.GetPrompt(ctx, "movie-critic", &langfuse.GetPromptOptions{
		FallbackPrompt: &model.Prompt{
			TextPrompt: &model.TextPrompt{
				Name:    "movie-critic",
				Version: 0,
				Prompt:  "Please review {{movie}} in a {{style}} style.",
				Type:    model.PromptTypeText,
			},
		},
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("Prompt with fallback: %s\n", promptWithFallback.GetName())

	// Force refresh (bypass cache)
	promptFresh, err := l.GetPrompt(ctx, "movie-critic", &langfuse.GetPromptOptions{
		ForceRefresh: true,
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("Refreshed prompt version: %d\n", promptFresh.GetVersion())

	// Clear cache when needed
	l.ClearPromptCache()
}