The Untrace SDK provides zero-latency LLM observability with automatic instrumentation for all major LLM providers. Built on OpenTelemetry standards, it captures comprehensive trace data and routes it to your chosen observability platforms.
```typescript
// Before SDK initialization
import OpenAI from 'openai';
import Anthropic from '@anthropic-ai/sdk';

// Initialize SDK - all imports are automatically instrumented
import { init } from '@untrace/sdk';

init({ apiKey: 'your-api-key' });

// Use providers normally - traces are captured automatically
const openai = new OpenAI();
const anthropic = new Anthropic();
```
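Once `init` has run, requests made through the instrumented clients are captured without any further changes to application code. A minimal sketch of a traced call, assuming an `OPENAI_API_KEY` is available in the environment (the model name is illustrative):

```typescript
// This request is recorded as a trace by the SDK; no extra wrapping is needed
const completion = await openai.chat.completions.create({
  model: 'gpt-4o-mini', // illustrative model name
  messages: [{ role: 'user', content: 'Summarize OpenTelemetry in one sentence.' }],
});

console.log(completion.choices[0].message.content);
```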
```typescript
import { init } from '@untrace/sdk';
import { ChatOpenAI } from '@langchain/openai';
import { ConversationChain } from 'langchain/chains';

init({ apiKey: 'your-api-key' });

// LangChain is automatically instrumented
const model = new ChatOpenAI({
  modelName: 'gpt-4',
  temperature: 0,
});
const chain = new ConversationChain({ llm: model });

// Traces capture the entire chain execution
const response = await chain.invoke({
  input: 'What is the meaning of life?',
});
```
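If the instrumentation hooks the underlying `ChatOpenAI` client, as the chain example above suggests, LCEL-style pipelines should be traced the same way. A sketch under that assumption:

```typescript
import { ChatPromptTemplate } from '@langchain/core/prompts';

// Compose a prompt with the instrumented model; the model call inside
// the pipeline is what produces the trace
const prompt = ChatPromptTemplate.fromMessages([
  ['system', 'You are a concise assistant.'],
  ['human', '{input}'],
]);

const pipeline = prompt.pipe(model);
const answer = await pipeline.invoke({ input: 'What is the meaning of life?' });
console.log(answer.content);
```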
```typescript
// Initialize as early as possible
import { init } from '@untrace/sdk';

init({ apiKey: process.env.UNTRACE_API_KEY! });

// Then import LLM libraries
import OpenAI from 'openai';
```
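Because static ESM imports are hoisted and evaluated before any other statements in a module, one way to guarantee `init` runs before provider modules are evaluated is to place it in a dedicated module and import that module first. A sketch of this pattern, with `instrumentation.ts` as a hypothetical file name:

```typescript
// instrumentation.ts (hypothetical file name) - initializes the SDK as a side effect
import { init } from '@untrace/sdk';

init({ apiKey: process.env.UNTRACE_API_KEY! });
```

```typescript
// index.ts - importing the instrumentation module first ensures init()
// has completed before the OpenAI module is evaluated
import './instrumentation';
import OpenAI from 'openai';

const openai = new OpenAI();
```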