This is the LangChain wrapper for the Maxim JS SDK. Install it with:
npm install @maximai/maxim-js-langchain
// Initialize the Maxim SDK and obtain a logger bound to a log repository.
const maxim = new Maxim({ apiKey: "maxim-api-key" });
const logger = await maxim.logger({ id: "log-repository-id" });

// Start a trace and keep the returned handle — it is needed below to set
// the trace's input/output. (Previously the return value was discarded,
// leaving `trace` undefined when used later.)
const trace = logger.trace({ id: "trace-id" });

// Maxim tracer: forwards LangChain generation lifecycle events to the logger.
const maximTracer = new MaximLangchainTracer({
  onGenerationStart: (generationInfo) => {
    logger.traceGeneration("trace-id", generationInfo);
  },
  onGenerationEnd: (generationId, result) => {
    logger.generationResult(generationId, result);
  },
  onGenerationError: (generationId, error) => {
    logger.generationError(generationId, error);
  },
});

// Attach the tracer to the model via LangChain's callbacks mechanism so
// every generation made through this model is logged to Maxim.
const llm = new ChatOpenAI({
  openAIApiKey: openAIKey,
  modelName: "gpt-4o",
  temperature: 0,
  callbacks: [maximTracer],
});

const query = "What's the sum of 3 and 2?";
// Optional step to set input of the trace
trace.input = query;
const result = await llm.invoke(query);
// Optional step to set output of the trace
trace.output = result;
// Ending the trace
logger.traceEnd("trace-id");
- Improvement: Changed pinning of the core Maxim SDK dependency
- Fix: Token usage was not being captured for streaming responses
- Fix: Adds empty tags as {}
- Early preview