A package that provides simplified base components to make building and maintaining LLM-powered applications easier.
- Write functions powered by LLMs with easy-to-use building blocks.
- Pure JavaScript and TypeScript. Allows you to pass and infer types.
- Support for text-based (llama-3) and chat-based prompts (gpt-4o, claude-3.5).
- Supercharge your prompts by using handlebars within your prompt templates.
- Allow LLMs to call functions (or call other LLM executors).
- Not very opinionated. You have control over how you use it.
See full docs here
Install llm-exe using npm.
npm i llm-exe
import * as llmExe from "llm-exe";
// or
import { /* specific modules */ } from "llm-exe";
Below is a simple example:
import * as llmExe from "llm-exe";
/**
* Define a yes/no llm-powered function
*/
/**
 * Define a yes/no llm-powered function.
 *
 * Builds a chat prompt from a fixed system instruction, the caller's
 * question, and a trailing system nudge, then runs it through an LLM
 * executor whose parser constrains the reply to "yes" or "no".
 *
 * @param input - the question the model should answer with yes or no
 * @returns the parsed model answer ("yes" or "no")
 */
export async function YesOrNoBot<I extends string>(input: I) {
  // Model used by this executor; swap the identifier to change providers.
  const model = llmExe.useLlm("openai.gpt-4o-mini");

  // Pins the model to a single-word answer. Do not reword casually:
  // the parser below depends on a bare "yes" or "no" in the completion.
  const systemInstruction = `You are not an assistant, I need you to reply with only
'yes' or 'no' as an answer to the question below. Do not explain yourself
or ask questions. Answer with only yes or no.`;

  // Message order: system instruction -> user question -> system nudge.
  const chatPrompt = llmExe
    .createChatPrompt(systemInstruction)
    .addUserMessage(input)
    .addSystemMessage(`yes or no:`);

  // Restrict the raw completion to one of the two allowed tokens.
  const yesNoParser = llmExe.createParser("stringExtract", {
    enum: ["yes", "no"],
  });

  return llmExe
    .createLlmExecutor({ llm: model, prompt: chatPrompt, parser: yesNoParser })
    .execute({ input });
}
// Example invocation: ask a yes/no question and await the parsed answer.
// NOTE(review): the variable name mentions "sky blue" but the question asks
// about AI — consider renaming for clarity. Added the missing semicolon for
// consistency with the rest of the file.
const isTheSkyBlue = await YesOrNoBot(`Is AI cool?`);
/**
*
* The prompt sent to the LLM would be:
* (line breaks added for readability)
*
* [{
*   role: 'system',
*   content: 'You are not an assistant, I need you to reply with only
*     'yes' or 'no' as an answer to the question below. Do not explain yourself
*     or ask questions. Answer with only yes or no.'
* },
* {
*   role: 'user',
*   content: 'Is AI cool?'
* },
* {
*   role: 'system',
*   content: 'yes or no:'
* }]
*
*/
/**
*
* console.log(isTheSkyBlue)
* yes
*/