This package enhances the OpenAI Node SDK so that it integrates seamlessly with ScaleGenAI's Llama-3 function-calling models, letting developers easily incorporate advanced AI functionality into their applications.
To use this package, you need to import `CustomOpenAIClient` from `scalegen-function-calling-node` and `OpenAI` from the `openai` package. Here's a quick example to get you started:
```typescript
import { CustomOpenAIClient } from "scalegen-function-calling-node";
import type {
  ChatCompletionTool,
  ChatCompletionMessageParam,
} from "openai/resources/chat";
import OpenAI from "openai";

// Tool (function) definitions that the model is allowed to call.
const tools: ChatCompletionTool[] = [
  {
    type: "function",
    function: {
      name: "Expense",
      description: "Track an expense",
      parameters: {
        type: "object",
        properties: {
          description: { type: "string" },
          net_amount: { type: "number" },
          gross_amount: { type: "number" },
          tax_rate: { type: "number" },
          date: { type: "string", format: "date-time" },
        },
        required: [
          "description",
          "net_amount",
          "gross_amount",
          "tax_rate",
          "date",
        ],
      },
    },
  },
  {
    type: "function",
    function: {
      name: "ReportTool",
      description: "Generate a report",
      parameters: {
        type: "object",
        properties: {
          report: { type: "string" },
        },
        required: ["report"],
      },
    },
  },
];

const modelName = "ScaleGenAI/Llama3-70B-Function-Calling";
const apiKey = "<YOUR_API_KEY>";
const apiEndpoint = "<YOUR_API_ENDPOINT>";

const messages: ChatCompletionMessageParam[] = [
  {
    role: "user",
    content:
      "I have spent $5 on a coffee today, please track my expense. The tax rate is 0.2.",
  },
];

async function main() {
  // Point the official OpenAI client at the ScaleGenAI endpoint,
  // then wrap it with the custom client for function calling.
  const client = new OpenAI({
    apiKey: apiKey,
    baseURL: apiEndpoint,
  });
  const customClient = new CustomOpenAIClient(client);

  try {
    const response = await customClient.chat.completions.create({
      model: modelName,
      messages: messages,
      tools: tools,
      stream: false,
    });
    console.log(JSON.stringify(response, null, 2));
  } catch (error) {
    console.error("Error:", error);
  }
}

main();
```
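
When the model decides to invoke one of the tools, the tool calls should surface on the returned message. The snippet below is a minimal sketch of how you might inspect them, assuming `CustomOpenAIClient` returns standard `ChatCompletion` objects from the `openai` package; the exact response shape depends on ScaleGenAI's implementation, and `handleToolCalls` is a hypothetical helper, not part of this package.

```typescript
import type { ChatCompletion } from "openai/resources/chat";

// Sketch only: assumes the wrapped client returns the standard OpenAI
// ChatCompletion shape. Replace the logging with your own tool handlers.
function handleToolCalls(response: ChatCompletion): void {
  const toolCalls = response.choices[0]?.message?.tool_calls ?? [];

  for (const call of toolCalls) {
    if (call.type === "function") {
      // Arguments arrive as a JSON string and must be parsed before use.
      const args = JSON.parse(call.function.arguments);
      console.log(`Model requested ${call.function.name} with`, args);
    }
  }
}
```

You could call `handleToolCalls(response)` right after the `create` call in `main` above to dispatch each requested function to your own application logic.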