# Iudex client

Iudex is infrastructure for building complex and accurate function-calling APIs. It provides a natural language interface that can carry out complex tasks or answer complex queries when given control of your own functions.

Check out [iudex.ai](https://iudex.ai) to sign up.
## Client

To access Iudex, we highly recommend using this JavaScript client.
## Installation

```bash
npm install iudex
# or
yarn add iudex
# or
pnpm add iudex
```
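Once installed, construct the client with your API key. A minimal setup sketch, assuming (as in the full example below) that the key is stored in an `IUDEX_API_KEY` environment variable loaded via dotenv:

```ts
import dotenv from 'dotenv';
dotenv.config();

import { Iudex } from 'iudex';

// IUDEX_API_KEY is loaded from a local .env file by dotenv.
const iudex = new Iudex({ apiKey: process.env.IUDEX_API_KEY });
```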
## Usage

### Example: using Iudex in place of OpenAI

Here, Iudex replaces the OpenAI client wherever function calling is used. Define `fnMap` to link every function you want the function-calling API to be able to call to its implementation. Iudex ignores all parameters except `messages`. Functions only need to be uploaded once.

```ts
import dotenv from 'dotenv';
dotenv.config();
import OpenAI from 'openai';
import { Iudex } from 'iudex';
import _ from 'lodash';
const iudex = new Iudex({ apiKey: process.env.IUDEX_API_KEY });
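// Register the function's JSON Schema with Iudex.
// Functions only need to be uploaded once.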
await iudex.uploadFunctions([
{
name: 'getCurrentWeather',
description: 'Gets the current weather',
parameters: {
type: 'object',
properties: {
location: {
type: 'string',
description: 'The city and state, e.g. San Francisco, CA',
},
unit: { type: 'string', enum: ['celsius', 'fahrenheit'] },
},
required: ['location'],
},
},
]);
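// Local implementation of the uploaded function (returns hard-coded example data).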
function getCurrentWeather({ location, unit }: { location: string; unit: string }) {
if (location.toLowerCase().includes('tokyo')) {
return { location: 'Tokyo', temperature: '10', unit: 'celsius' };
} else if (location.toLowerCase().includes('san francisco')) {
return { location: 'San Francisco', temperature: '72', unit: 'fahrenheit' };
} else if (location.toLowerCase().includes('paris')) {
return { location: 'Paris', temperature: '22', unit: 'fahrenheit' };
} else {
return { location, temperature: 'unknown' };
}
}
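// fnMap links each uploaded function name to its implementation
// so tool calls can be dispatched by name.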
const fnMap: Record<string, (...args: any[]) => any> = {
getCurrentWeather,
};
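// Build the chat history, ask a question, and send it to Iudex.
// All parameters other than `messages` are ignored by Iudex.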
const messages = createMessages();
messages.push({ role: 'user', content: `what is the weather in San Francisco?` });
messages.push(await iudex.chat.completions.create({
model: 'gpt-4-turbo-preview',
messages: messages.value,
tools: [],
tool_choice: 'auto',
}).then(res => res.choices[0].message));
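// Execute each requested tool call, append the result as a `tool` message,
// and request the next completion until no more tool calls are made.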
let toolMessage = _.last(messages.value);
while (toolMessage && messageHasToolCall(toolMessage)) {
const { function: fnCall, id: tool_call_id } = toolMessage.tool_calls[0];
const { name: fnName, arguments: fnArgs } = fnCall;
const fnReturn = await fnMap[fnName](JSON.parse(fnArgs));
messages.push({
role: 'tool',
tool_call_id,
content: JSON.stringify(fnReturn),
});
messages.push(await iudex.chat.completions.create({
model: 'gpt-4-turbo-preview',
messages: messages.value,
}).then(res => res.choices[0].message));
toolMessage = _.last(messages.value);
}
console.log('FINISHED', toolMessage);
// Message-history helper that logs every message as it is appended.
// Declared as a hoisted function declaration so it can be called above,
// before this point in the file is reached.
function createMessages() {
  const messagesHist: OpenAI.ChatCompletionMessageParam[] = [];
  return {
    push: (...items: OpenAI.ChatCompletionMessageParam[]) => {
      console.log('new message:', items);
      messagesHist.push(...items);
    },
    get value() {
      return messagesHist;
    },
  };
}
type OpenAIToolCallMessage = OpenAI.ChatCompletionAssistantMessageParam
& { tool_calls: OpenAI.ChatCompletionMessageToolCall[] };
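// Type guard: checks whether a message contains tool calls and narrows it
// to OpenAIToolCallMessage.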
function messageHasToolCall(
message: OpenAI.ChatCompletionMessageParam,
): message is OpenAIToolCallMessage {
return !!(message as OpenAI.ChatCompletionAssistantMessageParam).tool_calls;
}
```
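The same pattern should extend to additional functions: upload each function's schema and register its implementation in `fnMap` under the same name. A sketch, where `getCurrentTime` and its schema are hypothetical and only illustrate the shape:

```ts
// Hypothetical second function, shown only to illustrate the pattern.
function getCurrentTime({ timeZone }: { timeZone: string }) {
  return { timeZone, time: new Date().toLocaleTimeString('en-US', { timeZone }) };
}

// Upload its JSON Schema once, just like getCurrentWeather above.
await iudex.uploadFunctions([
  {
    name: 'getCurrentTime',
    description: 'Gets the current time in a given time zone',
    parameters: {
      type: 'object',
      properties: {
        timeZone: {
          type: 'string',
          description: 'An IANA time zone, e.g. America/Los_Angeles',
        },
      },
      required: ['timeZone'],
    },
  },
]);

// Register both implementations so tool calls can be dispatched by name.
const fnMap: Record<string, (...args: any[]) => any> = {
  getCurrentWeather,
  getCurrentTime,
};
```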