🚀 Big News: Socket Acquires Coana to Bring Reachability Analysis to Every Appsec Team.Learn more
Socket
DemoInstallSign in
Socket

feathers-openai

Package Overview
Dependencies
Maintainers
1
Versions
11
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

feathers-openai - npm Package Compare versions

Comparing version

to
0.0.3

.release-it.js

8

lib/openai/openai.completion.class.js

@@ -7,4 +7,5 @@ "use strict";

exports.completionOptions = exports.CompletionService = void 0;
var _errors = require("@feathersjs/errors");
var _openaiSetup = require("./openai.setup.js");
var _errorHandler = _interopRequireDefault(require("../errorHandler.js"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class CompletionService {

@@ -17,6 +18,3 @@ constructor(options) {

...data
}).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
}).then(r => r.data).catch(_errorHandler.default);
}

@@ -23,0 +21,0 @@ async setup(app, path) {

@@ -7,4 +7,5 @@ "use strict";

exports.editOptions = exports.EditService = void 0;
var _errors = require("@feathersjs/errors");
var _errorHandler = _interopRequireDefault(require("../errorHandler.js"));
var _openaiSetup = require("./openai.setup.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class EditService {

@@ -17,6 +18,3 @@ constructor(options) {

...data
}).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
}).then(r => r.data).catch(_errorHandler.default);
}

@@ -23,0 +21,0 @@ async setup(app, path) {

@@ -7,4 +7,5 @@ "use strict";

exports.embeddingOptions = exports.embeddingMethods = exports.EmbeddingService = void 0;
var _errors = require("@feathersjs/errors");
var _openaiSetup = require("./openai.setup.js");
var _errorHandler = _interopRequireDefault(require("../errorHandler.js"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class EmbeddingService {

@@ -17,6 +18,3 @@ constructor(options) {

...data
}).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
}).then(r => r.data).catch(_errorHandler.default);
}

@@ -23,0 +21,0 @@ async setup(app, path) {

@@ -7,4 +7,6 @@ "use strict";

exports.fileOptions = exports.FileService = exports.FileContentService = void 0;
var _errorHandler = _interopRequireDefault(require("../errorHandler.js"));
var _openaiSetup = require("./openai.setup.js");
var _errors = require("@feathersjs/errors");
var _openaiSetup = require("./openai.setup.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class FileService {

@@ -24,24 +26,19 @@ constructor(options) {

fileBuffer.name = 'file.jsonl';
return this.client.createFile(fileBuffer, data.purpose).then(r => r.data).catch(e => {
console.error(e);
throw new _errors.BadRequest(e.message);
});
return this.client.createFile(fileBuffer, data.purpose).then(r => r.data).catch(_errorHandler.default);
}
async find(params) {
return this.client.listFiles().then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
return this.client.listFiles().then(r => {
return {
total: r.data?.data.length || 0,
limit: null,
skip: null,
data: r.data
};
}).catch(_errorHandler.default);
}
async get(id, params) {
return this.client.retrieveFile(id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
return this.client.retrieveFile(id).then(r => r.data).catch(_errorHandler.default);
}
async remove(id, params) {
return this.client.deleteFile(id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
return this.client.deleteFile(id).then(r => r.data).catch(_errorHandler.default);
}

@@ -59,6 +56,3 @@ async setup(app, path) {

if (!(params.route && params.route.id)) throw new _errors.BadRequest();
return this.client.downloadFile(params.route.id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest(e);
});
return this.client.downloadFile(params.route.id).then(r => r.data).catch(_errorHandler.default);
}

@@ -65,0 +59,0 @@ }

@@ -7,4 +7,6 @@ "use strict";

exports.fineTuneOptions = exports.fineTuneMethods = exports.FineTuneService = exports.FineTuneEventsService = exports.FineTuneCancelService = void 0;
var _errorHandler = _interopRequireDefault(require("../errorHandler.js"));
var _openaiSetup = require("./openai.setup.js");
var _errors = require("@feathersjs/errors");
var _openaiSetup = require("./openai.setup.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class FineTuneService {

@@ -17,24 +19,22 @@ constructor(options) {

...data
}).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
}).then(r => r.data).catch(_errorHandler.default);
}
async find(params) {
return this.client.listFineTunes().then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
const limit = parseInt(params?.query?.$limit) || 1000;
const skip = parseInt(params?.query?.$skip) || 0;
return this.client.listFineTunes().then(r => {
const data = r.data?.data.slice(skip).slice(0, limit);
return {
total: r.data?.data.length || 0,
limit,
skip,
data
};
}).catch(_errorHandler.default);
}
async get(id, params) {
return this.client.retrieveFineTune(id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
return this.client.retrieveFineTune(id).then(r => r.data).catch(_errorHandler.default);
}
async remove(id, params) {
return this.client.deleteModel(id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
return this.client.deleteModel(id).then(r => r.data).catch(_errorHandler.default);
}

@@ -52,6 +52,3 @@ async setup(app, path) {

if (!(params.route && params.route.id)) throw new _errors.BadRequest();
return this.client.cancelFineTune(params.route.id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest(e);
});
return this.client.cancelFineTune(params.route.id).then(r => r.data).catch(_errorHandler.default);
}

@@ -69,6 +66,3 @@ async setup(app, path) {

if (!(params.route && params.route.id)) throw new _errors.BadRequest();
return this.client.listFineTuneEvents(params.route.id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest(e);
});
return this.client.listFineTuneEvents(params.route.id).then(r => r.data).catch(_errorHandler.default);
}

@@ -75,0 +69,0 @@ async setup(app, path) {

@@ -7,4 +7,6 @@ "use strict";

exports.imageVarOptions = exports.imageGenOptions = exports.imageEditOptions = exports.ImageVarService = exports.ImageGenService = exports.ImageEditService = void 0;
var _errorHandler = _interopRequireDefault(require("../errorHandler.js"));
var _openaiSetup = require("./openai.setup.js");
var _errors = require("@feathersjs/errors");
var _openaiSetup = require("./openai.setup.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class ImageGenService {

@@ -17,6 +19,3 @@ constructor(options) {

...data
}).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
}).then(r => r.data).catch(_errorHandler.default);
}

@@ -41,6 +40,3 @@ async setup(app, path) {

imageBuffer.name = 'image.png';
return this.client.createImageVariation(imageBuffer, params.n || 1, params.size || '256x256').then(r => r.data).catch(e => {
console.error(e);
throw new _errors.BadRequest(e.message);
});
return this.client.createImageVariation(imageBuffer, params.n || 1, params.size || '256x256').then(r => r.data).catch(_errorHandler.default);
}

@@ -71,11 +67,3 @@ async setup(app, path) {

// image, mask, prompt, n, size, responseFormat, user, options = {}
return this.client.createImageEdit(imageBuffer, maskBuffer, data.prompt, data.n || 1, data.size || '256x256', data.response_format || 'url', data.user || undefined).then(r => r.data).catch(error => {
if (error.response) {
console.log(error.response.status);
console.log(error.response.data);
} else {
console.log(error.message);
}
throw new _errors.BadRequest(error.message);
});
return this.client.createImageEdit(imageBuffer, maskBuffer, data.prompt, data.n || 1, data.size || '256x256', data.response_format || 'url', data.user || undefined).then(r => r.data).catch(_errorHandler.default);
}

@@ -82,0 +70,0 @@ async setup(app, path) {

@@ -6,26 +6,29 @@ "use strict";

});
exports.modelMethods = exports.getOptions = exports.ModelService = void 0;
var _errors = require("@feathersjs/errors");
exports.getOptions = exports.ModelService = void 0;
var _errorHandler = _interopRequireDefault(require("../errorHandler.js"));
var _openaiSetup = require("./openai.setup.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class ModelService {
constructor(options) {
this.options = options;
this.id = 'id';
}
async find(params) {
return this.client.listModels().then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
const limit = parseInt(params?.query?.$limit) || 1000;
const skip = parseInt(params?.query?.$skip) || 0;
return this.client.listModels().then(r => {
const data = r.data?.data.slice(skip).slice(0, limit);
return {
total: r.data?.data.length || 0,
limit,
skip,
data
};
}).catch(_errorHandler.default);
}
async get(id, params) {
return this.client.retrieveModel(id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
return this.client.retrieveModel(id).then(r => r.data).catch(_errorHandler.default);
}
async remove(id, params) {
return this.client.deleteModel(id).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
return this.client.deleteModel(id).then(r => r.data).catch(_errorHandler.default);
}

@@ -37,4 +40,2 @@ async setup(app, path) {

exports.ModelService = ModelService;
const modelMethods = ['find', 'get', 'remove'];
exports.modelMethods = modelMethods;
const getOptions = app => {

@@ -41,0 +42,0 @@ return {

@@ -7,4 +7,5 @@ "use strict";

exports.moderationOptions = exports.ModerationService = void 0;
var _errors = require("@feathersjs/errors");
var _errorHandler = _interopRequireDefault(require("../errorHandler.js"));
var _openaiSetup = require("./openai.setup.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class ModerationService {

@@ -17,6 +18,3 @@ constructor(options) {

...data
}).then(r => r.data).catch(e => {
console.log(e);
throw new _errors.BadRequest();
});
}).then(r => r.data).catch(_errorHandler.default);
}

@@ -23,0 +21,0 @@ async setup(app, path) {

{
"name": "feathers-openai",
"version": "0.0.1",
"version": "0.0.3",
"description": "An open.ai adapter for FeathersJS",
"main": "lib/",
"main": "src/",
"types": "lib/",

@@ -11,4 +11,7 @@ "directories": {

},
"peerDependencies": {
"@feathersjs/errors": "^5.0.0 || ^4.5.12",
"dependencies": {
"@feathersjs/errors": "^5.0.0",
"@feathersjs/feathers": "^5.0.0",
"@feathersjs/schema": "^5.0.0",
"@feathersjs/typebox": "^5.0.0",
"openai": "^3.1.0"

@@ -19,24 +22,16 @@ },

"@babel/core": "^7.21.0",
"@babel/plugin-transform-modules-commonjs": "^7.21.2",
"@babel/preset-env": "^7.20.2",
"@babel/register": "^7.21.0",
"@feathersjs/errors": "^5.0.0",
"@feathersjs/adapter-tests": "^5.0.0",
"@feathersjs/koa": "^5.0.0",
"@semantic-release/commit-analyzer": "^9.0.2",
"@semantic-release/git": "^10.0.1",
"@semantic-release/npm": "^9.0.1",
"app-root-path": "^3.1.0",
"@koa/multer": "^3.0.2",
"@release-it/conventional-changelog": "^5.1.1",
"cors": "^2.8.5",
"eslint": "^8.34.0",
"f4": "npm:@feathersjs/feathers@^4.0.0",
"f4_exp": "npm:@feathersjs/express@^4.0.0",
"f5": "npm:@feathersjs/feathers@^5.0.0",
"f5_exp": "npm:@feathersjs/express@^5.0.0",
"koa-multer": "^1.0.2",
"mocha": "^10.2.0",
"multer": "^1.4.5-lts.1",
"nock": "^13.3.0",
"nyc": "^15.1.0",
"semantic-release": "^20.1.0",
"semistandard": "^16.0.1",
"supertest": "^6.3.3"
"release-it": "^15.7.0",
"semistandard": "^16.0.1"
},

@@ -49,7 +44,7 @@ "engines": {

"mocha": "mocha --require @babel/register --recursive test/**/*.test.js",
"testd": "docker-compose up -d && npm run test && docker-compose down",
"test": "printf '\\33c\\e[3J' && npm run lint && npm run coverage",
"build": "babel src -d lib",
"test:dev": "printf '\\33c\\e[3J' && npm run lint && npm run mocha -- --watch --esm --parallel --watch-files ./test/**/*,./lib/**",
"coverage": "nyc npm run mocha"
"coverage": "nyc npm run mocha",
"release": "release-it --dry-run --ci --config .release-it.js"
},

@@ -68,7 +63,7 @@ "semistandard": {

"type": "git",
"url": "git+https://github.com/jamesvillarrubia/feathers-rpc.git"
"url": "git+https://github.com/jamesvillarrubia/feathers-openai.git"
},
"keywords": [
"feathers",
"rpc",
"openai",
"nlp",

@@ -81,67 +76,8 @@ "feathers-plugin"

},
"contributors": [
{
"name": "James Villarrubia",
"email": "james.villarrubia@gmail.com"
},
{
"name": "Ben Zelinski",
"email": "bziggz@gmail.com"
}
],
"contributors": [],
"license": "MIT",
"bugs": {
"url": "https://github.com/jamesvillarrubia/feathers-rpc/issues"
"url": "https://github.com/jamesvillarrubia/feathers-openai/issues"
},
"homepage": "https://github.com/jamesvillarrubia/feathers-rpc#readme",
"release": {
"branches": [
{
"name": "main"
},
{
"name": "alpha",
"channel": "alpha",
"prerelease": true
},
{
"name": "beta",
"channel": "beta",
"prerelease": true
}
],
"plugins": [
"@semantic-release/git",
[
"@semantic-release/npm",
{
"npmPublish": true
}
],
[
"@semantic-release/commit-analyzer",
{
"preset": "angular",
"releaseRules": [
{
"type": "refactor",
"release": "patch"
},
{
"type": "style",
"release": "patch"
},
{
"type": "perf",
"release": "patch"
},
{
"type": "chore",
"release": "patch"
}
]
}
]
]
}
"homepage": "https://github.com/jamesvillarrubia/feathers-openai#readme"
}

@@ -1,147 +0,140 @@

# feathers-openai
[![NPM](https://img.shields.io/npm/l/feathers-openai)](https://github.com/jamesvillarrubia/feathers-openai/blob/main/LICENSE) [![npm](https://img.shields.io/npm/v/feathers-openai?label=latest)](https://www.npmjs.com/package/feathers-openai)
![GitHub Workflow Status](https://img.shields.io/github/workflow/status/jamesvillarrubia/feathers-openai/Node%20Lint%20&%20Test?label=build%20%26%20lint)
![Libraries.io dependency status for latest release](https://img.shields.io/librariesio/release/npm/feathers-openai)
<!-- [![Download Status](https://img.shields.io/npm/dm/feathers-openai.svg)](https://www.npmjs.com/package/feathers-openai) -->
This library is a FeathersJS database adapter for openai - a high-scale, LMDB & NodeJS database. It uses a combination of the raw openai RESTful endpoints and [KnexJS](http://knexjs.org/)-translated queries through openai's subset of supported SQL commands. It also uses [Harperive](https://www.npmjs.com/package/harperive) for authentication, promise management, and connectivity. Harperive is exposed internally for developers wishing to build more complex queries in a openai service.
![Download Status](https://img.shields.io/npm/dm/feathers-openai.svg)
```bash
npm install --save feathers-openai
```
# feathers-openai
> __Important:__ `feathers-openai` implements the [Feathers Common database adapter API](https://docs.feathersjs.com/api/databases/common.html) and [querying syntax](https://docs.feathersjs.com/api/databases/querying.html).
>
> NOTE: This library is new
### Introduction
This library enables a series of FeathersJS services that map to the entities and routes of the [OpenAI API](https://platform.openai.com/docs/api-reference/introduction).
By wrapping the API in a set of FeathersJS services, developers can quickly integrate any subset of the OpenAI API into their FeathersJS application. This enables the developer to log, manipulate, wrap hooks around, and make direct or indirect calls to the API.
MULTER IS REQUIRED
### Features
* All OpenAI routes get their own service (e.g. `openai/embeddings`)
* Nested routes are included (e.g.`openai/fine-tunes/:id/cancel`)
* Allows prefixing to prevent route overwriting (`/my-openai-prefix/models`)
* All OpenAI Services can be extended with normal hooks
* Allows for uploads via `multer`
* Light validation via TypeBox on all `create` calls.
* Can disable individual services via configuration
```javascript
app.use(koa_multer().any())
app.use(openaiMulter);
```
### Todo:
[x] - Integrate all routes with FeathersJS
[x] - Incorporate uploads via buffer streams
[x] - Document multer example with KoaJS
[ ] - Document multer example with ExpressJS
[x] - Testing for uploads
[x] - Testing for configuration
[x] - Testing for sample services
[ ] - Expand testing to cover all services
> NOTE: PRs are welcome!
### Installation
To install the services, add the library as a dependency.
```bash
npm install --save feathers-openai multer @koa/multer
```
In your `app.js` file, add the following middleware calls:
```javascript
import koamulter from '@koa/multer';
import { openaiMulter, openai } from 'feathers-openai';
app.use(koamulter().any());
app.use(openaiMulter);
```
The `koamulter` captures multi-part form uploads to your endpoints. The `openaiMulter` appends the file(s) to the `ctx.feathers` object, making the buffers accessible within the hook and service `params`. The `multer` library or equivalent is required by `koamulter`.
In your `src/services/index.js` add the following:
## API
```javascript
import { openai } from 'feathers-openai'
### `service(options)`
```js
const service = require('feathers-openai');
app.use('/messages', service({
//...options
}));
export const services = (app) => {
app.configure(openai('openai'))
// ... other services
}
```
### Configuration
__API Key in [env].json__
__Options:__
- `name` (**required**) - The name of the table
- `config` (**required**) - Usually set in `config/{ENV}.json`. See "Connection Options" below
- `client` (*optional*) - The Harperive Client, can be manually overridden and accessed
- `id` (*optional*, default: `id`) - The name of the id field property.
- `events` (*optional*) - A list of [custom service events](https://docs.feathersjs.com/api/events.html#custom-events) sent by this service
- `paginate` (*optional*) - A [pagination object](https://docs.feathersjs.com/api/databases/common.html#pagination) containing a `default` and `max` page size
- `multi` (*optional*) - Allow `create` with arrays and `update` and `remove` with `id` `null` to change multiple items. Can be `true` for all methods or an array of allowed methods (e.g. `[ 'remove', 'create' ]`)
- `whitelist` (*optional*) - A list of additional query parameters to allow (e.g. `[ '$regex', '$geoNear' ]`). Default is the supported `operators`
- `sortField` (*optional*, default: `__createdtime__`) - By default all objects will be sorted ASC by created timestamp, similar to sorting by Integer auto-incremented `id` in most feather SQL operations
- `sortDirection` (*optional*, default: `asc`) - The default sort direction, can be one of `[ 'asc', 'desc' ]`
- `limit` (*optional*, default: `5000`) - The max number of objects to return without pagination, will be overridden by pagination settings
- `sync` (*optional*, default: `true` ) - Setting true will create schema and table on load as part of the `service.setup()` function run by FeathersJS
- `force` (*optional*, default: `false`), Setting true will delete the schema on setup, starting with a fresh database on every boot, much like Sequelize's `forceSync`.
You must add your API key in your `/config/[env].json` file.
__Connection Options:__
The connection options are passed in as a `config` object inside the options object (i.e. `harper({ config: { ...connection_options } })`)
- `schema` (**required**) - The name of the schema (i.e. DB-equivalent) in the openai instance
- `harperHost` (**required**) - The location of the Harper Host
- `username` (**required**) - The username to connect with
- `password` (**required**) - The password to connect with
- `table` (*optional*) - The name of the table referenced by the service, defaults to the configured `name`, but can be overridden by setting `config.table`
These can also be set via a "openai" configuration field in the Feathers `config/{ENV}.json`:
```json
"openai":{
"host": "http://localhost:9925",
"username": "admin",
"password": "password",
"schema": "test"
}
{
"openai":"sk-XXXXXXXXXXXXXXXXXXXXXX"
}
```
## Setting up Service
To set up your service, your service class.js and service.js files should look something like this:
__Service Options:__
```javascript
//books.class.js
const { Service } = require('feathers-openai');
exports.Books = class Books extends Service{
constructor(options, app) {
super({
...options,
name: 'books'
});
}
};
- `prefix` (*optional*, default: `openai`) - The prefix added to every OpenAI service route. This allows you to nest the services so that their common names (i.e. `model`, etc.) don't overwrite your core application services. The default setting will result in routes like `openai/models` and `openai/fine-tunes`.
- `allowedEntities` (*optional*, default: all). This option allows you to disable and prevent the loading of any of the OpenAI API routes and entities. It takes an array of entity names. For example, setting `allowedEntities=['models']` would enable only the `openai/models` service. This list of services are:
- `models`
- `edits`
- `completions`
- `images/generations`
- `images/edits`
- `images/variations`
- `fine-tunes`
- `embeddings`
- `moderations`
- `files`
//books.service.js
const { Books } = require('./books.class');
const hooks = require('./books.hooks');
module.exports = function (app) {
const options = {
paginate: app.get('paginate'),
config: {
...app.get('openai'),
table: 'books'
}
};
app.use('/books', new Books(options, app));
const service = app.service('books');
service.hooks(hooks);
};
```
### Adding Hooks
The services come with no hooks by default. You can add hooks by simply setting the hooks property after `app.configure`. For example:
## Querying
```javascript
export const services = (app) => {
app.configure(openai('openai'))
attachOpenAIhooks(app)
// ... other services
}
function attachOpenAIhooks(app){
app.service('openai/models').hooks({
before:{
all:[()=>console.log('in the models hook!')]
}
})
}
```
In addition to the [common querying mechanism](https://docs.feathersjs.com/api/databases/querying.html), this adapter also supports direct NoSQL submissions via the [Harperive client](https://chandan-24.github.io/Harperive/#/) like this:
You can also directly leverage the service as part of another service flow, like using the AI to write a title for an article.
```javascript
let service = app.service('books')
await service.client.insert({
table: this.table,
records: [
{
user_id: 43,
username: 'simon_j',
first_name: 'James',
middle_name: 'J.',
last_name: 'Simon'
}
]
app.service('articles').hooks({
before:{ create:[
(ctx)=>{
let text = ctx.data.text;
ctx.data.title = ctx.app.service('openai/completions')
.create({
model: "text-davinci-003",
prompt: `Write a clickbait title for the following text:\n${text}\n####`,
max_tokens: 7,
temperature: 0,
stop:'###'
}).then(r=>r.choices[0].text)
}]
}
})
.then((res) => console.log(res))
.catch((err) => console.log(err));
```
You can also use Harperive's generic execution option like so:
```javascript
const options = {
operation: 'openai_operation',
//other fields...
};
// Promise
let service = app.service('books')
await service.client.executeOperation(options)
.then((res) => console.log(res))
.catch((err) => console.log(err));
```
### Known Limitations
__Adapter Syntax:__ `feathers-openai` does NOT implement the [Feathers Common database adapter API](https://docs.feathersjs.com/api/databases/common.html)
__Pagination:__ The current OpenAI API does not demonstrate how they will provide pagination for large responses, so for `find` method calls, the `total`, `$limit` and `$skip` are server-side manipulations of the OpenAI response and will not reduce the call on the OpenAI side. Any calls to `find` will return the standard feathers object `{ total:x ... data:[...]}` but `create` , `get`, `remove`,`update` will return as close to the original OpenAI response.
__Queries/Search:__ There is NO query capability for the OpenAI API so it is not implemented here. This library does not implement the FeathersJS [querying syntax](https://docs.feathersjs.com/api/databases/querying.html).

@@ -18,3 +18,3 @@ import { ModelService, getOptions as modelOptions } from './openai/openai.model.class.js';

export const openaiMulter = async (ctx, next) => {
ctx.feathers.files = [ctx.file, ...ctx.files].filter(f => !!f);
ctx.feathers.files = [ctx.file, ...(ctx.files ? ctx.files : [])].filter(f => !!f);
await next();

@@ -21,0 +21,0 @@ };

import { BadRequest } from '@feathersjs/errors';
import { openaiSetup } from './openai.setup.js';
import errorHandler from '../errorHandler.js';
import { Type } from '@feathersjs/typebox';
import { Ajv } from '@feathersjs/schema';
const ajv = new Ajv({});
const schema = Type.Object(
{
model: Type.String(),
prompt: Type.Optional(Type.Union([Type.Array(Type.String()), Type.String()])),
suffix: Type.Optional(Type.String()),
max_tokens: Type.Optional(Type.Number()),
temperature: Type.Optional(Type.Number()),
top_p: Type.Optional(Type.Number()),
n: Type.Optional(Type.Number()),
stream: Type.Optional(Type.Boolean()),
logprobs: Type.Optional(Type.Number()),
stop: Type.Optional(Type.Union([Type.Array(Type.String()), Type.String()])),
echo: Type.Optional(Type.Boolean()),
presence_penalty: Type.Optional(Type.Number()),
frequency_penalty: Type.Optional(Type.Number()),
best_of: Type.Optional(Type.Number()),
logit_bias: Type.Optional(Type.Number()),
user: Type.Optional(Type.String())
},
{ $id: 'openaiModel', additionalProperties: false }
);
const validator = ajv.compile(schema);
export class CompletionService {

@@ -11,5 +37,6 @@ constructor (options) {

async create (data, params) {
if (!validator(data)) errorHandler(validator.errors[0]);
return this.client.createCompletion({
...data
}).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
}).then(r => r.data).catch(errorHandler);
}

@@ -16,0 +43,0 @@

@@ -1,4 +0,20 @@

import { BadRequest } from '@feathersjs/errors';
import errorHandler from '../errorHandler.js';
import { openaiSetup } from './openai.setup.js';
import { Type } from '@feathersjs/typebox';
import { Ajv } from '@feathersjs/schema';
const ajv = new Ajv({});
const schema = Type.Object(
{
model: Type.String(),
input: Type.Optional(Type.String()),
instruction: Type.String(),
n: Type.Optional(Type.Number()),
temperature: Type.Optional(Type.Number()),
top_p: Type.Optional(Type.Number())
},
{ $id: 'openaiEdit', additionalProperties: false }
);
const validator = ajv.compile(schema);
export class EditService {

@@ -10,5 +26,7 @@ constructor (options) {

async create (data, params) {
if (!validator(data)) errorHandler(validator.errors[0]);
return this.client.createEdit({
...data
}).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
}).then(r => r.data).catch(errorHandler);
}

@@ -15,0 +33,0 @@

@@ -1,4 +0,17 @@

import { BadRequest } from '@feathersjs/errors';
import { openaiSetup } from './openai.setup.js';
import errorHandler from '../errorHandler.js';
import { Type } from '@feathersjs/typebox';
import { Ajv } from '@feathersjs/schema';
const ajv = new Ajv({});
const schema = Type.Object(
{
model: Type.String(),
input: Type.String(),
user: Type.Optional(Type.String())
},
{ $id: 'openaiEmbedding', additionalProperties: false }
);
const validator = ajv.compile(schema);
export class EmbeddingService {

@@ -10,5 +23,8 @@ constructor (options) {

async create (data, params) {
if (!validator(data)) errorHandler(validator.errors[0]);
return this.client.createEmbedding({
...data
}).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
}).then(r => r.data)
.catch(errorHandler);
}

@@ -19,6 +35,4 @@

export const embeddingMethods = [];
export const embeddingOptions = (app) => {
return { app };
};

@@ -0,4 +1,4 @@

import errorHandler from '../errorHandler.js';
import { openaiSetup } from './openai.setup.js';
import { BadRequest } from '@feathersjs/errors';
import { openaiSetup } from './openai.setup.js';
export class FileService {

@@ -24,18 +24,24 @@ constructor (options) {

).then(r => r.data)
.catch(e => {
console.error(e);
throw new BadRequest(e.message);
});
.catch(errorHandler);
}
async find (params) {
return this.client.listFiles().then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
return this.client.listFiles()
.then(r => {
return {
total: r.data?.data.length || 0,
limit: null,
skip: null,
data: r.data
};
})
.catch(errorHandler);
}
async get (id, params) {
return this.client.retrieveFile(id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
return this.client.retrieveFile(id).then(r => r.data).catch(errorHandler);
}
async remove (id, params) {
return this.client.deleteFile(id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
return this.client.deleteFile(id).then(r => r.data).catch(errorHandler);
}

@@ -53,3 +59,3 @@

if (!(params.route && params.route.id)) throw new BadRequest();
return this.client.downloadFile(params.route.id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(e); });
return this.client.downloadFile(params.route.id).then(r => r.data).catch(errorHandler);
}

@@ -56,0 +62,0 @@ }

@@ -0,4 +1,27 @@

import errorHandler from '../errorHandler.js';
import { openaiSetup } from './openai.setup.js';
import { BadRequest } from '@feathersjs/errors';
import { openaiSetup } from './openai.setup.js';
import { Type } from '@feathersjs/typebox';
import { Ajv } from '@feathersjs/schema';
const ajv = new Ajv({});
const schema = Type.Object(
{
training_file: Type.String(),
validation_file: Type.Optional(Type.String()),
model: Type.Optional(Type.String()),
n_epochs: Type.Optional(Type.Number()),
batch_size: Type.Optional(Type.Number()),
learning_rate_multiplier: Type.Optional(Type.Number()),
prompt_loss_weight: Type.Optional(Type.Number()),
compute_classification_metrics: Type.Optional(Type.Boolean()),
classification_n_classes: Type.Optional(Type.Number()),
classification_positive_class: Type.Optional(Type.String()),
classification_betas: Type.Optional(Type.Array(Type.Number())),
suffix: Type.Optional(Type.String())
},
{ $id: 'openaiFineTune', additionalProperties: false }
);
const validator = ajv.compile(schema);
export class FineTuneService {

@@ -10,17 +33,32 @@ constructor (options) {

async create (data, params) {
if (!validator(data)) errorHandler(validator.errors[0]);
return this.client.createFineTune({
...data
}).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
}).then(r => r.data)
.catch(errorHandler);
}
async find (params) {
return this.client.listFineTunes().then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
const limit = parseInt(params?.query?.$limit) || 1000;
const skip = parseInt(params?.query?.$skip) || 0;
return this.client.listFineTunes()
.then(r => {
const data = r.data?.data.slice(skip).slice(0, limit);
return {
total: r.data?.data.length || 0,
limit,
skip,
data
};
})
.catch(errorHandler);
}
async get (id, params) {
return this.client.retrieveFineTune(id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
return this.client.retrieveFineTune(id).then(r => r.data).catch(errorHandler);
}
async remove (id, params) {
return this.client.deleteModel(id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
return this.client.deleteModel(id).then(r => r.data).catch(errorHandler);
}

@@ -38,3 +76,3 @@

if (!(params.route && params.route.id)) throw new BadRequest();
return this.client.cancelFineTune(params.route.id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(e); });
return this.client.cancelFineTune(params.route.id).then(r => r.data).catch(errorHandler);
}

@@ -52,3 +90,3 @@

if (!(params.route && params.route.id)) throw new BadRequest();
return this.client.listFineTuneEvents(params.route.id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(e); });
return this.client.listFineTuneEvents(params.route.id).then(r => r.data).catch(errorHandler);
}

@@ -55,0 +93,0 @@

@@ -0,4 +1,27 @@

import errorHandler from '../errorHandler.js';
import { openaiSetup } from './openai.setup.js';
import { BadRequest } from '@feathersjs/errors';
import { openaiSetup } from './openai.setup.js';
import { Type } from '@feathersjs/typebox';
import { Ajv } from '@feathersjs/schema';
const ajv = new Ajv({});
const schema = Type.Object(
{
prompt: Type.String(),
n: Type.Optional(Type.Number()),
size: Type.Optional(Type.Union([
Type.Literal('256x256'),
Type.Literal('512x512'),
Type.Literal('1024x1024')
])),
response_format: Type.Optional(Type.Union([
Type.Literal('url'),
Type.Literal('b64_json')
])),
user: Type.Optional(Type.String())
},
{ $id: 'openaiImage', additionalProperties: false }
);
const validator = ajv.compile(schema);
export class ImageGenService {

@@ -10,5 +33,7 @@ constructor (options) {

async create (data, params) {
if (!validator(data)) errorHandler(validator.errors[0]);
return this.client.createImage({
...data
}).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
}).then(r => r.data)
.catch(errorHandler);
}

@@ -39,6 +64,3 @@

).then(r => r.data)
.catch(e => {
console.error(e);
throw new BadRequest(e.message);
});
.catch(errorHandler);
}

@@ -80,11 +102,3 @@

).then(r => r.data)
.catch(error => {
if (error.response) {
console.log(error.response.status);
console.log(error.response.data);
} else {
console.log(error.message);
}
throw new BadRequest(error.message);
});
.catch(errorHandler);
}

@@ -91,0 +105,0 @@

@@ -1,2 +0,2 @@

import { BadRequest } from '@feathersjs/errors';
import errorHandler from '../errorHandler.js';
import { openaiSetup } from './openai.setup.js';

@@ -7,14 +7,28 @@

this.options = options;
this.id = 'id';
}
async find (params) {
return this.client.listModels().then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
const limit = parseInt(params?.query?.$limit) || 1000;
const skip = parseInt(params?.query?.$skip) || 0;
return this.client.listModels()
.then(r => {
const data = r.data?.data.slice(skip).slice(0, limit);
return {
total: r.data?.data.length || 0,
limit,
skip,
data
};
})
.catch(errorHandler);
}
async get (id, params) {
return this.client.retrieveModel(id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
return this.client.retrieveModel(id).then(r => r.data).catch(errorHandler);
}
async remove (id, params) {
return this.client.deleteModel(id).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
return this.client.deleteModel(id).then(r => r.data).catch(errorHandler);
}

@@ -25,6 +39,4 @@

export const modelMethods = ['find', 'get', 'remove'];
export const getOptions = (app) => {
return { app };
};

@@ -1,4 +0,19 @@

import { BadRequest } from '@feathersjs/errors';
import errorHandler from '../errorHandler.js';
import { openaiSetup } from './openai.setup.js';
import { Type } from '@feathersjs/typebox';
import { Ajv } from '@feathersjs/schema';
const ajv = new Ajv({});
const schema = Type.Object(
{
model: Type.Optional(Type.Union([
Type.Literal('text-moderation-stable'),
Type.Literal('text-moderation-latest')
])),
input: Type.Union([Type.Array(Type.String()), Type.String()])
},
{ $id: 'openaiModeration', additionalProperties: false }
);
const validator = ajv.compile(schema);
export class ModerationService {

@@ -10,5 +25,7 @@ constructor (options) {

async create (data, params) {
if (!validator(data)) errorHandler(validator.errors[0]);
return this.client.createModeration({
...data
}).then(r => r.data).catch(e => { console.log(e); throw new BadRequest(); });
}).then(r => r.data).catch(errorHandler);
}

@@ -15,0 +32,0 @@