openai-mcp-server
Comparing version 0.1.3 with 0.1.4
@@ -1,14 +0,8 @@
 #!/usr/bin/env node
 import { OpenAI } from "openai";
-import { Server } from "@modelcontextprotocol/sdk/server/index.js";
+import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
+import { z } from "zod";
 import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
-import { CallToolRequestSchema, ListResourcesRequestSchema, ListToolsRequestSchema, ReadResourceRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
-const server = new Server({
+const server = new McpServer({
     name: "openai",
     version: "0.1.0",
-}, {
-    capabilities: {
-        resources: {},
-        tools: {},
-    },
 });
@@ -18,40 +12,27 @@ const openai = new OpenAI({
 });
-const SCHEMA_PATH = "schema";
-const resourceBaseUrl = "https://api.openai.com/v1/";
-server.setRequestHandler(ListResourcesRequestSchema, async () => {
-    return {
-        resources: [
-            {
-                uri: new URL(`openai-chat/${SCHEMA_PATH}`, resourceBaseUrl).href,
-                mimeType: "application/json",
-                name: "OpenAI Chat Completions API",
-            }
-        ],
-    };
-});
-server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
-    const resourceUrl = new URL(request.params.uri);
-    const pathComponents = resourceUrl.pathname.split("/");
-    const schema = pathComponents.pop();
-    const resourceType = pathComponents.pop();
-    if (schema !== SCHEMA_PATH) {
-        throw new Error("Invalid resource URI");
-    }
-    if (resourceType === "openai-chat") {
-        const openaiChatSchema = {
-            type: "object",
-            properties: {
-                prompt: {
-                    type: "string",
-                    description: "The prompt to send to OpenAI's chat completion API"
-                }
-            },
-            required: ["prompt"]
-        };
-        return {
-            contents: [
-                {
-                    uri: request.params.uri,
-                    mimeType: "application/json",
-                    text: JSON.stringify(openaiChatSchema, null, 2),
-                },
+// Helper function for making OpenAI API requests
+async function askOpenAI(prompt) {
+    try {
+        const response = await openai.chat.completions.create({
+            model: "gpt-4o-mini",
+            messages: [{ role: "user", content: prompt }],
+        });
+        return response.choices[0].message.content;
+    }
+    catch (error) {
+        console.error("Error making openai request:", error);
+        return null;
+    }
+}
+server.tool("ask-openai", "Ask OpenAI for a response", {
+    prompt: z.string().describe("The prompt to ask OpenAI"),
+}, async ({ prompt }) => {
+    // Forward the prompt to OpenAI and handle a failed request
+    const response = await askOpenAI(prompt);
+    if (!response) {
+        return {
+            content: [
+                {
+                    type: "text",
+                    text: `Failed to retrieve response from OpenAI`,
+                },
@@ -61,16 +42,7 @@ ],
     }
-    throw new Error("Unknown resource type");
-});
-server.setRequestHandler(ListToolsRequestSchema, async () => {
-    return {
-        tools: [
-            {
-                name: "query",
-                description: "Pass prompt to openai and get response",
-                inputSchema: {
-                    type: "object",
-                    properties: {
-                        prompt: { type: "string" },
-                    },
-                },
-            },
+    return {
+        content: [
+            {
+                type: "text",
+                text: response,
+            },
@@ -80,20 +52,10 @@ ],
 });
-server.setRequestHandler(CallToolRequestSchema, async (request) => {
-    if (request.params.name === "query") {
-        const prompt = request.params.arguments?.prompt;
-        const result = await openai.chat.completions.create({
-            model: "gpt-4o-mini",
-            messages: [{ role: "user", content: prompt }],
-        });
-        return {
-            content: [{ type: "text", text: result.choices[0].message.content }],
-            isError: false,
-        };
-    }
-    throw new Error(`Unknown tool: ${request.params.name}`);
-});
-async function runServer() {
+async function main() {
     const transport = new StdioServerTransport();
     await server.connect(transport);
     console.error("OpenAI MCP Server running on stdio");
 }
-runServer().catch(console.error);
+main().catch((error) => {
+    console.error("Fatal error in main():", error);
+    process.exit(1);
+});
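In short, 0.1.4 drops the low-level Server/setRequestHandler wiring (the schema resource and the "query" tool) in favor of a single "ask-openai" tool registered on McpServer. A minimal sketch of exercising the new tool from an MCP client over stdio follows; the entry-point path, the client name, and the assumption that the server picks up OPENAI_API_KEY from its environment are illustrative, not taken from the package.

// smoke-test.js (hypothetical): drive the rebuilt server over stdio.
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

async function smokeTest() {
    // Spawn the built server the same way an MCP host would.
    const transport = new StdioClientTransport({
        command: "node",
        args: ["dist/index.js"], // assumed build output path
        env: { OPENAI_API_KEY: process.env.OPENAI_API_KEY ?? "" },
    });
    const client = new Client({ name: "smoke-test", version: "0.0.1" }, { capabilities: {} });
    await client.connect(transport);

    // 0.1.4 exposes a single tool and no resources.
    const { tools } = await client.listTools();
    console.log(tools.map((t) => t.name)); // expected: ["ask-openai"]

    const result = await client.callTool({
        name: "ask-openai",
        arguments: { prompt: "Reply with the single word: pong" },
    });
    console.log(result.content);

    await client.close();
}

smokeTest().catch(console.error);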
 {
   "name": "openai-mcp-server",
-  "version": "0.1.3",
+  "version": "0.1.4",
   "description": "MCP server for interacting with OpenAI",
@@ -15,3 +15,3 @@ "license": "MIT",
   "scripts": {
-    "build": "tsc && shx chmod +x dist/*.js",
+    "build": "tsc && node -e \"require('fs').chmodSync('dist/index.js', '755')\"",
     "prepare": "npm run build",
@@ -21,3 +21,3 @@ "watch": "tsc --watch"
   "dependencies": {
-    "@modelcontextprotocol/sdk": "1.0.1",
+    "@modelcontextprotocol/sdk": "^1.6.1",
     "@types/node": "^22.13.9",
@@ -24,0 +24,0 @@ "openai": "^4.86.2"
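Besides the SDK bump, the build script no longer shells out to shx: it now calls Node's fs API directly and marks only dist/index.js executable instead of every file in dist/. Roughly, the inlined one-liner is equivalent to running this small script (filename hypothetical) after tsc:

// chmod-dist.js (hypothetical standalone equivalent of the inlined build step)
const fs = require("fs");
// Mark the built entry point as executable (mode 0755).
fs.chmodSync("dist/index.js", "755");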
URL strings
Supply chain risk: Package contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package.