create-llama
Comparing versions 0.1.29 and 0.1.30
"use client"; | ||
import { useEffect, useMemo, useState } from "react"; | ||
export interface ChatConfig { | ||
backend?: string; | ||
starterQuestions?: string[]; | ||
} | ||
export function useClientConfig(): ChatConfig { | ||
function getBackendOrigin(): string { | ||
const chatAPI = process.env.NEXT_PUBLIC_CHAT_API; | ||
const [config, setConfig] = useState<ChatConfig>(); | ||
const backendOrigin = useMemo(() => { | ||
if (chatAPI) { | ||
return new URL(chatAPI).origin; | ||
} else { | ||
if (typeof window !== "undefined") { | ||
// Use BASE_URL from window.ENV | ||
return (window as any).ENV?.BASE_URL || ""; | ||
} | ||
return ""; | ||
if (chatAPI) { | ||
return new URL(chatAPI).origin; | ||
} else { | ||
if (typeof window !== "undefined") { | ||
// Use BASE_URL from window.ENV | ||
return (window as any).ENV?.BASE_URL || ""; | ||
} | ||
}, [chatAPI]); | ||
return ""; | ||
} | ||
} | ||
const configAPI = `${backendOrigin}/api/chat/config`; | ||
useEffect(() => { | ||
fetch(configAPI) | ||
.then((response) => response.json()) | ||
.then((data) => setConfig({ ...data, chatAPI })) | ||
.catch((error) => console.error("Error fetching config", error)); | ||
}, [chatAPI, configAPI]); | ||
export function useClientConfig(): ChatConfig { | ||
return { | ||
backend: backendOrigin, | ||
starterQuestions: config?.starterQuestions, | ||
backend: getBackendOrigin(), | ||
}; | ||
} |
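The new getBackendOrigin falls back to (window as any).ENV?.BASE_URL when NEXT_PUBLIC_CHAT_API is unset, which only works if something defines window.ENV before the hook runs. Below is a minimal sketch of one way a backend could inject that value; the injection mechanism, script path, and helper name are assumptions for illustration and are not part of this diff.

// Hypothetical sketch, not from the diff: one way window.ENV.BASE_URL could
// be provided to a statically served frontend.

// Shape the hook reads from the global object (assumed).
declare global {
  interface Window {
    ENV?: { BASE_URL?: string };
  }
}

// Assumed helper: the backend could serve this as a tiny script (for example
// via <script src="/config.js">) loaded before the app bundle.
export function renderEnvScript(baseUrl: string): string {
  // JSON.stringify keeps the injected value safely escaped.
  return `window.ENV = ${JSON.stringify({ BASE_URL: baseUrl })};`;
}

// Example output: window.ENV = {"BASE_URL":"http://localhost:8000"};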
/* eslint-disable turbo/no-undeclared-env-vars */
import * as dotenv from "dotenv";
-import { VectorStoreIndex, storageContextFromDefaults } from "llamaindex";
-import { MongoDBAtlasVectorSearch } from "llamaindex/storage/vectorStore/MongoDBAtlasVectorSearch";
+import {
+  MongoDBAtlasVectorSearch,
+  VectorStoreIndex,
+  storageContextFromDefaults,
+} from "llamaindex";
import { MongoClient } from "mongodb";
@@ -6,0 +9,0 @@ import { getDocuments } from "./loader";
/* eslint-disable turbo/no-undeclared-env-vars */
-import { VectorStoreIndex } from "llamaindex";
-import { MongoDBAtlasVectorSearch } from "llamaindex/storage/vectorStore/MongoDBAtlasVectorSearch";
+import { MongoDBAtlasVectorSearch, VectorStoreIndex } from "llamaindex";
import { MongoClient } from "mongodb";
@@ -5,0 +4,0 @@ import { checkRequiredEnvVars } from "./shared";
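Both files now pull MongoDBAtlasVectorSearch from the top-level llamaindex entry point instead of the deep llamaindex/storage/vectorStore/... path. A rough sketch of how these imports are commonly wired together when generating the datasource follows; the environment variable names and constructor options are assumptions based on typical LlamaIndex.TS usage, not something this diff shows.

/* Sketch only: assumed usage of the re-grouped imports. */
import * as dotenv from "dotenv";
import {
  MongoDBAtlasVectorSearch,
  VectorStoreIndex,
  storageContextFromDefaults,
} from "llamaindex";
import type { Document } from "llamaindex";
import { MongoClient } from "mongodb";

dotenv.config();

export async function generateDatasource(documents: Document[]) {
  // Assumed env var names; the template's own names may differ.
  const client = new MongoClient(process.env.MONGODB_URI!);
  const vectorStore = new MongoDBAtlasVectorSearch({
    mongodbClient: client,
    dbName: process.env.MONGODB_DATABASE!,
    collectionName: process.env.MONGODB_VECTORS!,
    indexName: process.env.MONGODB_VECTOR_INDEX, // assumed option name
  });

  // Embed the documents and store them in the Atlas collection.
  const storageContext = await storageContextFromDefaults({ vectorStore });
  await VectorStoreIndex.fromDocuments(documents, { storageContext });
  await client.close();
}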
@@ -8,3 +8,3 @@ {
    "skipLibCheck": true,
-   "moduleResolution": "node",
+   "moduleResolution": "bundler",
    "paths": {
@@ -11,0 +11,0 @@ "@/*": ["./*"]
"use client"; | ||
import { useEffect, useMemo, useState } from "react"; | ||
export interface ChatConfig { | ||
backend?: string; | ||
starterQuestions?: string[]; | ||
} | ||
export function useClientConfig(): ChatConfig { | ||
function getBackendOrigin(): string { | ||
const chatAPI = process.env.NEXT_PUBLIC_CHAT_API; | ||
const [config, setConfig] = useState<ChatConfig>(); | ||
if (chatAPI) { | ||
return new URL(chatAPI).origin; | ||
} else { | ||
if (typeof window !== "undefined") { | ||
// Use BASE_URL from window.ENV | ||
return (window as any).ENV?.BASE_URL || ""; | ||
} | ||
return ""; | ||
} | ||
} | ||
const backendOrigin = useMemo(() => { | ||
return chatAPI ? new URL(chatAPI).origin : ""; | ||
}, [chatAPI]); | ||
const configAPI = `${backendOrigin}/api/chat/config`; | ||
useEffect(() => { | ||
fetch(configAPI) | ||
.then((response) => response.json()) | ||
.then((data) => setConfig({ ...data, chatAPI })) | ||
.catch((error) => console.error("Error fetching config", error)); | ||
}, [chatAPI, configAPI]); | ||
export function useClientConfig(): ChatConfig { | ||
return { | ||
backend: backendOrigin, | ||
starterQuestions: config?.starterQuestions, | ||
backend: getBackendOrigin(), | ||
}; | ||
} |
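For context, a minimal (assumed) consumer of the reworked hook; the component and import path are illustrative only. The point is that `backend` resolves the same way whether NEXT_PUBLIC_CHAT_API is set at build time or BASE_URL is injected at runtime via window.ENV.

"use client";

// Assumed import path; adjust to wherever use-config.ts lives in the template.
import { useClientConfig } from "./use-config";

export function ChatEndpointDebug() {
  const { backend } = useClientConfig();
  // With NEXT_PUBLIC_CHAT_API set, `backend` is that URL's origin;
  // otherwise it falls back to window.ENV.BASE_URL, or "" for same-origin requests.
  const chatEndpoint = `${backend}/api/chat`;
  return <code>{chatEndpoint}</code>;
}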
{
  "name": "create-llama",
- "version": "0.1.29",
+ "version": "0.1.30",
  "description": "Create LlamaIndex-powered apps with one command",
@@ -5,0 +5,0 @@ "keywords": [
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package