New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@nlux/langchain

Package Overview
Dependencies
Maintainers
1
Versions
124
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@nlux/langchain - npm Package Compare versions

Comparing version 0.10.1 to 0.10.2

2

cjs/langchain.js

@@ -1,1 +0,1 @@

"use strict";var e=require("@nlux/core");const t=e=>{const t=/\/.*\/(invoke|stream)$/g.exec(e);if(!t||t.length<2)return;const r=t[1];return"invoke"===r||"stream"===r?r:void 0},r=r=>{const s=t(r.url),o=r.dataTransferMode,a=s?"stream"===s?"stream":"fetch":void 0;const i=a??r.dataTransferMode??n.defaultDataTransferMode;return o&&a&&o!==a&&e.warnOnce(`The data transfer mode provided to LangServe adapter does not match the LangServe runnable URL action. When you provide a runnable URL that ends with '/${s}', the data transfer mode is automatically set to '${a}' and the 'dataTransferMode' option should not be provided or should be set to '${a}'`),i},s=e=>{const t=e.url;return/\/.*\/(invoke|stream)$/g.test(t)?t.replace(/\/(invoke|stream)$/g,""):t},o=e=>{const o=s(e).replace(/\/$/,""),n=(e=>{const s=e.url,o=t(s);return o||("fetch"===r(e)?"invoke":"stream")})(e);return`${o}/${n}`};class n{constructor(t){this.__instanceId=`${this.info.id}-${e.uid()}`,this.__options={...t},this.theDataTransferModeToUse=r(t),this.theUseInputSchemaOptionToUse="boolean"!=typeof t.useInputSchema||t.useInputSchema,this.theEndpointUrlToUse=o(t),this.theRunnableNameToUse=s(t).replace(/\/$/,"").split("/").pop()||"langserve-runnable",this.theInputSchemaUrlToUse=((e,t)=>{const r=s(e).replace(/\/$/,"");return"input"===t?`${r}/input_schema`:`${r}/output_schema`})(t,"input"),this.init()}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.theDataTransferModeToUse}get endpointUrl(){return this.theEndpointUrlToUse}get id(){return this.__instanceId}get info(){return{id:"langserve-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1},inputFormats:["text"],outputFormats:["text","markdown"]}}get inputPreProcessor(){return this.__options.inputPreProcessor}get inputSchema(){return this.theInputSchemaToUse}get outputPreProcessor(){return this.__options.outputPreProcessor}get runnableName(){return this.theRunnableNameToUse}get 
status(){return"idle"}get useInputSchema(){return this.theUseInputSchemaOptionToUse}get inputSchemaUrl(){return this.theInputSchemaUrlToUse}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}async fetchSchema(t){try{const r=await fetch(t),s=await r.json();return"object"==typeof s&&s?s:void e.warn(`LangServe adapter is unable process schema loaded from: ${t}`)}catch(r){return void e.warn(`LangServe adapter is unable to fetch schema from: ${t}`)}}init(){!this.inputPreProcessor&&this.useInputSchema&&this.fetchSchema(this.inputSchemaUrl).then((e=>{this.theInputSchemaToUse=e}))}getDisplayableMessageFromAiOutput(t){if(this.outputPreProcessor)return this.outputPreProcessor(t);if("string"==typeof t)return t;const r=t;if("object"==typeof r&&r&&"string"==typeof r.content)return r.content;e.warn(`LangServe adapter is unable to process output returned from the endpoint:\n ${JSON.stringify(t)}`)}getRequestBody(t){if(this.inputPreProcessor){const e=this.inputPreProcessor(t);return JSON.stringify({input:e})}if(this.inputSchema){const r=((t,r,s)=>{if(!r||"object"!=typeof r.properties)return t;if("object"!=typeof r||!r)return e.warn(`LangServer adapter cannot process the input schema fetched for runnable "${s}". The user message will be sent to LangServe endpoint as is without transformations. 
To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string as input.`),t;if("string"===r.type)return t;if("object"===r.type){const s="object"==typeof r.properties&&r.properties?r.properties:{},o=Object.keys(s).filter((e=>e&&"string"==typeof r.properties[e].type)).map((e=>e));if(1===o.length)return{[o[0]]:t};e.warn('LangServer adapter cannot find a valid property to match to user input inside the "${runnableName}" input schema. The user message will be sent to LangServe endpoint as is without transformations. To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string accepted as part of input schema.')}})(t,this.inputSchema,this.runnableName);if(void 0!==r)return JSON.stringify({input:r})}return JSON.stringify({input:t})}}n.defaultDataTransferMode="fetch";class a extends n{constructor(e){super(e)}async fetchText(e){const t=this.getRequestBody(e),r=await fetch(this.endpointUrl,{method:"POST",body:t});if(!r.ok)throw new Error(`LangServe runnable returned status code: ${r.status}`);const s=await r.json();if("object"!=typeof s||!s||void 0===s.output)throw new Error('Invalid response from LangServe runnable: Response is not an object or does not contain an "output" property');const o="object"==typeof s&&s?s.output:void 0;return this.getDisplayableMessageFromAiOutput(o)??""}streamText(t,r){throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const i=t=>{const r=/^event:\s+(?<event>[\w]+)(\n(\r?)data: 
(?<data>(.|\n)*))?/gm.exec(t);if(!r)return;const{event:s,data:o}=r.groups||{};if(s&&("data"===s||"end"===s))try{return{event:s,data:o?JSON.parse(o):void 0}}catch(t){return e.warn(`LangServe stream adapter failed to parse data for chunk event "${s}" | Data: ${o}`),{event:s,data:void 0}}},u=e=>{if(!e)return[];const t=/(((?<=^)|(?<=\n))event:\s+(\w+))/g,r=[];let s=t.exec(e);for(;s;)r.push(s.index),s=t.exec(e);const o=(t,s)=>{const o=r[s+1]||e.length;return e.substring(t,o)};try{return r.map(o).map(i).filter((e=>void 0!==e)).map((e=>e))}catch(e){return e instanceof Error?e:[]}},c=e=>"object"==typeof e&&null!==e&&e.message?.toLowerCase().includes("connection error")?"NX-NT-001":null;class h extends n{constructor(e){super(e)}async fetchText(t){throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot fetch text using the stream adapter!"})}streamText(t,r){const s=this.getRequestBody(t);fetch(this.endpointUrl,{method:"POST",headers:{"Content-Type":"application/json"},body:s}).then((async t=>{if(!t.ok)throw new e.NluxError({source:this.constructor.name,message:`LangServe runnable returned status code: ${t.status}`});if(!t.body)throw new e.NluxError({source:this.constructor.name,message:`LangServe runnable returned status code: ${t.status}`});const s=t.body.getReader(),o=new TextDecoder;let n=!1;for(;!n;){const{value:t,done:a}=await s.read();if(a){n=!0;continue}const i=o.decode(t),c=u(i);if(Array.isArray(c))for(const e of c){if("data"===e.event&&void 0!==e.data){const t=this.getDisplayableMessageFromAiOutput(e.data);"string"==typeof t&&t&&r.next(t)}if("end"===e.event){r.complete(),n=!0;break}}c instanceof Error&&(e.warn(c),r.error(c),n=!0)}})).catch((t=>{e.warn(t),r.error(new e.NluxUsageError({source:this.constructor.name,message:t.message,exceptionId:c(t)??void 0}))}))}}class 
p{constructor(e){e&&(this.theDataTransferMode=e.theDataTransferMode,this.theInputPreProcessor=e.theInputPreProcessor,this.theOutputPreProcessor=e.theOutputPreProcessor,this.theUrl=e.theUrl)}create(){if(!this.theUrl)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to create LangServe adapter. URL is missing. Make sure you are calling withUrl() before calling create()."});const t={url:this.theUrl,dataTransferMode:this.theDataTransferMode,inputPreProcessor:this.theInputPreProcessor,outputPreProcessor:this.theOutputPreProcessor,useInputSchema:this.theUseInputSchema};return"stream"===r(t)?new h(t):new a(t)}withDataTransferMode(t){if(void 0!==this.theDataTransferMode)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=t,this}withInputPreProcessor(t){if(void 0!==this.theInputPreProcessor)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the input pre-processor option more than once"});return this.theInputPreProcessor=t,this}withInputSchema(t){if(void 0!==this.theUseInputSchema)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the input schema option more than once"});return this.theUseInputSchema=t,this}withOutputPreProcessor(t){if(void 0!==this.theOutputPreProcessor)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the output pre-processor option more than once"});return this.theOutputPreProcessor=t,this}withUrl(t){if(void 0!==this.theUrl)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the runnable URL option more than once"});return this.theUrl=t,this}}Object.defineProperty(exports,"debug",{enumerable:!0,get:function(){return e.debug}}),exports.createAdapter=()=>new p;
"use strict";var e=require("@nlux/core");const t=e=>{const t=/\/.*\/(invoke|stream)$/g.exec(e);if(!t||t.length<2)return;const r=t[1];return"invoke"===r||"stream"===r?r:void 0},r=r=>{const s=t(r.url),o=r.dataTransferMode,a=s?"stream"===s?"stream":"fetch":void 0;const i=a??r.dataTransferMode??n.defaultDataTransferMode;return o&&a&&o!==a&&e.warnOnce(`The data transfer mode provided to LangServe adapter does not match the LangServe runnable URL action. When you provide a runnable URL that ends with '/${s}', the data transfer mode is automatically set to '${a}' and the 'dataTransferMode' option should not be provided or should be set to '${a}'`),i},s=e=>{const t=e.url;return/\/.*\/(invoke|stream)$/g.test(t)?t.replace(/\/(invoke|stream)$/g,""):t},o=e=>{const o=s(e).replace(/\/$/,""),n=(e=>{const s=e.url,o=t(s);return o||("fetch"===r(e)?"invoke":"stream")})(e);return`${o}/${n}`};class n{constructor(t){this.__instanceId=`${this.info.id}-${e.uid()}`,this.__options={...t},this.theDataTransferModeToUse=r(t),this.theUseInputSchemaOptionToUse="boolean"!=typeof t.useInputSchema||t.useInputSchema,this.theEndpointUrlToUse=o(t),this.theRunnableNameToUse=s(t).replace(/\/$/,"").split("/").pop()||"langserve-runnable",this.theInputSchemaUrlToUse=((e,t)=>{const r=s(e).replace(/\/$/,"");return"input"===t?`${r}/input_schema`:`${r}/output_schema`})(t,"input"),this.init()}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.theDataTransferModeToUse}get endpointUrl(){return this.theEndpointUrlToUse}get id(){return this.__instanceId}get info(){return{id:"langserve-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1},inputFormats:["text"],outputFormats:["text","markdown"]}}get inputPreProcessor(){return this.__options.inputPreProcessor}get inputSchema(){return this.theInputSchemaToUse}get outputPreProcessor(){return this.__options.outputPreProcessor}get runnableName(){return this.theRunnableNameToUse}get 
status(){return"idle"}get useInputSchema(){return this.theUseInputSchemaOptionToUse}get inputSchemaUrl(){return this.theInputSchemaUrlToUse}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}async fetchSchema(t){try{const r=await fetch(t),s=await r.json();return"object"==typeof s&&s?s:void e.warn(`LangServe adapter is unable process schema loaded from: ${t}`)}catch(r){return void e.warn(`LangServe adapter is unable to fetch schema from: ${t}`)}}init(){!this.inputPreProcessor&&this.useInputSchema&&this.fetchSchema(this.inputSchemaUrl).then((e=>{this.theInputSchemaToUse=e}))}getDisplayableMessageFromAiOutput(t){if(this.outputPreProcessor)return this.outputPreProcessor(t);if("string"==typeof t)return t;const r=t;if("object"==typeof r&&r&&"string"==typeof r.content)return r.content;e.warn(`LangServe adapter is unable to process output returned from the endpoint:\n ${JSON.stringify(t)}`)}getRequestBody(t){if(this.inputPreProcessor){const e=this.inputPreProcessor(t);return JSON.stringify({input:e})}if(this.inputSchema){const r=((t,r,s)=>{if(!r||"object"!=typeof r.properties)return t;if("object"!=typeof r||!r)return e.warn(`LangServer adapter cannot process the input schema fetched for runnable "${s}". The user message will be sent to LangServe endpoint as is without transformations. 
To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string as input.`),t;if("string"===r.type)return t;if("object"===r.type){const s="object"==typeof r.properties&&r.properties?r.properties:{},o=Object.keys(s).filter((e=>e&&"string"==typeof r.properties[e].type)).map((e=>e));if(1===o.length)return{[o[0]]:t};e.warn('LangServer adapter cannot find a valid property to match to user input inside the "${runnableName}" input schema. The user message will be sent to LangServe endpoint as is without transformations. To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string accepted as part of input schema.')}})(t,this.inputSchema,this.runnableName);if(void 0!==r)return JSON.stringify({input:r})}return JSON.stringify({input:t})}}n.defaultDataTransferMode="stream";class a extends n{constructor(e){super(e)}async fetchText(e){const t=this.getRequestBody(e),r=await fetch(this.endpointUrl,{method:"POST",body:t});if(!r.ok)throw new Error(`LangServe runnable returned status code: ${r.status}`);const s=await r.json();if("object"!=typeof s||!s||void 0===s.output)throw new Error('Invalid response from LangServe runnable: Response is not an object or does not contain an "output" property');const o="object"==typeof s&&s?s.output:void 0;return this.getDisplayableMessageFromAiOutput(o)??""}streamText(t,r){throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const i=t=>{const r=/^event:\s+(?<event>[\w]+)(\n(\r?)data: 
(?<data>(.|\n)*))?/gm.exec(t);if(!r)return;const{event:s,data:o}=r.groups||{};if(s&&("data"===s||"end"===s))try{return{event:s,data:o?JSON.parse(o):void 0}}catch(t){return e.warn(`LangServe stream adapter failed to parse data for chunk event "${s}" | Data: ${o}`),{event:s,data:void 0}}},u=e=>{if(!e)return[];const t=/(((?<=^)|(?<=\n))event:\s+(\w+))/g,r=[];let s=t.exec(e);for(;s;)r.push(s.index),s=t.exec(e);const o=(t,s)=>{const o=r[s+1]||e.length;return e.substring(t,o)};try{return r.map(o).map(i).filter((e=>void 0!==e)).map((e=>e))}catch(e){return e instanceof Error?e:[]}},c=e=>"object"==typeof e&&null!==e&&e.message?.toLowerCase().includes("connection error")?"NX-NT-001":null;class h extends n{constructor(e){super(e)}async fetchText(t){throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot fetch text using the stream adapter!"})}streamText(t,r){const s=this.getRequestBody(t);fetch(this.endpointUrl,{method:"POST",headers:{"Content-Type":"application/json"},body:s}).then((async t=>{if(!t.ok)throw new e.NluxError({source:this.constructor.name,message:`LangServe runnable returned status code: ${t.status}`});if(!t.body)throw new e.NluxError({source:this.constructor.name,message:`LangServe runnable returned status code: ${t.status}`});const s=t.body.getReader(),o=new TextDecoder;let n=!1;for(;!n;){const{value:t,done:a}=await s.read();if(a){n=!0;continue}const i=o.decode(t),c=u(i);if(Array.isArray(c))for(const e of c){if("data"===e.event&&void 0!==e.data){const t=this.getDisplayableMessageFromAiOutput(e.data);"string"==typeof t&&t&&r.next(t)}if("end"===e.event){r.complete(),n=!0;break}}c instanceof Error&&(e.warn(c),r.error(c),n=!0)}})).catch((t=>{e.warn(t),r.error(new e.NluxUsageError({source:this.constructor.name,message:t.message,exceptionId:c(t)??void 0}))}))}}class 
p{constructor(e){e&&(this.theDataTransferMode=e.theDataTransferMode,this.theInputPreProcessor=e.theInputPreProcessor,this.theOutputPreProcessor=e.theOutputPreProcessor,this.theUrl=e.theUrl)}create(){if(!this.theUrl)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to create LangServe adapter. URL is missing. Make sure you are calling withUrl() before calling create()."});const t={url:this.theUrl,dataTransferMode:this.theDataTransferMode,inputPreProcessor:this.theInputPreProcessor,outputPreProcessor:this.theOutputPreProcessor,useInputSchema:this.theUseInputSchema};return"stream"===r(t)?new h(t):new a(t)}withDataTransferMode(t){if(void 0!==this.theDataTransferMode)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=t,this}withInputPreProcessor(t){if(void 0!==this.theInputPreProcessor)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the input pre-processor option more than once"});return this.theInputPreProcessor=t,this}withInputSchema(t){if(void 0!==this.theUseInputSchema)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the input schema option more than once"});return this.theUseInputSchema=t,this}withOutputPreProcessor(t){if(void 0!==this.theOutputPreProcessor)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the output pre-processor option more than once"});return this.theOutputPreProcessor=t,this}withUrl(t){if(void 0!==this.theUrl)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the runnable URL option more than once"});return this.theUrl=t,this}}Object.defineProperty(exports,"debug",{enumerable:!0,get:function(){return e.debug}}),exports.createAdapter=()=>new p;

@@ -1,1 +0,1 @@

import{warnOnce as e,warn as t,uid as r,NluxUsageError as o,NluxError as s}from"@nlux/core";export{debug}from"@nlux/core";const n=e=>{const t=/\/.*\/(invoke|stream)$/g.exec(e);if(!t||t.length<2)return;const r=t[1];return"invoke"===r||"stream"===r?r:void 0},a=t=>{const r=n(t.url),o=t.dataTransferMode,s=r?"stream"===r?"stream":"fetch":void 0;const a=s??t.dataTransferMode??c.defaultDataTransferMode;return o&&s&&o!==s&&e(`The data transfer mode provided to LangServe adapter does not match the LangServe runnable URL action. When you provide a runnable URL that ends with '/${r}', the data transfer mode is automatically set to '${s}' and the 'dataTransferMode' option should not be provided or should be set to '${s}'`),a},i=e=>{const t=e.url;return/\/.*\/(invoke|stream)$/g.test(t)?t.replace(/\/(invoke|stream)$/g,""):t},u=e=>{const t=i(e).replace(/\/$/,""),r=(e=>{const t=e.url,r=n(t);return r||("fetch"===a(e)?"invoke":"stream")})(e);return`${t}/${r}`};class c{constructor(e){this.__instanceId=`${this.info.id}-${r()}`,this.__options={...e},this.theDataTransferModeToUse=a(e),this.theUseInputSchemaOptionToUse="boolean"!=typeof e.useInputSchema||e.useInputSchema,this.theEndpointUrlToUse=u(e),this.theRunnableNameToUse=i(e).replace(/\/$/,"").split("/").pop()||"langserve-runnable",this.theInputSchemaUrlToUse=((e,t)=>{const r=i(e).replace(/\/$/,"");return"input"===t?`${r}/input_schema`:`${r}/output_schema`})(e,"input"),this.init()}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.theDataTransferModeToUse}get endpointUrl(){return this.theEndpointUrlToUse}get id(){return this.__instanceId}get info(){return{id:"langserve-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1},inputFormats:["text"],outputFormats:["text","markdown"]}}get inputPreProcessor(){return this.__options.inputPreProcessor}get inputSchema(){return this.theInputSchemaToUse}get outputPreProcessor(){return 
this.__options.outputPreProcessor}get runnableName(){return this.theRunnableNameToUse}get status(){return"idle"}get useInputSchema(){return this.theUseInputSchemaOptionToUse}get inputSchemaUrl(){return this.theInputSchemaUrlToUse}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}async fetchSchema(e){try{const r=await fetch(e),o=await r.json();return"object"==typeof o&&o?o:void t(`LangServe adapter is unable process schema loaded from: ${e}`)}catch(r){return void t(`LangServe adapter is unable to fetch schema from: ${e}`)}}init(){!this.inputPreProcessor&&this.useInputSchema&&this.fetchSchema(this.inputSchemaUrl).then((e=>{this.theInputSchemaToUse=e}))}getDisplayableMessageFromAiOutput(e){if(this.outputPreProcessor)return this.outputPreProcessor(e);if("string"==typeof e)return e;const r=e;if("object"==typeof r&&r&&"string"==typeof r.content)return r.content;t(`LangServe adapter is unable to process output returned from the endpoint:\n ${JSON.stringify(e)}`)}getRequestBody(e){if(this.inputPreProcessor){const t=this.inputPreProcessor(e);return JSON.stringify({input:t})}if(this.inputSchema){const r=((e,r,o)=>{if(!r||"object"!=typeof r.properties)return e;if("object"!=typeof r||!r)return t(`LangServer adapter cannot process the input schema fetched for runnable "${o}". The user message will be sent to LangServe endpoint as is without transformations. 
To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string as input.`),e;if("string"===r.type)return e;if("object"===r.type){const o="object"==typeof r.properties&&r.properties?r.properties:{},s=Object.keys(o).filter((e=>e&&"string"==typeof r.properties[e].type)).map((e=>e));if(1===s.length)return{[s[0]]:e};t('LangServer adapter cannot find a valid property to match to user input inside the "${runnableName}" input schema. The user message will be sent to LangServe endpoint as is without transformations. To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string accepted as part of input schema.')}})(e,this.inputSchema,this.runnableName);if(void 0!==r)return JSON.stringify({input:r})}return JSON.stringify({input:e})}}c.defaultDataTransferMode="fetch";class h extends c{constructor(e){super(e)}async fetchText(e){const t=this.getRequestBody(e),r=await fetch(this.endpointUrl,{method:"POST",body:t});if(!r.ok)throw new Error(`LangServe runnable returned status code: ${r.status}`);const o=await r.json();if("object"!=typeof o||!o||void 0===o.output)throw new Error('Invalid response from LangServe runnable: Response is not an object or does not contain an "output" property');const s="object"==typeof o&&o?o.output:void 0;return this.getDisplayableMessageFromAiOutput(s)??""}streamText(e,t){throw new o({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const p=e=>{const r=/^event:\s+(?<event>[\w]+)(\n(\r?)data: 
(?<data>(.|\n)*))?/gm.exec(e);if(!r)return;const{event:o,data:s}=r.groups||{};if(o&&("data"===o||"end"===o))try{return{event:o,data:s?JSON.parse(s):void 0}}catch(e){return t(`LangServe stream adapter failed to parse data for chunk event "${o}" | Data: ${s}`),{event:o,data:void 0}}},d=e=>{if(!e)return[];const t=/(((?<=^)|(?<=\n))event:\s+(\w+))/g,r=[];let o=t.exec(e);for(;o;)r.push(o.index),o=t.exec(e);const s=(t,o)=>{const s=r[o+1]||e.length;return e.substring(t,s)};try{return r.map(s).map(p).filter((e=>void 0!==e)).map((e=>e))}catch(e){return e instanceof Error?e:[]}},m=e=>"object"==typeof e&&null!==e&&e.message?.toLowerCase().includes("connection error")?"NX-NT-001":null;class f extends c{constructor(e){super(e)}async fetchText(e){throw new o({source:this.constructor.name,message:"Cannot fetch text using the stream adapter!"})}streamText(e,r){const n=this.getRequestBody(e);fetch(this.endpointUrl,{method:"POST",headers:{"Content-Type":"application/json"},body:n}).then((async e=>{if(!e.ok)throw new s({source:this.constructor.name,message:`LangServe runnable returned status code: ${e.status}`});if(!e.body)throw new s({source:this.constructor.name,message:`LangServe runnable returned status code: ${e.status}`});const o=e.body.getReader(),n=new TextDecoder;let a=!1;for(;!a;){const{value:e,done:s}=await o.read();if(s){a=!0;continue}const i=n.decode(e),u=d(i);if(Array.isArray(u))for(const e of u){if("data"===e.event&&void 0!==e.data){const t=this.getDisplayableMessageFromAiOutput(e.data);"string"==typeof t&&t&&r.next(t)}if("end"===e.event){r.complete(),a=!0;break}}u instanceof Error&&(t(u),r.error(u),a=!0)}})).catch((e=>{t(e),r.error(new o({source:this.constructor.name,message:e.message,exceptionId:m(e)??void 0}))}))}}class l{constructor(e){e&&(this.theDataTransferMode=e.theDataTransferMode,this.theInputPreProcessor=e.theInputPreProcessor,this.theOutputPreProcessor=e.theOutputPreProcessor,this.theUrl=e.theUrl)}create(){if(!this.theUrl)throw new 
o({source:this.constructor.name,message:"Unable to create LangServe adapter. URL is missing. Make sure you are calling withUrl() before calling create()."});const e={url:this.theUrl,dataTransferMode:this.theDataTransferMode,inputPreProcessor:this.theInputPreProcessor,outputPreProcessor:this.theOutputPreProcessor,useInputSchema:this.theUseInputSchema};return"stream"===a(e)?new f(e):new h(e)}withDataTransferMode(e){if(void 0!==this.theDataTransferMode)throw new o({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this}withInputPreProcessor(e){if(void 0!==this.theInputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the input pre-processor option more than once"});return this.theInputPreProcessor=e,this}withInputSchema(e){if(void 0!==this.theUseInputSchema)throw new o({source:this.constructor.name,message:"Cannot set the input schema option more than once"});return this.theUseInputSchema=e,this}withOutputPreProcessor(e){if(void 0!==this.theOutputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the output pre-processor option more than once"});return this.theOutputPreProcessor=e,this}withUrl(e){if(void 0!==this.theUrl)throw new o({source:this.constructor.name,message:"Cannot set the runnable URL option more than once"});return this.theUrl=e,this}}const g=()=>new l;export{g as createAdapter};
import{warnOnce as e,warn as t,uid as r,NluxUsageError as o,NluxError as s}from"@nlux/core";export{debug}from"@nlux/core";const n=e=>{const t=/\/.*\/(invoke|stream)$/g.exec(e);if(!t||t.length<2)return;const r=t[1];return"invoke"===r||"stream"===r?r:void 0},a=t=>{const r=n(t.url),o=t.dataTransferMode,s=r?"stream"===r?"stream":"fetch":void 0;const a=s??t.dataTransferMode??c.defaultDataTransferMode;return o&&s&&o!==s&&e(`The data transfer mode provided to LangServe adapter does not match the LangServe runnable URL action. When you provide a runnable URL that ends with '/${r}', the data transfer mode is automatically set to '${s}' and the 'dataTransferMode' option should not be provided or should be set to '${s}'`),a},i=e=>{const t=e.url;return/\/.*\/(invoke|stream)$/g.test(t)?t.replace(/\/(invoke|stream)$/g,""):t},u=e=>{const t=i(e).replace(/\/$/,""),r=(e=>{const t=e.url,r=n(t);return r||("fetch"===a(e)?"invoke":"stream")})(e);return`${t}/${r}`};class c{constructor(e){this.__instanceId=`${this.info.id}-${r()}`,this.__options={...e},this.theDataTransferModeToUse=a(e),this.theUseInputSchemaOptionToUse="boolean"!=typeof e.useInputSchema||e.useInputSchema,this.theEndpointUrlToUse=u(e),this.theRunnableNameToUse=i(e).replace(/\/$/,"").split("/").pop()||"langserve-runnable",this.theInputSchemaUrlToUse=((e,t)=>{const r=i(e).replace(/\/$/,"");return"input"===t?`${r}/input_schema`:`${r}/output_schema`})(e,"input"),this.init()}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.theDataTransferModeToUse}get endpointUrl(){return this.theEndpointUrlToUse}get id(){return this.__instanceId}get info(){return{id:"langserve-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1},inputFormats:["text"],outputFormats:["text","markdown"]}}get inputPreProcessor(){return this.__options.inputPreProcessor}get inputSchema(){return this.theInputSchemaToUse}get outputPreProcessor(){return 
this.__options.outputPreProcessor}get runnableName(){return this.theRunnableNameToUse}get status(){return"idle"}get useInputSchema(){return this.theUseInputSchemaOptionToUse}get inputSchemaUrl(){return this.theInputSchemaUrlToUse}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}async fetchSchema(e){try{const r=await fetch(e),o=await r.json();return"object"==typeof o&&o?o:void t(`LangServe adapter is unable process schema loaded from: ${e}`)}catch(r){return void t(`LangServe adapter is unable to fetch schema from: ${e}`)}}init(){!this.inputPreProcessor&&this.useInputSchema&&this.fetchSchema(this.inputSchemaUrl).then((e=>{this.theInputSchemaToUse=e}))}getDisplayableMessageFromAiOutput(e){if(this.outputPreProcessor)return this.outputPreProcessor(e);if("string"==typeof e)return e;const r=e;if("object"==typeof r&&r&&"string"==typeof r.content)return r.content;t(`LangServe adapter is unable to process output returned from the endpoint:\n ${JSON.stringify(e)}`)}getRequestBody(e){if(this.inputPreProcessor){const t=this.inputPreProcessor(e);return JSON.stringify({input:t})}if(this.inputSchema){const r=((e,r,o)=>{if(!r||"object"!=typeof r.properties)return e;if("object"!=typeof r||!r)return t(`LangServer adapter cannot process the input schema fetched for runnable "${o}". The user message will be sent to LangServe endpoint as is without transformations. 
To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string as input.`),e;if("string"===r.type)return e;if("object"===r.type){const o="object"==typeof r.properties&&r.properties?r.properties:{},s=Object.keys(o).filter((e=>e&&"string"==typeof r.properties[e].type)).map((e=>e));if(1===s.length)return{[s[0]]:e};t('LangServer adapter cannot find a valid property to match to user input inside the "${runnableName}" input schema. The user message will be sent to LangServe endpoint as is without transformations. To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string accepted as part of input schema.')}})(e,this.inputSchema,this.runnableName);if(void 0!==r)return JSON.stringify({input:r})}return JSON.stringify({input:e})}}c.defaultDataTransferMode="stream";class h extends c{constructor(e){super(e)}async fetchText(e){const t=this.getRequestBody(e),r=await fetch(this.endpointUrl,{method:"POST",body:t});if(!r.ok)throw new Error(`LangServe runnable returned status code: ${r.status}`);const o=await r.json();if("object"!=typeof o||!o||void 0===o.output)throw new Error('Invalid response from LangServe runnable: Response is not an object or does not contain an "output" property');const s="object"==typeof o&&o?o.output:void 0;return this.getDisplayableMessageFromAiOutput(s)??""}streamText(e,t){throw new o({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const p=e=>{const r=/^event:\s+(?<event>[\w]+)(\n(\r?)data: 
(?<data>(.|\n)*))?/gm.exec(e);if(!r)return;const{event:o,data:s}=r.groups||{};if(o&&("data"===o||"end"===o))try{return{event:o,data:s?JSON.parse(s):void 0}}catch(e){return t(`LangServe stream adapter failed to parse data for chunk event "${o}" | Data: ${s}`),{event:o,data:void 0}}},d=e=>{if(!e)return[];const t=/(((?<=^)|(?<=\n))event:\s+(\w+))/g,r=[];let o=t.exec(e);for(;o;)r.push(o.index),o=t.exec(e);const s=(t,o)=>{const s=r[o+1]||e.length;return e.substring(t,s)};try{return r.map(s).map(p).filter((e=>void 0!==e)).map((e=>e))}catch(e){return e instanceof Error?e:[]}},m=e=>"object"==typeof e&&null!==e&&e.message?.toLowerCase().includes("connection error")?"NX-NT-001":null;class f extends c{constructor(e){super(e)}async fetchText(e){throw new o({source:this.constructor.name,message:"Cannot fetch text using the stream adapter!"})}streamText(e,r){const n=this.getRequestBody(e);fetch(this.endpointUrl,{method:"POST",headers:{"Content-Type":"application/json"},body:n}).then((async e=>{if(!e.ok)throw new s({source:this.constructor.name,message:`LangServe runnable returned status code: ${e.status}`});if(!e.body)throw new s({source:this.constructor.name,message:`LangServe runnable returned status code: ${e.status}`});const o=e.body.getReader(),n=new TextDecoder;let a=!1;for(;!a;){const{value:e,done:s}=await o.read();if(s){a=!0;continue}const i=n.decode(e),u=d(i);if(Array.isArray(u))for(const e of u){if("data"===e.event&&void 0!==e.data){const t=this.getDisplayableMessageFromAiOutput(e.data);"string"==typeof t&&t&&r.next(t)}if("end"===e.event){r.complete(),a=!0;break}}u instanceof Error&&(t(u),r.error(u),a=!0)}})).catch((e=>{t(e),r.error(new o({source:this.constructor.name,message:e.message,exceptionId:m(e)??void 0}))}))}}class l{constructor(e){e&&(this.theDataTransferMode=e.theDataTransferMode,this.theInputPreProcessor=e.theInputPreProcessor,this.theOutputPreProcessor=e.theOutputPreProcessor,this.theUrl=e.theUrl)}create(){if(!this.theUrl)throw new 
o({source:this.constructor.name,message:"Unable to create LangServe adapter. URL is missing. Make sure you are calling withUrl() before calling create()."});const e={url:this.theUrl,dataTransferMode:this.theDataTransferMode,inputPreProcessor:this.theInputPreProcessor,outputPreProcessor:this.theOutputPreProcessor,useInputSchema:this.theUseInputSchema};return"stream"===a(e)?new f(e):new h(e)}withDataTransferMode(e){if(void 0!==this.theDataTransferMode)throw new o({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this}withInputPreProcessor(e){if(void 0!==this.theInputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the input pre-processor option more than once"});return this.theInputPreProcessor=e,this}withInputSchema(e){if(void 0!==this.theUseInputSchema)throw new o({source:this.constructor.name,message:"Cannot set the input schema option more than once"});return this.theUseInputSchema=e,this}withOutputPreProcessor(e){if(void 0!==this.theOutputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the output pre-processor option more than once"});return this.theOutputPreProcessor=e,this}withUrl(e){if(void 0!==this.theUrl)throw new o({source:this.constructor.name,message:"Cannot set the runnable URL option more than once"});return this.theUrl=e,this}}const g=()=>new l;export{g as createAdapter};

@@ -62,3 +62,3 @@ import { DataTransferMode, AdapterBuilder, StandardAdapter } from '@nlux/core';

* to perform (either `/invoke` or `/stream`). If the action is not provided, the default
* data transfer mode will be `fetch`. If the action is provided, the data transfer mode
* data transfer mode will be `stream`. If the action is provided, the data transfer mode
* should match the action (either `fetch` mode for `/invoke` or `stream` mode for `/stream`).

@@ -65,0 +65,0 @@ */

{
"name": "@nlux/langchain",
"version": "0.10.1",
"version": "0.10.2",
"description": "The LangChain adapters for NLUX, the javascript library for building conversational AI interfaces.",

@@ -57,3 +57,3 @@ "keywords": [

"dependencies": {
"@nlux/core": "0.10.1"
"@nlux/core": "0.10.2"
},

@@ -60,0 +60,0 @@ "peerDependencies": {},

@@ -1,7 +0,36 @@

# NLUX LangChain / LangServe Adapter
# NLUX JS LangChain Adapter
This package includes integration for LangChain.
More specifically, it has the adapter to connect to backends built
using [LangServe](https://python.langserve.com/docs/langchain).
This package enables the integration between NLUX and LangChain, the LLM framework.
More specifically ― the package includes the adapter to connect **NLUX JS** to backends built
using [LangServe](https://python.langchain.com/docs/langserve).
Please check the [@nlux/core](https://www.npmjs.com/package/@nlux/core) package for more information about NLUX.
For more information on how to use this package, please visit:
[docs.nlux.ai/api/adapters/langchain-langserve](https://docs.nlux.ai/api/adapters/langchain-langserve)
### Vanilla JS 🟨 vs React JS ⚛️
This package `@nlux/langchain` is meant for use with the vanilla JS version of NLUX.
If you're looking for the React JS version, please check
the [`@nlux/langchain-react`](https://www.npmjs.com/package/@nlux/langchain-react) package.
## About NLUX
NLUX _(for Natural Language User Experience)_ is an open-source JavaScript library that makes it simple to integrate
powerful large language models (LLMs) like ChatGPT into your web app or website. With just a few lines of code, you
can add conversational AI capabilities and interact with your favourite LLM.
### Key Features 🌟
* **Build AI Chat Interfaces In Minutes** ― High quality conversational AI interfaces with just a few lines of code.
* **React Components & Hooks** ― `<AiChat />` for UI and `useAdapter` hook for easy integration.
* **LLM Adapters** ― For `ChatGPT` / `LangChain` 🦜 LangServe / `HuggingFace` 🤗 Inference.
* A flexible interface to **Create Your Own Adapter** for any LLM or API.
* **Bot and User Personas** ― Customize the bot and user personas with names, images, and more.
* **Streaming LLM Output** ― Streams the chat response to the UI as it's being generated.
* **Customizable Theme** - Easily customize the look and feel of the chat interface using CSS variables.
* **Event Listeners** - Listen to messages, errors, and other events to customize the UI and behaviour.
* **Zero Dependencies** ― Lightweight codebase, with zero dependencies except for LLM front-end libraries.
### Docs & Examples 📖
For developer documentation, examples, and API reference ― you can visit: **[NLUX.ai](https://nlux.ai/)**

@@ -1,1 +0,1 @@

!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@nlux/core")):"function"==typeof define&&define.amd?define(["exports","@nlux/core"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/langchain"]={},e.core)}(this,(function(e,t){"use strict";const r=e=>{const t=/\/.*\/(invoke|stream)$/g.exec(e);if(!t||t.length<2)return;const r=t[1];return"invoke"===r||"stream"===r?r:void 0},o=e=>{const o=r(e.url),n=e.dataTransferMode,s=o?"stream"===o?"stream":"fetch":void 0;const i=s??e.dataTransferMode??a.defaultDataTransferMode;return n&&s&&n!==s&&t.warnOnce(`The data transfer mode provided to LangServe adapter does not match the LangServe runnable URL action. When you provide a runnable URL that ends with '/${o}', the data transfer mode is automatically set to '${s}' and the 'dataTransferMode' option should not be provided or should be set to '${s}'`),i},n=e=>{const t=e.url;return/\/.*\/(invoke|stream)$/g.test(t)?t.replace(/\/(invoke|stream)$/g,""):t},s=e=>{const t=n(e).replace(/\/$/,""),s=(e=>{const t=e.url,n=r(t);return n||("fetch"===o(e)?"invoke":"stream")})(e);return`${t}/${s}`};class a{constructor(e){this.__instanceId=`${this.info.id}-${t.uid()}`,this.__options={...e},this.theDataTransferModeToUse=o(e),this.theUseInputSchemaOptionToUse="boolean"!=typeof e.useInputSchema||e.useInputSchema,this.theEndpointUrlToUse=s(e),this.theRunnableNameToUse=n(e).replace(/\/$/,"").split("/").pop()||"langserve-runnable",this.theInputSchemaUrlToUse=((e,t)=>{const r=n(e).replace(/\/$/,"");return"input"===t?`${r}/input_schema`:`${r}/output_schema`})(e,"input"),this.init()}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.theDataTransferModeToUse}get endpointUrl(){return this.theEndpointUrlToUse}get id(){return this.__instanceId}get 
info(){return{id:"langserve-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1},inputFormats:["text"],outputFormats:["text","markdown"]}}get inputPreProcessor(){return this.__options.inputPreProcessor}get inputSchema(){return this.theInputSchemaToUse}get outputPreProcessor(){return this.__options.outputPreProcessor}get runnableName(){return this.theRunnableNameToUse}get status(){return"idle"}get useInputSchema(){return this.theUseInputSchemaOptionToUse}get inputSchemaUrl(){return this.theInputSchemaUrlToUse}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}async fetchSchema(e){try{const r=await fetch(e),o=await r.json();return"object"==typeof o&&o?o:void t.warn(`LangServe adapter is unable process schema loaded from: ${e}`)}catch(r){return void t.warn(`LangServe adapter is unable to fetch schema from: ${e}`)}}init(){!this.inputPreProcessor&&this.useInputSchema&&this.fetchSchema(this.inputSchemaUrl).then((e=>{this.theInputSchemaToUse=e}))}getDisplayableMessageFromAiOutput(e){if(this.outputPreProcessor)return this.outputPreProcessor(e);if("string"==typeof e)return e;const r=e;if("object"==typeof r&&r&&"string"==typeof r.content)return r.content;t.warn(`LangServe adapter is unable to process output returned from the endpoint:\n ${JSON.stringify(e)}`)}getRequestBody(e){if(this.inputPreProcessor){const t=this.inputPreProcessor(e);return JSON.stringify({input:t})}if(this.inputSchema){const r=((e,r,o)=>{if(!r||"object"!=typeof r.properties)return e;if("object"!=typeof r||!r)return t.warn(`LangServer adapter cannot process the input schema fetched for runnable "${o}". The user message will be sent to LangServe endpoint as is without transformations. 
To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string as input.`),e;if("string"===r.type)return e;if("object"===r.type){const o="object"==typeof r.properties&&r.properties?r.properties:{},n=Object.keys(o).filter((e=>e&&"string"==typeof r.properties[e].type)).map((e=>e));if(1===n.length)return{[n[0]]:e};t.warn('LangServer adapter cannot find a valid property to match to user input inside the "${runnableName}" input schema. The user message will be sent to LangServe endpoint as is without transformations. To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string accepted as part of input schema.')}})(e,this.inputSchema,this.runnableName);if(void 0!==r)return JSON.stringify({input:r})}return JSON.stringify({input:e})}}a.defaultDataTransferMode="fetch";class i extends a{constructor(e){super(e)}async fetchText(e){const t=this.getRequestBody(e),r=await fetch(this.endpointUrl,{method:"POST",body:t});if(!r.ok)throw new Error(`LangServe runnable returned status code: ${r.status}`);const o=await r.json();if("object"!=typeof o||!o||void 0===o.output)throw new Error('Invalid response from LangServe runnable: Response is not an object or does not contain an "output" property');const n="object"==typeof o&&o?o.output:void 0;return this.getDisplayableMessageFromAiOutput(n)??""}streamText(e,r){throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const u=e=>{const r=/^event:\s+(?<event>[\w]+)(\n(\r?)data: 
(?<data>(.|\n)*))?/gm.exec(e);if(!r)return;const{event:o,data:n}=r.groups||{};if(o&&("data"===o||"end"===o))try{return{event:o,data:n?JSON.parse(n):void 0}}catch(e){return t.warn(`LangServe stream adapter failed to parse data for chunk event "${o}" | Data: ${n}`),{event:o,data:void 0}}},c=e=>{if(!e)return[];const t=/(((?<=^)|(?<=\n))event:\s+(\w+))/g,r=[];let o=t.exec(e);for(;o;)r.push(o.index),o=t.exec(e);const n=(t,o)=>{const n=r[o+1]||e.length;return e.substring(t,n)};try{return r.map(n).map(u).filter((e=>void 0!==e)).map((e=>e))}catch(e){return e instanceof Error?e:[]}},h=e=>"object"==typeof e&&null!==e&&e.message?.toLowerCase().includes("connection error")?"NX-NT-001":null;class p extends a{constructor(e){super(e)}async fetchText(e){throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot fetch text using the stream adapter!"})}streamText(e,r){const o=this.getRequestBody(e);fetch(this.endpointUrl,{method:"POST",headers:{"Content-Type":"application/json"},body:o}).then((async e=>{if(!e.ok)throw new t.NluxError({source:this.constructor.name,message:`LangServe runnable returned status code: ${e.status}`});if(!e.body)throw new t.NluxError({source:this.constructor.name,message:`LangServe runnable returned status code: ${e.status}`});const o=e.body.getReader(),n=new TextDecoder;let s=!1;for(;!s;){const{value:e,done:a}=await o.read();if(a){s=!0;continue}const i=n.decode(e),u=c(i);if(Array.isArray(u))for(const e of u){if("data"===e.event&&void 0!==e.data){const t=this.getDisplayableMessageFromAiOutput(e.data);"string"==typeof t&&t&&r.next(t)}if("end"===e.event){r.complete(),s=!0;break}}u instanceof Error&&(t.warn(u),r.error(u),s=!0)}})).catch((e=>{t.warn(e),r.error(new t.NluxUsageError({source:this.constructor.name,message:e.message,exceptionId:h(e)??void 0}))}))}}class 
d{constructor(e){e&&(this.theDataTransferMode=e.theDataTransferMode,this.theInputPreProcessor=e.theInputPreProcessor,this.theOutputPreProcessor=e.theOutputPreProcessor,this.theUrl=e.theUrl)}create(){if(!this.theUrl)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to create LangServe adapter. URL is missing. Make sure you are calling withUrl() before calling create()."});const e={url:this.theUrl,dataTransferMode:this.theDataTransferMode,inputPreProcessor:this.theInputPreProcessor,outputPreProcessor:this.theOutputPreProcessor,useInputSchema:this.theUseInputSchema};return"stream"===o(e)?new p(e):new i(e)}withDataTransferMode(e){if(void 0!==this.theDataTransferMode)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this}withInputPreProcessor(e){if(void 0!==this.theInputPreProcessor)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the input pre-processor option more than once"});return this.theInputPreProcessor=e,this}withInputSchema(e){if(void 0!==this.theUseInputSchema)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the input schema option more than once"});return this.theUseInputSchema=e,this}withOutputPreProcessor(e){if(void 0!==this.theOutputPreProcessor)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the output pre-processor option more than once"});return this.theOutputPreProcessor=e,this}withUrl(e){if(void 0!==this.theUrl)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the runnable URL option more than once"});return this.theUrl=e,this}}Object.defineProperty(e,"debug",{enumerable:!0,get:function(){return t.debug}}),e.createAdapter=()=>new d}));
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@nlux/core")):"function"==typeof define&&define.amd?define(["exports","@nlux/core"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/langchain"]={},e.core)}(this,(function(e,t){"use strict";const r=e=>{const t=/\/.*\/(invoke|stream)$/g.exec(e);if(!t||t.length<2)return;const r=t[1];return"invoke"===r||"stream"===r?r:void 0},o=e=>{const o=r(e.url),n=e.dataTransferMode,s=o?"stream"===o?"stream":"fetch":void 0;const i=s??e.dataTransferMode??a.defaultDataTransferMode;return n&&s&&n!==s&&t.warnOnce(`The data transfer mode provided to LangServe adapter does not match the LangServe runnable URL action. When you provide a runnable URL that ends with '/${o}', the data transfer mode is automatically set to '${s}' and the 'dataTransferMode' option should not be provided or should be set to '${s}'`),i},n=e=>{const t=e.url;return/\/.*\/(invoke|stream)$/g.test(t)?t.replace(/\/(invoke|stream)$/g,""):t},s=e=>{const t=n(e).replace(/\/$/,""),s=(e=>{const t=e.url,n=r(t);return n||("fetch"===o(e)?"invoke":"stream")})(e);return`${t}/${s}`};class a{constructor(e){this.__instanceId=`${this.info.id}-${t.uid()}`,this.__options={...e},this.theDataTransferModeToUse=o(e),this.theUseInputSchemaOptionToUse="boolean"!=typeof e.useInputSchema||e.useInputSchema,this.theEndpointUrlToUse=s(e),this.theRunnableNameToUse=n(e).replace(/\/$/,"").split("/").pop()||"langserve-runnable",this.theInputSchemaUrlToUse=((e,t)=>{const r=n(e).replace(/\/$/,"");return"input"===t?`${r}/input_schema`:`${r}/output_schema`})(e,"input"),this.init()}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.theDataTransferModeToUse}get endpointUrl(){return this.theEndpointUrlToUse}get id(){return this.__instanceId}get 
info(){return{id:"langserve-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1},inputFormats:["text"],outputFormats:["text","markdown"]}}get inputPreProcessor(){return this.__options.inputPreProcessor}get inputSchema(){return this.theInputSchemaToUse}get outputPreProcessor(){return this.__options.outputPreProcessor}get runnableName(){return this.theRunnableNameToUse}get status(){return"idle"}get useInputSchema(){return this.theUseInputSchemaOptionToUse}get inputSchemaUrl(){return this.theInputSchemaUrlToUse}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}async fetchSchema(e){try{const r=await fetch(e),o=await r.json();return"object"==typeof o&&o?o:void t.warn(`LangServe adapter is unable process schema loaded from: ${e}`)}catch(r){return void t.warn(`LangServe adapter is unable to fetch schema from: ${e}`)}}init(){!this.inputPreProcessor&&this.useInputSchema&&this.fetchSchema(this.inputSchemaUrl).then((e=>{this.theInputSchemaToUse=e}))}getDisplayableMessageFromAiOutput(e){if(this.outputPreProcessor)return this.outputPreProcessor(e);if("string"==typeof e)return e;const r=e;if("object"==typeof r&&r&&"string"==typeof r.content)return r.content;t.warn(`LangServe adapter is unable to process output returned from the endpoint:\n ${JSON.stringify(e)}`)}getRequestBody(e){if(this.inputPreProcessor){const t=this.inputPreProcessor(e);return JSON.stringify({input:t})}if(this.inputSchema){const r=((e,r,o)=>{if(!r||"object"!=typeof r.properties)return e;if("object"!=typeof r||!r)return t.warn(`LangServer adapter cannot process the input schema fetched for runnable "${o}". The user message will be sent to LangServe endpoint as is without transformations. 
To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string as input.`),e;if("string"===r.type)return e;if("object"===r.type){const o="object"==typeof r.properties&&r.properties?r.properties:{},n=Object.keys(o).filter((e=>e&&"string"==typeof r.properties[e].type)).map((e=>e));if(1===n.length)return{[n[0]]:e};t.warn('LangServer adapter cannot find a valid property to match to user input inside the "${runnableName}" input schema. The user message will be sent to LangServe endpoint as is without transformations. To override this behavior, you can either set the "useInputSchema" option to false, or provide a custom input pre-processor via the "inputPreProcessor" option, or update your endpoint and input schema to have an object with a single string property or a string accepted as part of input schema.')}})(e,this.inputSchema,this.runnableName);if(void 0!==r)return JSON.stringify({input:r})}return JSON.stringify({input:e})}}a.defaultDataTransferMode="stream";class i extends a{constructor(e){super(e)}async fetchText(e){const t=this.getRequestBody(e),r=await fetch(this.endpointUrl,{method:"POST",body:t});if(!r.ok)throw new Error(`LangServe runnable returned status code: ${r.status}`);const o=await r.json();if("object"!=typeof o||!o||void 0===o.output)throw new Error('Invalid response from LangServe runnable: Response is not an object or does not contain an "output" property');const n="object"==typeof o&&o?o.output:void 0;return this.getDisplayableMessageFromAiOutput(n)??""}streamText(e,r){throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const u=e=>{const r=/^event:\s+(?<event>[\w]+)(\n(\r?)data: 
(?<data>(.|\n)*))?/gm.exec(e);if(!r)return;const{event:o,data:n}=r.groups||{};if(o&&("data"===o||"end"===o))try{return{event:o,data:n?JSON.parse(n):void 0}}catch(e){return t.warn(`LangServe stream adapter failed to parse data for chunk event "${o}" | Data: ${n}`),{event:o,data:void 0}}},c=e=>{if(!e)return[];const t=/(((?<=^)|(?<=\n))event:\s+(\w+))/g,r=[];let o=t.exec(e);for(;o;)r.push(o.index),o=t.exec(e);const n=(t,o)=>{const n=r[o+1]||e.length;return e.substring(t,n)};try{return r.map(n).map(u).filter((e=>void 0!==e)).map((e=>e))}catch(e){return e instanceof Error?e:[]}},h=e=>"object"==typeof e&&null!==e&&e.message?.toLowerCase().includes("connection error")?"NX-NT-001":null;class p extends a{constructor(e){super(e)}async fetchText(e){throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot fetch text using the stream adapter!"})}streamText(e,r){const o=this.getRequestBody(e);fetch(this.endpointUrl,{method:"POST",headers:{"Content-Type":"application/json"},body:o}).then((async e=>{if(!e.ok)throw new t.NluxError({source:this.constructor.name,message:`LangServe runnable returned status code: ${e.status}`});if(!e.body)throw new t.NluxError({source:this.constructor.name,message:`LangServe runnable returned status code: ${e.status}`});const o=e.body.getReader(),n=new TextDecoder;let s=!1;for(;!s;){const{value:e,done:a}=await o.read();if(a){s=!0;continue}const i=n.decode(e),u=c(i);if(Array.isArray(u))for(const e of u){if("data"===e.event&&void 0!==e.data){const t=this.getDisplayableMessageFromAiOutput(e.data);"string"==typeof t&&t&&r.next(t)}if("end"===e.event){r.complete(),s=!0;break}}u instanceof Error&&(t.warn(u),r.error(u),s=!0)}})).catch((e=>{t.warn(e),r.error(new t.NluxUsageError({source:this.constructor.name,message:e.message,exceptionId:h(e)??void 0}))}))}}class 
d{constructor(e){e&&(this.theDataTransferMode=e.theDataTransferMode,this.theInputPreProcessor=e.theInputPreProcessor,this.theOutputPreProcessor=e.theOutputPreProcessor,this.theUrl=e.theUrl)}create(){if(!this.theUrl)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to create LangServe adapter. URL is missing. Make sure you are calling withUrl() before calling create()."});const e={url:this.theUrl,dataTransferMode:this.theDataTransferMode,inputPreProcessor:this.theInputPreProcessor,outputPreProcessor:this.theOutputPreProcessor,useInputSchema:this.theUseInputSchema};return"stream"===o(e)?new p(e):new i(e)}withDataTransferMode(e){if(void 0!==this.theDataTransferMode)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this}withInputPreProcessor(e){if(void 0!==this.theInputPreProcessor)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the input pre-processor option more than once"});return this.theInputPreProcessor=e,this}withInputSchema(e){if(void 0!==this.theUseInputSchema)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the input schema option more than once"});return this.theUseInputSchema=e,this}withOutputPreProcessor(e){if(void 0!==this.theOutputPreProcessor)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the output pre-processor option more than once"});return this.theOutputPreProcessor=e,this}withUrl(e){if(void 0!==this.theUrl)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the runnable URL option more than once"});return this.theUrl=e,this}}Object.defineProperty(e,"debug",{enumerable:!0,get:function(){return t.debug}}),e.createAdapter=()=>new d}));
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc