New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

@flatfile/ai-agents

Package Overview
Dependencies
Maintainers
16
Versions
23
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@flatfile/ai-agents - npm Package Compare versions

Comparing version 0.1.0-alpha.22 to 0.1.0-alpha.23

24

dist/index.d.ts

@@ -26,3 +26,22 @@ import { ChatAnthropic } from '@langchain/anthropic';

/** Splits `array` into consecutive chunks of at most `size` elements. */
declare function chunkify<T>(array: T[], size: number): T[][];
/** One RFC 6902-style JSON Patch operation; `from` applies to move/copy. */
interface JSONPatchOperation {
op: 'add' | 'remove' | 'replace' | 'move' | 'copy' | 'test';
path: string;
from?: string;
value?: any;
}
/** A plain JSON object that patches are applied to. */
type JSONDocument = Record<string, any>;
/** Applies `patches` to `document` in order, mutating and returning it. */
declare function applyPatch(document: JSONDocument, patches: JSONPatchOperation[]): JSONDocument;
type AnyFunction = (...args: any[]) => any;
/** A memoized wrapper that caches results and exposes `clear()` to empty the cache. */
interface MemoizedFunction<T extends AnyFunction> extends CallableFunction {
(...args: Parameters<T>): ReturnType<T>;
clear: () => void;
}
/** Wraps `fn`, caching results keyed by the JSON-stringified argument list. */
declare function memoize<T extends AnyFunction>(fn: T): MemoizedFunction<T>;
/** Parses the file at `filepath` — presumably one string per parsed record; confirm against implementation. */
declare function parseFiles(filepath: string): Promise<string[]>;
/** Transforms CSV text so empty data is preserved — NOTE(review): exact semantics not visible here; verify. */
declare const preserveEmptyCSVData: (input: string) => string;

@@ -42,2 +61,3 @@ declare function blueprintWorkflow(prompt: string | string[], debug?: boolean): Promise<{

type: "stored";
label: string;
description: string;

@@ -68,2 +88,3 @@ validator: "MUST_REPLACE";

type: "stored";
label: string;
description: string;

@@ -92,2 +113,3 @@ validator: "MUST_REPLACE";

type: "stored";
label: string;
description: string;

@@ -108,2 +130,2 @@ validator: "MUST_REPLACE";

export { ANTHROPIC_MODELS, type AnthropicModels, type ChatModels, type ModelOptions, type Models, OLLAMA_MODELS, OPENAI_MODELS, type OllamaModels, type OpenAIModels, type XAIModels, XAI_MODELS, blueprintWorkflow, fetchModel, generateOutput, generateStructuredOutput, parseFiles };
export { ANTHROPIC_MODELS, type AnthropicModels, type AnyFunction, type ChatModels, type JSONPatchOperation, type MemoizedFunction, type ModelOptions, type Models, OLLAMA_MODELS, OPENAI_MODELS, type OllamaModels, type OpenAIModels, type XAIModels, XAI_MODELS, applyPatch, blueprintWorkflow, chunkify, fetchModel, generateOutput, generateStructuredOutput, memoize, parseFiles, preserveEmptyCSVData };

268

dist/index.js

@@ -113,2 +113,104 @@ // src/llm-sdks/langchain.ts

// src/utils/chunkify.ts

/**
 * Splits `array` into consecutive sub-arrays of at most `size` elements.
 * The final chunk may be shorter when the length is not a multiple of `size`.
 * Throws when `size` is not a positive number.
 */
function chunkify(array, size) {
  if (size <= 0) {
    throw new Error("Size must be greater than 0");
  }
  const chunkCount = Math.ceil(array.length / size);
  return Array.from({ length: chunkCount }, (_, chunkIndex) =>
    array.slice(chunkIndex * size, chunkIndex * size + size)
  );
}
// src/utils/json.patch.ts

/**
 * Resolves a JSON-Pointer-style path (e.g. "/a/b/0") against `document`.
 * Returns undefined when any intermediate segment is missing, instead of
 * throwing a TypeError, so applyPatch can use the result as an existence
 * check (the original `acc[key]` traversal threw on missing parents,
 * defeating that check).
 */
function getValueByPath(document, path2) {
  const keys = path2.split("/").slice(1);
  return keys.reduce((acc, key) => acc == null ? void 0 : acc[key], document);
}

/**
 * Writes `value` at `path2`, supporting the JSON Patch "-" token for
 * appending to an array. Throws a descriptive error when the parent
 * container does not exist (previously a bare TypeError).
 */
function setValueByPath(document, path2, value) {
  const keys = path2.split("/").slice(1);
  const lastKey = keys.pop();
  const target = keys.reduce((acc, key) => acc == null ? void 0 : acc[key], document);
  if (target == null) {
    throw new Error(`Cannot set value at non-existent path: ${path2}`);
  }
  if (Array.isArray(target) && lastKey === "-") {
    target.push(value);
  } else if (lastKey) {
    target[lastKey] = value;
  }
}

/**
 * Removes the value at `path2`. For arrays the final segment must be an
 * integer index — the original `splice(Number(lastKey), 1)` silently
 * removed element 0 when the key was non-numeric (Number(...) === NaN).
 */
function removeValueByPath(document, path2) {
  const keys = path2.split("/").slice(1);
  const lastKey = keys.pop();
  const target = keys.reduce((acc, key) => acc == null ? void 0 : acc[key], document);
  if (target == null || lastKey === void 0) {
    return;
  }
  if (Array.isArray(target)) {
    const index = Number(lastKey);
    if (!Number.isInteger(index)) {
      throw new Error(`Invalid array index "${lastKey}" in path: ${path2}`);
    }
    target.splice(index, 1);
  } else {
    delete target[lastKey];
  }
}

/**
 * Applies RFC 6902-style patch operations to `document` in order,
 * mutating it in place and returning it.
 *
 * Throws for remove/replace/test on a non-existent path, for move/copy
 * without a `from` path, and for unsupported operations.
 */
function applyPatch(document, patches) {
  patches.forEach((patch) => {
    const pathExists = getValueByPath(document, patch.path) !== void 0;
    if (!pathExists && patch.op !== "add" && patch.op !== "move" && patch.op !== "copy") {
      throw new Error(
        `Cannot perform a '${patch.op}' operation on a non-existent path: ${patch.path}`
      );
    }
    switch (patch.op) {
      case "add":
      case "replace":
        setValueByPath(document, patch.path, patch.value);
        break;
      case "remove":
        removeValueByPath(document, patch.path);
        break;
      case "move": {
        if (patch.from === void 0) {
          throw new Error(`'move' operation requires a 'from' path: ${patch.path}`);
        }
        const valueToMove = getValueByPath(document, patch.from);
        removeValueByPath(document, patch.from);
        setValueByPath(document, patch.path, valueToMove);
        break;
      }
      case "copy": {
        if (patch.from === void 0) {
          throw new Error(`'copy' operation requires a 'from' path: ${patch.path}`);
        }
        setValueByPath(document, patch.path, getValueByPath(document, patch.from));
        break;
      }
      case "test": {
        const valueToTest = getValueByPath(document, patch.path);
        if (JSON.stringify(valueToTest) !== JSON.stringify(patch.value)) {
          throw new Error(`Test operation failed at path: ${patch.path}`);
        }
        break;
      }
      default:
        throw new Error(`Unsupported operation: ${patch.op}`);
    }
  });
  return document;
}
// src/utils/memoize.ts

/**
 * Wraps `fn`, caching its results keyed by the JSON-stringified argument
 * list. The wrapper exposes `clear()` to empty the cache and is renamed
 * to `memoized_<fn.name>` for easier debugging.
 *
 * Fix: forward the caller's `this` via fn.apply — the original fn(...args)
 * dropped the receiver, breaking memoized methods.
 *
 * NOTE(review): JSON.stringify keys collide for arguments that serialize
 * identically (e.g. undefined vs null in some positions, functions) —
 * acceptable for plain-data arguments, confirm callers.
 */
function memoize(fn) {
  const cache = /* @__PURE__ */ new Map();
  function memoizedFn(...args) {
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      return cache.get(key);
    }
    // Preserve the receiver so memoized methods keep working.
    const result = fn.apply(this, args);
    cache.set(key, result);
    return result;
  }
  memoizedFn.clear = function clear() {
    cache.clear();
  };
  Object.defineProperty(memoizedFn, "name", {
    value: `memoized_${fn.name}`,
    configurable: true
  });
  return memoizedFn;
}
// src/utils/parseFile.ts

@@ -179,31 +281,33 @@ import fs from "fs/promises";

<instructions>
You have been provided with a subset of fields to update in the {sheetName} sheet.
You are analyzing fields in the {sheetName} sheet to determine required constraints.
ONLY add constraints when ALL of these conditions are met:
1. The source text EXPLICITLY defines a constraint for the field
2. The constraint type is clearly indicated in the source text
IMPORTANT: If no fields meet ALL of these conditions, return an empty fields array. It is perfectly acceptable
and often correct to return {{ "fields": [] }}. Do not create constraints just to provide a response.
CORE REQUIREMENTS:
1. ONLY add constraints that are EXPLICITLY stated in the source text
2. Return empty fields array ({{ "fields": [] }}) if no explicit constraints found
3. Never infer or assume constraints based on field names, conventions, or best practices
There are 3 types of constraints:
1. required - ONLY use when source explicitly states field is "required" or "mandatory"
2. unique - ONLY use when source explicitly states field must be "unique"
3. stored - ONLY use when source explicitly defines a validation rule that doesn't fit the above types
VALID CONSTRAINT TYPES:
1. required: Use ONLY when text explicitly states "required" or "mandatory"
2. unique: Use ONLY when text explicitly states "unique"
3. stored: Use for explicit validation rules that don't fit above types
- Must include both \`description\` (detailed) and \`label\` (short summary)
- Example label: "values between 1 and 100"
- \`validator\` must always be \`MUST_REPLACE\`
DO NOT add constraints just because:
- It seems logical for the field type
- Similar fields typically have this constraint
- The field name implies it should have a constraint
- Other fields have constraints
- It would be a good database design practice
CONSTRAINT REFINEMENT RULES:
1. For stored constraints containing references:
- Remove any reference-specific validation logic
- Keep only non-reference validation rules
- Example:
Original: "References the xyz-field on the ABC-slug sheet. If the field's value is "Hello world" add a warning message."
Refined: "If the field's value is "Hello world" add a warning message."
2. Skip constraint entirely if removing reference logic leaves no meaningful validation
The patch path must match a field key in the current sheet. The patch must be of op \`/0/constraints\` where 0
is the index of the field in the current sheet. The op must be \`add\`.
TECHNICAL REQUIREMENTS:
- Patch path must match existing field key in sheet
- Use patch operation \`/0/constraints\` where 0 is field index
- Operation type must be \`add\`
- For enum fields, the enum configuration handles validation
Remember:
- An empty response is better than an incorrect constraint
- Each constraint must be explicitly defined in the source text
- Never infer constraints based on field names or common practices
- The \`validator\` is always \`MUST_REPLACE\` for stored constraints
REMEMBER: Accuracy over completeness. Only include constraints that are unambiguously defined in the source text.
</instructions>

@@ -255,13 +359,23 @@

Criteria for creating reference fields:
1. The field's description explicitly states it should reference another sheet.
2. The description clearly identifies the target sheet and field.
3. The referenced sheet contains the specified field.
4. The patch path matches a field key in the current sheet.
REFERENCE FIELD CRITERIA:
1. The field's description MUST explicitly mention a reference to another sheet
2. The description MUST clearly specify both:
- Target sheet name
- Target field name
3. The referenced sheet and field MUST exist in the workbook
4. The patch path MUST match an existing field key in the current sheet
The patch path must match a field key in the current sheet. The patch path must be the field's index in the sheet.
CONSTRAINTS CLEANUP:
1. After creating reference fields, remove any constraint text that mentions sheet lookups
2. Examples of constraints to remove:
- "Must match with X Sheet"
- "Must match EXACTLY with Y Sheet, column Z"
- Any similar sheet lookup references
3. Leave any stored constraints that do not reference a sheet lookup
The referenced sheet and field MUST exist in the workbook. If it does not, skip the field.
If no fields meet all conditions, return an empty fields array. Do not infer references or create them without explicit instructions.
IMPORTANT NOTES:
- Do not infer or create references without explicit instructions
- Skip any field that doesn't meet ALL criteria
- Return an empty fields array if no fields qualify
- Use the field's index in the sheet as the patch path
</instructions>

@@ -337,3 +451,6 @@

validator: z2.literal("MUST_REPLACE"),
description: z2.string()
description: z2.string().describe("A full description of what this constraint configuration does"),
label: z2.string().describe(
"A short description of what this constraint should do, example - values between 1 and 100"
)
});

@@ -399,79 +516,2 @@ var UniqueConstraintSchema = BaseConstraintSchema.extend({

// src/utils/chunkify.ts

/**
 * Splits `array` into consecutive chunks of at most `size` elements.
 * The final chunk may be shorter.
 *
 * Fix: reject non-positive sizes — with size === 0 the original loop's
 * `i += size` never advances, pushing empty chunks forever (hang/OOM).
 */
function chunkify(array, size) {
  if (size <= 0) {
    throw new Error("Size must be greater than 0");
  }
  const chunks = [];
  for (let i = 0; i < array.length; i += size) {
    chunks.push(array.slice(i, i + size));
  }
  return chunks;
}
// src/utils/json.patch.ts

/**
 * Resolves a JSON-Pointer-style path (e.g. "/a/b/0") against `document`.
 * Returns undefined when any intermediate segment is missing, instead of
 * throwing a TypeError, so applyPatch can use the result as an existence
 * check (the original `acc[key]` traversal threw on missing parents).
 */
function getValueByPath(document, path2) {
  const keys = path2.split("/").slice(1);
  return keys.reduce((acc, key) => acc == null ? void 0 : acc[key], document);
}

/**
 * Writes `value` at `path2`, supporting the JSON Patch "-" token for
 * appending to an array. Throws a descriptive error when the parent
 * container does not exist (previously a bare TypeError).
 */
function setValueByPath(document, path2, value) {
  const keys = path2.split("/").slice(1);
  const lastKey = keys.pop();
  const target = keys.reduce((acc, key) => acc == null ? void 0 : acc[key], document);
  if (target == null) {
    throw new Error(`Cannot set value at non-existent path: ${path2}`);
  }
  if (Array.isArray(target) && lastKey === "-") {
    target.push(value);
  } else if (lastKey) {
    target[lastKey] = value;
  }
}

/**
 * Removes the value at `path2`. For arrays the final segment must be an
 * integer index — the original `splice(Number(lastKey), 1)` silently
 * removed element 0 when the key was non-numeric (Number(...) === NaN).
 */
function removeValueByPath(document, path2) {
  const keys = path2.split("/").slice(1);
  const lastKey = keys.pop();
  const target = keys.reduce((acc, key) => acc == null ? void 0 : acc[key], document);
  if (target == null || lastKey === void 0) {
    return;
  }
  if (Array.isArray(target)) {
    const index = Number(lastKey);
    if (!Number.isInteger(index)) {
      throw new Error(`Invalid array index "${lastKey}" in path: ${path2}`);
    }
    target.splice(index, 1);
  } else {
    delete target[lastKey];
  }
}

/**
 * Applies RFC 6902-style patch operations to `document` in order,
 * mutating it in place and returning it.
 *
 * Throws for remove/replace/test on a non-existent path, for move/copy
 * without a `from` path, and for unsupported operations.
 */
function applyPatch(document, patches) {
  patches.forEach((patch) => {
    const pathExists = getValueByPath(document, patch.path) !== void 0;
    if (!pathExists && patch.op !== "add" && patch.op !== "move" && patch.op !== "copy") {
      throw new Error(
        `Cannot perform a '${patch.op}' operation on a non-existent path: ${patch.path}`
      );
    }
    switch (patch.op) {
      case "add":
      case "replace":
        setValueByPath(document, patch.path, patch.value);
        break;
      case "move": {
        if (patch.from === void 0) {
          throw new Error(`'move' operation requires a 'from' path: ${patch.path}`);
        }
        const valueToMove = getValueByPath(document, patch.from);
        removeValueByPath(document, patch.from);
        setValueByPath(document, patch.path, valueToMove);
        break;
      }
      case "copy": {
        if (patch.from === void 0) {
          throw new Error(`'copy' operation requires a 'from' path: ${patch.path}`);
        }
        setValueByPath(document, patch.path, getValueByPath(document, patch.from));
        break;
      }
      case "remove":
        removeValueByPath(document, patch.path);
        break;
      case "test": {
        const valueToTest = getValueByPath(document, patch.path);
        if (JSON.stringify(valueToTest) !== JSON.stringify(patch.value)) {
          throw new Error(`Test operation failed at path: ${patch.path}`);
        }
        break;
      }
      default:
        throw new Error(`Unsupported operation: ${patch.op}`);
    }
  });
  return document;
}
// src/utils/retry.ts

@@ -633,7 +673,11 @@ async function retry(fn, options = {

XAI_MODELS,
applyPatch,
blueprintWorkflow,
chunkify,
fetchModel,
generateOutput,
generateStructuredOutput,
parseFiles
memoize,
parseFiles,
preserveEmptyCSVData
};
{
"name": "@flatfile/ai-agents",
"version": "0.1.0-alpha.22",
"version": "0.1.0-alpha.23",
"description": "A collection of AI Agents and Workflows for building in Flatfile",

@@ -5,0 +5,0 @@ "packageManager": "npm@11.0.0",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc