@orama/orama - npm package version comparison

Comparing version 2.0.12 to 2.0.13


dist/components/algorithms.d.ts
import { TokenScore, BM25Params } from '../types.js';
export declare function prioritizeTokenScores(arrays: TokenScore[][], boost: number, threshold: number | undefined, keywordsCount: number): TokenScore[];
export declare function BM25(tf: number, matchingCount: number, docsCount: number, fieldLength: number, averageFieldLength: number, BM25Params: Required<BM25Params>): number;
export declare function BM25(tf: number, matchingCount: number, docsCount: number, fieldLength: number, averageFieldLength: number, { k, b, d }: Required<BM25Params>): number;

@@ -80,4 +80,5 @@ import { createError } from '../errors.js';

}
const resultsWithIdAndScore = new Array(keywordsPerToken.length);
for(let i = 0; i < keywordsPerToken.length; i++){
const keywordsPerTokenLength = keywordsPerToken.length;
const resultsWithIdAndScore = new Array(keywordsPerTokenLength);
for(let i = 0; i < keywordsPerTokenLength; i++){
resultsWithIdAndScore[i] = [

@@ -95,7 +96,6 @@ keywordsPerToken[i][0],

// (fuzzy match with a minimum threshold)
const thresholdLength = lastTokenWithAllKeywords + Math.ceil(threshold * 100 * (results.length - lastTokenWithAllKeywords) / 100);
return resultsWithIdAndScore.slice(0, results.length + thresholdLength);
const thresholdLength = lastTokenWithAllKeywords + Math.ceil(threshold * 100 * (allResults - lastTokenWithAllKeywords) / 100);
return resultsWithIdAndScore.slice(0, allResults + thresholdLength);
}
export function BM25(tf, matchingCount, docsCount, fieldLength, averageFieldLength, BM25Params) {
const { k , b , d } = BM25Params;
export function BM25(tf, matchingCount, docsCount, fieldLength, averageFieldLength, { k , b , d }) {
const idf = Math.log(1 + (docsCount - matchingCount + 0.5) / (matchingCount + 0.5));

@@ -102,0 +102,0 @@ return idf * (d + tf * (k + 1)) / (tf + k * (1 - b + b * fieldLength / averageFieldLength));
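
For context, the scoring code touched in this hunk is BM25 with the `k`, `b`, and `d` parameters now destructured directly in the signature. A minimal standalone sketch of the computation as shown above; the default values are illustrative assumptions, not taken from this diff:

```js
// Standalone sketch of the BM25 scoring shown in the hunk above.
// The defaults (k = 1.2, b = 0.75, d = 0.5) are illustrative assumptions.
function bm25(tf, matchingCount, docsCount, fieldLength, averageFieldLength, { k = 1.2, b = 0.75, d = 0.5 } = {}) {
  const idf = Math.log(1 + (docsCount - matchingCount + 0.5) / (matchingCount + 0.5));
  return idf * (d + tf * (k + 1)) / (tf + k * (1 - b + b * fieldLength / averageFieldLength));
}

// Example: a term with frequency 2 in a 100-token field, matching 3 of 1000 docs,
// with an average field length of 120 tokens.
console.log(bm25(2, 3, 1000, 100, 120));
```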

@@ -14,6 +14,7 @@ import { getInternalDocumentId } from './internal-document-id-store.js';

export async function getMultiple(store, ids) {
const idsLength = ids.length;
const found = Array.from({
length: ids.length
length: idsLength
});
for(let i = 0; i < ids.length; i++){
for(let i = 0; i < idsLength; i++){
const internalId = getInternalDocumentId(store.sharedInternalDocumentStore, ids[i]);

@@ -20,0 +21,0 @@ found[i] = store.docs[internalId];
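
Several hunks in this release apply the same micro-optimization seen here: the array length is read once into a local and `Array.from({ length })` preallocates the result, instead of reading `.length` on every iteration. A small sketch of the pattern, with hypothetical names:

```js
// Hypothetical illustration of the length-hoisting pattern used across this release.
function copyAll(ids) {
  const idsLength = ids.length;                    // read .length once
  const found = Array.from({ length: idsLength }); // preallocate the result
  for (let i = 0; i < idsLength; i++) {
    found[i] = ids[i];
  }
  return found;
}

console.log(copyAll(['a', 'b', 'c'])); // ['a', 'b', 'c']
```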

import { createError } from '../errors.js';
import { getNested } from '../utils.js';
function sortingPredicate(order = 'desc', a, b) {
if (order.toLowerCase() === 'asc') {
return a[1] - b[1];
} else {
return b[1] - a[1];
}
function sortingPredicateBuilder(order = 'desc') {
return order.toLowerCase() === 'asc' ? (a, b)=>a[1] - b[1] : (a, b)=>b[1] - a[1];
}
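
The refactor above replaces a comparator that re-checked `order.toLowerCase()` on every comparison with a builder that resolves the sort direction once and returns a plain comparator. A small usage sketch; the sample data is hypothetical:

```js
function sortingPredicateBuilder(order = 'desc') {
  return order.toLowerCase() === 'asc'
    ? (a, b) => a[1] - b[1]
    : (a, b) => b[1] - a[1];
}

// [value, count] entries, as used for facet values below.
const entries = [['red', 3], ['blue', 7], ['green', 1]];
entries.sort(sortingPredicateBuilder('asc'));
console.log(entries); // [['green', 1], ['red', 3], ['blue', 7]]
```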

@@ -17,3 +13,3 @@ export async function getFacets(orama, results, facetsConfig) {

for (const facet of facetKeys){
let values = {};
let values;
// Hack to guarantee the same order of ranges as specified by the user

@@ -23,8 +19,12 @@ // TODO: Revisit this once components land

const { ranges } = facetsConfig[facet];
const tmp = [];
for (const range of ranges){
tmp.push([
const rangesLength = ranges.length;
const tmp = Array.from({
length: rangesLength
});
for(let i = 0; i < rangesLength; i++){
const range = ranges[i];
tmp[i] = [
`${range.from}-${range.to}`,
0
]);
];
}

@@ -35,3 +35,3 @@ values = Object.fromEntries(tmp);

count: 0,
values
values: values ?? {}
};

@@ -45,2 +45,3 @@ }

const propertyType = properties[facet];
const facetValues = facets[facet].values;
switch(propertyType){

@@ -50,3 +51,3 @@ case 'number':

const ranges = facetsConfig[facet].ranges;
calculateNumberFacet(ranges, facets[facet].values, facetValue);
calculateNumberFacet(ranges, facetValues, facetValue);
break;

@@ -59,3 +60,3 @@ }

for (const v of facetValue){
calculateNumberFacet(ranges, facets[facet].values, v, alreadyInsertedValues);
calculateNumberFacet(ranges, facetValues, v, alreadyInsertedValues);
}

@@ -68,3 +69,3 @@ break;

{
calculateBooleanStringOrEnumFacet(facets[facet].values, facetValue, propertyType);
calculateBooleanStringOrEnumFacet(facetValues, facetValue, propertyType);
break;

@@ -79,3 +80,3 @@ }

for (const v of facetValue){
calculateBooleanStringOrEnumFacet(facets[facet].values, v, innerType, alreadyInsertedValues);
calculateBooleanStringOrEnumFacet(facetValues, v, innerType, alreadyInsertedValues);
}

@@ -89,2 +90,3 @@ break;

}
// TODO: We are looping again with the same previous keys, should we creat a single loop instead?
for (const facet of facetKeys){

@@ -96,3 +98,4 @@ // Count the number of values for each facet

const stringFacetDefinition = facetsConfig[facet];
facets[facet].values = Object.fromEntries(Object.entries(facets[facet].values).sort((a, b)=>sortingPredicate(stringFacetDefinition.sort, a, b)).slice(stringFacetDefinition.offset ?? 0, stringFacetDefinition.limit ?? 10));
const sortingPredicate = sortingPredicateBuilder(stringFacetDefinition.sort);
facets[facet].values = Object.fromEntries(Object.entries(facets[facet].values).sort(sortingPredicate).slice(stringFacetDefinition.offset ?? 0, stringFacetDefinition.limit ?? 10));
}

@@ -105,3 +108,3 @@ }

const value = `${range.from}-${range.to}`;
if (alreadyInsertedValues && alreadyInsertedValues.has(value)) {
if (alreadyInsertedValues === null || alreadyInsertedValues === void 0 ? void 0 : alreadyInsertedValues.has(value)) {
continue;

@@ -114,5 +117,3 @@ }

values[value]++;
if (alreadyInsertedValues) {
alreadyInsertedValues.add(value);
}
alreadyInsertedValues === null || alreadyInsertedValues === void 0 ? void 0 : alreadyInsertedValues.add(value);
}

@@ -125,11 +126,9 @@ }

const value = (facetValue === null || facetValue === void 0 ? void 0 : facetValue.toString()) ?? (propertyType === 'boolean' ? 'false' : '');
if (alreadyInsertedValues && alreadyInsertedValues.has(value)) {
if (alreadyInsertedValues === null || alreadyInsertedValues === void 0 ? void 0 : alreadyInsertedValues.has(value)) {
return;
}
values[value] = (values[value] ?? 0) + 1;
if (alreadyInsertedValues) {
alreadyInsertedValues.add(value);
}
alreadyInsertedValues === null || alreadyInsertedValues === void 0 ? void 0 : alreadyInsertedValues.add(value);
}
//# sourceMappingURL=facets.js.map
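
The `x === null || x === void 0 ? void 0 : x.has(value)` expressions above are the down-leveled output of optional chaining, replacing the explicit `if (alreadyInsertedValues && ...)` guards. A sketch of what the source presumably looks like before transpilation; the function name is hypothetical:

```js
// Presumed untranspiled form of the guards shown above (an assumption).
function countFacetValue(values, value, alreadyInsertedValues) {
  if (alreadyInsertedValues?.has(value)) {
    return;
  }
  values[value] = (values[value] ?? 0) + 1;
  alreadyInsertedValues?.add(value);
}
```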

@@ -7,8 +7,6 @@ export function intersectFilteredIDs(filtered, lookedUp) {

}
for (const [id, score] of lookedUp){
for (const looked of lookedUp){
const [id] = looked;
if (map.has(id)) {
result.push([
id,
score
]);
result.push(looked);
map.delete(id);

@@ -15,0 +13,0 @@ }
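
The change above pushes the existing `[id, score]` tuple (`looked`) into the result instead of allocating a fresh pair. A self-contained sketch, assuming `filtered` and `lookedUp` are arrays of `[id, score]` pairs and that the lines omitted from the hunk build a `Map` from `filtered` (both assumptions based on this hunk):

```js
function intersectFilteredIDs(filtered, lookedUp) {
  const map = new Map(filtered);
  const result = [];
  for (const looked of lookedUp) {
    const [id] = looked;
    if (map.has(id)) {
      result.push(looked); // reuse the tuple rather than rebuilding [id, score]
      map.delete(id);
    }
  }
  return result;
}

console.log(intersectFilteredIDs([['a', 1], ['b', 2]], [['b', 9], ['c', 3]])); // [['b', 9]]
```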

@@ -56,14 +56,13 @@ import { createError } from '../errors.js';

const keyValue = typeof value !== 'boolean' ? value : '' + value;
if (typeof group.perValue[keyValue] === 'undefined') {
group.perValue[keyValue] = {
indexes: [],
count: 0
};
}
if (group.perValue[keyValue].count >= returnedCount) {
const perValue = group.perValue[keyValue] ?? {
indexes: [],
count: 0
};
if (perValue.count >= returnedCount) {
continue;
}
// We use the index to keep track of the original order
group.perValue[keyValue].indexes.push(j);
group.perValue[keyValue].count++;
perValue.indexes.push(j);
perValue.count++;
group.perValue[keyValue] = perValue;
values.add(value);

@@ -70,0 +69,0 @@ }
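
The grouping hunk above reads (or defaults) the per-value bucket once with `??`, mutates the local reference, and writes it back, instead of probing `group.perValue[keyValue]` several times. A reduced sketch with a hypothetical helper name:

```js
// Hypothetical helper isolating the pattern from the hunk above.
function addToGroup(group, keyValue, index, returnedCount) {
  const perValue = group.perValue[keyValue] ?? { indexes: [], count: 0 };
  if (perValue.count >= returnedCount) {
    return;
  }
  perValue.indexes.push(index); // the index keeps track of the original order
  perValue.count++;
  group.perValue[keyValue] = perValue;
}

const group = { perValue: {} };
addToGroup(group, 'red', 0, 10);
addToGroup(group, 'red', 4, 10);
console.log(group.perValue.red); // { indexes: [0, 4], count: 2 }
```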

@@ -152,41 +152,43 @@ import { createError } from '../errors.js';

}
async function insertScalar(implementation, index, prop, id, value, schemaType, language, tokenizer, docsCount) {
const internalId = getInternalDocumentId(index.sharedInternalDocumentStore, id);
const { type , node } = index.indexes[prop];
switch(type){
case 'Bool':
{
node[value ? 'true' : 'false'].push(internalId);
break;
}
case 'AVL':
{
avlInsert(node, value, [
internalId
]);
break;
}
case 'Radix':
{
const tokens = await tokenizer.tokenize(value, language, prop);
await implementation.insertDocumentScoreParameters(index, prop, internalId, tokens, docsCount);
for (const token of tokens){
await implementation.insertTokenScoreParameters(index, prop, internalId, tokens, token);
radixInsert(node, token, internalId);
function insertScalarBuilder(implementation, index, prop, id, language, tokenizer, docsCount) {
return async (value)=>{
const internalId = getInternalDocumentId(index.sharedInternalDocumentStore, id);
const { type , node } = index.indexes[prop];
switch(type){
case 'Bool':
{
node[value ? 'true' : 'false'].push(internalId);
break;
}
break;
}
case 'Flat':
{
flatInsert(node, value, internalId);
break;
}
case 'BKD':
{
bkdInsert(node, value, [
internalId
]);
break;
}
}
case 'AVL':
{
avlInsert(node, value, [
internalId
]);
break;
}
case 'Radix':
{
const tokens = await tokenizer.tokenize(value, language, prop);
await implementation.insertDocumentScoreParameters(index, prop, internalId, tokens, docsCount);
for (const token of tokens){
await implementation.insertTokenScoreParameters(index, prop, internalId, tokens, token);
radixInsert(node, token, internalId);
}
break;
}
case 'Flat':
{
flatInsert(node, value, internalId);
break;
}
case 'BKD':
{
bkdInsert(node, value, [
internalId
]);
break;
}
}
};
}

@@ -197,10 +199,10 @@ export async function insert(implementation, index, prop, id, value, schemaType, language, tokenizer, docsCount) {

}
const insertScalar = insertScalarBuilder(implementation, index, prop, id, language, tokenizer, docsCount);
if (!isArrayType(schemaType)) {
return insertScalar(implementation, index, prop, id, value, schemaType, language, tokenizer, docsCount);
return insertScalar(value);
}
const innerSchemaType = getInnerType(schemaType);
const elements = value;
const elementsLength = elements.length;
for(let i = 0; i < elementsLength; i++){
await insertScalar(implementation, index, prop, id, elements[i], innerSchemaType, language, tokenizer, docsCount);
await insertScalar(elements[i]);
}

@@ -207,0 +209,0 @@ }
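
The two hunks above replace a scalar insert function that received nine arguments per call with a builder: the shared arguments are captured once per property in a closure, and `insert` then calls the returned function with just the value, for both scalar and array types. A reduced, runnable illustration of the shape; the toy index below is an assumption, not Orama's internal structure:

```js
function insertScalarBuilder(indexNode, internalId) {
  return (value) => {
    if (typeof value === 'boolean') {
      indexNode[value ? 'true' : 'false'].push(internalId);
    } else {
      indexNode.values.push([value, internalId]);
    }
  };
}

function insert(indexNode, internalId, value) {
  const insertScalar = insertScalarBuilder(indexNode, internalId); // build once
  if (!Array.isArray(value)) {
    return insertScalar(value);
  }
  for (const element of value) {
    insertScalar(element); // reuse for every element of an array field
  }
}

const node = { true: [], false: [], values: [] };
insert(node, 42, ['hello', 'world']);
insert(node, 43, true);
console.log(node); // { true: [43], false: [], values: [['hello', 42], ['world', 42]] }
```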

@@ -18,5 +18,7 @@ export function createInternalDocumentIDStore() {

orama.internalDocumentIDStore.internalIdToId = [];
for(let i = 0; i < internalIdToId.length; i++){
orama.internalDocumentIDStore.idToInternalId.set(internalIdToId[i], i + 1);
orama.internalDocumentIDStore.internalIdToId.push(internalIdToId[i]);
const internalIdToIdLength = internalIdToId.length;
for(let i = 0; i < internalIdToIdLength; i++){
const internalIdItem = internalIdToId[i];
orama.internalDocumentIDStore.idToInternalId.set(internalIdItem, i + 1);
orama.internalDocumentIDStore.internalIdToId.push(internalIdItem);
}

@@ -23,0 +25,0 @@ }

@@ -95,8 +95,5 @@ import { createError } from '../errors.js';

function ensureIsSorted(sorter) {
if (sorter.isSorted) {
if (sorter.isSorted || !sorter.enabled) {
return;
}
if (!sorter.enabled) {
return;
}
const properties = Object.keys(sorter.sorts);

@@ -103,0 +100,0 @@ for (const prop of properties){

@@ -117,5 +117,5 @@ import { formatElapsedTime, getDocumentIndexId, getDocumentProperties, validateSchema } from '../components/defaults.js';

function getVersion() {
return '2.0.12';
return '2.0.13';
}
//# sourceMappingURL=create.js.map

@@ -12,2 +12,10 @@ import { isArrayType, isGeoPointType, isVectorType } from '../components.js';

}
const ENUM_TYPE = new Set([
'enum',
'enum[]'
]);
const STRING_NUMBER_TYPE = new Set([
'string',
'number'
]);
async function innerInsert(orama, doc, language, skipHooks) {

@@ -44,3 +52,3 @@ const { index , docs } = orama.data;

}
if ((expectedType === 'enum' || expectedType === 'enum[]') && (actualType === 'string' || actualType === 'number')) {
if (ENUM_TYPE.has(expectedType) && STRING_NUMBER_TYPE.has(actualType)) {
continue;
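
The hunks above hoist the accepted type names into module-level `Set`s, so the per-document validation is a membership lookup instead of a chain of `===` comparisons. A small sketch; the helper name is hypothetical, since the diff performs the check inline:

```js
const ENUM_TYPE = new Set(['enum', 'enum[]']);
const STRING_NUMBER_TYPE = new Set(['string', 'number']);

// Hypothetical helper; in the diff this check happens inline during insertion.
function isCoercibleEnumValue(expectedType, actualType) {
  return ENUM_TYPE.has(expectedType) && STRING_NUMBER_TYPE.has(actualType);
}

console.log(isCoercibleEnumValue('enum', 'string'));    // true
console.log(isCoercibleEnumValue('enum[]', 'boolean')); // false
```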

@@ -86,4 +94,5 @@ }

const docsLength = docs.length;
const oramaSchema = orama.schema;
for(let i = 0; i < docsLength; i++){
const errorProperty = await orama.validateSchema(docs[i], orama.schema);
const errorProperty = await orama.validateSchema(docs[i], oramaSchema);
if (errorProperty) {

@@ -99,3 +108,3 @@ throw createError('SCHEMA_VALIDATION_FAILURE', errorProperty);

}
timeout = timeout || 0;
timeout ??= 0;
const ids = [];

@@ -105,4 +114,3 @@ await new Promise((resolve, reject)=>{

async function _insertMultiple() {
const batch = docs.slice(i * batchSize, (i + 1) * batchSize);
i++;
const batch = docs.slice(i * batchSize, ++i * batchSize);
if (!batch.length) {

@@ -109,0 +117,0 @@ return resolve();

@@ -61,4 +61,3 @@ import { runMultipleHook, runSingleHook } from '../components/hooks.js';

async function _insertMultiple() {
const batch = ids.slice(i * batchSize, (i + 1) * batchSize);
i++;
const batch = ids.slice(i * batchSize, ++i * batchSize);
if (!batch.length) {

@@ -65,0 +64,0 @@ return resolve();
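
Both `insertMultiple`-style hunks above fold the cursor advance into the slice call: `docs.slice(i * batchSize, ++i * batchSize)` evaluates the start index with the current `i`, then increments it for the end index. A toy version of the batching loop; the generator is only for illustration, the package drives it with timers and promises:

```js
function* batches(docs, batchSize) {
  let i = 0;
  while (true) {
    const batch = docs.slice(i * batchSize, ++i * batchSize); // start uses old i, end uses new i
    if (!batch.length) {
      return;
    }
    yield batch;
  }
}

console.log([...batches([1, 2, 3, 4, 5], 2)]); // [[1, 2], [3, 4], [5]]
```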

@@ -124,6 +124,4 @@ import { getInternalDocumentId } from '../components/internal-document-id-store.js';

let results;
if (!isPreflight && distinctOn) {
results = await fetchDocumentsWithDistinct(orama, uniqueDocsArray, offset, limit, distinctOn);
} else if (!isPreflight) {
results = await fetchDocuments(orama, uniqueDocsArray, offset, limit);
if (!isPreflight) {
results = await (distinctOn ? fetchDocumentsWithDistinct(orama, uniqueDocsArray, offset, limit, distinctOn) : fetchDocuments(orama, uniqueDocsArray, offset, limit));
}

@@ -130,0 +128,0 @@ const searchResult = {

@@ -88,5 +88,5 @@ import { getNanosecondsTime, safeArrayPush, formatNanoseconds, removeVectorsFromHits } from '../utils.js';

params.relevance = Object.assign(defaultBM25Params, params.relevance ?? {});
const { term , properties , threshold =1 } = params;
const { term ='' , properties , threshold =1 } = params;
const { index , docs } = orama.data;
const tokens = await orama.tokenizer.tokenize(term ?? '', language);
const tokens = await orama.tokenizer.tokenize(term, language);
// Get searchable string properties

@@ -102,2 +102,3 @@ let propertiesToSearch = orama.caches['propertiesToSearch'];

for (const prop of properties){
// TODO: since propertiesToSearch.includes is repeated multiple times, maybe we should move it in a Set first?
if (!propertiesToSearch.includes(prop)) {

@@ -107,2 +108,3 @@ throw createError('UNKNOWN_INDEX', prop, propertiesToSearch.join(', '));

}
// TODO: since properties.includes is repeated multiple times, maybe we should move it in a Set first?
propertiesToSearch = propertiesToSearch.filter((prop)=>properties.includes(prop));

@@ -215,10 +217,8 @@ }

// ^ 1 here refers to "score"
if (mergedResults.has(vectorResults[i][0])) {
let existingRes = mergedResults.get(vectorResults[i][0]);
// ^ 0 here refers to "id"
mergedResults.set(vectorResults[i][0], existingRes += hybridScore(0, normalizedScore, textWeight, vectorWeight));
// ^ 0 here refers to "id"
const resultId = vectorResults[i][0];
if (mergedResults.has(resultId)) {
let existingRes = mergedResults.get(resultId);
mergedResults.set(resultId, existingRes += hybridScore(0, normalizedScore, textWeight, vectorWeight));
} else {
mergedResults.set(vectorResults[i][0], hybridScore(0, normalizedScore, textWeight, vectorWeight));
// ^ 0 here refers to "id"
mergedResults.set(resultId, hybridScore(0, normalizedScore, textWeight, vectorWeight));
}

@@ -225,0 +225,0 @@ }

@@ -26,3 +26,4 @@ import { createError } from './errors.js';

} else {
for(let i = 0; i < newArr.length; i += MAX_ARGUMENT_FOR_STACK){
const newArrLength = newArr.length;
for(let i = 0; i < newArrLength; i += MAX_ARGUMENT_FOR_STACK){
Array.prototype.push.apply(arr, newArr.slice(i, i + MAX_ARGUMENT_FOR_STACK));
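
The hunk above hoists `newArr.length` in the chunked-push fallback of `safeArrayPush`. The chunking itself exists because `Array.prototype.push.apply` spreads the source array into call arguments, and engines cap the number of arguments per call. A sketch of the whole function under that assumption; the surrounding `if` branch and the constant's value are not shown in this diff:

```js
const MAX_ARGUMENT_FOR_STACK = 65535; // illustrative value, not the package's constant

function safeArrayPush(arr, newArr) {
  if (newArr.length < MAX_ARGUMENT_FOR_STACK) {
    // Small arrays can be appended in a single call.
    Array.prototype.push.apply(arr, newArr);
  } else {
    // Large arrays are appended in fixed-size slices to stay under the
    // per-call argument limit.
    const newArrLength = newArr.length;
    for (let i = 0; i < newArrLength; i += MAX_ARGUMENT_FOR_STACK) {
      Array.prototype.push.apply(arr, newArr.slice(i, i + MAX_ARGUMENT_FOR_STACK));
    }
  }
}

const target = [];
safeArrayPush(target, [1, 2, 3]);
console.log(target); // [1, 2, 3]
```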

@@ -99,2 +100,3 @@ }

}
// TODO: none of these operations is async. Should we change the signature of this function?
export async function getNanosecondsTime() {

@@ -107,3 +109,3 @@ if (isInsideWebWorker()) {

}
if (typeof process !== 'undefined' && process.hrtime !== undefined) {
if (process === null || process === void 0 ? void 0 : process.hrtime) {
return process.hrtime.bigint();

@@ -110,0 +112,0 @@ }

{
"name": "@orama/orama",
"version": "2.0.12",
"version": "2.0.13",
"type": "module",

@@ -89,4 +89,4 @@ "description": "Next generation full-text and vector search engine, written in TypeScript",

"typescript": "^5.0.0",
"@orama/stemmers": "2.0.12",
"@orama/stopwords": "2.0.12"
"@orama/stopwords": "2.0.13",
"@orama/stemmers": "2.0.13"
},

@@ -93,0 +93,0 @@ "engines": {

@@ -18,13 +18,13 @@ # Orama

- [Vector Search](https://docs.oramasearch.com/open-source/usage/search/vector-search)
- [Hybrid Search](https://docs.oramasearch.com/open-source/usage/search/hybrid-search)
- [Search Filters](https://docs.oramasearch.com/open-source/usage/search/filters)
- [Geosearch](https://docs.oramasearch.com/open-source/usage/search/geosearch)
- [Facets](https://docs.oramasearch.com/open-source/usage/search/facets)
- [Fields Boosting](https://docs.oramasearch.com/open-source/usage/search/fields-boosting)
- [Typo Tolerance](https://docs.oramasearch.com/open-source/usage/search/introduction#typo-tolerance)
- [Exact Match](https://docs.oramasearch.com/open-source/usage/search/introduction#exact-match)
- [BM25](https://docs.oramasearch.com/open-source/usage/search/bm25-algorithm)
- [Stemming and tokenization in 30 languages](https://docs.oramasearch.com/open-source/text-analysis/stemming)
- [Plugin System](https://docs.oramasearch.com/open-source/plugins/introduction)
- [Vector Search](https://docs.askorama.ai/open-source/usage/search/vector-search)
- [Hybrid Search](https://docs.askorama.ai/open-source/usage/search/hybrid-search)
- [Search Filters](https://docs.askorama.ai/open-source/usage/search/filters)
- [Geosearch](https://docs.askorama.ai/open-source/usage/search/geosearch)
- [Facets](https://docs.askorama.ai/open-source/usage/search/facets)
- [Fields Boosting](https://docs.askorama.ai/open-source/usage/search/fields-boosting)
- [Typo Tolerance](https://docs.askorama.ai/open-source/usage/search/introduction#typo-tolerance)
- [Exact Match](https://docs.askorama.ai/open-source/usage/search/introduction#exact-match)
- [BM25](https://docs.askorama.ai/open-source/usage/search/bm25-algorithm)
- [Stemming and tokenization in 30 languages](https://docs.askorama.ai/open-source/text-analysis/stemming)
- [Plugin System](https://docs.askorama.ai/open-source/plugins/introduction)

@@ -59,3 +59,3 @@ # Installation

Read the complete documentation at [https://docs.oramasearch.com](https://docs.oramasearch.com).
Read the complete documentation at [https://docs.askorama.ai](https://docs.askorama.ai).

@@ -205,3 +205,3 @@ # Usage

You can use non-string data to [filter](https://docs.oramasearch.com/open-source/usage/search/filters), [group](https://docs.oramasearch.com/open-source/usage/search/grouping), and create [facets](https://docs.oramasearch.com/open-source/usage/search/facets):
You can use non-string data to [filter](https://docs.askorama.ai/open-source/usage/search/filters), [group](https://docs.askorama.ai/open-source/usage/search/grouping), and create [facets](https://docs.askorama.ai/open-source/usage/search/facets):

@@ -240,3 +240,3 @@ ```js

If you're using the [Orama Secure AI Proxy](https://oramasearch.com/blog/announcing-the-orama-secure-ai-proxy) (highly recommended), you can skip the vector configuration at search time, since the official [Orama Secure AI Proxy plugin](https://www.npmjs.com/package/@orama/plugin-secure-proxy) will take care of it automatically for you:
If you're using the [Orama Secure AI Proxy](https://askorama.ai/blog/announcing-the-orama-secure-ai-proxy) (highly recommended), you can skip the vector configuration at search time, since the official [Orama Secure AI Proxy plugin](https://www.npmjs.com/package/@orama/plugin-secure-proxy) will take care of it automatically for you:

@@ -320,18 +320,18 @@ ```js

Read more in the [official docs](https://docs.oramasearch.com/open-source/usage/search/geosearch).
Read more in the [official docs](https://docs.askorama.ai/open-source/usage/search/geosearch).
# Official Docs
Read the complete documentation at [https://docs.oramasearch.com](https://docs.oramasearch.com).
Read the complete documentation at [https://docs.askorama.ai](https://docs.askorama.ai).
# Official Orama Plugins
- [Plugin Vitepress](https://docs.oramasearch.com/open-source/plugins/plugin-vitepress)
- [Plugin Docusaurus](https://docs.oramasearch.com/open-source/plugins/plugin-docusaurus)
- [Plugin Analytics](https://docs.oramasearch.com/open-source/plugins/plugin-analytics)
- [Plugin Astro](https://docs.oramasearch.com/open-source/plugins/plugin-astro)
- [Plugin Data Persistence](https://docs.oramasearch.com/open-source/plugins/plugin-data-persistence)
- [Plugin Nextra](https://docs.oramasearch.com/open-source/plugins/plugin-nextra)
- [Plugin Vitepress](https://docs.askorama.ai/open-source/plugins/plugin-vitepress)
- [Plugin Docusaurus](https://docs.askorama.ai/open-source/plugins/plugin-docusaurus)
- [Plugin Analytics](https://docs.askorama.ai/open-source/plugins/plugin-analytics)
- [Plugin Astro](https://docs.askorama.ai/open-source/plugins/plugin-astro)
- [Plugin Data Persistence](https://docs.askorama.ai/open-source/plugins/plugin-data-persistence)
- [Plugin Nextra](https://docs.askorama.ai/open-source/plugins/plugin-nextra)
Write your own plugin: [https://docs.oramasearch.com/open-source/plugins/writing-your-own-plugins](https://docs.oramasearch.com/open-source/plugins/writing-your-own-plugins)
Write your own plugin: [https://docs.askorama.ai/open-source/plugins/writing-your-own-plugins](https://docs.askorama.ai/open-source/plugins/writing-your-own-plugins)

@@ -338,0 +338,0 @@ # License
