@backstage/plugin-search-backend-node
Advanced tools
Comparing version 1.3.3-next.1 to 1.3.3-next.2
{ | ||
"name": "@backstage/plugin-search-backend-node__alpha", | ||
"version": "1.3.3-next.1", | ||
"version": "1.3.3-next.2", | ||
"main": "../dist/alpha.cjs.js", | ||
"types": "../dist/alpha.d.ts" | ||
} |
# @backstage/plugin-search-backend-node | ||
## 1.3.3-next.2 | ||
### Patch Changes | ||
- Updated dependencies | ||
- @backstage/backend-defaults@0.5.1-next.2 | ||
- @backstage/backend-plugin-api@1.0.1-next.1 | ||
- @backstage/config@1.2.0 | ||
- @backstage/errors@1.2.4 | ||
- @backstage/plugin-permission-common@0.8.1 | ||
- @backstage/plugin-search-common@1.2.14 | ||
## 1.3.3-next.1 | ||
@@ -4,0 +16,0 @@ |
'use strict'; | ||
var stream = require('stream'); | ||
var ndjson = require('ndjson'); | ||
var errors = require('@backstage/errors'); | ||
var lunr = require('lunr'); | ||
var uuid = require('uuid'); | ||
var IndexBuilder = require('./IndexBuilder.cjs.js'); | ||
var Scheduler = require('./Scheduler.cjs.js'); | ||
var NewlineDelimitedJsonCollatorFactory = require('./collators/NewlineDelimitedJsonCollatorFactory.cjs.js'); | ||
var LunrSearchEngine = require('./engines/LunrSearchEngine.cjs.js'); | ||
var errors = require('./errors.cjs.js'); | ||
var BatchSearchEngineIndexer = require('./indexing/BatchSearchEngineIndexer.cjs.js'); | ||
var DecoratorBase = require('./indexing/DecoratorBase.cjs.js'); | ||
var TestPipeline = require('./test-utils/TestPipeline.cjs.js'); | ||
function _interopDefaultCompat(e) {
  // CJS/ESM interop shim: modules transpiled from ESM already expose a
  // `default` property, so they pass through untouched; plain CJS exports
  // get wrapped so callers can uniformly read `.default`.
  if (e && typeof e === 'object' && 'default' in e) {
    return e;
  }
  return { default: e };
}
var lunr__default = /*#__PURE__*/_interopDefaultCompat(lunr); | ||
class Scheduler {
  logger;
  schedule;
  abortControllers;
  isRunning;
  /**
   * @param options - Requires a `logger` used for lifecycle log messages.
   */
  constructor(options) {
    this.logger = options.logger;
    this.schedule = {};
    this.abortControllers = [];
    this.isRunning = false;
  }
  /**
   * Adds each task and interval to the schedule.
   * When running the tasks, the scheduler waits at least for the time specified
   * in the interval once the task was completed, before running it again.
   */
  addToSchedule(options) {
    const { id, task, scheduledRunner } = options;
    // Registration is only allowed while the scheduler is idle.
    if (this.isRunning) {
      throw new Error(
        "Cannot add task to schedule that has already been started."
      );
    }
    // Task ids must be unique within one scheduler instance.
    if (this.schedule[id]) {
      throw new Error(`Task with id ${id} already exists.`);
    }
    this.schedule[id] = { task, scheduledRunner };
  }
  /**
   * Starts the scheduling process for each task
   */
  start() {
    this.logger.info("Starting all scheduled search tasks.");
    this.isRunning = true;
    for (const [id, entry] of Object.entries(this.schedule)) {
      // Each task gets its own controller so stop() can abort them all.
      const controller = new AbortController();
      this.abortControllers.push(controller);
      entry.scheduledRunner.run({
        id,
        fn: entry.task,
        signal: controller.signal
      });
    }
  }
  /**
   * Stop all scheduled tasks.
   */
  stop() {
    this.logger.info("Stopping all scheduled search tasks.");
    this.abortControllers.forEach((controller) => controller.abort());
    this.abortControllers = [];
    this.isRunning = false;
  }
}
class IndexBuilder {
  collators;
  decorators;
  documentTypes;
  searchEngine;
  logger;
  /**
   * @param options - Requires a `logger` and the `searchEngine` that will
   * receive indexed documents.
   */
  constructor(options) {
    this.collators = {};
    this.decorators = {};
    this.documentTypes = {};
    this.logger = options.logger;
    this.searchEngine = options.searchEngine;
  }
  /**
   * Responsible for returning the registered search engine.
   */
  getSearchEngine() {
    return this.searchEngine;
  }
  /**
   * Responsible for returning the registered document types.
   */
  getDocumentTypes() {
    return this.documentTypes;
  }
  /**
   * Makes the index builder aware of a collator that should be executed at the
   * given refresh interval.
   */
  addCollator(options) {
    const { factory, schedule } = options;
    this.logger.info(
      `Added ${factory.constructor.name} collator factory for type ${factory.type}`
    );
    this.collators[factory.type] = { factory, schedule };
    this.documentTypes[factory.type] = {
      visibilityPermission: factory.visibilityPermission
    };
  }
  /**
   * Makes the index builder aware of a decorator. If no types are provided on
   * the decorator, it will be applied to documents from all known collators,
   * otherwise it will only be applied to documents of the given types.
   */
  addDecorator(options) {
    const { factory } = options;
    // A decorator without explicit types applies to every document type.
    const types = factory.types || ["*"];
    this.logger.info(
      `Added decorator ${factory.constructor.name} to types ${types.join(
        ", "
      )}`
    );
    for (const type of types) {
      if (this.decorators.hasOwnProperty(type)) {
        this.decorators[type].push(factory);
      } else {
        this.decorators[type] = [factory];
      }
    }
  }
  /**
   * Compiles collators and decorators into tasks, which are added to a
   * scheduler returned to the caller.
   */
  async build() {
    const scheduler = new Scheduler({ logger: this.logger });
    for (const type of Object.keys(this.collators)) {
      const taskLogger = this.logger.child({ documentType: type });
      scheduler.addToSchedule({
        id: `search_index_${type.replace("-", "_").toLocaleLowerCase("en-US")}`,
        scheduledRunner: this.collators[type].schedule,
        task: async () => {
          // Factories are resolved at run time so that the latest
          // registrations are always used on every scheduled run.
          const collator = await this.collators[type].factory.getCollator();
          taskLogger.info(
            `Collating documents for ${type} via ${this.collators[type].factory.constructor.name}`
          );
          // Wildcard decorators apply first, then type-specific ones.
          const applicable = (this.decorators["*"] || []).concat(this.decorators[type] || []);
          const decorators = await Promise.all(
            applicable.map(async (factory) => {
              const decorator = await factory.getDecorator();
              taskLogger.info(
                `Attached decorator via ${factory.constructor.name} to ${type} index pipeline.`
              );
              return decorator;
            })
          );
          const indexer = await this.searchEngine.getIndexer(type);
          // Wire collator -> decorators -> indexer into one stream pipeline.
          return new Promise((resolve, reject) => {
            stream.pipeline([collator, ...decorators, indexer], (error) => {
              if (error) {
                taskLogger.error(
                  `Collating documents for ${type} failed: ${error}`
                );
                reject(error);
              } else {
                taskLogger.info(`Collating documents for ${type} succeeded`);
                resolve();
              }
            });
          });
        }
      });
    }
    return { scheduler };
  }
}
class NewlineDelimitedJsonCollatorFactory {
  /**
   * @param type - Document type this collator produces.
   * @param searchPattern - Glob-style pattern used to locate .ndjson files.
   * @param reader - UrlReader-like service used to search and fetch files.
   * @param logger - Logger scoped to this collator.
   * @param visibilityPermission - Optional permission governing document visibility.
   */
  constructor(type, searchPattern, reader, logger, visibilityPermission) {
    this.searchPattern = searchPattern;
    this.reader = reader;
    this.logger = logger;
    this.type = type;
    this.visibilityPermission = visibilityPermission;
  }
  type;
  visibilityPermission;
  /**
   * Returns a NewlineDelimitedJsonCollatorFactory instance from configuration
   * and a set of options.
   */
  static fromConfig(_config, options) {
    const scopedLogger = options.logger.child({ documentType: options.type });
    return new NewlineDelimitedJsonCollatorFactory(
      options.type,
      options.searchPattern,
      options.reader,
      scopedLogger,
      options.visibilityPermission
    );
  }
  /**
   * Returns the "latest" URL for the given search pattern (e.g. the one at the
   * end of the list, sorted alphabetically).
   */
  async lastUrl() {
    try {
      this.logger.info(
        `Attempting to find latest .ndjson matching ${this.searchPattern}`
      );
      const { files } = await this.reader.search(this.searchPattern);
      // Only .ndjson files qualify; the alphabetically greatest URL wins.
      const ndjsonFiles = files.filter((file) => file.url.endsWith(".ndjson"));
      const newestFirst = ndjsonFiles.sort((a, b) => a.url.localeCompare(b.url)).reverse();
      return newestFirst[0]?.url;
    } catch (e) {
      this.logger.error(`Could not search for ${this.searchPattern}`, e);
      throw e;
    }
  }
  async getCollator() {
    const lastUrl = await this.lastUrl();
    if (!lastUrl) {
      const noMatchingFile = `Could not find an .ndjson file matching ${this.searchPattern}`;
      this.logger.error(noMatchingFile);
      throw new Error(noMatchingFile);
    }
    this.logger.info(`Using latest .ndjson file ${lastUrl}`);
    const readerResponse = await this.reader.readUrl(lastUrl);
    // Stream the raw bytes through an ndjson parser to yield one document
    // object per newline-delimited JSON record.
    return readerResponse.stream().pipe(ndjson.parse());
  }
}
class MissingIndexError extends Error {
  /**
   * An inner error that caused this error to be thrown, if any.
   */
  cause;
  constructor(message, cause) {
    super(message);
    // Drop this constructor frame from the stack trace where supported (V8).
    Error.captureStackTrace?.(this, this.constructor);
    this.name = this.constructor.name;
    // Keep the cause only when the shared helper confirms it is a real Error.
    if (errors.isError(cause)) {
      this.cause = cause;
    } else {
      this.cause = void 0;
    }
  }
}
class BatchSearchEngineIndexer extends stream.Writable {
  batchSize;
  currentBatch = [];
  /**
   * @param options - Requires `batchSize`, the number of documents buffered
   * before `index()` is invoked.
   */
  constructor(options) {
    super({ objectMode: true });
    this.batchSize = options.batchSize;
  }
  /**
   * Encapsulates initialization logic.
   * @internal
   */
  async _construct(done) {
    try {
      await this.initialize();
      done();
    } catch (error) {
      errors.assertError(error);
      done(error);
    }
  }
  /**
   * Encapsulates batch stream write logic.
   * @internal
   */
  async _write(doc, _e, done) {
    this.currentBatch.push(doc);
    // Flush only once a full batch has accumulated.
    if (this.currentBatch.length >= this.batchSize) {
      try {
        await this.index(this.currentBatch);
        this.currentBatch = [];
        done();
      } catch (error) {
        errors.assertError(error);
        done(error);
      }
      return;
    }
    done();
  }
  /**
   * Encapsulates finalization and final error handling logic.
   * @internal
   */
  async _final(done) {
    try {
      // Flush any partially-filled batch before finalizing.
      if (this.currentBatch.length) {
        await this.index(this.currentBatch);
        this.currentBatch = [];
      }
      await this.finalize();
      done();
    } catch (error) {
      errors.assertError(error);
      done(error);
    }
  }
}
class DecoratorBase extends stream.Transform {
  constructor() {
    super({ objectMode: true });
  }
  /**
   * Encapsulates initialization logic.
   * @internal
   */
  async _construct(done) {
    try {
      await this.initialize();
      done();
    } catch (error) {
      errors.assertError(error);
      done(error);
    }
  }
  /**
   * Encapsulates simple transform stream logic.
   * @internal
   */
  async _transform(document, _, done) {
    try {
      const decorated = await this.decorate(document);
      // `undefined` means the decorator chose to drop the document entirely.
      if (decorated !== void 0) {
        if (Array.isArray(decorated)) {
          // A decorator may fan one document out into several.
          for (const doc of decorated) {
            this.push(doc);
          }
        } else {
          this.push(decorated);
        }
      }
      done();
    } catch (error) {
      errors.assertError(error);
      done(error);
    }
  }
  /**
   * Encapsulates finalization and final error handling logic.
   * @internal
   */
  async _final(done) {
    try {
      await this.finalize();
      done();
    } catch (error) {
      errors.assertError(error);
      done(error);
    }
  }
}
class LunrSearchEngineIndexer extends BatchSearchEngineIndexer {
  schemaInitialized = false;
  builder;
  docStore = {};
  constructor() {
    // Lunr indexing is in-memory, so a generous batch size is fine.
    super({ batchSize: 1e3 });
    this.builder = new lunr__default.default.Builder();
    this.builder.pipeline.add(lunr__default.default.trimmer, lunr__default.default.stopWordFilter, lunr__default.default.stemmer);
    this.builder.searchPipeline.add(lunr__default.default.stemmer);
    // Record term positions so results can be highlighted later.
    this.builder.metadataWhitelist = ["position"];
  }
  // No async initialization required.
  async initialize() {
  }
  async finalize() {
  }
  async index(documents) {
    // Derive the schema lazily from the first document's keys; `location`
    // serves as the unique reference for each document.
    if (!this.schemaInitialized) {
      for (const field of Object.keys(documents[0])) {
        this.builder.field(field);
      }
      this.builder.ref("location");
      this.schemaInitialized = true;
    }
    for (const document of documents) {
      this.builder.add(document);
      this.docStore[document.location] = document;
    }
  }
  buildIndex() {
    return this.builder.build();
  }
  getDocumentStore() {
    return this.docStore;
  }
}
class LunrSearchEngine {
  lunrIndices = {};
  docStore;
  logger;
  highlightPreTag;
  highlightPostTag;
  constructor(options) {
    this.logger = options.logger;
    this.docStore = {};
    // A random UUID keeps the highlight tags collision-free with document text.
    const uuidTag = uuid.v4();
    this.highlightPreTag = `<${uuidTag}>`;
    this.highlightPostTag = `</${uuidTag}>`;
  }
  translator = ({
    term,
    filters,
    types,
    pageLimit
  }) => {
    const pageSize = pageLimit || 25;
    return {
      lunrQueryBuilder: (q) => {
        const termToken = lunr__default.default.tokenizer(term);
        // Exact (pipelined) matches score highest...
        q.term(termToken, {
          usePipeline: true,
          boost: 100
        });
        // ...then prefix matches...
        q.term(termToken, {
          usePipeline: false,
          boost: 10,
          wildcard: lunr__default.default.Query.wildcard.TRAILING
        });
        // ...then fuzzy matches within an edit distance of 2.
        q.term(termToken, {
          usePipeline: false,
          editDistance: 2,
          boost: 1
        });
        if (!filters) {
          return;
        }
        for (const [field, fieldValue] of Object.entries(filters)) {
          if (!q.allFields.includes(field)) {
            throw new Error(`unrecognised field ${field}`);
          }
          // Unwrap single-element arrays so they behave like scalars.
          let value = fieldValue;
          if (Array.isArray(fieldValue) && fieldValue.length === 1) {
            value = fieldValue[0];
          }
          if (["string", "number", "boolean"].includes(typeof value)) {
            const tokens = lunr__default.default.tokenizer(value?.toString()).map(lunr__default.default.stopWordFilter).filter((element) => element !== void 0);
            q.term(tokens, {
              presence: lunr__default.default.Query.presence.REQUIRED,
              fields: [field]
            });
          } else if (Array.isArray(value)) {
            this.logger.warn(
              `Non-scalar filter value used for field ${field}. Consider using a different Search Engine for better results.`
            );
            q.term(lunr__default.default.tokenizer(value), {
              presence: lunr__default.default.Query.presence.OPTIONAL,
              fields: [field]
            });
          } else {
            this.logger.warn(`Unknown filter type used on field ${field}`);
          }
        }
      },
      documentTypes: types,
      pageSize
    };
  };
  setTranslator(translator) {
    this.translator = translator;
  }
  async getIndexer(type) {
    const indexer = new LunrSearchEngineIndexer();
    const indexerLogger = this.logger.child({ documentType: type });
    let errorThrown;
    indexer.on("error", (err) => {
      errorThrown = err;
    });
    // Swap in the freshly-built index only once the stream closes cleanly
    // and actually delivered documents; otherwise keep the previous index.
    indexer.on("close", () => {
      const newDocuments = indexer.getDocumentStore();
      const docStoreExists = this.lunrIndices[type] !== void 0;
      const documentsIndexed = Object.keys(newDocuments).length;
      if (!errorThrown && documentsIndexed > 0) {
        this.lunrIndices[type] = indexer.buildIndex();
        this.docStore = { ...this.docStore, ...newDocuments };
      } else {
        indexerLogger.warn(
          `Index for ${type} was not ${docStoreExists ? "replaced" : "created"}: ${errorThrown ? "an error was encountered" : "indexer received 0 documents"}`
        );
      }
    });
    return indexer;
  }
  async query(query) {
    const { lunrQueryBuilder, documentTypes, pageSize } = this.translator(
      query
    );
    const results = [];
    // Only consult indices matching the requested document types (or all).
    const indexKeys = Object.keys(this.lunrIndices).filter(
      (type) => !documentTypes || documentTypes.includes(type)
    );
    if (documentTypes?.length && !indexKeys.length) {
      throw new MissingIndexError(
        `Missing index for ${documentTypes?.toString()}. This could be because the index hasn't been created yet or there was a problem during index creation.`
      );
    }
    for (const type of indexKeys) {
      try {
        const hits = this.lunrIndices[type].query(lunrQueryBuilder).map((result) => ({ result, type }));
        results.push(...hits);
      } catch (err) {
        // Filters may reference fields unknown to a given index; skip those.
        if (err instanceof Error && err.message.startsWith("unrecognised field")) {
          continue;
        }
        throw err;
      }
    }
    // Highest score first across all indices.
    results.sort((doc1, doc2) => doc2.result.score - doc1.result.score);
    const { page } = decodePageCursor(query.pageCursor);
    const offset = page * pageSize;
    const hasPreviousPage = page > 0;
    const hasNextPage = results.length > offset + pageSize;
    const nextPageCursor = hasNextPage ? encodePageCursor({ page: page + 1 }) : void 0;
    const previousPageCursor = hasPreviousPage ? encodePageCursor({ page: page - 1 }) : void 0;
    const pageResults = results.slice(offset, offset + pageSize).map((d, index) => ({
      type: d.type,
      document: this.docStore[d.result.ref],
      rank: page * pageSize + index + 1,
      highlight: {
        preTag: this.highlightPreTag,
        postTag: this.highlightPostTag,
        fields: parseHighlightFields({
          preTag: this.highlightPreTag,
          postTag: this.highlightPostTag,
          doc: this.docStore[d.result.ref],
          positionMetadata: d.result.matchData.metadata
        })
      }
    }));
    return {
      results: pageResults,
      numberOfResults: results.length,
      nextPageCursor,
      previousPageCursor
    };
  }
}
/**
 * Decodes an opaque, base64-encoded page cursor into a page number.
 *
 * @param pageCursor - Cursor previously produced by `encodePageCursor`,
 *   or undefined/empty for the first page.
 * @returns An object with the zero-based `page` number. Malformed cursors
 *   (anything that does not decode to a non-negative integer) fall back to
 *   page 0 instead of letting `NaN` propagate into offset arithmetic.
 */
function decodePageCursor(pageCursor) {
  if (!pageCursor) {
    return { page: 0 };
  }
  const page = Number(Buffer.from(pageCursor, "base64").toString("utf-8"));
  // Guard against tampered or corrupted cursors: a NaN or negative page
  // would otherwise yield nonsensical slice offsets and ranks downstream.
  if (!Number.isInteger(page) || page < 0) {
    return { page: 0 };
  }
  return { page };
}
/**
 * Encodes a zero-based page number into an opaque, base64-encoded cursor
 * suitable for returning to clients.
 */
function encodePageCursor({ page }) {
  return Buffer.from(String(page), "utf-8").toString("base64");
}
/**
 * Builds a map of field name to highlighted field content, wrapping each
 * matched term occurrence in the given pre/post tags.
 *
 * @param preTag - Marker inserted immediately before each matched term.
 * @param postTag - Marker inserted immediately after each matched term.
 * @param doc - The source document whose fields are highlighted.
 * @param positionMetadata - Lunr match metadata keyed by term, then by field,
 *   each entry carrying `position` pairs of `[start, length]`.
 * @returns Object mapping each matched field to its tag-wrapped content.
 */
function parseHighlightFields({
  preTag,
  postTag,
  doc,
  positionMetadata
}) {
  // Collect every [start, length] pair per field across all matched terms.
  const highlightFieldPositions = Object.values(positionMetadata).reduce(
    (fieldPositions, metadata) => {
      Object.keys(metadata).forEach((fieldKey) => {
        // `position` may be absent for a field; the original chained
        // `?.filter` could yield undefined, and the unguarded `.length`
        // access below then threw a TypeError. Guard with `?.length`.
        const validFieldMetadataPositions = metadata[fieldKey]?.position?.filter((position) => Array.isArray(position));
        if (validFieldMetadataPositions?.length) {
          fieldPositions[fieldKey] = fieldPositions[fieldKey] ?? [];
          fieldPositions[fieldKey].push(...validFieldMetadataPositions);
        }
      });
      return fieldPositions;
    },
    {}
  );
  return Object.fromEntries(
    Object.entries(highlightFieldPositions).map(([field, positions]) => {
      // Sort descending by start offset and insert tags from the end of the
      // string backwards, so earlier offsets stay valid as the string grows.
      positions.sort((a, b) => b[0] - a[0]);
      const highlightedField = positions.reduce((content, pos) => {
        return `${String(content).substring(0, pos[0])}${preTag}${String(content).substring(pos[0], pos[0] + pos[1])}${postTag}${String(content).substring(pos[0] + pos[1])}`;
      }, doc[field] ?? "");
      return [field, highlightedField];
    })
  );
}
class TestPipeline {
  collator;
  decorator;
  indexer;
  constructor({
    collator,
    decorator,
    indexer
  }) {
    this.collator = collator;
    this.decorator = decorator;
    this.indexer = indexer;
  }
  /**
   * Provide the collator, decorator, or indexer to be tested.
   *
   * @deprecated Use `fromCollator`, `fromDecorator` or `fromIndexer` static
   * methods to create a test pipeline instead.
   */
  static withSubject(subject) {
    // Order matters: a Transform is also a Writable, so check it first.
    if (subject instanceof stream.Transform) {
      return new TestPipeline({ decorator: subject });
    }
    if (subject instanceof stream.Writable) {
      return new TestPipeline({ indexer: subject });
    }
    if (subject.readable || subject instanceof stream.Readable) {
      return new TestPipeline({ collator: subject });
    }
    throw new Error(
      "Unknown test subject: are you passing a readable, writable, or transform stream?"
    );
  }
  /**
   * Create a test pipeline given a collator you want to test.
   */
  static fromCollator(collator) {
    return new TestPipeline({ collator });
  }
  /**
   * Add a collator to the test pipeline.
   */
  withCollator(collator) {
    this.collator = collator;
    return this;
  }
  /**
   * Create a test pipeline given a decorator you want to test.
   */
  static fromDecorator(decorator) {
    return new TestPipeline({ decorator });
  }
  /**
   * Add a decorator to the test pipeline.
   */
  withDecorator(decorator) {
    this.decorator = decorator;
    return this;
  }
  /**
   * Create a test pipeline given an indexer you want to test.
   */
  static fromIndexer(indexer) {
    return new TestPipeline({ indexer });
  }
  /**
   * Add an indexer to the test pipeline.
   */
  withIndexer(indexer) {
    this.indexer = indexer;
    return this;
  }
  /**
   * Provide documents for testing decorators and indexers.
   */
  withDocuments(documents) {
    if (this.collator) {
      throw new Error("Cannot provide documents when testing a collator.");
    }
    // Synthesize a collator that replays the given documents then ends.
    const source = new stream.Readable({ objectMode: true });
    source._read = () => {
    };
    process.nextTick(() => {
      for (const document of documents) {
        source.push(document);
      }
      source.push(null);
    });
    this.collator = source;
    return this;
  }
  /**
   * Execute the test pipeline so that you can make assertions about the result
   * or behavior of the given test subject.
   */
  async execute() {
    const documents = [];
    if (!this.collator) {
      throw new Error(
        "Cannot execute pipeline without a collator or documents"
      );
    }
    if (!this.indexer) {
      // Default indexer simply captures every document it receives.
      const sink = new stream.Writable({ objectMode: true });
      sink._write = (document, _, done) => {
        documents.push(document);
        done();
      };
      this.indexer = sink;
    }
    return new Promise((done) => {
      const pipes = this.decorator ? [this.collator, this.decorator, this.indexer] : [this.collator, this.indexer];
      // Resolve (never reject) with whatever the pipeline produced, so tests
      // can assert on both success and failure outcomes.
      stream.pipeline(pipes, (error) => {
        done({ error, documents });
      });
    });
  }
}
exports.BatchSearchEngineIndexer = BatchSearchEngineIndexer; | ||
exports.DecoratorBase = DecoratorBase; | ||
exports.IndexBuilder = IndexBuilder; | ||
exports.LunrSearchEngine = LunrSearchEngine; | ||
exports.MissingIndexError = MissingIndexError; | ||
exports.NewlineDelimitedJsonCollatorFactory = NewlineDelimitedJsonCollatorFactory; | ||
exports.Scheduler = Scheduler; | ||
exports.TestPipeline = TestPipeline; | ||
exports.IndexBuilder = IndexBuilder.IndexBuilder; | ||
exports.Scheduler = Scheduler.Scheduler; | ||
exports.NewlineDelimitedJsonCollatorFactory = NewlineDelimitedJsonCollatorFactory.NewlineDelimitedJsonCollatorFactory; | ||
exports.LunrSearchEngine = LunrSearchEngine.LunrSearchEngine; | ||
exports.MissingIndexError = errors.MissingIndexError; | ||
exports.BatchSearchEngineIndexer = BatchSearchEngineIndexer.BatchSearchEngineIndexer; | ||
exports.DecoratorBase = DecoratorBase.DecoratorBase; | ||
exports.TestPipeline = TestPipeline.TestPipeline; | ||
//# sourceMappingURL=index.cjs.js.map |
{ | ||
"name": "@backstage/plugin-search-backend-node", | ||
"version": "1.3.3-next.1", | ||
"version": "1.3.3-next.2", | ||
"description": "A library for Backstage backend plugins that want to interact with the search backend plugin", | ||
@@ -55,4 +55,4 @@ "backstage": { | ||
"dependencies": { | ||
"@backstage/backend-defaults": "0.5.1-next.1", | ||
"@backstage/backend-plugin-api": "1.0.1-next.0", | ||
"@backstage/backend-defaults": "0.5.1-next.2", | ||
"@backstage/backend-plugin-api": "1.0.1-next.1", | ||
"@backstage/config": "1.2.0", | ||
@@ -70,6 +70,6 @@ "@backstage/errors": "1.2.4", | ||
"@backstage/backend-common": "^0.25.0", | ||
"@backstage/backend-test-utils": "1.0.1-next.1", | ||
"@backstage/cli": "0.28.0-next.1", | ||
"@backstage/backend-test-utils": "1.0.1-next.2", | ||
"@backstage/cli": "0.28.0-next.2", | ||
"@types/ndjson": "^2.0.1" | ||
} | ||
} |
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: This package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Unidentified License
License (Experimental): Something that seems like a license was found, but its contents could not be matched with a known license.
Found 9 instances in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Unidentified License
License (Experimental): Something that seems like a license was found, but its contents could not be matched with a known license.
Found 1 instance in 1 package
177478
28
1334
10
60
1
+ Added @backstage/backend-app-api@1.0.1-next.1 (transitive)
+ Added @backstage/backend-defaults@0.5.1-next.2 (transitive)
+ Added @backstage/backend-plugin-api@1.0.1-next.1 (transitive)
+ Added @backstage/cli-node@0.2.9-next.0 (transitive)
+ Added @backstage/integration@1.15.1-next.1 (transitive)
+ Added @backstage/plugin-auth-node@0.5.3-next.1 (transitive)
+ Added @backstage/plugin-events-node@0.4.1-next.1 (transitive)
+ Added @backstage/plugin-permission-node@0.8.4-next.1 (transitive)
+ Added cookie@0.7.2 (transitive)
- Removed @backstage/backend-app-api@1.0.1-next.0 (transitive)
- Removed @backstage/backend-defaults@0.5.1-next.1 (transitive)
- Removed @backstage/backend-plugin-api@1.0.1-next.0 (transitive)
- Removed @backstage/cli-node@0.2.8 (transitive)
- Removed @backstage/integration@1.15.1-next.0 (transitive)
- Removed @backstage/plugin-auth-node@0.5.3-next.0 (transitive)
- Removed @backstage/plugin-events-node@0.4.1-next.0 (transitive)
- Removed @backstage/plugin-permission-node@0.8.4-next.0 (transitive)
- Removed cookie@0.6.0 (transitive)