
xeno-test

Package Overview: Dependencies 56, Maintainers 1, Versions 20
Comparing version 0.0.17 to 0.0.18

dist/es2022/file-processor/item-formats/helpers/json-item.helper.d.ts


dist/es2022/core/core.models.d.ts
import { AssetModels, ContentItemModels, ElementContracts, ElementModels, LanguageVariantModels } from '@kontent-ai/management-sdk';
import { ProcessingFormat } from '../file-processor/index.js';
import { IImportAsset, IParsedContentItem } from '../import/index.js';
import { IParsedAsset, IParsedContentItem } from '../import/index.js';
import { ContentItemElementsIndexer, IContentItem, IContentType } from '@kontent-ai/delivery-sdk';

@@ -36,3 +36,3 @@ export interface ICliFileConfig {

assets: {
original: IImportAsset;
original: IParsedAsset;
imported: AssetModels.Asset;

@@ -45,3 +45,3 @@ }[];

languageVariants: {
original: any;
original: IParsedContentItem;
imported: LanguageVariantModels.ContentItemLanguageVariant;

@@ -48,0 +48,0 @@ }[];

import { IManagementClient, EnvironmentModels, SharedModels } from '@kontent-ai/management-sdk';
import { IRetryStrategyOptions } from '@kontent-ai/core-sdk';
import { ActionType, ItemType } from './core.models.js';
import { HttpService } from '@kontent-ai/core-sdk';

@@ -11,8 +10,2 @@ export declare const defaultHttpService: HttpService;

export declare function printProjectAndEnvironmentInfoToConsoleAsync(client: IManagementClient<any>): Promise<EnvironmentModels.EnvironmentInformationModel>;
export declare function logAction(actionType: ActionType, itemType: ItemType, data: {
language?: string;
workflowStep?: string;
title: string;
codename?: string;
}): void;
export declare function extractErrorMessage(error: any): string;

@@ -19,0 +12,0 @@ export declare function is404Error(error: any): boolean;

import { SharedModels } from '@kontent-ai/management-sdk';
import { format } from 'bytes';
import { logDebug } from './log-helper.js';
import { logDebug, logErrorAndExit } from './log-helper.js';
import { HttpService } from '@kontent-ai/core-sdk';

@@ -45,12 +45,2 @@ const rateExceededErrorCode = 10000;

}
export function logAction(actionType, itemType, data) {
logDebug({
type: actionType,
message: data.title,
partA: itemType,
partB: data.codename,
partC: data.language,
partD: data.workflowStep
});
}
export function extractErrorMessage(error) {

@@ -78,9 +68,12 @@ if (error instanceof SharedModels.ContentManagementBaseKontentError) {

if (error instanceof SharedModels.ContentManagementBaseKontentError) {
throw {
Message: `Failed to import data with error: ${error.message}`,
ErrorCode: error.errorCode,
RequestId: error.requestId,
ValidationErrors: `${error.validationErrors.map((m) => m.message).join(', ')}`
};
logErrorAndExit({
message: `${error.message}. Error code '${error.errorCode}'. Request Id '${error.requestId}'.${error.validationErrors.length ? ` ${error.validationErrors.map((m) => m.message).join(', ')}` : ''}`
});
}
if (error instanceof Error) {
logErrorAndExit({
message: error.message
});
}
// unhandled error
throw error;

@@ -92,3 +85,5 @@ }

if (splitPaths.length < 3) {
throw Error(`Invalid asset url '${assetUrl}' because asset id could not be determined`);
logErrorAndExit({
message: `Invalid asset url '${assetUrl}' because asset id could not be determined`
});
}

@@ -95,0 +90,0 @@ return splitPaths[2];

export * from './core.models.js';
export * from './translation-helper.js';
export * from './id-translate-helper.js';
export * from './global-helper.js';
export * from './core-properties.js';
export * from './log-helper.js';
export * from './core.models.js';
export * from './translation-helper.js';
export * from './id-translate-helper.js';
export * from './global-helper.js';
export * from './core-properties.js';
export * from './log-helper.js';
//# sourceMappingURL=index.js.map
import { ActionType, ItemType } from './core.models.js';
export type DebugType = 'error' | 'warning' | 'info' | ActionType;
export declare function logErrorAndExit(data: {
message: string;
}): never;
export declare function logProcessingDebug(data: {

@@ -18,1 +21,7 @@ index: number;

}): void;
export declare function logItemAction(actionType: ActionType, itemType: ItemType, data: {
language?: string;
workflowStep?: string;
title: string;
codename?: string;
}): void;
import colors from 'colors';
export function logErrorAndExit(data) {
logDebug({
type: 'error',
message: data.message,
partA: 'Stopping process'
});
process.exit(1);
}
export function logProcessingDebug(data) {

@@ -18,2 +26,12 @@ console.log(`[${colors.bgYellow(colors.black(`${data.index}/${data.totalCount}`))}][${colors.yellow(data.itemType)}]: Starts processing ${data.title}`);

}
export function logItemAction(actionType, itemType, data) {
logDebug({
type: actionType,
message: data.title,
partA: itemType,
partB: data.codename,
partC: data.language,
partD: data.workflowStep
});
}
//# sourceMappingURL=log-helper.js.map
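
Because `logErrorAndExit` is declared as returning `never`, it can stand in for a `throw` without breaking TypeScript's control-flow analysis, which is how the rest of this diff uses it. A minimal sketch of the pattern (`getAssetIdFromUrlOrExit` is a hypothetical wrapper mirroring the asset-url check elsewhere in this diff):

import { logErrorAndExit } from './log-helper.js';

function getAssetIdFromUrlOrExit(assetUrl: string): string {
    const splitPaths: string[] = assetUrl.split('/');
    if (splitPaths.length < 3) {
        // returns `never`, so TypeScript knows execution cannot continue past this call
        logErrorAndExit({
            message: `Invalid asset url '${assetUrl}' because asset id could not be determined`
        });
    }
    return splitPaths[2];
}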
import { ElementType } from '@kontent-ai/delivery-sdk';
import { HttpService } from '@kontent-ai/core-sdk';
import { extractAssetIdFromUrl, getExtension, extractFilenameFromUrl, defaultRetryStrategy, formatBytes } from '../../../../core/index.js';
import { logDebug, logProcessingDebug } from '../../../../core/log-helper.js';
import { extractAssetIdFromUrl, getExtension, extractFilenameFromUrl, defaultRetryStrategy, formatBytes, logDebug, logProcessingDebug } from '../../../../core/index.js';
export class ExportAssetsHelper {

@@ -6,0 +5,0 @@ httpService = new HttpService();

@@ -1,3 +0,3 @@

import { logDebug } from '../../../../core/log-helper.js';
import { translationHelper } from '../../../../core/index.js';
import { logDebug } from '../../../../core/index.js';
import { translationHelper } from '../../../../translation/index.js';
export class ExportContentItemHelper {

@@ -4,0 +4,0 @@ async exportContentItemsAsync(deliveryClient, config, types, languages) {

import { createDeliveryClient } from '@kontent-ai/delivery-sdk';
import { logDebug } from '../../../core/log-helper.js';
import { exportContentItemHelper } from './helpers/export-content-item.helper.js';
import { defaultHttpService, defaultRetryStrategy } from '../../../core/global-helper.js';
import { exportAssetsHelper } from './helpers/export-assets-item.helper.js';
import { logDebug } from '../../../core/index.js';
export class KontentAiExportAdapter {

@@ -7,0 +7,0 @@ config;

import { IParsedAsset } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { IExportAsset } from '../../export/index.js';
import { AssetsParseData, AssetsTransformData, FileBinaryData } from '../file-processor.models.js';
import { BaseAssetProcessorService } from '../base-asset-processor.service.js';
export declare class AssetCsvProcessorService extends BaseAssetProcessorService {
private readonly csvDelimiter;
private readonly assetsFilename;
readonly name: string;
transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
parseAssetsAsync(text: string): Promise<IParsedAsset[]>;
transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData>;
parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]>;
private getAssetZipFilename;
private geCsvParser;
private getAssetFields;
}

@@ -7,7 +7,8 @@ import { parse } from 'csv-parse';

csvDelimiter = ',';
assetsFilename = 'assets.csv';
name = 'csv';
async transformAssetsAsync(assets) {
async transformAssetsAsync(data) {
const assetFields = this.getAssetFields();
const stream = new Readable();
stream.push(JSON.stringify(assets));
stream.push(JSON.stringify(data.assets));
stream.push(null); // required to end the stream

@@ -17,12 +18,14 @@ const parsingProcessor = this.geCsvParser({

}).fromInput(stream);
const data = (await parsingProcessor.promise()) ?? '';
return [
{
filename: 'assets.csv',
data: data,
itemsCount: assets.length
}
];
const csvContent = (await parsingProcessor.promise()) ?? '';
data.zip.addFile(this.assetsFilename, csvContent);
for (const exportAsset of data.assets) {
await data.zip.addFile(this.getAssetZipFilename(exportAsset.assetId, exportAsset.extension), exportAsset.binaryData);
}
return data.zip.generateZipAsync();
}
async parseAssetsAsync(text) {
async parseAssetsAsync(data) {
const text = await data.zip.getFileContentAsync(this.assetsFilename);
if (!text) {
return [];
}
const parsedAssets = [];

@@ -46,3 +49,4 @@ let index = 0;

filename: '',
url: ''
url: '',
binaryData: undefined
};

@@ -55,2 +59,4 @@ let fieldIndex = 0;

}
// add binary data to record
parsedAsset.binaryData = await data.zip.getBinaryDataAsync(this.getAssetZipFilename(parsedAsset.assetId, parsedAsset.extension));
parsedAssets.push(parsedAsset);

@@ -62,2 +68,5 @@ }

}
getAssetZipFilename(assetId, extension) {
return `${assetId}.${extension}`; // use id as filename to prevent filename conflicts
}
geCsvParser(config) {

@@ -64,0 +73,0 @@ return new AsyncParser({

@@ -1,9 +0,10 @@

import { IExportAsset } from '../../export/index.js';
import { IParsedAsset } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { AssetsParseData, AssetsTransformData, FileBinaryData } from '../file-processor.models.js';
import { BaseAssetProcessorService } from '../base-asset-processor.service.js';
export declare class AssetJsonProcessorService extends BaseAssetProcessorService {
readonly name: string;
transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
parseAssetsAsync(text: string): Promise<IParsedAsset[]>;
private readonly assetsFilename;
transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData>;
parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]>;
private getAssetZipFilename;
}
import { BaseAssetProcessorService } from '../base-asset-processor.service.js';
export class AssetJsonProcessorService extends BaseAssetProcessorService {
name = 'json';
async transformAssetsAsync(assets) {
return [
{
filename: 'assets.json',
itemsCount: assets.length,
data: JSON.stringify(assets.map((m) => {
const parsedAsset = {
assetId: m.assetId,
extension: m.extension,
filename: m.filename,
url: m.url
};
return parsedAsset;
}))
}
];
assetsFilename = 'assets.json';
async transformAssetsAsync(data) {
const assetRecords = [];
for (const exportAsset of data.assets) {
assetRecords.push({
assetId: exportAsset.assetId,
extension: exportAsset.extension,
filename: exportAsset.filename,
url: exportAsset.url
});
await data.zip.addFile(this.getAssetZipFilename(exportAsset.assetId, exportAsset.extension), exportAsset.binaryData);
}
data.zip.addFile(this.assetsFilename, JSON.stringify(assetRecords));
return await data.zip.generateZipAsync();
}
async parseAssetsAsync(text) {
return JSON.parse(text);
async parseAssetsAsync(data) {
const text = await data.zip.getFileContentAsync(this.assetsFilename);
if (!text) {
return [];
}
const assetRecords = JSON.parse(text);
const parsedAssets = [];
for (const assetRecord of assetRecords) {
parsedAssets.push({
...assetRecord,
binaryData: await data.zip.getBinaryDataAsync(this.getAssetZipFilename(assetRecord.assetId, assetRecord.extension))
});
}
return parsedAssets;
}
getAssetZipFilename(assetId, extension) {
return `${assetId}.${extension}`; // use id as filename to prevent filename conflicts
}
}
//# sourceMappingURL=asset-json-processor.service.js.map
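
For orientation, a round trip through the reworked service looks roughly like this. It is a sketch run inside an async context, not package documentation: `exportAssets` is assumed to be an `IExportAsset[]` from an export run, and `ZipPackage` is constructed over a JSZip instance the same way `FileProcessorService` does later in this diff:

import JSZip from 'jszip';
import { ZipPackage } from '../zip-package.class.js';
import { AssetJsonProcessorService } from './asset-json-processor.service.js';

const service = new AssetJsonProcessorService();

// transform: writes 'assets.json' plus one '<assetId>.<extension>' binary per asset,
// then resolves the generated zip binary (Blob or Buffer)
const zipBinary = await service.transformAssetsAsync({
    assets: exportAssets,
    zip: new ZipPackage(new JSZip())
});

// parse: reads 'assets.json' back and re-attaches binary data to each record
const parsedAssets = await service.parseAssetsAsync({
    zip: new ZipPackage(await JSZip.loadAsync(zipBinary))
});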

@@ -1,9 +0,8 @@

import { IExportAsset } from '../export/index.js';
import { IParsedAsset } from '../import/index.js';
import { IFileData, IAssetFormatService } from './file-processor.models.js';
import { IAssetFormatService, AssetsTransformData, FileBinaryData, AssetsParseData } from './file-processor.models.js';
export declare abstract class BaseAssetProcessorService implements IAssetFormatService {
abstract name: string;
abstract transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
abstract parseAssetsAsync(text: string): Promise<IParsedAsset[]>;
abstract transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData>;
abstract parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]>;
protected getSystemAssetFields(): string[];
}
import { IImportContentType, IImportContentTypeElement, IParsedContentItem } from '../import/index.js';
import { IItemFormatService, IFileData } from './file-processor.models.js';
import { IExportContentItem } from '../export/index.js';
import { IItemFormatService, ItemsTransformData, ItemsParseData, FileBinaryData } from './file-processor.models.js';
export declare abstract class BaseItemProcessorService implements IItemFormatService {
abstract name: string;
abstract transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
abstract parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
abstract transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData>;
abstract parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]>;
protected getSystemContentItemFields(): string[];
protected getElement(types: IImportContentType[], contentItemType: string, elementCodename: string): IImportContentTypeElement;
}

@@ -0,1 +1,2 @@

import { logErrorAndExit } from '../core/index.js';
export class BaseItemProcessorService {

@@ -8,7 +9,11 @@ getSystemContentItemFields() {

if (!type) {
throw Error(`Could not find content type '${contentItemType}'`);
logErrorAndExit({
message: `Could not find content type '${contentItemType}'`
});
}
const element = type.elements.find((m) => m.codename.toLowerCase() === elementCodename.toLowerCase());
if (!element) {
throw Error(`Could not find element with codename '${elementCodename}' for type '${type.contentTypeCodename}'`);
logErrorAndExit({
message: `Could not find element with codename '${elementCodename}' for type '${type.contentTypeCodename}'`
});
}

@@ -15,0 +20,0 @@ return element;

/// <reference types="node" resolution-mode="require"/>
import { IExportContentItem, IExportAsset } from '../export/index.js';
import { IImportContentType, IParsedAsset, IParsedContentItem } from '../import/index.js';
import { ZipPackage } from './zip-package.class.js';
/**

@@ -8,13 +9,29 @@ * Browser is currently not generally supported as we depend on a few node.js specific APIs

export type ZipContext = 'node.js' | 'browser';
export type FileBinaryData = Blob | Buffer;
export type ProcessingFormat = 'csv' | 'json' | 'jsonJoined';
export type ZipCompressionLevel = 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;
export type ItemsTransformData = {
readonly zip: ZipPackage;
readonly items: IExportContentItem[];
};
export type ItemsParseData = {
readonly zip: ZipPackage;
readonly types: IImportContentType[];
};
export interface IItemFormatService {
name: string;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData>;
parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]>;
}
export type AssetsTransformData = {
readonly zip: ZipPackage;
readonly assets: IExportAsset[];
};
export type AssetsParseData = {
readonly zip: ZipPackage;
};
export interface IAssetFormatService {
name: string;
transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
parseAssetsAsync(text: string): Promise<IParsedAsset[]>;
transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData>;
parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]>;
}
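
The net effect of these model changes: a format service no longer returns `IFileData[]` for the caller to pack; it receives a `ZipPackage`, writes its own files into it, and resolves the final `FileBinaryData`. A minimal sketch of a custom asset format service against the new contracts; the service itself (an NDJSON manifest) is hypothetical, while the `ZipPackage` calls (`addFile`, `getFileContentAsync`, `getBinaryDataAsync`, `generateZipAsync`) are the ones exercised by the built-in services in this diff:

import { IParsedAsset } from '../../import/index.js';
import { BaseAssetProcessorService } from '../base-asset-processor.service.js';
import { AssetsParseData, AssetsTransformData, FileBinaryData } from '../file-processor.models.js';

export class AssetNdjsonProcessorService extends BaseAssetProcessorService {
    private readonly assetsFilename: string = 'assets.ndjson';
    readonly name: string = 'ndjson';

    async transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData> {
        // one JSON record per line; binaries stored as '<assetId>.<extension>' to prevent filename conflicts
        const manifest: string = data.assets
            .map((m) => JSON.stringify({ assetId: m.assetId, extension: m.extension, filename: m.filename, url: m.url }))
            .join('\n');
        data.zip.addFile(this.assetsFilename, manifest);
        for (const exportAsset of data.assets) {
            await data.zip.addFile(`${exportAsset.assetId}.${exportAsset.extension}`, exportAsset.binaryData);
        }
        return await data.zip.generateZipAsync();
    }

    async parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]> {
        const text = await data.zip.getFileContentAsync(this.assetsFilename);
        if (!text) {
            return [];
        }
        const parsedAssets: IParsedAsset[] = [];
        for (const line of text.split('\n').filter((m) => m.length)) {
            const record = JSON.parse(line);
            parsedAssets.push({
                ...record,
                // re-attach binary data stored under the id-based filename
                binaryData: await data.zip.getBinaryDataAsync(`${record.assetId}.${record.extension}`)
            });
        }
        return parsedAssets;
    }
}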

@@ -28,8 +45,1 @@ export interface IExtractedBinaryFileData {

}
export interface IFileProcessorConfig {
}
export interface IFileData {
filename: string;
data: string;
itemsCount: number;
}
/// <reference types="node" resolution-mode="require"/>
import { IExportAdapterResult } from '../export/index.js';
import { IImportSource, IImportContentType } from '../import/index.js';
import { IFileProcessorConfig, IItemFormatService, ZipCompressionLevel, IAssetFormatService } from './file-processor.models.js';
import { IItemFormatService, ZipCompressionLevel, IAssetFormatService, FileBinaryData } from './file-processor.models.js';
import { IExportTransformConfig } from '../core/index.js';
export declare class FileProcessorService {
private readonly zipContext;
private readonly metadataName;
private readonly binaryFilesFolderName;
constructor(config?: IFileProcessorConfig);
constructor();
parseZipAsync(data: {

@@ -37,15 +34,7 @@ items?: {

compressionLevel?: ZipCompressionLevel;
}): Promise<any>;
}): Promise<FileBinaryData>;
createAssetsZipAsync(exportData: IExportAdapterResult, config: {
assetFormatService: IAssetFormatService;
compressionLevel?: ZipCompressionLevel;
}): Promise<any>;
private getZipSizeInBytes;
private transformLanguageVariantsAsync;
private parseAssetsFromFileAsync;
private extractBinaryFilesAsync;
private getAssetIdFromFilename;
private getZipOutputType;
private parseContentItemsFromZipAsync;
private parseMetadataFromZipAsync;
}): Promise<FileBinaryData>;
}
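
Under the new declarations the service is driven roughly like this (a sketch inside an async context: `exportData` is assumed to be an `IExportAdapterResult`, import paths are relative to dist/es2022, and only the options visible in the declarations above are used):

import { FileProcessorService } from './file-processor/file-processor.service.js';
import { ItemJsonProcessorService } from './file-processor/item-formats/item-json-processor.service.js';
import { AssetJsonProcessorService } from './file-processor/asset-formats/asset-json-processor.service.js';

// the constructor no longer accepts IFileProcessorConfig
const fileProcessorService = new FileProcessorService();

// both calls now resolve with FileBinaryData (Buffer in node.js, Blob in browser)
const itemsZipData = await fileProcessorService.createItemsZipAsync(exportData, {
    itemFormatService: new ItemJsonProcessorService(),
    compressionLevel: 9
});
const assetsZipData = await fileProcessorService.createAssetsZipAsync(exportData, {
    assetFormatService: new AssetJsonProcessorService(),
    compressionLevel: 9
});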
import colors from 'colors';
import JSZip from 'jszip';
import { Blob } from 'buffer';
import { formatBytes, getExtension } from '../core/index.js';
import mime from 'mime';
import { logDebug, logProcessingDebug } from '../core/log-helper.js';
import { logDebug } from '../core/index.js';
import { ZipPackage } from './zip-package.class.js';
export class FileProcessorService {
zipContext = 'node.js';
metadataName = '_metadata.json';
binaryFilesFolderName = 'files';
constructor(config) { }
constructor() { }
async parseZipAsync(data) {
let itemsZipFile = undefined;
let assetsZipFile = undefined;
const result = {
importData: {
items: [],
assets: []
}
};
if (data.items) {

@@ -20,3 +19,7 @@ logDebug({

});
itemsZipFile = await JSZip.loadAsync(data.items.file, {});
const itemsZipFile = await JSZip.loadAsync(data.items.file, {});
result.importData.items.push(...(await data.items.formatService.parseContentItemsAsync({
zip: new ZipPackage(itemsZipFile),
types: data.types
})));
logDebug({

@@ -32,3 +35,6 @@ type: 'info',

});
assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
const assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
result.importData.assets.push(...(await data.assets.formatService.parseAssetsAsync({
zip: new ZipPackage(assetsZipFile)
})));
logDebug({

@@ -39,13 +45,2 @@ type: 'info',

}
const result = {
importData: {
items: itemsZipFile && data.items
? await this.parseContentItemsFromZipAsync(itemsZipFile, data.types, data.items.formatService)
: [],
assets: assetsZipFile && data.assets
? await this.parseAssetsFromFileAsync(assetsZipFile, data.assets?.formatService)
: []
},
metadata: itemsZipFile ? await this.parseMetadataFromZipAsync(itemsZipFile, this.metadataName) : undefined
};
logDebug({

@@ -65,3 +60,7 @@ type: 'info',

});
parsedItems = await data.items.formatService.parseContentItemsAsync(data.items.file.toString(), data.types);
const itemsZipFile = await JSZip.loadAsync(data.items.file, {});
parsedItems = await data.items.formatService.parseContentItemsAsync({
zip: new ZipPackage(itemsZipFile),
types: data.types
});
}

@@ -74,3 +73,5 @@ if (data.assets) {

const assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
parsedAssets = await this.parseAssetsFromFileAsync(assetsZipFile, data.assets.formatService);
parsedAssets = await data.assets.formatService.parseAssetsAsync({
zip: new ZipPackage(assetsZipFile)
});
}

@@ -81,4 +82,3 @@ const result = {

assets: parsedAssets
},
metadata: undefined
}
};

@@ -92,235 +92,26 @@ logDebug({

async createItemsZipAsync(exportData, config) {
const zip = new JSZip();
const contentItemsFolder = zip;
logDebug({
type: 'info',
message: `Transforming '${exportData.items.length.toString()}' content items`,
partA: config.itemFormatService?.name
message: `Creating items zip`,
partA: config.itemFormatService.name
});
const transformedLanguageVariantsFileData = await this.transformLanguageVariantsAsync(exportData.items, config.itemFormatService);
for (const fileInfo of transformedLanguageVariantsFileData) {
logDebug({
type: 'info',
message: `Adding '${fileInfo.itemsCount}' items to file within zip`,
partA: fileInfo.filename
});
contentItemsFolder.file(fileInfo.filename, fileInfo.data);
}
const zipOutputType = this.getZipOutputType(this.zipContext);
const compressionLevel = config.compressionLevel ?? 9;
logDebug({
type: 'info',
message: `Creating zip file using '${zipOutputType}' with compression level '${compressionLevel.toString()}'`
const zip = await config.itemFormatService.transformContentItemsAsync({
items: exportData.items,
zip: new ZipPackage(new JSZip())
});
const zipData = await zip.generateAsync({
type: zipOutputType,
compression: 'DEFLATE',
compressionOptions: {
level: compressionLevel
},
streamFiles: true
});
logDebug({
type: 'info',
message: `Zip successfully generated`,
partA: formatBytes(this.getZipSizeInBytes(zipData))
});
return zipData;
return zip;
}
async createAssetsZipAsync(exportData, config) {
const zip = new JSZip();
const assetsFolder = zip;
const filesFolder = zip.folder(this.binaryFilesFolderName);
if (!filesFolder) {
throw Error(`Could not create folder '${this.binaryFilesFolderName}'`);
}
if (exportData.assets.length) {
logDebug({
type: 'info',
message: `Transforming '${exportData.assets.length.toString()}' assets`,
partA: config.assetFormatService?.name
});
const transformedAssetsFileData = await config.assetFormatService.transformAssetsAsync(exportData.assets);
for (const fileInfo of transformedAssetsFileData) {
logDebug({
type: 'info',
message: `Adding '${fileInfo.itemsCount}' items to file within zip`,
partA: fileInfo.filename
});
assetsFolder.file(fileInfo.filename, fileInfo.data);
}
let assetIndex = 1;
for (const asset of exportData.assets) {
const assetFilename = `${asset.assetId}.${asset.extension}`; // use id as filename to prevent filename conflicts
logProcessingDebug({
index: assetIndex,
totalCount: exportData.assets.length,
itemType: 'zipFile',
title: `'${assetFilename}'`
});
filesFolder.file(assetFilename, asset.binaryData, {
binary: true
});
assetIndex++;
}
logDebug({
type: 'info',
message: `All assets added to zip`
});
}
else {
logDebug({
type: 'info',
message: `There are no assets`
});
}
const zipOutputType = this.getZipOutputType(this.zipContext);
const compressionLevel = config.compressionLevel ?? 9;
logDebug({
type: 'info',
message: `Creating zip file using '${zipOutputType}' with compression level '${compressionLevel.toString()}'`
message: `Creating assets zip`,
partA: config.assetFormatService?.name
});
const zipData = await zip.generateAsync({
type: zipOutputType,
compression: 'DEFLATE',
compressionOptions: {
level: compressionLevel
},
streamFiles: true
const zip = await config.assetFormatService.transformAssetsAsync({
assets: exportData.assets,
zip: new ZipPackage(new JSZip())
});
logDebug({
type: 'info',
message: `Zip successfully generated`,
partA: formatBytes(this.getZipSizeInBytes(zipData))
});
return zipData;
return zip;
}
getZipSizeInBytes(zipData) {
if (zipData instanceof Blob) {
return zipData.size;
}
else if (zipData instanceof Buffer) {
return zipData.byteLength;
}
throw Error(`Unrecognized zip data type '${typeof zipData}'`);
}
async transformLanguageVariantsAsync(items, formatService) {
return await formatService.transformContentItemsAsync(items);
}
async parseAssetsFromFileAsync(zip, assetFormatService) {
const importAssets = [];
const parsedAssets = [];
const files = zip.files;
const binaryFiles = await this.extractBinaryFilesAsync(zip);
for (const [, file] of Object.entries(files)) {
if (file?.name?.endsWith('/')) {
continue;
}
if (file?.name?.toLowerCase() === this.metadataName.toLowerCase()) {
continue;
}
if (file?.name?.startsWith(this.binaryFilesFolderName)) {
continue;
}
const text = await file.async('string');
parsedAssets.push(...(await assetFormatService.parseAssetsAsync(text)));
}
for (const parsedAsset of parsedAssets) {
const binaryFile = binaryFiles.find((m) => m.assetId === parsedAsset.assetId);
if (!binaryFile) {
throw Error(`Could not find binary data for asset with id '${parsedAsset.assetId}'`);
}
importAssets.push({
assetId: parsedAsset.assetId,
extension: binaryFile.extension,
filename: parsedAsset.filename,
mimeType: binaryFile.mimeType,
binaryData: binaryFile.binaryData
});
}
return importAssets;
}
async extractBinaryFilesAsync(zip) {
const extractedFiles = [];
let assetIndex = 0;
const files = Object.entries(zip.files);
for (const [, file] of files) {
assetIndex++;
logProcessingDebug({
index: assetIndex,
totalCount: files.length,
itemType: 'zipFile',
title: file.name
});
if (!file?.name?.startsWith(`${this.binaryFilesFolderName}/`)) {
// iterate through assets only
continue;
}
if (file?.name?.endsWith('/')) {
continue;
}
const binaryData = await file.async(this.getZipOutputType(this.zipContext));
logDebug({
type: 'extractBinaryData',
message: file.name,
partA: formatBytes(this.getZipSizeInBytes(binaryData))
});
const assetId = this.getAssetIdFromFilename(file.name);
const extension = getExtension(file.name) ?? '';
const filename = file.name;
const mimeType = mime.getType(file.name) ?? '';
extractedFiles.push({
assetId: assetId,
binaryData: binaryData,
filename: filename,
mimeType: mimeType,
extension: extension
});
}
logDebug({
type: 'info',
message: `All binary files (${extractedFiles.length}) were extracted`
});
return extractedFiles;
}
getAssetIdFromFilename(filename) {
const split = filename.split('/');
const filenameWithExtension = split[1];
return filenameWithExtension.split('.')[0];
}
getZipOutputType(context) {
if (context === 'browser') {
return 'blob';
}
if (context === 'node.js') {
return 'nodebuffer';
}
throw Error(`Unsupported context '${context}'`);
}
async parseContentItemsFromZipAsync(fileContents, types, formatService) {
const files = fileContents.files;
const parsedItems = [];
for (const file of Object.values(files)) {
if (file?.name?.endsWith('/')) {
continue;
}
if (file?.name?.toLowerCase() === this.metadataName.toLowerCase()) {
continue;
}
const text = await file.async('text');
parsedItems.push(...(await formatService.parseContentItemsAsync(text, types)));
}
return parsedItems;
}
async parseMetadataFromZipAsync(fileContents, filename) {
const files = fileContents.files;
const file = files[filename];
if (!file) {
// metadata is not required
return undefined;
}
const text = await file.async('text');
return JSON.parse(text);
}
}
//# sourceMappingURL=file-processor.service.js.map

@@ -8,1 +8,2 @@ export * from './file-processor.service.js';

export * from './asset-formats/asset-json-processor.service.js';
export * from './zip-package.class.js';

@@ -8,2 +8,3 @@ export * from './file-processor.service.js';

export * from './asset-formats/asset-json-processor.service.js';
export * from './zip-package.class.js';
//# sourceMappingURL=index.js.map

@@ -1,10 +0,9 @@

import { IImportContentType, IParsedContentItem } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { IParsedContentItem } from '../../import/index.js';
import { FileBinaryData, ItemsParseData, ItemsTransformData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { IExportContentItem } from '../../export/index.js';
export declare class ItemCsvProcessorService extends BaseItemProcessorService {
private readonly csvDelimiter;
readonly name: string;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData>;
parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]>;
private getTypeWrappers;

@@ -11,0 +10,0 @@ private mapToCsvItem;

@@ -8,6 +8,5 @@ import { parse } from 'csv-parse';

name = 'csv';
async transformContentItemsAsync(items) {
const fileData = [];
for (const typeWrapper of this.getTypeWrappers(items)) {
const contentItemsOfType = items
async transformContentItemsAsync(data) {
for (const typeWrapper of this.getTypeWrappers(data.items)) {
const contentItemsOfType = data.items
.filter((m) => m.system.type === typeWrapper.typeCodename)

@@ -23,61 +22,60 @@ .map((item) => this.mapToCsvItem(item, typeWrapper));

}).fromInput(languageVariantsStream);
const data = (await parsingProcessor.promise()) ?? '';
fileData.push({
data: data,
filename: filename,
itemsCount: contentItemsOfType.length
});
const csvContent = (await parsingProcessor.promise()) ?? '';
data.zip.addFile(filename, csvContent);
}
return fileData;
return await data.zip.generateZipAsync();
}
async parseContentItemsAsync(text, types) {
async parseContentItemsAsync(data) {
const zipFiles = await data.zip.getAllFilesAsync('string');
const parsedItems = [];
let index = 0;
const parser = parse(text, {
cast: true,
delimiter: this.csvDelimiter
});
let parsedColumns = [];
const systemFields = super.getSystemContentItemFields();
for await (const record of parser) {
if (index === 0) {
// process header row
parsedColumns = record;
}
else {
// process data row
const contentItem = {
system: {
type: '',
codename: '',
collection: '',
language: '',
last_modified: '',
name: '',
workflow_step: ''
},
elements: []
};
let fieldIndex = 0;
const contentItemTypeCodename = record[0]; // type is set in first index
for (const columnName of parsedColumns) {
const columnValue = record[fieldIndex];
if (systemFields.find((m) => m.toLowerCase() === columnName.toLowerCase())) {
// column is system field
contentItem.system[columnName] = columnValue;
for (const file of zipFiles) {
let index = 0;
const parser = parse(file.data, {
cast: true,
delimiter: this.csvDelimiter
});
let parsedColumns = [];
const systemFields = super.getSystemContentItemFields();
for await (const record of parser) {
if (index === 0) {
// process header row
parsedColumns = record;
}
else {
// process data row
const contentItem = {
system: {
type: '',
codename: '',
collection: '',
language: '',
last_modified: '',
name: '',
workflow_step: ''
},
elements: []
};
let fieldIndex = 0;
const contentItemTypeCodename = record[0]; // type is set in first index
for (const columnName of parsedColumns) {
const columnValue = record[fieldIndex];
if (systemFields.find((m) => m.toLowerCase() === columnName.toLowerCase())) {
// column is system field
contentItem.system[columnName] = columnValue;
}
else {
// column is element field
const element = super.getElement(data.types, contentItemTypeCodename, columnName);
contentItem.elements.push({
codename: element.codename,
value: columnValue,
type: element.type
});
}
fieldIndex++;
}
else {
// column is element field
const element = super.getElement(types, contentItemTypeCodename, columnName);
contentItem.elements.push({
codename: element.codename,
value: columnValue,
type: element.type
});
}
fieldIndex++;
parsedItems.push(contentItem);
}
parsedItems.push(contentItem);
index++;
}
index++;
}

@@ -84,0 +82,0 @@ return parsedItems;

@@ -1,10 +0,9 @@

import { IImportContentType, IParsedContentItem } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { IParsedContentItem } from '../../import/index.js';
import { FileBinaryData, ItemsParseData, ItemsTransformData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { IExportContentItem } from '../../export/index.js';
export declare class ItemJsonJoinedProcessorService extends BaseItemProcessorService {
private readonly jsonProcessorService;
private readonly itemsFileName;
readonly name: string;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData>;
parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]>;
}
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { ItemJsonProcessorService } from './item-json-processor.service.js';
import { mapToJsonItem, parseJsonItem } from './helpers/json-item.helper.js';
export class ItemJsonJoinedProcessorService extends BaseItemProcessorService {
jsonProcessorService = new ItemJsonProcessorService();
itemsFileName = 'items.json';
name = 'json';
async transformContentItemsAsync(items) {
const multiFileJsonFileData = await this.jsonProcessorService.transformContentItemsAsync(items);
const allJsonItems = multiFileJsonFileData
.map((m) => {
const items = JSON.parse(m.data);
return items;
})
.reduce((prev, current) => {
prev.push(...current);
return prev;
}, []);
// join data
const joinedFileData = [
{
data: JSON.stringify(allJsonItems),
filename: 'items.json',
itemsCount: allJsonItems.length
}
];
return joinedFileData;
async transformContentItemsAsync(data) {
const jsonItems = data.items.map((m) => mapToJsonItem(m));
data.zip.addFile(this.itemsFileName, jsonItems.length ? JSON.stringify(jsonItems) : '[]');
return await data.zip.generateZipAsync();
}
async parseContentItemsAsync(text, types) {
return await this.jsonProcessorService.parseContentItemsAsync(text, types);
async parseContentItemsAsync(data) {
const text = await data.zip.getFileContentAsync(this.itemsFileName);
if (!text) {
return [];
}
const jsonItems = JSON.parse(text);
return jsonItems.map((m) => parseJsonItem(m, (typeCodename, elementCodename) => super.getElement(data.types, typeCodename, elementCodename)));
}
}
//# sourceMappingURL=item-json-joined-processor.service.js.map

@@ -1,11 +0,9 @@

import { IImportContentType, IParsedContentItem } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { IParsedContentItem } from '../../import/index.js';
import { FileBinaryData, ItemsParseData, ItemsTransformData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { IExportContentItem } from '../../export/index.js';
export declare class ItemJsonProcessorService extends BaseItemProcessorService {
readonly name: string;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData>;
parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]>;
private getTypeWrappers;
private mapToJsonItem;
}
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { mapToJsonItem, parseJsonItem } from './helpers/json-item.helper.js';
export class ItemJsonProcessorService extends BaseItemProcessorService {
name = 'json';
async transformContentItemsAsync(items) {
const fileData = [];
const typeWrappers = this.getTypeWrappers(items);
async transformContentItemsAsync(data) {
const typeWrappers = this.getTypeWrappers(data.items);
for (const typeWrapper of typeWrappers) {
const filename = `${typeWrapper.typeCodename}.json`;
const contentItemsOfType = items.filter((m) => m.system.type === typeWrapper.typeCodename);
const jsonItems = contentItemsOfType.map((m) => this.mapToJsonItem(m));
fileData.push({
data: jsonItems.length ? JSON.stringify(jsonItems) : '[]',
filename: filename,
itemsCount: jsonItems.length
});
const contentItemsOfType = data.items.filter((m) => m.system.type === typeWrapper.typeCodename);
const jsonItems = contentItemsOfType.map((m) => mapToJsonItem(m));
data.zip.addFile(filename, jsonItems.length ? JSON.stringify(jsonItems) : '[]');
}
return fileData;
return await data.zip.generateZipAsync();
}
async parseContentItemsAsync(text, types) {
async parseContentItemsAsync(data) {
const zipFiles = await data.zip.getAllFilesAsync('string');
const parsedItems = [];
const rawItems = JSON.parse(text);
for (const rawItem of rawItems) {
const elements = [];
for (const propertyName of Object.keys(rawItem.elements)) {
const element = super.getElement(types, rawItem.system.type, propertyName);
elements.push({
codename: propertyName,
value: rawItem.elements[propertyName],
type: element.type
});
for (const zipFile of zipFiles) {
const jsonItems = JSON.parse(zipFile.data);
for (const rawItem of jsonItems) {
parsedItems.push(parseJsonItem(rawItem, (typeCodename, elementCodename) => super.getElement(data.types, typeCodename, elementCodename)));
}
const parsedItem = {
system: {
codename: rawItem.system.codename,
collection: rawItem.system.collection,
language: rawItem.system.language,
last_modified: rawItem.system.last_modified,
name: rawItem.system.name,
type: rawItem.system.type,
workflow_step: rawItem.system.workflow_step
},
elements: elements
};
parsedItems.push(parsedItem);
}

@@ -64,22 +42,3 @@ return parsedItems;

}
mapToJsonItem(item) {
const jsonElements = {};
for (const element of item.elements) {
jsonElements[element.codename] = element.value;
}
const jsonItem = {
system: {
codename: item.system.codename,
collection: item.system.collection,
language: item.system.language,
last_modified: item.system.last_modified,
name: item.system.name,
type: item.system.type,
workflow_step: item.system.workflow_step
},
elements: jsonElements
};
return jsonItem;
}
}
//# sourceMappingURL=item-json-processor.service.js.map
import { ManagementClient } from '@kontent-ai/management-sdk';
import { IImportedData } from '../../core/index.js';
import { IImportAsset } from '../import.models.js';
import { IParsedAsset } from '../import.models.js';
export declare class ImportAssetsHelper {
importAssetsAsync(managementClient: ManagementClient, assets: IImportAsset[], importedData: IImportedData): Promise<void>;
importAssetsAsync(data: {
managementClient: ManagementClient;
assets: IParsedAsset[];
importedData: IImportedData;
}): Promise<void>;
}
export declare const importAssetsHelper: ImportAssetsHelper;
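
The helper thus moves from positional parameters to a single options object; a call under the new signature looks like this (sketch, using only the property names declared above):

await importAssetsHelper.importAssetsAsync({
    managementClient: managementClient, // ManagementClient from '@kontent-ai/management-sdk'
    assets: parsedAssets,               // IParsedAsset[] parsed from the assets zip
    importedData: importedData          // IImportedData accumulator shared across the import
});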

@@ -1,10 +0,10 @@

import { is404Error, logAction } from '../../core/index.js';
import { logProcessingDebug } from '../../core/log-helper.js';
import { is404Error, logItemAction, logProcessingDebug } from '../../core/index.js';
import mime from 'mime';
export class ImportAssetsHelper {
async importAssetsAsync(managementClient, assets, importedData) {
async importAssetsAsync(data) {
let assetIndex = 1;
for (const asset of assets) {
for (const asset of data.assets) {
logProcessingDebug({
index: assetIndex,
totalCount: assets.length,
totalCount: data.assets.length,
itemType: 'asset',

@@ -20,3 +20,3 @@ title: `${asset.filename}`

// and such asset should not be imported again
existingAsset = await managementClient
existingAsset = await data.managementClient
.viewAsset()

@@ -34,3 +34,3 @@ .byAssetExternalId(asset.assetId)

// check if asset with given external id was already created
existingAsset = await managementClient
existingAsset = await data.managementClient
.viewAsset()

@@ -48,14 +48,14 @@ .byAssetExternalId(assetExternalId)

// only import asset if it wasn't already there
const uploadedBinaryFile = await managementClient
const uploadedBinaryFile = await data.managementClient
.uploadBinaryFile()
.withData({
binaryData: asset.binaryData,
contentType: asset.mimeType ?? '',
contentType: mime.getType(asset.filename) ?? '',
filename: asset.filename
})
.toPromise();
logAction('upload', 'binaryFile', {
logItemAction('upload', 'binaryFile', {
title: asset.filename
});
const createdAsset = await managementClient
const createdAsset = await data.managementClient
.addAsset()

@@ -73,7 +73,7 @@ .withData((builder) => {

.then((m) => m.data);
importedData.assets.push({
data.importedData.assets.push({
imported: createdAsset,
original: asset
});
logAction('create', 'asset', {
logItemAction('create', 'asset', {
title: asset.filename

@@ -83,7 +83,7 @@ });

else {
importedData.assets.push({
data.importedData.assets.push({
imported: existingAsset,
original: asset
});
logAction('skip', 'asset', {
logItemAction('skip', 'asset', {
title: asset.filename

@@ -90,0 +90,0 @@ });

@@ -5,5 +5,12 @@ import { CollectionModels, ContentItemModels, ManagementClient } from '@kontent-ai/management-sdk';

export declare class ImportContentItemHelper {
importContentItemsAsync(managementClient: ManagementClient, parsedContentItems: IParsedContentItem[], collections: CollectionModels.Collection[], importedData: IImportedData, config: {
skipFailedItems: boolean;
importContentItemsAsync(data: {
managementClient: ManagementClient;
parsedContentItems: IParsedContentItem[];
collections: CollectionModels.Collection[];
importedData: IImportedData;
config: {
skipFailedItems: boolean;
};
}): Promise<ContentItemModels.ContentItem[]>;
private importContentItemAsync;
private shouldUpdateContentItem;

@@ -10,0 +17,0 @@ private prepareContentItemAsync;

@@ -1,10 +0,9 @@

import { logAction, extractErrorMessage, is404Error } from '../../core/index.js';
import { logDebug, logProcessingDebug } from '../../core/log-helper.js';
import { extractErrorMessage, is404Error, logItemAction, logDebug, logErrorAndExit, logProcessingDebug } from '../../core/index.js';
import { parsedItemsHelper } from './parsed-items-helper.js';
export class ImportContentItemHelper {
async importContentItemsAsync(managementClient, parsedContentItems, collections, importedData, config) {
async importContentItemsAsync(data) {
const preparedItems = [];
let itemIndex = 0;
const categorizedParsedItems = parsedItemsHelper.categorizeParsedItems(parsedContentItems);
logAction('skip', 'contentItem', {
const categorizedParsedItems = parsedItemsHelper.categorizeParsedItems(data.parsedContentItems);
logItemAction('skip', 'contentItem', {
title: `Skipping '${categorizedParsedItems.componentItems.length}' because they represent component items`

@@ -20,43 +19,14 @@ });

});
// if content item does not have a workflow step it means it is used as a component within Rich text element
// such items are processed within element transform
if (!importContentItem.system.workflow_step) {
logAction('skip', 'contentItem', {
title: `Skipping item because it's a component`,
codename: importContentItem.system.codename
try {
await this.importContentItemAsync({
managementClient: data.managementClient,
collections: data.collections,
importContentItem: importContentItem,
importedData: data.importedData,
parsedContentItems: data.parsedContentItems,
preparedItems: preparedItems
});
continue;
}
try {
const preparedContentItemResult = await this.prepareContentItemAsync(managementClient, importContentItem, importedData);
preparedItems.push(preparedContentItemResult.contentItem);
// check if name should be updated, no other changes are supported
if (preparedContentItemResult.status === 'itemAlreadyExists') {
if (this.shouldUpdateContentItem(importContentItem, preparedContentItemResult.contentItem, collections)) {
const upsertedContentItem = await managementClient
.upsertContentItem()
.byItemCodename(importContentItem.system.codename)
.withData({
name: importContentItem.system.name,
collection: {
codename: importContentItem.system.collection
}
})
.toPromise()
.then((m) => m.data);
logAction('upsert', 'contentItem', {
title: `Upserting item '${upsertedContentItem.name}'`,
codename: importContentItem.system.codename
});
}
else {
logAction('skip', 'contentItem', {
title: `Item '${importContentItem.system.name}' already exists`,
codename: importContentItem.system.codename
});
}
}
}
catch (error) {
if (config.skipFailedItems) {
if (data.config.skipFailedItems) {
logDebug({

@@ -76,6 +46,38 @@ type: 'error',

}
async importContentItemAsync(data) {
const preparedContentItemResult = await this.prepareContentItemAsync(data.managementClient, data.importContentItem, data.importedData);
data.preparedItems.push(preparedContentItemResult.contentItem);
// check if name should be updated, no other changes are supported
if (preparedContentItemResult.status === 'itemAlreadyExists') {
if (this.shouldUpdateContentItem(data.importContentItem, preparedContentItemResult.contentItem, data.collections)) {
const upsertedContentItem = await data.managementClient
.upsertContentItem()
.byItemCodename(data.importContentItem.system.codename)
.withData({
name: data.importContentItem.system.name,
collection: {
codename: data.importContentItem.system.collection
}
})
.toPromise()
.then((m) => m.data);
logItemAction('upsert', 'contentItem', {
title: `Upserting item '${upsertedContentItem.name}'`,
codename: data.importContentItem.system.codename
});
}
else {
logItemAction('skip', 'contentItem', {
title: `Item '${data.importContentItem.system.name}' already exists`,
codename: data.importContentItem.system.codename
});
}
}
}
shouldUpdateContentItem(parsedContentItem, contentItem, collections) {
const collection = collections.find((m) => m.codename === parsedContentItem.system.collection);
if (!collection) {
throw Error(`Invalid collection '${parsedContentItem.system.collection}'`);
logErrorAndExit({
message: `Invalid collection '${parsedContentItem.system.collection}'`
});
}

@@ -92,3 +94,3 @@ return (parsedContentItem.system.name !== contentItem.name ||

.then((m) => m.data);
logAction('fetch', 'contentItem', {
logItemAction('fetch', 'contentItem', {
title: `Loading item '${contentItem.name}'`,

@@ -126,3 +128,3 @@ codename: contentItem.codename

});
logAction('create', 'contentItem', {
logItemAction('create', 'contentItem', {
title: `Creating item '${contentItem.name}'`,

@@ -129,0 +131,0 @@ codename: contentItem.codename

@@ -5,5 +5,13 @@ import { WorkflowModels, ContentItemModels, ManagementClient } from '@kontent-ai/management-sdk';

export declare class ImportLanguageVariantHelper {
importLanguageVariantsAsync(managementClient: ManagementClient, importContentItems: IParsedContentItem[], workflows: WorkflowModels.Workflow[], preparedContentItems: ContentItemModels.ContentItem[], importedData: IImportedData, config: {
skipFailedItems: boolean;
importLanguageVariantsAsync(data: {
managementClient: ManagementClient;
importContentItems: IParsedContentItem[];
workflows: WorkflowModels.Workflow[];
preparedContentItems: ContentItemModels.ContentItem[];
importedData: IImportedData;
config: {
skipFailedItems: boolean;
};
}): Promise<void>;
private importLanguageVariantAsync;
private prepareLanguageVariantForImportAsync;

@@ -10,0 +18,0 @@ private isLanguageVariantPublished;

@@ -1,12 +0,12 @@

import { logDebug, logProcessingDebug } from '../../core/log-helper.js';
import { extractErrorMessage, is404Error, logAction, translationHelper } from '../../core/index.js';
import { extractErrorMessage, is404Error, logItemAction, logDebug, logErrorAndExit, logProcessingDebug } from '../../core/index.js';
import { importWorkflowHelper } from './import-workflow.helper.js';
import { parsedItemsHelper } from './parsed-items-helper.js';
import { translationHelper } from '../../translation/index.js';
export class ImportLanguageVariantHelper {
async importLanguageVariantsAsync(managementClient, importContentItems, workflows, preparedContentItems, importedData, config) {
let itemIndex = 0;
const categorizedParsedItems = parsedItemsHelper.categorizeParsedItems(importContentItems);
logAction('skip', 'languageVariant', {
async importLanguageVariantsAsync(data) {
const categorizedParsedItems = parsedItemsHelper.categorizeParsedItems(data.importContentItems);
logItemAction('skip', 'languageVariant', {
title: `Skipping '${categorizedParsedItems.componentItems.length}' because they represent component items`
});
let itemIndex = 0;
for (const importContentItem of categorizedParsedItems.regularItems) {

@@ -21,39 +21,19 @@ try {

});
if (!importContentItem.system.workflow_step) {
throw Error(`Content item '${importContentItem.system.codename}' requires a workflow to be set`);
const preparedContentItem = data.preparedContentItems.find((m) => m.codename === importContentItem.system.codename);
if (!preparedContentItem) {
logErrorAndExit({
message: `Invalid content item for codename '${importContentItem.system.codename}'`
});
}
const upsertedContentItem = preparedContentItems.find((m) => m.codename === importContentItem.system.codename);
if (!upsertedContentItem) {
throw Error(`Invalid content item for codename '${importContentItem.system.codename}'`);
}
await this.prepareLanguageVariantForImportAsync(managementClient, importContentItem, workflows);
const upsertedLanguageVariant = await managementClient
.upsertLanguageVariant()
.byItemCodename(upsertedContentItem.codename)
.byLanguageCodename(importContentItem.system.language)
.withData((builder) => {
const mappedElements = importContentItem.elements.map((m) => this.getElementContract(importContentItems, m, importedData));
return {
elements: mappedElements
};
})
.toPromise()
.then((m) => m.data);
importedData.languageVariants.push({
original: importContentItem,
imported: upsertedLanguageVariant
await this.importLanguageVariantAsync({
importContentItem,
preparedContentItem,
managementClient: data.managementClient,
importContentItems: data.importContentItems,
workflows: data.workflows,
importedData: data.importedData
});
logAction('upsert', 'languageVariant', {
title: `${upsertedContentItem.name}`,
language: importContentItem.system.language,
codename: importContentItem.system.codename,
workflowStep: importContentItem.system.workflow_step
});
// set workflow of language variant
if (importContentItem.system.workflow_step) {
await importWorkflowHelper.setWorkflowOfLanguageVariantAsync(managementClient, importContentItem.system.workflow_step, importContentItem, workflows);
}
}
catch (error) {
if (config.skipFailedItems) {
if (data.config.skipFailedItems) {
logDebug({

@@ -72,19 +52,54 @@ type: 'error',

}
async prepareLanguageVariantForImportAsync(managementClient, importContentItem, workflows) {
async importLanguageVariantAsync(data) {
await this.prepareLanguageVariantForImportAsync({
importContentItem: data.importContentItem,
managementClient: data.managementClient,
workflows: data.workflows
});
const upsertedLanguageVariant = await data.managementClient
.upsertLanguageVariant()
.byItemCodename(data.preparedContentItem.codename)
.byLanguageCodename(data.importContentItem.system.language)
.withData((builder) => {
const mappedElements = data.importContentItem.elements.map((m) => this.getElementContract(data.importContentItems, m, data.importedData));
return {
elements: mappedElements
};
})
.toPromise()
.then((m) => m.data);
data.importedData.languageVariants.push({
original: data.importContentItem,
imported: upsertedLanguageVariant
});
logItemAction('upsert', 'languageVariant', {
title: `${data.preparedContentItem.name}`,
language: data.importContentItem.system.language,
codename: data.importContentItem.system.codename,
workflowStep: data.importContentItem.system.workflow_step
});
// set workflow of language variant
if (data.importContentItem.system.workflow_step) {
await importWorkflowHelper.setWorkflowOfLanguageVariantAsync(data.managementClient, data.importContentItem.system.workflow_step, data.importContentItem, data.workflows);
}
}
async prepareLanguageVariantForImportAsync(data) {
let languageVariantOfContentItem;
try {
languageVariantOfContentItem = await managementClient
languageVariantOfContentItem = await data.managementClient
.viewLanguageVariant()
.byItemCodename(importContentItem.system.codename)
.byLanguageCodename(importContentItem.system.language)
.byItemCodename(data.importContentItem.system.codename)
.byLanguageCodename(data.importContentItem.system.language)
.toPromise()
.then((m) => m.data);
logAction('fetch', 'languageVariant', {
title: `${importContentItem.system.name}`,
language: importContentItem.system.language,
codename: importContentItem.system.codename,
workflowStep: importContentItem.system.workflow_step
logItemAction('fetch', 'languageVariant', {
title: `${data.importContentItem.system.name}`,
language: data.importContentItem.system.language,
codename: data.importContentItem.system.codename,
workflowStep: data.importContentItem.system.workflow_step
});
if (!languageVariantOfContentItem) {
throw Error(`Invalid language variant for item '${importContentItem.system.codename}' of type '${importContentItem.system.type}' and language '${importContentItem.system.language}'`);
logErrorAndExit({
message: `Invalid language variant for item '${data.importContentItem.system.codename}' of type '${data.importContentItem.system.type}' and language '${data.importContentItem.system.language}'`
});
}

@@ -100,32 +115,32 @@ }

// check if variant is published or archived
if (this.isLanguageVariantPublished(languageVariantOfContentItem, workflows)) {
if (this.isLanguageVariantPublished(languageVariantOfContentItem, data.workflows)) {
// create new version
await managementClient
await data.managementClient
.createNewVersionOfLanguageVariant()
.byItemCodename(importContentItem.system.codename)
.byLanguageCodename(importContentItem.system.language)
.byItemCodename(data.importContentItem.system.codename)
.byLanguageCodename(data.importContentItem.system.language)
.toPromise();
logAction('createNewVersion', 'languageVariant', {
title: `${importContentItem.system.name}`,
language: importContentItem.system.language,
codename: importContentItem.system.codename,
workflowStep: importContentItem.system.workflow_step
logItemAction('createNewVersion', 'languageVariant', {
title: `${data.importContentItem.system.name}`,
language: data.importContentItem.system.language,
codename: data.importContentItem.system.codename,
workflowStep: data.importContentItem.system.workflow_step
});
}
else if (this.isLanguageVariantArchived(languageVariantOfContentItem, workflows)) {
else if (this.isLanguageVariantArchived(languageVariantOfContentItem, data.workflows)) {
// change workflow step to draft
if (languageVariantOfContentItem.workflow.stepIdentifier.id) {
const workflow = importWorkflowHelper.getWorkflowForGivenStepById(languageVariantOfContentItem.workflow.stepIdentifier.id, workflows);
const workflow = importWorkflowHelper.getWorkflowForGivenStepById(languageVariantOfContentItem.workflow.stepIdentifier.id, data.workflows);
const newWorkflowStep = workflow.steps[0];
await managementClient
await data.managementClient
.changeWorkflowStepOfLanguageVariant()
.byItemCodename(importContentItem.system.codename)
.byLanguageCodename(importContentItem.system.language)
.byItemCodename(data.importContentItem.system.codename)
.byLanguageCodename(data.importContentItem.system.language)
.byWorkflowStepCodename(newWorkflowStep.codename)
.toPromise();
logAction('unArchive', 'languageVariant', {
title: `${importContentItem.system.name}`,
language: importContentItem.system.language,
codename: importContentItem.system.codename,
workflowStep: importContentItem.system.workflow_step
logItemAction('unArchive', 'languageVariant', {
title: `${data.importContentItem.system.name}`,
language: data.importContentItem.system.language,
codename: data.importContentItem.system.codename,
workflowStep: data.importContentItem.system.workflow_step
});

@@ -155,3 +170,5 @@ }

if (!importContract) {
throw Error(`Missing import contract for element `);
logErrorAndExit({
message: `Missing import contract for element '${element.codename}'`
});
}

@@ -158,0 +175,0 @@ return importContract;

import { ManagementClient, WorkflowModels } from '@kontent-ai/management-sdk';
import { IParsedContentItem } from '../import.models.js';
export declare class ImportWorkflowHelper {
private readonly defaultWorkflowCodename;
getWorkflowForGivenStepById(workflowId: string, workflows: WorkflowModels.Workflow[]): WorkflowModels.Workflow;

@@ -5,0 +6,0 @@ setWorkflowOfLanguageVariantAsync(managementClient: ManagementClient, workflowStepCodename: string, importContentItem: IParsedContentItem, workflows: WorkflowModels.Workflow[]): Promise<void>;

@@ -1,3 +0,4 @@

import { defaultWorkflowCodename, logAction } from '../../core/index.js';
import { logItemAction, logErrorAndExit } from '../../core/index.js';
export class ImportWorkflowHelper {
defaultWorkflowCodename = 'Default';
getWorkflowForGivenStepById(workflowId, workflows) {

@@ -24,3 +25,5 @@ return this.getWorkflowForGivenStep(workflows, (workflow) => {

if (!this.doesWorkflowStepExist(workflowStepCodename, workflows)) {
throw Error(`Could not change workflow step for item '${importContentItem.system.codename}' (${importContentItem.system.name}) because step with codename '${workflowStepCodename}' does not exist in target project.`);
logErrorAndExit({
message: `Could not change workflow step for item '${importContentItem.system.codename}' (${importContentItem.system.name}) because step with codename '${workflowStepCodename}' does not exist in target project.`
});
}

@@ -34,3 +37,3 @@ if (this.doesWorkflowStepCodenameRepresentPublishedStep(workflowStepCodename, workflows)) {

.toPromise();
logAction('publish', 'languageVariant', {
logItemAction('publish', 'languageVariant', {
title: `${importContentItem.system.name}`,

@@ -43,3 +46,3 @@ language: importContentItem.system.language,

else if (this.doesWorkflowStepCodenameRepresentScheduledStep(workflowStepCodename, workflows)) {
- logAction('skip', 'languageVariant', {
+ logItemAction('skip', 'languageVariant', {
title: `Skipping scheduled workflow step for item '${importContentItem.system.name}'`,

@@ -66,3 +69,3 @@ language: importContentItem.system.language,

.toPromise();
- logAction('archive', 'languageVariant', {
+ logItemAction('archive', 'languageVariant', {
title: `${importContentItem.system.name}`,

@@ -93,3 +96,3 @@ language: importContentItem.system.language,

.toPromise();
- logAction('changeWorkflowStep', 'languageVariant', {
+ logItemAction('changeWorkflowStep', 'languageVariant', {
title: `${importContentItem.system.name}`,

@@ -132,5 +135,7 @@ language: importContentItem.system.language,

}
- const defaultWorkflow = workflows.find((m) => m.codename.toLowerCase() === defaultWorkflowCodename.toLowerCase());
+ const defaultWorkflow = workflows.find((m) => m.codename.toLowerCase() === this.defaultWorkflowCodename.toLowerCase());
if (!defaultWorkflow) {
- throw Error(`Missing default workflow`);
+ logErrorAndExit({
+ message: `Missing default workflow`
+ });
}

@@ -172,3 +177,2 @@ return defaultWorkflow;

}
return false;
}

@@ -175,0 +179,0 @@ return false;

/// <reference types="node" resolution-mode="require"/>
import { IRetryStrategyOptions } from '@kontent-ai/core-sdk';
- import { ContentElementType, IPackageMetadata } from '../core/index.js';
+ import { ContentElementType } from '../core/index.js';
export interface IImportConfig {

@@ -12,3 +12,3 @@ managementApiKey: string;

contentItem?: (item: IParsedContentItem) => boolean | Promise<boolean>;
- asset?: (item: IImportAsset) => boolean | Promise<boolean>;
+ asset?: (item: IParsedAsset) => boolean | Promise<boolean>;
};

@@ -22,21 +22,16 @@ }

}
- export interface IImportAsset {
- binaryData: Buffer | Blob;
+ export interface IParsedAssetRecord {
assetId: string;
filename: string;
- mimeType: string | undefined;
- extension: string | undefined;
- }
- export interface IParsedAsset {
- assetId: string;
- filename: string;
extension: string;
url: string;
}
+ export interface IParsedAsset extends IParsedAssetRecord {
+ binaryData: Buffer | Blob | undefined;
+ }
export interface IImportSource {
importData: {
items: IParsedContentItem[];
- assets: IImportAsset[];
+ assets: IParsedAsset[];
};
- metadata?: IPackageMetadata;
}
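// Editor's note: an illustrative sketch (not part of the package diff) showing how the
// reworked asset models fit together; all values below are hypothetical.
const exampleAsset: IParsedAsset = {
    // IParsedAssetRecord fields stored in the assets metadata file (assets.json / assets.csv)
    assetId: 'f47ac10b-58cc-4372-a567-0e02b2c3d479',
    filename: 'hero-image.png',
    extension: 'png',
    url: 'https://example.com/assets/hero-image.png',
    // the binary payload is stored separately in the zip as '<assetId>.<extension>' and
    // may be undefined when only the metadata record has been parsed
    binaryData: undefined
};
const exampleSource: IImportSource = {
    importData: {
        items: [],
        assets: [exampleAsset]
    }
};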

@@ -43,0 +38,0 @@ export interface IFlattenedFolder {

@@ -8,3 +8,2 @@ import { IImportedData } from '../core/index.js';

getImportContentTypesAsync(): Promise<IImportContentType[]>;
- importFromSourceAsync(sourceData: IImportSource): Promise<IImportedData>;
importAsync(sourceData: IImportSource): Promise<IImportedData>;

@@ -16,3 +15,2 @@ private getContentTypeElements;

private getCollectionsAsync;
- private handleImportError;
}
import { ManagementClient } from '@kontent-ai/management-sdk';
- import { handleError, defaultRetryStrategy, printProjectAndEnvironmentInfoToConsoleAsync, defaultHttpService } from '../core/index.js';
- import { logDebug } from '../core/log-helper.js';
+ import { defaultRetryStrategy, printProjectAndEnvironmentInfoToConsoleAsync, defaultHttpService, logDebug, logErrorAndExit } from '../core/index.js';
import { importAssetsHelper } from './helpers/import-assets.helper.js';

@@ -45,5 +44,2 @@ import { importContentItemHelper } from './helpers/import-content-item.helper.js';

}
- async importFromSourceAsync(sourceData) {
- return await this.importAsync(sourceData);
- }
async importAsync(sourceData) {

@@ -59,39 +55,38 @@ const importedData = {

// import order matters
- try {
-     // Assets
-     if (dataToImport.importData.assets.length) {
-         logDebug({
-             type: 'info',
-             message: `Importing assets`
-         });
-         await importAssetsHelper.importAssetsAsync(this.managementClient, dataToImport.importData.assets, importedData);
-     }
-     else {
-         logDebug({
-             type: 'info',
-             message: `There are no assets to import`
-         });
-     }
-     // Content items
-     if (dataToImport.importData.items.length) {
-         logDebug({
-             type: 'info',
-             message: `Importing content items`
-         });
-         await this.importParsedContentItemsAsync(dataToImport.importData.items, importedData);
-     }
-     else {
-         logDebug({
-             type: 'info',
-             message: `There are no content items to import`
-         });
-     }
-     logDebug({
-         type: 'info',
-         message: `Finished import`
-     });
- }
- catch (error) {
-     this.handleImportError(error);
- }
+ // #1 Assets
+ if (dataToImport.importData.assets.length) {
+     logDebug({
+         type: 'info',
+         message: `Importing assets`
+     });
+     await importAssetsHelper.importAssetsAsync({
+         managementClient: this.managementClient,
+         assets: dataToImport.importData.assets,
+         importedData: importedData
+     });
+ }
+ else {
+     logDebug({
+         type: 'info',
+         message: `There are no assets to import`
+     });
+ }
+ // #2 Content items
+ if (dataToImport.importData.items.length) {
+     logDebug({
+         type: 'info',
+         message: `Importing content items`
+     });
+     await this.importParsedContentItemsAsync(dataToImport.importData.items, importedData);
+ }
+ else {
+     logDebug({
+         type: 'info',
+         message: `There are no content items to import`
+     });
+ }
+ logDebug({
+     type: 'info',
+     message: `Finished import`
+ });
return importedData;

@@ -114,3 +109,5 @@ }

if (!contentTypeSnippet) {
- throw Error(`Could not find content type snippet for element. This snippet is referenced in type '${contentType.codename}'`);
+ logErrorAndExit({
+ message: `Could not find content type snippet for element. This snippet is referenced in type '${contentType.codename}'`
+ });
}

@@ -138,4 +135,3 @@ for (const snippetElement of contentTypeSnippet.elements) {

items: []
},
- metadata: source.metadata
}
};

@@ -149,2 +145,4 @@ let removedAssets = 0;

dataToImport.importData.assets.push(asset);
}
+ else {
+ removedAssets++;

@@ -162,2 +160,4 @@ }

dataToImport.importData.items.push(item);
}
+ else {
+ removedContentItems++;

@@ -188,8 +188,21 @@ }

// first prepare content items
- const preparedContentItems = await importContentItemHelper.importContentItemsAsync(this.managementClient, parsedContentItems, collections, importedData, {
-     skipFailedItems: this.config.skipFailedItems
+ const preparedContentItems = await importContentItemHelper.importContentItemsAsync({
+     managementClient: this.managementClient,
+     collections: collections,
+     importedData: importedData,
+     parsedContentItems: parsedContentItems,
+     config: {
+         skipFailedItems: this.config.skipFailedItems
+     }
});
// then process language variants
- await importLanguageVariantHelper.importLanguageVariantsAsync(this.managementClient, parsedContentItems, workflows, preparedContentItems, importedData, {
-     skipFailedItems: this.config.skipFailedItems
+ await importLanguageVariantHelper.importLanguageVariantsAsync({
+     managementClient: this.managementClient,
+     importContentItems: parsedContentItems,
+     importedData: importedData,
+     preparedContentItems: preparedContentItems,
+     workflows: workflows,
+     config: {
+         skipFailedItems: this.config.skipFailedItems
+     }
});

@@ -209,6 +222,3 @@ }

}
- handleImportError(error) {
- handleError(error);
- }
}
//# sourceMappingURL=import.service.js.map
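// Editor's note: a minimal usage sketch for the renamed entry point (importFromSourceAsync
// was removed in favour of importAsync). The ImportService config below only uses options
// referenced elsewhere in this diff (environmentId, managementApiKey, skipFailedItems) and
// is assumed, not confirmed, to be complete.
import { ImportService } from './import/index.js'; // import path assumed
async function runImport(): Promise<void> {
    const importService = new ImportService({
        environmentId: '<environment-id>',
        managementApiKey: '<management-api-key>',
        skipFailedItems: true
    } as any); // cast because the full IImportConfig shape is not shown in this diff
    const importedData = await importService.importAsync({
        importData: { items: [], assets: [] }
    });
    console.log(`Imported '${importedData.assets.length}' assets`);
}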

@@ -5,1 +5,2 @@ export * from './core/index.js';

export * from './file-processor/index.js';
+ export * from './toolkit/index.js';

@@ -6,2 +6,3 @@ // Public API

export * from './file-processor/index.js';
+ export * from './toolkit/index.js';
//# sourceMappingURL=index.js.map
#!/usr/bin/env node
import { readFileSync } from 'fs';
import yargs from 'yargs';
- import { getExtension, extractErrorMessage } from '../../core/index.js';
+ import { getExtension, logDebug, handleError, logErrorAndExit } from '../../core/index.js';
import { ItemCsvProcessorService, ItemJsonProcessorService, ItemJsonJoinedProcessorService, AssetCsvProcessorService, AssetJsonProcessorService } from '../../file-processor/index.js';
- import { logDebug } from '../../core/log-helper.js';
import { ExportToolkit, ImportToolkit } from '../../toolkit/index.js';

@@ -50,3 +49,5 @@ import { KontentAiExportAdapter } from '../../export/index.js';

if (!config.adapter) {
- throw Error(`Missing 'adapter' config`);
+ logErrorAndExit({
+ message: `Missing 'adapter' config`
+ });
}

@@ -56,3 +57,5 @@ let adapter;

if (!config.environmentId) {
- throw Error(`Invalid environment id`);
+ logErrorAndExit({
+ message: `Invalid 'environmentId'`
+ });
}

@@ -72,8 +75,10 @@ adapter = new KontentAiExportAdapter({

else {
- throw Error(`Missing adapter '${config.adapter}'`);
+ logErrorAndExit({
+ message: `Missing adapter '${config.adapter}'`
+ });
}
- const exportToolkit = new ExportToolkit({ adapter });
const itemsFilename = config.itemsFilename ?? getDefaultExportFilename('items');
const assetsFilename = config.assetsFilename ?? getDefaultExportFilename('assets');
- await exportToolkit.exportAsync({
+ const exportToolkit = new ExportToolkit({
+ adapter,
+ items: {

@@ -90,2 +95,3 @@ filename: itemsFilename,

});
+ await exportToolkit.exportAsync();
logDebug({ type: 'info', message: `Completed` });

@@ -95,6 +101,10 @@ };

if (!config.managementApiKey) {
- throw Error(`Missing 'managementApiKey' configuration option`);
+ logErrorAndExit({
+ message: `Missing 'managementApiKey' configuration option`
+ });
}
if (!config.environmentId) {
- throw Error(`Missing 'environmentId' configuration option`);
+ logErrorAndExit({
+ message: `Missing 'environmentId' configuration option`
+ });
}

@@ -140,3 +150,5 @@ const itemsFilename = config.itemsFilename;

else {
- throw Error(`Unsupported file type '${itemsFileExtension}'`);
+ logErrorAndExit({
+ message: `Unsupported file type '${itemsFileExtension}'`
+ });
}

@@ -154,3 +166,5 @@ logDebug({ type: 'info', message: `Completed` });

else {
- throw Error(`Invalid action '${config.action}'`);
+ logErrorAndExit({
+ message: `Invalid action '${config.action}'`
+ });
}

@@ -182,3 +196,5 @@ };

if (action === 'export') {
- throw Error(`Unsupported export format '${format}'`);
+ logErrorAndExit({
+ message: `Unsupported export format '${format}'`
+ });
}

@@ -191,3 +207,5 @@ }

if (action === 'export') {
- throw Error(`Unsupported adapter '${adapter}'`);
+ logErrorAndExit({
+ message: `Unsupported adapter '${adapter}'`
+ });
}

@@ -223,4 +241,3 @@ }

.catch((err) => {
- console.error(err);
- logDebug({ type: 'error', message: extractErrorMessage(err) });
+ handleError(err);
});

@@ -234,3 +251,5 @@ function getAssetFormatService(format) {

}
- throw Error(`Unsupported format '${format}' for assets export`);
+ logErrorAndExit({
+ message: `Unsupported format '${format}' for assets export`
+ });
}

@@ -247,3 +266,5 @@ function getItemFormatService(format) {

}
- throw Error(`Unsupported format '${format}' for items export`);
+ logErrorAndExit({
+ message: `Unsupported format '${format}' for items export`
+ });
}

@@ -256,3 +277,5 @@ function getOptionalArgumentValue(args, argName) {

if (!value) {
- throw Error(`Missing '${argName}' argument value`);
+ logErrorAndExit({
+ message: `Missing '${argName}' argument value`
+ });
}

@@ -259,0 +282,0 @@ return value;

import { promises } from 'fs';
- import { logDebug } from '../../core/log-helper.js';
+ import { logDebug } from '../../core/index.js';
export class FileService {

@@ -4,0 +4,0 @@ constructor() { }

@@ -5,2 +5,10 @@ import { IExportAdapter, IExportAdapterResult } from '../export/index.js';

adapter: IExportAdapter;
+ items: {
+ filename: string;
+ formatService: IItemFormatService;
+ };
+ assets?: {
+ filename: string;
+ formatService: IAssetFormatService;
+ };
}

@@ -12,12 +20,3 @@ export declare class ExportToolkit {

constructor(config: IExporToolkitConfig);
- exportAsync(config: {
- items: {
- filename: string;
- formatService: IItemFormatService;
- };
- assets?: {
- filename: string;
- formatService: IAssetFormatService;
- };
- }): Promise<IExportAdapterResult>;
+ exportAsync(): Promise<IExportAdapterResult>;
}
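// Editor's note: with this change the target files are supplied up front and exportAsync()
// takes no arguments; a sketch (adapter construction and import paths assumed):
import { ExportToolkit } from './toolkit/index.js'; // path assumed
import { ItemJsonProcessorService, AssetJsonProcessorService } from './file-processor/index.js'; // path assumed
declare const adapter: IExportAdapter; // e.g. a KontentAiExportAdapter created earlier
const toolkit = new ExportToolkit({
    adapter,
    items: {
        filename: 'items.zip',
        formatService: new ItemJsonProcessorService()
    },
    assets: {
        filename: 'assets.zip',
        formatService: new AssetJsonProcessorService()
    }
});
await toolkit.exportAsync(); // writes items.zip and, when assets exist, assets.zip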

@@ -10,6 +10,6 @@ import { FileProcessorService } from '../file-processor/index.js';

}
- async exportAsync(config) {
+ async exportAsync() {
const data = await this.config.adapter.exportAsync();
const itemsZipFile = await this.fileProcessorService.createItemsZipAsync(data, {
- itemFormatService: config.items.formatService,
+ itemFormatService: this.config.items.formatService,
transformConfig: {

@@ -21,8 +21,8 @@ richTextConfig: {

});
- await this.fileService.writeFileAsync(config.items.filename, itemsZipFile);
- if (data.assets.length && config.assets) {
+ await this.fileService.writeFileAsync(this.config.items.filename, itemsZipFile);
+ if (data.assets.length && this.config.assets) {
const assetsZipFile = await this.fileProcessorService.createAssetsZipAsync(data, {
- assetFormatService: config.assets.formatService
+ assetFormatService: this.config.assets.formatService
});
- await this.fileService.writeFileAsync(config.assets.filename, assetsZipFile);
+ await this.fileService.writeFileAsync(this.config.assets.filename, assetsZipFile);
}

@@ -29,0 +29,0 @@ return data;

@@ -32,3 +32,3 @@ import { FileProcessorService } from '../file-processor/index.js';

// import into target environment
- await importService.importFromSourceAsync(data);
+ await importService.importAsync(data);
}

@@ -56,5 +56,5 @@ async importFromZipAsync() {

// import into target environment
- await importService.importFromSourceAsync(data);
+ await importService.importAsync(data);
}
}
//# sourceMappingURL=import-toolkit.class.js.map

@@ -9,3 +9,3 @@ import {

import { ProcessingFormat } from '../file-processor/index.js';
- import { IImportAsset, IParsedContentItem } from '../import/index.js';
+ import { IParsedAsset, IParsedContentItem } from '../import/index.js';
import { ContentItemElementsIndexer, IContentItem, IContentType } from '@kontent-ai/delivery-sdk';

@@ -68,3 +68,3 @@

assets: {
- original: IImportAsset;
+ original: IParsedAsset;
imported: AssetModels.Asset;

@@ -77,3 +77,3 @@ }[];

languageVariants: {
- original: any;
+ original: IParsedContentItem;
imported: LanguageVariantModels.ContentItemLanguageVariant;

@@ -80,0 +80,0 @@ }[];

import { IManagementClient, EnvironmentModels, SharedModels } from '@kontent-ai/management-sdk';
import { IRetryStrategyOptions } from '@kontent-ai/core-sdk';
import { format } from 'bytes';
- import { logDebug } from './log-helper.js';
- import { ActionType, ItemType } from './core.models.js';
+ import { logDebug, logErrorAndExit } from './log-helper.js';
import { HttpService } from '@kontent-ai/core-sdk';

@@ -59,22 +58,2 @@

- export function logAction(
- actionType: ActionType,
- itemType: ItemType,
- data: {
- language?: string;
- workflowStep?: string;
- title: string;
- codename?: string;
- }
- ): void {
- logDebug({
- type: actionType,
- message: data.title,
- partA: itemType,
- partB: data.codename,
- partC: data.language,
- partD: data.workflowStep
- });
- }
export function extractErrorMessage(error: any): string {

@@ -109,10 +88,16 @@ if (error instanceof SharedModels.ContentManagementBaseKontentError) {

if (error instanceof SharedModels.ContentManagementBaseKontentError) {
- throw {
- Message: `Failed to import data with error: ${error.message}`,
- ErrorCode: error.errorCode,
- RequestId: error.requestId,
- ValidationErrors: `${error.validationErrors.map((m) => m.message).join(', ')}`
- };
+ logErrorAndExit({
+ message: `${error.message}. Error code '${error.errorCode}'. Request Id '${error.requestId}'.${
+ error.validationErrors.length ? ` ${error.validationErrors.map((m) => m.message).join(', ')}` : ''
+ }`
+ });
}
+ if (error instanceof Error) {
+ logErrorAndExit({
+ message: error.message
+ });
+ }
// unhandled error
throw error;

@@ -126,3 +111,5 @@ }

if (splitPaths.length < 3) {
- throw Error(`Invalid asset url '${assetUrl}' because asset id could not be determined`);
+ logErrorAndExit({
+ message: `Invalid asset url '${assetUrl}' because asset id could not be determined`
+ });
}

@@ -129,0 +116,0 @@

export * from './core.models.js';
- export * from './translation-helper.js';
export * from './id-translate-helper.js';
export * from './global-helper.js';
export * from './core-properties.js';
+ export * from './log-helper.js';

@@ -7,2 +7,11 @@ import colors from 'colors';

+ export function logErrorAndExit(data: { message: string }): never {
+ logDebug({
+ type: 'error',
+ message: data.message,
+ partA: 'Stopping process'
+ });
+ process.exit(1);
+ }
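// Editor's note: because logErrorAndExit returns `never`, it can replace `throw` at the
// call sites rewritten throughout this diff and TypeScript still narrows the remaining
// code path; a small sketch (requireValue is hypothetical):
function requireValue<T>(value: T | undefined, name: string): T {
    if (!value) {
        logErrorAndExit({ message: `Missing '${name}'` }); // process exits here
    }
    return value; // reached only when value is defined
}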
export function logProcessingDebug(data: {

@@ -48,1 +57,21 @@ index: number;

}
+ export function logItemAction(
+ actionType: ActionType,
+ itemType: ItemType,
+ data: {
+ language?: string;
+ workflowStep?: string;
+ title: string;
+ codename?: string;
+ }
+ ): void {
+ logDebug({
+ type: actionType,
+ message: data.title,
+ partA: itemType,
+ partB: data.codename,
+ partC: data.language,
+ partD: data.workflowStep
+ });
+ }

@@ -8,6 +8,7 @@ import { IContentItem, IContentType, ElementType, Elements } from '@kontent-ai/delivery-sdk';

defaultRetryStrategy,
- formatBytes
+ formatBytes,
+ logDebug,
+ logProcessingDebug
} from '../../../../core/index.js';
import { IExportAsset } from '../../../export.models.js';
- import { logDebug, logProcessingDebug } from '../../../../core/log-helper.js';

@@ -14,0 +15,0 @@ type ExportAssetWithoutBinaryData = Omit<IExportAsset, 'binaryData'>;

import { IContentType, ILanguage, IContentItem, IDeliveryClient } from '@kontent-ai/delivery-sdk';
- import { logDebug } from '../../../../core/log-helper.js';
- import { ActionType, ContentElementType, ItemType, translationHelper } from '../../../../core/index.js';
+ import { ActionType, ContentElementType, ItemType, logDebug } from '../../../../core/index.js';
import { IExportConfig, IExportContentItem, IExportElement } from '../../../export.models.js';
+ import { translationHelper } from '../../../../translation/index.js';

@@ -6,0 +6,0 @@ export class ExportContentItemHelper {

import { IContentType, IDeliveryClient, ILanguage, createDeliveryClient } from '@kontent-ai/delivery-sdk';
- import { logDebug } from '../../../core/log-helper.js';
import { IExportAdapter, IExportAdapterResult, IExportAsset, IExportConfig } from '../../export.models.js';

@@ -7,2 +6,3 @@ import { exportContentItemHelper } from './helpers/export-content-item.helper.js';

import { exportAssetsHelper } from './helpers/export-assets-item.helper.js';
+ import { logDebug } from '../../../core/index.js';

@@ -9,0 +9,0 @@ export class KontentAiExportAdapter implements IExportAdapter {

@@ -5,4 +5,3 @@ import { parse } from 'csv-parse';

import { Readable } from 'stream';
- import { IFileData } from '../file-processor.models.js';
- import { IExportAsset } from '../../export/index.js';
+ import { AssetsParseData, AssetsTransformData, FileBinaryData } from '../file-processor.models.js';
import { BaseAssetProcessorService } from '../base-asset-processor.service.js';

@@ -12,8 +11,9 @@

private readonly csvDelimiter: string = ',';
+ private readonly assetsFilename: string = 'assets.csv';
public readonly name: string = 'csv';
- async transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]> {
+ async transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData> {
const assetFields: FieldInfo<string>[] = this.getAssetFields();
const stream = new Readable();
- stream.push(JSON.stringify(assets));
+ stream.push(JSON.stringify(data.assets));
stream.push(null); // required to end the stream

@@ -25,14 +25,23 @@

- const data = (await parsingProcessor.promise()) ?? '';
+ const csvContent = (await parsingProcessor.promise()) ?? '';
- return [
- {
- filename: 'assets.csv',
- data: data,
- itemsCount: assets.length
- }
- ];
+ data.zip.addFile(this.assetsFilename, csvContent);
+ for (const exportAsset of data.assets) {
+ await data.zip.addFile(
+ this.getAssetZipFilename(exportAsset.assetId, exportAsset.extension),
+ exportAsset.binaryData
+ );
+ }
+ return data.zip.generateZipAsync();
}
- async parseAssetsAsync(text: string): Promise<IParsedAsset[]> {
+ async parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]> {
+ const text = await data.zip.getFileContentAsync(this.assetsFilename);
+ if (!text) {
+ return [];
+ }
const parsedAssets: IParsedAsset[] = [];

@@ -57,3 +66,4 @@ let index = 0;

filename: '',
- url: ''
+ url: '',
+ binaryData: undefined
};

@@ -68,2 +78,7 @@

+ // add binary data to record
+ parsedAsset.binaryData = await data.zip.getBinaryDataAsync(
+ this.getAssetZipFilename(parsedAsset.assetId, parsedAsset.extension)
+ );
parsedAssets.push(parsedAsset);

@@ -77,2 +92,6 @@ }

+ private getAssetZipFilename(assetId: string, extension: string): string {
+ return `${assetId}.${extension}`; // use id as filename to prevent filename conflicts
+ }
private getCsvParser(config: { fields: string[] | FieldInfo<string>[] }): AsyncParser<string> {

@@ -79,0 +98,0 @@ return new AsyncParser({

@@ -1,4 +0,3 @@

- import { IExportAsset } from '../../export/index.js';
- import { IParsedAsset } from '../../import/index.js';
- import { IFileData } from '../file-processor.models.js';
+ import { IParsedAsset, IParsedAssetRecord } from '../../import/index.js';
+ import { AssetsParseData, AssetsTransformData, FileBinaryData } from '../file-processor.models.js';
import { BaseAssetProcessorService } from '../base-asset-processor.service.js';

@@ -8,26 +7,50 @@

public readonly name: string = 'json';
+ private readonly assetsFilename: string = 'assets.json';
- async transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]> {
-     return [
-         {
-             filename: 'assets.json',
-             itemsCount: assets.length,
-             data: JSON.stringify(
-                 assets.map((m) => {
-                     const parsedAsset: IParsedAsset = {
-                         assetId: m.assetId,
-                         extension: m.extension,
-                         filename: m.filename,
-                         url: m.url
-                     };
-                     return parsedAsset;
-                 })
-             )
-         }
-     ];
- }
- async parseAssetsAsync(text: string): Promise<IParsedAsset[]> {
-     return JSON.parse(text) as IParsedAsset[];
- }
+ async transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData> {
+     const assetRecords: IParsedAssetRecord[] = [];
+     for (const exportAsset of data.assets) {
+         assetRecords.push({
+             assetId: exportAsset.assetId,
+             extension: exportAsset.extension,
+             filename: exportAsset.filename,
+             url: exportAsset.url
+         });
+         await data.zip.addFile(
+             this.getAssetZipFilename(exportAsset.assetId, exportAsset.extension),
+             exportAsset.binaryData
+         );
+     }
+     data.zip.addFile(this.assetsFilename, JSON.stringify(assetRecords));
+     return await data.zip.generateZipAsync();
+ }
+ async parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]> {
+     const text = await data.zip.getFileContentAsync(this.assetsFilename);
+     if (!text) {
+         return [];
+     }
+     const assetRecords: IParsedAssetRecord[] = JSON.parse(text);
+     const parsedAssets: IParsedAsset[] = [];
+     for (const assetRecord of assetRecords) {
+         parsedAssets.push({
+             ...assetRecord,
+             binaryData: await data.zip.getBinaryDataAsync(
+                 this.getAssetZipFilename(assetRecord.assetId, assetRecord.extension)
+             )
+         });
+     }
+     return parsedAssets;
+ }
+ private getAssetZipFilename(assetId: string, extension: string): string {
+     return `${assetId}.${extension}`; // use id as filename to prevent filename conflicts
+ }
}
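// Editor's note: after this change assets.json carries only the IParsedAssetRecord fields,
// while each binary sits next to it in the zip under '<assetId>.<extension>'. A sample
// payload (values hypothetical):
// [
//     {
//         "assetId": "f47ac10b-58cc-4372-a567-0e02b2c3d479",
//         "extension": "png",
//         "filename": "hero-image.png",
//         "url": "https://example.com/assets/hero-image.png"
//     }
// ]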

@@ -1,9 +0,8 @@

- import { IExportAsset } from '../export/index.js';
import { IParsedAsset } from '../import/index.js';
- import { IFileData, IAssetFormatService } from './file-processor.models.js';
+ import { IAssetFormatService, AssetsTransformData, FileBinaryData, AssetsParseData } from './file-processor.models.js';
export abstract class BaseAssetProcessorService implements IAssetFormatService {
abstract name: string;
- abstract transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
- abstract parseAssetsAsync(text: string): Promise<IParsedAsset[]>;
+ abstract transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData>;
+ abstract parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]>;

@@ -10,0 +9,0 @@ protected getSystemAssetFields(): string[] {

import { IImportContentType, IImportContentTypeElement, IParsedContentItem } from '../import/index.js';
- import { IItemFormatService, IFileData } from './file-processor.models.js';
- import { IExportContentItem } from '../export/index.js';
+ import { IItemFormatService, ItemsTransformData, ItemsParseData, FileBinaryData } from './file-processor.models.js';
+ import { logErrorAndExit } from '../core/index.js';
export abstract class BaseItemProcessorService implements IItemFormatService {
abstract name: string;
- abstract transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
- abstract parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
+ abstract transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData>;
+ abstract parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]>;

@@ -22,3 +22,5 @@ protected getSystemContentItemFields(): string[] {

if (!type) {
- throw Error(`Could not find content type '${contentItemType}'`);
+ logErrorAndExit({
+ message: `Could not find content type '${contentItemType}'`
+ });
}

@@ -29,5 +31,5 @@

if (!element) {
- throw Error(
- `Could not find element with codename '${elementCodename}' for type '${type.contentTypeCodename}'`
- );
+ logErrorAndExit({
+ message: `Could not find element with codename '${elementCodename}' for type '${type.contentTypeCodename}'`
+ });
}

@@ -34,0 +36,0 @@

import { IExportContentItem, IExportAsset } from '../export/index.js';
import { IImportContentType, IParsedAsset, IParsedContentItem } from '../import/index.js';
+ import { ZipPackage } from './zip-package.class.js';

@@ -9,2 +10,4 @@ /**

+ export type FileBinaryData = Blob | Buffer;
export type ProcessingFormat = 'csv' | 'json' | 'jsonJoined';

@@ -14,14 +17,31 @@

+ export type ItemsTransformData = {
+ readonly zip: ZipPackage;
+ readonly items: IExportContentItem[];
+ };
+ export type ItemsParseData = {
+ readonly zip: ZipPackage;
+ readonly types: IImportContentType[];
+ };
export interface IItemFormatService {
name: string;
- transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
- parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
+ transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData>;
+ parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]>;
}
+ export type AssetsTransformData = {
+ readonly zip: ZipPackage;
+ readonly assets: IExportAsset[];
+ };
+ export type AssetsParseData = {
+ readonly zip: ZipPackage;
+ };
export interface IAssetFormatService {
name: string;
- transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
- parseAssetsAsync(text: string): Promise<IParsedAsset[]>;
+ transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData>;
+ parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]>;
}
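// Editor's note: a sketch of what the reworked contract means for a custom format service.
// The ZipPackage methods used here (addFile, getFileContentAsync, generateZipAsync) are
// taken from their usages elsewhere in this diff; the class itself is hypothetical.
class MinimalJsonAssetFormatService implements IAssetFormatService {
    public readonly name: string = 'minimal-json';
    async transformAssetsAsync(data: AssetsTransformData): Promise<FileBinaryData> {
        // write a single metadata file and let ZipPackage produce the final binary
        data.zip.addFile(
            'assets.json',
            JSON.stringify(data.assets.map((m) => ({ assetId: m.assetId, filename: m.filename })))
        );
        return await data.zip.generateZipAsync();
    }
    async parseAssetsAsync(data: AssetsParseData): Promise<IParsedAsset[]> {
        const text = await data.zip.getFileContentAsync('assets.json');
        return text ? (JSON.parse(text) as IParsedAsset[]) : [];
    }
}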

@@ -37,9 +57,1 @@

- export interface IFileProcessorConfig {
- }
- export interface IFileData {
- filename: string;
- data: string;
- itemsCount: number;
- }
import colors from 'colors';
import JSZip from 'jszip';
import { Blob } from 'buffer';
- import { IExportAdapterResult, IExportContentItem } from '../export/index.js';
- import { IImportAsset, IParsedContentItem, IImportSource, IParsedAsset, IImportContentType } from '../import/index.js';
+ import { IExportAdapterResult } from '../export/index.js';
+ import { IParsedContentItem, IImportSource, IImportContentType, IParsedAsset } from '../import/index.js';
import {
- IFileData,
- IFileProcessorConfig,
IItemFormatService,
- IExtractedBinaryFileData,
- ZipCompressionLevel,
- ZipContext,
- IAssetFormatService
+ IAssetFormatService,
+ FileBinaryData
} from './file-processor.models.js';
- import { IExportTransformConfig, IPackageMetadata, formatBytes, getExtension } from '../core/index.js';
- import mime from 'mime';
- import { logDebug, logProcessingDebug } from '../core/log-helper.js';
+ import { IExportTransformConfig, logDebug } from '../core/index.js';
+ import { ZipPackage } from './zip-package.class.js';
export class FileProcessorService {
- private readonly zipContext: ZipContext = 'node.js';
- private readonly metadataName: string = '_metadata.json';
- private readonly binaryFilesFolderName: string = 'files';
- constructor(config?: IFileProcessorConfig) {}
+ constructor() {}
async parseZipAsync(data: {

@@ -39,4 +29,8 @@ items?: {

}): Promise<IImportSource> {
- let itemsZipFile: JSZip | undefined = undefined;
- let assetsZipFile: JSZip | undefined = undefined;
+ const result: IImportSource = {
+ importData: {
+ items: [],
+ assets: []
+ }
+ };

@@ -48,3 +42,11 @@ if (data.items) {

});
- itemsZipFile = await JSZip.loadAsync(data.items.file, {});
+ const itemsZipFile = await JSZip.loadAsync(data.items.file, {});
+ result.importData.items.push(
+ ...(await data.items.formatService.parseContentItemsAsync({
+ zip: new ZipPackage(itemsZipFile),
+ types: data.types
+ }))
+ );
logDebug({

@@ -61,3 +63,10 @@ type: 'info',

});
- assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
+ const assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
+ result.importData.assets.push(
+ ...(await data.assets.formatService.parseAssetsAsync({
+ zip: new ZipPackage(assetsZipFile)
+ }))
+ );
logDebug({

@@ -69,16 +78,2 @@ type: 'info',

- const result: IImportSource = {
- importData: {
- items:
- itemsZipFile && data.items
- ? await this.parseContentItemsFromZipAsync(itemsZipFile, data.types, data.items.formatService)
- : [],
- assets:
- assetsZipFile && data.assets
- ? await this.parseAssetsFromFileAsync(assetsZipFile, data.assets?.formatService)
- : []
- },
- metadata: itemsZipFile ? await this.parseMetadataFromZipAsync(itemsZipFile, this.metadataName) : undefined
- };
logDebug({

@@ -104,3 +99,3 @@ type: 'info',

let parsedItems: IParsedContentItem[] = [];
- let parsedAssets: IImportAsset[] = [];
+ let parsedAssets: IParsedAsset[] = [];

@@ -113,3 +108,7 @@ if (data.items) {

- parsedItems = await data.items.formatService.parseContentItemsAsync(data.items.file.toString(), data.types);
+ const itemsZipFile = await JSZip.loadAsync(data.items.file, {});
+ parsedItems = await data.items.formatService.parseContentItemsAsync({
+ zip: new ZipPackage(itemsZipFile),
+ types: data.types
+ });
}

@@ -124,3 +123,5 @@

const assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
- parsedAssets = await this.parseAssetsFromFileAsync(assetsZipFile, data.assets.formatService);
+ parsedAssets = await data.assets.formatService.parseAssetsAsync({
+ zip: new ZipPackage(assetsZipFile)
+ });
}

@@ -132,4 +133,3 @@

assets: parsedAssets
},
- metadata: undefined
}
};

@@ -154,51 +154,15 @@

}
- ): Promise<any> {
-     const zip = new JSZip();
-     const contentItemsFolder = zip;
+ ): Promise<FileBinaryData> {
logDebug({
type: 'info',
- message: `Transforming '${exportData.items.length.toString()}' content items`,
- partA: config.itemFormatService?.name
+ message: `Creating items zip`,
+ partA: config.itemFormatService.name
});
- const transformedLanguageVariantsFileData = await this.transformLanguageVariantsAsync(
-     exportData.items,
-     config.itemFormatService
- );
- for (const fileInfo of transformedLanguageVariantsFileData) {
-     logDebug({
-         type: 'info',
-         message: `Adding '${fileInfo.itemsCount}' items to file within zip`,
-         partA: fileInfo.filename
-     });
-     contentItemsFolder.file(fileInfo.filename, fileInfo.data);
- }
- const zipOutputType = this.getZipOutputType(this.zipContext);
- const compressionLevel: number = config.compressionLevel ?? 9;
- logDebug({
-     type: 'info',
-     message: `Creating zip file using '${zipOutputType}' with compression level '${compressionLevel.toString()}'`
- });
- const zipData = await zip.generateAsync({
-     type: zipOutputType,
-     compression: 'DEFLATE',
-     compressionOptions: {
-         level: compressionLevel
-     },
-     streamFiles: true
- });
- logDebug({
-     type: 'info',
-     message: `Zip successfully generated`,
-     partA: formatBytes(this.getZipSizeInBytes(zipData))
- });
- return zipData;
+ const zip = await config.itemFormatService.transformContentItemsAsync({
+     items: exportData.items,
+     zip: new ZipPackage(new JSZip())
+ });
+ return zip;
}
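// Editor's note: zip assembly now happens inside the format services via ZipPackage, so
// compression and output-type concerns move out of this class; a minimal sketch of that
// flow (method names taken from usages in this diff, return type is Blob | Buffer):
// const zipPackage = new ZipPackage(new JSZip());
// zipPackage.addFile('items.json', '[]');
// const binary: FileBinaryData = await zipPackage.generateZipAsync();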

@@ -212,263 +176,16 @@

}
- ): Promise<any> {
-     const zip = new JSZip();
-     const assetsFolder = zip;
-     const filesFolder = zip.folder(this.binaryFilesFolderName);
-     if (!filesFolder) {
-         throw Error(`Could not create folder '${this.binaryFilesFolderName}'`);
-     }
-     if (exportData.assets.length) {
-         logDebug({
-             type: 'info',
-             message: `Transforming '${exportData.assets.length.toString()}' assets`,
-             partA: config.assetFormatService?.name
-         });
-         const transformedAssetsFileData = await config.assetFormatService.transformAssetsAsync(exportData.assets);
-         for (const fileInfo of transformedAssetsFileData) {
-             logDebug({
-                 type: 'info',
-                 message: `Adding '${fileInfo.itemsCount}' items to file within zip`,
-                 partA: fileInfo.filename
-             });
-             assetsFolder.file(fileInfo.filename, fileInfo.data);
-         }
-         let assetIndex: number = 1;
-         for (const asset of exportData.assets) {
-             const assetFilename = `${asset.assetId}.${asset.extension}`; // use id as filename to prevent filename conflicts
-             logProcessingDebug({
-                 index: assetIndex,
-                 totalCount: exportData.assets.length,
-                 itemType: 'zipFile',
-                 title: `'${assetFilename}'`
-             });
-             filesFolder.file(assetFilename, asset.binaryData, {
-                 binary: true
-             });
-             assetIndex++;
-         }
-         logDebug({
-             type: 'info',
-             message: `All assets added to zip`
-         });
-     } else {
-         logDebug({
-             type: 'info',
-             message: `There are no assets`
-         });
-     }
-     const zipOutputType = this.getZipOutputType(this.zipContext);
-     const compressionLevel: number = config.compressionLevel ?? 9;
-     logDebug({
-         type: 'info',
-         message: `Creating zip file using '${zipOutputType}' with compression level '${compressionLevel.toString()}'`
-     });
-     const zipData = await zip.generateAsync({
-         type: zipOutputType,
-         compression: 'DEFLATE',
-         compressionOptions: {
-             level: compressionLevel
-         },
-         streamFiles: true
-     });
-     logDebug({
-         type: 'info',
-         message: `Zip successfully generated`,
-         partA: formatBytes(this.getZipSizeInBytes(zipData))
-     });
-     return zipData;
+ ): Promise<FileBinaryData> {
+     logDebug({
+         type: 'info',
+         message: `Creating assets zip`,
+         partA: config.assetFormatService?.name
+     });
+     const zip = await config.assetFormatService.transformAssetsAsync({
+         assets: exportData.assets,
+         zip: new ZipPackage(new JSZip())
+     });
+     return zip;
}
- private getZipSizeInBytes(zipData: any): number {
-     if (zipData instanceof Blob) {
-         return zipData.size;
-     } else if (zipData instanceof Buffer) {
-         return zipData.byteLength;
-     }
-     throw Error(`Unrecognized zip data type '${typeof zipData}'`);
- }
- private async transformLanguageVariantsAsync(
-     items: IExportContentItem[],
-     formatService: IItemFormatService
- ): Promise<IFileData[]> {
-     return await formatService.transformContentItemsAsync(items);
- }
- private async parseAssetsFromFileAsync(
-     zip: JSZip,
-     assetFormatService: IAssetFormatService
- ): Promise<IImportAsset[]> {
-     const importAssets: IImportAsset[] = [];
-     const parsedAssets: IParsedAsset[] = [];
-     const files = zip.files;
-     const binaryFiles: IExtractedBinaryFileData[] = await this.extractBinaryFilesAsync(zip);
-     for (const [, file] of Object.entries(files)) {
-         if (file?.name?.endsWith('/')) {
-             continue;
-         }
-         if (file?.name?.toLowerCase() === this.metadataName.toLowerCase()) {
-             continue;
-         }
-         if (file?.name?.startsWith(this.binaryFilesFolderName)) {
-             continue;
-         }
-         const text = await file.async('string');
-         parsedAssets.push(...(await assetFormatService.parseAssetsAsync(text)));
-     }
-     for (const parsedAsset of parsedAssets) {
-         const binaryFile = binaryFiles.find((m) => m.assetId === parsedAsset.assetId);
-         if (!binaryFile) {
-             throw Error(`Could not find binary data for asset with id '${parsedAsset.assetId}'`);
-         }
-         importAssets.push({
-             assetId: parsedAsset.assetId,
-             extension: binaryFile.extension,
-             filename: parsedAsset.filename,
-             mimeType: binaryFile.mimeType,
-             binaryData: binaryFile.binaryData
-         });
-     }
-     return importAssets;
- }
- private async extractBinaryFilesAsync(zip: JSZip): Promise<IExtractedBinaryFileData[]> {
-     const extractedFiles: IExtractedBinaryFileData[] = [];
-     let assetIndex: number = 0;
-     const files = Object.entries(zip.files);
-     for (const [, file] of files) {
-         assetIndex++;
-         logProcessingDebug({
-             index: assetIndex,
-             totalCount: files.length,
-             itemType: 'zipFile',
-             title: file.name
-         });
-         if (!file?.name?.startsWith(`${this.binaryFilesFolderName}/`)) {
-             // iterate through assets only
-             continue;
-         }
-         if (file?.name?.endsWith('/')) {
-             continue;
-         }
-         const binaryData = await file.async(this.getZipOutputType(this.zipContext));
-         logDebug({
-             type: 'extractBinaryData',
-             message: file.name,
-             partA: formatBytes(this.getZipSizeInBytes(binaryData))
-         });
-         const assetId = this.getAssetIdFromFilename(file.name);
-         const extension = getExtension(file.name) ?? '';
-         const filename = file.name;
-         const mimeType = mime.getType(file.name) ?? '';
-         extractedFiles.push({
-             assetId: assetId,
-             binaryData: binaryData,
-             filename: filename,
-             mimeType: mimeType,
-             extension: extension
-         });
-     }
-     logDebug({
-         type: 'info',
-         message: `All binary files (${extractedFiles.length}) were extracted`
-     });
-     return extractedFiles;
- }
- private getAssetIdFromFilename(filename: string): string {
-     const split = filename.split('/');
-     const filenameWithExtension = split[1];
-     return filenameWithExtension.split('.')[0];
- }
- private getZipOutputType(context: ZipContext): 'nodebuffer' | 'blob' {
-     if (context === 'browser') {
-         return 'blob';
-     }
-     if (context === 'node.js') {
-         return 'nodebuffer';
-     }
-     throw Error(`Unsupported context '${context}'`);
- }
- private async parseContentItemsFromZipAsync(
-     fileContents: JSZip,
-     types: IImportContentType[],
-     formatService: IItemFormatService
- ): Promise<IParsedContentItem[]> {
-     const files = fileContents.files;
-     const parsedItems: IParsedContentItem[] = [];
-     for (const file of Object.values(files)) {
-         if (file?.name?.endsWith('/')) {
-             continue;
-         }
-         if (file?.name?.toLowerCase() === this.metadataName.toLowerCase()) {
-             continue;
-         }
-         const text = await file.async('text');
-         parsedItems.push(...(await formatService.parseContentItemsAsync(text, types)));
-     }
-     return parsedItems;
- }
- private async parseMetadataFromZipAsync(
-     fileContents: JSZip,
-     filename: string
- ): Promise<undefined | IPackageMetadata> {
-     const files = fileContents.files;
-     const file = files[filename];
-     if (!file) {
-         // metadata is not required
-         return undefined;
-     }
-     const text = await file.async('text');
-     return JSON.parse(text);
- }
}
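// Editor's note: a usage sketch for the slimmed-down service; the parseZipAsync argument
// shape (items/assets files, format services, types) follows the fragments shown above,
// while file names and the empty types array are hypothetical.
// (Format service imports are omitted; both classes appear elsewhere in this diff.)
import { promises as fileSystem } from 'fs';
async function parseExportedZips(): Promise<IImportSource> {
    const fileProcessorService = new FileProcessorService();
    return await fileProcessorService.parseZipAsync({
        items: {
            file: await fileSystem.readFile('items.zip'),
            formatService: new ItemJsonProcessorService()
        },
        assets: {
            file: await fileSystem.readFile('assets.zip'),
            formatService: new AssetJsonProcessorService()
        },
        types: [] // IImportContentType[] loaded from the target environment in real use
    });
}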

@@ -8,1 +8,2 @@ export * from './file-processor.service.js';

export * from './asset-formats/asset-json-processor.service.js';
+ export * from './zip-package.class.js';
import { parse } from 'csv-parse';
import { AsyncParser, FieldInfo } from 'json2csv';
- import { IImportContentType, IParsedContentItem } from '../../import/index.js';
+ import { IParsedContentItem } from '../../import/index.js';
import { Readable } from 'stream';
- import { IFileData } from '../file-processor.models.js';
+ import { FileBinaryData, ItemsParseData, ItemsTransformData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';

@@ -30,7 +30,5 @@ import { IExportContentItem } from '../../export/index.js';

- async transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]> {
-     const fileData: IFileData[] = [];
-     for (const typeWrapper of this.getTypeWrappers(items)) {
-         const contentItemsOfType = items
+ async transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData> {
+     for (const typeWrapper of this.getTypeWrappers(data.items)) {
+         const contentItemsOfType = data.items
.filter((m) => m.system.type === typeWrapper.typeCodename)
.filter((m) => m.system.type === typeWrapper.typeCodename)

@@ -49,69 +47,69 @@ .map((item) => this.mapToCsvItem(item, typeWrapper));

- const data = (await parsingProcessor.promise()) ?? '';
+ const csvContent = (await parsingProcessor.promise()) ?? '';
- fileData.push({
-     data: data,
-     filename: filename,
-     itemsCount: contentItemsOfType.length
- });
+ data.zip.addFile(filename, csvContent);
}
- return fileData;
+ return await data.zip.generateZipAsync();
}
- async parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]> {
+ async parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]> {
+ const zipFiles = await data.zip.getAllFilesAsync<string>('string');
const parsedItems: IParsedContentItem[] = [];
- let index = 0;
- const parser = parse(text, {
-     cast: true,
-     delimiter: this.csvDelimiter
- });
- let parsedColumns: string[] = [];
- const systemFields = super.getSystemContentItemFields();
- for await (const record of parser) {
-     if (index === 0) {
-         // process header row
-         parsedColumns = record;
-     } else {
-         // process data row
-         const contentItem: IParsedContentItem = {
-             system: {
-                 type: '',
-                 codename: '',
-                 collection: '',
-                 language: '',
-                 last_modified: '',
-                 name: '',
-                 workflow_step: ''
-             },
-             elements: []
-         };
-         let fieldIndex: number = 0;
-         const contentItemTypeCodename: string = record[0]; // type is set in first index
-         for (const columnName of parsedColumns) {
-             const columnValue = record[fieldIndex];
-             if (systemFields.find((m) => m.toLowerCase() === columnName.toLowerCase())) {
-                 // column is system field
-                 (contentItem.system as any)[columnName] = columnValue;
-             } else {
-                 // column is element field
-                 const element = super.getElement(types, contentItemTypeCodename, columnName);
-                 contentItem.elements.push({
-                     codename: element.codename,
-                     value: columnValue,
-                     type: element.type
-                 });
-             }
-             fieldIndex++;
-         }
-         parsedItems.push(contentItem);
-     }
-     index++;
- }
+ for (const file of zipFiles) {
+     let index = 0;
+     const parser = parse(file.data, {
+         cast: true,
+         delimiter: this.csvDelimiter
+     });
+     let parsedColumns: string[] = [];
+     const systemFields = super.getSystemContentItemFields();
+     for await (const record of parser) {
+         if (index === 0) {
+             // process header row
+             parsedColumns = record;
+         } else {
+             // process data row
+             const contentItem: IParsedContentItem = {
+                 system: {
+                     type: '',
+                     codename: '',
+                     collection: '',
+                     language: '',
+                     last_modified: '',
+                     name: '',
+                     workflow_step: ''
+                 },
+                 elements: []
+             };
+             let fieldIndex: number = 0;
+             const contentItemTypeCodename: string = record[0]; // type is set in first index
+             for (const columnName of parsedColumns) {
+                 const columnValue = record[fieldIndex];
+                 if (systemFields.find((m) => m.toLowerCase() === columnName.toLowerCase())) {
+                     // column is system field
+                     (contentItem.system as any)[columnName] = columnValue;
+                 } else {
+                     // column is element field
+                     const element = super.getElement(data.types, contentItemTypeCodename, columnName);
+                     contentItem.elements.push({
+                         codename: element.codename,
+                         value: columnValue,
+                         type: element.type
+                     });
+                 }
+                 fieldIndex++;
+             }
+             parsedItems.push(contentItem);
+         }
+         index++;
+     }
+ }
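// Editor's note: a sketch of the per-type CSV layout this parser walks through; the first
// column carries the type codename and system columns sit alongside element columns.
// Header and values are hypothetical:
// type,codename,name,language,collection,last_modified,workflow_step,title
// article,my_article,My article,en,default,2023-01-01T00:00:00Z,published,Hello world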

@@ -118,0 +116,0 @@

@@ -1,56 +0,33 @@

- import { IImportContentType, IParsedContentItem } from '../../import/index.js';
- import { IFileData } from '../file-processor.models.js';
+ import { IParsedContentItem } from '../../import/index.js';
+ import { FileBinaryData, ItemsParseData, ItemsTransformData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
- import { ItemJsonProcessorService } from './item-json-processor.service.js';
- import { IExportContentItem } from '../../export/index.js';
+ import { IJsonItem, mapToJsonItem, parseJsonItem } from './helpers/json-item.helper.js';
- interface IJsonElements {
-     [elementCodename: string]: string | string[] | undefined;
- }
- interface IJsonItem {
-     system: {
-         codename: string;
-         name: string;
-         language: string;
-         type: string;
-         collection: string;
-         last_modified: string;
-         workflow_step?: string;
-     };
-     elements: IJsonElements;
- }
export class ItemJsonJoinedProcessorService extends BaseItemProcessorService {
- private readonly jsonProcessorService = new ItemJsonProcessorService();
+ private readonly itemsFileName: string = 'items.json';
public readonly name: string = 'json';
- async transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]> {
-     const multiFileJsonFileData = await this.jsonProcessorService.transformContentItemsAsync(items);
-     const allJsonItems: IJsonItem[] = multiFileJsonFileData
-         .map((m) => {
-             const items: IJsonItem[] = JSON.parse(m.data);
-             return items;
-         })
-         .reduce<IJsonItem[]>((prev, current) => {
-             prev.push(...current);
-             return prev;
-         }, []);
-     // join data
-     const joinedFileData: IFileData[] = [
-         {
-             data: JSON.stringify(allJsonItems),
-             filename: 'items.json',
-             itemsCount: allJsonItems.length
-         }
-     ];
-     return joinedFileData;
- }
- async parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]> {
-     return await this.jsonProcessorService.parseContentItemsAsync(text, types);
- }
+ async transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData> {
+     const jsonItems: IJsonItem[] = data.items.map((m) => mapToJsonItem(m));
+     data.zip.addFile(this.itemsFileName, jsonItems.length ? JSON.stringify(jsonItems) : '[]');
+     return await data.zip.generateZipAsync();
+ }
+ async parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]> {
+     const text = await data.zip.getFileContentAsync(this.itemsFileName);
+     if (!text) {
+         return [];
+     }
+     const jsonItems: IJsonItem[] = JSON.parse(text);
+     return jsonItems.map((m) =>
+         parseJsonItem(m, (typeCodename, elementCodename) =>
+             super.getElement(data.types, typeCodename, elementCodename)
+         )
+     );
+ }
}
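// Editor's note: a sample of the joined items.json payload this service reads and writes,
// matching the IJsonItem shape moved to json-item.helper.js (values hypothetical):
// [
//     {
//         "system": {
//             "codename": "my_article",
//             "name": "My article",
//             "language": "en",
//             "type": "article",
//             "collection": "default",
//             "last_modified": "2023-01-01T00:00:00Z",
//             "workflow_step": "published"
//         },
//         "elements": {
//             "title": "Hello world",
//             "topics": ["news", "tech"]
//         }
//     }
// ]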

@@ -1,80 +0,37 @@

- import { IImportContentType, IParsedContentItem, IParsedElement } from '../../import/index.js';
- import { IFileData } from '../file-processor.models.js';
+ import { IParsedContentItem } from '../../import/index.js';
+ import { FileBinaryData, ItemsParseData, ItemsTransformData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
- import { IExportContentItem } from '../../export/index.js';
+ import { IJsonItem, ITypeWrapper, mapToJsonItem, parseJsonItem } from './helpers/json-item.helper.js';
- interface IJsonElements {
-     [elementCodename: string]: string | string[] | undefined;
- }
- interface IJsonItem {
-     system: {
-         codename: string;
-         name: string;
-         language: string;
-         type: string;
-         collection: string;
-         last_modified?: string;
-         workflow_step?: string;
-     };
-     elements: IJsonElements;
- }
- interface ITypeWrapper {
-     typeCodename: string;
-     items: IExportContentItem[];
- }
export class ItemJsonProcessorService extends BaseItemProcessorService {
public readonly name: string = 'json';
- async transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]> {
-     const fileData: IFileData[] = [];
-     const typeWrappers: ITypeWrapper[] = this.getTypeWrappers(items);
+ async transformContentItemsAsync(data: ItemsTransformData): Promise<FileBinaryData> {
+     const typeWrappers: ITypeWrapper[] = this.getTypeWrappers(data.items);
for (const typeWrapper of typeWrappers) {
const filename: string = `${typeWrapper.typeCodename}.json`;
- const contentItemsOfType = items.filter((m) => m.system.type === typeWrapper.typeCodename);
- const jsonItems: IJsonItem[] = contentItemsOfType.map((m) => this.mapToJsonItem(m));
+ const contentItemsOfType = data.items.filter((m) => m.system.type === typeWrapper.typeCodename);
+ const jsonItems: IJsonItem[] = contentItemsOfType.map((m) => mapToJsonItem(m));
- fileData.push({
-     data: jsonItems.length ? JSON.stringify(jsonItems) : '[]',
-     filename: filename,
-     itemsCount: jsonItems.length
- });
+ data.zip.addFile(filename, jsonItems.length ? JSON.stringify(jsonItems) : '[]');
}
- return fileData;
+ return await data.zip.generateZipAsync();
}
- async parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]> {
+ async parseContentItemsAsync(data: ItemsParseData): Promise<IParsedContentItem[]> {
+ const zipFiles = await data.zip.getAllFilesAsync<string>('string');
const parsedItems: IParsedContentItem[] = [];
- const rawItems: IJsonItem[] = JSON.parse(text) as IJsonItem[];
- for (const rawItem of rawItems) {
-     const elements: IParsedElement[] = [];
-     for (const propertyName of Object.keys(rawItem.elements)) {
-         const element = super.getElement(types, rawItem.system.type, propertyName);
-         elements.push({
-             codename: propertyName,
-             value: rawItem.elements[propertyName],
-             type: element.type
-         });
-     }
-     const parsedItem: IParsedContentItem = {
-         system: {
-             codename: rawItem.system.codename,
-             collection: rawItem.system.collection,
-             language: rawItem.system.language,
-             last_modified: rawItem.system.last_modified,
-             name: rawItem.system.name,
-             type: rawItem.system.type,
-             workflow_step: rawItem.system.workflow_step
-         },
-         elements: elements
-     };
-     parsedItems.push(parsedItem);
- }
+ for (const zipFile of zipFiles) {
+     const jsonItems: IJsonItem[] = JSON.parse(zipFile.data) as IJsonItem[];
+     for (const rawItem of jsonItems) {
+         parsedItems.push(
+             parseJsonItem(rawItem, (typeCodename, elementCodename) =>
+                 super.getElement(data.types, typeCodename, elementCodename)
+             )
+         );
+     }
+ }

@@ -103,24 +60,2 @@

}
- private mapToJsonItem(item: IExportContentItem): IJsonItem {
-     const jsonElements: IJsonElements = {};
-     for (const element of item.elements) {
-         jsonElements[element.codename] = element.value;
-     }
-     const jsonItem: IJsonItem = {
-         system: {
-             codename: item.system.codename,
-             collection: item.system.collection,
-             language: item.system.language,
-             last_modified: item.system.last_modified,
-             name: item.system.name,
-             type: item.system.type,
-             workflow_step: item.system.workflow_step
-         },
-         elements: jsonElements
-     };
-     return jsonItem;
- }
}
import { AssetModels, ManagementClient } from '@kontent-ai/management-sdk';
- import { IImportedData, is404Error, logAction } from '../../core/index.js';
- import { IImportAsset } from '../import.models.js';
- import { logProcessingDebug } from '../../core/log-helper.js';
+ import { IImportedData, is404Error, logItemAction, logProcessingDebug } from '../../core/index.js';
+ import { IParsedAsset } from '../import.models.js';
+ import mime from 'mime';
export class ImportAssetsHelper {
- async importAssetsAsync(
-     managementClient: ManagementClient,
-     assets: IImportAsset[],
-     importedData: IImportedData
- ): Promise<void> {
+ async importAssetsAsync(data: {
+     managementClient: ManagementClient;
+     assets: IParsedAsset[];
+     importedData: IImportedData;
+ }): Promise<void> {
let assetIndex: number = 1;
- for (const asset of assets) {
+ for (const asset of data.assets) {
logProcessingDebug({
index: assetIndex,
- totalCount: assets.length,
+ totalCount: data.assets.length,
itemType: 'asset',

@@ -30,3 +30,3 @@ title: `${asset.filename}`

// and such asset should not be imported again
- existingAsset = await managementClient
+ existingAsset = await data.managementClient
.viewAsset()

@@ -44,3 +44,3 @@ .byAssetExternalId(asset.assetId)

// check if asset with given external id was already created
- existingAsset = await managementClient
+ existingAsset = await data.managementClient
.viewAsset()

@@ -58,7 +58,7 @@ .byAssetExternalId(assetExternalId)

// only import asset if it wasn't already there
- const uploadedBinaryFile = await managementClient
+ const uploadedBinaryFile = await data.managementClient
.uploadBinaryFile()
.withData({
binaryData: asset.binaryData,
- contentType: asset.mimeType ?? '',
+ contentType: mime.getType(asset.filename) ?? '',
filename: asset.filename

@@ -68,7 +68,7 @@ })

- logAction('upload', 'binaryFile', {
+ logItemAction('upload', 'binaryFile', {
title: asset.filename
});
- const createdAsset = await managementClient
+ const createdAsset = await data.managementClient
.addAsset()

@@ -87,3 +87,3 @@ .withData((builder) => {

- importedData.assets.push({
+ data.importedData.assets.push({
imported: createdAsset,

@@ -93,11 +93,11 @@ original: asset

- logAction('create', 'asset', {
+ logItemAction('create', 'asset', {
title: asset.filename
});
} else {
- importedData.assets.push({
+ data.importedData.assets.push({
imported: existingAsset,
original: asset
});
- logAction('skip', 'asset', {
+ logItemAction('skip', 'asset', {
title: asset.filename

@@ -104,0 +104,0 @@ });

import { CollectionModels, ContentItemModels, ManagementClient } from '@kontent-ai/management-sdk';
- import { IImportedData, logAction, extractErrorMessage, is404Error } from '../../core/index.js';
- import { logDebug, logProcessingDebug } from '../../core/log-helper.js';
+ import {
+     IImportedData,
+     extractErrorMessage,
+     is404Error,
+     logItemAction,
+     logDebug,
+     logErrorAndExit,
+     logProcessingDebug
+ } from '../../core/index.js';
import { IParsedContentItem } from '../import.models.js';

@@ -8,18 +15,19 @@ import { ICategorizedParsedItems, parsedItemsHelper } from './parsed-items-helper.js';

export class ImportContentItemHelper {
- async importContentItemsAsync(
-     managementClient: ManagementClient,
-     parsedContentItems: IParsedContentItem[],
-     collections: CollectionModels.Collection[],
-     importedData: IImportedData,
+ async importContentItemsAsync(data: {
+     managementClient: ManagementClient;
+     parsedContentItems: IParsedContentItem[];
+     collections: CollectionModels.Collection[];
+     importedData: IImportedData;
config: {
skipFailedItems: boolean;
}
- ): Promise<ContentItemModels.ContentItem[]> {
+ };
+ }): Promise<ContentItemModels.ContentItem[]> {
const preparedItems: ContentItemModels.ContentItem[] = [];
let itemIndex: number = 0;
- const categorizedParsedItems: ICategorizedParsedItems =
-     parsedItemsHelper.categorizeParsedItems(parsedContentItems);
+ const categorizedParsedItems: ICategorizedParsedItems = parsedItemsHelper.categorizeParsedItems(
+     data.parsedContentItems
+ );
- logAction('skip', 'contentItem', {
+ logItemAction('skip', 'contentItem', {
title: `Skipping '${categorizedParsedItems.componentItems.length}' because they represent component items`

@@ -38,54 +46,13 @@ });

// if content item does not have a workflow step it means it is used as a component within Rich text element
// such items are processed within element transform
- if (!importContentItem.system.workflow_step) {
-     logAction('skip', 'contentItem', {
-         title: `Skipping item because it's a component`,
-         codename: importContentItem.system.codename
-     });
-     continue;
- }
- try {
-     const preparedContentItemResult = await this.prepareContentItemAsync(
-         managementClient,
-         importContentItem,
-         importedData
-     );
-     preparedItems.push(preparedContentItemResult.contentItem);
-     // check if name should be updated, no other changes are supported
-     if (preparedContentItemResult.status === 'itemAlreadyExists') {
-         if (
-             this.shouldUpdateContentItem(
-                 importContentItem,
-                 preparedContentItemResult.contentItem,
-                 collections
-             )
-         ) {
-             const upsertedContentItem = await managementClient
-                 .upsertContentItem()
-                 .byItemCodename(importContentItem.system.codename)
-                 .withData({
-                     name: importContentItem.system.name,
-                     collection: {
-                         codename: importContentItem.system.collection
-                     }
-                 })
-                 .toPromise()
-                 .then((m) => m.data);
-             logAction('upsert', 'contentItem', {
-                 title: `Upserting item '${upsertedContentItem.name}'`,
-                 codename: importContentItem.system.codename
-             });
-         } else {
-             logAction('skip', 'contentItem', {
-                 title: `Item '${importContentItem.system.name}' already exists`,
-                 codename: importContentItem.system.codename
-             });
-         }
-     }
+ try {
+     await this.importContentItemAsync({
+         managementClient: data.managementClient,
+         collections: data.collections,
+         importContentItem: importContentItem,
+         importedData: data.importedData,
+         parsedContentItems: data.parsedContentItems,
+         preparedItems: preparedItems
+     });
} catch (error) {
- if (config.skipFailedItems) {
+ if (data.config.skipFailedItems) {
logDebug({

@@ -106,2 +73,51 @@ type: 'error',

+ private async importContentItemAsync(data: {
+     importContentItem: IParsedContentItem;
+     managementClient: ManagementClient;
+     parsedContentItems: IParsedContentItem[];
+     collections: CollectionModels.Collection[];
+     importedData: IImportedData;
+     preparedItems: ContentItemModels.ContentItem[];
+ }): Promise<void> {
+     const preparedContentItemResult = await this.prepareContentItemAsync(
+         data.managementClient,
+         data.importContentItem,
+         data.importedData
+     );
+     data.preparedItems.push(preparedContentItemResult.contentItem);
+     // check if name should be updated, no other changes are supported
+     if (preparedContentItemResult.status === 'itemAlreadyExists') {
+         if (
+             this.shouldUpdateContentItem(
+                 data.importContentItem,
+                 preparedContentItemResult.contentItem,
+                 data.collections
+             )
+         ) {
+             const upsertedContentItem = await data.managementClient
+                 .upsertContentItem()
+                 .byItemCodename(data.importContentItem.system.codename)
+                 .withData({
+                     name: data.importContentItem.system.name,
+                     collection: {
+                         codename: data.importContentItem.system.collection
+                     }
+                 })
+                 .toPromise()
+                 .then((m) => m.data);
+             logItemAction('upsert', 'contentItem', {
+                 title: `Upserting item '${upsertedContentItem.name}'`,
+                 codename: data.importContentItem.system.codename
+             });
+         } else {
+             logItemAction('skip', 'contentItem', {
+                 title: `Item '${data.importContentItem.system.name}' already exists`,
+                 codename: data.importContentItem.system.codename
+             });
+         }
+     }
+ }
private shouldUpdateContentItem(

@@ -115,3 +131,5 @@ parsedContentItem: IParsedContentItem,

if (!collection) {
- throw Error(`Invalid collection '${parsedContentItem.system.collection}'`);
+ logErrorAndExit({
+ message: `Invalid collection '${parsedContentItem.system.collection}'`
+ });
}

@@ -136,3 +154,3 @@ return (

- logAction('fetch', 'contentItem', {
+ logItemAction('fetch', 'contentItem', {
title: `Loading item '${contentItem.name}'`,

@@ -173,3 +191,3 @@ codename: contentItem.codename

- logAction('create', 'contentItem', {
+ logItemAction('create', 'contentItem', {
title: `Creating item '${contentItem.name}'`,

@@ -176,0 +194,0 @@ codename: contentItem.codename

@@ -9,28 +9,36 @@ import {

} from '@kontent-ai/management-sdk';
- import { logDebug, logProcessingDebug } from '../../core/log-helper.js';
- import { IImportedData, extractErrorMessage, is404Error, logAction, translationHelper } from '../../core/index.js';
+ import {
+     IImportedData,
+     extractErrorMessage,
+     is404Error,
+     logItemAction,
+     logDebug,
+     logErrorAndExit,
+     logProcessingDebug
+ } from '../../core/index.js';
import { IParsedContentItem, IParsedElement } from '../import.models.js';
import { importWorkflowHelper } from './import-workflow.helper.js';
import { ICategorizedParsedItems, parsedItemsHelper } from './parsed-items-helper.js';
+ import { translationHelper } from '../../translation/index.js';
export class ImportLanguageVariantHelper {
async importLanguageVariantsAsync(
managementClient: ManagementClient,
importContentItems: IParsedContentItem[],
workflows: WorkflowModels.Workflow[],
preparedContentItems: ContentItemModels.ContentItem[],
importedData: IImportedData,
async importLanguageVariantsAsync(data: {
managementClient: ManagementClient;
importContentItems: IParsedContentItem[];
workflows: WorkflowModels.Workflow[];
preparedContentItems: ContentItemModels.ContentItem[];
importedData: IImportedData;
config: {
skipFailedItems: boolean;
}
): Promise<void> {
let itemIndex: number = 0;
};
}): Promise<void> {
const categorizedParsedItems: ICategorizedParsedItems = parsedItemsHelper.categorizeParsedItems(
data.importContentItems
);
const categorizedParsedItems: ICategorizedParsedItems =
parsedItemsHelper.categorizeParsedItems(importContentItems);
logAction('skip', 'languageVariant', {
logItemAction('skip', 'languageVariant', {
title: `Skipping '${categorizedParsedItems.componentItems.length}' items because they represent component items`
});
let itemIndex: number = 0;
for (const importContentItem of categorizedParsedItems.regularItems) {

@@ -47,56 +55,22 @@ try {

if (!importContentItem.system.workflow_step) {
throw Error(`Content item '${importContentItem.system.codename}' requires a workflow step to be set`);
}
const upsertedContentItem = preparedContentItems.find(
const preparedContentItem = data.preparedContentItems.find(
(m) => m.codename === importContentItem.system.codename
);
if (!upsertedContentItem) {
throw Error(`Invalid content item for codename '${importContentItem.system.codename}'`);
if (!preparedContentItem) {
logErrorAndExit({
message: `Invalid content item for codename '${importContentItem.system.codename}'`
});
}
await this.prepareLanguageVariantForImportAsync(managementClient, importContentItem, workflows);
const upsertedLanguageVariant = await managementClient
.upsertLanguageVariant()
.byItemCodename(upsertedContentItem.codename)
.byLanguageCodename(importContentItem.system.language)
.withData((builder) => {
const mappedElements: LanguageVariantElements.ILanguageVariantElementBase[] =
importContentItem.elements.map((m) =>
this.getElementContract(importContentItems, m, importedData)
);
return {
elements: mappedElements
};
})
.toPromise()
.then((m) => m.data);
importedData.languageVariants.push({
original: importContentItem,
imported: upsertedLanguageVariant
await this.importLanguageVariantAsync({
importContentItem,
preparedContentItem,
managementClient: data.managementClient,
importContentItems: data.importContentItems,
workflows: data.workflows,
importedData: data.importedData
});
logAction('upsert', 'languageVariant', {
title: `${upsertedContentItem.name}`,
language: importContentItem.system.language,
codename: importContentItem.system.codename,
workflowStep: importContentItem.system.workflow_step
});
// set workflow of language variant
if (importContentItem.system.workflow_step) {
await importWorkflowHelper.setWorkflowOfLanguageVariantAsync(
managementClient,
importContentItem.system.workflow_step,
importContentItem,
workflows
);
}
} catch (error) {
if (config.skipFailedItems) {
if (data.config.skipFailedItems) {
logDebug({

@@ -115,28 +89,82 @@ type: 'error',

private async prepareLanguageVariantForImportAsync(
managementClient: ManagementClient,
importContentItem: IParsedContentItem,
workflows: WorkflowModels.Workflow[]
): Promise<void> {
private async importLanguageVariantAsync(data: {
importContentItem: IParsedContentItem;
preparedContentItem: ContentItemModels.ContentItem;
managementClient: ManagementClient;
importContentItems: IParsedContentItem[];
workflows: WorkflowModels.Workflow[];
importedData: IImportedData;
}): Promise<void> {
await this.prepareLanguageVariantForImportAsync({
importContentItem: data.importContentItem,
managementClient: data.managementClient,
workflows: data.workflows
});
const upsertedLanguageVariant = await data.managementClient
.upsertLanguageVariant()
.byItemCodename(data.preparedContentItem.codename)
.byLanguageCodename(data.importContentItem.system.language)
.withData((builder) => {
const mappedElements: LanguageVariantElements.ILanguageVariantElementBase[] =
data.importContentItem.elements.map((m) =>
this.getElementContract(data.importContentItems, m, data.importedData)
);
return {
elements: mappedElements
};
})
.toPromise()
.then((m) => m.data);
data.importedData.languageVariants.push({
original: data.importContentItem,
imported: upsertedLanguageVariant
});
logItemAction('upsert', 'languageVariant', {
title: `${data.preparedContentItem.name}`,
language: data.importContentItem.system.language,
codename: data.importContentItem.system.codename,
workflowStep: data.importContentItem.system.workflow_step
});
// set workflow of language variant
if (data.importContentItem.system.workflow_step) {
await importWorkflowHelper.setWorkflowOfLanguageVariantAsync(
data.managementClient,
data.importContentItem.system.workflow_step,
data.importContentItem,
data.workflows
);
}
}
private async prepareLanguageVariantForImportAsync(data: {
managementClient: ManagementClient;
importContentItem: IParsedContentItem;
workflows: WorkflowModels.Workflow[];
}): Promise<void> {
let languageVariantOfContentItem: undefined | LanguageVariantModels.ContentItemLanguageVariant;
try {
languageVariantOfContentItem = await managementClient
languageVariantOfContentItem = await data.managementClient
.viewLanguageVariant()
.byItemCodename(importContentItem.system.codename)
.byLanguageCodename(importContentItem.system.language)
.byItemCodename(data.importContentItem.system.codename)
.byLanguageCodename(data.importContentItem.system.language)
.toPromise()
.then((m) => m.data);
logAction('fetch', 'languageVariant', {
title: `${importContentItem.system.name}`,
language: importContentItem.system.language,
codename: importContentItem.system.codename,
workflowStep: importContentItem.system.workflow_step
logItemAction('fetch', 'languageVariant', {
title: `${data.importContentItem.system.name}`,
language: data.importContentItem.system.language,
codename: data.importContentItem.system.codename,
workflowStep: data.importContentItem.system.workflow_step
});
if (!languageVariantOfContentItem) {
throw Error(
`Invalid language variant for item '${importContentItem.system.codename}' of type '${importContentItem.system.type}' and language '${importContentItem.system.language}'`
);
logErrorAndExit({
message: `Invalid language variant for item '${data.importContentItem.system.codename}' of type '${data.importContentItem.system.type}' and language '${data.importContentItem.system.language}'`
});
}

@@ -152,17 +180,17 @@ } catch (error) {

// check if variant is published or archived
if (this.isLanguageVariantPublished(languageVariantOfContentItem, workflows)) {
if (this.isLanguageVariantPublished(languageVariantOfContentItem, data.workflows)) {
// create new version
await managementClient
await data.managementClient
.createNewVersionOfLanguageVariant()
.byItemCodename(importContentItem.system.codename)
.byLanguageCodename(importContentItem.system.language)
.byItemCodename(data.importContentItem.system.codename)
.byLanguageCodename(data.importContentItem.system.language)
.toPromise();
logAction('createNewVersion', 'languageVariant', {
title: `${importContentItem.system.name}`,
language: importContentItem.system.language,
codename: importContentItem.system.codename,
workflowStep: importContentItem.system.workflow_step
logItemAction('createNewVersion', 'languageVariant', {
title: `${data.importContentItem.system.name}`,
language: data.importContentItem.system.language,
codename: data.importContentItem.system.codename,
workflowStep: data.importContentItem.system.workflow_step
});
} else if (this.isLanguageVariantArchived(languageVariantOfContentItem, workflows)) {
} else if (this.isLanguageVariantArchived(languageVariantOfContentItem, data.workflows)) {
// change workflow step to draft

@@ -172,18 +200,18 @@ if (languageVariantOfContentItem.workflow.stepIdentifier.id) {

languageVariantOfContentItem.workflow.stepIdentifier.id,
workflows
data.workflows
);
const newWorkflowStep = workflow.steps[0];
await managementClient
await data.managementClient
.changeWorkflowStepOfLanguageVariant()
.byItemCodename(importContentItem.system.codename)
.byLanguageCodename(importContentItem.system.language)
.byItemCodename(data.importContentItem.system.codename)
.byLanguageCodename(data.importContentItem.system.language)
.byWorkflowStepCodename(newWorkflowStep.codename)
.toPromise();
logAction('unArchive', 'languageVariant', {
title: `${importContentItem.system.name}`,
language: importContentItem.system.language,
codename: importContentItem.system.codename,
workflowStep: importContentItem.system.workflow_step
logItemAction('unArchive', 'languageVariant', {
title: `${data.importContentItem.system.name}`,
language: data.importContentItem.system.language,
codename: data.importContentItem.system.codename,
workflowStep: data.importContentItem.system.workflow_step
});

@@ -235,3 +263,5 @@ }

if (!importContract) {
throw Error(`Missing import contract for element `);
logErrorAndExit({
message: `Missing import contract for element '${element.codename}'`
});
}

@@ -238,0 +268,0 @@

import { ManagementClient, WorkflowModels } from '@kontent-ai/management-sdk';
import { IParsedContentItem } from '../import.models.js';
import { defaultWorkflowCodename, logAction } from '../../core/index.js';
import { logItemAction, logErrorAndExit } from '../../core/index.js';
export class ImportWorkflowHelper {
private readonly defaultWorkflowCodename: string = 'Default';
getWorkflowForGivenStepById(workflowId: string, workflows: WorkflowModels.Workflow[]): WorkflowModels.Workflow {

@@ -35,5 +37,5 @@ return this.getWorkflowForGivenStep(workflows, (workflow) => {

if (!this.doesWorkflowStepExist(workflowStepCodename, workflows)) {
throw Error(
`Could not change workflow step for item '${importContentItem.system.codename}' (${importContentItem.system.name}) because a step with codename '${workflowStepCodename}' does not exist in the target project.`
);
logErrorAndExit({
message: `Could not change workflow step for item '${importContentItem.system.codename}' (${importContentItem.system.name}) because a step with codename '${workflowStepCodename}' does not exist in the target project.`
});
}

@@ -49,3 +51,3 @@

logAction('publish', 'languageVariant', {
logItemAction('publish', 'languageVariant', {
title: `${importContentItem.system.name}`,

@@ -57,3 +59,3 @@ language: importContentItem.system.language,

} else if (this.doesWorkflowStepCodenameRepresentScheduledStep(workflowStepCodename, workflows)) {
logAction('skip', 'languageVariant', {
logItemAction('skip', 'languageVariant', {
title: `Skipping scheduled workflow step for item '${importContentItem.system.name}'`,

@@ -81,3 +83,3 @@ language: importContentItem.system.language,

logAction('archive', 'languageVariant', {
logItemAction('archive', 'languageVariant', {
title: `${importContentItem.system.name}`,

@@ -108,3 +110,3 @@ language: importContentItem.system.language,

logAction('changeWorkflowStep', 'languageVariant', {
logItemAction('changeWorkflowStep', 'languageVariant', {
title: `${importContentItem.system.name}`,

@@ -169,7 +171,9 @@ language: importContentItem.system.language,

const defaultWorkflow = workflows.find(
(m) => m.codename.toLowerCase() === defaultWorkflowCodename.toLowerCase()
(m) => m.codename.toLowerCase() === this.defaultWorkflowCodename.toLowerCase()
);
if (!defaultWorkflow) {
throw Error(`Missing default workflow`);
logErrorAndExit({
message: `Missing default workflow`
});
}

@@ -220,4 +224,2 @@

}
return false;
}

@@ -224,0 +226,0 @@

import { IRetryStrategyOptions } from '@kontent-ai/core-sdk';
import { ContentElementType, IPackageMetadata } from '../core/index.js';
import { ContentElementType } from '../core/index.js';

@@ -13,3 +13,3 @@ export interface IImportConfig {

contentItem?: (item: IParsedContentItem) => boolean | Promise<boolean>;
asset?: (item: IImportAsset) => boolean | Promise<boolean>;
asset?: (item: IParsedAsset) => boolean | Promise<boolean>;
};

@@ -25,13 +25,5 @@ }

export interface IImportAsset {
binaryData: Buffer | Blob;
export interface IParsedAssetRecord {
assetId: string;
filename: string;
mimeType: string | undefined;
extension: string | undefined;
}
export interface IParsedAsset {
assetId: string;
filename: string;
extension: string;

@@ -41,8 +33,11 @@ url: string;

export interface IParsedAsset extends IParsedAssetRecord {
binaryData: Buffer | Blob | undefined;
}
export interface IImportSource {
importData: {
items: IParsedContentItem[];
assets: IImportAsset[];
assets: IParsedAsset[];
};
metadata?: IPackageMetadata;
}
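The previous `IImportAsset` is thus split into `IParsedAssetRecord` (metadata) and `IParsedAsset` (metadata plus optional binary data). A short sketch of constructing one under these definitions; all values are illustrative, and the root re-export and the exact field set (notably `url`) are assumptions based on the surrounding hunks:

```typescript
import { readFileSync } from 'fs';
import { IParsedAsset } from 'xeno-test'; // assumes the interface is re-exported from the package root

// Illustrative values only; the field set mirrors what this diff shows.
const parsedAsset: IParsedAsset = {
    assetId: 'f47ac10b-58cc-4372-a567-0e02b2c3d479', // hypothetical id
    filename: 'hero-image.jpg',
    extension: 'jpg',
    mimeType: 'image/jpeg',
    url: 'https://example.com/assets/hero-image.jpg', // hypothetical url
    binaryData: readFileSync('./hero-image.jpg') // may be undefined when only metadata was exported
};
```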

@@ -49,0 +44,0 @@

@@ -8,3 +8,2 @@ import {

ManagementClient,
SharedModels,
WorkflowModels

@@ -15,6 +14,7 @@ } from '@kontent-ai/management-sdk';

IImportedData,
handleError,
defaultRetryStrategy,
printProjectAndEnvironmentInfoToConsoleAsync,
defaultHttpService
defaultHttpService,
logDebug,
logErrorAndExit
} from '../core/index.js';

@@ -28,3 +28,2 @@ import {

} from './import.models.js';
import { logDebug } from '../core/log-helper.js';
import { importAssetsHelper } from './helpers/import-assets.helper.js';

@@ -78,6 +77,2 @@ import { importContentItemHelper } from './helpers/import-content-item.helper.js';

async importFromSourceAsync(sourceData: IImportSource): Promise<IImportedData> {
return await this.importAsync(sourceData);
}
async importAsync(sourceData: IImportSource): Promise<IImportedData> {

@@ -95,42 +90,39 @@ const importedData: IImportedData = {

// import order matters
try {
// Assets
if (dataToImport.importData.assets.length) {
logDebug({
type: 'info',
message: `Importing assets`
});
await importAssetsHelper.importAssetsAsync(
this.managementClient,
dataToImport.importData.assets,
importedData
);
} else {
logDebug({
type: 'info',
message: `There are no assets to import`
});
}
// #1 Assets
if (dataToImport.importData.assets.length) {
logDebug({
type: 'info',
message: `Importing assets`
});
await importAssetsHelper.importAssetsAsync({
managementClient: this.managementClient,
assets: dataToImport.importData.assets,
importedData: importedData
});
} else {
logDebug({
type: 'info',
message: `There are no assets to import`
});
}
// Content items
if (dataToImport.importData.items.length) {
logDebug({
type: 'info',
message: `Importing content items`
});
await this.importParsedContentItemsAsync(dataToImport.importData.items, importedData);
} else {
logDebug({
type: 'info',
message: `There are no content items to import`
});
}
// #2 Content items
if (dataToImport.importData.items.length) {
logDebug({
type: 'info',
message: `Finished import`
message: `Importing content items`
});
} catch (error) {
this.handleImportError(error);
await this.importParsedContentItemsAsync(dataToImport.importData.items, importedData);
} else {
logDebug({
type: 'info',
message: `There are no content items to import`
});
}
logDebug({
type: 'info',
message: `Finished import`
});
return importedData;

@@ -163,5 +155,5 @@ }

if (!contentTypeSnippet) {
throw Error(
`Could not find content type snippet for element. This snippet is referenced in type '${contentType.codename}'`
);
logErrorAndExit({
message: `Could not find content type snippet for element. This snippet is referenced in type '${contentType.codename}'`
});
}

@@ -192,4 +184,3 @@

items: []
},
metadata: source.metadata
}
};

@@ -205,2 +196,3 @@

dataToImport.importData.assets.push(asset);
} else {
removedAssets++;

@@ -218,2 +210,3 @@ }

dataToImport.importData.items.push(item);
} else {
removedContentItems++;

@@ -252,23 +245,23 @@ }

const preparedContentItems: ContentItemModels.ContentItem[] =
await importContentItemHelper.importContentItemsAsync(
this.managementClient,
parsedContentItems,
collections,
importedData,
{
await importContentItemHelper.importContentItemsAsync({
managementClient: this.managementClient,
collections: collections,
importedData: importedData,
parsedContentItems: parsedContentItems,
config: {
skipFailedItems: this.config.skipFailedItems
}
);
});
// then process language variants
await importLanguageVariantHelper.importLanguageVariantsAsync(
this.managementClient,
parsedContentItems,
workflows,
preparedContentItems,
importedData,
{
await importLanguageVariantHelper.importLanguageVariantsAsync({
managementClient: this.managementClient,
importContentItems: parsedContentItems,
importedData: importedData,
preparedContentItems: preparedContentItems,
workflows: workflows,
config: {
skipFailedItems: this.config.skipFailedItems
}
);
});
}

@@ -289,6 +282,2 @@

}
private handleImportError(error: any | SharedModels.ContentManagementBaseKontentError): void {
handleError(error);
}
}

@@ -6,1 +6,2 @@ // Public API

export * from './file-processor/index.js';
export * from './toolkit/index.js';

@@ -5,4 +5,12 @@ #!/usr/bin/env node

import { ICliFileConfig, CliAction, getExtension, extractErrorMessage, ExportAdapter } from '../../core/index.js';
import {
ICliFileConfig,
CliAction,
getExtension,
ExportAdapter,
logDebug,
handleError,
logErrorAndExit
} from '../../core/index.js';
import {
ItemCsvProcessorService,

@@ -17,3 +25,2 @@ ProcessingFormat,

} from '../../file-processor/index.js';
import { logDebug } from '../../core/log-helper.js';
import { ExportToolkit, ImportToolkit } from '../../toolkit/index.js';

@@ -75,3 +82,5 @@ import { IExportAdapter, KontentAiExportAdapter } from '../../export/index.js';

if (!config.adapter) {
throw Error(`Missing 'adapter' config`);
logErrorAndExit({
message: `Missing 'adapter' config`
});
}

@@ -83,3 +92,5 @@

if (!config.environmentId) {
throw Error(`Invalid environment id`);
logErrorAndExit({
message: `Invalid 'environmentId'`
});
}

@@ -99,11 +110,12 @@

} else {
throw Error(`Missing adapter '${config.adapter}'`);
logErrorAndExit({
message: `Missing adapter '${config.adapter}'`
});
}
const exportToolkit = new ExportToolkit({ adapter });
const itemsFilename = config.itemsFilename ?? getDefaultExportFilename('items');
const assetsFilename = config.assetsFilename ?? getDefaultExportFilename('assets');
await exportToolkit.exportAsync({
const exportToolkit = new ExportToolkit({
adapter,
items: {

@@ -121,2 +133,4 @@ filename: itemsFilename,

await exportToolkit.exportAsync();
logDebug({ type: 'info', message: `Completed` });

@@ -127,6 +141,10 @@ };

if (!config.managementApiKey) {
throw Error(`Missing 'managementApiKey' configuration option`);
logErrorAndExit({
message: `Missing 'managementApiKey' configuration option`
});
}
if (!config.environmentId) {
throw Error(`Missing 'environmentId' configuration option`);
logErrorAndExit({
message: `Missing 'environmentId' configuration option`
});
}

@@ -173,3 +191,5 @@

} else {
throw Error(`Unsupported file type '${itemsFileExtension}'`);
logErrorAndExit({
message: `Unsupported file type '${itemsFileExtension}'`
});
}

@@ -188,3 +208,5 @@

} else {
throw Error(`Invalid action '${config.action}'`);
logErrorAndExit({
message: `Invalid action '${config.action}'`
});
}

@@ -218,3 +240,5 @@ };

if (action === 'export') {
throw Error(`Unsupported export format '${format}'`);
logErrorAndExit({
message: `Unsupported export format '${format}'`
});
}

@@ -227,3 +251,5 @@ }

if (action === 'export') {
throw Error(`Unsupported adapter '${adapter}'`);
logErrorAndExit({
message: `Unsupported adapter '${adapter}'`
});
}

@@ -264,4 +290,3 @@ }

.catch((err) => {
console.error(err);
logDebug({ type: 'error', message: extractErrorMessage(err) });
handleError(err);
});

@@ -278,3 +303,5 @@

throw Error(`Unsupported format '${format}' for assets export`);
logErrorAndExit({
message: `Unsupported format '${format}' for assets export`
});
}

@@ -295,3 +322,5 @@

throw Error(`Unsupported format '${format}' for items export`);
logErrorAndExit({
message: `Unsupported format '${format}' for items export`
});
}

@@ -307,3 +336,5 @@

if (!value) {
throw Error(`Missing '${argName}' argument value`);
logErrorAndExit({
message: `Missing '${argName}' argument value`
});
}

@@ -310,0 +341,0 @@

import { promises } from 'fs';
import { logDebug } from '../../core/log-helper.js';
import { logDebug } from '../../core/index.js';

@@ -4,0 +4,0 @@ export class FileService {

@@ -7,2 +7,10 @@ import { IExportAdapter, IExportAdapterResult } from '../export/index.js';

adapter: IExportAdapter;
items: {
filename: string;
formatService: IItemFormatService;
};
assets?: {
filename: string;
formatService: IAssetFormatService;
};
}

@@ -16,16 +24,7 @@

async exportAsync(config: {
items: {
filename: string;
formatService: IItemFormatService;
};
assets?: {
filename: string;
formatService: IAssetFormatService;
};
}): Promise<IExportAdapterResult> {
async exportAsync(): Promise<IExportAdapterResult> {
const data = await this.config.adapter.exportAsync();
const itemsZipFile = await this.fileProcessorService.createItemsZipAsync(data, {
itemFormatService: config.items.formatService,
itemFormatService: this.config.items.formatService,
transformConfig: {

@@ -38,10 +37,10 @@ richTextConfig: {

await this.fileService.writeFileAsync(config.items.filename, itemsZipFile);
await this.fileService.writeFileAsync(this.config.items.filename, itemsZipFile);
if (data.assets.length && config.assets) {
if (data.assets.length && this.config.assets) {
const assetsZipFile = await this.fileProcessorService.createAssetsZipAsync(data, {
assetFormatService: config.assets.formatService
assetFormatService: this.config.assets.formatService
});
await this.fileService.writeFileAsync(config.assets.filename, assetsZipFile);
await this.fileService.writeFileAsync(this.config.assets.filename, assetsZipFile);
}
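With this change, `ExportToolkit` receives its item/asset file configuration in the constructor, and `exportAsync()` takes no arguments. A minimal usage sketch of the new shape, assuming `ExportToolkit`, `KontentAiExportAdapter` and `ItemCsvProcessorService` are re-exported from the package root, that the adapter accepts an environment id, and that the format service is parameterless (all of these are assumptions, not confirmed by this diff):

```typescript
import { ExportToolkit, KontentAiExportAdapter, ItemCsvProcessorService } from 'xeno-test';

async function run(): Promise<void> {
    // Adapter construction is not shown in this diff; the option name is an assumption.
    const adapter = new KontentAiExportAdapter({
        environmentId: '<environment-id>'
    });

    // 0.0.18 moves this configuration from exportAsync() into the constructor.
    const exportToolkit = new ExportToolkit({
        adapter,
        items: {
            filename: 'items.csv',
            formatService: new ItemCsvProcessorService()
        }
        // the optional 'assets' config is omitted here
    });

    // exportAsync() now reads the configuration captured by the constructor
    await exportToolkit.exportAsync();
    console.log('Export completed');
}

run().catch(console.error);
```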

@@ -48,0 +47,0 @@

@@ -46,3 +46,3 @@ import { FileProcessorService, IAssetFormatService, IItemFormatService } from '../file-processor/index.js';

// import into target environment
await importService.importFromSourceAsync(data);
await importService.importAsync(data);
}

@@ -74,4 +74,4 @@

// import into target environment
await importService.importFromSourceAsync(data);
await importService.importAsync(data);
}
}
{
"name": "xeno-test",
"version": "0.0.17",
"version": "0.0.18",
"description": "This program can be used to import content related data into Kontent.ai from various formats. Additionally, it can also be used to export Kontent.ai data using Delivery API.",

@@ -8,3 +8,3 @@ "preferGlobal": true,

"bin": {
"kdm": "./dist/es2022/node/cli/app.js"
"kontent-ai-migration-toolkit": "./dist/es2022/node/cli/app.js"
},

@@ -11,0 +11,0 @@ "repository": {

@@ -13,7 +13,13 @@ # Kontent.ai Migration Toolkit

> are absolutely sure you know what you are doing. Instead, we recommend that you create a new environment based on your
> production and test the import there first. If the import meets your expectations, you may swap environments or run it again
> on the production.
> production and test the import there first. If the import meets your expectations, you may swap environments or run it
> again on the production.
## How it works
## Installation
Install the package globally:
`npm i xeno-test -g`
# Import
> When importing, it is essential that `Content types`, `Taxonomies` and `Workflows` match the input data. Any

@@ -23,3 +29,3 @@ > inconsistency in data such as referencing inexistent taxonomy term, incorrect element type and other problems will

### How are content items imported?
## How are content items imported?

@@ -30,3 +36,3 @@ The Migration Toolkit creates content items that are not present in target project. If the content item exists in target

### How are assets imported?
## How are assets imported?

@@ -37,30 +43,4 @@ If asset exists in target project, the asset upload will be skipped and not uploaded at all. If asset doesn't exist, the

## Installation
## Import Configuration
Install the package globally:
`npm i xeno-test -g`
## Use via CLI
### Export Configuration
| Config | Value |
| ------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **action** | Action. Available options: `import` & `export` **(required)** |
| **environmentId**   | Id of the Kontent.ai project **(required)** |
| **adapter**         | Adapter used to export data into a known format that can be used for importing data. Available options: `kontentAi` **(required for export)** |
| **format**          | Format used to export data. Available options: `csv`, `json` and `jsonJoined` **(required for export & import)** |
| secureApiKey        | API key for Secure Access. `isSecure` also needs to be enabled |
| previewApiKey       | API key for preview. `isPreview` also needs to be enabled |
| isSecure            | When set to `true`, the Secure API will be used to export data. Defaults to `false` |
| isPreview           | When set to `true`, the Preview API will be used to export data. Defaults to `false` |
| exportAssets        | When set to `true`, binary data of assets is exported. Defaults to `false` |
| replaceInvalidLinks | RTE may contain links to invalid items. You won't be able to re-import such items due to a validation error. By setting this to `true`, the Migration Toolkit will automatically remove these links. Defaults to `false` |
| itemsFilename       | Name of the items file that will be created in the folder where the script is run |
| assetsFilename      | Name of the assets file that will be created in the folder where the script is run. Only zip is supported. |
| baseUrl             | Custom base URL for Kontent.ai API calls |
### Import Configuration
| Config | Value |

@@ -71,3 +51,3 @@ | -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- |

| **managementApiKey** | Management API key **(required)** |
| **format** | Format used to export data. Available options: `csv`, `json` and `jsonJoined` **(required for export & import)** |
| **format** | Format used to export data. Available options: `csv`, `json` and `jsonJoined` **(required)** |
| itemsFilename | Name of the items file that will be used to parse items |

@@ -78,67 +58,20 @@ | assetsFilename | Name of the items file that will be used to parse assets (only zip supported) |

### Import CLI samples
## Import CLI samples
Import from zip:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.zip --format=json`
`kontent-ai-migration-toolkit --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.zip --format=json`
Import from zip with assets:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.zip --format=json --assetsFilename=assets.zip`
`kontent-ai-migration-toolkit --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.zip --format=json --assetsFilename=assets.zip`
Import from json file:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.json --format=json`
`kontent-ai-migration-toolkit --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.json --format=json`
Import from csv file:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.csv --format=csv`
`kontent-ai-migration-toolkit --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.csv --format=csv`
### Export CLI samples
Export from Kontent.ai environment as json without assets:
`kdm --action=export --adapter=kontentAi --environmentId=xxx --format=json`
Export from Kontent.ai environment as csv without assets:
`kdm --action=export --adapter=kontentAi --environmentId=xxx --format=csv`
Export from Kontent.ai environment as single json file with assets:
`kdm --action=export --adapter=kontentAi --environmentId=xxx --format=jsonJoined --exportAssets=true`
### CLI help
To see available commands use:
`kdm --help`
### Use with config file
Create a `json` configuration file in the folder where you run the script (e.g. `export-config.json`).
```json
{
"environmentId": "x",
"secureApiKey": "y",
"adapter": "kontentAi",
"isSecure": true,
"isPreview": false,
"exportAssets": true,
"action": "export",
"baseUrl": null,
"format": "json"
}
```
To execute your action run:
`kdm --config=export-config.json`
## Use in code
See https://github.com/Enngage/kontent-ai-migration-toolkit/tree/main/samples for examples of how to run this library in
code rather than via the command line.
## Importing in code

@@ -168,2 +101,36 @@

# Export
## Export Configuration
| Config | Value |
| ------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **action** | Action. Available options: `import` & `export` **(required)** |
| **environmentId**   | Id of the Kontent.ai project **(required)** |
| **adapter**         | Adapter used to export data into a known format that can be used for importing data. Available options: `kontentAi` **(required)** |
| **format**          | Format used to export data. Available options: `csv`, `json` and `jsonJoined` **(required)** |
| secureApiKey        | API key for Secure Access. `isSecure` also needs to be enabled |
| previewApiKey       | API key for preview. `isPreview` also needs to be enabled |
| isSecure            | When set to `true`, the Secure API will be used to export data. Defaults to `false` |
| isPreview           | When set to `true`, the Preview API will be used to export data. Defaults to `false` |
| exportAssets        | When set to `true`, binary data of assets is exported. Defaults to `false` |
| replaceInvalidLinks | RTE may contain links to invalid items. You won't be able to re-import such items due to a validation error. By setting this to `true`, the Migration Toolkit will automatically remove these links. Defaults to `false` |
| itemsFilename       | Name of the items file that will be created in the folder where the script is run |
| assetsFilename      | Name of the assets file that will be created in the folder where the script is run. Only zip is supported. |
| baseUrl             | Custom base URL for Kontent.ai API calls |
## Export CLI samples
Export from Kontent.ai environment as json without assets:
`kontent-ai-migration-toolkit --action=export --adapter=kontentAi --environmentId=xxx --format=json`
Export from Kontent.ai environment as csv without assets:
`kontent-ai-migration-toolkit --action=export --adapter=kontentAi --environmentId=xxx --format=csv`
Export from Kontent.ai environment as single json file with assets:
`kontent-ai-migration-toolkit --action=export --adapter=kontentAi --environmentId=xxx --format=jsonJoined --exportAssets=true`
## Exporting in code

@@ -206,2 +173,35 @@

## CLI help
To see available commands use:
`kontent-ai-migration-toolkit --help`
## Use with config file
Create a `json` configuration file in the folder where you run the script (e.g. `export-config.json`).
```json
{
"environmentId": "x",
"secureApiKey": "y",
"adapter": "kontentAi",
"isSecure": true,
"isPreview": false,
"exportAssets": true,
"action": "export",
"baseUrl": null,
"format": "json"
}
```
To execute your action run:
`kontent-ai-migration-toolkit --config=export-config.json`
## Code samples for import & export
See https://github.com/Enngage/kontent-ai-migration-toolkit/tree/main/samples for examples of how to run this library in
code rather than via the command line.
## Output / Input formats

@@ -221,3 +221,3 @@

### Limitations
## Limitations

@@ -233,5 +233,5 @@ Export is made with `Delivery API` for speed and efficiency, but this brings some limitations:

### FAQ
## FAQ
#### I'm getting `Header overflow` exception
### I'm getting `Header overflow` exception

@@ -238,0 +238,0 @@ The Node.js limits the maximum header size of HTTP requests. In some cases it may be required for you to increase this
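A common way to raise this limit is Node's `--max-http-header-size` flag, which can be passed to the globally installed CLI via `NODE_OPTIONS` (the value below is illustrative):

`NODE_OPTIONS=--max-http-header-size=32768 kontent-ai-migration-toolkit --action=export --adapter=kontentAi --environmentId=xxx --format=json`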
