xeno-test - npm package version comparison

Comparing version 0.0.16 to 0.0.17

dist/es2022/export/adapters/kontent-ai/helpers/export-assets-item.helper.d.ts

dist/es2022/core/core.models.d.ts

@@ -6,7 +6,8 @@ import { AssetModels, ContentItemModels, ElementContracts, ElementModels, LanguageVariantModels } from '@kontent-ai/management-sdk';

export interface ICliFileConfig {
environmentId: string;
adapter?: ExportAdapter;
environmentId?: string;
previewApiKey?: string;
secureApiKey?: string;
managementApiKey?: string;
format?: ProcessingFormat;
format: ProcessingFormat;
isPreview: boolean;

@@ -16,3 +17,2 @@ isSecure: boolean;

replaceInvalidLinks: boolean;
importAssets: boolean;
action: CliAction;

@@ -26,2 +26,3 @@ itemsFilename?: string;

export type CliAction = 'export' | 'import';
export type ExportAdapter = 'kontentAi';
export type ItemType = 'component' | 'contentItem' | 'languageVariant' | 'asset' | 'binaryFile' | 'zipFile';

@@ -28,0 +29,0 @@ export type ActionType = 'skip' | 'save' | 'readFs' | 'writeFs' | 'download' | 'zip' | 'read' | 'archive' | 'upsert' | 'upload' | 'publish' | 'changeWorkflowStep' | 'createNewVersion' | 'fetch' | 'create' | 'publish' | 'unArchive' | 'extractBinaryData' | 'update';
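
The hunk above reshapes ICliFileConfig: an `adapter` field is added, `environmentId` loses its required modifier while `format` gains one, and `importAssets` is dropped. A minimal sketch of a config object under the new shape, assuming ICliFileConfig is re-exported from the package root; all values are placeholders and only fields touched by this diff are shown, so the object is typed as Partial rather than the full interface:

import type { ICliFileConfig } from 'xeno-test';

const config: Partial<ICliFileConfig> = {
    action: 'export',
    adapter: 'kontentAi',              // new in 0.0.17
    environmentId: '<environment-id>', // optional at the type level, but required by the kontentAi adapter
    managementApiKey: '<management-api-key>',
    format: 'json',                    // no longer optional; 'json' assumed to be a ProcessingFormat member
    isPreview: false,
    isSecure: false,
    replaceInvalidLinks: true
};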

@@ -9,3 +9,9 @@ import { ContentItemElementsIndexer, IContentItem, IContentType } from '@kontent-ai/delivery-sdk';

private readonly elementsBuilder;
/**
* Elements transform used by Kontent.ai export adapter
*/
private readonly exportTransforms;
/**
* General import transforms used to prepare parsed element values for Management API
*/
private readonly importTransforms;

@@ -12,0 +18,0 @@ transformToExportElementValue(data: {

@@ -11,2 +11,5 @@ import { validate } from 'uuid';

elementsBuilder = new LanguageVariantElementsBuilder();
/**
* Elements transform used by Kontent.ai export adapter
*/
exportTransforms = {

@@ -46,2 +49,5 @@ text: (data) => data.element.value,

};
/**
* General import transforms used to prepare parsed element values for Management API
*/
importTransforms = {

@@ -48,0 +54,0 @@ guidelines: (data) => {

@@ -0,4 +1,12 @@

/// <reference types="node" resolution-mode="require"/>
import { IRetryStrategyOptions } from '@kontent-ai/core-sdk';
import { IPackageMetadata } from '../core/index.js';
import { IContentItem, IContentType, IDeliveryClient, ILanguage } from '@kontent-ai/delivery-sdk';
import { ContentElementType, IExportTransformConfig } from '../core/index.js';
import { IContentItem, IDeliveryClient } from '@kontent-ai/delivery-sdk';
export interface IExportAdapter {
exportAsync(): Promise<IExportAdapterResult>;
}
export interface IExportAdapterResult {
items: IExportContentItem[];
assets: IExportAsset[];
}
export interface IExportFilter {

@@ -22,14 +30,5 @@ /**

customItemsExport?: (client: IDeliveryClient) => Promise<IContentItem[]>;
transformConfig?: IExportTransformConfig;
}
export interface IExportData {
contentItems: IContentItem[];
contentTypes: IContentType[];
languages: ILanguage[];
assets: IExportedAsset[];
}
export interface IExportAllResult {
metadata: IPackageMetadata;
data: IExportData;
}
export interface IExportedAsset {
export interface IExportAsset {
url: string;

@@ -39,2 +38,21 @@ extension: string;

filename: string;
binaryData: Buffer | Blob;
}
export interface IExportElement {
value: string | undefined | string[];
type: ContentElementType;
codename: string;
}
export interface IExportContentItem {
system: {
codename: string;
id: string;
name: string;
language: string;
type: string;
collection: string;
last_modified?: string;
workflow_step?: string;
};
elements: IExportElement[];
}
export * from './export.models.js';
export * from './export.service.js';
export * from './adapters/kontent-ai/kontent-ai-export-adapter.class.js';
export * from './export.models.js';
export * from './export.service.js';
export * from './adapters/kontent-ai/kontent-ai-export-adapter.class.js';
//# sourceMappingURL=index.js.map
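
The new export models replace IExportData, IExportAllResult and IExportedAsset with an adapter contract: an IExportAdapter returns an IExportAdapterResult made of IExportContentItem and IExportAsset records. A hedged sketch of a custom adapter against that contract, assuming the types are re-exported from the package root; the returned data is invented for illustration, and a real adapter would fetch items and assets from its own backend:

import type { IExportAdapter, IExportAdapterResult } from 'xeno-test';

class InMemoryExportAdapter implements IExportAdapter {
    async exportAsync(): Promise<IExportAdapterResult> {
        return {
            items: [
                {
                    system: {
                        codename: 'sample_article',
                        id: '11111111-1111-1111-1111-111111111111',
                        name: 'Sample article',
                        language: 'en',
                        type: 'article',
                        collection: 'default',
                        last_modified: new Date().toISOString(),
                        workflow_step: 'published'
                    },
                    // 'text' is assumed to be a ContentElementType member
                    elements: [{ codename: 'title', type: 'text', value: 'Hello world' }]
                }
            ],
            // each asset would carry url, filename, extension and binaryData
            // (plus any IExportAsset fields not shown in this hunk); omitted here
            assets: []
        };
    }
}
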
import { IParsedAsset } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { IExportedAsset } from '../../export/index.js';
import { IExportAsset } from '../../export/index.js';
import { BaseAssetProcessorService } from '../base-asset-processor.service.js';

@@ -8,3 +8,3 @@ export declare class AssetCsvProcessorService extends BaseAssetProcessorService {

readonly name: string;
transformAssetsAsync(assets: IExportedAsset[]): Promise<IFileData[]>;
transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
parseAssetsAsync(text: string): Promise<IParsedAsset[]>;

@@ -11,0 +11,0 @@ private geCsvParser;

@@ -1,2 +0,2 @@

import { IExportedAsset } from '../../export/index.js';
import { IExportAsset } from '../../export/index.js';
import { IParsedAsset } from '../../import/index.js';

@@ -7,4 +7,4 @@ import { IFileData } from '../file-processor.models.js';

readonly name: string;
transformAssetsAsync(assets: IExportedAsset[]): Promise<IFileData[]>;
transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
parseAssetsAsync(text: string): Promise<IParsedAsset[]>;
}

@@ -1,2 +0,2 @@

import { IExportedAsset } from '../export/index.js';
import { IExportAsset } from '../export/index.js';
import { IParsedAsset } from '../import/index.js';

@@ -6,5 +6,5 @@ import { IFileData, IAssetFormatService } from './file-processor.models.js';

abstract name: string;
abstract transformAssetsAsync(assets: IExportedAsset[]): Promise<IFileData[]>;
abstract transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
abstract parseAssetsAsync(text: string): Promise<IParsedAsset[]>;
protected getSystemAssetFields(): string[];
}

@@ -1,8 +0,7 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IImportContentType, IImportContentTypeElement, IParsedContentItem } from '../import/index.js';
import { IItemFormatService, IFileData } from './file-processor.models.js';
import { IExportTransformConfig } from 'lib/index.js';
import { IExportContentItem } from '../export/index.js';
export declare abstract class BaseItemProcessorService implements IItemFormatService {
abstract name: string;
abstract transformContentItemsAsync(types: IContentType[], items: IContentItem[], config: IExportTransformConfig): Promise<IFileData[]>;
abstract transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
abstract parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;

@@ -9,0 +8,0 @@ protected getSystemContentItemFields(): string[];

/// <reference types="node" resolution-mode="require"/>
import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IExportedAsset } from '../export/index.js';
import { IExportContentItem, IExportAsset } from '../export/index.js';
import { IImportContentType, IParsedAsset, IParsedContentItem } from '../import/index.js';
import { IExportTransformConfig } from '../core/index.js';
/**

@@ -14,3 +12,3 @@ * Browser is currently not generally upported as we depend on few node.js specific APIs

name: string;
transformContentItemsAsync(types: IContentType[], items: IContentItem[], config: IExportTransformConfig): Promise<IFileData[]>;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;

@@ -20,3 +18,3 @@ }

name: string;
transformAssetsAsync(assets: IExportedAsset[]): Promise<IFileData[]>;
transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
parseAssetsAsync(text: string): Promise<IParsedAsset[]>;

@@ -32,3 +30,2 @@ }

export interface IFileProcessorConfig {
delayBetweenAssetDownloadRequestsMs?: number;
}

@@ -35,0 +32,0 @@ export interface IFileData {

/// <reference types="node" resolution-mode="require"/>
import { IExportAllResult } from '../export/index.js';
import { IExportAdapterResult } from '../export/index.js';
import { IImportSource, IImportContentType } from '../import/index.js';

@@ -7,17 +7,29 @@ import { IFileProcessorConfig, IItemFormatService, ZipCompressionLevel, IAssetFormatService } from './file-processor.models.js';

export declare class FileProcessorService {
private readonly delayBetweenAssetRequestsMs;
private readonly zipContext;
private readonly metadataName;
private readonly binaryFilesFolderName;
private readonly httpService;
private readonly itemCsvProcessorService;
private readonly itemJsonProcessorService;
constructor(config?: IFileProcessorConfig);
extractZipAsync(itemsFile: Buffer, assetsFile: Buffer | undefined, types: IImportContentType[], config: {
itemFormatService: IItemFormatService;
assetFormatService: IAssetFormatService;
parseZipAsync(data: {
items?: {
file: Buffer;
formatService: IItemFormatService;
};
assets?: {
file: Buffer;
formatService: IAssetFormatService;
};
types: IImportContentType[];
}): Promise<IImportSource>;
extractCsvFileAsync(file: Buffer, types: IImportContentType[]): Promise<IImportSource>;
extractJsonFileAsync(file: Buffer, types: IImportContentType[]): Promise<IImportSource>;
createItemsZipAsync(exportData: IExportAllResult, config: {
parseFileAsync(data: {
items?: {
file: Buffer;
formatService: IItemFormatService;
};
assets?: {
file: Buffer;
formatService: IAssetFormatService;
};
types: IImportContentType[];
}): Promise<IImportSource>;
createItemsZipAsync(exportData: IExportAdapterResult, config: {
transformConfig: IExportTransformConfig;

@@ -27,3 +39,3 @@ itemFormatService: IItemFormatService;

}): Promise<any>;
createAssetsZipAsync(exportData: IExportAllResult, config: {
createAssetsZipAsync(exportData: IExportAdapterResult, config: {
assetFormatService: IAssetFormatService;

@@ -40,3 +52,2 @@ compressionLevel?: ZipCompressionLevel;

private parseMetadataFromZipAsync;
private getBinaryDataFromUrlAsync;
}
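
The declaration above replaces extractZipAsync, extractCsvFileAsync and extractJsonFileAsync with parseZipAsync and parseFileAsync, each taking a single data object in which items and assets are optional and carry their own format service. A usage sketch of the new parseZipAsync shape, assuming FileProcessorService and the JSON format services are re-exported from the package root; the file names are placeholders and the empty types array stands in for the IImportContentType[] that would normally come from the import service:

import { promises as fs } from 'fs';
import { FileProcessorService, ItemJsonProcessorService, AssetJsonProcessorService } from 'xeno-test';

async function parseExportedZips(): Promise<void> {
    const fileProcessor = new FileProcessorService();

    const importSource = await fileProcessor.parseZipAsync({
        items: {
            file: await fs.readFile('items-export.zip'),
            formatService: new ItemJsonProcessorService()
        },
        assets: {
            file: await fs.readFile('assets-export.zip'),
            formatService: new AssetJsonProcessorService()
        },
        types: [] // IImportContentType[] obtained elsewhere, e.g. from the import service
    });

    // IImportSource exposes the parsed items and assets under importData
    console.log(importSource.importData.items.length, importSource.importData.assets.length);
}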

@@ -1,37 +0,32 @@

import { HttpService } from '@kontent-ai/core-sdk';
import colors from 'colors';
import JSZip from 'jszip';
import { Blob } from 'buffer';
import { defaultRetryStrategy, formatBytes, getExtension, sleepAsync } from '../core/index.js';
import { formatBytes, getExtension } from '../core/index.js';
import mime from 'mime';
import { ItemCsvProcessorService } from './item-formats/item-csv-processor.service.js';
import { ItemJsonProcessorService } from './item-formats/item-json-processor.service.js';
import { logDebug, logProcessingDebug } from '../core/log-helper.js';
export class FileProcessorService {
delayBetweenAssetRequestsMs;
zipContext = 'node.js';
metadataName = '_metadata.json';
binaryFilesFolderName = 'files';
httpService = new HttpService();
itemCsvProcessorService = new ItemCsvProcessorService();
itemJsonProcessorService = new ItemJsonProcessorService();
constructor(config) {
this.delayBetweenAssetRequestsMs = config?.delayBetweenAssetDownloadRequestsMs ?? 10;
}
async extractZipAsync(itemsFile, assetsFile, types, config) {
logDebug({
type: 'info',
message: 'Loading items zip file'
});
const itemsZipFile = await JSZip.loadAsync(itemsFile, {});
logDebug({
type: 'info',
message: 'Parsing items zip data'
});
constructor(config) { }
async parseZipAsync(data) {
let itemsZipFile = undefined;
let assetsZipFile = undefined;
if (assetsFile) {
if (data.items) {
logDebug({
type: 'info',
message: 'Loading items zip file'
});
itemsZipFile = await JSZip.loadAsync(data.items.file, {});
logDebug({
type: 'info',
message: 'Parsing items zip data'
});
}
if (data.assets) {
logDebug({
type: 'info',
message: 'Loading assets zip file'
});
assetsZipFile = await JSZip.loadAsync(assetsFile, {});
assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
logDebug({

@@ -44,8 +39,10 @@ type: 'info',

importData: {
items: await this.parseContentItemsFromZipAsync(itemsZipFile, types, config.itemFormatService),
assets: assetsZipFile
? await this.parseAssetsFromFileAsync(assetsZipFile, config.assetFormatService)
items: itemsZipFile && data.items
? await this.parseContentItemsFromZipAsync(itemsZipFile, data.types, data.items.formatService)
: [],
assets: assetsZipFile && data.assets
? await this.parseAssetsFromFileAsync(assetsZipFile, data.assets?.formatService)
: []
},
metadata: await this.parseMetadataFromZipAsync(itemsZipFile, this.metadataName)
metadata: itemsZipFile ? await this.parseMetadataFromZipAsync(itemsZipFile, this.metadataName) : undefined
};

@@ -58,11 +55,24 @@ logDebug({

}
async extractCsvFileAsync(file, types) {
logDebug({
type: 'info',
message: 'Reading CSV file'
});
async parseFileAsync(data) {
let parsedItems = [];
let parsedAssets = [];
if (data.items) {
logDebug({
type: 'info',
message: `Parsing items file with '${colors.yellow(data.items.formatService.name)}' `
});
parsedItems = await data.items.formatService.parseContentItemsAsync(data.items.file.toString(), data.types);
}
if (data.assets) {
logDebug({
type: 'info',
message: `Parsing assets file with '${colors.yellow(data.assets.formatService.name)}' `
});
const assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
parsedAssets = await this.parseAssetsFromFileAsync(assetsZipFile, data.assets.formatService);
}
const result = {
importData: {
items: await this.itemCsvProcessorService.parseContentItemsAsync(file.toString(), types),
assets: []
items: parsedItems,
assets: parsedAssets
},

@@ -73,24 +83,6 @@ metadata: undefined

type: 'info',
message: 'Reading CSV file completed'
message: `Parsing completed. Parsed '${colors.yellow(result.importData.items.length.toString())}' items and '${colors.yellow(result.importData.assets.length.toString())}' assets`
});
return result;
}
async extractJsonFileAsync(file, types) {
logDebug({
type: 'info',
message: 'Reading JSON file'
});
const result = {
importData: {
items: await this.itemJsonProcessorService.parseContentItemsAsync(file.toString(), types),
assets: []
},
metadata: undefined
};
logDebug({
type: 'info',
message: 'Reading JSON file completed'
});
return result;
}
async createItemsZipAsync(exportData, config) {

@@ -101,12 +93,6 @@ const zip = new JSZip();

type: 'info',
message: `Adding metadata to zip`,
partA: this.metadataName
});
zip.file(this.metadataName, JSON.stringify(exportData.metadata));
logDebug({
type: 'info',
message: `Transforming '${exportData.data.contentItems.length.toString()}' content items`,
message: `Transforming '${exportData.items.length.toString()}' content items`,
partA: config.itemFormatService?.name
});
const transformedLanguageVariantsFileData = await this.transformLanguageVariantsAsync(exportData.data.contentTypes, exportData.data.contentItems, config.itemFormatService, config.transformConfig);
const transformedLanguageVariantsFileData = await this.transformLanguageVariantsAsync(exportData.items, config.itemFormatService);
for (const fileInfo of transformedLanguageVariantsFileData) {

@@ -148,15 +134,9 @@ logDebug({

}
logDebug({
type: 'info',
message: `Storing metadata`,
partA: this.metadataName
});
zip.file(this.metadataName, JSON.stringify(exportData.metadata));
if (exportData.data.assets.length) {
if (exportData.assets.length) {
logDebug({
type: 'info',
message: `Transforming '${exportData.data.assets.length.toString()}' asssets`,
message: `Transforming '${exportData.assets.length.toString()}' asssets`,
partA: config.assetFormatService?.name
});
const transformedAssetsFileData = await config.assetFormatService.transformAssetsAsync(exportData.data.assets);
const transformedAssetsFileData = await config.assetFormatService.transformAssetsAsync(exportData.assets);
for (const fileInfo of transformedAssetsFileData) {

@@ -170,27 +150,14 @@ logDebug({

}
logDebug({
type: 'info',
message: `Preparing to download '${exportData.data.assets.length.toString()}' assets`
});
let assetIndex = 1;
for (const asset of exportData.data.assets) {
for (const asset of exportData.assets) {
const assetFilename = `${asset.assetId}.${asset.extension}`; // use id as filename to prevent filename conflicts
logProcessingDebug({
index: assetIndex,
totalCount: exportData.data.assets.length,
itemType: 'binaryFile',
title: asset.url
totalCount: exportData.assets.length,
itemType: 'zipFile',
title: `'${assetFilename}'`
});
const assetFilename = `${asset.assetId}.${asset.extension}`; // use id as filename to prevent filename conflicts
const binaryDataResponse = await this.getBinaryDataFromUrlAsync(asset.url);
logDebug({
type: 'download',
message: `Binary file downloaded`,
partA: asset.url,
partB: formatBytes(binaryDataResponse.contentLength)
});
filesFolder.file(assetFilename, binaryDataResponse.data, {
filesFolder.file(assetFilename, asset.binaryData, {
binary: true
});
// create artificial delay between request to prevent network errors
await sleepAsync(this.delayBetweenAssetRequestsMs);
assetIndex++;

@@ -239,4 +206,4 @@ }

}
async transformLanguageVariantsAsync(types, items, formatService, config) {
return await formatService.transformContentItemsAsync(types, items, config);
async transformLanguageVariantsAsync(items, formatService) {
return await formatService.transformContentItemsAsync(items);
}

@@ -358,16 +325,3 @@ async parseAssetsFromFileAsync(zip, assetFormatService) {

}
async getBinaryDataFromUrlAsync(url) {
// temp fix for Kontent.ai Repository not validating url
url = url.replace('#', '%23');
const response = await this.httpService.getAsync({
url
}, {
responseType: 'arraybuffer',
retryStrategy: defaultRetryStrategy
});
const contentLengthHeader = response.headers.find((m) => m.header.toLowerCase() === 'content-length');
const contentLength = contentLengthHeader ? +contentLengthHeader.value : 0;
return { data: response.data, contentLength: contentLength };
}
}
//# sourceMappingURL=file-processor.service.js.map

@@ -1,11 +0,11 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IImportContentType, IParsedContentItem } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { IExportTransformConfig } from '../../core/index.js';
import { IExportContentItem } from '../../export/index.js';
export declare class ItemCsvProcessorService extends BaseItemProcessorService {
private readonly csvDelimiter;
readonly name: string;
transformContentItemsAsync(types: IContentType[], items: IContentItem[], config: IExportTransformConfig): Promise<IFileData[]>;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
private getTypeWrappers;
private mapToCsvItem;

@@ -12,0 +12,0 @@ private geCsvParser;

@@ -5,13 +5,13 @@ import { parse } from 'csv-parse';

import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { translationHelper } from '../../core/index.js';
export class ItemCsvProcessorService extends BaseItemProcessorService {
csvDelimiter = ',';
name = 'csv';
async transformContentItemsAsync(types, items, config) {
async transformContentItemsAsync(items) {
const fileData = [];
const csvItems = items.map((item) => this.mapToCsvItem(item, types, items, config));
for (const contentType of types) {
const contentItemsOfType = csvItems.filter((m) => m.type === contentType.system.codename);
const filename = `${contentType.system.codename}.csv`;
const fieldsToStore = this.getFieldsToExport(contentType);
for (const typeWrapper of this.getTypeWrappers(items)) {
const contentItemsOfType = items
.filter((m) => m.system.type === typeWrapper.typeCodename)
.map((item) => this.mapToCsvItem(item, typeWrapper));
const filename = `${typeWrapper.typeCodename}.csv`;
const fieldsToStore = this.getFieldsToExport(typeWrapper);
const languageVariantsStream = new Readable();

@@ -85,3 +85,21 @@ languageVariantsStream.push(JSON.stringify(contentItemsOfType));

}
mapToCsvItem(item, types, items, config) {
getTypeWrappers(items) {
const typeWrappers = [];
for (const item of items) {
const existingFileData = typeWrappers.find((m) => m.typeCodename === item.system.type);
if (!existingFileData) {
typeWrappers.push({
typeCodename: item.system.type,
items: [item],
// this is with the assumption that all items of same type have the same elements defined
elementCodenames: item.elements.map((m) => m.codename)
});
}
else {
existingFileData.items.push(item);
}
}
return typeWrappers;
}
mapToCsvItem(item, typeWrapper) {
const csvItem = {

@@ -92,22 +110,10 @@ type: item.system.type,

language: item.system.language,
last_modified: item.system.lastModified,
last_modified: item.system.last_modified,
name: item.system.name,
workflow_step: item.system.workflowStep ?? undefined
workflow_step: item.system.workflow_step
};
const type = types.find((m) => m.system.codename === item.system.type);
if (!type) {
throw Error(`Missing content type '${item.system.type}' for item '${item.system.codename}'`);
}
for (const element of type.elements) {
if (element.codename) {
const variantElement = item.elements[element.codename];
if (variantElement) {
csvItem[element.codename] = translationHelper.transformToExportElementValue({
config: config,
element: variantElement,
item: item,
items: items,
types: types
});
}
for (const elementCodename of typeWrapper.elementCodenames) {
const itemElement = item.elements.find((m) => m.codename === elementCodename);
if (itemElement) {
csvItem[elementCodename] = itemElement.value;
}

@@ -123,3 +129,3 @@ }

}
getFieldsToExport(contentType) {
getFieldsToExport(typeWrapper) {
return [

@@ -133,13 +139,6 @@ ...this.getSystemContentItemFields().map((m) => {

}),
...contentType.elements
.filter((m) => {
if (m.codename?.length) {
return true;
}
return false;
})
.map((m) => {
...typeWrapper.elementCodenames.map((m) => {
const field = {
label: m.codename ?? '',
value: m.codename ?? ''
label: m,
value: m
};

@@ -146,0 +145,0 @@ return field;

@@ -1,11 +0,10 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IImportContentType, IParsedContentItem } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { IExportTransformConfig } from '../../core/index.js';
import { IExportContentItem } from '../../export/index.js';
export declare class ItemJsonJoinedProcessorService extends BaseItemProcessorService {
private readonly jsonProcessorService;
readonly name: string;
transformContentItemsAsync(types: IContentType[], items: IContentItem[], config: IExportTransformConfig): Promise<IFileData[]>;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
}

@@ -6,4 +6,4 @@ import { BaseItemProcessorService } from '../base-item-processor.service.js';

name = 'json';
async transformContentItemsAsync(types, items, config) {
const multiFileJsonFileData = await this.jsonProcessorService.transformContentItemsAsync(types, items, config);
async transformContentItemsAsync(items) {
const multiFileJsonFileData = await this.jsonProcessorService.transformContentItemsAsync(items);
const allJsonItems = multiFileJsonFileData

@@ -10,0 +10,0 @@ .map((m) => {

@@ -1,11 +0,11 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IImportContentType, IParsedContentItem } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { IExportTransformConfig } from '../../core/index.js';
import { IExportContentItem } from '../../export/index.js';
export declare class ItemJsonProcessorService extends BaseItemProcessorService {
readonly name: string;
transformContentItemsAsync(types: IContentType[], items: IContentItem[], config: IExportTransformConfig): Promise<IFileData[]>;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;
private getTypeWrappers;
private mapToJsonItem;
}
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { translationHelper } from '../../core/index.js';
export class ItemJsonProcessorService extends BaseItemProcessorService {
name = 'json';
async transformContentItemsAsync(types, items, config) {
async transformContentItemsAsync(items) {
const fileData = [];
for (const contentType of types) {
const contentItemsOfType = items.filter((m) => m.system.type === contentType.system.codename);
const filename = `${contentType.system.codename}.json`;
const jsonItems = contentItemsOfType.map((m) => this.mapToJsonItem(m, types, items, config));
const typeWrappers = this.getTypeWrappers(items);
for (const typeWrapper of typeWrappers) {
const filename = `${typeWrapper.typeCodename}.json`;
const contentItemsOfType = items.filter((m) => m.system.type === typeWrapper.typeCodename);
const jsonItems = contentItemsOfType.map((m) => this.mapToJsonItem(m));
fileData.push({

@@ -48,22 +48,23 @@ data: jsonItems.length ? JSON.stringify(jsonItems) : '[]',

}
mapToJsonItem(item, types, items, config) {
const elements = {};
const type = types.find((m) => m.system.codename === item.system.type);
if (!type) {
throw Error(`Missing content type '${item.system.type}' for item '${item.system.codename}'`);
}
for (const element of type.elements) {
if (element.codename) {
const variantElement = item.elements[element.codename];
if (variantElement) {
elements[element.codename] = translationHelper.transformToExportElementValue({
config: config,
element: variantElement,
item: item,
items: items,
types: types
});
}
getTypeWrappers(items) {
const typeWrappers = [];
for (const item of items) {
const existingFileData = typeWrappers.find((m) => m.typeCodename === item.system.type);
if (!existingFileData) {
typeWrappers.push({
typeCodename: item.system.type,
items: [item]
});
}
else {
existingFileData.items.push(item);
}
}
return typeWrappers;
}
mapToJsonItem(item) {
const jsonElements = {};
for (const element of item.elements) {
jsonElements[element.codename] = element.value;
}
const jsonItem = {

@@ -74,8 +75,8 @@ system: {

language: item.system.language,
last_modified: item.system.lastModified,
last_modified: item.system.last_modified,
name: item.system.name,
type: item.system.type,
workflow_step: item.system.workflowStep ?? undefined
workflow_step: item.system.workflow_step
},
elements: elements
elements: jsonElements
};

@@ -82,0 +83,0 @@ return jsonItem;

@@ -9,3 +9,3 @@ import { logDebug, logProcessingDebug } from '../../core/log-helper.js';

const categorizedParsedItems = parsedItemsHelper.categorizeParsedItems(importContentItems);
logAction('skip', 'contentItem', {
logAction('skip', 'languageVariant', {
title: `Skipping '${categorizedParsedItems.componentItems.length}' because they represent component items`

@@ -12,0 +12,0 @@ });

import { defaultWorkflowCodename, logAction } from '../../core/index.js';
import { logDebug } from '../../core/log-helper.js';
export class ImportWorkflowHelper {

@@ -25,7 +24,3 @@ getWorkflowForGivenStepById(workflowId, workflows) {

if (!this.doesWorkflowStepExist(workflowStepCodename, workflows)) {
logDebug({
type: 'warning',
message: `Could not change workflow step for item '${importContentItem.system.codename}' (${importContentItem.system.name}) because step '${workflowStepCodename}' does not exist in target project. Skipping workflow change.`
});
return;
throw Error(`Could not change workflow step for item '${importContentItem.system.codename}' (${importContentItem.system.name}) because step with codename '${workflowStepCodename}' does not exist in target project.`);
}

@@ -32,0 +27,0 @@ if (this.doesWorkflowStepCodenameRepresentPublishedStep(workflowStepCodename, workflows)) {

@@ -1,2 +0,2 @@

import { IParsedContentItem } from 'lib/index.js';
import { IParsedContentItem } from "../import.models.js";
export interface ICategorizedParsedItems {

@@ -3,0 +3,0 @@ componentItems: IParsedContentItem[];

@@ -5,10 +5,13 @@ #!/usr/bin/env node

import { getExtension, extractErrorMessage } from '../../core/index.js';
import { ExportService } from '../../export/index.js';
import { ImportService } from '../../import/index.js';
import { ItemCsvProcessorService, FileProcessorService, ItemJsonProcessorService, ItemJsonJoinedProcessorService, AssetCsvProcessorService, AssetJsonProcessorService } from '../../file-processor/index.js';
import { FileService } from '../file/file.service.js';
import { ItemCsvProcessorService, ItemJsonProcessorService, ItemJsonJoinedProcessorService, AssetCsvProcessorService, AssetJsonProcessorService } from '../../file-processor/index.js';
import { logDebug } from '../../core/log-helper.js';
import { ExportToolkit, ImportToolkit } from '../../toolkit/index.js';
import { KontentAiExportAdapter } from '../../export/index.js';
const argv = yargs(process.argv.slice(2))
.example('csvm --action=export --format=csv|json --apiKey=xxx --environmentId=xxx', 'Creates zip export of Kontent.ai content data')
.example('csvm --action=import --apiKey=xxx --environmentId=xxx --filename=exportFile', 'Read given zip file and recreates data in Kontent.ai environment')
.alias('a', 'action')
.describe('a', 'Type of action to execute')
.alias('ad', 'adapter')
.describe('ad', 'Adapter used to export data')
.alias('e', 'environmentId')

@@ -26,4 +29,2 @@ .describe('e', 'environmentId')

.describe('ea', 'Disables / enables asset export')
.alias('ia', 'importAssets')
.describe('ia', 'Disables / enables asset import')
.alias('is', 'isSecure')

@@ -50,41 +51,52 @@ .describe('is', 'Disables / enables use of Secure API for export')

const exportAsync = async (config) => {
const exportService = new ExportService({
environmentId: config.environmentId,
managementApiKey: config.managementApiKey,
previewApiKey: config.previewApiKey,
secureApiKey: config.secureApiKey,
isPreview: config.isPreview,
isSecure: config.isSecure,
baseUrl: config.baseUrl,
exportTypes: config.exportTypes,
exportAssets: config.exportAssets
});
const fileService = new FileService();
const fileProcessorService = new FileProcessorService();
const response = await exportService.exportAllAsync();
const itemsZipFileData = await fileProcessorService.createItemsZipAsync(response, {
itemFormatService: getItemFormatService(config.format),
transformConfig: {
richTextConfig: {
replaceInvalidLinks: config.replaceInvalidLinks
}
if (!config.adapter) {
throw Error(`Missing 'adapter' config`);
}
let adapter;
if (config.adapter === 'kontentAi') {
if (!config.environmentId) {
throw Error(`Invalid environment id`);
}
});
const itemsFilename = config.itemsFilename ?? getDefaultExportFilename('items');
await fileService.writeFileAsync(getZipFilename(itemsFilename), itemsZipFileData);
if (config.assetsFilename && config.exportAssets) {
const assetsZipFileData = await fileProcessorService.createAssetsZipAsync(response, {
assetFormatService: getAssetFormatService(config.format)
adapter = new KontentAiExportAdapter({
environmentId: config.environmentId,
managementApiKey: config.managementApiKey,
previewApiKey: config.previewApiKey,
secureApiKey: config.secureApiKey,
isPreview: config.isPreview,
isSecure: config.isSecure,
baseUrl: config.baseUrl,
exportTypes: config.exportTypes,
exportAssets: config.exportAssets
});
await fileService.writeFileAsync(getZipFilename(config.assetsFilename), assetsZipFileData);
}
else {
throw Error(`Missing adapter '${config.adapter}'`);
}
const exportToolkit = new ExportToolkit({ adapter });
const itemsFilename = config.itemsFilename ?? getDefaultExportFilename('items');
const assetsFilename = config.assetsFilename ?? getDefaultExportFilename('assets');
await exportToolkit.exportAsync({
items: {
filename: itemsFilename,
formatService: getItemFormatService(config.format)
},
assets: assetsFilename
? {
filename: assetsFilename,
formatService: getAssetFormatService(config.format)
}
: undefined
});
logDebug({ type: 'info', message: `Completed` });
};
const importAsync = async (config) => {
const fileProcessorService = new FileProcessorService();
if (!config.managementApiKey) {
throw Error(`Missing 'managementApiKey' configuration option`);
}
const fileService = new FileService();
const importService = new ImportService({
if (!config.environmentId) {
throw Error(`Missing 'environmentId' configuration option`);
}
const itemsFilename = config.itemsFilename;
const assetsFilename = config.assetsFilename;
const importToolkit = new ImportToolkit({
skipFailedItems: config.skipFailedItems,

@@ -101,43 +113,26 @@ baseUrl: config.baseUrl,

}
}
},
items: itemsFilename
? {
filename: itemsFilename,
formatService: getItemFormatService(config.format)
}
: undefined,
assets: assetsFilename
? {
filename: assetsFilename,
formatService: getAssetFormatService(config.format)
}
: undefined
});
const itemsFilename = config.itemsFilename ?? getDefaultExportFilename('items');
const contentTypes = await importService.getImportContentTypesAsync();
const itemsFile = await fileService.loadFileAsync(itemsFilename);
const itemsFileExtension = getExtension(itemsFilename);
let assetsFile = undefined;
if (config.importAssets) {
const assetsFilename = config.assetsFilename ?? getDefaultExportFilename('assets');
logDebug({
type: 'info',
message: `Importing assets from file`,
partA: assetsFilename
});
assetsFile = await fileService.loadFileAsync(assetsFilename);
const assetsFileExtension = getExtension(assetsFilename);
if (!assetsFileExtension?.endsWith('zip')) {
throw Error(`Assets required zip folder. Received '${config.assetsFilename}'`);
}
const itemsFileExtension = getExtension(itemsFilename ?? '')?.toLowerCase();
if (itemsFileExtension?.endsWith('zip'.toLowerCase())) {
await importToolkit.importFromZipAsync();
}
else {
logDebug({
type: 'info',
message: `Skipping assets import`
});
else if (itemsFileExtension?.endsWith('csv'.toLowerCase())) {
await importToolkit.importFromFileAsync();
}
if (itemsFileExtension?.endsWith('zip')) {
const data = await fileProcessorService.extractZipAsync(itemsFile, assetsFile, contentTypes, {
assetFormatService: getAssetFormatService(config.format),
itemFormatService: getItemFormatService(config.format)
});
await importService.importFromSourceAsync(data);
else if (itemsFileExtension?.endsWith('json'.toLowerCase())) {
await importToolkit.importFromFileAsync();
}
else if (itemsFileExtension?.endsWith('csv')) {
const data = await fileProcessorService.extractCsvFileAsync(itemsFile, contentTypes);
await importService.importFromSourceAsync(data);
}
else if (itemsFileExtension?.endsWith('json')) {
const data = await fileProcessorService.extractJsonFileAsync(itemsFile, contentTypes);
await importService.importFromSourceAsync(data);
}
else {

@@ -170,3 +165,5 @@ throw Error(`Unsupported file type '${itemsFileExtension}'`);

const format = getOptionalArgumentValue(resolvedArgs, 'format');
const adapter = getOptionalArgumentValue(resolvedArgs, 'adapter');
let mappedFormat = 'csv';
let mappedAdapter = 'kontentAi';
if (format?.toLowerCase() === 'csv'.toLowerCase()) {

@@ -186,2 +183,10 @@ mappedFormat = 'csv';

}
if (adapter?.toLowerCase() === 'kontentAi'.toLowerCase()) {
mappedAdapter = 'kontentAi';
}
else {
if (action === 'export') {
throw Error(`Unsupported adapter '${adapter}'`);
}
}
const config = {

@@ -203,4 +208,4 @@ action: action,

isSecure: getBooleanArgumentvalue(resolvedArgs, 'isSecure', false),
importAssets: getBooleanArgumentvalue(resolvedArgs, 'importAssets', false),
replaceInvalidLinks: getBooleanArgumentvalue(resolvedArgs, 'replaceInvalidLinks', false),
adapter: mappedAdapter,
format: mappedFormat

@@ -257,8 +262,2 @@ };

}
function getZipFilename(filename) {
if (filename.toLowerCase()?.endsWith('.zip')) {
return filename;
}
return `${filename}.zip`;
}
//# sourceMappingURL=app.js.map
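
The CLI's exportAsync now delegates to ExportToolkit with a KontentAiExportAdapter instead of calling ExportService and FileProcessorService directly. A hedged sketch of the same flow as a library call, assuming ExportToolkit, KontentAiExportAdapter and the JSON format services are re-exported from the package root; keys and filenames are placeholders, and the optional adapter settings shown in the CLI (managementApiKey, previewApiKey, secureApiKey, baseUrl, exportTypes) are omitted:

import {
    ExportToolkit,
    KontentAiExportAdapter,
    ItemJsonProcessorService,
    AssetJsonProcessorService
} from 'xeno-test';

async function runExport(): Promise<void> {
    const adapter = new KontentAiExportAdapter({
        environmentId: '<environment-id>',
        isPreview: false,
        isSecure: false,
        exportAssets: true
    });

    const exportToolkit = new ExportToolkit({ adapter });

    await exportToolkit.exportAsync({
        items: { filename: 'items-export.zip', formatService: new ItemJsonProcessorService() },
        assets: { filename: 'assets-export.zip', formatService: new AssetJsonProcessorService() }
    });
}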

@@ -13,7 +13,8 @@ import {

export interface ICliFileConfig {
environmentId: string;
adapter?: ExportAdapter;
environmentId?: string;
previewApiKey?: string;
secureApiKey?: string;
managementApiKey?: string;
format?: ProcessingFormat;
format: ProcessingFormat;
isPreview: boolean;

@@ -23,3 +24,2 @@ isSecure: boolean;

replaceInvalidLinks: boolean;
importAssets: boolean;
action: CliAction;

@@ -34,2 +34,3 @@ itemsFilename?: string;

export type CliAction = 'export' | 'import';
export type ExportAdapter = 'kontentAi';
export type ItemType = 'component' | 'contentItem' | 'languageVariant' | 'asset' | 'binaryFile' | 'zipFile';

@@ -76,3 +77,2 @@

}[];
languageVariants: {

@@ -79,0 +79,0 @@ original: any;

@@ -35,2 +35,5 @@ import {

/**
* Elements transform used by Kontent.ai export adapter
*/
private readonly exportTransforms: Readonly<Record<ElementType, ExportTransformFunc>> = {

@@ -71,2 +74,5 @@ text: (data) => data.element.value,

/**
* General import transforms used to prepare parsed element values for Management API
*/
private readonly importTransforms: Readonly<Record<ContentElementType, ImportTransformFunc>> = {

@@ -167,3 +173,3 @@ guidelines: (data) => {

},
value: data.value ? +data.value : null
value: data.value ? +data.value : null
});

@@ -170,0 +176,0 @@ },

import { IRetryStrategyOptions } from '@kontent-ai/core-sdk';
import { IPackageMetadata } from '../core/index.js';
import { IContentItem, IContentType, IDeliveryClient, ILanguage } from '@kontent-ai/delivery-sdk';
import { ContentElementType, IExportTransformConfig } from '../core/index.js';
import { IContentItem, IDeliveryClient } from '@kontent-ai/delivery-sdk';
export interface IExportAdapter {
exportAsync(): Promise<IExportAdapterResult>;
}
export interface IExportAdapterResult {
items: IExportContentItem[];
assets: IExportAsset[];
}
export interface IExportFilter {

@@ -25,17 +34,6 @@ /**

customItemsExport?: (client: IDeliveryClient) => Promise<IContentItem[]>;
transformConfig?: IExportTransformConfig;
}
export interface IExportData {
contentItems: IContentItem[];
contentTypes: IContentType[];
languages: ILanguage[];
assets: IExportedAsset[];
}
export interface IExportAllResult {
metadata: IPackageMetadata;
data: IExportData;
}
export interface IExportedAsset {
export interface IExportAsset {
url: string;

@@ -45,2 +43,23 @@ extension: string;

filename: string;
binaryData: Buffer | Blob;
}
export interface IExportElement {
value: string | undefined | string[];
type: ContentElementType;
codename: string;
}
export interface IExportContentItem {
system: {
codename: string;
id: string;
name: string;
language: string;
type: string;
collection: string;
last_modified?: string;
workflow_step?: string;
};
elements: IExportElement[];
}
export * from './export.models.js';
export * from './export.service.js';
export * from './adapters/kontent-ai/kontent-ai-export-adapter.class.js';

@@ -6,3 +6,3 @@ import { parse } from 'csv-parse';

import { IFileData } from '../file-processor.models.js';
import { IExportedAsset } from '../../export/index.js';
import { IExportAsset } from '../../export/index.js';
import { BaseAssetProcessorService } from '../base-asset-processor.service.js';

@@ -14,3 +14,3 @@

async transformAssetsAsync(assets: IExportedAsset[]): Promise<IFileData[]> {
async transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]> {
const asssetFiels: FieldInfo<string>[] = this.getAssetFields();

@@ -17,0 +17,0 @@ const stream = new Readable();

@@ -1,2 +0,2 @@

import { IExportedAsset } from '../../export/index.js';
import { IExportAsset } from '../../export/index.js';
import { IParsedAsset } from '../../import/index.js';

@@ -9,3 +9,3 @@ import { IFileData } from '../file-processor.models.js';

async transformAssetsAsync(assets: IExportedAsset[]): Promise<IFileData[]> {
async transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]> {
return [

@@ -12,0 +12,0 @@ {

@@ -1,2 +0,2 @@

import { IExportedAsset } from '../export/index.js';
import { IExportAsset } from '../export/index.js';
import { IParsedAsset } from '../import/index.js';

@@ -7,3 +7,3 @@ import { IFileData, IAssetFormatService } from './file-processor.models.js';

abstract name: string;
abstract transformAssetsAsync(assets: IExportedAsset[]): Promise<IFileData[]>;
abstract transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
abstract parseAssetsAsync(text: string): Promise<IParsedAsset[]>;

@@ -10,0 +10,0 @@

@@ -1,13 +0,8 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IImportContentType, IImportContentTypeElement, IParsedContentItem } from '../import/index.js';
import { IItemFormatService, IFileData } from './file-processor.models.js';
import { IExportTransformConfig } from 'lib/index.js';
import { IExportContentItem } from '../export/index.js';
export abstract class BaseItemProcessorService implements IItemFormatService {
abstract name: string;
abstract transformContentItemsAsync(
types: IContentType[],
items: IContentItem[],
config: IExportTransformConfig
): Promise<IFileData[]>;
abstract transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
abstract parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;

@@ -14,0 +9,0 @@

@@ -1,5 +0,3 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IExportedAsset } from '../export/index.js';
import { IExportContentItem, IExportAsset } from '../export/index.js';
import { IImportContentType, IParsedAsset, IParsedContentItem } from '../import/index.js';
import { IExportTransformConfig } from '../core/index.js';

@@ -18,7 +16,3 @@ /**

transformContentItemsAsync(
types: IContentType[],
items: IContentItem[],
config: IExportTransformConfig
): Promise<IFileData[]>;
transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]>;
parseContentItemsAsync(text: string, types: IImportContentType[]): Promise<IParsedContentItem[]>;

@@ -30,3 +24,3 @@ }

transformAssetsAsync(assets: IExportedAsset[]): Promise<IFileData[]>;
transformAssetsAsync(assets: IExportAsset[]): Promise<IFileData[]>;
parseAssetsAsync(text: string): Promise<IParsedAsset[]>;

@@ -44,3 +38,2 @@ }

export interface IFileProcessorConfig {
delayBetweenAssetDownloadRequestsMs?: number;
}

@@ -47,0 +40,0 @@

@@ -1,6 +0,6 @@

import { HttpService } from '@kontent-ai/core-sdk';
import colors from 'colors';
import JSZip from 'jszip';
import { Blob } from 'buffer';
import { IExportAllResult } from '../export/index.js';
import { IExportAdapterResult, IExportContentItem } from '../export/index.js';
import { IImportAsset, IParsedContentItem, IImportSource, IParsedAsset, IImportContentType } from '../import/index.js';

@@ -16,18 +16,7 @@ import {

} from './file-processor.models.js';
import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import {
IExportTransformConfig,
IPackageMetadata,
defaultRetryStrategy,
formatBytes,
getExtension,
sleepAsync
} from '../core/index.js';
import { IExportTransformConfig, IPackageMetadata, formatBytes, getExtension } from '../core/index.js';
import mime from 'mime';
import { ItemCsvProcessorService } from './item-formats/item-csv-processor.service.js';
import { ItemJsonProcessorService } from './item-formats/item-json-processor.service.js';
import { logDebug, logProcessingDebug } from '../core/log-helper.js';
export class FileProcessorService {
private readonly delayBetweenAssetRequestsMs: number;
private readonly zipContext: ZipContext = 'node.js';

@@ -38,28 +27,31 @@

private readonly httpService: HttpService = new HttpService();
private readonly itemCsvProcessorService: ItemCsvProcessorService = new ItemCsvProcessorService();
private readonly itemJsonProcessorService: ItemJsonProcessorService = new ItemJsonProcessorService();
constructor(config?: IFileProcessorConfig) {}
constructor(config?: IFileProcessorConfig) {
this.delayBetweenAssetRequestsMs = config?.delayBetweenAssetDownloadRequestsMs ?? 10;
}
async parseZipAsync(data: {
items?: {
file: Buffer;
formatService: IItemFormatService;
};
assets?: {
file: Buffer;
formatService: IAssetFormatService;
};
types: IImportContentType[];
}): Promise<IImportSource> {
let itemsZipFile: JSZip | undefined = undefined;
let assetsZipFile: JSZip | undefined = undefined;
async extractZipAsync(
itemsFile: Buffer,
assetsFile: Buffer | undefined,
types: IImportContentType[],
config: { itemFormatService: IItemFormatService; assetFormatService: IAssetFormatService }
): Promise<IImportSource> {
logDebug({
type: 'info',
message: 'Loading items zip file'
});
const itemsZipFile = await JSZip.loadAsync(itemsFile, {});
logDebug({
type: 'info',
message: 'Parsing items zip data'
});
if (data.items) {
logDebug({
type: 'info',
message: 'Loading items zip file'
});
itemsZipFile = await JSZip.loadAsync(data.items.file, {});
logDebug({
type: 'info',
message: 'Parsing items zip data'
});
}
let assetsZipFile: JSZip | undefined = undefined;
if (assetsFile) {
if (data.assets) {
logDebug({

@@ -69,4 +61,3 @@ type: 'info',

});
assetsZipFile = await JSZip.loadAsync(assetsFile, {});
assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
logDebug({

@@ -80,8 +71,12 @@ type: 'info',

importData: {
items: await this.parseContentItemsFromZipAsync(itemsZipFile, types, config.itemFormatService),
assets: assetsZipFile
? await this.parseAssetsFromFileAsync(assetsZipFile, config.assetFormatService)
: []
items:
itemsZipFile && data.items
? await this.parseContentItemsFromZipAsync(itemsZipFile, data.types, data.items.formatService)
: [],
assets:
assetsZipFile && data.assets
? await this.parseAssetsFromFileAsync(assetsZipFile, data.assets?.formatService)
: []
},
metadata: await this.parseMetadataFromZipAsync(itemsZipFile, this.metadataName)
metadata: itemsZipFile ? await this.parseMetadataFromZipAsync(itemsZipFile, this.metadataName) : undefined
};

@@ -97,34 +92,39 @@

async extractCsvFileAsync(file: Buffer, types: IImportContentType[]): Promise<IImportSource> {
logDebug({
type: 'info',
message: 'Reading CSV file'
});
const result: IImportSource = {
importData: {
items: await this.itemCsvProcessorService.parseContentItemsAsync(file.toString(), types),
assets: []
},
metadata: undefined
async parseFileAsync(data: {
items?: {
file: Buffer;
formatService: IItemFormatService;
};
assets?: {
file: Buffer;
formatService: IAssetFormatService;
};
types: IImportContentType[];
}): Promise<IImportSource> {
let parsedItems: IParsedContentItem[] = [];
let parsedAssets: IImportAsset[] = [];
logDebug({
type: 'info',
message: 'Reading CSV file completed'
});
if (data.items) {
logDebug({
type: 'info',
message: `Parsing items file with '${colors.yellow(data.items.formatService.name)}' `
});
return result;
}
parsedItems = await data.items.formatService.parseContentItemsAsync(data.items.file.toString(), data.types);
}
async extractJsonFileAsync(file: Buffer, types: IImportContentType[]): Promise<IImportSource> {
logDebug({
type: 'info',
message: 'Reading JSON file'
});
if (data.assets) {
logDebug({
type: 'info',
message: `Parsing assets file with '${colors.yellow(data.assets.formatService.name)}' `
});
const assetsZipFile = await JSZip.loadAsync(data.assets.file, {});
parsedAssets = await this.parseAssetsFromFileAsync(assetsZipFile, data.assets.formatService);
}
const result: IImportSource = {
importData: {
items: await this.itemJsonProcessorService.parseContentItemsAsync(file.toString(), types),
assets: []
items: parsedItems,
assets: parsedAssets
},

@@ -136,3 +136,5 @@ metadata: undefined

type: 'info',
message: 'Reading JSON file completed'
message: `Parsing completed. Parsed '${colors.yellow(
result.importData.items.length.toString()
)}' items and '${colors.yellow(result.importData.assets.length.toString())}' assets`
});

@@ -144,3 +146,3 @@

async createItemsZipAsync(
exportData: IExportAllResult,
exportData: IExportAdapterResult,
config: {

@@ -157,10 +159,3 @@ transformConfig: IExportTransformConfig;

type: 'info',
message: `Adding metadata to zip`,
partA: this.metadataName
});
zip.file(this.metadataName, JSON.stringify(exportData.metadata));
logDebug({
type: 'info',
message: `Transforming '${exportData.data.contentItems.length.toString()}' content items`,
message: `Transforming '${exportData.items.length.toString()}' content items`,
partA: config.itemFormatService?.name

@@ -170,6 +165,4 @@ });

const transformedLanguageVariantsFileData = await this.transformLanguageVariantsAsync(
exportData.data.contentTypes,
exportData.data.contentItems,
config.itemFormatService,
config.transformConfig
exportData.items,
config.itemFormatService
);

@@ -214,3 +207,3 @@

async createAssetsZipAsync(
exportData: IExportAllResult,
exportData: IExportAdapterResult,
config: {

@@ -230,19 +223,10 @@ assetFormatService: IAssetFormatService;

logDebug({
type: 'info',
message: `Storing metadata`,
partA: this.metadataName
});
zip.file(this.metadataName, JSON.stringify(exportData.metadata));
if (exportData.data.assets.length) {
if (exportData.assets.length) {
logDebug({
type: 'info',
message: `Transforming '${exportData.data.assets.length.toString()}' asssets`,
message: `Transforming '${exportData.assets.length.toString()}' asssets`,
partA: config.assetFormatService?.name
});
const transformedAssetsFileData = await config.assetFormatService.transformAssetsAsync(
exportData.data.assets
);
const transformedAssetsFileData = await config.assetFormatService.transformAssetsAsync(exportData.assets);

@@ -258,33 +242,16 @@ for (const fileInfo of transformedAssetsFileData) {

logDebug({
type: 'info',
message: `Preparing to download '${exportData.data.assets.length.toString()}' assets`
});
let assetIndex: number = 1;
for (const asset of exportData.assets) {
const assetFilename = `${asset.assetId}.${asset.extension}`; // use id as filename to prevent filename conflicts
let assetIndex: number = 1;
for (const asset of exportData.data.assets) {
logProcessingDebug({
index: assetIndex,
totalCount: exportData.data.assets.length,
itemType: 'binaryFile',
title: asset.url
totalCount: exportData.assets.length,
itemType: 'zipFile',
title: `'${assetFilename}'`
});
const assetFilename = `${asset.assetId}.${asset.extension}`; // use id as filename to prevent filename conflicts
const binaryDataResponse = await this.getBinaryDataFromUrlAsync(asset.url);
logDebug({
type: 'download',
message: `Binary file downloaded`,
partA: asset.url,
partB: formatBytes(binaryDataResponse.contentLength)
});
filesFolder.file(assetFilename, binaryDataResponse.data, {
filesFolder.file(assetFilename, asset.binaryData, {
binary: true
});
// create artificial delay between request to prevent network errors
await sleepAsync(this.delayBetweenAssetRequestsMs);
assetIndex++;

@@ -341,8 +308,6 @@ }

private async transformLanguageVariantsAsync(
types: IContentType[],
items: IContentItem[],
formatService: IItemFormatService,
config: IExportTransformConfig
items: IExportContentItem[],
formatService: IItemFormatService
): Promise<IFileData[]> {
return await formatService.transformContentItemsAsync(types, items, config);
return await formatService.transformContentItemsAsync(items);
}

@@ -512,22 +477,2 @@

}
private async getBinaryDataFromUrlAsync(url: string): Promise<{ data: any; contentLength: number }> {
// temp fix for Kontent.ai Repository not validating url
url = url.replace('#', '%23');
const response = await this.httpService.getAsync(
{
url
},
{
responseType: 'arraybuffer',
retryStrategy: defaultRetryStrategy
}
);
const contentLengthHeader = response.headers.find((m) => m.header.toLowerCase() === 'content-length');
const contentLength = contentLengthHeader ? +contentLengthHeader.value : 0;
return { data: response.data, contentLength: contentLength };
}
}

@@ -1,2 +0,1 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { parse } from 'csv-parse';

@@ -8,3 +7,3 @@ import { AsyncParser, FieldInfo } from 'json2csv';

import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { IExportTransformConfig, translationHelper } from '../../core/index.js';
import { IExportContentItem } from '../../export/index.js';

@@ -17,3 +16,3 @@ interface ICsvItem {

collection: string;
last_modified: string;
last_modified?: string;
workflow_step?: string;

@@ -23,2 +22,8 @@ [propertyName: string]: string | undefined | string[];

interface ITypeWrapper {
typeCodename: string;
items: IExportContentItem[];
elementCodenames: string[];
}
export class ItemCsvProcessorService extends BaseItemProcessorService {

@@ -28,15 +33,12 @@ private readonly csvDelimiter: string = ',';

async transformContentItemsAsync(
types: IContentType[],
items: IContentItem[],
config: IExportTransformConfig
): Promise<IFileData[]> {
async transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]> {
const fileData: IFileData[] = [];
const csvItems: ICsvItem[] = items.map((item) => this.mapToCsvItem(item, types, items, config));
for (const contentType of types) {
const contentItemsOfType = csvItems.filter((m) => m.type === contentType.system.codename);
const filename: string = `${contentType.system.codename}.csv`;
for (const typeWrapper of this.getTypeWrappers(items)) {
const contentItemsOfType = items
.filter((m) => m.system.type === typeWrapper.typeCodename)
.map((item) => this.mapToCsvItem(item, typeWrapper));
const filename: string = `${typeWrapper.typeCodename}.csv`;
const fieldsToStore: FieldInfo<string>[] = this.getFieldsToExport(contentType);
const fieldsToStore: FieldInfo<string>[] = this.getFieldsToExport(typeWrapper);
const languageVariantsStream = new Readable();

@@ -122,8 +124,24 @@ languageVariantsStream.push(JSON.stringify(contentItemsOfType));

private mapToCsvItem(
item: IContentItem,
types: IContentType[],
items: IContentItem[],
config: IExportTransformConfig
): ICsvItem {
private getTypeWrappers(items: IExportContentItem[]): ITypeWrapper[] {
const typeWrappers: ITypeWrapper[] = [];
for (const item of items) {
const existingFileData = typeWrappers.find((m) => m.typeCodename === item.system.type);
if (!existingFileData) {
typeWrappers.push({
typeCodename: item.system.type,
items: [item],
// this is with the assumption that all items of same type have the same elements defined
elementCodenames: item.elements.map((m) => m.codename)
});
} else {
existingFileData.items.push(item);
}
}
return typeWrappers;
}
private mapToCsvItem(item: IExportContentItem, typeWrapper: ITypeWrapper): ICsvItem {
const csvItem: ICsvItem = {

@@ -134,26 +152,12 @@ type: item.system.type,

language: item.system.language,
last_modified: item.system.lastModified,
last_modified: item.system.last_modified,
name: item.system.name,
workflow_step: item.system.workflowStep ?? undefined
workflow_step: item.system.workflow_step
};
const type = types.find((m) => m.system.codename === item.system.type);
for (const elementCodename of typeWrapper.elementCodenames) {
const itemElement = item.elements.find((m) => m.codename === elementCodename);
if (!type) {
throw Error(`Missing content type '${item.system.type}' for item '${item.system.codename}'`);
}
for (const element of type.elements) {
if (element.codename) {
const variantElement = item.elements[element.codename];
if (variantElement) {
csvItem[element.codename] = translationHelper.transformToExportElementValue({
config: config,
element: variantElement,
item: item,
items: items,
types: types
});
}
if (itemElement) {
csvItem[elementCodename] = itemElement.value;
}

@@ -172,3 +176,3 @@ }

private getFieldsToExport(contentType: IContentType): FieldInfo<string>[] {
private getFieldsToExport(typeWrapper: ITypeWrapper): FieldInfo<string>[] {
return [

@@ -183,19 +187,12 @@ ...this.getSystemContentItemFields().map((m) => {

}),
...contentType.elements
.filter((m) => {
if (m.codename?.length) {
return true;
}
return false;
})
.map((m) => {
const field: FieldInfo<string> = {
label: m.codename ?? '',
value: m.codename ?? ''
};
...typeWrapper.elementCodenames.map((m) => {
const field: FieldInfo<string> = {
label: m,
value: m
};
return field;
})
return field;
})
];
}
}
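
The rewritten CSV processor no longer receives IContentType[]; it derives per-type groups from the items themselves via getTypeWrappers, on the stated assumption that all items of the same type expose the same element codenames. A small sketch of the grouping this produces, with invented items and the service assumed to be re-exported from the package root:

import { ItemCsvProcessorService } from 'xeno-test';
import type { IExportContentItem } from 'xeno-test';

async function csvGroupingSketch(): Promise<void> {
    const items: IExportContentItem[] = [
        {
            system: { codename: 'first_article', id: '1', name: 'First', language: 'en', type: 'article', collection: 'default' },
            elements: [{ codename: 'title', type: 'text', value: 'First title' }]
        },
        {
            system: { codename: 'second_article', id: '2', name: 'Second', language: 'en', type: 'article', collection: 'default' },
            elements: [{ codename: 'title', type: 'text', value: 'Second title' }]
        }
    ];

    // Both items share system.type 'article', so they should land in a single
    // 'article.csv' file whose columns come from the first item's element codenames.
    const files = await new ItemCsvProcessorService().transformContentItemsAsync(items);
    console.log(files.length); // expected: 1
}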

@@ -1,2 +0,1 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IImportContentType, IParsedContentItem } from '../../import/index.js';

@@ -6,4 +5,8 @@ import { IFileData } from '../file-processor.models.js';

import { ItemJsonProcessorService } from './item-json-processor.service.js';
import { IExportTransformConfig } from '../../core/index.js';
import { IExportContentItem } from '../../export/index.js';
interface IJsonElements {
[elementCodename: string]: string | string[] | undefined;
}
interface IJsonItem {

@@ -19,5 +22,3 @@ system: {

};
elements: {
[elementCodename: string]: string | string[] | undefined;
};
elements: IJsonElements;
}

@@ -29,8 +30,4 @@

public readonly name: string = 'json';
async transformContentItemsAsync(
types: IContentType[],
items: IContentItem[],
config: IExportTransformConfig
): Promise<IFileData[]> {
const multiFileJsonFileData = await this.jsonProcessorService.transformContentItemsAsync(types, items, config);
async transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]> {
const multiFileJsonFileData = await this.jsonProcessorService.transformContentItemsAsync(items);

@@ -37,0 +34,0 @@ const allJsonItems: IJsonItem[] = multiFileJsonFileData

@@ -1,7 +0,10 @@

import { IContentItem, IContentType } from '@kontent-ai/delivery-sdk';
import { IImportContentType, IParsedContentItem, IParsedElement } from '../../import/index.js';
import { IFileData } from '../file-processor.models.js';
import { BaseItemProcessorService } from '../base-item-processor.service.js';
import { IExportTransformConfig, translationHelper } from '../../core/index.js';
import { IExportContentItem } from '../../export/index.js';
interface IJsonElements {
[elementCodename: string]: string | string[] | undefined;
}
interface IJsonItem {

@@ -14,24 +17,24 @@ system: {

collection: string;
last_modified: string;
last_modified?: string;
workflow_step?: string;
};
elements: {
[elementCodename: string]: string | string[] | undefined;
};
elements: IJsonElements;
}
interface ITypeWrapper {
typeCodename: string;
items: IExportContentItem[];
}
export class ItemJsonProcessorService extends BaseItemProcessorService {
public readonly name: string = 'json';
async transformContentItemsAsync(
types: IContentType[],
items: IContentItem[],
config: IExportTransformConfig
): Promise<IFileData[]> {
async transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]> {
const fileData: IFileData[] = [];
for (const contentType of types) {
const contentItemsOfType = items.filter((m) => m.system.type === contentType.system.codename);
const typeWrappers: ITypeWrapper[] = this.getTypeWrappers(items);
for (const typeWrapper of typeWrappers) {
const filename: string = `${typeWrapper.typeCodename}.json`;
const contentItemsOfType = items.filter((m) => m.system.type === typeWrapper.typeCodename);
const jsonItems: IJsonItem[] = contentItemsOfType.map((m) => this.mapToJsonItem(m));
const filename: string = `${contentType.system.codename}.json`;
const jsonItems: IJsonItem[] = contentItemsOfType.map((m) => this.mapToJsonItem(m, types, items, config));
fileData.push({

@@ -83,30 +86,26 @@ data: jsonItems.length ? JSON.stringify(jsonItems) : '[]',

private mapToJsonItem(
item: IContentItem,
types: IContentType[],
items: IContentItem[],
config: IExportTransformConfig
): IJsonItem {
const elements: { [elementCodename: string]: string | string[] | undefined } = {};
private getTypeWrappers(items: IExportContentItem[]): ITypeWrapper[] {
const typeWrappers: ITypeWrapper[] = [];
const type = types.find((m) => m.system.codename === item.system.type);
for (const item of items) {
const existingFileData = typeWrappers.find((m) => m.typeCodename === item.system.type);
if (!type) {
throw Error(`Missing content type '${item.system.type}' for item '${item.system.codename}'`);
if (!existingFileData) {
typeWrappers.push({
typeCodename: item.system.type,
items: [item]
});
} else {
existingFileData.items.push(item);
}
}
for (const element of type.elements) {
if (element.codename) {
const variantElement = item.elements[element.codename];
return typeWrappers;
}
if (variantElement) {
elements[element.codename] = translationHelper.transformToExportElementValue({
config: config,
element: variantElement,
item: item,
items: items,
types: types
});
}
}
private mapToJsonItem(item: IExportContentItem): IJsonItem {
const jsonElements: IJsonElements = {};
for (const element of item.elements) {
jsonElements[element.codename] = element.value;
}

@@ -119,8 +118,8 @@

language: item.system.language,
last_modified: item.system.lastModified,
last_modified: item.system.last_modified,
name: item.system.name,
type: item.system.type,
workflow_step: item.system.workflowStep ?? undefined
workflow_step: item.system.workflow_step
},
elements: elements
elements: jsonElements
};

@@ -127,0 +126,0 @@ return jsonItem;

@@ -31,3 +31,3 @@ import {

logAction('skip', 'contentItem', {
logAction('skip', 'languageVariant', {
title: `Skipping '${categorizedParsedItems.componentItems.length}' because they represent component items`

@@ -34,0 +34,0 @@ });

import { ManagementClient, WorkflowModels } from '@kontent-ai/management-sdk';
import { IParsedContentItem } from '../import.models.js';
import { defaultWorkflowCodename, logAction } from '../../core/index.js';
import { logDebug } from '../../core/log-helper.js';

@@ -36,7 +35,5 @@ export class ImportWorkflowHelper {

if (!this.doesWorkflowStepExist(workflowStepCodename, workflows)) {
logDebug({
type: 'warning',
message: `Could not change workflow step for item '${importContentItem.system.codename}' (${importContentItem.system.name}) because step '${workflowStepCodename}' does not exist in target project. Skipping workflow change.`
});
return;
throw Error(
`Could not change workflow step for item '${importContentItem.system.codename}' (${importContentItem.system.name}) because step with codename '${workflowStepCodename}' does not exist in target project.`
);
}

@@ -43,0 +40,0 @@

@@ -1,2 +0,2 @@

import { IParsedContentItem } from 'lib/index.js';
import { IParsedContentItem } from "../import.models.js";

@@ -3,0 +3,0 @@ export interface ICategorizedParsedItems {

@@ -5,9 +5,6 @@ #!/usr/bin/env node

import { ICliFileConfig, CliAction, getExtension, extractErrorMessage } from '../../core/index.js';
import { ExportService } from '../../export/index.js';
import { ImportService } from '../../import/index.js';
import { ICliFileConfig, CliAction, getExtension, extractErrorMessage, ExportAdapter } from '../../core/index.js';
import {
ItemCsvProcessorService,
ProcessingFormat,
FileProcessorService,
IItemFormatService,

@@ -20,4 +17,5 @@ ItemJsonProcessorService,

} from '../../file-processor/index.js';
import { FileService } from '../file/file.service.js';
import { logDebug } from '../../core/log-helper.js';
import { ExportToolkit, ImportToolkit } from '../../toolkit/index.js';
import { IExportAdapter, KontentAiExportAdapter } from '../../export/index.js';

@@ -35,2 +33,6 @@ type Args = { [key: string]: string | unknown };

)
.alias('a', 'action')
.describe('a', 'Type of action to execute')
.alias('ad', 'adapter')
.describe('ad', 'Adapter used to export data')
.alias('e', 'environmentId')

@@ -48,4 +50,2 @@ .describe('e', 'environmentId')

.describe('ea', 'Disables / enables asset export')
.alias('ia', 'importAssets')
.describe('ia', 'Disables / enables asset import')
.alias('is', 'isSecure')

@@ -76,38 +76,46 @@ .describe('is', 'Disables / enables use of Secure API for export')

const exportAsync = async (config: ICliFileConfig) => {
const exportService = new ExportService({
environmentId: config.environmentId,
managementApiKey: config.managementApiKey,
previewApiKey: config.previewApiKey,
secureApiKey: config.secureApiKey,
isPreview: config.isPreview,
isSecure: config.isSecure,
baseUrl: config.baseUrl,
exportTypes: config.exportTypes,
exportAssets: config.exportAssets
});
if (!config.adapter) {
throw Error(`Missing 'adapter' config`);
}
const fileService = new FileService();
const fileProcessorService = new FileProcessorService();
let adapter: IExportAdapter | undefined;
const response = await exportService.exportAllAsync();
const itemsZipFileData = await fileProcessorService.createItemsZipAsync(response, {
itemFormatService: getItemFormatService(config.format),
transformConfig: {
richTextConfig: {
replaceInvalidLinks: config.replaceInvalidLinks
}
if (config.adapter === 'kontentAi') {
if (!config.environmentId) {
throw Error(`Invalid environment id`);
}
});
const itemsFilename = config.itemsFilename ?? getDefaultExportFilename('items');
await fileService.writeFileAsync(getZipFilename(itemsFilename), itemsZipFileData);
if (config.assetsFilename && config.exportAssets) {
const assetsZipFileData = await fileProcessorService.createAssetsZipAsync(response, {
assetFormatService: getAssetFormatService(config.format)
adapter = new KontentAiExportAdapter({
environmentId: config.environmentId,
managementApiKey: config.managementApiKey,
previewApiKey: config.previewApiKey,
secureApiKey: config.secureApiKey,
isPreview: config.isPreview,
isSecure: config.isSecure,
baseUrl: config.baseUrl,
exportTypes: config.exportTypes,
exportAssets: config.exportAssets
});
await fileService.writeFileAsync(getZipFilename(config.assetsFilename), assetsZipFileData);
} else {
throw Error(`Missing adapter '${config.adapter}'`);
}
const exportToolkit = new ExportToolkit({ adapter });
const itemsFilename = config.itemsFilename ?? getDefaultExportFilename('items');
const assetsFilename = config.assetsFilename ?? getDefaultExportFilename('assets');
await exportToolkit.exportAsync({
items: {
filename: itemsFilename,
formatService: getItemFormatService(config.format)
},
assets: assetsFilename
? {
filename: assetsFilename,
formatService: getAssetFormatService(config.format)
}
: undefined
});
logDebug({ type: 'info', message: `Completed` });

@@ -117,10 +125,13 @@ };

const importAsync = async (config: ICliFileConfig) => {
const fileProcessorService = new FileProcessorService();
if (!config.managementApiKey) {
throw Error(`Missing 'managementApiKey' configuration option`);
}
if (!config.environmentId) {
throw Error(`Missing 'environmentId' configuration option`);
}
const fileService = new FileService();
const importService = new ImportService({
const itemsFilename: string | undefined = config.itemsFilename;
const assetsFilename: string | undefined = config.assetsFilename;
const importToolkit = new ImportToolkit({
skipFailedItems: config.skipFailedItems,

@@ -137,47 +148,26 @@ baseUrl: config.baseUrl,

}
}
},
items: itemsFilename
? {
filename: itemsFilename,
formatService: getItemFormatService(config.format)
}
: undefined,
assets: assetsFilename
? {
filename: assetsFilename,
formatService: getAssetFormatService(config.format)
}
: undefined
});
const itemsFilename = config.itemsFilename ?? getDefaultExportFilename('items');
const itemsFileExtension = getExtension(itemsFilename ?? '')?.toLowerCase();
const contentTypes = await importService.getImportContentTypesAsync();
const itemsFile = await fileService.loadFileAsync(itemsFilename);
const itemsFileExtension = getExtension(itemsFilename);
let assetsFile: Buffer | undefined = undefined;
if (config.importAssets) {
const assetsFilename = config.assetsFilename ?? getDefaultExportFilename('assets');
logDebug({
type: 'info',
message: `Importing assets from file`,
partA: assetsFilename
});
assetsFile = await fileService.loadFileAsync(assetsFilename);
const assetsFileExtension = getExtension(assetsFilename);
if (!assetsFileExtension?.endsWith('zip')) {
throw Error(`Assets required zip folder. Received '${config.assetsFilename}'`);
}
if (itemsFileExtension?.endsWith('zip'.toLowerCase())) {
await importToolkit.importFromZipAsync();
} else if (itemsFileExtension?.endsWith('csv'.toLowerCase())) {
await importToolkit.importFromFileAsync();
} else if (itemsFileExtension?.endsWith('json'.toLowerCase())) {
await importToolkit.importFromFileAsync();
} else {
logDebug({
type: 'info',
message: `Skipping assets import`
});
}
if (itemsFileExtension?.endsWith('zip')) {
const data = await fileProcessorService.extractZipAsync(itemsFile, assetsFile, contentTypes, {
assetFormatService: getAssetFormatService(config.format),
itemFormatService: getItemFormatService(config.format)
});
await importService.importFromSourceAsync(data);
} else if (itemsFileExtension?.endsWith('csv')) {
const data = await fileProcessorService.extractCsvFileAsync(itemsFile, contentTypes);
await importService.importFromSourceAsync(data);
} else if (itemsFileExtension?.endsWith('json')) {
const data = await fileProcessorService.extractJsonFileAsync(itemsFile, contentTypes);
await importService.importFromSourceAsync(data);
} else {
throw Error(`Unsupported file type '${itemsFileExtension}'`);

@@ -213,4 +203,6 @@ }

const format: string | undefined = getOptionalArgumentValue(resolvedArgs, 'format');
const adapter: string | undefined = getOptionalArgumentValue(resolvedArgs, 'adapter');
let mappedFormat: ProcessingFormat = 'csv';
let mappedAdapter: ExportAdapter = 'kontentAi';

@@ -229,2 +221,10 @@ if (format?.toLowerCase() === 'csv'.toLowerCase()) {

if (adapter?.toLowerCase() === 'kontentAi'.toLowerCase()) {
mappedAdapter = 'kontentAi';
} else {
if (action === 'export') {
throw Error(`Unsupported adapter '${adapter}'`);
}
}
const config: ICliFileConfig = {

@@ -247,4 +247,4 @@ action: action,

isSecure: getBooleanArgumentvalue(resolvedArgs, 'isSecure', false),
importAssets: getBooleanArgumentvalue(resolvedArgs, 'importAssets', false),
replaceInvalidLinks: getBooleanArgumentvalue(resolvedArgs, 'replaceInvalidLinks', false),
adapter: mappedAdapter,
format: mappedFormat

@@ -318,8 +318,1 @@ };

}
function getZipFilename(filename: string): string {
if (filename.toLowerCase()?.endsWith('.zip')) {
return filename;
}
return `${filename}.zip`;
}
{
"name": "xeno-test",
"version": "0.0.16",
"description": "This utility enables users to export & import content data from / to Kontent.ai projects",
"version": "0.0.17",
"description": "This program can be used to import content related data into Kontent.ai from various formats. Additionally, it can also be used to export Kontent.ai data using Delivery API.",
"preferGlobal": true,

@@ -12,3 +12,3 @@ "private": false,

"type": "git",
"url": "https://github.com/Enngage/kontent-csv-manager"
"url": "https://github.com/Enngage/kontent-ai-migration-toolkit"
},

@@ -41,11 +41,7 @@ "engines": {

"Kontent.ai",
"Kontent.ai import tool",
"Kontent.ai import",
"Kontent.ai import toolkit",
"Kontent.ai migration tool",
"Kontent.ai data manager",
"Kontent.ai CSV export",
"Kontent.ai CSV import",
"Kontent.ai JSON export",
"Kontent.ai JSON import",
"Kontent.ai data export",
"Kontent.ai data import"
"Kontent.ai CSV export & import",
"Kontent.ai JSON export & import"
],

@@ -71,4 +67,4 @@ "license": "MIT",

"@types/mime": "3.0.4",
"@types/node": "20.10.3",
"@types/uuid": "^9.0.7",
"@types/node": "20.10.4",
"@types/uuid": "9.0.7",
"@types/yargs": "17.0.32",

@@ -81,4 +77,4 @@ "@typescript-eslint/eslint-plugin": "6.13.2",

"tslib": "2.6.2",
"typescript": "5.3.2"
"typescript": "5.3.3"
}
}

@@ -1,20 +0,27 @@

# Kontent.ai Data Manager
# Kontent.ai Migration Toolkit
The purpose of this project is to export & import content data to & from [Kontent.ai](https://kontent.ai) projects.
The purpose of this project is to import content data into [Kontent.ai](https://kontent.ai) projects using various formats
and export adapters. Currently, only the `Kontent.ai` export adapter is supported (meaning you can export data from Kontent.ai
and re-import it into the same or a different project).
Data is exported via the `Delivery API` and imported back via the `Management API`. There are 3 default data formats:
`json`, `jsonJoined` and `csv`. It is possible to write a custom transformer if you need to add extra processing or
use a different format altogether.
This library can only be used in `node.js`. Use in browsers is not supported.
### Important Disclaimer
> We do not recommend importing data into your production environment directly (= without proper testing), unless you
> are absolutely sure you know what you are doing. Instead, we recommend that you create a new environment based on your
> production and test the import there first. If the import meets your expectations, you may swap environments or run it again
> on production.
## How it works
> When importing, it is important that both the `source` and `target` projects have identical definitions of:
> `Content types`, `taxonomies` and `workflows`. Any inconsistency in data definition may cause import to fail.
> When importing, it is essential that `Content types`, `Taxonomies` and `Workflows` match the input data. Any
> inconsistency in the data, such as referencing a nonexistent taxonomy term, an incorrect element type or other problems, will
> cause the import to fail.
### How are content items imported?
The Data manager creates content items that are not present in target project. If the content item exists in target
project (based on item's `codename`) the item will be updated. The workflow will be preserved.
The Migration Toolkit creates content items that are not present in the target project. If a content item already exists in the
target project (based on the item's `codename`), it will be updated. The workflow or published state will be set according to
the source data.
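As a rough illustration of this create-or-update behaviour, the sketch below expresses the decision in plain TypeScript. This is not the toolkit's actual code: the item shape is simplified and `itemExistsInTargetProject`, `createItem` and `updateItem` are hypothetical helpers standing in for the Management API calls the toolkit performs.
```typescript
// Simplified item shape for illustration only; the real toolkit types differ.
interface IImportItem {
    system: { codename: string; name: string; type: string; language: string };
    elements: { codename: string; value: string | string[] | undefined }[];
}

// Hypothetical helpers standing in for Management API calls (assumed, not part of the toolkit).
declare function itemExistsInTargetProject(codename: string): Promise<boolean>;
declare function createItem(item: IImportItem): Promise<void>;
declare function updateItem(item: IImportItem): Promise<void>;

// Existing items (matched by codename) are updated in place; missing ones are created.
async function importContentItemAsync(item: IImportItem): Promise<void> {
    if (await itemExistsInTargetProject(item.system.codename)) {
        await updateItem(item);
    } else {
        await createItem(item);
    }
}
```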

@@ -24,5 +31,4 @@ ### How are assets imported?

If an asset already exists in the target project, its upload will be skipped. If an asset doesn't exist, the
asset from the zip folder will be uploaded and its id will be used as a filename unless you enable the
`fetchAssetsDetails` option. The data Manager will also set `external_id` of newly uploaded assets to equal their
original id. Folder structure of imported assets is not preserved.
asset from the zip folder will be uploaded. The Migration Toolkit will also set `external_id` of newly uploaded assets
to equal their original id. There are some limitations to importing assets; see the _Limitations_ section for more info.
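A minimal sketch of the `external_id` convention described above, using assumed type shapes (illustrative only, not the toolkit's actual upload code):
```typescript
// Assumed shapes for illustration; the real toolkit types may differ.
interface IExportedAssetInfo {
    assetId: string; // id of the asset in the source project
    filename: string;
    binaryData: Buffer;
}

interface IAssetUploadRequest {
    filename: string;
    externalId: string; // set to the original id so repeated imports resolve to the same asset
    binaryData: Buffer;
}

// Reusing the source id as external_id keeps asset imports idempotent across runs.
function toAssetUploadRequest(asset: IExportedAssetInfo): IAssetUploadRequest {
    return {
        filename: asset.filename,
        externalId: asset.assetId,
        binaryData: asset.binaryData
    };
}
```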

@@ -37,55 +43,69 @@ ## Installation

### Configuration
### Export Configuration
| Config | Value |
| -------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **environmentId** | Id of Kontent.ai project **(required)** |
| **managementApiKey** | Management API key **(required for import, optional export)** |
| **action** | Action. Available options: `import` & `export` **(required)** |
| **format** | Format used to export data. Available options: `csv`, `json` and `jsonJoined` **(required)** |
| secureApiKey | API key for secure Access |
| previewApiKey | API key for preview |
| isSecure | When set to `true`, Secure API will be used to make data export. Defaults to `false` |
| isPreview | When set to `true`, Preview API will be used to make data export. Defaults to `false` |
| exportAssets | When set to `true`, Binary data of assets is exported. Defaults to `false` |
| importAssets | When set to `true`, assets & binary files will be imported. This requires the asset's zip file (can be created with export action). Defaults to `false` |
| replaceInvalidLinks | RTE may contain links to invalid items. You won't be able to re-import such items due to validation error. By setting this to `true` the import tool will automatically remove these links. Defaults to `false` |
| itemsFilename | Name of zip used for export / import items |
| assetsFilename | Name of zip used for export / import assets |
| baseUrl | Custom base URL for Management API calls. |
| exportTypes | Array of content types codenames of which content items should be exported. By default all items of all types are exported |
| skipFailedItems      | Indicates if failed content items & language variants should be skipped if their import fails. Available options: `true` & `false`. Defaults to `false`                                                           |
| Config | Value |
| ------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **action** | Action. Available options: `import` & `export` **(required)** |
| **environmentId** | Id of Kontent.ai project **(required)** |
| **adapter**         | Adapter used to export data into a known format that can be used for importing data. Available options: `kontentAi` **(required for export)**                                                                           |
| **format** | Format used to export data. Available options: `csv`, `json` and `jsonJoined` **(required for export & import)** |
| secureApiKey | API key for secure Access. `isSecure` also needs to be enabled |
| previewApiKey | API key for preview. `isPreview` also needs to be enabled |
| isSecure | When set to `true`, Secure API will be used to make data export. Defaults to `false` |
| isPreview | When set to `true`, Preview API will be used to make data export. Defaults to `false` |
| exportAssets | When set to `true`, Binary data of assets is exported. Defaults to `false` |
| replaceInvalidLinks | RTE may contain links to invalid items. You won't be able to re-import such items due to validation error. By setting this to `true` the Migration Toolkit will automatically remove these links. Defaults to `false` |
| itemsFilename       | Name of the items file that will be created in the folder where the script is run                                                                                                                                       |
| assetsFilename      | Name of the assets file that will be created in the folder where the script is run. Only zip is supported.                                                                                                              |
| baseUrl | Custom base URL for Kontent.ai API calls |
### Execution
### Import Configuration
> We do not recommend importing data back to your production environment directly. Instead, we recommend that you create
> a new environment based on your production and test the import first. If the import completes successfully, you may
> swap environments or run it again on production.
| Config | Value |
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **action** | Action. Available options: `import` & `export` **(required)** |
| **environmentId** | Id of Kontent.ai project **(required)** |
| **managementApiKey** | Management API key **(required)** |
| **format** | Format used to export data. Available options: `csv`, `json` and `jsonJoined` **(required for export & import)** |
| itemsFilename | Name of the items file that will be used to parse items |
| assetsFilename       | Name of the assets file that will be used to parse assets (only zip is supported)                                                                        |
| baseUrl | Custom base URL for Kontent.ai API calls |
| skipFailedItems      | Indicates if failed content items & language variants should be skipped if their import fails. Available options: `true` & `false`. Defaults to `false`  |
Export without assets:
### Import CLI samples
`kdm --action=export --environmentId=xxx --format=csv`
Import from zip:
Export with assets:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.zip --format=json`
`kdm --action=export --environmentId=xxx --format=csv --exportAssets=false`
Import from zip with assets:
Import without assets:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.zip --format=json --assetsFilename=assets.zip`
`kdm --action=import --apiKey=xxx --environmentId=xxx`
Import from json file:
Import with assets:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.json --format=json`
`kdm --action=import --apiKey=xxx --environmentId=xxx --importAssets=true`
Import from csv file:
Import from json file:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.csv --format=csv`
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.json`
### Export CLI samples
Import from csv file:
Export from Kontent.ai environment as json without assets:
`kdm --action=import --apiKey=xxx --environmentId=xxx --itemsFilename=data.csv`
`kdm --action=export --adapter=kontentAi --environmentId=xxx --format=json`
To get some help you can use:
Export from Kontent.ai environment as csv without assets:
`kdm --action=export --adapter=kontentAi --environmentId=xxx --format=csv`
Export from Kontent.ai environment as single json file with assets:
`kdm --action=export --adapter=kontentAi --environmentId=xxx --format=jsonJoined --exportAssets=true`
### CLI help
To see available commands use:
`kdm --help`

@@ -101,2 +121,3 @@

"secureApiKey": "y",
"adapter": "kontentAi",
"isSecure": true,

@@ -115,9 +136,33 @@ "isPreview": false,

## Use via code
## Use in code
See https://github.com/Enngage/kontent-data-manager/tree/main/samples for examples of how to run this library in code
rather than via command line.
See https://github.com/Enngage/kontent-ai-migration-toolkit/tree/main/samples for examples of how to run this library in
code rather than via command line.
## Customizing exported items
## Importing in code
The example below shows the most basic way of importing `content items` from a single `json` file:
```typescript
const importToolkit = new ImportToolkit({
environmentId: '<id>',
managementApiKey: '<mapiKey>',
skipFailedItems: false,
// be careful when filtering data to import because you might break data consistency.
// for example, it might not be possible to import a language variant without first importing its content item, and so on.
canImport: {
asset: (item) => true, // all assets will be imported
contentItem: (item) => true // all content items will be imported
},
items: {
filename: 'items.json',
formatService: new ItemJsonProcessorService()
}
});
await importToolkit.importFromFileAsync();
```
## Exporting in code
You may customize what items get exported by using the `customItemsExport` option when exporting in code. This option

@@ -130,3 +175,3 @@ allows you to export exactly the items you need, however be aware that the exported items may reference other items that

```typescript
const exportService = new ExportService({
const adapter = new KontentAiExportAdapter({
environmentId: '<id>',

@@ -136,2 +181,3 @@ exportAssets: true,

isSecure: false,
// optional filter to customize what items are exported
customItemsExport: async (client) => {

@@ -143,17 +189,31 @@ // return only the items you want to export by applying filters, parameters etc..

});
const exportToolkit = new ExportToolkit({ adapter });
await exportToolkit.exportAsync({
items: {
filename: 'items-export.zip',
formatService: new ItemJsonProcessorService() // or different one, see readme.md
},
// assets are optional
assets: {
filename: 'assets-export.zip',
formatService: new AssetJsonProcessorService() // or different one, see readme.md
}
});
```
## Using custom formats
## Output / Input formats
This library provides `csv` and `json` export / import formats out of the box. However, you might want to use a different
format or otherwise change how items are processed. For example, you can use this to export into your own `xliff`
format, `xlsx`, some custom `txt` format and so on. By implementing `IFormatService` you can do just that. You may
take inspiration from these services:
This library provides `csv`, `json` and `jsonJoined` formats out of the box. However, you can create your own format by
implementing `IFormatService` and supplying it to the import / export functions. This is useful if you need to extend an
existing format, change how it's processed or support new formats such as `xliff`, `xlsx`, `xml` or others.
| Service | Link |
| -------------------------- | ------------------------------------------------------------------------------------------------------------------------ |
| CSV `IItemFormatService ` | https://github.com/Enngage/kontent-data-manager/blob/main/lib/file-processor/item-formats/item-csv-processor.service.ts |
| JSON `IItemFormatService ` | https://github.com/Enngage/kontent-data-manager/blob/main/lib/file-processor/item-formats/item-json-processor.service.ts |
Following is a list of `built-in` format services:
To use your custom formatting service simply pass it to `createZipAsync` or `extractZipAsync`
| Type | Service | Link |
| ------------ | --------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
| `csv`        | `ItemCsvProcessorService`        | https://github.com/Enngage/kontent-ai-migration-toolkit/blob/main/lib/file-processor/item-formats/item-csv-processor.service.ts         |
| `json`       | `ItemJsonProcessorService`       | https://github.com/Enngage/kontent-ai-migration-toolkit/blob/main/lib/file-processor/item-formats/item-json-processor.service.ts        |
| `jsonJoined` | `ItemJsonJoinedProcessorService` | https://github.com/Enngage/kontent-ai-migration-toolkit/blob/main/lib/file-processor/item-formats/item-json-joined-processor.service.ts |
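As an illustration, a minimal custom item format service might look like the sketch below. The `transformContentItemsAsync` signature mirrors the one shown in the diffs above, while the local interfaces are simplified stand-ins for the library's types and the parsing (import) direction of the contract is omitted, so treat it as an outline rather than a drop-in implementation.
```typescript
// Illustrative only: local stand-ins for the library's types (the real ones live in the package).
interface IExportElement {
    codename: string;
    value: string | string[] | undefined;
}

interface IExportContentItem {
    system: { codename: string; name: string; type: string; language: string };
    elements: IExportElement[];
}

interface IFileData {
    filename: string;
    data: string;
}

// A minimal "one item per line" text format. The real IItemFormatService contract also
// covers parsing data back for import, which is omitted in this sketch.
export class ItemTxtProcessorService {
    public readonly name: string = 'txt';

    async transformContentItemsAsync(items: IExportContentItem[]): Promise<IFileData[]> {
        const lines = items.map(
            (item) =>
                `${item.system.type}:${item.system.codename}:${item.system.language} | ` +
                item.elements.map((e) => `${e.codename}=${String(e.value ?? '')}`).join(';')
        );
        return [
            {
                filename: 'items.txt',
                data: lines.join('\n')
            }
        ];
    }
}
```
An instance of such a service would then be passed as the `formatService` option when exporting or importing, as in the code samples above.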

@@ -160,0 +220,0 @@ ### Limitations

