sync-cloud-storage - npm Package Compare versions

Comparing version 1.0.0-rc.1 to 1.0.0-rc.2

.vscode/launch.json


.eslintrc.json

@@ -13,3 +13,4 @@ {

"ecmaVersion": 2023,
"sourceType": "module"
"sourceType": "module",
"project": "./tsconfig.json"
},

@@ -46,3 +47,14 @@ "rules": {

}
]
],
"@typescript-eslint/naming-convention": [
"error",
{
"selector": "variableLike",
"format": ["camelCase"]
}
],
"no-console": 2,
"no-unused-vars": 2,
"@typescript-eslint/no-unused-vars": ["error"],
"import/named": "error"
},

@@ -59,3 +71,7 @@ "env": {

"alwaysTryTypes": true,
"project": "./tsconfig.json"
"project": [
"./tsconfig.json",
"./src/tsconfig.json",
"./test/tsconfig.json"
]
}

@@ -62,0 +78,0 @@ }


dist/errors.d.ts
import { ZodError } from 'zod';
import { Storage } from './schemas/input';
export declare class InvalidConfigError extends Error {
constructor(message: string, issues: ZodError);
}
export declare const handleMethodError: (error: Error, storage: Storage) => string;
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.handleMethodError = exports.InvalidConfigError = void 0;
const client_s3_1 = require("@aws-sdk/client-s3");
exports.InvalidConfigError = void 0;
const logger_1 = __importDefault(require("./utils/logger"));
class InvalidConfigError extends Error {

@@ -10,18 +13,5 @@ constructor(message, issues) {

this.message = message;
console.error('InvalidConfigError', { issues });
logger_1.default.error('InvalidConfigError', { issues });
}
}
exports.InvalidConfigError = InvalidConfigError;
const handleMethodError = (error, storage) => {
let message;
if (error instanceof client_s3_1.NoSuchBucket) {
message = `${error.name}: ${storage.name}`;
console.error(message);
}
else {
message = `${error.name}/${error.message}: ${storage.name}`;
}
console.error(message);
return message;
};
exports.handleMethodError = handleMethodError;

dist/index.d.ts

@@ -5,3 +5,3 @@ import { S3Client } from '@aws-sdk/client-s3';

import { Custom, Storage } from './schemas/input';
import { IServerless, TagsMethodPromiseResult } from './types';
import { IServerless, TagsSyncResults } from './types';
/**

@@ -15,2 +15,3 @@ * Sync Cloud Storage module.

hooks: ServerlessPlugin.Hooks;
commands: ServerlessPlugin.Commands;
servicePath: string;

@@ -40,2 +41,11 @@ config: Custom;

/**
* Set commands.
* @returns {ServerlessPlugin.Commands} Commands
* @memberof SyncCloudStorage
*
* @example
* const commands = this.setCommands()
*/
setCommands(): ServerlessPlugin.Commands;
/**
* Set hooks.

@@ -72,10 +82,9 @@ * @returns {ServerlessPlugin.Hooks} Hooks

* @private
* @returns {TagsMethodPromiseResult}
* @memberof SyncCloudStorage
* @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3
*
* @returns {Promise<TagsSyncResults>}
* @example
* const result = await this.tags()
*/
tags(): TagsMethodPromiseResult;
tags(): Promise<TagsSyncResults>;
/**

@@ -82,0 +91,0 @@ * On exit.

dist/index.js

'use strict';
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -6,4 +9,4 @@ const client_s3_1 = require("@aws-sdk/client-s3");

const buckets_1 = require("./providers/s3/buckets");
const credentials_1 = require("./providers/s3/credentials");
const input_1 = require("./schemas/input");
const logger_1 = __importDefault(require("./utils/logger"));
/**

@@ -17,2 +20,3 @@ * Sync Cloud Storage module.

hooks;
commands;
servicePath;

@@ -31,16 +35,7 @@ config;

constructor(serverless, options, logging) {
if (!serverless) {
throw new Error('Serverless instance is required');
}
// Typing with *as* enables tests to use a DI version of the instance
this.serverless = serverless;
this.servicePath = this.serverless.service.serverless.config.servicePath;
if (!options) {
throw new Error("Options can't be undefined");
}
this.options = options;
if (!logging) {
throw new Error("Logging can't be undefined");
}
this.logging = logging;
this.servicePath = this.serverless.service.serverless.config.servicePath;
const config = this.serverless.service.custom;

@@ -57,2 +52,3 @@ const validatedConfig = input_1.custom.safeParse(config);

this._storages = this.config.syncCloudStorage.storages.filter((bucket) => bucket.enabled);
this.commands = this.setCommands();
this.hooks = this.setHooks();

@@ -70,4 +66,2 @@ }

getS3Client() {
const provider = this.serverless.getProvider('aws');
const credentials = (0, credentials_1.getCredentials)(provider);
const endpoint = this.config.syncCloudStorage.offline

@@ -77,3 +71,3 @@ ? this.config.syncCloudStorage.endpoint ?? process.env.AWS_ENDPOINT_URL

return new client_s3_1.S3Client({
...credentials,
// ...credentials,
endpoint,

@@ -83,2 +77,18 @@ });

/**
* Set commands.
* @returns {ServerlessPlugin.Commands} Commands
* @memberof SyncCloudStorage
*
* @example
* const commands = this.setCommands()
*/
setCommands() {
return {
scs: {
usage: 'Sync Cloud Storage',
lifecycleEvents: ['storages', 'tags'],
},
};
}
/**
* Set hooks.

@@ -95,6 +105,6 @@ * @returns {ServerlessPlugin.Hooks} Hooks

return {
'scs:storages': syncStoragesHook,
'scs:tags': syncTagsHook,
'before:offline:start:init': syncStoragesHook,
'scs:buckets': syncStoragesHook,
'scs:tags': syncTagsHook,
'before:deploy:deploy': () => syncStoragesHook(),
'before:deploy:deploy': syncStoragesHook,
};

@@ -134,6 +144,5 @@ }

* @private
* @returns {TagsMethodPromiseResult}
* @memberof SyncCloudStorage
* @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3
*
* @returns {Promise<TagsSyncResults>}
* @example

@@ -143,8 +152,7 @@ * const result = await this.tags()

async tags() {
const isPluginEnable = this.disableCheck().result;
if (!isPluginEnable) {
return [];
const isPluginDisable = this.disableCheck().result;
if (isPluginDisable) {
return [{ error: 'Plugin is disabled' }];
}
const syncedStorages = (await Promise.allSettled(this._storages.map((bucket) => (0, buckets_1.syncTags)(this.client, bucket))));
return syncedStorages;
return (await Promise.allSettled(this._storages.map((bucket) => (0, buckets_1.syncTags)(this.client, bucket))));
}

@@ -161,5 +169,2 @@ /**

async onExit() {
if (this.taskProcess) {
this.taskProcess.remove();
}
if (this.client) {

@@ -171,3 +176,3 @@ this.client.destroy();

if (this.config.syncCloudStorage.disabled) {
console.warn('SyncCloudStorage is disabled!');
logger_1.default.warning('SyncCloudStorage is disabled!');
return { result: true };

@@ -174,0 +179,0 @@ }

dist/providers/local/objects.js

@@ -33,2 +33,3 @@ "use strict";

const path_1 = __importDefault(require("path"));
const objects_1 = require("../../utils/objects");
/**

@@ -52,8 +53,11 @@ * Returns a list of local files recursively.

else {
const internalPath = (0, objects_1.extractAfterSubdirectory)(fullPath, storage.localPath);
let key = internalPath;
if (storage.prefix) {
key = path_1.default.join(storage.prefix, internalPath);
}
const file = {
LocalPath: fullPath,
Key: storage.bucketPrefix
? path_1.default.join(`${storage.bucketPrefix}/${item}`)
: item,
LastModified: stat.mtime,
Key: key,
LastModified: new Date(stat.mtime),
Size: stat.size,

@@ -60,0 +64,0 @@ ETag: await getFileETag(fs.createReadStream(fullPath)),

dist/providers/s3/buckets.d.ts

@@ -6,3 +6,3 @@ import { DeletedObject, S3Client } from '@aws-sdk/client-s3';

/**
* Syncs storage with upload and delete actions by comparing local file and storage's object checksums.
* Syncs storage with upload and delete actions by comparing local file and storage's object `${Key}-${ETag}`.
* @memberof S3

@@ -9,0 +9,0 @@ * @param {S3Client} client
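The reworded doc comment makes the sync mechanism explicit: local files and listed objects are each reduced to a `${Key}-${ETag}` string and diffed as plain sets. A minimal sketch of that comparison (sample keys and ETags are invented):

```ts
// Local files and remote objects reduced to `${Key}-${ETag}` strings (sample data).
const localChecksums = ['a.txt-d41d8c98', 'b.txt-9e107d9d']
const remoteChecksums = ['a.txt-d41d8c98', 'c.txt-73485812']

// Present locally but not remotely: upload candidates.
const filesToUpload = localChecksums.filter((c) => !remoteChecksums.includes(c))
// Present remotely but not locally: delete candidates (when the 'delete' action is enabled).
const filesToDelete = remoteChecksums.filter((c) => !localChecksums.includes(c))
console.log(filesToUpload, filesToDelete) // [ 'b.txt-9e107d9d' ] [ 'c.txt-73485812' ]
```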

dist/providers/s3/buckets.js

@@ -9,5 +9,4 @@ "use strict";

const client_s3_1 = require("@aws-sdk/client-s3");
const mrmime_1 = require("mrmime");
const objects_1 = require("./objects");
const errors_1 = require("../../errors");
const logger_1 = __importDefault(require("../../utils/logger"));
const objects_2 = require("../../utils/objects");

@@ -27,3 +26,3 @@ const tags_1 = require("../../utils/tags");

/**
* Syncs storage with upload and delete actions by comparing local file and storage's object checksums.
* Syncs storage with upload and delete actions by comparing local file and storage's object `${Key}-${ETag}`.
* @memberof S3

@@ -39,5 +38,5 @@ * @param {S3Client} client

if (!storageExist) {
throw (0, errors_1.handleMethodError)(new Error(`Storage doesn't exist!`), storage);
throw new Error('StorageNotFound');
}
console.log('Syncing storage', { storage: storage.name });
logger_1.default.info('Syncing storage', { storage: storage.name });
const files = await (0, objects_3.getLocalFiles)(path_1.default.join(servicePath, storage.localPath), storage);

@@ -51,10 +50,8 @@ const localFilesChecksum = files.map((file) => (0, objects_2.getChecksum)(file.Key, file.ETag));

let deleted = [];
if (filesToUpload.length > 0 && storage.actions.includes('upload')) {
if (filesToUpload.length >= 1 && storage.actions.includes('upload')) {
uploaded = await (0, objects_1.uploadObjects)(client, storage, files, filesToUpload);
}
if (filesToDelete.length > 0 && storage.actions.includes('delete')) {
if (storage.deleteRemoved) {
const objectsToDelete = objects.filter((object) => filesToDelete.includes((0, objects_2.getChecksum)(object.Key, object.ETag)));
deleted = await (0, objects_1.deleteObjects)(client, storage, objectsToDelete);
}
if (filesToDelete.length >= 1 && storage.actions.includes('delete')) {
const objectsToDelete = objects.filter((object) => filesToDelete.includes((0, objects_2.getChecksum)(object.Key, object.ETag)));
deleted = await (0, objects_1.deleteObjects)(client, storage, objectsToDelete);
}

@@ -83,42 +80,35 @@ const result = {

const syncMetadata = async (client, storage) => {
// Get list of existing objects
const existingObjects = await (0, objects_1.listObjects)(client, storage);
const syncedMetadata = [];
for (const file of existingObjects) {
console.log("Syncing storage's metadata", {
logger_1.default.info("Syncing storage's metadata", {
storage: storage.name,
Key: file.Key,
});
const detectedContentType = (0, mrmime_1.lookup)(file.Key) ?? storage.defaultContentType;
try {
const copyCommand = new client_s3_1.CopyObjectCommand({
Bucket: storage.name,
Key: file.Key,
CopySource: encodeURIComponent(`${storage.name}/${file.Key}`),
ContentType: detectedContentType,
MetadataDirective: client_s3_1.MetadataDirective.REPLACE,
Metadata: storage.metadata,
});
const result = await client.send(copyCommand);
console.log('Metadata synced', {
storage: storage.name,
Key: file.Key,
result,
});
// Get Object metadata
const headCommand = await client.send(new client_s3_1.HeadObjectCommand({
Bucket: storage.name,
Key: storage.bucketPrefix
? path_1.default.join(storage.bucketPrefix, `${file.Key}`)
: file.Key,
}));
syncedMetadata.push({
Key: file.Key,
Bucket: storage.name,
Metadata: headCommand.Metadata,
});
}
catch (error) {
(0, errors_1.handleMethodError)(error, storage);
}
const copyCommand = new client_s3_1.CopyObjectCommand({
Bucket: storage.name,
Key: file.Key,
CopySource: encodeURIComponent(`${storage.name}/${file.Key}`),
ContentType: (0, objects_2.getContentType)(file.Key),
MetadataDirective: client_s3_1.MetadataDirective.REPLACE,
Metadata: storage.metadata,
ACL: storage.acl,
});
const result = await client.send(copyCommand);
logger_1.default.info('Metadata synced', {
storage: storage.name,
Key: file.Key,
result,
});
const headCommand = await client.send(new client_s3_1.HeadObjectCommand({
Bucket: storage.name,
Key: storage.prefix
? path_1.default.join(storage.prefix, `${file.Key}`)
: file.Key,
}));
syncedMetadata.push({
Key: file.Key,
Bucket: storage.name,
Metadata: headCommand.Metadata,
});
}

@@ -136,18 +126,35 @@ return syncedMetadata;

const syncTags = async (client, storage) => {
console.log("Syncing storage's tags", { storage: storage.name });
logger_1.default.info("Syncing storage's tags", { storage: storage.name });
const { name } = storage;
const storageExist = await (0, exports.storageExists)(client, name);
if (!storageExist) {
return { error: new Error('StorageNotFound') };
}
let existingTagSet = { TagSet: [] };
try {
const existingTagSetCommand = new client_s3_1.GetBucketTaggingCommand({
Bucket: storage.name,
});
const existingTagSet = await client.send(existingTagSetCommand);
try {
const existingTagSetCommand = new client_s3_1.GetBucketTaggingCommand({
Bucket: storage.name,
});
existingTagSet = await client.send(existingTagSetCommand);
}
catch (error) {
if (error.name === 'NoSuchTagSet') {
existingTagSet = { TagSet: [] };
}
else {
logger_1.default.error('Failed to get existing tags', {
storage: storage.name,
error: JSON.stringify(error),
});
}
}
const mergedTagSet = (0, tags_1.mergeTags)(existingTagSet.TagSet, storage.tags ?? {});
const Tagging = {
TagSet: mergedTagSet,
};
const command = new client_s3_1.PutBucketTaggingCommand({
await client.send(new client_s3_1.PutBucketTaggingCommand({
Bucket: storage.name,
Tagging: Tagging,
});
await client.send(command);
console.log("Synced storage's tags", {
Tagging: {
TagSet: mergedTagSet,
},
}));
logger_1.default.info("Synced storage's tags", {
storage: storage.name,

@@ -161,3 +168,3 @@ existingTagSet: existingTagSet.TagSet,

catch (error) {
return { storage, error: (0, errors_1.handleMethodError)(error, storage) };
return { storage, error: JSON.stringify(error) };
}

@@ -167,40 +174,29 @@ };

const createStorage = async (client, storage) => {
console.log('Creating storage', { storage: storage.name });
try {
const createCommand = new client_s3_1.CreateBucketCommand({
Bucket: storage.name,
ObjectLockEnabledForBucket: true,
ObjectOwnership: 'BucketOwnerPreferred',
});
await client.send(createCommand);
console.log('Storage created', { storage: storage.name });
const aclCommand = new client_s3_1.PutBucketAclCommand({
Bucket: storage.name,
ACL: 'private',
});
await client.send(aclCommand);
console.log('Storage ACL enabled', { storage: storage.name });
return storage;
}
catch (error) {
throw (0, errors_1.handleMethodError)(error, storage);
}
logger_1.default.info('Creating storage', { storage: storage.name });
const createCommand = new client_s3_1.CreateBucketCommand({
Bucket: storage.name,
ObjectLockEnabledForBucket: true,
ObjectOwnership: 'BucketOwnerPreferred',
});
await client.send(createCommand);
logger_1.default.info('Storage created', { storage: storage.name });
const aclCommand = new client_s3_1.PutBucketAclCommand({
Bucket: storage.name,
ACL: 'private',
});
await client.send(aclCommand);
logger_1.default.info('Storage ACL enabled', { storage: storage.name });
return storage;
};
exports.createStorage = createStorage;
const deleteStorage = async (client, storage) => {
console.log('Deleting storage', { storage: storage.name });
try {
const objects = await (0, objects_1.listObjects)(client, storage);
const deletedObjects = await (0, objects_1.deleteObjects)(client, storage, objects);
const deleteCommand = new client_s3_1.DeleteBucketCommand({
Bucket: storage.name,
});
await client.send(deleteCommand);
console.log('Storage deleted', { storage: storage.name });
return deletedObjects;
}
catch (error) {
throw (0, errors_1.handleMethodError)(error, storage);
}
logger_1.default.info('Deleting storage', { storage: storage.name });
const objects = await (0, objects_1.listObjects)(client, storage);
const deletedObjects = await (0, objects_1.deleteObjects)(client, storage, objects);
await client.send(new client_s3_1.DeleteBucketCommand({
Bucket: storage.name,
}));
logger_1.default.info('Storage deleted', { storage: storage.name });
return deletedObjects;
};
exports.deleteStorage = deleteStorage;

dist/providers/s3/credentials.js

@@ -14,18 +14,25 @@ "use strict";

const getCredentials = (provider) => {
const { cachedCredentials } = provider;
const credentials = cachedCredentials ?? {};
const { cachedCredentials, getRegion, getCredentials } = provider;
const credentials = cachedCredentials ?? {
accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? undefined,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? undefined,
};
const { accessKeyId, secretAccessKey } = credentials;
const region = getRegion();
if (accessKeyId && secretAccessKey) {
return {
region: provider.getRegion(),
region,
credentials,
};
}
else {
if (getCredentials() !== undefined) {
return {
region: provider.getRegion() || provider.getCredentials().region,
credentials: provider.getCredentials().credentials,
region: getCredentials().region || region,
credentials: getCredentials().credentials,
};
}
else {
throw new Error('AWS credentials not found!');
}
};
exports.getCredentials = getCredentials;
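The reworked `getCredentials` resolves credentials in a fixed order: the provider's cached credentials (now seeded from `AWS_ACCESS_KEY_ID`/`AWS_SECRET_ACCESS_KEY` when the cache is empty), then `provider.getCredentials()`, and finally an error. A condensed sketch of that order; the inline `provider` type is a stand-in for the Serverless AWS provider:

```ts
// Condensed resolution order, mirroring the compiled code above.
const resolveCredentials = (provider: {
  cachedCredentials?: { accessKeyId?: string; secretAccessKey?: string }
  getRegion: () => string
  getCredentials: () => { region?: string; credentials?: unknown } | undefined
}) => {
  const credentials = provider.cachedCredentials ?? {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID, // 1) cache, else env vars
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  }
  if (credentials.accessKeyId && credentials.secretAccessKey) {
    return { region: provider.getRegion(), credentials }
  }
  const fromProvider = provider.getCredentials() // 2) ask the provider
  if (fromProvider !== undefined) {
    return {
      region: fromProvider.region || provider.getRegion(),
      credentials: fromProvider.credentials,
    }
  }
  throw new Error('AWS credentials not found!') // 3) nothing resolved
}
```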

dist/providers/s3/objects.d.ts

@@ -21,7 +21,7 @@ import { DeletedObject, S3Client, _Object } from '@aws-sdk/client-s3';

export declare const uploadObjects: (client: S3Client, storage: Storage, localFiles: LocalFile[], filesToUpload: string[]) => Promise<{
Key: string | undefined;
ETag: string | undefined;
VersionId: string | undefined;
Bucket: string | undefined;
Location: string | undefined;
key: string | undefined;
etag: string | undefined;
versionId: string | undefined;
storage: string | undefined;
location: string | undefined;
}[]>;

@@ -28,0 +28,0 @@ /**

dist/providers/s3/objects.js

@@ -10,4 +10,3 @@ "use strict";

const lib_storage_1 = require("@aws-sdk/lib-storage");
const mrmime_1 = require("mrmime");
const errors_1 = require("../../errors");
const logger_1 = __importDefault(require("../../utils/logger"));
const objects_1 = require("../../utils/objects");

@@ -22,19 +21,14 @@ /**

const listObjects = async (client, storage) => {
console.log('List objects in bucket', {
logger_1.default.info('List objects in bucket', {
name: storage.name,
});
try {
const command = new client_s3_1.ListObjectsV2Command({
Bucket: storage.name,
});
const { Contents = [] } = await client.send(command);
console.log('Storage objects', {
name: storage.name,
objects: Contents,
});
return Contents;
}
catch (error) {
throw (0, errors_1.handleMethodError)(error, storage);
}
const command = new client_s3_1.ListObjectsV2Command({
Bucket: storage.name,
});
const { Contents: contents = [] } = await client.send(command);
logger_1.default.info('Storage objects', {
storage: storage.name,
storageContents: contents,
});
return contents;
};

@@ -54,37 +48,29 @@ exports.listObjects = listObjects;

const fileToUpload = localFiles.find((file) => (0, objects_1.getChecksum)(file.Key, file.ETag) === checksum);
if (!fileToUpload) {
continue;
}
console.log('Upload file to bucket', {
name: storage.name,
Key: fileToUpload.Key,
logger_1.default.info('Upload file to bucket', {
storage: storage.name,
key: fileToUpload.Key,
checksum,
});
try {
const command = new lib_storage_1.Upload({
client,
params: {
Bucket: storage.name,
Key: fileToUpload.Key,
Body: fs_1.default.createReadStream(fileToUpload.LocalPath),
ContentType: (0, mrmime_1.lookup)(fileToUpload.LocalPath) ?? undefined,
},
});
console.log('Uploaded file to bucket', {
name: storage.name,
const command = new lib_storage_1.Upload({
client,
params: {
Bucket: storage.name,
Key: fileToUpload.Key,
command,
});
const result = await command.done();
uploadedObjects.push({
Key: result.Key,
ETag: result.ETag,
VersionId: result.VersionId,
Bucket: result.Bucket,
Location: result.Location,
});
}
catch (error) {
(0, errors_1.handleMethodError)(error, storage);
}
Body: fs_1.default.createReadStream(fileToUpload.LocalPath),
ContentType: (0, objects_1.getContentType)(fileToUpload.Key),
ACL: storage.acl,
},
});
logger_1.default.info('Uploaded file to bucket', {
storage: storage.name,
Key: fileToUpload.Key,
});
const result = await command.done();
uploadedObjects.push({
key: result.Key,
etag: result.ETag,
versionId: result.VersionId,
storage: result.Bucket,
location: result.Location,
});
}

@@ -104,46 +90,41 @@ return uploadedObjects;

const keys = [...objects].map((object) => object.Key);
console.log('Delete following objects from bucket', {
bucket: storage.name,
logger_1.default.info('Delete following objects from bucket', {
storage: storage.name,
keys,
});
try {
const listVersionsParams = {
const versions = await client.send(new client_s3_1.ListObjectVersionsCommand({
Bucket: storage.name,
Prefix: storage.prefix ? storage.prefix : undefined,
}));
const deleteMarkers = (versions.DeleteMarkers ?? []).map((marker) => ({
Key: marker.Key,
VersionId: marker.VersionId,
}));
const versionsToDelete = (versions.Versions ?? []).map((version) => ({
Key: version.Key,
VersionId: version.VersionId,
}));
const objectsToDelete = [
...objects.map((object) => ({
Key: object.Key,
VersionId: object.ETag,
})),
...deleteMarkers,
...versionsToDelete,
];
if (objectsToDelete.length > 0) {
const { Deleted: deleted = [] } = await client.send(new client_s3_1.DeleteObjectsCommand({
Bucket: storage.name,
};
const versions = await client.send(new client_s3_1.ListObjectVersionsCommand(listVersionsParams));
const deleteMarkers = (versions.DeleteMarkers ?? []).map((marker) => ({
Key: marker.Key,
VersionId: marker.VersionId,
Delete: {
Objects: objectsToDelete,
Quiet: false,
},
}));
const versionsToDelete = (versions.Versions ?? []).map((version) => ({
Key: version.Key,
VersionId: version.VersionId,
}));
const objectsToDelete = [
...objects.map((object) => ({
Key: object.Key,
VersionId: object.ETag,
})),
...deleteMarkers,
...versionsToDelete,
];
if (objectsToDelete.length > 0) {
const deleteParams = {
Bucket: storage.name,
Delete: {
Objects: objectsToDelete,
Quiet: false,
},
};
const { Deleted = [] } = await client.send(new client_s3_1.DeleteObjectsCommand(deleteParams));
console.log(`Permanently deleted all versions of object.`);
return Deleted;
}
else {
console.log(`No objects to delete.`);
return [];
}
logger_1.default.info(`Permanently deleted all versions of object.`, {
storage: storage.name,
});
return deleted;
}
catch (error) {
(0, errors_1.handleMethodError)(error, storage);
else {
logger_1.default.info(`No objects to delete.`, { storage: storage.name });
return [];

@@ -150,0 +131,0 @@ }

dist/schemas/input.d.ts

import { ObjectCannedACL } from '@aws-sdk/client-s3';
import type { ObjectCannedACL as ObjectCannedACLType } from '@aws-sdk/client-s3';
import { z } from 'zod';
type TupleType = [ObjectCannedACLType, ...ObjectCannedACLType[]];
export declare const ObjectCannedACLs: TupleType;
type ObjectCannedACLsTuple = [ObjectCannedACLType, ...ObjectCannedACLType[]];
declare const objectCannedACLs: ObjectCannedACLsTuple;
declare const tags: z.ZodRecord<z.ZodString, z.ZodString>;

@@ -11,7 +11,5 @@ declare const storage: z.ZodObject<{

actions: z.ZodDefault<z.ZodArray<z.ZodString, "many">>;
bucketPrefix: z.ZodDefault<z.ZodString>;
prefix: z.ZodDefault<z.ZodString>;
enabled: z.ZodDefault<z.ZodBoolean>;
deleteRemoved: z.ZodDefault<z.ZodBoolean>;
acl: z.ZodDefault<z.ZodEnum<[ObjectCannedACL, ...ObjectCannedACL[]]>>;
defaultContentType: z.ZodOptional<z.ZodString>;
acl: z.ZodOptional<z.ZodEnum<[ObjectCannedACL, ...ObjectCannedACL[]]>>;
metadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;

@@ -23,8 +21,6 @@ tags: z.ZodDefault<z.ZodRecord<z.ZodString, z.ZodString>>;

actions: string[];
bucketPrefix: string;
prefix: string;
enabled: boolean;
deleteRemoved: boolean;
acl: "private" | "authenticated-read" | "aws-exec-read" | "bucket-owner-full-control" | "bucket-owner-read" | "public-read" | "public-read-write";
tags: Record<string, string>;
defaultContentType?: string | undefined;
acl?: ObjectCannedACL | undefined;
metadata?: Record<string, string> | undefined;

@@ -35,7 +31,5 @@ }, {

actions?: string[] | undefined;
bucketPrefix?: string | undefined;
prefix?: string | undefined;
enabled?: boolean | undefined;
deleteRemoved?: boolean | undefined;
acl?: ObjectCannedACL | undefined;
defaultContentType?: string | undefined;
metadata?: Record<string, string> | undefined;

@@ -48,7 +42,5 @@ tags?: Record<string, string> | undefined;

actions: z.ZodDefault<z.ZodArray<z.ZodString, "many">>;
bucketPrefix: z.ZodDefault<z.ZodString>;
prefix: z.ZodDefault<z.ZodString>;
enabled: z.ZodDefault<z.ZodBoolean>;
deleteRemoved: z.ZodDefault<z.ZodBoolean>;
acl: z.ZodDefault<z.ZodEnum<[ObjectCannedACL, ...ObjectCannedACL[]]>>;
defaultContentType: z.ZodOptional<z.ZodString>;
acl: z.ZodOptional<z.ZodEnum<[ObjectCannedACL, ...ObjectCannedACL[]]>>;
metadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;

@@ -60,8 +52,6 @@ tags: z.ZodDefault<z.ZodRecord<z.ZodString, z.ZodString>>;

actions: string[];
bucketPrefix: string;
prefix: string;
enabled: boolean;
deleteRemoved: boolean;
acl: "private" | "authenticated-read" | "aws-exec-read" | "bucket-owner-full-control" | "bucket-owner-read" | "public-read" | "public-read-write";
tags: Record<string, string>;
defaultContentType?: string | undefined;
acl?: ObjectCannedACL | undefined;
metadata?: Record<string, string> | undefined;

@@ -72,7 +62,5 @@ }, {

actions?: string[] | undefined;
bucketPrefix?: string | undefined;
prefix?: string | undefined;
enabled?: boolean | undefined;
deleteRemoved?: boolean | undefined;
acl?: ObjectCannedACL | undefined;
defaultContentType?: string | undefined;
metadata?: Record<string, string> | undefined;

@@ -88,7 +76,5 @@ tags?: Record<string, string> | undefined;

actions: z.ZodDefault<z.ZodArray<z.ZodString, "many">>;
bucketPrefix: z.ZodDefault<z.ZodString>;
prefix: z.ZodDefault<z.ZodString>;
enabled: z.ZodDefault<z.ZodBoolean>;
deleteRemoved: z.ZodDefault<z.ZodBoolean>;
acl: z.ZodDefault<z.ZodEnum<[ObjectCannedACL, ...ObjectCannedACL[]]>>;
defaultContentType: z.ZodOptional<z.ZodString>;
acl: z.ZodOptional<z.ZodEnum<[ObjectCannedACL, ...ObjectCannedACL[]]>>;
metadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;

@@ -100,8 +86,6 @@ tags: z.ZodDefault<z.ZodRecord<z.ZodString, z.ZodString>>;

actions: string[];
bucketPrefix: string;
prefix: string;
enabled: boolean;
deleteRemoved: boolean;
acl: "private" | "authenticated-read" | "aws-exec-read" | "bucket-owner-full-control" | "bucket-owner-read" | "public-read" | "public-read-write";
tags: Record<string, string>;
defaultContentType?: string | undefined;
acl?: ObjectCannedACL | undefined;
metadata?: Record<string, string> | undefined;

@@ -112,7 +96,5 @@ }, {

actions?: string[] | undefined;
bucketPrefix?: string | undefined;
prefix?: string | undefined;
enabled?: boolean | undefined;
deleteRemoved?: boolean | undefined;
acl?: ObjectCannedACL | undefined;
defaultContentType?: string | undefined;
metadata?: Record<string, string> | undefined;

@@ -129,8 +111,6 @@ tags?: Record<string, string> | undefined;

actions: string[];
bucketPrefix: string;
prefix: string;
enabled: boolean;
deleteRemoved: boolean;
acl: "private" | "authenticated-read" | "aws-exec-read" | "bucket-owner-full-control" | "bucket-owner-read" | "public-read" | "public-read-write";
tags: Record<string, string>;
defaultContentType?: string | undefined;
acl?: ObjectCannedACL | undefined;
metadata?: Record<string, string> | undefined;

@@ -145,7 +125,5 @@ }[];

actions?: string[] | undefined;
bucketPrefix?: string | undefined;
prefix?: string | undefined;
enabled?: boolean | undefined;
deleteRemoved?: boolean | undefined;
acl?: ObjectCannedACL | undefined;
defaultContentType?: string | undefined;
metadata?: Record<string, string> | undefined;

@@ -165,8 +143,6 @@ tags?: Record<string, string> | undefined;

actions: string[];
bucketPrefix: string;
prefix: string;
enabled: boolean;
deleteRemoved: boolean;
acl: "private" | "authenticated-read" | "aws-exec-read" | "bucket-owner-full-control" | "bucket-owner-read" | "public-read" | "public-read-write";
tags: Record<string, string>;
defaultContentType?: string | undefined;
acl?: ObjectCannedACL | undefined;
metadata?: Record<string, string> | undefined;

@@ -183,7 +159,5 @@ }[];

actions?: string[] | undefined;
bucketPrefix?: string | undefined;
prefix?: string | undefined;
enabled?: boolean | undefined;
deleteRemoved?: boolean | undefined;
acl?: ObjectCannedACL | undefined;
defaultContentType?: string | undefined;
metadata?: Record<string, string> | undefined;

@@ -201,2 +175,2 @@ tags?: Record<string, string> | undefined;

export type { Custom, Storage, Tags };
export { custom, tags, storage, storages };
export { custom, tags, storage, storages, objectCannedACLs };
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.storages = exports.storage = exports.tags = exports.custom = exports.ObjectCannedACLs = void 0;
exports.objectCannedACLs = exports.storages = exports.storage = exports.tags = exports.custom = void 0;
const client_s3_1 = require("@aws-sdk/client-s3");
const zod_1 = require("zod");
exports.ObjectCannedACLs = [
...Object.values(client_s3_1.ObjectCannedACL).map((acl) => acl),
];
// Cast to ObjectCannedACLsTuple
const objectCannedACLs = Object.values(client_s3_1.ObjectCannedACL).map((acl) => acl);
exports.objectCannedACLs = objectCannedACLs;
const tags = zod_1.z.record(zod_1.z.string(), zod_1.z.string());

@@ -15,7 +15,5 @@ exports.tags = tags;

actions: zod_1.z.array(zod_1.z.string()).default(['upload', 'delete']),
bucketPrefix: zod_1.z.string().default(''),
prefix: zod_1.z.string().default(''),
enabled: zod_1.z.boolean().default(true),
deleteRemoved: zod_1.z.boolean().default(true),
acl: zod_1.z.enum(exports.ObjectCannedACLs).default(client_s3_1.ObjectCannedACL.authenticated_read),
defaultContentType: zod_1.z.string().optional(),
acl: zod_1.z.enum(objectCannedACLs).optional(),
metadata: zod_1.z.record(zod_1.z.string(), zod_1.z.string()).optional(),

@@ -22,0 +20,0 @@ tags: zod_1.z.record(zod_1.z.string(), zod_1.z.string()).default({}),

dist/types.d.ts

@@ -31,2 +31,9 @@ import { DeletedObject, Tag, _Object } from '@aws-sdk/client-s3';

};
export type UploadedObject = {
key: string | undefined;
etag: string | undefined;
versionId: string | undefined;
storage: string | undefined;
location: string | undefined;
};
export type StoragesSyncResult = {

@@ -40,3 +47,3 @@ storage: Storage;

filesToDelete: string[];
uploaded: _Object[];
uploaded: UploadedObject[];
deleted: DeletedObject[];

@@ -62,3 +69,3 @@ error?: string | Error;

export type TagsSyncResults = Array<MethodReturn<Tag[]>>;
export type TagsMethodPromiseResult = Promise<TagsSyncResults>;
export type TagsMethodPromiseResult = PromiseFulfilledResult<TagsSyncResult>;
export type SyncMetadataReturn = Array<Pick<_Object, 'Key'> & {

@@ -65,0 +72,0 @@ Bucket: string;

dist/utils/objects.d.ts

@@ -9,1 +9,10 @@ /**

export declare const getChecksum: (key?: string, etag?: string) => string;
/**
* Returns the MIME type of a file based on its extension.
* If the file extension is not recognized, it returns a default MIME type.
*
* @param {string} [key] - The name of the file including its extension.
* @returns {string} The MIME type of the file.
*/
export declare const getContentType: (key?: string) => string;
export declare const extractAfterSubdirectory: (fullPath: string, subdirectory: string) => string;
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getChecksum = void 0;
exports.extractAfterSubdirectory = exports.getContentType = exports.getChecksum = void 0;
const path_1 = __importDefault(require("path"));
const mrmime_1 = require("mrmime");
/**

@@ -12,4 +17,33 @@ * Get a checksum for an object

const getChecksum = (key = '', etag = '') => {
return `${key}-${etag}`;
return `${key}-${etag.replace(/"/g, '')}`;
};
exports.getChecksum = getChecksum;
/**
* Returns the MIME type of a file based on its extension.
* If the file extension is not recognized, it returns a default MIME type.
*
* @param {string} [key] - The name of the file including its extension.
* @returns {string} The MIME type of the file.
*/
const getContentType = (key) => {
const defaultMimeType = 'application/octet-stream';
return key ? (0, mrmime_1.lookup)(key) ?? defaultMimeType : defaultMimeType;
};
exports.getContentType = getContentType;
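`getContentType` is a thin wrapper over mrmime's `lookup` with an `application/octet-stream` fallback. A few illustrative calls (expected values follow mrmime's standard MIME table):

```ts
import { lookup } from 'mrmime'

// Same fallback expression as getContentType above.
const getContentType = (key?: string): string =>
  key ? lookup(key) ?? 'application/octet-stream' : 'application/octet-stream'

getContentType('logo.svg')    // 'image/svg+xml'
getContentType('archive.xyz') // unrecognized extension -> 'application/octet-stream'
getContentType()              // no key -> 'application/octet-stream'
```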
const extractAfterSubdirectory = (fullPath, subdirectory) => {
// Normalize both paths to ensure consistent separators
const normalizedFullPath = path_1.default.normalize(fullPath);
const normalizedLocalPath = path_1.default.normalize(subdirectory);
// Find the start index of the subdirectory in the full path
const startIndex = normalizedFullPath.indexOf(normalizedLocalPath);
// Assuming subdirectory is always part of fullPath, calculate the end index
const endIndex = startIndex + normalizedLocalPath.length;
// Extract the part of the full path after the subdirectory
// Check if endIndex is at the path's end or adjust for separator
const afterSubdirectory = endIndex >= normalizedFullPath.length
? ''
: normalizedFullPath.substring(endIndex + 1);
// Return the extracted path part, ensuring no leading separators
return afterSubdirectory ? path_1.default.normalize(afterSubdirectory) : '';
};
exports.extractAfterSubdirectory = extractAfterSubdirectory;
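Paired with the Key construction in the local objects module above, `extractAfterSubdirectory` strips everything up to and including the configured `localPath`, and the optional `prefix` is then joined on. A worked example with hypothetical paths:

```ts
import path from 'path'

// extractAfterSubdirectory('/srv/app/assets/giraffe/images/logo.png', 'assets/giraffe')
// locates 'assets/giraffe' in the full path and returns what follows the separator:
const internalPath = 'images/logo.png'

// With storage.prefix set to 'assets', the object Key becomes:
const key = path.join('assets', internalPath)
console.log(key) // 'assets/images/logo.png'
```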

dist/utils/tags.js

@@ -12,5 +12,5 @@ "use strict";

const mergeTags = (existingTags, newTags) => {
const newTagSet = Object.keys(newTags).map((Key) => ({
Key,
Value: newTags[Key],
const newTagSet = Object.keys(newTags).map((key) => ({
Key: key,
Value: newTags[key],
}));

@@ -17,0 +17,0 @@ const mergedTagSet = [...(existingTags ?? [])];
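The rename from `Key` to `key` only touches the callback parameter; the emitted TagSet shape is unchanged. With sample input:

```ts
// Record-to-TagSet conversion as in mergeTags above (sample tags).
const newTags: Record<string, string> = { team: 'web', env: 'dev' }
const newTagSet = Object.keys(newTags).map((key) => ({
  Key: key,
  Value: newTags[key],
}))
console.log(newTagSet)
// [ { Key: 'team', Value: 'web' }, { Key: 'env', Value: 'dev' } ]
```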

jest.config.js

@@ -10,5 +10,10 @@ /** @type {import('ts-jest').JestConfigWithTsJest} */

moduleFileExtensions: ['ts', 'js', 'json'],
setupFilesAfterEnv: ['<rootDir>/test/jest.setup.ts'],
coverageReporters: ['lcov', 'cobertura', 'html', 'text', 'text-summary'],
coveragePathIgnorePatterns: [
'/node_modules/',
'./test/setupEnvs.ts',
'./test/mocks',
],
maxConcurrency: 20,
testTimeout: 600000,
}
package.json

{
"name": "sync-cloud-storage",
"version": "1.0.0-rc.1",
"version": "1.0.0-rc.2",
"license": "MIT",

@@ -23,2 +23,17 @@ "main": "dist/index.js",

},
"scripts": {
"prepare": "is-ci || husky",
"build": "rm -rf ./dist && tsc -p tsconfig.build.json",
"build:watch": "tsc -p tsconfig.build.json -w",
"build:type-check": "tsc -p tsconfig.build.json --noEmit",
"test": "jest --verbose",
"test:watch": "jest --watch",
"test:coverage": "jest --no-cache --coverage",
"format": "prettier --write \"**/*.{ts,js,md,json,yaml,yml}\"",
"lint": "eslint .",
"lint:fix": "eslint . --fix",
"docs:build": "pnpm run build && rm -rf ./docs && ./node_modules/.bin/jsdoc -c jsdoc.json",
"docs:start": "pnpm run docs:build && ./node_modules/.bin/http-server -c-1 -o docs",
"preversion": "pnpm run build && pnpm run format"
},
"devDependencies": {

@@ -53,2 +68,3 @@ "@faker-js/faker": "8.4.0",

"mrmime": "2.0.0",
"winston": "^3.11.0",
"zod": "3.22.4"

@@ -61,22 +77,9 @@ },

"*.{ts,js}": [
"npm run lint:fix",
"npm run format"
"pnpm run lint",
"pnpm run format"
],
"*.{json,md,yaml}": [
"npm run format"
"pnpm run format"
]
},
"scripts": {
"build": "rm -rf ./dist && tsc -p tsconfig.build.json",
"build:watch": "tsc -p tsconfig.build.json -w",
"test": "jest --verbose --no-cache",
"test:watch": "jest --no-cache --watch",
"test:coverage": "jest --no-cache --coverage",
"format": "prettier --write \"**/*.{ts,js,md,json,yaml,yml}\"",
"lint": "eslint . --ext .ts --ext .js",
"lint:fix": "eslint . --fix --ext .ts --ext .js",
"docs:build": "pnpm run build && rm -rf ./docs && ./node_modules/.bin/jsdoc -c jsdoc.json",
"docs:start": "pnpm run docs:build && ./node_modules/.bin/http-server -c-1 -o docs",
"preversion": "pnpm run build && pnpm run format"
}
}
}

README.md

@@ -35,3 +35,3 @@ # Sync Cloud Storage

- delete
bucketPrefix: assets
prefix: assets
acl: public-read

@@ -41,3 +41,2 @@ metadata:

bar: foo
acl: public-read
```

@@ -44,0 +43,0 @@

src/errors.ts

@@ -1,5 +0,4 @@

import { NoSuchBucket } from '@aws-sdk/client-s3'
import { ZodError } from 'zod'
import { Storage } from './schemas/input'
import logger from './utils/logger'

@@ -12,16 +11,4 @@ export class InvalidConfigError extends Error {

console.error('InvalidConfigError', { issues })
logger.error('InvalidConfigError', { issues })
}
}
export const handleMethodError = (error: Error, storage: Storage) => {
let message: string
if (error instanceof NoSuchBucket) {
message = `${error.name}: ${storage.name}`
console.error(message)
} else {
message = `${error.name}/${error.message}: ${storage.name}`
}
console.error(message)
return message
}

src/index.ts

@@ -9,10 +9,5 @@ 'use strict'

import { sync, syncMetadata, syncTags } from './providers/s3/buckets'
import { getCredentials } from './providers/s3/credentials'
import { Custom, Storage, custom } from './schemas/input'
import {
IServerless,
MethodReturn,
TagsMethodPromiseResult,
TagsSyncResults,
} from './types'
import { IServerless, MethodReturn, TagsSyncResults } from './types'
import logger from './utils/logger'

@@ -27,2 +22,3 @@ /**

hooks: ServerlessPlugin.Hooks
commands: ServerlessPlugin.Commands
servicePath: string

@@ -46,21 +42,7 @@ config: Custom

) {
if (!serverless) {
throw new Error('Serverless instance is required')
}
// Typing with *as* enables tests to use a DI version of the instance
this.serverless = serverless as unknown as Serverless
this.servicePath = this.serverless.service.serverless.config.servicePath
if (!options) {
throw new Error("Options can't be undefined")
}
this.options = options
if (!logging) {
throw new Error("Logging can't be undefined")
}
this.logging = logging
this.servicePath = this.serverless.service.serverless.config.servicePath

@@ -83,2 +65,3 @@ const config = this.serverless.service.custom

)
this.commands = this.setCommands()
this.hooks = this.setHooks()

@@ -97,4 +80,2 @@ }

getS3Client(): S3Client {
const provider = this.serverless.getProvider('aws')
const credentials = getCredentials(provider)
const endpoint = this.config.syncCloudStorage.offline

@@ -105,3 +86,3 @@ ? this.config.syncCloudStorage.endpoint ?? process.env.AWS_ENDPOINT_URL

return new S3Client({
...credentials,
// ...credentials,
endpoint,

@@ -112,2 +93,19 @@ })

/**
* Set commands.
* @returns {ServerlessPlugin.Commands} Commands
* @memberof SyncCloudStorage
*
* @example
* const commands = this.setCommands()
*/
setCommands(): ServerlessPlugin.Commands {
return {
scs: {
usage: 'Sync Cloud Storage',
lifecycleEvents: ['storages', 'tags'],
},
}
}
/**
* Set hooks.

@@ -125,6 +123,6 @@ * @returns {ServerlessPlugin.Hooks} Hooks

return {
'scs:storages': syncStoragesHook,
'scs:tags': syncTagsHook,
'before:offline:start:init': syncStoragesHook,
'scs:buckets': syncStoragesHook,
'scs:tags': syncTagsHook,
'before:deploy:deploy': () => syncStoragesHook(),
'before:deploy:deploy': syncStoragesHook,
}

@@ -178,21 +176,18 @@ }

* @private
* @returns {TagsMethodPromiseResult}
* @memberof SyncCloudStorage
* @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3
*
* @returns {Promise<TagsSyncResults>}
* @example
* const result = await this.tags()
*/
async tags(): TagsMethodPromiseResult {
const isPluginEnable = this.disableCheck().result
async tags(): Promise<TagsSyncResults> {
const isPluginDisable = this.disableCheck().result
if (!isPluginEnable) {
return []
if (isPluginDisable) {
return [{ error: 'Plugin is disabled' }]
}
const syncedStorages = (await Promise.allSettled(
return (await Promise.allSettled(
this._storages.map((bucket) => syncTags(this.client, bucket))
)) as TagsSyncResults
return syncedStorages
}
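With this change a disabled plugin is reported in-band instead of as an empty array. A hedged consumption sketch; the structural type stands in for a constructed SyncCloudStorage instance, which this diff does not show:

```ts
// `scs` stands in for a constructed SyncCloudStorage instance.
const logTagResults = async (scs: {
  tags: () => Promise<Array<{ error?: string | Error }>>
}) => {
  const results = await scs.tags()
  // Disabled plugin: results is [{ error: 'Plugin is disabled' }].
  // Otherwise: one settled syncTags result per enabled storage.
  for (const result of results) {
    if (result.error) console.error('Tag sync failed', result.error)
  }
}
```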

@@ -210,6 +205,2 @@

async onExit(): Promise<void> {
if (this.taskProcess) {
this.taskProcess.remove()
}
if (this.client) {

@@ -222,3 +213,3 @@ this.client.destroy()

if (this.config.syncCloudStorage.disabled) {
console.warn('SyncCloudStorage is disabled!')
logger.warning('SyncCloudStorage is disabled!')
return { result: true }

@@ -225,0 +216,0 @@ }

src/providers/local/objects.ts

@@ -7,2 +7,3 @@ import { createHash } from 'crypto'

import { LocalFile } from '../../types'
import { extractAfterSubdirectory } from '../../utils/objects'

@@ -31,8 +32,13 @@ /**

} else {
const internalPath = extractAfterSubdirectory(fullPath, storage.localPath)
let key = internalPath
if (storage.prefix) {
key = path.join(storage.prefix, internalPath)
}
const file: LocalFile = {
LocalPath: fullPath,
Key: storage.bucketPrefix
? path.join(`${storage.bucketPrefix}/${item}`)
: item,
LastModified: stat.mtime,
Key: key,
LastModified: new Date(stat.mtime),
Size: stat.size,

@@ -39,0 +45,0 @@ ETag: await getFileETag(fs.createReadStream(fullPath)),

src/providers/s3/buckets.ts

@@ -9,2 +9,3 @@ import path from 'path'

GetBucketTaggingCommand,
GetBucketTaggingOutput,
HeadObjectCommand,

@@ -16,8 +17,5 @@ ListBucketsCommand,

S3Client,
_Object,
} from '@aws-sdk/client-s3'
import { lookup } from 'mrmime'
import { deleteObjects, listObjects, uploadObjects } from './objects'
import { handleMethodError } from '../../errors'
import { Storage } from '../../schemas/input'

@@ -28,4 +26,6 @@ import {

TagsSyncResult,
UploadedObject,
} from '../../types'
import { getChecksum } from '../../utils/objects'
import logger from '../../utils/logger'
import { getChecksum, getContentType } from '../../utils/objects'
import { mergeTags } from '../../utils/tags'

@@ -48,3 +48,3 @@ import { getLocalFiles } from '../local/objects'

/**
* Syncs storage with upload and delete actions by comparing local file and storage's object checksums.
* Syncs storage with upload and delete actions by comparing local file and storage's object `${Key}-${ETag}`.
* @memberof S3

@@ -65,6 +65,6 @@ * @param {S3Client} client

if (!storageExist) {
throw handleMethodError(new Error(`Storage doesn't exist!`), storage)
throw new Error('StorageNotFound')
}
console.log('Syncing storage', { storage: storage.name })
logger.info('Syncing storage', { storage: storage.name })

@@ -93,17 +93,15 @@ const files = await getLocalFiles(

let uploaded: _Object[] = []
let uploaded: UploadedObject[] = []
let deleted: DeletedObject[] = []
if (filesToUpload.length > 0 && storage.actions.includes('upload')) {
if (filesToUpload.length >= 1 && storage.actions.includes('upload')) {
uploaded = await uploadObjects(client, storage, files, filesToUpload)
}
if (filesToDelete.length > 0 && storage.actions.includes('delete')) {
if (storage.deleteRemoved) {
const objectsToDelete = objects.filter((object) =>
filesToDelete.includes(getChecksum(object.Key, object.ETag))
)
if (filesToDelete.length >= 1 && storage.actions.includes('delete')) {
const objectsToDelete = objects.filter((object) =>
filesToDelete.includes(getChecksum(object.Key, object.ETag))
)
deleted = await deleteObjects(client, storage, objectsToDelete)
}
deleted = await deleteObjects(client, storage, objectsToDelete)
}

@@ -137,3 +135,2 @@

): Promise<SyncMetadataReturn> => {
// Get list of existing objects
const existingObjects = await listObjects(client, storage)

@@ -143,3 +140,3 @@ const syncedMetadata = []

for (const file of existingObjects) {
console.log("Syncing storage's metadata", {
logger.info("Syncing storage's metadata", {
storage: storage.name,

@@ -149,41 +146,34 @@ Key: file.Key,

const detectedContentType =
lookup(file.Key as string) ?? storage.defaultContentType
const copyCommand = new CopyObjectCommand({
Bucket: storage.name,
Key: file.Key,
CopySource: encodeURIComponent(`${storage.name}/${file.Key}`),
ContentType: getContentType(file.Key),
MetadataDirective: MetadataDirective.REPLACE,
Metadata: storage.metadata,
ACL: storage.acl,
})
try {
const copyCommand = new CopyObjectCommand({
Bucket: storage.name,
Key: file.Key,
CopySource: encodeURIComponent(`${storage.name}/${file.Key}`),
ContentType: detectedContentType,
MetadataDirective: MetadataDirective.REPLACE,
Metadata: storage.metadata,
})
const result = await client.send(copyCommand)
const result = await client.send(copyCommand)
logger.info('Metadata synced', {
storage: storage.name,
Key: file.Key,
result,
})
console.log('Metadata synced', {
storage: storage.name,
Key: file.Key,
result,
const headCommand = await client.send(
new HeadObjectCommand({
Bucket: storage.name,
Key: storage.prefix
? path.join(storage.prefix, `${file.Key}`)
: file.Key,
})
)
// Get Object metadata
const headCommand = await client.send(
new HeadObjectCommand({
Bucket: storage.name,
Key: storage.bucketPrefix
? path.join(storage.bucketPrefix, `${file.Key}`)
: file.Key,
})
)
syncedMetadata.push({
Key: file.Key,
Bucket: storage.name,
Metadata: headCommand.Metadata,
})
} catch (error) {
handleMethodError(error as Error, storage)
}
syncedMetadata.push({
Key: file.Key,
Bucket: storage.name,
Metadata: headCommand.Metadata,
})
}

@@ -205,23 +195,42 @@

): Promise<TagsSyncResult> => {
console.log("Syncing storage's tags", { storage: storage.name })
logger.info("Syncing storage's tags", { storage: storage.name })
const { name } = storage
const storageExist = await storageExists(client, name)
if (!storageExist) {
return { error: new Error('StorageNotFound') }
}
let existingTagSet: GetBucketTaggingOutput = { TagSet: [] }
try {
const existingTagSetCommand = new GetBucketTaggingCommand({
Bucket: storage.name,
})
const existingTagSet = await client.send(existingTagSetCommand)
const mergedTagSet = mergeTags(existingTagSet.TagSet, storage.tags ?? {})
const Tagging = {
TagSet: mergedTagSet,
try {
const existingTagSetCommand = new GetBucketTaggingCommand({
Bucket: storage.name,
})
existingTagSet = await client.send(existingTagSetCommand)
} catch (error) {
if ((error as Error).name === 'NoSuchTagSet') {
existingTagSet = { TagSet: [] }
} else {
logger.error('Failed to get existing tags', {
storage: storage.name,
error: JSON.stringify(error),
})
}
}
const command = new PutBucketTaggingCommand({
Bucket: storage.name,
Tagging: Tagging,
})
const mergedTagSet = mergeTags(existingTagSet.TagSet, storage.tags ?? {})
await client.send(command)
await client.send(
new PutBucketTaggingCommand({
Bucket: storage.name,
Tagging: {
TagSet: mergedTagSet,
},
})
)
console.log("Synced storage's tags", {
logger.info("Synced storage's tags", {
storage: storage.name,

@@ -235,3 +244,3 @@ existingTagSet: existingTagSet.TagSet,

} catch (error) {
return { storage, error: handleMethodError(error as Error, storage) }
return { storage, error: JSON.stringify(error) }
}

@@ -244,28 +253,24 @@ }

): Promise<Storage> => {
console.log('Creating storage', { storage: storage.name })
logger.info('Creating storage', { storage: storage.name })
try {
const createCommand = new CreateBucketCommand({
Bucket: storage.name,
ObjectLockEnabledForBucket: true,
ObjectOwnership: 'BucketOwnerPreferred',
})
const createCommand = new CreateBucketCommand({
Bucket: storage.name,
ObjectLockEnabledForBucket: true,
ObjectOwnership: 'BucketOwnerPreferred',
})
await client.send(createCommand)
await client.send(createCommand)
console.log('Storage created', { storage: storage.name })
logger.info('Storage created', { storage: storage.name })
const aclCommand = new PutBucketAclCommand({
Bucket: storage.name,
ACL: 'private',
})
const aclCommand = new PutBucketAclCommand({
Bucket: storage.name,
ACL: 'private',
})
await client.send(aclCommand)
await client.send(aclCommand)
console.log('Storage ACL enabled', { storage: storage.name })
logger.info('Storage ACL enabled', { storage: storage.name })
return storage
} catch (error) {
throw handleMethodError(error as Error, storage)
}
return storage
}

@@ -277,21 +282,16 @@

): Promise<DeletedObject[]> => {
console.log('Deleting storage', { storage: storage.name })
logger.info('Deleting storage', { storage: storage.name })
try {
const objects = await listObjects(client, storage)
const objects = await listObjects(client, storage)
const deletedObjects = await deleteObjects(client, storage, objects)
const deletedObjects = await deleteObjects(client, storage, objects)
const deleteCommand = new DeleteBucketCommand({
await client.send(
new DeleteBucketCommand({
Bucket: storage.name,
})
)
await client.send(deleteCommand)
logger.info('Storage deleted', { storage: storage.name })
console.log('Storage deleted', { storage: storage.name })
return deletedObjects
} catch (error) {
throw handleMethodError(error as Error, storage)
}
return deletedObjects
}

src/providers/s3/credentials.ts

@@ -17,17 +17,25 @@ import { Credentials } from 'serverless/plugins/aws/provider/awsProvider'

): Credentials => {
const { cachedCredentials } = provider
const credentials = cachedCredentials ?? {}
const { cachedCredentials, getRegion, getCredentials } = provider
const credentials = cachedCredentials ?? {
accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? undefined,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? undefined,
}
const { accessKeyId, secretAccessKey } = credentials
const region = getRegion()
if (accessKeyId && secretAccessKey) {
return {
region: provider.getRegion(),
region,
credentials,
}
} else {
}
if (getCredentials() !== undefined) {
return {
region: provider.getRegion() || provider.getCredentials().region,
credentials: provider.getCredentials().credentials,
region: getCredentials().region || region,
credentials: getCredentials().credentials,
}
} else {
throw new Error('AWS credentials not found!')
}
}

src/providers/s3/objects.ts

@@ -12,8 +12,7 @@ import fs from 'fs'

import { Upload } from '@aws-sdk/lib-storage'
import { lookup } from 'mrmime'
import { handleMethodError } from '../../errors'
import { Storage } from '../../schemas/input'
import { LocalFile } from '../../types'
import { getChecksum } from '../../utils/objects'
import logger from '../../utils/logger'
import { getChecksum, getContentType } from '../../utils/objects'

@@ -31,22 +30,18 @@ /**

): Promise<_Object[]> => {
console.log('List objects in bucket', {
logger.info('List objects in bucket', {
name: storage.name,
})
try {
const command = new ListObjectsV2Command({
Bucket: storage.name,
})
const command = new ListObjectsV2Command({
Bucket: storage.name,
})
const { Contents = [] } = await client.send(command)
const { Contents: contents = [] } = await client.send(command)
console.log('Storage objects', {
name: storage.name,
objects: Contents,
})
logger.info('Storage objects', {
storage: storage.name,
storageContents: contents,
})
return Contents
} catch (error) {
throw handleMethodError(error as Error, storage)
}
return contents
}

@@ -73,43 +68,35 @@

(file) => getChecksum(file.Key, file.ETag) === checksum
)
) as LocalFile
if (!fileToUpload) {
continue
}
console.log('Upload file to bucket', {
name: storage.name,
Key: fileToUpload.Key,
logger.info('Upload file to bucket', {
storage: storage.name,
key: fileToUpload.Key,
checksum,
})
try {
const command = new Upload({
client,
params: {
Bucket: storage.name,
Key: fileToUpload.Key,
Body: fs.createReadStream(fileToUpload.LocalPath),
ContentType: lookup(fileToUpload.LocalPath) ?? undefined,
},
})
console.log('Uploaded file to bucket', {
name: storage.name,
const command = new Upload({
client,
params: {
Bucket: storage.name,
Key: fileToUpload.Key,
command,
})
Body: fs.createReadStream(fileToUpload.LocalPath),
ContentType: getContentType(fileToUpload.Key),
ACL: storage.acl,
},
})
const result = await command.done()
logger.info('Uploaded file to bucket', {
storage: storage.name,
Key: fileToUpload.Key,
})
uploadedObjects.push({
Key: result.Key,
ETag: result.ETag,
VersionId: result.VersionId,
Bucket: result.Bucket,
Location: result.Location,
})
} catch (error) {
handleMethodError(error as Error, storage)
}
const result = await command.done()
uploadedObjects.push({
key: result.Key,
etag: result.ETag,
versionId: result.VersionId,
storage: result.Bucket,
location: result.Location,
})
}

@@ -134,35 +121,37 @@

const keys = [...objects].map((object) => object.Key as string)
console.log('Delete following objects from bucket', {
bucket: storage.name,
logger.info('Delete following objects from bucket', {
storage: storage.name,
keys,
})
try {
const listVersionsParams = {
const versions = await client.send(
new ListObjectVersionsCommand({
Bucket: storage.name,
}
Prefix: storage.prefix ? storage.prefix : undefined,
})
)
const versions = await client.send(
new ListObjectVersionsCommand(listVersionsParams)
)
const deleteMarkers = (versions.DeleteMarkers ?? []).map((marker) => ({
Key: marker.Key,
VersionId: marker.VersionId,
}))
const deleteMarkers = (versions.DeleteMarkers ?? []).map((marker) => ({
Key: marker.Key,
VersionId: marker.VersionId,
}))
const versionsToDelete = (versions.Versions ?? []).map((version) => ({
Key: version.Key,
VersionId: version.VersionId,
}))
const versionsToDelete = (versions.Versions ?? []).map((version) => ({
Key: version.Key,
VersionId: version.VersionId,
}))
const objectsToDelete = [
...objects.map((object) => ({
Key: object.Key,
VersionId: object.ETag,
})),
...deleteMarkers,
...versionsToDelete,
]
const objectsToDelete = [
...objects.map((object) => ({
Key: object.Key,
VersionId: object.ETag,
})),
...deleteMarkers,
...versionsToDelete,
]
if (objectsToDelete.length > 0) {
const deleteParams = {
if (objectsToDelete.length > 0) {
const { Deleted: deleted = [] } = await client.send(
new DeleteObjectsCommand({
Bucket: storage.name,

@@ -173,19 +162,15 @@ Delete: {

},
}
})
)
const { Deleted = [] } = await client.send(
new DeleteObjectsCommand(deleteParams)
)
console.log(`Permanently deleted all versions of object.`)
logger.info(`Permanently deleted all versions of object.`, {
storage: storage.name,
})
return Deleted
} else {
console.log(`No objects to delete.`)
return deleted
} else {
logger.info(`No objects to delete.`, { storage: storage.name })
return []
}
} catch (error) {
handleMethodError(error as Error, storage)
return []
}
}

src/schemas/input.ts

@@ -5,8 +5,9 @@ import { ObjectCannedACL } from '@aws-sdk/client-s3'

// Cast to tuple type
type TupleType = [ObjectCannedACLType, ...ObjectCannedACLType[]]
export const ObjectCannedACLs = [
...Object.values(ObjectCannedACL).map((acl) => acl),
] as TupleType
type ObjectCannedACLsTuple = [ObjectCannedACLType, ...ObjectCannedACLType[]]
// Cast to ObjectCannedACLsTuple
const objectCannedACLs = Object.values(ObjectCannedACL).map(
(acl) => acl
) as ObjectCannedACLsTuple
const tags = z.record(z.string(), z.string())

@@ -18,7 +19,5 @@

actions: z.array(z.string()).default(['upload', 'delete']),
bucketPrefix: z.string().default(''),
prefix: z.string().default(''),
enabled: z.boolean().default(true),
deleteRemoved: z.boolean().default(true),
acl: z.enum(ObjectCannedACLs).default(ObjectCannedACL.authenticated_read),
defaultContentType: z.string().optional(),
acl: z.enum(objectCannedACLs).optional(),
metadata: z.record(z.string(), z.string()).optional(),

@@ -47,2 +46,2 @@ tags: z.record(z.string(), z.string()).default({}),

export type { Custom, Storage, Tags }
export { custom, tags, storage, storages }
export { custom, tags, storage, storages, objectCannedACLs }
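Net effect of the schema changes: `bucketPrefix` becomes `prefix`, `deleteRemoved` is dropped, and `acl` is now optional with no default (rc.1 defaulted it to `authenticated-read`). A reduced zod sketch of the new defaults, using only fields visible in this diff (the `name` field is assumed required, as every call site reads `storage.name`):

```ts
import { z } from 'zod'

// Reduced sketch, not the full schema.
const storage = z.object({
  name: z.string(),
  actions: z.array(z.string()).default(['upload', 'delete']),
  prefix: z.string().default(''),
  enabled: z.boolean().default(true),
  acl: z.enum(['private', 'public-read']).optional(), // subset of ObjectCannedACL
})

console.log(storage.parse({ name: 'my-bucket' }))
// { name: 'my-bucket', actions: [ 'upload', 'delete' ], prefix: '', enabled: true }
// acl stays undefined: rc.2 no longer injects a default ACL.
```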

src/types.ts

@@ -39,2 +39,10 @@ import { DeletedObject, Tag, _Object } from '@aws-sdk/client-s3'

export type UploadedObject = {
key: string | undefined
etag: string | undefined
versionId: string | undefined
storage: string | undefined
location: string | undefined
}
export type StoragesSyncResult = {

@@ -48,3 +56,3 @@ storage: Storage

filesToDelete: string[]
uploaded: _Object[]
uploaded: UploadedObject[]
deleted: DeletedObject[]

@@ -74,3 +82,3 @@ error?: string | Error

export type TagsSyncResults = Array<MethodReturn<Tag[]>>
export type TagsMethodPromiseResult = Promise<TagsSyncResults>
export type TagsMethodPromiseResult = PromiseFulfilledResult<TagsSyncResult>

@@ -77,0 +85,0 @@ export type SyncMetadataReturn = Array<

src/utils/objects.ts

@@ -0,1 +1,5 @@

import path from 'path'
import { lookup } from 'mrmime'
/**

@@ -9,3 +13,40 @@ * Get a checksum for an object

export const getChecksum = (key: string = '', etag: string = ''): string => {
return `${key}-${etag}`
return `${key}-${etag.replace(/"/g, '')}`
}
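The only behavioural change here is quote stripping: S3 list responses wrap `ETag` values in double quotes, while an MD5 digest computed from a local stream is unquoted, which made otherwise-identical checksums differ in rc.1. For example:

```ts
const getChecksum = (key = '', etag = ''): string =>
  `${key}-${etag.replace(/"/g, '')}`

getChecksum('a.txt', '"d41d8cd98f00b204e9800998ecf8427e"') // quoted, from ListObjectsV2
// -> 'a.txt-d41d8cd98f00b204e9800998ecf8427e'
getChecksum('a.txt', 'd41d8cd98f00b204e9800998ecf8427e')   // unquoted, computed locally
// -> the same string, so local and remote objects now compare equal
```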
/**
* Returns the MIME type of a file based on its extension.
* If the file extension is not recognized, it returns a default MIME type.
*
* @param {string} [key] - The name of the file including its extension.
* @returns {string} The MIME type of the file.
*/
export const getContentType = (key?: string): string => {
const defaultMimeType = 'application/octet-stream'
return key ? lookup(key) ?? defaultMimeType : defaultMimeType
}
export const extractAfterSubdirectory = (
fullPath: string,
subdirectory: string
) => {
// Normalize both paths to ensure consistent separators
const normalizedFullPath = path.normalize(fullPath)
const normalizedLocalPath = path.normalize(subdirectory)
// Find the start index of the subdirectory in the full path
const startIndex = normalizedFullPath.indexOf(normalizedLocalPath)
// Assuming subdirectory is always part of fullPath, calculate the end index
const endIndex = startIndex + normalizedLocalPath.length
// Extract the part of the full path after the subdirectory
// Check if endIndex is at the path's end or adjust for separator
const afterSubdirectory =
endIndex >= normalizedFullPath.length
? ''
: normalizedFullPath.substring(endIndex + 1)
// Return the extracted path part, ensuring no leading separators
return afterSubdirectory ? path.normalize(afterSubdirectory) : ''
}

src/utils/tags.ts

@@ -16,5 +16,5 @@ import { Tag } from '@aws-sdk/client-s3'

): Tag[] => {
const newTagSet = Object.keys(newTags).map((Key) => ({
Key,
Value: newTags[Key],
const newTagSet = Object.keys(newTags).map((key) => ({
Key: key,
Value: newTags[key],
}))

@@ -21,0 +21,0 @@
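
The Object.keys mapping above turns a plain tag record into the AWS Tag[] shape; for example (a sketch, assuming mergeTags unions the existing set with the new record, as the empty-array call in the tests suggests):

// { env: 'prod', team: 'web' }
//   -> [{ Key: 'env', Value: 'prod' }, { Key: 'team', Value: 'web' }]
mergeTags([], { env: 'prod', team: 'web' })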

@@ -1,2 +0,2 @@

import { S3Client } from '@aws-sdk/client-s3'
import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3'
import { mock } from 'jest-mock-extended'

@@ -8,11 +8,8 @@ import { Options } from 'serverless'

import {
createValidAWSInputFixture,
createValidAWSInputFixtureWithMetadata,
createValidAWSInputFixtureWithTags,
createValidDisabledInputFixture,
createValidOfflineInputFixture,
createValidOfflineInputFixtureWithMetadata,
createValidOfflineInputFixtureWithTags,
createValidInputFixture,
createValidInputFixtureWithACLBucketOwner,
createValidInputFixtureWithMetadata,
createValidInputFixtureWithTags,
sampleStorage,
sampleStorageName,
} from './schemas/input.fixture'

@@ -23,2 +20,6 @@ import { setupEnvs } from './setupEnvs'

import { createStorage, deleteStorage } from '../src/providers/s3/buckets'
import * as objects from '../src/providers/s3/objects'
import { Storage } from '../src/schemas/input'
import { LocalFile, TagsMethodPromiseResult } from '../src/types'
import logger from '../src/utils/logger'
import { mergeTags } from '../src/utils/tags'

@@ -29,4 +30,18 @@

const setupStorage = async (client: S3Client, storage: Storage) => {
try {
await deleteStorage(client, storage)
} catch (error) {
logger.error('Error deleting storage:', error)
}
try {
await createStorage(client, storage)
} catch (error) {
logger.error('Error creating storage:', error)
}
}
describe('SyncCloudStorage', () => {
beforeEach(async () => {
beforeAll(async () => {
await setupEnvs()

@@ -37,7 +52,7 @@ })

it('should properly configure S3 client for offline mode', async () => {
const offlineInputCustom = createValidOfflineInputFixture(
const inputCustom = createValidInputFixture(
'./assets/giraffe',
sampleStorageName
sampleStorage.name
)
const mockServerless = getServerlessMock(offlineInputCustom, __dirname)
const mockServerless = getServerlessMock(inputCustom, __dirname)
const syncCloudStorage = new SyncCloudStorage(

@@ -52,4 +67,4 @@ mockServerless,

if (
offlineInputCustom.syncCloudStorage.offline === true &&
offlineInputCustom.syncCloudStorage.endpoint !== undefined
inputCustom.syncCloudStorage.offline === true &&
inputCustom.syncCloudStorage.endpoint !== undefined
) {

@@ -60,3 +75,3 @@ const configuredEndpoint =

expect(
offlineInputCustom.syncCloudStorage.endpoint.includes(
inputCustom.syncCloudStorage.endpoint.includes(
`${configuredEndpoint?.hostname}`

@@ -66,3 +81,3 @@ )

expect(
offlineInputCustom.syncCloudStorage.endpoint.includes(
inputCustom.syncCloudStorage.endpoint.includes(
`${configuredEndpoint?.port}`

@@ -82,15 +97,13 @@ )

)
const bucketsSpy = jest.spyOn(syncCloudStorage, 'storages')
const syncStoragesSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
expect(bucketsSpy).toHaveBeenCalledTimes(1)
expect(response).toMatchObject(
expect.objectContaining({ result: expect.arrayContaining([]) })
)
expect(syncStoragesSpy).toHaveBeenCalledTimes(1)
expect(response).toEqual({ result: [] })
})
it("should not sync when there's no bucket", async () => {
const inputCustom = createValidAWSInputFixture(
const inputCustom = createValidInputFixture(
'./assets/giraffe',
sampleStorageName
sampleStorage.name
)

@@ -101,17 +114,7 @@ inputCustom.syncCloudStorage.storages = []

try {
const syncCloudStorage = new SyncCloudStorage(
mockServerless,
optionsMock,
loggingMock
)
await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const bucketsSpy = jest.spyOn(syncCloudStorage, 'storages')
await syncCloudStorage.storages()
expect(bucketsSpy).toHaveBeenCalledTimes(1)
await deleteStorage(syncCloudStorage.getS3Client(), sampleStorage)
new SyncCloudStorage(mockServerless, optionsMock, loggingMock)
} catch (error) {
expect(error).toBeInstanceOf(InvalidConfigError)
const typedError = error as InvalidConfigError
expect(typedError).toBeInstanceOf(InvalidConfigError)
expect(typedError.name).toEqual('InvalidConfigError')
}

@@ -121,5 +124,5 @@ })

describe('Storage Related Tests (Offline)', () => {
describe('Storage Related Tests', () => {
it("should throw an error when the bucket doesn't exist", async () => {
const inputCustom = createValidOfflineInputFixture(
const inputCustom = createValidInputFixture(
'./assets/giraffe',

@@ -135,19 +138,22 @@ 'non-existent-bucket'

await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const syncStoragesSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
try {
const bucketsSpy = jest.spyOn(syncCloudStorage, 'storages')
await syncCloudStorage.storages()
expect(bucketsSpy).toHaveBeenCalledTimes(1)
} catch (error) {
expect(error).toBe(
`Error/Storage doesn't exist!: ${inputCustom.syncCloudStorage.storages[0].name}`
)
const expectedResponse = {
result: [
{
reason: Error('StorageNotFound'),
status: 'rejected',
},
],
}
expect(syncStoragesSpy).toHaveBeenCalledTimes(1)
expect(response).toEqual(expectedResponse)
})
it('should sync when there are buckets', async () => {
const inputCustom = createValidOfflineInputFixture(
it('should sync when there is a new bucket and acl set to bucket owner', async () => {
const inputCustom = createValidInputFixtureWithACLBucketOwner(
'./assets/giraffe',
sampleStorageName
sampleStorage.name
)

@@ -161,30 +167,63 @@ const mockServerless = getServerlessMock(inputCustom, __dirname)

await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
await setupStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
const bucketsSpy = jest.spyOn(syncCloudStorage, 'storages')
const syncStoragesSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
expect(bucketsSpy).toHaveBeenCalledTimes(1)
expect(response).toMatchObject({
result: expect.arrayContaining([
expect.objectContaining({
status: expect.stringContaining('fulfilled'),
value: expect.objectContaining({
uploaded: expect.arrayContaining([
expect.objectContaining({
Bucket: expect.stringContaining(sampleStorageName),
}),
]),
}),
}),
]),
})
await deleteStorage(syncCloudStorage.getS3Client(), sampleStorage)
expect(syncStoragesSpy).toHaveBeenCalledTimes(1)
const giraffeREADME = 'README.md'
const expectedResponse = {
result: [
{
status: 'fulfilled',
value: {
files: [
{
Key: giraffeREADME,
LocalPath: expect.any(String),
ETag: expect.any(String),
LastModified: expect.any(Date),
Size: expect.any(Number),
},
],
filesToDelete: [],
filesToUpload: [expect.any(String)],
localFilesChecksum: [expect.any(String)],
objects: [],
storage: inputCustom.syncCloudStorage.storages[0],
storageObjectsChecksum: [],
uploaded: [
{
storage: inputCustom.syncCloudStorage.storages[0].name,
etag: expect.any(String),
key: giraffeREADME,
location: expect.any(String),
versionId: expect.any(String),
},
],
deleted: [],
},
},
],
}
expect(response).toEqual(expectedResponse)
await deleteStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
})
it('should sync when the bucketPrefix', async () => {
const bucketPrefix = 'animals'
const inputCustom = createValidOfflineInputFixture(
it('should sync when a prefix is set', async () => {
const prefix = 'animals'
const inputCustom = createValidInputFixture(
'./assets/giraffe',
sampleStorageName,
bucketPrefix
sampleStorage.name,
prefix
)

@@ -197,23 +236,54 @@ const mockServerless = getServerlessMock(inputCustom, __dirname)

)
await setupStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const bucketsSpy = jest.spyOn(syncCloudStorage, 'storages')
const syncStoragesSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
expect(bucketsSpy).toHaveBeenCalledTimes(1)
expect(response).toMatchObject({
result: expect.arrayContaining([
expect.objectContaining({
status: expect.stringContaining('fulfilled'),
value: expect.objectContaining({
uploaded: expect.arrayContaining([
expect.objectContaining({
Key: expect.stringContaining(bucketPrefix),
}),
]),
}),
}),
]),
})
expect(syncStoragesSpy).toHaveBeenCalledTimes(1)
const giraffeREADME = 'README.md'
const expectedResponse = {
result: [
{
status: 'fulfilled',
value: {
files: [
{
Key: expect.stringMatching(
new RegExp(`${prefix}/${giraffeREADME}`)
),
LocalPath: expect.any(String),
ETag: expect.any(String),
LastModified: expect.any(Date),
Size: expect.any(Number),
},
],
filesToDelete: [],
filesToUpload: [expect.any(String)],
localFilesChecksum: [expect.any(String)],
objects: [],
storage: inputCustom.syncCloudStorage.storages[0],
storageObjectsChecksum: [],
uploaded: [
{
storage: inputCustom.syncCloudStorage.storages[0].name,
etag: expect.any(String),
key: expect.stringMatching(
new RegExp(`${prefix}/${giraffeREADME}`)
),
location: expect.any(String),
versionId: expect.any(String),
},
],
deleted: [],
},
},
],
}
expect(response).toEqual(expectedResponse)
for (const syncedStorage of response.result) {

@@ -232,5 +302,5 @@ if (syncedStorage.status === 'rejected') {

it('should sync tags', async () => {
const inputCustom = createValidOfflineInputFixtureWithTags(
const inputCustom = createValidInputFixtureWithTags(
'./assets/giraffe',
sampleStorageName
sampleStorage.name
)

@@ -243,5 +313,7 @@ const mockServerless = getServerlessMock(inputCustom, __dirname)

)
await setupStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const expectedTags = mergeTags(

@@ -253,10 +325,20 @@ [],

const newTags = await syncCloudStorage.tags()
expect(tagsSpy).toHaveBeenCalledTimes(1)
for (const { result } of newTags) {
expect(result).toBe(expectedTags)
expect(result).toBeGreaterThanOrEqual(1)
for (const newTag of newTags) {
const { status, value } = newTag as TagsMethodPromiseResult
const { result, error, storage } = value
expect(status).toBe('fulfilled')
expect(error).toBe(undefined)
expect(storage).toEqual(inputCustom.syncCloudStorage.storages[0])
expect(result).toEqual(expectedTags)
expect(result?.length).toBeGreaterThanOrEqual(1)
expect(
await deleteStorage(syncCloudStorage.getS3Client(), sampleStorage)
await deleteStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
).not.toBe(undefined)

@@ -266,6 +348,22 @@ }

it('should sync metadata', async () => {
const inputCustom = createValidOfflineInputFixtureWithMetadata(
it('should not sync tags when plugin is disabled', async () => {
const inputCustom = createValidDisabledInputFixture()
const mockServerless = getServerlessMock(inputCustom, __dirname)
const syncCloudStorage = new SyncCloudStorage(
mockServerless,
optionsMock,
loggingMock
)
const tagsSpy = jest.spyOn(syncCloudStorage, 'tags')
const newTags = await syncCloudStorage.tags()
expect(tagsSpy).toHaveBeenCalledTimes(1)
expect(newTags).toEqual([{ error: 'Plugin is disabled' }])
})
it("should not sync tags when storage doesn't exist", async () => {
const inputCustom = createValidInputFixture(
'./assets/giraffe',
sampleStorageName
'non-existent-bucket'
)

@@ -279,26 +377,22 @@ const mockServerless = getServerlessMock(inputCustom, __dirname)

await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const tagsSpy = jest.spyOn(syncCloudStorage, 'tags')
const response = await syncCloudStorage.tags()
const metadataSpy = jest.spyOn(syncCloudStorage, 'metadata')
const syncedStorages = await syncCloudStorage.metadata()
const expectedResponse = [
{
status: 'fulfilled',
value: {
error: Error('StorageNotFound'),
},
},
]
expect(metadataSpy).toHaveBeenCalledTimes(1)
expect(syncedStorages).toMatchObject(
expect.arrayContaining([
expect.objectContaining({
status: expect.stringContaining('fulfilled'),
value: expect.arrayContaining([]),
}),
])
)
await deleteStorage(syncCloudStorage.getS3Client(), sampleStorage)
expect(tagsSpy).toHaveBeenCalledTimes(1)
expect(response).toEqual(expectedResponse)
})
})
describe.skip('Storage Related Tests (Online)', () => {
it("should throw an error when the bucket doesn't exist", async () => {
const inputCustom = createValidAWSInputFixture(
it('should sync metadata', async () => {
const inputCustom = createValidInputFixtureWithMetadata(
'./assets/giraffe',
'non-existent-bucket'
sampleStorage.name
)

@@ -311,22 +405,45 @@ const mockServerless = getServerlessMock(inputCustom, __dirname)

)
await setupStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const metadataSpy = jest.spyOn(syncCloudStorage, 'metadata')
const syncedStorages = await syncCloudStorage.metadata()
try {
const bucketsSpy = jest.spyOn(syncCloudStorage, 'storages')
await syncCloudStorage.storages()
expect(bucketsSpy).toHaveBeenCalledTimes(1)
} catch (error) {
expect(error).toBe(
`Error/Storage doesn't exist!: ${inputCustom.syncCloudStorage.storages[0].name}`
)
}
const existingObjects = await objects.listObjects(
syncCloudStorage.getS3Client(),
sampleStorage
)
const expectedResponse = expect.arrayContaining([
expect.objectContaining({
status: 'fulfilled',
value: existingObjects.map(({ Key: key }) => {
return {
Key: key,
Metadata: inputCustom.syncCloudStorage.storages[0].metadata,
Bucket: inputCustom.syncCloudStorage.storages[0].name,
}
}),
}),
])
expect(metadataSpy).toHaveBeenCalledTimes(1)
expect(syncedStorages).toEqual(expectedResponse)
await deleteStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
})
it('should sync when there are buckets', async () => {
const inputCustom = createValidAWSInputFixture(
it('should limit sync to specified actions: upload', async () => {
const inputCustom = createValidInputFixture(
'./assets/giraffe',
sampleStorageName
sampleStorage.name
)
inputCustom.syncCloudStorage.storages[0].actions = ['upload']
const mockServerless = getServerlessMock(inputCustom, __dirname)
const syncCloudStorage = new SyncCloudStorage(

@@ -337,32 +454,68 @@ mockServerless,

)
await setupStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const syncStoragesSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
const giraffeREADME = 'README.md'
const bucketsSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
expect(bucketsSpy).toHaveBeenCalledTimes(1)
expect(response).toMatchObject({
result: expect.arrayContaining([
expect.objectContaining({
status: expect.stringContaining('fulfilled'),
value: expect.objectContaining({
uploaded: expect.arrayContaining([
expect.objectContaining({
Bucket: expect.stringContaining(sampleStorageName),
}),
expect(syncStoragesSpy).toHaveBeenCalledTimes(1)
const expectedFile = expect.objectContaining<LocalFile>({
ETag: expect.any(String),
Key: giraffeREADME,
LastModified: expect.any(Date),
LocalPath: expect.stringContaining(giraffeREADME),
Size: expect.any(Number),
})
const expectedResponse = {
result: [
{
status: 'fulfilled',
value: {
deleted: expect.arrayContaining([]),
files: expect.arrayContaining([expectedFile]),
filesToDelete: expect.arrayContaining([]),
filesToUpload: expect.arrayContaining([
expect.stringContaining(giraffeREADME),
]),
}),
}),
]),
})
await deleteStorage(syncCloudStorage.getS3Client(), sampleStorage)
localFilesChecksum: expect.arrayContaining([
expect.stringContaining(giraffeREADME),
]),
objects: expect.arrayContaining([]),
storage: inputCustom.syncCloudStorage.storages[0],
storageObjectsChecksum: expect.arrayContaining([]),
uploaded: [
{
storage: inputCustom.syncCloudStorage.storages[0].name,
etag: expect.any(String),
key: giraffeREADME,
location: expect.any(String),
versionId: expect.any(String),
},
],
},
},
],
}
expect(response).toEqual(expectedResponse)
await deleteStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
})
it('should sync when the bucketPrefix', async () => {
const bucketPrefix = 'animals'
const inputCustom = createValidAWSInputFixture(
it('should limit sync to specified actions: delete', async () => {
const inputCustom = createValidInputFixture(
'./assets/giraffe',
sampleStorageName,
bucketPrefix
sampleStorage.name
)
inputCustom.syncCloudStorage.storages[0].actions = ['delete']
const mockServerless = getServerlessMock(inputCustom, __dirname)

@@ -374,19 +527,41 @@ const syncCloudStorage = new SyncCloudStorage(

)
await setupStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const giraffeTXT = 'giraffe.txt'
await syncCloudStorage.getS3Client().send(
new PutObjectCommand({
Bucket: inputCustom.syncCloudStorage.storages[0].name,
Key: giraffeTXT,
Body: 'giraffe',
})
)
const bucketsSpy = jest.spyOn(syncCloudStorage, 'storages')
const syncStoragesSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
expect(bucketsSpy).toHaveBeenCalledTimes(1)
expect(response).toMatchObject({
const expectedResponse = expect.objectContaining({
result: expect.arrayContaining([
expect.objectContaining({
status: expect.stringContaining('fulfilled'),
value: expect.objectContaining({
uploaded: expect.arrayContaining([
status: 'fulfilled',
value: {
deleted: expect.arrayContaining([
expect.objectContaining({
Key: expect.stringContaining(bucketPrefix),
Key: giraffeTXT,
VersionId: expect.any(String),
}),
]),
}),
files: expect.arrayContaining([]),
filesToDelete: expect.arrayContaining([
expect.stringMatching(giraffeTXT),
]),
filesToUpload: expect.arrayContaining([]),
localFilesChecksum: expect.arrayContaining([]),
objects: expect.arrayContaining([]),
storage: inputCustom.syncCloudStorage.storages[0],
storageObjectsChecksum: expect.arrayContaining([]),
uploaded: expect.arrayContaining([]),
},
}),

@@ -396,19 +571,19 @@ ]),

for (const syncedStorage of response.result) {
if (syncedStorage.status === 'rejected') {
throw syncedStorage.reason
}
expect(syncStoragesSpy).toHaveBeenCalledTimes(1)
expect(response).toEqual(expectedResponse)
await deleteStorage(
syncCloudStorage.getS3Client(),
syncedStorage.value.storage
)
}
await deleteStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
})
it('should sync tags', async () => {
const inputCustom = createValidAWSInputFixtureWithTags(
it('should limit sync to specified actions: upload & delete', async () => {
const inputCustom = createValidInputFixture(
'./assets/giraffe',
sampleStorageName
sampleStorage.name
)
inputCustom.syncCloudStorage.storages[0].actions = ['upload', 'delete']
const mockServerless = getServerlessMock(inputCustom, __dirname)

@@ -420,29 +595,110 @@ const syncCloudStorage = new SyncCloudStorage(

)
await setupStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
const expectedTags = mergeTags(
[],
inputCustom.syncCloudStorage.storages[0].tags
const giraffeTXT = 'giraffe.txt'
await syncCloudStorage.getS3Client().send(
new PutObjectCommand({
Bucket: inputCustom.syncCloudStorage.storages[0].name,
Key: giraffeTXT,
Body: 'giraffe',
})
)
const tagsSpy = jest.spyOn(syncCloudStorage, 'tags')
const newTags = await syncCloudStorage.tags()
expect(tagsSpy).toHaveBeenCalledTimes(1)
for (const { result } of newTags) {
expect(result).toBe(expectedTags)
expect(result).toBeGreaterThanOrEqual(1)
const syncStoragesSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
expect(
await deleteStorage(syncCloudStorage.getS3Client(), sampleStorage)
).not.toBe(undefined)
expect(syncStoragesSpy).toHaveBeenCalledTimes(1)
const giraffeREADME = 'README.md'
const expectedFile = expect.objectContaining<LocalFile>({
ETag: expect.any(String),
Key: giraffeREADME,
LastModified: expect.any(Date),
LocalPath: expect.stringContaining('README.md'),
Size: expect.any(Number),
})
const expectedGiraffeTXTObject = expect.objectContaining({
ETag: expect.any(String),
Key: giraffeTXT,
LastModified: expect.any(Date),
Size: expect.any(Number),
StorageClass: expect.any(String),
})
const expectedResponse = {
result: [
{
status: 'fulfilled',
value: {
deleted: expect.arrayContaining([
{
Key: giraffeTXT,
VersionId: expect.any(String),
},
{
Key: expect.stringMatching(giraffeREADME),
VersionId: expect.any(String),
},
]),
files: [expectedFile],
filesToDelete: [expect.stringMatching(giraffeTXT)],
filesToUpload: [expect.stringMatching(giraffeREADME)],
localFilesChecksum: [expect.stringMatching(giraffeREADME)],
objects: [expectedGiraffeTXTObject],
storage: inputCustom.syncCloudStorage.storages[0],
storageObjectsChecksum: [expect.stringMatching(giraffeTXT)],
uploaded: [
{
storage: inputCustom.syncCloudStorage.storages[0].name,
etag: expect.any(String),
key: expect.stringMatching(giraffeREADME),
location: expect.any(String),
versionId: expect.any(String),
},
],
},
},
],
}
expect(response).toEqual(expectedResponse)
await deleteStorage(
syncCloudStorage.getS3Client(),
inputCustom.syncCloudStorage.storages[0]
)
})
it('should sync metadata', async () => {
const inputCustom = createValidAWSInputFixtureWithMetadata(
it('should sync multiple storages with all actions', async () => {
const inputCustom = createValidInputFixture(
'./assets/giraffe',
sampleStorageName
sampleStorage.name
)
const mockServerless = getServerlessMock(inputCustom, __dirname)
const inputCustom2 = createValidInputFixture(
'./assets/giraffe-multiple',
'giraffe-bucket-2'
)
const {
syncCloudStorage: {
storages: [storage1],
},
} = inputCustom
const {
syncCloudStorage: {
storages: [storage2],
},
} = inputCustom2
const storages = [storage1, storage2]
const mockServerless = getServerlessMock(
{
...inputCustom,
syncCloudStorage: { ...inputCustom.syncCloudStorage, storages },
},
__dirname
)
const syncCloudStorage = new SyncCloudStorage(

@@ -454,20 +710,99 @@ mockServerless,

await createStorage(syncCloudStorage.getS3Client(), sampleStorage)
await setupStorage(syncCloudStorage.getS3Client(), storages[0])
await setupStorage(syncCloudStorage.getS3Client(), storages[1])
const metadataSpy = jest.spyOn(syncCloudStorage, 'metadata')
const syncedStorages = await syncCloudStorage.metadata()
const syncStoragesSpy = jest.spyOn(syncCloudStorage, 'storages')
const response = await syncCloudStorage.storages()
expect(metadataSpy).toHaveBeenCalledTimes(1)
expect(syncedStorages).toMatchObject(
expect.arrayContaining([
expect.objectContaining({
status: expect.stringContaining('fulfilled'),
value: expect.arrayContaining([]),
}),
])
)
expect(syncStoragesSpy).toHaveBeenCalledTimes(1)
await deleteStorage(syncCloudStorage.getS3Client(), sampleStorage)
const giraffeReadme = 'README.md'
const giraffeSubReadme = 'sub/README.md'
const expectedReadmeLocalFile = expect.objectContaining<LocalFile>({
ETag: expect.any(String),
Key: giraffeReadme,
LastModified: expect.any(Date),
LocalPath: expect.stringMatching(giraffeReadme),
Size: expect.any(Number),
})
const expectedSubReadmeLocalFile = expect.objectContaining<LocalFile>({
ETag: expect.any(String),
Key: giraffeSubReadme,
LastModified: expect.any(Date),
LocalPath: expect.stringMatching(giraffeSubReadme),
Size: expect.any(Number),
})
const expectedUploadedReadmeFile1 = expect.objectContaining({
storage: storage1.name,
etag: expect.any(String),
key: giraffeReadme,
location: expect.any(String),
versionId: expect.any(String),
})
const expectedUploadedReadmeFile2 = expect.objectContaining({
storage: storage2.name,
etag: expect.any(String),
key: giraffeReadme,
location: expect.any(String),
versionId: expect.any(String),
})
const expectedUploadedSubReadmeFile2 = expect.objectContaining({
storage: storage2.name,
etag: expect.any(String),
key: giraffeSubReadme,
location: expect.any(String),
versionId: expect.any(String),
})
const expectedResponse = {
result: [
{
status: 'fulfilled',
value: {
deleted: expect.arrayContaining([]),
files: [expectedReadmeLocalFile],
filesToDelete: expect.arrayContaining([]),
filesToUpload: expect.arrayContaining([
expect.stringContaining(giraffeReadme),
]),
localFilesChecksum: expect.arrayContaining([
expect.stringContaining(giraffeReadme),
]),
objects: expect.arrayContaining([]),
storage: storages[0],
storageObjectsChecksum: expect.arrayContaining([]),
uploaded: [expectedUploadedReadmeFile1],
},
},
{
status: 'fulfilled',
value: {
deleted: expect.arrayContaining([]),
files: [expectedReadmeLocalFile, expectedSubReadmeLocalFile],
filesToDelete: expect.arrayContaining([]),
filesToUpload: expect.arrayContaining([
expect.stringContaining(giraffeReadme),
]),
localFilesChecksum: expect.arrayContaining([
expect.stringContaining(giraffeReadme),
]),
objects: expect.arrayContaining([]),
storage: storages[1],
storageObjectsChecksum: expect.arrayContaining([]),
uploaded: [
expectedUploadedReadmeFile2,
expectedUploadedSubReadmeFile2,
],
},
},
],
}
expect(response).toEqual(expectedResponse)
for (const storage of storages) {
await deleteStorage(syncCloudStorage.getS3Client(), storage)
}
})
})
})
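
Each test above repeats the same storage lifecycle: reset, create, assert, delete. A condensed sketch of that pattern (the withStorage wrapper is hypothetical; the imports mirror the test file):

import { S3Client } from '@aws-sdk/client-s3'

import { createStorage, deleteStorage } from '../src/providers/s3/buckets'
import { Storage } from '../src/schemas/input'

// Hypothetical helper: create the bucket, run the assertions, always clean up
const withStorage = async (
  client: S3Client,
  storage: Storage,
  run: () => Promise<void>
) => {
  await deleteStorage(client, storage).catch(() => undefined) // ignore leftovers
  await createStorage(client, storage)
  try {
    await run()
  } finally {
    await deleteStorage(client, storage)
  }
}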

@@ -9,3 +9,2 @@ import { Custom } from '../../src/schemas/input'

secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? '',
sessionToken: process.env.AWS_SESSION_TOKEN ?? '',
},

@@ -12,0 +11,0 @@ })

import { faker } from '@faker-js/faker'
import { Custom, ObjectCannedACLs, Storage } from '../../src/schemas/input'
import { Custom, Storage, objectCannedACLs } from '../../src/schemas/input'
import { DeepPartial } from '../../src/types'
export const sampleStorageName = 'my-static-site-assets'
export const sampleStorage: Storage = {
name: sampleStorageName,
bucketPrefix: 'animals',
name: 'my-static-site-assets',
prefix: 'animals',
localPath: './assets/giraffe',
actions: ['upload', 'delete'],
acl: 'public-read',
deleteRemoved: true,
acl: undefined,
enabled: true,

@@ -26,7 +24,5 @@ tags: {},

actions: ['upload', 'delete'],
bucketPrefix: faker.lorem.word(),
prefix: faker.lorem.word(),
enabled: faker.datatype.boolean(),
deleteRemoved: faker.datatype.boolean(),
acl: faker.helpers.arrayElement(ObjectCannedACLs),
defaultContentType: faker.system.mimeType(),
acl: faker.helpers.arrayElement(objectCannedACLs),
metadata: {

@@ -45,6 +41,6 @@ exampleKey: faker.lorem.word(),

export const createValidOfflineInputFixture = (
export const createValidInputFixture = (
localPath: string,
name = sampleStorageName,
bucketPrefix = '',
name = '',
prefix = '',
endpoint = process.env.AWS_ENDPOINT_URL

@@ -62,3 +58,3 @@ ): Required<Custom> => {

localPath,
bucketPrefix,
prefix: prefix,
},

@@ -70,6 +66,6 @@ ],

export const createValidOfflineInputFixtureWithTags = (
export const createValidInputFixtureWithACLBucketOwner = (
localPath: string,
name = sampleStorageName,
bucketPrefix = '',
name = '',
prefix = '',
endpoint = process.env.AWS_ENDPOINT_URL

@@ -80,3 +76,3 @@ ): Required<Custom> => {

disabled: false,
endpoint,
endpoint: endpoint,
offline: true,

@@ -88,6 +84,4 @@ storages: [

localPath,
bucketPrefix,
tags: {
[faker.lorem.word()]: faker.lorem.word(),
},
prefix: prefix,
acl: 'bucket-owner-full-control',
},

@@ -99,6 +93,6 @@ ],

export const createValidOfflineInputFixtureWithMetadata = (
export const createValidInputFixtureWithTags = (
localPath: string,
name = sampleStorageName,
bucketPrefix = '',
name = '',
prefix = '',
endpoint = process.env.AWS_ENDPOINT_URL

@@ -116,4 +110,4 @@ ): Required<Custom> => {

localPath,
bucketPrefix,
metadata: {
prefix,
tags: {
[faker.lorem.word()]: faker.lorem.word(),

@@ -127,6 +121,7 @@ },

export const createValidAWSInputFixture = (
export const createValidInputFixtureWithMetadata = (
localPath: string,
name = sampleStorageName,
bucketPrefix = ''
name = '',
prefix = '',
endpoint = process.env.AWS_ENDPOINT_URL
): Required<Custom> => {

@@ -136,4 +131,4 @@ return {

disabled: false,
endpoint: undefined,
offline: false,
endpoint,
offline: true,
storages: [

@@ -144,49 +139,3 @@ {

localPath,
bucketPrefix,
},
],
},
}
}
export const createValidAWSInputFixtureWithTags = (
localPath: string,
name = sampleStorageName,
bucketPrefix = ''
): Required<Custom> => {
return {
syncCloudStorage: {
disabled: false,
endpoint: undefined,
offline: false,
storages: [
{
...sampleStorage,
name,
localPath,
bucketPrefix,
tags: {
[faker.lorem.word()]: faker.lorem.word(),
},
},
],
},
}
}
export const createValidAWSInputFixtureWithMetadata = (
localPath: string,
name = sampleStorageName,
bucketPrefix = ''
): Required<Custom> => {
return {
syncCloudStorage: {
disabled: false,
endpoint: undefined,
offline: false,
storages: [
{
...sampleStorage,
name,
localPath,
bucketPrefix,
prefix,
metadata: {

@@ -231,7 +180,5 @@ [faker.lorem.word()]: faker.lorem.word(),

actions: 123,
bucketPrefix: 456,
prefix: 456,
enabled: 'false',
deleteRemoved: 'remove',
acl: undefined,
defaultContentType: false,
metadata: 'key: value',

@@ -238,0 +185,0 @@ tags: [],
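
With the consolidated signature, a fixture call reads as follows (the bucket name and prefix are illustrative; offline mode and the endpoint default appear to carry over from the old offline fixtures):

const input = createValidInputFixture('./assets/giraffe', 'my-bucket', 'animals')
// input.syncCloudStorage.storages[0].name === 'my-bucket'
// input.syncCloudStorage.storages[0].prefix === 'animals'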

import { loadSharedConfigFiles } from '@smithy/shared-ini-file-loader'
import logger from '../src/utils/logger'
const defaultProfile = 'default'
const checkEnvVariables = (env: NodeJS.ProcessEnv) => {
return env.AWS_REGION && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY
}
export const setupEnvs = async (): Promise<void> => {

@@ -8,23 +16,27 @@ const { credentialsFile, configFile } = await loadSharedConfigFiles({

const profile = process.env.AWS_PROFILE ?? 'default'
const profile = process.env.AWS_PROFILE ?? defaultProfile
const credentials = credentialsFile[profile]
const config = configFile[profile]
if (credentials && config) {
const { aws_access_key_id, aws_secret_access_key, aws_session_token } =
credentials
const { region } = config
if (!credentials && !config && checkEnvVariables(process.env)) {
logger.info(
"AWS Region & Credentials not found in '~/.aws/credentials & '~/.aws/config'!"
)
logger.info('Loaded AWS Region & Credentials from environment variables!')
return
}
if (
aws_access_key_id &&
aws_secret_access_key &&
aws_session_token &&
region
) {
process.env.AWS_ACCESS_KEY_ID = aws_access_key_id
process.env.AWS_SECRET_ACCESS_KEY = aws_secret_access_key
process.env.AWS_SESSION_TOKEN = aws_session_token
process.env.AWS_REGION = region
}
logger.info('Loaded AWS Region & Credentials from AWS config files!')
const {
aws_access_key_id: awsAccessKeyID,
aws_secret_access_key: awsSecretAccessKey,
} = credentials
const { region } = config
if (region && awsAccessKeyID && awsSecretAccessKey) {
process.env.AWS_REGION = region
process.env.AWS_ACCESS_KEY_ID = awsAccessKeyID
process.env.AWS_SECRET_ACCESS_KEY = awsSecretAccessKey
}
}
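
Typical use is the single call in beforeAll shown earlier; this assumes either a matching profile in the shared AWS files or the three environment variables being present:

await setupEnvs()
// AWS_REGION, AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are now populated,
// either copied from ~/.aws/credentials and ~/.aws/config or left as provided
// by the environment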
