verdaccio-aws-s3-storage - npm Package Compare versions

Comparing version 10.3.0 to 10.3.1


lib/addTrailingSlash.js

@@ -7,8 +7,6 @@ "use strict";

exports.default = void 0;
var _default = path => {
return path != null ? path.endsWith('/') ? path : `${path}/` : '';
};
exports.default = _default;
//# sourceMappingURL=addTrailingSlash.js.map
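For reference, this helper normalizes a path or key prefix so it always ends in a single slash. A minimal behavior sketch derived from the implementation shown above (not part of the diff):

```js
// Sketch of addTrailingSlash, reproduced from the compiled output above.
const addTrailingSlash = (path) =>
  path != null ? (path.endsWith('/') ? path : `${path}/`) : '';

addTrailingSlash('some-prefix');  // 'some-prefix/'
addTrailingSlash('some-prefix/'); // 'some-prefix/'
addTrailingSlash(undefined);      // ''
```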


lib/config.d.ts

@@ -1,2 +0,2 @@

-import { Config } from '@verdaccio/types';
+import { Config } from '@verdaccio/legacy-types';
export interface S3Config extends Config {

@@ -3,0 +3,0 @@ bucket: string;

lib/deleteKeyPrefix.js

@@ -7,5 +7,3 @@ "use strict";

exports.deleteKeyPrefix = deleteKeyPrefix;
var _s3Errors = require("./s3Errors");
function deleteKeyPrefix(s3, options, callback) {

@@ -12,0 +10,0 @@ s3.listObjectsV2(options, (err, data) => {

lib/index.d.ts

@@ -1,2 +0,2 @@

-import { Logger, Config, Callback, IPluginStorage, PluginOptions, Token, TokenFilter } from '@verdaccio/types';
+import { Logger, Config, Callback, IPluginStorage, PluginOptions, Token, TokenFilter } from '@verdaccio/legacy-types';
import { S3Config } from './config';

@@ -3,0 +3,0 @@ import S3PackageManager from './s3PackageManager';

lib/index.js

@@ -7,41 +7,20 @@ "use strict";

exports.default = void 0;
var _commonsApi = require("@verdaccio/commons-api");
var _awsSdk = require("aws-sdk");
var _s3PackageManager = _interopRequireDefault(require("./s3PackageManager"));
var _s3Errors = require("./s3Errors");
var _addTrailingSlash = _interopRequireDefault(require("./addTrailingSlash"));
var _setConfigValue = _interopRequireDefault(require("./setConfigValue"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
class S3Database {
constructor(config, options) {
_defineProperty(this, "logger", void 0);
_defineProperty(this, "config", void 0);
_defineProperty(this, "s3", void 0);
_defineProperty(this, "_localData", void 0);
-this.logger = options.logger; // copy so we don't mutate
+this.logger = options.logger;
+// copy so we don't mutate
if (!config) {
throw new Error('s3 storage missing config. Add `store.s3-storage` to your config file');
}
this.config = Object.assign(config, config.store['aws-s3-storage']);
if (!this.config.bucket) {
throw new Error('s3 storage requires a bucket');
}
this.config.bucket = (0, _setConfigValue.default)(this.config.bucket);

@@ -69,7 +48,5 @@ this.config.keyPrefix = (0, _setConfigValue.default)(this.config.keyPrefix);

}
async getSecret() {
return Promise.resolve((await this._getData()).secret);
}
async setSecret(secret) {

@@ -79,3 +56,2 @@ (await this._getData()).secret = secret;

}
add(name, callback) {

@@ -85,3 +61,2 @@ this.logger.debug({

}, 's3: [add] private package @{name}');
this._getData().then(async data => {

@@ -93,3 +68,2 @@ if (data.list.indexOf(name) === -1) {

}, 's3: [add] @{name} has been added');
try {

@@ -106,3 +80,2 @@ await this._sync();

}
async search(onPackage, onEnd) {

@@ -118,3 +91,2 @@ this.logger.debug('s3: [search]');

}
async _fetchPackageInfo(onPackage, packageName) {

@@ -143,3 +115,2 @@ const {

}
if (response.LastModified) {

@@ -158,3 +129,2 @@ const {

}
resolve();

@@ -164,3 +134,2 @@ });

}
remove(name, callback) {

@@ -177,5 +146,3 @@ this.logger.debug({

}
const pkgName = data.indexOf(name);
if (pkgName !== -1) {

@@ -188,3 +155,2 @@ const data = await this._getData();

}
try {

@@ -203,10 +169,8 @@ this.logger.trace('s3: [remove] starting sync');

}
get(callback) {
this.logger.debug('s3: [get]');
this._getData().then(data => callback(null, data.list));
-} // Create/write database file to s3
+}
+// Create/write database file to s3
async _sync() {

@@ -234,3 +198,2 @@ await new Promise((resolve, reject) => {

}
this.logger.debug('s3: [_sync] sucess');

@@ -240,5 +203,5 @@ resolve(undefined);

});
-} // returns an instance of a class managing the storage for a single package
+}
+// returns an instance of a class managing the storage for a single package
getPackageStorage(packageName) {

@@ -250,3 +213,2 @@ this.logger.debug({

}
async _getData() {

@@ -273,3 +235,2 @@ if (!this._localData) {

}, 's3: [_getData] err: @{err}');
if ((0, _s3Errors.is404Error)(s3Err)) {

@@ -284,6 +245,4 @@ this.logger.error('s3: [_getData] err 404 create new database');

}
return;
}
const body = response.Body ? response.Body.toString() : '';

@@ -300,6 +259,4 @@ const data = JSON.parse(body);

}
return this._localData;
}
saveToken(token) {

@@ -311,3 +268,2 @@ this.logger.warn({

}
deleteToken(user, tokenKey) {

@@ -320,3 +276,2 @@ this.logger.warn({

}
readTokens(filter) {

@@ -328,6 +283,4 @@ this.logger.warn({

}
}
exports.default = S3Database;
//# sourceMappingURL=index.js.map

lib/s3Errors.js

@@ -13,29 +13,21 @@ "use strict";

exports.is503Error = is503Error;
var _commonsApi = require("@verdaccio/commons-api");
function is404Error(err) {
return err.code === _commonsApi.HTTP_STATUS.NOT_FOUND;
}
function create404Error() {
return (0, _commonsApi.getNotFound)('no such package available');
}
function is409Error(err) {
return err.code === _commonsApi.HTTP_STATUS.CONFLICT;
}
function create409Error() {
return (0, _commonsApi.getConflict)('file already exists');
}
function is503Error(err) {
return err.code === _commonsApi.HTTP_STATUS.SERVICE_UNAVAILABLE;
}
function create503Error() {
return (0, _commonsApi.getCode)(_commonsApi.HTTP_STATUS.SERVICE_UNAVAILABLE, 'resource temporarily unavailable');
}
function convertS3Error(err) {

@@ -46,9 +38,6 @@ switch (err.code) {

return (0, _commonsApi.getNotFound)();
case 'StreamContentLengthMismatch':
return (0, _commonsApi.getInternalError)(_commonsApi.API_ERROR.CONTENT_MISMATCH);
case 'RequestAbortedError':
return (0, _commonsApi.getInternalError)('request aborted');
default:

@@ -55,0 +44,0 @@ // @ts-ignore
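The helpers above are consumed elsewhere in this package by first converting the raw AWS error, then branching on its HTTP code (see the createPackage and writeTarball hunks further down). A minimal sketch of that pattern, assuming an AWS.S3 client and a hypothetical `keyIsFree` wrapper that is not part of this package:

```js
// Hedged sketch: treat a 404 from headObject as "the key does not exist yet".
const { convertS3Error, is404Error } = require('./s3Errors');

function keyIsFree(s3, params, callback) {
  // params is assumed to be a { Bucket, Key } object
  s3.headObject(params, (err) => {
    if (err) {
      const s3Err = convertS3Error(err);
      return callback(null, is404Error(s3Err));
    }
    // the object exists, so the key is taken
    callback(null, false);
  });
}
```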

lib/s3PackageManager.d.ts

import { UploadTarball, ReadTarball } from '@verdaccio/streams';
-import { Callback, Logger, Package, ILocalPackageManager, CallbackAction, ReadPackageCallback } from '@verdaccio/types';
+import { Callback, Logger, Package, ILocalPackageManager, CallbackAction, ReadPackageCallback } from '@verdaccio/legacy-types';
import { S3Config } from './config';

@@ -4,0 +4,0 @@ export default class S3PackageManager implements ILocalPackageManager {

lib/s3PackageManager.js

@@ -7,35 +7,12 @@ "use strict";

exports.default = void 0;
var _awsSdk = require("aws-sdk");
var _streams = require("@verdaccio/streams");
var _commonsApi = require("@verdaccio/commons-api");
var _s3Errors = require("./s3Errors");
var _deleteKeyPrefix = require("./deleteKeyPrefix");
var _addTrailingSlash = _interopRequireDefault(require("./addTrailingSlash"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
const pkgFileName = 'package.json';
class S3PackageManager {
constructor(config, packageName, logger) {
_defineProperty(this, "config", void 0);
_defineProperty(this, "logger", void 0);
_defineProperty(this, "packageName", void 0);
_defineProperty(this, "s3", void 0);
_defineProperty(this, "packagePath", void 0);
_defineProperty(this, "tarballACL", void 0);
this.config = config;

@@ -87,3 +64,2 @@ this.packageName = packageName;

const packageAccess = this.config.getMatchedPackagesSpec(packageName);
if (packageAccess) {

@@ -97,3 +73,2 @@ const storage = packageAccess.storage;

}
updatePackage(name, updateHandler, onWrite, transformPackage, onEnd) {

@@ -103,3 +78,2 @@ this.logger.debug({

}, 's3: [S3PackageManager updatePackage init] @{name}');
(async () => {

@@ -130,3 +104,2 @@ try {

}
async _getData() {

@@ -150,6 +123,4 @@ this.logger.debug('s3: [S3PackageManager _getData init]');

}
const body = response.Body ? response.Body.toString() : '';
let data;
try {

@@ -164,3 +135,2 @@ data = JSON.parse(body);

}
this.logger.trace({

@@ -173,3 +143,2 @@ data

}
deletePackage(fileName, callback) {

@@ -187,3 +156,2 @@ this.s3.deleteObject({

}
removePackage(callback) {

@@ -201,3 +169,2 @@ (0, _deleteKeyPrefix.deleteKeyPrefix)(this.s3, {

}
createPackage(name, value, callback) {

@@ -216,4 +183,4 @@ this.logger.debug({

if (err) {
-const s3Err = (0, _s3Errors.convertS3Error)(err); // only allow saving if this file doesn't exist already
+const s3Err = (0, _s3Errors.convertS3Error)(err);
+// only allow saving if this file doesn't exist already
if ((0, _s3Errors.is404Error)(s3Err)) {

@@ -239,3 +206,2 @@ this.logger.debug({

}
savePackage(name, value, callback) {

@@ -256,3 +222,2 @@ this.logger.debug({

}
readPackage(name, callback) {

@@ -263,3 +228,2 @@ this.logger.debug({

}, 's3: [S3PackageManager readPackage init] name @{name}/@{packageName}');
(async () => {

@@ -281,3 +245,2 @@ try {

}
writeTarball(name) {

@@ -300,6 +263,7 @@ this.logger.debug({

Key: `${this.packagePath}/${name}`
-}; // NOTE: I'm using listObjectVersions so I don't have to download the full object with getObject.
+};
+// NOTE: I'm using listObjectVersions so I don't have to download the full object with getObject.
// Preferably, I'd use getObjectMetadata or getDetails when it's available in the node sdk
// TODO: convert to headObject
this.s3.headObject({

@@ -314,3 +278,2 @@ Bucket: this.config.bucket,

}, 's3: [S3PackageManager writeTarball headObject] @{error}');
if ((0, _s3Errors.is404Error)(convertedErr) === false) {

@@ -326,4 +289,4 @@ this.logger.error({

ACL: this.tarballACL
-})); // NOTE: there's a managedUpload.promise, but it doesn't seem to work
+}));
+// NOTE: there's a managedUpload.promise, but it doesn't seem to work
const promise = new Promise(resolve => {

@@ -350,3 +313,2 @@ this.logger.debug('s3: [S3PackageManager writeTarball managedUpload] send');

});
uploadStream.done = () => {

@@ -366,3 +328,2 @@ const onEnd = async () => {

};
if (streamEnded) {

@@ -380,6 +341,4 @@ this.logger.trace({

};
uploadStream.abort = () => {
this.logger.debug('s3: [S3PackageManager writeTarball uploadStream abort] init');
try {

@@ -412,3 +371,2 @@ this.logger.debug('s3: [S3PackageManager writeTarball managedUpload abort]');

}
readTarball(name) {

@@ -428,2 +386,3 @@ this.logger.debug({

// otherwise they'll be processed twice
// verdaccio force garbage collects a stream on 404, so we can't emit more

@@ -442,15 +401,14 @@ // than one error or it'll fail

}, 's3: [S3PackageManager readTarball httpHeaders event] statusCode @{statusCode}');
if (statusCode !== _commonsApi.HTTP_STATUS.NOT_FOUND) {
if (headers[_commonsApi.HEADERS.CONTENT_LENGTH]) {
-const contentLength = parseInt(headers[_commonsApi.HEADERS.CONTENT_LENGTH], 10); // not sure this is necessary
+const contentLength = parseInt(headers[_commonsApi.HEADERS.CONTENT_LENGTH], 10);
+// not sure this is necessary
if (headersSent) {
return;
}
headersSent = true;
this.logger.debug('s3: [S3PackageManager readTarball readTarballStream event] emit content-length');
-readTarballStream.emit(_commonsApi.HEADERS.CONTENT_LENGTH, contentLength); // we know there's content, so open the stream
+readTarballStream.emit(_commonsApi.HEADERS.CONTENT_LENGTH, contentLength);
+// we know there's content, so open the stream
readTarballStream.emit('open');

@@ -472,3 +430,2 @@ this.logger.debug('s3: [S3PackageManager readTarball readTarballStream event] emit open');

readStream.pipe(readTarballStream);
readTarballStream.abort = () => {

@@ -480,9 +437,6 @@ this.logger.debug('s3: [S3PackageManager readTarball readTarballStream event] request abort');

};
return readTarballStream;
}
}
exports.default = S3PackageManager;
//# sourceMappingURL=s3PackageManager.js.map

lib/setConfigValue.js

@@ -7,3 +7,2 @@ "use strict";

exports.default = void 0;
var _default = configValue => {

@@ -13,4 +12,3 @@ const envValue = process.env[configValue];

};
exports.default = _default;
//# sourceMappingURL=setConfigValue.js.map
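Based on the hunk above and the README below, this helper lets any configured value be either a literal or the name of an environment variable that holds the value. A minimal behavior sketch (the real check for an unset variable may differ):

```js
// Sketch of setConfigValue: prefer the environment variable named by the
// config value, otherwise fall back to the literal value itself.
const setConfigValue = (configValue) => {
  const envValue = process.env[configValue];
  return envValue != null ? envValue : configValue;
};

// With S3_BUCKET=my-real-bucket exported in the environment:
setConfigValue('S3_BUCKET');      // 'my-real-bucket'
setConfigValue('literal-bucket'); // 'literal-bucket' (no matching env var)
```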
package.json

{
"name": "verdaccio-aws-s3-storage",
"version": "10.3.0",
"version": "10.3.1",
"description": "AWS S3 storage implementation for Verdaccio",

@@ -32,8 +32,8 @@ "keywords": [

"@verdaccio/commons-api": "10.2.0",
"@verdaccio/streams": "10.2.0",
"@verdaccio/streams": "10.2.1",
"aws-sdk": "^2.607.0"
},
"devDependencies": {
"@verdaccio/types": "10.5.0",
"recursive-readdir": "2.2.2"
"@verdaccio/legacy-types": "1.0.1",
"recursive-readdir": "2.2.3"
},

@@ -52,4 +52,3 @@ "funding": {

"build": "pnpm run build:js && pnpm run build:types"
},
"readme": "# verdaccio-aws-s3-storage\n\nšŸ“¦ AWS S3 storage plugin for Verdaccio\n\n[![verdaccio (latest)](https://img.shields.io/npm/v/verdaccio-aws-s3-storage/latest.svg)](https://www.npmjs.com/package/verdaccio-aws-s3-storage)\n[![CircleCI](https://circleci.com/gh/verdaccio/verdaccio-aws-s3-storage/tree/master.svg?style=svg)](https://circleci.com/gh/verdaccio/verdaccio-aws-s3-storage/tree/master)\n[![Known Vulnerabilities](https://snyk.io/test/github/verdaccio/verdaccio-aws-s3-storage/badge.svg?targetFile=package.json)](https://snyk.io/test/github/verdaccio/verdaccio-aws-s3-storage?targetFile=package.json)\n[![codecov](https://codecov.io/gh/verdaccio/verdaccio-aws-s3-storage/branch/master/graph/badge.svg)](https://codecov.io/gh/verdaccio/verdaccio-aws-s3-storage)\n[![backers](https://opencollective.com/verdaccio/tiers/backer/badge.svg?label=Backer&color=brightgreen)](https://opencollective.com/verdaccio)\n[![discord](https://img.shields.io/discord/388674437219745793.svg)](http://chat.verdaccio.org/)\n![MIT](https://img.shields.io/github/license/mashape/apistatus.svg)\n[![node](https://img.shields.io/node/v/verdaccio-aws-s3-storage/latest.svg)](https://www.npmjs.com/package/verdaccio-aws-s3-storage)\n\n[![Twitter followers](https://img.shields.io/twitter/follow/verdaccio_npm.svg?style=social&label=Follow)](https://twitter.com/verdaccio_npm)\n[![Github](https://img.shields.io/github/stars/verdaccio/verdaccio.svg?style=social&label=Stars)](https://github.com/verdaccio/verdaccio/stargazers)\n[![backers](https://opencollective.com/verdaccio/tiers/backer/badge.svg?label=Backer&color=brightgreen)](https://opencollective.com/verdaccio)\n[![stackshare](https://img.shields.io/badge/Follow%20on-StackShare-blue.svg?logo=stackshare&style=flat)](https://stackshare.io/verdaccio)\n\n\n> This plugin was forked based on [`verdaccio-s3-storage`](https://github.com/Remitly/verdaccio-s3-storage) built in Typescript + other features added along \nthe time. Both plugins might have vary in behaviour since then, we recommend use the AWS plugin on this repo due\nis under control of Verdaccio community and constantly upated. \n\n## See it in action\n\n* Test on [Docker + LocalStack + Verdaccio 4 + S3 Plugin example](https://github.com/verdaccio/docker-examples/tree/master/amazon-s3-docker-example).\n* Using `docker-compose` on this repo based on [**verdaccio-minio**](https://github.com/barolab/verdaccio-minio) developed by [barolab](https://github.com/barolab).\n* Feel free to propose new ways to run this plugin. 
\n\n### Basic Requirements\n\n* AWS Account (in case you are using the cloud)\n* Verdaccio server (4.0) (for 3.x use `verdaccio-s3-storage` instead)\n\n```\nnpm install -g verdaccio\n```\n\n## Usage\n\n```\nnpm install verdaccio-aws-s3-storage\n```\n\nThis will pull AWS credentials from your environment.\n\nIn your verdaccio config, configure\n\n```yaml\nstore:\n aws-s3-storage:\n bucket: your-s3-bucket\n keyPrefix: some-prefix # optional, has the effect of nesting all files in a subdirectory\n region: us-west-2 # optional, will use aws s3's default behavior if not specified\n endpoint: https://{service}.{region}.amazonaws.com # optional, will use aws s3's default behavior if not specified\n s3ForcePathStyle: false # optional, will use path style URLs for S3 objects\n tarballACL: private # optional, use public-read to work with CDN like Amazon CloudFront\n accessKeyId: your-access-key-id # optional, aws accessKeyId for private S3 bucket\n secretAccessKey: your-secret-access-key # optional, aws secretAccessKey for private S3 bucket\n sessionToken: your-session-token # optional, aws sessionToken for private S3 bucket\n```\n\nThe configured values can either be the actual value or the name of an environment variable that contains the value for the following options:\n\n- `bucket`\n- `keyPrefix`\n- `region`\n- `endpoint`\n- `accessKeyID`\n- `secretAccessKey`\n- `sessionToken`\n\n``` yaml\nstore:\n aws-s3-storage:\n bucket: S3_BUCKET # If an environment variable named S3_BUCKET is set, it will use that value. Otherwise assumes the bucket is named 'S3_BUCKET'\n keyPrefix: S3_KEY_PREFIX # If an environment variable named S3_KEY_PREFIX is set, it will use that value. Otherwise assumes the bucket is named 'S3_KEY_PREFIX'\n endpoint: S3_ENDPOINT # If an environment variable named S3_ENDPOINT is set, it will use that value. Otherwise assumes the bucket is named 'S3_ENDPOINT'\n ...\n```\n\nstore properties can be defined for packages. The storage location corresponds to the folder in s3 bucket.\n\n```\npackages:\n '@scope/*':\n access: all\n publish: $all\n storage: 'scoped'\n '**':\n access: $all\n publish: $all\n proxy: npmjs\n storage: 'public'\n```\n\n### Specify ACL of Tarball Files\n\nYou can specify ACL of tarball files in S3 by the *tarballACL* configuration, set to 'private' by default. To enable S3 integrated CDN service (Amazon CloudFront for example), set *tarballACL* to 'public-read' to grant tarball files anonymous read permission.\n\n```yaml\nstore:\n aws-s3-storage:\n tarballACL: public-read\n```\n\n## Developer Testing\n\nIn case of local testing, this project can be used self-efficiently. Four main ingredients are as follows:\n\n* `config.yaml`, see [verdaccio documentation](https://verdaccio.org/docs/en/configuration.html)\n* The provided docker file allows to test the plugin, with no need for main verdaccio application\n* The provided docker-compose also provides minio in orchestration as a local substitute for S3 backend\n* Create and set content of `registry.envs` as follows. 
This file does not exist on the repo and should be generated manually after cloning the project.\n\n```\nAWS_ACCESS_KEY_ID=foobar\nAWS_SECRET_ACCESS_KEY=1234567e\nAWS_DEFAULT_REGION=eu-central-1\nAWS_S3_ENDPOINT=https://localhost:9000/\nAWS_S3_PATH_STYLE=true\n```\n\n## Execute the docker image for testing\n\n> You need the latest docker installed in your computer\n\n```bash\ndocker-compose up\n```\n\n> By default there is no bucket created, **you might need to browse `http://127.0.0.1:9000/minio/` and create\nthe example bucket manually named `rise`** and then restart `docker-compose up`.\n\nThe default values should work out of the box. If you change anything, make sure the corresponding variables are set in\nother parts of the ingredient as well.\n"
}
}

README.md

@@ -114,3 +114,3 @@ # verdaccio-aws-s3-storage

* The provided docker-compose also provides minio in orchestration as a local substitute for S3 backend
-* Create and set content of `registry.envs` as follows. This file does not exist on the repo and should be generated manually after cloning the project.
+* Create and set content of `registry.env` as follows. This file does not exist on the repo and should be generated manually after cloning the project.

@@ -117,0 +117,0 @@ ```

