Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

lerna-publisher

Package Overview
Dependencies
Maintainers
1
Versions
28
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

lerna-publisher - npm Package Compare versions

Comparing version 1.0.20 to 1.0.21

cjs/deploy.d.ts

3

cjs/aws.d.ts

@@ -1,3 +0,2 @@

export declare function walkSync(pathToFolder: string): Promise<any[]>;
export declare function uploadFolder(folderPath: string, pkgName: string, branchName: string): Promise<boolean>;
export declare function uploadFolder(folderPath: string, pkgName: string, branchName: string): Promise<void>;
//# sourceMappingURL=aws.d.ts.map

@@ -6,58 +6,38 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = __importDefault(require("path"));
const fs_1 = __importDefault(require("fs"));
const glob_1 = __importDefault(require("glob"));
const aws_sdk_1 = __importDefault(require("aws-sdk"));
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const mime_1 = __importDefault(require("mime"));
// Recursively collects the paths of all files (not directories) under pathToFolder.
// NOTE(review): despite the name, this walks the tree with synchronous fs calls;
// the async signature is kept only for backward compatibility with await-based callers.
async function walkSync(pathToFolder) {
const files = fs_1.default.readdirSync(pathToFolder);
const output = [];
for (const file of files) {
const pathToFile = path_1.default.join(pathToFolder, file);
const isDirectory = fs_1.default.statSync(pathToFile).isDirectory();
if (isDirectory) {
// Flatten the recursive walk of the sub-directory into our result list.
output.push(...await walkSync(pathToFile));
}
else {
// Fixed: pathToFile is a plain string, so awaiting it was a no-op.
output.push(pathToFile);
}
}
return output;
}
exports.walkSync = walkSync;
async function internalUploadFolder(accessKeyIdPar, secretAccessKeyPar, s3BucketName, s3subFolder, localFolder) {
try {
aws_sdk_1.default.config.setPromisesDependency(Promise);
const s3 = new aws_sdk_1.default.S3({
signatureVersion: 'v4',
accessKeyId: accessKeyIdPar,
secretAccessKey: secretAccessKeyPar,
region: 'us-east-1',
});
const filesPaths = await walkSync(localFolder);
for (let i = 0; i < filesPaths.length; i++) {
const statistics = `(${i + 1}/${filesPaths.length}, ${Math.round((i + 1) / filesPaths.length * 100)}%)`;
const filePath = filesPaths[i];
const fileContent = fs_1.default.readFileSync(filePath);
// If the slash is like this "/" s3 will create a new folder, otherwise will not work properly.
const relativeToBaseFilePath = path_1.default.normalize(path_1.default.relative(localFolder, filePath));
let relativeToBaseFilePathForS3 = relativeToBaseFilePath.split(path_1.default.sep).join('/');
relativeToBaseFilePathForS3 = path_1.default.join(s3subFolder, relativeToBaseFilePathForS3);
const mimeType = mime_1.default.getType(filePath);
console.log(`Uploading`, statistics, relativeToBaseFilePathForS3);
// https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
await s3.putObject({
ACL: `public-read`,
Bucket: s3BucketName,
Key: relativeToBaseFilePathForS3,
Body: fileContent,
ContentType: mimeType,
}).promise();
console.log(`Uploaded `, statistics, relativeToBaseFilePathForS3);
}
return true;
aws_sdk_1.default.config.setPromisesDependency(Promise);
const s3 = new aws_sdk_1.default.S3({
signatureVersion: 'v4',
accessKeyId: accessKeyIdPar,
secretAccessKey: secretAccessKeyPar,
region: 'us-east-1'
});
const filesPaths = glob_1.default.sync(path_1.default.join(localFolder, '**/*.*'), { absolute: true }).map(p => path_1.default.normalize(p));
const numFiles = filesPaths.length;
for (const [i, filePath] of filesPaths.entries()) {
const statistics = `(${i + 1}/${numFiles}, ${Math.round(((i + 1) / numFiles) * 100)}%)`;
const fileContent = fs_1.default.readFileSync(filePath);
// If the slash is like this "/" s3 will create a new folder, otherwise will not work properly.
const relativeToBaseFilePath = path_1.default.normalize(path_1.default.relative(localFolder, filePath));
let relativeToBaseFilePathForS3 = relativeToBaseFilePath.split(path_1.default.sep).join('/');
relativeToBaseFilePathForS3 = path_1.default.join(s3subFolder, relativeToBaseFilePathForS3);
const mimeType = mime_1.default.getType(filePath);
console.log(`Uploading`, statistics, relativeToBaseFilePathForS3);
// https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
await s3
.putObject({
ACL: `public-read`,
Bucket: s3BucketName,
Key: relativeToBaseFilePathForS3,
Body: fileContent,
ContentType: mimeType
})
.promise();
console.log(`Uploaded `, statistics, relativeToBaseFilePathForS3);
}
catch (error) {
console.error(error);
return false;
}
}

@@ -69,2 +49,6 @@ async function uploadFolder(folderPath, pkgName, branchName) {

const s3subfolder = path_1.default.join(pkgName, branchName);
const folderStats = fs_1.default.statSync(folderPath);
if (!folderStats.isDirectory()) {
throw new Error(`${folderPath} is not a directory.`);
}
return await internalUploadFolder(accessKey, accessSecretID, bucketName, s3subfolder, folderPath);

@@ -71,0 +55,0 @@ }

#!/usr/bin/env node
export declare function getRepoAndOrg(githubLink: string): Promise<string[]>;
export declare function runDeployCommand(folder: string, pkgname: string): Promise<void>;
export {};
//# sourceMappingURL=cli.d.ts.map

@@ -7,11 +7,8 @@ #!/usr/bin/env node

Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = __importDefault(require("path"));
const commander_1 = __importDefault(require("commander"));
const path_1 = __importDefault(require("path"));
const publish_lerna_1 = require("./publish_lerna");
const aws_1 = require("./aws");
const github_1 = require("./github");
const getPackages = require('get-monorepo-packages');
const publish_1 = require("./publish");
const deploy_1 = require("./deploy");
const { version, description } = require('../package.json');
const chalk = require('chalk');
const childProcess = require('child_process');
const { AWS_ACCESS_KEY_ID, AWS_SECRET_ID, AWS_BUCKET_NAME, GITHUB_TOKEN, TRAVIS_PULL_REQUEST, TRAVIS_REPO_SLUG, NPM_TOKEN } = process.env;
process.on('unhandledRejection', printErrorAndExit);

@@ -22,4 +19,15 @@ commander_1.default

// function to execute when the command is used
.action((folder) => {
runPublishCommand(folder);
.action(async (folder) => {
if (!NPM_TOKEN) {
console.log('process.env.NPM_TOKEN is empty or not defined. Not publishing.');
return;
}
try {
const directoryPath = path_1.default.resolve(folder);
console.log('lerna-publisher starting in ' + directoryPath);
await publish_1.publish(directoryPath);
}
catch (e) {
printErrorAndExit(e);
}
});

@@ -31,5 +39,34 @@ commander_1.default

// .option('--aws-bucket-name <string>', 'aws bucket name to publish to.')
.action((pkgName, folder) => {
console.log(pkgName, folder);
runDeployCommand(folder, pkgName);
.action(async (pkgName, folder) => {
if (TRAVIS_PULL_REQUEST === 'false' || TRAVIS_PULL_REQUEST === undefined) {
console.log('Not a PR. Not deploying.');
return;
}
try {
if (!AWS_ACCESS_KEY_ID) {
throw new Error('process.env.AWS_ACCESS_KEY_ID is empty or not defined. Not deploying.');
}
else if (!AWS_SECRET_ID) {
throw new Error('process.env.AWS_SECRET_ID is empty or not defined. Not deploying.');
}
else if (!AWS_BUCKET_NAME) {
throw new Error('process.env.AWS_BUCKET_NAME is empty or not defined. Not deploying.');
}
else if (!GITHUB_TOKEN) {
throw new Error('process.env.GITHUB_TOKEN is empty or not defined. Not deploying.');
}
else if (!TRAVIS_PULL_REQUEST) {
throw new Error('process.env.TRAVIS_PULL_REQUEST is empty or not defined. Not deploying.');
}
else if (!TRAVIS_REPO_SLUG) {
throw new Error('process.env.TRAVIS_REPO_SLUG is empty or not defined. Not deploying.');
}
const prNum = parseInt(TRAVIS_PULL_REQUEST, 10) || 0;
const directoryPath = path_1.default.resolve(folder);
console.log(`Deploying demo for ${pkgName} at ${directoryPath}`);
await deploy_1.deploy(directoryPath, pkgName, prNum);
}
catch (e) {
printErrorAndExit(e);
}
});

@@ -40,82 +77,3 @@ commander_1.default

.usage('[options]')
.option('--no-colors', 'turn off colors (default: env detected)')
.parse(process.argv);
// Resolve the folder the tool should operate in: an explicitly supplied
// path wins; otherwise fall back to the current working directory.
async function getWorkingFolder(pathToFolder) {
const hasExplicitFolder = pathToFolder !== '' && pathToFolder !== undefined;
return hasExplicitFolder ? path_1.default.resolve(pathToFolder) : process.cwd();
}
// Extracts [org, repo] from an SSH-style GitHub link.
// git@github.com:wixplosives/lerna-publisher.git -> ['wixplosives', 'lerna-publisher']
async function getRepoAndOrg(githubLink) {
// Take everything after the host part of the SSH link.
const afterHost = githubLink.split(':')[1];
// Fixed: strip only a trailing '.git' instead of cutting at the first '.',
// so repository names that contain dots survive intact.
const withoutGitSuffix = afterHost.replace(/\.git$/, '');
return withoutGitSuffix.split('/');
}
exports.getRepoAndOrg = getRepoAndOrg;
// Deploys a demo build of one monorepo package: packs it with yarn, uploads the
// package's dist folder to S3, and posts the resulting demo link back to the
// originating pull request. Always terminates the process (exit 0 on success,
// 1 on any failure, 0 immediately when not running on a PR build).
async function runDeployCommand(folder, pkgname) {
console.log('Deploy ', pkgname, 'from', folder);
let prNum = 0;
// TRAVIS_PULL_REQUEST is the PR number for PR builds; presumably the literal
// string 'false' for branch builds (Travis CI convention) — verify on CI.
const varValue = process.env.TRAVIS_PULL_REQUEST;
let result = true;
if (varValue === 'false' || varValue === undefined) {
console.log('Not a pull request.Nothing to deploy.');
process.exit(0);
}
else {
prNum = parseInt(varValue, 10);
}
const pathToProject = await getWorkingFolder(folder);
// Locate the requested package among all packages of the monorepo.
const packages = getPackages(pathToProject);
const pkgToDeploy = packages.find((element) => {
return element.package.name === pkgname;
});
// NOTE(review): pkgToDeploy is undefined when pkgname is not found; the
// property accesses below would then throw — confirm callers guarantee a match.
const bucketName = process.env.AWS_BUCKET_NAME || '';
const bucketLink = `http://${bucketName}.s3-website-us-east-1.amazonaws.com/`;
const branchName = process.env.TRAVIS_PULL_REQUEST_BRANCH || '';
const githubToken = process.env.GITHUB_TOKEN || '';
// TRAVIS_REPO_SLUG is expected as 'org/repo'.
const githubSlug = process.env.TRAVIS_REPO_SLUG || '';
const slugParts = githubSlug.split('/');
const repo = slugParts[1];
const org = slugParts[0];
// Demo files live under <pkgName>/<branchName> inside the bucket.
const relativePathInBucket = path_1.default.join(pkgToDeploy.package.name, branchName);
console.log('Deploy package from folder: ', pkgToDeploy.location, 'to', bucketName, relativePathInBucket);
// pack it before deploying demo server
const cmdPackText = 'yarn pack --non-interactive';
try {
// Runs the pack step in the package's own folder, streaming its output.
childProcess.execSync(cmdPackText, { cwd: pkgToDeploy.location, stdio: 'inherit' });
const pathToPublish = path_1.default.join(pkgToDeploy.location, 'dist');
result = await aws_1.uploadFolder(pathToPublish, pkgToDeploy.package.name, branchName);
console.debug('Upload folder to s3 result: ', result ? chalk.green('SUCCESS') : chalk.red('FAILED'));
if (result) {
// Only post the demo link to the PR after a successful upload.
const cureentToime = new Date();
const textToPublish = 'Demo server. Deployed at ' + cureentToime.toString();
const linkToPublish = bucketLink + relativePathInBucket;
console.debug('Link to publish', linkToPublish);
result = await github_1.postLinkToPRTest(textToPublish, linkToPublish, githubToken, org, repo, prNum);
console.debug('Post link to PR result: ', result ? chalk.green('SUCCESS') : chalk.red('FAILED'));
}
}
catch (error) {
// Pack/upload/post failures are logged; `result` keeps its last value and
// determines the exit code below.
console.error(chalk.red('\tD failed'), error);
}
console.log('Exiting', result ? 0 : 1);
process.exit(result ? 0 : 1);
}
exports.runDeployCommand = runDeployCommand;
// Publishes the monorepo located in `folder` (or the cwd when omitted) and
// terminates the process: exit code 0 on success, 1 on failure.
async function runPublishCommand(folder) {
const pathToProject = await getWorkingFolder(folder);
console.log('lerna-publisher starting in ' + pathToProject);
const result = await publish_lerna_1.CheckAndPublishMonorepo(pathToProject).catch(printErrorAndExit);
const exitCode = result ? 0 : 1;
console.log(result ? 'Success' : 'Failed');
console.log('Exiting', exitCode);
process.exit(exitCode);
}
function printErrorAndExit(message) {

@@ -122,0 +80,0 @@ console.error(message);

{
"name": "lerna-publisher",
"description": "Utility to publish lerna/yarn/workspace types of packages from ci to npm",
"version": "1.0.20",
"version": "1.0.21",
"main": "cjs/index.js",

@@ -13,32 +13,33 @@ "bin": {

"build": "ts-build ./src --cjs",
"test": "mocha -r @ts-tools/node \"./test/**/*.spec.ts\" --watch-extensions ts --timeout 50000",
"test": "mocha -r @ts-tools/node/r \"./test/**/*.spec.ts\" --watch-extensions ts --timeout 50000",
"prepack": "yarn build"
},
"dependencies": {
"aws-sdk": "^2.384.0",
"chalk": "^2.4.1",
"child_process": "^1.0.2",
"commander": "^2.19.0",
"get-monorepo-packages": "^1.1.0",
"github-api": "^3.0.0",
"mime": "^2.4.0",
"pacote": "^9.2.3"
"aws-sdk": "^2.501.0",
"chalk": "^2.4.2",
"commander": "^2.20.0",
"get-monorepo-packages": "^1.2.0",
"github-api": "^3.2.2",
"glob": "^7.1.4",
"mime": "^2.4.4",
"pacote": "^9.5.4"
},
"devDependencies": {
"@ts-tools/build": "^0.1.2",
"@ts-tools/node": "^0.7.4",
"@ts-tools/build": "^0.1.14",
"@ts-tools/node": "^0.9.8",
"@types/chai": "^4.1.7",
"@types/commander": "^2.12.2",
"@types/mime": "^2.0.0",
"@types/mocha": "^5.2.5",
"@types/node": "8",
"@types/semver": "^5.5.0",
"@types/sinon": "^7.0.0",
"@types/chai-as-promised": "^7.1.0",
"@types/glob": "^7.1.1",
"@types/mime": "^2.0.1",
"@types/mocha": "^5.2.7",
"@types/node": "10",
"@types/semver": "^6.0.1",
"@types/sinon": "^7.0.13",
"chai": "^4.2.0",
"mocha": "^5.2.0",
"rimraf": "^2.6.2",
"sinon": "^7.2.2",
"ts-sinon": "^1.0.12",
"tslint": "^5.11.0",
"typescript": "^3.2.2"
"chai-as-promised": "^7.1.1",
"mocha": "^6.2.0",
"rimraf": "^2.6.3",
"sinon": "^7.3.2",
"tslint": "^5.18.0",
"typescript": "^3.5.3"
},

@@ -45,0 +46,0 @@ "files": [

@@ -11,2 +11,11 @@ [![Build Status](https://travis-ci.com/wixplosives/lerna-publisher.svg?branch=master)](https://travis-ci.com/wixplosives/lerna-publisher)[![npm version](https://badge.fury.io/js/lerna-publisher.svg)](https://badge.fury.io/js/lerna-publisher)

# Prepack
lerna-publisher assumes you already have a built version of your package in the folder where lerna-publisher runs.
To make sure your package is built, add the following to the scripts key of your package.json.
```"prepack": "yarn build"```
You need a line like this in the package.json of every package you want to publish to npm.
## Usage

@@ -13,0 +22,0 @@

@@ -1,71 +0,59 @@

import AWS from 'aws-sdk'
import fs from 'fs'
import path from 'path'
import mime from 'mime'
import path from 'path';
import fs from 'fs';
import glob from 'glob';
import AWS from 'aws-sdk';
import mime from 'mime';
export async function walkSync(pathToFolder: string): Promise<any[]> {
const files = fs.readdirSync(pathToFolder)
const output = []
for (const file of files) {
const pathToFile = path.join(pathToFolder, file)
const isDirectory = fs.statSync(pathToFile).isDirectory()
if (isDirectory) {
output.push(...await walkSync(pathToFile))
} else {
output.push(await pathToFile)
}
}
return output
}
async function internalUploadFolder(accessKeyIdPar: string,
secretAccessKeyPar: string,
s3BucketName: string,
s3subFolder: string,
localFolder: string) {
try {
AWS.config.setPromisesDependency(Promise)
async function internalUploadFolder(
accessKeyIdPar: string,
secretAccessKeyPar: string,
s3BucketName: string,
s3subFolder: string,
localFolder: string
) {
AWS.config.setPromisesDependency(Promise);
const s3 = new AWS.S3({
signatureVersion: 'v4',
accessKeyId: accessKeyIdPar,
secretAccessKey: secretAccessKeyPar,
region: 'us-east-1',
})
signatureVersion: 'v4',
accessKeyId: accessKeyIdPar,
secretAccessKey: secretAccessKeyPar,
region: 'us-east-1'
});
const filesPaths = await walkSync(localFolder)
for (let i = 0; i < filesPaths.length; i++) {
const statistics = `(${i + 1}/${filesPaths.length}, ${Math.round((i + 1) / filesPaths.length * 100)}%)`
const filePath = filesPaths[i]
const fileContent = fs.readFileSync(filePath)
// If the slash is like this "/" s3 will create a new folder, otherwise will not work properly.
const relativeToBaseFilePath = path.normalize(path.relative(localFolder, filePath))
let relativeToBaseFilePathForS3 = relativeToBaseFilePath.split(path.sep).join('/')
relativeToBaseFilePathForS3 = path.join(s3subFolder, relativeToBaseFilePathForS3)
const mimeType = mime.getType(filePath)
console.log(`Uploading`, statistics, relativeToBaseFilePathForS3)
// https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
await s3.putObject({
ACL: `public-read`,
Bucket: s3BucketName,
Key: relativeToBaseFilePathForS3,
Body: fileContent,
ContentType: mimeType,
} as AWS.S3.PutObjectRequest).promise()
const filesPaths = glob.sync(path.join(localFolder, '**/*.*'), { absolute: true }).map(p => path.normalize(p));
console.log(`Uploaded `, statistics, relativeToBaseFilePathForS3)
const numFiles = filesPaths.length;
for (const [i, filePath] of filesPaths.entries()) {
const statistics = `(${i + 1}/${numFiles}, ${Math.round(((i + 1) / numFiles) * 100)}%)`;
const fileContent = fs.readFileSync(filePath);
// If the slash is like this "/" s3 will create a new folder, otherwise will not work properly.
const relativeToBaseFilePath = path.normalize(path.relative(localFolder, filePath));
let relativeToBaseFilePathForS3 = relativeToBaseFilePath.split(path.sep).join('/');
relativeToBaseFilePathForS3 = path.join(s3subFolder, relativeToBaseFilePathForS3);
const mimeType = mime.getType(filePath);
console.log(`Uploading`, statistics, relativeToBaseFilePathForS3);
// https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
await s3
.putObject({
ACL: `public-read`,
Bucket: s3BucketName,
Key: relativeToBaseFilePathForS3,
Body: fileContent,
ContentType: mimeType
} as AWS.S3.PutObjectRequest)
.promise();
console.log(`Uploaded `, statistics, relativeToBaseFilePathForS3);
}
return true
} catch (error) {
console.error(error)
return false
}
}
export async function uploadFolder(folderPath: string, pkgName: string, branchName: string) {
const accessKey = process.env.AWS_ACCESS_KEY_ID || 'not defined'
const accessSecretID = process.env.AWS_SECRET_ID || 'not defined'
const bucketName = process.env.AWS_BUCKET_NAME || 'not defined'
const s3subfolder = path.join( pkgName, branchName)
return await internalUploadFolder( accessKey, accessSecretID, bucketName, s3subfolder, folderPath)
const accessKey = process.env.AWS_ACCESS_KEY_ID || 'not defined';
const accessSecretID = process.env.AWS_SECRET_ID || 'not defined';
const bucketName = process.env.AWS_BUCKET_NAME || 'not defined';
const s3subfolder = path.join(pkgName, branchName);
const folderStats = fs.statSync(folderPath);
if (!folderStats.isDirectory()) {
throw new Error(`${folderPath} is not a directory.`);
}
return await internalUploadFolder(accessKey, accessSecretID, bucketName, s3subfolder, folderPath);
}
#!/usr/bin/env node
import program from 'commander'
import path from 'path'
import { CheckAndPublishMonorepo } from './publish_lerna'
import { uploadFolder } from './aws'
import { postLinkToPRTest } from './github'
const getPackages = require( 'get-monorepo-packages' )
const { version, description } = require('../package.json')
import path from 'path';
import program from 'commander';
import { publish } from './publish';
import { deploy } from './deploy';
const { version, description } = require('../package.json');
const chalk = require('chalk')
const childProcess = require( 'child_process')
const {
AWS_ACCESS_KEY_ID,
AWS_SECRET_ID,
AWS_BUCKET_NAME,
GITHUB_TOKEN,
TRAVIS_PULL_REQUEST,
TRAVIS_REPO_SLUG,
NPM_TOKEN
} = process.env;
process.on('unhandledRejection', printErrorAndExit);
process.on('unhandledRejection', printErrorAndExit)
program

@@ -19,5 +23,15 @@ .command('publish [folder]') // sub-command name

// function to execute when the command is used
.action( (folder: string) => {
runPublishCommand(folder)
})
.action(async (folder: string) => {
if (!NPM_TOKEN) {
console.log('process.env.NPM_TOKEN is empty or not defined. Not publishing.');
return;
}
try {
const directoryPath = path.resolve(folder);
console.log('lerna-publisher starting in ' + directoryPath);
await publish(directoryPath);
} catch (e) {
printErrorAndExit(e);
}
});

@@ -29,7 +43,31 @@ program

// .option('--aws-bucket-name <string>', 'aws bucket name to publish to.')
.action( ( pkgName: string, folder: string ) => {
console.log( pkgName, folder )
runDeployCommand(folder, pkgName )
})
.action(async (pkgName: string, folder: string) => {
if (TRAVIS_PULL_REQUEST === 'false' || TRAVIS_PULL_REQUEST === undefined) {
console.log('Not a PR. Not deploying.');
return;
}
try {
if (!AWS_ACCESS_KEY_ID) {
throw new Error('process.env.AWS_ACCESS_KEY_ID is empty or not defined. Not deploying.');
} else if (!AWS_SECRET_ID) {
throw new Error('process.env.AWS_SECRET_ID is empty or not defined. Not deploying.');
} else if (!AWS_BUCKET_NAME) {
throw new Error('process.env.AWS_BUCKET_NAME is empty or not defined. Not deploying.');
} else if (!GITHUB_TOKEN) {
throw new Error('process.env.GITHUB_TOKEN is empty or not defined. Not deploying.');
} else if (!TRAVIS_PULL_REQUEST) {
throw new Error('process.env.TRAVIS_PULL_REQUEST is empty or not defined. Not deploying.');
} else if (!TRAVIS_REPO_SLUG) {
throw new Error('process.env.TRAVIS_REPO_SLUG is empty or not defined. Not deploying.');
}
const prNum = parseInt(TRAVIS_PULL_REQUEST, 10) || 0;
const directoryPath = path.resolve(folder);
console.log(`Deploying demo for ${pkgName} at ${directoryPath}`);
await deploy(directoryPath, pkgName, prNum);
} catch (e) {
printErrorAndExit(e);
}
});
program

@@ -39,91 +77,7 @@ .version(version, '-v, --version')

.usage('[options]')
.option('--no-colors', 'turn off colors (default: env detected)')
.parse(process.argv)
.parse(process.argv);
// Resolve the folder the tool should operate in: an explicitly supplied
// path wins; otherwise fall back to the current working directory.
async function getWorkingFolder(pathToFolder: string) {
    const hasExplicitFolder = pathToFolder !== '' && pathToFolder !== undefined
    return hasExplicitFolder ? path.resolve(pathToFolder) : process.cwd()
}
// Extracts [org, repo] from an SSH-style GitHub link.
// git@github.com:wixplosives/lerna-publisher.git -> ['wixplosives', 'lerna-publisher']
export async function getRepoAndOrg(githubLink: string): Promise<string[]> {
    // Take everything after the host part of the SSH link.
    const afterHost = githubLink.split(':')[1]
    // Fixed: strip only a trailing '.git' instead of cutting at the first '.',
    // so repository names that contain dots survive intact.
    const withoutGitSuffix = afterHost.replace(/\.git$/, '')
    return withoutGitSuffix.split('/')
}
// Deploys a demo build of one monorepo package: packs it with yarn, uploads the
// package's dist folder to S3, and posts the resulting demo link back to the
// originating pull request. Always terminates the process (exit 0 on success,
// 1 on any failure, 0 immediately when not running on a PR build).
export async function runDeployCommand(folder: string, pkgname: string ) {
console.log('Deploy ' , pkgname , 'from' , folder)
let prNum = 0
// TRAVIS_PULL_REQUEST is the PR number for PR builds; presumably the literal
// string 'false' for branch builds (Travis CI convention) — verify on CI.
const varValue = process.env.TRAVIS_PULL_REQUEST
let result = true
if ( varValue === 'false' || varValue === undefined ) {
console.log('Not a pull request.Nothing to deploy.')
process.exit( 0 )
} else {
prNum = parseInt(varValue, 10)
}
const pathToProject = await getWorkingFolder(folder)
// Locate the requested package among all packages of the monorepo.
const packages = getPackages(pathToProject)
const pkgToDeploy = packages.find((element: { package: { name: string, version: string }, location: string}) => {
return element.package.name === pkgname
})
// NOTE(review): pkgToDeploy is undefined when pkgname is not found; the
// property accesses below would then throw — confirm callers guarantee a match.
const bucketName = process.env.AWS_BUCKET_NAME || ''
const bucketLink = `http://${bucketName}.s3-website-us-east-1.amazonaws.com/`
const branchName = process.env.TRAVIS_PULL_REQUEST_BRANCH || ''
const githubToken = process.env.GITHUB_TOKEN || ''
// TRAVIS_REPO_SLUG is expected as 'org/repo'.
const githubSlug = process.env.TRAVIS_REPO_SLUG || ''
const slugParts = githubSlug.split('/')
const repo = slugParts[1]
const org = slugParts[0]
// Demo files live under <pkgName>/<branchName> inside the bucket.
const relativePathInBucket = path.join(pkgToDeploy.package.name, branchName )
console.log('Deploy package from folder: ', pkgToDeploy.location, 'to', bucketName, relativePathInBucket )
// pack it before deploying demo server
const cmdPackText = 'yarn pack --non-interactive'
try {
// Runs the pack step in the package's own folder, streaming its output.
childProcess.execSync(cmdPackText , {cwd: pkgToDeploy.location, stdio: 'inherit'})
const pathToPublish = path.join(pkgToDeploy.location, 'dist')
result = await uploadFolder(pathToPublish, pkgToDeploy.package.name, branchName)
console.debug('Upload folder to s3 result: ', result ? chalk.green('SUCCESS') : chalk.red('FAILED'))
if ( result ) {
// Only post the demo link to the PR after a successful upload.
const cureentToime = new Date()
const textToPublish = 'Demo server. Deployed at ' + cureentToime.toString()
const linkToPublish = bucketLink + relativePathInBucket
console.debug('Link to publish', linkToPublish)
result = await postLinkToPRTest(textToPublish, linkToPublish,
githubToken, org, repo,
prNum)
console.debug('Post link to PR result: ', result ? chalk.green('SUCCESS') : chalk.red('FAILED'))
}
} catch (error) {
// Pack/upload/post failures are logged; `result` keeps its last value and
// determines the exit code below.
console.error(chalk.red('\tD failed'), error)
}
console.log('Exiting', result ? 0 : 1)
process.exit( result ? 0 : 1 )
}
// Publishes the monorepo located in `folder` (or the cwd when omitted) and
// terminates the process: exit code 0 on success, 1 on failure.
async function runPublishCommand(folder: string) {
    const pathToProject = await getWorkingFolder(folder)
    console.log('lerna-publisher starting in ' + pathToProject)
    const result = await CheckAndPublishMonorepo(pathToProject).catch(printErrorAndExit)
    const exitCode = result ? 0 : 1
    console.log(result ? 'Success' : 'Failed')
    console.log('Exiting', exitCode)
    process.exit(exitCode)
}
function printErrorAndExit(message: unknown) {
console.error(message)
process.exit(1)
console.error(message);
process.exit(1);
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc