Socket
Socket
Sign inDemoInstall

@covector/assemble

Package Overview
Dependencies
Maintainers
1
Versions
28
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@covector/assemble - npm Package Compare versions

Comparing version 0.8.2 to 0.9.0

test/__snapshots__/assemble.test.ts.snap

14

CHANGELOG.md
# Changelog
## \[0.9.0]
- Package file objects now include the dependencies keyed by the name with an array including the dependency type. This information is now passed to the commands pipeline.
- [ba6e7f1](https://www.github.com/jbolda/covector/commit/ba6e7f1c9ead622844ff1c040fffb67b925f0bcf) skip bump for range ([#257](https://www.github.com/jbolda/covector/pull/257)) on 2023-01-12
- Update multiple devDeps, semver, yargs, inquirer, and packages in our action. These are primarily internal upgrades and don't affect external APIs.
- [18ff898](https://www.github.com/jbolda/covector/commit/18ff898a64a0f3677c55d994d22177189700204a) dep update ([#240](https://www.github.com/jbolda/covector/pull/240)) on 2022-04-16
- When collecting `git log` metadata for change files, running it in parallel caused occasional no-ops which increasingly became more flaky with more files. Adjust this to run it serially, which should be a negligible difference.
- [bf94c90](https://www.github.com/jbolda/covector/commit/bf94c905e05ea8402c596564eea1fa8bcb8d975b) undefined commits in changelog, `git log` needs to be run serially ([#261](https://www.github.com/jbolda/covector/pull/261)) on 2023-01-16
- Remove the `to-vfile` package as a dependency. This allows us to focus our file reference to our specific needs, and one less dependency to maintain. With this change, we also converted a handful of promises into generators for better compatibility and control with effection.
- [1b33933](https://www.github.com/jbolda/covector/commit/1b33933be25094900f647527a82ddba0a08778fe) Remove vfile ([#234](https://www.github.com/jbolda/covector/pull/234)) on 2022-04-10
- Upgrade to `effection` v2. This is primarily an internal improvement, but will enable future features such as fetching from an endpoint to check if a version of a package was published. It also brings an updated dependency to gracefully shutdown windows processes.
- [a0acf81](https://www.github.com/jbolda/covector/commit/a0acf81b2235ac142233d9c0e416d5e07af3cbb3) Effection v2 ([#227](https://www.github.com/jbolda/covector/pull/227)) on 2022-03-19
- [a346221](https://www.github.com/jbolda/covector/commit/a346221102075e647693851fd1019d66641f8014) bump effection to latest on v2 ([#246](https://www.github.com/jbolda/covector/pull/246)) on 2022-10-26
## \[0.8.2]

@@ -4,0 +18,0 @@

13

dist/index.d.ts

@@ -1,6 +0,11 @@

import type { VFile, ConfigFile, CommonBumps, Change, PkgVersion, PkgPublish } from "@covector/types";
import { Operation } from "effection";
import type { File, ConfigFile, Changeset, CommonBumps, Change, PkgVersion, PkgPublish } from "@covector/types";
export declare const parseChange: ({ cwd, file, }: {
cwd?: string | undefined;
file: File;
}) => Operation<Changeset>;
export declare const compareBumps: (bumpOne: CommonBumps, bumpTwo: CommonBumps) => CommonBumps;
export declare const assemble: ({ cwd, vfiles, config, preMode, }: {
export declare const assemble: ({ cwd, files, config, preMode, }: {
cwd?: string | undefined;
vfiles: VFile[];
files: File[];
config?: ConfigFile | undefined;

@@ -11,3 +16,3 @@ preMode?: {

} | undefined;
}) => Generator<Generator<any, Change[], any>, {
}) => Generator<Operation<Change[]>, {
changes?: Change[] | undefined;

@@ -14,0 +19,0 @@ releases?: {

@@ -6,3 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.mergeIntoConfig = exports.mergeChangesToConfig = exports.assemble = exports.compareBumps = void 0;
exports.mergeIntoConfig = exports.mergeChangesToConfig = exports.assemble = exports.compareBumps = exports.parseChange = void 0;
const unified_1 = __importDefault(require("unified"));

@@ -16,4 +16,4 @@ const remark_parse_1 = __importDefault(require("remark-parse"));

const command_1 = require("@covector/command");
const parseChange = function* ({ cwd, vfile, }) {
const processor = unified_1.default()
const parseChange = function* ({ cwd, file, }) {
const processor = (0, unified_1.default)()
.use(remark_parse_1.default)

@@ -24,6 +24,7 @@ .use(remark_frontmatter_1.default, ["yaml"])

});
const parsed = processor.parse(vfile.contents.trim());
const parsed = processor.parse(file.content.trim());
const processed = yield processor.run(parsed);
let changeset = {};
const [parsedChanges, ...remaining] = processed.children;
//@ts-ignore
const parsedYaml = js_yaml_1.default.load(parsedChanges.value);

@@ -33,6 +34,7 @@ changeset.releases =

if (Object.keys(changeset.releases).length === 0)
throw new Error(`${vfile.data.filename} didn't have any packages bumped. Please add a package bump.`);
throw new Error(`${file.path} didn't have any packages bumped. Please add a package bump.`);
changeset.summary = processor
.stringify({
type: "root",
//@ts-ignore
children: remaining,

@@ -43,6 +45,6 @@ })

try {
let gitInfo = yield command_1.runCommand({
const gitInfo = yield (0, command_1.runCommand)({
cwd,
pkgPath: "",
command: `git log --reverse --format="%h %H %as %s" ${vfile.data.filename}`,
pkgPath: ".",
command: `git --no-pager log --reverse --format="%h %H %as %s" ${file.path}`,
log: false,

@@ -59,6 +61,6 @@ });

});
changeset.meta = Object.assign(Object.assign({}, vfile.data), { commits });
changeset.meta = Object.assign(Object.assign({}, file), { commits });
}
catch (e) {
changeset.meta = Object.assign({}, vfile.data);
changeset.meta = Object.assign({}, file);
}

@@ -68,2 +70,3 @@ }

};
exports.parseChange = parseChange;
// major, minor, or patch

@@ -86,3 +89,3 @@ // enum and use Int to compare

const bumpOptions = ["major", "minor", "patch", "noop"].concat(additionalBumpTypes);
assertBumpType(pkg, change.releases[pkg], bumpOptions, !change.meta ? `` : ` in ${change.meta.filename}`);
assertBumpType(pkg, change.releases[pkg], bumpOptions, !change.meta ? `` : ` in ${change.meta.path}`);
const bumpType = additionalBumpTypes.includes(change.releases[pkg])

@@ -94,3 +97,3 @@ ? "noop"

type: bumpType,
changes: lodash_1.cloneDeep([change]),
changes: (0, lodash_1.cloneDeep)([change]),
};

@@ -100,4 +103,4 @@ }

release[pkg] = {
type: exports.compareBumps(release[pkg].type, bumpType),
changes: lodash_1.cloneDeep([...release[pkg].changes, change]),
type: (0, exports.compareBumps)(release[pkg].type, bumpType),
changes: (0, lodash_1.cloneDeep)([...release[pkg].changes, change]),
};

@@ -117,3 +120,3 @@ }

}
const assemble = function* ({ cwd, vfiles, config, preMode = { on: false, prevFiles: [] }, }) {
const assemble = function* ({ cwd, files, config, preMode = { on: false, prevFiles: [] }, }) {
let plan = {};

@@ -123,12 +126,12 @@ // if in prerelease mode, we only make bumps if the new one is "larger" than the last

if (preMode.on) {
const allChanges = yield changesParsed({ cwd, vfiles });
const allChanges = yield changesParsed({ cwd, files });
const allMergedRelease = mergeReleases(allChanges, config || {});
if (preMode.prevFiles.length > 0) {
const newVfiles = vfiles.reduce((newVFiles, vfile) => {
const prevFile = preMode.prevFiles.find((filename) => vfile.data.filename === filename);
const newFiles = files.reduce((newFiles, file) => {
const prevFile = preMode.prevFiles.find((filename) => file.path === filename);
if (!prevFile) {
return newVFiles.concat([vfile]);
return newFiles.concat([file]);
}
else {
return newVFiles;
return newFiles;
}

@@ -138,12 +141,12 @@ }, []);

cwd,
vfiles: newVfiles,
files: newFiles,
});
const newMergedRelease = mergeReleases(newChanges, config || {});
const oldVfiles = vfiles.reduce((newVFiles, vfile) => {
const prevFile = preMode.prevFiles.find((filename) => vfile.data.filename === filename);
const oldFiles = files.reduce((newFiles, file) => {
const prevFile = preMode.prevFiles.find((filename) => file.path === filename);
if (prevFile) {
return newVFiles.concat([vfile]);
return newFiles.concat([file]);
}
else {
return newVFiles;
return newFiles;
}

@@ -153,3 +156,3 @@ }, []);

cwd,
vfiles: oldVfiles,
files: oldFiles,
});

@@ -173,6 +176,7 @@ const oldMergedRelease = mergeReleases(oldChanges, config || {});

else {
let changes = yield changesParsed({ cwd, vfiles });
let changes = yield changesParsed({ cwd, files });
plan.changes = changes;
plan.releases = mergeReleases(changes, config || {});
}
// check that plan only includes pkgs that exist
if (config && Object.keys(config).length > 0) {

@@ -182,3 +186,3 @@ for (let pkg of Object.keys(plan.releases)) {

let changesContainingError = plan.releases[pkg].changes.reduce((files, file) => {
files = `${files}${files === "" ? "" : ", "}${file.meta && file.meta.filename ? file.meta.filename : ""}`;
files = `${files}${files === "" ? "" : ", "}${file.meta && file.meta.path ? file.meta.path : ""}`;
return files;

@@ -193,9 +197,9 @@ }, "");

exports.assemble = assemble;
const changesParsed = function* ({ cwd, vfiles, }) {
const allVfiles = vfiles.map((vfile) => parseChange({ cwd, vfile }));
let yieldedV = [];
for (let v of allVfiles) {
yieldedV = [...yieldedV, yield v];
const changesParsed = function* ({ cwd, files, }) {
const allChangesParsed = [];
for (let file of files) {
const parsed = yield (0, exports.parseChange)({ cwd, file });
allChangesParsed.push(parsed);
}
return yieldedV;
return allChangesParsed;
};

@@ -264,4 +268,4 @@ const changeDiff = ({ allMergedRelease, newMergedRelease, oldMergedRelease, }) => {

if (dryRun) {
console.log("==== data piped into commands ===");
Object.keys(pipeOutput).forEach((pkg) => console.log(pkg, "pipe", pipeOutput[pkg].pipe));
console.dir("==== data piped into commands ===");
Object.keys(pipeOutput).forEach((pkg) => console.dir({ pkg, pipe: pipeOutput[pkg].pipe }, { depth: 5 }));
}

@@ -310,3 +314,3 @@ return { commands, pipeTemplate: pipeOutput };

let extraPublishParams = {
pkgFile: yield files_1.readPkgFile({
pkgFile: yield (0, files_1.readPkgFile)({
cwd,

@@ -323,2 +327,3 @@ pkgConfig: pkgCommands[pkg],

versionPatch: extraPublishParams.pkgFile.versionPatch,
deps: extraPublishParams.pkgFile.deps,
pkg: extraPublishParams.pkgFile.pkg,

@@ -331,3 +336,3 @@ };

: {
[`getPublishedVersion${subPublishCommand}`]: lodash_1.template(
[`getPublishedVersion${subPublishCommand}`]: (0, lodash_1.template)(
//@ts-ignore no index type string

@@ -357,4 +362,4 @@ pkgCommands[pkg][`getPublishedVersion${subPublishCommand}`])(pipeToTemplate),

if (dryRun) {
console.log("==== data piped into commands ===");
Object.keys(pipeOutput).forEach((pkg) => console.log(pkg, "pipe", pipeOutput[pkg].pipe));
console.dir("==== data piped into commands ===");
Object.keys(pipeOutput).forEach((pkg) => console.dir({ pkg, pipe: pipeOutput[pkg].pipe }, { depth: 5 }));
}

@@ -392,3 +397,3 @@ return { commands, pipeTemplate: pipeOutput };

typeof c[complex] === "string"
? lodash_1.template(c[complex])(pipe)
? (0, lodash_1.template)(c[complex])(pipe)
: c[complex];

@@ -400,5 +405,5 @@ return templated;

// if it is a function, we pipe when we run the function
return typeof c === "function" ? c : lodash_1.template(c)(pipe);
return typeof c === "function" ? c : (0, lodash_1.template)(c)(pipe);
}
});
};
{
"name": "@covector/assemble",
"version": "0.8.2",
"version": "0.9.0",
"license": "Apache-2.0",

@@ -15,9 +15,10 @@ "homepage": "https://github.com/jbolda/covector#readme",

"build": "tsc -b",
"clean": "rimraf dist node_modules",
"clean": "rimraf dist tsconfig.tsbuildinfo node_modules",
"prepublishOnly": "tsc -b"
},
"dependencies": {
"@covector/command": "0.5.1",
"@covector/files": "0.5.1",
"js-yaml": "^4.0.0",
"@covector/command": "0.6.0",
"@covector/files": "0.6.0",
"effection": "^2.0.6",
"js-yaml": "^4.1.0",
"lodash": "^4.17.21",

@@ -32,6 +33,6 @@ "remark-frontmatter": "^3.0.0",

"@types/js-yaml": "^4.0.0",
"@types/lodash": "^4.14.168",
"@types/lodash": "^4.14.181",
"fixturez": "^1.1.0",
"tslib": "^2.2.0",
"typescript": "^4.2.3"
"tslib": "^2.4.0",
"typescript": "^4.6.3"
},

@@ -38,0 +39,0 @@ "volta": {

@@ -0,1 +1,2 @@

import { Operation } from "effection";
import unified from "unified";

@@ -12,3 +13,3 @@ import { Root, YAML as Frontmatter, Content } from "mdast";

import type {
VFile,
File,
ConfigFile,

@@ -25,9 +26,9 @@ Changeset,

const parseChange = function* ({
export const parseChange = function* ({
cwd,
vfile,
file,
}: {
cwd?: string;
vfile: VFile;
}): Generator<any, Changeset, any> {
file: File;
}): Operation<Changeset> {
const processor = unified()

@@ -40,9 +41,8 @@ .use(parse)

const parsed = processor.parse(vfile.contents.trim());
const parsed = processor.parse(file.content.trim());
const processed: Root = yield processor.run(parsed);
let changeset: Changeset = {};
const [parsedChanges, ...remaining]: (
| Frontmatter
| Content
)[] = processed.children;
const [parsedChanges, ...remaining]: (Frontmatter | Content)[] =
processed.children;
//@ts-ignore
const parsedYaml = yaml.load(parsedChanges.value as string);

@@ -53,3 +53,3 @@ changeset.releases =

throw new Error(
`${vfile.data.filename} didn't have any packages bumped. Please add a package bump.`
`${file.path} didn't have any packages bumped. Please add a package bump.`
);

@@ -59,2 +59,3 @@ changeset.summary = processor

type: "root",
//@ts-ignore
children: remaining,

@@ -66,6 +67,6 @@ })

try {
let gitInfo = yield runCommand({
const gitInfo = yield runCommand({
cwd,
pkgPath: "",
command: `git log --reverse --format="%h %H %as %s" ${vfile.data.filename}`,
pkgPath: ".",
command: `git --no-pager log --reverse --format="%h %H %as %s" ${file.path}`,
log: false,

@@ -84,3 +85,3 @@ });

changeset.meta = {
...vfile.data,
...file,
commits,

@@ -90,3 +91,3 @@ };

changeset.meta = {
...vfile.data,
...file,
};

@@ -126,3 +127,3 @@ }

bumpOptions,
!change.meta ? `` : ` in ${change.meta.filename}`
!change.meta ? `` : ` in ${change.meta.path}`
);

@@ -167,3 +168,3 @@

cwd,
vfiles,
files,
config,

@@ -173,3 +174,3 @@ preMode = { on: false, prevFiles: [] },

cwd?: string;
vfiles: VFile[];
files: File[];
config?: ConfigFile;

@@ -191,13 +192,13 @@ preMode?: { on: boolean; prevFiles: string[] };

if (preMode.on) {
const allChanges: Change[] = yield changesParsed({ cwd, vfiles });
const allChanges: Change[] = yield changesParsed({ cwd, files });
const allMergedRelease = mergeReleases(allChanges, config || {});
if (preMode.prevFiles.length > 0) {
const newVfiles = vfiles.reduce((newVFiles: VFile[], vfile) => {
const newFiles = files.reduce((newFiles: File[], file) => {
const prevFile = preMode.prevFiles.find(
(filename) => vfile.data.filename === filename
(filename) => file.path === filename
);
if (!prevFile) {
return newVFiles.concat([vfile]);
return newFiles.concat([file]);
} else {
return newVFiles;
return newFiles;
}

@@ -207,14 +208,14 @@ }, []);

cwd,
vfiles: newVfiles,
files: newFiles,
});
const newMergedRelease = mergeReleases(newChanges, config || {});
const oldVfiles = vfiles.reduce((newVFiles: VFile[], vfile) => {
const oldFiles = files.reduce((newFiles: File[], file) => {
const prevFile = preMode.prevFiles.find(
(filename) => vfile.data.filename === filename
(filename) => file.path === filename
);
if (prevFile) {
return newVFiles.concat([vfile]);
return newFiles.concat([file]);
} else {
return newVFiles;
return newFiles;
}

@@ -224,3 +225,3 @@ }, []);

cwd,
vfiles: oldVfiles,
files: oldFiles,
});

@@ -243,3 +244,3 @@ const oldMergedRelease = mergeReleases(oldChanges, config || {});

} else {
let changes: Change[] = yield changesParsed({ cwd, vfiles });
let changes: Change[] = yield changesParsed({ cwd, files });
plan.changes = changes;

@@ -249,2 +250,3 @@ plan.releases = mergeReleases(changes, config || {});

// check that plan only includes pkgs that exist
if (config && Object.keys(config).length > 0) {

@@ -256,3 +258,3 @@ for (let pkg of Object.keys(plan.releases)) {

files = `${files}${files === "" ? "" : ", "}${
file.meta && file.meta.filename ? file.meta.filename : ""
file.meta && file.meta.path ? file.meta.path : ""
}`;

@@ -275,13 +277,15 @@ return files;

cwd,
vfiles,
files,
}: {
cwd?: string;
vfiles: VFile[];
}): Generator<any, Change[], any> {
const allVfiles = vfiles.map((vfile) => parseChange({ cwd, vfile }));
let yieldedV: Change[] = [];
for (let v of allVfiles) {
yieldedV = [...yieldedV, yield v];
files: File[];
}): Operation<Change[]> {
const allChangesParsed = [];
for (let file of files) {
const parsed = yield parseChange({ cwd, file });
allChangesParsed.push(parsed);
}
return yieldedV;
return allChangesParsed;
};

@@ -425,5 +429,5 @@

if (dryRun) {
console.log("==== data piped into commands ===");
console.dir("==== data piped into commands ===");
Object.keys(pipeOutput).forEach((pkg) =>
console.log(pkg, "pipe", pipeOutput[pkg].pipe)
console.dir({ pkg, pipe: pipeOutput[pkg].pipe }, { depth: 5 })
);

@@ -504,5 +508,6 @@ }

: {
[`getPublishedVersion${publishElements.subPublishCommand}`]: publishElements[
`getPublishedVersion${publishElements.subPublishCommand}`
],
[`getPublishedVersion${publishElements.subPublishCommand}`]:
publishElements[
`getPublishedVersion${publishElements.subPublishCommand}`
],
}),

@@ -555,2 +560,3 @@ ...(!publishElements.assets

versionPatch: extraPublishParams.pkgFile.versionPatch,
deps: extraPublishParams.pkgFile.deps,
pkg: extraPublishParams.pkgFile.pkg,

@@ -624,5 +630,5 @@ };

if (dryRun) {
console.log("==== data piped into commands ===");
console.dir("==== data piped into commands ===");
Object.keys(pipeOutput).forEach((pkg) =>
console.log(pkg, "pipe", pipeOutput[pkg].pipe)
console.dir({ pkg, pipe: pipeOutput[pkg].pipe }, { depth: 5 })
);

@@ -629,0 +635,0 @@ }

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc