🚨 Active Supply Chain Attack: node-ipc Package Compromised. Learn More
Socket
Book a Demo · Sign in
Socket

@changesets/read

Package Overview
Dependencies
Maintainers
4
Versions
39
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@changesets/read - npm Package — Compare versions

Comparing version
0.6.5
to
1.0.0-next.0
+3
dist/changesets-read.d.ts
export * from "./declarations/src/index.js";
export { default } from "./declarations/src/index.js";
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY2hhbmdlc2V0cy1yZWFkLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuL2RlY2xhcmF0aW9ucy9zcmMvaW5kZXguZC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9
import fs from 'node:fs/promises';
import path from 'path';
import parse from '@changesets/parse';
import * as git from '@changesets/git';
// Keep only the changeset files that git reports as added/changed relative
// to `sinceRef`. Paths returned by git are compared by their trailing
// file-name segment against the entries read from the .changeset directory.
async function filterChangesetsSinceRef(changesets, changesetBase, sinceRef) {
  const changedFiles = await git.getChangedChangesetFilesSinceRef({
    cwd: changesetBase,
    ref: sinceRef
  });
  const changedNames = new Set(changedFiles.map(filePath => filePath.split("/").pop()));
  return changesets.filter(name => changedNames.has(name));
}
/**
 * Read every changeset markdown file from `<cwd>/.changeset` and return the
 * parsed results, each tagged with an `id` derived from its file name.
 * When `sinceRef` is given, only changesets added since that git ref are read.
 * Throws a descriptive error if the .changeset directory does not exist.
 */
async function getChangesets(cwd, sinceRef) {
  const changesetBase = path.join(cwd, ".changeset");
  let entries;
  try {
    entries = await fs.readdir(changesetBase);
  } catch (err) {
    // Translate a missing directory into a friendlier error; rethrow anything else.
    if (err.code === "ENOENT") {
      throw new Error("There is no .changeset directory in this project");
    }
    throw err;
  }
  if (sinceRef !== undefined) {
    entries = await filterChangesetsSinceRef(entries, changesetBase, sinceRef);
  }
  // Changeset files are non-hidden .md files; README.md (any case) is docs, not a changeset.
  const changesetFiles = entries.filter(
    file => file.endsWith(".md") && !file.startsWith(".") && !/^README\.md$/i.test(file)
  );
  return Promise.all(
    changesetFiles.map(async file => {
      const contents = await fs.readFile(path.join(changesetBase, file), "utf8");
      return {
        ...parse(contents),
        id: file.replace(".md", "")
      };
    })
  );
}
export { getChangesets as default };
+21
-0
# @changesets/read
## 1.0.0-next.0
### Major Changes
- [#1482](https://github.com/changesets/changesets/pull/1482) [`df424a4`](https://github.com/changesets/changesets/commit/df424a4a09eea15b0fa9159ee0b98af0d95f58a7) Thanks [@Andarist](https://github.com/Andarist)! - From now on this package is going to be published as ES module.
### Minor Changes
- [#1494](https://github.com/changesets/changesets/pull/1494) [`6d1f384`](https://github.com/changesets/changesets/commit/6d1f384c8feab091f58443f6f7ee2ada64e0e7cc) Thanks [@bluwy](https://github.com/bluwy)! - Remove support for reading changesets from version 1
- [#1479](https://github.com/changesets/changesets/pull/1479) [`7f34a00`](https://github.com/changesets/changesets/commit/7f34a00aab779a941a406b17f5a85895144fc0a5) Thanks [@bluwy](https://github.com/bluwy)! - Add `"engines"` field for explicit node version support. The supported node versions are `>=18.0.0`.
### Patch Changes
- [#1476](https://github.com/changesets/changesets/pull/1476) [`e0e1748`](https://github.com/changesets/changesets/commit/e0e1748369b1f936c665b62590a76a0d57d1545e) Thanks [@pralkarz](https://github.com/pralkarz)! - Replace `fs-extra` usage with `node:fs`
- Updated dependencies [[`e0e1748`](https://github.com/changesets/changesets/commit/e0e1748369b1f936c665b62590a76a0d57d1545e), [`7f34a00`](https://github.com/changesets/changesets/commit/7f34a00aab779a941a406b17f5a85895144fc0a5), [`3628cab`](https://github.com/changesets/changesets/commit/3628cab6cbfd931b7f2a909b38b66c1aa794d4bf), [`df424a4`](https://github.com/changesets/changesets/commit/df424a4a09eea15b0fa9159ee0b98af0d95f58a7)]:
- @changesets/git@4.0.0-next.0
- @changesets/parse@1.0.0-next.0
- @changesets/types@7.0.0-next.0
## 0.6.5

@@ -4,0 +25,0 @@

+1
-1

@@ -1,2 +0,2 @@

import { NewChangeset } from "@changesets/types";
import type { NewChangeset } from "@changesets/types";
export default function getChangesets(cwd: string, sinceRef?: string): Promise<Array<NewChangeset>>;
{
"name": "@changesets/read",
"version": "0.6.5",
"version": "1.0.0-next.0",
"description": "Read changesets from disc, and return the information as JSON",
"main": "dist/changesets-read.cjs.js",
"module": "dist/changesets-read.esm.js",
"type": "module",
"exports": {
".": {
"types": {
"import": "./dist/changesets-read.cjs.mjs",
"default": "./dist/changesets-read.cjs.js"
},
"module": "./dist/changesets-read.esm.js",
"import": "./dist/changesets-read.cjs.mjs",
"default": "./dist/changesets-read.cjs.js"
},
".": "./dist/changesets-read.js",
"./package.json": "./package.json"

@@ -22,15 +13,15 @@ },

"dependencies": {
"@changesets/git": "^3.0.4",
"@changesets/logger": "^0.1.1",
"@changesets/parse": "^0.4.1",
"@changesets/types": "^6.1.0",
"fs-extra": "^7.0.1",
"p-filter": "^2.1.0",
"@changesets/git": "^4.0.0-next.0",
"@changesets/parse": "^1.0.0-next.0",
"@changesets/types": "^7.0.0-next.0",
"picocolors": "^1.1.0"
},
"devDependencies": {
"@changesets/test-utils": "*",
"@changesets/write": "*",
"outdent": "^0.5.0"
"@changesets/test-utils": "0.0.9-next.0",
"@changesets/write": "1.0.0-next.0",
"outdent": "^0.8.0"
},
"engines": {
"node": ">=18.0.0"
}
}

@@ -1,7 +0,7 @@

import fs from "fs-extra";
import path from "node:path";
import outdent from "outdent";
import { outdent } from "outdent";
import read from "./";
import read from "./index.ts";
import { gitdir, silenceLogsInBlock, testdir } from "@changesets/test-utils";
import fs from "node:fs/promises";
import writeChangeset from "@changesets/write";

@@ -103,3 +103,3 @@ import { add } from "@changesets/git";

const cwd = await testdir({});
await fs.mkdir(path.join(cwd, ".changeset"));
await fs.mkdir(path.join(cwd, ".changeset"), { recursive: true });

@@ -133,3 +133,3 @@ const changesets = await read(cwd);

expect(read(cwd)).rejects.toThrow(
await expect(read(cwd)).rejects.toThrow(
outdent`could not parse changeset - invalid frontmatter: ---

@@ -199,26 +199,2 @@

});
it("should read an old changeset", async () => {
const cwd = await testdir({
".changeset/basic-changeset/changes.json": JSON.stringify({
releases: [
{
name: "cool-package",
type: "minor",
},
],
dependents: [],
}),
".changeset/basic-changeset/changes.md": `Nice simple summary`,
});
const changesets = await read(cwd);
expect(changesets).toEqual([
{
releases: [{ name: "cool-package", type: "minor" }],
summary: "Nice simple summary",
id: "basic-changeset",
},
]);
});
it("should read a nested changeset relative to git root", async () => {

@@ -225,0 +201,0 @@ const cwd = await gitdir({

@@ -1,7 +0,6 @@

import fs from "fs-extra";
import fs from "node:fs/promises";
import path from "path";
import parse from "@changesets/parse";
import { NewChangeset } from "@changesets/types";
import type { NewChangeset } from "@changesets/types";
import * as git from "@changesets/git";
import getOldChangesetsAndWarn from "./legacy";

@@ -45,4 +44,2 @@ async function filterChangesetsSinceRef(

let oldChangesetsPromise = getOldChangesetsAndWarn(changesetBase, contents);
let changesets = contents.filter(

@@ -56,13 +53,7 @@ (file) =>

const changesetContents = changesets.map(async (file) => {
const changeset = await fs.readFile(
path.join(changesetBase, file),
"utf-8"
);
const changeset = await fs.readFile(path.join(changesetBase, file), "utf8");
return { ...parse(changeset), id: file.replace(".md", "") };
});
return [
...(await oldChangesetsPromise),
...(await Promise.all(changesetContents)),
];
return await Promise.all(changesetContents);
}
export * from "./declarations/src/index.js";
export { _default as default } from "./changesets-read.cjs.default.js";
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY2hhbmdlc2V0cy1yZWFkLmNqcy5kLm10cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4vZGVjbGFyYXRpb25zL3NyYy9pbmRleC5kLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBIn0=
export * from "./declarations/src/index.js";
export { default } from "./declarations/src/index.js";
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY2hhbmdlc2V0cy1yZWFkLmNqcy5kLnRzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi9kZWNsYXJhdGlvbnMvc3JjL2luZGV4LmQudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEifQ==
export { default as _default } from "./declarations/src/index.js"
exports._default = require("./changesets-read.cjs.js").default;
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var fs = require('fs-extra');
var path = require('path');
var parse = require('@changesets/parse');
var git = require('@changesets/git');
var pc = require('picocolors');
var pFilter = require('p-filter');
var logger = require('@changesets/logger');
// Normalize a CommonJS require() result: transpiled ES modules are passed
// through unchanged, plain CJS exports are wrapped as { default: exports }.
function _interopDefault (e) {
  if (e && e.__esModule) {
    return e;
  }
  return { 'default': e };
}
// Build a frozen, prototype-less namespace object from a require() result,
// re-exposing every named export via a getter and attaching the original
// module object as `default`. Transpiled ES modules are returned as-is.
function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  var ns = Object.create(null);
  if (e) {
    for (const key of Object.keys(e)) {
      if (key === 'default') continue;
      const desc = Object.getOwnPropertyDescriptor(e, key);
      // Preserve real getters; otherwise expose a lazy read of the source module.
      Object.defineProperty(ns, key, desc.get ? desc : {
        enumerable: true,
        get: function () { return e[key]; }
      });
    }
  }
  ns["default"] = e;
  return Object.freeze(ns);
}
var fs__namespace = /*#__PURE__*/_interopNamespace(fs);
var path__default = /*#__PURE__*/_interopDefault(path);
var parse__default = /*#__PURE__*/_interopDefault(parse);
var git__namespace = /*#__PURE__*/_interopNamespace(git);
var pc__default = /*#__PURE__*/_interopDefault(pc);
var pFilter__default = /*#__PURE__*/_interopDefault(pFilter);
// Babel helper: coerce `t` to a primitive value. Non-objects pass through;
// objects with a Symbol.toPrimitive method delegate to it (and must return a
// primitive), otherwise String()/Number() is applied based on the hint `r`.
function _toPrimitive(t, r) {
  if (typeof t != "object" || !t) return t;
  var hook = t[Symbol.toPrimitive];
  if (hook !== undefined) {
    var result = hook.call(t, r || "default");
    if (typeof result != "object") return result;
    throw new TypeError("@@toPrimitive must return a primitive value.");
  }
  return (r === "string" ? String : Number)(t);
}
// Babel helper: convert an arbitrary value into a valid property key —
// symbols stay symbols, everything else is coerced to a string.
function _toPropertyKey(t) {
  var key = _toPrimitive(t, "string");
  return typeof key == "symbol" ? key : key + "";
}
// Babel helper: set property `r` of object `e` to value `t` and return `e`.
// Existing keys are redefined via Object.defineProperty (enumerable,
// configurable, writable); new keys are assigned directly.
function _defineProperty(e, r, t) {
  var key = _toPropertyKey(r);
  if (key in e) {
    Object.defineProperty(e, key, {
      value: t,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    e[key] = t;
  }
  return e;
}
// Babel helper: list own string keys of `e` followed by its own symbol keys.
// When `r` is truthy, only enumerable symbol keys are included.
function ownKeys(e, r) {
  var keys = Object.keys(e);
  if (Object.getOwnPropertySymbols) {
    var symbols = Object.getOwnPropertySymbols(e);
    if (r) {
      symbols = symbols.filter(function (sym) {
        return Object.getOwnPropertyDescriptor(e, sym).enumerable;
      });
    }
    keys.push.apply(keys, symbols);
  }
  return keys;
}
// Babel helper implementing object spread ({ ...a, ...b }) onto target `e`.
// Arguments alternate: odd-indexed sources are copied key-by-key (string and
// enumerable symbol keys) via _defineProperty; even-indexed sources are
// copied wholesale by property descriptor when the engine supports
// Object.getOwnPropertyDescriptors, else key-by-key as a fallback.
function _objectSpread2(e) {
for (var r = 1; r < arguments.length; r++) {
// null/undefined sources are treated as empty objects
var t = null != arguments[r] ? arguments[r] : {};
r % 2 ? ownKeys(Object(t), !0).forEach(function (r) {
_defineProperty(e, r, t[r]);
}) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) {
Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r));
});
}
return e;
}
// THIS SHOULD BE REMOVED WHEN SUPPORT FOR CHANGESETS FROM V1 IS DROPPED
let importantSeparator = pc__default["default"].red("===============================IMPORTANT!===============================");
let importantEnd = pc__default["default"].red("----------------------------------------------------------------------");
// Read v1-format changesets: each one is a directory under the .changeset
// folder containing changes.json (release metadata) and changes.md (summary).
async function getOldChangesets(changesetBase, dirs) {
// this needs to support just not dealing with dirs that aren't set up properly
let changesets = await pFilter__default["default"](dirs, async dir => (await fs__namespace.lstat(path__default["default"].join(changesetBase, dir))).isDirectory());
const changesetContents = changesets.map(async changesetDir => {
const jsonPath = path__default["default"].join(changesetBase, changesetDir, "changes.json");
// Read the summary markdown and the release metadata in parallel.
const [summary, json] = await Promise.all([fs__namespace.readFile(path__default["default"].join(changesetBase, changesetDir, "changes.md"), "utf-8"), fs__namespace.readJson(jsonPath)]);
return {
releases: json.releases,
summary,
// the directory name doubles as the changeset id
id: changesetDir
};
});
return Promise.all(changesetContents);
}
// this function only exists while we wait for v1 changesets to be obsoleted
// and should be deleted before v3
// Reads legacy v1 changesets and, if any are found, prints a prominent
// deprecation warning before returning them.
async function getOldChangesetsAndWarn(changesetBase, dirs) {
let oldChangesets = await getOldChangesets(changesetBase, dirs);
if (oldChangesets.length === 0) {
return [];
}
// Loudly flag that deprecated v1 changesets are still being applied.
logger.warn(importantSeparator);
logger.warn("There were old changesets from version 1 found");
logger.warn("These are being applied now but the dependents graph may have changed");
logger.warn("Make sure you validate all your dependencies");
logger.warn("In a future major version, we will no longer apply these old changesets, and will instead throw here");
logger.warn(importantEnd);
return oldChangesets;
}
// Keep only the changeset entries that git reports as changed since
// `sinceRef`; git paths are compared by their trailing file-name segment.
async function filterChangesetsSinceRef(changesets, changesetBase, sinceRef) {
const newChangesets = await git__namespace.getChangedChangesetFilesSinceRef({
cwd: changesetBase,
ref: sinceRef
});
const newHashes = newChangesets.map(c => c.split("/").pop());
return changesets.filter(dir => newHashes.includes(dir));
}
async function getChangesets(cwd, sinceRef) {
let changesetBase = path__default["default"].join(cwd, ".changeset");
let contents;
try {
contents = await fs__namespace["default"].readdir(changesetBase);
} catch (err) {
if (err.code === "ENOENT") {
throw new Error("There is no .changeset directory in this project");
}
throw err;
}
if (sinceRef !== undefined) {
contents = await filterChangesetsSinceRef(contents, changesetBase, sinceRef);
}
let oldChangesetsPromise = getOldChangesetsAndWarn(changesetBase, contents);
let changesets = contents.filter(file => !file.startsWith(".") && file.endsWith(".md") && !/^README\.md$/i.test(file));
const changesetContents = changesets.map(async file => {
const changeset = await fs__namespace["default"].readFile(path__default["default"].join(changesetBase, file), "utf-8");
return _objectSpread2(_objectSpread2({}, parse__default["default"](changeset)), {}, {
id: file.replace(".md", "")
});
});
return [...(await oldChangesetsPromise), ...(await Promise.all(changesetContents))];
}
exports["default"] = getChangesets;
import "./changesets-read.cjs.js";
export { _default as default } from "./changesets-read.cjs.default.js";
import * as fs from 'fs-extra';
import fs__default from 'fs-extra';
import path from 'path';
import parse from '@changesets/parse';
import * as git from '@changesets/git';
import pc from 'picocolors';
import pFilter from 'p-filter';
import { warn } from '@changesets/logger';
// Babel helper: coerce `t` to a primitive using its Symbol.toPrimitive
// method when present, else fall back to String()/Number() per the hint `r`.
function _toPrimitive(t, r) {
if ("object" != typeof t || !t) return t;
var e = t[Symbol.toPrimitive];
if (void 0 !== e) {
var i = e.call(t, r || "default");
if ("object" != typeof i) return i;
throw new TypeError("@@toPrimitive must return a primitive value.");
}
return ("string" === r ? String : Number)(t);
}
// Babel helper: convert a value into a property key (symbol or string).
function _toPropertyKey(t) {
var i = _toPrimitive(t, "string");
return "symbol" == typeof i ? i : i + "";
}
// Babel helper: set property `r` of `e` to `t`; existing keys are redefined
// via Object.defineProperty, new keys are assigned directly.
function _defineProperty(e, r, t) {
return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, {
value: t,
enumerable: !0,
configurable: !0,
writable: !0
}) : e[r] = t, e;
}
// Babel helper: own string keys of `e` plus its own symbol keys; when `r`
// is truthy only enumerable symbol keys are kept.
function ownKeys(e, r) {
var t = Object.keys(e);
if (Object.getOwnPropertySymbols) {
var o = Object.getOwnPropertySymbols(e);
r && (o = o.filter(function (r) {
return Object.getOwnPropertyDescriptor(e, r).enumerable;
})), t.push.apply(t, o);
}
return t;
}
// Babel helper implementing object spread ({ ...a, ...b }): odd-indexed
// arguments are copied key-by-key, even-indexed ones by property descriptor.
function _objectSpread2(e) {
for (var r = 1; r < arguments.length; r++) {
var t = null != arguments[r] ? arguments[r] : {};
r % 2 ? ownKeys(Object(t), !0).forEach(function (r) {
_defineProperty(e, r, t[r]);
}) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) {
Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r));
});
}
return e;
}
// THIS SHOULD BE REMOVED WHEN SUPPORT FOR CHANGESETS FROM V1 IS DROPPED
let importantSeparator = pc.red("===============================IMPORTANT!===============================");
let importantEnd = pc.red("----------------------------------------------------------------------");
// Read v1-format changesets: each one is a directory containing changes.json
// (release metadata) and changes.md (summary text).
async function getOldChangesets(changesetBase, dirs) {
// this needs to support just not dealing with dirs that aren't set up properly
let changesets = await pFilter(dirs, async dir => (await fs.lstat(path.join(changesetBase, dir))).isDirectory());
const changesetContents = changesets.map(async changesetDir => {
const jsonPath = path.join(changesetBase, changesetDir, "changes.json");
// Read the summary markdown and the release metadata in parallel.
const [summary, json] = await Promise.all([fs.readFile(path.join(changesetBase, changesetDir, "changes.md"), "utf-8"), fs.readJson(jsonPath)]);
return {
releases: json.releases,
summary,
// the directory name doubles as the changeset id
id: changesetDir
};
});
return Promise.all(changesetContents);
}
// this function only exists while we wait for v1 changesets to be obsoleted
// and should be deleted before v3
async function getOldChangesetsAndWarn(changesetBase, dirs) {
let oldChangesets = await getOldChangesets(changesetBase, dirs);
if (oldChangesets.length === 0) {
return [];
}
// Loudly flag that deprecated v1 changesets are still being applied.
warn(importantSeparator);
warn("There were old changesets from version 1 found");
warn("These are being applied now but the dependents graph may have changed");
warn("Make sure you validate all your dependencies");
warn("In a future major version, we will no longer apply these old changesets, and will instead throw here");
warn(importantEnd);
return oldChangesets;
}
// Keep only the changeset entries that git reports as changed since
// `sinceRef`; git paths are compared by their trailing file-name segment.
async function filterChangesetsSinceRef(changesets, changesetBase, sinceRef) {
const newChangesets = await git.getChangedChangesetFilesSinceRef({
cwd: changesetBase,
ref: sinceRef
});
const newHashes = newChangesets.map(c => c.split("/").pop());
return changesets.filter(dir => newHashes.includes(dir));
}
// Read every changeset (v2 .md files plus legacy v1 directories) from
// `<cwd>/.changeset`, optionally restricted to those added since `sinceRef`.
// Throws if the .changeset directory does not exist.
async function getChangesets(cwd, sinceRef) {
let changesetBase = path.join(cwd, ".changeset");
let contents;
try {
contents = await fs__default.readdir(changesetBase);
} catch (err) {
// A missing directory gets a friendlier message; anything else is rethrown.
if (err.code === "ENOENT") {
throw new Error("There is no .changeset directory in this project");
}
throw err;
}
if (sinceRef !== undefined) {
contents = await filterChangesetsSinceRef(contents, changesetBase, sinceRef);
}
// Kick off the legacy v1 read before narrowing the list to .md files.
let oldChangesetsPromise = getOldChangesetsAndWarn(changesetBase, contents);
// Changeset files are non-hidden .md files; README.md (any case) is excluded.
let changesets = contents.filter(file => !file.startsWith(".") && file.endsWith(".md") && !/^README\.md$/i.test(file));
const changesetContents = changesets.map(async file => {
const changeset = await fs__default.readFile(path.join(changesetBase, file), "utf-8");
return _objectSpread2(_objectSpread2({}, parse(changeset)), {}, {
id: file.replace(".md", "")
});
});
// Legacy changesets come first, then the parsed .md changesets.
return [...(await oldChangesetsPromise), ...(await Promise.all(changesetContents))];
}
export { getChangesets as default };
import path from "path";
import pc from "picocolors";
import { NewChangeset } from "@changesets/types";
import * as fs from "fs-extra";
import pFilter from "p-filter";
import { warn } from "@changesets/logger";
// THIS SHOULD BE REMOVED WHEN SUPPORT FOR CHANGESETS FROM V1 IS DROPPED
let importantSeparator = pc.red(
  "===============================IMPORTANT!==============================="
);
let importantEnd = pc.red(
  "----------------------------------------------------------------------"
);

/**
 * Read v1-format changesets: each one is a directory under `.changeset`
 * holding `changes.json` (release metadata) and `changes.md` (summary).
 * Entries in `dirs` that are not directories are silently skipped.
 */
async function getOldChangesets(
  changesetBase: string,
  dirs: string[]
): Promise<Array<NewChangeset>> {
  // this needs to support just not dealing with dirs that aren't set up properly
  const directoriesOnly = await pFilter(dirs, async (dir) =>
    (await fs.lstat(path.join(changesetBase, dir))).isDirectory()
  );
  return Promise.all(
    directoriesOnly.map(async (changesetDir) => {
      const jsonPath = path.join(changesetBase, changesetDir, "changes.json");
      // Read the summary markdown and the release metadata in parallel.
      const [summary, json] = await Promise.all([
        fs.readFile(
          path.join(changesetBase, changesetDir, "changes.md"),
          "utf-8"
        ),
        fs.readJson(jsonPath),
      ]);
      // The directory name doubles as the changeset id.
      return { releases: json.releases, summary, id: changesetDir };
    })
  );
}
// this function only exists while we wait for v1 changesets to be obsoleted
// and should be deleted before v3
/**
 * Read legacy v1 changesets and, when any are found, print a prominent
 * deprecation warning before returning them. Returns an empty array when
 * there are none.
 */
export default async function getOldChangesetsAndWarn(
  changesetBase: string,
  dirs: string[]
): Promise<Array<NewChangeset>> {
  const oldChangesets = await getOldChangesets(changesetBase, dirs);
  if (!oldChangesets.length) {
    return [];
  }
  // Loudly flag that deprecated v1 changesets are still being applied.
  warn(importantSeparator);
  warn("There were old changesets from version 1 found");
  warn("These are being applied now but the dependents graph may have changed");
  warn("Make sure you validate all your dependencies");
  warn(
    "In a future major version, we will no longer apply these old changesets, and will instead throw here"
  );
  warn(importantEnd);
  return oldChangesets;
}