New Research: Supply Chain Attack on Axios Pulls Malicious Dependency from npm. Details
Socket
Book a Demo · Sign in
Socket

@iridiumcms/sync

Package Overview
Dependencies
Maintainers
0
Versions
33
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@iridiumcms/sync - npm Package Compare versions

Comparing version
1.0.20
to
1.0.21
+89
src/backup.ts
import path from 'path';
import fs from 'fs/promises';
import { nano } from '@iridiumcms/utils';
import { tarball } from './tarball';
import { lake, backup } from '@iridiumcms/config/server';
import { TEMP_DIR } from '@iridiumcms/constants';
import {
tables,
backupTable,
} from './table-management';
// Build a backup job covering the database tables and/or the data-lake files.
// Returns an object exposing the generated backup `id` and an async generator
// `run()` that performs the work, yielding a human-readable progress string
// after each step (callers stream these to the UI via SSE).
export const generator = async function({
  isDatabase,
  isDataLake,
}: {
  isDatabase: boolean;
  isDataLake: boolean;
}) {
  // Unique id for this backup; doubles as the temp work-dir / tarball name.
  const id = nano();
  const date = new Date();
  // Scratch locations under TEMP_DIR: the final .tar.gz and a work directory.
  const tbPath = path.resolve(TEMP_DIR, `${id}.tar.gz`);
  const destPath = path.resolve(TEMP_DIR, id);
  await fs.mkdir(destPath, { recursive: true });
  return {
    get id() { return id; },
    async*run() {
      if (isDatabase) {
        yield `database: starting copy`;
        const dbDest = path.resolve(destPath, 'database');
        await fs.mkdir(dbDest, { recursive: true });
        yield `database: ${tables.length} tables`;
        // Dump each table to its own JSON file inside database/.
        for (let i = 0; i < tables.length; i++) {
          const [filename, table] = tables[i];
          const blob = await backupTable(table);
          await Bun.write(path.resolve(dbDest, filename), blob);
          yield `database: ${i + 1}/${tables.length} tables copied`;
        }
        yield `database: copy complete`;
      }
      if (isDataLake) {
        yield `data-lake: starting copy`;
        const dlDest = path.resolve(destPath, 'data-lake');
        await fs.mkdir(dlDest, { recursive: true });
        const files = await lake.list();
        yield `data-lake: ${files.length} files found`;
        for (let i = 0; i < files.length; i++) {
          const file = files[i];
          try {
            const data = await lake.get(file);
            await fs.writeFile(path.resolve(dlDest, file), data);
          } catch (err) {
            // if we fail to get the file, we don't want to fail the backup
            //
            // the file could not exist or be corrupted, to be fixed by an admin later
            console.error(err);
          }
          yield `data-lake: ${i + 1}/${files.length} files copied`;
        }
        yield `data-lake: copy complete`;
      }
      yield `compressing`;
      // Pack the work directory into a single gzip tarball.
      await tarball(destPath, tbPath);
      yield `compression complete`;
      yield `saving backup`;
      // NOTE(review): the whole archive is loaded into memory before being
      // persisted — fine for modest backups, may need streaming for large ones.
      const zip = Bun.file(tbPath);
      await backup.set({
        id,
        date,
        file: await zip.arrayBuffer(),
        isDatabase,
        isDataLake,
      });
      yield `save complete`;
      yield `cleaning up`;
      // NOTE(review): cleanup is not in a finally block, so the temp dir and
      // tarball remain if an earlier step throws — TODO confirm intentional.
      await fs.rm(destPath, { recursive: true });
      await fs.rm(tbPath);
      yield `cleanup complete`;
    },
  };
};
import path from 'path';
import fs from 'fs/promises';
import { untarball } from './tarball';
import { lake, backup } from '@iridiumcms/config/server';
import { medias } from '@iridiumcms/db';
import { fromId } from '@iridiumcms/backup/utils';
import { TEMP_DIR } from '@iridiumcms/constants';
import {
tables,
restoreTable,
} from './table-management';
// eslint-disable-next-line require-await -- outer function awaits nothing; the
// async work lives in the generator returned below
// Build a restore job for a previously stored backup. `entry` is the stored
// archive's name; `fromId` presumably decodes the backup id and the database /
// data-lake flags from it — TODO confirm against fromId's implementation.
export const generator = async function(entry: string) {
  const {
    id,
    isDatabase,
    isDataLake,
  } = fromId(entry);
  // tbPath is the same location the archive is written to below
  // (path.resolve(TEMP_DIR, entry)).
  const tbPath = path.resolve(TEMP_DIR, entry);
  const destPath = path.resolve(TEMP_DIR, id);
  return {
    // Async generator: performs the restore, yielding progress strings.
    async*run() {
      // remove any existing directory to free up space for the tarball
      const stat = await fs.stat(destPath).catch(() => null);
      if (stat?.isDirectory()) {
        await fs.rm(destPath, { recursive: true });
      }
      await fs.mkdir(destPath, { recursive: true });
      yield `retrieving backup`;
      // Pull the stored archive out of the backup store onto disk.
      await Bun.write(
        path.resolve(TEMP_DIR, entry),
        await backup.get(entry),
      );
      yield `backup retrieved`;
      yield `decompressing`;
      // Extract the archive into destPath (creates database/ and data-lake/).
      await untarball(destPath, tbPath);
      yield `decompressing complete`;
      if (isDatabase) {
        yield `database: starting restore`;
        const dbDest = path.resolve(destPath, 'database');
        yield `database: ${tables.length} tables`;
        // Load each table's JSON dump and replace the live table contents.
        for (let i = 0; i < tables.length; i++) {
          const [filename, table] = tables[i];
          const jsonPath = path.resolve(dbDest, filename);
          const ab = await Bun.file(jsonPath).arrayBuffer();
          await restoreTable(table, ab);
          yield `database: ${i + 1}/${tables.length} tables restored`;
        }
        yield `database: restore complete`;
      }
      if (isDataLake) {
        yield `data-lake: starting restore`;
        const dlDest = path.resolve(destPath, 'data-lake');
        await fs.mkdir(dlDest, { recursive: true });
        // Restore files listed in the (just-restored) medias table; files in
        // the archive with no matching record are skipped.
        const records = await medias.find.files();
        yield `data-lake: ${records.length} files`;
        for (let i = 0; i < records.length; i++) {
          const { id, mime: type } = records[i] as { id: string; mime: string };
          try {
            const file = await Bun.file(path.resolve(dlDest, id)).arrayBuffer();
            await lake.set({
              id,
              file,
              type,
            });
          } catch (err) {
            // best-effort: a missing/corrupt file should not abort the restore
            console.error(err);
          }
          yield `data-lake: ${i + 1}/${records.length} files restored`;
        }
        yield `data-lake: restore complete`;
      }
      yield `cleaning up`;
      // Remove both the downloaded archive and the extraction directory.
      await fs.rm(path.resolve(TEMP_DIR, entry), { recursive: true });
      await fs.rm(path.resolve(TEMP_DIR, id), { recursive: true });
      yield `cleanup complete`;
      yield `restore complete`;
    },
  };
};
import path from 'path';
import { type SSEStreamingApi } from 'hono/streaming';
import { VOLUMES_DIR } from '@iridiumcms/constants';
import { createWriteStream, createReadStream } from 'fs';
import { promisify } from 'util';
import { pipeline } from 'stream';
// Promisified stream.pipeline: resolves once the pipe completes, rejects on
// stream error, and ends/destroys the streams itself.
const pipelineAsync = promisify(pipeline);

// Stream `stream` into VOLUMES_DIR/<filename> and return the absolute path.
// pipeline() ends and closes the destination on completion, so the previous
// trailing writeStream.end() was a dead no-op and has been removed.
export const write = async (stream: ReadableStream, filename: string) => {
  const dir = path.resolve(VOLUMES_DIR, filename);
  const writeStream = createWriteStream(dir);
  // @ts-ignore -- web ReadableStream vs Node stream typing mismatch; pipeline
  // handles web streams at runtime on modern Node.
  await pipelineAsync(stream, writeStream);
  return dir;
};

// Thin alias: read a stored file back as a Node readable stream.
export const read = createReadStream;
// Wrap an SSE stream in a small event emitter. Every event carries a
// monotonically increasing id and a JSON payload of { name, category, message }.
export const emitter = ({
  name = '',
  category = '',
  stream,
}: {
  name?: string;
  category?: string;
  stream: SSEStreamingApi;
}) => {
  // Per-emitter event counter; incremented on every write.
  let nextId = 0;
  return {
    // Emit an arbitrary named event carrying `msg`.
    async emit(evt: string, msg: string) {
      const payload = JSON.stringify({
        name,
        category,
        message: msg,
      });
      return await stream.writeSSE({
        id: `${nextId++}`,
        event: evt,
        data: payload,
      });
    },
    // Shorthand for a plain progress message event.
    async msg(msg: string) {
      return await this.emit('msg', msg);
    },
    // Signal completion; defaults preserve the original 'close'/'complete' contract.
    async close(evt = 'close', msg = 'complete') {
      return await this.emit(evt, msg);
    },
  };
};
import superjson from 'superjson';
import { db } from '@iridiumcms/db';
import {
users,
medias,
contents,
webhooks,
apiTokens,
schedules,
} from '@iridiumcms/db/tables';
// Union of the table objects this package can back up and restore.
export type Table = typeof medias | typeof users | typeof contents | typeof webhooks | typeof apiTokens | typeof schedules;
// (filename, table) pairs: each table is dumped to / restored from the
// matching JSON file inside the archive's database/ directory.
export const tables = [
['users.json', users],
['medias.json', medias],
['contents.json', contents],
['webhooks.json', webhooks],
['apiTokens.json', apiTokens],
['schedules.json', schedules],
// intentionally omit sessions,
// we dont want users from one backup
// to be logged into another environment
// (especially since those sessions are likely
// expired and it would kick whoever is logged in)
] as ([string, Table])[];
// Serialize every row of `table` into a JSON Blob (superjson format, so
// Dates etc. round-trip) ready to be written into the backup archive.
export const backupTable = async function(table: Table) {
  const rows = await db.select().from(table);
  const payload = superjson.stringify(rows);
  return new Blob([payload], {
    type: 'application/json',
  });
};
// Restore `table` from a superjson dump. Replace-all semantics: the table is
// wiped first, then the snapshot rows are bulk-inserted. Returns the rows.
export const restoreTable = async function(table: Table, file: ArrayBuffer) {
  const decoded = new TextDecoder().decode(file);
  const rows = superjson.parse(decoded) as any[];
  await db.delete(table);
  if (rows.length) {
    await db.insert(table).values(rows);
  }
  return rows;
};
import path from 'path';
import { VOLUMES_DIR } from '@iridiumcms/constants';
import * as tar from 'tar';
// Gzip-compress the contents of a directory into a tarball and return the
// archive's absolute path. Both arguments are resolved against VOLUMES_DIR;
// absolute paths pass through path.resolve unchanged, so callers may pass
// full paths too.
export const tarball = async function(directoryName: string, outputFilename: string) {
  const archivePath = path.resolve(VOLUMES_DIR, outputFilename);
  const sourceDir = path.resolve(VOLUMES_DIR, directoryName);
  await tar.c(
    {
      gzip: true,
      file: archivePath,
      cwd: sourceDir,
    },
    ['.'],
  );
  return archivePath;
};
// Extract a tarball into a directory. Arguments resolve against VOLUMES_DIR;
// absolute paths pass through unchanged.
export const untarball = async (directoryName: string, inputFilename: string) => {
  const archivePath = path.resolve(VOLUMES_DIR, inputFilename);
  const targetDir = path.resolve(VOLUMES_DIR, directoryName);
  await tar.x({
    file: archivePath,
    cwd: targetDir,
  });
};
+3
-70

@@ -1,71 +0,4 @@

import prompts from 'prompts';
import { createUser } from './src/createUser';
import {
backupDatabase,
backupMedia,
backupAnalytics,
restoreDatabase,
restoreMedia,
restoreAnalytics,
} from './src/backupAndRestore';
export { generator as backupGenerator } from './src/backup';
export { generator as restoreGenerator } from './src/restore';
// One CLI menu entry: stable id (used as the prompt value), display name,
// and the handler invoked when the entry is selected.
type Action = {
id: string;
name: string;
action: () => Promise<void> | void;
};
// Menu of CLI actions keyed by id. "Pull" downloads data from the remote
// site into the local volumes (backup*); "Push" uploads the local volumes
// to the remote site (restore*).
const actions = {
createUser: {
id: 'createUser',
name: 'Create User',
action: createUser,
},
pushDatabase: {
id: 'pushDatabase',
name: 'Push Database',
action: restoreDatabase,
},
pullDatabase: {
id: 'pullDatabase',
name: 'Pull Database',
action: backupDatabase,
},
pushMedia: {
id: 'pushMedia',
name: 'Push Media',
action: restoreMedia,
},
pullMedia: {
id: 'pullMedia',
name: 'Pull Media',
action: backupMedia,
},
pushAnalytics: {
id: 'pushAnalytics',
name: 'Push Analytics',
action: restoreAnalytics,
},
pullAnalytics: {
id: 'pullAnalytics',
name: 'Pull Analytics',
action: backupAnalytics,
},
} as Record<string, Action>;
// Present the action menu and run whichever handler the user picks.
const actionSelection = await prompts({
  type: 'select',
  name: 'actionId',
  message: '',
  choices: Object.values(actions)
    .map(({ name, id }) => ({ title: name, value: id })),
});
const { actionId } = actionSelection;
// prompts resolves with an empty object when the user cancels (Ctrl+C), so
// actionId — and therefore the lookup — can be undefined. The original
// `as Action` assertion hid that; type it honestly instead.
const action: Action | undefined = actions[actionId];
if (!action) process.exit(0);
await action.action();
export { emitter as streamEmitter } from './src/stream';
{
"name": "@iridiumcms/sync",
"version": "1.0.20",
"version": "1.0.21",
"description": "",

@@ -5,0 +5,0 @@ "main": "index.ts",

import path from 'path';
import { red, green } from 'kolorist';
import { confirm } from './confirm';
import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream';
import { promisify } from 'util';
import { restart } from './restart';
import * as tar from 'tar';
import fetch from 'node-fetch';
import {
SITE_URL,
VOLUMES_DIR,
} from '@iridiumcms/constants';
// Local volume directories that mirror the remote cms/media/analytics data.
const dbDir = path.resolve(VOLUMES_DIR, 'cms');
const mediaDir = path.resolve(VOLUMES_DIR, 'media');
const analyticsDir = path.resolve(VOLUMES_DIR, 'analytics');
// Promisified stream.pipeline: resolves on completion, rejects on error.
const pipelineAsync = promisify(pipeline);
// Pipe an HTTP response body into `outputPath`. pipeline() ends and closes
// the destination stream on completion, so the previous trailing
// writeStream.end() was a dead no-op and has been removed.
const downloadTarball = async (body: ReadableStream, outputPath: string) => {
  const writeStream = createWriteStream(outputPath);
  // @ts-ignore -- node-fetch body vs Node stream typing mismatch; works at runtime
  await pipelineAsync(body, writeStream);
};
// Extract VOLUMES_DIR/<filename> into `restoreDir`.
const unzipTarball = async (restoreDir: string, filename: string) => {
  const archivePath = path.resolve(VOLUMES_DIR, filename);
  await tar.x({
    file: archivePath,
    cwd: restoreDir,
  });
};
// Pull the remote database backup tarball and unpack it into the local
// cms volume. Fix: removed the stray `console.log('backupDatabase')` debug
// leftover (backupMedia/backupAnalytics have no equivalent).
export const backupDatabase = async function() {
  await confirm();
  const res = await fetch(`${SITE_URL}/api/rpc/backup/db`, {
    method: 'GET',
    headers: {
      'Content-Type': 'application/json',
      Authorization: process.env.IRIDIUM_RPC_TOKEN ?? '',
    },
  });
  if (!res.ok) return console.error(red('Failed to download db'));
  const body = res.body;
  if (!body) return console.error(red('Failed to download db'));
  // @ts-ignore -- node-fetch body vs Node stream typing mismatch
  await downloadTarball(body, path.resolve(VOLUMES_DIR, 'db-backup.tar.gz'));
  await unzipTarball(dbDir, 'db-backup.tar.gz');
  console.log(green('Backup complete'));
};
// Pull the remote media backup tarball and unpack it into the local media
// volume. Fix: error messages said 'Failed to download db' — a copy-paste
// bug from backupDatabase; they now name the media download.
export const backupMedia = async function() {
  await confirm();
  const res = await fetch(`${SITE_URL}/api/rpc/backup/media`, {
    method: 'GET',
    headers: {
      'Content-Type': 'application/json',
      Authorization: process.env.IRIDIUM_RPC_TOKEN ?? '',
    },
  });
  if (!res.ok) return console.error(red('Failed to download media'));
  const body = res.body;
  if (!body) return console.error(red('Failed to download media'));
  // @ts-ignore -- node-fetch body vs Node stream typing mismatch
  await downloadTarball(body, path.resolve(VOLUMES_DIR, 'media-backup.tar.gz'));
  await unzipTarball(mediaDir, 'media-backup.tar.gz');
  console.log(green('Backup complete'));
};
// Pull the remote analytics backup tarball and unpack it into the local
// analytics volume. Fix: error messages said 'Failed to download db' — a
// copy-paste bug from backupDatabase; they now name the analytics download.
export const backupAnalytics = async function() {
  await confirm();
  const res = await fetch(`${SITE_URL}/api/rpc/backup/analytics`, {
    method: 'GET',
    headers: {
      'Content-Type': 'application/json',
      Authorization: process.env.IRIDIUM_RPC_TOKEN ?? '',
    },
  });
  if (!res.ok) return console.error(red('Failed to download analytics'));
  const body = res.body;
  if (!body) return console.error(red('Failed to download analytics'));
  // @ts-ignore -- node-fetch body vs Node stream typing mismatch
  await downloadTarball(body, path.resolve(VOLUMES_DIR, 'analytics-backup.tar.gz'));
  await unzipTarball(analyticsDir, 'analytics-backup.tar.gz');
  console.log(green('Backup complete'));
};
// Pack the local cms volume into a gzip tarball, PUT it to the remote
// restore endpoint, then restart the deployment so it picks up the data.
export const restoreDatabase = async function() {
  await confirm();
  const tarballPath = path.resolve(VOLUMES_DIR, 'db-restore.tar.gz');
  await tar.c(
    {
      gzip: true,
      file: tarballPath,
      cwd: dbDir,
    },
    ['.'],
  );
  const res = await fetch(`${SITE_URL}/api/rpc/restore/db`, {
    method: 'PUT',
    headers: {
      'Content-Type': 'application/gzip',
      Authorization: process.env.IRIDIUM_RPC_TOKEN ?? '',
    },
    body: createReadStream(tarballPath),
  });
  if (!res.ok) {
    console.error(red('Failed to restore db'));
    return;
  }
  await restart();
  console.log(green('Restore complete'));
};
// Pack the local media volume into a gzip tarball, PUT it to the remote
// restore endpoint, then restart the deployment.
export const restoreMedia = async function() {
  await confirm();
  const tarballPath = path.resolve(VOLUMES_DIR, 'media-restore.tar.gz');
  await tar.c(
    {
      gzip: true,
      file: tarballPath,
      cwd: mediaDir,
    },
    ['.'],
  );
  const res = await fetch(`${SITE_URL}/api/rpc/restore/media`, {
    method: 'PUT',
    headers: {
      'Content-Type': 'application/gzip',
      Authorization: process.env.IRIDIUM_RPC_TOKEN ?? '',
    },
    body: createReadStream(tarballPath),
  });
  if (!res.ok) {
    console.error(red('Failed to restore media'));
    return;
  }
  await restart();
  console.log(green('Restore complete'));
};
// Pack the local analytics volume into a gzip tarball, PUT it to the remote
// restore endpoint, then restart the deployment.
export const restoreAnalytics = async function() {
  await confirm();
  const tarballPath = path.resolve(VOLUMES_DIR, 'analytics-restore.tar.gz');
  await tar.c(
    {
      gzip: true,
      file: tarballPath,
      cwd: analyticsDir,
    },
    ['.'],
  );
  const res = await fetch(`${SITE_URL}/api/rpc/restore/analytics`, {
    method: 'PUT',
    headers: {
      'Content-Type': 'application/gzip',
      Authorization: process.env.IRIDIUM_RPC_TOKEN ?? '',
    },
    body: createReadStream(tarballPath),
  });
  if (!res.ok) {
    console.error(red('Failed to restore analytics'));
    return;
  }
  await restart();
  console.log(green('Restore complete'));
};
import prompts from 'prompts';
import { red } from 'kolorist';
// Interactive yes/no gate for destructive operations. Exits the process
// (status 0) unless the user explicitly confirms; returns true otherwise.
export const confirm = async () => {
  const { confirmation } = await prompts([
    {
      type: 'confirm',
      name: 'confirmation',
      message: 'Are you sure?',
      initial: false,
    },
  ]);
  if (!confirmation) {
    console.log(red('Aborted'));
    process.exit(0);
  }
  return confirmation;
};
import prompts from 'prompts';
import { red, green } from 'kolorist';
import {
API_URL,
} from '@iridiumcms/constants';
import {
login as loginSchema,
email as emailSchema,
password as passwordSchema,
} from '@iridiumcms/db/schemas';
// NOTE(review): debug leftover — logs the API base URL at module load time;
// consider removing before release.
console.log(API_URL);
// Prompt for credentials and create the initial user via the RPC endpoint.
// Always terminates the process: exit 0 on success, 1 on any failure.
// Fixes: the HTTP status was never checked, so a 4xx/5xx response printed
// "User created successfully" (fetch only rejects on network errors); and
// the catch now narrows `unknown` instead of a @ts-expect-error suppression.
export const createUser = async () => {
  const response = await prompts([
    {
      type: 'text',
      name: 'email',
      message: 'What is the email of the user?',
      validate: (value: string) => emailSchema.safeParse(value).success || 'Invalid email',
    },
    {
      type: 'password',
      name: 'password',
      message: 'What is the password of the user?',
      validate: (value: string) => passwordSchema.safeParse(value).success || 'Password must be at least 8 characters',
    },
  ]);
  const validationResult = loginSchema.safeParse(response);
  if (!validationResult.success) {
    console.error(red('Validation failed:'), validationResult.error.message);
    process.exit(1);
  }
  try {
    const res = await fetch(`${API_URL}/api/rpc/user/setup`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: process.env.IRIDIUM_RPC_TOKEN ?? '',
      },
      body: JSON.stringify({
        email: response.email,
        password: response.password,
      }),
    });
    // fetch resolves on HTTP errors too — check the status explicitly.
    if (!res.ok) {
      console.error(red('Failed to create user:'), `${res.status} ${res.statusText}`);
      process.exit(1);
    }
    console.log(green('User created successfully'));
  } catch (error) {
    // catch variables are `unknown` under strict mode — narrow before use.
    const message = error instanceof Error ? error.message : String(error);
    console.error(red('Failed to create user:'), message);
    process.exit(1);
  }
  process.exit(0);
};
import prompts from 'prompts';
// Railway GraphQL credentials/endpoint; an empty token simply yields
// unauthorized API responses.
const RAILWAY_API_TOKEN = process.env.RAILWAY_API_TOKEN || '';
const RAILWAY_API_URL = 'https://backboard.railway.app/graphql/v2';
// Fetch all Railway projects visible to the configured token, including each
// project's services and environments (as GraphQL edge/node wrappers).
// Fixes: a failed HTTP response now raises a descriptive error instead of an
// opaque TypeError on `data.data`, and the trailing `.flat()` — a no-op on an
// array of project objects — has been removed.
const getProjects = async function() {
  const res = await fetch(RAILWAY_API_URL, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${RAILWAY_API_TOKEN}`,
    },
    body: JSON.stringify({
      query: `
        query {
          me {
            projects {
              edges {
                node {
                  id
                  name
                  services {
                    edges {
                      node {
                        id
                        name
                      }
                    }
                  }
                  environments {
                    edges {
                      node {
                        id
                        name
                      }
                    }
                  }
                }
              }
            }
          }
        }
      `,
    }),
  });
  if (!res.ok) {
    throw new Error(`Railway API request failed: ${res.status}`);
  }
  const data = await res.json();
  // Unwrap each edge to its project node.
  return data.data.me.projects.edges.map(edge => edge.node);
};
// Look up the id of the most recent deployment for a service within an
// environment. Fix: when no deployment exists, the original crashed with a
// TypeError on `.at(0).node`; it now throws a descriptive error instead.
const fetchDeployment = async function(projectId, serviceId, environmentId) {
  const response = await fetch(RAILWAY_API_URL, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${RAILWAY_API_TOKEN}`,
    },
    body: JSON.stringify({
      query: `
        query deployments {
          deployments(
            first: 1
            input: {
              projectId: "${projectId}"
              environmentId: "${environmentId}"
              serviceId: "${serviceId}"
            }
          ) {
            edges {
              node {
                id
                staticUrl
              }
            }
          }
        }
      `,
    }),
  });
  const data = await response.json();
  const edge = data.data.deployments.edges.at(0);
  if (!edge) {
    throw new Error(`No deployment found for service ${serviceId} in environment ${environmentId}`);
  }
  return edge.node.id;
};
// Trigger a restart via the deploymentRestart mutation. NOTE(review): despite
// the parameter name, the argument is a *deployment* id — the only caller
// (restart below) passes the id returned by fetchDeployment. Returns true on
// an HTTP-ok response.
const restartProject = async function(projectId) {
const response = await fetch(RAILWAY_API_URL, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${RAILWAY_API_TOKEN}`,
},
body: JSON.stringify({
query: `
mutation deploymentRestart {
deploymentRestart(id: "${projectId}")
}
`,
}),
});
// console.log(await response.json());
return response.ok;
};
// Interactively pick a project (and, when there are several, a service and
// environment), resolve its latest deployment, and restart it.
// Fix: prompts resolves with an empty object when the user cancels (Ctrl+C);
// the original then crashed with a TypeError on `project.services` (or on the
// service/environment lookups). Cancellations are now handled gracefully.
export const restart = async function() {
  const projects = await getProjects();
  const projectChoices = projects.map(project => ({
    title: project.name,
    value: project.id,
  }));
  const response = await prompts({
    type: 'select',
    name: 'projectId',
    message: 'Select a project to restart',
    choices: projectChoices,
  });
  const { projectId } = response;
  const project = projects.find(({ id }) => id === projectId);
  // Cancelled prompt (or empty project list) — nothing to restart.
  if (!project) return;
  const services = project.services.edges.map(edge => edge.node);
  let service = services.at(0);
  if (services.length > 1) {
    const response = await prompts({
      type: 'select',
      name: 'serviceId',
      message: 'Select a service',
      choices: services.map(({ id, name }) => ({ title: name, value: id })),
    });
    const { serviceId } = response;
    // Fall back to the default on a cancelled prompt instead of crashing.
    service = services.find(({ id }) => id === serviceId) ?? service;
  }
  const environments = project.environments.edges.map(edge => edge.node);
  let environment = environments.at(0);
  if (environments.length > 1) {
    const response = await prompts({
      type: 'select',
      name: 'environmentId',
      message: 'Select an environment',
      choices: environments.map(({ id, name }) => ({ title: name, value: id })),
    });
    const { environmentId } = response;
    environment = environments.find(({ id }) => id === environmentId) ?? environment;
  }
  const deploymentId = await fetchDeployment(project.id, service.id, environment.id);
  const restartedProject = await restartProject(deploymentId);
  console.log(restartedProject ? `Project is restarting` : 'Failed to restart project');
};