@stackbit/sdk - npm Package Compare versions

Comparing version 0.1.8 to 0.1.9


dist/analyzer/analyzer-utils.js

@@ -19,4 +19,4 @@ "use strict";

const data = await fileBrowser.getFileData(filePath);
- const hasDependency = lodash_1.default.some(packageNames, packageName => lodash_1.default.has(data, ['dependencies', packageName]));
- const hasDevDependency = lodash_1.default.some(packageNames, packageName => lodash_1.default.has(data, ['devDependencies', packageName]));
+ const hasDependency = lodash_1.default.some(packageNames, (packageName) => lodash_1.default.has(data, ['dependencies', packageName]));
+ const hasDevDependency = lodash_1.default.some(packageNames, (packageName) => lodash_1.default.has(data, ['devDependencies', packageName]));
if (hasDependency || hasDevDependency) {

@@ -23,0 +23,0 @@ filePaths.push(filePath);

@@ -69,3 +69,3 @@ "use strict";

owner: this.owner,
- repo: this.repo,
+ repo: this.repo
});

@@ -72,0 +72,0 @@ const branch = lodash_1.default.find(branchResponse.data, { name: this.branch });

@@ -31,4 +31,15 @@ "use strict";

const dataFiles = await readDirRecursivelyWithFilter(fileBrowser, rootDataDir, excludedDataFiles, consts_1.DATA_FILE_EXTENSIONS);
- const pageModelsResults = await generatePageModelsForFiles(pageFiles, rootPagesDir, fileBrowser, []);
- const dataModelsResults = await generateDataModelsForFiles(dataFiles, rootDataDir, fileBrowser, pageModelsResults.objectModels);
+ const pageModelsResults = await generatePageModelsForFiles({
+ filePaths: pageFiles,
+ dirPath: rootPagesDir,
+ fileBrowser: fileBrowser,
+ pageTypeKey: ssgMatchResult.pageTypeKey,
+ objectModels: []
+ });
+ const dataModelsResults = await generateDataModelsForFiles({
+ filePaths: dataFiles,
+ dirPath: rootDataDir,
+ fileBrowser: fileBrowser,
+ objectModels: pageModelsResults.objectModels
+ });
let pageModels = analyzePageFileMatchingProperties(pageModelsResults.pageModels);

@@ -120,3 +131,3 @@ let dataModels = analyzeDataFileMatchingProperties(dataModelsResults.dataModels);

}
- async function generatePageModelsForFiles(filePaths, dirPath, fileBrowser, objectModels) {
+ async function generatePageModelsForFiles({ filePaths, dirPath, fileBrowser, pageTypeKey, objectModels }) {
let pageModels = [];

@@ -126,2 +137,6 @@ let modelNameCounter = 1;

let data = await fileBrowser.getFileData(path_1.default.join(dirPath, filePath));
+ const extension = path_1.default.extname(filePath).substring(1);
+ if (consts_1.MARKDOWN_FILE_EXTENSIONS.includes(extension) && lodash_1.default.get(data, 'frontmatter') === null) {
+ continue;
+ }
if (lodash_1.default.has(data, 'frontmatter') && lodash_1.default.has(data, 'markdown')) {

@@ -172,3 +187,3 @@ data = lodash_1.default.assign(data.frontmatter, { markdown_content: data.markdown });

}
- async function generateDataModelsForFiles(filePaths, dirPath, fileBrowser, objectModels) {
+ async function generateDataModelsForFiles({ filePaths, dirPath, fileBrowser, objectModels }) {
const dataModels = [];

@@ -235,2 +250,3 @@ for (const filePath of filePaths) {

if (fieldValue === null) {
+ // TODO: return 'unknown' field type and coerce it to string, or consolidate with anything else
// we don't know what is the type of the field

@@ -433,3 +449,3 @@ field = null;

const subtypes = lodash_1.default.compact(lodash_1.default.uniq(lodash_1.default.map(listItemModels, 'subtype')));
- const subtype = subtypes.length === 1 ? subtypes[0] : null;
+ const subtype = subtypes.length === 1 ? subtypes[0] : 'float';
return {

@@ -630,4 +646,4 @@ items: {

case 'number':
- const subtypes = lodash_1.default.compact(lodash_1.default.uniq(lodash_1.default.map(fieldTypes, 'subtype')));
- const subtype = subtypes.length === 1 ? subtypes[0] : null;
+ const subtypes = lodash_1.default.compact(lodash_1.default.uniq(lodash_1.default.map(fields, 'subtype')));
+ const subtype = subtypes.length === 1 ? subtypes[0] : 'float';
return {

@@ -719,3 +735,15 @@ field: {

const sameFolder = allFilePathInSameFolder(pageModel.filePaths);
- const modelName = lastPart ? (lodash_1.default.endsWith(lastPart, 's') ? lastPart.substring(0, lastPart.length - 1) : lastPart) : `page_${pageCount++}`;
+ let modelName;
+ if (lastPart) {
+ if (lodash_1.default.endsWith(lastPart, 's')) {
+ modelName = lastPart.substring(0, lastPart.length - 1);
+ }
+ else {
+ modelName = lastPart;
+ }
+ modelName = lodash_1.default.snakeCase(modelName);
+ }
+ else {
+ modelName = `page_${pageCount++}`;
+ }
return {

@@ -722,0 +750,0 @@ type: 'page',

@@ -14,2 +14,3 @@ import { GetFileBrowserOptions } from './file-browser';

nodeVersion?: string;
+ pageTypeKey?: string;
options?: {

@@ -16,0 +17,0 @@ ssgDirs?: string[];

@@ -43,3 +43,3 @@ "use strict";

ssgName: ssgMatcher.name,
- ...lodash_1.default.pick(ssgMatcher, ['publishDir', 'staticDir']),
+ ...lodash_1.default.pick(ssgMatcher, ['publishDir', 'staticDir', 'pageTypeKey']),
...partialMatch

@@ -133,2 +133,3 @@ };

publishDir: '_site',
+ pageTypeKey: 'layout',
matchNodeVersion: true,

@@ -159,2 +160,3 @@ matchByPackageName: '@11ty/eleventy'

name: 'hugo',
+ pageTypeKey: 'layout',
match: async (fileBrowser) => {

@@ -205,2 +207,3 @@ let configFiles = ['config.toml', 'config.yaml', 'config.json'];

name: 'jekyll',
+ pageTypeKey: 'layout',
match: async (fileBrowser) => {

@@ -207,0 +210,0 @@ // We (Stackbit) can only run Jekyll sites, or themes, that have explicitly defined specific 'jekyll' or

"use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
Object.defineProperty(exports, "__esModule", { value: true });
exports.GLOBAL_EXCLUDES = exports.EXCLUDED_DATA_FILES = exports.EXCLUDED_MARKDOWN_FILES = exports.DATA_FILE_EXTENSIONS = exports.MARKDOWN_FILE_EXTENSIONS = void 0;
+ const lodash_1 = __importDefault(require("lodash"));
+ const excludedMarkdownFiles = ['LICENSE.md', '**/README.md', 'README.theme.md', 'CONTRIBUTING.md', 'CHANGELOG.md', 'CODE_OF_CONDUCT.md'];
exports.MARKDOWN_FILE_EXTENSIONS = ['md', 'mdx', 'markdown'];
exports.DATA_FILE_EXTENSIONS = ['yml', 'yaml', 'json', 'toml'];
- exports.EXCLUDED_MARKDOWN_FILES = ['LICENSE.md', '**/README.md', 'README.theme.md', 'CONTRIBUTING.md', 'CHANGELOG.md', 'CODE_OF_CONDUCT.md'];
+ exports.EXCLUDED_MARKDOWN_FILES = lodash_1.default.concat(excludedMarkdownFiles, excludedMarkdownFiles.map(lodash_1.default.toLower));
exports.EXCLUDED_DATA_FILES = ['stackbit.yaml', 'netlify.toml', 'theme.toml', '**/package.json', '**/package-lock.json', '**/yarn-lock.json'];
exports.GLOBAL_EXCLUDES = ['**/node_modules/**', '**/.git/**', '.idea/**', '**/.*'];
//# sourceMappingURL=consts.js.map
{
"name": "@stackbit/sdk",
"version": "0.1.8",
"version": "0.1.9",
"description": "Stackbit SDK",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

@@ -21,4 +21,4 @@ import path from 'path';

const data = await fileBrowser.getFileData(filePath);
- const hasDependency = _.some(packageNames, packageName => _.has(data, ['dependencies', packageName]));
- const hasDevDependency = _.some(packageNames, packageName => _.has(data, ['devDependencies', packageName]));
+ const hasDependency = _.some(packageNames, (packageName) => _.has(data, ['dependencies', packageName]));
+ const hasDevDependency = _.some(packageNames, (packageName) => _.has(data, ['devDependencies', packageName]));
if (hasDependency || hasDevDependency) {

@@ -25,0 +25,0 @@ filePaths.push(filePath);

@@ -122,5 +122,5 @@ import path from 'path';

owner: this.owner,
- repo: this.repo,
+ repo: this.repo
});
- const branch = _.find(branchResponse.data, {name: this.branch});
+ const branch = _.find(branchResponse.data, { name: this.branch });
if (!branch) {

@@ -127,0 +127,0 @@ throw new Error(`branch ${this.branch} not found`);

@@ -15,5 +15,5 @@ import path from 'path';

type PartialObjectModel = Omit<ObjectModel, 'label'> & { refFieldPaths?: FieldPath[]; refFields?: FieldModelProps[] };
- type PartialPageModels = Omit<PageModel, 'label'> & { filePaths: string[] };
- type PartialDataModels = Omit<DataModel, 'label'> & { filePaths: string[] };
- type PartialModels = PartialPageModels | PartialDataModels;
+ type PartialPageModel = Omit<PageModel, 'label'> & { filePaths: string[] };
+ type PartialDataModel = Omit<DataModel, 'label'> & { filePaths: string[] };
+ type PartialModel = PartialPageModel | PartialDataModel;

@@ -52,4 +52,15 @@ const SAME_FOLDER_PAGE_DSC_COEFFICIENT = 0.7;

- const pageModelsResults = await generatePageModelsForFiles(pageFiles, rootPagesDir, fileBrowser, []);
- const dataModelsResults = await generateDataModelsForFiles(dataFiles, rootDataDir, fileBrowser, pageModelsResults.objectModels);
+ const pageModelsResults = await generatePageModelsForFiles({
+ filePaths: pageFiles,
+ dirPath: rootPagesDir,
+ fileBrowser: fileBrowser,
+ pageTypeKey: ssgMatchResult.pageTypeKey,
+ objectModels: []
+ });
+ const dataModelsResults = await generateDataModelsForFiles({
+ filePaths: dataFiles,
+ dirPath: rootDataDir,
+ fileBrowser: fileBrowser,
+ objectModels: pageModelsResults.objectModels
+ });

@@ -153,12 +164,24 @@ let pageModels = analyzePageFileMatchingProperties(pageModelsResults.pageModels);

- async function generatePageModelsForFiles(
- filePaths: string[],
- dirPath: string,
- fileBrowser: FileBrowser,
- objectModels: PartialObjectModel[]
- ): Promise<{ pageModels: PartialPageModels[]; objectModels: PartialObjectModel[] }> {
- let pageModels: PartialPageModels[] = [];
+ interface GeneratePageModelsOptions {
+ filePaths: string[];
+ dirPath: string;
+ fileBrowser: FileBrowser;
+ pageTypeKey?: string;
+ objectModels: PartialObjectModel[];
+ }
+ async function generatePageModelsForFiles({
+ filePaths,
+ dirPath,
+ fileBrowser,
+ pageTypeKey,
+ objectModels
+ }: GeneratePageModelsOptions): Promise<{ pageModels: PartialPageModel[]; objectModels: PartialObjectModel[] }> {
+ let pageModels: PartialPageModel[] = [];
let modelNameCounter = 1;
for (const filePath of filePaths) {
let data = await fileBrowser.getFileData(path.join(dirPath, filePath));
+ const extension = path.extname(filePath).substring(1);
+ if (MARKDOWN_FILE_EXTENSIONS.includes(extension) && _.get(data, 'frontmatter') === null) {
+ continue;
+ }
if (_.has(data, 'frontmatter') && _.has(data, 'markdown')) {

@@ -172,5 +195,5 @@ data = _.assign(data.frontmatter, { markdown_content: data.markdown });

const pageModelGroups: {
- sameFolder: PartialPageModels[];
+ sameFolder: PartialPageModel[];
sameFolderFieldsList: Field[][];
- diffFolder: PartialPageModels[];
+ diffFolder: PartialPageModel[];
diffFolderFieldsList: Field[][];

@@ -228,9 +251,16 @@ } = {

- async function generateDataModelsForFiles(
- filePaths: string[],
- dirPath: string,
- fileBrowser: FileBrowser,
- objectModels: PartialObjectModel[]
- ): Promise<{ dataModels: PartialDataModels[]; objectModels: PartialObjectModel[] }> {
- const dataModels: PartialDataModels[] = [];
+ interface GenerateDataModelsOptions {
+ filePaths: string[];
+ dirPath: string;
+ fileBrowser: FileBrowser;
+ objectModels: PartialObjectModel[];
+ }
+ async function generateDataModelsForFiles({
+ filePaths,
+ dirPath,
+ fileBrowser,
+ objectModels
+ }: GenerateDataModelsOptions): Promise<{ dataModels: PartialDataModel[]; objectModels: PartialObjectModel[] }> {
+ const dataModels: PartialDataModel[] = [];
for (const filePath of filePaths) {

@@ -311,2 +341,3 @@ let data = await fileBrowser.getFileData(path.join(dirPath, filePath));

if (fieldValue === null) {
+ // TODO: return 'unknown' field type and coerce it to string, or consolidate with anything else
// we don't know what is the type of the field

@@ -512,3 +543,3 @@ field = null;

const subtypes = _.compact(_.uniq(_.map(listItemModels, 'subtype')));
- const subtype = subtypes.length === 1 ? subtypes[0] : null;
+ const subtype = subtypes.length === 1 ? subtypes[0] : 'float';
return {

@@ -736,4 +767,4 @@ items: {

case 'number':
- const subtypes = _.compact(_.uniq(_.map(fieldTypes, 'subtype')));
- const subtype = subtypes.length === 1 ? subtypes[0] : null;
+ const subtypes = _.compact(_.uniq(_.map(fields, 'subtype')));
+ const subtype = subtypes.length === 1 ? subtypes[0] : 'float';
return {
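
The two hunks above change the fallback used when merged number subtypes disagree: a single agreed subtype is kept, while any mix now becomes 'float' instead of null. A minimal sketch of that rule as it reads after this release (the helper name and the 'int'/'float' values are assumptions for illustration, not the package's public API):

import _ from 'lodash';

// If every observed subtype agrees, keep it; otherwise fall back to 'float'
// (0.1.8 returned null in the mixed case).
function consolidateNumberSubtype(subtypes: Array<'int' | 'float' | undefined>): 'int' | 'float' {
    const unique = _.compact(_.uniq(subtypes));
    return unique.length === 1 ? unique[0] : 'float';
}

consolidateNumberSubtype(['int', 'int']);   // => 'int'
consolidateNumberSubtype(['int', 'float']); // => 'float'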

@@ -825,11 +856,21 @@ field: {

- function analyzePageFileMatchingProperties(pageModelsWithFilePaths: PartialPageModels[]) {
+ function analyzePageFileMatchingProperties(pageModelsWithFilePaths: PartialPageModel[]) {
let pageCount = 1;
pageModelsWithFilePaths = _.map(
pageModelsWithFilePaths,
- (pageModel: PartialPageModels): PartialPageModels => {
+ (pageModel: PartialPageModel): PartialPageModel => {
const folder = findCommonAncestorFolder(pageModel.filePaths);
const lastPart = _.last(folder.split(path.sep));
const sameFolder = allFilePathInSameFolder(pageModel.filePaths);
- const modelName = lastPart ? (_.endsWith(lastPart, 's') ? lastPart.substring(0, lastPart.length - 1) : lastPart) : `page_${pageCount++}`;
+ let modelName;
+ if (lastPart) {
+ if (_.endsWith(lastPart, 's')) {
+ modelName = lastPart.substring(0, lastPart.length - 1);
+ } else {
+ modelName = lastPart;
+ }
+ modelName = _.snakeCase(modelName);
+ } else {
+ modelName = `page_${pageCount++}`;
+ }
return {
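
The page-model naming above now also snake_cases the folder-derived name before use. A quick illustration, assuming a pages directory whose last path segment is 'blogPosts' (the directory name is hypothetical):

import _ from 'lodash';

const lastPart = 'blogPosts'; // last segment of the common ancestor folder
const singular = _.endsWith(lastPart, 's') ? lastPart.substring(0, lastPart.length - 1) : lastPart;
const oldName = singular;              // 0.1.8 result: 'blogPost'
const newName = _.snakeCase(singular); // 0.1.9 result: 'blog_post'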

@@ -875,3 +916,3 @@ type: 'page',

- function analyzeDataFileMatchingProperties(dataModelsWithFilePaths: PartialDataModels[]) {
+ function analyzeDataFileMatchingProperties(dataModelsWithFilePaths: PartialDataModel[]) {
const dataModels: DataModel[] = [];

@@ -943,18 +984,21 @@ const modelNames: string[] = [];

}
- const adjustedModels = _.map(models, (model): T => {
-     if (model.file) {
-         return Object.assign(model, {
-             file: path.relative(commonDirString, model.file)
-         });
-     } else {
-         const folder = path.relative(commonDirString, model.folder!);
-         if (folder) {
-             return Object.assign(model, {
-                 folder: folder
-             });
-         } else {
-             return _.omit(model, 'folder') as T;
-         }
-     }
- });
+ const adjustedModels = _.map(
+     models,
+     (model): T => {
+         if (model.file) {
+             return Object.assign(model, {
+                 file: path.relative(commonDirString, model.file)
+             });
+         } else {
+             const folder = path.relative(commonDirString, model.folder!);
+             if (folder) {
+                 return Object.assign(model, {
+                     folder: folder
+                 });
+             } else {
+                 return _.omit(model, 'folder') as T;
+             }
+         }
+     }
+ );
return {

@@ -961,0 +1005,0 @@ models: adjustedModels,

@@ -21,2 +21,3 @@ import path from 'path';

nodeVersion?: string;
+ pageTypeKey?: string;
options?: {

@@ -59,3 +60,3 @@ ssgDirs?: string[];

ssgName: ssgMatcher.name,
- ..._.pick(ssgMatcher, ['publishDir', 'staticDir']),
+ ..._.pick(ssgMatcher, ['publishDir', 'staticDir', 'pageTypeKey']),
...partialMatch

@@ -109,2 +110,3 @@ };

staticDir?: string;
+ pageTypeKey?: string;
match?: (fileBrowser: FileBrowser) => Promise<SSGMatchPartialResult | null>;

@@ -160,2 +162,3 @@ }

publishDir: '_site',
+ pageTypeKey: 'layout',
matchNodeVersion: true,

@@ -186,2 +189,3 @@ matchByPackageName: '@11ty/eleventy'

name: 'hugo',
+ pageTypeKey: 'layout',
match: async (fileBrowser) => {

@@ -232,2 +236,3 @@ let configFiles = ['config.toml', 'config.yaml', 'config.json'];

name: 'jekyll',
+ pageTypeKey: 'layout',
match: async (fileBrowser) => {

@@ -234,0 +239,0 @@ // We (Stackbit) can only run Jekyll sites, or themes, that have explicitly defined specific 'jekyll' or
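
The ssg-matcher changes attach a pageTypeKey ('layout' for Eleventy, Hugo and Jekyll) to the match result, and the schema generator now receives it through generatePageModelsForFiles. The consuming code is not part of this diff; the sketch below only illustrates the kind of front-matter lookup such a key enables, with a hypothetical helper name and data shape:

import _ from 'lodash';

// Hypothetical helper: given parsed front matter and the SSG's page-type key,
// read the page's declared type (for example a Jekyll or Hugo 'layout' value).
function getPageType(frontmatter: Record<string, unknown>, pageTypeKey?: string): string | undefined {
    return pageTypeKey ? (_.get(frontmatter, pageTypeKey) as string | undefined) : undefined;
}

getPageType({ layout: 'post', title: 'Hello' }, 'layout'); // => 'post'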

@@ -0,5 +1,9 @@

+ import _ from 'lodash';
+ const excludedMarkdownFiles = ['LICENSE.md', '**/README.md', 'README.theme.md', 'CONTRIBUTING.md', 'CHANGELOG.md', 'CODE_OF_CONDUCT.md'];
export const MARKDOWN_FILE_EXTENSIONS = ['md', 'mdx', 'markdown'];
export const DATA_FILE_EXTENSIONS = ['yml', 'yaml', 'json', 'toml'];
- export const EXCLUDED_MARKDOWN_FILES = ['LICENSE.md', '**/README.md', 'README.theme.md', 'CONTRIBUTING.md', 'CHANGELOG.md', 'CODE_OF_CONDUCT.md'];
+ export const EXCLUDED_MARKDOWN_FILES = _.concat(excludedMarkdownFiles, excludedMarkdownFiles.map(_.toLower));
export const EXCLUDED_DATA_FILES = ['stackbit.yaml', 'netlify.toml', 'theme.toml', '**/package.json', '**/package-lock.json', '**/yarn-lock.json'];
export const GLOBAL_EXCLUDES = ['**/node_modules/**', '**/.git/**', '.idea/**', '**/.*'];
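
The consts change concatenates the markdown exclusion list with a lower-cased copy of each entry, so case variants of the standard repository files are excluded as well. A small illustration of the resulting list, trimmed to two entries:

import _ from 'lodash';

const excludedMarkdownFiles = ['LICENSE.md', 'CHANGELOG.md'];
const EXCLUDED_MARKDOWN_FILES = _.concat(excludedMarkdownFiles, excludedMarkdownFiles.map(_.toLower));
// => ['LICENSE.md', 'CHANGELOG.md', 'license.md', 'changelog.md']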

Sorry, the diffs of the remaining 5 files are not supported yet.
