@microsoft/node-core-library - npm Package Compare versions

Comparing version 0.2.11 to 0.3.0

CHANGELOG.json

@@ -5,2 +5,16 @@ {
+{
+"version": "0.3.0",
+"tag": "@microsoft/node-core-library_v0.3.0",
+"date": "Fri, 22 Sep 2017 01:04:02 GMT",
+"comments": {
+"minor": [
+{
+"author": "Nick Pape <nickpape@users.noreply.github.com>",
+"commit": "481a10f460a454fb5a3e336e3cf25a1c3f710645",
+"comment": "Upgrade to es6"
+}
+]
+}
+},
 {
 "version": "0.2.11",
@@ -7,0 +21,0 @@ "tag": "@microsoft/node-core-library_v0.2.11",

CHANGELOG.md
 # Change Log - @microsoft/node-core-library
-This log was last generated on Wed, 20 Sep 2017 22:10:17 GMT and should not be manually modified.
+This log was last generated on Fri, 22 Sep 2017 01:04:02 GMT and should not be manually modified.
+## 0.3.0
+Fri, 22 Sep 2017 01:04:02 GMT
+### Minor changes
+- Upgrade to es6
 ## 0.2.11

@@ -6,0 +13,0 @@ Wed, 20 Sep 2017 22:10:17 GMT
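
The only change recorded for 0.3.0 is "Upgrade to es6": the JavaScript shipped in lib/ is now emitted for an ES6 runtime rather than ES5 (most likely a TypeScript compile-target change; the changelog says nothing more). The exported classes and method signatures in the diffs below are unchanged, so every file follows the same mechanical pattern. A minimal, hypothetical sketch of that pattern (not taken from the package):

```js
// ES5-style emit (the old 0.2.11 output): an IIFE with function/prototype
// assignments, `var`, and string concatenation.
var GreeterEs5 = (function () {
    function GreeterEs5() {
    }
    GreeterEs5.hello = function (name) {
        return "Hello, " + name + "!";
    };
    return GreeterEs5;
}());

// ES6-style emit (the new 0.3.0 output): a real `class` with `static` members,
// `const`/`let`, template literals, and `for...of` loops.
class GreeterEs6 {
    static hello(name) {
        return `Hello, ${name}!`;
    }
}
```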

lib/FileDiffTest.js

@@ -5,5 +5,5 @@ "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-var path = require("path");
-var fsx = require("fs-extra");
-var PackageJsonLookup_1 = require("./PackageJsonLookup");
+const path = require("path");
+const fsx = require("fs-extra");
+const PackageJsonLookup_1 = require("./PackageJsonLookup");
 /**
@@ -16,5 +16,3 @@ * Implements a unit testing strategy that generates output files, and then
 */
-var FileDiffTest = (function () {
-function FileDiffTest() {
-}
+class FileDiffTest {
 /**
@@ -25,5 +23,5 @@ * Clears the internal file cache.
 */
-FileDiffTest.clearCache = function () {
+static clearCache() {
 this._packageJsonLookup.clearCache();
-};
+}
 /**
@@ -37,4 +35,4 @@ * Sets up a folder in the temp directory where the unit test should write its output files
 */
-FileDiffTest.prepareFolder = function (unitTestDirName, testModule) {
-var packageJsonFolderPath = this._packageJsonLookup.tryGetPackageFolder(unitTestDirName);
+static prepareFolder(unitTestDirName, testModule) {
+const packageJsonFolderPath = this._packageJsonLookup.tryGetPackageFolder(unitTestDirName);
 if (packageJsonFolderPath === undefined) {
@@ -46,7 +44,7 @@ throw new Error('Unable to find a package.json in any parent folder of ' + unitTestDirName);
 }
-var diffTestPath = path.join(packageJsonFolderPath, 'temp', 'diff-tests', testModule);
+const diffTestPath = path.join(packageJsonFolderPath, 'temp', 'diff-tests', testModule);
 fsx.mkdirsSync(diffTestPath);
 fsx.emptyDirSync(diffTestPath);
 return diffTestPath;
-};
+}
 /**
@@ -59,8 +57,8 @@ * Compares the contents of two files, and returns true if they are equivalent.
 */
-FileDiffTest.assertEqual = function (actualFilePath, expectedFilePath) {
-var actualContent = fsx.readFileSync(actualFilePath).toString('utf8');
-var expectedContent = fsx.readFileSync(expectedFilePath).toString('utf8');
+static assertEqual(actualFilePath, expectedFilePath) {
+const actualContent = fsx.readFileSync(actualFilePath).toString('utf8');
+const expectedContent = fsx.readFileSync(expectedFilePath).toString('utf8');
 // NOTE: "\s" also matches "\r" and "\n"
-var normalizedActual = FileDiffTest._getNormalizedContent(actualContent);
-var normalizedExpected = FileDiffTest._getNormalizedContent(expectedContent);
+const normalizedActual = FileDiffTest._getNormalizedContent(actualContent);
+const normalizedExpected = FileDiffTest._getNormalizedContent(expectedContent);
 if (normalizedActual !== normalizedExpected) {
@@ -70,13 +68,12 @@ throw new Error('The test output file does not match the expected input:\n'
 }
-};
-FileDiffTest._getNormalizedContent = function (s) {
+}
+static _getNormalizedContent(s) {
 return s.replace(/\r\n/g, '\n').replace(/\r/g, '') // convert to Unix-style newlines
 .replace(/\s+\n/g, '\n') // strip spaces from end of line
 .replace(/\n+$/g, ''); // strip newlines from end of file
-};
-FileDiffTest._packageJsonLookup = new PackageJsonLookup_1.PackageJsonLookup();
-return FileDiffTest;
-}());
+}
+}
+FileDiffTest._packageJsonLookup = new PackageJsonLookup_1.PackageJsonLookup();
 exports.FileDiffTest = FileDiffTest;
 //# sourceMappingURL=FileDiffTest.js.map
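
For orientation, here is a sketch of how the FileDiffTest API above is typically driven from a unit test, based only on the signatures visible in this diff (prepareFolder, assertEqual); the test name and file paths are hypothetical:

```js
const path = require('path');
const fsx = require('fs-extra');
const { FileDiffTest } = require('@microsoft/node-core-library/lib/FileDiffTest');

// Creates (and empties) <packageFolder>/temp/diff-tests/ExampleTest for this test run
const outputFolder = FileDiffTest.prepareFolder(__dirname, 'ExampleTest');

// The unit test writes its generated output...
const actualFile = path.join(outputFolder, 'output.txt');
fsx.writeFileSync(actualFile, 'line 1\r\nline 2\r\n');

// ...then compares it to a checked-in reference file. assertEqual() normalizes
// newlines and trailing whitespace and throws if the contents still differ.
FileDiffTest.assertEqual(actualFile, path.join(__dirname, 'expected-output.txt'));
```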

lib/JsonFile.js

@@ -5,5 +5,5 @@ "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-var fsx = require("fs-extra");
-var os = require("os");
-var jju = require("jju");
+const fsx = require("fs-extra");
+const os = require("os");
+const jju = require("jju");
 /**
@@ -13,13 +13,11 @@ * Utilities for reading/writing JSON files.
 */
-var JsonFile = (function () {
-function JsonFile() {
-}
+class JsonFile {
 /**
 * Loads a JSON file.
 */
-JsonFile.load = function (jsonFilename) {
+static load(jsonFilename) {
 if (!fsx.existsSync(jsonFilename)) {
-throw new Error("Input file not found: " + jsonFilename);
+throw new Error(`Input file not found: ${jsonFilename}`);
 }
-var buffer = fsx.readFileSync(jsonFilename);
+const buffer = fsx.readFileSync(jsonFilename);
 try {
@@ -29,13 +27,13 @@ return jju.parse(buffer.toString());
 catch (error) {
-throw new Error("Error reading \"" + jsonFilename + "\":" + os.EOL + (" " + error.message));
+throw new Error(`Error reading "${jsonFilename}":` + os.EOL + ` ${error.message}`);
 }
-};
+}
 /**
 * Loads a JSON file and validate its schema.
 */
-JsonFile.loadAndValidate = function (jsonFilename, jsonSchema, options) {
-var jsonObject = JsonFile.load(jsonFilename); // tslint:disable-line:no-any
+static loadAndValidate(jsonFilename, jsonSchema, options) {
+const jsonObject = JsonFile.load(jsonFilename); // tslint:disable-line:no-any
 jsonSchema.validateObject(jsonObject, jsonFilename, options);
 return jsonObject;
-};
+}
 /**
@@ -46,7 +44,7 @@ * Loads a JSON file and validate its schema, reporting errors using a callback
 */
-JsonFile.loadAndValidateWithCallback = function (jsonFilename, jsonSchema, errorCallback) {
-var jsonObject = JsonFile.load(jsonFilename); // tslint:disable-line:no-any
+static loadAndValidateWithCallback(jsonFilename, jsonSchema, errorCallback) {
+const jsonObject = JsonFile.load(jsonFilename); // tslint:disable-line:no-any
 jsonSchema.validateObjectWithCallback(jsonObject, errorCallback);
 return jsonObject;
-};
+}
 /**
@@ -58,5 +56,5 @@ * Serializes the specified JSON object to a string buffer.
 */
-JsonFile.stringify = function (jsonObject, options) {
+static stringify(jsonObject, options) {
 JsonFile.validateNoUndefinedMembers(jsonObject);
-var stringified = JSON.stringify(jsonObject, undefined, 2) + '\n';
+const stringified = JSON.stringify(jsonObject, undefined, 2) + '\n';
 if (options && options.unixNewlines) {
@@ -68,3 +66,3 @@ return stringified;
 }
-};
+}
 /**
@@ -77,6 +75,5 @@ * Saves the file to disk. Returns false if nothing was written due to options.onlyIfChanged.
 */
-JsonFile.save = function (jsonObject, jsonFilename, options) {
-if (options === void 0) { options = {}; }
-var normalized = JsonFile.stringify(jsonObject, options);
-var buffer = new Buffer(normalized); // utf8 encoding happens here
+static save(jsonObject, jsonFilename, options = {}) {
+const normalized = JsonFile.stringify(jsonObject, options);
+const buffer = new Buffer(normalized); // utf8 encoding happens here
 if (options.onlyIfChanged) {
@@ -86,3 +83,3 @@ // Has the file changed?
 try {
-var oldBuffer = fsx.readFileSync(jsonFilename);
+const oldBuffer = fsx.readFileSync(jsonFilename);
 if (Buffer.compare(buffer, oldBuffer) === 0) {
@@ -111,3 +108,3 @@ // Nothing has changed, so don't touch the file
 return true;
-};
+}
 /**
@@ -118,7 +115,7 @@ * Used to validate a data structure before writing. Reports an error if there
 // tslint:disable-next-line:no-any
-JsonFile.validateNoUndefinedMembers = function (jsonObject) {
+static validateNoUndefinedMembers(jsonObject) {
 return JsonFile._validateNoUndefinedMembers(jsonObject, []);
-};
+}
 // Private implementation of validateNoUndefinedMembers()
-JsonFile._validateNoUndefinedMembers = function (jsonObject, keyPath) {
+static _validateNoUndefinedMembers(jsonObject, keyPath) {
 if (!jsonObject) {
@@ -128,10 +125,9 @@ return;
 if (typeof jsonObject === 'object') {
-for (var _i = 0, _a = Object.keys(jsonObject); _i < _a.length; _i++) {
-var key = _a[_i];
+for (const key of Object.keys(jsonObject)) {
 keyPath.push(key);
 // tslint:disable-next-line:no-any
-var value = jsonObject[key];
+const value = jsonObject[key];
 if (value === undefined) {
-var fullPath = JsonFile._formatKeyPath(keyPath);
-throw new Error("The value for " + fullPath + " is undefined");
+const fullPath = JsonFile._formatKeyPath(keyPath);
+throw new Error(`The value for ${fullPath} is undefined`);
 }
@@ -142,12 +138,11 @@ JsonFile._validateNoUndefinedMembers(value, keyPath);
 }
-};
+}
 // Given this input: ['items', '4', 'syntax', 'parameters', 'string "with" symbols", 'type']
 // Return this string: items[4].syntax.parameters["string \"with\" symbols"].type
-JsonFile._formatKeyPath = function (keyPath) {
-var result = '';
-for (var _i = 0, keyPath_1 = keyPath; _i < keyPath_1.length; _i++) {
-var key = keyPath_1[_i];
+static _formatKeyPath(keyPath) {
+let result = '';
+for (const key of keyPath) {
 if (/^[0-9]+$/.test(key)) {
 // It's an integer, so display like this: parent[123]
-result += "[" + key + "]";
+result += `[${key}]`;
 }
@@ -159,3 +154,3 @@ else if (/^[a-z_][a-z_0-9]*$/i.test(key)) {
 }
-result += "" + key;
+result += `${key}`;
 }
@@ -166,9 +161,9 @@ else {
 // To this: A path: \"C:\\file\"
-var escapedKey = key.replace(/[\\]/g, '\\\\') // escape backslashes
+const escapedKey = key.replace(/[\\]/g, '\\\\') // escape backslashes
 .replace(/["]/g, '\\"'); // escape quotes
-result += "[\"" + escapedKey + "\"]";
+result += `["${escapedKey}"]`;
 }
 }
 return result;
-};
+}
 /**
@@ -181,9 +176,8 @@ * Returns the same thing as targetString.replace(searchValue, replaceValue), except that
 */
-JsonFile._getAllReplaced = function (targetString, searchValue, replaceValue) {
+static _getAllReplaced(targetString, searchValue, replaceValue) {
 return targetString.split(searchValue).join(replaceValue);
-};
-return JsonFile;
-}());
+}
+}
 exports.JsonFile = JsonFile;
 //# sourceMappingURL=JsonFile.js.map
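
As a quick reference, a sketch of the JsonFile API shown above, using only the methods visible in this diff; the file name is hypothetical:

```js
const { JsonFile } = require('@microsoft/node-core-library/lib/JsonFile');

// load() throws if the file is missing or is not valid JSON
const config = JsonFile.load('./config.json');

// save() rejects objects containing `undefined` members; with onlyIfChanged it
// returns false (and leaves the file alone) when the serialized output is identical.
config.lastBuild = new Date().toISOString();
const written = JsonFile.save(config, './config.json', { onlyIfChanged: true });
console.log(written ? 'config.json updated' : 'config.json unchanged');
```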

lib/JsonSchema.js

@@ -5,7 +5,7 @@ "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-var fsx = require("fs-extra");
-var os = require("os");
-var path = require("path");
-var Validator = require("z-schema");
-var JsonFile_1 = require("./JsonFile");
+const fsx = require("fs-extra");
+const os = require("os");
+const path = require("path");
+const Validator = require("z-schema");
+const JsonFile_1 = require("./JsonFile");
 /**
@@ -20,4 +20,4 @@ * Represents a JSON schema that can be used to validate JSON data files loaded by the JsonFile class.
 */
-var JsonSchema = (function () {
-function JsonSchema() {
+class JsonSchema {
+constructor() {
 this._dependentSchemas = [];
@@ -34,3 +34,3 @@ this._filename = '';
 */
-JsonSchema.fromFile = function (filename, options) {
+static fromFile(filename, options) {
 // This is a quick and inexpensive test to avoid the catch the most common errors early.
@@ -41,3 +41,3 @@ // Full validation will happen later in JsonSchema.compile().
 }
-var schema = new JsonSchema();
+const schema = new JsonSchema();
 schema._filename = filename;
@@ -48,3 +48,3 @@ if (options) {
 return schema;
-};
+}
 /**
@@ -56,10 +56,9 @@ * Registers a JsonSchema that will be loaded from a file on disk.
 */
-JsonSchema.fromLoadedObject = function (schemaObject) {
-var schema = new JsonSchema();
+static fromLoadedObject(schemaObject) {
+const schema = new JsonSchema();
 schema._schemaObject = schemaObject;
 return schema;
-};
-JsonSchema._collectDependentSchemas = function (collectedSchemas, dependentSchemas, seenObjects, seenIds) {
-for (var _i = 0, dependentSchemas_1 = dependentSchemas; _i < dependentSchemas_1.length; _i++) {
-var dependentSchema = dependentSchemas_1[_i];
+}
+static _collectDependentSchemas(collectedSchemas, dependentSchemas, seenObjects, seenIds) {
+for (const dependentSchema of dependentSchemas) {
 // It's okay for the same schema to appear multiple times in the tree, but we only process it once
@@ -70,9 +69,9 @@ if (seenObjects.has(dependentSchema)) {
 seenObjects.add(dependentSchema);
-var schemaId = dependentSchema._ensureLoaded();
+const schemaId = dependentSchema._ensureLoaded();
 if (schemaId === '') {
-throw new Error("This schema " + dependentSchema.shortName + " cannot be referenced"
+throw new Error(`This schema ${dependentSchema.shortName} cannot be referenced`
 + ' because is missing the "id" field');
 }
 if (seenIds.has(schemaId)) {
-throw new Error("This schema " + dependentSchema.shortName + " has the same \"id\" as"
+throw new Error(`This schema ${dependentSchema.shortName} has the same "id" as`
 + ' another schema in this set');
@@ -84,19 +83,18 @@ }
 }
-};
+}
 /**
 * Used to nicely format the ZSchema error tree.
 */
-JsonSchema._formatErrorDetails = function (errorDetails) {
+static _formatErrorDetails(errorDetails) {
 return JsonSchema._formatErrorDetailsHelper(errorDetails, '', '');
-};
+}
 /**
 * Used by _formatErrorDetails.
 */
-JsonSchema._formatErrorDetailsHelper = function (errorDetails, indent, buffer) {
-for (var _i = 0, errorDetails_1 = errorDetails; _i < errorDetails_1.length; _i++) {
-var errorDetail = errorDetails_1[_i];
-buffer += os.EOL + indent + ("Error: " + errorDetail.path);
+static _formatErrorDetailsHelper(errorDetails, indent, buffer) {
+for (const errorDetail of errorDetails) {
+buffer += os.EOL + indent + `Error: ${errorDetail.path}`;
 if (errorDetail.description) {
-var MAX_LENGTH = 40;
-var truncatedDescription = errorDetail.description.trim();
+const MAX_LENGTH = 40;
+let truncatedDescription = errorDetail.description.trim();
 if (truncatedDescription.length > MAX_LENGTH) {
@@ -106,5 +104,5 @@ truncatedDescription = truncatedDescription.substr(0, MAX_LENGTH - 3)
 }
-buffer += " (" + truncatedDescription + ")";
+buffer += ` (${truncatedDescription})`;
 }
-buffer += os.EOL + indent + (" " + errorDetail.message);
+buffer += os.EOL + indent + ` ${errorDetail.message}`;
 if (errorDetail.inner) {
@@ -115,27 +113,23 @@ buffer = JsonSchema._formatErrorDetailsHelper(errorDetail.inner, indent + ' ', buffer);
 return buffer;
-};
-Object.defineProperty(JsonSchema.prototype, "shortName", {
-/**
-* Returns a short name for this schema, for use in error messages.
-* @remarks
-* If the schema was loaded from a file, then the base filename is used. Otherwise, the "id"
-* field is used if available.
-*/
-get: function () {
-if (!this._filename) {
-if (this._schemaObject) {
-var schemaWithId = this._schemaObject;
-if (schemaWithId.id) {
-return schemaWithId.id;
-}
 }
+/**
+* Returns a short name for this schema, for use in error messages.
+* @remarks
+* If the schema was loaded from a file, then the base filename is used. Otherwise, the "id"
+* field is used if available.
+*/
+get shortName() {
+if (!this._filename) {
+if (this._schemaObject) {
+const schemaWithId = this._schemaObject;
+if (schemaWithId.id) {
+return schemaWithId.id;
+}
-return '(anonymous schema)';
-}
-else {
-return path.basename(this._filename);
-}
-},
-enumerable: true,
-configurable: true
-});
+return '(anonymous schema)';
+}
+else {
+return path.basename(this._filename);
+}
+}
 /**
@@ -146,7 +140,7 @@ * If not already done, this loads the schema from disk and compiles it.
 */
-JsonSchema.prototype.ensureCompiled = function () {
+ensureCompiled() {
 this._ensureLoaded();
 if (!this._validator) {
 // Don't assign this to _validator until we're sure everything was successful
-var newValidator = new Validator({
+const newValidator = new Validator({
 breakOnFirstError: false,
@@ -156,3 +150,3 @@ noTypeless: true,
 });
-var anythingSchema = {
+const anythingSchema = {
 'type': [
@@ -169,12 +163,11 @@ 'array',
 newValidator.setRemoteReference('http://json-schema.org/draft-04/schema', anythingSchema);
-var collectedSchemas = [];
-var seenObjects = new Set();
-var seenIds = new Set();
+const collectedSchemas = [];
+const seenObjects = new Set();
+const seenIds = new Set();
 JsonSchema._collectDependentSchemas(collectedSchemas, this._dependentSchemas, seenObjects, seenIds);
 // Validate each schema in order. We specifically do not supply them all together, because we want
 // to make sure that circular references will fail to validate.
-for (var _i = 0, collectedSchemas_1 = collectedSchemas; _i < collectedSchemas_1.length; _i++) {
-var collectedSchema = collectedSchemas_1[_i];
+for (const collectedSchema of collectedSchemas) {
 if (!newValidator.validateSchema(collectedSchema._schemaObject)) {
-throw new Error("Failed to validate schema \"" + collectedSchema.shortName + "\":" + os.EOL
+throw new Error(`Failed to validate schema "${collectedSchema.shortName}":` + os.EOL
 + JsonSchema._formatErrorDetails(newValidator.getLastErrors()));
@@ -185,3 +178,3 @@ }
 }
-};
+}
 /**
@@ -195,5 +188,5 @@ * Validates the specified JSON object against this JSON schema. If the validation fails,
 */
-JsonSchema.prototype.validateObject = function (jsonObject, filenameForErrors, options) {
-this.validateObjectWithCallback(jsonObject, function (errorInfo) {
-var prefix = (options && options.customErrorHeader) ? options.customErrorHeader
+validateObject(jsonObject, filenameForErrors, options) {
+this.validateObjectWithCallback(jsonObject, (errorInfo) => {
+const prefix = (options && options.customErrorHeader) ? options.customErrorHeader
 : 'JSON validation failed:';
@@ -203,3 +196,3 @@ throw new Error(prefix + os.EOL +
 });
-};
+}
 /**
@@ -209,7 +202,7 @@ * Validates the specified JSON object against this JSON schema. If the validation fails,
 */
-JsonSchema.prototype.validateObjectWithCallback = function (jsonObject, errorCallback) {
+validateObjectWithCallback(jsonObject, errorCallback) {
 this.ensureCompiled();
 if (!this._validator.validate(jsonObject, this._schemaObject)) {
-var errorDetails = JsonSchema._formatErrorDetails(this._validator.getLastErrors());
-var args = {
+const errorDetails = JsonSchema._formatErrorDetails(this._validator.getLastErrors());
+const args = {
 details: errorDetails
@@ -219,4 +212,4 @@ };
 }
-};
-JsonSchema.prototype._ensureLoaded = function () {
+}
+_ensureLoaded() {
 if (!this._schemaObject) {
@@ -226,7 +219,6 @@ this._schemaObject = JsonFile_1.JsonFile.load(this._filename);
 return this._schemaObject.id || '';
-};
-return JsonSchema;
-}());
+}
+}
 exports.JsonSchema = JsonSchema;
 //# sourceMappingURL=JsonSchema.js.map
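
And a sketch of how JsonSchema pairs with JsonFile, again using only the methods visible above; the schema and data file names are hypothetical:

```js
const { JsonFile } = require('@microsoft/node-core-library/lib/JsonFile');
const { JsonSchema } = require('@microsoft/node-core-library/lib/JsonSchema');

const schema = JsonSchema.fromFile('./example-schema.json');

// Throws with a formatted z-schema error report if validation fails
const data = JsonFile.loadAndValidate('./example-data.json', schema);

// Or collect the errors through a callback instead of throwing
schema.validateObjectWithCallback(data, (errorInfo) => {
  console.log(errorInfo.details);
});
```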

lib/PackageJsonLookup.js

@@ -6,5 +6,5 @@ "use strict";
 /* tslint:disable:no-constant-condition */
-var fsx = require("fs-extra");
-var path = require("path");
-var JsonFile_1 = require("./JsonFile");
+const fsx = require("fs-extra");
+const path = require("path");
+const JsonFile_1 = require("./JsonFile");
 /**
@@ -16,4 +16,4 @@ * This class provides methods for finding the nearest "package.json" for a folder
 */
-var PackageJsonLookup = (function () {
-function PackageJsonLookup() {
+class PackageJsonLookup {
+constructor() {
 this.clearCache();
@@ -26,6 +26,6 @@ }
 */
-PackageJsonLookup.prototype.clearCache = function () {
+clearCache() {
 this._packageFolderCache = new Map();
 this._packageNameCache = new Map();
-};
+}
 /**
@@ -39,3 +39,3 @@ * Finds the path to the package folder of a given currentPath, by probing
 */
-PackageJsonLookup.prototype.tryGetPackageFolder = function (sourceFilePath) {
+tryGetPackageFolder(sourceFilePath) {
 // Two lookups are required, because get() cannot distinguish the undefined value
@@ -46,4 +46,4 @@ // versus a missing key.
 }
-var result;
-var parentFolder = path.dirname(sourceFilePath);
+let result;
+const parentFolder = path.dirname(sourceFilePath);
 if (!parentFolder || parentFolder === sourceFilePath) {
@@ -60,3 +60,3 @@ result = undefined;
 return result;
-};
+}
 /**
@@ -69,16 +69,15 @@ * Loads the package.json file and returns the name of the package.
 */
-PackageJsonLookup.prototype.getPackageName = function (packageJsonPath) {
-var result = this._packageNameCache.get(packageJsonPath);
+getPackageName(packageJsonPath) {
+let result = this._packageNameCache.get(packageJsonPath);
 if (result !== undefined) {
 return result;
 }
-var packageJson = JsonFile_1.JsonFile.load(path.join(packageJsonPath, 'package.json'));
+const packageJson = JsonFile_1.JsonFile.load(path.join(packageJsonPath, 'package.json'));
 result = packageJson.name;
 this._packageNameCache.set(packageJsonPath, result);
 return result;
-};
-return PackageJsonLookup;
-}());
+}
+}
 exports.PackageJsonLookup = PackageJsonLookup;
 //# sourceMappingURL=PackageJsonLookup.js.map
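
Finally, a sketch of the PackageJsonLookup API above, based only on the methods visible in this diff:

```js
const { PackageJsonLookup } = require('@microsoft/node-core-library/lib/PackageJsonLookup');

const lookup = new PackageJsonLookup();

// Walk upward from a file until a folder containing package.json is found
const packageFolder = lookup.tryGetPackageFolder(__filename);

if (packageFolder !== undefined) {
  // Reads (and caches) the "name" field from that folder's package.json
  console.log(lookup.getPackageName(packageFolder));
}
```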

@@ -6,6 +6,6 @@ "use strict";
 /// <reference types='mocha' />
-var chai_1 = require("chai");
-var path = require("path");
-var JsonFile_1 = require("../JsonFile");
-var JsonSchema_1 = require("../JsonSchema");
+const chai_1 = require("chai");
+const path = require("path");
+const JsonFile_1 = require("../JsonFile");
+const JsonSchema_1 = require("../JsonSchema");
 function normalize(text) {
@@ -15,17 +15,23 @@ return text.replace(/[\r\n ]+/g, ' ')
 }
-describe('JsonSchema', function () {
-var schemaPath = path.resolve(path.join(__dirname, './test-data/test-schema.json'));
-var schema = JsonSchema_1.JsonSchema.fromFile(schemaPath);
-it('loadAndValidate successfully validates a JSON file', function (done) {
-var jsonPath = path.resolve(path.join(__dirname, './test-data/test.json'));
-var jsonObject = JsonFile_1.JsonFile.loadAndValidate(jsonPath, schema);
+describe('JsonSchema', () => {
+const schemaPath = path.resolve(path.join(__dirname, './test-data/test-schema.json'));
+const schema = JsonSchema_1.JsonSchema.fromFile(schemaPath);
+it('loadAndValidate successfully validates a JSON file', (done) => {
+const jsonPath = path.resolve(path.join(__dirname, './test-data/test.json'));
+const jsonObject = JsonFile_1.JsonFile.loadAndValidate(jsonPath, schema);
 chai_1.assert.isObject(jsonObject);
 done();
 });
-it('validateObjectWithCallback successfully reports a compound validation error', function (done) {
-var jsonPath2 = path.resolve(path.join(__dirname, './test-data/test2.json'));
-var jsonObject2 = JsonFile_1.JsonFile.load(jsonPath2);
-var expectedError = "\nError: #/exampleOneOf (Description for exampleOneOf - this i...)\n Data does not match any schemas from 'oneOf'\nError: #/exampleOneOf (Description for type1)\n Additional properties not allowed: field2\nError: #/exampleOneOf (Description for type2)\n Missing required property: field3";
-var errorCount = 0;
-schema.validateObjectWithCallback(jsonObject2, function (errorInfo) {
+it('validateObjectWithCallback successfully reports a compound validation error', (done) => {
+const jsonPath2 = path.resolve(path.join(__dirname, './test-data/test2.json'));
+const jsonObject2 = JsonFile_1.JsonFile.load(jsonPath2);
+const expectedError = `
+Error: #/exampleOneOf (Description for exampleOneOf - this i...)
+ Data does not match any schemas from 'oneOf'
+Error: #/exampleOneOf (Description for type1)
+ Additional properties not allowed: field2
+Error: #/exampleOneOf (Description for type2)
+ Missing required property: field3`;
+let errorCount = 0;
+schema.validateObjectWithCallback(jsonObject2, (errorInfo) => {
 ++errorCount;
@@ -32,0 +38,0 @@ console.log(errorInfo.details);

@@ -7,17 +7,17 @@ "use strict";
 /* tslint:disable:no-function-expression - Mocha uses a poorly scoped "this" pointer */
-var chai_1 = require("chai");
-var path = require("path");
-var PackageJsonLookup_1 = require("../PackageJsonLookup");
+const chai_1 = require("chai");
+const path = require("path");
+const PackageJsonLookup_1 = require("../PackageJsonLookup");
 describe('PackageJsonLookup', function () {
 describe('basic tests', function () {
 it('getPackageName() test', function () {
-var packageJsonLookup = new PackageJsonLookup_1.PackageJsonLookup();
-var sourceFilePath = path.join(__dirname, './test-data/example-package');
+const packageJsonLookup = new PackageJsonLookup_1.PackageJsonLookup();
+const sourceFilePath = path.join(__dirname, './test-data/example-package');
 chai_1.assert.equal(packageJsonLookup.getPackageName(sourceFilePath), 'example-package');
 });
 it('tryGetPackageFolder() test', function () {
-var packageJsonLookup = new PackageJsonLookup_1.PackageJsonLookup();
-var sourceFilePath = path.join(__dirname, './test-data/example-package/src/ExampleFile.txt');
+const packageJsonLookup = new PackageJsonLookup_1.PackageJsonLookup();
+const sourceFilePath = path.join(__dirname, './test-data/example-package/src/ExampleFile.txt');
 // Example: C:\web-build-tools\libraries\node-core-library\src\test\example-package
-var foundPath = packageJsonLookup.tryGetPackageFolder(sourceFilePath);
+const foundPath = packageJsonLookup.tryGetPackageFolder(sourceFilePath);
 chai_1.assert.isTrue(foundPath && foundPath.search(/[\\/]example-package$/i) >= 0, 'Unexpected result: ' + foundPath);
@@ -24,0 +24,0 @@ });

package.json

 {
 "name": "@microsoft/node-core-library",
-"version": "0.2.11",
+"version": "0.3.0",
 "description": "Core libraries that every NodeJS toolchain project should use",
@@ -15,3 +15,3 @@ "main": "lib/index.js",
 "@types/fs-extra": "0.0.37",
-"@types/node": "6.0.62",
+"@types/node": "6.0.88",
 "@types/z-schema": "3.16.31",
@@ -18,0 +18,0 @@ "fs-extra": "~0.26.7",

