folder-hash
Advanced tools
| # Javascript Node CircleCI 2.0 configuration file | ||
| # | ||
| # Check https://circleci.com/docs/2.0/language-javascript/ for more details | ||
| # | ||
| version: 2 | ||
| jobs: | ||
| build: | ||
| docker: | ||
| # specify the version you desire here | ||
| - image: circleci/node:7.10 | ||
| # Specify service dependencies here if necessary | ||
| # CircleCI maintains a library of pre-built images | ||
| # documented at https://circleci.com/docs/2.0/circleci-images/ | ||
| # - image: circleci/mongo:3.4.4 | ||
| working_directory: ~/repo | ||
| steps: | ||
| - checkout | ||
| # Download and cache dependencies | ||
| - restore_cache: | ||
| keys: | ||
| - v1-dependencies-{{ checksum "package.json" }} | ||
| # fallback to using the latest cache if no exact match is found | ||
| - v1-dependencies- | ||
| - run: yarn install | ||
| - save_cache: | ||
| paths: | ||
| - node_modules | ||
| key: v1-dependencies-{{ checksum "package.json" }} | ||
| # run tests! | ||
| - run: yarn test | ||
// execute from the base folder
// node examples\readme-example1.js
const { hashElement } = require('../index.js');

// Basename-matching filter set (default matching behaviour).
const options = {
  folders: { exclude: ['.*', 'node_modules', 'test_coverage'] },
  files: { include: ['*.js', '*.json'] }
};

// The same filters expressed as full-path matches instead of basenames.
const options2 = {
  folders: {
    exclude: ['.*', '**.*', '**node_modules', '**test_coverage'],
    matchBasename: false, matchPath: true
  },
  files: {
    //include: ['**.js', '**.json' ], // Windows
    include: ['*.js', '**/*.js', '*.json', '**/*.json'], // *nix
    matchBasename: false, matchPath: true
  }
};

// Named handlers for the promise chain below.
const printHash = hash => {
  console.log(hash.toString(), '\n');
};
const printError = error => {
  return console.error('hashing failed:', error);
};

console.log('Creating a hash over the current folder:');
hashElement('.', options).then(printHash).catch(printError);
/**
 * This file tests the parameters
 */
const folderHash = require('../index');
const assert = require('assert');
const chai = require('chai');
const chaiAsPromised = require('chai-as-promised');

chai.use(chaiAsPromised);
const should = chai.should();

describe('Initialization', () => {
  // Shared assertion for the error raised when no element name is supplied.
  const checkError = (err) => {
    err.name.should.equal('TypeError');
    err.message.should.equal('First argument must be a string');
  };

  it('should reject if no name was passed', () => {
    return folderHash.hashElement()
      .then((result) => { throw new Error(result); })
      .catch(checkError);
  });

  it('should call an error callback if no name was passed', () => {
    return folderHash.hashElement((err) => {
      should.exist(err);
      checkError(err);
    });
  });
});

describe('Parse parameters', () => {
  it('should not change the supplied options object', () => {
    const params = {
      algo: 'some',
      files: { exclude: ['abc', 'def'], include: [] },
      folders: { exclude: [], include: ['abc', 'def'] },
      match: { basename: false, path: 'true' }
    };
    // Snapshot the input so any mutation by parseParameters is detected.
    const snapshot = JSON.stringify(params);
    return folderHash.parseParameters('abc', params)
      .then(() => JSON.stringify(params).should.equal(snapshot));
  });

  it('should parse an empty exclude array to undefined', () => {
    const params = {
      algo: 'some', files: { exclude: [] },
      match: { basename: false, path: 'true' }
    };
    return folderHash.parseParameters('abc', params)
      .then((parsed) => {
        should.exist(parsed.options.files);
        should.equal(parsed.options.files.exclude, undefined);
      });
  });

  it('should default excludes to undefined', () => {
    return folderHash.parseParameters('abc', { files: undefined })
      .then((parsed) => {
        should.exist(parsed.options.folders);
        should.equal(parsed.options.folders.exclude, undefined);
      });
  });
});
+393
| const { Volume } = require('memfs'), | ||
| path = require('path'), | ||
| assert = require('assert'), | ||
| chai = require('chai'), | ||
| chaiAsPromised = require('chai-as-promised'), | ||
| should = chai.should(), | ||
| inspect = obj => console.log(require('util').inspect(obj, false, null)); | ||
| chai.use(chaiAsPromised); | ||
| const folderHash = require('../index'), | ||
| prep = volume => folderHash.prep(volume, Promise); | ||
describe('Should generate hashes', () => {
  // One file inside one folder, served from an in-memory volume.
  const dir = 'folder';
  const basename = 'file1';
  const json = {};
  json[path.join(dir, basename)] = 'file content';
  const hashElement = prep(Volume.fromJSON(json));

  // Every variant below must produce this exact hash for the same file.
  const checkHash = (result) => {
    should.exist(result);
    should.exist(result.hash);
    result.hash.should.equal('11OqJSEmDW280Sst6dycitwlfCI=');
  };

  describe('when called as a promise', () => {
    it('with element and folder passed as two strings', () => {
      return hashElement(basename, dir).then(checkHash);
    });

    it('with element path passed as one string', () => {
      return hashElement(path.join(dir, basename)).then(checkHash);
    });

    it('with options passed', () => {
      const options = {
        algo: 'sha1',
        encoding: 'base64',
        excludes: [],
        match: { basename: false, path: false }
      };
      return hashElement(basename, dir, options).then(checkHash);
    });
  });

  describe('when executed with an error-first callback', () => {
    it('with element and folder passed as two strings', () => {
      return hashElement(basename, dir, (err, result) => {
        should.not.exist(err);
        checkHash(result);
      });
    });

    it('with element path passed as one string', () => {
      return hashElement(path.join(dir, basename), (err, result) => {
        should.not.exist(err);
        checkHash(result);
      });
    });

    it('with options passed', () => {
      const options = {
        algo: 'sha1',
        encoding: 'base64',
        excludes: [],
        match: { basename: false, path: false }
      };
      return hashElement(path.join(dir, basename), options, (err, result) => {
        should.not.exist(err);
        checkHash(result);
      });
    });
  });

  describe('and', () => {
    it('should return a string representation', () => {
      const fs = Volume.fromJSON({ 'folder/file.txt': 'content' });
      fs.mkdirSync('folder/empty_folder');
      return prep(fs)('folder').then((hash) => {
        should.exist(hash);
        const str = hash.toString();
        should.exist(str);
        should.equal(str.length > 10, true);
      });
    });
  });
});
describe('Generating hashes over files, it', function () {
  it('should return the same hash if a file was not changed', function () {
    // BUGFIX: was path.join('folder, file') — a single argument containing a
    // literal comma — so the writeFileSync below rewrote a non-existent path
    // ('folder, file') instead of the file actually being hashed.
    const file = path.join('folder', 'file');
    const fs = Volume.fromJSON({ 'file': 'content' }, 'folder');
    const hash = prep(fs);
    return hash('file', 'folder').then(hash1 => {
      // Rewrite the identical content; the resulting hash must not change.
      fs.writeFileSync(file, 'content');
      return hash('file', 'folder').then(result => {
        result.hash.should.equal(hash1.hash);
      });
    });
  });
  it('should return the same hash if a file has the same name and content, but exists in a different folder', function () {
    // Same basename, same content, different parent folders.
    const json = {};
    json[path.join('folder one', 'file.txt')] = 'not empty';
    json[path.join('another folder', 'file.txt')] = 'not empty';
    const hash = prep(Volume.fromJSON(json));
    return Promise.all([
      hash(path.join('folder one', 'file.txt')),
      hash(path.join('another folder', 'file.txt'))
    ])
      .then(results => results[0].hash.should.equal(results[1].hash));
  });
  it('should return a different hash if the file has the same name but a different content', function () {
    const json = {};
    json[path.join('folder1', 'file.txt')] = '1st file';
    json[path.join('folder2', 'file.txt')] = '2nd file';
    const hash = prep(Volume.fromJSON(json));
    return Promise.all([
      hash('file.txt', 'folder1'),
      hash('file.txt', 'folder2')
    ])
      .then(results => results[0].hash.should.not.equal(results[1].hash));
  });
  it('should return a different hash if the file has the same content but a different name', function () {
    // The file name participates in the hash, so these must differ.
    const hash = prep(Volume.fromJSON({ 'one': 'content', 'two': 'content' }));
    return Promise.all([hash('one'), hash('two')])
      .then(results => {
        return results[0].hash.should.not.equal(results[1].hash);
      });
  });
});
describe('Generating a hash over a folder, it', () => {
  // Recursively asserts that every node of a result tree carries a hash.
  const recAssertHash = (hash) => {
    assert.ok(hash.hash);
    if (hash.children && hash.children.length > 0) {
      hash.children.forEach(recAssertHash);
    }
  };

  it('generates a hash over the folder name and over the combination hashes of all its children', () => {
    const hashElement = prep(Volume.fromJSON({
      'abc/def': 'abc/def',
      'abc/ghi/jkl/file.js': 'content',
      'abc/ghi/jkl/file2.js': 'content',
      'abc/ghi/folder/data.json': 'content',
      'abc/ghi/folder/subfolder/today.log': 'content'
    }));
    const checkChildren = (node) => {
      should.exist(node.hash);
      if (node.children && node.children.length > 0) {
        node.children.forEach(checkChildren);
      }
    };
    return hashElement('abc').then(checkChildren);
  });

  it('generates different hashes if the folders have the same content but different names', () => {
    const hashElement = prep(Volume.fromJSON({
      'folder1/file1': 'content',
      '2nd folder/file1': 'content'
    }));
    return Promise.all([
      hashElement('folder1'),
      hashElement('2nd folder')
    ]).then(([first, second]) => {
      should.exist(first.hash);
      first.hash.should.not.equal(second.hash);
      // The identically-named child files must still hash the same.
      should.exist(first.children[0].hash);
      first.children[0].hash.should.equal(second.children[0].hash);
    });
  });

  it('generates different hashes if the folders have the same name but different content (one file content changed)', () => {
    const hashElement = prep(Volume.fromJSON({
      'folder1/folder2/file1': 'content',
      '2nd folder/file1': 'content'
    }));
    return Promise.all([
      hashElement('folder1'),
      hashElement('2nd folder')
    ]).then(([first, second]) => {
      should.exist(first.hash);
      first.hash.should.not.equal(second.hash);
    });
  });

  it('generates the same hash if the folders have the same name and the same content', () => {
    const hashElement = prep(Volume.fromJSON({
      'first/file1': 'content',
      'first/folder/file2': 'abc',
      'first/folder/file3': 'abcd',
      '2nd/folder/first/file1': 'content',
      '2nd/folder/first/folder/file2': 'abc',
      '2nd/folder/first/folder/file3': 'abcd'
    }));
    return Promise.all([
      hashElement('first'),
      hashElement('first', path.join('2nd', 'folder'))
    ]).then(([first, second]) => {
      should.exist(first.hash);
      first.hash.should.equal(second.hash);
    });
  });

  it('generates the same hash if the only file with different content is ignored', () => {
    const hashElement = prep(Volume.fromJSON({
      'base/file1': 'content',
      'base/folder/file2': 'abc',
      'base/folder/file3': 'abcd',
      '2nd/base/file1': 'content',
      '2nd/base/folder/file2': 'another content',
      '2nd/base/folder/file3': 'abcd',
      '3rd/base/file1': 'content',
      '3rd/base/dummy': '',
      '3rd/base/folder/file3': 'abcd'
    }));
    // Three different exclusion styles that should all converge on one hash.
    return Promise.all([
      hashElement('base', {
        files: {
          exclude: ['**/file2', '**file2'], matchBasename: false, matchPath: true
        }
      }),
      hashElement(path.join('2nd', 'base'), {
        files: {
          exclude: ['file2'], matchBasename: true, matchPath: false
        }
      }),
      hashElement('base', '3rd', {
        files: {
          exclude: ['dummy'], matchBasename: true, matchPath: false
        }
      })
    ]).then(([a, b, c]) => {
      should.exist(a.hash);
      a.hash.should.equal(b.hash);
      b.hash.should.equal(c.hash);
    });
  });

  it('generates the same hash if all differences are ignored', () => {
    const hashElement = prep(Volume.fromJSON({
      'base/file1': 'content',
      'base/.gitignore': 'empty',
      'base/folder/file2': '2',
      '2nd/base/file1': 'content',
      '2nd/base/folder/file2': '2',
      '2nd/base/folder/.git/one': '1',
      '3rd/base/file1': 'content',
      '3rd/base/folder/file2': '2',
      '3rd/base/folder/.hidden': 'hidden',
      '3rd/base/.hidden/file': 'hidden'
    }));
    return Promise.all([
      hashElement('base', {
        files: {
          exclude: ['**/.*', '**\.*'],
          matchBasename: false, matchPath: true
        }
      }),
      hashElement(path.join('2nd', 'base'), {
        folders: {
          exclude: ['**\/.*', '**\.*'],
          matchBasename: false, matchPath: true
        }
      }),
      hashElement('base', '3rd', {
        files: { exclude: ['.*'] },
        folders: { exclude: ['.*'] }
      })
    ]).then(([a, b, c]) => {
      should.exist(a.hash);
      a.hash.should.equal(b.hash);
      b.hash.should.equal(c.hash);
    });
  });

  it('ignores a folder it is both included and excluded', () => {
    const hashElement = prep(Volume.fromJSON({
      'base/file1': 'content',
      'base/folder/file2': '2',
      'base/folder2/file3': '3'
    }));
    // Exclusion wins over inclusion: 'folder' must be absent from the result.
    return hashElement('base', {
      folders: {
        exclude: ['**/folder', '**folder'], include: ['*'],
        matchBasename: false, matchPath: true
      }
    })
      .then((result) => {
        should.exist(result.hash);
        should.exist(result.children);
        result.children.length.should.equal(2);
        result.children[0].name.should.equal('file1');
        result.children[1].name.should.equal('folder2');
      });
  });

  it('only includes the wanted folders', () => {
    const hashElement = prep(Volume.fromJSON({
      'abc/file': 'content',
      'def/file': 'content',
      'abc2/file': 'content',
      'abc3/file': 'content'
    }));
    return Promise.all([
      hashElement('./', {
        folders: {
          include: ['abc*'], matchBasename: true, matchPath: false
        }
      }),
      hashElement('./', {
        folders: {
          include: ['**abc*'], matchBasename: false, matchPath: true
        }
      })
    ]).then(([byBasename, byPath]) => {
      should.exist(byBasename.children);
      byBasename.children.length.should.equal(3);
      byBasename.hash.should.equal(byPath.hash);
    });
  });

  it('only includes the wanted files', () => {
    const hashElement = prep(Volume.fromJSON({
      'file1.js': 'file1',
      'file1.abc.js': 'content',
      'file1.js.ext': 'ignore',
      'def/file1.js': 'content',
      'def/file1.json': 'ignore'
    }));
    return Promise.all([
      hashElement('./', {
        files: {
          include: ['*.js'], matchBasename: true, matchPath: false
        }
      }),
      hashElement('./', {
        files: {
          include: ['**/*.js', '**.js'],
          matchBasename: false, matchPath: true
        }
      })
    ]).then(([byBasename, byPath]) => {
      //console.log([byBasename, byPath].map(r => r.toString()).join('\n'));
      should.exist(byBasename.children);
      byBasename.children.length.should.equal(3);
      byBasename.hash.should.equal(byPath.hash);
    });
  });
});
describe('extra', () => {
  it('check include', () => {
    const hashElement = prep(Volume.fromJSON({
      'one/abc/aa.js': 'aa',
      'one/def/aa.js': 'aa',
      'one/abc/abc/aa.js': 'aa'
    }));
    // Two include variants over the same tree; only the first hash is checked.
    const variants = [
      hashElement('one', { folders: { include: ['**\abc'] } }),
      hashElement('one', { folders: { include: ['abc'], matchBasename: true } })
    ];
    return Promise.all(variants).then((results) => {
      //console.log(results.map(r => r.toString()).join('\n'));
      should.exist(results[0].hash);
    });
  });
});
+563
| # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. | ||
| # yarn lockfile v1 | ||
| abbrev@1: | ||
| version "1.1.1" | ||
| resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" | ||
| abbrev@1.0.x: | ||
| version "1.0.9" | ||
| resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" | ||
| align-text@^0.1.1, align-text@^0.1.3: | ||
| version "0.1.4" | ||
| resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" | ||
| dependencies: | ||
| kind-of "^3.0.2" | ||
| longest "^1.0.1" | ||
| repeat-string "^1.5.2" | ||
| amdefine@>=0.0.4: | ||
| version "1.0.1" | ||
| resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" | ||
| argparse@^1.0.7: | ||
| version "1.0.10" | ||
| resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" | ||
| dependencies: | ||
| sprintf-js "~1.0.2" | ||
| assertion-error@^1.0.1: | ||
| version "1.1.0" | ||
| resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" | ||
| async@1.x, async@^1.4.0: | ||
| version "1.5.2" | ||
| resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" | ||
| babylon@7.0.0-beta.19: | ||
| version "7.0.0-beta.19" | ||
| resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.19.tgz#e928c7e807e970e0536b078ab3e0c48f9e052503" | ||
| balanced-match@^1.0.0: | ||
| version "1.0.0" | ||
| resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" | ||
| bluebird@~3.5.0: | ||
| version "3.5.1" | ||
| resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.1.tgz#d9551f9de98f1fcda1e683d17ee91a0602ee2eb9" | ||
| brace-expansion@^1.1.7: | ||
| version "1.1.11" | ||
| resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" | ||
| dependencies: | ||
| balanced-match "^1.0.0" | ||
| concat-map "0.0.1" | ||
| browser-stdout@1.3.1: | ||
| version "1.3.1" | ||
| resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" | ||
| camelcase@^1.0.2: | ||
| version "1.2.1" | ||
| resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" | ||
| catharsis@~0.8.9: | ||
| version "0.8.9" | ||
| resolved "https://registry.yarnpkg.com/catharsis/-/catharsis-0.8.9.tgz#98cc890ca652dd2ef0e70b37925310ff9e90fc8b" | ||
| dependencies: | ||
| underscore-contrib "~0.3.0" | ||
| center-align@^0.1.1: | ||
| version "0.1.3" | ||
| resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" | ||
| dependencies: | ||
| align-text "^0.1.3" | ||
| lazy-cache "^1.0.3" | ||
| chai-as-promised@^7.1.1: | ||
| version "7.1.1" | ||
| resolved "https://registry.yarnpkg.com/chai-as-promised/-/chai-as-promised-7.1.1.tgz#08645d825deb8696ee61725dbf590c012eb00ca0" | ||
| dependencies: | ||
| check-error "^1.0.2" | ||
| chai@^4.0.2: | ||
| version "4.1.2" | ||
| resolved "https://registry.yarnpkg.com/chai/-/chai-4.1.2.tgz#0f64584ba642f0f2ace2806279f4f06ca23ad73c" | ||
| dependencies: | ||
| assertion-error "^1.0.1" | ||
| check-error "^1.0.1" | ||
| deep-eql "^3.0.0" | ||
| get-func-name "^2.0.0" | ||
| pathval "^1.0.0" | ||
| type-detect "^4.0.0" | ||
| check-error@^1.0.1, check-error@^1.0.2: | ||
| version "1.0.2" | ||
| resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" | ||
| cliui@^2.1.0: | ||
| version "2.1.0" | ||
| resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" | ||
| dependencies: | ||
| center-align "^0.1.1" | ||
| right-align "^0.1.1" | ||
| wordwrap "0.0.2" | ||
| commander@2.11.0: | ||
| version "2.11.0" | ||
| resolved "https://registry.yarnpkg.com/commander/-/commander-2.11.0.tgz#157152fd1e7a6c8d98a5b715cf376df928004563" | ||
| concat-map@0.0.1: | ||
| version "0.0.1" | ||
| resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" | ||
| debug@3.1.0, debug@^3.1.0: | ||
| version "3.1.0" | ||
| resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" | ||
| dependencies: | ||
| ms "2.0.0" | ||
| decamelize@^1.0.0: | ||
| version "1.2.0" | ||
| resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" | ||
| deep-eql@^3.0.0: | ||
| version "3.0.1" | ||
| resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df" | ||
| dependencies: | ||
| type-detect "^4.0.0" | ||
| deep-is@~0.1.3: | ||
| version "0.1.3" | ||
| resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" | ||
| diff@3.5.0: | ||
| version "3.5.0" | ||
| resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" | ||
| escape-string-regexp@1.0.5, escape-string-regexp@~1.0.5: | ||
| version "1.0.5" | ||
| resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" | ||
| escodegen@1.8.x: | ||
| version "1.8.1" | ||
| resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" | ||
| dependencies: | ||
| esprima "^2.7.1" | ||
| estraverse "^1.9.1" | ||
| esutils "^2.0.2" | ||
| optionator "^0.8.1" | ||
| optionalDependencies: | ||
| source-map "~0.2.0" | ||
| esprima@2.7.x, esprima@^2.7.1: | ||
| version "2.7.3" | ||
| resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" | ||
| esprima@^4.0.0: | ||
| version "4.0.0" | ||
| resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" | ||
| estraverse@^1.9.1: | ||
| version "1.9.3" | ||
| resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" | ||
| esutils@^2.0.2: | ||
| version "2.0.2" | ||
| resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" | ||
| fast-extend@0.0.2: | ||
| version "0.0.2" | ||
| resolved "https://registry.yarnpkg.com/fast-extend/-/fast-extend-0.0.2.tgz#f5ec42cf40b9460f521a6387dfb52deeed671dbd" | ||
| fast-levenshtein@~2.0.4: | ||
| version "2.0.6" | ||
| resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" | ||
| fs-monkey@^0.3.0: | ||
| version "0.3.1" | ||
| resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-0.3.1.tgz#69edd8420e04da04d4d3ea200da1ccdc444eecd0" | ||
| fs.realpath@^1.0.0: | ||
| version "1.0.0" | ||
| resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" | ||
| get-func-name@^2.0.0: | ||
| version "2.0.0" | ||
| resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" | ||
| glob@7.1.2: | ||
| version "7.1.2" | ||
| resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" | ||
| dependencies: | ||
| fs.realpath "^1.0.0" | ||
| inflight "^1.0.4" | ||
| inherits "2" | ||
| minimatch "^3.0.4" | ||
| once "^1.3.0" | ||
| path-is-absolute "^1.0.0" | ||
| glob@^5.0.15: | ||
| version "5.0.15" | ||
| resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" | ||
| dependencies: | ||
| inflight "^1.0.4" | ||
| inherits "2" | ||
| minimatch "2 || 3" | ||
| once "^1.3.0" | ||
| path-is-absolute "^1.0.0" | ||
| graceful-fs@^4.1.9, graceful-fs@~4.1.11: | ||
| version "4.1.11" | ||
| resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" | ||
| growl@1.10.3: | ||
| version "1.10.3" | ||
| resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.3.tgz#1926ba90cf3edfe2adb4927f5880bc22c66c790f" | ||
| handlebars@^4.0.1: | ||
| version "4.0.11" | ||
| resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.11.tgz#630a35dfe0294bc281edae6ffc5d329fc7982dcc" | ||
| dependencies: | ||
| async "^1.4.0" | ||
| optimist "^0.6.1" | ||
| source-map "^0.4.4" | ||
| optionalDependencies: | ||
| uglify-js "^2.6" | ||
| has-flag@^1.0.0: | ||
| version "1.0.0" | ||
| resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" | ||
| has-flag@^2.0.0: | ||
| version "2.0.0" | ||
| resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" | ||
| he@1.1.1: | ||
| version "1.1.1" | ||
| resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd" | ||
| inflight@^1.0.4: | ||
| version "1.0.6" | ||
| resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" | ||
| dependencies: | ||
| once "^1.3.0" | ||
| wrappy "1" | ||
| inherits@2: | ||
| version "2.0.3" | ||
| resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" | ||
| is-buffer@^1.1.5: | ||
| version "1.1.6" | ||
| resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" | ||
| isexe@^2.0.0: | ||
| version "2.0.0" | ||
| resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" | ||
| istanbul@^0.4.5: | ||
| version "0.4.5" | ||
| resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" | ||
| dependencies: | ||
| abbrev "1.0.x" | ||
| async "1.x" | ||
| escodegen "1.8.x" | ||
| esprima "2.7.x" | ||
| glob "^5.0.15" | ||
| handlebars "^4.0.1" | ||
| js-yaml "3.x" | ||
| mkdirp "0.5.x" | ||
| nopt "3.x" | ||
| once "1.x" | ||
| resolve "1.1.x" | ||
| supports-color "^3.1.0" | ||
| which "^1.1.1" | ||
| wordwrap "^1.0.0" | ||
| js-yaml@3.x: | ||
| version "3.11.0" | ||
| resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.11.0.tgz#597c1a8bd57152f26d622ce4117851a51f5ebaef" | ||
| dependencies: | ||
| argparse "^1.0.7" | ||
| esprima "^4.0.0" | ||
| js2xmlparser@~3.0.0: | ||
| version "3.0.0" | ||
| resolved "https://registry.yarnpkg.com/js2xmlparser/-/js2xmlparser-3.0.0.tgz#3fb60eaa089c5440f9319f51760ccd07e2499733" | ||
| dependencies: | ||
| xmlcreate "^1.0.1" | ||
| jsdoc@^3.4.3: | ||
| version "3.5.5" | ||
| resolved "https://registry.yarnpkg.com/jsdoc/-/jsdoc-3.5.5.tgz#484521b126e81904d632ff83ec9aaa096708fa4d" | ||
| dependencies: | ||
| babylon "7.0.0-beta.19" | ||
| bluebird "~3.5.0" | ||
| catharsis "~0.8.9" | ||
| escape-string-regexp "~1.0.5" | ||
| js2xmlparser "~3.0.0" | ||
| klaw "~2.0.0" | ||
| marked "~0.3.6" | ||
| mkdirp "~0.5.1" | ||
| requizzle "~0.2.1" | ||
| strip-json-comments "~2.0.1" | ||
| taffydb "2.6.2" | ||
| underscore "~1.8.3" | ||
| kind-of@^3.0.2: | ||
| version "3.2.2" | ||
| resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" | ||
| dependencies: | ||
| is-buffer "^1.1.5" | ||
| klaw@~2.0.0: | ||
| version "2.0.0" | ||
| resolved "https://registry.yarnpkg.com/klaw/-/klaw-2.0.0.tgz#59c128e0dc5ce410201151194eeb9cbf858650f6" | ||
| dependencies: | ||
| graceful-fs "^4.1.9" | ||
| lazy-cache@^1.0.3: | ||
| version "1.0.4" | ||
| resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" | ||
| levn@~0.3.0: | ||
| version "0.3.0" | ||
| resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" | ||
| dependencies: | ||
| prelude-ls "~1.1.2" | ||
| type-check "~0.3.2" | ||
| longest@^1.0.1: | ||
| version "1.0.1" | ||
| resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" | ||
| marked@~0.3.6: | ||
| version "0.3.19" | ||
| resolved "https://registry.yarnpkg.com/marked/-/marked-0.3.19.tgz#5d47f709c4c9fc3c216b6d46127280f40b39d790" | ||
| memfs@^2.8.0: | ||
| version "2.8.0" | ||
| resolved "https://registry.yarnpkg.com/memfs/-/memfs-2.8.0.tgz#a64887d6b540084be5847eb1bd3b5ac8f759c114" | ||
| dependencies: | ||
| fast-extend "0.0.2" | ||
| fs-monkey "^0.3.0" | ||
| "minimatch@2 || 3", minimatch@^3.0.4, minimatch@~3.0.4: | ||
| version "3.0.4" | ||
| resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" | ||
| dependencies: | ||
| brace-expansion "^1.1.7" | ||
| minimist@0.0.8: | ||
| version "0.0.8" | ||
| resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" | ||
| minimist@~0.0.1: | ||
| version "0.0.10" | ||
| resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" | ||
| mkdirp@0.5.1, mkdirp@0.5.x, mkdirp@~0.5.1: | ||
| version "0.5.1" | ||
| resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" | ||
| dependencies: | ||
| minimist "0.0.8" | ||
| mocha@^5.0.5: | ||
| version "5.0.5" | ||
| resolved "https://registry.yarnpkg.com/mocha/-/mocha-5.0.5.tgz#e228e3386b9387a4710007a641f127b00be44b52" | ||
| dependencies: | ||
| browser-stdout "1.3.1" | ||
| commander "2.11.0" | ||
| debug "3.1.0" | ||
| diff "3.5.0" | ||
| escape-string-regexp "1.0.5" | ||
| glob "7.1.2" | ||
| growl "1.10.3" | ||
| he "1.1.1" | ||
| mkdirp "0.5.1" | ||
| supports-color "4.4.0" | ||
| ms@2.0.0: | ||
| version "2.0.0" | ||
| resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" | ||
| nopt@3.x: | ||
| version "3.0.6" | ||
| resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" | ||
| dependencies: | ||
| abbrev "1" | ||
| once@1.x, once@^1.3.0: | ||
| version "1.4.0" | ||
| resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" | ||
| dependencies: | ||
| wrappy "1" | ||
| optimist@^0.6.1: | ||
| version "0.6.1" | ||
| resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" | ||
| dependencies: | ||
| minimist "~0.0.1" | ||
| wordwrap "~0.0.2" | ||
| optionator@^0.8.1: | ||
| version "0.8.2" | ||
| resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" | ||
| dependencies: | ||
| deep-is "~0.1.3" | ||
| fast-levenshtein "~2.0.4" | ||
| levn "~0.3.0" | ||
| prelude-ls "~1.1.2" | ||
| type-check "~0.3.2" | ||
| wordwrap "~1.0.0" | ||
| path-is-absolute@^1.0.0: | ||
| version "1.0.1" | ||
| resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" | ||
| pathval@^1.0.0: | ||
| version "1.1.0" | ||
| resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0" | ||
| prelude-ls@~1.1.2: | ||
| version "1.1.2" | ||
| resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" | ||
| repeat-string@^1.5.2: | ||
| version "1.6.1" | ||
| resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" | ||
| requizzle@~0.2.1: | ||
| version "0.2.1" | ||
| resolved "https://registry.yarnpkg.com/requizzle/-/requizzle-0.2.1.tgz#6943c3530c4d9a7e46f1cddd51c158fc670cdbde" | ||
| dependencies: | ||
| underscore "~1.6.0" | ||
| resolve@1.1.x: | ||
| version "1.1.7" | ||
| resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" | ||
| right-align@^0.1.1: | ||
| version "0.1.3" | ||
| resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" | ||
| dependencies: | ||
| align-text "^0.1.1" | ||
| source-map@^0.4.4: | ||
| version "0.4.4" | ||
| resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" | ||
| dependencies: | ||
| amdefine ">=0.0.4" | ||
| source-map@~0.2.0: | ||
| version "0.2.0" | ||
| resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" | ||
| dependencies: | ||
| amdefine ">=0.0.4" | ||
| source-map@~0.5.1: | ||
| version "0.5.7" | ||
| resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" | ||
| sprintf-js@~1.0.2: | ||
| version "1.0.3" | ||
| resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" | ||
| strip-json-comments@~2.0.1: | ||
| version "2.0.1" | ||
| resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" | ||
| supports-color@4.4.0: | ||
| version "4.4.0" | ||
| resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.4.0.tgz#883f7ddabc165142b2a61427f3352ded195d1a3e" | ||
| dependencies: | ||
| has-flag "^2.0.0" | ||
| supports-color@^3.1.0: | ||
| version "3.2.3" | ||
| resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" | ||
| dependencies: | ||
| has-flag "^1.0.0" | ||
| taffydb@2.6.2: | ||
| version "2.6.2" | ||
| resolved "https://registry.yarnpkg.com/taffydb/-/taffydb-2.6.2.tgz#7cbcb64b5a141b6a2efc2c5d2c67b4e150b2a268" | ||
| type-check@~0.3.2: | ||
| version "0.3.2" | ||
| resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" | ||
| dependencies: | ||
| prelude-ls "~1.1.2" | ||
| type-detect@^4.0.0: | ||
| version "4.0.8" | ||
| resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" | ||
| uglify-js@^2.6: | ||
| version "2.8.29" | ||
| resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" | ||
| dependencies: | ||
| source-map "~0.5.1" | ||
| yargs "~3.10.0" | ||
| optionalDependencies: | ||
| uglify-to-browserify "~1.0.0" | ||
| uglify-to-browserify@~1.0.0: | ||
| version "1.0.2" | ||
| resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" | ||
| underscore-contrib@~0.3.0: | ||
| version "0.3.0" | ||
| resolved "https://registry.yarnpkg.com/underscore-contrib/-/underscore-contrib-0.3.0.tgz#665b66c24783f8fa2b18c9f8cbb0e2c7d48c26c7" | ||
| dependencies: | ||
| underscore "1.6.0" | ||
| underscore@1.6.0, underscore@~1.6.0: | ||
| version "1.6.0" | ||
| resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8" | ||
| underscore@~1.8.3: | ||
| version "1.8.3" | ||
| resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#4f3fb53b106e6097fcf9cb4109f2a5e9bdfa5022" | ||
| which@^1.1.1: | ||
| version "1.3.0" | ||
| resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a" | ||
| dependencies: | ||
| isexe "^2.0.0" | ||
| window-size@0.1.0: | ||
| version "0.1.0" | ||
| resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" | ||
| wordwrap@0.0.2: | ||
| version "0.0.2" | ||
| resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" | ||
| wordwrap@^1.0.0, wordwrap@~1.0.0: | ||
| version "1.0.0" | ||
| resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" | ||
| wordwrap@~0.0.2: | ||
| version "0.0.3" | ||
| resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" | ||
| wrappy@1: | ||
| version "1.0.2" | ||
| resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" | ||
| xmlcreate@^1.0.1: | ||
| version "1.0.2" | ||
| resolved "https://registry.yarnpkg.com/xmlcreate/-/xmlcreate-1.0.2.tgz#fa6bf762a60a413fb3dd8f4b03c5b269238d308f" | ||
| yargs@~3.10.0: | ||
| version "3.10.0" | ||
| resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" | ||
| dependencies: | ||
| camelcase "^1.0.2" | ||
| cliui "^2.1.0" | ||
| decamelize "^1.0.0" | ||
| window-size "0.1.0" |
| // execute from the base folder | ||
| // node examples\readme-with-callbacks.js | ||
| var hasher = require('../index.js'); | ||
| const path = require('path'); | ||
| const { hashElement } = require('../index.js'); | ||
| // pass element name and folder path separately | ||
| hasher.hashElement('node_modules', __dirname).then(function (hash) { | ||
| console.log('Result for folder "node_modules" in directory "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| hashElement('test', path.join(__dirname, '..'), (error, hash) => { | ||
| if (error) { | ||
| return console.error('hashing failed:', error); | ||
| } else { | ||
| console.log('Result for folder "../test":', hash.toString(), '\n'); | ||
| } | ||
| }); | ||
| // pass element path directly | ||
| hasher.hashElement(__dirname).then(function (hash) { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| hashElement(__dirname, (error, hash) => { | ||
| if (error) { | ||
| return console.error('hashing failed:', error); | ||
| } else { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString(), '\n'); | ||
| } | ||
| }); | ||
| // pass options (example: exclude dotFiles) | ||
| var options = { excludes: ['.*'], match: { basename: true, path: false } }; | ||
| hasher.hashElement(__dirname, options) | ||
| .then(function (hash) { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }) | ||
| .catch(function (error) { | ||
| return console.error('hashing failed:', error); | ||
| const options = { algo: 'md5', files: { exclude: ['.*'], matchBasename: true } }; | ||
| hashElement(__dirname, options, (error, hash) => { | ||
| if (error) { | ||
| return console.error('hashing failed:', error); | ||
| } else { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| } | ||
| }); |
| // execute from the base folder | ||
| // node examples\readme-with-promises.js | ||
| var hasher = require('../index.js'); | ||
| const path = require('path'); | ||
| const { hashElement } = require('../index.js'); | ||
| // pass element name and folder path separately | ||
| hasher.hashElement('node_modules', __dirname, function (error, hash) { | ||
| if (error) return console.error('hashing failed:', error); | ||
| console.log('Result for folder "node_modules" in directory "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| hashElement('test', path.join(__dirname, '..')) | ||
| .then(hash => { | ||
| console.log('Result for folder "../test":', hash.toString(), '\n'); | ||
| }) | ||
| .catch(error => { | ||
| return console.error('hashing failed:', error); | ||
| }); | ||
| // pass element path directly | ||
| hasher.hashElement(__dirname, function (error, hash) { | ||
| if (error) return console.error('hashing failed:', error); | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| hashElement(__dirname) | ||
| .then(hash => { | ||
| console.log(`Result for folder "${__dirname}":`); | ||
| console.log(hash.toString(), '\n'); | ||
| }) | ||
| .catch(error => { | ||
| return console.error('hashing failed:', error); | ||
| }); | ||
| // pass options (example: exclude dotFiles) | ||
| var options = { excludes: ['**/.*'], match: { basename: false, path: true } }; | ||
| hasher.hashElement(__dirname, options, function (error, hash) { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| // pass options (example: exclude dotFolders) | ||
| const options = { encoding: 'hex', folders: { exclude: ['.*'] } }; | ||
| hashElement(__dirname, options) | ||
| .then(hash => { | ||
| console.log('Result for folder "' + __dirname + '" (with options):'); | ||
| console.log(hash.toString(), '\n'); | ||
| }) | ||
| .catch(error => { | ||
| return console.error('hashing failed:', error); | ||
| }); |
+22
-20
| | ||
| var crypto = require('crypto'); | ||
| var path = require('path'); | ||
| const crypto = require('crypto'), | ||
| path = require('path'); | ||
| var hashFile = require('../index.js'); | ||
| const hashFolder = require('../index.js'); | ||
| console.log('Known hash algorithms: ', '\'' + crypto.getHashes().join('\', \'') + '\''); | ||
| console.log(`Known hash algorithms:\n'${crypto.getHashes().join(`', '`)}'\n`); | ||
| const dir = path.resolve(__dirname, '../'); | ||
| function checkPromise(promise) { | ||
| promise.then(function (result) { | ||
| console.log('Promise resolved:\n', result.toString(), '\n\n'); | ||
| hashFolder.hashElement('README.md', dir) | ||
| .then(result => { | ||
| console.log('\nCreated a hash over a single file:'); | ||
| console.log(result.toString()); | ||
| }) | ||
| .catch(function (reason) { | ||
| console.error('Promise rejected due to:\n', reason, '\n\n'); | ||
| .catch(reason => { | ||
| console.error(`\nPromise rejected due to:\n${reason}\n\n`); | ||
| }); | ||
| } | ||
| var file = 'README.md'; | ||
| var dir = path.resolve(__dirname, '../'); | ||
| console.log('\nCreate a hash over a single file:'); | ||
| checkPromise(hashFile.hashElement(file, dir)); | ||
| console.log('Create hash over a folder:'); | ||
| //checkPromise(hashFile.hashElement(path.basename(dir), path.dirname(dir))); | ||
| checkPromise(hashFile.hashElement('test', dir)); | ||
| hashFolder.hashElement(dir, { | ||
| files: { exclude: ['.*'], matchBasename: true }, | ||
| folders: { include: ['examples', 'test'], matchBasename: true } | ||
| }, (err, result) => { | ||
| if (err) { | ||
| console.error(`\nFailed to create a hash due to:\n${err}`); | ||
| } else { | ||
| console.log('\nCreated a hash over a folder:'); | ||
| console.log(result.toString()); | ||
| } | ||
| }); |
+228
-171
@@ -1,226 +0,283 @@ | ||
| "use strict" | ||
| const crypto = require('crypto'), | ||
| debug = require('debug'), | ||
| minimatch = require('minimatch'), | ||
| path = require('path'); | ||
| var fs = require('graceful-fs'); | ||
| var path = require('path'); | ||
| var crypto = require('crypto'); | ||
| var minimatch = require('minimatch'); | ||
| if (typeof Promise === 'undefined') require('when/es6-shim/Promise'); | ||
| var defaultOptions = { | ||
| const defaultOptions = { | ||
| algo: 'sha1', // see crypto.getHashes() for options | ||
| encoding: 'base64', // 'base64', 'hex' or 'binary' | ||
| excludes: [], | ||
| match: { | ||
| basename: true, | ||
| path: true | ||
| files: { | ||
| exclude: [], | ||
| include: [], | ||
| matchBasename: true, | ||
| matchPath: false | ||
| }, | ||
| folders: { | ||
| exclude: [], | ||
| include: [], | ||
| matchBasename: true, | ||
| matchPath: false | ||
| } | ||
| }; | ||
| module.exports = { | ||
| hashElement: hashElement | ||
| } | ||
| // Use the environment variable DEBUG to log output, e.g. `set DEBUG=fhash:*` | ||
| const log = { | ||
| match: debug('fhash:match'), | ||
| params: (params) => { | ||
| debug('fhash:parameters')(params); | ||
| return params; | ||
| } | ||
| }; | ||
| /** | ||
| * Create a hash over a folder or file, using either promises or error-first-callbacks. | ||
| * | ||
| * Examples: | ||
| * - hashElement(filename, folderpath, options, fn(err, hash) {}), hashElement(filename, folderpath, options); | ||
| * - hashElement(path, fn(err, hash) {}), hashElement(path) | ||
| * | ||
| * @param {string} name - element name or an element's path | ||
| * @param {string} [dir] - directory that contains the element (if omitted is generated from name) | ||
| * @param {Object} [options] - Options | ||
| * @param {string} [options.algo='sha1'] - checksum algorithm, see options in crypto.getHashes() | ||
| * @param {string} [options.encoding='base64'] - encoding of the resulting hash. One of 'base64', 'hex' or 'binary' | ||
| * @param {string[]} [options.excludes=[]] - Array of optional exclude file glob patterns, see minimatch doc | ||
| * @param {bool} [options.match.basename=true] - Match the exclude patterns to the file/folder name | ||
| * @param {bool} [options.match.path=true] - Match the exclude patterns to the file/folder path | ||
| * @param {fn} [callback] - Error-first callback function | ||
| */ | ||
| function hashElement(name, directoryPath, options, callback) { | ||
| var promise = parseParameters(arguments); | ||
| var callback = arguments[arguments.length-1]; | ||
| function prep(fs, Promise) { | ||
| function hashElement(name, dir, options, callback) { | ||
| callback = arguments[arguments.length - 1]; | ||
| return promise | ||
| .then(function (result) { | ||
| if (typeof callback === 'function') return callback(undefined, result); | ||
| return result; | ||
| }) | ||
| .catch(function (reason) { | ||
| if (typeof callback === 'function') return callback(reason); | ||
| throw reason; | ||
| }); | ||
| } | ||
| return parseParameters(arguments) | ||
| .then(({ basename, dir, options }) => { | ||
| // this is only used for the root level | ||
| options.skipMatching = true; | ||
| return hashElementPromise(basename, dir, options); | ||
| }) | ||
| .then(result => { | ||
| if (typeof callback === 'function') { | ||
| return callback(undefined, result); | ||
| } else { | ||
| return result; | ||
| } | ||
| }) | ||
| .catch(reason => { | ||
| if (typeof callback === 'function') { | ||
| return callback(reason); | ||
| } else { | ||
| throw reason; | ||
| } | ||
| }); | ||
| } | ||
| function parseParameters(args) { | ||
| var elementBasename = args[0], | ||
| elementDirname = args[1], | ||
| options = args[2]; | ||
| if (!isString(elementBasename)) { | ||
| return Promise.reject(new TypeError('First argument must be a string')); | ||
| function hashElementPromise(basename, dirname, options) { | ||
| return stat(path.join(dirname, basename)).then(stats => { | ||
| if (stats.isDirectory()) { | ||
| return hashFolderPromise(basename, dirname, options); | ||
| } else if (stats.isFile()) { | ||
| return hashFilePromise(basename, dirname, options); | ||
| } else { | ||
| return { | ||
| name: basename, | ||
| hash: 'unknown element type' | ||
| }; | ||
| } | ||
| }); | ||
| } | ||
| if (!isString(elementDirname)) { | ||
| elementDirname = path.dirname(elementBasename); | ||
| elementBasename = path.basename(elementBasename); | ||
| options = args[1]; | ||
| function stat(filepath) { | ||
| return new Promise((resolve, reject) => { | ||
| fs.stat(filepath, (err, stats) => { | ||
| if (err) { | ||
| return reject(err); | ||
| } else { | ||
| return resolve(stats); | ||
| } | ||
| }); | ||
| }); | ||
| } | ||
| // parse options (fallback default options) | ||
| if (!isObject(options)) options = {}; | ||
| ['algo', 'encoding', 'excludes'].forEach(function(key) { | ||
| if (!options.hasOwnProperty(key)) options[key] = defaultOptions[key]; | ||
| }); | ||
| if (!options.match) options.match = {}; | ||
| if (!options.match.hasOwnProperty('basename')) options.match.basename = defaultOptions.match.basename; | ||
| if (!options.match.hasOwnProperty('path')) options.match.path = defaultOptions.match.path; | ||
| function hashFolderPromise(name, dir, options) { | ||
| const folderPath = path.join(dir, name); | ||
| if (!options.excludes || !Array.isArray(options.excludes) || options.excludes.length == 0) { | ||
| options.excludes = undefined; | ||
| } else { | ||
| // combine globs into one single RegEx | ||
| options.excludes = new RegExp(options.excludes.reduce(function (acc, exclude) { | ||
| return acc + '|' + minimatch.makeRe(exclude).source; | ||
| }, '').substr(1)); | ||
| } | ||
| //console.log('parsed options:', options); | ||
| if (options.skipMatching) { | ||
| // this is currently only used for the root folder | ||
| log.match(`skipped '${folderPath}'`); | ||
| delete options.skipMatching; | ||
| } else if (ignore(name, folderPath, options.folders)) { | ||
| return undefined; | ||
| } | ||
| return hashElementPromise(elementBasename, elementDirname, options); | ||
| } | ||
| return readdir(folderPath).then(files => { | ||
| const children = files.map(child => { | ||
| return hashElementPromise(child, folderPath, options); | ||
| }); | ||
| function hashElementPromise(basename, dirname, options) { | ||
| var filepath = path.join(dirname, basename); | ||
| if (options.match.basename && options.excludes && options.excludes.test(basename)) { | ||
| //console.log('regex', options.excludes, 'matched to', basename); | ||
| return Promise.resolve(undefined); | ||
| return Promise.all(children).then(children => { | ||
| const hash = new HashedFolder(name, children.filter(notUndefined), options); | ||
| return hash; | ||
| }); | ||
| }); | ||
| } | ||
| if (options.match.path && options.excludes && options.excludes.test(filepath)) { | ||
| //console.log('regex', options.excludes, 'matched to', filepath); | ||
| return Promise.resolve(undefined); | ||
| function readdir(folderPath) { | ||
| return new Promise((resolve, reject) => { | ||
| fs.readdir(folderPath, (err, files) => { | ||
| if (err) { | ||
| console.error(err); | ||
| return reject(err); | ||
| } else { | ||
| return resolve(files); | ||
| } | ||
| }); | ||
| }); | ||
| } | ||
| return new Promise(function (resolve, reject, notify) { | ||
| fs.stat(filepath, function (err, stats) { | ||
| if (err) { | ||
| return reject(err); | ||
| } | ||
| function hashFilePromise(name, dir, options) { | ||
| const filePath = path.join(dir, name); | ||
| if (stats.isDirectory()) { | ||
| resolve(hashFolderPromise(basename, dirname, options)); | ||
| } else if (stats.isFile()) { | ||
| resolve(hashFilePromise(basename, dirname, options)); | ||
| } else { | ||
| resolve({ name: basename, hash: 'unknown element type' }); | ||
| } | ||
| }); | ||
| }); | ||
| } | ||
| if (options.skipMatching) { | ||
| // this is currently only used for the root folder | ||
| log.match(`skipped '${filePath}'`); | ||
| delete options.skipMatching; | ||
| } else if (ignore(name, filePath, options.files)) { | ||
| return undefined; | ||
| } | ||
| return new Promise((resolve, reject) => { | ||
| try { | ||
| const hash = crypto.createHash(options.algo); | ||
| hash.write(name); | ||
| function hashFolderPromise(foldername, directoryPath, options) { | ||
| var folderPath = path.join(directoryPath, foldername); | ||
| const f = fs.createReadStream(filePath); | ||
| f.pipe(hash, { end: false }); | ||
| var notExcluded = function notExcluded(basename) { | ||
| return !(options.match.basename && options.excludes && options.excludes.test(basename)); | ||
| f.on('end', () => { | ||
| const hashedFile = new HashedFile(name, hash, options.encoding); | ||
| return resolve(hashedFile); | ||
| }); | ||
| } catch (ex) { | ||
| return reject(ex); | ||
| } | ||
| }); | ||
| } | ||
| return new Promise(function (resolve, reject, notify) { | ||
| fs.readdir(folderPath, function (err, files) { | ||
| if (err) { | ||
| var TAG = 'hashFolderPromise(' + foldername + ', ' + directoryPath + '):'; | ||
| console.error(TAG, err); | ||
| reject(err); | ||
| function ignore(name, path, rules) { | ||
| if (rules.exclude) { | ||
| if (rules.matchBasename && rules.exclude.test(name)) { | ||
| log.match(`exclude basename '${path}'`); | ||
| return true; | ||
| } else if (rules.matchPath && rules.exclude.test(path)) { | ||
| log.match(`exclude path '${path}'`); | ||
| return true; | ||
| } | ||
| } else if (rules.include) { | ||
| if (rules.matchBasename && rules.include.test(name)) { | ||
| log.match(`include basename '${path}'`); | ||
| return false; | ||
| } else if (rules.matchPath && rules.include.test(path)) { | ||
| log.match(`include path '${path}'`); | ||
| return false; | ||
| } else { | ||
| return true; | ||
| } | ||
| } | ||
| var children = files.filter(notExcluded).map(function (child) { | ||
| return hashElementPromise(child, folderPath, options); | ||
| }); | ||
| log.match(`unmatched '${path}'`); | ||
| return false; | ||
| } | ||
| return Promise.all(children).then(function (children) { | ||
| var hash = new HashedFolder(foldername, children.filter(notUndefined), options); | ||
| resolve(hash); | ||
| }); | ||
| const HashedFolder = function HashedFolder(name, children, options) { | ||
| this.name = name; | ||
| this.children = children; | ||
| const hash = crypto.createHash(options.algo); | ||
| hash.write(name); | ||
| children.forEach(child => { | ||
| if (child.hash) { | ||
| hash.write(child.hash); | ||
| } | ||
| }); | ||
| }); | ||
| } | ||
| this.hash = hash.digest(options.encoding); | ||
| }; | ||
| function hashFilePromise(filename, directoryPath, options) { | ||
| return new Promise(function (resolve, reject, notify) { | ||
| try { | ||
| var hash = crypto.createHash(options.algo); | ||
| hash.write(filename); | ||
| HashedFolder.prototype.toString = function (padding = '') { | ||
| const first = `${padding}{ name: '${this.name}', hash: '${this.hash},'\n`; | ||
| padding += ' '; | ||
| var f = fs.createReadStream(path.join(directoryPath, filename)); | ||
| f.pipe(hash, { end: false }); | ||
| return `${first}${padding}children: ${this.childrenToString(padding)}}`; | ||
| }; | ||
| f.on('end', function () { | ||
| var hashedFile = new HashedFile(filename, hash, options); | ||
| resolve(hashedFile); | ||
| }); | ||
| } catch (ex) { | ||
| reject(ex); | ||
| HashedFolder.prototype.childrenToString = function (padding = '') { | ||
| if (this.children.length === 0) { | ||
| return '[]'; | ||
| } else { | ||
| const nextPadding = padding + ' '; | ||
| const children = this.children | ||
| .map(child => child.toString(nextPadding)) | ||
| .join('\n'); | ||
| return `[\n${children}\n${padding}]`; | ||
| } | ||
| }); | ||
| } | ||
| }; | ||
| const HashedFile = function HashedFile(name, hash, encoding) { | ||
| this.name = name; | ||
| this.hash = hash.digest(encoding); | ||
| }; | ||
| var HashedFolder = function (name, children, options) { | ||
| this.name = name; | ||
| this.children = children; | ||
| HashedFile.prototype.toString = function (padding = '') { | ||
| return padding + '{ name: \'' + this.name + '\', hash: \'' + this.hash + '\' }'; | ||
| }; | ||
| var hash = crypto.createHash(options.algo); | ||
| hash.write(name); | ||
| children.forEach(function (child) { | ||
| if (child.hash) { | ||
| hash.write(child.hash); | ||
| } | ||
| }); | ||
| this.hash = hash.digest(options.encoding); | ||
| return hashElement; | ||
| } | ||
| HashedFolder.prototype.toString = function (padding) { | ||
| if (typeof padding === 'undefined') padding = ""; | ||
| var str = padding + '{ name: \'' + this.name + '\', hash: \'' + this.hash + '\'\n'; | ||
| padding += ' '; | ||
| str += padding + 'children: '; | ||
| if (this.children.length === 0) { | ||
| str += '[]'; | ||
| } else { | ||
| var nextPadding = padding + " "; | ||
| var childElements = this.children.map(function (child) { return child.toString(nextPadding); }); | ||
| str += '[\n' + childElements.join('\n') + '\n' + padding + ']'; | ||
| function parseParameters(args) { | ||
| let basename = args[0], | ||
| dir = args[1], | ||
| options_ = args[2]; | ||
| if (!isString(basename)) { | ||
| return Promise.reject(new TypeError('First argument must be a string')); | ||
| } | ||
| return str + ' }'; | ||
| } | ||
| if (!isString(dir)) { | ||
| dir = path.dirname(basename); | ||
| basename = path.basename(basename); | ||
| options_ = args[1]; | ||
| } | ||
| // parse options (fallback default options) | ||
| if (!isObject(options_)) options_ = {}; | ||
| const options = { | ||
| algo: options_.algo || defaultOptions.algo, | ||
| encoding: options_.encoding || defaultOptions.encoding, | ||
| files: Object.assign({}, defaultOptions.files, options_.files), | ||
| folders: Object.assign({}, defaultOptions.folders, options_.folders), | ||
| match: Object.assign({}, defaultOptions.match, options_.match) | ||
| }; | ||
| var HashedFile = function (name, hash, options) { | ||
| this.name = name; | ||
| this.hash = hash.digest(options.encoding); | ||
| } | ||
| // transform match globs to Regex | ||
| options.files.exclude = reduceGlobPatterns(options.files.exclude); | ||
| options.files.include = reduceGlobPatterns(options.files.include); | ||
| options.folders.exclude = reduceGlobPatterns(options.folders.exclude); | ||
| options.folders.include = reduceGlobPatterns(options.folders.include); | ||
| HashedFile.prototype.toString = function (padding) { | ||
| if (typeof padding === 'undefined') padding = ""; | ||
| return padding + '{ name: \'' + this.name + '\', hash: \'' + this.hash + '\' }'; | ||
| return Promise.resolve(log.params({ basename, dir, options })); | ||
| } | ||
| function isString(str) { | ||
| return (typeof str == 'string' || str instanceof String) | ||
| return typeof str === 'string' || str instanceof String; | ||
| } | ||
| function isObject(obj) { | ||
| return obj != null && typeof obj === 'object' | ||
| return obj !== null && typeof obj === 'object'; | ||
| } | ||
| function notUndefined(obj) { | ||
| return typeof obj !== undefined; | ||
| return typeof obj !== 'undefined'; | ||
| } | ||
| function reduceGlobPatterns(globs) { | ||
| if (!globs || !Array.isArray(globs) || globs.length === 0) { | ||
| return undefined; | ||
| } else { | ||
| // combine globs into one single RegEx | ||
| return new RegExp(globs.reduce((acc, exclude) => { | ||
| return acc + '|' + minimatch.makeRe(exclude).source; | ||
| }, '').substr(1)); | ||
| } | ||
| } | ||
| module.exports = { | ||
| hashElement: prep(require("graceful-fs"), Promise), | ||
| // exposed for testing | ||
| prep: prep, | ||
| parseParameters: parseParameters | ||
| }; |
+6
-6
| { | ||
| "name": "folder-hash", | ||
| "version": "1.1.2", | ||
| "version": "2.0.0", | ||
| "description": "Create a hash checksum over a folder and its content - its children and their content", | ||
@@ -22,5 +22,5 @@ "main": "index.js", | ||
| "dependencies": { | ||
| "debug": "^3.1.0", | ||
| "graceful-fs": "~4.1.11", | ||
| "minimatch": "~3.0.4", | ||
| "when": "~3.7.8" | ||
| "minimatch": "~3.0.4" | ||
| }, | ||
@@ -32,8 +32,8 @@ "devDependencies": { | ||
| "jsdoc": "^3.4.3", | ||
| "mocha": "^3.2.0", | ||
| "rimraf": "^2.5.2" | ||
| "memfs": "^2.8.0", | ||
| "mocha": "^5.0.5" | ||
| }, | ||
| "engines": { | ||
| "node": ">=0.10.5" | ||
| "node": ">=7.0.0" | ||
| } | ||
| } |
+201
-58
| Create a hash checksum over a folder or a file. | ||
| The hashes are propagated upwards, the hash that is returned for a folder is generated over all the hashes of its children. | ||
| The hashes are generated with the _sha1_ algorithm and returned in _base64_ encoding. | ||
| The hashes are generated with the _sha1_ algorithm and returned in _base64_ encoding by default. | ||
| The returned information looks like this: | ||
| Each file returns a name and a hash, and each folder returns additionally an array of children (file or folder elements). | ||
| ## Usage | ||
| First, install folder-hash with `npm install --save folder-hash` or `yarn add folder-hash`. | ||
| ### Simple example | ||
| See file *./examples/readme-example1.js*. | ||
| This example excludes all files and folders starting with a dot (e.g. *.git/* and *.gitignore*), as well as the *node_modules* and *test_coverage* folders, and only includes *.js* and *.json* files. | ||
| ```js | ||
| { name: 'test', | ||
| hash: 'qmUXLCsTQGOEF6p0w9V78MC7sJI=', | ||
| children: [ | ||
| { name: 'helper', | ||
| hash: 'x1CX3yVH3UuLTw7zcSitSs/PbGE=', | ||
| children: [ | ||
| { name: 'helper.js', hash: 'pHYwd8k/oZV01oABTz9MC8KovkU=' } | ||
| ] }, | ||
| { name: 'test.js', hash: 'L/vqpdQhxmD5w62k24m4TuZJ1PM=' } | ||
| ] | ||
| } | ||
| const { hashElement } = require('folder-hash'); | ||
| const options = { | ||
| folders: { exclude: ['.*', 'node_modules', 'test_coverage'] }, | ||
| files: { include: ['*.js', '*.json'] } | ||
| }; | ||
| console.log('Creating a hash over the current folder:'); | ||
| hashElement('.', options) | ||
| .then(hash => { | ||
| console.log(hash.toString()); | ||
| }) | ||
| .catch(error => { | ||
| return console.error('hashing failed:', error); | ||
| }); | ||
| ``` | ||
| Each file returns a name and a hash, and each folder returns additionally an array of children (file or folder elements). | ||
| The returned information looks, for example, like this: | ||
| ``` | ||
| Creating a hash over the current folder: | ||
| { name: '.', hash: 'YZOrKDx9LCLd8X39PoFTflXGpRU=,' | ||
| children: [ | ||
| { name: 'examples', hash: 'aG8wg8np5SGddTnw1ex74PC9EnM=,' | ||
| children: [ | ||
| { name: 'readme-example1.js', hash: 'Xlw8S2iomJWbxOJmmDBnKcauyQ8=' } | ||
| { name: 'readme-with-callbacks.js', hash: 'ybvTHLCQBvWHeKZtGYZK7+6VPUw=' } | ||
| { name: 'readme-with-promises.js', hash: '43i9tE0kSFyJYd9J2O0nkKC+tmI=' } | ||
| { name: 'sample.js', hash: 'PRTD9nsZw3l73O/w5B2FH2qniFk=' } | ||
| ]} | ||
| { name: 'index.js', hash: 'kQQWXdgKuGfBf7ND3rxjThTLVNA=' } | ||
| { name: 'package.json', hash: 'w7F0S11l6VefDknvmIy8jmKx+Ng=' } | ||
| { name: 'test', hash: 'H5x0JDoV7dEGxI65e8IsencDZ1A=,' | ||
| children: [ | ||
| { name: 'parameters.js', hash: '3gCEobqzHGzQiHmCDe5yX8weq7M=' } | ||
| { name: 'test.js', hash: 'kg7p8lbaVf1CPtWLAIvkHkdu1oo=' } | ||
| ]} | ||
| ]} | ||
| ``` | ||
| ## Usage | ||
| First, install folder-hash with `npm install --save folder-hash`. | ||
| ### With promises | ||
| It is also possible to only match the full path and not the basename. The same configuration could look like this: | ||
| _But unfortunately *nix and Windows behave differently, so please use caution._ | ||
| ```js | ||
| const options = { | ||
| folders: { | ||
| exclude: ['.*', '**.*', '**node_modules', '**test_coverage'], | ||
| matchBasename: false, matchPath: true | ||
| }, | ||
| files: { | ||
| //include: ['**.js', '**.json' ], // Windows | ||
| include: ['*.js', '**/*.js', '*.json', '**/*.json'], // *nix | ||
| matchBasename: false, matchPath: true | ||
| } | ||
| }; | ||
| ``` | ||
| ### Other examples using promises | ||
| See file *./examples/readme-with-promises.js* | ||
| ```js | ||
| var hasher = require('folder-hash'); | ||
| const path = require('path'); | ||
| const { hashElement } = require('folder-hash'); | ||
| // pass element name and folder path separately | ||
| hasher.hashElement('node_modules', __dirname).then(function (hash) { | ||
| console.log('Result for folder "node_modules" in directory "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| hashElement('test', path.join(__dirname, '..')) | ||
| .then(hash => { | ||
| console.log('Result for folder "../test":', hash.toString(), '\n'); | ||
| }) | ||
| .catch(error => { | ||
| return console.error('hashing failed:', error); | ||
| }); | ||
| // pass element path directly | ||
| hasher.hashElement(__dirname).then(function (hash) { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| // pass options (example: exclude dotFiles) | ||
| var options = { excludes: ['.*'], match: { basename: true, path: false } }; | ||
| hasher.hashElement(__dirname, options) | ||
| .then(function (hash) { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }) | ||
| .catch(function (error) { | ||
| return console.error('hashing failed:', error); | ||
| }); | ||
| hashElement(__dirname) | ||
| .then(hash => { | ||
| console.log(`Result for folder "${__dirname}":`); | ||
| console.log(hash.toString(), '\n'); | ||
| }) | ||
| .catch(error => { | ||
| return console.error('hashing failed:', error); | ||
| }); | ||
| // pass options (example: exclude dotFolders) | ||
| const options = { encoding: 'hex', folders: { exclude: ['.*'] } }; | ||
| hashElement(__dirname, options) | ||
| .then(hash => { | ||
| console.log('Result for folder "' + __dirname + '" (with options):'); | ||
| console.log(hash.toString(), '\n'); | ||
| }) | ||
| .catch(error => { | ||
| return console.error('hashing failed:', error); | ||
| }); | ||
| ``` | ||
| ### With callbacks | ||
| ### Other examples using error-first callbacks | ||
| See *./examples/readme-with-callbacks.js* | ||
| ```js | ||
| var hasher = require('folder-hash'); | ||
| const path = require('path'); | ||
| const { hashElement } = require('folder-hash'); | ||
| // pass element name and folder path separately | ||
| hasher.hashElement('node_modules', __dirname, function (error, hash) { | ||
| if (error) return console.error('hashing failed:', error); | ||
| console.log('Result for folder "node_modules" in directory "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| hashElement('test', path.join(__dirname, '..'), (error, hash) => { | ||
| if (error) { | ||
| return console.error('hashing failed:', error); | ||
| } else { | ||
| console.log('Result for folder "../test":', hash.toString(), '\n'); | ||
| } | ||
| }); | ||
| // pass element path directly | ||
| hasher.hashElement(__dirname, function (error, hash) { | ||
| if (error) return console.error('hashing failed:', error); | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| hashElement(__dirname, (error, hash) => { | ||
| if (error) { | ||
| return console.error('hashing failed:', error); | ||
| } else { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString(), '\n'); | ||
| } | ||
| }); | ||
| // pass options (example: exclude dotFiles) | ||
| var options = { excludes: ['**/.*'], match: { basename: false, path: true } }; | ||
| hasher.hashElement(__dirname, options, function (error, hash) { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| const options = { algo: 'md5', files: { exclude: ['.*'], matchBasename: true } }; | ||
| hashElement(__dirname, options, (error, hash) => { | ||
| if (error) { | ||
| return console.error('hashing failed:', error); | ||
| } else { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| } | ||
| }); | ||
@@ -105,3 +176,3 @@ ``` | ||
| </td> | ||
| <td>directory that contains the element (if omitted is generated from name)</td> | ||
| <td>directory that contains the element (generated from name if omitted)</td> | ||
| </tr> | ||
@@ -134,2 +205,22 @@ <tr> | ||
| #### Options object properties | ||
| ##### Default values | ||
| ```js | ||
| { | ||
| algo: 'sha1', // see crypto.getHashes() for options | ||
| encoding: 'base64', // 'base64', 'hex' or 'binary' | ||
| files: { | ||
| exclude: [], | ||
| include: [], | ||
| matchBasename: true, | ||
| matchPath: false | ||
| }, | ||
| folders: { | ||
| exclude: [], | ||
| include: [], | ||
| matchBasename: true, | ||
| matchPath: false | ||
| } | ||
| } | ||
| ``` | ||
| <table> | ||
@@ -173,4 +264,43 @@ <thead> | ||
| <tr> | ||
| <td>excludes</td> | ||
| <td>files</td> | ||
| <td> | ||
| <span>Object</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td colspan="2"> | ||
| Rules object (see below) | ||
| </td> | ||
| </tr> | ||
| <tr> | ||
| <td>folders</td> | ||
| <td> | ||
| <span>Object</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td colspan="2"> | ||
| Rules object (see below) | ||
| </td> | ||
| </tr> | ||
| </tbody> | ||
| </table> | ||
| #### Rules object properties | ||
| <table> | ||
| <thead> | ||
| <tr> | ||
| <th>Name</th> | ||
| <th>Type</th> | ||
| <th>Attributes</th> | ||
| <th>Default</th> | ||
| <th>Description</th> | ||
| </tr> | ||
| </thead> | ||
| <tbody> | ||
| <tr> | ||
| <td>exclude</td> | ||
| <td> | ||
| <span>Array.<string></span> | ||
@@ -184,7 +314,20 @@ </td> | ||
| </td> | ||
| <td>Array of optional exclude file glob patterns, see minimatch doc</td> | ||
| <td>Array of optional exclude glob patterns, see <a href="https://github.com/isaacs/minimatch#features">minimatch doc</a></td> | ||
| </tr> | ||
| <tr> | ||
| <td>match.basename</td> | ||
| <td>include</td> | ||
| <td> | ||
| <span>Array.<string></span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td> | ||
| [] | ||
| </td> | ||
| <td>Array of optional include glob patterns, see <a href="https://github.com/isaacs/minimatch#features">minimatch doc</a></td> | ||
| </tr> | ||
| <tr> | ||
| <td>matchBasename</td> | ||
| <td> | ||
| <span>bool</span> | ||
@@ -198,6 +341,6 @@ </td> | ||
| </td> | ||
| <td>Match the exclude patterns to the file/folder name</td> | ||
| <td>Match the glob patterns to the file/folder name</td> | ||
| </tr> | ||
| <tr> | ||
| <td>match.path</td> | ||
| <td>matchPath</td> | ||
| <td> | ||
@@ -210,5 +353,5 @@ <span>bool</span> | ||
| <td> | ||
| true | ||
| false | ||
| </td> | ||
| <td>Match the exclude patterns to the file/folder path</td> | ||
| <td>Match the glob patterns to the file/folder path</td> | ||
| </tr> | ||
@@ -215,0 +358,0 @@ </tbody> |
Sorry, the diff of this file is not supported yet
-226
| "use strict" | ||
| var fs = require('graceful-fs'); | ||
| var path = require('path'); | ||
| var crypto = require('crypto'); | ||
| var minimatch = require('minimatch'); | ||
| if (typeof Promise === 'undefined') require('when/es6-shim/Promise'); | ||
// Default hashing options, used as fallback for any option the caller omits.
// See the README section "Options object properties".
var defaultOptions = {
    algo: 'sha1',       // see crypto.getHashes() for options
    encoding: 'base64', // 'base64', 'hex' or 'binary'
    excludes: [],       // glob patterns of elements to skip (minimatch syntax)
    match: {
        basename: true, // test exclude patterns against the element name
        path: true      // test exclude patterns against the element path
    }
};

// Public API: a single entry point that supports promises and error-first callbacks.
module.exports = {
    hashElement: hashElement
}
/**
 * Create a hash over a folder or file, using either promises or error-first-callbacks.
 *
 * Examples:
 * - hashElement(filename, folderpath, options, fn(err, hash) {}), hashElement(filename, folderpath, options);
 * - hashElement(path, fn(err, hash) {}), hashElement(path)
 *
 * @param {string} name - element name or an element's path
 * @param {string} [dir] - directory that contains the element (if omitted is generated from name)
 * @param {Object} [options] - Options
 * @param {string} [options.algo='sha1'] - checksum algorithm, see options in crypto.getHashes()
 * @param {string} [options.encoding='base64'] - encoding of the resulting hash. One of 'base64', 'hex' or 'binary'
 * @param {string[]} [options.excludes=[]] - Array of optional exclude file glob patterns, see minimatch doc
 * @param {bool} [options.match.basename=true] - Match the exclude patterns to the file/folder name
 * @param {bool} [options.match.path=true] - Match the exclude patterns to the file/folder path
 * @param {fn} [callback] - Error-first callback function
 * @returns {Promise} resolves to the hashed element; also settles the callback if one was given
 */
function hashElement(name, directoryPath, options, callback) {
    var promise = parseParameters(arguments);
    // FIX: the original re-declared `var callback`, shadowing the named
    // parameter. The error-first callback (if any) is always the last argument.
    var cb = arguments[arguments.length - 1];
    return promise
        .then(function (result) {
            if (typeof cb === 'function') return cb(undefined, result);
            return result;
        })
        .catch(function (reason) {
            if (typeof cb === 'function') return cb(reason);
            throw reason;
        });
}
/**
 * Normalize the arguments passed to hashElement and start the hashing.
 *
 * Fixes over the original:
 * - no longer mutates the caller's options object (a private copy is built);
 * - minimatch.makeRe returns `false` for an invalid glob, which previously
 *   caused an obscure TypeError on `.source`; now it rejects with a clear message.
 *
 * @param {Arguments} args - the raw arguments object of hashElement
 * @returns {Promise} resolves to the hashed element, rejects on bad input
 */
function parseParameters(args) {
    var elementBasename = args[0],
        elementDirname = args[1],
        options = args[2];

    if (!isString(elementBasename)) {
        return Promise.reject(new TypeError('First argument must be a string'));
    }

    // If no directory was supplied, split the first argument into dir + name
    if (!isString(elementDirname)) {
        elementDirname = path.dirname(elementBasename);
        elementBasename = path.basename(elementBasename);
        options = args[1];
    }

    if (!isObject(options)) options = {};

    // Build a private copy (fallback to default options) so the caller's
    // options object is never mutated.
    var parsed = {
        algo: options.hasOwnProperty('algo') ? options.algo : defaultOptions.algo,
        encoding: options.hasOwnProperty('encoding') ? options.encoding : defaultOptions.encoding,
        excludes: options.hasOwnProperty('excludes') ? options.excludes : defaultOptions.excludes,
        match: {
            basename: defaultOptions.match.basename,
            path: defaultOptions.match.path
        }
    };
    if (isObject(options.match)) {
        if (options.match.hasOwnProperty('basename')) parsed.match.basename = options.match.basename;
        if (options.match.hasOwnProperty('path')) parsed.match.path = options.match.path;
    }

    if (!Array.isArray(parsed.excludes) || parsed.excludes.length === 0) {
        parsed.excludes = undefined;
    } else {
        // Combine all globs into one single RegExp
        var sources = [];
        for (var i = 0; i < parsed.excludes.length; i++) {
            var re = minimatch.makeRe(parsed.excludes[i]);
            // makeRe returns false for an invalid pattern
            if (!re) {
                return Promise.reject(new TypeError('Invalid glob pattern: ' + parsed.excludes[i]));
            }
            sources.push(re.source);
        }
        parsed.excludes = new RegExp(sources.join('|'));
    }

    return hashElementPromise(elementBasename, elementDirname, parsed);
}
/**
 * Hash a single element (file or folder), dispatching on its fs.stat type.
 * Resolves to undefined when the element matches an exclude pattern.
 *
 * @param {string} basename - name of the element
 * @param {string} dirname - directory containing the element
 * @param {Object} options - fully parsed options (see parseParameters)
 * @returns {Promise} resolves to a HashedFile/HashedFolder, or undefined if excluded
 */
function hashElementPromise(basename, dirname, options) {
    var filepath = path.join(dirname, basename);

    // Skip elements matching the combined exclude RegExp
    if (options.excludes) {
        if (options.match.basename && options.excludes.test(basename)) {
            return Promise.resolve(undefined);
        }
        if (options.match.path && options.excludes.test(filepath)) {
            return Promise.resolve(undefined);
        }
    }

    // FIX: the executor previously declared a third `notify` parameter —
    // a leftover from the when.js API; native Promise executors receive
    // only resolve and reject.
    return new Promise(function (resolve, reject) {
        fs.stat(filepath, function (err, stats) {
            if (err) {
                return reject(err);
            }
            if (stats.isDirectory()) {
                resolve(hashFolderPromise(basename, dirname, options));
            } else if (stats.isFile()) {
                resolve(hashFilePromise(basename, dirname, options));
            } else {
                // sockets, FIFOs, devices, … cannot be hashed meaningfully
                resolve({ name: basename, hash: 'unknown element type' });
            }
        });
    });
}
/**
 * Hash a folder by hashing all of its (non-excluded) children and combining
 * their hashes with the folder name.
 *
 * @param {string} foldername - name of the folder
 * @param {string} directoryPath - directory containing the folder
 * @param {Object} options - fully parsed options
 * @returns {Promise<HashedFolder>} rejects on readdir or child-hash failure
 */
function hashFolderPromise(foldername, directoryPath, options) {
    var folderPath = path.join(directoryPath, foldername);

    var notExcluded = function notExcluded(basename) {
        return !(options.match.basename && options.excludes && options.excludes.test(basename));
    }

    return new Promise(function (resolve, reject) {
        fs.readdir(folderPath, function (err, files) {
            if (err) {
                var TAG = 'hashFolderPromise(' + foldername + ', ' + directoryPath + '):';
                console.error(TAG, err);
                // BUG FIX: the original fell through after reject(err) and then
                // crashed with a TypeError on `files.filter` (files is undefined).
                return reject(err);
            }
            var children = files.filter(notExcluded).map(function (child) {
                return hashElementPromise(child, folderPath, options);
            });
            Promise.all(children)
                .then(function (hashed) {
                    resolve(new HashedFolder(foldername, hashed.filter(notUndefined), options));
                })
                // BUG FIX: a rejected child promise previously left the outer
                // promise pending forever; propagate the failure instead.
                .catch(reject);
        });
    });
}
/**
 * Hash one file: the digest covers the file name followed by the file content.
 *
 * @param {string} filename - name of the file
 * @param {string} directoryPath - directory containing the file
 * @param {Object} options - fully parsed options
 * @returns {Promise<HashedFile>} rejects on hash-creation or read errors
 */
function hashFilePromise(filename, directoryPath, options) {
    return new Promise(function (resolve, reject) {
        try {
            var hash = crypto.createHash(options.algo);
            // the file name is part of the hash, so renamed files hash differently
            hash.write(filename);

            var f = fs.createReadStream(path.join(directoryPath, filename));
            // BUG FIX: without an 'error' listener, a failing read (e.g. the
            // file vanished or is unreadable) raised an unhandled 'error'
            // event and crashed the process.
            f.on('error', reject);
            f.pipe(hash, { end: false });

            f.on('end', function () {
                var hashedFile = new HashedFile(filename, hash, options);
                resolve(hashedFile);
            });
        } catch (ex) {
            reject(ex);
        }
    });
}
// Aggregate hash node for a folder: the digest covers the folder name plus
// the hash of every (already hashed) child, in directory order.
var HashedFolder = function (name, children, options) {
    this.name = name;
    this.children = children;

    var digest = crypto.createHash(options.algo);
    digest.write(name);
    for (var i = 0; i < children.length; i += 1) {
        if (children[i].hash) {
            digest.write(children[i].hash);
        }
    }
    this.hash = digest.digest(options.encoding);
}

// Render this node and its subtree, indenting one extra level per depth.
HashedFolder.prototype.toString = function (padding) {
    if (typeof padding === 'undefined') padding = "";
    var result = padding + '{ name: \'' + this.name + '\', hash: \'' + this.hash + '\'\n';
    padding += ' ';
    result += padding + 'children: ';
    if (this.children.length === 0) {
        result += '[]';
    } else {
        var nextPadding = padding + " ";
        var parts = [];
        for (var i = 0; i < this.children.length; i += 1) {
            parts.push(this.children[i].toString(nextPadding));
        }
        result += '[\n' + parts.join('\n') + '\n' + padding + ']';
    }
    return result + ' }';
}
// Leaf hash node for a single file; finalizes the passed-in Hash stream.
var HashedFile = function (name, hash, options) {
    this.name = name;
    this.hash = hash.digest(options.encoding);
}

// Render this file node on a single line, prefixed with the given padding.
HashedFile.prototype.toString = function (padding) {
    if (padding === undefined) padding = "";
    return padding + "{ name: '" + this.name + "', hash: '" + this.hash + "' }";
}
// True for both string primitives and boxed String objects.
function isString(str) {
    return typeof str === 'string' || str instanceof String;
}
// True for any non-null object (including arrays), false for primitives,
// null, undefined and functions.
function isObject(obj) {
    if (obj === null || obj === undefined) return false;
    return typeof obj === 'object';
}
/**
 * Predicate used to drop excluded (undefined) child hashes from a folder.
 *
 * BUG FIX: the original compared `typeof obj` (always a string such as
 * 'undefined') against the *value* `undefined`, so the comparison was always
 * true and excluded children were never filtered out.
 *
 * @param {*} obj
 * @returns {bool} false only when obj is undefined
 */
function notUndefined(obj) {
    return typeof obj !== 'undefined';
}
| MIT License | ||
| Copyright (c) 2015 Marc Walter | ||
| Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: | ||
| The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. | ||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
| { | ||
| "name": "folder-hash", | ||
| "version": "1.1.1", | ||
| "description": "Create a hash checksum over a folder and its content - its children and their content", | ||
| "main": "index.js", | ||
| "scripts": { | ||
| "start": "node sample.js", | ||
| "test": "mocha --reporter spec test", | ||
| "cover": "node node_modules/istanbul/lib/cli.js cover --dir test_coverage node_modules/mocha/bin/_mocha test", | ||
| "doc": "./node_modules/.bin/jsdoc index.js" | ||
| }, | ||
| "author": { | ||
| "name": "Marc Walter", | ||
| "email": "walter.marc@outlook.com" | ||
| }, | ||
| "license": "MIT", | ||
| "repository": { | ||
| "type": "git", | ||
| "url": "https://github.com/marc136/node-folder-hash.git" | ||
| }, | ||
| "dependencies": { | ||
| "graceful-fs": "^4.1.11", | ||
| "minimatch": "^3.0.3", | ||
| "when": "^3.7.7" | ||
| }, | ||
| "devDependencies": { | ||
| "chai": "^3.5.0", | ||
| "chai-as-promised": "^6.0.0", | ||
| "istanbul": "^0.4.5", | ||
| "jsdoc": "^3.4.3", | ||
| "mocha": "^3.2.0", | ||
| "rimraf": "^2.5.2" | ||
| }, | ||
| "engines": { | ||
| "node": ">=0.10.5" | ||
| } | ||
| } |
-240
| # folderHash | ||
| ## Description | ||
| Create a hash checksum over a folder or a file. | ||
| The hashes are propagated upwards, the hash that is returned for a folder is generated over all the hashes of its children. | ||
| The hashes are generated with the _sha1_ algorithm and returned in _base64_ encoding. | ||
| The returned information looks like this: | ||
| { name: 'test', | ||
| hash: 'qmUXLCsTQGOEF6p0w9V78MC7sJI=', | ||
| children: [ | ||
| { name: 'helper', | ||
| hash: 'x1CX3yVH3UuLTw7zcSitSs/PbGE=', | ||
| children: [ | ||
| { name: 'helper.js', hash: 'pHYwd8k/oZV01oABTz9MC8KovkU=' } | ||
| ] }, | ||
| { name: 'test.js', hash: 'L/vqpdQhxmD5w62k24m4TuZJ1PM=' } | ||
| ] | ||
| } | ||
| Each file returns a name and a hash, and each folder returns additionally an array of children (file or folder elements). | ||
| ## Usage | ||
| First, install the dependencies by executing `npm install`. | ||
| ### With promises | ||
| var hasher = require('folder-hash'); | ||
| // pass element name and folder path separately | ||
| hasher.hashElement('node_modules', __dirname).then(function (hash) { | ||
| console.log('Result for folder "node_modules" in directory "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| // pass element path directly | ||
| hasher.hashElement(__dirname).then(function (hash) { | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| // pass options (example: exclude dotFiles) | ||
        var options = { excludes: ['.*'], match: { basename: true, path: false } };
        hasher.hashElement(__dirname, options)
            .then(function (hash) {
                console.log('Result for folder "' + __dirname + '":');
                console.log(hash.toString());
            })
            .catch(function (error) {
                return console.error('hashing failed:', error);
            });
| ### With callbacks | ||
| var hasher = require('folder-hash'); | ||
| // pass element name and folder path separately | ||
        hasher.hashElement('node_modules', __dirname, function (error, hash) {
| if (error) return console.error('hashing failed:', error); | ||
| console.log('Result for folder "node_modules" in directory "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| // pass element path directly | ||
        hasher.hashElement(__dirname, function (error, hash) {
| if (error) return console.error('hashing failed:', error); | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| // pass options (example: exclude dotFiles) | ||
| var options = { excludes: ['**/.*'], match: { basename: false, path: true } }; | ||
        hasher.hashElement(__dirname, options, function (error, hash) {
| if (error) return console.error('hashing failed:', error); | ||
| console.log('Result for folder "' + __dirname + '":'); | ||
| console.log(hash.toString()); | ||
| }); | ||
| ### Parameters for the hashElement function | ||
| <table> | ||
| <thead> | ||
| <tr> | ||
| <th>Name</th> | ||
| <th>Type</th> | ||
| <th>Attributes</th> | ||
| <th>Description</th> | ||
| </tr> | ||
| </thead> | ||
| <tbody> | ||
| <tr> | ||
| <td>name</td> | ||
| <td> | ||
| <span>string</span> | ||
| </td> | ||
| <td> | ||
| </td> | ||
| <td>element name or an element's path</td> | ||
| </tr> | ||
| <tr> | ||
| <td>dir</td> | ||
| <td> | ||
| <span>string</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td>directory that contains the element (if omitted is generated from name)</td> | ||
| </tr> | ||
| <tr> | ||
| <td>options</td> | ||
| <td> | ||
| <span>Object</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td> | ||
| Options object (see below) | ||
| </td> | ||
| </tr> | ||
| <tr> | ||
| <td>callback</td> | ||
| <td> | ||
| <span>fn</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td>Error-first callback function</td> | ||
| </tr> | ||
| </tbody> | ||
| </table> | ||
| #### Options object properties | ||
| <table> | ||
| <thead> | ||
| <tr> | ||
| <th>Name</th> | ||
| <th>Type</th> | ||
| <th>Attributes</th> | ||
| <th>Default</th> | ||
| <th>Description</th> | ||
| </tr> | ||
| </thead> | ||
| <tbody> | ||
| <tr> | ||
| <td>algo</td> | ||
| <td> | ||
| <span>string</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td> | ||
| 'sha1' | ||
| </td> | ||
| <td>checksum algorithm, see options in crypto.getHashes()</td> | ||
| </tr> | ||
| <tr> | ||
| <td>encoding</td> | ||
| <td> | ||
| <span>string</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td> | ||
| 'base64' | ||
| </td> | ||
| <td>encoding of the resulting hash. One of 'base64', 'hex' or 'binary'</td> | ||
| </tr> | ||
| <tr> | ||
| <td>excludes</td> | ||
| <td> | ||
| <span>Array.<string></span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td> | ||
| [] | ||
| </td> | ||
| <td>Array of optional exclude file glob patterns, see minimatch doc</td> | ||
| </tr> | ||
| <tr> | ||
| <td>match.basename</td> | ||
| <td> | ||
| <span>bool</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td> | ||
| true | ||
| </td> | ||
| <td>Match the exclude patterns to the file/folder name</td> | ||
| </tr> | ||
| <tr> | ||
| <td>match.path</td> | ||
| <td> | ||
| <span>bool</span> | ||
| </td> | ||
| <td> | ||
| <optional><br> | ||
| </td> | ||
| <td> | ||
| true | ||
| </td> | ||
| <td>Match the exclude patterns to the file/folder path</td> | ||
| </tr> | ||
| </tbody> | ||
| </table> | ||
| ## Behavior | ||
| The behavior is documented and verified in the unit tests. Execute `npm test` or `mocha test`, and have a look at the _test_ subfolder. | ||
| ### Creating hashes over files | ||
| **The hashes are the same if:** | ||
| - A file is checked again | ||
| - Two files have the same name and content (but exist in different folders) | ||
| **The hashes are different if:** | ||
| - A file was renamed or its content was changed | ||
| - Two files have the same name but different content | ||
| - Two files have the same content but different names | ||
| ### Creating hashes over folders | ||
Content means in this case a folder's children - both the files and the subfolders with their children.
| **The hashes are the same if:** | ||
| - A folder is checked again | ||
| - Two folders have the same name and content (but have different parent folders) | ||
| **The hashes are different if:** | ||
| - A file somewhere in the directory structure was renamed or its content was changed | ||
| - Two folders have the same name but different content | ||
| - Two folders have the same content but different names | ||
| ## License | ||
| MIT, see LICENSE.txt |
"use strict"

var crypto = require('crypto');
var path = require('path');
// The folder-hash module under demonstration
var hashFile = require('./index.js');

// Show which digest algorithms this Node build supports (valid `algo` values)
console.log('Known hash algorithms: ', '\'' + crypto.getHashes().join('\', \'') + '\'');

// Log either the resolved hash tree or the rejection reason of a hashElement call
function checkPromise(promise) {
    promise.then(function (result) {
        console.log('Promise resolved:\n', result.toString(), '\n\n');
    },
    function (reason) {
        console.error('Promise rejected due to:\n', reason, '\n\n');
    });
}

var file = 'README.md';
var dir = __dirname;

console.log('\nCreate a hash over a single file:');
checkPromise(hashFile.hashElement(file, dir));

console.log('Create hash over a folder:');
//checkPromise(hashFile.hashElement(path.basename(dir), path.dirname(dir)));
checkPromise(hashFile.hashElement('test', dir));
| | ||
| var fs = require('graceful-fs'); | ||
| var path = require('path'); | ||
| var rmrf = require('rimraf'); | ||
// Public helpers used by the test suite to (re)build the sample folder tree
module.exports = {
    mkdirSync: mkdirSync,
    writeFileSync: writeFileSync,
    createTestFolderStructure: createTestFolderStructure
}
/**
 * structure created:
 * sample-folder
 * - file1
 * - file2
 * - subfolder1
 *   - file1
 *   - file2
 * - f2
 *   - afile
 *   - subfolder1
 *     - file1
 *     - file2
 *
 * @param {string} sampleFolder - root folder of the structure to (re)create
 * @returns {fn} a mocha-style hook `function (done)` that wipes and rebuilds the tree
 */
function createTestFolderStructure(sampleFolder) {
    var content1 = 'Hello this is some sample text.\nWith two lines';

    // FIX: removed the unused inner `ignoreExistsError` function (dead code,
    // shadowed by the module-level ignoreExistError helper) and the unused
    // `var folder` declaration.

    // Create a folder containing two files with identical content
    function dummyFolder(basepath) {
        mkdirSync(basepath);
        writeFileSync(path.join(basepath, 'file1'), content1);
        writeFileSync(path.join(basepath, 'file2'), content1);
    }

    return function (done) {
        // remove any leftovers from a previous run, then rebuild from scratch
        rmrf(sampleFolder, function () {
            dummyFolder(sampleFolder);
            dummyFolder(path.join(sampleFolder, 'subfolder1'));

            mkdirSync(path.join(sampleFolder, 'f2'));
            writeFileSync(path.join(sampleFolder, 'f2', 'file1'), 'another text');
            dummyFolder(path.join(sampleFolder, 'f2', 'subfolder1'));
            dummyFolder(path.join(sampleFolder, 'f2', 'subfolder2'));

            mkdirSync(path.join(sampleFolder, 'f3'));
            dummyFolder(path.join(sampleFolder, 'f3', 'subfolder1'))
            writeFileSync(path.join(sampleFolder, 'f3', 'subfolder1', 'file1'), 'This is another text');

            mkdirSync(path.join(sampleFolder, 'empty'));
            done();
        });
    }
}
/**
 * Invoke fn with the remaining arguments, swallowing EEXIST ("already
 * exists") errors and rethrowing everything else.
 *
 * BUG FIX: the parameter list previously declared `arg` twice
 * (`function ignoreExistError(fn, arg, arg)`), which is a SyntaxError in
 * strict mode and in ES modules; the extra parameters were unused anyway
 * because the body reads `arguments`.
 *
 * @param {fn} fn - the (synchronous) function to invoke
 * @returns {*} fn's return value, or undefined when an EEXIST error was swallowed
 * @throws {Error} when fn is not a function, or fn throws a non-EEXIST error
 */
function ignoreExistError(fn) {
    if (typeof fn !== 'function') throw new Error('The first argument must be of type function');
    var args = Array.prototype.slice.call(arguments, 1);
    try {
        return fn.apply(null, args);
    } catch (err) {
        if (err.code !== 'EEXIST') throw err;
        return undefined; // the element already exists - that is fine
    }
}
// Create a directory synchronously, ignoring "already exists" (EEXIST) errors.
function mkdirSync(folderpath) {
    return ignoreExistError(fs.mkdirSync, folderpath);
}
// Write a file synchronously, ignoring "already exists" (EEXIST) errors.
function writeFileSync(filepath, content) {
    return ignoreExistError(fs.writeFileSync, filepath, content);
}
-172
| | ||
if (typeof Promise === 'undefined') require('when/es6-shim/Promise');

var folderHash = require('../index');
var helper = require('./helper/helper.js');

var fs = require('graceful-fs');
var path = require('path');

var assert = require('assert');
var chai = require('chai');
var chaiAsPromised = require('chai-as-promised');
chai.use(chaiAsPromised);
chai.should();

var sampleFolder = 'sample-folder';
before(helper.createTestFolderStructure(sampleFolder));

describe('Initialization', function () {
    it('should throw an error if no name was passed', function () {
        // FIX: return the combined promise so mocha waits for the
        // chai-as-promised assertions (they were previously floating and
        // could never fail the test).
        return Promise.all([
            folderHash.hashElement().should.be.rejectedWith(TypeError),
            folderHash.hashElement(function () {}).should.be.rejectedWith(TypeError)
        ]);
    });
});

describe('Should generate hashes', function () {
    describe('when called as a promise', function () {
        it('with element and folder passed as two strings', function () {
            return folderHash.hashElement('file1', sampleFolder).should.eventually.have.property('hash');
        });

        it('with element path passed as one string', function () {
            return folderHash.hashElement(path.join(sampleFolder, 'file1')).should.eventually.have.property('hash');
        });

        it('with options passed', function () {
            var options = {
                algo: 'sha1',
                encoding: 'base64',
                excludes: [],
                match: {
                    basename: false,
                    path: false
                }
            };
            // FIX: restore the real assertion; the previous version only
            // logged the result/error to the console and asserted nothing.
            return folderHash.hashElement('file1', sampleFolder, options)
                .should.eventually.have.property('hash');
        });
    });

    describe('when executed with an error-first callback', function () {
        it('with element and folder passed as two strings', function (done) {
            folderHash.hashElement('file1', sampleFolder, function (err, hash) {
                if (err) throw err;
                else {
                    assert.ok(hash.hash);
                    done();
                }
            });
        });

        it('with element path passed as one string', function (done) {
            folderHash.hashElement(path.join(sampleFolder, 'file1'), function (err, hash) {
                if (err) throw err;
                else {
                    assert.ok(hash.hash);
                    done();
                }
            });
        });
    });

    describe('and', function () {
        it('should return a string representation', function () {
            // FIX: return the promise; without it the test always passed,
            // even when the assertions inside .then() failed.
            return folderHash.hashElement('./', sampleFolder)
                .then(function (hash) {
                    var str = hash.toString();
                    assert.ok(str);
                    assert.ok(str.length > 10);
                });
        });
    });
});

describe('Generating hashes over files, it', function () {
    var hash1;
    before(function () {
        return folderHash.hashElement('file1', sampleFolder).then(function (hash) {
            hash1 = hash;
        });
    });

    it('should return the same hash if a file was not changed', function () {
        return folderHash.hashElement('file1', sampleFolder).then(function (hash2) {
            return assert.equal(hash1.hash, hash2.hash);
        });
    });

    it('should return the same hash if a file has the same name and content, but exists in a different folder', function () {
        return folderHash.hashElement('file1', path.join(sampleFolder, 'subfolder1')).then(function (hash2) {
            return assert.equal(hash1.hash, hash2.hash);
        });
    });

    it('should return a different hash if the file has the same name but a different content', function () {
        return folderHash.hashElement('file1', path.join(sampleFolder, 'f2')).then(function (hash2) {
            return assert.notEqual(hash1.hash, hash2.hash);
        });
    });

    it('should return a different hash if the file has the same content but a different name', function () {
        return folderHash.hashElement('file2', sampleFolder).then(function (hash2) {
            return assert.notEqual(hash1.hash, hash2.hash);
        });
    });
});

describe('Generating a hash over a folder, it', function () {
    // every node in the returned tree must carry a hash
    function recAssertHash(hash) {
        assert.ok(hash.hash);
        if (hash.children && hash.children.length > 0) {
            hash.children.forEach(recAssertHash);
        }
    }

    it('generates a hash over the folder name and over the combination hashes of all its children', function () {
        return folderHash.hashElement('f2', sampleFolder).then(recAssertHash);
    });

    it('generates different hashes if the folders have the same content but different names', function () {
        return Promise.all([
            folderHash.hashElement('subfolder2', path.join(sampleFolder, 'f2')),
            folderHash.hashElement('subfolder1', sampleFolder)
        ]).then(function (hashes) {
            assert.ok(hashes.length > 1, 'should have returned at least two hashes');
            assert.notEqual(hashes[0].hash, hashes[1].hash);
        });
    });

    it('generates different hashes if the folders have the same name but different content (one file content changed)', function () {
        return Promise.all([
            folderHash.hashElement('subfolder1', path.join(sampleFolder, 'f3')),
            folderHash.hashElement('subfolder1', sampleFolder)
        ]).then(function (hashes) {
            assert.ok(hashes.length > 1, 'should have returned at least two hashes');
            assert.notEqual(hashes[0].hash, hashes[1].hash);
        });
    });

    it('generates the same hash if the folders have the same name and the same content', function () {
        return Promise.all([
            folderHash.hashElement('subfolder1', path.join(sampleFolder, 'f2')),
            folderHash.hashElement('subfolder1', sampleFolder)
        ]).then(function (hashes) {
            assert.ok(hashes.length > 1, 'should have returned at least two hashes');
            assert.equal(hashes[0].hash, hashes[1].hash);
        });
    });

    it('f2/subfolder1 should equal f3/subfolder1 if file1 is ignored', function () {
        return Promise.all([
            folderHash.hashElement(path.join(sampleFolder, 'f3/subfolder1'), { excludes: ['**/.*', 'file1'] }),
            folderHash.hashElement(path.join(sampleFolder, 'f2/subfolder1'), { excludes: ['**/.*', 'file1'] })
        ]).then(function (hashes) {
            assert.ok(hashes.length == 2, 'should have returned two hashes');
            assert.equal(hashes[0].hash, hashes[1].hash);
        });
    });
});
| { | ||
| "name": "temp-2017-10-06", | ||
| "version": "1.0.0", | ||
| "description": "", | ||
| "main": "index.js", | ||
| "scripts": { | ||
| "test": "echo \"Error: no test specified\" && exit 1" | ||
| }, | ||
| "keywords": [], | ||
| "author": "", | ||
| "license": "ISC", | ||
| "dependencies": { | ||
| "folder-hash": "^1.1.1" | ||
| } | ||
| } |
var hasher = require('folder-hash');

// pass element name and folder path separately
// FIX: attach a .catch handler; the rejection was previously unhandled
hasher.hashElement('node_modules', __dirname).then(function (hash) {
    console.log('Result for folder "node_modules" in directory "' + __dirname + '":');
    console.log(hash.toString());
}).catch(function (error) {
    return console.error('hashing failed:', error);
});

// pass element path directly
hasher.hashElement(__dirname).then(function (hash) {
    console.log('Result for folder "' + __dirname + '":');
    console.log(hash.toString());
}).catch(function (error) {
    return console.error('hashing failed:', error);
});

// pass options (example: exclude dotFiles)
var options = { excludes: ['.*'], match: { basename: true, path: false } };
hasher.hashElement(__dirname, options)
    .then(function (hash) {
        console.log('Result for folder "' + __dirname + '":');
        console.log(hash.toString());
    })
    .catch(function (error) {
        return console.error('hashing failed:', error);
    });
var hasher = require('folder-hash');

// pass element name and folder path separately
hasher.hashElement('node_modules', __dirname, function (error, hash) {
    if (error) return console.error('hashing failed:', error);
    console.log('Result for folder "node_modules" in directory "' + __dirname + '":');
    console.log(hash.toString());
});

// pass element path directly
hasher.hashElement(__dirname, function (error, hash) {
    if (error) return console.error('hashing failed:', error);
    console.log('Result for folder "' + __dirname + '":');
    console.log(hash.toString());
});

// pass options (example: exclude dotFiles)
var options = { excludes: ['**/.*'], match: { basename: false, path: true } };
hasher.hashElement(__dirname, options, function (error, hash) {
    // FIX: this callback previously ignored the error argument entirely,
    // so a failure would crash on hash.toString() instead of being reported
    if (error) return console.error('hashing failed:', error);
    console.log('Result for folder "' + __dirname + '":');
    console.log(hash.toString());
});
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Mixed license
LicensePackage contains multiple licenses.
Found 1 instance in 1 package
0
-100%765
7.9%387
58.61%63248
-97.06%12
-33.33%+ Added
+ Added
+ Added
- Removed
- Removed