+44
-44
@@ -432,3 +432,3 @@ const Utils = require("./util"); | ||
| entry = new ZipEntry(); | ||
| entry.entryName = entryName; | ||
| entry.entryName = Utils.canonical(entryName); | ||
| } | ||
@@ -468,2 +468,4 @@ entry.comment = comment || ""; | ||
| if (!update) _zip.setEntry(entry); | ||
| return entry; | ||
| }, | ||
@@ -477,3 +479,3 @@ | ||
| getEntries: function (/**String*/ password) { | ||
| _zip.password=password; | ||
| _zip.password = password; | ||
| return _zip ? _zip.entries : []; | ||
@@ -641,2 +643,3 @@ }, | ||
| extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) { | ||
| if (typeof overwrite === "function" && !callback) callback = overwrite; | ||
| overwrite = get_Bool(overwrite, false); | ||
@@ -646,5 +649,11 @@ if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission; | ||
| if (!callback) { | ||
| callback = function (err) { | ||
| throw new Error(err); | ||
| }; | ||
| return new Promise((resolve, reject) => { | ||
| this.extractAllToAsync(targetPath, overwrite, keepOriginalPermission, function (err) { | ||
| if (err) { | ||
| reject(err); | ||
| } else { | ||
| resolve(this); | ||
| } | ||
| }); | ||
| }); | ||
| } | ||
@@ -663,3 +672,3 @@ if (!_zip) { | ||
| const dirEntries = []; | ||
| const fileEntries = new Set(); | ||
| const fileEntries = []; | ||
| _zip.entries.forEach((e) => { | ||
@@ -669,3 +678,3 @@ if (e.isDirectory) { | ||
| } else { | ||
| fileEntries.add(e); | ||
| fileEntries.push(e); | ||
| } | ||
@@ -690,43 +699,34 @@ }); | ||
| // callback wrapper, for some house keeping | ||
| const done = () => { | ||
| if (fileEntries.size === 0) { | ||
| callback(); | ||
| } | ||
| }; | ||
| // Extract file entries asynchronously | ||
| for (const entry of fileEntries.values()) { | ||
| const entryName = pth.normalize(canonical(entry.entryName.toString())); | ||
| const filePath = sanitize(targetPath, entryName); | ||
| entry.getDataAsync(function (content, err_1) { | ||
| if (err_1) { | ||
| callback(new Error(err_1)); | ||
| return; | ||
| } | ||
| if (!content) { | ||
| callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); | ||
| fileEntries.reverse().reduce(function (next, entry) { | ||
| return function (err) { | ||
| if (err) { | ||
| next(err); | ||
| } else { | ||
| // The reverse operation for attr depend on method addFile() | ||
| const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; | ||
| filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { | ||
| if (!succ) { | ||
| callback(getError("Unable to write file", filePath)); | ||
| return; | ||
| const entryName = pth.normalize(canonical(entry.entryName.toString())); | ||
| const filePath = sanitize(targetPath, entryName); | ||
| entry.getDataAsync(function (content, err_1) { | ||
| if (err_1) { | ||
| next(new Error(err_1)); | ||
| } else if (!content) { | ||
| next(new Error(Utils.Errors.CANT_EXTRACT_FILE)); | ||
| } else { | ||
| // The reverse operation for attr depend on method addFile() | ||
| const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; | ||
| filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { | ||
| if (!succ) { | ||
| next(getError("Unable to write file", filePath)); | ||
| } | ||
| filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { | ||
| if (err_2) { | ||
| next(getError("Unable to set times", filePath)); | ||
| } else { | ||
| next(); | ||
| } | ||
| }); | ||
| }); | ||
| } | ||
| filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { | ||
| if (err_2) { | ||
| callback(getError("Unable to set times", filePath)); | ||
| return; | ||
| } | ||
| // call the callback if it was last entry | ||
| done(); | ||
| fileEntries.delete(entry); | ||
| }); | ||
| }); | ||
| } | ||
| }); | ||
| } | ||
| // call the callback if fileEntries was empty | ||
| done(); | ||
| }; | ||
| }, callback)(); | ||
| }, | ||
@@ -733,0 +733,0 @@ |
+11
-11
@@ -28,3 +28,3 @@ var Utils = require("../util"), | ||
| var _dataHeader = {}; | ||
| var _localHeader = {}; | ||
@@ -165,7 +165,7 @@ function setTime(val) { | ||
| get encripted() { | ||
| get encrypted() { | ||
| return (_flags & 1) === 1; | ||
| }, | ||
| get entryHeaderSize() { | ||
| get centralHeaderSize() { | ||
| return Constants.CENHDR + _fnameLen + _extraLen + _comLen; | ||
@@ -175,10 +175,10 @@ }, | ||
| get realDataOffset() { | ||
| return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen; | ||
| return _offset + Constants.LOCHDR + _localHeader.fnameLen + _localHeader.extraLen; | ||
| }, | ||
| get dataHeader() { | ||
| return _dataHeader; | ||
| get localHeader() { | ||
| return _localHeader; | ||
| }, | ||
| loadDataHeaderFromBinary: function (/*Buffer*/ input) { | ||
| loadLocalHeaderFromBinary: function (/*Buffer*/ input) { | ||
| var data = input.slice(_offset, _offset + Constants.LOCHDR); | ||
@@ -189,3 +189,3 @@ // 30 bytes and should start with "PK\003\004" | ||
| } | ||
| _dataHeader = { | ||
| _localHeader = { | ||
| // version needed to extract | ||
@@ -249,3 +249,3 @@ version: data.readUInt16LE(Constants.LOCVER), | ||
| dataHeaderToBinary: function () { | ||
| localHeaderToBinary: function () { | ||
| // LOC header size (30 bytes) | ||
@@ -276,3 +276,3 @@ var data = Buffer.alloc(Constants.LOCHDR); | ||
| entryHeaderToBinary: function () { | ||
| centralHeaderToBinary: function () { | ||
| // CEN header size (46 bytes) | ||
@@ -338,3 +338,3 @@ var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); | ||
| offset: _offset, | ||
| entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) | ||
| centralHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) | ||
| }; | ||
@@ -341,0 +341,0 @@ }, |
@@ -130,2 +130,2 @@ var Utils = require("../util"), | ||
| }; | ||
| // Misspelled | ||
| // Misspelled |
@@ -1,11 +0,14 @@ | ||
| module.exports = function (/*Buffer*/ inbuf) { | ||
| const version = +(process.versions ? process.versions.node : "").split(".")[0] || 0; | ||
| module.exports = function (/*Buffer*/ inbuf, /*number*/ expectedLength) { | ||
| var zlib = require("zlib"); | ||
| const option = version >= 15 ? { maxOutputLength: expectedLength } : {}; | ||
| return { | ||
| inflate: function () { | ||
| return zlib.inflateRawSync(inbuf); | ||
| return zlib.inflateRawSync(inbuf, option); | ||
| }, | ||
| inflateAsync: function (/*Function*/ callback) { | ||
| var tmp = zlib.createInflateRaw(), | ||
| var tmp = zlib.createInflateRaw(option), | ||
| parts = [], | ||
@@ -12,0 +15,0 @@ total = 0; |
@@ -123,3 +123,3 @@ "use strict"; | ||
| // 2 byte data block (as per Info-Zip spec), otherwise check with the high byte of the header entry | ||
| const verifyByte = ((header.flags & 0x8) === 0x8) ? header.timeHighByte : header.crc >>> 24; | ||
| const verifyByte = (header.flags & 0x8) === 0x8 ? header.timeHighByte : header.crc >>> 24; | ||
@@ -126,0 +126,0 @@ //3. does password meet expectations |
+2
-2
| { | ||
| "name": "adm-zip", | ||
| "version": "0.5.12", | ||
| "version": "0.5.13", | ||
| "description": "Javascript implementation of zip for nodejs with support for electron original-fs. Allows user to create or extract zip files both in memory or to/from disk", | ||
@@ -40,3 +40,3 @@ "scripts": { | ||
| "engines": { | ||
| "node": ">=6.0" | ||
| "node": ">=12.0" | ||
| }, | ||
@@ -43,0 +43,0 @@ "devDependencies": { |
+1
-0
@@ -28,2 +28,3 @@ module.exports = { | ||
| CANT_OVERRIDE: "Target file already exists", | ||
| DISK_ENTRY_TOO_LARGE: "Number of disk entries is too large", | ||
| NO_ZIP: "No zip file was loaded", | ||
@@ -30,0 +31,0 @@ NO_ENTRY: "Entry doesn't exist", |
+64
-43
@@ -7,3 +7,3 @@ var Utils = require("./util"), | ||
| module.exports = function (/*Buffer*/ input) { | ||
| var _entryHeader = new Headers.EntryHeader(), | ||
| var _centralHeader = new Headers.EntryHeader(), | ||
| _entryName = Buffer.alloc(0), | ||
@@ -16,7 +16,8 @@ _comment = Buffer.alloc(0), | ||
| function getCompressedDataFromZip() { | ||
| if (!input || !Buffer.isBuffer(input)) { | ||
| //if (!input || !Buffer.isBuffer(input)) { | ||
| if (!input || !(input instanceof Uint8Array)) { | ||
| return Buffer.alloc(0); | ||
| } | ||
| _entryHeader.loadDataHeaderFromBinary(input); | ||
| return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize); | ||
| _centralHeader.loadLocalHeaderFromBinary(input); | ||
| return input.slice(_centralHeader.realDataOffset, _centralHeader.realDataOffset + _centralHeader.compressedSize); | ||
| } | ||
@@ -26,4 +27,4 @@ | ||
| // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written | ||
| if ((_entryHeader.flags & 0x8) !== 0x8) { | ||
| if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { | ||
| if ((_centralHeader.flags & 0x8) !== 0x8) { | ||
| if (Utils.crc32(data) !== _centralHeader.localHeader.crc) { | ||
| return false; | ||
@@ -59,12 +60,12 @@ } | ||
| if (_entryHeader.encripted) { | ||
| if (_centralHeader.encrypted) { | ||
| if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { | ||
| throw new Error("ADM-ZIP: Incompatible password parameter"); | ||
| } | ||
| compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass); | ||
| compressedData = Methods.ZipCrypto.decrypt(compressedData, _centralHeader, pass); | ||
| } | ||
| var data = Buffer.alloc(_entryHeader.size); | ||
| var data = Buffer.alloc(_centralHeader.size); | ||
| switch (_entryHeader.method) { | ||
| switch (_centralHeader.method) { | ||
| case Utils.Constants.STORED: | ||
@@ -81,3 +82,3 @@ compressedData.copy(data); | ||
| case Utils.Constants.DEFLATED: | ||
| var inflater = new Methods.Inflater(compressedData); | ||
| var inflater = new Methods.Inflater(compressedData, _centralHeader.size); | ||
| if (!async) { | ||
@@ -119,5 +120,5 @@ const result = inflater.inflate(data); | ||
| // Local file header | ||
| switch (_entryHeader.method) { | ||
| switch (_centralHeader.method) { | ||
| case Utils.Constants.STORED: | ||
| _entryHeader.compressedSize = _entryHeader.size; | ||
| _centralHeader.compressedSize = _centralHeader.size; | ||
@@ -134,3 +135,3 @@ compressedData = Buffer.alloc(uncompressedData.length); | ||
| var deflated = deflater.deflate(); | ||
| _entryHeader.compressedSize = deflated.length; | ||
| _centralHeader.compressedSize = deflated.length; | ||
| return deflated; | ||
@@ -140,3 +141,3 @@ } else { | ||
| compressedData = Buffer.alloc(data.length); | ||
| _entryHeader.compressedSize = data.length; | ||
| _centralHeader.compressedSize = data.length; | ||
| data.copy(compressedData); | ||
@@ -182,4 +183,4 @@ callback && callback(compressedData); | ||
| size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); | ||
| if (_entryHeader.size === Constants.EF_ZIP64_OR_32) { | ||
| _entryHeader.size = size; | ||
| if (_centralHeader.size === Constants.EF_ZIP64_OR_32) { | ||
| _centralHeader.size = size; | ||
| } | ||
@@ -189,4 +190,4 @@ } | ||
| compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); | ||
| if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { | ||
| _entryHeader.compressedSize = compressedSize; | ||
| if (_centralHeader.compressedSize === Constants.EF_ZIP64_OR_32) { | ||
| _centralHeader.compressedSize = compressedSize; | ||
| } | ||
@@ -196,4 +197,4 @@ } | ||
| offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); | ||
| if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) { | ||
| _entryHeader.offset = offset; | ||
| if (_centralHeader.offset === Constants.EF_ZIP64_OR_32) { | ||
| _centralHeader.offset = offset; | ||
| } | ||
@@ -203,4 +204,4 @@ } | ||
| diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); | ||
| if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { | ||
| _entryHeader.diskNumStart = diskNumStart; | ||
| if (_centralHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { | ||
| _centralHeader.diskNumStart = diskNumStart; | ||
| } | ||
@@ -221,3 +222,3 @@ } | ||
| _isDirectory = lastChar === 47 || lastChar === 92; | ||
| _entryHeader.fileNameLength = _entryName.length; | ||
| _centralHeader.fileNameLength = _entryName.length; | ||
| }, | ||
@@ -230,3 +231,3 @@ | ||
| _extra = val; | ||
| _entryHeader.extraLength = val.length; | ||
| _centralHeader.extraLength = val.length; | ||
| parseExtra(val); | ||
@@ -240,3 +241,3 @@ }, | ||
| _comment = Utils.toBuffer(val); | ||
| _entryHeader.commentLength = _comment.length; | ||
| _centralHeader.commentLength = _comment.length; | ||
| }, | ||
@@ -268,9 +269,9 @@ | ||
| if (!_isDirectory && uncompressedData.length) { | ||
| _entryHeader.size = uncompressedData.length; | ||
| _entryHeader.method = Utils.Constants.DEFLATED; | ||
| _entryHeader.crc = Utils.crc32(value); | ||
| _entryHeader.changed = true; | ||
| _centralHeader.size = uncompressedData.length; | ||
| _centralHeader.method = Utils.Constants.DEFLATED; | ||
| _centralHeader.crc = Utils.crc32(value); | ||
| _centralHeader.changed = true; | ||
| } else { | ||
| // folders and blank files should be stored | ||
| _entryHeader.method = Utils.Constants.STORED; | ||
| _centralHeader.method = Utils.Constants.STORED; | ||
| } | ||
@@ -280,3 +281,3 @@ }, | ||
| getData: function (pass) { | ||
| if (_entryHeader.changed) { | ||
| if (_centralHeader.changed) { | ||
| return uncompressedData; | ||
@@ -289,3 +290,3 @@ } else { | ||
| getDataAsync: function (/*Function*/ callback, pass) { | ||
| if (_entryHeader.changed) { | ||
| if (_centralHeader.changed) { | ||
| callback(uncompressedData); | ||
@@ -298,19 +299,19 @@ } else { | ||
| set attr(attr) { | ||
| _entryHeader.attr = attr; | ||
| _centralHeader.attr = attr; | ||
| }, | ||
| get attr() { | ||
| return _entryHeader.attr; | ||
| return _centralHeader.attr; | ||
| }, | ||
| set header(/*Buffer*/ data) { | ||
| _entryHeader.loadFromBinary(data); | ||
| _centralHeader.loadFromBinary(data); | ||
| }, | ||
| get header() { | ||
| return _entryHeader; | ||
| return _centralHeader; | ||
| }, | ||
| packHeader: function () { | ||
| packCentralHeader: function () { | ||
| // 1. create header (buffer) | ||
| var header = _entryHeader.entryHeaderToBinary(); | ||
| var header = _centralHeader.centralHeaderToBinary(); | ||
| var addpos = Utils.Constants.CENHDR; | ||
@@ -321,8 +322,8 @@ // 2. add file name | ||
| // 3. add extra data | ||
| if (_entryHeader.extraLength) { | ||
| if (_centralHeader.extraLength) { | ||
| _extra.copy(header, addpos); | ||
| addpos += _entryHeader.extraLength; | ||
| addpos += _centralHeader.extraLength; | ||
| } | ||
| // 4. add file comment | ||
| if (_entryHeader.commentLength) { | ||
| if (_centralHeader.commentLength) { | ||
| _comment.copy(header, addpos); | ||
@@ -333,2 +334,22 @@ } | ||
| packLocalHeader: function () { | ||
| let addpos = 0; | ||
| // 1. construct local header Buffer | ||
| const localHeaderBuf = _centralHeader.localHeaderToBinary(); | ||
| // 2. localHeader - crate header buffer | ||
| const localHeader = Buffer.alloc(localHeaderBuf.length + _entryName.length + _extra.length); | ||
| // 2.1 add localheader | ||
| localHeaderBuf.copy(localHeader, addpos); | ||
| addpos += localHeaderBuf.length; | ||
| // 2.2 add file name | ||
| _entryName.copy(localHeader, addpos); | ||
| addpos += _entryName.length; | ||
| // 2.3 add extra field | ||
| _extra.copy(localHeader, addpos); | ||
| addpos += _extra.length; | ||
| return localHeader; | ||
| }, | ||
| toJSON: function () { | ||
@@ -344,3 +365,3 @@ const bytes = function (nr) { | ||
| isDirectory: this.isDirectory, | ||
| header: _entryHeader.toJSON(), | ||
| header: _centralHeader.toJSON(), | ||
| compressedData: bytes(input), | ||
@@ -347,0 +368,0 @@ data: bytes(uncompressedData) |
+34
-35
@@ -37,3 +37,3 @@ const ZipEntry = require("./zipEntry"); | ||
| index += entry.header.entryHeaderSize; | ||
| index += entry.header.centralHeaderSize; | ||
@@ -47,2 +47,5 @@ callback(entry); | ||
| entryTable = {}; | ||
| if (mainHeader.diskEntries > (inBuffer.length - mainHeader.offset) / Utils.Constants.CENHDR) { | ||
| throw new Error(Utils.Errors.DISK_ENTRY_TOO_LARGE); | ||
| } | ||
| entryList = new Array(mainHeader.diskEntries); // total number of entries | ||
@@ -63,3 +66,3 @@ var index = mainHeader.offset; // offset of first CEN header | ||
| index += entry.header.entryHeaderSize; | ||
| index += entry.header.centralHeaderSize; | ||
@@ -249,3 +252,3 @@ entryList[i] = entry; | ||
| const dataBlock = []; | ||
| const entryHeaders = []; | ||
| const headerBlocks = []; | ||
| let totalSize = 0; | ||
@@ -260,26 +263,21 @@ let dindex = 0; | ||
| const compressedData = entry.getCompressedData(); | ||
| // 1. construct data header | ||
| entry.header.offset = dindex; | ||
| const dataHeader = entry.header.dataHeaderToBinary(); | ||
| const entryNameLen = entry.rawEntryName.length; | ||
| // 1.2. postheader - data after data header | ||
| const postHeader = Buffer.alloc(entryNameLen + entry.extra.length); | ||
| entry.rawEntryName.copy(postHeader, 0); | ||
| entry.extra.copy(postHeader, entryNameLen); | ||
| // 1. construct local header | ||
| const localHeader = entry.packLocalHeader(); | ||
| // 2. offsets | ||
| const dataLength = dataHeader.length + postHeader.length + compressedData.length; | ||
| const dataLength = localHeader.length + compressedData.length; | ||
| dindex += dataLength; | ||
| // 3. store values in sequence | ||
| dataBlock.push(dataHeader); | ||
| dataBlock.push(postHeader); | ||
| dataBlock.push(localHeader); | ||
| dataBlock.push(compressedData); | ||
| // 4. construct entry header | ||
| const entryHeader = entry.packHeader(); | ||
| entryHeaders.push(entryHeader); | ||
| // 4. construct central header | ||
| const centralHeader = entry.packCentralHeader(); | ||
| headerBlocks.push(centralHeader); | ||
| // 5. update main header | ||
| mainHeader.size += entryHeader.length; | ||
| totalSize += dataLength + entryHeader.length; | ||
| mainHeader.size += centralHeader.length; | ||
| totalSize += dataLength + centralHeader.length; | ||
| } | ||
@@ -300,3 +298,3 @@ | ||
| // write central directory entries | ||
| for (const content of entryHeaders) { | ||
| for (const content of headerBlocks) { | ||
| content.copy(outBuffer, dindex); | ||
@@ -324,3 +322,3 @@ dindex += content.length; | ||
| const dataBlock = []; | ||
| const entryHeaders = []; | ||
| const centralHeaders = []; | ||
| let totalSize = 0; | ||
@@ -333,4 +331,4 @@ let dindex = 0; | ||
| const compress2Buffer = function (entryLists) { | ||
| if (entryLists.length) { | ||
| const entry = entryLists.pop(); | ||
| if (entryLists.length > 0) { | ||
| const entry = entryLists.shift(); | ||
| const name = entry.entryName + entry.extra.toString(); | ||
@@ -340,19 +338,20 @@ if (onItemStart) onItemStart(name); | ||
| if (onItemEnd) onItemEnd(name); | ||
| entry.header.offset = dindex; | ||
| // data header | ||
| const dataHeader = entry.header.dataHeaderToBinary(); | ||
| const postHeader = Buffer.alloc(name.length, name); | ||
| const dataLength = dataHeader.length + postHeader.length + compressedData.length; | ||
| // 1. construct local header | ||
| const localHeader = entry.packLocalHeader(); | ||
| // 2. offsets | ||
| const dataLength = localHeader.length + compressedData.length; | ||
| dindex += dataLength; | ||
| dataBlock.push(dataHeader); | ||
| dataBlock.push(postHeader); | ||
| // 3. store values in sequence | ||
| dataBlock.push(localHeader); | ||
| dataBlock.push(compressedData); | ||
| const entryHeader = entry.packHeader(); | ||
| entryHeaders.push(entryHeader); | ||
| mainHeader.size += entryHeader.length; | ||
| totalSize += dataLength + entryHeader.length; | ||
| // central header | ||
| const centalHeader = entry.packCentralHeader(); | ||
| centralHeaders.push(centalHeader); | ||
| mainHeader.size += centalHeader.length; | ||
| totalSize += dataLength + centalHeader.length; | ||
@@ -372,3 +371,3 @@ compress2Buffer(entryLists); | ||
| }); | ||
| entryHeaders.forEach(function (content) { | ||
| centralHeaders.forEach(function (content) { | ||
| content.copy(outBuffer, dindex); // write central directory entries | ||
@@ -389,3 +388,3 @@ dindex += content.length; | ||
| compress2Buffer(entryList); | ||
| compress2Buffer(Array.from(entryList)); | ||
| } catch (e) { | ||
@@ -392,0 +391,0 @@ onFail(e); |
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
104992 (1.25%)
2437 (0.83%)