@jsenv/filesystem - npm Package Compare versions

Comparing version 3.1.0 to 3.2.0

src/internal/guard_second_call.js

package.json
{
"name": "@jsenv/filesystem",
"version": "3.1.0",
"version": "3.2.0",
"description": "Collection of functions to interact with filesystem in Node.js",

@@ -5,0 +5,0 @@ "license": "MIT",

@@ -67,3 +67,3 @@ # Jsenv filesystem [![npm package](https://img.shields.io/npm/v/@jsenv/filesystem.svg?logo=npm&label=package)](https://www.npmjs.com/package/@jsenv/filesystem) [![github main](https://github.com/jsenv/filesystem/workflows/main/badge.svg)](https://github.com/jsenv/filesystem/actions?workflow=main) [![codecov coverage](https://codecov.io/gh/jsenv/filesystem/branch/main/graph/badge.svg)](https://codecov.io/gh/jsenv/filesystem)

const unregister = registerDirectoryLifecycle("file:///directory/", {
watchDescription: {
watchPatterns: {
"./**/*": true,

@@ -70,0 +70,0 @@ "./node_modules/": false,
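
The README example above reflects the rename of the watchDescription option to watchPatterns in 3.2.0. A minimal usage sketch assembled from that snippet and the callback payloads visible later in this diff (the import path and exact callback argument shapes are assumptions based on those payloads):

import { registerDirectoryLifecycle } from "@jsenv/filesystem"

// Watch everything under the directory except node_modules (sketch of the 3.2.0 API)
const unregister = registerDirectoryLifecycle("file:///directory/", {
  watchPatterns: {
    "./**/*": true,
    "./node_modules/": false,
  },
  added: ({ relativeUrl, type }) => {
    console.log(`added ${type} ${relativeUrl}`)
  },
  updated: ({ relativeUrl, mtime, previousMtime }) => {
    console.log(`updated ${relativeUrl} (${previousMtime} -> ${mtime})`)
  },
  removed: ({ relativeUrl }) => {
    console.log(`removed ${relativeUrl}`)
  },
  cooldownBetweenFileEvents: 100, // assumption: a debounce window in milliseconds
})
// later, to stop watching:
// unregister()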

export const fileSystemRootUrl =
process.platform === "win32" ? `file///${process.cwd()[0]}:/` : "file:///"
process.platform === "win32" ? `file:///${process.cwd()[0]}:/` : "file:///"
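
The one-character change above fixes the Windows branch of fileSystemRootUrl, which previously produced a string missing the ":" after the "file" scheme. A small illustration with hypothetical values (not package code):

// Hypothetical illustration of the fix above
const driveLetter = "C" // e.g. process.cwd()[0] on a Windows machine
const before = `file///${driveLetter}:/` // "file///C:/" - no scheme separator, not a valid URL
const after = `file:///${driveLetter}:/` // "file:///C:/" - a valid file URL root
new URL(after) // parses fine
// new URL(before) would throw because "file///C:/" has no "scheme:" prefix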

@@ -1,2 +0,2 @@

import { watch, openSync, closeSync } from "fs"
import { watch, openSync, closeSync } from "node:fs"

@@ -3,0 +3,0 @@ const isWindows = process.platform === "win32"

@@ -1,3 +0,2 @@

/* eslint-disable import/max-dependencies */
import { readdirSync } from "fs"
import { readdirSync, statSync } from "node:fs"
import {

@@ -8,9 +7,9 @@ normalizeStructuredMetaMap,

} from "@jsenv/url-meta"
import { assertAndNormalizeDirectoryUrl } from "./assertAndNormalizeDirectoryUrl.js"
import { statsToType } from "./internal/statsToType.js"
import { guardTooFastSecondCall } from "./internal/guard_second_call.js"
import { replaceBackSlashesWithSlashes } from "./internal/replaceBackSlashesWithSlashes.js"
import { entryToTypeOrNull } from "./internal/entryToTypeOrNull.js"
import { createWatcher } from "./internal/createWatcher.js"
import { trackRessources } from "./internal/trackRessources.js"
import { ensureUrlTrailingSlash } from "./internal/ensureUrlTrailingSlash.js"
import { resolveUrl } from "./resolveUrl.js"
import { assertAndNormalizeFileUrl } from "./assertAndNormalizeFileUrl.js"
import { urlToFileSystemPath } from "./urlToFileSystemPath.js"

@@ -26,6 +25,7 @@ import { urlToRelativeUrl } from "./urlToRelativeUrl.js"

{
debug = false,
added,
updated,
removed,
watchDescription = {
watchPatterns = {
"./**/*": true,

@@ -36,5 +36,11 @@ },

recursive = false,
// filesystem might dispatch more events than expected
// Code can use "cooldownBetweenFileEvents" to prevent that
// BUT it is UNADVISED to rely on this as explained later (search for "is lying" in this file)
// For this reason"cooldownBetweenFileEvents" should be reserved to scenarios
// like unit tests
cooldownBetweenFileEvents = 0,
},
) => {
const sourceUrl = ensureUrlTrailingSlash(assertAndNormalizeFileUrl(source))
const sourceUrl = assertAndNormalizeDirectoryUrl(source)
if (!undefinedOrFunction(added)) {

@@ -53,31 +59,67 @@ throw new TypeError(`added must be a function or undefined, got ${added}`)

}
if (cooldownBetweenFileEvents) {
if (added) {
added = guardTooFastSecondCall(added, cooldownBetweenFileEvents)
}
if (updated) {
updated = guardTooFastSecondCall(updated, cooldownBetweenFileEvents)
}
if (removed) {
removed = guardTooFastSecondCall(removed, cooldownBetweenFileEvents)
}
}
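
Version 3.2.0 wraps the added/updated/removed callbacks with guardTooFastSecondCall when cooldownBetweenFileEvents is set. The contents of src/internal/guard_second_call.js are not part of this comparison; a plausible sketch of such a guard, assuming it simply drops a call that arrives within the cooldown window:

// Hypothetical sketch; the real guard_second_call.js is not shown in this diff
const guardTooFastSecondCallSketch = (callback, cooldown) => {
  let previousCallMs = 0
  return (...args) => {
    const nowMs = Date.now()
    if (nowMs - previousCallMs < cooldown) {
      return // ignore a second call fired too soon after the previous one
    }
    previousCallMs = nowMs
    return callback(...args)
  }
}
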
const structuredMetaMap = normalizeStructuredMetaMap(
{ watch: watchDescription },
{ watch: watchPatterns },
sourceUrl,
)
const entryShouldBeWatched = ({ relativeUrl, type }) => {
const entryUrl = resolveUrl(relativeUrl, sourceUrl)
const getWatchPatternValue = ({ url, type }) => {
if (type === "directory") {
const canContainEntryToWatch = urlCanContainsMetaMatching({
url: `${entryUrl}/`,
let firstMeta = false
urlCanContainsMetaMatching({
url: `${url}/`,
structuredMetaMap,
predicate: ({ watch }) => watch,
predicate: ({ watch }) => {
if (watch) {
firstMeta = watch
}
return watch
},
})
return canContainEntryToWatch
return firstMeta
}
const entryMeta = urlToMeta({
url: entryUrl,
const filesystemEntryMeta = urlToMeta({
url,
structuredMetaMap,
})
return entryMeta.watch
return filesystemEntryMeta.watch
}
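
getWatchPatternValue above resolves an entry against the watch patterns compiled by normalizeStructuredMetaMap. A rough illustration of that lookup using the same @jsenv/url-meta calls seen in this diff (the expected results in the comment assume standard glob-like pattern semantics):

import { normalizeStructuredMetaMap, urlToMeta } from "@jsenv/url-meta"

// Rough illustration of the lookup performed by getWatchPatternValue
const structuredMetaMap = normalizeStructuredMetaMap(
  { watch: { "./**/*": true, "./node_modules/": false } },
  "file:///directory/",
)
const { watch } = urlToMeta({
  url: "file:///directory/src/main.js",
  structuredMetaMap,
})
// expected: watch === true for src/main.js, falsy for entries under node_modules/
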
const tracker = trackRessources()
const infoMap = new Map()
const readEntryInfo = (url) => {
try {
const relativeUrl = urlToRelativeUrl(url, source)
const previousInfo = infoMap.get(relativeUrl)
const stats = statSync(new URL(url))
const type = statsToType(stats)
const patternValue = previousInfo
? previousInfo.patternValue
: getWatchPatternValue({ url, type })
return {
previousInfo,
url,
relativeUrl,
type,
atimeMs: stats.atimeMs,
mtimeMs: stats.mtimeMs,
patternValue,
}
} catch (e) {
if (e.code === "ENOENT") {
return null
}
throw e
}
}
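
readEntryInfo relies on statsToType, imported from ./internal/statsToType.js but not shown in this comparison. A plausible sketch of that mapping (only "directory" is confirmed by the code in this diff; the other strings are assumptions):

// Hypothetical sketch; statsToType.js is not part of this comparison
const statsToTypeSketch = (stats) => {
  if (stats.isFile()) return "file"
  if (stats.isDirectory()) return "directory" // the only value the watcher branches on
  if (stats.isSymbolicLink()) return "symbolic-link"
  return null
}
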
const contentMap = new Map()
const handleDirectoryEvent = ({

@@ -91,11 +133,14 @@ directoryRelativeUrl,

handleChange(`${directoryRelativeUrl}/${filename}`)
} else {
handleChange(`${filename}`)
return
}
} else if ((removed || added) && eventType === "rename") {
handleChange(`${filename}`)
return
}
if (eventType === "rename") {
if (!removed && !added) {
return
}
// we might receive `rename` without filename
// in that case we try to find out ourselves which file was removed.
let relativeUrlCandidateArray = Array.from(contentMap.keys())
let relativeUrlCandidateArray = Array.from(infoMap.keys())
if (recursive && !fsWatchSupportsRecursive) {

@@ -106,10 +151,11 @@ relativeUrlCandidateArray = relativeUrlCandidateArray.filter(

// ensure entry is top level
if (relativeUrlCandidate.includes("/")) return false
if (relativeUrlCandidate.includes("/")) {
return false
}
return true
}
// entry not inside this directory
if (!relativeUrlCandidate.startsWith(directoryRelativeUrl))
if (!relativeUrlCandidate.startsWith(directoryRelativeUrl)) {
return false
}
const afterDirectory = relativeUrlCandidate.slice(

@@ -119,4 +165,5 @@ directoryRelativeUrl.length + 1,

// deep inside this directory
if (afterDirectory.includes("/")) return false
if (afterDirectory.includes("/")) {
return false
}
return true

@@ -126,16 +173,17 @@ },

}
const removedEntryRelativeUrl = relativeUrlCandidateArray.find(
(relativeUrlCandidate) => {
const entryUrl = resolveUrl(relativeUrlCandidate, sourceUrl)
const type = entryToTypeOrNull(entryUrl)
return type === null
try {
statSync(new URL(relativeUrlCandidate, sourceUrl))
return false
} catch (e) {
if (e.code === "ENOENT") {
return true
}
throw e
}
},
)
if (removedEntryRelativeUrl) {
handleEntryLost({
relativeUrl: removedEntryRelativeUrl,
type: contentMap.get(removedEntryRelativeUrl),
})
handleEntryLost(infoMap.get(removedEntryRelativeUrl))
}

@@ -146,43 +194,42 @@ }

const handleChange = (relativeUrl) => {
const entryUrl = resolveUrl(relativeUrl, sourceUrl)
const previousType = contentMap.get(relativeUrl)
const type = entryToTypeOrNull(entryUrl)
if (!entryShouldBeWatched({ relativeUrl, type })) {
const entryUrl = new URL(relativeUrl, sourceUrl).href
const entryInfo = readEntryInfo(entryUrl)
if (!entryInfo) {
const previousEntryInfo = infoMap.get(relativeUrl)
if (!previousEntryInfo) {
// on macOS it's possible to receive a "rename" event for
// a file that does not exist...
return
}
if (debug) {
console.debug(`"${relativeUrl}" removed`)
}
handleEntryLost(previousEntryInfo)
return
}
// it's something new
if (!previousType) {
if (type !== null) {
handleEntryFound({ relativeUrl, type, existent: false })
const { previousInfo } = entryInfo
if (!previousInfo) {
if (debug) {
console.debug(`"${relativeUrl}" added`)
}
handleEntryFound(entryInfo)
return
}
// it existed but now it's not here anymore
if (type === null) {
handleEntryLost({ relativeUrl, type: previousType })
if (entryInfo.type !== previousInfo.type) {
// it existed and was replaced by something else
// we don't handle this as an update. We rather say the ressource
// is lost and something else is found (call removed() then added())
handleEntryLost(previousInfo)
handleEntryFound(entryInfo)
return
}
// it existed and was replaced by something else
// we don't handle this as an update. We rather say the ressource
// is lost and something else is found (call removed() then added())
if (previousType !== type) {
handleEntryLost({ relativeUrl, type: previousType })
handleEntryFound({ relativeUrl, type })
if (entryInfo.type === "directory") {
// a directory cannot really be updated in a way that matters for us
// filesystem is trying to tell us the directory content has changed
// but we don't care about that
// we'll already be notified about what has changed
return
}
// a directory cannot really be updated in a way that matters for us
// filesystem is trying to tell us the directory content has changed
// but we don't care about that
// we'll already be notified about what has changed
if (type === "directory") {
return
}
// something has changed at this relativeUrl (the file existed and was not deleted)
// it's possible to get there and there is no real update
// it's possible to get there without a real update
// (file content is the same and file mtime is the same).

@@ -195,54 +242,67 @@ // In short filesystem is sometimes "lying"

// - filesystem did not send an event out of nowhere:
// something occurred but we don't know what with the information we have.
if (updated) {
updated({ relativeUrl, type })
// something occurred but we don't know exactly what
// maybe we should exclude some stuff as done in
// https://github.com/paulmillr/chokidar/blob/b2c4f249b6cfa98c703f0066fb4a56ccd83128b5/lib/nodefs-handler.js#L366
if (debug) {
console.debug(`"${relativeUrl}" modified`)
}
handleEntryUpdated(entryInfo)
}
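
As the comments above explain, the filesystem can emit a "change" event even when nothing measurable changed, so updated() may fire spuriously. Because 3.2.0 now passes mtime and previousMtime to the callback, a caller that cares can filter on its side; a minimal sketch of such caller code (not part of the package):

import { registerDirectoryLifecycle } from "@jsenv/filesystem" // import path assumed

registerDirectoryLifecycle("file:///directory/", {
  watchPatterns: { "./**/*": true },
  updated: ({ relativeUrl, mtime, previousMtime }) => {
    if (mtime === previousMtime) {
      return // mtime did not move, likely a spurious event
    }
    console.log(`${relativeUrl} modified`)
  },
})
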
const handleEntryFound = ({ relativeUrl, type, existent }) => {
if (!entryShouldBeWatched({ relativeUrl, type })) {
return
}
contentMap.set(relativeUrl, type)
const entryUrl = resolveUrl(relativeUrl, sourceUrl)
if (type === "directory") {
visitDirectory({
directoryUrl: `${entryUrl}/`,
entryFound: (entry) => {
handleEntryFound({
relativeUrl: `${relativeUrl}/${entry.relativeUrl}`,
type: entry.type,
existent,
const handleEntryFound = (entryInfo, { notify = true } = {}) => {
infoMap.set(entryInfo.relativeUrl, entryInfo)
if (entryInfo.type === "directory") {
const directoryUrl = `${entryInfo.url}/`
readdirSync(new URL(directoryUrl)).forEach((entryName) => {
const childEntryUrl = new URL(entryName, directoryUrl).href
const childEntryInfo = readEntryInfo(childEntryUrl)
if (childEntryInfo) {
handleEntryFound(childEntryInfo)
}
})
// we must watch manually every directory we find
if (!fsWatchSupportsRecursive) {
const watcher = createWatcher(urlToFileSystemPath(entryInfo.url), {
persistent: keepProcessAlive,
})
tracker.registerCleanupCallback(() => {
watcher.close()
})
watcher.on("change", (eventType, filename) => {
handleDirectoryEvent({
directoryRelativeUrl: entryInfo.relativeUrl,
filename: filename ? replaceBackSlashesWithSlashes(filename) : "",
eventType,
})
},
})
}
if (added) {
if (existent) {
if (notifyExistent) {
added({ relativeUrl, type, existent: true })
}
} else {
added({ relativeUrl, type })
})
}
}
// we must watch manually every directory we find
if (!fsWatchSupportsRecursive && type === "directory") {
const watcher = createWatcher(urlToFileSystemPath(entryUrl), {
persistent: keepProcessAlive,
if (added && entryInfo.patternValue && notify) {
added({
relativeUrl: entryInfo.relativeUrl,
type: entryInfo.type,
patternValue: entryInfo.patternValue,
mtime: entryInfo.mtimeMs,
})
tracker.registerCleanupCallback(() => {
watcher.close()
}
}
const handleEntryLost = (entryInfo) => {
infoMap.delete(entryInfo.relativeUrl)
if (removed && entryInfo.patternValue) {
removed({
relativeUrl: entryInfo.relativeUrl,
type: entryInfo.type,
patternValue: entryInfo.patternValue,
mtime: entryInfo.mtimeMs,
})
watcher.on("change", (eventType, filename) => {
handleDirectoryEvent({
directoryRelativeUrl: relativeUrl,
filename: filename ? replaceBackSlashesWithSlashes(filename) : "",
eventType,
})
}
}
const handleEntryUpdated = (entryInfo) => {
infoMap.set(entryInfo.relativeUrl, entryInfo)
if (updated && entryInfo.patternValue) {
updated({
relativeUrl: entryInfo.relativeUrl,
type: entryInfo.type,
patternValue: entryInfo.patternValue,
mtime: entryInfo.mtimeMs,
previousMtime: entryInfo.previousInfo.mtimeMs,
})

@@ -252,16 +312,22 @@ }

const handleEntryLost = ({ relativeUrl, type }) => {
contentMap.delete(relativeUrl)
if (removed) {
removed({ relativeUrl, type })
readdirSync(new URL(sourceUrl)).forEach((entry) => {
const entryUrl = new URL(entry, sourceUrl).href
const entryInfo = readEntryInfo(entryUrl)
if (entryInfo) {
handleEntryFound(entryInfo, {
notify: notifyExistent,
})
}
})
if (debug) {
const relativeUrls = Array.from(infoMap.keys())
if (relativeUrls.length === 0) {
console.debug(`No file found`)
} else {
console.debug(
`${relativeUrls.length} file found:
${relativeUrls.join("\n")}`,
)
}
}
visitDirectory({
directoryUrl: sourceUrl,
entryFound: ({ relativeUrl, type }) => {
handleEntryFound({ relativeUrl, type, existent: true })
},
})
const watcher = createWatcher(urlToFileSystemPath(sourceUrl), {

@@ -284,20 +350,4 @@ recursive: recursive && fsWatchSupportsRecursive,

const undefinedOrFunction = (value) =>
typeof value === "undefined" || typeof value === "function"
const visitDirectory = ({ directoryUrl, entryFound }) => {
const directoryPath = urlToFileSystemPath(directoryUrl)
readdirSync(directoryPath).forEach((entry) => {
const entryUrl = resolveUrl(entry, directoryUrl)
const type = entryToTypeOrNull(entryUrl)
if (type === null) {
return
}
const relativeUrl = urlToRelativeUrl(entryUrl, directoryUrl)
entryFound({
relativeUrl,
type,
})
})
const undefinedOrFunction = (value) => {
return typeof value === "undefined" || typeof value === "function"
}

@@ -304,0 +354,0 @@

@@ -1,3 +0,6 @@

import { dirname, basename } from "path"
import { entryToTypeOrNull } from "./internal/entryToTypeOrNull.js"
import { statSync } from "node:fs"
import { dirname, basename } from "node:path"
import { guardTooFastSecondCall } from "./internal/guard_second_call.js"
import { statsToType } from "./internal/statsToType.js"
import { createWatcher } from "./internal/createWatcher.js"

@@ -10,3 +13,10 @@ import { trackRessources } from "./internal/trackRessources.js"

source,
{ added, updated, removed, notifyExistent = false, keepProcessAlive = true },
{
added,
updated,
removed,
notifyExistent = false,
keepProcessAlive = true,
cooldownBetweenFileEvents = 0,
},
) => {

@@ -27,2 +37,13 @@ const sourceUrl = assertAndNormalizeFileUrl(source)

}
if (cooldownBetweenFileEvents) {
if (added) {
added = guardTooFastSecondCall(added, cooldownBetweenFileEvents)
}
if (updated) {
updated = guardTooFastSecondCall(updated, cooldownBetweenFileEvents)
}
if (removed) {
removed = guardTooFastSecondCall(removed, cooldownBetweenFileEvents)
}
}
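
registerFileLifecycle gains the same cooldownBetweenFileEvents option in 3.2.0. A minimal usage sketch, assuming registerFileLifecycle is a named export and that its return value is an unregister function like registerDirectoryLifecycle's (callback argument shapes are not visible in this diff):

import { registerFileLifecycle } from "@jsenv/filesystem"

// Sketch of the 3.2.0 options
const unregister = registerFileLifecycle("file:///directory/file.js", {
  added: () => console.log("file added"),
  updated: () => console.log("file updated"),
  removed: () => console.log("file removed"),
  notifyExistent: false, // presumably: do not call added() for a file that already exists
  keepProcessAlive: true,
  cooldownBetweenFileEvents: 100, // assumption: debounce window in milliseconds
})
// later, to stop watching:
// unregister()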

@@ -94,2 +115,14 @@ const tracker = trackRessources()

const entryToTypeOrNull = (url) => {
try {
const stats = statSync(new URL(url))
return statsToType(stats)
} catch (e) {
if (e.code === "ENOENT") {
return null
}
throw e
}
}
const undefinedOrFunction = (value) =>

@@ -96,0 +129,0 @@ typeof value === "undefined" || typeof value === "function"
