get-browser-fingerprint
Advanced tools
Comparing version 3.2.3 to 4.0.0
{ | ||
"name": "get-browser-fingerprint", | ||
"version": "3.2.3", | ||
"version": "4.0.0", | ||
"author": "Damiano Barbati <damiano.barbati@gmail.com> (https://github.com/damianobarbati)", | ||
@@ -21,7 +21,7 @@ "repository": "https://github.com/damianobarbati/get-browser-fingerprint", | ||
"devDependencies": { | ||
"@biomejs/biome": "^1.8.3", | ||
"@playwright/test": "^1.47.0", | ||
"@biomejs/biome": "^1.9.2", | ||
"@playwright/test": "^1.47.2", | ||
"serve": "^14.2.3", | ||
"vitest": "^2.0.5" | ||
"vitest": "^2.1.1" | ||
} | ||
} |
# get-browser-fingerprint | ||
Zero dependencies package exporting a single, fast (<15ms) and synchronous function which computes a browser fingerprint, without requiring any permission to the user. | ||
Zero-dependency package exporting a single, fast (<50ms) asynchronous function that returns a browser fingerprint, without requiring any permission from the user. | ||
@@ -10,3 +10,3 @@ ## Usage | ||
import getBrowserFingerprint from 'get-browser-fingerprint'; | ||
const fingerprint = getBrowserFingerprint(); | ||
const fingerprint = await getBrowserFingerprint(); | ||
console.log(fingerprint); | ||
@@ -16,9 +16,5 @@ ``` | ||
Options available: | ||
- `hardwareOnly` (default `false`): leverage only hardware info about device | ||
- `enableWebgl` (default `false`): enable webgl renderer, ~4x times slower but adds another deadly powerful hardware detection layer on top of canvas | ||
- `enableScreen` (default `true`): enable screen resolution detection, disable it if your userbase may use multiple screens | ||
- `debug`: log data used to generate fingerprint to console and add canvas/webgl canvas to body to see rendered image (default `false`) | ||
- `hardwareOnly` (default `true`): use only hardware info about device. | ||
- `debug` (default `false`): log data used to generate fingerprint to console and add canvas/webgl/audio elements to body. | ||
⚠️ Be careful: the strongest discriminating factor is the canvas token, which can't be computed on old devices (e.g. iPhone 6) — handle this case accordingly ⚠️ | ||
## Development | ||
@@ -28,5 +24,5 @@ | ||
```sh | ||
nvm install | ||
yarn install | ||
yarn test | ||
fnm install | ||
pnpm install | ||
pnpm test | ||
``` | ||
@@ -33,0 +29,0 @@ |
export interface FingerprintOptions { | ||
hardwareOnly?: boolean; | ||
enableWebgl?: boolean; | ||
enableScreen?: boolean; | ||
debug?: boolean; | ||
@@ -6,0 +4,0 @@ } |
132
src/index.js
@@ -1,4 +0,7 @@ | ||
const getBrowserFingerprint = ({ hardwareOnly = false, enableWebgl = false, enableScreen = true, debug = false } = {}) => { | ||
const getBrowserFingerprint = async ({ hardwareOnly = true, debug = false } = {}) => { | ||
const { cookieEnabled, deviceMemory, doNotTrack, hardwareConcurrency, language, languages, maxTouchPoints, platform, userAgent, vendor } = window.navigator; | ||
// we use screen info only on mobile, because on desktop the user may use multiple monitors | ||
const enableScreen = /Mobi|Android|iPhone|iPad|iPod/i.test(navigator.userAgent); | ||
const { width, height, colorDepth, pixelDepth } = enableScreen ? window.screen : {}; // undefined will remove this from the stringify down here | ||
@@ -11,7 +14,11 @@ const timezoneOffset = new Date().getTimezoneOffset(); | ||
const canvas = getCanvasID(debug); | ||
const webgl = enableWebgl ? getWebglID(debug) : undefined; // undefined will remove this from the stringify down here | ||
const webglInfo = enableWebgl ? getWebglInfo(debug) : undefined; // undefined will remove this from the stringify down here | ||
const audio = await getAudioID(debug); | ||
const audioInfo = getAudioInfo(); | ||
const webgl = getWebglID(debug); | ||
const webglInfo = getWebglInfo(); | ||
const data = hardwareOnly | ||
? JSON.stringify({ | ||
? { | ||
audioInfo, | ||
audio, | ||
canvas, | ||
@@ -30,4 +37,6 @@ colorDepth, | ||
width, | ||
}) | ||
: JSON.stringify({ | ||
} | ||
: { | ||
audioInfo, | ||
audio, | ||
canvas, | ||
@@ -54,9 +63,8 @@ colorDepth, | ||
width, | ||
}); | ||
}; | ||
const datastring = JSON.stringify(data, null, 4); | ||
if (debug) console.log("Fingerprint data:", JSON.stringify(data, null, 2)); | ||
if (debug) console.log("fingerprint data", datastring); | ||
const result = murmurhash3_32_gc(datastring); | ||
const payload = JSON.stringify(data, null, 2); | ||
const result = murmurhash3_32_gc(payload); | ||
return result; | ||
@@ -162,3 +170,3 @@ }; | ||
VENDOR: String(ctx.getParameter(ctx.VENDOR)), | ||
SUPORTED_EXTENSIONS: String(ctx.getSupportedExtensions()), | ||
SUPPORTED_EXTENSIONS: String(ctx.getSupportedExtensions()), | ||
}; | ||
@@ -172,2 +180,102 @@ | ||
// Collect static properties of an OfflineAudioContext: these vary across
// browser engines and audio stacks, adding entropy to the fingerprint.
// Returns null when the Web Audio API is unavailable (eg: non-browser env).
const getAudioInfo = () => {
  try {
    const Ctor = window.OfflineAudioContext || window.webkitOfflineAudioContext;
    // 1 channel, 1 second of samples (44100 frames) at CD-quality rate.
    const context = new Ctor(1, 44100, 44100);
    return {
      sampleRate: context.sampleRate,
      channelCount: context.destination.maxChannelCount,
      // NOTE(review): outputLatency is defined on AudioContext, not
      // OfflineAudioContext — likely undefined here and therefore dropped
      // by JSON.stringify downstream; confirm intent.
      outputLatency: context.outputLatency,
      state: context.state,
      baseLatency: context.baseLatency,
    };
  } catch {
    return null;
  }
};
// Render one second of a 440Hz sine tone through an OfflineAudioContext and
// hash the resulting samples: floating-point rounding in the audio pipeline
// differs per device/browser, making the summed output a discriminating signal.
// Resolves to null when offline rendering is unavailable (eg: non-browser env).
const getAudioID = async (debug) => {
  try {
    const Ctor = window.OfflineAudioContext || window.webkitOfflineAudioContext;
    const SAMPLE_RATE = 44100;
    const SAMPLE_COUNT = 44100; // one second of mono audio
    const context = new Ctor(1, SAMPLE_COUNT, SAMPLE_RATE);

    // Feed a fixed sine tone into the destination.
    const oscillator = context.createOscillator();
    oscillator.type = "sine";
    oscillator.frequency.value = 440;
    oscillator.connect(context.destination);
    oscillator.start();

    // Render offline, then fold every sample into a single number
    // (left-to-right, matching a reduce over the channel data).
    const renderedBuffer = await context.startRendering();
    const samples = renderedBuffer.getChannelData(0);
    let sum = 0;
    for (const sample of samples) sum += Math.abs(sample);

    if (debug) {
      // Expose the rendered audio on the page so it can be played back.
      const audioElement = document.createElement("audio");
      audioElement.controls = true;
      audioElement.src = URL.createObjectURL(bufferToWav(renderedBuffer));
      document.body.appendChild(audioElement);
    }

    return murmurhash3_32_gc(sum.toString());
  } catch {
    return null;
  }
};
// Encode an AudioBuffer as a 16-bit PCM WAV file wrapped in a Blob.
// Used only in debug mode so the rendered fingerprint audio can be played back.
const bufferToWav = (buffer) => {
  const numOfChannels = buffer.numberOfChannels;
  const bytesPerSample = 2; // 16-bit PCM
  const length = buffer.length * numOfChannels * bytesPerSample + 44; // data + 44-byte RIFF/WAVE header
  const wavBuffer = new ArrayBuffer(length);
  const view = new DataView(wavBuffer);
  // Write the canonical RIFF/WAVE header (all multi-byte fields little-endian).
  writeString(view, 0, "RIFF");
  view.setUint32(4, length - 8, true); // RIFF chunk size: file size minus the 8-byte "RIFF"+size prefix
  writeString(view, 8, "WAVE");
  writeString(view, 12, "fmt ");
  view.setUint32(16, 16, true); // fmt subchunk size for PCM
  view.setUint16(20, 1, true); // audio format 1 = uncompressed PCM
  view.setUint16(22, numOfChannels, true);
  view.setUint32(24, buffer.sampleRate, true);
  view.setUint32(28, buffer.sampleRate * numOfChannels * bytesPerSample, true); // byte rate
  view.setUint16(32, numOfChannels * bytesPerSample, true); // block align
  view.setUint16(34, 16, true); // bits per sample
  writeString(view, 36, "data");
  view.setUint32(40, length - 44, true); // data subchunk size
  // Hoist the channel views out of the sample loop: the original called
  // buffer.getChannelData(channel) once per sample per channel, which is
  // O(frames * channels) API calls for data that never changes.
  const channels = [];
  for (let channel = 0; channel < numOfChannels; channel++) {
    channels.push(buffer.getChannelData(channel));
  }
  // Write interleaved samples, clamped to [-1, 1] and scaled to signed 16-bit.
  let offset = 44;
  for (let i = 0; i < buffer.length; i++) {
    for (let channel = 0; channel < numOfChannels; channel++) {
      const sample = channels[channel][i];
      const intSample = Math.max(-1, Math.min(1, sample)) * 32767;
      view.setInt16(offset, intSample, true); // DataView truncates the float toward zero
      offset += 2;
    }
  }
  return new Blob([view], { type: "audio/wav" });
};
// Write a string into a DataView one byte per UTF-16 code unit, starting at
// the given offset (used for the ASCII tags of the WAV header).
const writeString = (view, offset, string) => {
  let index = 0;
  while (index < string.length) {
    view.setUint8(offset + index, string.charCodeAt(index));
    index += 1;
  }
};
const murmurhash3_32_gc = (key) => { | ||
@@ -174,0 +282,0 @@ const remainder = key.length & 3; // key.length % 4 |
11764
291
31