Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

dekkai

Package Overview
Dependencies
Maintainers
1
Versions
10
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

dekkai - npm Package Compare versions

Comparing version 0.3.2 to 0.3.3

src/wasm/.vscode/ipch/7e642673330b806e/types.ipch

2

package.json
{
"name": "dekkai",
"version": "0.3.2",
"version": "0.3.3",
"description": "Modern and fast, really fast, CSV parser for the browser and node.js",

@@ -5,0 +5,0 @@ "entry": "src/index.js",

import {DataFile} from '../data/DataFile';
import {binaryFromBlobs, defaultConfig, readHeader, sliceFile} from '../data/DataTools';
import {binaryChunksFromBlobs, defaultConfig, mergeChunksIntoBuffer, readHeader, sliceFile} from '../data/DataTools';
import {getDecoder} from '../data/Decoder';

@@ -12,3 +12,4 @@ import {BinaryRow} from './BinaryRow';

const blobs = await sliceFile(dataFile, offset, config);
const binary = await binaryFromBlobs(blobs, header, config);
const result = await binaryChunksFromBlobs(blobs, header, config);
const binary = await mergeChunksIntoBuffer(result.chunks, result.header, config);

@@ -15,0 +16,0 @@ return new BinaryTable(binary.header, binary.data, config);

@@ -315,3 +315,3 @@ import {WorkerPool} from '../workers/WorkerPool';

export async function binaryFromBlobs(blobs, header, config = defaultConfig) {
export async function binaryChunksFromBlobs(blobs, header, config = defaultConfig) {
const workerPool = WorkerPool.sharedInstance;

@@ -354,3 +354,2 @@ const promises = [];

const orderedResults = [];
const transferable = [];
for (let i = 0; i < results.length; ++i) {

@@ -362,3 +361,2 @@ binaryHeader.rowCount += results[i].header.rowCount;

orderedResults[results[i].index] = results[i];
transferable[results[i].index] = results[i].data;
}

@@ -375,2 +373,10 @@

return {
header: binaryHeader,
chunks: orderedResults,
};
}
export async function mergeChunksIntoBuffer(chunks, binaryHeader, config) {
const workerPool = WorkerPool.sharedInstance;
let buffer;

@@ -390,6 +396,6 @@ if (config.output && config.output.buffer) {

const promises = [];
let dataOffset = binaryHeader.dataOffset;
if (supportsSharedMemory && buffer instanceof SharedArrayBuffer) { // eslint-disable-line
promises.length = 0;
for (let i = 0; i < orderedResults.length; ++i) {
for (let i = 0; i < chunks.length; ++i) {
promises.push(workerPool.scheduleTask('mergeIntoBuffer', {

@@ -399,10 +405,14 @@ buffer,

dataOffset,
parsed: orderedResults[i],
}), [ orderedResults[i].data ]);
parsed: chunks[i],
}), [ chunks[i].data ]);
dataOffset += orderedResults[i].header.rowCount * binaryHeader.rowLength;
dataOffset += chunks[i].header.rowCount * binaryHeader.rowLength;
}
await Promise.all(promises);
} else {
transferable.push(buffer);
const transferable = [buffer];
for (let i = 0; i < chunks.length; ++i) {
transferable.push(chunks[i].data);
}
buffer = await workerPool.scheduleTask('mergeParsedResults', {

@@ -412,3 +422,3 @@ buffer,

dataOffset,
parsed: orderedResults,
parsed: chunks,
}, transferable);

@@ -415,0 +425,0 @@ }

@@ -7,3 +7,3 @@ import {WorkerPool} from './workers/WorkerPool';

import {DataFile} from './data/DataFile';
import {defaultConfig, readHeader, sliceFile, iterateBlobs} from './data/DataTools';
import * as DataTools from './data/DataTools';
import {BinaryTable} from './CSV/BinaryTable';

@@ -16,2 +16,3 @@

const initializedSymbol = Symbol('dekkai::initialized');
class dekkai {

@@ -22,2 +23,14 @@ constructor() {

get DataFile() {
return DataFile;
}
get DataTools() {
return DataTools;
}
get WorkerPool() {
return WorkerPool;
}
async init(workerCount = -1) {

@@ -74,6 +87,6 @@ if (!this[initializedSymbol]) {

const dataFile = new DataFile(file);
const config = Object.freeze(Object.assign({}, defaultConfig, options));
const {header, offset} = await readHeader(dataFile, config);
const blobs = await sliceFile(dataFile, offset, config);
await iterateBlobs(blobs, header, itr, config);
const config = Object.freeze(Object.assign({}, DataTools.defaultConfig, options));
const {header, offset} = await DataTools.readHeader(dataFile, config);
const blobs = await DataTools.sliceFile(dataFile, offset, config);
await DataTools.iterateBlobs(blobs, header, itr, config);
}

@@ -80,0 +93,0 @@

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc