Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@dsnp/parquetjs

Package Overview
Dependencies
Maintainers
2
Versions
93
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@dsnp/parquetjs - npm Package Compare versions

Comparing version 0.0.0-ba6065 to 0.0.0-f60585

dist/browser/parquet.js

125

dist/lib/bloom/sbbf.js
"use strict";
// TypeScript-emitted "__awaiter" helper: drives a generator function as if it
// were an async function, awaiting each yielded value before resuming the
// generator (downlevel async/await support).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    var Ctor = P || (P = Promise);
    return new Ctor(function (resolve, reject) {
        // Wrap a plain yielded value in a promise; pass promise instances through.
        function adopt(value) {
            return value instanceof P ? value : new P(function (res) { res(value); });
        }
        // Resume the generator with the settled value and dispatch the result.
        function onFulfilled(value) {
            try { settle(iterator.next(value)); } catch (e) { reject(e); }
        }
        function onRejected(err) {
            try { settle(iterator["throw"](err)); } catch (e) { reject(e); }
        }
        // Either finish with the generator's return value or chain the next step.
        function settle(result) {
            if (result.done) {
                resolve(result.value);
            } else {
                adopt(result.value).then(onFulfilled, onRejected);
            }
        }
        var iterator = generator.apply(thisArg, _arguments || []);
        settle(iterator.next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -46,13 +37,29 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

class SplitBlockBloomFilter {
constructor() {
/**
* Instance state. All fields start at their defaults and are reconfigured
* via the filter's setter/init methods before use.
*/
// Backing storage for the filter blocks; empty until the filter is initialized.
this.splitBlockFilter = [];
// Target false-positive rate used when sizing the filter.
this.desiredFalsePositiveRate = SplitBlockBloomFilter.DEFAULT_FALSE_POSITIVE_RATE;
// Number of blocks in the filter; 0 until initialized.
this.numBlocks = 0;
// Expected number of distinct values to be inserted.
this.numDistinctValues = SplitBlockBloomFilter.DEFAULT_DISTINCT_VALUES;
// Thrift descriptor recording that xxHash is the hashing strategy.
this.hashStrategy = new parquet_types_1.default.BloomFilterHash(new parquet_types_1.default.XxHash());
// Hasher used to produce 64-bit hashes of inserted values.
this.hasher = new xxhasher_1.default();
}
// Per-word salt constants from the split-block Bloom filter scheme; one per
// 32-bit word in a block, used to derive each word's bit position from the hash.
static salt = [
0x47b6137b,
0x44974d91,
0x8824ad5b,
0xa2b7289d,
0x705495c7,
0x2df1424b,
0x9efc4947,
0x5c6bfb31
];
// How many bits are in a single block:
// - Blocks are UInt32 arrays
// - There are 8 UInt32 words in each block.
static WORDS_PER_BLOCK = 8;
// Bits per UInt32 word.
static WORD_SIZE = 32;
static BITS_PER_BLOCK = SplitBlockBloomFilter.WORDS_PER_BLOCK * SplitBlockBloomFilter.WORD_SIZE;
// Default number of blocks in a Split Block Bloom filter (SBBF)
static NUMBER_OF_BLOCKS = 32;
// The lower bound of SBBF size in bytes.
// Currently this is 1024
static LOWER_BOUND_BYTES = SplitBlockBloomFilter.NUMBER_OF_BLOCKS * SplitBlockBloomFilter.BITS_PER_BLOCK / 8;
// The upper bound of SBBF size, set to default row group size in bytes.
// Note that the subsequent requirements for an effective bloom filter on a row group this size would mean this
// is unacceptably large for a lightweight client application.
static UPPER_BOUND_BYTES = 128 * 1024 * 1024;
static DEFAULT_FALSE_POSITIVE_RATE = 0.001;
static DEFAULT_DISTINCT_VALUES = 128 * 1024;
/**

@@ -161,5 +168,5 @@ * @function initBlock

for (let j = 0; j < this.WORD_SIZE; j++) {
const isSet = masked[i] & (Math.pow(2, j));
const isSet = masked[i] & (2 ** j);
if (isSet) {
b[i] = b[i] | (Math.pow(2, j));
b[i] = b[i] | (2 ** j);
}

@@ -183,5 +190,5 @@ }

for (let j = 0; j < this.WORD_SIZE; j++) {
const isSet = masked[i] & (Math.pow(2, j));
const isSet = masked[i] & (2 ** j);
if (isSet) {
const match = b[i] & (Math.pow(2, j));
const match = b[i] & (2 ** j);
if (!match) {

@@ -195,2 +202,11 @@ return false;

}
/**
* Instance
*/
splitBlockFilter = [];
desiredFalsePositiveRate = SplitBlockBloomFilter.DEFAULT_FALSE_POSITIVE_RATE;
numBlocks = 0;
numDistinctValues = SplitBlockBloomFilter.DEFAULT_DISTINCT_VALUES;
hashStrategy = new parquet_types_1.default.BloomFilterHash(new parquet_types_1.default.XxHash());
hasher = new xxhasher_1.default();
isInitialized() { return this.splitBlockFilter.length > 0; }

@@ -311,10 +327,8 @@ getFalsePositiveRate() { return this.desiredFalsePositiveRate; }

}
hash(value) {
return __awaiter(this, void 0, void 0, function* () {
if (!this.hashStrategy.hasOwnProperty("XXHASH")) {
throw new Error("unsupported hash strategy");
}
const hashed = yield this.hasher.hash64(value);
return Long.fromString(hashed, true, 16);
});
/**
* Hash a value to an unsigned 64-bit Long via the configured xxHash hasher.
* Throws if the configured hash strategy is not xxHash.
*/
async hash(value) {
if (!this.hashStrategy.hasOwnProperty("XXHASH")) {
throw new Error("unsupported hash strategy");
}
// hash64 yields a hex string; parse it as an unsigned 64-bit Long (radix 16).
const hashed = await this.hasher.hash64(value);
return Long.fromString(hashed, true, 16);
}

@@ -335,8 +349,6 @@ insertHash(hashValue) {

*/
insert(value) {
return __awaiter(this, void 0, void 0, function* () {
if (!this.isInitialized())
throw new Error("filter has not been initialized. call init() first");
this.insertHash(yield this.hash(value));
});
/**
* Hash `value` and set the corresponding bits in the filter.
* The filter must have been initialized with init() first.
*/
async insert(value) {
if (!this.isInitialized())
throw new Error("filter has not been initialized. call init() first");
this.insertHash(await this.hash(value));
}

@@ -358,38 +370,9 @@ checkHash(hashValue) {

*/
check(value) {
return __awaiter(this, void 0, void 0, function* () {
if (!this.isInitialized())
throw new Error("filter has not been initialized");
return this.checkHash(yield this.hash(value));
});
/**
* Test whether `value` may be in the filter (Bloom-filter semantics:
* false positives possible, false negatives are not).
* The filter must have been initialized first.
*/
async check(value) {
if (!this.isInitialized())
throw new Error("filter has not been initialized");
return this.checkHash(await this.hash(value));
}
}
// NOTE(review): these assignments duplicate the `static` class-field
// declarations inside SplitBlockBloomFilter — this looks like old/new
// compiler-output residue from a version diff; confirm only one form ships.
SplitBlockBloomFilter.salt = [
0x47b6137b,
0x44974d91,
0x8824ad5b,
0xa2b7289d,
0x705495c7,
0x2df1424b,
0x9efc4947,
0x5c6bfb31
];
// How many bits are in a single block:
// - Blocks are UInt32 arrays
// - There are 8 UInt32 words in each block.
SplitBlockBloomFilter.WORDS_PER_BLOCK = 8;
SplitBlockBloomFilter.WORD_SIZE = 32;
SplitBlockBloomFilter.BITS_PER_BLOCK = SplitBlockBloomFilter.WORDS_PER_BLOCK * SplitBlockBloomFilter.WORD_SIZE;
// Default number of blocks in a Split Block Bloom filter (SBBF)
SplitBlockBloomFilter.NUMBER_OF_BLOCKS = 32;
// The lower bound of SBBF size in bytes.
// Currently this is 1024
SplitBlockBloomFilter.LOWER_BOUND_BYTES = SplitBlockBloomFilter.NUMBER_OF_BLOCKS * SplitBlockBloomFilter.BITS_PER_BLOCK / 8;
// The upper bound of SBBF size, set to default row group size in bytes.
// Note that the subsequent requirements for an effective bloom filter on a row group this size would mean this
// is unacceptably large for a lightweight client application.
SplitBlockBloomFilter.UPPER_BOUND_BYTES = 128 * 1024 * 1024;
SplitBlockBloomFilter.DEFAULT_FALSE_POSITIVE_RATE = 0.001;
SplitBlockBloomFilter.DEFAULT_DISTINCT_VALUES = 128 * 1024;
exports.default = SplitBlockBloomFilter;
//# sourceMappingURL=sbbf.js.map
"use strict";
// TypeScript-emitted helper that runs a generator function as an async
// function, awaiting each yielded value (downlevel async/await support).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// TypeScript-emitted interop helper: ES-module namespaces pass through
// untouched; CommonJS exports are wrapped so `.default` access works uniformly.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const xxhash_wasm_1 = __importDefault(require("xxhash-wasm"));

@@ -27,6 +19,5 @@ const long_1 = __importDefault(require("long"));

class XxHasher {
hashit(value) {
return __awaiter(this, void 0, void 0, function* () {
return (yield XxHasher.h64)(value);
});
// Shared promise for the wasm-backed 64-bit xxHash function; the wasm module
// is initialized once per process and awaited on each use.
static h64 = xxhash_wasm_1.default().then(x => x.h64);
// Hash a string with xxHash64 once the wasm hasher has loaded.
async hashit(value) {
return (await XxHasher.h64)(value);
}

@@ -39,20 +30,17 @@ /**

*/
hash64(value) {
return __awaiter(this, void 0, void 0, function* () {
if (typeof value === 'string')
return this.hashit(value);
if (value instanceof Buffer ||
value instanceof Uint8Array ||
value instanceof long_1.default ||
typeof value === 'boolean' ||
typeof value === 'number' ||
typeof value === 'bigint') {
return this.hashit(value.toString());
}
throw new Error("unsupported type: " + value);
});
async hash64(value) {
if (typeof value === 'string')
return this.hashit(value);
if (value instanceof Buffer ||
value instanceof Uint8Array ||
value instanceof long_1.default ||
typeof value === 'boolean' ||
typeof value === 'number' ||
typeof value === 'bigint') {
return this.hashit(value.toString());
}
throw new Error("unsupported type: " + value);
}
}
XxHasher.h64 = xxhash_wasm_1.default().then(x => x.h64);
module.exports = XxHasher;
exports.default = XxHasher;
//# sourceMappingURL=xxhasher.js.map
"use strict";
// TypeScript-emitted helper that runs a generator function as an async
// function, awaiting each yielded value (downlevel async/await support).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -43,7 +34,7 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.parseBloomFilterOffsets = parseBloomFilterOffsets;
const getBloomFilterHeader = (offsetBytes, envelopeReader) => __awaiter(void 0, void 0, void 0, function* () {
const getBloomFilterHeader = async (offsetBytes, envelopeReader) => {
const headerByteSizeEstimate = 200;
let bloomFilterHeaderData;
try {
bloomFilterHeaderData = yield envelopeReader.read(offsetBytes, headerByteSizeEstimate);
bloomFilterHeaderData = await envelopeReader.read(offsetBytes, headerByteSizeEstimate);
}

@@ -59,9 +50,9 @@ catch (e) {

};
});
const readFilterData = (offsetBytes, envelopeReader) => __awaiter(void 0, void 0, void 0, function* () {
const { bloomFilterHeader, sizeOfBloomFilterHeader, } = yield getBloomFilterHeader(offsetBytes, envelopeReader);
};
const readFilterData = async (offsetBytes, envelopeReader) => {
const { bloomFilterHeader, sizeOfBloomFilterHeader, } = await getBloomFilterHeader(offsetBytes, envelopeReader);
const { numBytes: filterByteSize } = bloomFilterHeader;
try {
const filterBlocksOffset = offsetBytes + sizeOfBloomFilterHeader;
const buffer = yield envelopeReader.read(filterBlocksOffset, filterByteSize);
const buffer = await envelopeReader.read(filterBlocksOffset, filterByteSize);
return buffer;

@@ -72,3 +63,3 @@ }

}
});
};
const readFilterDataFrom = (offsets, envelopeReader) => {

@@ -81,7 +72,7 @@ return Promise.all(offsets.map((offset) => readFilterData(offset, envelopeReader)));

exports.siftAllByteOffsets = siftAllByteOffsets;
const getBloomFiltersFor = (columnNames, envelopeReader) => __awaiter(void 0, void 0, void 0, function* () {
const getBloomFiltersFor = async (columnNames, envelopeReader) => {
const columnChunkDataCollection = envelopeReader.getAllColumnChunkDataFor(columnNames);
const bloomFilterOffsetData = exports.siftAllByteOffsets(columnChunkDataCollection);
const offsetByteValues = bloomFilterOffsetData.map(({ offsetBytes }) => offsetBytes);
const filterBlocksBuffers = yield readFilterDataFrom(offsetByteValues, envelopeReader);
const filterBlocksBuffers = await readFilterDataFrom(offsetByteValues, envelopeReader);
return filterBlocksBuffers.map((buffer, index) => {

@@ -95,4 +86,4 @@ const { columnName, rowGroupIndex } = bloomFilterOffsetData[index];

});
});
};
exports.getBloomFiltersFor = getBloomFiltersFor;
//# sourceMappingURL=bloomFilterReader.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
module.exports = class BufferReader {

@@ -21,55 +12,51 @@ constructor(envelopeReader, options) {

}
read(offset, length) {
return __awaiter(this, void 0, void 0, function* () {
if (!this.scheduled) {
this.scheduled = true;
setTimeout(() => {
this.scheduled = false;
this.processQueue();
}, this.queueWait);
}
return new Promise((resolve, reject) => {
this.queue.push({ offset, length, resolve, reject });
});
/**
* Queue a read of `length` bytes at `offset`. Reads are batched: the first
* call schedules a single flush via processQueue() after `queueWait` ms, and
* every call made before that flush joins the same batch.
*/
async read(offset, length) {
if (!this.scheduled) {
this.scheduled = true;
// One pending flush at a time; it re-arms itself on the next read.
setTimeout(() => {
this.scheduled = false;
this.processQueue();
}, this.queueWait);
}
// Resolution happens later, when processQueue() services this entry.
return new Promise((resolve, reject) => {
this.queue.push({ offset, length, resolve, reject });
});
}
processQueue() {
return __awaiter(this, void 0, void 0, function* () {
const queue = this.queue;
if (!queue.length)
async processQueue() {
const queue = this.queue;
if (!queue.length)
return;
this.queue = [];
queue.sort((a, b) => a.offset - b.offset);
var subqueue = [];
const readSubqueue = async () => {
if (!subqueue.length) {
return;
this.queue = [];
queue.sort((a, b) => a.offset - b.offset);
var subqueue = [];
const readSubqueue = () => __awaiter(this, void 0, void 0, function* () {
if (!subqueue.length) {
return;
}
const processQueue = subqueue;
subqueue = [];
const lastElement = processQueue[processQueue.length - 1];
const start = processQueue[0].offset;
const finish = lastElement.offset + lastElement.length;
const buffer = yield this.envelopeReader.readFn(start, finish - start);
processQueue.forEach((d) => __awaiter(this, void 0, void 0, function* () {
d.resolve(buffer.slice(d.offset - start, d.offset + d.length - start));
}));
}
const processQueue = subqueue;
subqueue = [];
const lastElement = processQueue[processQueue.length - 1];
const start = processQueue[0].offset;
const finish = lastElement.offset + lastElement.length;
const buffer = await this.envelopeReader.readFn(start, finish - start);
processQueue.forEach(async (d) => {
d.resolve(buffer.slice(d.offset - start, d.offset + d.length - start));
});
queue.forEach((d, i) => {
const prev = queue[i - 1];
if (!prev || (d.offset - (prev.offset + prev.length)) < this.maxSpan) {
subqueue.push(d);
if ((d.offset + d.length) - subqueue[0].offset > this.maxLength) {
readSubqueue();
}
}
else {
};
queue.forEach((d, i) => {
const prev = queue[i - 1];
if (!prev || (d.offset - (prev.offset + prev.length)) < this.maxSpan) {
subqueue.push(d);
if ((d.offset + d.length) - subqueue[0].offset > this.maxLength) {
readSubqueue();
subqueue = [d];
}
});
readSubqueue(subqueue);
}
else {
readSubqueue();
subqueue = [d];
}
});
readSubqueue(subqueue);
}
};
//# sourceMappingURL=bufferReader.js.map
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const zlib = require('zlib');
const snappy = require('snappyjs');
const lzo = require('lzo');
const brotli = require('brotli');
const wasm_brotli_1 = require("wasm-brotli");
// LZO compression is disabled. See: https://github.com/LibertyDSNP/parquetjs/issues/18
const PARQUET_COMPRESSION_METHODS = {

@@ -19,6 +20,2 @@ 'UNCOMPRESSED': {

},
'LZO': {
deflate: deflate_lzo,
inflate: inflate_lzo
},
'BROTLI': {

@@ -32,3 +29,3 @@ deflate: deflate_brotli,

*/
function deflate(method, value) {
async function deflate(method, value) {
if (!(method in PARQUET_COMPRESSION_METHODS)) {

@@ -48,11 +45,9 @@ throw 'invalid compression method: ' + method;

}
function deflate_lzo(value) {
return lzo.compress(value);
}
function deflate_brotli(value) {
return Buffer.from(brotli.compress(value, {
async function deflate_brotli(value) {
const compressedContent = await wasm_brotli_1.compress(value, {
mode: 0,
quality: 8,
lgwin: 22
}));
});
return Buffer.from(compressedContent);
}

@@ -62,7 +57,7 @@ /**

*/
function inflate(method, value) {
async function inflate(method, value) {
if (!(method in PARQUET_COMPRESSION_METHODS)) {
throw 'invalid compression method: ' + method;
}
return PARQUET_COMPRESSION_METHODS[method].inflate(value);
return await PARQUET_COMPRESSION_METHODS[method].inflate(value);
}

@@ -78,9 +73,7 @@ function inflate_identity(value) {

}
function inflate_lzo(value) {
return lzo.decompress(value);
// Decompress a Brotli-compressed payload with the wasm implementation and
// return the result as a Node Buffer.
async function inflate_brotli(value) {
    return Buffer.from(await wasm_brotli_1.decompress(value));
}
function inflate_brotli(value) {
return Buffer.from(brotli.decompress(value));
}
module.exports = { PARQUET_COMPRESSION_METHODS, deflate, inflate };
//# sourceMappingURL=compression.js.map
'use strict';
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
const fs = require('fs');

@@ -61,14 +52,12 @@ const thrift = require('thrift');

*/
next() {
return __awaiter(this, void 0, void 0, function* () {
if (this.rowGroup.length === 0) {
if (this.rowGroupIndex >= this.metadata.row_groups.length) {
return null;
}
let rowBuffer = yield this.envelopeReader.readRowGroup(this.schema, this.metadata.row_groups[this.rowGroupIndex], this.columnList);
this.rowGroup = parquet_shredder.materializeRecords(this.schema, rowBuffer);
this.rowGroupIndex++;
async next() {
if (this.rowGroup.length === 0) {
if (this.rowGroupIndex >= this.metadata.row_groups.length) {
return null;
}
return this.rowGroup.shift();
});
let rowBuffer = await this.envelopeReader.readRowGroup(this.schema, this.metadata.row_groups[this.rowGroupIndex], this.columnList);
this.rowGroup = parquet_shredder.materializeRecords(this.schema, rowBuffer);
this.rowGroupIndex++;
}
return this.rowGroup.shift();
}

@@ -96,13 +85,9 @@ /**

*/
static openFile(filePath, options) {
return __awaiter(this, void 0, void 0, function* () {
let envelopeReader = yield ParquetEnvelopeReader.openFile(filePath, options);
return this.openEnvelopeReader(envelopeReader, options);
});
/**
* Open a parquet file from the local filesystem and return a ParquetReader.
*/
static async openFile(filePath, options) {
let envelopeReader = await ParquetEnvelopeReader.openFile(filePath, options);
return this.openEnvelopeReader(envelopeReader, options);
}
static openBuffer(buffer, options) {
return __awaiter(this, void 0, void 0, function* () {
let envelopeReader = yield ParquetEnvelopeReader.openBuffer(buffer, options);
return this.openEnvelopeReader(envelopeReader, options);
});
/**
* Open parquet data held in an in-memory buffer and return a ParquetReader.
*/
static async openBuffer(buffer, options) {
let envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer, options);
return this.openEnvelopeReader(envelopeReader, options);
}

@@ -114,7 +99,5 @@ /**

*/
static openS3(client, params, options) {
return __awaiter(this, void 0, void 0, function* () {
let envelopeReader = yield ParquetEnvelopeReader.openS3(client, params, options);
return this.openEnvelopeReader(envelopeReader, options);
});
/**
* Open a parquet object in S3 (via the given client/params) and return a
* ParquetReader.
*/
static async openS3(client, params, options) {
let envelopeReader = await ParquetEnvelopeReader.openS3(client, params, options);
return this.openEnvelopeReader(envelopeReader, options);
}

@@ -127,23 +110,19 @@ /**

*/
static openUrl(params, options) {
return __awaiter(this, void 0, void 0, function* () {
let envelopeReader = yield ParquetEnvelopeReader.openUrl(params, options);
return this.openEnvelopeReader(envelopeReader, options);
});
/**
* Open a parquet file served over HTTP(S) and return a ParquetReader.
*/
static async openUrl(params, options) {
let envelopeReader = await ParquetEnvelopeReader.openUrl(params, options);
return this.openEnvelopeReader(envelopeReader, options);
}
static openEnvelopeReader(envelopeReader, opts) {
return __awaiter(this, void 0, void 0, function* () {
if (opts && opts.metadata) {
return new ParquetReader(opts.metadata, envelopeReader, opts);
}
try {
yield envelopeReader.readHeader();
let metadata = yield envelopeReader.readFooter();
return new ParquetReader(metadata, envelopeReader, opts);
}
catch (err) {
yield envelopeReader.close();
throw err;
}
});
/**
* Build a ParquetReader from an already-open envelope reader. If metadata is
* supplied in `opts` it is used directly; otherwise the file header and
* footer are read. On failure the envelope reader is closed before rethrowing.
*/
static async openEnvelopeReader(envelopeReader, opts) {
if (opts && opts.metadata) {
return new ParquetReader(opts.metadata, envelopeReader, opts);
}
try {
await envelopeReader.readHeader();
let metadata = await envelopeReader.readFooter();
return new ParquetReader(metadata, envelopeReader, opts);
}
catch (err) {
// Don't leak the underlying file/connection if construction fails.
await envelopeReader.close();
throw err;
}
}

@@ -222,7 +201,5 @@ /**

}
getBloomFiltersFor(columnNames) {
return __awaiter(this, void 0, void 0, function* () {
const bloomFilterData = yield getBloomFiltersFor(columnNames, this.envelopeReader);
return groupBy(bloomFilterData, 'columnName');
});
/**
* Fetch the bloom filters for the named columns and group the results by
* column name.
*/
async getBloomFiltersFor(columnNames) {
const bloomFilterData = await getBloomFiltersFor(columnNames, this.envelopeReader);
return groupBy(bloomFilterData, 'columnName');
}

@@ -292,8 +269,6 @@ /**

*/
close() {
return __awaiter(this, void 0, void 0, function* () {
yield this.envelopeReader.close();
this.envelopeReader = null;
this.metadata = null;
});
/**
* Close the underlying envelope reader and drop references to it and the
* parsed metadata; the reader is unusable afterwards.
*/
async close() {
await this.envelopeReader.close();
this.envelopeReader = null;
this.metadata = null;
}

@@ -312,68 +287,60 @@ decodePages(buffer, opts) {

class ParquetEnvelopeReader {
static openFile(filePath, options) {
return __awaiter(this, void 0, void 0, function* () {
let fileStat = yield parquet_util.fstat(filePath);
let fileDescriptor = yield parquet_util.fopen(filePath);
let readFn = (offset, length, file) => {
if (file) {
return Promise.reject('external references are not supported');
}
return parquet_util.fread(fileDescriptor, offset, length);
};
let closeFn = parquet_util.fclose.bind(undefined, fileDescriptor);
return new ParquetEnvelopeReader(readFn, closeFn, fileStat.size, options);
});
/**
* Open a local parquet file as an envelope reader. Byte ranges are read via
* the opened file descriptor; external file references are rejected.
*/
static async openFile(filePath, options) {
let fileStat = await parquet_util.fstat(filePath);
let fileDescriptor = await parquet_util.fopen(filePath);
// Read `length` bytes at `offset`; `file` (an external reference) is unsupported.
let readFn = (offset, length, file) => {
if (file) {
return Promise.reject('external references are not supported');
}
return parquet_util.fread(fileDescriptor, offset, length);
};
// Release the file descriptor when the reader is closed.
let closeFn = parquet_util.fclose.bind(undefined, fileDescriptor);
return new ParquetEnvelopeReader(readFn, closeFn, fileStat.size, options);
}
static openBuffer(buffer, options) {
return __awaiter(this, void 0, void 0, function* () {
let readFn = (offset, length, file) => {
if (file) {
return Promise.reject('external references are not supported');
}
return Promise.resolve(buffer.slice(offset, offset + length));
};
let closeFn = () => ({});
return new ParquetEnvelopeReader(readFn, closeFn, buffer.length, options);
});
/**
* Wrap an in-memory buffer as an envelope reader. Reads are plain slices of
* the buffer; close is a no-op; external file references are rejected.
*/
static async openBuffer(buffer, options) {
let readFn = (offset, length, file) => {
if (file) {
return Promise.reject('external references are not supported');
}
return Promise.resolve(buffer.slice(offset, offset + length));
};
let closeFn = () => ({});
return new ParquetEnvelopeReader(readFn, closeFn, buffer.length, options);
}
static openS3(client, params, options) {
return __awaiter(this, void 0, void 0, function* () {
let fileStat = () => __awaiter(this, void 0, void 0, function* () { return client.headObject(params).promise().then(d => d.ContentLength); });
let readFn = (offset, length, file) => __awaiter(this, void 0, void 0, function* () {
if (file) {
return Promise.reject('external references are not supported');
}
let Range = `bytes=${offset}-${offset + length - 1}`;
let res = yield client.getObject(Object.assign({ Range }, params)).promise();
return Promise.resolve(res.Body);
});
let closeFn = () => ({});
return new ParquetEnvelopeReader(readFn, closeFn, fileStat, options);
});
/**
* Open a parquet object stored in S3. The object size comes from a lazy
* HeadObject call; reads use ranged GetObject requests. External file
* references are rejected.
*/
static async openS3(client, params, options) {
let fileStat = async () => client.headObject(params).promise().then(d => d.ContentLength);
let readFn = async (offset, length, file) => {
if (file) {
return Promise.reject('external references are not supported');
}
// Inclusive byte range for the ranged GetObject request.
let Range = `bytes=${offset}-${offset + length - 1}`;
let res = await client.getObject(Object.assign({ Range }, params)).promise();
return Promise.resolve(res.Body);
};
let closeFn = () => ({});
return new ParquetEnvelopeReader(readFn, closeFn, fileStat, options);
}
static openUrl(params, options) {
return __awaiter(this, void 0, void 0, function* () {
if (typeof params === 'string')
params = { url: params };
if (!params.url)
throw new Error('URL missing');
let base = params.url.split('/');
base = base.slice(0, base.length - 1).join('/') + '/';
let defaultHeaders = params.headers || {};
let filesize = () => __awaiter(this, void 0, void 0, function* () {
const { headers } = yield fetch(params.url);
return headers.get('Content-Length');
});
let readFn = (offset, length, file) => __awaiter(this, void 0, void 0, function* () {
let url = file ? base + file : params.url;
let range = `bytes=${offset}-${offset + length - 1}`;
let headers = Object.assign({}, defaultHeaders, { range });
const response = yield fetch(url, { headers });
const arrayBuffer = yield response.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
return buffer;
});
let closeFn = () => ({});
return new ParquetEnvelopeReader(readFn, closeFn, filesize, options);
});
/**
* Open a parquet file served over HTTP(S). `params` may be a URL string or
* an object with `url` and optional `headers`. Byte ranges are fetched with
* HTTP Range requests; the file size is taken lazily from Content-Length.
*/
static async openUrl(params, options) {
if (typeof params === 'string')
params = { url: params };
if (!params.url)
throw new Error('URL missing');
// Base URL (directory part) used to resolve external file references.
let base = params.url.split('/');
base = base.slice(0, base.length - 1).join('/') + '/';
let defaultHeaders = params.headers || {};
// Lazy file-size lookup; note Content-Length is returned as a string here.
let filesize = async () => {
const { headers } = await fetch(params.url);
return headers.get('Content-Length');
};
let readFn = async (offset, length, file) => {
let url = file ? base + file : params.url;
// Inclusive byte range for the HTTP Range request.
let range = `bytes=${offset}-${offset + length - 1}`;
let headers = Object.assign({}, defaultHeaders, { range });
const response = await fetch(url, { headers });
const arrayBuffer = await response.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
return buffer;
};
let closeFn = () => ({});
return new ParquetEnvelopeReader(readFn, closeFn, filesize, options);
}

@@ -483,45 +450,40 @@ constructor(readFn, closeFn, fileSize, options) {

}
readPage(column, page, records, opts) {
return __awaiter(this, void 0, void 0, function* () {
column = Object.assign({}, column);
column.meta_data = Object.assign({}, column.meta_data);
if (page.offset !== undefined) {
if (isNaN(page.offset) || isNaN(page.compressed_page_size)) {
throw Error('page offset and/or size missing');
}
column.meta_data.data_page_offset = page.offset;
column.meta_data.total_compressed_size = page.compressed_page_size;
async readPage(column, page, records, opts) {
column = Object.assign({}, column);
column.meta_data = Object.assign({}, column.meta_data);
if (page.offset !== undefined) {
if (isNaN(page.offset) || isNaN(page.compressed_page_size)) {
throw Error('page offset and/or size missing');
}
else {
const offsetIndex = column.offsetIndex || (yield this.readOffsetIndex(column, null, opts));
column.meta_data.data_page_offset = offsetIndex.page_locations[page].offset;
column.meta_data.total_compressed_size = offsetIndex.page_locations[page].compressed_page_size;
}
const chunk = yield this.readColumnChunk(this.schema, column);
Object.defineProperty(chunk, 'column', { value: column });
let data = {
columnData: { [chunk.column.meta_data.path_in_schema.join(',')]: chunk }
};
return parquet_shredder.materializeRecords(this.schema, data, records);
});
column.meta_data.data_page_offset = page.offset;
column.meta_data.total_compressed_size = page.compressed_page_size;
}
else {
const offsetIndex = column.offsetIndex || await this.readOffsetIndex(column, null, opts);
column.meta_data.data_page_offset = offsetIndex.page_locations[page].offset;
column.meta_data.total_compressed_size = offsetIndex.page_locations[page].compressed_page_size;
}
const chunk = await this.readColumnChunk(this.schema, column);
Object.defineProperty(chunk, 'column', { value: column });
let data = {
columnData: { [chunk.column.meta_data.path_in_schema.join(',')]: chunk }
};
return parquet_shredder.materializeRecords(this.schema, data, records);
}
readRowGroup(schema, rowGroup, columnList) {
return __awaiter(this, void 0, void 0, function* () {
var buffer = {
rowCount: +rowGroup.num_rows,
columnData: {}
};
for (let colChunk of rowGroup.columns) {
const colMetadata = colChunk.meta_data;
const colKey = colMetadata.path_in_schema;
if (columnList.length > 0 && parquet_util.fieldIndexOf(columnList, colKey) < 0) {
continue;
}
buffer.columnData[colKey] = yield this.readColumnChunk(schema, colChunk);
async readRowGroup(schema, rowGroup, columnList) {
var buffer = {
rowCount: +rowGroup.num_rows,
columnData: {}
};
for (let colChunk of rowGroup.columns) {
const colMetadata = colChunk.meta_data;
const colKey = colMetadata.path_in_schema;
if (columnList.length > 0 && parquet_util.fieldIndexOf(columnList, colKey) < 0) {
continue;
}
return buffer;
});
buffer.columnData[colKey] = await this.readColumnChunk(schema, colChunk);
}
return buffer;
}
readColumnChunk(schema, colChunk, opts) {
let dictionary = Promise.resolve();
async readColumnChunk(schema, colChunk, opts) {
let field = schema.findField(colChunk.meta_data.path_in_schema);

@@ -546,29 +508,28 @@ let type = parquet_util.getThriftEnum(parquet_thrift.Type, colChunk.meta_data.type);

const size = Math.min(+this.fileSize - offset, this.default_dictionary_size);
dictionary = this.read(offset, size, colChunk.file_path).then(buffer => decodePage({ offset: 0, buffer, size: buffer.length }, opts).dictionary);
await this.read(offset, size, colChunk.file_path).then(async (buffer) => {
await decodePage({ offset: 0, buffer, size: buffer.length }, opts).then(dict => {
opts.dictionary = opts.dictionary || dict.dictionary;
});
});
}
return dictionary.then(dict => {
opts.dictionary = opts.dictionary || dict;
return this.read(pagesOffset, pagesSize, colChunk.file_path).then(pagesBuf => decodePages(pagesBuf, opts));
});
return this.read(pagesOffset, pagesSize, colChunk.file_path).then(pagesBuf => decodePages(pagesBuf, opts));
}
readFooter() {
return __awaiter(this, void 0, void 0, function* () {
if (typeof this.fileSize === 'function') {
this.fileSize = yield this.fileSize();
}
let trailerLen = PARQUET_MAGIC.length + 4;
let trailerBuf = yield this.read(this.fileSize - trailerLen, trailerLen);
if (trailerBuf.slice(4).toString() != PARQUET_MAGIC) {
throw 'not a valid parquet file';
}
let metadataSize = trailerBuf.readUInt32LE(0);
let metadataOffset = this.fileSize - metadataSize - trailerLen;
if (metadataOffset < PARQUET_MAGIC.length) {
throw 'invalid metadata size';
}
let metadataBuf = yield this.read(metadataOffset, metadataSize);
let metadata = new parquet_thrift.FileMetaData();
parquet_util.decodeThrift(metadata, metadataBuf);
return metadata;
});
/**
* Read and thrift-decode the parquet file footer metadata. Validates the
* trailing magic bytes and the metadata offset before decoding.
*/
async readFooter() {
// fileSize may be a lazy getter (e.g. S3 HeadObject / HTTP Content-Length).
if (typeof this.fileSize === 'function') {
this.fileSize = await this.fileSize();
}
// Trailer layout: 4-byte little-endian metadata length + magic bytes.
let trailerLen = PARQUET_MAGIC.length + 4;
let trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
if (trailerBuf.slice(4).toString() != PARQUET_MAGIC) {
throw 'not a valid parquet file';
}
let metadataSize = trailerBuf.readUInt32LE(0);
let metadataOffset = this.fileSize - metadataSize - trailerLen;
// The metadata cannot start before the leading magic bytes.
if (metadataOffset < PARQUET_MAGIC.length) {
throw 'invalid metadata size';
}
let metadataBuf = await this.read(metadataOffset, metadataSize);
let metadata = new parquet_thrift.FileMetaData();
parquet_util.decodeThrift(metadata, metadataBuf);
return metadata;
}

@@ -613,3 +574,3 @@ }

}
function decodePage(cursor, opts) {
async function decodePage(cursor, opts) {
opts = opts || {};

@@ -627,3 +588,3 @@ let page;

}
page = decodeDataPage(cursor, pageHeader, opts);
page = await decodeDataPage(cursor, pageHeader, opts);
break;

@@ -634,7 +595,8 @@ case 'DATA_PAGE_V2':

}
page = decodeDataPageV2(cursor, pageHeader, opts);
page = await decodeDataPageV2(cursor, pageHeader, opts);
break;
case 'DICTIONARY_PAGE':
const dict = await decodeDictionaryPage(cursor, pageHeader, opts);
page = {
dictionary: decodeDictionaryPage(cursor, pageHeader, opts)
dictionary: dict
};

@@ -650,3 +612,3 @@ break;

}
function decodePages(buffer, opts) {
async function decodePages(buffer, opts) {
opts = opts || {};

@@ -666,3 +628,3 @@ let cursor = {

while (cursor.offset < cursor.size && (!opts.num_values || data.dlevels.length < opts.num_values)) {
const pageData = decodePage(cursor, opts);
const pageData = await decodePage(cursor, opts);
if (pageData.dictionary) {

@@ -688,3 +650,3 @@ opts.dictionary = pageData.dictionary;

}
function decodeDictionaryPage(cursor, header, opts) {
async function decodeDictionaryPage(cursor, header, opts) {
const cursorEnd = cursor.offset + header.compressed_page_size;

@@ -698,3 +660,3 @@ let dictCursor = {

if (opts.compression && opts.compression !== 'UNCOMPRESSED') {
let valuesBuf = parquet_compression.inflate(opts.compression, dictCursor.buffer.slice(dictCursor.offset, cursorEnd));
let valuesBuf = await parquet_compression.inflate(opts.compression, dictCursor.buffer.slice(dictCursor.offset, cursorEnd));
dictCursor = {

@@ -709,3 +671,3 @@ buffer: valuesBuf,

}
function decodeDataPage(cursor, header, opts) {
async function decodeDataPage(cursor, header, opts) {
const cursorEnd = cursor.offset + header.compressed_page_size;

@@ -716,3 +678,3 @@ let valueCount = header.data_page_header.num_values;

if (opts.compression && opts.compression !== 'UNCOMPRESSED') {
let valuesBuf = parquet_compression.inflate(opts.compression, cursor.buffer.slice(cursor.offset, cursorEnd));
let valuesBuf = await parquet_compression.inflate(opts.compression, cursor.buffer.slice(cursor.offset, cursorEnd));
valuesBufCursor = {

@@ -762,3 +724,3 @@ buffer: valuesBuf,

}
function decodeDataPageV2(cursor, header, opts) {
async function decodeDataPageV2(cursor, header, opts) {
const cursorEnd = cursor.offset + header.compressed_page_size;

@@ -793,3 +755,3 @@ const valueCount = header.data_page_header_v2.num_values;

if (header.data_page_header_v2.is_compressed) {
let valuesBuf = parquet_compression.inflate(opts.compression, cursor.buffer.slice(cursor.offset, cursorEnd));
let valuesBuf = await parquet_compression.inflate(opts.compression, cursor.buffer.slice(cursor.offset, cursorEnd));
valuesBufCursor = {

@@ -796,0 +758,0 @@ buffer: valuesBuf,

'use strict';
// TypeScript's down-level async/await helper (auto-generated boilerplate).
// Drives a generator compiled from an async function body: each `yield`ed
// value is adopted into a Promise, and the generator is resumed with the
// settled value (or thrown into on rejection) until it is done.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-Promise yields so every step can be chained uniformly.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the awaited value; surface sync throws as rejections.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Throw the rejection reason back into the generator (allows try/catch in async body).
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either settle the outer Promise (done) or schedule the next resume step.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        // Kick off execution with the original `this` and arguments.
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
const stream = require('stream');

@@ -48,7 +39,5 @@ const parquet_thrift = require('../gen-nodejs/parquet_types');

*/
static openFile(schema, path, opts) {
return __awaiter(this, void 0, void 0, function* () {
let outputStream = yield parquet_util.osopen(path, opts);
return ParquetWriter.openStream(schema, outputStream, opts);
});
static async openFile(schema, path, opts) {
let outputStream = await parquet_util.osopen(path, opts);
return ParquetWriter.openStream(schema, outputStream, opts);
}

@@ -59,10 +48,8 @@ /**

*/
static openStream(schema, outputStream, opts) {
return __awaiter(this, void 0, void 0, function* () {
if (!opts) {
opts = {};
}
let envelopeWriter = yield ParquetEnvelopeWriter.openStream(schema, outputStream, opts);
return new ParquetWriter(schema, envelopeWriter, opts);
});
static async openStream(schema, outputStream, opts) {
if (!opts) {
opts = {};
}
let envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);
return new ParquetWriter(schema, envelopeWriter, opts);
}

@@ -91,21 +78,19 @@ /**

*/
appendRow(row) {
return __awaiter(this, void 0, void 0, function* () {
if (this.closed) {
throw 'writer was closed';
}
parquet_shredder.shredRecord(this.schema, row, this.rowBuffer);
const options = {
useDataPageV2: this.envelopeWriter.useDataPageV2,
bloomFilters: this.envelopeWriter.bloomFilters
};
if (this.rowBuffer.pageRowCount >= this.envelopeWriter.pageSize) {
encodePages(this.schema, this.rowBuffer, options);
}
if (this.rowBuffer.rowCount >= this.rowGroupSize) {
encodePages(this.schema, this.rowBuffer, options);
yield this.envelopeWriter.writeRowGroup(this.rowBuffer);
this.rowBuffer = {};
}
});
async appendRow(row) {
if (this.closed) {
throw 'writer was closed';
}
parquet_shredder.shredRecord(this.schema, row, this.rowBuffer);
const options = {
useDataPageV2: this.envelopeWriter.useDataPageV2,
bloomFilters: this.envelopeWriter.bloomFilters
};
if (this.rowBuffer.pageRowCount >= this.envelopeWriter.pageSize) {
await encodePages(this.schema, this.rowBuffer, options);
}
if (this.rowBuffer.rowCount >= this.rowGroupSize) {
await encodePages(this.schema, this.rowBuffer, options);
await this.envelopeWriter.writeRowGroup(this.rowBuffer);
this.rowBuffer = {};
}
}

@@ -118,22 +103,20 @@ /**

*/
close(callback) {
return __awaiter(this, void 0, void 0, function* () {
if (this.closed) {
throw 'writer was closed';
}
this.closed = true;
if (this.rowBuffer.rowCount > 0 || this.rowBuffer.rowCount >= this.rowGroupSize) {
encodePages(this.schema, this.rowBuffer, { useDataPageV2: this.envelopeWriter.useDataPageV2, bloomFilters: this.envelopeWriter.bloomFilters });
yield this.envelopeWriter.writeRowGroup(this.rowBuffer);
this.rowBuffer = {};
}
yield this.envelopeWriter.writeBloomFilters();
yield this.envelopeWriter.writeIndex();
yield this.envelopeWriter.writeFooter(this.userMetadata);
yield this.envelopeWriter.close();
this.envelopeWriter = null;
if (callback) {
callback();
}
});
async close(callback) {
if (this.closed) {
throw 'writer was closed';
}
this.closed = true;
if (this.rowBuffer.rowCount > 0 || this.rowBuffer.rowCount >= this.rowGroupSize) {
await encodePages(this.schema, this.rowBuffer, { useDataPageV2: this.envelopeWriter.useDataPageV2, bloomFilters: this.envelopeWriter.bloomFilters });
await this.envelopeWriter.writeRowGroup(this.rowBuffer);
this.rowBuffer = {};
}
await this.envelopeWriter.writeBloomFilters();
await this.envelopeWriter.writeIndex();
await this.envelopeWriter.writeFooter(this.userMetadata);
await this.envelopeWriter.close();
this.envelopeWriter = null;
if (callback) {
callback();
}
}

@@ -173,8 +156,6 @@ /**

*/
static openStream(schema, outputStream, opts) {
return __awaiter(this, void 0, void 0, function* () {
let writeFn = parquet_util.oswrite.bind(undefined, outputStream);
let closeFn = parquet_util.osend.bind(undefined, outputStream);
return new ParquetEnvelopeWriter(schema, writeFn, closeFn, 0, opts);
});
static async openStream(schema, outputStream, opts) {
let writeFn = parquet_util.oswrite.bind(undefined, outputStream);
let closeFn = parquet_util.osend.bind(undefined, outputStream);
return new ParquetEnvelopeWriter(schema, writeFn, closeFn, 0, opts);
}

@@ -210,4 +191,4 @@ constructor(schema, writeFn, closeFn, fileOffset, opts) {

*/
writeRowGroup(records) {
let rgroup = encodeRowGroup(this.schema, records, {
async writeRowGroup(records) {
let rgroup = await encodeRowGroup(this.schema, records, {
baseOffset: this.offset,

@@ -342,3 +323,3 @@ pageSize: this.pageSize,

}
function encodePages(schema, rowBuffer, opts) {
async function encodePages(schema, rowBuffer, opts) {
if (!rowBuffer.pageRowCount) {

@@ -372,6 +353,6 @@ return;

if (opts.useDataPageV2) {
page = encodeDataPageV2(field, values.count, values.values, values.rlevels, values.dlevels, statistics);
page = await encodeDataPageV2(field, values.count, values.values, values.rlevels, values.dlevels, statistics);
}
else {
page = encodeDataPage(field, values.values, values.rlevels, values.dlevels, statistics);
page = await encodeDataPage(field, values.values, values.rlevels, values.dlevels, statistics);
}

@@ -399,3 +380,3 @@ let pages = rowBuffer.pages[field.path];

*/
function encodeDataPage(column, values, rlevels, dlevels, statistics) {
async function encodeDataPage(column, values, rlevels, dlevels, statistics) {
/* encode values */

@@ -417,3 +398,3 @@ let valuesBuf = encodeValues(column.primitiveType, column.encoding, values, {

let pageBody = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);
pageBody = parquet_compression.deflate(column.compression, pageBody);
pageBody = await parquet_compression.deflate(column.compression, pageBody);
let pageHeader = new parquet_thrift.PageHeader();

@@ -439,3 +420,3 @@ pageHeader.type = parquet_thrift.PageType['DATA_PAGE'];

*/
function encodeDataPageV2(column, rowCount, values, rlevels, dlevels, statistics) {
async function encodeDataPageV2(column, rowCount, values, rlevels, dlevels, statistics) {
/* encode values */

@@ -446,3 +427,3 @@ let valuesBuf = encodeValues(column.primitiveType, column.encoding, values, {

});
let valuesBufCompressed = parquet_compression.deflate(column.compression, valuesBuf);
let valuesBufCompressed = await parquet_compression.deflate(column.compression, valuesBuf);
/* encode repetition and definition levels */

@@ -493,3 +474,3 @@ let rLevelsBuf = Buffer.alloc(0);

*/
function encodeColumnChunk(pages, opts) {
async function encodeColumnChunk(pages, opts) {
let pagesBuf = Buffer.concat(pages.map(d => d.page));

@@ -507,3 +488,3 @@ let num_values = pages.reduce((p, d) => p + d.num_values, 0);

metadata.type = parquet_thrift.Type[opts.column.primitiveType];
metadata.codec = parquet_thrift.CompressionCodec[opts.column.compression];
metadata.codec = await parquet_thrift.CompressionCodec[opts.column.compression];
/* compile statistics ColumnIndex and OffsetIndex*/

@@ -567,3 +548,3 @@ let columnIndex = new parquet_thrift.ColumnIndex();

*/
function encodeRowGroup(schema, data, opts) {
async function encodeRowGroup(schema, data, opts) {
let metadata = new parquet_thrift.RowGroup();

@@ -578,3 +559,3 @@ metadata.num_rows = data.rowCount;

}
let cchunkData = encodeColumnChunk(data.pages[field.path], {
let cchunkData = await encodeColumnChunk(data.pages[field.path], {
column: field,

@@ -581,0 +562,0 @@ baseOffset: opts.baseOffset + body.length,

@@ -5,3 +5,3 @@ {

"main": "dist/parquet.js",
"version": "0.0.0-ba6065",
"version": "0.0.0-f60585",
"homepage": "https://github.com/LibertyDSNP/parquetjs",

@@ -18,3 +18,2 @@ "license": "MIT",

"dependencies": {
"@types/brotli": "^1.3.0",
"@types/bson": "^4.0.3",

@@ -24,31 +23,49 @@ "@types/long": "^4.0.1",

"@types/thrift": "^0.10.10",
"brotli": "^1.3.0",
"assert": "^2.0.0",
"browserify-zlib": "^0.2.0",
"bson": "^2.0.8",
"cross-fetch": "^3.1.4",
"esbuild": "^0.12.11",
"events": "^3.3.0",
"int53": "^0.2.4",
"lodash": "^4.17.21",
"long": "^4.0.0",
"lzo": "^0.4.0",
"object-stream": "0.0.1",
"path-browserify": "^1.0.1",
"readable-stream": "^3.6.0",
"snappyjs": "^0.6.0",
"thrift": "0.14.1",
"typescript": "^4.3.2",
"util": "^0.12.4",
"varint": "^5.0.0",
"wasm-brotli": "^2.0.2",
"xxhash-wasm": "^0.4.1"
},
"devDependencies": {
"@babel/core": "7.13.10",
"@babel/preset-env": "7.13.12",
"@babel/preset-typescript": "7.13.0",
"@babel/core": "^7.14.6",
"@babel/preset-env": "^7.14.7",
"@babel/preset-typescript": "^7.14.5",
"@types/chai": "^4.2.16",
"@types/mocha": "^8.2.2",
"@types/sinon": "^10.0.0",
"babel-loader": "^8.2.2",
"babel-plugin-add-module-exports": "^1.0.4",
"browserfs": "^1.4.3",
"buffer": "^6.0.3",
"chai": "4.3.4",
"core-js": "^3.15.1",
"mocha": "8.3.2",
"msw": "^0.29.0",
"process": "^0.11.10",
"regenerator-runtime": "^0.13.7",
"sinon": "^10.0.0",
"ts-node": "^9.1.1"
"source-map-loader": "^3.0.0",
"stream-browserify": "^3.0.0",
"ts-loader": "^9.2.3",
"ts-node": "^9.1.1",
"typescript": "^4.3.4"
},
"scripts": {
"build": "tsc -b",
"build": "npm run build:node",
"build:node": "tsc -b",
"build:browser": "node esbuild.js",
"type": "tsc --noEmit",

@@ -58,5 +75,16 @@ "lint": "echo 'Linting, it is on the TODO list...'",

"clean": "rm -Rf ./dist",
"prepublishOnly": "npm run clean && npm run build",
"thrift": "thrift -out gen-nodejs --gen js:ts parquet.thrift && thrift -out gen-nodejs --gen js:node parquet.thrift"
"prepublishOnly": "npm run clean && npm run build:node && npm run build:browser",
"thrift": "thrift -out gen-nodejs --gen js:ts parquet.thrift && thrift -out gen-nodejs --gen js:node parquet.thrift",
"serve": "node esbuild-serve.js"
},
"browser": {
"assert": "assert",
"events": "events",
"fs": "browserfs",
"path": "path-browserify",
"stream": "readable-stream",
"thrift": "./node_modules/thrift/lib/nodejs/lib/thrift/browser.js",
"util": "util",
"zlib": "browserify-zlib"
},
"engines": {

@@ -63,0 +91,0 @@ "node": ">=14.16.0"

@@ -17,4 +17,4 @@ # parquet.js

Forked Notice
-------------
## Forked Notice
This is a forked repository with code from various sources:

@@ -24,7 +24,5 @@ - Primary source [ironSource](https://github.com/ironSource/parquetjs) [npm: parquetjs](https://www.npmjs.com/package/parquetjs)

Installation
------------
## Installation
_parquet.js requires node.js >= 14.16.0_
To use parquet.js with node.js, install it using npm:
```

@@ -34,8 +32,21 @@ $ npm install @dsnp/parquetjs

_parquet.js requires node.js >= 14.16.0_
### NodeJS
To use with nodejs:
```javascript
import parquetjs from "@dsnp/parquetjs"
```
### Browser
To use in a browser, in your bundler, depending on your needs, write the appropriate plugin or resolver to point to:
```javascript
"node_modules/@dsnp/parquetjs/browser/parquetjs"
```
or:
Usage: Writing files
--------------------
```javascript
import parquetjs from "@dsnp/parquetjs/browser/parquetjs"
```
## Usage: Writing files
Once you have installed the parquet.js library, you can import it as a single

@@ -116,4 +127,3 @@ module:

Usage: Reading files
--------------------
## Usage: Reading files

@@ -231,4 +241,3 @@ A parquet reader allows retrieving the rows from a parquet file in order.

Encodings
---------
## Encodings

@@ -265,4 +274,3 @@ Internally, the Parquet format will store values from each field as consecutive

Optional Fields
---------------
### Optional Fields

@@ -284,4 +292,3 @@ By default, all fields are required to be present in each row. You can also mark

Nested Rows & Arrays
--------------------
### Nested Rows & Arrays

@@ -356,4 +363,3 @@ Parquet supports nested schemas that allow you to store rows that have a more

Nested Lists for Hive / Athena
-----------------------
### Nested Lists for Hive / Athena

@@ -363,4 +369,3 @@ Lists have to be annotated to be queriable with AWS Athena. See [parquet-format](https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#lists) for more detail and a full working example with comments in the test directory ([`test/list.js`](test/list.js))

List of Supported Types & Encodings
-----------------------------------
### List of Supported Types & Encodings

@@ -398,4 +403,3 @@ We aim to be feature-complete and add new features as they are added to the

Buffering & Row Group Size
--------------------------
## Buffering & Row Group Size

@@ -416,4 +420,3 @@ When writing a Parquet file, the `ParquetWriter` will buffer rows in memory

Dependencies
-------------
## Dependencies

@@ -423,6 +426,6 @@ Parquet uses [thrift](https://thrift.apache.org/) to encode the schema and other

Notes
-----
## Notes
Currently parquet-cpp doesn't fully support DATA_PAGE_V2. You can work around this
by setting the useDataPageV2 option to false.

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc