@apache-arrow/ts - npm Package Compare versions

Comparing version 0.4.0 to 0.4.1

util/args.ts

bin/arrow2csv.ts

@@ -26,2 +26,3 @@ #! /usr/bin/env node

import { RecordBatch, RecordBatchReader, AsyncByteQueue } from '../Arrow.node';
import { Schema } from '../schema';

@@ -34,4 +35,13 @@ const padLeft = require('pad-left');

const state = { ...argv, closed: false, hasRecords: false };
const state = { ...argv, closed: false, maxColWidths: [10] };
type ToStringState = {
hr: string;
sep: string;
schema: any;
closed: boolean;
metadata: boolean;
maxColWidths: number[];
};
(async () => {

@@ -45,11 +55,13 @@

let reader: RecordBatchReader | null;
let hasReaders = false;
for (const source of sources) {
if (state.closed) { break; }
if (reader = await createRecordBatchReader(source)) {
await pipeline(
reader.toNodeStream(),
recordBatchRowsToString(state),
process.stdout
).catch(() => state.closed = true);
for await (reader of recordBatchReaders(source)) {
hasReaders = true;
const source = reader.toNodeStream();
const xform = batchesToString(state, reader.schema);
const sink = new stream.PassThrough();
sink.pipe(process.stdout, { end: false });
await pipeline(source, xform, sink).catch(() => state.closed = true);
}

@@ -59,3 +71,3 @@ if (state.closed) { break; }

return state.hasRecords ? 0 : print_usage();
return hasReaders ? 0 : print_usage();
})()

@@ -69,3 +81,3 @@ .then((x) => +x || 0, (err) => {

async function createRecordBatchReader(createSourceStream: () => NodeJS.ReadableStream) {
async function *recordBatchReaders(createSourceStream: () => NodeJS.ReadableStream) {

@@ -76,2 +88,3 @@ let json = new AsyncByteQueue();

let reader: RecordBatchReader | null = null;
let readers: AsyncIterable<RecordBatchReader> | null = null;
// tee the input source, just in case it's JSON

@@ -83,7 +96,9 @@ source.on('end', () => [stream, json].forEach((y) => y.close()))

try {
reader = await (await RecordBatchReader.from(stream)).open();
} catch (e) { reader = null; }
for await (reader of RecordBatchReader.readAll(stream)) {
reader && (yield reader);
}
if (reader) return;
} catch (e) { readers = null; }
if (!reader || reader.closed) {
reader = null;
if (!readers) {
await json.closed;

@@ -93,40 +108,73 @@ if (source instanceof fs.ReadStream) { source.close(); }

try {
reader = await (await RecordBatchReader.from(bignumJSONParse(await json.toString()))).open();
} catch (e) { reader = null; }
for await (reader of RecordBatchReader.readAll(bignumJSONParse(await json.toString()))) {
reader && (yield reader);
}
} catch (e) { readers = null; }
}
return (reader && !reader.closed) ? reader : null;
}
function recordBatchRowsToString(state: { closed: boolean, schema: any, separator: string, hasRecords: boolean }) {
function batchesToString(state: ToStringState, schema: Schema) {
let rowId = 0, maxColWidths = [15], separator = `${state.separator || ' |'} `;
let rowId = 0;
let batchId = -1;
let maxColWidths = [10];
const { hr, sep } = state;
return new stream.Transform({ transform, encoding: 'utf8', writableObjectMode: true, readableObjectMode: false });
const header = ['row_id', ...schema.fields.map((f) => `${f}`)].map(valueToString);
state.maxColWidths = header.map((x, i) => Math.max(maxColWidths[i] || 0, x.length));
return new stream.Transform({
transform,
encoding: 'utf8',
writableObjectMode: true,
readableObjectMode: false,
final(this: stream.Transform, cb: (error?: Error | null) => void) {
// if there were no batches, then print the Schema, and metadata
if (batchId === -1) {
this.push(`${horizontalRule(state.maxColWidths, hr, sep)}\n\n`);
this.push(`${formatRow(header, maxColWidths, sep)}\n`);
if (state.metadata && schema.metadata.size > 0) {
this.push(`metadata:\n${formatMetadata(schema.metadata)}\n`);
}
}
this.push(`${horizontalRule(state.maxColWidths, hr, sep)}\n\n`);
cb();
}
});
function transform(this: stream.Transform, batch: RecordBatch, _enc: string, cb: (error?: Error, data?: any) => void) {
batch = !(state.schema && state.schema.length) ? batch : batch.select(...state.schema);
if (batch.length <= 0 || batch.numCols <= 0 || state.closed) {
state.hasRecords || (state.hasRecords = false);
return cb(undefined, null);
}
state.hasRecords = true;
const header = ['row_id', ...batch.schema.fields.map((f) => `${f}`)].map(valueToString);
if (state.closed) { return cb(undefined, null); }
// Pass one to convert to strings and count max column widths
const newMaxWidths = measureColumnWidths(rowId, batch, header.map((x, i) => Math.max(maxColWidths[i] || 0, x.length)));
state.maxColWidths = measureColumnWidths(rowId, batch, header.map((x, i) => Math.max(maxColWidths[i] || 0, x.length)));
// If any of the column widths changed, print the header again
if ((rowId % 350) && JSON.stringify(newMaxWidths) !== JSON.stringify(maxColWidths)) {
this.push(`\n${formatRow(header, newMaxWidths, separator)}`);
// If this is the first batch in a stream, print a top horizontal rule, schema metadata, and
if (++batchId === 0) {
this.push(`${horizontalRule(state.maxColWidths, hr, sep)}\n`);
if (state.metadata && batch.schema.metadata.size > 0) {
this.push(`metadata:\n${formatMetadata(batch.schema.metadata)}\n`);
this.push(`${horizontalRule(state.maxColWidths, hr, sep)}\n`);
}
if (batch.length <= 0 || batch.numCols <= 0) {
this.push(`${formatRow(header, maxColWidths = state.maxColWidths, sep)}\n`);
}
}
maxColWidths = newMaxWidths;
for (const row of batch) {
if (state.closed) { break; }
else if (!row) { continue; }
if (!(rowId % 350)) { this.push(`\n${formatRow(header, maxColWidths, separator)}`); }
this.push(formatRow([rowId++, ...row].map(valueToString), maxColWidths, separator));
if (batch.length > 0 && batch.numCols > 0) {
// If any of the column widths changed, print the header again
if (rowId % 350 !== 0 && JSON.stringify(state.maxColWidths) !== JSON.stringify(maxColWidths)) {
this.push(`${formatRow(header, state.maxColWidths, sep)}\n`);
}
maxColWidths = state.maxColWidths;
for (const row of batch) {
if (state.closed) { break; } else if (!row) { continue; }
if (rowId++ % 350 === 0) {
this.push(`${formatRow(header, maxColWidths, sep)}\n`);
}
this.push(`${formatRow([rowId, ...row].map(valueToString), maxColWidths, sep)}\n`);
}
}

@@ -137,12 +185,32 @@ cb();

function formatRow(row: string[] = [], maxColWidths: number[] = [], separator: string = ' |') {
return row.map((x, j) => padLeft(x, maxColWidths[j])).join(separator) + '\n';
function horizontalRule(maxColWidths: number[], hr = '-', sep = ' |') {
return ` ${padLeft('', maxColWidths.reduce((x, y) => x + y, -2 + maxColWidths.length * sep.length), hr)}`;
}
function formatRow(row: string[] = [], maxColWidths: number[] = [], sep = ' |') {
return `${row.map((x, j) => padLeft(x, maxColWidths[j])).join(sep)}`;
}
function formatMetadata(metadata: Map<string, string>) {
return [...metadata].map(([key, val]) =>
` ${key}: ${formatMetadataValue(val)}`
).join(', \n');
function formatMetadataValue(value: string = '') {
let parsed = value;
try {
parsed = JSON.stringify(JSON.parse(value), null, 2);
} catch (e) { parsed = value; }
return valueToString(parsed).split('\n').join('\n ');
}
}
function measureColumnWidths(rowId: number, batch: RecordBatch, maxColWidths: number[] = []) {
let val: any, j = 0;
for (const row of batch) {
if (!row) { continue; }
maxColWidths[0] = Math.max(maxColWidths[0] || 0, (`${rowId++}`).length);
for (let val: any, j = -1, k = row.length; ++j < k;) {
if (ArrayBuffer.isView(val = row[j]) && (typeof val[Symbol.toPrimitive] !== 'function')) {
maxColWidths[j = 0] = Math.max(maxColWidths[0] || 0, (`${rowId++}`).length);
for (val of row) {
if (val && typedArrayElementWidths.has(val.constructor) && (typeof val[Symbol.toPrimitive] !== 'function')) {
// If we're printing a column of TypedArrays, ensure the column is wide enough to accommodate

@@ -158,3 +226,3 @@ // the widest possible element for a given byte size, since JS omits leading zeroes. For example:

const elementWidth = typedArrayElementWidths.get(val.constructor)!;
maxColWidths[j + 1] = Math.max(maxColWidths[j + 1] || 0,

@@ -168,2 +236,3 @@ 2 + // brackets on each end

}
++j;
}
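
The following is a standalone sketch, not part of the diff: it copies the new horizontalRule and formatRow helpers (assuming the pad-left dependency arrow2csv already requires is installed) to show how the border width is derived from the column widths and the separator.

const padLeft = require('pad-left');

function horizontalRule(maxColWidths: number[], hr = '-', sep = ' |') {
    // dash run = sum of column widths + (number of columns * separator width) - 2
    return ` ${padLeft('', maxColWidths.reduce((x, y) => x + y, -2 + maxColWidths.length * sep.length), hr)}`;
}

function formatRow(row: string[] = [], maxColWidths: number[] = [], sep = ' |') {
    return `${row.map((x, j) => padLeft(x, maxColWidths[j])).join(sep)}`;
}

// With column widths [6, 12] and the default ' |' separator the dash run is
// 6 + 12 + 2*2 - 2 = 20 characters, the same width as the formatted row below.
console.log(horizontalRule([6, 12]));
console.log(formatRow(['row_id', 'foo: Int32'], [6, 12]));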

@@ -210,7 +279,18 @@ }

type: String,
name: 'sep', optional: true, default: '|',
description: 'The column separator character'
name: 'sep', optional: true, default: ' |',
description: 'The column separator character (default: " |")'
},
{
type: String,
name: 'hr', optional: true, default: '-',
description: 'The horizontal border character (default: "-")'
},
{
type: Boolean,
name: 'metadata', alias: 'm',
optional: true, default: false,
description: 'Flag to print Schema metadata (default: false)'
},
{
type: Boolean,
name: 'help', optional: true, default: false,

@@ -244,10 +324,11 @@ description: 'Print this usage guide.'

content: [
'$ arrow2csv --schema foo baz -f simple.arrow --sep ","',
' ',
'> "row_id", "foo: Int32", "bar: Float64", "baz: Utf8"',
'> 0, 1, 1, "aa"',
'> 1, null, null, null',
'> 2, 3, null, null',
'> 3, 4, 4, "bbb"',
'> 4, 5, 5, "cccc"',
'$ arrow2csv --schema foo baz --sep "," -f simple.arrow',
'>--------------------------------------',
'> "row_id", "foo: Int32", "baz: Utf8"',
'> 0, 1, "aa"',
'> 1, null, null',
'> 2, 3, null',
'> 3, 4, "bbb"',
'> 4, 5, "cccc"',
'>--------------------------------------',
]

@@ -254,0 +335,0 @@ }

@@ -18,5 +18,7 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Data } from './data';
import { Field } from './schema';
import { DataType } from './type';
import { Vector } from './vector';
import { DataType } from './type';
import { VectorCtorArgs, Vector as V } from './interfaces';
import { Clonable, Sliceable, Applicative } from './vector';

@@ -38,4 +40,24 @@ import { Chunked, SearchContinuation } from './vector/chunked';

public static new<T extends DataType>(field: string | Field<T>, ...chunks: (Vector<T> | Vector<T>[])[]): Column<T>;
public static new<T extends DataType>(field: string | Field<T>, data: Data<T>, ...args: VectorCtorArgs<V<T>>): Column<T>;
/** @nocollapse */
public static new<T extends DataType = any>(field: string | Field<T>, data: Data<T> | Vector<T> | (Data<T> | Vector<T>)[], ...rest: any[]) {
const chunks = Chunked.flatten<T>(
Array.isArray(data) ? [...data, ...rest] :
data instanceof Vector ? [data, ...rest] :
[Vector.new(data, ...rest)]
);
if (typeof field === 'string') {
const type = chunks[0].data.type;
field = new Field(field, type, chunks.some(({ nullCount }) => nullCount > 0));
} else if (!field.nullable && chunks.some(({ nullCount }) => nullCount > 0)) {
field = field.clone({ nullable: true });
}
return new Column(field, chunks);
}
constructor(field: Field<T>, vectors: Vector<T>[] = [], offsets?: Uint32Array) {
vectors = Chunked.flatten(...vectors);
vectors = Chunked.flatten<T>(...vectors);
super(field.type, vectors, offsets);

@@ -53,2 +75,4 @@ this._field = field;

public get name() { return this._field.name; }
public get nullable() { return this._field.nullable; }
public get metadata() { return this._field.metadata; }

@@ -55,0 +79,0 @@ public clone(chunks = this._chunks) {
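
A hedged usage sketch, not part of the diff, of the new Column.new overloads; it assumes the published apache-arrow package exposes the Column, Field, Int32, and Int32Vector exports used below.

import { Column, Field, Int32, Int32Vector } from 'apache-arrow';

// With a string name, Column.new flattens the supplied chunks and derives the
// Field from the first chunk's type, marking it nullable if any chunk has nulls.
const nums = Column.new('nums', Int32Vector.from([1, 2, 3]));
console.log(nums.name);       // 'nums'
console.log(nums.length);     // 3
console.log(nums.nullCount);  // 0

// A pre-built Field may be passed instead of a name.
const field = Field.new('nums', new Int32(), false);
const same = Column.new(field, Int32Vector.from([4, 5, 6]));
console.log(same.nullable);   // false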

@@ -86,7 +86,8 @@ // Licensed to the Apache Software Foundation (ASF) under one

constructor(values: Vector<T>, counts: V<TCount>) {
const schema = new Schema<{ values: T, counts: TCount }>([
type R = { values: T, counts: TCount };
const schema = new Schema<R>([
new Field('values', values.type),
new Field('counts', counts.type)
]);
super(new RecordBatch(schema, counts.length, [values, counts]));
super(new RecordBatch<R>(schema, counts.length, [values, counts]));
}

@@ -104,3 +105,3 @@ public toJSON(): Object {

export class FilteredDataFrame<T extends { [key: string]: DataType; } = any> extends DataFrame<T> {
export class FilteredDataFrame<T extends { [key: string]: DataType } = any> extends DataFrame<T> {
private _predicate: Predicate;

@@ -107,0 +108,0 @@ constructor (batches: RecordBatch<T>[], predicate: Predicate) {

@@ -19,6 +19,7 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Vector } from './vector';
import { truncateBitmap } from './util/bit';
import { popcnt_bit_range } from './util/bit';
import { toArrayBufferView } from './util/buffer';
import { DataType, SparseUnion, DenseUnion } from './type';
import { VectorType as BufferType, UnionMode, Type } from './enum';
import { toArrayBufferView, toUint8Array, toInt32Array } from './util/buffer';
import {

@@ -143,2 +144,17 @@ Dictionary,

public _changeLengthAndBackfillNullBitmap(newLength: number): Data<T> {
const { length, nullCount } = this;
// start initialized with 0s (nulls), then fill from 0 to length with 1s (not null)
const bitmap = new Uint8Array(((newLength + 63) & ~63) >> 3).fill(255, 0, length >> 3);
// set all the bits in the last byte (up to bit `length - length % 8`) to 1 (not null)
bitmap[length >> 3] = (1 << (length - (length & ~7))) - 1;
// if we have a nullBitmap, truncate + slice and set it over the pre-filled 1s
if (nullCount > 0) {
bitmap.set(truncateBitmap(this.offset, length, this.nullBitmap), 0);
}
const buffers = this.buffers;
buffers[BufferType.VALIDITY] = bitmap;
return this.clone(this.type, 0, newLength, nullCount + (newLength - length), buffers);
}
protected _sliceBuffers(offset: number, length: number, stride: number, typeId: T['TType']): Buffers<T> {

@@ -163,71 +179,98 @@ let arr: any, { buffers } = this;

/** @nocollapse */
public static Null<T extends Null>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer) {
return new Data(type, offset, length, nullCount, [undefined, undefined, toArrayBufferView(Uint8Array, nullBitmap)]);
public static new<T extends DataType>(type: T, offset: number, length: number, nullCount?: number, buffers?: Partial<Buffers<T>> | Data<T>, childData?: (Data | Vector)[]): Data<T> {
if (buffers instanceof Data) { buffers = buffers.buffers; } else if (!buffers) { buffers = [] as Partial<Buffers<T>>; }
switch (type.typeId) {
case Type.Null: return <unknown> Data.Null( <unknown> type as Null, offset, length, nullCount || 0, buffers[2]) as Data<T>;
case Type.Int: return <unknown> Data.Int( <unknown> type as Int, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Dictionary: return <unknown> Data.Dictionary( <unknown> type as Dictionary, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Float: return <unknown> Data.Float( <unknown> type as Float, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Bool: return <unknown> Data.Bool( <unknown> type as Bool, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Decimal: return <unknown> Data.Decimal( <unknown> type as Decimal, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Date: return <unknown> Data.Date( <unknown> type as Date_, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Time: return <unknown> Data.Time( <unknown> type as Time, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Timestamp: return <unknown> Data.Timestamp( <unknown> type as Timestamp, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Interval: return <unknown> Data.Interval( <unknown> type as Interval, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.FixedSizeBinary: return <unknown> Data.FixedSizeBinary( <unknown> type as FixedSizeBinary, offset, length, nullCount || 0, buffers[2], buffers[1] || []) as Data<T>;
case Type.Binary: return <unknown> Data.Binary( <unknown> type as Binary, offset, length, nullCount || 0, buffers[2], buffers[0] || [], buffers[1] || []) as Data<T>;
case Type.Utf8: return <unknown> Data.Utf8( <unknown> type as Utf8, offset, length, nullCount || 0, buffers[2], buffers[0] || [], buffers[1] || []) as Data<T>;
case Type.List: return <unknown> Data.List( <unknown> type as List, offset, length, nullCount || 0, buffers[2], buffers[0] || [], (childData || [])[0]) as Data<T>;
case Type.FixedSizeList: return <unknown> Data.FixedSizeList( <unknown> type as FixedSizeList, offset, length, nullCount || 0, buffers[2], (childData || [])[0]) as Data<T>;
case Type.Struct: return <unknown> Data.Struct( <unknown> type as Struct, offset, length, nullCount || 0, buffers[2], childData || []) as Data<T>;
case Type.Map: return <unknown> Data.Map( <unknown> type as Map_, offset, length, nullCount || 0, buffers[2], childData || []) as Data<T>;
case Type.Union: return <unknown> Data.Union( <unknown> type as Union, offset, length, nullCount || 0, buffers[2], buffers[3] || [], buffers[1] || childData, childData) as Data<T>;
}
throw new Error(`Unrecognized typeId ${type.typeId}`);
}
/** @nocollapse */
public static Null<T extends Null>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, _data?: NullBuffer) {
return new Data(type, offset, length, nullCount, [undefined, undefined, toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Int<T extends Int>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Dictionary<T extends Dictionary>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView<T['TArray']>(type.indices.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView<T['TArray']>(type.indices.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Float<T extends Float>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Bool<T extends Bool>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Decimal<T extends Decimal>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Date<T extends Date_>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Time<T extends Time>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Timestamp<T extends Timestamp>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Interval<T extends Interval>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static FixedSizeBinary<T extends FixedSizeBinary>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Binary<T extends Binary>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, valueOffsets: ValueOffsetsBuffer, data: Uint8Array) {
return new Data(type, offset, length, nullCount, [toArrayBufferView(Int32Array, valueOffsets), toArrayBufferView(Uint8Array, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [toInt32Array(valueOffsets), toUint8Array(data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static Utf8<T extends Utf8>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, valueOffsets: ValueOffsetsBuffer, data: Uint8Array) {
return new Data(type, offset, length, nullCount, [toArrayBufferView(Int32Array, valueOffsets), toArrayBufferView(Uint8Array, data), toArrayBufferView(Uint8Array, nullBitmap)]);
return new Data(type, offset, length, nullCount, [toInt32Array(valueOffsets), toUint8Array(data), toUint8Array(nullBitmap)]);
}
/** @nocollapse */
public static List<T extends List>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, valueOffsets: ValueOffsetsBuffer, child: Data<T['valueType']> | Vector<T['valueType']>) {
return new Data(type, offset, length, nullCount, [toArrayBufferView(Int32Array, valueOffsets), undefined, toArrayBufferView(Uint8Array, nullBitmap)], [child]);
return new Data(type, offset, length, nullCount, [toInt32Array(valueOffsets), undefined, toUint8Array(nullBitmap)], [child]);
}
/** @nocollapse */
public static FixedSizeList<T extends FixedSizeList>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, child: Data | Vector) {
return new Data(type, offset, length, nullCount, [undefined, undefined, toArrayBufferView(Uint8Array, nullBitmap)], [child]);
return new Data(type, offset, length, nullCount, [undefined, undefined, toUint8Array(nullBitmap)], [child]);
}
/** @nocollapse */
public static Struct<T extends Struct>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, children: (Data | Vector)[]) {
return new Data(type, offset, length, nullCount, [undefined, undefined, toArrayBufferView(Uint8Array, nullBitmap)], children);
return new Data(type, offset, length, nullCount, [undefined, undefined, toUint8Array(nullBitmap)], children);
}
/** @nocollapse */
public static Map<T extends Map_>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, children: (Data | Vector)[]) {
return new Data(type, offset, length, nullCount, [undefined, undefined, toArrayBufferView(Uint8Array, nullBitmap)], children);
return new Data(type, offset, length, nullCount, [undefined, undefined, toUint8Array(nullBitmap)], children);
}
public static Union<T extends SparseUnion>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, children: (Data | Vector)[]): Data<T>;
public static Union<T extends SparseUnion>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, children: (Data | Vector)[], _?: any): Data<T>;
public static Union<T extends DenseUnion>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, valueOffsets: ValueOffsetsBuffer, children: (Data | Vector)[]): Data<T>;
public static Union<T extends Union>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, valueOffsetsOrChildren: ValueOffsetsBuffer | (Data | Vector)[], children?: (Data | Vector)[]): Data<T>;
/** @nocollapse */

@@ -237,3 +280,3 @@ public static Union<T extends Union>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, valueOffsetsOrChildren: ValueOffsetsBuffer | (Data | Vector)[], children?: (Data | Vector)[]) {

undefined, undefined,
toArrayBufferView(Uint8Array, nullBitmap),
toUint8Array(nullBitmap),
toArrayBufferView(type.ArrayType, typeIds)

@@ -244,3 +287,3 @@ ] as Partial<Buffers<T>>;

}
buffers[BufferType.OFFSET] = toArrayBufferView(Int32Array, <ValueOffsetsBuffer> valueOffsetsOrChildren);
buffers[BufferType.OFFSET] = toInt32Array(<ValueOffsetsBuffer> valueOffsetsOrChildren);
return new Data(type, offset, length, nullCount, buffers, children);

@@ -247,0 +290,0 @@ }
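
A minimal sketch, not part of the diff, of the new Data.new dispatcher; it assumes the apache-arrow package exports Data, Int32, and Vector as used below.

import { Data, Int32, Vector } from 'apache-arrow';

// Data.new inspects type.typeId and forwards to the matching static constructor
// (Data.Int here). buffers[1] is the values buffer; buffers[2] is the optional
// validity bitmap, omitted here because nullCount is 0.
const data = Data.new(new Int32(), 0, 3, 0, [undefined, new Int32Array([1, 2, 3])]);
const vec = Vector.new(data);
console.log(vec.length);   // 3
console.log(vec.get(1));   // 2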

@@ -24,8 +24,15 @@ // Licensed to the Apache Software Foundation (ASF) under one

/** @ignore */ type FloatArray = Float32Array | Float64Array;
/** @ignore */ type IntArray = Int8Array | Int16Array | Int32Array;
/** @ignore */ type UintArray = Uint8Array | Uint16Array | Uint32Array | Uint8ClampedArray;
/** @ignore */
export interface ArrayBufferViewConstructor<T extends ArrayBufferView> {
export type TypedArray = FloatArray | IntArray | UintArray;
export type BigIntArray = BigInt64Array | BigUint64Array;
/** @ignore */
export interface TypedArrayConstructor<T extends TypedArray> {
readonly prototype: T;
new(length: number): T;
new(arrayOrArrayBuffer: ArrayLike<number> | ArrayBufferLike): T;
new(buffer: ArrayBufferLike, byteOffset: number, length?: number): T;
new(length?: number): T;
new(array: Iterable<number>): T;
new(buffer: ArrayBufferLike, byteOffset?: number, length?: number): T;
/**

@@ -47,5 +54,31 @@ * The size in bytes of each element in the array.

from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): T;
from<U>(arrayLike: ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: any): T;
}
/** @ignore */
export interface BigIntArrayConstructor<T extends BigIntArray> {
readonly prototype: T;
new(length?: number): T;
new(array: Iterable<bigint>): T;
new(buffer: ArrayBufferLike, byteOffset?: number, length?: number): T;
/**
* The size in bytes of each element in the array.
*/
readonly BYTES_PER_ELEMENT: number;
/**
* Returns a new array from a set of elements.
* @param items A set of elements to include in the new array object.
*/
of(...items: bigint[]): T;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<bigint>, mapfn?: (v: bigint, k: number) => bigint, thisArg?: any): T;
from<U>(arrayLike: ArrayLike<U>, mapfn: (v: U, k: number) => bigint, thisArg?: any): T;
}
/** @ignore */
export type VectorCtorArgs<

@@ -81,3 +114,3 @@ T extends Vector<R>,

T extends DataType ? DataTypeToVector<T> :
never
vecs.BaseVector<any>
;

@@ -192,3 +225,3 @@

T extends type.Struct ? vecs.StructVector<T['dataTypes']> :
T extends type.Dictionary ? vecs.DictionaryVector<T['valueType'], T['indices']> :
T extends type.Dictionary ? vecs.DictionaryVector<T['valueType'], T['indices']> :
T extends type.FixedSizeList ? vecs.FixedSizeListVector<T['valueType']> :

@@ -199,3 +232,3 @@ vecs.BaseVector<T>

/** @ignore */
type TypeToDataType<T extends Type> =
export type TypeToDataType<T extends Type> =
T extends Type.Null ? type.Null

@@ -202,0 +235,0 @@ : T extends Type.Bool ? type.Bool

@@ -18,4 +18,4 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { DataType } from '../../type';
import { Duplex, DuplexOptions } from 'stream';
import { DataType } from '../../type';
import { RecordBatch } from '../../recordbatch';

@@ -81,3 +81,3 @@ import { AsyncByteQueue } from '../../io/stream';

}
if ((r && r.done || !this.readable)) {
if (!this.readable || (r && r.done && (reader.autoDestroy || (await reader.reset().open()).closed))) {
this.push(null);

@@ -84,0 +84,0 @@ await reader.cancel();

@@ -18,4 +18,4 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { DataType } from '../../type';
import { Duplex, DuplexOptions } from 'stream';
import { DataType } from '../../type';
import { AsyncByteStream } from '../../io/stream';

@@ -22,0 +22,0 @@ import { RecordBatchWriter } from '../../ipc/writer';

@@ -18,4 +18,4 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Vector } from '../vector';
import { DataType } from '../type';
import { Vector } from '../vector';
import { MessageHeader } from '../enum';

@@ -22,0 +22,0 @@ import { Footer } from './metadata/file';

@@ -36,3 +36,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Writable, ReadableInterop, ReadableDOMStreamOptions } from '../io/interfaces';
import { isPromise, isAsyncIterable, isWritableDOMStream, isWritableNodeStream } from '../util/compat';
import { isPromise, isAsyncIterable, isWritableDOMStream, isWritableNodeStream, isIterable } from '../util/compat';

@@ -130,3 +130,3 @@ export class RecordBatchWriter<T extends { [key: string]: DataType } = any> extends ReadableInterop<Uint8Array> implements Writable<RecordBatch<T>> {

if (!schema || (schema !== this._schema)) {
if (!schema || !(schema.compareTo(this._schema))) {
if (schema === null) {

@@ -145,9 +145,17 @@ this._position = 0;

public write(chunk?: Table<T> | RecordBatch<T> | null) {
let schema: Schema<T> | null;
public write(payload?: Table<T> | RecordBatch<T> | Iterable<RecordBatch<T>> | null) {
let schema: Schema<T> | null = null;
if (!this._sink) {
throw new Error(`RecordBatchWriter is closed`);
} else if (!chunk || !(schema = chunk.schema)) {
} else if (payload === null || payload === undefined) {
return this.finish() && undefined;
} else if (schema !== this._schema) {
} else if (payload instanceof Table && !(schema = payload.schema)) {
return this.finish() && undefined;
} else if (payload instanceof RecordBatch && !(schema = payload.schema)) {
return this.finish() && undefined;
}
if (schema && !schema.compareTo(this._schema)) {
if (this._started && this._autoDestroy) {

@@ -158,5 +166,10 @@ return this.close();

}
(chunk instanceof Table)
? this.writeAll(chunk.chunks)
: this._writeRecordBatch(chunk);
if (payload instanceof RecordBatch) {
this._writeRecordBatch(payload);
} else if (payload instanceof Table) {
this.writeAll(payload.chunks);
} else if (isIterable(payload)) {
this.writeAll(payload);
}
}

@@ -370,3 +383,7 @@

function writeAll<T extends { [key: string]: DataType } = any>(writer: RecordBatchWriter<T>, input: Table<T> | Iterable<RecordBatch<T>>) {
const chunks = (input instanceof Table) ? input.chunks : input;
let chunks = input as Iterable<RecordBatch<T>>;
if (input instanceof Table) {
chunks = input.chunks;
writer.reset(undefined, input.schema);
}
for (const batch of chunks) {

@@ -373,0 +390,0 @@ writer.write(batch);
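
A hedged sketch, not part of the diff, of the widened write()/writeAll() contract; it assumes the apache-arrow package's RecordBatchStreamWriter.writeAll static, which appears to delegate to the writeAll helper updated above.

import { Table, Int32Vector, RecordBatchStreamWriter } from 'apache-arrow';

// write() now accepts a Table, a RecordBatch, or any Iterable of RecordBatches,
// and compares schemas structurally via schema.compareTo instead of by reference.
const table = Table.new({ nums: Int32Vector.from([1, 2, 3]) });
const writer = RecordBatchStreamWriter.writeAll(table);
writer.toUint8Array().then((ipcBytes) => console.log(ipcBytes.byteLength));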

@@ -440,61 +440,2 @@

This project includes code from the Boost project
Boost Software License - Version 1.0 - August 17th, 2003
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
This project includes code from the mapbox/variant project, BSD 3-clause
license
Copyright (c) MapBox
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
- Neither the name "MapBox" nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
This project includes code from the FlatBuffers project

@@ -836,1 +777,85 @@

THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
This project includes code from Apache Kudu.
* cpp/cmake_modules/CompilerInfo.cmake is based on Kudu's cmake_modules/CompilerInfo.cmake
Copyright: 2016 The Apache Software Foundation.
Home page: https://kudu.apache.org/
License: http://www.apache.org/licenses/LICENSE-2.0
--------------------------------------------------------------------------------
This project includes code from Apache Impala (incubating), formerly
Impala. The Impala code and rights were donated to the ASF as part of the
Incubator process after the initial code imports into Apache Parquet.
Copyright: 2012 Cloudera, Inc.
Copyright: 2016 The Apache Software Foundation.
Home page: http://impala.apache.org/
License: http://www.apache.org/licenses/LICENSE-2.0
--------------------------------------------------------------------------------
This project includes code from Apache Aurora.
* dev/release/{release,changelog,release-candidate} are based on the scripts from
Apache Aurora
Copyright: 2016 The Apache Software Foundation.
Home page: https://aurora.apache.org/
License: http://www.apache.org/licenses/LICENSE-2.0
--------------------------------------------------------------------------------
This project includes code from the Google styleguide.
* cpp/build-support/cpplint.py is based on the scripts from the Google styleguide.
Copyright: 2009 Google Inc. All rights reserved.
Homepage: https://github.com/google/styleguide
License: 3-clause BSD
--------------------------------------------------------------------------------
This project includes code from Snappy.
* cpp/cmake_modules/{SnappyCMakeLists.txt,SnappyConfig.h} are based on code
from Google's Snappy project.
Copyright: 2009 Google Inc. All rights reserved.
Homepage: https://github.com/google/snappy
License: 3-clause BSD
--------------------------------------------------------------------------------
This project includes code from the manylinux project.
* python/manylinux1/scripts/{build_python.sh,python-tag-abi-tag.py,
requirements.txt} are based on code from the manylinux project.
Copyright: 2016 manylinux
Homepage: https://github.com/pypa/manylinux
License: The MIT License (MIT)
--------------------------------------------------------------------------------
This project include code from CMake.
* cpp/cmake_modules/FindGTest.cmake is based on code from CMake.
Copyright: Copyright 2000-2019 Kitware, Inc. and Contributors
Homepage: https://gitlab.kitware.com/cmake/cmake
License: 3-clause BSD
--------------------------------------------------------------------------------
This project include code from mingw-w64.
* cpp/src/arrow/util/cpu-info.cc has a polyfill for mingw-w64 < 5
Copyright (c) 2009 - 2013 by the mingw-w64 project
Homepage: https://mingw-w64.org
License: Zope Public License (ZPL) Version 2.1.
Apache Arrow
Copyright 2016 The Apache Software Foundation
Copyright 2016-2019 The Apache Software Foundation

@@ -52,2 +52,6 @@ This product includes software developed at

This product include software from CMake (BSD 3-Clause)
* CMake - Cross Platform Makefile Generator
* Copyright 2000-2019 Kitware, Inc. and Contributors
The web site includes files generated by Jekyll.

@@ -54,0 +58,0 @@

{
"version": "0.4.1",
"name": "@apache-arrow/ts",

@@ -6,3 +7,2 @@ "browser": "Arrow.dom.ts",

"types": "Arrow.node.ts",
"version": "0.4.0",
"license": "Apache-2.0",

@@ -9,0 +9,0 @@ "description": "Apache Arrow columnar in-memory format",

@@ -113,3 +113,3 @@ <!---

const rainfall = Table.fromVectors(
const rainfall = Table.new(
[FloatVector.from(rainAmounts), DateVector.from(rainDates)],

@@ -262,2 +262,3 @@ ['precipitation', 'date']

* [Perspective](https://github.com/jpmorganchase/perspective) -- Perspective is a streaming data visualization engine by J.P. Morgan for JavaScript for building real-time & user-configurable analytics entirely in the browser.
* [Falcon](https://github.com/uwdata/falcon) is a visualization tool for linked interactions across multiple aggregate visualizations of millions or billions of records.

@@ -264,0 +265,0 @@ ## Companies & Organizations

@@ -23,7 +23,12 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { DataType, Struct } from './type';
import { Chunked } from './vector/chunked';
import { StructVector } from './vector/struct';
import { Vector as VType } from './interfaces';
import { Chunked } from './vector/chunked';
import { selectFieldArgs } from './util/args';
import { ensureSameLengthData } from './util/recordbatch';
import { Clonable, Sliceable, Applicative } from './vector';
type VectorMap = { [key: string]: Vector };
type Fields<T extends { [key: string]: DataType }> = (keyof T)[] | Field<T[keyof T]>[];
type ChildData<T extends { [key: string]: DataType }> = (Data<T[keyof T]> | Vector<T[keyof T]>)[];
export interface RecordBatch<T extends { [key: string]: DataType } = any> {

@@ -41,25 +46,32 @@ concat(...others: Vector<Struct<T>>[]): Table<T>;

public static from<T extends VectorMap = any>(children: T): RecordBatch<{ [P in keyof T]: T[P]['type'] }>;
public static from<T extends { [key: string]: DataType } = any>(children: ChildData<T>, fields?: Fields<T>): RecordBatch<T>;
/** @nocollapse */
public static from<T extends { [key: string]: DataType } = any>(vectors: VType<T[keyof T]>[], names: (keyof T)[] = []) {
return new RecordBatch(
Schema.from(vectors, names),
vectors.reduce((len, vec) => Math.max(len, vec.length), 0),
vectors
);
public static from(...args: any[]) {
return RecordBatch.new(args[0], args[1]);
}
public static new<T extends VectorMap = any>(children: T): RecordBatch<{ [P in keyof T]: T[P]['type'] }>;
public static new<T extends { [key: string]: DataType } = any>(children: ChildData<T>, fields?: Fields<T>): RecordBatch<T>;
/** @nocollapse */
public static new<T extends { [key: string]: DataType } = any>(...args: any[]) {
const [fs, xs] = selectFieldArgs<T>(args);
const vs = xs.filter((x): x is Vector<T[keyof T]> => x instanceof Vector);
return new RecordBatch(...ensureSameLengthData(new Schema<T>(fs), vs.map((x) => x.data)));
}
protected _schema: Schema;
constructor(schema: Schema<T>, numRows: number, childData: (Data | Vector)[]);
constructor(schema: Schema<T>, length: number, children: (Data | Vector)[]);
constructor(schema: Schema<T>, data: Data<Struct<T>>, children?: Vector[]);
constructor(...args: any[]) {
let schema = args[0];
let data: Data<Struct<T>>;
let schema = args[0] as Schema<T>;
let children: Vector[] | undefined;
if (typeof args[1] === 'number') {
if (args[1] instanceof Data) {
[, data, children] = (args as [any, Data<Struct<T>>, Vector<T[keyof T]>[]?]);
} else {
const fields = schema.fields as Field<T[keyof T]>[];
const [, numRows, childData] = args as [Schema<T>, number, Data[]];
data = Data.Struct(new Struct<T>(fields), 0, numRows, 0, null, childData);
} else {
[, data, children] = (args as [Schema<T>, Data<Struct<T>>, Vector[]?]);
const [, length, childData] = args as [any, number, Data<T[keyof T]>[]];
data = Data.Struct(new Struct<T>(fields), 0, length, 0, null, childData);
}

@@ -83,9 +95,10 @@ super(data, children);

public select<K extends keyof T = any>(...columnNames: K[]) {
const fields = this._schema.fields;
const schema = this._schema.select(...columnNames);
const childNames = columnNames.reduce((xs, x) => (xs[x] = true) && xs, <any> {});
const childData = this.data.childData.filter((_, i) => childNames[fields[i].name]);
const structData = Data.Struct(new Struct(schema.fields), 0, this.length, 0, null, childData);
return new RecordBatch<{ [P in K]: T[P] }>(schema, structData as Data<Struct<{ [P in K]: T[P] }>>);
const nameToIndex = this._schema.fields.reduce((m, f, i) => m.set(f.name as K, i), new Map<K, number>());
return this.selectAt(...columnNames.map((columnName) => nameToIndex.get(columnName)!).filter((x) => x > -1));
}
public selectAt<K extends T[keyof T] = any>(...columnIndices: number[]) {
const schema = this._schema.selectAt(...columnIndices);
const childData = columnIndices.map((i) => this.data.childData[i]).filter(Boolean);
return new RecordBatch<{ [key: string]: K }>(schema, this.length, childData);
}
}
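
A hedged sketch, not part of the diff, of the new RecordBatch.new and selectAt methods; it assumes the apache-arrow exports used below.

import { RecordBatch, Int32Vector, Float32Vector } from 'apache-arrow';

// RecordBatch.new builds the Schema from the keys of the supplied map;
// select() now resolves names to indices and defers to the new selectAt().
const batch = RecordBatch.new({
    i32: Int32Vector.from([1, 2, 3]),
    f32: Float32Vector.from([0.1, 0.2, 0.3])
});
const floatsOnly = batch.selectAt(1);   // same columns as batch.select('f32')
console.log(floatsOnly.schema.fields.map((f) => f.name));   // [ 'f32' ]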

@@ -18,27 +18,42 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Data } from './data';
import { Vector } from './vector';
import { selectArgs } from './util/args';
import { DataType, Dictionary } from './type';
import { Vector as VType } from './interfaces';
import { selectFieldArgs } from './util/args';
import { instance as comparer } from './visitor/typecomparator';
type VectorMap = { [key: string]: Vector };
type Fields<T extends { [key: string]: DataType }> = (keyof T)[] | Field<T[keyof T]>[];
type ChildData<T extends { [key: string]: DataType }> = T[keyof T][] | Data<T[keyof T]>[] | Vector<T[keyof T]>[];
export class Schema<T extends { [key: string]: DataType } = any> {
public static from<T extends { [key: string]: DataType } = any>(children: T): Schema<T>;
public static from<T extends VectorMap = any>(children: T): Schema<{ [P in keyof T]: T[P]['type'] }>;
public static from<T extends { [key: string]: DataType } = any>(children: ChildData<T>, fields?: Fields<T>): Schema<T>;
/** @nocollapse */
public static from<T extends { [key: string]: DataType } = any>(vectors: VType<T[keyof T]>[], names: (keyof T)[] = []) {
return new Schema<T>(vectors.map((v, i) => new Field('' + (names[i] || i), v.type)));
public static from(...args: any[]) {
return Schema.new(args[0], args[1]);
}
protected _fields: Field[];
protected _metadata: Map<string, string>;
protected _dictionaries: Map<number, DataType>;
protected _dictionaryFields: Map<number, Field<Dictionary>[]>;
public get fields(): Field[] { return this._fields; }
public get metadata(): Map<string, string> { return this._metadata; }
public get dictionaries(): Map<number, DataType> { return this._dictionaries; }
public get dictionaryFields(): Map<number, Field<Dictionary>[]> { return this._dictionaryFields; }
public static new<T extends { [key: string]: DataType } = any>(children: T): Schema<T>;
public static new<T extends VectorMap = any>(children: T): Schema<{ [P in keyof T]: T[P]['type'] }>;
public static new<T extends { [key: string]: DataType } = any>(children: ChildData<T>, fields?: Fields<T>): Schema<T>;
/** @nocollapse */
public static new(...args: any[]) {
return new Schema(selectFieldArgs(args)[0]);
}
constructor(fields: Field[],
metadata?: Map<string, string>,
dictionaries?: Map<number, DataType>,
dictionaryFields?: Map<number, Field<Dictionary>[]>) {
this._fields = fields || [];
this._metadata = metadata || new Map();
public readonly fields: Field<T[keyof T]>[];
public readonly metadata: Map<string, string>;
public readonly dictionaries: Map<number, DataType>;
public readonly dictionaryFields: Map<number, Field<Dictionary>[]>;
constructor(fields: Field[] = [],
metadata?: Map<string, string> | null,
dictionaries?: Map<number, DataType> | null,
dictionaryFields?: Map<number, Field<Dictionary>[]> | null) {
this.fields = (fields || []) as Field<T[keyof T]>[];
this.metadata = metadata || new Map();
if (!dictionaries || !dictionaryFields) {

@@ -49,9 +64,14 @@ ({ dictionaries, dictionaryFields } = generateDictionaryMap(

}
this._dictionaries = dictionaries;
this._dictionaryFields = dictionaryFields;
this.dictionaries = dictionaries;
this.dictionaryFields = dictionaryFields;
}
public get [Symbol.toStringTag]() { return 'Schema'; }
public toString() {
return `Schema<{ ${this._fields.map((f, i) => `${i}: ${f}`).join(', ')} }>`;
return `Schema<{ ${this.fields.map((f, i) => `${i}: ${f}`).join(', ')} }>`;
}
public compareTo(other?: Schema | null): other is Schema<T> {
return comparer.compareSchemas(this, other);
}
public select<K extends keyof T = any>(...columnNames: K[]) {

@@ -61,28 +81,94 @@ const names = columnNames.reduce((xs, x) => (xs[x] = true) && xs, Object.create(null));

}
public selectAt<K extends T[keyof T] = any>(...columnIndices: number[]) {
return new Schema<{ [key: string]: K }>(columnIndices.map((i) => this.fields[i]).filter(Boolean), this.metadata);
}
public assign<R extends { [key: string]: DataType } = any>(schema: Schema<R>): Schema<T & R>;
public assign<R extends { [key: string]: DataType } = any>(...fields: (Field<R[keyof R]> | Field<R[keyof R]>[])[]): Schema<T & R>;
public assign<R extends { [key: string]: DataType } = any>(...args: (Schema<R> | Field<R[keyof R]> | Field<R[keyof R]>[])[]) {
const other = args[0] instanceof Schema ? args[0] as Schema<R>
: new Schema<R>(selectArgs<Field<R[keyof R]>>(Field, args));
const curFields = [...this.fields] as Field[];
const curDictionaries = [...this.dictionaries];
const curDictionaryFields = this.dictionaryFields;
const metadata = mergeMaps(mergeMaps(new Map(), this.metadata), other.metadata);
const newFields = other.fields.filter((f2) => {
const i = curFields.findIndex((f) => f.name === f2.name);
return ~i ? (curFields[i] = f2.clone({
metadata: mergeMaps(mergeMaps(new Map(), curFields[i].metadata), f2.metadata)
})) && false : true;
}) as Field[];
const { dictionaries, dictionaryFields } = generateDictionaryMap(newFields, new Map(), new Map());
const newDictionaries = [...dictionaries].filter(([y]) => !curDictionaries.every(([x]) => x === y));
const newDictionaryFields = [...dictionaryFields].map(([id, newDictFields]) => {
return [id, [...(curDictionaryFields.get(id) || []), ...newDictFields.map((f) => {
const i = newFields.findIndex((f2) => f.name === f2.name);
const { dictionary, indices, isOrdered, dictionaryVector } = f.type;
const type = new Dictionary(dictionary, indices, id, isOrdered, dictionaryVector);
return newFields[i] = f.clone({ type });
})]] as [number, Field<Dictionary>[]];
});
return new Schema<T & R>(
[...curFields, ...newFields], metadata,
new Map([...curDictionaries, ...newDictionaries]),
new Map([...curDictionaryFields, ...newDictionaryFields])
);
}
}
export class Field<T extends DataType = DataType> {
protected _type: T;
protected _name: string;
protected _nullable: true | false;
protected _metadata?: Map<string, string> | null;
constructor(name: string, type: T, nullable: true | false = false, metadata?: Map<string, string> | null) {
this._name = name;
this._type = type;
this._nullable = nullable;
this._metadata = metadata || new Map();
export class Field<T extends DataType = any> {
public static new<T extends DataType = any>(props: { name: string | number, type: T, nullable?: boolean, metadata?: Map<string, string> | null }): Field<T>;
public static new<T extends DataType = any>(name: string | number | Field<T>, type: T, nullable?: boolean, metadata?: Map<string, string> | null): Field<T>;
/** @nocollapse */
public static new<T extends DataType = any>(...args: any[]) {
let [name, type, nullable, metadata] = args;
if (args[0] && typeof args[0] === 'object') {
({ name } = args[0]);
(type === undefined) && (type = args[0].type);
(nullable === undefined) && (nullable = args[0].nullable);
(metadata === undefined) && (metadata = args[0].metadata);
}
return new Field<T>(`${name}`, type, nullable, metadata);
}
public get type() { return this._type; }
public get name() { return this._name; }
public get nullable() { return this._nullable; }
public get metadata() { return this._metadata; }
public get typeId() { return this._type.typeId; }
public readonly type: T;
public readonly name: string;
public readonly nullable: boolean;
public readonly metadata: Map<string, string>;
constructor(name: string, type: T, nullable = false, metadata?: Map<string, string> | null) {
this.name = name;
this.type = type;
this.nullable = nullable;
this.metadata = metadata || new Map();
}
public get typeId() { return this.type.typeId; }
public get [Symbol.toStringTag]() { return 'Field'; }
public get indices() {
return DataType.isDictionary(this._type) ? this._type.indices : this._type;
public toString() { return `${this.name}: ${this.type}`; }
public compareTo(other?: Field | null): other is Field<T> {
return comparer.compareField(this, other);
}
public toString() { return `${this.name}: ${this.type}`; }
public clone<R extends DataType = T>(props: { name?: string | number, type?: R, nullable?: boolean, metadata?: Map<string, string> | null }): Field<R>;
public clone<R extends DataType = T>(name?: string | number | Field<T>, type?: R, nullable?: boolean, metadata?: Map<string, string> | null): Field<R>;
public clone<R extends DataType = T>(...args: any[]) {
let [name, type, nullable, metadata] = args;
(!args[0] || typeof args[0] !== 'object')
? ([name = this.name, type = this.type, nullable = this.nullable, metadata = this.metadata] = args)
: ({name = this.name, type = this.type, nullable = this.nullable, metadata = this.metadata} = args[0]);
return Field.new<R>(name, type, nullable, metadata);
}
}
/** @ignore */
function mergeMaps<TKey, TVal>(m1?: Map<TKey, TVal> | null, m2?: Map<TKey, TVal> | null): Map<TKey, TVal> {
return new Map([...(m1 || new Map()), ...(m2 || new Map())]);
}
/** @ignore */
function generateDictionaryMap(fields: Field[], dictionaries: Map<number, DataType>, dictionaryFields: Map<number, Field<Dictionary>[]>) {

@@ -111,1 +197,13 @@

}
// Add these here so they're picked up by the externs creator
// in the build, and closure-compiler doesn't minify them away
(Schema.prototype as any).fields = null;
(Schema.prototype as any).metadata = null;
(Schema.prototype as any).dictionaries = null;
(Schema.prototype as any).dictionaryFields = null;
(Field.prototype as any).type = null;
(Field.prototype as any).name = null;
(Field.prototype as any).nullable = null;
(Field.prototype as any).metadata = null;
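
A hedged sketch, not part of the diff, of the reworked Schema and Field API; it assumes the apache-arrow exports used below (Schema, Field, Int32, Utf8).

import { Schema, Field, Int32, Utf8 } from 'apache-arrow';

// fields, metadata, and dictionaries are now plain readonly properties rather
// than getters, and schemas can be structurally compared and merged.
const left = new Schema([Field.new('id', new Int32())]);
const right = new Schema([Field.new('name', new Utf8(), true)]);

console.log(left.compareTo(right));             // false: different fields
const merged = left.assign(right);              // Schema<{ id: Int32, name: Utf8 }>
console.log(merged.fields.map((f) => f.name));  // [ 'id', 'name' ]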

@@ -18,2 +18,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Data } from './data';
import { Column } from './column';

@@ -23,3 +24,2 @@ import { Schema, Field } from './schema';

import { RecordBatch } from './recordbatch';
import { Vector as VType } from './interfaces';
import { DataFrame } from './compute/dataframe';

@@ -30,6 +30,14 @@ import { RecordBatchReader } from './ipc/reader';

import { Clonable, Sliceable, Applicative } from './vector';
import { selectColumnArgs, selectArgs } from './util/args';
import { distributeColumnsIntoRecordBatches } from './util/recordbatch';
import { distributeVectorsIntoRecordBatches } from './util/recordbatch';
import { RecordBatchFileWriter, RecordBatchStreamWriter } from './ipc/writer';
export interface Table<T extends { [key: string]: DataType; } = any> {
type VectorMap = { [key: string]: Vector };
type Fields<T extends { [key: string]: DataType }> = (keyof T)[] | Field<T[keyof T]>[];
type ChildData<T extends { [key: string]: DataType }> = Data<T[keyof T]>[] | Vector<T[keyof T]>[];
type Columns<T extends { [key: string]: DataType }> = Column<T[keyof T]>[] | Column<T[keyof T]>[][];
export interface Table<T extends { [key: string]: DataType } = any> {
get(index: number): Struct<T>['TValue'];

@@ -47,3 +55,3 @@ [Symbol.iterator](): IterableIterator<RowLike<T>>;

export class Table<T extends { [key: string]: DataType; } = any>
export class Table<T extends { [key: string]: DataType } = any>
extends Chunked<Struct<T>>

@@ -56,3 +64,3 @@ implements DataFrame<T>,

/** @nocollapse */
public static empty<T extends { [key: string]: DataType; } = any>() { return new Table<T>(new Schema([]), []); }
public static empty<T extends { [key: string]: DataType } = any>() { return new Table<T>(new Schema([]), []); }

@@ -96,3 +104,3 @@ public static from<T extends { [key: string]: DataType } = any>(): Table<T>;

/** @nocollapse */
public static async fromAsync<T extends { [key: string]: DataType; } = any>(source: import('./ipc/reader').FromArgs): Promise<Table<T>> {
public static async fromAsync<T extends { [key: string]: DataType } = any>(source: import('./ipc/reader').FromArgs): Promise<Table<T>> {
return await Table.from<T>(source as any);

@@ -102,11 +110,62 @@ }

/** @nocollapse */
public static fromVectors<T extends { [key: string]: DataType; } = any>(vectors: VType<T[keyof T]>[], names?: (keyof T)[]) {
return new Table(RecordBatch.from(vectors, names));
public static fromStruct<T extends { [key: string]: DataType } = any>(struct: Vector<Struct<T>>) {
return Table.new<T>(struct.data.childData as Data<T[keyof T]>[], struct.type.children);
}
/**
* @summary Create a new Table from a collection of Columns or Vectors,
* with an optional list of names or Fields.
*
*
* `Table.new` accepts an Object of
* Columns or Vectors, where the keys will be used as the field names
* for the Schema:
* ```ts
* const i32s = Int32Vector.from([1, 2, 3]);
* const f32s = Float32Vector.from([.1, .2, .3]);
* const table = Table.new({ i32: i32s, f32: f32s });
* assert(table.schema.fields[0].name === 'i32');
* ```
*
* It also accepts a list of Vectors with an optional list of names or
* Fields for the resulting Schema. If the list is omitted or a name is
* missing, the numeric index of each Vector will be used as the name:
* ```ts
* const i32s = Int32Vector.from([1, 2, 3]);
* const f32s = Float32Vector.from([.1, .2, .3]);
* const table = Table.new([i32s, f32s], ['i32']);
* assert(table.schema.fields[0].name === 'i32');
* assert(table.schema.fields[1].name === '1');
* ```
*
* If the supplied arguments are Columns, `Table.new` will infer the Schema
* from the Columns:
* ```ts
* const i32s = Column.new('i32', Int32Vector.from([1, 2, 3]));
* const f32s = Column.new('f32', Float32Vector.from([.1, .2, .3]));
* const table = Table.new(i32s, f32s);
* assert(table.schema.fields[0].name === 'i32');
* assert(table.schema.fields[1].name === 'f32');
* ```
*
* If the supplied Vector or Column lengths are unequal, `Table.new` will
* extend the lengths of the shorter Columns, allocating additional bytes
* to represent the additional null slots. The memory required to allocate
* these additional bitmaps can be computed as:
* ```ts
* let additionalBytes = 0;
 * for (const vec of shorter_vectors) {
* additionalBytes += (((longestLength - vec.length) + 63) & ~63) >> 3;
* }
* ```
*
* For example, an additional null bitmap for one million null values would require
 * 125,000 bytes (`((1e6 + 63) & ~63) >> 3`), or approx. `0.12MiB`
*/
public static new<T extends { [key: string]: DataType } = any>(...columns: Columns<T>): Table<T>;
public static new<T extends VectorMap = any>(children: T): Table<{ [P in keyof T]: T[P]['type'] }>;
public static new<T extends { [key: string]: DataType } = any>(children: ChildData<T>, fields?: Fields<T>): Table<T>;
/** @nocollapse */
public static fromStruct<T extends { [key: string]: DataType; } = any>(struct: Vector<Struct<T>>) {
const schema = new Schema<T>(struct.type.children);
const chunks = (struct instanceof Chunked ? struct.chunks : [struct]) as VType<Struct<T>>[];
return new Table(schema, chunks.map((chunk) => new RecordBatch(schema, chunk.data)));
public static new(...cols: any[]) {
return new Table(...distributeColumnsIntoRecordBatches(selectColumnArgs(cols)));
}
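
A minimal usage sketch of the padding behaviour documented in the doc comment above (illustrative only, not part of the diff; the `apache-arrow` import path is an assumption, adjust for the `@apache-arrow/ts` build):

```ts
// NOTE: import path is an assumption; adjust to your build of the package.
import { Table, Int32Vector, Float32Vector } from 'apache-arrow';

const a = Int32Vector.from([1, 2, 3, 4]);
const b = Float32Vector.from([0.1, 0.2]);

// The shorter column is extended with null slots, per the doc comment above.
const t = Table.new({ a, b });
console.assert(t.length === 4);
console.assert(t.getColumn('b')!.get(3) === null); // padded null slot
```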

@@ -116,4 +175,4 @@

constructor(...batches: RecordBatch<T>[]);
constructor(schema: Schema, batches: RecordBatch<T>[]);
constructor(schema: Schema, ...batches: RecordBatch<T>[]);
constructor(schema: Schema<T>, batches: RecordBatch<T>[]);
constructor(schema: Schema<T>, ...batches: RecordBatch<T>[]);
constructor(...args: any[]) {

@@ -125,5 +184,3 @@

let chunks = args.reduce(function flatten(xs: any[], x: any): any[] {
return Array.isArray(x) ? x.reduce(flatten, xs) : [...xs, x];
}, []).filter((x: any): x is RecordBatch<T> => x instanceof RecordBatch);
let chunks = selectArgs<RecordBatch<T>>(RecordBatch, args);

@@ -134,5 +191,7 @@ if (!schema && !(schema = chunks[0] && chunks[0].schema)) {

if (!chunks[0]) { chunks[0] = new RecordBatch(schema, 0, []); }
if (!chunks[0]) {
chunks[0] = new RecordBatch(schema, 0, schema.fields.map((f) => Data.new(f.type, 0, 0)));
}
super(chunks[0].type, chunks);
super(new Struct<T>(schema.fields), chunks);

@@ -143,3 +202,3 @@ this._schema = schema;

protected _schema: Schema;
protected _schema: Schema<T>;
// List of inner RecordBatches

@@ -158,8 +217,8 @@ protected _chunks: RecordBatch<T>[];

public getColumn<R extends keyof T>(name: R): Column<T[R]> {
return this.getColumnAt(this.getColumnIndex(name)) as Column<T[R]>;
}
public getColumnAt<R extends DataType = any>(index: number): Column<R> | null {
return this.getChildAt(index);
}
public getColumn<R extends keyof T>(name: R): Column<T[R]> | null {
return this.getColumnAt(this.getColumnIndex(name)) as Column<T[R]> | null;
}
public getColumnIndex<R extends keyof T>(name: R) {

@@ -170,8 +229,8 @@ return this._schema.fields.findIndex((f) => f.name === name);

if (index < 0 || index >= this.numChildren) { return null; }
let schema = this._schema;
let column: Column<R>, field: Field<R>, chunks: Vector<R>[];
let columns = this._children || (this._children = []) as Column[];
if (column = columns[index]) { return column as Column<R>; }
if (field = ((schema.fields || [])[index] as Field<R>)) {
chunks = this._chunks
let field: Field<R>, child: Column<R>;
const fields = (this._schema as Schema<any>).fields;
const columns = this._children || (this._children = []) as Column[];
if (child = columns[index]) { return child as Column<R>; }
if (field = fields[index]) {
const chunks = this._chunks
.map((chunk) => chunk.getChildAt<R>(index))

@@ -196,5 +255,31 @@ .filter((vec): vec is Vector<R> => vec != null);

}
public select(...columnNames: string[]) {
return new Table(this._chunks.map((batch) => batch.select(...columnNames)));
public select<K extends keyof T = any>(...columnNames: K[]) {
const nameToIndex = this._schema.fields.reduce((m, f, i) => m.set(f.name as K, i), new Map<K, number>());
return this.selectAt(...columnNames.map((columnName) => nameToIndex.get(columnName)!).filter((x) => x > -1));
}
public selectAt<K extends T[keyof T] = any>(...columnIndices: number[]) {
const schema = this._schema.selectAt<K>(...columnIndices);
return new Table(schema, this._chunks.map(({ length, data: { childData } }) => {
return new RecordBatch(schema, length, columnIndices.map((i) => childData[i]).filter(Boolean));
}));
}
public assign<R extends { [key: string]: DataType } = any>(other: Table<R>) {
const fields = this._schema.fields;
const [indices, oldToNew] = other.schema.fields.reduce((memo, f2, newIdx) => {
const [indices, oldToNew] = memo;
const i = fields.findIndex((f) => f.name === f2.name);
~i ? (oldToNew[i] = newIdx) : indices.push(newIdx);
return memo;
}, [[], []] as number[][]);
const schema = this._schema.assign(other.schema);
const columns = [
...fields.map((_f, i, _fs, j = oldToNew[i]) =>
(j === undefined ? this.getColumnAt(i) : other.getColumnAt(j))!),
...indices.map((i) => other.getColumnAt(i)!)
].filter(Boolean) as Column<(T & R)[keyof T | keyof R]>[];
return new Table(...distributeVectorsIntoRecordBatches<T & R>(schema, columns));
}
}
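
A short sketch of the new column-selection and assignment methods introduced in this hunk (a usage illustration, not part of the diff; the `apache-arrow` import path is an assumption):

```ts
// NOTE: import path is an assumption; adjust to your build of the package.
import { Table, Int32Vector, Float32Vector } from 'apache-arrow';

const left  = Table.new({ a: Int32Vector.from([1, 2, 3]), b: Float32Vector.from([0.1, 0.2, 0.3]) });
const right = Table.new({ c: Int32Vector.from([4, 5, 6]) });

// Pick columns by name or by positional index.
const justA = left.select('a');   // Table with only column 'a'
const byIdx = left.selectAt(1);   // Table with only column 'b'
console.assert(justA.schema.fields.length === 1 && byIdx.schema.fields.length === 1);

// Merge the columns of two Tables; a column in `right` with a name that
// already exists in `left` would replace the existing one.
const merged = left.assign(right);
console.assert(merged.schema.fields.map((f) => f.name).join() === 'a,b,c');
```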

@@ -23,4 +23,5 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { flatbuffers } from 'flatbuffers';
import { Vector as VType } from './interfaces';
import { ArrayBufferViewConstructor } from './interfaces';
import { TypedArrayConstructor } from './interfaces';
import { Vector as VType, TypeToDataType } from './interfaces';
import { instance as comparer } from './visitor/typecomparator';

@@ -41,10 +42,9 @@ import Long = flatbuffers.Long;

/** @ignore */
export type RowLike<T extends { [key: string]: DataType; }> =
{ readonly length: number }
& ( Iterable<T[keyof T]['TValue']> )
& { [P in keyof T]: T[P]['TValue'] }
& { get<K extends keyof T>(key: K): T[K]['TValue']; }
export type RowLike<T extends { [key: string]: DataType }> =
( Iterable<T[keyof T]['TValue'] | null> )
& { [P in keyof T]: T[P]['TValue'] | null }
& { get<K extends keyof T>(key: K): T[K]['TValue'] | null; }
;
export interface DataType<TType extends Type = Type> {
export interface DataType<TType extends Type = Type, TChildren extends { [key: string]: DataType } = any> {
readonly TType: TType;

@@ -54,2 +54,3 @@ readonly TArray: any;

readonly ArrayType: any;
readonly children: Field<TChildren[keyof TChildren]>[];
}

@@ -81,8 +82,9 @@

public get children() { return this._children; }
public get typeId(): TType { return <any> Type.NONE; }
public compareTo(other: DataType): other is TypeToDataType<TType> {
return comparer.visit(this, other);
}
constructor(protected _children?: Field<TChildren[keyof TChildren]>[]) {}
protected static [Symbol.toStringTag] = ((proto: DataType) => {
(<any> proto).children = null;
(<any> proto).ArrayType = Array;

@@ -107,10 +109,10 @@ return proto[Symbol.toStringTag] = 'DataType';

[Type.Int ]: { bitWidth: IntBitWidth; isSigned: true | false; TArray: IntArray; TValue: number | Int32Array | Uint32Array; };
[Type.Int8 ]: { bitWidth: 8; isSigned: true; TArray: Int8Array; TValue: number; };
[Type.Int16 ]: { bitWidth: 16; isSigned: true; TArray: Int16Array; TValue: number; };
[Type.Int32 ]: { bitWidth: 32; isSigned: true; TArray: Int32Array; TValue: number; };
[Type.Int64 ]: { bitWidth: 64; isSigned: true; TArray: Int32Array; TValue: Int32Array; };
[Type.Uint8 ]: { bitWidth: 8; isSigned: false; TArray: Uint8Array; TValue: number; };
[Type.Uint16]: { bitWidth: 16; isSigned: false; TArray: Uint16Array; TValue: number; };
[Type.Uint32]: { bitWidth: 32; isSigned: false; TArray: Uint32Array; TValue: number; };
[Type.Uint64]: { bitWidth: 64; isSigned: false; TArray: Uint32Array; TValue: Uint32Array; };
[Type.Int8 ]: { bitWidth: 8; isSigned: true; TArray: Int8Array; TValue: number; };
[Type.Int16 ]: { bitWidth: 16; isSigned: true; TArray: Int16Array; TValue: number; };
[Type.Int32 ]: { bitWidth: 32; isSigned: true; TArray: Int32Array; TValue: number; };
[Type.Int64 ]: { bitWidth: 64; isSigned: true; TArray: Int32Array; TValue: Int32Array; };
[Type.Uint8 ]: { bitWidth: 8; isSigned: false; TArray: Uint8Array; TValue: number; };
[Type.Uint16]: { bitWidth: 16; isSigned: false; TArray: Uint16Array; TValue: number; };
[Type.Uint32]: { bitWidth: 32; isSigned: false; TArray: Uint32Array; TValue: number; };
[Type.Uint64]: { bitWidth: 64; isSigned: false; TArray: Uint32Array; TValue: Uint32Array; };
};

@@ -120,20 +122,20 @@

class Int_<T extends Ints = Ints> extends DataType<T> {
constructor(protected _isSigned: IType[T]['isSigned'],
protected _bitWidth: IType[T]['bitWidth']) {
constructor(public readonly isSigned: IType[T]['isSigned'],
public readonly bitWidth: IType[T]['bitWidth']) {
super();
}
public get typeId() { return Type.Int as T; }
public get isSigned() { return this._isSigned; }
public get bitWidth() { return this._bitWidth; }
public get ArrayType(): ArrayBufferViewConstructor<IType[T]['TArray']> {
switch (this._bitWidth) {
case 8: return (this._isSigned ? Int8Array : Uint8Array) as any;
case 16: return (this._isSigned ? Int16Array : Uint16Array) as any;
case 32: return (this._isSigned ? Int32Array : Uint32Array) as any;
case 64: return (this._isSigned ? Int32Array : Uint32Array) as any;
public get ArrayType(): TypedArrayConstructor<IType[T]['TArray']> {
switch (this.bitWidth) {
case 8: return this.isSigned ? Int8Array : Uint8Array;
case 16: return this.isSigned ? Int16Array : Uint16Array;
case 32: return this.isSigned ? Int32Array : Uint32Array;
case 64: return this.isSigned ? Int32Array : Uint32Array;
}
throw new Error(`Unrecognized ${this[Symbol.toStringTag]} type`);
}
public toString() { return `${this._isSigned ? `I` : `Ui`}nt${this._bitWidth}`; }
public toString() { return `${this.isSigned ? `I` : `Ui`}nt${this.bitWidth}`; }
protected static [Symbol.toStringTag] = ((proto: Int_) => {
(<any> proto).isSigned = null;
(<any> proto).bitWidth = null;
return proto[Symbol.toStringTag] = 'Int';
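
To illustrate what the `ArrayType` lookup above resolves to, a small sketch restating the switch in the diff (the entry-point import is an assumption):

```ts
// NOTE: import path is an assumption; the type classes live in src/type.ts.
import { Int8, Int32, Int64, Uint16, Uint64 } from 'apache-arrow';

console.assert(new Int8().ArrayType   === Int8Array);
console.assert(new Int32().ArrayType  === Int32Array);
console.assert(new Uint16().ArrayType === Uint16Array);
// 64-bit integers are backed by pairs of 32-bit words, so their ArrayType
// is still Int32Array / Uint32Array.
console.assert(new Int64().ArrayType  === Int32Array);
console.assert(new Uint64().ArrayType === Uint32Array);
```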

@@ -154,2 +156,11 @@ })(Int_.prototype);

Object.defineProperty(Int8.prototype, 'ArrayType', { value: Int8Array });
Object.defineProperty(Int16.prototype, 'ArrayType', { value: Int16Array });
Object.defineProperty(Int32.prototype, 'ArrayType', { value: Int32Array });
Object.defineProperty(Int64.prototype, 'ArrayType', { value: Int32Array });
Object.defineProperty(Uint8.prototype, 'ArrayType', { value: Uint8Array });
Object.defineProperty(Uint16.prototype, 'ArrayType', { value: Uint16Array });
Object.defineProperty(Uint32.prototype, 'ArrayType', { value: Uint32Array });
Object.defineProperty(Uint64.prototype, 'ArrayType', { value: Uint32Array });
/** @ignore */

@@ -167,8 +178,7 @@ type Floats = Type.Float | Type.Float16 | Type.Float32 | Type.Float64;

export class Float<T extends Floats = Floats> extends DataType<T> {
constructor(protected _precision: Precision) {
constructor(public readonly precision: Precision) {
super();
}
public get typeId() { return Type.Float as T; }
public get precision() { return this._precision; }
public get ArrayType(): ArrayBufferViewConstructor<FType[T]['TArray']> {
public get ArrayType(): TypedArrayConstructor<FType[T]['TArray']> {
switch (this.precision) {

@@ -183,2 +193,3 @@ case Precision.HALF: return Uint16Array;

protected static [Symbol.toStringTag] = ((proto: Float) => {
(<any> proto).precision = null;
return proto[Symbol.toStringTag] = 'Float';

@@ -192,2 +203,6 @@ })(Float.prototype);

Object.defineProperty(Float16.prototype, 'ArrayType', { value: Uint16Array });
Object.defineProperty(Float32.prototype, 'ArrayType', { value: Float32Array });
Object.defineProperty(Float64.prototype, 'ArrayType', { value: Float64Array });
export interface Binary extends DataType<Type.Binary> { TArray: Uint8Array; TValue: Uint8Array; }

@@ -234,11 +249,11 @@ export class Binary extends DataType<Type.Binary> {

export class Decimal extends DataType<Type.Decimal> {
constructor(protected _scale: number,
protected _precision: number) {
constructor(public readonly scale: number,
public readonly precision: number) {
super();
}
public get typeId() { return Type.Decimal as Type.Decimal; }
public get scale() { return this._scale; }
public get precision() { return this._precision; }
public toString() { return `Decimal[${this._precision}e${this._scale > 0 ? `+` : ``}${this._scale}]`; }
public toString() { return `Decimal[${this.precision}e${this.scale > 0 ? `+` : ``}${this.scale}]`; }
protected static [Symbol.toStringTag] = ((proto: Decimal) => {
(<any> proto).scale = null;
(<any> proto).precision = null;
(<any> proto).ArrayType = Uint32Array;

@@ -253,9 +268,9 @@ return proto[Symbol.toStringTag] = 'Decimal';

export class Date_<T extends Dates = Dates> extends DataType<T> {
constructor(protected _unit: DateUnit) {
constructor(public readonly unit: DateUnit) {
super();
}
public get typeId() { return Type.Date as T; }
public get unit() { return this._unit; }
public toString() { return `Date${(this._unit + 1) * 32}<${DateUnit[this._unit]}>`; }
public toString() { return `Date${(this.unit + 1) * 32}<${DateUnit[this.unit]}>`; }
protected static [Symbol.toStringTag] = ((proto: Date_) => {
(<any> proto).unit = null;
(<any> proto).ArrayType = Int32Array;

@@ -282,11 +297,11 @@ return proto[Symbol.toStringTag] = 'Date';

class Time_<T extends Times = Times> extends DataType<T> {
constructor(protected _unit: TimesType[T]['unit'],
protected _bitWidth: TimeBitWidth) {
constructor(public readonly unit: TimesType[T]['unit'],
public readonly bitWidth: TimeBitWidth) {
super();
}
public get typeId() { return Type.Time as T; }
public get unit() { return this._unit; }
public get bitWidth() { return this._bitWidth; }
public toString() { return `Time${this._bitWidth}<${TimeUnit[this._unit]}>`; }
public toString() { return `Time${this.bitWidth}<${TimeUnit[this.unit]}>`; }
protected static [Symbol.toStringTag] = ((proto: Time_) => {
(<any> proto).unit = null;
(<any> proto).bitWidth = null;
(<any> proto).ArrayType = Int32Array;

@@ -308,10 +323,11 @@ return proto[Symbol.toStringTag] = 'Time';

class Timestamp_<T extends Timestamps = Timestamps> extends DataType<T> {
constructor(protected _unit: TimeUnit, protected _timezone?: string | null) {
constructor(public readonly unit: TimeUnit,
public readonly timezone?: string | null) {
super();
}
public get typeId() { return Type.Timestamp as T; }
public get unit() { return this._unit; }
public get timezone() { return this._timezone; }
public toString() { return `Timestamp<${TimeUnit[this._unit]}${this._timezone ? `, ${this._timezone}` : ``}>`; }
public toString() { return `Timestamp<${TimeUnit[this.unit]}${this.timezone ? `, ${this.timezone}` : ``}>`; }
protected static [Symbol.toStringTag] = ((proto: Timestamp_) => {
(<any> proto).unit = null;
(<any> proto).timezone = null;
(<any> proto).ArrayType = Int32Array;

@@ -333,9 +349,9 @@ return proto[Symbol.toStringTag] = 'Timestamp';

class Interval_<T extends Intervals = Intervals> extends DataType<T> {
constructor(protected _unit: IntervalUnit) {
constructor(public readonly unit: IntervalUnit) {
super();
}
public get typeId() { return Type.Interval as T; }
public get unit() { return this._unit; }
public toString() { return `Interval<${IntervalUnit[this._unit]}>`; }
public toString() { return `Interval<${IntervalUnit[this.unit]}>`; }
protected static [Symbol.toStringTag] = ((proto: Interval_) => {
(<any> proto).unit = null;
(<any> proto).ArrayType = Int32Array;

@@ -354,13 +370,13 @@ return proto[Symbol.toStringTag] = 'Interval';

constructor(child: Field<T>) {
super([child]);
super();
this.children = [child];
}
public readonly children: Field<T>[];
public get typeId() { return Type.List as Type.List; }
// @ts-ignore
protected _children: Field<T>[];
public toString() { return `List<${this.valueType}>`; }
public get children() { return this._children; }
public get valueType(): T { return this._children[0].type as T; }
public get valueField(): Field<T> { return this._children[0] as Field<T>; }
public get valueType(): T { return this.children[0].type as T; }
public get valueField(): Field<T> { return this.children[0] as Field<T>; }
public get ArrayType(): T['ArrayType'] { return this.valueType.ArrayType; }
protected static [Symbol.toStringTag] = ((proto: List) => {
(<any> proto).children = null;
return proto[Symbol.toStringTag] = 'List';

@@ -370,11 +386,12 @@ })(List.prototype);

export interface Struct<T extends { [key: string]: DataType; } = any> extends DataType<Type.Struct> { TArray: IterableArrayLike<RowLike<T>>; TValue: RowLike<T>; dataTypes: T; }
export class Struct<T extends { [key: string]: DataType; } = any> extends DataType<Type.Struct, T> {
constructor(protected _children: Field<T[keyof T]>[]) {
super(_children);
export interface Struct<T extends { [key: string]: DataType } = any> extends DataType<Type.Struct> { TArray: IterableArrayLike<RowLike<T>>; TValue: RowLike<T>; dataTypes: T; }
export class Struct<T extends { [key: string]: DataType } = any> extends DataType<Type.Struct, T> {
constructor(public readonly children: Field<T[keyof T]>[]) {
super();
this.children = children;
}
public get typeId() { return Type.Struct as Type.Struct; }
public get children() { return this._children; }
public toString() { return `Struct<[${this._children.map((f) => f.type).join(`, `)}]>`; }
public toString() { return `Struct<[${this.children.map((f) => f.type).join(`, `)}]>`; }
protected static [Symbol.toStringTag] = ((proto: Struct) => {
(<any> proto).children = null;
return proto[Symbol.toStringTag] = 'Struct';

@@ -388,10 +405,14 @@ })(Struct.prototype);

class Union_<T extends Unions = Unions> extends DataType<T> {
protected _typeIds: Int32Array;
protected _children: Field<any>[];
protected _typeIdToChildIndex: { [key: number]: number };
constructor(protected _mode: UnionMode, _typeIds: number[] | Int32Array, _children: Field<any>[]) {
super(_children);
this._children = _children;
this._typeIds = _typeIds = Int32Array.from(_typeIds);
this._typeIdToChildIndex = _typeIds.reduce((typeIdToChildIndex, typeId, idx) => {
public readonly mode: UnionMode;
public readonly typeIds: Int32Array;
public readonly children: Field<any>[];
public readonly typeIdToChildIndex: { [key: number]: number };
constructor(mode: UnionMode,
typeIds: number[] | Int32Array,
children: Field<any>[]) {
super();
this.mode = mode;
this.children = children;
this.typeIds = typeIds = Int32Array.from(typeIds);
this.typeIdToChildIndex = typeIds.reduce((typeIdToChildIndex, typeId, idx) => {
return (typeIdToChildIndex[typeId] = idx) && typeIdToChildIndex || typeIdToChildIndex;

@@ -401,10 +422,10 @@ }, Object.create(null) as { [key: number]: number });

public get typeId() { return Type.Union as T; }
public get mode() { return this._mode; }
public get typeIds() { return this._typeIds; }
public get children() { return this._children; }
public get typeIdToChildIndex() { return this._typeIdToChildIndex; }
public toString() { return `${this[Symbol.toStringTag]}<${
this._children.map((x) => `${x.type}`).join(` | `)
this.children.map((x) => `${x.type}`).join(` | `)
}>`; }
protected static [Symbol.toStringTag] = ((proto: Union_) => {
(<any> proto).mode = null;
(<any> proto).typeIds = null;
(<any> proto).children = null;
(<any> proto).typeIdToChildIndex = null;
(<any> proto).ArrayType = Int32Array;

@@ -431,9 +452,9 @@ return proto[Symbol.toStringTag] = 'Union';

export class FixedSizeBinary extends DataType<Type.FixedSizeBinary> {
constructor(protected _byteWidth: number) {
constructor(public readonly byteWidth: number) {
super();
}
public get typeId() { return Type.FixedSizeBinary as Type.FixedSizeBinary; }
public get byteWidth() { return this._byteWidth; }
public toString() { return `FixedSizeBinary[${this.byteWidth}]`; }
protected static [Symbol.toStringTag] = ((proto: FixedSizeBinary) => {
(<any> proto).byteWidth = null;
(<any> proto).ArrayType = Uint8Array;

@@ -446,15 +467,15 @@ return proto[Symbol.toStringTag] = 'FixedSizeBinary';

export class FixedSizeList<T extends DataType = any> extends DataType<Type.FixedSizeList, { [0]: T }> {
constructor(protected _listSize: number, child: Field<T>) {
super([child]);
public readonly children: Field<T>[];
constructor(public readonly listSize: number, child: Field<T>) {
super();
this.children = [child];
}
public get typeId() { return Type.FixedSizeList as Type.FixedSizeList; }
// @ts-ignore
protected _children: Field<T>[];
public get listSize() { return this._listSize; }
public get children() { return this._children; }
public get valueType(): T { return this.children[0].type as T; }
public get valueField(): Field<T> { return this.children[0] as Field<T>; }
public get ArrayType(): T['ArrayType'] { return this.valueType.ArrayType; }
public toString() { return `FixedSizeList[${this._listSize}]<${this.valueType}>`; }
public toString() { return `FixedSizeList[${this.listSize}]<${this.valueType}>`; }
protected static [Symbol.toStringTag] = ((proto: FixedSizeList) => {
(<any> proto).children = null;
(<any> proto).listSize = null;
return proto[Symbol.toStringTag] = 'FixedSizeList';

@@ -464,13 +485,13 @@ })(FixedSizeList.prototype);

export interface Map_<T extends { [key: string]: DataType; } = any> extends DataType<Type.Map> { TArray: Uint8Array; TValue: RowLike<T>; dataTypes: T; }
export class Map_<T extends { [key: string]: DataType; } = any> extends DataType<Type.Map, T> {
constructor(protected _children: Field<T[keyof T]>[],
protected _keysSorted: boolean = false) {
super(_children);
export interface Map_<T extends { [key: string]: DataType } = any> extends DataType<Type.Map> { TArray: Uint8Array; TValue: RowLike<T>; dataTypes: T; }
export class Map_<T extends { [key: string]: DataType } = any> extends DataType<Type.Map, T> {
constructor(public readonly children: Field<T[keyof T]>[],
public readonly keysSorted: boolean = false) {
super();
}
public get typeId() { return Type.Map as Type.Map; }
public get children() { return this._children; }
public get keysSorted() { return this._keysSorted; }
public toString() { return `Map<{${this._children.map((f) => `${f.name}:${f.type}`).join(`, `)}}>`; }
public toString() { return `Map<{${this.children.map((f) => `${f.name}:${f.type}`).join(`, `)}}>`; }
protected static [Symbol.toStringTag] = ((proto: Map_) => {
(<any> proto).children = null;
(<any> proto).keysSorted = null;
return proto[Symbol.toStringTag] = 'Map_';

@@ -488,23 +509,16 @@ })(Map_.prototype);

export class Dictionary<T extends DataType = any, TKey extends TKeys = TKeys> extends DataType<Type.Dictionary> {
protected _id: number;
protected _indices: TKey;
protected _dictionary: T;
protected _isOrdered: boolean;
protected _dictionaryVector: Vector<T>;
public set dictionaryVector(v) { this._dictionaryVector = v; }
public get dictionaryVector() { return this._dictionaryVector; }
public readonly id: number;
public readonly indices: TKey;
public readonly dictionary: T;
public readonly isOrdered: boolean;
public dictionaryVector: Vector<T>;
constructor(dictionary: T, indices: TKey, id?: Long | number | null, isOrdered?: boolean | null, dictionaryVector?: Vector<T>) {
super();
this._indices = indices;
this._dictionary = dictionary;
this._isOrdered = isOrdered || false;
this._dictionaryVector = dictionaryVector!;
this._id = id == null ? getId() : typeof id === 'number' ? id : id.low;
this.indices = indices;
this.dictionary = dictionary;
this.isOrdered = isOrdered || false;
this.dictionaryVector = dictionaryVector!;
this.id = id == null ? getId() : typeof id === 'number' ? id : id.low;
}
public get typeId() { return Type.Dictionary as Type.Dictionary; }
public get id() { return this._id; }
public get indices() { return this._indices; }
public get dictionary() { return this._dictionary; }
public get isOrdered() { return this._isOrdered; }
public set children(_: T['children']) {}
public get children() { return this.dictionary.children; }

@@ -515,2 +529,7 @@ public get valueType(): T { return this.dictionary as T; }

protected static [Symbol.toStringTag] = ((proto: Dictionary) => {
(<any> proto).id = null;
(<any> proto).indices = null;
(<any> proto).isOrdered = null;
(<any> proto).dictionary = null;
(<any> proto).dictionaryVector = null;
return proto[Symbol.toStringTag] = 'Dictionary';

@@ -517,0 +536,0 @@ })(Dictionary.prototype);

@@ -40,7 +40,6 @@ // Licensed to the Apache Software Foundation (ASF) under one

const bytes = new Uint8Array(alignedSize);
bytes.set((offset % 8 === 0)
// If the offset is a multiple of 8 bits, it's safe to slice the bitmap
? bitmap.subarray(offset >> 3)
// If the offset is a multiple of 8 bits, it's safe to slice the bitmap
bytes.set(offset % 8 === 0 ? bitmap.subarray(offset >> 3) :
// Otherwise iterate each bit from the offset and return a new one
: packBools(iterateBits(bitmap, offset, length, null, getBool)));
packBools(iterateBits(bitmap, offset, length, null, getBool)).subarray(0, alignedSize));
return bytes;

@@ -53,5 +52,4 @@ }

export function packBools(values: Iterable<any>) {
let n = 0, i = 0;
let xs: number[] = [];
let bit = 0, byte = 0;
let i = 0, bit = 0, byte = 0;
for (const value of values) {

@@ -65,6 +63,5 @@ value && (byte |= 1 << bit);

if (i === 0 || bit > 0) { xs[i++] = byte; }
if (i % 8 && (n = i + 8 - i % 8)) {
do { xs[i] = 0; } while (++i < n);
}
return new Uint8Array(xs);
let b = new Uint8Array((xs.length + 7) & ~7);
b.set(xs);
return b;
}
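
A quick illustration of the rewritten `packBools`, which now zero-pads the packed bytes out to a multiple of 8 bytes instead of looping to fill them (the deep-import path is an assumption):

```ts
// Assumed deep-import path; packBools lives in src/util/bit.ts.
import { packBools } from 'apache-arrow/util/bit';

// Values are packed LSB-first into bytes...
const packed = packBools([true, false, true, true]);
console.assert(packed[0] === 0b1101);          // bits 0, 2, and 3 set
// ...and the result is zero-padded to a multiple of 8 bytes.
console.assert(packed instanceof Uint8Array);
console.assert(packed.length === 8);
```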

@@ -71,0 +68,0 @@

@@ -18,3 +18,5 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { BigIntArray, BigIntArrayConstructor } from '../interfaces';
import { toArrayBufferView, ArrayBufferViewInput } from './buffer';
import { BigIntAvailable, BigInt64Array, BigUint64Array } from './compat';

@@ -30,10 +32,12 @@ /** @ignore */

const BigNumNMixin = {
toJSON(this: BN<BigNumArray>, ) { return `"${bignumToString(this)}"`; },
valueOf(this: BN<BigNumArray>, ) { return bignumToNumber(this); },
toString(this: BN<BigNumArray>, ) { return bignumToString(this); },
toJSON(this: BN<BigNumArray>) { return `"${bignumToString(this)}"`; },
valueOf(this: BN<BigNumArray>) { return bignumToNumber(this); },
toString(this: BN<BigNumArray>) { return bignumToString(this); },
[Symbol.toPrimitive]<T extends BN<BigNumArray>>(this: T, hint: 'string' | 'number' | 'default') {
if (hint === 'number') { return bignumToNumber(this); }
/** @suppress {missingRequire} */
return hint === 'string' || typeof BigInt !== 'function' ?
bignumToString(this) : BigInt(bignumToString(this));
switch (hint) {
case 'number': return bignumToNumber(this);
case 'string': return bignumToString(this);
case 'default': return bignumToBigInt(this);
}
return bignumToString(this);
}
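
For orientation, a small sketch of what the new `Symbol.toPrimitive` hints do in practice. `bn` stands for one of the BN-wrapped 64/128-bit values these mixins are applied to; it is declared loosely here only so the sketch type-checks:

```ts
declare const bn: any; // placeholder for a BN-wrapped 64/128-bit value

const asString   = `${bn}`;    // 'string' hint  -> bignumToString(bn)
const asNumber   = +bn;        // 'number' hint  -> bignumToNumber(bn)
const viaDefault = bn == 1n;   // 'default' hint -> bignumToBigInt(bn) when BigInt is available
```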

@@ -43,5 +47,5 @@ };

/** @ignore */
const SignedBigNumNMixin: any = Object.assign({}, BigNumNMixin, { signed: true, constructor: undefined });
const SignedBigNumNMixin: any = Object.assign({}, BigNumNMixin, { signed: true, BigIntArray: BigInt64Array });
/** @ignore */
const UnsignedBigNumNMixin: any = Object.assign({}, BigNumNMixin, { signed: false, constructor: undefined });
const UnsignedBigNumNMixin: any = Object.assign({}, BigNumNMixin, { signed: false, BigIntArray: BigUint64Array });

@@ -80,2 +84,3 @@ /** @ignore */

readonly signed: boolean;
readonly BigIntArray: BigIntArrayConstructor<BigIntArray>;

@@ -115,3 +120,2 @@ [Symbol.toStringTag]:

int64 += words[i++] + (words[i++] * (i ** 32));
// int64 += (words[i++] >>> 0) + (words[i++] * (i ** 32));
}

@@ -122,10 +126,20 @@ return int64;

/** @ignore */
function bignumToString<T extends BN<BigNumArray>>({ buffer, byteOffset, length }: T) {
let bignumToString: { <T extends BN<BigNumArray>>(a: T): string; };
/** @ignore */
let bignumToBigInt: { <T extends BN<BigNumArray>>(a: T): bigint; };
let string = '', i = -1;
if (!BigIntAvailable) {
bignumToString = decimalToString;
bignumToBigInt = <any> bignumToString;
} else {
bignumToBigInt = (<T extends BN<BigNumArray>>(a: T) => a.length === 2 ? new a.BigIntArray(a.buffer, a.byteOffset, 1)[0] : <any>decimalToString(a));
bignumToString = (<T extends BN<BigNumArray>>(a: T) => a.length === 2 ? `${new a.BigIntArray(a.buffer, a.byteOffset, 1)[0]}` : decimalToString(a));
}
function decimalToString<T extends BN<BigNumArray>>(a: T) {
let digits = '';
let base64 = new Uint32Array(2);
let base32 = new Uint16Array(buffer, byteOffset, length * 2);
let base32 = new Uint16Array(a.buffer, a.byteOffset, a.length * 2);
let checks = new Uint32Array((base32 = new Uint16Array(base32).reverse()).buffer);
let n = base32.length - 1;
let i = -1, n = base32.length - 1;
do {

@@ -138,6 +152,5 @@ for (base64[0] = base32[i = 0]; i < n;) {

base64[0] = base64[0] - base64[1] * 10;
string = `${base64[0]}${string}`;
digits = `${base64[0]}${digits}`;
} while (checks[0] || checks[1] || checks[2] || checks[3]);
return string ? string : `0`;
return digits ? digits : `0`;
}

@@ -144,0 +157,0 @@

@@ -21,4 +21,5 @@ // Licensed to the Apache Software Foundation (ASF) under one

import ByteBuffer = flatbuffers.ByteBuffer;
import { ArrayBufferViewConstructor } from '../interfaces';
import { isPromise, isIterable, isAsyncIterable, isIteratorResult } from './compat';
import { TypedArray, TypedArrayConstructor } from '../interfaces';
import { BigIntArray, BigIntArrayConstructor } from '../interfaces';
import { isPromise, isIterable, isAsyncIterable, isIteratorResult, BigInt64Array, BigUint64Array } from './compat';

@@ -96,3 +97,5 @@ /** @ignore */

/** @ignore */
export function toArrayBufferView<T extends ArrayBufferView>(ArrayBufferViewCtor: ArrayBufferViewConstructor<T>, input: ArrayBufferViewInput): T {
export function toArrayBufferView<T extends TypedArray>(ArrayBufferViewCtor: TypedArrayConstructor<T>, input: ArrayBufferViewInput): T;
export function toArrayBufferView<T extends BigIntArray>(ArrayBufferViewCtor: BigIntArrayConstructor<T>, input: ArrayBufferViewInput): T;
export function toArrayBufferView(ArrayBufferViewCtor: any, input: ArrayBufferViewInput) {

@@ -119,5 +122,7 @@ let value: any = isIteratorResult(input) ? input.value : input;

/** @ignore */ export const toInt32Array = (input: ArrayBufferViewInput) => toArrayBufferView(Int32Array, input);
/** @ignore */ export const toBigInt64Array = (input: ArrayBufferViewInput) => toArrayBufferView(BigInt64Array, input);
/** @ignore */ export const toUint8Array = (input: ArrayBufferViewInput) => toArrayBufferView(Uint8Array, input);
/** @ignore */ export const toUint16Array = (input: ArrayBufferViewInput) => toArrayBufferView(Uint16Array, input);
/** @ignore */ export const toUint32Array = (input: ArrayBufferViewInput) => toArrayBufferView(Uint32Array, input);
/** @ignore */ export const toBigUint64Array = (input: ArrayBufferViewInput) => toArrayBufferView(BigUint64Array, input);
/** @ignore */ export const toFloat32Array = (input: ArrayBufferViewInput) => toArrayBufferView(Float32Array, input);

@@ -128,2 +133,23 @@ /** @ignore */ export const toFloat64Array = (input: ArrayBufferViewInput) => toArrayBufferView(Float64Array, input);

/** @ignore */
export const toFloat16Array = (input: ArrayBufferViewInput) => {
let floats: Float32Array | Float64Array | null = null;
if (ArrayBuffer.isView(input)) {
switch (input.constructor) {
case Float32Array: floats = input as Float32Array; break;
case Float64Array: floats = input as Float64Array; break;
}
} else if (isIterable(input)) {
floats = toFloat64Array(input);
}
if (floats) {
const u16s = new Uint16Array(floats.length);
for (let i = -1, n = u16s.length; ++i < n;) {
u16s[i] = (floats[i] * 32767) + 32767;
}
return u16s;
}
return toUint16Array(input);
};
/** @ignore */
type ArrayBufferViewIteratorInput = Iterable<ArrayBufferViewInput> | ArrayBufferViewInput;
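
A sketch of what the new `toFloat16Array` helper produces for float input, restating the `(f * 32767) + 32767` scaling in the loop above (the deep-import path is an assumption):

```ts
// Assumed deep-import path; toFloat16Array lives in src/util/buffer.ts.
import { toFloat16Array } from 'apache-arrow/util/buffer';

// Float32/Float64 input is rescaled into the uint16 range via (f * 32767) + 32767:
const u16s = toFloat16Array(Float32Array.of(-1, 0, 1));
console.assert(u16s[0] === 0);
console.assert(u16s[1] === 32767);
console.assert(u16s[2] === 65534);
```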

@@ -135,3 +161,3 @@

/** @ignore */
export function* toArrayBufferViewIterator<T extends ArrayBufferView>(ArrayCtor: ArrayBufferViewConstructor<T>, source: ArrayBufferViewIteratorInput) {
export function* toArrayBufferViewIterator<T extends TypedArray>(ArrayCtor: TypedArrayConstructor<T>, source: ArrayBufferViewIteratorInput) {

@@ -168,3 +194,3 @@ const wrap = function*<T>(x: T) { yield x; };

/** @ignore */
export async function* toArrayBufferViewAsyncIterator<T extends ArrayBufferView>(ArrayCtor: ArrayBufferViewConstructor<T>, source: ArrayBufferViewAsyncIteratorInput): AsyncIterableIterator<T> {
export async function* toArrayBufferViewAsyncIterator<T extends TypedArray>(ArrayCtor: TypedArrayConstructor<T>, source: ArrayBufferViewAsyncIteratorInput): AsyncIterableIterator<T> {

@@ -171,0 +197,0 @@ // if a Promise, unwrap the Promise and iterate the resolved value

@@ -43,2 +43,39 @@ // Licensed to the Apache Software Foundation (ASF) under one

/** @ignore */
const [BigIntCtor, BigIntAvailable] = (() => {
const BigIntUnavailableError = () => { throw new Error('BigInt is not available in this environment'); };
function BigIntUnavailable() { throw BigIntUnavailableError(); }
BigIntUnavailable.asIntN = () => { throw BigIntUnavailableError(); };
BigIntUnavailable.asUintN = () => { throw BigIntUnavailableError(); };
return typeof BigInt !== 'undefined' ? [BigInt, true] : [<any> BigIntUnavailable, false];
})() as [BigIntConstructor, boolean];
/** @ignore */
const [BigInt64ArrayCtor, BigInt64ArrayAvailable] = (() => {
const BigInt64ArrayUnavailableError = () => { throw new Error('BigInt64Array is not available in this environment'); };
class BigInt64ArrayUnavailable {
static get BYTES_PER_ELEMENT() { return 8; }
static of() { throw BigInt64ArrayUnavailableError(); }
static from() { throw BigInt64ArrayUnavailableError(); }
constructor() { throw BigInt64ArrayUnavailableError(); }
}
return typeof BigInt64Array !== 'undefined' ? [BigInt64Array, true] : [<any> BigInt64ArrayUnavailable, false];
})() as [BigInt64ArrayConstructor, boolean];
/** @ignore */
const [BigUint64ArrayCtor, BigUint64ArrayAvailable] = (() => {
const BigUint64ArrayUnavailableError = () => { throw new Error('BigUint64Array is not available in this environment'); };
class BigUint64ArrayUnavailable {
static get BYTES_PER_ELEMENT() { return 8; }
static of() { throw BigUint64ArrayUnavailableError(); }
static from() { throw BigUint64ArrayUnavailableError(); }
constructor() { throw BigUint64ArrayUnavailableError(); }
}
return typeof BigUint64Array !== 'undefined' ? [BigUint64Array, true] : [<any> BigUint64ArrayUnavailable, false];
})() as [BigUint64ArrayConstructor, boolean];
export { BigIntCtor as BigInt, BigIntAvailable };
export { BigInt64ArrayCtor as BigInt64Array, BigInt64ArrayAvailable };
export { BigUint64ArrayCtor as BigUint64Array, BigUint64ArrayAvailable };
/** @ignore */ const isNumber = (x: any) => typeof x === 'number';
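
A sketch of how the availability flags exported above are meant to be consulted, since the shimmed constructors throw when the host lacks native support (the deep-import path is an assumption):

```ts
// Assumed deep-import path; the shims live in src/util/compat.ts.
import { BigInt, BigIntAvailable } from 'apache-arrow/util/compat';

// Branch on the availability flag before touching the shimmed BigInt:
const id = BigIntAvailable ? BigInt(42) : 42;
console.log(typeof id); // 'bigint' on modern runtimes, 'number' otherwise
```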

@@ -45,0 +82,0 @@ /** @ignore */ const isBoolean = (x: any) => typeof x === 'boolean';

@@ -24,3 +24,7 @@ // Licensed to the Apache Software Foundation (ASF) under one

if (x === undf) { return 'undefined'; }
if (typeof x === 'string') { return `"${x}"`; }
switch (typeof x) {
case 'number': return `${x}`;
case 'bigint': return `${x}`;
case 'string': return `"${x}"`;
}
// If [Symbol.toPrimitive] is implemented (like in BN)

@@ -27,0 +31,0 @@ // use it instead of JSON.stringify(). This ensures we

@@ -19,4 +19,5 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Vector } from '../vector';
import { Row } from '../vector/row';
import { Row, kLength } from '../vector/row';
import { compareArrayLike } from '../util/buffer';
import { BigInt, BigIntAvailable } from './compat';

@@ -63,7 +64,12 @@ /** @ignore */

const big0 = BigIntAvailable ? BigInt(0) : 0;
/** @ignore */
export function createElementComparator(search: any) {
let typeofSearch = typeof search;
// Compare primitives
if (search == null || typeof search !== 'object') {
return (value: any) => value === search;
if (typeofSearch !== 'object' || search === null) {
return typeofSearch !== 'bigint'
? (value: any) => value === search
: (value: any) => (big0 + value) === search;
}

@@ -80,34 +86,41 @@ // Compare Dates

if (Array.isArray(search)) {
const n = (search as any).length;
const fns = [] as ((x: any) => boolean)[];
for (let i = -1; ++i < n;) {
fns[i] = createElementComparator((search as any)[i]);
}
return (value: any) => {
if (!value || value.length !== n) { return false; }
// Handle the case where the search element is an Array, but the
// values are Rows or Vectors, e.g. list.indexOf(['foo', 'bar'])
if ((value instanceof Row) || (value instanceof Vector)) {
for (let i = -1, n = value.length; ++i < n;) {
if (!(fns[i]((value as any).get(i)))) { return false; }
}
return true;
return createArrayLikeComparator(search);
}
// Compare Rows
if (search instanceof Row) {
return createRowComparator(search);
}
// Compare Vectors
if (search instanceof Vector) {
return createVectorComparator(search);
}
// Compare non-empty Objects
const keys = Object.keys(search);
if (keys.length > 0) {
return createObjectKeysComparator(search, keys);
}
// No valid comparator
return () => false;
}
/** @ignore */
function createArrayLikeComparator(search: ArrayLike<any>) {
const n = search.length;
const fns = [] as ((x: any) => boolean)[];
for (let i = -1; ++i < n;) {
fns[i] = createElementComparator((search as any)[i]);
}
return (value: any) => {
if (!value) { return false; }
// Handle the case where the search element is an Array, but the
// values are Rows or Vectors, e.g. list.indexOf(['foo', 'bar'])
if (value instanceof Row) {
if (value[kLength] !== n) { return false; }
for (let i = -1; ++i < n;) {
if (!(fns[i](value.get(i)))) { return false; }
}
for (let i = -1, n = value.length; ++i < n;) {
if (!(fns[i](value[i]))) { return false; }
}
return true;
};
}
// Compare Rows and Vectors
if ((search instanceof Row) || (search instanceof Vector)) {
const n = search.length;
const C = search.constructor as any;
const fns = [] as ((x: any) => boolean)[];
for (let i = -1; ++i < n;) {
fns[i] = createElementComparator((search as any).get(i));
}
return (value: any) => {
if (!(value instanceof C)) { return false; }
if (!(value.length === n)) { return false; }
if (value.length !== n) { return false; }
if (value instanceof Vector) {
for (let i = -1; ++i < n;) {

@@ -117,22 +130,60 @@ if (!(fns[i](value.get(i)))) { return false; }

return true;
};
}
for (let i = -1; ++i < n;) {
if (!(fns[i](value[i]))) { return false; }
}
return true;
};
}
/** @ignore */
function createRowComparator(search: Row<any>) {
const n = search[kLength];
const C = search.constructor as any;
const fns = [] as ((x: any) => boolean)[];
for (let i = -1; ++i < n;) {
fns[i] = createElementComparator(search.get(i));
}
// Compare non-empty Objects
const keys = Object.keys(search);
if (keys.length > 0) {
const n = keys.length;
const fns = [] as ((x: any) => boolean)[];
return (value: any) => {
if (!(value instanceof C)) { return false; }
if (!(value[kLength] === n)) { return false; }
for (let i = -1; ++i < n;) {
fns[i] = createElementComparator(search[keys[i]]);
if (!(fns[i](value.get(i)))) { return false; }
}
return (value: any) => {
if (!value || typeof value !== 'object') { return false; }
for (let i = -1; ++i < n;) {
if (!(fns[i](value[keys[i]]))) { return false; }
}
return true;
};
return true;
};
}
/** @ignore */
function createVectorComparator(search: Vector<any>) {
const n = search.length;
const C = search.constructor as any;
const fns = [] as ((x: any) => boolean)[];
for (let i = -1; ++i < n;) {
fns[i] = createElementComparator((search as any).get(i));
}
// No valid comparator
return () => false;
return (value: any) => {
if (!(value instanceof C)) { return false; }
if (!(value.length === n)) { return false; }
for (let i = -1; ++i < n;) {
if (!(fns[i](value.get(i)))) { return false; }
}
return true;
};
}
/** @ignore */
function createObjectKeysComparator(search: any, keys: string[]) {
const n = keys.length;
const fns = [] as ((x: any) => boolean)[];
for (let i = -1; ++i < n;) {
fns[i] = createElementComparator(search[keys[i]]);
}
return (value: any) => {
if (!value || typeof value !== 'object') { return false; }
for (let i = -1; ++i < n;) {
if (!(fns[i](value[keys[i]]))) { return false; }
}
return true;
};
}
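
For orientation, a sketch of what the refactored comparators match; `createElementComparator` is the helper behind `indexOf`-style searches (the deep-import path is an assumption):

```ts
// Assumed deep-import path; the function lives in src/util/vector.ts.
import { createElementComparator } from 'apache-arrow/util/vector';

// Primitive searches compare with strict equality (bigint searches get a coercing comparator).
const isThree = createElementComparator(3);
console.assert(isThree(3) && !isThree('3'));

// Plain-object searches compare only the keys present on the search value.
const hasA1 = createElementComparator({ a: 1 });
console.assert(hasA1({ a: 1, b: 2 }));

// Array-like searches compare element-wise; Row and Vector search values
// get their own dedicated comparators.
const isPair = createElementComparator(['foo', 'bar']);
console.assert(isPair(['foo', 'bar']));
```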

@@ -38,3 +38,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

export interface Vector<T extends DataType = any>
export interface AbstractVector<T extends DataType = any>
extends Clonable<Vector<T>>,

@@ -49,3 +49,3 @@ Sliceable<Vector<T>>,

export abstract class Vector<T extends DataType = any> implements Iterable<T['TValue'] | null> {
export abstract class AbstractVector<T extends DataType = any> implements Iterable<T['TValue'] | null> {

@@ -71,1 +71,3 @@ public abstract readonly data: Data<T>;

}
export { AbstractVector as Vector };

@@ -19,3 +19,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Data } from '../data';
import { Vector } from '../vector';
import { AbstractVector, Vector } from '../vector';
import { DataType } from '../type';

@@ -33,3 +33,3 @@ import { Chunked } from './chunked';

export abstract class BaseVector<T extends DataType = any> extends Vector<T>
export abstract class BaseVector<T extends DataType = any> extends AbstractVector<T>
implements Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {

@@ -36,0 +36,0 @@

@@ -20,7 +20,8 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Field } from '../schema';
import { Vector } from '../vector';
import { clampRange } from '../util/vector';
import { DataType, Dictionary } from '../type';
import { DictionaryVector } from './dictionary';
import { AbstractVector, Vector } from '../vector';
import { selectChunkArgs } from '../util/args';
import { Clonable, Sliceable, Applicative } from '../vector';
import { DictionaryVector } from './dictionary';

@@ -37,3 +38,3 @@ /** @ignore */

export class Chunked<T extends DataType = any>
extends Vector<T>
extends AbstractVector<T>
implements Clonable<Chunked<T>>,

@@ -44,11 +45,10 @@ Sliceable<Chunked<T>>,

/** @nocollapse */
public static flatten<T extends DataType>(...vectors: Vector<T>[]) {
return vectors.reduce(function flatten(xs: any[], x: any): any[] {
return x instanceof Chunked ? x.chunks.reduce(flatten, xs) : [...xs, x];
}, []).filter((x: any): x is Vector<T> => x instanceof Vector);
public static flatten<T extends DataType>(...vectors: (Vector<T> | Vector<T>[])[]) {
return selectChunkArgs<Vector<T>>(Vector, vectors);
}
/** @nocollapse */
public static concat<T extends DataType>(...chunks: Vector<T>[]): Chunked<T> {
return new Chunked(chunks[0].type, Chunked.flatten(...chunks));
public static concat<T extends DataType>(...vectors: (Vector<T> | Vector<T>[])[]) {
const chunks = Chunked.flatten<T>(...vectors);
return new Chunked<T>(chunks[0].type, chunks);
}
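
A usage sketch for the reworked `Chunked.concat`, which now flattens nested arrays of Vectors via `selectChunkArgs` (the entry-point import is an assumption):

```ts
// NOTE: import path is an assumption; adjust to your build of the package.
import { Chunked, Int32Vector } from 'apache-arrow';

const a = Int32Vector.from([1, 2]);
const b = Int32Vector.from([3, 4, 5]);

// Vectors and arrays of Vectors are flattened into one chunked Vector.
const chunked = Chunked.concat(a, [b]);
console.assert(chunked.length === 5);
console.assert(chunked.chunks.length === 2);
console.assert(chunked.get(4) === 5);
```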

@@ -76,3 +76,3 @@

public get chunks() { return this._chunks; }
public get typeId() { return this._type.typeId; }
public get typeId(): T['TType'] { return this._type.typeId; }
public get data(): Data<T> {

@@ -245,3 +245,3 @@ return this._chunks[0] ? this._chunks[0].data : <any> null;

const from = Math.max(0, begin - chunkOffset);
const to = from + Math.min(chunkLength - from, end - chunkOffset);
const to = Math.min(end - chunkOffset, chunkLength);
slices.push(chunk.slice(from, to) as Vector<T>);

@@ -248,0 +248,0 @@ }

@@ -33,16 +33,15 @@ // Licensed to the Apache Software Foundation (ASF) under one

}
protected _indices: V<TKey>;
public readonly indices: V<TKey>;
constructor(data: Data<Dictionary<T, TKey>>) {
super(data);
this._indices = Vector.new(data.clone(this.type.indices));
this.indices = Vector.new(data.clone(this.type.indices));
}
// protected _bindDataAccessors() {}
public get indices() { return this._indices; }
public get dictionary() { return this.data.type.dictionaryVector; }
public isValid(index: number) { return this._indices.isValid(index); }
public reverseLookup(value: T) { return this.dictionary.indexOf(value); }
public getKey(idx: number): TKey['TValue'] | null { return this._indices.get(idx); }
public getKey(idx: number): TKey['TValue'] | null { return this.indices.get(idx); }
public getValue(key: number): T['TValue'] | null { return this.dictionary.get(key); }
public setKey(idx: number, key: TKey['TValue'] | null) { return this._indices.set(idx, key); }
public setKey(idx: number, key: TKey['TValue'] | null) { return this.indices.set(idx, key); }
public setValue(key: number, value: T['TValue'] | null) { return this.dictionary.set(key, value); }
}
(DictionaryVector.prototype as any).indices = null;

@@ -21,18 +21,47 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { BaseVector } from './base';
import { Vector as V } from '../interfaces';
import { Float, Float16, Float32, Float64 } from '../type';
import { toFloat16Array, toFloat32Array, toFloat64Array } from '../util/buffer';
export class FloatVector<T extends Float = Float> extends BaseVector<T> {
public static from(this: typeof FloatVector, data: Float16['TArray']): Float16Vector;
public static from(this: typeof FloatVector, data: Float32['TArray']): Float32Vector;
public static from(this: typeof FloatVector, data: Float64['TArray']): Float64Vector;
public static from<T extends Float>(this: typeof FloatVector, data: T['TArray']): V<T>;
public static from(this: typeof Float16Vector, data: Float16['TArray'] | Iterable<number>): Float16Vector;
public static from(this: typeof Float32Vector, data: Float32['TArray'] | Iterable<number>): Float32Vector;
public static from(this: typeof Float64Vector, data: Float64['TArray'] | Iterable<number>): Float64Vector;
/** @nocollapse */
public static from<T extends Float>(data: T['TArray']) {
let type: Float | null = null;
switch (this) {
case Float16Vector: data = toFloat16Array(data); break;
case Float32Vector: data = toFloat32Array(data); break;
case Float64Vector: data = toFloat64Array(data); break;
}
switch (data.constructor) {
case Uint16Array: return Vector.new(Data.Float(new Float16(), 0, data.length, 0, null, data));
case Float32Array: return Vector.new(Data.Float(new Float32(), 0, data.length, 0, null, data));
case Float64Array: return Vector.new(Data.Float(new Float64(), 0, data.length, 0, null, data));
case Uint16Array: type = new Float16(); break;
case Float32Array: type = new Float32(); break;
case Float64Array: type = new Float64(); break;
}
throw new TypeError('Unrecognized Float data');
return type !== null
? Vector.new(Data.Float(type, 0, data.length, 0, null, data))
: (() => { throw new TypeError('Unrecognized FloatVector input'); })();
}
}
export class Float16Vector extends FloatVector<Float16> {}
export class Float16Vector extends FloatVector<Float16> {
// Since JS doesn't have half floats, `toArray()` returns a zero-copy slice
// of the underlying Uint16Array data. This behavior ensures we don't incur
// extra compute or copies if you're calling `toArray()` in order to create
// a buffer for something like WebGL. But if you're using JS and want typed
// arrays of 4-to-8-byte precision, these methods will enumerate the values
// and clamp to the desired byte lengths.
public toFloat32Array() { return new Float32Array(this as Iterable<number>); }
public toFloat64Array() { return new Float64Array(this as Iterable<number>); }
}
export class Float32Vector extends FloatVector<Float32> {}
export class Float64Vector extends FloatVector<Float64> {}
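
A usage sketch for the new `Float16Vector` conversion helpers described in the comment above (the entry-point import is an assumption):

```ts
// NOTE: import path is an assumption; adjust to your build of the package.
import { Float16Vector } from 'apache-arrow';

// Construct directly from the packed uint16 representation...
const halfs = Float16Vector.from(new Uint16Array([0, 32767, 65534]));

// ...`toArray()` stays zero-copy over the underlying Uint16Array,
console.assert(halfs.toArray() instanceof Uint16Array);

// ...while the new helpers enumerate the values and widen them.
const f32 = halfs.toFloat32Array();   // Float32Array of length 3
const f64 = halfs.toFloat64Array();   // Float64Array of length 3
console.assert(f32.length === 3 && f64.length === 3);
```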

@@ -163,3 +163,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

/** @ignore */
function wrapNullable1<T extends DataType, V extends Vector<T>, F extends (i: number) => any>(fn: F): (...args: Parameters<F>) => ReturnType<F> {
function wrapNullableGet<T extends DataType, V extends Vector<T>, F extends (i: number) => any>(fn: F): (...args: Parameters<F>) => ReturnType<F> {
return function(this: V, i: number) { return this.isValid(i) ? fn.call(this, i) : null; };

@@ -171,3 +171,3 @@ }

return function(this: V, i: number, a: any) {
if (setBool(this.nullBitmap, this.offset + i, a != null)) {
if (setBool(this.nullBitmap, this.offset + i, !(a === null || a === undefined))) {
fn.call(this, i, a);

@@ -182,5 +182,5 @@ }

if (nullBitmap && nullBitmap.byteLength > 0) {
this.get = wrapNullable1(this.get);
this.get = wrapNullableGet(this.get);
this.set = wrapNullableSet(this.set);
}
}

@@ -23,22 +23,62 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Int, Uint8, Uint16, Uint32, Uint64, Int8, Int16, Int32, Int64 } from '../type';
import {
toInt8Array, toInt16Array, toInt32Array,
toUint8Array, toUint16Array, toUint32Array,
toBigInt64Array, toBigUint64Array
} from '../util/buffer';
export class IntVector<T extends Int = Int> extends BaseVector<T> {
public static from<T extends Int>(data: T['TArray']): V<T>;
public static from<T extends Int32 | Uint32>(data: T['TArray'], is64: true): V<T>;
public static from(this: typeof IntVector, data: Int8Array): Int8Vector;
public static from(this: typeof IntVector, data: Int16Array): Int16Vector;
public static from(this: typeof IntVector, data: Int32Array): Int32Vector;
public static from(this: typeof IntVector, data: Uint8Array): Uint8Vector;
public static from(this: typeof IntVector, data: Uint16Array): Uint16Vector;
public static from(this: typeof IntVector, data: Uint32Array): Uint32Vector;
// @ts-ignore
public static from(this: typeof IntVector, data: Int32Array, is64: true): Int64Vector;
public static from(this: typeof IntVector, data: Uint32Array, is64: true): Uint64Vector;
public static from<T extends Int>(this: typeof IntVector, data: T['TArray']): V<T>;
public static from(this: typeof Int8Vector, data: Int8['TArray'] | Iterable<number>): Int8Vector;
public static from(this: typeof Int16Vector, data: Int16['TArray'] | Iterable<number>): Int16Vector;
public static from(this: typeof Int32Vector, data: Int32['TArray'] | Iterable<number>): Int32Vector;
public static from(this: typeof Int64Vector, data: Int32['TArray'] | Iterable<number>): Int64Vector;
public static from(this: typeof Uint8Vector, data: Uint8['TArray'] | Iterable<number>): Uint8Vector;
public static from(this: typeof Uint16Vector, data: Uint16['TArray'] | Iterable<number>): Uint16Vector;
public static from(this: typeof Uint32Vector, data: Uint32['TArray'] | Iterable<number>): Uint32Vector;
public static from(this: typeof Uint64Vector, data: Uint32['TArray'] | Iterable<number>): Uint64Vector;
/** @nocollapse */
public static from(data: any, is64?: boolean) {
public static from<T extends Int>(data: T['TArray'], is64?: boolean) {
let length: number = 0;
let type: Int | null = null;
switch (this) {
case Int8Vector: data = toInt8Array(data); is64 = false; break;
case Int16Vector: data = toInt16Array(data); is64 = false; break;
case Int32Vector: data = toInt32Array(data); is64 = false; break;
case Int64Vector: data = toInt32Array(data); is64 = true; break;
case Uint8Vector: data = toUint8Array(data); is64 = false; break;
case Uint16Vector: data = toUint16Array(data); is64 = false; break;
case Uint32Vector: data = toUint32Array(data); is64 = false; break;
case Uint64Vector: data = toUint32Array(data); is64 = true; break;
}
if (is64 === true) {
return data instanceof Int32Array
? Vector.new(Data.Int(new Int64(), 0, data.length, 0, null, data))
: Vector.new(Data.Int(new Uint64(), 0, data.length, 0, null, data));
length = data.length * 0.5;
type = data instanceof Int32Array ? new Int64() : new Uint64();
} else {
length = data.length;
switch (data.constructor) {
case Int8Array: type = new Int8(); break;
case Int16Array: type = new Int16(); break;
case Int32Array: type = new Int32(); break;
case Uint8Array: type = new Uint8(); break;
case Uint16Array: type = new Uint16(); break;
case Uint32Array: type = new Uint32(); break;
}
}
switch (data.constructor) {
case Int8Array: return Vector.new(Data.Int(new Int8(), 0, data.length, 0, null, data));
case Int16Array: return Vector.new(Data.Int(new Int16(), 0, data.length, 0, null, data));
case Int32Array: return Vector.new(Data.Int(new Int32(), 0, data.length, 0, null, data));
case Uint8Array: return Vector.new(Data.Int(new Uint8(), 0, data.length, 0, null, data));
case Uint16Array: return Vector.new(Data.Int(new Uint16(), 0, data.length, 0, null, data));
case Uint32Array: return Vector.new(Data.Int(new Uint32(), 0, data.length, 0, null, data));
}
throw new TypeError('Unrecognized Int data');
return type !== null
? Vector.new(Data.Int(type, 0, length, 0, null, data))
: (() => { throw new TypeError('Unrecognized IntVector input'); })();
}
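
A usage sketch of the revised `IntVector.from` overloads, based on the switch shown above (the entry-point import is an assumption):

```ts
// NOTE: import path is an assumption; adjust to your build of the package.
import { IntVector } from 'apache-arrow';

// Plain 32-bit input:
const i32s = IntVector.from(new Int32Array([1, 2, 3]));          // Int32Vector
console.assert(i32s.length === 3);

// 64-bit input is supplied as pairs of 32-bit words; passing `is64: true`
// (or calling Int64Vector.from directly) halves the logical length.
const i64s = IntVector.from(new Int32Array([1, 0, 2, 0]), true); // Int64Vector
console.assert(i64s.length === 2);
```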

@@ -50,6 +90,23 @@ }

export class Int32Vector extends IntVector<Int32> {}
export class Int64Vector extends IntVector<Int64> {}
export class Int64Vector extends IntVector<Int64> {
public toBigInt64Array() {
return toBigInt64Array(this.values);
}
}
export class Uint8Vector extends IntVector<Uint8> {}
export class Uint16Vector extends IntVector<Uint16> {}
export class Uint32Vector extends IntVector<Uint32> {}
export class Uint64Vector extends IntVector<Uint64> {}
export class Uint64Vector extends IntVector<Uint64> {
public toBigUint64Array() {
return toBigUint64Array(this.values);
}
}
export interface Int64Vector extends IntVector<Int64> {
indexOf(value: Int64['TValue'] | bigint | null, fromIndex?: number): number;
}
export interface Uint64Vector extends IntVector<Uint64> {
indexOf(value: Uint64['TValue'] | bigint | null, fromIndex?: number): number;
}
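
And a sketch of the new 64-bit helpers, which are only meaningful on runtimes where `BigInt64Array`/`BigUint64Array` exist (the entry-point import is an assumption):

```ts
// NOTE: import path is an assumption; adjust to your build of the package.
import { Int64Vector } from 'apache-arrow';

// Two 64-bit values, stored as pairs of 32-bit words.
const v = Int64Vector.from(new Int32Array([1, 0, 2, 0]));

// On runtimes with BigInt64Array support, the helper exposes the stored
// 64-bit words as a BigInt64Array.
const big = v.toBigInt64Array();
console.log(big.length); // 2

// The widened interfaces above also let `indexOf` accept bigint search values.
```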

@@ -18,5 +18,6 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Row } from './row';
import { Field } from '../schema';
import { Vector } from '../vector';
import { BaseVector } from './base';
import { RowProxyGenerator } from './row';
import { DataType, Map_, Struct } from '../type';

@@ -26,9 +27,9 @@

public asStruct() {
return Vector.new(this.data.clone(new Struct<T>(this.type.children)));
return Vector.new(this.data.clone(new Struct<T>(this.type.children as Field<T[keyof T]>[])));
}
// @ts-ignore
private _rowProxy: Row<T>;
public get rowProxy(): Row<T> {
return this._rowProxy || (this._rowProxy = Row.new<T>(this.type.children || [], true));
private _rowProxy: RowProxyGenerator<T>;
public get rowProxy(): RowProxyGenerator<T> {
return this._rowProxy || (this._rowProxy = RowProxyGenerator.new<T>(this, this.type.children || [], true));
}
}

@@ -20,71 +20,29 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { MapVector } from '../vector/map';
import { DataType, RowLike } from '../type';
import { DataType } from '../type';
import { valueToString } from '../util/pretty';
import { StructVector } from '../vector/struct';
/** @ignore */ const columnDescriptor = { enumerable: true, configurable: false, get: () => {} };
/** @ignore */ const lengthDescriptor = { writable: false, enumerable: false, configurable: false, value: -1 };
/** @ignore */ const rowIndexDescriptor = { writable: false, enumerable: false, configurable: true, value: null as any };
/** @ignore */ export const kLength = Symbol.for('length');
/** @ignore */ export const kParent = Symbol.for('parent');
/** @ignore */ export const kRowIndex = Symbol.for('rowIndex');
/** @ignore */ const columnDescriptor = { enumerable: true, configurable: false, get: null as any };
/** @ignore */ const rowLengthDescriptor = { writable: false, enumerable: false, configurable: false, value: -1 };
/** @ignore */ const rowParentDescriptor = { writable: false, enumerable: false, configurable: false, value: null as any };
/** @ignore */ const row = { parent: rowParentDescriptor, rowIndex: rowIndexDescriptor };
/** @ignore */
export class Row<T extends { [key: string]: DataType }> implements Iterable<T[keyof T]['TValue']> {
[key: string]: T[keyof T]['TValue'];
/** @nocollapse */
public static new<T extends { [key: string]: DataType }>(schemaOrFields: T | Field[], fieldsAreEnumerable = false): RowLike<T> & Row<T> {
let schema: T, fields: Field[];
if (Array.isArray(schemaOrFields)) {
fields = schemaOrFields;
} else {
schema = schemaOrFields;
fieldsAreEnumerable = true;
fields = Object.keys(schema).map((x) => new Field(x, schema[x]));
}
return new Row<T>(fields, fieldsAreEnumerable) as RowLike<T> & Row<T>;
}
// @ts-ignore
private parent: TParent;
public [kParent]: MapVector<T> | StructVector<T>;
// @ts-ignore
private rowIndex: number;
public [kRowIndex]: number;
// @ts-ignore
public readonly length: number;
private constructor(fields: Field[], fieldsAreEnumerable: boolean) {
lengthDescriptor.value = fields.length;
Object.defineProperty(this, 'length', lengthDescriptor);
fields.forEach((field, columnIndex) => {
columnDescriptor.get = this._bindGetter(columnIndex);
// set configurable to true to ensure Object.defineProperty
// doesn't throw in the case of duplicate column names
columnDescriptor.configurable = true;
columnDescriptor.enumerable = fieldsAreEnumerable;
Object.defineProperty(this, field.name, columnDescriptor);
columnDescriptor.configurable = false;
columnDescriptor.enumerable = !fieldsAreEnumerable;
Object.defineProperty(this, columnIndex, columnDescriptor);
columnDescriptor.get = null as any;
});
}
*[Symbol.iterator](this: RowLike<T>) {
for (let i = -1, n = this.length; ++i < n;) {
public readonly [kLength]: number;
*[Symbol.iterator]() {
for (let i = -1, n = this[kLength]; ++i < n;) {
yield this[i];
}
}
private _bindGetter(colIndex: number) {
return function (this: Row<T>) {
let child = this.parent.getChildAt(colIndex);
return child ? child.get(this.rowIndex) : null;
};
}
public get<K extends keyof T>(key: K) { return (this as any)[key] as T[K]['TValue']; }
public bind<TParent extends MapVector<T> | StructVector<T>>(parent: TParent, rowIndex: number) {
rowIndexDescriptor.value = rowIndex;
rowParentDescriptor.value = parent;
const bound = Object.create(this, row);
rowIndexDescriptor.value = null;
rowParentDescriptor.value = null;
return bound as RowLike<T>;
}
public toJSON(): any {
return DataType.isStruct(this.parent.type) ? [...this] :
return DataType.isStruct(this[kParent].type) ? [...this] :
Object.getOwnPropertyNames(this).reduce((props: any, prop: string) => {

@@ -95,3 +53,3 @@ return (props[prop] = (this as any)[prop]) && props || props;

public toString() {
return DataType.isStruct(this.parent.type) ?
return DataType.isStruct(this[kParent].type) ?
[...this].map((x) => valueToString(x)).join(', ') :

@@ -103,1 +61,55 @@ Object.getOwnPropertyNames(this).reduce((props: any, prop: string) => {

}
/** @ignore */
export class RowProxyGenerator<T extends { [key: string]: DataType }> {
/** @nocollapse */
public static new<T extends { [key: string]: DataType }>(parent: MapVector<T> | StructVector<T>, schemaOrFields: T | Field[], fieldsAreEnumerable = false): RowProxyGenerator<T> {
let schema: T, fields: Field[];
if (Array.isArray(schemaOrFields)) {
fields = schemaOrFields;
} else {
schema = schemaOrFields;
fieldsAreEnumerable = true;
fields = Object.keys(schema).map((x) => new Field(x, schema[x]));
}
return new RowProxyGenerator<T>(parent, fields, fieldsAreEnumerable);
}
private rowPrototype: Row<T>;
private constructor(parent: MapVector<T> | StructVector<T>, fields: Field[], fieldsAreEnumerable: boolean) {
const proto = Object.create(Row.prototype);
rowParentDescriptor.value = parent;
rowLengthDescriptor.value = fields.length;
Object.defineProperty(proto, kParent, rowParentDescriptor);
Object.defineProperty(proto, kLength, rowLengthDescriptor);
fields.forEach((field, columnIndex) => {
if (!proto.hasOwnProperty(field.name)) {
columnDescriptor.enumerable = fieldsAreEnumerable;
columnDescriptor.get || (columnDescriptor.get = this._bindGetter(columnIndex));
Object.defineProperty(proto, field.name, columnDescriptor);
}
if (!proto.hasOwnProperty(columnIndex)) {
columnDescriptor.enumerable = !fieldsAreEnumerable;
columnDescriptor.get || (columnDescriptor.get = this._bindGetter(columnIndex));
Object.defineProperty(proto, columnIndex, columnDescriptor);
}
columnDescriptor.get = null as any;
});
this.rowPrototype = proto;
}
private _bindGetter(columnIndex: number) {
return function(this: Row<T>) {
const child = this[kParent].getChildAt(columnIndex);
return child ? child.get(this[kRowIndex]) : null;
};
}
public bind(rowIndex: number) {
const bound = Object.create(this.rowPrototype);
bound[kRowIndex] = rowIndex;
return bound;
}
}
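
The RowProxyGenerator above defines the per-column getters once on a shared prototype, so binding an individual row is just an Object.create plus a row index. A minimal standalone sketch of that pattern, assuming plain-array "columns" (the ColumnSource and makeRowFactory names are illustrative, not part of the arrow API):

// Standalone sketch of the prototype-per-schema row pattern; not arrow's actual code.
const kRowIndex = Symbol.for('rowIndex');

interface ColumnSource {
    name: string;
    get(rowIndex: number): unknown;
}

function makeRowFactory(columns: ColumnSource[]) {
    // Column getters are defined once, on a shared prototype.
    const proto: any = {};
    columns.forEach((column, i) => {
        const get = function (this: any) { return column.get(this[kRowIndex]); };
        Object.defineProperty(proto, column.name, { enumerable: true, configurable: true, get });
        Object.defineProperty(proto, i, { enumerable: false, configurable: true, get });
    });
    // Binding a row is then just an Object.create carrying the row index.
    return (rowIndex: number) => {
        const row = Object.create(proto);
        row[kRowIndex] = rowIndex;
        return row;
    };
}

// Usage with two plain-array "columns":
const ids = [1, 2, 3], names = ['a', 'b', 'c'];
const bindRow = makeRowFactory([
    { name: 'id',   get: (i) => ids[i]   },
    { name: 'name', get: (i) => names[i] },
]);
console.log(bindRow(1).id, bindRow(1).name); // 2 'b'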

@@ -18,5 +18,6 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Row } from './row';
import { Field } from '../schema';
import { Vector } from '../vector';
import { BaseVector } from './base';
import { RowProxyGenerator } from './row';
import { DataType, Map_, Struct } from '../type';

@@ -26,9 +27,9 @@

public asMap(keysSorted: boolean = false) {
return Vector.new(this.data.clone(new Map_<T>(this.type.children, keysSorted)));
return Vector.new(this.data.clone(new Map_<T>(this.type.children as Field<T[keyof T]>[], keysSorted)));
}
// @ts-ignore
private _rowProxy: Row<T>;
public get rowProxy(): Row<T> {
return this._rowProxy || (this._rowProxy = Row.new<T>(this.type.children || [], false));
private _rowProxy: RowProxyGenerator<T>;
public get rowProxy(): RowProxyGenerator<T> {
return this._rowProxy || (this._rowProxy = RowProxyGenerator.new<T>(this, this.type.children || [], false));
}
}

@@ -214,3 +214,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

>(vector: V, index: number): V['TValue'] => {
return vector.rowProxy.bind(vector, index);
return vector.rowProxy.bind(index);
};

@@ -217,0 +217,0 @@

@@ -27,2 +27,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { iterateBits, getBit, getBool } from '../util/bit';
import { selectColumnChildrenArgs } from '../util/args';
import {

@@ -63,11 +64,3 @@ DataType,

public static assemble<T extends Column | RecordBatch>(...args: (T | T[])[]) {
const vectors = args.reduce(function flatten(xs: any[], x: any): any[] {
if (Array.isArray(x)) { return x.reduce(flatten, xs); }
if (!(x instanceof RecordBatch)) { return [...xs, x]; }
return xs.concat(x.schema.fields.map(
(f, i) => new Column(f, [x.getChildAt(i)!])));
}, []).filter((x: any): x is Column => x instanceof Column);
return new JSONVectorAssembler().visitMany(vectors);
return new JSONVectorAssembler().visitMany(selectColumnChildrenArgs(RecordBatch, args));
}
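
The inline reduce/flatten removed here now lives in util/args, which is not shown in this diff. A rough sketch of the general shape such a select*ChildrenArgs helper could take, based on the logic it replaces (the name, signature, and generics below are assumptions, not the actual util/args code):

// Sketch only: flatten nested args, expand each composite (e.g. a RecordBatch)
// into its children, and keep just the values the caller cares about.
function selectChildrenArgs<TChild, TComposite>(
    Composite: new (...args: any[]) => TComposite,
    expand: (composite: TComposite) => TChild[],
    isChild: (x: any) => x is TChild,
    args: (TChild | TComposite | (TChild | TComposite)[])[]
): TChild[] {
    return args
        .reduce(function flatten(xs: any[], x: any): any[] {
            if (Array.isArray(x)) { return x.reduce(flatten, xs); }
            if (x instanceof Composite) { return [...xs, ...expand(x)]; }
            return [...xs, x];
        }, [] as any[])
        .filter(isChild);
}

Presumably the real helpers specialize this for Column and Vector children of a RecordBatch, which is why both assemblers can now share one implementation.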

@@ -89,3 +82,3 @@

}
public visitNull() { return {}; }
public visitNull() { return { 'DATA': [] }; }
public visitBool<T extends Bool>({ values, offset, length }: VType<T>) {

@@ -182,4 +175,4 @@ return { 'DATA': [...iterateBits(values, offset, length, null, getBool)] };

for (let i = -1, n = values.length / stride; ++i < n;) {
yield `${BN.new(values.subarray((i + 0) * stride, (i + 1) * stride))}`;
yield `${BN.new(values.subarray((i + 0) * stride, (i + 1) * stride), false)}`;
}
}

@@ -19,5 +19,5 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Data } from '../data';
import { Type } from '../enum';
import { Visitor } from '../visitor';
import { Vector } from '../interfaces';
import { Type, Precision } from '../enum';
import { instance as iteratorVisitor } from './iterator';

@@ -95,9 +95,6 @@ import {

switch (type.typeId) {
case Type.Int: case Type.Decimal:
case Type.Int:
case Type.Float: case Type.Decimal:
case Type.Time: case Type.Timestamp:
return vector.values.subarray(0, length * stride);
case Type.Float:
return (type as Float).precision === Precision.HALF /* Precision.HALF */
? new Float32Array(vector[Symbol.iterator]())
: vector.values.subarray(0, length * stride);
}

@@ -104,0 +101,0 @@
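
The new Float branch above splits on precision because half-precision values are stored as raw 16-bit patterns: a zero-copy values.subarray would hand back those bit patterns, so the HALF case has to decode element by element via the iterator. A small illustration of the underlying conversion, using a generic IEEE-754 half-float decoder rather than arrow's implementation:

// Decode one IEEE-754 half-precision bit pattern into a JS number (illustrative only).
function float16ToNumber(bits: number): number {
    const sign = (bits & 0x8000) ? -1 : 1;
    const exponent = (bits >> 10) & 0x1f;
    const fraction = bits & 0x03ff;
    if (exponent === 0x00) { return sign * Math.pow(2, -14) * (fraction / 1024); } // zero / subnormal
    if (exponent === 0x1f) { return fraction ? NaN : sign * Infinity; }            // NaN / +-Infinity
    return sign * Math.pow(2, exponent - 15) * (1 + fraction / 1024);
}

const halfBits  = Uint16Array.of(0x3c00, 0xc000, 0x3555);       // 1, -2, ~1/3 as raw u16
const asFloat32 = Float32Array.from(halfBits, float16ToNumber); // [1, -2, 0.33325195]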

@@ -26,2 +26,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { packBools, truncateBitmap } from '../util/bit';
import { selectVectorChildrenArgs } from '../util/args';
import { BufferRegion, FieldNode } from '../ipc/metadata/message';

@@ -62,10 +63,6 @@ import {

public static assemble<T extends Vector | RecordBatch>(...args: (T | T[])[]) {
const vectors = args.reduce(function flatten(xs: any[], x: any): any[] {
if (Array.isArray(x)) { return x.reduce(flatten, xs); }
if (!(x instanceof RecordBatch)) { return [...xs, x]; }
return [...xs, ...x.schema.fields.map((_, i) => x.getChildAt(i)!)];
}, []).filter((x: any): x is Vector => x instanceof Vector);
return new VectorAssembler().visitMany(vectors)[0];
const assembler = new VectorAssembler();
const vectorChildren = selectVectorChildrenArgs(RecordBatch, args);
const [assembleResult = assembler] = assembler.visitMany(vectorChildren);
return assembleResult;
}

@@ -90,3 +87,5 @@

public visitNull<T extends Null>(_nullV: VType<T>) { return this; }
public visitNull<T extends Null>(_nullV: VType<T>) {
return addBuffer.call(this, new Uint8Array(0));
}
public visitDictionary<T extends Dictionary>(vector: VType<T>) {

@@ -93,0 +92,0 @@ // Assemble the indices here, Dictionary assembled separately.

@@ -52,3 +52,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

public visitNull <T extends type.Null> (type: T, { length, nullCount } = this.nextFieldNode()) { return Data.Null(type, 0, length, nullCount, this.readNullBitmap(type, nullCount)); }
public visitNull <T extends type.Null> (type: T, { length, nullCount } = this.nextFieldNode()) { return Data.Null(type, 0, length, nullCount, this.readNullBitmap(type, nullCount), this.readData(type)); }
public visitBool <T extends type.Bool> (type: T, { length, nullCount } = this.nextFieldNode()) { return Data.Bool(type, 0, length, nullCount, this.readNullBitmap(type, nullCount), this.readData(type)); }

@@ -55,0 +55,0 @@ public visitInt <T extends type.Int> (type: T, { length, nullCount } = this.nextFieldNode()) { return Data.Int(type, 0, length, nullCount, this.readNullBitmap(type, nullCount), this.readData(type)); }
