@apache-arrow/ts - npm package version comparison

Comparing version 4.0.1 to 5.0.0


Arrow.dom.ts

@@ -112,2 +112,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

Utf8Builder,
isTypedArray,
} from './Arrow';

@@ -135,1 +135,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

};
export { isTypedArray } from './util/args';


bin/arrow2csv.ts

@@ -70,3 +70,3 @@ #! /usr/bin/env node

if (err) {
console.error(`${err && err.stack || err}`);
console.error(`${err?.stack || err}`);
}

@@ -151,3 +151,3 @@ return process.exitCode || 1;

batch = !(state.schema && state.schema.length) ? batch : batch.select(...state.schema);
batch = !state.schema?.length ? batch : batch.select(...state.schema);

@@ -154,0 +154,0 @@ if (state.closed) { return cb(undefined, null); }
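Many of the changes in this release replace `x && x.y` guards with optional chaining, as in the two hunks above. The forms are interchangeable in the boolean contexts used here; a minimal sketch (not Arrow-specific):

```ts
interface State { schema?: string[] }

// 4.0.1 style: truthy only when schema exists and is non-empty, then negated
const isEmptyOld = (state: State) => !(state.schema && state.schema.length);

// 5.0.0 style: `state.schema?.length` is undefined when schema is missing
const isEmptyNew = (state: State) => !state.schema?.length;

// Both return true for {} and { schema: [] }, and false for { schema: ['a'] };
// they differ only in the intermediate value they short-circuit to.
```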

@@ -32,7 +32,12 @@ // Licensed to the Apache Software Foundation (ASF) under one

Table.prototype.countBy = function(this: Table, name: Col | string) { return new DataFrame(this.chunks).countBy(name); };
Table.prototype.scan = function(this: Table, next: NextFunc, bind?: BindFunc) { return new DataFrame(this.chunks).scan(next, bind); };
Table.prototype.scanReverse = function(this: Table, next: NextFunc, bind?: BindFunc) { return new DataFrame(this.chunks).scanReverse(next, bind); };
Table.prototype.filter = function(this: Table, predicate: Predicate): FilteredDataFrame { return new DataFrame(this.chunks).filter(predicate); };
/**
* `DataFrame` extends {@link Table} with support for predicate filtering.
*
* You can construct `DataFrames` like tables or convert a `Table` to a `DataFrame`
* with the constructor.
*
* ```ts
* const df = new DataFrame(table);
* ```
*/
export class DataFrame<T extends { [key: string]: DataType } = any> extends Table<T> {

@@ -39,0 +44,0 @@ public filter(predicate: Predicate): FilteredDataFrame<T> {
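As a usage illustration of the `DataFrame` documentation comment above, here is a minimal sketch. It assumes `DataFrame`, `predicate`, `Table`, and `Utf8Vector` are all exported from the `apache-arrow` entry point (the compute API of the 4.x/5.x line); the column name and data are hypothetical.

```ts
import { DataFrame, predicate, Table, Utf8Vector } from 'apache-arrow';

// Hypothetical single-column table for illustration.
const table = Table.new({ origin_city: Utf8Vector.from(['Dallas', 'Austin', 'Dallas']) });

const df = new DataFrame(table);                                        // wrap an existing Table
const filtered = df.filter(predicate.col('origin_city').eq('Dallas'));  // FilteredDataFrame

console.log(filtered.count());   // 2
filtered.scan((idx, batch) => {
  // invoked once per matching row; `idx` is the row index within `batch`
});
```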

@@ -166,2 +166,16 @@ // Licensed to the Apache Software Foundation (ASF) under one

/** @ignore */
export type TypedArrayDataType<T extends Exclude<TypedArray, Uint8ClampedArray> | BigIntArray> =
T extends Int8Array ? type.Int8 :
T extends Int16Array ? type.Int16 :
T extends Int32Array ? type.Int32 :
T extends BigInt64Array ? type.Int64 :
T extends Uint8Array ? type.Uint8 :
T extends Uint16Array ? type.Uint16 :
T extends Uint32Array ? type.Uint32 :
T extends BigUint64Array ? type.Uint64 :
T extends Float32Array ? type.Float32 :
T extends Float64Array ? type.Float64 :
never;
/** @ignore */
type TypeToVector<T extends Type> = {

@@ -168,0 +182,0 @@ [key: number ]: vecs.Vector<any> ;
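The `TypedArrayDataType` conditional type above maps each JavaScript typed array to its Arrow data type. Together with the `Table.new` overload changed further down in this diff, it lets columns be created directly from typed arrays. A minimal sketch of what the 5.0.0 overload appears to enable (column names are hypothetical):

```ts
import { Table } from 'apache-arrow';

const table = Table.new({
  value: new Float32Array([1.5, 2.5, 3.5]),  // -> Float32 column, per the mapping above
  count: new Int32Array([1, 2, 3]),          // -> Int32 column
});

console.log(table.schema.fields.map((f) => `${f.name}: ${f.type}`));
// e.g. [ 'value: Float32', 'count: Int32' ]
```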

@@ -177,4 +177,4 @@ // Licensed to the Apache Software Foundation (ASF) under one

}
throw new Error(`${this} is closed`);
throw new Error(`AsyncQueue is closed`);
}
}

@@ -57,3 +57,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

it && (fn = e != null && it.throw || it.return);
fn && fn.call(it, e);
fn?.call(it, e);
cb && cb(null);

@@ -70,3 +70,3 @@ }

}
if ((r && r.done || !this.readable) && (this.push(null) || true)) {
if ((r?.done || !this.readable) && (this.push(null) || true)) {
it.return && it.return();

@@ -99,3 +99,3 @@ }

it && (fn = e != null && it.throw || it.return);
fn && fn.call(it, e).then(() => cb && cb(null)) || (cb && cb(null));
fn?.call(it, e).then(() => cb && cb(null)) || (cb && cb(null));
}

@@ -111,3 +111,3 @@ private async _pull(size: number, it: AsyncSourceIterator<T>) {

}
if ((r && r.done || !this.readable) && (this.push(null) || true)) {
if ((r?.done || !this.readable) && (this.push(null) || true)) {
it.return && it.return();

@@ -114,0 +114,0 @@ }

@@ -47,3 +47,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

const aq = this._asyncQueue;
aq && aq.close();
aq?.close();
cb && cb();

@@ -53,3 +53,3 @@ }

const aq = this._asyncQueue;
aq && aq.write(x);
aq?.write(x);
cb && cb();

@@ -82,3 +82,3 @@ return true;

}
if (!this.readable || (r && r.done && (reader.autoDestroy || (await reader.reset().open()).closed))) {
if (!this.readable || (r?.done && (reader.autoDestroy || (await reader.reset().open()).closed))) {
this.push(null);

@@ -85,0 +85,0 @@ await reader.cancel();

@@ -43,3 +43,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

const writer = this._writer;
writer && writer.close();
writer?.close();
cb && cb();

@@ -49,3 +49,3 @@ }

const writer = this._writer;
writer && writer.write(x);
writer?.write(x);
cb && cb();

@@ -73,3 +73,3 @@ return true;

}
if ((r && r.done || !this.readable)) {
if ((r?.done || !this.readable)) {
this.push(null);

@@ -76,0 +76,0 @@ await reader.cancel();

@@ -39,4 +39,4 @@ // Licensed to the Apache Software Foundation (ASF) under one

let it: SourceIterator<T> | null = null;
const bm = (options && options.type === 'bytes') || false;
const hwm = options && options.highWaterMark || (2 ** 24);
const bm = (options?.type === 'bytes') || false;
const hwm = options?.highWaterMark || (2 ** 24);

@@ -47,3 +47,3 @@ return new ReadableStream<T>({

pull(controller) { it ? (next(controller, it)) : controller.close(); },
cancel() { (it && (it.return && it.return()) || true) && (it = null); }
cancel() { (it?.return && it.return() || true) && (it = null); }
}, { highWaterMark: bm ? hwm : undefined, ...options });

@@ -71,4 +71,4 @@

let it: AsyncSourceIterator<T> | null = null;
const bm = (options && options.type === 'bytes') || false;
const hwm = options && options.highWaterMark || (2 ** 24);
const bm = (options?.type === 'bytes') || false;
const hwm = options?.highWaterMark || (2 ** 24);

@@ -79,3 +79,3 @@ return new ReadableStream<T>({

async pull(controller) { it ? (await next(controller, it)) : controller.close(); },
async cancel() { (it && (it.return && await it.return()) || true) && (it = null); },
async cancel() { (it?.return && await it.return() || true) && (it = null); },
}, { highWaterMark: bm ? hwm : undefined, ...options });

@@ -82,0 +82,0 @@

@@ -75,3 +75,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

const message = this.readMessage(type);
const schema = message && message.header();
const schema = message?.header();
if (throwIfNull && !schema) {

@@ -85,3 +85,3 @@ throw new Error(nullMessage(type));

const bb = buf && new ByteBuffer(buf);
const len = bb && bb.readInt32(0) || 0;
const len = bb?.readInt32(0) || 0;
return { done: len === 0, value: len };

@@ -146,3 +146,3 @@ }

const message = await this.readMessage(type);
const schema = message && message.header();
const schema = message?.header();
if (throwIfNull && !schema) {

@@ -156,3 +156,3 @@ throw new Error(nullMessage(type));

const bb = buf && new ByteBuffer(buf);
const len = bb && bb.readInt32(0) || 0;
const len = bb?.readInt32(0) || 0;
return { done: len === 0, value: len };

@@ -227,3 +227,3 @@ }

const message = this.readMessage(type);
const schema = message && message.header();
const schema = message?.header();
if (!message || !schema) {

@@ -230,0 +230,0 @@ throw new Error(nullMessage(type));

@@ -550,3 +550,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

const message = this._reader.readMessage(MessageHeader.RecordBatch);
if (message && message.isRecordBatch()) {
if (message?.isRecordBatch()) {
const header = message.header();

@@ -564,3 +564,3 @@ const buffer = this._reader.readMessageBody(message.bodyLength);

const message = this._reader.readMessage(MessageHeader.DictionaryBatch);
if (message && message.isDictionaryBatch()) {
if (message?.isDictionaryBatch()) {
const header = message.header();

@@ -626,3 +626,3 @@ const buffer = this._reader.readMessageBody(message.bodyLength);

const message = await this._reader.readMessage(MessageHeader.RecordBatch);
if (message && message.isRecordBatch()) {
if (message?.isRecordBatch()) {
const header = message.header();

@@ -640,3 +640,3 @@ const buffer = await this._reader.readMessageBody(message.bodyLength);

const message = await this._reader.readMessage(MessageHeader.DictionaryBatch);
if (message && message.isDictionaryBatch()) {
if (message?.isDictionaryBatch()) {
const header = message.header();

@@ -643,0 +643,0 @@ const buffer = await this._reader.readMessageBody(message.bodyLength);

@@ -166,3 +166,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

throw new Error(`RecordBatchWriter is closed`);
} else if (payload === null || payload === undefined) {
} else if (payload == null) {
return this.finish() && undefined;

@@ -169,0 +169,0 @@ } else if (payload instanceof Table && !(schema = payload.schema)) {
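The `payload == null` check above is the loose-equality idiom that matches exactly `null` and `undefined`; the same idiom replaces explicit `=== null || === undefined` checks in several other files in this diff.

```ts
const isNil = (x: unknown) => x == null;

[null, undefined, 0, '', false, NaN].map(isNil);
// [true, true, false, false, false, false]
```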

package.json

{
"version": "4.0.1",
"version": "5.0.0",
"name": "@apache-arrow/ts",
"browser": "Arrow.dom.ts",
"main": "Arrow.node.ts",
"type": "module",
"module": "Arrow.node.ts",
"sideEffects": false,
"esm": {
"mode": "auto",
"sourceMap": true
},
"types": "Arrow.node.ts",

@@ -24,4 +31,3 @@ "license": "Apache-2.0",

"@types/flatbuffers": "^1.10.0",
"@types/node": "^14.14.37",
"@types/text-encoding-utf-8": "^1.0.1",
"@types/node": "^15.6.1",
"command-line-args": "5.1.1",

@@ -32,5 +38,4 @@ "command-line-usage": "6.1.1",

"pad-left": "^2.1.0",
"text-encoding-utf-8": "^1.0.2",
"tslib": "^2.2.0"
"tslib": "^2.3.0"
}
}

README.md

@@ -150,43 +150,2 @@ <!---

### Usage with MapD Core
```js
import MapD from 'rxjs-mapd';
import { Table } from 'apache-arrow';
const port = 9091;
const host = `localhost`;
const db = `mapd`;
const user = `mapd`;
const password = `HyperInteractive`;
MapD.open(host, port)
.connect(db, user, password)
.flatMap((session) =>
// queryDF returns Arrow buffers
session.queryDF(`
SELECT origin_city
FROM flights
WHERE dest_city ILIKE 'dallas'
LIMIT 5`
).disconnect()
)
.map(([schema, records]) =>
// Create Arrow Table from results
Table.from([schema, records]))
.map((table) =>
// Stringify the table to CSV with row numbers
table.toString({ index: true }))
.subscribe((csvStr) =>
console.log(csvStr));
/*
Index, origin_city
0, Oklahoma City
1, Oklahoma City
2, Oklahoma City
3, San Antonio
4, San Antonio
*/
```
# Getting involved

@@ -245,2 +204,8 @@

### Supported Browsers and Platforms
The bundles we compile support modern browsers released in the last 5 years. This includes supported versions of
Firefox, Chrome, Edge, and Safari. We do not actively support Internet Explorer.
Apache Arrow also works on [maintained versions of Node](https://nodejs.org/en/about/releases/).
# People

@@ -250,4 +215,5 @@

* Brian Hulette, _committer_
* Paul Taylor, Graphistry, Inc., _committer_
* Brian Hulette, _committer_
* Paul Taylor, _committer_
* Dominik Moritz, _committer_

@@ -261,12 +227,8 @@ # Powered By Apache Arrow in JS

* [Apache Arrow](https://arrow.apache.org) -- Parent project for Powering Columnar In-Memory Analytics, including affiliated open source projects
* [rxjs-mapd](https://github.com/graphistry/rxjs-mapd) -- A MapD Core node-driver that returns query results as Arrow columns
* [Perspective](https://github.com/jpmorganchase/perspective) -- Perspective is a streaming data visualization engine for JavaScript by J.P. Morgan, for building real-time & user-configurable analytics entirely in the browser.
* [Falcon](https://github.com/uwdata/falcon) is a visualization tool for linked interactions across multiple aggregate visualizations of millions or billions of records.
* [Vega](https://github.com/vega) is an ecosystem of tools for interactive visualizations on the web. The Vega team implemented an [Arrow loader](https://github.com/vega/vega-loader-arrow).
* [Arquero](https://github.com/uwdata/arquero) is a library for query processing and transformation of array-backed data tables.
* [OmniSci](https://github.com/omnisci/mapd-connector) is a GPU database. Its JavaScript connector returns Arrow dataframes.
## Companies & Organizations
* [CCRi](https://www.ccri.com/) -- Commonwealth Computer Research Inc, or CCRi, is a Central Virginia based data science and software engineering company
* [GOAI](https://gpuopenanalytics.com/) -- GPU Open Analytics Initiative standardizes on Arrow as part of creating common data frameworks that enable developers and statistical researchers to accelerate data science on GPUs
* [Graphistry, Inc.](https://www.graphistry.com/) - An end-to-end GPU accelerated visual investigation platform used by teams for security, anti-fraud, and related investigations. Graphistry uses Arrow in its NodeJS GPU backend and client libraries, and is an early contributing member to GOAI and Arrow\[JS\] working to bring these technologies to the enterprise.
# License

@@ -273,0 +235,0 @@

@@ -18,17 +18,17 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Column } from './column';
import { Data } from './data';
import { Column } from './column';
import { Schema, Field } from './schema';
import { TypedArray, TypedArrayDataType } from './interfaces';
import { RecordBatchReader } from './ipc/reader';
import { RecordBatchFileWriter, RecordBatchStreamWriter } from './ipc/writer';
import { RecordBatch, _InternalEmptyPlaceholderRecordBatch } from './recordbatch';
import { DataFrame } from './compute/dataframe';
import { RecordBatchReader } from './ipc/reader';
import { Field, Schema } from './schema';
import { DataType, RowLike, Struct } from './type';
import { selectColumnArgs, selectArgs } from './util/args';
import { Clonable, Sliceable, Applicative } from './vector';
import { isPromise, isIterable, isAsyncIterable } from './util/compat';
import { RecordBatchFileWriter, RecordBatchStreamWriter } from './ipc/writer';
import { selectArgs, selectColumnArgs } from './util/args';
import { isAsyncIterable, isIterable, isPromise } from './util/compat';
import { distributeColumnsIntoRecordBatches, distributeVectorsIntoRecordBatches } from './util/recordbatch';
import { Vector, Chunked, StructVector, VectorBuilderOptions, VectorBuilderOptionsAsync } from './vector/index';
import { Applicative, Clonable, Sliceable } from './vector';
import { Chunked, StructVector, Vector, VectorBuilderOptions, VectorBuilderOptionsAsync } from './vector/index';
type VectorMap = { [key: string]: Vector };
type VectorMap = { [key: string]: Vector | Exclude<TypedArray, Uint8ClampedArray> };
type Fields<T extends { [key: string]: DataType }> = (keyof T)[] | Field<T[keyof T]>[];

@@ -46,7 +46,2 @@ type ChildData<T extends { [key: string]: DataType }> = Data<T[keyof T]>[] | Vector<T[keyof T]>[];

clone(chunks?: RecordBatch<T>[], offsets?: Uint32Array): Table<T>;
scan(next: import('./compute/dataframe').NextFunc, bind?: import('./compute/dataframe').BindFunc): void;
scanReverse(next: import('./compute/dataframe').NextFunc, bind?: import('./compute/dataframe').BindFunc): void;
countBy(name: import('./compute/predicate').Col | string): import('./compute/dataframe').CountByResult;
filter(predicate: import('./compute/predicate').Predicate): import('./compute/dataframe').FilteredDataFrame<T>;
}

@@ -56,4 +51,3 @@

extends Chunked<Struct<T>>
implements DataFrame<T>,
Clonable<Table<T>>,
implements Clonable<Table<T>>,
Sliceable<Table<T>>,

@@ -171,3 +165,3 @@ Applicative<Struct<T>, Table<T>> {

public static new<T extends { [key: string]: DataType } = any>(...columns: Columns<T>): Table<T>;
public static new<T extends VectorMap = any>(children: T): Table<{ [P in keyof T]: T[P]['type'] }>;
public static new<T extends VectorMap = any>(children: T): Table<{ [P in keyof T]: T[P] extends Vector ? T[P]['type'] : T[P] extends Exclude<TypedArray, Uint8ClampedArray> ? TypedArrayDataType<T[P]> : never}>;
public static new<T extends { [key: string]: DataType } = any>(children: ChildData<T>, fields?: Fields<T>): Table<T>;

@@ -179,2 +173,3 @@ /** @nocollapse */

constructor(table: Table<T>);
constructor(batches: RecordBatch<T>[]);

@@ -188,7 +183,7 @@ constructor(...batches: RecordBatch<T>[]);

if (args[0] instanceof Schema) { schema = args.shift(); }
if (args[0] instanceof Schema) { schema = args[0]; }
const chunks = selectArgs<RecordBatch<T>>(RecordBatch, args);
const chunks = args[0] instanceof Table ? (args[0] as Table<T>).chunks : selectArgs<RecordBatch<T>>(RecordBatch, args);
if (!schema && !(schema = chunks[0] && chunks[0].schema)) {
if (!schema && !(schema = chunks[0]?.schema)) {
throw new TypeError('Table must be initialized with a Schema or at least one RecordBatch');

@@ -195,0 +190,0 @@ }

@@ -68,20 +68,20 @@ // Licensed to the Apache Software Foundation (ASF) under one

/** @nocollapse */ static isNull (x: any): x is Null { return x && x.typeId === Type.Null; }
/** @nocollapse */ static isInt (x: any): x is Int_ { return x && x.typeId === Type.Int; }
/** @nocollapse */ static isFloat (x: any): x is Float { return x && x.typeId === Type.Float; }
/** @nocollapse */ static isBinary (x: any): x is Binary { return x && x.typeId === Type.Binary; }
/** @nocollapse */ static isUtf8 (x: any): x is Utf8 { return x && x.typeId === Type.Utf8; }
/** @nocollapse */ static isBool (x: any): x is Bool { return x && x.typeId === Type.Bool; }
/** @nocollapse */ static isDecimal (x: any): x is Decimal { return x && x.typeId === Type.Decimal; }
/** @nocollapse */ static isDate (x: any): x is Date_ { return x && x.typeId === Type.Date; }
/** @nocollapse */ static isTime (x: any): x is Time_ { return x && x.typeId === Type.Time; }
/** @nocollapse */ static isTimestamp (x: any): x is Timestamp_ { return x && x.typeId === Type.Timestamp; }
/** @nocollapse */ static isInterval (x: any): x is Interval_ { return x && x.typeId === Type.Interval; }
/** @nocollapse */ static isList (x: any): x is List { return x && x.typeId === Type.List; }
/** @nocollapse */ static isStruct (x: any): x is Struct { return x && x.typeId === Type.Struct; }
/** @nocollapse */ static isUnion (x: any): x is Union_ { return x && x.typeId === Type.Union; }
/** @nocollapse */ static isFixedSizeBinary (x: any): x is FixedSizeBinary { return x && x.typeId === Type.FixedSizeBinary; }
/** @nocollapse */ static isFixedSizeList (x: any): x is FixedSizeList { return x && x.typeId === Type.FixedSizeList; }
/** @nocollapse */ static isMap (x: any): x is Map_ { return x && x.typeId === Type.Map; }
/** @nocollapse */ static isDictionary (x: any): x is Dictionary { return x && x.typeId === Type.Dictionary; }
/** @nocollapse */ static isNull (x: any): x is Null { return x?.typeId === Type.Null; }
/** @nocollapse */ static isInt (x: any): x is Int_ { return x?.typeId === Type.Int; }
/** @nocollapse */ static isFloat (x: any): x is Float { return x?.typeId === Type.Float; }
/** @nocollapse */ static isBinary (x: any): x is Binary { return x?.typeId === Type.Binary; }
/** @nocollapse */ static isUtf8 (x: any): x is Utf8 { return x?.typeId === Type.Utf8; }
/** @nocollapse */ static isBool (x: any): x is Bool { return x?.typeId === Type.Bool; }
/** @nocollapse */ static isDecimal (x: any): x is Decimal { return x?.typeId === Type.Decimal; }
/** @nocollapse */ static isDate (x: any): x is Date_ { return x?.typeId === Type.Date; }
/** @nocollapse */ static isTime (x: any): x is Time_ { return x?.typeId === Type.Time; }
/** @nocollapse */ static isTimestamp (x: any): x is Timestamp_ { return x?.typeId === Type.Timestamp; }
/** @nocollapse */ static isInterval (x: any): x is Interval_ { return x?.typeId === Type.Interval; }
/** @nocollapse */ static isList (x: any): x is List { return x?.typeId === Type.List; }
/** @nocollapse */ static isStruct (x: any): x is Struct { return x?.typeId === Type.Struct; }
/** @nocollapse */ static isUnion (x: any): x is Union_ { return x?.typeId === Type.Union; }
/** @nocollapse */ static isFixedSizeBinary (x: any): x is FixedSizeBinary { return x?.typeId === Type.FixedSizeBinary; }
/** @nocollapse */ static isFixedSizeList (x: any): x is FixedSizeList { return x?.typeId === Type.FixedSizeList; }
/** @nocollapse */ static isMap (x: any): x is Map_ { return x?.typeId === Type.Map; }
/** @nocollapse */ static isDictionary (x: any): x is Dictionary { return x?.typeId === Type.Dictionary; }

@@ -88,0 +88,0 @@ public get typeId(): TType { return <any> Type.NONE; }

@@ -22,4 +22,7 @@ // Licensed to the Apache Software Foundation (ASF) under one

import { Vector } from '../vector';
import { DataType } from '../type';
import { DataType, Float32, Float64, FloatArray, IntArray, Int16, Int32, Int64, Int8, Uint16, Uint32, Uint64, Uint8 } from '../type';
import { Chunked } from '../vector/chunked';
import { BigIntArray, TypedArray as TypedArray_ } from '../interfaces';
import { FloatArrayCtor } from '../vector/float';
import { IntArrayCtor } from '../vector/int';

@@ -30,3 +33,42 @@ type RecordBatchCtor = typeof import('../recordbatch').RecordBatch;

type TypedArray = Exclude<TypedArray_ | BigIntArray, Uint8ClampedArray>;
/** @ignore */
export function isTypedArray(arr: any): arr is TypedArray {
return ArrayBuffer.isView(arr) && 'BYTES_PER_ELEMENT' in arr;
}
/** @ignore */
type ArrayCtor = FloatArrayCtor | IntArrayCtor;
/** @ignore */
export function arrayTypeToDataType(ctor: ArrayCtor) {
switch (ctor) {
case Int8Array: return Int8;
case Int16Array: return Int16;
case Int32Array: return Int32;
case BigInt64Array: return Int64;
case Uint8Array: return Uint8;
case Uint16Array: return Uint16;
case Uint32Array: return Uint32;
case BigUint64Array: return Uint64;
case Float32Array: return Float32;
case Float64Array: return Float64;
default: return null;
}
}
/** @ignore */
function vectorFromTypedArray(array: TypedArray): Vector {
const ArrowType = arrayTypeToDataType(array.constructor as ArrayCtor);
if (!ArrowType) {
throw new TypeError('Unrecognized Array input');
}
const type = new ArrowType();
const data = Data.new(type, 0, array.length, 0, [undefined, array as IntArray | FloatArray]);
return Vector.new(data);
}
/** @ignore */
export const selectArgs = <T>(Ctor: any, vals: any[]) => _selectArgs(Ctor, vals, [], 0) as T[];
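For reference, `vectorFromTypedArray` above wraps the typed array directly as the new vector's data buffer: the array is placed in the second buffer slot passed to `Data.new`, and the resulting `Data` is lifted to a `Vector`. The same steps written against the public `Data`/`Vector` API, as a minimal sketch:

```ts
import { Data, Vector, Float64 } from 'apache-arrow';

const values = new Float64Array([0.1, 0.2, 0.3]);

// Slot 0 (value offsets) is unused for fixed-width types; slot 1 holds the data buffer.
// With no validity bitmap, the null count is 0.
const data = Data.new(new Float64(), /* offset */ 0, values.length, /* nullCount */ 0, [undefined, values]);
const vector = Vector.new(data);

console.log(vector.length, vector.get(1));  // 3 0.2
```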

@@ -39,2 +81,3 @@ /** @ignore */

x instanceof Vector ? Column.new(fields[i], x) as Column<T[keyof T]> :
isTypedArray(x) ? Column.new(fields[i], vectorFromTypedArray(x)) as Column<T[keyof T]> :
Column.new(fields[i], [] as Vector<T[keyof T]>[]));

@@ -114,3 +157,3 @@ };

/** @ignore */
function _selectFieldArgs<T extends { [key: string]: DataType }>(vals: any[], ret: [Field<T[keyof T]>[], Vector<T[keyof T]>[]]): [Field<T[keyof T]>[], (T[keyof T] | Vector<T[keyof T]>)[]] {
function _selectFieldArgs<T extends { [key: string]: DataType }>(vals: any[], ret: [Field<T[keyof T]>[], (Vector<T[keyof T]> | TypedArray)[]]): [Field<T[keyof T]>[], (T[keyof T] | Vector<T[keyof T]> | TypedArray)[]] {
let keys: any[];

@@ -124,3 +167,3 @@ let n: number;

if (isArray(vals[0])) { return _selectFieldArgs(vals[0], ret); }
if (!(vals[0] instanceof Data || vals[0] instanceof Vector || vals[0] instanceof DataType)) {
if (!(vals[0] instanceof Data || vals[0] instanceof Vector || isTypedArray(vals[0]) || vals[0] instanceof DataType)) {
[keys, vals] = Object.entries(vals[0]).reduce(toKeysAndValues, ret);

@@ -151,3 +194,3 @@ }

fields[++fieldIndex] = Field.new(field, val as DataType, true) as Field<T[keyof T]>;
} else if (val && val.type && (values[++valueIndex] = val)) {
} else if (val?.type && (values[++valueIndex] = val)) {
val instanceof Data && (values[valueIndex] = val = Vector.new(val) as Vector);

@@ -154,0 +197,0 @@ fields[++fieldIndex] = Field.new(field, val.type, true) as Field<T[keyof T]>;

@@ -180,3 +180,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

do {
r = it.next(yield r && r.value);
r = it.next(yield r?.value);
} while (!r.done);

@@ -183,0 +183,0 @@ })(source[Symbol.iterator]()));

@@ -18,13 +18,8 @@ // Licensed to the Apache Software Foundation (ASF) under one

import {
TextDecoder as TextDecoderPolyfill,
TextEncoder as TextEncoderPolyfill,
} from 'text-encoding-utf-8';
const decoder = new (typeof TextDecoder !== 'undefined' ? TextDecoder : TextDecoderPolyfill)('utf-8');
const decoder = new TextDecoder('utf-8');
/** @ignore */
export const decodeUtf8 = (buffer?: ArrayBuffer | ArrayBufferView) => decoder.decode(buffer);
export const decodeUtf8 = (buffer?: BufferSource) => decoder.decode(buffer);
const encoder = new (typeof TextEncoder !== 'undefined' ? TextEncoder : TextEncoderPolyfill)();
const encoder = new TextEncoder();
/** @ignore */
export const encodeUtf8 = (value?: string) => encoder.encode(value);
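With the `text-encoding-utf-8` polyfill dropped, these helpers now rely on the platform-native `TextEncoder`/`TextDecoder`, which are available in all supported browsers and as globals in maintained versions of Node. A round-trip for illustration:

```ts
const bytes = new TextEncoder().encode('Apache Arrow');   // Uint8Array of UTF-8 bytes
const text = new TextDecoder('utf-8').decode(bytes);      // back to the original string
console.log(text === 'Apache Arrow');                     // true
```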

@@ -99,3 +99,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

public toJSON(): any { return [...this]; }
public toJSON() { return [...this]; }

@@ -102,0 +102,0 @@ protected _sliceInternal(self: this, begin: number, end: number) {

@@ -44,3 +44,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

/** @ignore */
type FloatArrayCtor = TypedArrayConstructor<FloatArray>;
export type FloatArrayCtor = TypedArrayConstructor<FloatArray>;

@@ -47,0 +47,0 @@ /** @ignore */

@@ -194,3 +194,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

return function(this: V, i: number, a: any) {
if (setBool(this.nullBitmap, this.offset + i, !(a === null || a === undefined))) {
if (setBool(this.nullBitmap, this.offset + i, !((a == null)))) {
fn.call(this, i, a);

@@ -197,0 +197,0 @@ }

@@ -54,3 +54,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

/** @ignore */
type IntArrayCtor = TypedArrayConstructor<IntArray> | BigIntArrayConstructor<BigIntArray>;
export type IntArrayCtor = TypedArrayConstructor<IntArray> | BigIntArrayConstructor<BigIntArray>;

@@ -57,0 +57,0 @@ /** @ignore */

@@ -57,3 +57,3 @@ // Licensed to the Apache Software Foundation (ASF) under one

let val = undefined;
if (key !== null && key !== undefined) {
if (key != null) {
const ktoi = this[kKeyToIdx] || (this[kKeyToIdx] = new Map());

@@ -74,3 +74,3 @@ let idx = ktoi.get(key);

public set(key: K, val: V) {
if (key !== null && key !== undefined) {
if (key != null) {
const ktoi = this[kKeyToIdx] || (this[kKeyToIdx] = new Map());

@@ -77,0 +77,0 @@ let idx = ktoi.get(key);

@@ -242,6 +242,6 @@ // Licensed to the Apache Software Foundation (ASF) under one

/** @ignore */ const _setStructArrayValue = (o: number, v: any[]) => (c: Vector | null, _: Field, i: number) => c && c.set(o, v[i]);
/** @ignore */ const _setStructVectorValue = (o: number, v: Vector) => (c: Vector | null, _: Field, i: number) => c && c.set(o, v.get(i));
/** @ignore */ const _setStructMapValue = (o: number, v: Map<string, any>) => (c: Vector | null, f: Field, _: number) => c && c.set(o, v.get(f.name));
/** @ignore */ const _setStructObjectValue = (o: number, v: { [key: string]: any }) => (c: Vector | null, f: Field, _: number) => c && c.set(o, v[f.name]);
/** @ignore */ const _setStructArrayValue = (o: number, v: any[]) => (c: Vector | null, _: Field, i: number) => c?.set(o, v[i]);
/** @ignore */ const _setStructVectorValue = (o: number, v: Vector) => (c: Vector | null, _: Field, i: number) => c?.set(o, v.get(i));
/** @ignore */ const _setStructMapValue = (o: number, v: Map<string, any>) => (c: Vector | null, f: Field, _: number) => c?.set(o, v.get(f.name));
/** @ignore */ const _setStructObjectValue = (o: number, v: { [key: string]: any }) => (c: Vector | null, f: Field, _: number) => c?.set(o, v[f.name]);
/** @ignore */

@@ -248,0 +248,0 @@ const setStruct = <T extends Struct>(vector: VectorType<T>, index: number, value: T['TValue']) => {

The diff of one remaining file is too large to display.
