nodejs-polars
Advanced tools
Comparing version 0.4.0 to 0.4.1
@@ -25,2 +25,5 @@ /// <reference types="node" /> | ||
}; | ||
declare type WriteAvroOptions = { | ||
compression?: "uncompressed" | "snappy" | "deflate"; | ||
}; | ||
interface WriteMethods { | ||
@@ -75,3 +78,2 @@ /** | ||
* - col will write to a column oriented object | ||
* @deprecated *since 0.4.0* use {@link writeJSON} | ||
* @example | ||
@@ -125,2 +127,10 @@ * ``` | ||
writeParquet(destination: string | Writable, options?: WriteParquetOptions): void; | ||
/** | ||
 * Write the DataFrame to disk in avro format.
 * @param destination File path or stream to which the data should be written.
 * @param options.compression Compression method *defaults to "uncompressed"*
* | ||
*/ | ||
writeAvro(options?: WriteAvroOptions): Buffer; | ||
writeAvro(destination: string | Writable, options?: WriteAvroOptions): void; | ||
} | ||
@@ -127,0 +137,0 @@ /** |
@@ -478,2 +478,10 @@ "use strict"; | ||
}, | ||
writeAvro(dest, options = { compression: "uncompressed" }) { | ||
if (dest?.compression !== undefined) { | ||
return writeToBufferOrStream(null, "avro", dest); | ||
} | ||
else { | ||
return writeToBufferOrStream(dest, "avro", options); | ||
} | ||
}, | ||
toIPC(dest, options) { | ||
@@ -480,0 +488,0 @@ return this.writeIPC(dest, options); |
@@ -46,2 +46,3 @@ import * as series from "./series/series"; | ||
export import readParquet = io.readParquet; | ||
export import readAvro = io.readAvro; | ||
export import readCSVStream = io.readCSVStream; | ||
@@ -48,0 +49,0 @@ export import readJSONStream = io.readJSONStream; |
@@ -63,2 +63,3 @@ "use strict"; | ||
pl.readParquet = io.readParquet; | ||
pl.readAvro = io.readAvro; | ||
pl.readCSVStream = io.readCSVStream; | ||
@@ -65,0 +66,0 @@ pl.readJSONStream = io.readJSONStream; |
@@ -60,2 +60,8 @@ /// <reference types="node" /> | ||
}; | ||
declare type ReadAvroOptions = { | ||
columns?: string[]; | ||
projection?: number[]; | ||
numRows?: number; | ||
rowCount?: RowCount; | ||
}; | ||
/** | ||
@@ -172,2 +178,11 @@ * __Read a CSV file or string into a Dataframe.__ | ||
/** | ||
* Read into a DataFrame from an avro file. | ||
 * @param pathOrBody
 *   Path to a file, list of files, or a file-like object. If the path is a directory, that directory will be used
 *   as partition aware scan.
 * @param options.columns Columns to select. Accepts a list of column names.
 * @param options.numRows Stop reading from the avro file after reading ``numRows`` rows.
*/ | ||
export declare function readAvro(pathOrBody: string | Buffer, options?: ReadAvroOptions): DataFrame; | ||
/** | ||
* __Lazily read from a parquet file or multiple files via glob patterns.__ | ||
@@ -174,0 +189,0 @@ * ___ |
@@ -18,3 +18,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.readJSONStream = exports.readCSVStream = exports.scanIPC = exports.readIPC = exports.scanParquet = exports.readParquet = exports.readJSON = exports.scanCSV = exports.readCSV = void 0; | ||
exports.readJSONStream = exports.readCSVStream = exports.scanIPC = exports.readIPC = exports.scanParquet = exports.readAvro = exports.readParquet = exports.readJSON = exports.scanCSV = exports.readCSV = void 0; | ||
const polars_internal_1 = __importDefault(require("./internals/polars_internal")); | ||
@@ -106,2 +106,8 @@ const dataframe_1 = require("./dataframe"); | ||
} | ||
// Build a DataFrame from an in-memory avro payload.
// NOTE(review): this spreads readCsvDefaultOptions into an avro call —
// presumably the native reader ignores the CSV-only keys; confirm.
function readAvroBuffer(buff, options) {
    const args = { ...readCsvDefaultOptions, ...options, buff };
    return (0, dataframe_1.dfWrapper)(polars_internal_1.default.df.readAvroBuffer(args));
}
// Build a DataFrame by reading an avro file from disk.
// NOTE(review): spreads readCsvDefaultOptions like readAvroBuffer does —
// presumably harmless for avro; confirm against the native binding.
function readAvroPath(path, options) {
    const args = { ...readCsvDefaultOptions, ...options, path };
    return (0, dataframe_1.dfWrapper)(polars_internal_1.default.df.readAvroPath(args));
}
function readCSV(pathOrBody, options) { | ||
@@ -168,2 +174,20 @@ const extensions = [".tsv", ".csv"]; | ||
exports.readParquet = readParquet; | ||
/**
 * Read an avro payload into a DataFrame.
 *
 * Accepts a Buffer, an inline avro body given as a string, or a path to
 * an ".avro" file. Any other input type is rejected.
 */
function readAvro(pathOrBody, options) {
    if (Buffer.isBuffer(pathOrBody)) {
        return readAvroBuffer(pathOrBody, options);
    }
    if (typeof pathOrBody !== "string") {
        throw new Error("must supply either a path or body");
    }
    // A string that does not look like an ".avro" path is treated as an
    // inline body and decoded from its UTF-8 bytes.
    return (0, utils_1.isPath)(pathOrBody, [".avro"])
        ? readAvroPath(pathOrBody, options)
        : readAvroBuffer(Buffer.from(pathOrBody, "utf-8"), options);
}
exports.readAvro = readAvro;
function scanParquet(path, options) { | ||
@@ -170,0 +194,0 @@ return (0, dataframe_2.LazyDataFrame)(polars_internal_1.default.ldf.scanParquet({ path, ...options })); |
{ | ||
"name": "nodejs-polars", | ||
"version": "0.4.0", | ||
"version": "0.4.1", | ||
"repository": "https://github.com/pola-rs/polars.git", | ||
@@ -5,0 +5,0 @@ "license": "SEE LICENSE IN LICENSE", |
{ | ||
"name": "nodejs-polars", | ||
"version": "0.4.0", | ||
"version": "0.4.1", | ||
"repository": "https://github.com/pola-rs/polars.git", | ||
@@ -89,14 +89,14 @@ "license": "SEE LICENSE IN LICENSE", | ||
"optionalDependencies": { | ||
"nodejs-polars-win32-x64-msvc": "0.4.0", | ||
"nodejs-polars-darwin-x64": "0.4.0", | ||
"nodejs-polars-linux-x64-gnu": "0.4.0", | ||
"nodejs-polars-win32-ia32-msvc": "0.4.0", | ||
"nodejs-polars-linux-arm64-gnu": "0.4.0", | ||
"nodejs-polars-linux-arm-gnueabihf": "0.4.0", | ||
"nodejs-polars-darwin-arm64": "0.4.0", | ||
"nodejs-polars-android-arm64": "0.4.0", | ||
"nodejs-polars-linux-x64-musl": "0.4.0", | ||
"nodejs-polars-linux-arm64-musl": "0.4.0", | ||
"nodejs-polars-win32-arm64-msvc": "0.4.0" | ||
"nodejs-polars-win32-x64-msvc": "0.4.1", | ||
"nodejs-polars-darwin-x64": "0.4.1", | ||
"nodejs-polars-linux-x64-gnu": "0.4.1", | ||
"nodejs-polars-win32-ia32-msvc": "0.4.1", | ||
"nodejs-polars-linux-arm64-gnu": "0.4.1", | ||
"nodejs-polars-linux-arm-gnueabihf": "0.4.1", | ||
"nodejs-polars-darwin-arm64": "0.4.1", | ||
"nodejs-polars-android-arm64": "0.4.1", | ||
"nodejs-polars-linux-x64-musl": "0.4.1", | ||
"nodejs-polars-linux-arm64-musl": "0.4.1", | ||
"nodejs-polars-win32-arm64-msvc": "0.4.1" | ||
} | ||
} |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
339352
8917