@shelf/dynamodb-parallel-scan
Comparing version 3.0.1 to 3.0.2
@@ -18,3 +18,7 @@ "use strict";
   tls: false,
-  region: 'local-env'
+  region: 'local-env',
+  credentials: {
+    accessKeyId: 'fakeMyKeyId',
+    secretAccessKey: 'fakeSecretAccessKey'
+  }
 })
@@ -25,3 +29,3 @@ });
-async function scan(params) {
+function scan(params) {
   const command = new _libDynamodb.ScanCommand(params);
@@ -55,3 +59,3 @@ return ddbv3Client.send(command);
-async function batchWrite(items) {
+function batchWrite(items) {
   const command = new _libDynamodb.BatchWriteCommand({
@@ -58,0 +62,0 @@ RequestItems: items,
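For context, here is a minimal sketch of what the 3.0.2 client change amounts to: the client used against a local DynamoDB now passes explicit dummy credentials, presumably so the SDK v3 credential chain does not try to resolve real credentials for a local endpoint. The endpoint value and surrounding wiring below are assumptions for illustration, not taken from this diff.

```js
// Sketch only: endpoint and surrounding setup are assumed, not taken from the diff above.
const {DynamoDBClient} = require('@aws-sdk/client-dynamodb');
const {DynamoDBDocumentClient, ScanCommand} = require('@aws-sdk/lib-dynamodb');

const ddbv3Client = DynamoDBDocumentClient.from(
  new DynamoDBClient({
    endpoint: 'http://localhost:8000', // assumed DynamoDB Local endpoint
    tls: false,
    region: 'local-env',
    credentials: {
      accessKeyId: 'fakeMyKeyId', // any non-empty value satisfies DynamoDB Local
      secretAccessKey: 'fakeSecretAccessKey'
    }
  })
);

// `scan` no longer needs the `async` keyword: send() already returns a Promise.
function scan(params) {
  return ddbv3Client.send(new ScanCommand(params));
}
```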
@@ -51,3 +51,3 @@ "use strict";
   debug(`Started parallel scan with ${concurrency} threads. Total items count: ${totalTableItemsCount}`);
-  Promise.all(segments.map(async (_, segmentIndex) => getItemsFromSegment({
+  Promise.all(segments.map((_, segmentIndex) => getItemsFromSegment({
     scanParams,
@@ -54,0 +54,0 @@ stream,
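The release also drops redundant `async` keywords from callbacks and functions that already return promises. A tiny illustrative sketch of why this is behavior-neutral; the `getItemsFromSegment` stand-in below is hypothetical:

```js
// Illustrative stand-in (hypothetical): the real getItemsFromSegment scans one segment
// and returns a Promise, so the callback already produces a Promise without `async`.
const getItemsFromSegment = ({segmentIndex}) => Promise.resolve(`segment ${segmentIndex} done`);

const segments = new Array(4).fill(null);

// map() yields an array of Promises either way; Promise.all resolves once every
// segment scan has finished, so dropping `async` does not change behavior.
Promise.all(segments.map((_, segmentIndex) => getItemsFromSegment({segmentIndex}))).then(results => {
  console.log(results);
});
```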
 {
   "name": "@shelf/dynamodb-parallel-scan",
-  "version": "3.0.1",
+  "version": "3.0.2",
   "description": "Scan large DynamoDB tables faster with parallelism",
@@ -53,13 +53,13 @@ "keywords": [
   "devDependencies": {
-    "@aws-sdk/client-dynamodb": "3.74.0",
-    "@aws-sdk/lib-dynamodb": "3.74.0",
-    "@babel/cli": "7.17.6",
-    "@babel/core": "7.17.9",
+    "@aws-sdk/client-dynamodb": "3.105.0",
+    "@aws-sdk/lib-dynamodb": "3.105.0",
+    "@babel/cli": "7.17.10",
+    "@babel/core": "7.18.5",
     "@shelf/babel-config": "0.1.8",
-    "@shelf/eslint-config": "2.16.2",
-    "@shelf/jest-dynamodb": "2.2.3",
+    "@shelf/eslint-config": "2.18.0",
+    "@shelf/jest-dynamodb": "3.0.0",
     "@shelf/prettier-config": "1.0.0",
     "@shelf/tsconfig": "0.0.6",
     "@types/debug": "4.1.7",
-    "@types/jest": "27.4.1",
+    "@types/jest": "28.1.1",
     "@types/lodash.chunk": "4.2.7",
@@ -70,8 +70,8 @@ "@types/lodash.clonedeep": "4.5.7",
     "@types/p-map": "2.0.0",
-    "eslint": "8.13.0",
-    "husky": "7.0.4",
-    "jest": "27.5.1",
-    "lint-staged": "12.4.0",
+    "eslint": "8.17.0",
+    "husky": "8.0.1",
+    "jest": "28.1.1",
+    "lint-staged": "13.0.1",
     "prettier": "2.6.2",
-    "typescript": "4.6.3"
+    "typescript": "4.7.3"
   },
@@ -78,0 +78,0 @@ "peerDependencies": {
@@ -18,2 +18,12 @@ # dynamodb-parallel-scan [![CircleCI](https://circleci.com/gh/shelfio/dynamodb-parallel-scan/tree/master.svg?style=svg)](https://circleci.com/gh/shelfio/dynamodb-parallel-scan/tree/master) ![](https://img.shields.io/badge/code_style-prettier-ff69b4.svg) [![npm (scoped)](https://img.shields.io/npm/v/@shelf/dynamodb-parallel-scan.svg)](https://www.npmjs.com/package/@shelf/dynamodb-parallel-scan)
## Why this is better than a regular scan
**Easily parallelize** scan requests to fetch all items from a table at once.
This is useful when you need to scan a large table to find a small number of items that will fit into Node.js memory.
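A hedged usage sketch of that in-memory mode, assuming the package exposes a `parallelScan(scanParams, options)` export; the export name, table, and filter below are illustrative and not taken from this diff:

```js
const {parallelScan} = require('@shelf/dynamodb-parallel-scan');

(async () => {
  // Splits the scan into segments and runs them concurrently,
  // collecting every matching item into memory at once.
  const items = await parallelScan(
    {
      TableName: 'files', // hypothetical table
      FilterExpression: 'attribute_exists(#fileSize)',
      ExpressionAttributeNames: {'#fileSize': 'fileSize'}
    },
    {concurrency: 250}
  );

  console.log(items.length);
})();
```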
**Scan huge tables using an async generator** or stream.
And yes, it supports stream backpressure!
This is useful when you need to process a large number of items as you scan them.
It lets you receive a chunk of scanned items, wait until you have processed it, and then resume scanning when you're ready.
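And a sketch of the streaming mode with backpressure, assuming a `parallelScanAsStream(scanParams, options)` export that yields chunks of items; the names and options shown are assumptions for illustration:

```js
const {parallelScanAsStream} = require('@shelf/dynamodb-parallel-scan');

// Hypothetical downstream handler; scanning pauses while it runs and resumes afterwards.
const processChunk = async items => console.log(`processed ${items.length} items`);

(async () => {
  const stream = await parallelScanAsStream(
    {TableName: 'files'}, // hypothetical table
    {concurrency: 250, chunkSize: 10000}
  );

  for await (const items of stream) {
    await processChunk(items);
  }
})();
```

Iterating with `for await` only pulls the next chunk after the current one has been handled, which is what provides the backpressure described above.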
## Usage
@@ -82,3 +92,3 @@
 $ yarn publish
-$ git push origin master
+$ git push origin master --tags
 ```
@@ -85,0 +95,0 @@
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package