New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

pgdump-aws-lambda

Package Overview
Dependencies
Maintainers
1
Versions
8
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

pgdump-aws-lambda - npm Package Compare versions

Comparing version 1.4.1 to 1.5.0

.editorconfig

7

lib/config.js

@@ -1,2 +0,2 @@

const path = require('path')
const path = require('path');

@@ -6,6 +6,7 @@ // default config that is overridden by the Lambda event

S3_REGION: 'eu-west-1',
PGDUMP_PATH: path.join(__dirname, '../bin/postgres-13.3'),
PGDUMP_PATH: path.join(__dirname, '../bin/postgres-15.0'),
// maximum time allowed to connect to postgres before a timeout occurs
PGCONNECT_TIMEOUT: 15,
USE_IAM_AUTH: false
USE_IAM_AUTH: false,
S3_STORAGE_CLASS: 'STANDARD'
}

@@ -5,2 +5,3 @@ const utils = require('./utils')

const decorateWithIamToken = require('./iam')
const decorateWithSecretsManagerCredentials = require('./secrets-manager')
const encryption = require('./encryption')

@@ -39,3 +40,14 @@

const baseConfig = { ...DEFAULT_CONFIG, ...event }
const config = event.USE_IAM_AUTH === true ? decorateWithIamToken(baseConfig) : baseConfig
let config
if (event.USE_IAM_AUTH === true) {
config = decorateWithIamToken(baseConfig)
}
else if (event.SECRETS_MANAGER_SECRET_ID) {
config = await decorateWithSecretsManagerCredentials(baseConfig)
}
else {
config = baseConfig
}
try {

@@ -42,0 +54,0 @@ return await backup(config)

@@ -15,3 +15,4 @@ const AWS = require('aws-sdk')

Bucket: config.S3_BUCKET,
Body: stream
Body: stream,
StorageClass: config.S3_STORAGE_CLASS
}).promise()

@@ -18,0 +19,0 @@

{
"name": "pgdump-aws-lambda",
"version": "1.4.1",
"version": "1.5.0",
"description": "Lambda function for executing pg_dump and streaming the output to s3.",
"main": "index.js",
"dependencies": {
"moment": "2.29.1"
"moment": "2.29.4"
},
"devDependencies": {
"aws-sdk": "2.925.0",
"aws-sdk-mock": "5.1.0",
"chai": "4.3.4",
"aws-sdk": "2.1240.0",
"aws-sdk-mock": "5.8.0",
"chai": "4.3.6",
"chai-as-promised": "7.1.1",
"coveralls": "3.1.0",
"eslint": "7.28.0",
"eslint-config-airbnb": "18.2.1",
"eslint-config-airbnb-base": "14.2.1",
"eslint-plugin-import": "2.23.4",
"mocha": "9.0.0",
"coveralls": "3.1.1",
"eslint": "8.26.0",
"eslint-config-airbnb": "19.0.4",
"eslint-config-airbnb-base": "15.0.0",
"eslint-plugin-import": "2.26.0",
"mocha": "10.1.0",
"mock-spawn": "0.2.6",
"mockdate": "3.0.5",
"nyc": "15.1.0",
"rewire": "5.0.0",
"sinon": "11.1.1",
"rewire": "6.0.0",
"sinon": "14.0.1",
"tmp": "0.2.1"

@@ -33,3 +33,3 @@ },

"coverage-html": "NODE_ENV=test nyc --reporter=html --reporter=text mocha test",
"deploy": "bin/makezip.sh"
"makezip": "bin/makezip.sh"
},

@@ -36,0 +36,0 @@ "repository": {

@@ -14,3 +14,4 @@ # pgdump-aws-lambda

- Author from scratch
- Runtime: Node.js 14.x
- Runtime: Node.js 16.x
- Architecture: x86_64
2. tab "Code" -> "Upload from" -> ".zip file":

@@ -22,4 +23,13 @@ - Upload ([pgdump-aws-lambda.zip](https://github.com/jameshy/pgdump-aws-lambda/releases/latest))

- Save
3. Test
3. Give your lambda permissions to write to S3:
- tab "Configuration" -> "Permissions"
- click the existing Execution role
- "Add permissions" -> "Attach policies"
- select "AmazonS3FullAccess" and click "Attach policies"
4. Test
- Create new test event, e.g.:
```json

@@ -31,14 +41,14 @@ {

"PGHOST": "host",
"S3_BUCKET" : "db-backups",
"S3_BUCKET": "db-backups",
"ROOT": "hourly-backups"
}
```
- *Test* and check the output
4. Create a CloudWatch rule:
- _Test_ and check the output
5. Create a CloudWatch rule:
- Event Source: Schedule -> Fixed rate of 1 hour
- Targets: Lambda Function (the one created in step #1)
- Configure input -> Constant (JSON text) and paste your config (as per step #4)
- Configure input -> Constant (JSON text) and paste your config (as per previous step)
#### File Naming

@@ -52,4 +62,4 @@

- If you run the Lambda function outside a VPC, you must enable public access to your database instance, a non VPC Lambda function executes on the public internet.
- If you run the Lambda function inside a VPC (not tested), you must allow access from the Lambda Security Group to your database instance. Also you must add a NAT gateway to your VPC so the Lambda can connect to S3.
- If you run the Lambda function outside a VPC, you must enable public access to your database instance, because a non-VPC Lambda function executes on the public internet.
- If you run the Lambda function inside a VPC, you must allow access from the Lambda Security Group to your database instance. Also you must either add a NAT gateway ([chargeable](https://aws.amazon.com/vpc/pricing/)) to your VPC so the Lambda can connect to S3 over the Internet, or add an [S3 VPC endpoint (free)](https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints-s3.html) and allow traffic to the appropriate S3 prefixlist.

@@ -66,3 +76,3 @@ #### Encryption

"PGHOST": "host",
"S3_BUCKET" : "db-backups",
"S3_BUCKET": "db-backups",
"ROOT": "hourly-backups",

@@ -96,10 +106,9 @@ "ENCRYPT_KEY": "c0d71d7ae094bdde1ef60db8503079ce615e71644133dc22e9686dc7216de8d0"

```json
{
"PGDATABASE": "dbname",
"PGUSER": "postgres",
"PGHOST": "host",
"S3_BUCKET" : "db-backups",
"ROOT": "hourly-backups",
"USE_IAM_AUTH": true
"PGDATABASE": "dbname",
"PGUSER": "postgres",
"PGHOST": "host",
"S3_BUCKET": "db-backups",
"ROOT": "hourly-backups",
"USE_IAM_AUTH": true
}

@@ -111,16 +120,46 @@ ```

#### SecretsManager-based Postgres authentication
If you prefer to not send DB details/credentials in the event parameters, you can store such details in SecretsManager and just provide the SecretId, then the function will fetch your DB details/credentials from the secret value.
NOTE: the execution role for the Lambda function must have access to GetSecretValue for the given secret.
Support for this can be enabled by setting the SECRETS_MANAGER_SECRET_ID, so your Cloudwatch Event looks like this:
```json
{
"SECRETS_MANAGER_SECRET_ID": "my/secret/id",
"S3_BUCKET": "db-backups",
"ROOT": "hourly-backups"
}
```
If you supply `SECRETS_MANAGER_SECRET_ID`, you can omit the 'PG\*' keys, and they will be fetched from your SecretsManager secret value instead with the following mapping:
| Secret Value | PG-Key |
| ------------ | ---------- |
| username | PGUSER |
| password | PGPASSWORD |
| dbname | PGDATABASE |
| host | PGHOST |
| port | PGPORT |
You can provide overrides in your event to any PG\* keys as event parameters will take precedence over secret values.
## Developer
#### Bundling a new `pg_dump` binary
1. Launch an EC2 instance with the Amazon Linux 2 AMI
2. Connect via SSH and:
```bash
# install postgres 13
# install postgres 15
sudo amazon-linux-extras install epel
sudo tee /etc/yum.repos.d/pgdg.repo<<EOF
[pgdg13]
name=PostgreSQL 13 for RHEL/CentOS 7 - x86_64
baseurl=https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64
[pgdg15]
name=PostgreSQL 15 for RHEL/CentOS 7 - x86_64
baseurl=https://download.postgresql.org/pub/repos/yum/15/redhat/rhel-7-x86_64
enabled=1

@@ -130,3 +169,3 @@ gpgcheck=0

sudo yum install postgresql13 postgresql13-server
sudo yum install postgresql15 postgresql15-server

@@ -139,10 +178,12 @@ exit

```bash
scp -i ~/aws.pem ec2-user@18.157.84.236:/usr/bin/pg_dump ./bin/postgres-13.3/pg_dump
scp -i ~/aws.pem ec2-user@18.157.84.236:/usr/lib64/{libcrypt.so.1,libnss3.so,libsmime3.so,libssl3.so,libsasl2.so.3,liblber-2.4.so.2,libldap_r-2.4.so.2} ./bin/postgres-13.3/
scp -i ~/aws.pem ec2-user@18.157.84.236:/usr/pgsql-13/lib/libpq.so.5 ./bin/postgres-13.3/libpq.so.5
scp ec2-user@your-ec2-hostname:/usr/bin/pg_dump ./bin/postgres-15.0/pg_dump
scp ec2-user@your-ec2-hostname:/usr/lib64/{libcrypt.so.1,libnss3.so,libsmime3.so,libssl3.so,libsasl2.so.3,liblber-2.4.so.2,libldap_r-2.4.so.2} ./bin/postgres-15.0/
scp ec2-user@your-ec2-hostname:/usr/pgsql-15/lib/libpq.so.5 ./bin/postgres-15.0/libpq.so.5
```
3. To use the new postgres binary pass PGDUMP_PATH in the event:
```json
{
"PGDUMP_PATH": "bin/postgres-13.3"
"PGDUMP_PATH": "bin/postgres-15.0"
}

@@ -153,3 +194,3 @@ ```

`npm run deploy`
`npm run makezip`

@@ -156,0 +197,0 @@ #### Contributing

@@ -92,2 +92,30 @@ /* eslint no-underscore-dangle: 0 */

it('should be able to authenticate via SecretsManager', async () => {
const { s3Spy, pgSpy } = makeMockHandler()
const secretsManagerMockEvent = { ...mockEvent, SECRETS_MANAGER_SECRET_ID: 'my-secret-id' }
const username = 'myuser'
const password = 'mypassword'
const secretValue = {
SecretString: JSON.stringify({ username, password })
}
AWSMOCK.mock('SecretsManager', 'getSecretValue', (params, callback) => {
expect(params.SecretId).to.eql(secretsManagerMockEvent.SECRETS_MANAGER_SECRET_ID)
callback(null, secretValue)
})
await handler(secretsManagerMockEvent)
// handler should have called pgSpy with correct arguments
expect(pgSpy.calledOnce).to.be.true
expect(s3Spy.calledOnce).to.be.true
expect(s3Spy.firstCall.args).to.have.length(3)
const config = s3Spy.firstCall.args[1]
// production code is synchronous, so this is annoying
expect(config.PGUSER).to.equal(username)
expect(config.PGPASSWORD).to.equal(password)
AWSMOCK.restore('SecretsManager')
})
it('should upload the backup file and an iv file', async () => {

@@ -94,0 +122,0 @@ const { s3Spy } = makeMockHandler()

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc