@cli4ai/postgres
Advanced tools
| #!/usr/bin/env node | ||
| export {}; |
+337
| #!/usr/bin/env node | ||
| import { Client } from 'pg'; | ||
| import { cli, loadEnv, output, outputError, withErrorHandling } from '@cli4ai/lib'; | ||
| loadEnv(); | ||
// Read-only statement openers: every statement must match one of these.
const READONLY_PATTERNS = [
    /^\s*SELECT\b/i,
    /^\s*SHOW\b/i,
    /^\s*EXPLAIN\b/i,
    /^\s*WITH\b/i,
];
// Statement openers that are always rejected.
const DANGEROUS_PATTERNS = [
    /^\s*INSERT\b/i,
    /^\s*UPDATE\b/i,
    /^\s*DELETE\b/i,
    /^\s*DROP\b/i,
    /^\s*CREATE\b/i,
    /^\s*ALTER\b/i,
    /^\s*TRUNCATE\b/i,
    /^\s*GRANT\b/i,
    /^\s*REVOKE\b/i,
    /^\s*COPY\b/i,
    /^\s*VACUUM\b/i,
    /^\s*REINDEX\b/i,
    /^\s*CLUSTER\b/i,
    /^\s*REFRESH\b/i,
    /^\s*CALL\b/i,
    /^\s*DO\b/i,
    /^\s*SET\b/i,
    /^\s*LOCK\b/i,
];
// Data-modifying keywords that can hide inside a CTE body,
// e.g. `WITH d AS (DELETE FROM t RETURNING *) SELECT * FROM d`.
const CTE_DML_PATTERN = /\b(INSERT|UPDATE|DELETE|MERGE)\b/i;
/**
 * Best-effort gate that accepts only read-only SQL.
 *
 * Checks EVERY semicolon-separated statement, not just the first one —
 * pg's simple query protocol executes multi-statement strings, so
 * `SELECT 1; DROP TABLE t` must be rejected. Also rejects WITH queries
 * that contain data-modifying CTEs, which PostgreSQL allows.
 * Splitting on ';' inside a string literal can only over-split, which
 * fails closed (the fragment will not match a read-only opener).
 * NOTE(review): a regex gate cannot be airtight; server-side read-only
 * enforcement should be used as well where possible.
 */
function isReadOnlyQuery(sql) {
    const statements = sql
        .split(';')
        .map((s) => s.trim())
        .filter((s) => s.length > 0);
    // Empty input is not a query; keep the original default-deny result.
    if (statements.length === 0)
        return false;
    for (const stmt of statements) {
        if (DANGEROUS_PATTERNS.some((p) => p.test(stmt)))
            return false;
        if (!READONLY_PATTERNS.some((p) => p.test(stmt)))
            return false;
        if (/^\s*WITH\b/i.test(stmt) && CTE_DML_PATTERN.test(stmt))
            return false;
    }
    return true;
}
| const PG_IDENTIFIER_PATTERN = /^[A-Za-z_][A-Za-z0-9_]*$/; | ||
/**
 * Parse a CLI argument as a strictly positive integer (>= 1).
 * Reports INVALID_INPUT via outputError (which terminates) otherwise.
 */
function parsePositiveInt(value, name) {
    const trimmed = String(value).trim();
    // Require digits only: parseInt('10abc') === 10 would otherwise
    // silently accept trailing garbage.
    if (!/^\d+$/.test(trimmed)) {
        outputError('INVALID_INPUT', `${name} must be a positive integer`, { value });
    }
    const parsed = parseInt(trimmed, 10);
    if (!Number.isFinite(parsed) || parsed < 1) {
        outputError('INVALID_INPUT', `${name} must be a positive integer`, { value });
    }
    return parsed;
}
/**
 * Turn a user-supplied `table` or `schema.table` string into a safely
 * double-quoted, schema-qualified name (default schema: public).
 * Reports INVALID_INPUT via outputError (which terminates) when either
 * part is not a plain identifier — this is what makes interpolating the
 * result into SQL safe.
 */
function getQualifiedTableName(table) {
    // Only bare identifiers are accepted, so quoting cannot be escaped.
    const identifier = /^[A-Za-z_][A-Za-z0-9_]*$/;
    const parts = table.split('.');
    if (parts.length > 2) {
        outputError('INVALID_INPUT', 'Invalid table name', { table });
    }
    const [schema, tableName] = parts.length === 2 ? parts : ['public', parts[0]];
    if (!identifier.test(schema) || !identifier.test(tableName)) {
        outputError('INVALID_INPUT', 'Invalid table name', { table });
    }
    return `"${schema}"."${tableName}"`;
}
/**
 * Resolve the PostgreSQL connection string.
 * Named connection: POSTGRES_<NAME>_URL, then POSTGRES_<NAME>.
 * Default: POSTGRES_URL, then DATABASE_URL.
 * Reports NOT_FOUND / ENV_MISSING via outputError (which terminates)
 * when nothing is configured.
 */
function getConnectionString(connName) {
    if (connName) {
        const upper = connName.toUpperCase();
        const primaryKey = `POSTGRES_${upper}_URL`;
        for (const key of [primaryKey, `POSTGRES_${upper}`]) {
            const value = process.env[key];
            if (value)
                return value;
        }
        outputError('NOT_FOUND', `Connection '${connName}' not found`, {
            hint: `Set ${primaryKey} in .env`
        });
    }
    const defaultUrl = process.env.POSTGRES_URL || process.env.DATABASE_URL;
    if (!defaultUrl) {
        outputError('ENV_MISSING', 'No connection specified', {
            hint: 'Set POSTGRES_URL in .env or specify connection name'
        });
    }
    return defaultUrl;
}
/**
 * Connect a pg Client for the given (optional) named connection, run
 * `fn` with it, and always release the connection — `client.end()` runs
 * in `finally` even when connect() or fn() throws.
 */
async function withClient(connName, fn) {
    const client = new Client({ connectionString: getConnectionString(connName) });
    try {
        await client.connect();
        return await fn(client);
    }
    finally {
        await client.end();
    }
}
| const program = cli('postgres', '1.0.0', 'PostgreSQL read-only queries'); | ||
| program | ||
| .command('databases [conn]') | ||
| .description('List databases') | ||
| .action(withErrorHandling(async (connName) => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT datname as name, pg_database_size(datname) as size_bytes, | ||
| pg_size_pretty(pg_database_size(datname)) as size | ||
| FROM pg_database | ||
| WHERE datistemplate = false | ||
| ORDER BY datname | ||
| `); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('schemas [conn]') | ||
| .description('List schemas') | ||
| .action(withErrorHandling(async (connName) => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT schema_name as name, schema_owner as owner | ||
| FROM information_schema.schemata | ||
| WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast') | ||
| ORDER BY schema_name | ||
| `); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('tables [conn] [schema]') | ||
| .description('List tables (default: public)') | ||
| .action(withErrorHandling(async (connName, schema = 'public') => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT table_name as name, table_type as type, | ||
| pg_size_pretty(pg_total_relation_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as size | ||
| FROM information_schema.tables | ||
| WHERE table_schema = $1 | ||
| ORDER BY table_name | ||
| `, [schema]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('views [conn] [schema]') | ||
| .description('List views') | ||
| .action(withErrorHandling(async (connName, schema = 'public') => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT table_name as name, view_definition as definition | ||
| FROM information_schema.views | ||
| WHERE table_schema = $1 | ||
| ORDER BY table_name | ||
| `, [schema]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('columns [conn] <table>') | ||
| .description('Column details') | ||
| .action(withErrorHandling(async (connName, table) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT column_name as name, data_type as type, | ||
| is_nullable as nullable, column_default as default, | ||
| character_maximum_length as max_length | ||
| FROM information_schema.columns | ||
| WHERE table_schema = $1 AND table_name = $2 | ||
| ORDER BY ordinal_position | ||
| `, [schema, tableName]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('indexes [conn] <table>') | ||
| .description('Show indexes') | ||
| .action(withErrorHandling(async (connName, table) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT indexname as name, indexdef as definition | ||
| FROM pg_indexes | ||
| WHERE schemaname = $1 AND tablename = $2 | ||
| `, [schema, tableName]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('constraints [conn] <table>') | ||
| .description('Show constraints') | ||
| .action(withErrorHandling(async (connName, table) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT conname as name, contype as type, | ||
| pg_get_constraintdef(c.oid) as definition | ||
| FROM pg_constraint c | ||
| JOIN pg_namespace n ON n.oid = c.connamespace | ||
| JOIN pg_class t ON t.oid = c.conrelid | ||
| WHERE n.nspname = $1 AND t.relname = $2 | ||
| `, [schema, tableName]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
program
    .command('fkeys [conn] <table>')
    .description('Foreign keys')
    .action(withErrorHandling(async (connName, table) => {
    const [schema, tableName] = table.includes('.')
        ? table.split('.')
        : ['public', table];
    await withClient(connName, async (client) => {
        // Join on constraint_schema as well as constraint_name:
        // constraint names are NOT unique across schemas, so joining on
        // the name alone can yield duplicate / cross-schema rows.
        const result = await client.query(`
        SELECT
          kcu.column_name as column,
          ccu.table_schema as foreign_schema,
          ccu.table_name as foreign_table,
          ccu.column_name as foreign_column,
          tc.constraint_name as constraint
        FROM information_schema.table_constraints tc
        JOIN information_schema.key_column_usage kcu
          ON tc.constraint_name = kcu.constraint_name
         AND tc.constraint_schema = kcu.constraint_schema
        JOIN information_schema.constraint_column_usage ccu
          ON ccu.constraint_name = tc.constraint_name
         AND ccu.constraint_schema = tc.constraint_schema
        WHERE tc.constraint_type = 'FOREIGN KEY'
          AND tc.table_schema = $1
          AND tc.table_name = $2
      `, [schema, tableName]);
        output(result.rows);
    });
}));
| program | ||
| .command('sample [conn] <table> [limit]') | ||
| .description('Sample rows (default: 10)') | ||
| .action(withErrorHandling(async (connName, table, limit = '10') => { | ||
| await withClient(connName, async (client) => { | ||
| const qualified = getQualifiedTableName(table); | ||
| const parsedLimit = parsePositiveInt(limit, 'limit'); | ||
| const result = await client.query(`SELECT * FROM ${qualified} LIMIT $1`, [parsedLimit]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('count [conn] <table>') | ||
| .description('Count rows') | ||
| .action(withErrorHandling(async (connName, table) => { | ||
| await withClient(connName, async (client) => { | ||
| const qualified = getQualifiedTableName(table); | ||
| const result = await client.query(`SELECT COUNT(*) as count FROM ${qualified}`); | ||
| output(result.rows[0]); | ||
| }); | ||
| })); | ||
program
    .command('query [conn] <sql>')
    .description('Run read-only SQL')
    .action(withErrorHandling(async (connName, sql) => {
    if (!isReadOnlyQuery(sql)) {
        outputError('INVALID_INPUT', 'Only read-only queries allowed (SELECT, SHOW, EXPLAIN, WITH)');
    }
    await withClient(connName, async (client) => {
        // Defense in depth: the regex gate above is best-effort, so also
        // execute inside a READ ONLY transaction and let the server
        // reject any write that slips through.
        await client.query('BEGIN TRANSACTION READ ONLY');
        try {
            const result = await client.query(sql);
            output(result.rows);
        }
        finally {
            await client.query('ROLLBACK');
        }
    });
}));
| program | ||
| .command('stats [conn] <table>') | ||
| .description('Table statistics') | ||
| .action(withErrorHandling(async (connName, table) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT | ||
| relname as table, | ||
| n_live_tup as live_rows, | ||
| n_dead_tup as dead_rows, | ||
| last_vacuum, | ||
| last_autovacuum, | ||
| last_analyze, | ||
| pg_size_pretty(pg_total_relation_size(quote_ident($1) || '.' || quote_ident($2))) as total_size, | ||
| pg_size_pretty(pg_table_size(quote_ident($1) || '.' || quote_ident($2))) as table_size, | ||
| pg_size_pretty(pg_indexes_size(quote_ident($1) || '.' || quote_ident($2))) as index_size | ||
| FROM pg_stat_user_tables | ||
| WHERE schemaname = $1 AND relname = $2 | ||
| `, [schema, tableName]); | ||
| output(result.rows[0]); | ||
| }); | ||
| })); | ||
| program | ||
| .command('sizes [conn] [schema]') | ||
| .description('Table sizes') | ||
| .action(withErrorHandling(async (connName, schema = 'public') => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT table_name as name, | ||
| pg_size_pretty(pg_total_relation_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as total, | ||
| pg_size_pretty(pg_table_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as table, | ||
| pg_size_pretty(pg_indexes_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as indexes | ||
| FROM information_schema.tables | ||
| WHERE table_schema = $1 AND table_type = 'BASE TABLE' | ||
| ORDER BY pg_total_relation_size(quote_ident(table_schema) || '.' || quote_ident(table_name)) DESC | ||
| `, [schema]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('search [conn] <pattern>') | ||
| .description('Search tables by name') | ||
| .action(withErrorHandling(async (connName, pattern) => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT table_schema as schema, table_name as name, table_type as type | ||
| FROM information_schema.tables | ||
| WHERE table_name ILIKE $1 | ||
| AND table_schema NOT IN ('pg_catalog', 'information_schema') | ||
| ORDER BY table_schema, table_name | ||
| `, [`%${pattern}%`]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('version [conn]') | ||
| .description('PostgreSQL version') | ||
| .action(withErrorHandling(async (connName) => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query('SELECT version()'); | ||
| output({ version: result.rows[0].version }); | ||
| }); | ||
| })); | ||
| program.parse(); |
+3
-3
| { | ||
| "name": "postgres", | ||
| "version": "1.0.9", | ||
| "version": "1.0.11", | ||
| "description": "PostgreSQL read-only queries", | ||
| "author": "cliforai", | ||
| "license": "MIT", | ||
| "entry": "run.ts", | ||
| "license": "BUSL-1.1", | ||
| "entry": "dist/run.js", | ||
| "runtime": "node", | ||
@@ -9,0 +9,0 @@ "keywords": [ |
+13
-5
| { | ||
| "name": "@cli4ai/postgres", | ||
| "version": "1.0.9", | ||
| "version": "1.0.11", | ||
| "description": "PostgreSQL read-only queries", | ||
| "author": "cliforai", | ||
| "license": "MIT", | ||
| "main": "run.ts", | ||
| "license": "BUSL-1.1", | ||
| "main": "dist/run.js", | ||
| "bin": { | ||
| "postgres": "./run.ts" | ||
| "postgres": "./dist/run.js" | ||
| }, | ||
@@ -36,3 +36,3 @@ "type": "module", | ||
| "files": [ | ||
| "run.ts", | ||
| "dist", | ||
| "cli4ai.json", | ||
@@ -44,3 +44,11 @@ "README.md", | ||
| "access": "public" | ||
| }, | ||
| "scripts": { | ||
| "build": "tsc", | ||
| "prepublishOnly": "npm run build" | ||
| }, | ||
| "devDependencies": { | ||
| "typescript": "^5.0.0", | ||
| "@types/node": "^22.0.0" | ||
| } | ||
| } |
-369
| #!/usr/bin/env npx tsx | ||
| import { Client } from 'pg'; | ||
| import { cli, loadEnv, output, outputError, withErrorHandling } from '@cli4ai/lib/cli.ts'; | ||
| loadEnv(); | ||
| // Read-only SQL patterns | ||
| const READONLY_PATTERNS = [ | ||
| /^\s*SELECT\b/i, | ||
| /^\s*SHOW\b/i, | ||
| /^\s*EXPLAIN\b/i, | ||
| /^\s*WITH\b/i, | ||
| ]; | ||
| // Dangerous patterns to block | ||
| const DANGEROUS_PATTERNS = [ | ||
| /^\s*INSERT\b/i, | ||
| /^\s*UPDATE\b/i, | ||
| /^\s*DELETE\b/i, | ||
| /^\s*DROP\b/i, | ||
| /^\s*CREATE\b/i, | ||
| /^\s*ALTER\b/i, | ||
| /^\s*TRUNCATE\b/i, | ||
| /^\s*GRANT\b/i, | ||
| /^\s*REVOKE\b/i, | ||
| /^\s*COPY\b/i, | ||
| /^\s*VACUUM\b/i, | ||
| /^\s*REINDEX\b/i, | ||
| /^\s*CLUSTER\b/i, | ||
| /^\s*REFRESH\b/i, | ||
| /^\s*CALL\b/i, | ||
| /^\s*DO\b/i, | ||
| /^\s*SET\b/i, | ||
| /^\s*LOCK\b/i, | ||
| ]; | ||
| function isReadOnlyQuery(sql: string): boolean { | ||
| const trimmed = sql.trim(); | ||
| for (const pattern of DANGEROUS_PATTERNS) { | ||
| if (pattern.test(trimmed)) return false; | ||
| } | ||
| for (const pattern of READONLY_PATTERNS) { | ||
| if (pattern.test(trimmed)) return true; | ||
| } | ||
| return false; | ||
| } | ||
| const PG_IDENTIFIER_PATTERN = /^[A-Za-z_][A-Za-z0-9_]*$/; | ||
| function parsePositiveInt(value: string, name: string): number { | ||
| const parsed = parseInt(value, 10); | ||
| if (!Number.isFinite(parsed) || parsed < 1) { | ||
| outputError('INVALID_INPUT', `${name} must be a positive integer`, { value }); | ||
| } | ||
| return parsed; | ||
| } | ||
| function getQualifiedTableName(table: string): string { | ||
| const parts = table.split('.'); | ||
| if (parts.length > 2) { | ||
| outputError('INVALID_INPUT', 'Invalid table name', { table }); | ||
| } | ||
| const schema = parts.length === 2 ? parts[0] : 'public'; | ||
| const tableName = parts.length === 2 ? parts[1] : parts[0]; | ||
| if (!PG_IDENTIFIER_PATTERN.test(schema) || !PG_IDENTIFIER_PATTERN.test(tableName)) { | ||
| outputError('INVALID_INPUT', 'Invalid table name', { table }); | ||
| } | ||
| return `"${schema}"."${tableName}"`; | ||
| } | ||
| function getConnectionString(connName?: string): string { | ||
| if (connName) { | ||
| const envKey = `POSTGRES_${connName.toUpperCase()}_URL`; | ||
| const url = process.env[envKey]; | ||
| if (url) return url; | ||
| const envKey2 = `POSTGRES_${connName.toUpperCase()}`; | ||
| const url2 = process.env[envKey2]; | ||
| if (url2) return url2; | ||
| outputError('NOT_FOUND', `Connection '${connName}' not found`, { | ||
| hint: `Set ${envKey} in .env` | ||
| }); | ||
| } | ||
| const defaultUrl = process.env.POSTGRES_URL || process.env.DATABASE_URL; | ||
| if (!defaultUrl) { | ||
| outputError('ENV_MISSING', 'No connection specified', { | ||
| hint: 'Set POSTGRES_URL in .env or specify connection name' | ||
| }); | ||
| } | ||
| return defaultUrl; | ||
| } | ||
| async function withClient<T>(connName: string | undefined, fn: (client: Client) => Promise<T>): Promise<T> { | ||
| const connectionString = getConnectionString(connName); | ||
| const client = new Client({ connectionString }); | ||
| try { | ||
| await client.connect(); | ||
| return await fn(client); | ||
| } finally { | ||
| await client.end(); | ||
| } | ||
| } | ||
| const program = cli('postgres', '1.0.0', 'PostgreSQL read-only queries'); | ||
| program | ||
| .command('databases [conn]') | ||
| .description('List databases') | ||
| .action(withErrorHandling(async (connName?: string) => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT datname as name, pg_database_size(datname) as size_bytes, | ||
| pg_size_pretty(pg_database_size(datname)) as size | ||
| FROM pg_database | ||
| WHERE datistemplate = false | ||
| ORDER BY datname | ||
| `); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('schemas [conn]') | ||
| .description('List schemas') | ||
| .action(withErrorHandling(async (connName?: string) => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT schema_name as name, schema_owner as owner | ||
| FROM information_schema.schemata | ||
| WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast') | ||
| ORDER BY schema_name | ||
| `); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('tables [conn] [schema]') | ||
| .description('List tables (default: public)') | ||
| .action(withErrorHandling(async (connName?: string, schema = 'public') => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT table_name as name, table_type as type, | ||
| pg_size_pretty(pg_total_relation_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as size | ||
| FROM information_schema.tables | ||
| WHERE table_schema = $1 | ||
| ORDER BY table_name | ||
| `, [schema]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('views [conn] [schema]') | ||
| .description('List views') | ||
| .action(withErrorHandling(async (connName?: string, schema = 'public') => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT table_name as name, view_definition as definition | ||
| FROM information_schema.views | ||
| WHERE table_schema = $1 | ||
| ORDER BY table_name | ||
| `, [schema]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('columns [conn] <table>') | ||
| .description('Column details') | ||
| .action(withErrorHandling(async (connName: string | undefined, table: string) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT column_name as name, data_type as type, | ||
| is_nullable as nullable, column_default as default, | ||
| character_maximum_length as max_length | ||
| FROM information_schema.columns | ||
| WHERE table_schema = $1 AND table_name = $2 | ||
| ORDER BY ordinal_position | ||
| `, [schema, tableName]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('indexes [conn] <table>') | ||
| .description('Show indexes') | ||
| .action(withErrorHandling(async (connName: string | undefined, table: string) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT indexname as name, indexdef as definition | ||
| FROM pg_indexes | ||
| WHERE schemaname = $1 AND tablename = $2 | ||
| `, [schema, tableName]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('constraints [conn] <table>') | ||
| .description('Show constraints') | ||
| .action(withErrorHandling(async (connName: string | undefined, table: string) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT conname as name, contype as type, | ||
| pg_get_constraintdef(c.oid) as definition | ||
| FROM pg_constraint c | ||
| JOIN pg_namespace n ON n.oid = c.connamespace | ||
| JOIN pg_class t ON t.oid = c.conrelid | ||
| WHERE n.nspname = $1 AND t.relname = $2 | ||
| `, [schema, tableName]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('fkeys [conn] <table>') | ||
| .description('Foreign keys') | ||
| .action(withErrorHandling(async (connName: string | undefined, table: string) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT | ||
| kcu.column_name as column, | ||
| ccu.table_schema as foreign_schema, | ||
| ccu.table_name as foreign_table, | ||
| ccu.column_name as foreign_column, | ||
| tc.constraint_name as constraint | ||
| FROM information_schema.table_constraints tc | ||
| JOIN information_schema.key_column_usage kcu | ||
| ON tc.constraint_name = kcu.constraint_name | ||
| JOIN information_schema.constraint_column_usage ccu | ||
| ON ccu.constraint_name = tc.constraint_name | ||
| WHERE tc.constraint_type = 'FOREIGN KEY' | ||
| AND tc.table_schema = $1 | ||
| AND tc.table_name = $2 | ||
| `, [schema, tableName]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('sample [conn] <table> [limit]') | ||
| .description('Sample rows (default: 10)') | ||
| .action(withErrorHandling(async (connName: string | undefined, table: string, limit = '10') => { | ||
| await withClient(connName, async (client) => { | ||
| const qualified = getQualifiedTableName(table); | ||
| const parsedLimit = parsePositiveInt(limit, 'limit'); | ||
| const result = await client.query(`SELECT * FROM ${qualified} LIMIT $1`, [parsedLimit]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('count [conn] <table>') | ||
| .description('Count rows') | ||
| .action(withErrorHandling(async (connName: string | undefined, table: string) => { | ||
| await withClient(connName, async (client) => { | ||
| const qualified = getQualifiedTableName(table); | ||
| const result = await client.query(`SELECT COUNT(*) as count FROM ${qualified}`); | ||
| output(result.rows[0]); | ||
| }); | ||
| })); | ||
| program | ||
| .command('query [conn] <sql>') | ||
| .description('Run read-only SQL') | ||
| .action(withErrorHandling(async (connName: string | undefined, sql: string) => { | ||
| if (!isReadOnlyQuery(sql)) { | ||
| outputError('INVALID_INPUT', 'Only read-only queries allowed (SELECT, SHOW, EXPLAIN, WITH)'); | ||
| } | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(sql); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('stats [conn] <table>') | ||
| .description('Table statistics') | ||
| .action(withErrorHandling(async (connName: string | undefined, table: string) => { | ||
| const [schema, tableName] = table.includes('.') | ||
| ? table.split('.') | ||
| : ['public', table]; | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT | ||
| relname as table, | ||
| n_live_tup as live_rows, | ||
| n_dead_tup as dead_rows, | ||
| last_vacuum, | ||
| last_autovacuum, | ||
| last_analyze, | ||
| pg_size_pretty(pg_total_relation_size(quote_ident($1) || '.' || quote_ident($2))) as total_size, | ||
| pg_size_pretty(pg_table_size(quote_ident($1) || '.' || quote_ident($2))) as table_size, | ||
| pg_size_pretty(pg_indexes_size(quote_ident($1) || '.' || quote_ident($2))) as index_size | ||
| FROM pg_stat_user_tables | ||
| WHERE schemaname = $1 AND relname = $2 | ||
| `, [schema, tableName]); | ||
| output(result.rows[0]); | ||
| }); | ||
| })); | ||
| program | ||
| .command('sizes [conn] [schema]') | ||
| .description('Table sizes') | ||
| .action(withErrorHandling(async (connName?: string, schema = 'public') => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT table_name as name, | ||
| pg_size_pretty(pg_total_relation_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as total, | ||
| pg_size_pretty(pg_table_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as table, | ||
| pg_size_pretty(pg_indexes_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as indexes | ||
| FROM information_schema.tables | ||
| WHERE table_schema = $1 AND table_type = 'BASE TABLE' | ||
| ORDER BY pg_total_relation_size(quote_ident(table_schema) || '.' || quote_ident(table_name)) DESC | ||
| `, [schema]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('search [conn] <pattern>') | ||
| .description('Search tables by name') | ||
| .action(withErrorHandling(async (connName: string | undefined, pattern: string) => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query(` | ||
| SELECT table_schema as schema, table_name as name, table_type as type | ||
| FROM information_schema.tables | ||
| WHERE table_name ILIKE $1 | ||
| AND table_schema NOT IN ('pg_catalog', 'information_schema') | ||
| ORDER BY table_schema, table_name | ||
| `, [`%${pattern}%`]); | ||
| output(result.rows); | ||
| }); | ||
| })); | ||
| program | ||
| .command('version [conn]') | ||
| .description('PostgreSQL version') | ||
| .action(withErrorHandling(async (connName?: string) => { | ||
| await withClient(connName, async (client) => { | ||
| const result = await client.query('SELECT version()'); | ||
| output({ version: result.rows[0].version }); | ||
| }); | ||
| })); | ||
| program.parse(); |
Major refactor
Supply chain risk: the package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: the package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 3 instances in 1 package
Non-permissive License
License: a license not known to be considered permissive was found.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: the package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 3 instances in 1 package
19554
0.57%5
25%558
1.27%2
Infinity%1
Infinity%90
-10%1
Infinity%