Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

idb-connector

Package Overview
Dependencies
Maintainers
5
Versions
47
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

idb-connector - npm Package Compare versions

Comparing version 1.2.3 to 1.2.4

build/stage/1.2.4/db2ia-v1.2.4-napi3-ibmi-ppc64.tar.gz

5

CHANGELOG.md
# idb-connector changelog
## 1.2.4
- [dbstmt] detect inconsistent data (#94)
- [dberror] move all debugging functions to dberror.h
- [test] add more test cases
## 1.2.3

@@ -3,0 +8,0 @@ - [dbstmt] fix a memory leak issue (#90)

1

docs/README.md

@@ -1436,2 +1436,3 @@ # DB2 for i Access APIs - idb-connector

Enables or disables automatic numeric conversion.
**Syntax 1:**

@@ -1438,0 +1439,0 @@

@@ -10,2 +10,32 @@ 'use strict'

const needEISDIRHandled = fs.lchown &&
!process.version.match(/v1[1-9]+\./) &&
!process.version.match(/v10\.[6-9]/)
/* istanbul ignore next */
const handleEISDIR =
needEISDIRHandled ? (path, uid, gid, cb) => er => {
// Node prior to v10 had a very questionable implementation of
// fs.lchown, which would always try to call fs.open on a directory
// Fall back to fs.chown in those cases.
if (!er || er.code !== 'EISDIR')
cb(er)
else
fs.chown(path, uid, gid, cb)
}
: (_, __, ___, cb) => cb
/* istanbul ignore next */
const handleEISDirSync =
needEISDIRHandled ? (path, uid, gid) => {
try {
return fs[LCHOWNSYNC](path, uid, gid)
} catch (er) {
if (er.code !== 'EISDIR')
throw er
fs.chownSync(path, uid, gid)
}
}
: (path, uid, gid) => fs[LCHOWNSYNC](path, uid, gid)
// fs.readdir could only accept an options object as of node v6

@@ -32,6 +62,9 @@ const nodeVersion = process.version

return cb(er)
fs[LCHOWN](path.resolve(p, child.name), uid, gid, cb)
const cpath = path.resolve(p, child.name)
fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, cb))
})
} else
fs[LCHOWN](path.resolve(p, child.name), uid, gid, cb)
} else {
const cpath = path.resolve(p, child.name)
fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, cb))
}
}

@@ -46,3 +79,4 @@

return cb(er)
if (er || !children.length) return fs[LCHOWN](p, uid, gid, cb)
if (er || !children.length)
return fs[LCHOWN](p, uid, gid, handleEISDIR(p, uid, gid, cb))

@@ -52,5 +86,8 @@ let len = children.length

const then = er => {
if (errState) return
if (er) return cb(errState = er)
if (-- len === 0) return fs[LCHOWN](p, uid, gid, cb)
if (errState)
return
if (er)
return cb(errState = er)
if (-- len === 0)
return fs[LCHOWN](p, uid, gid, handleEISDIR(p, uid, gid, cb))
}

@@ -72,3 +109,3 @@

fs[LCHOWNSYNC](path.resolve(p, child.name), uid, gid)
handleEISDirSync(path.resolve(p, child.name), uid, gid)
}

@@ -82,3 +119,3 @@

if (er && er.code === 'ENOTDIR' && er.code !== 'ENOTSUP')
return fs[LCHOWNSYNC](p, uid, gid)
return handleEISDirSync(p, uid, gid)
throw er

@@ -90,3 +127,3 @@ }

return fs[LCHOWNSYNC](p, uid, gid)
return handleEISDirSync(p, uid, gid)
}

@@ -93,0 +130,0 @@

33

node_modules/chownr/package.json
{
"_args": [
[
"chownr@1.1.1",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "chownr@1.1.1",
"_id": "chownr@1.1.1",
"_from": "chownr@^1.1.1",
"_id": "chownr@1.1.3",
"_inBundle": false,
"_integrity": "sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g==",
"_integrity": "sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw==",
"_location": "/chownr",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "chownr@1.1.1",
"raw": "chownr@^1.1.1",
"name": "chownr",
"escapedName": "chownr",
"rawSpec": "1.1.1",
"rawSpec": "^1.1.1",
"saveSpec": null,
"fetchSpec": "1.1.1"
"fetchSpec": "^1.1.1"
},

@@ -27,5 +21,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz",
"_spec": "1.1.1",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.3.tgz",
"_shasum": "42d837d5239688d55f303003a508230fa6727142",
"_spec": "chownr@^1.1.1",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/tar",
"author": {

@@ -39,2 +34,4 @@ "name": "Isaac Z. Schlueter",

},
"bundleDependencies": false,
"deprecated": false,
"description": "like `chown -R`",

@@ -58,3 +55,3 @@ "devDependencies": {

"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postpublish": "git push origin --follow-tags",
"postversion": "npm publish",

@@ -64,3 +61,3 @@ "preversion": "npm test",

},
"version": "1.1.1"
"version": "1.1.3"
}

@@ -9,2 +9,3 @@ 'use strict'

const writeBuffers = binding.writeBuffers
/* istanbul ignore next */
const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback

@@ -11,0 +12,0 @@

{
"_args": [
[
"fs-minipass@1.2.6",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "fs-minipass@1.2.6",
"_id": "fs-minipass@1.2.6",
"_from": "fs-minipass@^1.2.5",
"_id": "fs-minipass@1.2.7",
"_inBundle": false,
"_integrity": "sha512-crhvyXcMejjv3Z5d2Fa9sf5xLYVCF5O1c71QxbVnbLsmYMBEvDAftewesN/HhY03YRoA7zOMxjNGrF5svGaaeQ==",
"_integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==",
"_location": "/fs-minipass",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "fs-minipass@1.2.6",
"raw": "fs-minipass@^1.2.5",
"name": "fs-minipass",
"escapedName": "fs-minipass",
"rawSpec": "1.2.6",
"rawSpec": "^1.2.5",
"saveSpec": null,
"fetchSpec": "1.2.6"
"fetchSpec": "^1.2.5"
},

@@ -27,5 +21,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.6.tgz",
"_spec": "1.2.6",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
"_shasum": "ccff8570841e7fe4265693da88936c55aed7f7c7",
"_spec": "fs-minipass@^1.2.5",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/tar",
"author": {

@@ -39,9 +34,11 @@ "name": "Isaac Z. Schlueter",

},
"bundleDependencies": false,
"dependencies": {
"minipass": "^2.2.1"
"minipass": "^2.6.0"
},
"deprecated": false,
"description": "fs read and write streams based on minipass",
"devDependencies": {
"mutate-fs": "^2.0.1",
"tap": "^13.1.9"
"tap": "^14.6.4"
},

@@ -69,3 +66,3 @@ "files": [

},
"version": "1.2.6"
"version": "1.2.7"
}

@@ -20,3 +20,3 @@ 'use strict'

this.follow = !!opts.follow
this.result = this.parent ? this.parent.result : []
this.result = this.parent ? this.parent.result : new Set()
this.entries = null

@@ -35,4 +35,8 @@ this.sawError = false

this.sawError = true
else if (ev === 'done' && !this.parent)
data = data.sort(this.sort)
else if (ev === 'done' && !this.parent) {
data = Array.from(data)
.map(e => /^@/.test(e) ? `./${e}` : e).sort(this.sort)
this.result = data
}
if (ev === 'error' && this.parent)

@@ -62,3 +66,3 @@ ret = this.parent.emit('error', data)

if (this.includeEmpty)
this.result.push(this.path.substr(this.root.length + 1))
this.result.add(this.path.substr(this.root.length + 1))
this.emit('done', this.result)

@@ -151,3 +155,3 @@ } else {

if (file)
this.result.push(abs.substr(this.root.length + 1))
this.result.add(abs.substr(this.root.length + 1))
then()

@@ -154,0 +158,0 @@ } else {

{
"_args": [
[
"ignore-walk@3.0.1",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "ignore-walk@3.0.1",
"_id": "ignore-walk@3.0.1",
"_from": "ignore-walk@^3.0.1",
"_id": "ignore-walk@3.0.3",
"_inBundle": false,
"_integrity": "sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ==",
"_integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==",
"_location": "/ignore-walk",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "ignore-walk@3.0.1",
"raw": "ignore-walk@^3.0.1",
"name": "ignore-walk",
"escapedName": "ignore-walk",
"rawSpec": "3.0.1",
"rawSpec": "^3.0.1",
"saveSpec": null,
"fetchSpec": "3.0.1"
"fetchSpec": "^3.0.1"
},

@@ -27,5 +21,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.1.tgz",
"_spec": "3.0.1",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz",
"_shasum": "017e2447184bfeade7c238e4aefdd1e8f95b1e37",
"_spec": "ignore-walk@^3.0.1",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/npm-packlist",
"author": {

@@ -39,5 +34,7 @@ "name": "Isaac Z. Schlueter",

},
"bundleDependencies": false,
"dependencies": {
"minimatch": "^3.0.4"
},
"deprecated": false,
"description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.",

@@ -48,3 +45,3 @@ "devDependencies": {

"rimraf": "^2.6.1",
"tap": "^10.7.2"
"tap": "^14.6.9"
},

@@ -74,5 +71,8 @@ "files": [

"preversion": "npm test",
"test": "tap test/*.js --100"
"test": "tap"
},
"version": "3.0.1"
"tap": {
"jobs": 1
},
"version": "3.0.3"
}
# ignore-walk
[![Build
Status](https://travis-ci.org/isaacs/ignore-walk.svg?branch=master)](https://travis-ci.org/isaacs/ignore-walk)
Status](https://travis-ci.org/npm/ignore-walk.svg?branch=master)](https://travis-ci.org/npm/ignore-walk)

@@ -6,0 +6,0 @@ Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.

'use strict'
const EE = require('events')
const Yallist = require('yallist')
const SD = require('string_decoder').StringDecoder
const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const doIter = process.env._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator || Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator || Symbol('iterator not implemented')
const FLUSHCHUNK = Symbol('flushChunk')
const SD = require('string_decoder').StringDecoder
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')

@@ -23,16 +23,39 @@ const BUFFERLENGTH = Symbol('bufferLength')

const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
// TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|| Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
|| Symbol('iterator not implemented')
// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from
// or Buffer.alloc, and Buffer in node 10 deprecated the ctor.
// .M, this is fine .\^/M..
let B = Buffer
/* istanbul ignore next */
if (!B.alloc) {
B = require('safe-buffer').Buffer
}
const B = Buffer.alloc ? Buffer
: /* istanbul ignore next */ require('safe-buffer').Buffer
module.exports = class MiniPass extends EE {
// events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev =>
ev === 'end' ||
ev === 'finish' ||
ev === 'prefinish'
const isArrayBuffer = b => b instanceof ArrayBuffer ||
typeof b === 'object' &&
b.constructor &&
b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0
const isArrayBufferView = b => !B.isBuffer(b) && ArrayBuffer.isView(b)
module.exports = class Minipass extends EE {
constructor (options) {
super()
this[FLOWING] = false
// whether we're explicitly paused
this[PAUSED] = false
this.pipes = new Yallist()

@@ -50,2 +73,3 @@ this.buffer = new Yallist()

this[EMITTED_END] = false
this[EMITTING_END] = false
this[CLOSED] = false

@@ -55,2 +79,3 @@ this.writable = true

this[BUFFERLENGTH] = 0
this[DESTROYED] = false
}

@@ -82,2 +107,5 @@

get objectMode () { return this[OBJECTMODE] }
set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ }
write (chunk, encoding, cb) {

@@ -87,2 +115,10 @@ if (this[EOF])

if (this[DESTROYED]) {
this.emit('error', Object.assign(
new Error('Cannot call write after a stream was destroyed'),
{ code: 'ERR_STREAM_DESTROYED' }
))
return true
}
if (typeof encoding === 'function')

@@ -94,2 +130,27 @@ cb = encoding, encoding = 'utf8'

// convert array buffers and typed array views into buffers
// at some point in the future, we may want to do the opposite!
// leave strings and buffers as-is
// anything else switches us into object mode
if (!this[OBJECTMODE] && !B.isBuffer(chunk)) {
if (isArrayBufferView(chunk))
chunk = B.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
else if (isArrayBuffer(chunk))
chunk = B.from(chunk)
else if (typeof chunk !== 'string')
// use the setter so we throw if we have encoding set
this.objectMode = true
}
// this ensures at this point that the chunk is a buffer or string
// don't buffer it up or send it to the decoder
if (!this.objectMode && !chunk.length) {
const ret = this.flowing
if (this[BUFFERLENGTH] !== 0)
this.emit('readable')
if (cb)
cb()
return ret
}
// fast-path writing strings of same encoding to a stream with

@@ -111,3 +172,4 @@ // an empty buffer, skipping the buffer/decoder dance

} finally {
this.emit('readable')
if (this[BUFFERLENGTH] !== 0)
this.emit('readable')
if (cb)

@@ -119,2 +181,5 @@ cb()

read (n) {
if (this[DESTROYED])
return null
try {

@@ -172,4 +237,10 @@ if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])

this.writable = false
if (this.flowing)
// if we haven't written anything, then go ahead and emit,
// even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases.
if (this.flowing || !this[PAUSED])
this[MAYBE_EMIT_END]()
return this
}

@@ -179,2 +250,6 @@

[RESUME] () {
if (this[DESTROYED])
return
this[PAUSED] = false
this[FLOWING] = true

@@ -196,4 +271,9 @@ this.emit('resume')

this[FLOWING] = false
this[PAUSED] = true
}
get destroyed () {
return this[DESTROYED]
}
get flowing () {

@@ -203,2 +283,6 @@ return this[FLOWING]

get paused () {
return this[PAUSED]
}
[BUFFERPUSH] (chunk) {

@@ -234,4 +318,12 @@ if (this[OBJECTMODE])

pipe (dest, opts) {
if (this[DESTROYED])
return
const ended = this[EMITTED_END]
opts = opts || {}
if (dest === process.stdout || dest === process.stderr)
(opts = opts || {}).end = false
opts.end = false
else
opts.end = opts.end !== false
const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }

@@ -242,2 +334,5 @@ this.pipes.push(p)

this[RESUME]()
// piping an ended stream ends immediately
if (ended && p.opts.end)
p.dest.end()
return dest

@@ -256,5 +351,5 @@ }

this[RESUME]()
else if (ev === 'end' && this[EMITTED_END]) {
super.emit('end')
this.removeAllListeners('end')
else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev)
this.removeAllListeners(ev)
}

@@ -269,3 +364,8 @@ }

[MAYBE_EMIT_END] () {
if (!this[EMITTED_END] && this.buffer.length === 0 && this[EOF]) {
if (!this[EMITTING_END] &&
!this[EMITTED_END] &&
!this[DESTROYED] &&
this.buffer.length === 0 &&
this[EOF]) {
this[EMITTING_END] = true
this.emit('end')

@@ -276,2 +376,3 @@ this.emit('prefinish')

this.emit('close')
this[EMITTING_END] = false
}

@@ -281,3 +382,6 @@ }

emit (ev, data) {
if (ev === 'data') {
// error and close are only events allowed after calling destroy()
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
return
else if (ev === 'data') {
if (!data)

@@ -287,4 +391,6 @@ return

if (this.pipes.length)
this.pipes.forEach(p => p.dest.write(data) || this.pause())
this.pipes.forEach(p =>
p.dest.write(data) === false && this.pause())
} else if (ev === 'end') {
// only actual end gets this treatment
if (this[EMITTED_END] === true)

@@ -306,3 +412,3 @@ return

p.dest.removeListener('drain', p.ondrain)
if (!p.opts || p.opts.end !== false)
if (p.opts.end)
p.dest.end()

@@ -313,6 +419,7 @@ })

// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END])
if (!this[EMITTED_END] && !this[DESTROYED])
return
}
// TODO: replace with a spread operator when Node v4 support drops
const args = new Array(arguments.length)

@@ -330,6 +437,6 @@ args[0] = ev

} finally {
if (ev !== 'end')
if (!isEndish(ev))
this[MAYBE_EMIT_END]()
else
this.removeAllListeners('end')
this.removeAllListeners(ev)
}

@@ -340,7 +447,27 @@ }

collect () {
const buf = []
buf.dataLength = 0
this.on('data', c => {
buf.push(c)
buf.dataLength += c.length
})
return this.promise().then(() => buf)
}
// const data = await stream.concat()
concat () {
return this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this.collect().then(buf =>
this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING] ? buf.join('') : B.concat(buf, buf.dataLength))
}
// stream.promise().then(() => done, er => emitted error)
promise () {
return new Promise((resolve, reject) => {
const buf = []
this.on('data', c => buf.push(c))
this.on('end', () => resolve(buf))
this.on('error', reject)
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
this.on('end', () => resolve())
this.on('error', er => reject(er))
})

@@ -377,5 +504,7 @@ }

}
const ondestroy = () => onerr(new Error('stream destroyed'))
return new Promise((res, rej) => {
reject = rej
resolve = res
this.once(DESTROYED, ondestroy)
this.once('error', onerr)

@@ -399,2 +528,35 @@ this.once('end', onend)

}
destroy (er) {
if (this[DESTROYED]) {
if (er)
this.emit('error', er)
else
this.emit(DESTROYED)
return this
}
this[DESTROYED] = true
// throw away all buffered data, it's never coming out
this.buffer = new Yallist()
this[BUFFERLENGTH] = 0
if (typeof this.close === 'function' && !this[CLOSED])
this.close()
if (er)
this.emit('error', er)
else // if no error to emit, still reject pending promises
this.emit(DESTROYED)
return this
}
static isStream (s) {
return !!s && (s instanceof Minipass || s instanceof EE && (
typeof s.pipe === 'function' || // readable
(typeof s.write === 'function' && typeof s.end === 'function') // writable
))
}
}
{
"_args": [
[
"minipass@2.3.5",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "minipass@2.3.5",
"_id": "minipass@2.3.5",
"_from": "minipass@^2.8.6",
"_id": "minipass@2.9.0",
"_inBundle": false,
"_integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==",
"_integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==",
"_location": "/minipass",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "minipass@2.3.5",
"raw": "minipass@^2.8.6",
"name": "minipass",
"escapedName": "minipass",
"rawSpec": "2.3.5",
"rawSpec": "^2.8.6",
"saveSpec": null,
"fetchSpec": "2.3.5"
"fetchSpec": "^2.8.6"
},

@@ -29,5 +23,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz",
"_spec": "2.3.5",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz",
"_shasum": "e713762e7d3e32fed803115cf93e04bca9fcc9a6",
"_spec": "minipass@^2.8.6",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/tar",
"author": {

@@ -41,2 +36,3 @@ "name": "Isaac Z. Schlueter",

},
"bundleDependencies": false,
"dependencies": {

@@ -46,6 +42,7 @@ "safe-buffer": "^5.1.2",

},
"deprecated": false,
"description": "minimal implementation of a PassThrough stream",
"devDependencies": {
"end-of-stream": "^1.4.0",
"tap": "^12.0.1",
"tap": "^14.6.5",
"through2": "^2.0.3"

@@ -69,8 +66,11 @@ },

"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postpublish": "git push origin --follow-tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100"
"test": "tap"
},
"version": "2.3.5"
"tap": {
"check-coverage": true
},
"version": "2.9.0"
}

@@ -27,2 +27,6 @@ # minipass

`objectMode` can also be set by doing `stream.objectMode = true`, or by
writing any non-string/non-buffer data. `objectMode` cannot be set to
false once it is set.
This is not a `through` or `through2` stream. It doesn't transform

@@ -34,10 +38,225 @@ the data, it just passes it right through. If you want to transform

For an example of a stream that extends MiniPass to provide transform
capabilities, check out [minizlib](http://npm.im/minizlib).
For some examples of streams that extend Minipass in various ways, check
out:
- [minizlib](http://npm.im/minizlib)
- [fs-minipass](http://npm.im/fs-minipass)
- [tar](http://npm.im/tar)
- [minipass-collect](http://npm.im/minipass-collect)
- [minipass-flush](http://npm.im/minipass-flush)
- [minipass-pipeline](http://npm.im/minipass-pipeline)
- [tap](http://npm.im/tap)
- [tap-parser](http://npm.im/tap-parser)
- [treport](http://npm.im/treport)
## Differences from Node.js Streams
There are several things that make Minipass streams different from (and in
some ways superior to) Node.js core streams.
Please read these caveats if you are familiar with node-core streams and
intend to use Minipass streams in your programs.
### Timing
Minipass streams are designed to support synchronous use-cases. Thus, data
is emitted as soon as it is available, always. It is buffered until read,
but no longer. Another way to look at it is that Minipass streams are
exactly as synchronous as the logic that writes into them.
This can be surprising if your code relies on `PassThrough.write()` always
providing data on the next tick rather than the current one, or being able
to call `resume()` and not have the entire buffer disappear immediately.
However, without this synchronicity guarantee, there would be no way for
Minipass to achieve the speeds it does, or support the synchronous use
cases that it does. Simply put, waiting takes time.
This non-deferring approach makes Minipass streams much easier to reason
about, especially in the context of Promises and other flow-control
mechanisms.
### No High/Low Water Marks
Node.js core streams will optimistically fill up a buffer, returning `true`
on all writes until the limit is hit, even if the data has nowhere to go.
Then, they will not attempt to draw more data in until the buffer size dips
below a minimum value.
Minipass streams are much simpler. The `write()` method will return `true`
if the data has somewhere to go (which is to say, given the timing
guarantees, that the data is already there by the time `write()` returns).
If the data has nowhere to go, then `write()` returns false, and the data
sits in a buffer, to be drained out immediately as soon as anyone consumes
it.
### Hazards of Buffering (or: Why Minipass Is So Fast)
Since data written to a Minipass stream is immediately written all the way
through the pipeline, and `write()` always returns true/false based on
whether the data was fully flushed, backpressure is communicated
immediately to the upstream caller. This minimizes buffering.
Consider this case:
```js
const {PassThrough} = require('stream')
const p1 = new PassThrough({ highWaterMark: 1024 })
const p2 = new PassThrough({ highWaterMark: 1024 })
const p3 = new PassThrough({ highWaterMark: 1024 })
const p4 = new PassThrough({ highWaterMark: 1024 })
p1.pipe(p2).pipe(p3).pipe(p4)
p4.on('data', () => console.log('made it through'))
// this returns false and buffers, then writes to p2 on next tick (1)
// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2)
// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3)
// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain'
// on next tick (4)
// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and
// 'drain' on next tick (5)
// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6)
// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next
// tick (7)
p1.write(Buffer.alloc(2048)) // returns false
```
Along the way, the data was buffered and deferred at each stage, and
multiple event deferrals happened, for an unblocked pipeline where it was
perfectly safe to write all the way through!
Furthermore, setting a `highWaterMark` of `1024` might lead someone reading
the code to think an advisory maximum of 1KiB is being set for the
pipeline. However, the actual advisory buffering level is the _sum_ of
`highWaterMark` values, since each one has its own bucket.
Consider the Minipass case:
```js
const m1 = new Minipass()
const m2 = new Minipass()
const m3 = new Minipass()
const m4 = new Minipass()
m1.pipe(m2).pipe(m3).pipe(m4)
m4.on('data', () => console.log('made it through'))
// m1 is flowing, so it writes the data to m2 immediately
// m2 is flowing, so it writes the data to m3 immediately
// m3 is flowing, so it writes the data to m4 immediately
// m4 is flowing, so it fires the 'data' event immediately, returns true
// m4's write returned true, so m3 is still flowing, returns true
// m3's write returned true, so m2 is still flowing, returns true
// m2's write returned true, so m1 is still flowing, returns true
// No event deferrals or buffering along the way!
m1.write(Buffer.alloc(2048)) // returns true
```
It is extremely unlikely that you _don't_ want to buffer any data written,
or _ever_ buffer data that can be flushed all the way through. Neither
node-core streams nor Minipass ever fail to buffer written data, but
node-core streams do a lot of unnecessary buffering and pausing.
As always, the faster implementation is the one that does less stuff and
waits less time to do it.
### Immediately emit `end` for empty streams (when not paused)
If a stream is not paused, and `end()` is called before writing any data
into it, then it will emit `end` immediately.
If you have logic that occurs on the `end` event which you don't want to
potentially happen immediately (for example, closing file descriptors,
moving on to the next entry in an archive parse stream, etc.) then be sure
to call `stream.pause()` on creation, and then `stream.resume()` once you
are ready to respond to the `end` event.
### Emit `end` When Asked
One hazard of immediately emitting `'end'` is that you may not yet have had
a chance to add a listener. In order to avoid this hazard, Minipass
streams safely re-emit the `'end'` event if a new listener is added after
`'end'` has been emitted.
Ie, if you do `stream.on('end', someFunction)`, and the stream has already
emitted `end`, then it will call the handler right away. (You can think of
this somewhat like attaching a new `.then(fn)` to a previously-resolved
Promise.)
To prevent calling handlers multiple times who would not expect multiple
ends to occur, all listeners are removed from the `'end'` event whenever it
is emitted.
### Impact of "immediate flow" on Tee-streams
A "tee stream" is a stream piping to multiple destinations:
```js
const tee = new Minipass()
tee.pipe(dest1)
tee.pipe(dest2)
tee.write('foo') // goes to both destinations
```
Since Minipass streams _immediately_ process any pending data through the
pipeline when a new pipe destination is added, this can have surprising
effects, especially when a stream comes in from some other function and may
or may not have data in its buffer.
```js
// WARNING! WILL LOSE DATA!
const src = new Minipass()
src.write('foo')
src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
src.pipe(dest2) // gets nothing!
```
The solution is to create a dedicated tee-stream junction that pipes to
both locations, and then pipe to _that_ instead.
```js
// Safe example: tee to both places
const src = new Minipass()
src.write('foo')
const tee = new Minipass()
tee.pipe(dest1)
tee.pipe(dest2)
src.pipe(tee) // tee gets 'foo', pipes to both locations
```
The same caveat applies to `on('data')` event listeners. The first one
added will _immediately_ receive all of the data, leaving nothing for the
second:
```js
// WARNING! WILL LOSE DATA!
const src = new Minipass()
src.write('foo')
src.on('data', handler1) // receives 'foo' right away
src.on('data', handler2) // nothing to see here!
```
Using a dedicated tee-stream can be used in this case as well:
```js
// Safe example: tee to both data handlers
const src = new Minipass()
src.write('foo')
const tee = new Minipass()
tee.on('data', handler1)
tee.on('data', handler2)
src.pipe(tee)
```
## USAGE
It's a stream! Use it like a stream and it'll most likely do what you want.
```js
const MiniPass = require('minipass')
const mp = new MiniPass(options) // optional: { encoding }
const Minipass = require('minipass')
const mp = new Minipass(options) // optional: { encoding, objectMode }
mp.write('foo')

@@ -48,2 +267,132 @@ mp.pipe(someOtherStream)

### OPTIONS
* `encoding` How would you like the data coming _out_ of the stream to be
encoded? Accepts any values that can be passed to `Buffer.toString()`.
* `objectMode` Emit data exactly as it comes in. This will be flipped on
by default if you write() something other than a string or Buffer at any
point. Setting `objectMode: true` will prevent setting any encoding
value.
### API
Implements the user-facing portions of Node.js's `Readable` and `Writable`
streams.
### Methods
* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the
base Minipass class, the same data will come out.) Returns `false` if
the stream will buffer the next write, or true if it's still in
"flowing" mode.
* `end([chunk, [encoding]], [callback])` - Signal that you have no more
data to write. This will queue an `end` event to be fired when all the
data has been consumed.
* `setEncoding(encoding)` - Set the encoding for data coming out of the
  stream. This can only be done once.
* `pause()` - No more data for a while, please. This also prevents `end`
from being emitted for empty streams until the stream is resumed.
* `resume()` - Resume the stream. If there's data in the buffer, it is
all discarded. Any buffered events are immediately emitted.
* `pipe(dest)` - Send all output to the stream provided. There is no way
to unpipe. When data is emitted, it is immediately written to any and
all pipe destinations.
* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters.
Some events are given special treatment, however. (See below under
"events".)
* `promise()` - Returns a Promise that resolves when the stream emits
`end`, or rejects if the stream emits `error`.
* `collect()` - Return a Promise that resolves on `end` with an array
containing each chunk of data that was emitted, or rejects if the
stream emits `error`. Note that this consumes the stream data.
* `concat()` - Same as `collect()`, but concatenates the data into a
single Buffer object. Will reject the returned promise if the stream is
in objectMode, or if it goes into objectMode by the end of the data.
* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not
provided, then consume all of it. If `n` bytes are not available, then
it returns null. **Note** consuming streams in this way is less
efficient, and can lead to unnecessary Buffer copying.
* `destroy([er])` - Destroy the stream. If an error is provided, then an
`'error'` event is emitted. If the stream has a `close()` method, and
has not emitted a `'close'` event yet, then `stream.close()` will be
called. Any Promises returned by `.promise()`, `.collect()` or
`.concat()` will be rejected. After being destroyed, writing to the
stream will emit an error. No more data will be emitted if the stream is
destroyed, even if it was previously buffered.
### Properties
* `bufferLength` Read-only. Total number of bytes buffered, or in the case
of objectMode, the total number of objects.
* `encoding` The encoding that has been set. (Setting this is equivalent
to calling `setEncoding(enc)` and has the same prohibition against
setting multiple times.)
* `flowing` Read-only. Boolean indicating whether a chunk written to the
stream will be immediately emitted.
* `emittedEnd` Read-only. Boolean indicating whether the end-ish events
(ie, `end`, `prefinish`, `finish`) have been emitted. Note that
listening on any end-ish event will immediately re-emit it if it has
already been emitted.
* `writable` Whether the stream is writable. Default `true`. Set to
`false` when `end()`
* `readable` Whether the stream is readable. Default `true`.
* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written
to the stream that have not yet been emitted. (It's probably a bad idea
to mess with this.)
* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that
this stream is piping into. (It's probably a bad idea to mess with
this.)
* `destroyed` A getter that indicates whether the stream was destroyed.
* `paused` True if the stream has been explicitly paused, otherwise false.
* `objectMode` Indicates whether the stream is in `objectMode`. Once set
to `true`, it cannot be set to `false`.
### Events
* `data` Emitted when there's data to read. Argument is the data to read.
This is never emitted while not flowing. If a listener is attached, that
will resume the stream.
* `end` Emitted when there's no more data to read. This will be emitted
immediately for empty streams when `end()` is called. If a listener is
attached, and `end` was already emitted, then it will be emitted again.
All listeners are removed when `end` is emitted.
* `prefinish` An end-ish event that follows the same logic as `end` and is
emitted in the same conditions where `end` is emitted. Emitted after
`'end'`.
* `finish` An end-ish event that follows the same logic as `end` and is
emitted in the same conditions where `end` is emitted. Emitted after
`'prefinish'`.
* `close` An indication that an underlying resource has been released.
Minipass does not emit this event, but will defer it until after `end`
has been emitted, since it throws off some stream libraries otherwise.
* `drain` Emitted when the internal buffer empties, and it is again
suitable to `write()` into the stream.
* `readable` Emitted when data is buffered and ready to be read by a
consumer.
* `resume` Emitted when stream changes state from buffering to flowing
mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event
listener is added.)
### Static Methods
* `Minipass.isStream(stream)` Returns `true` if the argument is a stream,
and false otherwise. To be considered a stream, the object must be
either an instance of Minipass, or an EventEmitter that has either a
`pipe()` method, or both `write()` and `end()` methods. (Pretty much any
stream in node-land will return `true` for this.)
## EXAMPLES
Here are some examples of things you can do with Minipass streams.
### simple "are you done yet" promise
```js
mp.promise().then(() => {
// stream is finished
}, er => {
// stream emitted an error
})
```
### collecting

@@ -63,2 +412,15 @@

### collecting into a single blob
This is a bit slower because it concatenates the data into one chunk for
you, but if you're going to do it yourself anyway, it's convenient this
way:
```js
mp.concat().then(onebigchunk => {
// onebigchunk is a string if the stream
// had an encoding set, or a buffer otherwise.
})
```
### iteration

@@ -129,1 +491,121 @@

```
### subclass that `console.log()`s everything written into it
```js
class Logger extends Minipass {
write (chunk, encoding, callback) {
console.log('WRITE', chunk, encoding)
return super.write(chunk, encoding, callback)
}
end (chunk, encoding, callback) {
console.log('END', chunk, encoding)
return super.end(chunk, encoding, callback)
}
}
someSource.pipe(new Logger()).pipe(someDest)
```
### same thing, but using an inline anonymous class
```js
// js classes are fun
someSource
.pipe(new (class extends Minipass {
emit (ev, ...data) {
// let's also log events, because debugging some weird thing
console.log('EMIT', ev)
return super.emit(ev, ...data)
}
write (chunk, encoding, callback) {
console.log('WRITE', chunk, encoding)
return super.write(chunk, encoding, callback)
}
end (chunk, encoding, callback) {
console.log('END', chunk, encoding)
return super.end(chunk, encoding, callback)
}
}))
.pipe(someDest)
```
### subclass that defers 'end' for some reason
```js
class SlowEnd extends Minipass {
emit (ev, ...args) {
if (ev === 'end') {
console.log('going to end, hold on a sec')
setTimeout(() => {
console.log('ok, ready to end now')
super.emit('end', ...args)
}, 100)
} else {
return super.emit(ev, ...args)
}
}
}
```
### transform that creates newline-delimited JSON
```js
class NDJSONEncode extends Minipass {
write (obj, cb) {
try {
// JSON.stringify can throw, emit an error on that
return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
} catch (er) {
this.emit('error', er)
}
}
end (obj, cb) {
if (typeof obj === 'function') {
cb = obj
obj = undefined
}
if (obj !== undefined) {
this.write(obj)
}
return super.end(cb)
}
}
```
### transform that parses newline-delimited JSON
```js
class NDJSONDecode extends Minipass {
constructor (options) {
// always be in object mode, as far as Minipass is concerned
super({ objectMode: true })
this._jsonBuffer = ''
}
  write (chunk, encoding, cb) {
    if (typeof chunk === 'string' &&
        typeof encoding === 'string' &&
        encoding !== 'utf8') {
      chunk = Buffer.from(chunk, encoding).toString()
    } else if (Buffer.isBuffer(chunk)) {
      chunk = chunk.toString()
    }
    if (typeof encoding === 'function') {
      cb = encoding
    }
    const jsonData = (this._jsonBuffer + chunk).split('\n')
    this._jsonBuffer = jsonData.pop()
    for (let i = 0; i < jsonData.length; i++) {
      let parsed
      try {
        parsed = JSON.parse(jsonData[i])
      } catch (er) {
        this.emit('error', er)
        continue
      }
      super.write(parsed)
    }
    if (cb)
      cb()
  }
}
```

@@ -1,2 +0,9 @@

module.exports = Object.freeze({
// Update with any zlib constants that are added or changed in the future.
// Node v6 didn't export this, so we just hard code the version and rely
// on all the other hard-coded values from zlib v4736. When node v6
// support drops, we can just export the realZlibConstants object.
const realZlibConstants = require('zlib').constants ||
/* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
module.exports = Object.freeze(Object.assign(Object.create(null), {
Z_NO_FLUSH: 0,

@@ -26,3 +33,2 @@ Z_PARTIAL_FLUSH: 1,

Z_DEFAULT_STRATEGY: 0,
ZLIB_VERNUM: 4736,
DEFLATE: 1,

@@ -35,2 +41,4 @@ INFLATE: 2,

UNZIP: 7,
BROTLI_DECODE: 8,
BROTLI_ENCODE: 9,
Z_MIN_WINDOWBITS: 8,

@@ -47,3 +55,64 @@ Z_MAX_WINDOWBITS: 15,

Z_MAX_LEVEL: 9,
Z_DEFAULT_LEVEL: -1
})
Z_DEFAULT_LEVEL: -1,
BROTLI_OPERATION_PROCESS: 0,
BROTLI_OPERATION_FLUSH: 1,
BROTLI_OPERATION_FINISH: 2,
BROTLI_OPERATION_EMIT_METADATA: 3,
BROTLI_MODE_GENERIC: 0,
BROTLI_MODE_TEXT: 1,
BROTLI_MODE_FONT: 2,
BROTLI_DEFAULT_MODE: 0,
BROTLI_MIN_QUALITY: 0,
BROTLI_MAX_QUALITY: 11,
BROTLI_DEFAULT_QUALITY: 11,
BROTLI_MIN_WINDOW_BITS: 10,
BROTLI_MAX_WINDOW_BITS: 24,
BROTLI_LARGE_MAX_WINDOW_BITS: 30,
BROTLI_DEFAULT_WINDOW: 22,
BROTLI_MIN_INPUT_BLOCK_BITS: 16,
BROTLI_MAX_INPUT_BLOCK_BITS: 24,
BROTLI_PARAM_MODE: 0,
BROTLI_PARAM_QUALITY: 1,
BROTLI_PARAM_LGWIN: 2,
BROTLI_PARAM_LGBLOCK: 3,
BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
BROTLI_PARAM_SIZE_HINT: 5,
BROTLI_PARAM_LARGE_WINDOW: 6,
BROTLI_PARAM_NPOSTFIX: 7,
BROTLI_PARAM_NDIRECT: 8,
BROTLI_DECODER_RESULT_ERROR: 0,
BROTLI_DECODER_RESULT_SUCCESS: 1,
BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
BROTLI_DECODER_NO_ERROR: 0,
BROTLI_DECODER_SUCCESS: 1,
BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
BROTLI_DECODER_ERROR_UNREACHABLE: -31,
}, realZlibConstants))

@@ -8,3 +8,3 @@ 'use strict'

const constants = exports.constants = require('./constants.js')
const MiniPass = require('minipass')
const Minipass = require('minipass')

@@ -14,6 +14,12 @@ const OriginalBufferConcat = Buffer.concat

class ZlibError extends Error {
constructor (msg, errno) {
super('zlib: ' + msg)
this.errno = errno
this.code = codes.get(errno)
constructor (err) {
super('zlib: ' + err.message)
this.code = err.code
this.errno = err.errno
/* istanbul ignore if */
if (!this.code)
this.code = 'ZLIB_ERROR'
this.message = 'zlib: ' + err.message
Error.captureStackTrace(this, this.constructor)
}

@@ -26,32 +32,2 @@

// translation table for return codes.
const codes = new Map([
[constants.Z_OK, 'Z_OK'],
[constants.Z_STREAM_END, 'Z_STREAM_END'],
[constants.Z_NEED_DICT, 'Z_NEED_DICT'],
[constants.Z_ERRNO, 'Z_ERRNO'],
[constants.Z_STREAM_ERROR, 'Z_STREAM_ERROR'],
[constants.Z_DATA_ERROR, 'Z_DATA_ERROR'],
[constants.Z_MEM_ERROR, 'Z_MEM_ERROR'],
[constants.Z_BUF_ERROR, 'Z_BUF_ERROR'],
[constants.Z_VERSION_ERROR, 'Z_VERSION_ERROR']
])
const validFlushFlags = new Set([
constants.Z_NO_FLUSH,
constants.Z_PARTIAL_FLUSH,
constants.Z_SYNC_FLUSH,
constants.Z_FULL_FLUSH,
constants.Z_FINISH,
constants.Z_BLOCK
])
const strategies = new Set([
constants.Z_FILTERED,
constants.Z_HUFFMAN_ONLY,
constants.Z_RLE,
constants.Z_FIXED,
constants.Z_DEFAULT_STRATEGY
])
// the Zlib class they all inherit from

@@ -63,86 +39,41 @@ // This thing manages the queue of requests, and returns

const _flushFlag = Symbol('flushFlag')
const _finishFlush = Symbol('finishFlush')
const _finishFlushFlag = Symbol('finishFlushFlag')
const _fullFlushFlag = Symbol('fullFlushFlag')
const _handle = Symbol('handle')
const _onError = Symbol('onError')
const _sawError = Symbol('sawError')
const _level = Symbol('level')
const _strategy = Symbol('strategy')
const _ended = Symbol('ended')
const _defaultFullFlush = Symbol('_defaultFullFlush')
class Zlib extends MiniPass {
class ZlibBase extends Minipass {
constructor (opts, mode) {
if (!opts || typeof opts !== 'object')
throw new TypeError('invalid options for ZlibBase constructor')
super(opts)
this[_ended] = false
this[_opts] = opts = opts || {}
if (opts.flush && !validFlushFlags.has(opts.flush)) {
throw new TypeError('Invalid flush flag: ' + opts.flush)
}
if (opts.finishFlush && !validFlushFlags.has(opts.finishFlush)) {
throw new TypeError('Invalid flush flag: ' + opts.finishFlush)
}
this[_opts] = opts
this[_flushFlag] = opts.flush || constants.Z_NO_FLUSH
this[_finishFlush] = typeof opts.finishFlush !== 'undefined' ?
opts.finishFlush : constants.Z_FINISH
if (opts.chunkSize) {
if (opts.chunkSize < constants.Z_MIN_CHUNK) {
throw new RangeError('Invalid chunk size: ' + opts.chunkSize)
}
this[_flushFlag] = opts.flush
this[_finishFlushFlag] = opts.finishFlush
// this will throw if any options are invalid for the class selected
try {
this[_handle] = new realZlib[mode](opts)
} catch (er) {
// make sure that all errors get decorated properly
throw new ZlibError(er)
}
if (opts.windowBits) {
if (opts.windowBits < constants.Z_MIN_WINDOWBITS ||
opts.windowBits > constants.Z_MAX_WINDOWBITS) {
throw new RangeError('Invalid windowBits: ' + opts.windowBits)
}
}
if (opts.level) {
if (opts.level < constants.Z_MIN_LEVEL ||
opts.level > constants.Z_MAX_LEVEL) {
throw new RangeError('Invalid compression level: ' + opts.level)
}
}
if (opts.memLevel) {
if (opts.memLevel < constants.Z_MIN_MEMLEVEL ||
opts.memLevel > constants.Z_MAX_MEMLEVEL) {
throw new RangeError('Invalid memLevel: ' + opts.memLevel)
}
}
if (opts.strategy && !(strategies.has(opts.strategy)))
throw new TypeError('Invalid strategy: ' + opts.strategy)
if (opts.dictionary) {
if (!(opts.dictionary instanceof Buffer)) {
throw new TypeError('Invalid dictionary: it should be a Buffer instance')
}
}
this[_handle] = new realZlib[mode](opts)
this[_onError] = (err) => {
this[_sawError] = true
// there is no way to cleanly recover.
// continuing only obscures problems.
this.close()
const error = new ZlibError(err.message, err.errno)
this.emit('error', error)
this.emit('error', err)
}
this[_handle].on('error', this[_onError])
const level = typeof opts.level === 'number' ? opts.level
: constants.Z_DEFAULT_COMPRESSION
var strategy = typeof opts.strategy === 'number' ? opts.strategy
: constants.Z_DEFAULT_STRATEGY
// API changed in node v9
/* istanbul ignore next */
this[_level] = level
this[_strategy] = strategy
this.once('end', this.close)
this[_handle].on('error', er => this[_onError](new ZlibError(er)))
this.once('end', () => this.close)
}

@@ -158,56 +89,16 @@

params (level, strategy) {
if (!this[_handle])
throw new Error('cannot switch params when binding is closed')
// no way to test this without also not supporting params at all
/* istanbul ignore if */
if (!this[_handle].params)
throw new Error('not supported in this implementation')
if (level < constants.Z_MIN_LEVEL ||
level > constants.Z_MAX_LEVEL) {
throw new RangeError('Invalid compression level: ' + level)
}
if (!(strategies.has(strategy)))
throw new TypeError('Invalid strategy: ' + strategy)
if (this[_level] !== level || this[_strategy] !== strategy) {
this.flush(constants.Z_SYNC_FLUSH)
reset () {
if (!this[_sawError]) {
assert(this[_handle], 'zlib binding closed')
// .params() calls .flush(), but the latter is always async in the
// core zlib. We override .flush() temporarily to intercept that and
// flush synchronously.
const origFlush = this[_handle].flush
this[_handle].flush = (flushFlag, cb) => {
this[_handle].flush = origFlush
this.flush(flushFlag)
cb()
}
this[_handle].params(level, strategy)
/* istanbul ignore else */
if (this[_handle]) {
this[_level] = level
this[_strategy] = strategy
}
return this[_handle].reset()
}
}
reset () {
assert(this[_handle], 'zlib binding closed')
return this[_handle].reset()
}
flush (kind) {
if (kind === undefined)
kind = constants.Z_FULL_FLUSH
flush (flushFlag) {
if (this.ended)
return
const flushFlag = this[_flushFlag]
this[_flushFlag] = kind
this.write(Buffer.alloc(0))
this[_flushFlag] = flushFlag
if (typeof flushFlag !== 'number')
flushFlag = this[_fullFlushFlag]
this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
}

@@ -218,3 +109,3 @@

this.write(chunk, encoding)
this.flush(this[_finishFlush])
this.flush(this[_finishFlushFlag])
this[_ended] = true

@@ -237,2 +128,4 @@ return super.end(null, null, cb)

if (this[_sawError])
return
assert(this[_handle], 'zlib binding closed')

@@ -252,7 +145,13 @@

try {
result = this[_handle]._processChunk(chunk, this[_flushFlag])
const flushFlag = typeof chunk[_flushFlag] === 'number'
? chunk[_flushFlag] : this[_flushFlag]
result = this[_handle]._processChunk(chunk, flushFlag)
// if we don't throw, reset it back how it was
Buffer.concat = OriginalBufferConcat
} catch (err) {
this[_onError](err)
// or if we do, put Buffer.concat() back before we emit error
// Error events call into user code, which may call Buffer.concat()
Buffer.concat = OriginalBufferConcat
this[_onError](new ZlibError(err))
} finally {
Buffer.concat = OriginalBufferConcat
if (this[_handle]) {

@@ -291,2 +190,52 @@ // Core zlib resets `_handle` to null after attempting to close the

// The classic zlib (deflate/gzip family) stream. Builds on ZlibBase by
// supplying zlib-flavored default flush flags and by tracking the
// level/strategy pair so params() can detect real changes.
class Zlib extends ZlibBase {
  constructor (opts, mode) {
    opts = opts || {}

    // zlib-specific defaults; ZlibBase itself is flush-flag agnostic.
    opts.flush = opts.flush || constants.Z_NO_FLUSH
    opts.finishFlush = opts.finishFlush || constants.Z_FINISH
    super(opts, mode)

    this[_fullFlushFlag] = constants.Z_FULL_FLUSH
    this[_level] = opts.level
    this[_strategy] = opts.strategy
  }

  // Switch compression level/strategy mid-stream. Flushes pending data
  // with Z_SYNC_FLUSH first, so bytes already written are compressed
  // under the old parameters. No-op after an error or when the pair is
  // unchanged.
  params (level, strategy) {
    if (this[_sawError])
      return

    if (!this[_handle])
      throw new Error('cannot switch params when binding is closed')

    // no way to test this without also not supporting params at all
    /* istanbul ignore if */
    if (!this[_handle].params)
      throw new Error('not supported in this implementation')

    if (this[_level] !== level || this[_strategy] !== strategy) {
      this.flush(constants.Z_SYNC_FLUSH)
      assert(this[_handle], 'zlib binding closed')
      // .params() calls .flush(), but the latter is always async in the
      // core zlib. We override .flush() temporarily to intercept that and
      // flush synchronously.
      const origFlush = this[_handle].flush
      this[_handle].flush = (flushFlag, cb) => {
        this.flush(flushFlag)
        cb()
      }
      try {
        this[_handle].params(level, strategy)
      } finally {
        // restore the real flush even if params() throws
        this[_handle].flush = origFlush
      }
      /* istanbul ignore else */
      if (this[_handle]) {
        this[_level] = level
        this[_strategy] = strategy
      }
    }
  }
}
// minimal 2-byte header

@@ -338,2 +287,27 @@ class Deflate extends Zlib {

// Shared base for the Brotli streams: identical plumbing to Zlib, except
// that the default flush flags come from the BROTLI_OPERATION_* constant
// set instead of the classic zlib ones.
class Brotli extends ZlibBase {
  constructor (opts, mode) {
    const options = opts || {}
    if (!options.flush)
      options.flush = constants.BROTLI_OPERATION_PROCESS
    if (!options.finishFlush)
      options.finishFlush = constants.BROTLI_OPERATION_FINISH
    super(options, mode)
    this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
  }
}
// Brotli compression stream, backed by the core 'BrotliCompress' binding.
class BrotliCompress extends Brotli {
  constructor (opts) {
    super(opts, 'BrotliCompress')
  }
}
// Brotli decompression stream, backed by the core 'BrotliDecompress' binding.
class BrotliDecompress extends Brotli {
  constructor (opts) {
    super(opts, 'BrotliDecompress')
  }
}
exports.Deflate = Deflate

@@ -346,1 +320,12 @@ exports.Inflate = Inflate

exports.Unzip = Unzip
// Brotli is only exported for real when the underlying zlib binding
// provides it; otherwise export a stub class that throws on construction.
/* istanbul ignore else */
if (typeof realZlib.BrotliCompress === 'function') {
  exports.BrotliCompress = BrotliCompress
  exports.BrotliDecompress = BrotliDecompress
} else {
  exports.BrotliCompress = exports.BrotliDecompress = class {
    constructor () {
      throw new Error('Brotli is not supported in this version of Node.js')
    }
  }
}
{
"_args": [
[
"minizlib@1.2.1",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "minizlib@1.2.1",
"_id": "minizlib@1.2.1",
"_from": "minizlib@^1.2.1",
"_id": "minizlib@1.3.3",
"_inBundle": false,
"_integrity": "sha512-7+4oTUOWKg7AuL3vloEWekXY2/D20cevzsrNT2kGWm+39J9hGTCBv8VI5Pm5lXZ/o3/mdR4f8rflAPhnQb8mPA==",
"_integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==",
"_location": "/minizlib",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "minizlib@1.2.1",
"raw": "minizlib@^1.2.1",
"name": "minizlib",
"escapedName": "minizlib",
"rawSpec": "1.2.1",
"rawSpec": "^1.2.1",
"saveSpec": null,
"fetchSpec": "1.2.1"
"fetchSpec": "^1.2.1"
},

@@ -27,5 +21,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.2.1.tgz",
"_spec": "1.2.1",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz",
"_shasum": "2290de96818a34c29551c8a8d301216bd65a861d",
"_spec": "minizlib@^1.2.1",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/tar",
"author": {

@@ -39,5 +34,7 @@ "name": "Isaac Z. Schlueter",

},
"bundleDependencies": false,
"dependencies": {
"minipass": "^2.2.1"
"minipass": "^2.9.0"
},
"deprecated": false,
"description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",

@@ -75,3 +72,3 @@ "devDependencies": {

},
"version": "1.2.1"
"version": "1.3.3"
}
# minizlib
A tiny fast zlib stream built on [minipass](http://npm.im/minipass)
and Node.js's zlib binding.
A fast zlib stream built on [minipass](http://npm.im/minipass) and
Node.js's zlib binding.
This module was created to serve the needs of
[node-tar](http://npm.im/tar) v2. If your needs are different, then
it may not be for you.
[node-tar](http://npm.im/tar) and
[minipass-fetch](http://npm.im/minipass-fetch).
Brotli is supported in versions of node with a Brotli binding.
## How does this differ from the streams in `require('zlib')`?

@@ -14,8 +16,10 @@

buffer. If you want those, use the built-in `zlib` module. This is
only streams.
only streams. That being said, Minipass streams make it fairly easy to
use as one-liners: `new zlib.Deflate().end(data).read()` will return the
deflate compressed result.
This module compresses and decompresses the data as fast as you feed
it in. It is synchronous, and runs on the main process thread. Zlib
operations can be high CPU, but they're very fast, and doing it this
way means much less bookkeeping and artificial deferral.
and Brotli operations can be high CPU, but they're very fast, and doing it
this way means much less bookkeeping and artificial deferral.

@@ -26,21 +30,26 @@ Node's built in zlib streams are built on top of `stream.Transform`.

This module _does_ support backpressure, and will buffer output chunks
that are not consumed, but is less of a mediator between the input and
output. There are no high or low watermarks, no state objects, and no
artificial async deferrals. It will not protect you from Zalgo.
See [Minipass](http://npm.im/minipass) for more on the differences between
Node.js core streams and Minipass streams, and the convenience methods
provided by that class.
If you write, data will be emitted right away. If you write
everything synchronously in one tick, and you are listening to the
`data` event to consume it, then it'll all be emitted right away in
that same tick. If you want data to be emitted in the next tick, then
write it in the next tick.
## Classes
It is thus the responsibility of the reader and writer to manage their
own consumption and process execution flow.
- Deflate
- Inflate
- Gzip
- Gunzip
- DeflateRaw
- InflateRaw
- Unzip
- BrotliCompress (Node v10 and higher)
- BrotliDecompress (Node v10 and higher)
The goal is to compress and decompress as fast as possible, even for
files that are too large to store all in one buffer.
## USAGE
The API is very similar to the built-in zlib module. There are
classes that you instantiate with `new` and they are streams that can
be piped together.
```js
const zlib = require('minizlib')
const input = sourceOfCompressedData()
const decode = new zlib.BrotliDecompress()
const output = whereToWriteTheDecodedData()
input.pipe(decode).pipe(output)
```

@@ -31,3 +31,3 @@ /**

return parse(val);
} else if (type === 'number' && isNaN(val) === false) {
} else if (type === 'number' && isFinite(val)) {
return options.long ? fmtLong(val) : fmtShort(val);

@@ -54,3 +54,3 @@ }

}
var match = /^((?:\d+)?\-?\d?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
str

@@ -57,0 +57,0 @@ );

{
"_args": [
[
"ms@2.1.1",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "ms@2.1.1",
"_id": "ms@2.1.1",
"_from": "ms@^2.1.1",
"_id": "ms@2.1.2",
"_inBundle": false,
"_integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==",
"_integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"_location": "/needle/ms",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "ms@2.1.1",
"raw": "ms@^2.1.1",
"name": "ms",
"escapedName": "ms",
"rawSpec": "2.1.1",
"rawSpec": "^2.1.1",
"saveSpec": null,
"fetchSpec": "2.1.1"
"fetchSpec": "^2.1.1"
},

@@ -27,8 +21,11 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
"_spec": "2.1.1",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"_shasum": "d09d1f357b443f493382a8eb3ccd183872ae6009",
"_spec": "ms@^2.1.1",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/needle/node_modules/debug",
"bugs": {
"url": "https://github.com/zeit/ms/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Tiny millisecond conversion utility",

@@ -72,3 +69,3 @@ "devDependencies": {

},
"version": "2.1.1"
"version": "2.1.2"
}
# ms
[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms)
[![Slack Channel](http://zeit-slackin.now.sh/badge.svg)](https://zeit.chat/)
[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit)

@@ -6,0 +6,0 @@ Use this package to easily convert various time formats to milliseconds.

# node-pre-gyp changelog
## 0.14.0
- Defer modules requires in napi.js (https://github.com/mapbox/node-pre-gyp/pull/434)
- Bump dependency on `tar` from `^4` to `^4.4.2` (https://github.com/mapbox/node-pre-gyp/pull/454)
- Support extracting compiled binary from local offline mirror (https://github.com/mapbox/node-pre-gyp/pull/459)
- Added Node 13 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/483)
## 0.13.0
- Added Node 12 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/449)
## 0.12.0

@@ -4,0 +15,0 @@

@@ -94,7 +94,5 @@ "use strict";

var badDownload = false;
var extractCount = 0;
var hasResponse = false;
var tar = require('tar');
function afterTarball(err) {
function afterExtract(err, extractCount) {
if (err) return callback(err);

@@ -109,7 +107,2 @@ if (badDownload) return callback(new Error("bad download"));

function filter_func(entry) {
log.info('install','unpacking ' + entry.path);
extractCount++;
}
// for request compatibility

@@ -156,7 +149,3 @@ req.on('error', function(err) {

// start unzipping and untaring
req.pipe(tar.extract({
cwd: to,
strip: 1,
onentry: filter_func
}).on('close', afterTarball).on('error', callback));
req.pipe(extract(to, afterExtract));
});

@@ -166,2 +155,38 @@ });

// Extract a compiled-binary tarball from a local offline mirror (`from`)
// into the target directory `to`. Invokes `callback(err)` exactly once:
// with an error if the file is missing, unreadable, or yields no entries,
// or with no arguments on success.
function extract_from_local(from, to, callback) {
  if (!fs.existsSync(from)) {
    return callback(new Error('Cannot find file ' + from));
  }
  log.info('Found local file to extract from ' + from);

  // Guard so the callback fires at most once even if both the source
  // stream and the extract stream report a failure.
  var called = false;
  function done(err) {
    if (called) return;
    called = true;
    callback(err);
  }

  function afterExtract(err, extractCount) {
    if (err) return done(err);
    if (extractCount === 0) {
      return done(new Error('There was a fatal problem while extracting the tarball'));
    }
    log.info('tarball', 'done parsing tarball');
    done();
  }

  // Handle read errors (e.g. permission denied) explicitly; otherwise an
  // unhandled 'error' event on the source stream would crash the process.
  fs.createReadStream(from)
    .on('error', done)
    .pipe(extract(to, afterExtract));
}
// Build a tar extraction stream targeting the directory `to`. When the
// stream closes, `callback(err, extractCount)` receives the number of
// entries unpacked, which callers use to detect empty/corrupt tarballs.
function extract(to, callback) {
  var tar = require('tar');
  var extractCount = 0;

  // Count (and log) each entry as it is unpacked.
  var onEntry = function(entry) {
    log.info('install','unpacking ' + entry.path);
    extractCount++;
  };

  var onClose = function(err) {
    callback(err, extractCount);
  };

  return tar.extract({
    cwd: to,
    strip: 1,
    onentry: onEntry
  }).on('close', onClose).on('error', callback);
}
function do_build(gyp,argv,callback) {

@@ -242,3 +267,8 @@ var args = ['rebuild'].concat(argv);

} else {
place_binary(from,to,opts,after_place);
var fileName = from.startsWith('file://') && from.replace(/^file:\/\//, '');
if (fileName) {
extract_from_local(fileName, to, after_place);
} else {
place_binary(from,to,opts,after_place);
}
}

@@ -245,0 +275,0 @@ });

@@ -1730,2 +1730,10 @@ {

},
"8.16.1": {
"node_abi": 57,
"v8": "6.2"
},
"8.16.2": {
"node_abi": 57,
"v8": "6.2"
},
"9.0.0": {

@@ -1895,2 +1903,22 @@ "node_abi": 59,

},
"10.16.0": {
"node_abi": 64,
"v8": "6.8"
},
"10.16.1": {
"node_abi": 64,
"v8": "6.8"
},
"10.16.2": {
"node_abi": 64,
"v8": "6.8"
},
"10.16.3": {
"node_abi": 64,
"v8": "6.8"
},
"10.17.0": {
"node_abi": 64,
"v8": "6.8"
},
"11.0.0": {

@@ -1960,6 +1988,86 @@ "node_abi": 67,

},
"11.15.0": {
"node_abi": 67,
"v8": "7.0"
},
"12.0.0": {
"node_abi": 72,
"v8": "7.4"
},
"12.1.0": {
"node_abi": 72,
"v8": "7.4"
},
"12.2.0": {
"node_abi": 72,
"v8": "7.4"
},
"12.3.0": {
"node_abi": 72,
"v8": "7.4"
},
"12.3.1": {
"node_abi": 72,
"v8": "7.4"
},
"12.4.0": {
"node_abi": 72,
"v8": "7.4"
},
"12.5.0": {
"node_abi": 72,
"v8": "7.5"
},
"12.6.0": {
"node_abi": 72,
"v8": "7.5"
},
"12.7.0": {
"node_abi": 72,
"v8": "7.5"
},
"12.8.0": {
"node_abi": 72,
"v8": "7.5"
},
"12.8.1": {
"node_abi": 72,
"v8": "7.5"
},
"12.9.0": {
"node_abi": 72,
"v8": "7.6"
},
"12.9.1": {
"node_abi": 72,
"v8": "7.6"
},
"12.10.0": {
"node_abi": 72,
"v8": "7.6"
},
"12.11.0": {
"node_abi": 72,
"v8": "7.7"
},
"12.11.1": {
"node_abi": 72,
"v8": "7.7"
},
"12.12.0": {
"node_abi": 72,
"v8": "7.7"
},
"12.13.0": {
"node_abi": 72,
"v8": "7.7"
},
"13.0.0": {
"node_abi": 79,
"v8": "7.8"
},
"13.0.1": {
"node_abi": 79,
"v8": "7.8"
}
}
"use strict";
var fs = require('fs');
var rm = require('rimraf');
var log = require('npmlog');

@@ -118,2 +116,3 @@ module.exports = exports;

module.exports.get_napi_build_versions = function(package_json, opts, warnings) { // opts may be undefined
var log = require('npmlog');
var napi_build_versions = [];

@@ -171,2 +170,3 @@ var supported_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined);

if (napi_build_version) {
var rm = require('rimraf');
rm.sync(module.exports.get_build_dir(napi_build_version));

@@ -179,2 +179,3 @@ fs.renameSync('build', module.exports.get_build_dir(napi_build_version));

if (napi_build_version) {
var rm = require('rimraf');
rm.sync('build');

@@ -181,0 +182,0 @@ fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build');

{
"_args": [
[
"node-pre-gyp@0.13.0",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "node-pre-gyp@0.13.0",
"_id": "node-pre-gyp@0.13.0",
"_from": "node-pre-gyp@^0.14.0",
"_id": "node-pre-gyp@0.14.0",
"_inBundle": false,
"_integrity": "sha512-Md1D3xnEne8b/HGVQkZZwV27WUi1ZRuZBij24TNaZwUPU3ZAFtvT6xxJGaUVillfmMKnn5oD1HoGsp2Ftik7SQ==",
"_integrity": "sha512-+CvDC7ZttU/sSt9rFjix/P05iS43qHCOOGzcr3Ry99bXG7VX953+vFyEuph/tfqoYu8dttBkE86JSKBO2OzcxA==",
"_location": "/node-pre-gyp",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "node-pre-gyp@0.13.0",
"raw": "node-pre-gyp@^0.14.0",
"name": "node-pre-gyp",
"escapedName": "node-pre-gyp",
"rawSpec": "0.13.0",
"rawSpec": "^0.14.0",
"saveSpec": null,
"fetchSpec": "0.13.0"
"fetchSpec": "^0.14.0"
},

@@ -27,4 +21,5 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.13.0.tgz",
"_spec": "0.13.0",
"_resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.14.0.tgz",
"_shasum": "9a0596533b877289bcad4e143982ca3d904ddc83",
"_spec": "node-pre-gyp@^0.14.0",
"_where": "/home/xumeng/git/nodejs-idb-connector",

@@ -41,2 +36,3 @@ "author": {

},
"bundleDependencies": false,
"dependencies": {

@@ -52,4 +48,5 @@ "detect-libc": "^1.0.2",

"semver": "^5.3.0",
"tar": "^4"
"tar": "^4.4.2"
},
"deprecated": false,
"description": "Node.js native addon binary install tool",

@@ -91,3 +88,3 @@ "devDependencies": {

},
"version": "0.13.0"
"version": "0.14.0"
}

@@ -37,2 +37,3 @@ 'use strict'

'.*.swp',
'.DS_Store',
'**/.DS_Store/**',

@@ -54,2 +55,5 @@ '._*',

// There may be others, but :?|<> are handled by node-tar
const nameIsBadForWindows = file => /\*/.test(file)
// a decorator that applies our custom rules to an ignore walker

@@ -95,2 +99,12 @@ const npmWalker = Class => class Walker extends Class {

onReaddir (entries) {
if (!this.parent) {
entries = entries.filter(e =>
e !== '.git' &&
!(e === 'node_modules' && this.bundled.length === 0)
)
}
return super.onReaddir(entries)
}
filterEntry (entry, partial) {

@@ -153,2 +167,10 @@ // get the partial path from the root of the walk

// also, don't ignore the package.json itself!
//
// Weird side-effect of this: a readme (etc) file will be included
// if it exists anywhere within a folder with a package.json file.
// The original intent was only to include these files in the root,
// but now users in the wild are dependent on that behavior for
// localized documentation and other use cases. Adding a `/` to
// these rules, while tempting and arguably more "correct", is a
// breaking change.
const rules = [

@@ -158,2 +180,3 @@ pkg.browser ? '!' + pkg.browser : '',

'!package.json',
'!npm-shrinkwrap.json',
'!@(readme|copying|license|licence|notice|changes|changelog|history){,.*[^~$]}'

@@ -179,2 +202,12 @@ ]

// override parent stat function to completely skip any filenames
// that will break windows entirely.
// XXX(isaacs) Next major version should make this an error instead.
stat (entry, file, dir, then) {
if (nameIsBadForWindows(entry))
then()
else
super.stat(entry, file, dir, then)
}
// override parent onstat function to nix all symlinks

@@ -243,12 +276,17 @@ onstat (st, entry, file, dir, then) {

// package.json first, node_modules last, files before folders, alphasort
const sort = (a, b) =>
a === 'package.json' ? -1
: b === 'package.json' ? 1
: /^node_modules/.test(a) && !/^node_modules/.test(b) ? 1
: /^node_modules/.test(b) && !/^node_modules/.test(a) ? -1
: path.dirname(a) === '.' && path.dirname(b) !== '.' ? -1
: path.dirname(b) === '.' && path.dirname(a) !== '.' ? 1
: a.localeCompare(b)
// optimize for compressibility
// extname, then basename, then locale alphabetically
// https://twitter.com/isntitvacant/status/1131094910923231232
const sort = (a, b) => {
const exta = path.extname(a).toLowerCase()
const extb = path.extname(b).toLowerCase()
const basea = path.basename(a).toLowerCase()
const baseb = path.basename(b).toLowerCase()
return exta.localeCompare(extb) ||
basea.localeCompare(baseb) ||
a.localeCompare(b)
}
module.exports = walk

@@ -255,0 +293,0 @@ walk.sync = walkSync

{
"_args": [
[
"npm-packlist@1.4.1",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "npm-packlist@1.4.1",
"_id": "npm-packlist@1.4.1",
"_from": "npm-packlist@^1.1.6",
"_id": "npm-packlist@1.4.6",
"_inBundle": false,
"_integrity": "sha512-+TcdO7HJJ8peiiYhvPxsEDhF3PJFGUGRcFsGve3vxvxdcpO2Z4Z7rkosRM0kWj6LfbK/P0gu3dzk5RU1ffvFcw==",
"_integrity": "sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg==",
"_location": "/npm-packlist",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "npm-packlist@1.4.1",
"raw": "npm-packlist@^1.1.6",
"name": "npm-packlist",
"escapedName": "npm-packlist",
"rawSpec": "1.4.1",
"rawSpec": "^1.1.6",
"saveSpec": null,
"fetchSpec": "1.4.1"
"fetchSpec": "^1.1.6"
},

@@ -27,5 +21,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.1.tgz",
"_spec": "1.4.1",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.6.tgz",
"_shasum": "53ba3ed11f8523079f1457376dd379ee4ea42ff4",
"_spec": "npm-packlist@^1.1.6",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/node-pre-gyp",
"author": {

@@ -39,2 +34,3 @@ "name": "Isaac Z. Schlueter",

},
"bundleDependencies": false,
"dependencies": {

@@ -44,2 +40,3 @@ "ignore-walk": "^3.0.1",

},
"deprecated": false,
"description": "Get a list of the files to add from a folder into an npm package",

@@ -49,3 +46,3 @@ "devDependencies": {

"rimraf": "^2.6.1",
"tap": "^12.0.1"
"tap": "^14.6.9"
},

@@ -67,8 +64,12 @@ "directories": {

"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postpublish": "git push origin --follow-tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100 -J"
"snap": "tap",
"test": "tap"
},
"version": "1.4.1"
"tap": {
"jobs": 1
},
"version": "1.4.6"
}
'use strict';
if (!process.version ||
if (typeof process === 'undefined' ||
!process.version ||
process.version.indexOf('v0.') === 0 ||

@@ -5,0 +6,0 @@ process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {

{
"_args": [
[
"process-nextick-args@2.0.0",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "process-nextick-args@2.0.0",
"_id": "process-nextick-args@2.0.0",
"_from": "process-nextick-args@~2.0.0",
"_id": "process-nextick-args@2.0.1",
"_inBundle": false,
"_integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==",
"_integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==",
"_location": "/process-nextick-args",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "process-nextick-args@2.0.0",
"raw": "process-nextick-args@~2.0.0",
"name": "process-nextick-args",
"escapedName": "process-nextick-args",
"rawSpec": "2.0.0",
"rawSpec": "~2.0.0",
"saveSpec": null,
"fetchSpec": "2.0.0"
"fetchSpec": "~2.0.0"
},

@@ -27,5 +21,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz",
"_spec": "2.0.0",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
"_shasum": "7820d9b16120cc55ca9ae7792680ae7dba6d7fe2",
"_spec": "process-nextick-args@~2.0.0",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/readable-stream",
"author": "",

@@ -35,2 +30,4 @@ "bugs": {

},
"bundleDependencies": false,
"deprecated": false,
"description": "process.nextTick but always with args",

@@ -54,3 +51,3 @@ "devDependencies": {

},
"version": "2.0.0"
"version": "2.0.1"
}
'use strict'
// Tar can encode large and negative numbers using a leading byte of
// 0xff for negative, and 0x80 for positive. The trailing byte in the
// section will always be 0x20, or in some implementations 0x00.
// this module encodes and decodes these things.
// 0xff for negative, and 0x80 for positive.
const encode = exports.encode = (num, buf) => {
buf[buf.length - 1] = 0x20
if (num < 0)
if (!Number.isSafeInteger(num))
// The number is so large that javascript cannot represent it with integer
// precision.
throw TypeError('cannot encode number outside of javascript safe integer range')
else if (num < 0)
encodeNegative(num, buf)

@@ -18,9 +19,6 @@ else

buf[0] = 0x80
for (var i = buf.length - 2; i > 0; i--) {
if (num === 0)
buf[i] = 0
else {
buf[i] = num % 0x100
num = Math.floor(num / 0x100)
}
for (var i = buf.length; i > 1; i--) {
buf[i-1] = num & 0xff
num = Math.floor(num / 0x100)
}

@@ -33,17 +31,12 @@ }

num = num * -1
for (var i = buf.length - 2; i > 0; i--) {
var byte
if (num === 0)
byte = 0
else {
byte = num % 0x100
num = Math.floor(num / 0x100)
}
for (var i = buf.length; i > 1; i--) {
var byte = num & 0xff
num = Math.floor(num / 0x100)
if (flipped)
buf[i] = onesComp(byte)
buf[i-1] = onesComp(byte)
else if (byte === 0)
buf[i] = 0
buf[i-1] = 0
else {
flipped = true
buf[i] = twosComp(byte)
buf[i-1] = twosComp(byte)
}

@@ -56,4 +49,16 @@ }

var pre = buf[0]
return pre === 0x80 ? pos(buf.slice(1, buf.length - 1))
: twos(buf.slice(1, buf.length - 1))
var value;
if (pre === 0x80)
value = pos(buf.slice(1, buf.length))
else if (pre === 0xff)
value = twos(buf)
else
throw TypeError('invalid base256 encoding')
if (!Number.isSafeInteger(value))
// The number is so large that javascript cannot represent it with integer
// precision.
throw TypeError('parsed number outside of javascript safe integer range')
return value
}

@@ -77,5 +82,5 @@

if (f !== 0)
sum += f * Math.pow(256, len - i - 1)
sum -= f * Math.pow(256, len - i - 1)
}
return sum * -1
return sum
}

@@ -82,0 +87,0 @@

@@ -72,3 +72,3 @@ 'use strict'

if (dir === cwd)
return fs.lstat(dir, (er, st) => {
return fs.stat(dir, (er, st) => {
if (er || !st.isDirectory())

@@ -158,3 +158,3 @@ er = new CwdError(dir, er && er.code || 'ENOTDIR')

try {
ok = fs.lstatSync(dir).isDirectory()
ok = fs.statSync(dir).isDirectory()
} catch (er) {

@@ -161,0 +161,0 @@ code = er.code

@@ -105,3 +105,8 @@ 'use strict'

[CONSUMEHEADER] (chunk, position) {
const header = new Header(chunk, position, this[EX], this[GEX])
let header
try {
header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
return this.warn('invalid entry', er)
}

@@ -108,0 +113,0 @@ if (header.nullBlock)

@@ -9,2 +9,6 @@ 'use strict'

super()
// read entries always start life paused. this is to avoid the
// situation where Minipass's auto-ending empty streams results
// in an entry ending before we're ready for it.
this.pause()
this.extended = ex

@@ -11,0 +15,0 @@ this.globalExtended = gex

@@ -234,3 +234,3 @@ 'use strict'

er.code = 'EOF'
this[CLOSE](fd)
this[CLOSE](fd, _ => _)
return this.emit('error', er)

@@ -244,3 +244,3 @@ }

er.code = 'EOF'
this[CLOSE](fd)
this[CLOSE](fd, _ => _)
return this.emit('error', er)

@@ -247,0 +247,0 @@ }

{
"_args": [
[
"tar@4.4.8",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "tar@4.4.8",
"_id": "tar@4.4.8",
"_from": "tar@^4.4.2",
"_id": "tar@4.4.13",
"_inBundle": false,
"_integrity": "sha512-LzHF64s5chPQQS0IYBn9IN5h3i98c12bo4NCO7e0sGM2llXQ3p2FGC5sdENN4cTW48O915Sh+x+EXx7XW96xYQ==",
"_integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==",
"_location": "/tar",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "tar@4.4.8",
"raw": "tar@^4.4.2",
"name": "tar",
"escapedName": "tar",
"rawSpec": "4.4.8",
"rawSpec": "^4.4.2",
"saveSpec": null,
"fetchSpec": "4.4.8"
"fetchSpec": "^4.4.2"
},

@@ -27,5 +21,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/tar/-/tar-4.4.8.tgz",
"_spec": "4.4.8",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz",
"_shasum": "43b364bc52888d555298637b10d60790254ab525",
"_spec": "tar@^4.4.2",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/node-pre-gyp",
"author": {

@@ -39,11 +34,13 @@ "name": "Isaac Z. Schlueter",

},
"bundleDependencies": false,
"dependencies": {
"chownr": "^1.1.1",
"fs-minipass": "^1.2.5",
"minipass": "^2.3.4",
"minizlib": "^1.1.1",
"minipass": "^2.8.6",
"minizlib": "^1.2.1",
"mkdirp": "^0.5.0",
"safe-buffer": "^5.1.2",
"yallist": "^3.0.2"
"yallist": "^3.0.3"
},
"deprecated": false,
"description": "tar for node",

@@ -55,4 +52,4 @@ "devDependencies": {

"mutate-fs": "^2.1.1",
"rimraf": "^2.6.2",
"tap": "^12.0.1",
"rimraf": "^2.6.3",
"tap": "^14.6.5",
"tar-fs": "^1.16.3",

@@ -78,8 +75,12 @@ "tar-stream": "^1.6.2"

"genparse": "node scripts/generate-parse-fixtures.js",
"postpublish": "git push origin --all; git push origin --tags",
"postpublish": "git push origin --follow-tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100 -J --coverage-report=text -c"
"test": "tap"
},
"version": "4.4.8"
"tap": {
"coverage-map": "map.js",
"check-coverage": true
},
"version": "4.4.13"
}
{
"_args": [
[
"yallist@3.0.3",
"/home/xumeng/git/nodejs-idb-connector"
]
],
"_from": "yallist@3.0.3",
"_id": "yallist@3.0.3",
"_from": "yallist@^3.0.3",
"_id": "yallist@3.1.1",
"_inBundle": false,
"_integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==",
"_integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
"_location": "/yallist",
"_phantomChildren": {},
"_requested": {
"type": "version",
"type": "range",
"registry": true,
"raw": "yallist@3.0.3",
"raw": "yallist@^3.0.3",
"name": "yallist",
"escapedName": "yallist",
"rawSpec": "3.0.3",
"rawSpec": "^3.0.3",
"saveSpec": null,
"fetchSpec": "3.0.3"
"fetchSpec": "^3.0.3"
},

@@ -28,5 +22,6 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz",
"_spec": "3.0.3",
"_where": "/home/xumeng/git/nodejs-idb-connector",
"_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
"_shasum": "dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd",
"_spec": "yallist@^3.0.3",
"_where": "/home/xumeng/git/nodejs-idb-connector/node_modules/tar",
"author": {

@@ -40,3 +35,5 @@ "name": "Isaac Z. Schlueter",

},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Yet Another Linked List",

@@ -67,3 +64,3 @@ "devDependencies": {

},
"version": "3.0.3"
"version": "3.1.1"
}

@@ -57,2 +57,4 @@ 'use strict'

node.list = null
return next
}

@@ -322,2 +324,33 @@

Yallist.prototype.splice = function (start, deleteCount /*, ...nodes */) {
if (start > this.length) {
start = this.length - 1
}
if (start < 0) {
start = this.length + start;
}
for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
walker = walker.next
}
var ret = []
for (var i = 0; walker && i < deleteCount; i++) {
ret.push(walker.value)
walker = this.removeNode(walker)
}
if (walker === null) {
walker = this.tail
}
if (walker !== this.head && walker !== this.tail) {
walker = walker.prev
}
for (var i = 2; i < arguments.length; i++) {
walker = insert(this, walker, arguments[i])
}
return ret;
}
Yallist.prototype.reverse = function () {

@@ -336,2 +369,19 @@ var head = this.head

function insert (self, node, value) {
var inserted = node === self.head ?
new Node(value, null, node, self) :
new Node(value, node, node.next, self)
if (inserted.next === null) {
self.tail = inserted
}
if (inserted.prev === null) {
self.head = inserted
}
self.length++
return inserted
}
function push (self, item) {

@@ -338,0 +388,0 @@ self.tail = new Node(item, self.tail, null, self)

{
"name": "idb-connector",
"version": "1.2.3",
"version": "1.2.4",
"description": "A Node.js DB2 driver for IBM i",

@@ -14,3 +14,3 @@ "os": [

"scripts": {
"test": "./node_modules/mocha/bin/mocha",
"test": "ln -sf ./build-tmp-napi-v3 ./build && ./node_modules/mocha/bin/mocha",
"install": "node-pre-gyp install --fallback-to-build"

@@ -47,12 +47,12 @@ },

"dependencies": {
"node-addon-api": "^1.6.3",
"node-pre-gyp": "^0.13.0"
"node-addon-api": "^1.7.1",
"node-pre-gyp": "^0.14.0"
},
"devDependencies": {
"bindings": "^1.3.0",
"chai": "^4.1.2",
"bindings": "^1.5.0",
"chai": "^4.2.0",
"eslint": "^5.14.1",
"eslint-config-airbnb-base": "^13.1.0",
"eslint-plugin-import": "^2.16.0",
"mocha": "^5.2.0"
"mocha": "^6.2.2"
},

@@ -59,0 +59,0 @@ "bundledDependencies": [

@@ -1,2 +0,2 @@

const {expect} = require('chai');
const { expect } = require('chai');
const db2a = require('../lib/db2a');

@@ -7,11 +7,28 @@

} = db2a;
// Test Statement Class Async Methods
describe('Statement Async Test', () => {
var dbConn, dbStmt;
before(() => {
dbConn = new dbconn();
dbConn.conn('*LOCAL');
});
after(() => {
dbConn.disconn();
dbConn.close();
});
beforeEach(() => {
dbStmt = new dbstmt(dbConn);
});
afterEach(() => {
dbStmt.close();
});
describe('async prepare', () => {
it('Prepares valid SQL and sends it to the DBMS, if fail, error is returned. ', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -30,9 +47,4 @@ if (error) {

const sql = 'INSERT INTO QIWS.QCUSTCDT(CUSNUM,LSTNAM,INIT,STREET,CITY,STATE,ZIPCOD,CDTLMT,CHGCOD,BALDUE,CDTDUE) VALUES (?,?,?,?,?,?,?,?,?,?,?) with NONE ';
const dbConn = new dbconn();
const dbConn2 = new dbconn();
dbConn.conn('*LOCAL');
dbConn2.conn('*LOCAL');
let dbStmt = new dbstmt(dbConn);
const dbStmt2 = new dbstmt(dbConn2);

@@ -85,3 +97,2 @@

rowsAfter = Number(rowsAfter);
dbStmt.close();
expect(rowsAfter).to.equal(rowsBefore + 1);

@@ -95,120 +106,100 @@ done();

});
});
describe('async execute', () => {
// it('retrieves output params from stored proc', (done) => {
// const sql = 'call QXMLSERV.iPLUG512K(?,?,?,?)';
// const ipc = '*NA';
// const ctl = '*here';
// const xmlIn = `<xmlservice><sh>system 'wrksbs'</sh></xmlservice>`;
// const xmlOut = '';
// const params = [
// [ipc, IN, CHAR],
// [ctl, IN, CHAR],
// [xmlIn, IN, CLOB],
// [xmlOut, OUT, CLOB],
// ];
describe('async execute', () => {
it('retrieves output params from stored proc', (done) => {
const sql = 'call QXMLSERV.iPLUG512K(?,?,?,?)';
const dbConn = new dbconn();
// dbStmt.prepare(sql, (error) => {
// if (error) {
// throw error;
// }
// dbStmt.bindParam(params, (error) => {
// if (error) {
// throw error;
// }
// dbStmt.execute((out, error) => {
// if (error) {
// throw error;
// }
// expect(error).to.be.null;
// expect(out).to.be.a('array');
// expect(out.length).to.be.eq(1);
// done();
// });
// });
// });
// });
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
it('executes prepared statement returns null because no output params are available', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT WHERE BALDUE > ?';
const params = [
[10.00, IN, NUMERIC],
];
const ipc = '*NA';
const ctl = '*here';
const xmlIn = `<xmlservice><sh>system 'wrksbs'</sh></xmlservice>`;
const xmlOut = '';
const params = [
[ipc, IN, CHAR],
[ctl, IN, CHAR],
[xmlIn, IN, CLOB],
[xmlOut, OUT, CLOB],
];
dbStmt.prepare(sql, (error) => {
dbStmt.prepare(sql, (error) => {
if (error) {
throw error;
}
dbStmt.bindParam(params, (error) => {
if (error) {
throw error;
}
dbStmt.bindParam(params, (error) => {
dbStmt.execute((out, error) => {
if (error) {
throw error;
}
dbStmt.execute((out, error) => {
if (error) {
throw error;
}
expect(error).to.be.null;
expect(out).to.be.a('array');
expect(out.length).to.be.eq(1);
done();
});
expect(error).to.be.null;
expect(out).to.be.null;
done();
});
});
});
it('executes prepared statement returns null because no output params are available', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT WHERE BALDUE > ?';
const dbConn = new dbconn();
const params = [
[10.00, IN, NUMERIC],
];
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {
if (error) {
throw error;
}
dbStmt.bindParam(params, (error) => {
if (error) {
throw error;
}
dbStmt.execute((out, error) => {
if (error) {
throw error;
}
expect(error).to.be.null;
expect(out).to.be.null;
done();
});
});
});
});
});
});
describe('async exec', () => {
it('performs action of given SQL String', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {
if (error) {
throw error;
}
expect(error).to.be.null;
expect(result).to.be.an('array');
expect(result.length).to.be.greaterThan(0);
done();
});
describe('async exec', () => {
it('performs action of given SQL String', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT';
dbStmt.exec(sql, (result, error) => {
if (error) {
throw error;
}
expect(error).to.be.null;
expect(result).to.be.an('array');
expect(result.length).to.be.greaterThan(0);
done();
});
});
});
describe('async fetchAll', () => {
it('retrieves all rows from execute function:', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {
describe('async fetchAll', () => {
it('retrieves all rows from execute function:', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT';
dbStmt.prepare(sql, (error) => {
if (error) {
throw error;
}
dbStmt.execute((out, error) => {
if (error) {
throw error;
}
dbStmt.execute((out, error) => {
dbStmt.fetchAll((result, error) => {
if (error) {
throw error;
}
dbStmt.fetchAll((result, error) => {
if (error) {
throw error;
}
expect(error).to.be.null;
expect(result).to.be.a('array');
expect(result.length).to.be.greaterThan(0);
done();
});
expect(error).to.be.null;
expect(result).to.be.a('array');
expect(result.length).to.be.greaterThan(0);
done();
});

@@ -218,28 +209,23 @@ });

});
});
describe('async fetch', () => {
it('retrieves one row from result set:', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {
describe('async fetch', () => {
it('retrieves one row from result set:', (done) => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT';
dbStmt.prepare(sql, (error) => {
if (error) {
throw error;
}
dbStmt.execute((out, error) => {
if (error) {
throw error;
}
dbStmt.execute((out, error) => {
if (error) {
throw error;
dbStmt.fetch((row, returnCode) => {
if (returnCode !== 0) { // SQL_SUCCESS
throw new Error('Rreturn Code was Not SQL SUCESS');
}
dbStmt.fetch((row, returnCode) => {
if (returnCode !== 0) { // SQL_SUCCESS
throw new Error('Rreturn Code was Not SQL SUCESS');
}
expect(returnCode).to.equal(0);
expect(row).to.be.a('object');
done();
});
expect(returnCode).to.equal(0);
expect(row).to.be.a('object');
done();
});

@@ -246,0 +232,0 @@ });

@@ -71,3 +71,3 @@ const {expect} = require('chai');

const result = connection.getConnAttr(attr);
connection.close();
expect(result).to.be.a('number');

@@ -80,3 +80,3 @@ });

const result = connection.getConnAttr(attr);
connection.close();
expect(result).to.be.a('string');

@@ -106,2 +106,4 @@ });

expect(result).to.equal(db2a.SQL_FALSE);
connection.close();
});

@@ -114,3 +116,3 @@

const result = connection.setConnAttr(attr, value);
connection.close();
expect(result).to.be.true;

@@ -130,3 +132,3 @@ });

result = connection.debug(choice);
connection.close();
expect(result).to.equal(choice);

@@ -142,3 +144,3 @@ });

const result = connection.validStmt(sql);
connection.close();
expect(result).to.equal(sql);

@@ -152,3 +154,3 @@ });

const result = connection.validStmt(sql);
connection.close();
expect(result).to.equal(null);

@@ -155,0 +157,0 @@ } catch (e) { }

@@ -1,2 +0,2 @@

const {expect} = require('chai');
const { expect } = require('chai');
const util = require('util');

@@ -11,11 +11,25 @@ const fs = require('fs');

describe('Data Type Test', () => {
var dbConn, dbStmt;
before(() => {
dbConn = new dbconn();
dbConn.conn('*LOCAL');
});
after(() => {
dbConn.disconn();
dbConn.close();
});
beforeEach(() => {
dbStmt = new dbstmt(dbConn);
});
afterEach(() => {
dbStmt.close();
});
describe('select number types', () => {
it('smallint', (done) => {
const sql = 'select * from (values smallint( -32768 )) as x (smallint_val)';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {

@@ -33,8 +47,2 @@ expect(error).to.be.null;

const sql = 'select * from (values int( -2147483648 )) as x (int_val)';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {

@@ -52,8 +60,2 @@ expect(error).to.be.null;

const sql = 'select * from (values bigint( -9223372036854775808 )) as x (bigint_val)';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {

@@ -68,20 +70,19 @@ expect(error).to.be.null;

/* Currently Does not pass real type not supported yet
it('real', (done) => {
let sql = 'select * from (values real( -12345.54321 )) as x (real_val)',
dbConn = new dbconn();
dbConn.conn('*LOCAL');
// it('real', (done) => {
// let sql = 'select * from (values real( -12345.54321 )) as x (real_val)',
// dbConn = new dbconn();
let dbStmt = new dbstmt(dbConn);
// dbConn.conn('*LOCAL');
dbStmt.exec(sql, (result, error) => {
expect(error).to.be.null;
expect(result).to.be.an('array');
expect(result.length).to.be.greaterThan(0);
expect(Object.values(result[0])[0] ).to.equal("-12345.54321");
done();
});
});
*/
// let dbStmt = new dbstmt(dbConn);
// dbStmt.exec(sql, (result, error) => {
// expect(error).to.be.null;
// expect(result).to.be.an('array');
// expect(result.length).to.be.greaterThan(0);
// expect(Object.values(result[0])[0] ).to.equal("-12345.54321");
// done();
// });
// });
});

@@ -91,3 +92,3 @@

describe('bind parameters blob/binary/varbinary', () => {
before(() => {
before((done) => {
const user = (process.env.USER).toUpperCase();

@@ -98,7 +99,3 @@ const sql = `CREATE SCHEMA ${user}`;

const sql4 = `CREATE OR REPLACE TABLE ${user}.VARBINARYTEST(VARBINARY_COLUMN VARBINARY(3000))`;
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {

@@ -111,3 +108,6 @@ // if Schema already exsists will error but ignore

dbStmt.closeCursor();
dbStmt.exec(sql4, (result, error) => {});
dbStmt.exec(sql4, (result, error) => {
dbStmt.close();
done();
});
});

@@ -160,3 +160,2 @@ });

const dbConn = new dbconn();
fs.readFile(`${__dirname}/../README.md`, (error, buffer) => {

@@ -195,3 +194,2 @@ if (error) {

const sql = `INSERT INTO ${user}.VARBINARYTEST(VARBINARY_COLUMN) VALUES(?)`;
const dbConn = new dbconn();

@@ -202,4 +200,2 @@ fs.readFile(`${__dirname}/../README.md`, (error, buffer) => {

}
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);

@@ -232,8 +228,2 @@ dbStmt.prepare(sql, (error) => {

const sql = 'SELECT CAST(\'test\' AS BLOB(10k)) FROM SYSIBM.SYSDUMMY1';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {

@@ -257,8 +247,2 @@ if (error) {

const sql = 'SELECT CAST(\'test\' AS BINARY(10)) FROM SYSIBM.SYSDUMMY1';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {

@@ -282,8 +266,2 @@ if (error) {

const sql = 'SELECT CAST(\'test\' AS VARBINARY(10)) FROM SYSIBM.SYSDUMMY1';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {

@@ -302,2 +280,100 @@ if (error) {

});
describe('inconsitent data', () => {
it('handle ABC/10 error in exec', (done) => {
const sql = `SELECT 'ABC'/10 AS DIVERR from sysibm.sysdummy1`;
dbStmt.exec(sql, (result, error) => {
if (error) {
console.log(util.inspect(error));
throw error;
}
expect(error).to.be.null;
expect(result).to.be.an('array');
expect(result[0].DIVERR).to.equal('-');
done();
});
});
it('handle ABC/10 error in fetch', (done) => {
const sql = `SELECT 'ABC'/10 AS DIVERR from sysibm.sysdummy1`;
dbStmt.prepare(sql, (error) => {
dbStmt.execute((outParams, error) => {
dbStmt.fetch((result, error) => {
expect(error).to.equal(1);
expect(result).to.be.an('object');
expect(result.DIVERR).to.equal('-');
done();
});
});
});
});
it('handle ABC/10 error in fetchAll', (done) => {
const sql = `SELECT 'ABC'/10 AS DIVERR from sysibm.sysdummy1`;
dbStmt.prepare(sql, (error) => {
dbStmt.execute((outParams, error) => {
dbStmt.fetchAll((result, error) => {
if (error) {
console.log(util.inspect(error));
throw error;
}
expect(error).to.be.null;
expect(result).to.be.an('array');
expect(result[0].DIVERR).to.equal('-');
done();
});
});
});
});
it('handle ABC/10 error in execSync', (done) => {
const sql = `SELECT 'ABC'/10 AS DIVERR from sysibm.sysdummy1`;
dbStmt.execSync(sql, (result, error) => {
if (error) {
console.log(util.inspect(error));
throw error;
}
expect(error).to.be.null;
expect(result).to.be.an('array');
expect(result[0].DIVERR).to.equal('-');
done();
});
});
it('handle ABC/10 error in fetchSync', (done) => {
const sql = `SELECT 'ABC'/10 AS DIVERR from sysibm.sysdummy1`;
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql, (error) => {
dbStmt.executeSync((out, error) => {
dbStmt.fetchSync((result, error) => {
expect(error).to.equal(1);
expect(result).to.be.an('object');
expect(result.DIVERR).to.equal('-');
done();
});
});
});
});
it('handle ABC/10 error in fetchAllSync', (done) => {
const sql = `SELECT 'ABC'/10 AS DIVERR from sysibm.sysdummy1`;
dbStmt.prepareSync(sql, (error) => {
dbStmt.executeSync((outParams, error) => {
dbStmt.fetchAllSync((result, error) => {
if (error) {
console.log(util.inspect(error));
throw error;
}
expect(error).to.be.null;
expect(result).to.be.an('array');
expect(result[0].DIVERR).to.equal('-');
done();
});
});
});
});
});
});

@@ -1,2 +0,2 @@

const {expect} = require('chai');
const { expect } = require('chai');
const db2a = require('../lib/db2a');

@@ -10,2 +10,22 @@

describe('Statement Misc Test', () => {
var dbConn, dbStmt;
before(() => {
dbConn = new dbconn();
dbConn.conn('*LOCAL');
});
after(() => {
dbConn.disconn();
dbConn.close();
});
beforeEach(() => {
dbStmt = new dbstmt(dbConn);
});
afterEach(() => {
dbStmt.close();
});
describe('setStmtAttr & getStmtAttr', () => {

@@ -15,6 +35,3 @@ it('setStmtAttr(attribute, value) then getStmtAttr(attribute) should equal value', () => {

let value = db2a.SQL_TRUE;
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
let result = dbStmt.setStmtAttr(attr, value);

@@ -42,5 +59,2 @@

let sql = 'SELECT * FROM QIWS.QCUSTCDT';
let dbConn = new dbconn();
dbConn.conn('*LOCAL');
let dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql);

@@ -57,7 +71,3 @@ dbStmt.execute();

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -83,5 +93,2 @@ if (error) {

const sql = 'INSERT INTO QIWS.QCUSTCDT(CUSNUM,LSTNAM,INIT,STREET,CITY,STATE,ZIPCOD,CDTLMT,CHGCOD,BALDUE,CDTDUE) VALUES (?,?,?,?,?,?,?,?,?,?,?) with NONE ';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);

@@ -127,7 +134,3 @@ const params = [

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -153,7 +156,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -179,6 +178,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -207,6 +203,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -235,7 +228,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -264,7 +253,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -293,7 +278,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -322,7 +303,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepare(sql, (error) => {

@@ -352,6 +329,3 @@ if (error) {

const expectedError = 'SQLSTATE=42704 SQLCODE=-204';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (out, error) => {

@@ -370,7 +344,3 @@ dbStmt.stmtError(db2a.SQL_HANDLE_STMT, 1, (rs) => {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, () => {

@@ -388,7 +358,3 @@ const result = dbStmt.closeCursor();

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, () => {

@@ -406,7 +372,3 @@ const result = dbStmt.reset();

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.exec(sql, (result, error) => {

@@ -422,11 +384,7 @@ const isClose = dbStmt.close();

it('should default to false', () => {
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
let result = dbStmt.asNumber();
expect(result).to.be.false;
});
it('when false should return numbers as strings', (done) => {

@@ -437,8 +395,5 @@ const sql = `select

from sysibm.sysdummy1`;
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.asNumber(false);
dbStmt.exec(sql, (result, error) => {

@@ -448,8 +403,10 @@ expect(error).to.be.null;

expect(result.length).to.be.greaterThan(0);
expect(result).to.eql([{"MIN_SMALLINT":"-32768",
"MAX_SMALLINT":"32767"}]);
expect(result).to.eql([{
"MIN_SMALLINT": "-32768",
"MAX_SMALLINT": "32767"
}]);
done();
});
});
it('when true should return numbers when safe to do so', (done) => {

@@ -468,8 +425,5 @@ const sql = `select

from sysibm.sysdummy1`;
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.asNumber(true);
dbStmt.exec(sql, (result, error) => {

@@ -479,12 +433,13 @@ expect(error).to.be.null;

expect(result.length).to.be.greaterThan(0);
expect(result).to.eql([{"MIN_SMALLINT":-32768,
"MAX_SMALLINT":32767,
"MIN_INT":-2147483648,
"MAX_INT":2147483647,
"DEC_SAFE_15_0": 999999999999999,
"DEC_SAFE_15_15":0.999999999999999,
"MIN_BIGINT": "-9223372036854775808",
"MAX_BIGINT": "9223372036854775807",
"DEC_NOT_SAFE_16_0":"9999999999999999"
}]);
expect(result).to.eql([{
"MIN_SMALLINT": -32768,
"MAX_SMALLINT": 32767,
"MIN_INT": -2147483648,
"MAX_INT": 2147483647,
"DEC_SAFE_15_0": 999999999999999,
"DEC_SAFE_15_15": 0.999999999999999,
"MIN_BIGINT": "-9223372036854775808",
"MAX_BIGINT": "9223372036854775807",
"DEC_NOT_SAFE_16_0": "9999999999999999"
}]);
done();

@@ -491,0 +446,0 @@ });

@@ -1,2 +0,2 @@

const {expect} = require('chai');
const { expect } = require('chai');
const util = require('util');

@@ -10,10 +10,25 @@ const db2a = require('../lib/db2a');

describe('Statement Sync Test', () => {
var dbConn, dbStmt;
before(() => {
dbConn = new dbconn();
dbConn.conn('*LOCAL');
});
after(() => {
dbConn.disconn();
dbConn.close();
});
beforeEach(() => {
dbStmt = new dbstmt(dbConn);
});
afterEach(() => {
dbStmt.close();
});
describe('prepare callback', () => {
it('Prepares valid SQL and sends it to the DBMS, if fail, error is returned. ', () => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql, (error) => {

@@ -31,7 +46,2 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql);

@@ -44,11 +54,2 @@ });

const sql = 'INSERT INTO QIWS.QCUSTCDT(CUSNUM,LSTNAM,INIT,STREET,CITY,STATE,ZIPCOD,CDTLMT,CHGCOD,BALDUE,CDTDUE) VALUES (?,?,?,?,?,?,?,?,?,?,?) with NONE ';
const dbConn = new dbconn();
const dbConn2 = new dbconn();
dbConn.conn('*LOCAL');
dbConn2.conn('*LOCAL');
let dbStmt = new dbstmt(dbConn);
const dbStmt2 = new dbstmt(dbConn2);
const params = [

@@ -68,2 +69,6 @@ [9997, IN, NUMERIC], // CUSNUM

const dbConn2 = new dbconn();
dbConn2.conn('*LOCAL');
const dbStmt2 = new dbstmt(dbConn2);
const result = dbStmt.execSync('SELECT COUNT(CUSNUM) FROM QIWS.QCUSTCDT');

@@ -95,3 +100,2 @@ let rowsBefore = result[0]['00001'];

rowsAfter = Number(rowsAfter);
dbStmt.close();

@@ -108,4 +112,2 @@ expect(rowsAfter).to.equal(rowsBefore + 1);

const sql = 'INSERT INTO QIWS.QCUSTCDT(CUSNUM,LSTNAM,INIT,STREET,CITY,STATE,ZIPCOD,CDTLMT,CHGCOD,BALDUE,CDTDUE) VALUES (?,?,?,?,?,?,?,?,?,?,?) with NONE ';
const dbConn = new dbconn();
const dbConn2 = new dbconn();
const params = [

@@ -124,5 +126,5 @@ [9997, IN, NUMERIC], // CUSNUM

];
dbConn.conn('*LOCAL');
const dbConn2 = new dbconn();
dbConn2.conn('*LOCAL');
let dbStmt = new dbstmt(dbConn);
const dbStmt2 = new dbstmt(dbConn2);

@@ -156,7 +158,2 @@ // first get count of current rows

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.execSync(sql, (result, error) => {

@@ -177,7 +174,2 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
const result = dbStmt.execSync(sql);

@@ -191,51 +183,41 @@ expect(result).to.be.an('array');

describe('execute callback', () => {
it('retrieves output parameters from stored proc using executeSync with a callback', () => {
const sql = 'call QXMLSERV.iPLUG512K(?,?,?,?)';
const dbConn = new dbconn();
// it('retrieves output parameters from stored proc using executeSync with a callback', () => {
// const sql = 'call QXMLSERV.iPLUG512K(?,?,?,?)';
// const ipc = '*NA';
// const ctl = '*here';
// const xmlIn = `<xmlservice><sh>system 'wrksbs'</sh></xmlservice>`;
// const xmlOut = '';
// const params = [
// [ipc, IN, CHAR],
// [ctl, IN, CHAR],
// [xmlIn, IN, CLOB],
// [xmlOut, OUT, CLOB],
// ];
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
// dbStmt.prepareSync(sql, (error) => {
// if (error) {
// throw error;
// }
// dbStmt.bindParamSync(params, (error) => {
// if (error) {
// throw error;
// }
// dbStmt.executeSync((result, error) => {
// if (error) {
// throw error;
// }
// expect(error).to.be.null;
// expect(result).to.be.a('array');
// expect(result.length).to.be.eq(1);
// });
// });
// });
// });
const ipc = '*NA';
const ctl = '*here';
const xmlIn = `<xmlservice><sh>system 'wrksbs'</sh></xmlservice>`;
const xmlOut = '';
const params = [
[ipc, IN, CHAR],
[ctl, IN, CHAR],
[xmlIn, IN, CLOB],
[xmlOut, OUT, CLOB],
];
dbStmt.prepareSync(sql, (error) => {
if (error) {
throw error;
}
dbStmt.bindParamSync(params, (error) => {
if (error) {
throw error;
}
dbStmt.executeSync((result, error) => {
if (error) {
throw error;
}
expect(error).to.be.null;
expect(result).to.be.a('array');
expect(result.length).to.be.eq(1);
});
});
});
});
it('executes prepared statement using executeSync with callback. Returns null because no output params are available', () => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT WHERE BALDUE > ?';
const dbConn = new dbconn();
const params = [
[10.00, IN, NUMERIC],
];
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql, (error) => {

@@ -262,33 +244,27 @@ if (error) {

describe('execute no-callback', () => {
it('retrieves output parameters from stored procedure using executeSync without a callback:', () => {
const sql = 'CALL QXMLSERV.iPLUG512K(?,?,?,?)';
const dbConn = new dbconn();
// it('retrieves output parameters from stored procedure using executeSync without a callback:', () => {
// const sql = 'CALL QXMLSERV.iPLUG512K(?,?,?,?)';
// const ipc = '*NA';
// const ctl = '*here';
// const xmlIn = `<xmlservice><sh>system 'wrksbs'</sh></xmlservice>`;
// const xmlOut = '';
// const params = [
// [ipc, IN, CHAR],
// [ctl, IN, CHAR],
// [xmlIn, IN, CLOB],
// [xmlOut, OUT, CLOB],
// ];
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
// dbStmt.prepareSync(sql);
// dbStmt.bindParamSync(params);
const ipc = '*NA';
const ctl = '*here';
const xmlIn = `<xmlservice><sh>system 'wrksbs'</sh></xmlservice>`;
const xmlOut = '';
const params = [
[ipc, IN, CHAR],
[ctl, IN, CHAR],
[xmlIn, IN, CLOB],
[xmlOut, OUT, CLOB],
];
// const out = dbStmt.executeSync();
dbStmt.prepareSync(sql);
dbStmt.bindParamSync(params);
// expect(out).to.be.a('array');
// expect(out.length).to.be.eq(1);
// });
const out = dbStmt.executeSync();
expect(out).to.be.a('array');
expect(out.length).to.be.eq(1);
});
it('executes prepared statement using executeSync without callback. Returns null because no output params are available', () => {
const sql = 'SELECT * FROM QIWS.QCUSTCDT WHERE BALDUE > ?';
const dbConn = new dbconn();
const params = [

@@ -298,5 +274,2 @@ [10.00, IN, NUMERIC],

dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql);

@@ -313,7 +286,3 @@ dbStmt.bindParamSync(params);

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql, (error) => {

@@ -344,7 +313,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql);

@@ -361,7 +326,3 @@ dbStmt.executeSync();

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql, (error) => {

@@ -390,7 +351,3 @@ if (error) {

const sql = 'SELECT * FROM QIWS.QCUSTCDT';
const dbConn = new dbconn();
dbConn.conn('*LOCAL');
const dbStmt = new dbstmt(dbConn);
dbStmt.prepareSync(sql);

@@ -397,0 +354,0 @@ dbStmt.executeSync();

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc