
@cap-js/hana

Comparing version 0.0.3 to 0.0.4

CHANGELOG.md

@@ -7,2 +7,22 @@ # Changelog

## [0.0.4](https://github.com/cap-js/cds-dbs/compare/hana-v0.0.3...hana-v0.0.4) (2024-02-02)
### Added
* Add fallback for @cap-js/hana for unknown entities ([#403](https://github.com/cap-js/cds-dbs/issues/403)) ([e7dd6de](https://github.com/cap-js/cds-dbs/commit/e7dd6de4ef65881ef66f7ba9c164ff2b4e9b1111))
* SELECT returns binaries as Buffers ([#416](https://github.com/cap-js/cds-dbs/issues/416)) ([d4240d5](https://github.com/cap-js/cds-dbs/commit/d4240d5efb7789851593c83a430e601d6ff87118))
* SELECT returns LargeBinaries as streams unless feature flag "stream_compat" is set ([#251](https://github.com/cap-js/cds-dbs/issues/251)) ([8165a4a](https://github.com/cap-js/cds-dbs/commit/8165a4a3f6bb21c970668c8873f9d9c662b43780))
* Support Readable Streams inside INSERT.entries ([#343](https://github.com/cap-js/cds-dbs/issues/343)) ([f6faf89](https://github.com/cap-js/cds-dbs/commit/f6faf8955b7888479c66f1727ade65b382611c2f))
### Fixed
* Ensure globally unique aliases with large expand queries ([#396](https://github.com/cap-js/cds-dbs/issues/396)) ([c1df747](https://github.com/cap-js/cds-dbs/commit/c1df747e54f3ac224ec98d44cb72315aabe9e16a))
* enumeration issue with session context in @cap-js/hana ([#399](https://github.com/cap-js/cds-dbs/issues/399)) ([8106a20](https://github.com/cap-js/cds-dbs/commit/8106a207543be700d37b1f1b510d00d5dd1370e4))
* ignore empty order by ([#392](https://github.com/cap-js/cds-dbs/issues/392)) ([a69fed0](https://github.com/cap-js/cds-dbs/commit/a69fed01c4ff6c503ec58b5c7997ef1fc1dd5e86))
* improve `!=` and `==` implementation for @cap-js/hana ([#426](https://github.com/cap-js/cds-dbs/issues/426)) ([9b7b5a0](https://github.com/cap-js/cds-dbs/commit/9b7b5a09b2fd4bbc9e28ba3f73afb41941c011d3))
* show clear error message when unable to load project package.json ([#419](https://github.com/cap-js/cds-dbs/issues/419)) ([2ebf783](https://github.com/cap-js/cds-dbs/commit/2ebf783de0ce044323a3ac54a0cac0e5485e360d))
* UPSERT for @cap-js/hana for entities with multiple keys ([#418](https://github.com/cap-js/cds-dbs/issues/418)) ([9bbac6e](https://github.com/cap-js/cds-dbs/commit/9bbac6ebbbddfa2f620833ce195eedeb0a79f43e))
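
For consumers of the package, the LargeBinary change (#251) is the most visible one: unless the `stream_compat` feature flag is set, large binary elements now arrive as Readable streams, while plain binaries arrive as Buffers (#416). A minimal sketch, assuming a hypothetical `Books` entity with a LargeBinary element `image`:

const cds = require('@sap/cds')

async function readCover(ID) {
  const { Books } = cds.entities // hypothetical entity with a LargeBinary `image`
  const [book] = await SELECT.from(Books).columns('image').where({ ID })
  // without the `stream_compat` feature flag, `image` is a Readable stream
  for await (const chunk of book.image) process.stdout.write(chunk)
}
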
## [0.0.3](https://github.com/cap-js/cds-dbs/compare/hana-v0.0.2...hana-v0.0.3) (2023-12-20)

@@ -9,0 +29,0 @@

lib/drivers/hana-client.js

@@ -48,7 +48,66 @@ const { Readable, Stream } = require('stream')

    set(variables) {
-     Object.keys(variables).forEach(k => this._native.setClientInfo(k, variables[k]))
+     for (const key in variables) {
+       this._native.setClientInfo(key, variables[key])
+     }
    }

-   async prepare(sql) {
+   async prepare(sql, hasBlobs) {
      const ret = await super.prepare(sql)
-     // hana-client ResultSet API does not allow for deferred streaming of blobs
+     // With the current design of the hana-client ResultSet it is only
+     // possible to read all LOBs into memory to do deferred streaming
+     // Main reason is that the ResultSet only allows using getData() on the current row
+     // with the current next() implementation it is only possible to go forward in the ResultSet
+     // It would be required to allow using getData() on previous rows
+     if (hasBlobs) {
+       ret.all = async (values) => {
+         const stmt = await ret._prep
+         // Create result set
+         const reset = async function () {
+           if (this) await prom(this, 'close')()
+           const rs = await prom(stmt, 'executeQuery')(values)
+           rs.reset = reset
+           return rs
+         }
+         const rs = await reset()
+
+         const rsStreamsProm = {}
+         const rsStreams = new Promise((resolve, reject) => {
+           rsStreamsProm.resolve = resolve
+           rsStreamsProm.reject = reject
+         })
+
+         rs._rowPosition = -1
+         const _next = prom(rs, 'next')
+         const next = () => {
+           rs._rowPosition++
+           return _next()
+         }
+         const getValue = prom(rs, 'getValue')
+         const result = []
+         // Fetch the next row
+         while (await next()) {
+           const cols = stmt.getColumnInfo().map(b => b.columnName)
+           // column 0-3 are metadata columns
+           const values = await Promise.all([getValue(0), getValue(1), getValue(2), getValue(3)])
+           const row = {}
+           for (let i = 0; i < cols.length; i++) {
+             const col = cols[i]
+             // column >3 are all blob columns
+             row[col] = i > 3
+               ? rs.isNull(i)
+                 ? null
+                 : Readable.from(streamBlob(rsStreams, rs._rowPosition, i, 'binary'))
+               : values[i]
+           }
+           result.push(row)
+         }
+         rs.reset().then(rsStreamsProm.resolve, rsStreamsProm.reject)
+         return result
+       }
+     }
ret.stream = async (values, one) => {

@@ -68,3 +127,3 @@ const stmt = await ret._prep

const rows = await prom(stmt, 'exec')(values, { rowsAsArray: true })
-     return Readable.from(rowsIterator(rows, stmt.getColumnInfo()))
+     return Readable.from(rowsIterator(rows, stmt.getColumnInfo()), { objectMode: false })
}
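
The `hasBlobs` flag added above changes what `all()` returns: the first four columns are read eagerly, and every later column comes back as `null` or a Readable stream backed by a single, re-executable result set. A hedged usage sketch (driver instance, SQL, and column name are illustrative):

async function readBlobs(driver) {
  const stmt = await driver.prepare('SELECT ...', /* hasBlobs */ true)
  const rows = await stmt.all([])
  for (const row of rows) {
    const blob = row.DATA // illustrative blob column; null when the cell is NULL
    if (!blob) continue
    for await (const chunk of blob) {
      // consume sequentially: all blob streams share one result set under the hood
    }
  }
}
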

@@ -78,5 +137,5 @@ const rs = await prom(stmt, 'executeQuery')(values)

if (rs.isNull(0)) return null
-     return Readable.from(streamBlob(rs, 0, 'binary'))
+     return Readable.from(streamBlob(rs, undefined, 0, 'binary'), { objectMode: false })
}
-     return Readable.from(rsIterator(rs, one))
+     return Readable.from(rsIterator(rs, one), { objectMode: false })
}

@@ -191,3 +250,3 @@ return ret

-     for await (const chunk of streamBlob(rs, columnIndex, 'base64', binaryBuffer)) {
+     for await (const chunk of streamBlob(rs, undefined, columnIndex, 'base64', binaryBuffer)) {
yield chunk

@@ -215,28 +274,71 @@ }

- async function* streamBlob(rs, columnIndex, encoding, binaryBuffer = Buffer.allocUnsafe(1 << 16)) {
-   const getData = prom(rs, 'getData')
-   let decoder = new StringDecoder(encoding)
-   let blobPosition = 0
-   while (true) {
-     // REVISIT: Ensure that the data read is divisible by 3 as that allows for base64 encoding
-     let start = 0
-     const read = await getData(columnIndex, blobPosition, binaryBuffer, 0, binaryBuffer.byteLength)
-     if (blobPosition === 0 && binaryBuffer.slice(0, 7).toString() === 'base64,') {
-       decoder = {
-         write: encoding === 'base64' ? c => c : chunk => Buffer.from(chunk.toString(), 'base64'),
-         end: () => Buffer.allocUnsafe(0),
-       }
-       start = 7
-     }
-     blobPosition += read
-     if (read < binaryBuffer.byteLength) {
-       yield decoder.write(binaryBuffer.slice(start, read))
-       break
-     }
-     yield decoder.write(binaryBuffer.slice(start).toString('base64'))
-   }
-   yield decoder.end()
+ async function* streamBlob(rs, rowIndex = -1, columnIndex, encoding, binaryBuffer = Buffer.allocUnsafe(1 << 16)) {
+   const promChain = {
+     resolve: () => { },
+     reject: () => { }
+   }
+   try {
+     // Check if the resultset is a promise
+     if (rs.then) {
+       // Copy the current Promise
+       const prom = new Promise((resolve, reject) => rs.then(resolve, reject))
+       // Enqueue all following then calls till after the current call
+       const next = new Promise((resolve, reject) => {
+         promChain.resolve = resolve
+         promChain.reject = reject
+       })
+       rs.then = (resolve, reject) => next.then(resolve, reject)
+       rs = await prom
+     }
+     // Check if the provided resultset is on the correct row
+     if (rowIndex >= 0) {
+       rs._rowPosition ??= -1
+       if (rowIndex - rs._rowPosition < 0) {
+         rs = await rs.reset()
+         rs._rowPosition ??= -1
+       }
+       const _next = prom(rs, 'next')
+       const next = () => {
+         rs._rowPosition++
+         return _next()
+       }
+       // Move result set to the correct row
+       while (rowIndex - rs._rowPosition > 0) {
+         await next()
+       }
+     }
+     const getData = prom(rs, 'getData')
+     let decoder = new StringDecoder(encoding)
+     let blobPosition = 0
+     while (true) {
+       // REVISIT: Ensure that the data read is divisible by 3 as that allows for base64 encoding
+       let start = 0
+       const read = await getData(columnIndex, blobPosition, binaryBuffer, 0, binaryBuffer.byteLength)
+       if (blobPosition === 0 && binaryBuffer.slice(0, 7).toString() === 'base64,') {
+         decoder = {
+           write: encoding === 'base64' ? c => c : chunk => Buffer.from(chunk.toString(), 'base64'),
+           end: () => Buffer.allocUnsafe(0),
+         }
+         start = 7
+       }
+       blobPosition += read
+       if (read < binaryBuffer.byteLength) {
+         yield decoder.write(binaryBuffer.slice(start, read))
+         break
+       }
+       yield decoder.write(binaryBuffer.slice(start).toString('base64'))
+     }
+     yield decoder.end()
+   } catch (e) {
+     promChain.reject(e)
+   } finally {
+     promChain.resolve(rs)
+   }
}
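
The thenable hand-off at the top of `streamBlob` deserves a note: the shared result set is passed around as a promise, and each consumer replaces `rs.then` so that later `await`s queue behind the current read instead of interleaving `getData` calls on one cursor. A generic sketch of the same pattern (`makeHandOff` and `withResource` are our names, not the driver's):

function makeHandOff(resource) {
  return { then: (resolve, reject) => Promise.resolve(resource).then(resolve, reject) }
}

async function withResource(shared, fn) {
  const chain = {}
  // claim the current promise ...
  const current = new Promise((resolve, reject) => shared.then(resolve, reject))
  // ... and queue every later awaiter behind this consumer
  const next = new Promise((resolve, reject) => { chain.resolve = resolve; chain.reject = reject })
  shared.then = (resolve, reject) => next.then(resolve, reject)
  const resource = await current
  try {
    return await fn(resource)
  } finally {
    chain.resolve(resource) // release the resource to the next awaiter
  }
}

// Usage: two concurrent readers now run strictly one after the other
// const shared = makeHandOff(resultSet)
// await Promise.all([withResource(shared, readA), withResource(shared, readB)])
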

@@ -243,0 +345,0 @@
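
lib/drivers/hdb.js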

@@ -1,3 +0,4 @@

- const { Readable, PassThrough, Stream } = require('stream')
+ const { Readable, Stream } = require('stream')
  const { StringDecoder } = require('string_decoder')
+ const { text } = require('stream/consumers')

@@ -43,3 +44,5 @@ const hdb = require('hdb')

const clientInfo = this._native._connection.getClientInfo()
-   Object.keys(variables).forEach(k => clientInfo.setProperty(k, variables[k]))
+   for (const key in variables) {
+     clientInfo.setProperty(key, variables[key])
+   }
}

@@ -71,4 +74,38 @@

- async prepare(sql) {
+ async prepare(sql, hasBlobs) {
    const ret = await super.prepare(sql)
+   if (hasBlobs) {
+     ret.all = async (values) => {
+       const stmt = await ret._prep
+       // Create result set
+       const rs = await prom(stmt, 'execute')(values)
+       const cols = rs.metadata.map(b => b.columnName)
+       const stream = rs.createReadStream()
+       const result = []
+       for await (const row of stream) {
+         const obj = {}
+         for (let i = 0; i < cols.length; i++) {
+           const col = cols[i]
+           // hdb returns large strings as streams sometimes
+           if (col === '_json_' && typeof row[col] === 'object') {
+             obj[col] = await text(row[col].createReadStream())
+             continue
+           }
+           obj[col] = i > 3
+             ? row[col] === null
+               ? null
+               : (
+                 row[col].createReadStream?.()
+                 || Readable.from(echoStream(row[col]), { objectMode: false })
+               )
+             : row[col]
+         }
+         result.push(obj)
+       }
+       return result
+     }
+   }
ret.stream = async (values, one) => {

@@ -101,7 +138,6 @@ const stmt = await ret._prep

values = values.map((v, i) => {
-     if (v instanceof Stream && !(v instanceof PassThrough)) {
+     if (v instanceof Stream) {
        streams[i] = v
-       const passThrough = new PassThrough()
-       v.pipe(passThrough)
-       return passThrough
+       const iterator = v[Symbol.asyncIterator]()
+       return Readable.from(iterator, { objectMode: false })
}

@@ -117,2 +153,6 @@ return v

+ function* echoStream(ret) {
+   yield ret
+ }
async function* rsIterator(rs, one) {

@@ -152,3 +192,3 @@ // Raw binary data stream unparsed

})
-   .catch(e => {
+   .catch(() => {
// TODO: check whether the error is early close

@@ -155,0 +195,0 @@ return true

lib/drivers/index.js

@@ -8,16 +8,15 @@ const cds = require('@sap/cds')

get() {
-   const projectPackage = require(cds.root + '/package.json')
-   const dependencies = {
-     ...projectPackage.dependencies,
-     ...(process.env.NODE_ENV !== 'production' && projectPackage.devDependencies),
-   }
-   // Have a bias to hdb as the default driver
-   if (dependencies.hdb) {
-     return module.exports.hdb
-   }
-   if (dependencies['@sap/hana-client']) {
-     return module.exports['hana-client']
-   }
+   try {
+     const projectPackage = require(cds.root + '/package.json')
+     const dependencies = {
+       ...projectPackage.dependencies,
+       ...(process.env.NODE_ENV !== 'production' && projectPackage.devDependencies),
+     }
+     // Have a bias to hdb as the default driver
+     if (dependencies.hdb) return module.exports.hdb
+     if (dependencies['@sap/hana-client']) return module.exports['hana-client']
+   } catch (e) {
+     console.trace(`WARNING! Unable to require the project's package.json at "${cds.root + '/package.json'}". Please check your project setup.`)
+   }
// When no driver is installed still try to load any of the drivers

@@ -24,0 +23,0 @@ try {
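
The selection rule restated: `hdb` in the project's dependencies wins, `@sap/hana-client` comes second, and a missing or unreadable package.json now degrades to a traced warning instead of a crash. A condensed sketch of the same logic (`pickDriver` is our name):

const cds = require('@sap/cds')

function pickDriver(available /* e.g. { hdb, 'hana-client': ... } */) {
  let dependencies = {}
  try {
    const projectPackage = require(cds.root + '/package.json')
    dependencies = {
      ...projectPackage.dependencies,
      // devDependencies only count outside production, as above
      ...(process.env.NODE_ENV !== 'production' && projectPackage.devDependencies),
    }
  } catch {
    console.trace(`WARNING! Unable to require the project's package.json at "${cds.root + '/package.json'}".`)
  }
  if (dependencies.hdb) return available.hdb // bias towards hdb
  if (dependencies['@sap/hana-client']) return available['hana-client']
  // otherwise fall through and try to load any installed driver
}
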

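lib/HANAService.js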
@@ -108,3 +108,5 @@ const fs = require('fs')

const sqlScript = this.wrapTemporary(temporary, withclause, blobs)
-   let rows = values?.length ? await (await this.prepare(sqlScript)).all(values) : await this.exec(sqlScript)
+   let rows = (values?.length || blobs.length > 0)
+     ? await (await this.prepare(sqlScript, blobs.length)).all(values || [])
+     : await this.exec(sqlScript)
if (rows.length) {

@@ -121,7 +123,2 @@ rows = this.parseRows(rows)

async onINSERT({ query, data }) {
-   // Using runBatch for HANA 2.0 and lower sometimes leads to integer underflow errors
-   // REVISIT: Address runBatch issues in node-hdb and hana-client
-   if (HANAVERSION <= 2) {
-     return super.onINSERT(...arguments)
-   }
const { sql, entries, cqn } = this.cqn2sql(query, data)

@@ -131,24 +128,10 @@ if (!sql) return // Do nothing when there is nothing to be done

// HANA driver supports batch execution
-   const results = entries ? await ps.runBatch(entries) : await ps.run()
+   const results = await (entries
+     ? HANAVERSION <= 2
+       ? entries.reduce((l, c) => l.then(() => ps.run(c)), Promise.resolve(0))
+       : ps.run(entries[0])
+     : ps.run())
    return new this.class.InsertResults(cqn, results)
  }

- async onSTREAM(req) {
-   let { cqn, sql, values, temporary, withclause, blobs } = this.cqn2sql(req.query)
-   // writing stream
-   if (req.query.STREAM.into) {
-     const ps = await this.prepare(sql)
-     return (await ps.run(values)).changes
-   }
-   // reading stream
-   if (temporary?.length) {
-     // Full SELECT CQN support streaming
-     sql = this.wrapTemporary(temporary, withclause, blobs)
-   }
-   const ps = await this.prepare(sql)
-   const stream = await ps.stream(values, cqn.SELECT?.one)
-   if (cqn.SELECT?.count) stream.$count = await this.count(req.query.STREAM.from)
-   return stream
- }
// Allow for running complex expand queries in a single statement
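
The rewritten INSERT path drops `runBatch` entirely: on HANA 2 and lower every entry runs sequentially through a promise chain, on newer versions a single JSON payload is sent per statement. Roughly, with `ps` standing for the prepared statement:

async function runEntries(ps, entries, HANAVERSION) {
  if (!entries) return ps.run()
  if (HANAVERSION <= 2) {
    // sequential promise chain, equivalent to the reduce(...) above
    let last = Promise.resolve(0)
    for (const entry of entries) last = last.then(() => ps.run(entry))
    return last
  }
  // newer HANA versions receive one JSON payload per statement
  return ps.run(entries[0])
}
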

@@ -186,3 +169,3 @@ wrapTemporary(temporary, withclauses, blobs) {

const data = Object.assign(JSON.parse(row._json_), expands, blobs)
-   Object.keys(blobs).forEach(k => (data[k] = row[k] || data[k]))
+   Object.keys(blobs).forEach(k => (data[k] = this._stream(row[k] || data[k])))

@@ -228,4 +211,4 @@ // REVISIT: try to unify with handleLevel from base driver used for streaming

// prepare and exec are both implemented inside the drivers
- prepare(sql) {
-   return this.ensureDBC().prepare(sql)
+ prepare(sql, hasBlobs) {
+   return this.ensureDBC().prepare(sql, hasBlobs)
}

@@ -274,2 +257,4 @@

+ const { limit, one, orderBy, expand, columns, localized, count, parent } = q.SELECT
const walkAlias = q => {

@@ -280,8 +265,6 @@ if (q.args) return q.as || walkAlias(q.args[0])

}
- const alias = walkAlias(q)
- q.as = alias
+ q.as = walkAlias(q)
+ const alias = q.alias = `${parent ? parent.alias + '.' : ''}${q.as}`
  const src = q
- const { limit, one, orderBy, expand, columns, localized, count, from, parent } = q.SELECT
// When one of these is defined wrap the query in a sub query

@@ -325,3 +308,3 @@ if (expand || (parent && (limit || one || orderBy))) {

if (parent) over.xpr.push(`PARTITION BY ${this.ref({ ref: ['_parent_path_'] })}`)
- if (orderBy) over.xpr.push(` ORDER BY ${this.orderBy(orderBy, localized)}`)
+ if (orderBy?.length) over.xpr.push(` ORDER BY ${this.orderBy(orderBy, localized)}`)
const rn = { xpr: [{ func: 'ROW_NUMBER', args: [] }, 'OVER', over], as: '$$RN$$' }

@@ -392,4 +375,5 @@ q.as = q.SELECT.from.as

this.cqn = q
- this.withclause.unshift(`${this.quote(alias)} as (${this.sql})`)
- this.temporary.unshift({ blobs: this._blobs, select: `SELECT ${this._outputColumns} FROM ${this.quote(alias)}` })
+ const fromSQL = this.from({ ref: [alias] })
+ this.withclause.unshift(`${fromSQL} as (${this.sql})`)
+ this.temporary.unshift({ blobs: this._blobs, select: `SELECT ${this._outputColumns} FROM ${fromSQL}` })
if (this.values) {

@@ -429,3 +413,3 @@ this.temporaryValues.unshift(this.values)

join: 'inner',
- args: [{ ref: [parent.as], as: parent.as }, x.SELECT.from],
+ args: [{ ref: [parent.alias], as: parent.as }, x.SELECT.from],
on: x.SELECT.where,

@@ -537,5 +521,6 @@ as: x.SELECT.from.as,

const elements = q.elements || q.target?.elements
- if (!elements && !INSERT.entries?.length) {
-   return // REVISIT: mtx sends an insert statement without entries and no reference entity
+ if (!elements) {
+   return super.INSERT_entries(q)
}
const columns = elements

@@ -559,24 +544,11 @@ ? ObjectKeys(elements).filter(c => c in elements && !elements[c].virtual && !elements[c].value && !elements[c].isAssociation)

if (HANAVERSION <= 2) {
-   // Simple line splitting would be preferred, but batch execute does not work properly
-   // Which makes sending every line separately much slower
-   // this.entries = INSERT.entries.map(e => [JSON.stringify(e)])
-   this.entries = []
-   let cur = ['[']
-   this.entries.push(cur)
-   INSERT.entries
-     .map(r => JSON.stringify(r))
-     .forEach(r => {
-       if (cur[0].length > 65535) {
-         cur[0] += ']'
-         cur = ['[']
-         this.entries.push(cur)
-       } else if (cur[0].length > 1) {
-         cur[0] += ','
-       }
-       cur[0] += r
-     })
-   cur[0] += ']'
+   this.entries = INSERT.entries.map(e => (e instanceof Readable
+     ? [e]
+     : [Readable.from(this.INSERT_entries_stream([e], 'hex'), { objectMode: false })]))
  } else {
-   this.entries = [[JSON.stringify(INSERT.entries)]]
+   this.entries = [[
+     INSERT.entries[0] instanceof Readable
+       ? INSERT.entries[0]
+       : Readable.from(this.INSERT_entries_stream(INSERT.entries, 'hex'), { objectMode: false })
+   ]]
}

@@ -613,2 +585,6 @@

+ const elements = q.elements || q.target?.elements
+ if (!elements) {
+   return super.INSERT_rows(q)
+ }
const columns = INSERT.columns || (elements && ObjectKeys(elements))

@@ -630,9 +606,13 @@ const entries = new Array(INSERT.rows.length)

UPSERT(q) {
- let { UPSERT } = q,
-   sql = this.INSERT({ __proto__: q, INSERT: UPSERT })
+ const { UPSERT } = q
+ const sql = this.INSERT({ __proto__: q, INSERT: UPSERT })
+ // If no definition is available fallback to INSERT statement
+ const elements = q.elements || q.target?.elements
+ if (!elements) {
+   return (this.sql = sql)
+ }
  // REVISIT: should @cds.persistence.name be considered ?
  const entity = q.target?.['@cds.persistence.name'] || this.name(q.target?.name || INSERT.into.ref[0])
- const elements = q.elements || q.target?.elements
const dataSelect = sql.substring(sql.indexOf('WITH'))

@@ -653,3 +633,3 @@

.map(k => `NEW.${this.quote(k)}=OLD.${this.quote(k)}`)
- .join('AND')
+ .join(' AND ')

@@ -683,15 +663,18 @@ return (this.sql = `UPSERT ${this.quote(entity)} (${this.columns.map(c =>

  where(xpr) {
-   return this.xpr({ xpr }, ' = TRUE')
+   return this.xpr({ xpr: [...xpr, 'THEN'] }).slice(0, -4)
  }
  having(xpr) {
-   return this.xpr({ xpr }, ' = TRUE')
+   return this.where(xpr)
  }
- xpr({ xpr, _internal }, caseSuffix = '') {
+ xpr(_xpr, caseSuffix = '') {
+   const { xpr, _internal } = _xpr
    // Maps the compare operators to what to return when both sides are null
    const compareOperators = {
-     '=': true,
      '==': true,
      '!=': false,
+     // These operators are not allowed in column expressions
+     /* REVISIT: Only adjust these operators when inside the column expression
+     '=': null,
      '>': null,

@@ -704,2 +687,3 @@ '<': null,

'!>': null,
+     */
}
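
What the `compareOperators` map encodes (see #426 in the changelog): `==` and `!=` get explicit null semantics, like SQL's IS NOT DISTINCT FROM and IS DISTINCT FROM. The CASE rewrite further down produces, for `a != b`, roughly this shape (the ELSE tail is elided here):

const rewritten = `
  CASE WHEN a != b THEN TRUE
       WHEN NOT a != b THEN FALSE
       WHEN a IS NULL AND b IS NULL THEN FALSE
       ELSE /* ... */ END`
// so NULL != NULL evaluates to FALSE, and NULL == NULL to TRUE
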

@@ -709,4 +693,7 @@

for (let i = 0; i < xpr.length; i++) {
- const x = xpr[i]
+ let x = xpr[i]
+ if (typeof x === 'string') {
+   // Convert =, == and != into is (not) null operator where required
+   x = xpr[i] = super.operator(xpr[i], i, xpr)
// HANA does not support comparators in all clauses (e.g. SELECT 1>0 FROM DUMMY)

@@ -719,9 +706,6 @@ // HANA does not have an 'IS' or 'IS NOT' operator

- const compare = {
-   xpr: [left, x, right],
-   _internal: true,
- }
+ const compare = [left, x, right]
  const expression = {
-   xpr: ['CASE', 'WHEN', compare, 'Then', { val: true }, 'WHEN', 'NOT', compare, 'Then', { val: false }],
+   xpr: ['CASE', 'WHEN', ...compare, 'THEN', { val: true }, 'WHEN', 'NOT', ...compare, 'THEN', { val: false }],
_internal: true,

@@ -737,7 +721,4 @@ }

'WHEN',
- {
-   xpr: [left, 'IS', 'NULL', 'AND', right, 'IS', 'NULL'],
-   _internal: true,
- },
- 'Then',
+ ...[left, 'IS', 'NULL', 'AND', right, 'IS', 'NULL'],
+ 'THEN',
{ val: ifNull },

@@ -755,3 +736,3 @@ 'ELSE',

xpr[i] = expression
- xpr[i + 1] = caseSuffix || ''
+ xpr[i + 1] = ''
}

@@ -773,3 +754,4 @@ }

// HANA does not allow WHERE TRUE so when the expression is only a single entry "= TRUE" is appended
- if (caseSuffix && xpr.length === 1) {
+ if (caseSuffix && (xpr.length === 1 || xpr.at(-1) === '')) {
sql.push(caseSuffix)

@@ -782,9 +764,42 @@ }

  operator(x, i, xpr) {
+   const up = x.toUpperCase()
    // Add "= TRUE" before THEN in case statements
    // As all valid comparators are converted to booleans as SQL specifies
-   if (x in { THEN: 1, then: 1 }) return ` = TRUE ${x}`
-   if ((x in { LIKE: 1, like: 1 } && is_regexp(xpr[i + 1]?.val)) || x === 'regexp') return 'LIKE_REGEXPR'
+   if (
+     up in logicOperators &&
+     !this.is_comparator({ xpr }, i - 1)
+   ) {
+     return ` = TRUE ${x}`
+   }
+   if (
+     (up === 'LIKE' && is_regexp(xpr[i + 1]?.val)) ||
+     up === 'REGEXP'
+   ) return 'LIKE_REGEXPR'
    else return x
  }

+ get is_distinct_from_() { return '!=' }
+ get is_not_distinct_from_() { return '==' }
+
+ /**
+  * Checks if the xpr is a comparison or a value
+  * @param {} xpr
+  * @returns
+  */
+ is_comparator({ xpr }, start) {
+   for (let i = start ?? xpr.length; i > -1; i--) {
+     const cur = xpr[i]
+     if (cur == null) continue
+     if (typeof cur === 'string') {
+       const up = cur.toUpperCase()
+       // When a compare operator is found the expression is a comparison
+       if (up in compareOperators) return true
+       // When a case operator is found it is the start of the expression
+       if (up in caseOperators) break
+       continue
+     }
+     if ('xpr' in cur) return this.is_comparator(cur)
+   }
+   return false
+ }
list(list) {

@@ -794,4 +809,4 @@ const first = list.list[0]

if (this.values && first.list && !first.list.find(v => !v.val)) {
- const extraction = first.list.map((v, i) => `"${i}" ${this.constructor.InsertTypeMap[typeof v.val]()} PATH '$.${i}'`)
- this.values.push(JSON.stringify(list.list.map(l => l.list.reduce((l, c, i) => { l[i] = c.val; return l }, {}))))
+ const extraction = first.list.map((v, i) => `"${i}" ${this.constructor.InsertTypeMap[typeof v.val]()} PATH '$.V${i}'`)
+ this.values.push(JSON.stringify(list.list.map(l => l.list.reduce((l, c, i) => { l[`V${i}`] = c.val; return l }, {}))))
return `(SELECT * FROM JSON_TABLE(?, '$' COLUMNS(${extraction})))`
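
Tuple lists for IN predicates now bind as JSON with synthetic `V0`, `V1`, … keys, and the JSON path follows suit. For a two-column list the rewrite yields, roughly (column types are illustrative; the real ones come from `InsertTypeMap`):

// WHERE (ID, locale) IN ((201, 'en'), (252, 'de')) becomes a JSON_TABLE subquery:
const bound = JSON.stringify([{ V0: 201, V1: 'en' }, { V0: 252, V1: 'de' }])
const subquery = `(SELECT * FROM JSON_TABLE(?, '$' COLUMNS(
  "0" INTEGER PATH '$.V0',
  "1" NVARCHAR(5000) PATH '$.V1'
)))`
// `bound` is pushed onto this.values and bound to the single ? placeholder
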

@@ -851,4 +866,3 @@ }

if (val) managed = this.func({ func: 'session_context', args: [{ val, param: false }] })
- const type = this.insertType4(element)
- let extract = sql ?? `${this.quote(name)} ${type} PATH '$.${name}'`
+ let extract = sql ?? `${this.quote(name)} ${this.insertType4(element)} PATH '$.${name}'`
if (!isUpdate) {

@@ -920,3 +934,3 @@ const d = element.default

// Not encoded string with CESU-8 or some UTF-8 except a surrogate pair at "base64_decode" function
- Binary: e => `CONCAT('base64,',${e})`,
+ Binary: e => `HEXTOBIN(${e})`,
Boolean: e => `CASE WHEN ${e} = 'true' THEN TRUE WHEN ${e} = 'false' THEN FALSE END`,

@@ -1016,4 +1030,4 @@ }

const stmt = await this.dbc.prepare(createContainerDatabase)
- const res = await stmt.all([creds.user, creds.password, creds.containerGroup, !clean])
- DEBUG?.(res.map(r => r.MESSAGE).join('\n'))
+ const res = await stmt.run([creds.user, creds.password, creds.containerGroup, !clean])
+ res && DEBUG?.(res.changes.map(r => r.MESSAGE).join('\n'))
} finally {

@@ -1059,4 +1073,4 @@ if (this.dbc) {

const stmt = await this.dbc.prepare(createContainerTenant.replaceAll('{{{GROUP}}}', creds.containerGroup))
- const res = await stmt.all([creds.user, creds.password, creds.schema, !clean])
- res && DEBUG?.(res.map(r => r.MESSAGE).join('\n'))
+ const res = await stmt.run([creds.user, creds.password, creds.schema, !clean])
+ res && DEBUG?.(res.changes.map(r => r.MESSAGE).join('\n'))
} finally {

@@ -1077,3 +1091,3 @@ await this.dbc.disconnect()

Buffer.prototype.toJSON = function () {
- return this.toString('base64')
+ return this.toString('hex')
}
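
This pairs with the `HEXTOBIN` change above: Buffers serialized into JSON insert payloads are now hex-encoded, and the generated converter decodes them on the HANA side. For instance:

// with the override in place, Buffers stringify to hex inside JSON payloads
JSON.stringify({ data: Buffer.from([0xca, 0xfe]) }) // -> '{"data":"cafe"}'
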

@@ -1089,2 +1103,32 @@

+ const caseOperators = {
+   'CASE': 1,
+   'WHEN': 1,
+   'THEN': 1,
+   'ELSE': 1,
+ }
+ const logicOperators = {
+   'THEN': 1,
+   'AND': 1,
+   'OR': 1,
+ }
+ const compareOperators = {
+   '=': 1,
+   '==': 1,
+   '!=': 1,
+   '>': 1,
+   '<': 1,
+   '<>': 1,
+   '>=': 1,
+   '<=': 1,
+   '!<': 1,
+   '!>': 1,
+   'IS': 1,
+   'IN': 1,
+   'LIKE': 1,
+   'IS NOT': 1,
+   'EXISTS': 1,
+   'BETWEEN': 1,
+ }
module.exports = HANAService
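
package.json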
{
"name": "@cap-js/hana",
"version": "0.0.3",
"version": "0.0.4",
"description": "CDS database service for SAP HANA",

@@ -30,7 +30,7 @@ "homepage": "https://cap.cloud.sap/",

"hdb": "^0.19.5",
"@cap-js/db-service": "^1.3.1"
"@cap-js/db-service": "^1.6.0"
},
"peerDependencies": {
"@sap/hana-client": ">=2",
"@sap/cds": ">=7"
"@sap/cds": ">=7.6"
},

@@ -37,0 +37,0 @@ "cds": {
