@sap/hdi-deploy
Advanced tools
Comparing version 4.4.1 to 4.5.0
@@ -0,1 +1,7 @@ | ||
## 4.5.0 | ||
Features: | ||
- use @sap/hana-client@2.13.22 | ||
- added option --optimise-file-upload to perform delta detection via local SHA256 calculation instead of DELETE and WRITE calls | ||
- Node 18.x support | ||
## 4.4.1 | ||
@@ -2,0 +8,0 @@ Features: |
@@ -145,2 +145,4 @@ 'use strict'; | ||
args.translateJSONEnvBooleanOptionToOption(logger, options, name, option, process.argv); | ||
} else if (option === 'optimise_file_upload') { | ||
args.translateJSONEnvBooleanOptionToOption(logger, options, name, option, process.argv); | ||
} else if (option === 'info') { | ||
@@ -279,2 +281,5 @@ args.translateJSONEnvStringArrayOptionToOption(logger, options, name, option, process.argv); | ||
'', | ||
" --[no-]optimise-file-upload [don't] perform delta detection via local SHA256 calculation instead of DELETE and WRITE calls on server", | ||
' by default, delta detection is done by DELETE and WRITE calls', | ||
'', | ||
" --[no-]treat-wrong-ownership-as-errors [don't] treat wrong ownership of objects as errors", | ||
@@ -373,2 +378,6 @@ ' by default, wrong ownership will not result in errors', | ||
opt.simulateMake = false; | ||
} else if (arg === '--optimise-file-upload') { | ||
opt.optimiseFileUpload = true; | ||
} else if (arg === '--no-optimise-file-upload') { | ||
opt.optimiseFileUpload = false; | ||
} else if (arg === '--root') { | ||
@@ -375,0 +384,0 @@ ++i; |
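For reference, the new switch can also be supplied through the deployer's JSON options, which is where the `optimise_file_upload` key handled above comes from. A minimal sketch, assuming the `HDI_DEPLOY_OPTIONS` environment variable documented for earlier releases of the deployer:

```js
// Hedged sketch: enable the new behaviour via the JSON options environment variable.
// HDI_DEPLOY_OPTIONS is an assumption based on the deployer's existing documentation;
// the option key matches the 'optimise_file_upload' case handled in the hunk above.
process.env.HDI_DEPLOY_OPTIONS = JSON.stringify({ optimise_file_upload: true });
```

The command-line flags `--optimise-file-upload` / `--no-optimise-file-upload` shown above map to the same `optimiseFileUpload` option.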
@@ -343,2 +343,3 @@ 'use strict'; | ||
this._dirs = dirs; | ||
try { | ||
@@ -461,2 +462,5 @@ this._deployFiles = files.filter(isDeployableFile).map((file) => [deployFilePath(file)]); | ||
} | ||
get dirs () { | ||
return this._dirs; | ||
} | ||
@@ -487,2 +491,5 @@ get deployFiles () { | ||
} | ||
set dirs (value) { | ||
this._dirs = value; | ||
} | ||
@@ -489,0 +496,0 @@ set deployFiles (value) { |
@@ -62,11 +62,191 @@ 'use strict'; | ||
/** | ||
* Filter the deploy files based on the given options. | ||
* | ||
* @param {Array} deployFiles Files scheduled for deploy | ||
* @param {Content} content Content object | ||
* @param {Object} options Options | ||
* @returns {Object} Filtered deploy files and the count of explicitly requested deploy files. | ||
*/ | ||
function filteredDeployFiles (deployFiles, content, options) { | ||
// filter the current deployFiles via the working set | ||
if (options.workingSet.valid) { | ||
deployFiles = deployFiles.filter(function (file) { | ||
return options.workingSet.matchesPath(file); | ||
}); | ||
} | ||
if (options.treatUnmodifiedAsModified) { | ||
logger.log(`${deployFiles.length} modified, unmodified, or added files are scheduled for deploy`); | ||
} else { | ||
logger.log(`${deployFiles.length} modified or added files are scheduled for deploy based on delta detection`); | ||
} | ||
const explicitDeployFiles = new Set(); | ||
let optionsDeployCount = 0; | ||
/* | ||
* add explicit deploy set, but filter it via the working set | ||
* Since deploy files will be filtered in "Handle client files", subtract up front the files we know will be removed. | ||
*/ | ||
options.deploy.forEachFile(function (p) { | ||
if (options.workingSet.matchesPath(p)) { | ||
explicitDeployFiles.add(p); | ||
if (utils.isGrantorFile(p)) { | ||
optionsDeployCount--; | ||
} | ||
} | ||
}); | ||
// add files defined by file pattern | ||
if (options.deploy) { | ||
options.deploy.filter_by_regex(content.deployFiles.map((item) => item[0])).forEach((file) => explicitDeployFiles.add(file)); | ||
} | ||
explicitDeployFiles.forEach((file) => { | ||
if (!options.excludeFilter.matchesPath(file)) { | ||
deployFiles.push(file); | ||
optionsDeployCount++; | ||
} | ||
}); | ||
return {deployFiles, optionsDeployCount}; | ||
} | ||
/** | ||
* Filter the undeploy files based on the given options. | ||
* | ||
* @param {Array} undeployFiles Files scheduled for undeploy | ||
* @param {Object} options Options | ||
* @returns {Object} Filtered undeploy files and the count of explicitly requested undeploy files. | ||
*/ | ||
function filteredUndeployFiles (undeployFiles, options) { | ||
// filter the undeploy set by the include-filter, because deleted files are not considered during the file walk | ||
if (options.includeFilter.valid) { | ||
undeployFiles = undeployFiles.filter(function (file) { | ||
return options.includeFilter.matchesPath(file); | ||
}); | ||
} | ||
undeployFiles = undeployFiles.filter(function (file) { | ||
return options.workingSet.matchesPath(file); | ||
}); | ||
logger.log(`${undeployFiles.length} deleted files are scheduled for undeploy based on delta detection (filtered by undeploy allowlist)`); | ||
const explicitUndeployFiles = new Set(); | ||
// add explicit undeploy set, but filter it via the working set | ||
options.undeploy.forEachFile(function (p) { | ||
if (options.workingSet.matchesPath(p)) { | ||
explicitUndeployFiles.add(p); | ||
} | ||
}); | ||
let optionsUndeployCount = 0; | ||
explicitUndeployFiles.forEach((file) => { | ||
if (!options.excludeFilter.matchesPath(file)) { | ||
undeployFiles.push(file); | ||
optionsUndeployCount++; | ||
} | ||
}); | ||
return {undeployFiles, optionsUndeployCount}; | ||
} | ||
/** | ||
* Prepare the sync. | ||
* | ||
* @param {any} result Result of a listDeployed call. | ||
* @param {Content} content Content object | ||
* @returns {Object} The addedLocalFiles, deletedLocalFiles, and unmodifiedModifiedLocalFiles arrays. | ||
*/ | ||
function prepareSync (result, content) { | ||
let deployedFilesSystemFiles = []; | ||
const deletedLocalFiles = []; | ||
const unmodifiedModifiedLocalFiles = []; | ||
const addedLocalFiles = []; | ||
const deployFiles = content.deployFiles.map((fileOrFolder) => { | ||
return fileOrFolder[0]; | ||
}); | ||
deployedFilesSystemFiles = result.map(function (item) { | ||
return item.path; | ||
}); | ||
deployedFilesSystemFiles.forEach((file) => { | ||
// skip folders, as deployedFilesSystemFiles also contains folders, e.g. src/, cfg/ | ||
if (!file.endsWith('/')) { | ||
if (deployFiles.includes(file)) { | ||
unmodifiedModifiedLocalFiles.push(file); | ||
} else { | ||
deletedLocalFiles.push(file); | ||
} | ||
} | ||
}); | ||
deployFiles.forEach((file) => { | ||
if (!deployedFilesSystemFiles.includes(file)) { | ||
addedLocalFiles.push(file); | ||
} | ||
}); | ||
return { | ||
addedLocalFiles, | ||
deletedLocalFiles, | ||
unmodifiedModifiedLocalFiles | ||
}; | ||
} | ||
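A hedged usage sketch for `prepareSync`; the file names are invented, and the input shapes (items with a `path` property from `listDeployed`, `content.deployFiles` as `[path, content]` tuples) are taken from the code above:

```js
const { prepareSync } = require('./hdi_utils.js');

// what listDeployed reports as already present in the container (folders end with '/')
const deployed = [
  { path: 'src/' },
  { path: 'src/my_view.hdbview' },      // still exists locally -> unmodified or modified
  { path: 'src/old_table.hdbtable' }    // no longer exists locally -> deleted
];

// what the local file walk collected
const content = {
  deployFiles: [
    ['src/my_view.hdbview', null],
    ['src/new_proc.hdbprocedure', null] // not yet in the container -> added
  ]
};

const { addedLocalFiles, deletedLocalFiles, unmodifiedModifiedLocalFiles } = prepareSync(deployed, content);
// addedLocalFiles              -> ['src/new_proc.hdbprocedure']
// deletedLocalFiles            -> ['src/old_table.hdbtable']
// unmodifiedModifiedLocalFiles -> ['src/my_view.hdbview']
```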
/** | ||
* Prepare the optimise make. | ||
* | ||
* @param {Array} addedFiles added files | ||
* @param {Array} deletedFiles deleted files | ||
* @param {Array} modifiedFiles modified files | ||
* @param {Array} unmodifiedFiles unmodified files | ||
* @param {Object} options Options | ||
* @param {Content} content Content object | ||
* @returns {Object} Deploy and Undeploy files. | ||
*/ | ||
function prepareOptimiseMake (addedFiles, deletedFiles, modifiedFiles, unmodifiedFiles, options, content) { | ||
let deployFiles; | ||
let undeployFiles; | ||
deployFiles = addedFiles.concat(modifiedFiles).filter((item) => !options.excludeFilter.matchesPath(item)); | ||
if (options.treatUnmodifiedAsModified) { | ||
// schedule all locally collected files for deploy; this maps to Added, Modified, or Unmodified | ||
deployFiles = deployFiles.concat(unmodifiedFiles).filter((item) => !options.excludeFilter.matchesPath(item)); | ||
} | ||
undeployFiles = deletedFiles.filter((item) => !options.excludeFilter.matchesPath(item)); | ||
logger.log('added files:', JSON.stringify(addedFiles, null, 2)); | ||
logger.log('modified files:', JSON.stringify(modifiedFiles, null, 2)); | ||
if (options.treatUnmodifiedAsModified) { | ||
logger.log('treated as modified files:', JSON.stringify(unmodifiedFiles, null, 2)); | ||
} | ||
// filter the undeploy set based on the undeploy.json file | ||
if (undeployFiles.length && !options.autoUndeploy) { | ||
undeployFiles = filterUndeploy(undeployFiles, options); | ||
} | ||
logger.log('deleted files:', JSON.stringify(undeployFiles, null, 2)); | ||
const deployFilesAndDeployCount = filteredDeployFiles(deployFiles, content, options); | ||
const undeployFilesAndUndeployCount = filteredUndeployFiles(undeployFiles, options); | ||
deployFiles = handle_client_files(deployFilesAndDeployCount.deployFiles); | ||
undeployFiles = undeployFilesAndUndeployCount.undeployFiles; | ||
// undeployFiles = handle_client_files(undeployFiles); | ||
logger.log(`${deployFilesAndDeployCount.optionsDeployCount} files are scheduled for deploy based on explicit specification`); | ||
logger.log(`${undeployFilesAndUndeployCount.optionsUndeployCount} files are scheduled for undeploy based on explicit specification`); | ||
return { | ||
deployFiles, | ||
undeployFiles | ||
}; | ||
} | ||
function prepareMake (result, options, content) { | ||
@@ -126,78 +306,12 @@ let deployFiles; | ||
// filter the undeploy set by the include-filter, because deleted files are not considered during the file walk | ||
if (options.includeFilter.valid) { | ||
undeployFiles = undeployFiles.filter(function (file) { | ||
return options.includeFilter.matchesPath(file); | ||
}); | ||
} | ||
const deployFilesAndDeployCount = filteredDeployFiles(deployFiles, content, options); | ||
const undeployFilesAndUndeployCount = filteredUndeployFiles(undeployFiles, options); | ||
// filter current deployFiles and undeployFiles via the working set | ||
if (options.workingSet.valid) { | ||
deployFiles = deployFiles.filter(function (file) { | ||
return options.workingSet.matchesPath(file); | ||
}); | ||
undeployFiles = undeployFiles.filter(function (file) { | ||
return options.workingSet.matchesPath(file); | ||
}); | ||
} | ||
if (options.treatUnmodifiedAsModified) { | ||
logger.log(`${deployFiles.length} modified, unmodified, or added files are scheduled for deploy`); | ||
} else { | ||
logger.log(`${deployFiles.length} modified or added files are scheduled for deploy based on delta detection`); | ||
} | ||
logger.log(`${undeployFiles.length} deleted files are scheduled for undeploy based on delta detection (filtered by undeploy allowlist)`); | ||
const explicit_deploy_files = new Set(); | ||
let options_deploy_count = 0; | ||
/* | ||
* add explicit deploy set, but filter it via the working set | ||
* Since deploy files will be filtered in "Handle client files", subtract up front the files we know will be removed. | ||
*/ | ||
options.deploy.forEachFile(function (p) { | ||
if (options.workingSet.matchesPath(p)) { | ||
explicit_deploy_files.add(p); | ||
if (utils.isGrantorFile(p)) { | ||
options_deploy_count--; | ||
} | ||
} | ||
}); | ||
// add files defined by file pattern | ||
if (options.deploy) { | ||
options.deploy.filter_by_regex(content.deployFiles.map((item) => item[0])).forEach((file) => explicit_deploy_files.add(file)); | ||
} | ||
explicit_deploy_files.forEach((file) => { | ||
if (!options.excludeFilter.matchesPath(file)) { | ||
deployFiles.push(file); | ||
options_deploy_count++; | ||
} | ||
}); | ||
const explicit_undeploy_files = new Set(); | ||
// add explicit undeploy set, but filter it via the working set | ||
options.undeploy.forEachFile(function (p) { | ||
if (options.workingSet.matchesPath(p)) { | ||
explicit_undeploy_files.add(p); | ||
} | ||
}); | ||
let options_undeploy_count = 0; | ||
explicit_undeploy_files.forEach((file) => { | ||
if (!options.excludeFilter.matchesPath(file)) { | ||
undeployFiles.push(file); | ||
options_undeploy_count++; | ||
} | ||
}); | ||
deployFiles = handle_client_files(deployFiles); | ||
deployFiles = handle_client_files(deployFilesAndDeployCount.deployFiles); | ||
undeployFiles = undeployFilesAndUndeployCount.undeployFiles; | ||
// undeployFiles = handle_client_files(undeployFiles); | ||
logger.log(`${options_deploy_count} files are scheduled for deploy based on explicit specification`); | ||
logger.log(`${options_undeploy_count} files are scheduled for undeploy based on explicit specification`); | ||
logger.log(`${deployFilesAndDeployCount.optionsDeployCount} files are scheduled for deploy based on explicit specification`); | ||
logger.log(`${undeployFilesAndUndeployCount.optionsUndeployCount} files are scheduled for undeploy based on explicit specification`); | ||
@@ -227,2 +341,2 @@ return { | ||
module.exports = {prepareMake, getDefaultPermissionSet}; | ||
module.exports = {prepareSync, prepareOptimiseMake, prepareMake, getDefaultPermissionSet}; |
@@ -25,4 +25,4 @@ 'use strict'; | ||
...deployTask.lock(), | ||
...deployTask.synchronize(), | ||
...deployTask.make(), | ||
...(options.optimiseFileUpload ? deployTask.optimiseSynchronize() : deployTask.synchronize()), | ||
...(options.optimiseFileUpload ? deployTask.optimiseMake() : deployTask.make()), | ||
...deployTask.deploy(), | ||
@@ -32,2 +32,3 @@ ...deployTask.unlock() | ||
async.series(tasks, function (err, results) { | ||
@@ -34,0 +35,0 @@ if (err) { |
@@ -38,2 +38,3 @@ 'use strict'; | ||
live_messages: true, | ||
optimiseFileUpload: false, | ||
writeTimeout: 15 * 60 * 1000, | ||
@@ -40,0 +41,0 @@ deleteTimeout: 15 * 60 * 1000, |
lib/tasks.js
@@ -0,1 +1,2 @@ | ||
/* eslint-disable no-unused-vars */ | ||
'use strict'; | ||
@@ -5,7 +6,8 @@ | ||
const hana_helper = require('./hana-helper.js'); | ||
const {prepareMake, getDefaultPermissionSet} = require('./hdi_utils.js'); | ||
const {callbackTimeout} = require('./utils'); | ||
const {prepareSync, prepareOptimiseMake, prepareMake, getDefaultPermissionSet} = require('./hdi_utils.js'); | ||
const {checksum, callbackTimeout} = require('./utils'); | ||
const Make_Task = require('./make'); | ||
const connections = require('./connections'); | ||
const fs = require('fs'); | ||
const path = require('path'); | ||
function hdi_callback (logger, callback, calltype='HDI') { | ||
@@ -27,2 +29,83 @@ return function (error, result) { | ||
function getModifiedAndUnmodifiedFiles (modifiedAndUnmodifiedFiles, alreadyDeployedContent, deployContent, callback) { | ||
let content; | ||
const modifiedFiles = []; | ||
const unmodifiedFiles = []; | ||
modifiedAndUnmodifiedFiles.forEach((modifiedAndUnmodifiedFile) => { | ||
let emptyStream = true; | ||
const file = alreadyDeployedContent.find(file => file.path === modifiedAndUnmodifiedFile); | ||
const deployFileContent = deployContent.find(file => file[0] === modifiedAndUnmodifiedFile); | ||
if (file) { | ||
if (Buffer.isBuffer(deployFileContent[1])) { | ||
content = deployFileContent[1]; | ||
emptyStream = false; | ||
} else if (deployFileContent[1].readableBuffer.head !== null && deployFileContent[1].readableBuffer.head.data !== null) { | ||
content = deployFileContent[1].readableBuffer.head.data; | ||
emptyStream = false; | ||
} | ||
let sum; | ||
try { | ||
if (!emptyStream) { | ||
sum = checksum(content); | ||
} else { | ||
sum = checksum(fs.readFileSync(deployFileContent[1].path, 'utf8')); // with Node 16, the deploy content does not carry data in the read stream for a few files. | ||
// the read stream's head is null in that case, so the SHA256 is calculated from the local file instead of deployFileContent. | ||
} | ||
} catch (err) { | ||
return callback(err); | ||
} | ||
if (sum === file.sha256) { | ||
unmodifiedFiles.push(modifiedAndUnmodifiedFile); | ||
} else { | ||
modifiedFiles.push(modifiedAndUnmodifiedFile); | ||
} | ||
} | ||
}); | ||
return callback(null, modifiedFiles, unmodifiedFiles); | ||
} | ||
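The check above boils down to hashing the local content and comparing it against the `sha256` value that `listDeployed` reports for the server-side copy; a minimal self-contained sketch of that comparison (function name hypothetical):

```js
const crypto = require('crypto');

// true when the local content hashes to the same SHA256 the server reported
function isUnmodified (localContent, deployedSha256) {
  const localSha256 = crypto.createHash('sha256').update(localContent).digest('hex');
  return localSha256 === deployedSha256;
}

// e.g. isUnmodified(fs.readFileSync('src/my_view.hdbview'), deployedFile.sha256)
```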
function getParameters (options, logger) { | ||
// copy parameters from options | ||
const deployParameters = Object.entries(options.parameters).map(([k, v]) => new Parameter(k.toUpperCase(), v)); | ||
/** | ||
* Parse the given raw key/value parameter pair into a path parameter. | ||
* | ||
* @param {Array} parameter The [raw_key, raw_value] pair | ||
* @returns {PathParameter|string} Parsed path parameter, or '' if the path or key cannot be extracted | ||
*/ | ||
const turn_into_path_paramter = (parameter) => { | ||
const [raw_key, raw_value] = parameter; | ||
const [path, key] = raw_key.split(':'); | ||
if (!path) { | ||
logger.warn(`Skipping parameter ${raw_key}. Could not extract the path from the given path parameter.`); | ||
return ''; | ||
} else if (!key) { | ||
logger.warn(`Skipping parameter ${raw_key}. Could not extract the key from the given path parameter.`); | ||
return ''; | ||
} | ||
const value = raw_value; | ||
return new PathParameter(path, key, value); | ||
}; | ||
// copy path-parameters from options | ||
const pathParameters = Object.entries(options.path_parameters).map(turn_into_path_paramter).filter(p => p !== ''); | ||
// add explicit parameters | ||
if (options.treatWarningsAsErrors) { | ||
deployParameters.push(new Parameter('TREAT_WARNINGS_AS_ERRORS', 'TRUE')); | ||
} | ||
if (options.simulateMake) { | ||
deployParameters.push(new Parameter('SIMULATE_MAKE', 'TRUE')); | ||
} | ||
if (options.migrationTableDevMode) { | ||
deployParameters.push(new Parameter('com.sap.hana.di.table.migration/development_mode'.toUpperCase(), 'TRUE')); | ||
} | ||
if (options.validateExternalDependencies) { | ||
deployParameters.push(new Parameter('VALIDATE_EXTERNAL_DEPENDENCIES', 'TRUE')); | ||
} | ||
return {deployParameters, pathParameters}; | ||
} | ||
const messages_hdi = require('./messages.hdi.js'); | ||
@@ -161,4 +244,8 @@ const { | ||
this.serverTopDirs = content.serverTopDirs || []; | ||
this._deployFiles = null; | ||
this._undeployFiles = null; | ||
this._deployFiles = []; | ||
this._undeployFiles = []; | ||
this._deletedFiles = []; | ||
this._addedFiles = []; | ||
this._modifiedFiles = []; | ||
this._unmodifiedFiles = []; | ||
this.hdiCreds = hdiCreds; | ||
@@ -194,2 +281,102 @@ connections.push({client: this.container.connection, file: __filename }); | ||
/** | ||
* setter for delete files. | ||
* @param {Array} value List of files to delete. | ||
* | ||
* @memberOf DeployTask | ||
*/ | ||
set deleteFiles (value) { | ||
this._deleteFiles = value; | ||
} | ||
/** | ||
* Return the deleteFiles, but only if present. Otherwise throw an error. | ||
* | ||
* @readonly | ||
* | ||
* @memberOf DeployTask | ||
*/ | ||
get deleteFiles () { | ||
if (!this._deleteFiles) { | ||
throw new Error('Delete files have to be initialized by running the sync task.'); | ||
} else { | ||
return this._deleteFiles; | ||
} | ||
} | ||
/** | ||
* setter for added files. | ||
* @param {Array} value List of added files. | ||
* | ||
* @memberOf DeployTask | ||
*/ | ||
set addedFiles (value) { | ||
this._addedFiles = value; | ||
} | ||
/** | ||
* Return the addedFiles, but only if present. Otherwise throw an error. | ||
* | ||
* @readonly | ||
* | ||
* @memberOf DeployTask | ||
*/ | ||
get addedFiles () { | ||
if (!this._addedFiles) { | ||
throw new Error('Added files have to be initialized by running the sync task.'); | ||
} else { | ||
return this._addedFiles; | ||
} | ||
} | ||
/** | ||
* setter for modified files. | ||
* @param {Array} value List of modified files. | ||
* | ||
* @memberOf DeployTask | ||
*/ | ||
set modifiedFiles (value) { | ||
this._modifiedFiles = value; | ||
} | ||
/** | ||
* Return the modifiedFiles, but only if present. Otherwise throw an error. | ||
* | ||
* @readonly | ||
* | ||
* @memberOf DeployTask | ||
*/ | ||
get modifiedFiles () { | ||
if (!this._modifiedFiles) { | ||
throw new Error('Modified files have to be initialized by running the sync task.'); | ||
} else { | ||
return this._modifiedFiles; | ||
} | ||
} | ||
/** | ||
* setter for unmodified files. | ||
* @param {Array} value List of unmodified files. | ||
* | ||
* @memberOf DeployTask | ||
*/ | ||
set unmodifiedFiles (value) { | ||
this._unmodifiedFiles = value; | ||
} | ||
/** | ||
* Return the unmodifiedFiles, but only if present. Otherwise throw an error. | ||
* | ||
* @readonly | ||
* | ||
* @memberOf DeployTask | ||
*/ | ||
get unmodifiedFiles () { | ||
if (!this._unmodifiedFiles) { | ||
throw new Error('Unmodified files have to be initialized by running the sync task.'); | ||
} else { | ||
return this._unmodifiedFiles; | ||
} | ||
} | ||
/** | ||
* setter for undeploy files. | ||
@@ -365,2 +552,125 @@ * @param {Array} value List of files to undeploy. | ||
*/ | ||
optimiseSynchronize () { | ||
const tasks = []; | ||
tasks.push(this.logger.logfnTimerInit('synchronizing-files', 'Synchronizing files with the container "%s"...', this.schema)); | ||
const folders = this.content.dirs.map(folder => { | ||
if (folder.endsWith('/')) | ||
return folder; | ||
else | ||
return `${folder}/`; | ||
}); | ||
let updatedLocalFileSystem = [].concat.apply([], folders); | ||
const updatedDeployContent = []; | ||
const isFile = function (path) { | ||
return !path.endsWith('/'); | ||
}; | ||
let deletedFiles; | ||
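// compare the local deploy set against the deployed state and compute the SHA256-based delta | ||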
tasks.push(function (callback) { | ||
async.waterfall([ | ||
(innerCB) => this.container.listDeployed([new Folder('/')], [new Parameter('RECURSIVE', 'TRUE')], hdi_callback(this.logger, innerCB, 'listDeployed')), | ||
(result, innerCB) => { | ||
const {addedLocalFiles, deletedLocalFiles, unmodifiedModifiedLocalFiles} = prepareSync(result.results, this.content); | ||
this._addedFiles = addedLocalFiles; | ||
updatedLocalFileSystem = updatedLocalFileSystem.concat(addedLocalFiles); | ||
this._deleteFiles = deletedLocalFiles; | ||
deletedFiles = deletedLocalFiles.map((a) => { | ||
if (isFile(a)) { | ||
return new File(a); | ||
} else { | ||
return new Folder(a); | ||
} | ||
}); | ||
getModifiedAndUnmodifiedFiles(unmodifiedModifiedLocalFiles, result.results, this.content.deployContent, innerCB); | ||
}, | ||
(modified, unmodified, innerCB) => { | ||
this._modifiedFiles = modified; | ||
this._unmodifiedFiles = unmodified; | ||
if (this.options.treatUnmodifiedAsModified) { | ||
updatedLocalFileSystem = updatedLocalFileSystem.concat(this._unmodifiedFiles); | ||
} | ||
updatedLocalFileSystem = updatedLocalFileSystem.concat(Object.keys(this.options.deploy.files)); | ||
updatedLocalFileSystem = updatedLocalFileSystem.concat(Object.keys(this.options.includeFilter.files)); | ||
updatedLocalFileSystem = updatedLocalFileSystem.concat(this._modifiedFiles); | ||
updatedLocalFileSystem = [...new Set(updatedLocalFileSystem)]; // remove duplicates | ||
updatedLocalFileSystem.forEach((file) => { | ||
this.content.deployContent.every((content) => { | ||
if (content.includes(file)) { | ||
if (isFile(content[0])) { | ||
updatedDeployContent.push(new FileWithContent(content[0], content[1])); | ||
} else { | ||
updatedDeployContent.push(new FolderWithContent(content[0], content[1])); | ||
} | ||
return false; | ||
} | ||
return true; | ||
}); | ||
}); | ||
innerCB(null, updatedDeployContent); | ||
} | ||
], callback); | ||
}.bind(this)); | ||
tasks.push(this.logger.logfnTimerInit('deleting-files', ' Deleting files...')); | ||
const parameters = [new Parameter('IGNORE_NON_EXISTING_PATHS', 'TRUE')]; | ||
tasks.push((callback) => callbackTimeout( | ||
(cb) => this.container.delete(deletedFiles, parameters, cb), | ||
hdi_callback(this.logger, callback, 'DELETE'), | ||
this.options.deleteTimeout, | ||
'DELETE-timeout') | ||
); | ||
tasks.push(this.logger.logfnTimerInit('deleting-files', ' Deleting files... ok')); | ||
tasks.push(this.logger.logfnTimerInit('writing-files', ' Writing files...')); | ||
tasks.push((callback) => callbackTimeout((cb) => | ||
this.container.write(updatedDeployContent, null, cb), | ||
hdi_callback(this.logger, callback, 'WRITE'), | ||
this.options.writeTimeout, | ||
'WRITE-timeout') | ||
); | ||
tasks.push(this.logger.logfnTimerInit('writing-files', ' Writing files... ok')); | ||
tasks.push(this.logger.logfnTimerDelta('synchronizing-files', 'Synchronizing files with the container "%s"... ok', this.schema)); | ||
return tasks; | ||
} | ||
/** | ||
* Prepare files for (un)deployment and stage them. | ||
* | ||
* @returns {Function[]} Returns an array of functions for chaining via async. | ||
* @memberOf DeployTask | ||
*/ | ||
optimiseMake () { | ||
const tasks = []; | ||
const {deployParameters, pathParameters} = getParameters(this.options, this.logger); | ||
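// stage the delta-derived deploy/undeploy sets and run the HDI make | ||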
tasks.push(function (callback) { | ||
async.waterfall([ | ||
(innerCB) => { | ||
const {deployFiles, undeployFiles} = prepareOptimiseMake(this._addedFiles, this._deleteFiles, this._modifiedFiles, this._unmodifiedFiles, this.options, this.content); | ||
this._deployFiles = deployFiles.map((path) => new File(path)); | ||
this._undeployFiles = undeployFiles.map((path) => new File(path)); | ||
this.logger.logTimerInit('deploying-files', 'Deploying to the container "%s"...', this.schema); | ||
const make_task = new Make_Task(this.connection_id, this.container, this.hdiCreds, this.schema); | ||
make_task.make(this.deployFiles, this.undeployFiles, pathParameters, deployParameters, innerCB); | ||
}, | ||
({results, last_row_id}, innerCB) => { | ||
logfn(results.messages.filter(message => message.ROW_ID > last_row_id), this.logger); | ||
if (results.rc < 0) { | ||
return innerCB(new Error('HDI make failed'), results); | ||
} else { | ||
return innerCB(null, results); | ||
} | ||
} | ||
], callback); | ||
}.bind(this)); | ||
return tasks; | ||
} | ||
/** | ||
* Synchronize files with the container. | ||
* Deletes folders from the container file system. Basically a preparation to then add files by running make + deploy. | ||
* | ||
* @returns {Function[]} Returns an array of functions for chaining via async. | ||
* @memberOf DeployTask | ||
*/ | ||
synchronize () { | ||
@@ -425,42 +735,4 @@ const tasks = []; | ||
// copy parameters from options | ||
const deployParameters = Object.entries(this.options.parameters).map(([k, v]) => new Parameter(k.toUpperCase(), v)); | ||
const {deployParameters, pathParameters} = getParameters(this.options, this.logger); | ||
/** | ||
* Parse the given raw key value parameter. | ||
* | ||
* @param {any} raw_paramter_key Input key | ||
* @param {any} raw_parameter_value Input value | ||
* @returns {PathParameter} Parsed path parameter | ||
*/ | ||
const turn_into_path_paramter = (parameter) => { | ||
const [raw_key, raw_value] = parameter; | ||
const [path, key] = raw_key.split(':'); | ||
if (!path) { | ||
this.logger.warn(`Skipping parameter ${raw_key}. Could not extract the path from the given path parameter.`); | ||
return ''; | ||
} else if (!key) { | ||
this.logger.warn(`Skipping parameter ${raw_key}. Could not extract the key from the given path parameter.`); | ||
return ''; | ||
} | ||
const value = raw_value; | ||
return new PathParameter(path, key, value); | ||
}; | ||
// copy path-parameters from options | ||
const pathParameters = Object.entries(this.options.path_parameters).map(turn_into_path_paramter).filter(p => p !== ''); | ||
// add explicit parameters | ||
if (this.options.treatWarningsAsErrors) { | ||
deployParameters.push(new Parameter('TREAT_WARNINGS_AS_ERRORS', 'TRUE')); | ||
} | ||
if (this.options.simulateMake) { | ||
deployParameters.push(new Parameter('SIMULATE_MAKE', 'TRUE')); | ||
} | ||
if (this.options.migrationTableDevMode) { | ||
deployParameters.push(new Parameter('com.sap.hana.di.table.migration/development_mode'.toUpperCase(), 'TRUE')); | ||
} | ||
if (this.options.validateExternalDependencies) { | ||
deployParameters.push(new Parameter('VALIDATE_EXTERNAL_DEPENDENCIES', 'TRUE')); | ||
} | ||
tasks.push(function (callback) { | ||
@@ -490,3 +762,2 @@ async.waterfall([ | ||
} | ||
/** | ||
@@ -493,0 +764,0 @@ * (un)deploy the staged files. |
@@ -347,3 +347,21 @@ 'use strict'; | ||
const crypto = require('crypto'); | ||
/** | ||
* Calculate the SHA256 checksum of the given file content. | ||
* | ||
* @param {Buffer|String} fileContent Content of a file. | ||
* @returns {String} Hex-encoded SHA256 checksum | ||
*/ | ||
function checksum (fileContent) { | ||
let hash; | ||
if (fileContent) { | ||
hash = crypto.createHash('sha256'); | ||
hash.update(fileContent); | ||
} | ||
return hash.digest('hex'); | ||
} | ||
exports.checksum = checksum; | ||
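A hedged usage sketch for the new helper; the file path is invented, and the relative require path assumes the caller sits next to lib/utils.js, as lib/tasks.js above does:

```js
const fs = require('fs');
const { checksum } = require('./utils');

// hash a local artifact the same way the optimised upload path does
// note: checksum() expects non-empty content (see the guard above)
const sha256 = checksum(fs.readFileSync('src/my_view.hdbview', 'utf8'));
console.log(sha256); // 64-character hex digest
```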
/** | ||
* Run the given $fn, expecting to call the $callback at the end. If execution is not done in $timeout milliseconds, | ||
@@ -350,0 +368,0 @@ * call the callback with an error - Timeout of $timeout reached. |
{ | ||
"name": "@sap/hdi-deploy", | ||
"version": "4.4.1", | ||
"version": "4.5.0", | ||
"lockfileVersion": 1, | ||
@@ -8,3 +8,3 @@ "requires": true, | ||
"@sap/hana-client": { | ||
"version": "2.13.13", | ||
"version": "2.13.22", | ||
"requires": { | ||
@@ -26,3 +26,3 @@ "debug": "3.1.0" | ||
"@sap/hdi": { | ||
"version": "4.3.1", | ||
"version": "4.4.0", | ||
"requires": { | ||
@@ -149,3 +149,3 @@ "async": "3.2.3" | ||
"uglify-js": { | ||
"version": "3.16.1", | ||
"version": "3.17.1", | ||
"optional": true | ||
@@ -152,0 +152,0 @@ }, |
{ | ||
"name": "@sap/hdi-deploy", | ||
"description": "HDI content deployment", | ||
"version": "4.4.1", | ||
"version": "4.5.0", | ||
"license": "See LICENSE file", | ||
@@ -9,4 +9,4 @@ "repository": {}, | ||
"dependencies": { | ||
"@sap/hana-client": "2.13.13", | ||
"@sap/hdi": "4.3.1", | ||
"@sap/hana-client": "2.13.22", | ||
"@sap/hdi": "4.4.0", | ||
"@sap/xsenv": "3.3.2", | ||
@@ -20,3 +20,3 @@ "async": "3.2.3", | ||
"engines": { | ||
"node": "^12.0.0 || ^14.0.0 || ^16.0.0" | ||
"node": "^12.0.0 || ^14.0.0 || ^16.0.0 || ^18.0.0" | ||
}, | ||
@@ -23,0 +23,0 @@ "files": [ |
@@ -104,3 +104,3 @@ @sap/hdi-deploy | ||
"dependencies": { | ||
"@sap/hdi-deploy": "4.4.1" | ||
"@sap/hdi-deploy": "4.5.0" | ||
}, | ||
@@ -547,3 +547,3 @@ "scripts": { | ||
"dependencies": { | ||
"@sap/hdi-deploy": "4.4.1", | ||
"@sap/hdi-deploy": "4.5.0", | ||
"module1": "1.3.1", | ||
@@ -1044,2 +1044,3 @@ "module2": "1.7.0" | ||
- `--exclude-filter [<path> ..]`: exclude the given paths during: file walk, delta detection and when explicitly scheduled via --(un)deploy | ||
- `--[no-]optimise-file-upload`: [don't] perform delta detection via local SHA256 calculation instead of DELETE and WRITE calls; has no positive effect when used together with `--treat-unmodified-as-modified` (see the invocation sketch below) | ||
- `--[no-]treat-wrong-ownership-as-errors`: [don't] treat wrong ownership of objects as errors, not enabled by default | ||
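A hedged invocation sketch for `--optimise-file-upload`; the `deploy.js` entry point and the use of a Node wrapper script are assumptions based on how the deployer is normally invoked, not taken from this diff:

```js
// Hedged sketch: call the deployer with the new flag from a Node script.
const { spawnSync } = require('child_process');

spawnSync('node', ['node_modules/@sap/hdi-deploy/deploy.js', '--optimise-file-upload'], {
  stdio: 'inherit'
});
```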
@@ -1081,3 +1082,3 @@ - `--[no-]migrationtable-development-mode`: [don't] pass the development mode flag for migration tables to HDI, if the parameter is supported by the server, not enabled by default | ||
"name": "@sap/hdi-deploy", | ||
"version": "4.4.1", | ||
"version": "4.5.0", | ||
"features": { | ||
@@ -1084,0 +1085,0 @@ "info": 2, |
+ Added @sap/hana-client@2.13.22 (transitive)
+ Added @sap/hdi@4.4.0 (transitive)
- Removed @sap/hana-client@2.13.13 (transitive)
- Removed @sap/hdi@4.3.1 (transitive)
Updated @sap/hana-client@2.13.22
Updated @sap/hdi@4.4.0