Socket
Socket
Sign in · Demo · Install

@sap/cds-dk

Package Overview
Dependencies
Maintainers
1
Versions
146
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@sap/cds-dk - npm Package Compare versions

Comparing version 7.2.0 to 7.3.0

lib/build/buildPlugin.js

4

bin/activate.js
module.exports = Object.assign(activate, {
options: ['--subdomain', '--passcode', '--username', '--clientid', '--to', '--sync'],
flags: ['--undeploy', '--wsdl'],
shortcuts: ['-s', '-p', '-u', '-c'],
shortcuts: ['-s', '-p', '-u', '-c', '-2'],
help: `

@@ -58,3 +58,3 @@ # SYNOPSIS

(@sap/cds-mtxs).
https://cap.cloud.sap/docs/guides/multitenancy/mtxs for information on

@@ -61,0 +61,0 @@ @sap/cds-mtxs.

module.exports = Object.assign(build, {
options: ['--project', '--src', '--dest', '--for', '--workspaces', '--use', '--opts', '--log-level', '--options-model', '--clean'],
shortcuts: ['-in', '-s', '-o', '-4', '-ws'],
options: ['--project', '--src', '--dest', '--for', '--use', '--opts', '--log-level', '--options-model'],
shortcuts: ['-in', '-s', '-o', '-4'],
flags: ['--ws', '--clean'],
help: `

@@ -91,2 +92,5 @@ # SYNOPSIS

const cds = require('../lib/cds')
// plugins are loaded based on cds.root path
cds.root = path.resolve(process.cwd(), project || options.project || '.')

@@ -96,3 +100,2 @@ // IMPORTANT: call plugins before subsequent access to cds.env

cds.root = path.resolve(process.cwd(), project || options.project || '.')
cds.env = cds.env.for('cds', cds.root);

@@ -99,0 +102,0 @@ delete options.project // avoid that relative project paths are resolved twice

@@ -117,3 +117,3 @@ const path = require("path");

*--odata-version* 4.0|4.01
*--odata-version* 4.0|4.01

@@ -130,3 +130,3 @@ Adds the OData version's functionality of the input CDS/CSN file to the generated OpenAPI document.

and _version_ should be provided as preset.
# EXAMPLES

@@ -141,3 +141,3 @@

function compile_all (root='.') {
async function compile_all (root='.') {

@@ -147,2 +147,4 @@ const {exec} = require ('child_process')

await cds.plugins // ensure plugins that extend compile targets are loaded
exec(`find ${root} -name *.cds ! -path '*/node_modules/*'`, (_,stdout)=>{

@@ -163,3 +165,3 @@ const all = stdout.split('\n').slice(0,-1)

function compile (models, options={}) {
async function compile (models, options={}) {

@@ -171,2 +173,4 @@ if (options.all) return compile_all (models[0])

await cds.plugins // ensure plugins that extend compile targets are loaded
let model, src, _suffix; //> be filled in below

@@ -287,3 +291,3 @@ if (!options.as && !/,/.test(options.to)) options.as = 'str'

function suffix4 (x) { return x && ({
function suffix4 (x) { return x && x !== 'hdbtable' && ({
edmx: '.xml',

@@ -290,0 +294,0 @@ "edmx-v2": '.xml',

@@ -65,5 +65,2 @@ module.exports = Object.assign (env, {

// Ensure loading plugins before calling cds.env!
await cds.plugins
let env = cds.env

@@ -70,0 +67,0 @@ if (options["resolve-bindings"]) {

module.exports = Object.assign(extend, {
options: ['--directory', '--subdomain', '--passcode', '--username', '--clientid'],
flags: ['--wsdl', '--templates', '--force'],
options: ['--directory', '--subdomain', '--passcode', '--username', '--clientid', '--tagRule'],
flags: ['--wsdl', '--templates', '--force', '--download-migrated-projects'],
shortcuts: ['-d', '-s', '-p', '-u', '-c'],

@@ -53,2 +53,10 @@ help: `

*--download-migrated-projects*
To be used only after change to @sap/cds-mtxs. Allows to download migrated extension projects.
*--tagRule*
Rule used to split up migrated extension projects into separate projects (please check migration documentation for more).
# SEE ALSO

@@ -60,3 +68,3 @@

(@sap/cds-mtxs).
https://cap.cloud.sap/docs/guides/multitenancy/mtxs for information on

@@ -69,2 +77,16 @@ @sap/cds-mtxs.

async function extend ([url], options = {}) {
if (options['download-migrated-projects']) {
const [username, password] = options.username?.split(':') ?? [];
delete options.user;
await require('../lib/client/download_legacy').run({
...options,
url,
username,
password
});
return
}
if (options.force) {

@@ -71,0 +93,0 @@ if (! await require('../lib/client/helper/question').askBooleanQuestion('Using option --force will overwrite existing files. Continue (yN)? ', false, false)) {

module.exports = Object.assign(_import, {
_name: 'import',
options: ['--out', '--as', '--include-namespaces', '--from'],
options: ['--out', '--as', '--include-namespaces', '--from', '--config'],
flags: ['--dry', '--force', '--no-copy', '--no-save', '--keep-namespace'],
shortcuts: ['-o', '-as', '-ns', undefined, '-', '-f'],
shortcuts: ['-o', '-as', '-ns', undefined, '-c', '-', '-f'],
help: `

@@ -46,2 +46,5 @@ # SYNOPSIS

"asyncapi".
*-c | --config* <JSON Object>
Adds the provided JSON Object as custom configuration data to the package.json.

@@ -72,2 +75,3 @@ *Below options are valid for OData only*

cds import ~/Downloads/BookStore_AsyncAPI.json --from asyncapi
cds import ~/Downloads/API_BUSINESS_PARTNER.edmx -c {\\"[production]\\":{\\"credentials\\":{\\"destination\\":\\"UI3_noauth\\",\\"path\\":\\"/sap/cap/odata\\"}}}

@@ -100,3 +104,3 @@ `})

// reads the file version
options.inputFileVersion = apiOptions.odataVersion;
options.inputFileKind = apiOptions.odataVersion ? apiOptions.odataVersion : apiOptions.inputFileKind;

@@ -103,0 +107,0 @@ // postprocess

module.exports = Object.assign(push, {
options: ['--subdomain', '--passcode', '--user', '--client', '--to'],
options: ['--subdomain', '--passcode', '--user', '--client', '--to', '--sync'],
flags: [],
shortcuts: ['-s', '-p', '-u', '-c'],
shortcuts: ['-s', '-p', '-u', '-c', '-2'],
help: `

@@ -16,5 +16,5 @@ # SYNOPSIS

'https://user:pass@host'.
By default, the current working directory is used as project directory.
Unless a custom archive is given, the extension project will be

@@ -52,9 +52,13 @@ automatically built in order to update the generated extension archive. This

*--sync*
Use the synchronous server API to upload extensions.
# SEE ALSO
*cds pull* to get the CDS model of the SaaS app.
*cds login* for more information on authentication options.
*cds login* to save project settings and authentication data, simplifying

@@ -61,0 +65,0 @@ multiple runs of this command.

@@ -63,3 +63,3 @@ const watchOnlyOptions = ['--ext', '--livereload', '--open']

const fioriIncludes = /_?(change|variant)$/ // according to fiori: change,variant,ctrl_variant,ctrl_variant_change,ctrl_variant_management_change.
const ignore = RegExp(`app${sep}.*${sep}?webapp${sep}|node_modules|target|\\.cds-services\\.json$`)
const ignore = RegExp(`(node_modules\\${sep}|app(\\${sep}.+)?\\${sep}((webapp|dist|target)\\${sep}|\\.cds-services\\.json$|tsconfig\\.json$|.*\\.tsbuildinfo$))`)

@@ -66,0 +66,0 @@ async function watch ([cwd], {

@@ -8,3 +8,3 @@ const fs = require('fs')

const BuildTaskFactory = require('./buildTaskFactory')
const BuildTaskHandlerInternal = require('./provider/buildTaskHandlerInternal')
const InternalBuildPlugin = require('./provider/internalBuildPlugin')

@@ -44,8 +44,8 @@ const COMPILATION_ERROR = 'CompilationError'

// create build task handlers
const handlers = []
// create build plugins
const plugins = []
tasks.forEach((task) => {
if (task) {
const handler = this._createHandler(task)
handlers.push(handler)
const plugin = this._createPlugin(task)
plugins.push(plugin)
}

@@ -55,11 +55,11 @@ })

try {
await this._executePrepare(handlers)
await this._executeCleanBuildTasks(handlers)
await this._executePrepare(plugins)
await this._executeCleanBuildTasks(plugins)
// throwing Exception in case of compilation errors
const buildResult = await this._executeBuildTasks(handlers)
const buildResult = await this._executeBuildTasks(plugins)
await this._writeGenerationLog(handlers)
this._logBuildOutput(handlers)
this._logMessages(BuildTaskEngine._getHandlerMessages(handlers))
await this._writeGenerationLog(plugins)
this._logOutput(plugins)
this._logMessages(BuildTaskEngine._getMessages(plugins))
this._logTimer(startTime, Date.now())

@@ -69,3 +69,3 @@

} catch (error) {
this._logBuildOutput(handlers)
this._logOutput(plugins)
throw error

@@ -76,19 +76,19 @@ }

/**
* BuildTaskHandler#prepare has been deprecated and was never part of the public API.
* Currently only used by internal FioriBuildTaskHandler.
* BuildPlugin#prepare has been deprecated and was never part of the public API.
* Currently only used by internal FioriBuildPlugin.
* @deprecated
* @param {*} handlers
* @param {*} plugins
* @returns
*/
async _executePrepare(handlers) {
const handlerGroups = new Map()
async _executePrepare(plugins) {
const pluginGroups = new Map()
// group handlers by type
handlers.forEach(handler => {
handlerGroups.has(handler.task.for) ? handlerGroups.get(handler.task.for).push(handler) : handlerGroups.set(handler.task.for, [handler])
// group plugins by type
plugins.forEach(plugin => {
pluginGroups.has(plugin.task.for) ? pluginGroups.get(plugin.task.for).push(plugin) : pluginGroups.set(plugin.task.for, [plugin])
})
const promises = []
for (let handlerGroup of handlerGroups.values()) {
promises.push(this._doPrepare(handlerGroup))
for (let pluginGroup of pluginGroups.values()) {
promises.push(this._doPrepare(pluginGroup))
}

@@ -100,10 +100,10 @@ return Promise.all(promises)

* @deprecated
* @param {*} handlerGroup
* @param {*} pluginGroup
*/
async _doPrepare(handlerGroup) {
for (let handler of handlerGroup) {
async _doPrepare(pluginGroup) {
for (let plugin of pluginGroup) {
// prepare has been deprecated
if (handler instanceof BuildTaskHandlerInternal) {
this.logger._debug && this.logger.debug(`preparing, handler [${handler.constructor.name}], src [${relativePaths(cds.root, handler.task.src)}]`)
const result = await handler.prepare()
if (plugin instanceof InternalBuildPlugin) {
this.logger._debug && this.logger.debug(`preparing, plugin [${plugin.constructor.name}], src [${relativePaths(cds.root, plugin.task.src)}]`)
const result = await plugin.prepare()
if (result === false) {

@@ -116,3 +116,3 @@ break

async _executeCleanBuildTasks(handlers) {
async _executeCleanBuildTasks(plugins) {
if (this.options.clean) {

@@ -126,5 +126,5 @@ // clean entire build staging folder once

const results = await Promise.allSettled(handlers.map((handler) => {
this.logger._debug && this.logger.debug(`cleaning, handler [${handler.constructor.name}], src [${relativePaths(cds.root, handler.task.src)}]`)
return handler.clean()
const results = await Promise.allSettled(plugins.map((plugin) => {
this.logger._debug && this.logger.debug(`cleaning, plugin [${plugin.constructor.name}], src [${relativePaths(cds.root, plugin.task.src)}]`)
return plugin.clean()
}))

@@ -136,17 +136,17 @@ // check for errors and throw exception

async _executeBuildTasks(handlers) {
// sort handlers based on priority in
handlers = handlers.sort((a, b) => {
async _executeBuildTasks(plugins) {
// sort plugins based on priority in
plugins = plugins.sort((a, b) => {
return a.priority === b.priority ? 0 : a.priority > b.priority ? -1 : 1
})
// group handlers with same priority in order to execute in parallel
const buildPipeline = handlers.reduce((acc, handler) => {
// group plugins with same priority in order to execute in parallel
const buildPipeline = plugins.reduce((acc, plugin) => {
if (acc.length === 0) {
acc.push([handler])
acc.push([plugin])
} else {
const currGroup = acc[acc.length - 1]
if (currGroup[0].priority === handler.priority) {
currGroup.push(handler)
if (currGroup[0].priority === plugin.priority) {
currGroup.push(plugin)
} else {
acc.push([handler])
acc.push([plugin])
}

@@ -160,3 +160,3 @@ }

// check for errors and throw exception - return results otherwise including any compiler and build status messages
return this._resolveHandlerResponse(results, BuildTaskEngine._getHandlerMessages(handlers))
return this._resolveHandlerResponse(results, BuildTaskEngine._getMessages(plugins))
}

@@ -167,10 +167,10 @@

for (const group of pipeline) {
const results = await Promise.allSettled(group.map((handler) => {
this.logger._debug && this.logger.debug(`building, handler [${handler.constructor.name}], src [${relativePaths(cds.root, handler.task.src)}]`)
return handler.build()
.then(handlerResult => {
const results = await Promise.allSettled(group.map((plugin) => {
this.logger._debug && this.logger.debug(`building, plugin [${plugin.constructor.name}], src [${relativePaths(cds.root, plugin.task.src)}]`)
return plugin.build()
.then(pluginResult => {
return Promise.resolve({
task: handler.task,
result: handlerResult,
messages: this._sortMessagesUnique(handler.messages)
task: plugin.task,
result: pluginResult,
messages: this._sortMessagesUnique(plugin.messages)
})

@@ -184,3 +184,3 @@ })

_resolveHandlerResponse(results, handlerMessages = []) {
_resolveHandlerResponse(results, pluginMessages = []) {
const errors = []

@@ -207,5 +207,5 @@ const resolvedResults = results.reduce((acc, r) => {

// merge all existing compilation messages into a single CompilationError
// compiler warning and info messages are returned as handler messages
// compiler warning and info messages are returned as plugin messages
const compileErrors = errors.filter(e => e.constructor.name === COMPILATION_ERROR)
const compileMessages = handlerMessages.filter(message => message.constructor.name === COMPILE_MESSAGE)
const compileMessages = pluginMessages.filter(message => message.constructor.name === COMPILE_MESSAGE)
if (compileErrors.length) {

@@ -218,18 +218,18 @@ throw new CompilationError(this._sortMessagesUnique(BuildTaskEngine._getErrorMessages(compileErrors), compileMessages))

_createHandler(task) {
const handler = this.taskFactory.createHandler(task)
handler.init()
_createPlugin(task) {
const plugin = this.taskFactory.createPlugin(task)
plugin.init()
if (!(handler instanceof BuildTaskHandlerInternal) && handler.priority !== 1) {
// Custom build handlers are executed before internal handlers to ensure
if (!(plugin instanceof InternalBuildPlugin) && plugin.priority !== 1) {
// Custom build plugins are executed before internal plugins to ensure
// that generated content cannot be overwritten by mistake
throw new Error(`Illegal priority for ${handler.constructor.name} encountered for custom handler - in this version only priority value '1' is allowed`)
throw new Error(`Illegal priority for ${plugin.constructor.name} encountered for custom plugin - in this version only priority value '1' is allowed`)
}
this._logTaskHandler(handler)
return handler
this._logTaskHandler(plugin)
return plugin
}
_logBuildOutput(handlers) {
_logOutput(plugins) {
// log all generated files
const files = BuildTaskEngine._getBuildOutput(handlers)
const files = BuildTaskEngine._getOutput(plugins)
if (files.length > 0) {

@@ -240,6 +240,6 @@ this.logger.log(`done > wrote output to:\n ${files.join("\n ")}\n`)

async _writeGenerationLog(handlers) {
async _writeGenerationLog(plugins) {
const outputFile = cds.env.build.outputfile || process.env.GENERATION_LOG
if (outputFile) {
const files = BuildTaskEngine._getBuildOutput(handlers)
const files = BuildTaskEngine._getOutput(plugins)
this.logger.log(`writing generation log to [${outputFile}]\n`)

@@ -256,4 +256,4 @@ try {

static _getBuildOutput(handlers) {
const files = handlers.reduce((acc, handler) => acc.concat(handler.files), []).sort()
static _getOutput(plugins) {
const files = plugins.reduce((acc, plugin) => acc.concat(plugin.files), []).sort()
return files.map(file => {

@@ -271,5 +271,5 @@ if (path.isAbsolute(cds.env.build.target)) {

_logTaskHandler(handler) {
this.logger._debug && this.logger.debug(`handler ${handler.constructor.name}`)
this.logger._debug && this.logger.debug(`details src [${relativePaths(cds.root, handler.task.src)}], dest [${relativePaths(cds.root, handler.task.dest)}], use [${handler.task.use}], options [${JSON.stringify(handler.task.options)}]`) //NOSONAR
_logTaskHandler(plugin) {
this.logger._debug && this.logger.debug(`plugin ${plugin.constructor.name}`)
this.logger._debug && this.logger.debug(`details src [${relativePaths(cds.root, plugin.task.src)}], dest [${relativePaths(cds.root, plugin.task.dest)}], use [${plugin.task.use}], options [${JSON.stringify(plugin.task.options)}]`) //NOSONAR
}

@@ -323,7 +323,7 @@

/**
* Returns compiler messages and validation messages issued by handlers.
* @param {Array<BuildTaskHandler>} handlers
* Returns compiler messages and validation messages issued by plugins.
* @param {Array<BuildPlugin>} plugins
*/
static _getHandlerMessages(handlers) {
return handlers.reduce((acc, handler) => acc.concat(handler.messages), [])
static _getMessages(plugins) {
return plugins.reduce((acc, plugin) => acc.concat(plugin.messages), [])
}

@@ -330,0 +330,0 @@

@@ -50,4 +50,4 @@ const fs = require('fs')

createHandler(task) {
return this.providerFactory.createHandler(task)
createPlugin(task) {
return this.providerFactory.createPlugin(task)
}

@@ -69,3 +69,3 @@

// ensure that dependencies get wired up before filtering
await this.providerFactory.lookupTasks(tasks)
await this.providerFactory.lookupTasks(tasks, true)
}

@@ -88,3 +88,3 @@

existingTasks = [...tasks]
await this.providerFactory.lookupTasks(tasks)
await this.providerFactory.lookupTasks(tasks, true)
if (tasks.length > existingTasks.length) {

@@ -99,5 +99,2 @@ const newTasks = tasks.filter(task => !existingTasks.includes(task))

// ensure correct values for optional build task properties, error for missing mandatory properties
BuildTaskFactory._validateBuildTasks(tasks)
this._setDefaultBuildTargetFolder(tasks)

@@ -122,10 +119,2 @@ return tasks

static _validateBuildTasks(tasks) {
tasks.forEach(task => {
if (!task.src) {
throw new Error(`Invalid build task definition - value of property 'src' is missing in [${task.for || task.use}].`)
}
})
}
_setDefaultBuildTargetFolder(tasks) {

@@ -169,9 +158,3 @@ const task = tasks.find(task => task.for === BUILD_TASK_JAVA_CF || task.for === BUILD_TASK_JAVA)

if (options.for || options.use) {
const task = {}
if ((options.for)) {
task.for = options.for
}
if ((options.use)) {
task.use = options.use
}
const task = this.providerFactory.getTask(options.for ? { for: options.for } : { use: options.use })
if (options.src) {

@@ -182,3 +165,3 @@ task.src = options.src

}
} else if (resultTasks.length < tasks.length) {
} else if (resultTasks.length <= tasks.length) {
// return the same array as long as it contains a subset of the given tasks

@@ -185,0 +168,0 @@ tasks.length = 0

/* eslint-disable no-unused-vars */
/**
 * Abstract base class for build task providers.
 *
 * NOTE(review): this text comes from a version-diff view, so BOTH the old
 * provider API (canHandleTask/loadHandler) and the new one
 * (providesTask/getTask/loadPlugin) appear in the same class body below.
 * Where a method name occurs twice (lookupTasks, applyTaskDefaults) the
 * later definition wins in a JavaScript class body — confirm against the
 * actual 7.3.0 sources before relying on either variant.
 * @abstract
 */
module.exports = class BuildTaskProvider {
  constructor() {
    //injected by framework
    // NOTE(review): judging by the commented-out bodies below, `_plugin`
    // presumably carries { id, path, provides } — TODO confirm.
    this._plugin = null
  }
  // Legacy API: truthy result means this provider handles the given task.
  // Body is intentionally commented out in the abstract base.
  canHandleTask(task) {
    // return this._plugin.provides.includes(task.for || task.use && this._getTaskId(task.use))
  }
  // Legacy API: load the handler implementation module for a task.
  loadHandler(task) {
    // return module.require(`${this._plugin.path}/${task.for || this._getTaskId(task.use)}`)
  }
  // Legacy signature — superseded by lookupTasks(tasks, dependencies) further down.
  async lookupTasks(tasks) { }
  // Legacy default-applier — superseded by the parameterless-body variant below.
  async applyTaskDefaults(task) {
    // task.for = task.for || this._getTaskId(task.use)
  }
  // Strips the plugin-id prefix (plus separator) from a 'use' module name.
  // Returns undefined when no plugin descriptor/id is set.
  _getTaskId(use) {
    if (this._plugin && this._plugin.id) {
      return use.substring(this._plugin.id.length + 1)
    }
  }
  /**
   * New API: return truthy if this provider supplies the task identified by `key`.
   * @param {object} task
   */
  providesTask(key) { }
  /**
   * New API: resolve a task key to a full task object (base returns the key as-is).
   * @returns {object} task
   */
  getTask(key) { return key }
  /**
   * New API: load the build plugin class for the given task.
   * @param {object} task
   */
  loadPlugin(task) { }
  /**
   * New API: add provider-discovered tasks to `tasks`; `dependencies` signals
   * a dependency-wiring pass (see BuildTaskFactory callers in this diff).
   * @param {Array} tasks
   * @param {boolean} dependencies
   */
  async lookupTasks(tasks, dependencies) { }
  /**
   * New API: fill in default values on a task; no-op in the abstract base.
   * @param {object} task
   */
  async applyTaskDefaults(task) { }
}
const fs = require('fs')
const path = require('path')
const cds = require('../cds')
const { LOG_MODULE_NAMES, OUTPUT_MODE_FILESYSTEM } = require("./constants")
const BuildTaskProviderInternal = require('./provider/buildTaskProviderInternal')
const { LOG_MODULE_NAMES, OUTPUT_MODE_FILESYSTEM } = require('./constants')
const InternalBuildTaskProvider = require('./provider/internalBuildTaskProvider')
const BuildTaskProvider = require('./buildTaskProvider')
const { BuildError } = require('./util')
const BuildPlugin = require('./buildPlugin')
class BuildTaskProviderFactory {
constructor(options = {}) {
// REVISIT: Logger is never passed to BuildTaskProviderFactory except for tests
// -> tests shouldn't pollute library coding
// see cds-dk/test/bin/eval.test.js for a non-invasive solution (intercepting console.log)
options.logger = options.logger || cds.log(LOG_MODULE_NAMES)

@@ -34,18 +39,12 @@ options.outputMode = options.outputMode || OUTPUT_MODE_FILESYSTEM

getTask(key) {
return this._getProvider(key).getTask(key)
}
async applyTaskDefaults(tasks) {
return Promise.all(tasks.map(async task => {
return Promise.all(tasks.map(async (task) => {
if (!task.for && !task.use) {
throw new Error("Invalid build task definition - property 'for' and property 'use' missing")
throw new Error(`Invalid build task definition - property 'for' and property 'use' missing`)
}
const provider = this.providers.find(provider => {
try {
return provider.canHandleTask(task)
} catch (e) {
this.logger.error(`Build task provider ${provider.constructor.name} returned error:\n` + e)
throw e
}
})
if (!provider) {
throw new Error(`No provider found for build task '${task.for || task.use}'`)
}
const provider = this._getProvider(task)
if (provider instanceof DefaultBuildTaskProvider) {

@@ -58,7 +57,7 @@ this.logger._debug && this.logger.debug(`No provider found for build task '${task.use}', using default provider`)

async lookupTasks(tasks = []) {
async lookupTasks(tasks = [], dependencies) {
for (let i = 0; i < this.providers.length; i++) {
const provider = this.providers[i]
const existingTasks = [...tasks]
await this._lookupTasks(provider, tasks)
await this._lookupTasks(provider, tasks, dependencies)
if (existingTasks.length < tasks.length) {

@@ -75,36 +74,32 @@ // apply defaults

/**
* Create a BuildTaskHandler instance for the given build task.
* Create a BuildPlugin instance for the given build task.
* The implementation is loaded based on the build task's 'for' or 'use' option.
* @param {*} task
*/
createHandler(task) {
const BuildTaskHandlerClass = this.loadHandler(task)
const resolvedTask = this.resolveTask(task)
this.logger._debug && this.logger.debug(`loaded build task handler [${resolvedTask.use}]`)
createPlugin(task) {
const BuildPluginClass = this.loadPlugin(task)
const resolvedTask = this._resolveTask(task)
this.logger._debug && this.logger.debug(`loaded build plugin [${resolvedTask.use}]`)
const handler = new BuildTaskHandlerClass()
handler._task = resolvedTask
handler._logger = this.logger
handler._context = this.context
const plugin = new BuildPluginClass()
plugin._task = resolvedTask
plugin._logger = this.logger
plugin._context = this.context
this.context.tasks.push(resolvedTask)
this.logger._debug && this.logger.debug(`created BuildTaskHandler [${resolvedTask.use}]`)
return handler
this.logger._debug && this.logger.debug(`created build plugin [${resolvedTask.use}]`)
return plugin
}
/**
* Loads the build task handler implementation for the given build task.
* 'for' defines an alias for built-in handlers like 'hana', 'java', 'node', 'fiori' or 'mtx'.
* 'use' defines the fully qualified module name of external build task handler implemenations.
* Loads the build plugin implementation for the given build task.
* 'for' defines an alias for built-in plugins like 'hana', 'java', 'node', 'fiori' or 'mtx'.
* 'use' defines the fully qualified module name of custom build plugins implementations.
* @param {object} task
*/
loadHandler(task) {
let provider = this.providers.find(provider => provider.canHandleTask(task))
if (!provider) {
throw new Error(`No provider found for build task '${task.for || task.use}'`)
}
loadPlugin(task) {
const provider = this._getProvider(task)
try {
return provider.loadHandler(task)
}
catch (e) {
this.logger.error(`Provider failed to load handler class - provider: ${provider.constructor.name}, task: ${task.for || task.use} :\n` + e)
return provider.loadPlugin(task)
} catch (e) {
this.logger.error(`Provider failed to load build plugin class - provider: ${provider.constructor.name}, task: ${task.for || task.use} :\n` + e)
throw e

@@ -115,3 +110,3 @@ }

resolveTasks(tasks) {
return tasks.map(task => this.resolveTask(task))
return tasks.map(task => this._resolveTask(task))
}

@@ -124,5 +119,5 @@

*/
resolveTask(task) {
// first validate handler implementation
this.loadHandler(task)
_resolveTask(task) {
// first validate plugin implementation
this.loadPlugin(task)

@@ -147,6 +142,21 @@ // second validate src path

async _lookupTasks(provider, tasks) {
return provider.lookupTasks(tasks)
_getProvider(key) {
const provider = this.providers.find(provider => {
try {
return provider.providesTask(key)
} catch (e) {
this.logger.error(`Build task provider ${provider.constructor.name} returned an error:\n` + e)
throw e
}
})
if (!provider) {
throw new BuildError(`No provider found for build task '${key.for || key.use}'. Ensure that all required dependencies have been added and 'npm install' has been executed.`)
}
return provider
}
async _lookupTasks(provider, tasks, dependencies) {
return provider.lookupTasks(tasks, dependencies)
}
async _applyTaskDefaults(provider, tasks) {

@@ -159,4 +169,5 @@ return Promise.all(tasks.map(task => provider.applyTaskDefaults(task)))

return [
new BuildTaskProviderInternal(this.logger),
new DefaultBuildTaskProvider()
new InternalBuildTaskProvider(this.logger),
new PluginBuildTaskProvider(this.logger),
new DefaultBuildTaskProvider(this.logger)
]

@@ -171,13 +182,15 @@ }

class DefaultBuildTaskProvider extends BuildTaskProvider {
canHandleTask(task) {
return !!task.use
constructor(logger) {
super()
this._logger = logger
}
loadHandler(task) {
if (!task.use) {
throw new Error(`Invalid build task definition [${task.for}] - property 'use' missing`)
}
providesTask(key) {
return !!key.use
}
loadPlugin(task) {
try {
return require(require.resolve(task.use, { paths: [cds.root] }))
}
catch (e) {
} catch (e) {
throw new Error(`Build task could not be resolved - module [${task.use}] cannot be loaded:\n` + e)

@@ -187,2 +200,102 @@ }

}
/**
 * Provider for build tasks contributed by cds plugins (the new "build plugin"
 * mechanism introduced in 7.3.0). Discovers plugins at construction time and
 * serves their task defaults and implementation classes.
 * Has to be the last entry in the providers list. // REVISIT: Above it says DefaultBuildTaskProvider should be last entry? -> clarify
 */
class PluginBuildTaskProvider extends BuildTaskProvider {
  constructor(logger) {
    super()
    this._logger = logger
    // id -> { id, impl, class } for every successfully loaded build plugin
    const plugins = new Map()
    // REVISIT: Remove once fetch mechanism is removed
    // Resolves a module preferring the consuming project over cds-dk's own deps.
    function _local (id) {
      return require(require.resolve (id, {paths:[
        process.cwd(), // project-local module is preferred
        __dirname // otherwise from our own dependencies
      ]}))
    }
    const cdsPlugins = _local('@sap/cds/lib/plugins').fetch?.() // REVISIT: Get rid of fetch mechanism
    const buildPlugins = cds.env.build.plugins // REVISIT: Compat to old plugin mechanism, remove if possible
    // cds.env.build.plugins entries win over fetched cds plugins on id clash
    for (const [id, plugin] of Object.entries({ ...cdsPlugins, ...buildPlugins })) {
      try {
        const clazz = this._loadPlugin(id, plugin)
        // _loadPlugin returns undefined for modules that don't extend BuildPlugin
        if (!clazz) continue
        plugins.set(id, { id: id, impl: plugin.impl, class: clazz })
      } catch (e) {
        this._logger.error(`Skipping build plugin '${id}', it cannot be loaded.\n` + e + '\n') // REVISIT: why not throw? seems like an error one should fix, not exit with code 0
      }
    }
    this._plugins = plugins
  }
  get plugins() {
    return this._plugins
  }
  // A task is ours iff a registered plugin id matches its 'for' value.
  providesTask(key) {
    return this.plugins.has(key.for)
  }
  // Re-validates and returns the plugin class for the task's id.
  loadPlugin(task) {
    return this._loadPlugin(task.for, this.plugins.get(task.for))
  }
  // Adds one task per plugin that declares it has one — but only on the
  // non-dependency pass (callers pass dependencies=true when re-wiring).
  async lookupTasks(tasks, dependencies) {
    if (!dependencies) {
      this.plugins.forEach(plugin => {
        if (plugin.class?.hasTask()) {
          const task = this.getTask({ for: plugin.id })
          // NOTE(review): getTask ends in `?? key`, so `task` is always a
          // truthy object — this throw appears unreachable; confirm intent.
          if (!task) {
            throw new Error(`Build task '${plugin.id}' required for custom plugin '${plugin.impl}'`)
          }
          task.for = plugin.id
          tasks.push(task)
        }
      })
    }
  }
  // Copies every own property of the plugin's default task onto `task`
  // without overwriting values the user already set (??=).
  async applyTaskDefaults(task) {
    const defaultTask = this.getTask(task)
    const names = Object.getOwnPropertyNames(defaultTask)
    names.forEach(name => {
      task[name] ??= defaultTask[name]
    })
  }
  // Builds the effective task for a key from the plugin's declared defaults.
  // NOTE(review): `this.plugins.get(key.for)` is unguarded — a key not in the
  // map would throw a TypeError here; presumably providesTask() gates all
  // callers. Verify against provider-factory call sites.
  getTask(key) {
    const task = this.plugins.get(key.for).class.getTaskDefaults() ?? key
    task.src ??= '.'
    // NOTE(review): after the `??= '.'` above this guard only matters if
    // getTaskDefaults() returned an empty-string src — confirm whether that
    // case is intended or the guard is dead code.
    if (task.src) {
      task.src = task.src.replace(/\/$/, '') // strip a single trailing slash
    }
    task.for = key.for
    return task
  }
  // Resolves and validates a plugin implementation module.
  // Returns undefined (not an error) when the module does not extend
  // BuildPlugin, i.e. the plugin is not build-enabled.
  _loadPlugin(id, plugin) {
    let pluginClass
    try {
      pluginClass = require(require.resolve(plugin.impl, { paths: [cds.root] }))
      // Walks the prototype chain to detect BuildPlugin ancestry.
      const inheritsFrom = (child, parent) => {
        let proto = Object.getPrototypeOf(child)
        while (proto) {
          if (proto === parent) return true
          proto = Object.getPrototypeOf(proto)
        }
        return false
      }
      if (!inheritsFrom(pluginClass, BuildPlugin)) return // REVISIT: better way to check if plugin is build-enabled than traversing prototype chain?
    } catch (e) {
      throw new Error(`Custom build plugin implementation '${plugin.impl}' for '${id}' cannot be loaded:\n` + e)
    }
    // Static contract every build plugin class must fulfill.
    if (!pluginClass.hasTask || !pluginClass.getTaskDefaults) {
      throw new Error(`Custom build plugin '${id}' must implement 'hasTask' and 'getTaskDefaults' methods`)
    }
    return pluginClass
  }
}
module.exports = BuildTaskProviderFactory

@@ -22,5 +22,6 @@ exports.OUTPUT_MODE = "outputMode"

exports.CONTENT_LANGUAGE_BUNDLES = "contentLanguageBundles" // create i18n.json language bundles - build task specific
exports.CONTENT_DEFAULT_CSN = "contentDefaultCsn" // create default CSN format flavor: "inferred" - build task specific
exports.CONTENT_EDMX = "contentEdmx" // create EDMX for required languages - build task specific
exports.CONTENT_LANGUAGE_BUNDLES = "contentLanguageBundles" // create i18n.json language bundles
exports.CONTENT_DEFAULT_CSN = "contentDefaultCsn" // create default CSN format flavor: "inferred"
exports.FLAVOR_LOCALIZED_EDMX = "flavorLocalizedEdmx" // create localized EDMX languages based on existing properties files
exports.CONTENT_EDMX = "contentEdmx" // create EDMX for required languages - ENABLED by default
exports.CONTENT_PACKAGE_JSON = "contentPackageJson" // create package.json file if not existing, or modify existing package.json - ENABLED by default

@@ -60,2 +61,1 @@ exports.CONTENT_PACKAGELOCK_JSON = "contentPackageLockJson" // copy package-lock.json file if existing into deployment folder - ENABLED by default

exports.LOG_MODULE_NAMES = "cds|build"
exports.OVERRIDE_METHOD_MSG = "Must override method"

@@ -5,6 +5,6 @@ const fs = require('fs')

const BuildTaskFactory = require('./buildTaskFactory')
const BuildTaskHandler = require('./buildTaskHandler')
const BuildPlugin = require('./buildPlugin')
const { BuildError } = require('./util')
module.exports = { build, BuildTaskFactory, BuildTaskEngine, BuildTaskHandler, BuildError }
module.exports = { build, BuildTaskFactory, BuildTaskEngine, BuildPlugin, BuildError }

@@ -25,2 +25,3 @@ /**

cds._log(e.messages.length ? e.messages : e.message, { 'log-level': options['log-level'] || cds.env['log-level'] })
console.log('')
// CompilationError.message also includes the detail messages - do not log twice

@@ -27,0 +28,0 @@ const message = e.constructor.name === 'CompilationError' ? 'CDS compilation failed' : e.messages.length ? e.message : 'CDS build failed'

const path = require('path')
const cds = require('../../../cds')
const BuildTaskHandlerEdmx = require('../buildTaskHandlerEdmx')
const EdmxBuildPlugin = require('../edmxBuildPlugin')
const URL = require('url')

@@ -14,3 +14,3 @@ const { getProperty, relativePaths } = require('../../util')

*/
class FioriAppModuleBuilder extends BuildTaskHandlerEdmx {
class FioriBuildPlugin extends EdmxBuildPlugin {
init() {

@@ -30,3 +30,3 @@ // enforce

// cache for later use across multiple FioriAppModuleBuilder instances
// cache for later use across multiple FioriBuildPlugin instances
const fioriBuildOptions = this.context.for[BUILD_TASK_FIORI]

@@ -62,3 +62,2 @@ fioriBuildOptions.appModel = new Map()

try {
const edmxOptions = { version: cds.env.odata?.version }
for (let [appFolder, appModelGroup] of appModelGroups.entries()) {

@@ -77,3 +76,3 @@ this.logger.debug(`building module [${appFolder}] using [${this.constructor.name}]`)

await this.compileToEdmx(model, null, edmxOptions)
await this.compileToEdmx(model)

@@ -172,2 +171,2 @@ // cache edmx per fiori app root folder

module.exports = FioriAppModuleBuilder
module.exports = FioriBuildPlugin

@@ -5,3 +5,3 @@ const fs = require('fs')

const to_hdbmigration = require('./2migration')
const BuildTaskHandlerInternal = require('../buildTaskHandlerInternal')
const InternalBuildPlugin = require('../internalBuildPlugin')
const { BuildError, relativePaths, BuildMessage } = require('../../util')

@@ -11,3 +11,3 @@

CONTENT_ENV, CONTENT_DEFAULT_ENV_JSON, CONTENT_NODE_MODULES, OUTPUT_MODE_RESULT, CONTINUE_UNRESOLVED_SCHEMA_CHANGES } = require('../../constants')
const { WARNING } = BuildTaskHandlerInternal
const { WARNING } = InternalBuildPlugin

@@ -27,3 +27,3 @@ const DEFAULT_COMPILE_DEST_FOLDER = path.normalize("src/gen")

class HanaModuleBuilder extends BuildTaskHandlerInternal {
class HanaBuildPlugin extends InternalBuildPlugin {
init() {

@@ -159,3 +159,3 @@ if (this.hasBuildOption(OUTPUT_MODE, OUTPUT_MODE_RESULT)) {

const allFiles = csvDirs.reduce((acc, csvDir) => {
return acc.concat(BuildTaskHandlerInternal._find(csvDir, (entry) => {
return acc.concat(InternalBuildPlugin._find(csvDir, (entry) => {
if (fs.statSync(entry).isDirectory()) {

@@ -188,3 +188,3 @@ return false

const allCsvFiles = BuildTaskHandlerInternal._find(destSrcDir, (entry) => {
const allCsvFiles = InternalBuildPlugin._find(destSrcDir, (entry) => {
if (fs.statSync(entry).isDirectory()) {

@@ -256,3 +256,4 @@ return true

if (this.hasCdsEnvOption('features.journal', false) || format === 'hdbcds') {
let no_migration = cds.env.features.journal === false || format === 'hdbcds' || Object.values(model.definitions).every(def => !def['@cds.persistence.journal'])
if (no_migration) {
return await this._compileToHdb(model, format)

@@ -273,4 +274,6 @@ } else {

for (const [content, key] of result) {
hdiPlugins.add(key.suffix || path.extname(key.file))
const suffix = key.suffix || path.extname(key.file)
const file = key.file ? key.file : key.name + key.suffix
hdiPlugins.add(suffix)
if (this.hasBuildOption(OUTPUT_MODE, OUTPUT_MODE_RESULT)) {

@@ -280,2 +283,12 @@ this._result.hana.push(path.join(relDest, file))

promises.push(this.write(content).to(path.join(this.task.options.compileDest, file)))
if (suffix === FILE_EXT_HDBTABLE) {
const name = key.name || path.parse(key.file).name
const dbSrcDir = path.join(this.task.src, "src")
// issue an error in case a .hdbmigrationtable file already exists
if (fs.existsSync(path.join(dbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE))) {
const relDbSrcDir = path.relative(cds.root, dbSrcDir)
const relDbDestDir = path.relative(cds.root, this.task.options.compileDest)
throw new BuildError(`Multiple files exist defining the same HANA artifact - [${path.join(relDbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE)}, ${path.join(relDbDestDir, file)}].\nEither annotate the model entity using @cds.persistence.journal or undeploy the file [${path.join('src', name + FILE_EXT_HDBMIGRATIONTABLE)}] using an undeploy.json file.`)
}
}
}

@@ -307,2 +320,3 @@ await Promise.all(promises)

const afterImage = compilationResult.afterImage
let validationError

@@ -330,3 +344,3 @@ for (const { name, suffix, content, changed } of definitions) {

if (fs.existsSync(path.join(dbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE))) {
throw new BuildError(`Multiple files exist defining the same HANA artifact - [${path.join(relDbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE)}, ${path.join(relDbDestDir, file)}].\nEither annotate the model entity using @cds.persistence.journal or undeploy the file [${path.join('src', name + FILE_EXT_HDBMIGRATIONTABLE)}] using an undeploy.json file.`)
validationError = new BuildError(`Multiple files exist defining the same HANA artifact - [${path.join(relDbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE)}, ${path.join(relDbDestDir, file)}].\nEither annotate the model entity using @cds.persistence.journal or undeploy the file [${path.join('src', name + FILE_EXT_HDBMIGRATIONTABLE)}] using an undeploy.json file.`)
}

@@ -336,19 +350,29 @@ }

}
// the last-dev CSN shall only be updated, if all .hdbmigrationtable files could be successfully updated
// of course, .hdbmigrationtable files already written would need to be manually reverted before the
// cds build command is again executed
await Promise.all(promises)
await this._validateMigrationTableFiles()
// update last development version
if (afterImage) {
if (migrationTableFiles.length > 0) {
if (!HanaModuleBuilder._toEqualIgnoreMeta(lastDev, afterImage)) {
await this.write(afterImage).to(lastDevCsnDir)
// ensure
try {
if (validationError) {
throw validationError
}
await this._validateMigrationTableFiles()
} finally {
// update last-dev CSN version
if (afterImage) {
if (migrationTableFiles.length > 0) {
if (!HanaBuildPlugin._toEqualIgnoreMeta(lastDev, afterImage)) {
await this.write(afterImage).to(lastDevCsnDir)
}
// add src/.hdiconfig if not existing
if (!fs.existsSync(path.join(dbSrcDir, FILE_NAME_HDICONFIG))) {
const template = await HanaBuildPlugin._readTemplateAsJson('.hdiconfig-cloud')
await this.write(template).to(path.join(dbSrcDir, FILE_NAME_HDICONFIG))
}
}
// add src/.hdiconfig if not existing
if (!fs.existsSync(path.join(dbSrcDir, FILE_NAME_HDICONFIG))) {
const template = await HanaModuleBuilder._readTemplateAsJson('.hdiconfig-cloud')
await this.write(template).to(path.join(dbSrcDir, FILE_NAME_HDICONFIG))
}
}
} else {
}
if (!afterImage) {
throw new BuildError(`Inconsistent CDS compilation results - file ${lastDevCsnFolder} missing`)

@@ -367,3 +391,3 @@ }

if (this.isStagingBuild() && !exists) {
const content = await HanaModuleBuilder._readTemplateAsJson(FILE_NAME_PACKAGE_JSON)
const content = await HanaBuildPlugin._readTemplateAsJson(FILE_NAME_PACKAGE_JSON)
await this.write(content).to(path.join(this.task.dest, FILE_NAME_PACKAGE_JSON))

@@ -387,7 +411,7 @@ }

// // in 'gen/db' if none exists
// // ensures correct deployment of all artifacts including HANA native artifacts
// // ensures correct deployment of all artifacts including HANA native artifacts
// // without the need of creating a static .hdiconfig file in the 'db' folder
// // add all known plugins
// const defaultHdiConfig = await HanaModuleBuilder._readTemplateAsJson('.hdiconfig-cloud')
// const defaultHdiConfig = await HanaBuildPlugin._readTemplateAsJson('.hdiconfig-cloud')
// for (const plugin in defaultHdiConfig['file_suffixes']) {

@@ -404,3 +428,3 @@ // hdiPlugins.add('.' + plugin)

const template = await HanaModuleBuilder._readTemplateAsJson('.hdiconfig-all')
const template = await HanaBuildPlugin._readTemplateAsJson('.hdiconfig-all')
let content = {

@@ -427,3 +451,3 @@ 'file_suffixes': {}

const hdiNamespace = path.join(this.task.options.compileDest, FILE_NAME_HDINAMESPACE)
const content = await HanaModuleBuilder._readTemplateAsJson(FILE_NAME_HDINAMESPACE)
const content = await HanaBuildPlugin._readTemplateAsJson(FILE_NAME_HDINAMESPACE)
return await this.write(content).to(hdiNamespace)

@@ -440,3 +464,3 @@ }

const undeployJsonSrc = path.join(this.task.src, FILE_NAME_UNDEPLOY_JSON)
const templateEntries = await HanaModuleBuilder._readTemplateAsJson(FILE_NAME_UNDEPLOY_JSON)
const templateEntries = await HanaBuildPlugin._readTemplateAsJson(FILE_NAME_UNDEPLOY_JSON)
let newEntries = []

@@ -471,3 +495,3 @@ if (fs.existsSync(undeployJsonSrc)) {

if (Array.isArray(undeployList)) {
const hdiconfig = await HanaModuleBuilder._readTemplateAsJson('.hdiconfig-all')
const hdiconfig = await HanaBuildPlugin._readTemplateAsJson('.hdiconfig-all')
const keys = new Set(Object.keys(hdiconfig['file_suffixes']).map(key => '.' + key))

@@ -491,3 +515,3 @@ undeployList.forEach(entry => {

const dbSrcDir = path.join(this.task.src, "src")
const migrationTableFiles = BuildTaskHandlerInternal._find(dbSrcDir, (res) => {
const migrationTableFiles = InternalBuildPlugin._find(dbSrcDir, (res) => {
return fs.statSync(res).isFile() && path.extname(res) === FILE_EXT_HDBMIGRATIONTABLE

@@ -547,2 +571,2 @@ })

}
module.exports = HanaModuleBuilder
module.exports = HanaBuildPlugin
const fs = require('fs')
const path = require('path')
const cds = require('../../../cds')
const BuildTaskHandlerEdmx = require('../buildTaskHandlerEdmx')
const { OUTPUT_MODE, OUTPUT_MODE_RESULT, FILE_EXT_CDS, CONTENT_LANGUAGE_BUNDLES, CONTENT_DEFAULT_CSN, DEFAULT_CSN_FILE_NAME, OUTPUT_MODE_FILESYSTEM } = require('../../constants')
const EdmxBuildPlugin = require('../edmxBuildPlugin')
const { OUTPUT_MODE, OUTPUT_MODE_RESULT, FILE_EXT_CDS, CONTENT_LANGUAGE_BUNDLES, CONTENT_DEFAULT_CSN, DEFAULT_CSN_FILE_NAME,
OUTPUT_MODE_FILESYSTEM, FLAVOR_LOCALIZED_EDMX } = require('../../constants')
const { getI18nDefaultFolder } = require('../../util')
const DEFAULT_COMPILE_DEST_FOLDER = path.normalize('src/main/resources/edmx')
class JavaModuleBuilder extends BuildTaskHandlerEdmx {
class JavaBuildPlugin extends EdmxBuildPlugin {
init() {

@@ -16,8 +17,4 @@ super.init()

async build() {
const compileDest = this.task.options.compileDest
const { src, dest } = this.task
const odataOptions = {
version: cds.env.odata?.version
}
const model = await this.model()

@@ -28,8 +25,10 @@ if (!model) {

const odata = await this._compileForOdata(model, this.task.options.compileDest, odataOptions)
await this.compileToEdmx(odata, this.task.options.compileDest, odataOptions)
// generate edmx files containing all features
const odata = await this._compileForOdata(model, compileDest)
await this.compileToEdmx(odata, compileDest) // localized edmx for backward compatibility
await this.compileToEdmx(odata, path.join(compileDest, 'odata', cds.env.odata.version), { [FLAVOR_LOCALIZED_EDMX]: false }) // non-localized edmx
if (this.hasBuildOption(CONTENT_LANGUAGE_BUNDLES, true)) {
// collect and write language bundles into single i18n.json file
const i18n = await this.collectLanguageBundles(model, path.join(this.task.dest, getI18nDefaultFolder()))
if (!this.hasBuildOption(CONTENT_LANGUAGE_BUNDLES, false)) {
// collect and write language bundles containing all features
const i18n = await this.collectLanguageBundles(model, path.join(compileDest, getI18nDefaultFolder()))
if (i18n && this.hasBuildOption(OUTPUT_MODE, OUTPUT_MODE_RESULT)) {

@@ -64,3 +63,3 @@ this._result.languageBundles = i18n.bundles

async _compileForOdata(model, csnDest, compileOptions) {
async _compileForOdata(model, csnDest, compileOptions = {}) {
// csn for service providers

@@ -72,17 +71,18 @@ const m = cds.compile.for.java(model, {

const csnFile = path.join(csnDest, DEFAULT_CSN_FILE_NAME)
let csnModel
// adding csn to build result containing @source and _where persisted properties
if (this.hasBuildOption(CONTENT_DEFAULT_CSN, true)) { //default true or undefined
const csnStr = await this.compileToJson(model, csnFile)
csnModel = JSON.parse(csnStr)
csnModel.meta = model.meta
if (this.hasBuildOption(OUTPUT_MODE, OUTPUT_MODE_RESULT)) {
const csnModel = JSON.parse(csnStr)
csnModel.meta = model.meta
this._result.csn = csnModel
}
} else {
const csnStr = await this.compileToJson(m, csnFile)
csnModel = JSON.parse(csnStr)
csnModel.meta = m.meta
if (this.hasBuildOption(OUTPUT_MODE, OUTPUT_MODE_RESULT)) {
const csnModel = JSON.parse(csnStr)
csnModel.meta = m.meta
this._result.csn = csnModel
}
}
if (this.hasBuildOption(OUTPUT_MODE, OUTPUT_MODE_RESULT)) {
this._result.csn = csnModel
}
return m

@@ -97,2 +97,2 @@ }

}
module.exports = JavaModuleBuilder
module.exports = JavaBuildPlugin

@@ -5,3 +5,3 @@ const path = require('path')

const BuildTaskHandlerInternal = require('../buildTaskHandlerInternal')
const InternalBuildPlugin = require('../internalBuildPlugin')
const { FOLDER_GEN, EXTENSION_POINT_VALIDATION } = require('../../constants')

@@ -11,3 +11,3 @@ const ResourcesTarBuilder = require('../mtx/resourcesTarBuilder')

class MtxExtensionModuleBuilder extends BuildTaskHandlerInternal {
class MtxExtensionBuildPlugin extends InternalBuildPlugin {
init() {

@@ -42,8 +42,6 @@ super.init()

if (!this.hasBuildOption(EXTENSION_POINT_VALIDATION, false)) {
this._lintExtModel(extModel, model) // throws error in case of linting errors
}
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// perform all model operations before the linter is called as the linter
// will modify the model causing the creation of 'unresolved' associations
await this.compileToJson(extModel, path.join(destExt, 'extension.csn'))
await this.collectLanguageBundles(extModel, path.join(destExt, 'i18n'))

@@ -62,2 +60,12 @@

}
if (!this.hasBuildOption(EXTENSION_POINT_VALIDATION, false)) {
try {
// throws error in case of linting errors
this._lintExtModel(extModel, model)
} catch(error) {
// cleanup existing files in case of an error
await fs.promises.rm(this.task.dest, { recursive: true })
throw error
}
}
}

@@ -97,3 +105,3 @@

}
_lintExtModel(extModel, model) {

@@ -136,2 +144,2 @@ const linter = this._linter()

module.exports = MtxExtensionModuleBuilder
module.exports = MtxExtensionBuildPlugin
const path = require('path')
const cds = require('../../../cds')
const fs = require('fs')
const { FOLDER_GEN, DEFAULT_CSN_FILE_NAME, MTX_SIDECAR_DB_VALIDATION } = require('../../constants')
const NodeCfModuleBuilder = require('../nodejs')
const { FOLDER_GEN, DEFAULT_CSN_FILE_NAME, MTX_SIDECAR_DB_VALIDATION, CONTENT_EDMX, FLAVOR_LOCALIZED_EDMX } = require('../../constants')
const NodejsBuildPlugin = require('../nodejs')
const ResourcesTarProvider = require('../mtx/resourcesTarBuilder')
const { ERROR } = NodeCfModuleBuilder
const { ERROR } = NodejsBuildPlugin
const { relativePaths, BuildError, resolveRequiredSapModels, getI18nDefaultFolder } = require('../../util')

@@ -12,6 +12,6 @@

class MtxSidecarModuleBuilder extends NodeCfModuleBuilder {
class MtxSidecarBuildPlugin extends NodejsBuildPlugin {
get priority() {
// should be scheduled after 'hana' build tasks are finished
return NodeCfModuleBuilder.PRIORITY_MIN_VALUE
return NodejsBuildPlugin.PRIORITY_MIN_VALUE
}

@@ -51,3 +51,2 @@ init() {

await this.compileToJson(model, path.join(destSidecarSrc, DEFAULT_CSN_FILE_NAME))
await this.collectLanguageBundles(model, path.join(destSidecarSrc, getI18nDefaultFolder()))

@@ -81,12 +80,19 @@ await this.copySrvContent(this.task.src, destSidecar, destSidecar)

const destMainSrv = path.join(destMain, cds.env.folders.srv)
const csn = await this.model()
if (!csn) {
const model = await this.model()
if (!model) {
return
}
const { dictionary, sources } = await this.compileAll(csn, destMainSrv, destMain)
const { dictionary, sources } = await this.compileAll(model, destMainSrv, destMain)
await this.collectAllLanguageBundles(dictionary, sources, destMainSrv, destMain)
if (!this.hasBuildOption(CONTENT_EDMX, false)) {
//inferred model expected by cds.compile.to.edmx()
const compileOptions = { [FLAVOR_LOCALIZED_EDMX]: this.hasBuildOption(FLAVOR_LOCALIZED_EDMX, true) } // disabled by default
const baseModel = cds.compiler.compileSources({ 'base.json': dictionary.base }, { messages: this.messages })
await this.compileToEdmx(baseModel, path.join(destMainSrv, 'odata', cds.env.odata.version), compileOptions)
}
// create resources TAR
// resources are determined based on available database build task, SQLite as fallback
await new ResourcesTarProvider(this).createTar(destMain, csn)
await new ResourcesTarProvider(this).createTar(destMain, model)

@@ -145,2 +151,2 @@ // copy package.json and .cdsrc.json from project root

}
module.exports = MtxSidecarModuleBuilder
module.exports = MtxSidecarBuildPlugin

@@ -5,3 +5,3 @@ /* eslint-disable no-empty */

const cds = require('../../../cds')
const BuildTaskHandlerEdmx = require('../buildTaskHandlerEdmx')
const EdmxBuildPlugin = require('../edmxBuildPlugin')
const ResourcesTarProvider = require('./resourcesTarBuilder')

@@ -11,6 +11,6 @@ const { FOLDER_GEN } = require('../../constants')

class MtxModuleBuilder extends BuildTaskHandlerEdmx {
class MtxBuildPlugin extends EdmxBuildPlugin {
get priority() {
// should be scheduled after 'hana' build tasks are finished
return BuildTaskHandlerEdmx.PRIORITY_MIN_VALUE
return EdmxBuildPlugin.PRIORITY_MIN_VALUE
}

@@ -45,2 +45,2 @@ init() {

}
module.exports = MtxModuleBuilder
module.exports = MtxBuildPlugin
const path = require('path')
const cds = require('../../../cds')
const BuildTaskHandlerInternal = require('../buildTaskHandlerInternal')
const InternalBuildPlugin = require('../internalBuildPlugin')
const { BUILD_TASK_HANA } = require('../../constants')
const fs = require('fs')
const { WARNING } = BuildTaskHandlerInternal
const { WARNING } = InternalBuildPlugin
const DEFAULT_TAR_NAME = "resources.tgz"
class ResourcesTarBuilder {
constructor(handler) {
this._handler = handler
constructor(plugin) {
this._plugin = plugin
}
get handler() { return this._handler }
get plugin() { return this._plugin }

@@ -21,3 +21,3 @@ async createTar(dest, model) {

// packTarArchive fails otherwise
this.handler.pushMessage("No deployment resources found - skip resources.tgz", WARNING)
this.plugin.pushMessage("No deployment resources found - skip resources.tgz", WARNING)
return

@@ -40,3 +40,3 @@ }

await tar.czfd(tarFile, root, resources)
this.handler.pushFile(tarFile)
this.plugin.pushFile(tarFile)
}

@@ -88,3 +88,3 @@

async _getHanaTenantDbDest() {
const hanaTask = this.handler.context.tasks.find(task => task.for === BUILD_TASK_HANA)
const hanaTask = this.plugin.context.tasks.find(task => task.for === BUILD_TASK_HANA)
return hanaTask?.dest

@@ -91,0 +91,0 @@ }

const fs = require('fs')
const path = require('path')
const cds = require('../../../cds')
const BuildTaskHandlerEdmx = require('../buildTaskHandlerEdmx')
const EdmxBuildPlugin = require('../edmxBuildPlugin')
const { BuildError } = require('../../util')
const { OUTPUT_MODE, OUTPUT_MODE_FILESYSTEM, ODATA_VERSION_V2, FOLDER_GEN, CONTENT_EDMX,
CONTENT_PACKAGELOCK_JSON, CONTENT_NPMRC, CONTENT_CDSRC_JSON, CONTENT_ENV, CONTENT_DEFAULT_ENV_JSON } = require('../../constants')
const { WARNING } = BuildTaskHandlerEdmx
const { OUTPUT_MODE, OUTPUT_MODE_FILESYSTEM, ODATA_VERSION_V2, FOLDER_GEN, CONTENT_EDMX, CONTENT_PACKAGELOCK_JSON,
CONTENT_NPMRC, CONTENT_CDSRC_JSON, CONTENT_ENV, CONTENT_DEFAULT_ENV_JSON, FLAVOR_LOCALIZED_EDMX } = require('../../constants')
const { WARNING } = EdmxBuildPlugin
class NodejsModuleBuilder extends BuildTaskHandlerEdmx {
class NodejsBuildPlugin extends EdmxBuildPlugin {
init() {

@@ -31,4 +31,3 @@ super.init()

const destRoot = this.isStagingBuild() ? this.task.dest : this.destSrv
if (cds.env.odata?.version === ODATA_VERSION_V2) {
if (cds.env.odata.version === ODATA_VERSION_V2) {
// log warning as nodejs is only supporting odata version V4

@@ -48,5 +47,7 @@ this.pushMessage("OData v2 is not supported by node runtime. Make sure to define OData v2 in cds configuration.", WARNING)

if (this.hasBuildOption(CONTENT_EDMX, true)) {
const m = await cds.load(sources.base, super.options()) // REVISIT: Quick hack to get inferred model as expected by cds.compile.to.edmx()
await this.compileToEdmx(m, this.destSrv)
if (!this.hasBuildOption(CONTENT_EDMX, false)) {
//REVISIT: inferred model expected by cds.compile.to.edmx()
const compileOptions = { [FLAVOR_LOCALIZED_EDMX]: this.hasBuildOption(FLAVOR_LOCALIZED_EDMX, true) }
const baseModel = cds.compiler.compileSources({ 'base.json': dictionary.base }, { messages: this.messages })
await this.compileToEdmx(baseModel, path.join(this.destSrv, 'odata', cds.env.odata.version), compileOptions)
}

@@ -57,3 +58,3 @@

await this._copyNativeContent(cds.root, srcSrv, destRoot, destSrv)
if (this.context.options.workspaces) {
if (this.context.options.ws) {
await this._addWorkspaceDependencies(destRoot)

@@ -97,3 +98,3 @@ }

const plugin = require(require.resolve(path.join(workspaceRoot, folder, 'package.json')))
const tarFile = `${plugin.name.replace(/\//g,'-').replace(/@/, '')}-${plugin.version}.tgz`
const tarFile = `${plugin.name.replace(/\//g, '-').replace(/@/, '')}-${plugin.version}.tgz`
if (dependencies[plugin.name] === '*') {

@@ -216,2 +217,2 @@ const tarFilePath = path.join(dest, tarFile)

module.exports = NodejsModuleBuilder
module.exports = NodejsBuildPlugin
const axios = require('axios');
require('../util/pruneAxiosErrors');
const cds = require('../cds');

@@ -57,12 +59,3 @@ const DEBUG = cds.debug('cli');

async function retrieveTokenOrPasscodeUrl(params) {
if (params.has('token')) {
if (params.get('renewLogin') || params.get('tokenExpirationDate') <= Date.now()) {
DEBUG?.((params.get('renewLogin') ? 'Renewing' : 'Refreshing expired') + ' authentication token');
params.delete('token');
params.delete('tokenExpirationDate');
} else {
return;
}
}
if (!params.has('passcode') && !params.has('refreshToken') && params.has('passcodeUrl')) {
if (params.has('token') || !params.has('refreshToken') && !params.has('passcode') && params.has('passcodeUrl')) {
return;

@@ -80,3 +73,3 @@ }

} catch (error) {
if (error.response?.status === 404) { // may represent 405 in case of wrong method
if (error.status === 404) { // may represent 405 in case of wrong method
continue;

@@ -100,3 +93,4 @@ }

addTokenInfo(params, authData);
} else {
}
if (authData.passcode_url) {
addUrlInfo(params, authData);

@@ -103,0 +97,0 @@ }

@@ -23,3 +23,3 @@ const path = require('path');

const prefix = `Request to ${url} failed`;
switch (error.response?.status) {
switch (error.status) {
case 401:

@@ -26,0 +26,0 @@ throw getMessage(`${prefix}: invalid authentication.\nRetry with valid passcode${

@@ -165,2 +165,4 @@ const cds = require('../cds');

this.tagRule = new Param('tagRule', '');
// - non-persisted, internal

@@ -226,2 +228,5 @@ this.reqAuth = new Param('reqAuth', null, { obfuscate: ObfuscationLevel.full, internal: true });

this["clear-invalid"] = this.clearInvalid.newAlias('clear-invalid');
// download for mxts
this["download-migrated-projects"] = new Param('download-migrated-projects', false)
}

@@ -228,0 +233,0 @@ }

@@ -12,2 +12,9 @@ const cds = require ('../cds'), { local } = cds.utils;

const JOB_STATUS = {
QUEUED: 'QUEUED',
RUNNING: 'RUNNING',
FINISHED: 'FINISHED',
FAILED: 'FAILED'
};
module.exports = class Push extends BaseCommand {

@@ -31,3 +38,7 @@

console.log(`\nPushing extension '${extensionName}' from`, { src }, 'to', target);
await this.pushTgz(params, content, extensionName);
const push = await this.pushTgz(params, content, extensionName);
const async = !params.get('sync');
if (async) {
await this.pollUntilFinished(push.headers.location, params)
}
console.log ('Activation succeeded.\n')

@@ -66,2 +77,6 @@ }

const options = { ...params.get('reqAuth') };
const async = !params.get('sync');
if (async) {
options.headers = { prefer: 'respond-async', ...options.headers }
}

@@ -74,2 +89,27 @@ return axios.post(pushUrl, {

}
static async pollUntilFinished(jobUrl, params) {
return new Promise((resolve, reject) => {
const options = { ...params.get('reqAuth') };
const timeout = setTimeout(() => {
clearInterval(interval);
reject(new Error('cds push timed out after 5 minutes'));
}, 5 * 60 * 1000);
const interval = setInterval(async () => {
const { status, error } = (await axios.get(jobUrl, options)).data;
if (status === JOB_STATUS.FINISHED || status === JOB_STATUS.FAILED) {
clearInterval(interval);
clearTimeout(timeout);
if (status === JOB_STATUS.FINISHED) {
resolve();
} else if (status === JOB_STATUS.FAILED) {
reject(new Error(error));
}
}
}, 1000);
});
}
}

@@ -115,3 +115,3 @@ const fs = require('fs');

} catch (error) {
if ([401, 404].includes(error.response?.status)) {
if ([401, 404].includes(error.status)) {
continue;

@@ -348,2 +348,9 @@ }

}
function removeObsoleteToken() {
if (params.get('renewLogin') || params.get('tokenExpirationDate') <= Date.now()) {
DEBUG?.((params.get('renewLogin') ? 'Renewing' : 'Refreshing expired') + ' authentication token');
params.delete('token');
params.delete('tokenExpirationDate');
}
}

@@ -386,4 +393,6 @@ if (params.has('username')) {

setRelevantAuth(auth);
if (!logout && !params.has('token') && !params.has('passcode') && !params.has('clientsecret') && !params.has('key')) {
if (params.has('token')) {
removeObsoleteToken();
}
if (!logout && !params.has('token') && !params.has('refreshToken') && !params.has('passcode') && !params.has('clientsecret') && !params.has('key')) {
await addPasscode();

@@ -390,0 +399,0 @@ }

@@ -20,3 +20,3 @@ /* eslint-disable no-prototype-builtins */

let apiOptions = ['includeNamespaces', 'keepNamespace'];
let commonOptions = ['no_copy', 'no_save', 'out', 'dry', 'as', 'from', 'force'];
let commonOptions = ['no_copy', 'no_save', 'out', 'dry', 'as', 'from', 'force', 'config'];

@@ -152,2 +152,14 @@ async function generateEDMX2JSON(edmx) {

async function isValidJsonObject(options){
let config;
try {
if (options.config) {
config = JSON.parse(options.config);
return config;
}
} catch (error) {
throw new Error(messages.SPECIFY_CONFIG);
}
}
async function preProcess(file, options, cwd) {

@@ -271,3 +283,3 @@ let srcFilePath = await isValidInputFile(path.resolve(cwd, file));

async function _getResult(output, as, dest, force, version) {
async function _getResult(output, as, dest, force, kind) {
let extension = dest.substring(dest.lastIndexOf(".") + 1);

@@ -277,3 +289,3 @@ as = (as === "cds") ? "cdl" : as;

// output filepath should simply contain .csn as extension
if (version === 'rest' && ['swagger', 'openapi3'].includes(extension)) {
if (kind === 'rest' && ['swagger', 'openapi3'].includes(extension)) {
dest = dest.replace('.' + extension, '');

@@ -332,8 +344,8 @@ }

async function _updatePackageJson(dest, services, version, cwd) {
async function _updatePackageJson(dest, services, kind, cwd, config) {
const package_json = path.resolve(cwd, 'package.json');
const conf = await exists(package_json) ? require(package_json) : {};
const requires = ['cds', 'requires'].reduce((p, n) => p[n] || (p[n] = {}), conf);
if (version !== 'rest') {
version = (version === "V2") ? 'odata-v2' : 'odata';
if (kind !== 'rest') {
kind = (kind === "V2") ? 'odata-v2' : 'odata';
}

@@ -345,3 +357,3 @@ let package_json_updated = false;

const model = isLinux() ? initial_model.replace(/\\/g, '/') : initial_model;
cds.env.requires[service] = requires[service] = { kind: version, model };
cds.env.requires[service] = requires[service] = { kind: kind, model, ...config };
await write(package_json, JSON.stringify(conf, null, ' '));

@@ -358,7 +370,8 @@ package_json_updated = true;

const services = _getServiceNames(csn);
let config = await isValidJsonObject(options);
if (options.out) options.out = _validateAndComputeOutputFilePath(options.out, options.as, filePath);
const version = options.inputFileVersion;
const result = await _getResult(csn, options.as, options.out || filePath.replace(/\.[^.]+$/, ''), options.force, version);
const kind = options.inputFileKind;
const result = await _getResult(csn, options.as, options.out || filePath.replace(/\.[^.]+$/, ''), options.force, kind);
const written = options.dry || await _write(result, services, cwd);
const registered = options.dry || options.no_save || await _updatePackageJson(result[1].replace(/\.[^.]+$/, ''), services, version, cwd);
const registered = options.dry || options.no_save || await _updatePackageJson(result[1].replace(/\.[^.]+$/, ''), services, kind, cwd, config);
if (options.dry) return console.log(result[0]);

@@ -365,0 +378,0 @@ return Promise.all([written, registered]);

@@ -94,3 +94,3 @@ let cds = require('../cds');

let csn = openapi.openAPI2csn(src);
options.inputFileVersion = 'rest';
options.inputFileKind = 'rest';
return csn;

@@ -110,4 +110,4 @@ }

let csn = asyncapi.asyncapi2csn(src);
options.inputFileVersion = 'odata';
options.inputFileKind = 'odata';
return csn;
}

@@ -37,3 +37,5 @@ "use strict";

FIXED_PRECISION_VARIABLE_SCALE: 'WARNING: Fixed Precision with Variable or Floating Scale is not supported in CDS.',
NULLABLE_KEY: 'WARNING: Key element cannot be Nullable.'
NULLABLE_KEY: 'WARNING: Key element cannot be Nullable.',
COLLISION_DETECTED: `INFO: Element name collision detected. Adding suffix to affected type category.`,
SPECIFY_CONFIG: 'For --config option, please specify correct JSON Object.',
};

@@ -40,0 +42,0 @@

@@ -21,1 +21,6 @@ const cds = module.exports = require('./cds')

}
cds.schema = cds.schema || {} // backward compatibility
cds.extend (cds.schema) .with ( {
overlay4: require('./env/schema')
})

@@ -7,3 +7,3 @@ module.exports = {

MAVEN_ARCHETYPE_VERSION: '2.2.0',
MAVEN_ARCHETYPE_VERSION: '2.3.0',

@@ -28,2 +28,3 @@ OPTIONS: Object.freeze({

HANA: 'hana',
POSTGRES: 'postgres',
SQLITE: 'sqlite',

@@ -43,3 +44,4 @@ H2: 'h2',

HELM: 'helm',
TYPER: 'typer'
TYPER: 'typer',
HTML5_REPO: 'html5-repo'
}),

@@ -46,0 +48,0 @@

@@ -129,5 +129,4 @@ const path = require('path');

async stepInit(projectName, options) {
if (!cds.cli?.command) {
// called from CAP generator
if (options) {
// called from CAP generator with options
cds.cli = {

@@ -134,0 +133,0 @@ command: COMMAND_INIT,

@@ -129,2 +129,21 @@ // Registry of sequence matchers in mta.yaml files

const postgres = {
in: 'resources',
where: [{
property: 'parameters.service',
isEqualTo: 'postgresql-db',
}]
}
const postgresDeployer = {
in: 'modules',
where: [{
property: 'type',
isEqualTo: 'nodejs',
}, {
property: 'path',
isEqualTo: 'gen/pg',
}]
}
const saasRegistry = {

@@ -259,2 +278,4 @@ in: 'resources',

serviceManager,
postgres,
postgresDeployer,
saasRegistry,

@@ -261,0 +282,0 @@ redis,

@@ -6,3 +6,3 @@ const cds = require('../../../cds')

const { mergeJSON, mergeYAML, sort } = require('../../util/merge')
const { XSUAA, KIBANA } = require('../../constants').OPTIONS
const { XSUAA, KIBANA, HTML5_REPO } = require('../../constants').OPTIONS
const {

@@ -20,4 +20,3 @@ srvNode4, srvJava4, mtxSidecar4, approuter, // Modules

static hasFacet(env) {
return exists(join(env.folders.app, 'router', 'xs-app.json')) ||
exists(join(env.folders.app, 'xs-app.json'))
return exists(join(env.folders.app, 'xs-app.json'))
}

@@ -30,11 +29,11 @@

getRelatedFacets() {
return [KIBANA]
return [KIBANA, HTML5_REPO]
}
async run() {
const { approuterPath } = await readProject()
const appPackageJSONPath = join(approuterPath, 'package.json')
const { appPath = 'app' } = await readProject()
const appPackageJSONPath = join(appPath, 'package.json')
await mergeJSON(appPackageJSONPath, join(__dirname, 'files', 'package.json'))
await sort(appPackageJSONPath, 'dependencies')
await mergeJSON(join(approuterPath, 'default-env.json'), join(__dirname, 'files', 'default-env.json'))
await mergeJSON(join(appPath, 'default-env.json'), join(__dirname, 'files', 'default-env.json'))
}

@@ -44,3 +43,3 @@

const project = await readProject()
const { isNodejs, isJava, isExtensible, isMultitenant, hasMta, hasHelm, srvPath, approuterPath } = project
const { isNodejs, isJava, isExtensible, isMultitenant, hasMta, hasHelm, srvPath, appPath } = project

@@ -122,5 +121,6 @@ if (hasMta) {

if (isIndependentCommand) await writeMd5File(join('chart', '.cds-add-helm-files.md5'), context.newTrackingData);
}
const xsAppPath = join(approuterPath, 'xs-app.json')
const xsAppPath = join(appPath, 'xs-app.json')
const additions = isExtensible ? [{ ...approuterExtensibility, at: 0 }] : []

@@ -127,0 +127,0 @@ await mergeJSON(xsAppPath, join(__dirname, 'files', 'xs-app.json.hbs'), project, { additions })

const { join } = require('path')
const { readProject } = require('../../util/projectReader')
const { mergeJSON, mergeYAML } = require('../../util/merge')
const { mergeYAML } = require('../../util/merge')
const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5')

@@ -14,5 +14,6 @@ const { srvJava4, srvNode4, connectivity } = require('../_merging/registry-mta')

async run() {
const project = await readProject()
const { configFile } = project
await mergeJSON(configFile, join(__dirname, 'files', 'package.json.hbs'), project)
// Disable setting cds.requires.connectivity for now.
// const project = await readProject()
// const { configFile } = project
// await mergeJSON(configFile, join(__dirname, 'files', 'package.json.hbs'), project)
}

@@ -19,0 +20,0 @@

@@ -25,3 +25,3 @@ const { join } = require('path')

const project = await readProject()
const { isJava, hasHelm, hasDestination, hasMta, srvPath, hasHtml5Repo } = project
const { isJava, hasHelm, hasDestination, hasMta, srvPath } = project

@@ -40,3 +40,2 @@ if (hasMta) {

const binding = hasDestination && { srv: { bindings: { destination: { serviceInstanceName: 'destination' }}}}
const html5Binding = hasHtml5Repo && { 'html5-apps-deployer': { bindings: { destination: { serviceInstanceName: 'destination' }}}}

@@ -53,3 +52,2 @@ //in case facet is being added to the already existing charts folder

...binding,
...html5Binding,
destination: {

@@ -59,4 +57,3 @@ serviceOfferingName: 'destination',

parameters: {
version: '1.0.0',
...(hasHtml5Repo) && { HTML5Runtime_enabled: true }
version: '1.0.0'
}

@@ -63,0 +60,0 @@ }

@@ -30,3 +30,3 @@ const { join } = require('path')

async runDependentMerging(context = {}) {
const { appName, hasHelm } = await readProject()
const { appName, hasHelm, hasApprouter} = await readProject()

@@ -73,10 +73,34 @@ if (hasHelm) {

backendDestinations: {},
destination: {
parameters: {
HTML5Runtime_enabled: true
}
}
}
)
// copy content deployment chart if it is not present
const HelmTemplate = require(`../helm`);
const helmTemplate = new HelmTemplate(context.oldTrackingData, context.newTrackingData);
if (hasApprouter) {
await mergeYAML(
join('chart', 'values.yaml'),
{
'html5-apps-repo-runtime': {
serviceOfferingName: 'html5-apps-repo',
servicePlanName: 'app-runtime'
},
approuter: {
bindings: {
'html5-apps-repo-runtime': { serviceInstanceName: 'html5-apps-repo-runtime' }
}
},
}
)
await helmTemplate.addDependency('service-instance', 'html5-apps-repo-runtime');
}
isIndependentCommand = isIndependentCommand | await helmTemplate.addSubChart('content-deployment');
// add dependency entry in chart.yaml

@@ -83,0 +107,0 @@ await helmTemplate.addDependency('content-deployment', 'html5-apps-deployer');

@@ -7,3 +7,9 @@ const { join } = require('path')

const { mergeJSON, sort } = require('../../util/merge')
const {
APPROUTER, XSUAA, MULTITENANCY, EXTENSIBILITY,
MTA, HELM,
HANA, POSTGRES,
ENTERPRISE_MESSAGING, ENTERPRISE_MESSAGING_SHARED, REDIS_MESSAGING,
CONNECTIVITY, DESTINATION
} = require('../../constants').OPTIONS
module.exports = class MtaTemplate extends require('../templateBase') {

@@ -13,3 +19,3 @@

const { options } = cds.cli
if (!options?.add.has('mta') && options?.add.has('helm')) return false
if (!options?.add.has(MTA) && options?.add.has(HELM)) return false
return exists('mta.yaml')

@@ -19,3 +25,3 @@ }

getRelatedFacets() {
return ['approuter', 'xsuaa', 'multitenancy', 'hana', 'extensibility', 'enterprise-messaging', 'enterprise-messaging-shared', 'redis-messaging', 'connectivity', 'destination']
return [APPROUTER, XSUAA, MULTITENANCY, HANA, POSTGRES, EXTENSIBILITY, ENTERPRISE_MESSAGING, ENTERPRISE_MESSAGING_SHARED, REDIS_MESSAGING, CONNECTIVITY, DESTINATION]
}

@@ -22,0 +28,0 @@

@@ -84,3 +84,2 @@ const cds = require('../../cds')

get appPath() { return env.folders.app ?? 'app' },
get approuterPath() { return join(env.folders.app ?? 'app', 'router') },
get appVersion() { return appVersion },

@@ -87,0 +86,0 @@ get appName() { return appName },

@@ -0,17 +1,31 @@

/**
 * Build an Error that carries no stack trace.
 *
 * V8 captures the stack at construction time, honoring the global
 * `Error.stackTraceLimit`. Temporarily forcing that limit to 0 yields an
 * Error whose `.stack` contains no frames — useful for user-facing CLI
 * messages where a stack dump is noise. The previous limit is restored
 * afterwards so other Error constructions are unaffected.
 *
 * @param {string} message - the error message to attach
 * @returns {Error} an Error with `message` set and an empty stack trace
 */
function stacklessError(message) {
  const previousLimit = Error.stackTraceLimit;
  Error.stackTraceLimit = 0;
  try {
    return new Error(message);
  } finally {
    // always restore the global limit, even though `new Error` cannot throw here
    Error.stackTraceLimit = previousLimit;
  }
}
require('axios').interceptors.response.use(
response => response,
axError => {
const { inspect } = require('util');
const { url, method } = axError.config ?? {};
const { code, response } = axError;
const { status, data } = response ?? {};
const reason = data?.error /* RFC 6749 */ ?? axError.message;
const { response = {} } = axError;
const { status, statusText } = response;
let { data } = response;
if (Buffer.isBuffer(data)) {
data = JSON.parse(data.toString());
}
const reason = data?.error /* RFC 6749 */ && (typeof data.error === 'string' ? data.error : inspect(data.error));
const message = (url && method ? `${method.toUpperCase()} ${url} ` : '') +
'failed' +
(status || code ? ':' : '') +
(status || statusText ? ':' : '') +
(status ? ` ${status}` : '') +
(code ? ` ${code}` : '') +
`. ${reason}.` +
(data ? ` Details: '${data.error_description /* RFC 6749 */ || require('util').inspect(data)}'.` : '');
(statusText ? ` ${statusText}` : '') +
(reason ? `. ${reason}` : '') +
(data ? `. Details: '${data.error_description /* RFC 6749 */ || inspect(data)}'` : '');
const error = new Error(message);
const error = stacklessError(message);
error.status = status;
if (require('../').debug('req')) {

@@ -18,0 +32,0 @@ error.cause = axError;

{
"name": "@sap/cds-dk",
"version": "7.2.0",
"version": "7.3.0",
"description": "Command line client and development toolkit for the SAP Cloud Application Programming Model",

@@ -5,0 +5,0 @@ "homepage": "https://cap.cloud.sap/",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc