Socket
Socket
Sign inDemoInstall

@sap/cds-dk

Package Overview
Dependencies
Maintainers
1
Versions
146
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@sap/cds-dk - npm Package Compare versions

Comparing version 7.9.5 to 8.0.2

bin/completion/scripts/cds-fish.fish

70

bin/add.js
const cds = require('../lib/cds'), { fs } = cds.utils
const DEBUG = cds.debug('add')
const { join } = require('path')
const cmd = require('../lib/util/command')

@@ -20,4 +21,4 @@ const fromDk = fs.readdirSync(join(__dirname, '../lib/init/template'))

const options = ['--for', ...Object.values(conf).flatMap(p => p.options).sort(byShortcut).map(o => o.key)]
const shortcuts = ['-4', ...Object.values(conf).flatMap(p => p.options).sort(byShortcut).map(o => o.short)]
const options = ['--for', '--package', ...Object.values(conf).flatMap(p => p.options).sort(byShortcut).map(o => o.key), '--java:mvn']
const shortcuts = ['-4', '-p', ...Object.values(conf).flatMap(p => p.options).sort(byShortcut).map(o => o.short)]
const flags = ['--force', ...Object.values(conf).flatMap(p => p.flags).map(o => o.key)]

@@ -40,51 +41,4 @@ DEBUG?.('cli config ', { options, shortcuts, flags })

*hana* - add support for SAP HANA
${cds.cli.command !== 'completion' && require('../lib/init').help({ exclude: ['java', 'nodejs'] })}
*sqlite* - add support for SQLite
*postgres* - add support for PostgreSQL
*liquibase* - add support for Liquibase
*xsuaa* - add support for authentication via XSUAA
*multitenancy* - add support for multitenancy
*toggles* - add support for feature toggles
*extensibility* - add support for extensibility
*approuter* - add support for application routing
*local-messaging* - add support for local messaging
*file-based-messaging* - add support for file-based messaging
*enterprise-messaging* - add support for SAP Event Mesh
*redis-messaging* - add support for Redis messaging
*kibana* - add support for Kibana formatting
*mta* - add support for MTA-based deployment
*cf-manifest* - add support for CF-native deployment
*helm* - add support for Helm-based Kyma deployment
*html5-repo* - add support for the HTML5 repository
*pipeline* - add files for CI/CD pipeline integration
*tiny-sample* - add minimal sample files
*sample* - add sample files including Fiori UI
*data* - add CSV headers for modeled entities
*typer* - add type generation for CDS models
*lint* - add support for CDS Lint
# OPTIONS

@@ -100,3 +54,7 @@

*--package* <name>
Pull a package from your npm registry.
# FEATURE OPTIONS

@@ -151,4 +109,12 @@

const generator = new CDSGenerator()
const features = args.map(f => f.split(/[,\s+]/)).flat()
await generator.add(features)
const features = args.map(f => f.split(/[,\s]/)).flat()
if (cds.cli.options?.package) {
console.log(`running with --package, adding dependency ${cds.cli.options.package}`)
await cmd.spawnCommand(`npm`, ['add', cds.cli.options.package], { cwd: cds.root })
await cmd.spawnCommand(`cds`, ['add', ...features], { cwd: cds.root }) // plugins need to reload
// await cds.plugins
// await generator.add(features)
} else {
await generator.add(features)
}
}

@@ -47,3 +47,3 @@ const term = require('../lib/util/term')

*-a | --to-app-services <app>* (Beta)
*-a | --to-app-services* <app>

@@ -56,3 +56,3 @@ Bind to a given application (Cloud Foundry only).

*-c | --credentials* <JSON|{file path}> (Beta)
*-c | --credentials* <JSON|{file path}>

@@ -59,0 +59,0 @@ JSON or file path defining custom fields overwriting service credentials.

module.exports = Object.assign(build, {
handleCompletion,
options: ['--project', '--src', '--dest', '--for', '--use', '--opts', '--log-level', '--options-model'],
options: ['--project', '--src', '--dest', '--for', '--opts', '--log-level', '--options-model'],
shortcuts: ['-in', '-s', '-o', '-4'],

@@ -72,3 +72,3 @@ flags: ['--clean', '--no-clean', '--ws', '--ws-pack'], // REVISIT: --clean is deprecated, backward compatibility

*--no-clean* (beta)
*--no-clean*
Skips cleaning the build output folder before running the actual build

@@ -90,3 +90,3 @@ operation. Clients need to clean the output folder upfront to ensure

With the *ws* option the model scope used in executed build tasks is calculated based on
all workspaces. This also applies to the feature toggles. Feature toggles defined cross workspace
all workspaces. This also applies to the feature toggles. Feature toggles defined cross workspace
are merged. The output is created in the _mtx/sidecar_ for multitenant applications

@@ -167,5 +167,30 @@ or in the 'srv' folder for node.js apps if no sidecar exists.

}
// convert opts string to task options object
if (options.opts) {
options.taskOptions = scanTaskOptionParams(options.opts)
delete options.opts
}
const { build } = require('../lib/build')
await build(options)
}
/**
 * Parses a build-task options string into a plain options object.
 *
 * Examples:
 *   'a=1'           => { a: '1' }
 *   'a=[x,y,z]'     => { a: ['x', 'y', 'z'] }
 *   'a=1,b=[x,y,z]' => { a: '1', b: ['x', 'y', 'z'] }
 *
 * @param {string} optsParams - raw value of the --opts CLI parameter
 * @returns {object} map of option keys to string values or string arrays
 */
function scanTaskOptionParams(optsParams) {
  const taskOptions = {}
  // matchAll consumes a fresh iterator per call, so there is no stateful
  // lastIndex bookkeeping as with a reused /g regex and exec().
  const text = String(optsParams ?? '')
  for (const match of text.matchAll(/([\w-]+)=([\w/.-]+|\[([\w/,.-]+)\])/g)) {
    const key = match[1]
    // match[3] holds the bracket contents for a=[…]; match[2] the plain value
    const raw = match[3] || match[2]
    const parts = raw.split(',')
    // bracketed lists with more than one entry become trimmed arrays;
    // single values (including one-element lists) stay plain strings
    taskOptions[key] = parts.length > 1 ? parts.map(entry => entry.trim()) : raw
  }
  return taskOptions
}

@@ -20,3 +20,3 @@ #!/usr/bin/env node

o: 'logout',
t: 'lint',
t: 'test',
v: 'version', '-v':'version', '--version':'version',

@@ -29,2 +29,5 @@ h: 'help', '?':'help', '-?':'help', '--help':'help',

async exec (cmd = process.argv[2], ...argv) {
const path = require('node:path')
const fs = require('node:fs')
if (!argv.length) argv = process.argv.slice(3)

@@ -56,2 +59,11 @@ if (!cmd) cmd = process.stdin.isTTY ? 'help' : 'compile'

}
const args = this.args(task, argv)
args[0].push(...appendArgs)
cds.cli = {
command: cmd,
argv: args[0],
options: args[1],
}
// REVISIT: Have to do it here to allow custom flags for plugins!

@@ -64,11 +76,10 @@ if (cmd === 'add') {

}
if (args[1]?.['resolve-bindings']) await _resolveBindings({ silent: cmd === 'env' })
const args = this.args(task, argv)
args[0].push(...appendArgs)
cds.cli = {
command: cmd,
argv: args[0],
options: args[1],
// For some subcommands, cds.root may not point to the correct directory (e.g. 'cds w', where users can pass a project directory).
// Showing the full detected path gives the user an idea of whether this info is relevant for them.
const tsconfig = path.join(cds.root ?? '.', 'tsconfig.json')
if (!process.env.CDS_TYPESCRIPT && fs.existsSync(tsconfig)) {
console.info(`Detected ${tsconfig}. If this is a TypeScript project, consider running commands using cds-ts instead of cds.`)
}
if (args[1]?.['resolve-bindings']) await _resolveBindings({ silent: cmd === 'env' })
return await task.apply(this, args)

@@ -114,3 +125,4 @@ }

// consistent production setting for NODE_ENV and CDS_ENV
if (process.env.NODE_ENV !== 'production') process.env.NODE_ENV = process.env.CDS_ENV?.split(',').find(p => p === 'production') || process.env.NODE_ENV
// if (process.env.NODE_ENV !== 'production') process.env.NODE_ENV = process.env.CDS_ENV?.split(',').find(p => p === 'production') || process.env.NODE_ENV
if (process.env.NODE_ENV !== 'production') { if (process.env.CDS_ENV?.split(',').includes('production')) process.env.NODE_ENV = 'production' }
else process.env.CDS_ENV = Array.from(new Set([...process.env.CDS_ENV?.split(',') ?? [], 'production']))

@@ -221,4 +233,4 @@

function getCommands() {
const fs = require('fs');
const path = require('path');
const fs = require('node:fs');
const path = require('node:path');

@@ -225,0 +237,0 @@ const excludeList = ['cds.js', 'cds-ts.js', 'fix-redirects.js'];

@@ -1,2 +0,3 @@

const path = require("path");
const path = require('path')
const {colors} = require('../../lib/util/term')

@@ -43,4 +44,4 @@ module.exports = Object.assign ( compile, {

- edm, edmx, edmx-v2, edmx-v4, edmx-w4, edmx-x4
- sql, hdbcds, hdbtable
- cdl [beta]
- sql, hdbcds, hdbtable, hana
- cdl
- xsuaa

@@ -163,3 +164,3 @@ - openapi

case '--to':
return ['json', 'yml', 'edm', 'edmx', 'edmx-v2', 'edmx-v4', 'sql', 'hdbcds', 'hdbtable', 'cdl', 'xsuaa', 'openapi', 'asyncapi'];
return ['json', 'yml', 'edm', 'edmx', 'edmx-v2', 'edmx-v4', 'sql', 'hdbcds', 'hdbtable', 'hana', 'cdl', 'xsuaa', 'openapi', 'asyncapi'];
case '-4':

@@ -333,3 +334,3 @@ case '--for':

if (process.stdout.isTTY) {
o = inspect(o,{colors: true, depth: 111, compact:false}).replace(/\[Object: null prototype\] /g, '')
o = inspect(o,{colors, depth: 111, compact:false}).replace(/\[Object: null prototype\] /g, '')
} else

@@ -340,3 +341,3 @@ o = JSON.stringify(o, null, 2)

function suffix4 (x) { return x && x !== 'hdbtable' && ({
function suffix4 (x) { return x && x !== 'hdbtable' && x !== 'hana' && ({
edmx: '.xml',

@@ -343,0 +344,0 @@ "edmx-v2": '.xml',

@@ -6,4 +6,4 @@ const path = require('node:path');

const SUPPORTED_SHELLS = ['bash', 'gitbash', 'zsh', 'ps']
module.exports = Object.assign(completion, {

@@ -17,16 +17,16 @@ handleCompletion,

-- beta feature --
*cds completion* <options>
Adds or removes shell completion for cds commands. Currently supported shells:
bash, Git Bash, zsh, and PowerShell.
Bash, fish, Git Bash, Zsh, and PowerShell.
Depending on the operating system and the shell type, the following files will be changed:
Linux, WSL
bash: ~/.bashrc
zsh: ~/.zshrc
Bash: ~/.bashrc
fish: ~/.config/fish/config.fish
Zsh: ~/.zshrc
macOS
bash: ~/.bash_profile
zsh: ~/.zshrc
Bash: ~/.bash_profile
fish: ~/.config/fish/config.fish
Zsh: ~/.zshrc
Windows

@@ -46,5 +46,6 @@ PowerShell: $PROFILE

*--shell* <bash | gitbash | ps | zsh>
*--shell* <bash | fish | gitbash | ps | zsh>
Force the shell completion code to be added to the correct shell profile file.
bash: macOS, Linux, WSL
fish: macOS, Linux, WSL
zsh: macOS, Linux, WSL

@@ -58,3 +59,3 @@ gitbash: Git Bash for Windows

*cds* completion --remove # Remove shell completion for cds commands
*cds* completion --add --shell bash # Add shell completion for cds commands and enforce using bash shell
*cds* completion --add --shell bash # Add shell completion for cds commands and enforce using Bash shell

@@ -64,3 +65,3 @@ # ALIASES

*cds* add completion # Add shell completion for cds commands
*cds* add completion --shell bash # Add shell completion for cds commands and enforce using bash shell
*cds* add completion --shell bash # Add shell completion for cds commands and enforce using Bash shell

@@ -73,7 +74,9 @@ `});

case 'gitbash':
return 'cds-bash.sh'
return 'cds-bash.sh';
case 'fish':
return 'cds-fish.fish';
case 'ps':
return 'cds-ps.ps1'
return 'cds-ps.ps1';
case 'zsh':
return 'cds-zsh.sh'
return 'cds-zsh.sh';
case '':

@@ -83,4 +86,6 @@ case undefined:

throw 'Internal error: option --shell is missing';
default:
throw `Internal error: unsupported shell: ${shell}`;
default: {
const completionSetup = require('../../lib/init/template/completion/completionSetup');
throw `Internal error: unsupported shell: ${shell}, supported shells are ${completionSetup.supportedShells.join(', ')}`;
}
}

@@ -95,4 +100,4 @@ }

const completionSetup = require('../../lib/init/template/completion/completionSetup');
if (options.add || options.remove) {
const completionSetup = require('../../lib/init/template/completion/completionSetup');
await completionSetup.setup(options.add, options);

@@ -115,17 +120,13 @@ return;

const shell = localOptions.shell;
if (!SUPPORTED_SHELLS.includes(shell)) {
throw `internal Error: no completion handler found for type: ${shell}`;
if (!completionSetup.supportedShells.includes(shell)) {
throw `internal Error: no completion handler found for shell type: ${shell}, supported shells are ${completionSetup.supportedShells.join(', ')}`;
}
const currentWord = localOptions.curr;
const previousWord = localOptions.prev;
const line = localOptions.line;
const currentWord = localOptions.curr?.trim().replace(/^'|'$/g, '');
const previousWord = localOptions.prev?.trim().replace(/^'|'$/g, '');
const line = localOptions.line?.trim().replace(/^'|'$/g, '');
const segments = getSegments(line);
const cdsCmd = segments[1] || '';
let completions = await getCompletions(cdsCmd, previousWord, currentWord, segments);
if (currentWord && shell === 'ps') {
completions = completions.filter(v => v?.startsWith(currentWord));
}
const completions = await getCompletions(cdsCmd, previousWord, currentWord, segments);
reply(completions);

@@ -143,3 +144,4 @@ }

if (previousWord === '--shell') {
return ['bash', 'gitbash', 'ps', 'zsh'];
const { supportedShells } = require('../../lib/init/template/completion/completionSetup');
return supportedShells;
}

@@ -197,27 +199,49 @@

async function getCompletions(cdsCmd, previousWord, currentWord, segments) {
if (cdsCmd === currentWord) {
return cds.cmds;
const _filter = (completions, currentWord) => {
return completions.filter(token => token?.startsWith(currentWord));
}
try {
const cdsCmdObj = getCdsCmd(cdsCmd);
if (typeof cdsCmdObj.handleCompletion === 'function') {
return await cdsCmdObj.handleCompletion(currentWord, previousWord, segments, getUtil());
const _getCompletions = async () => {
if (previousWord === 'cds') {
if (!currentWord) {
return cds.cmds;
} else {
const completions = _filter(cds.cmds, currentWord);
if (completions.length > 0) {
return completions;
}
cdsCmd = 'compile';
previousWord = 'compile';
}
}
if (currentWord?.startsWith('-')) {
return getAllOptions(cdsCmdObj, segments);
}
try {
const cdsCmdObj = getCdsCmd(cdsCmd);
if (typeof cdsCmdObj.handleCompletion === 'function') {
return await cdsCmdObj.handleCompletion(currentWord, previousWord, segments, getUtil());
}
const completionFs = require('./completionFs');
const all = await completionFs.readdir(currentWord);
if (!(cdsCmdObj.options?.includes(previousWord) || cdsCmdObj.shortcuts?.includes(previousWord))) {
const allOptions = getAllOptions(cdsCmdObj, segments);
all.push(...allOptions);
if (currentWord?.startsWith('-')) {
return getAllOptions(cdsCmdObj, segments);
}
const completionFs = require('./completionFs');
const all = await completionFs.readdir(currentWord);
if (!(cdsCmdObj.options?.includes(previousWord) || cdsCmdObj.shortcuts?.includes(previousWord))) {
const allOptions = getAllOptions(cdsCmdObj, segments);
all.push(...allOptions);
}
return all;
} catch (err) {
return [];
}
}
return all;
} catch (err) {
return [];
let completions = await _getCompletions();
if (currentWord) {
completions = _filter(completions, currentWord);
}
return completions;
}

@@ -32,4 +32,5 @@ module.exports = Object.assign(deploy, {

Loads the configuration data for the specified profile(s). 'cds build' is executed based on this profile settings.
The *development* profile is used by default.
Loads the configuration data for the specified profile(s). 'cds build' is executed based on these profile settings;
additionally, binding information is resolved based on these profile settings. If a corresponding binding exists, its service
name and service key will be used. The *development* profile is used by default.

@@ -48,7 +49,7 @@ *--dry*

*--auto-undeploy* (beta feature)
*--auto-undeploy*
Tell HDI deployer to automatically undeploy deleted resources.
*--tunnel-address* (beta feature)
*--tunnel-address*

@@ -58,3 +59,3 @@ Deploy through the given address (host:port) rather than the original

*--vcap-file* (beta feature)
*--vcap-file*

@@ -61,0 +62,0 @@ Use credentials from the given file when deploying to *SAP HANA*, instead of

@@ -8,3 +8,3 @@ exports.deploy = function deploy_to_sqlite (_model, _db, options) {

const fts = cds.env.requires.toggles && cds.env.features.folders
const model = _model || conf.model || cds.resolve(!fts ? '*' : ['*',fts], false)
const model = _model || cds.resolve(!fts ? '*' : ['*',fts], false)
let file = 'db'

@@ -14,3 +14,3 @@ if (options.to) {

// current 'sqlite' default is bad, as we want to default to 'plain', so ignore it
const env_dialect = cds.env.sql.dialect !== 'sqlite' ? cds.env.sql.dialect : 'plain'
const env_dialect = cds.env.sql.dialect || 'plain'
const to_dialect = options.to && options.to.match(/^(\w+)/) && RegExp.$1

@@ -17,0 +17,0 @@ if (to_dialect) options.dialect = (to_dialect === 'sql')

module.exports = Object.assign(extend, {
options: ['--directory', '--subdomain', '--passcode', '--username', '--clientid', '--tagRule', '--defaultTag'],
flags: ['--wsdl', '--templates', '--force', '--download-migrated-projects'],
flags: ['--download-migrated-projects'],
shortcuts: ['-d', '-s', '-p', '-u', '-c'],

@@ -8,12 +8,6 @@ help: `

*cds extend* <app-url>
*cds extend* --download-migrated-projects <app-url>
**Deprecated - for use with previous MTX Services (@sap/cds-mtx).**
Download extension project after migration to @sap/cds-mtxs.
Create an extension project from an extensible SaaS app.
Obtain the app URL from the SaaS application provider.
To facilitate running this multiple times against the same app, refer to the
*cds login* command.
# OPTIONS

@@ -44,16 +38,2 @@

*--force*
Overwrite existing content. Make sure you are using a version control
system (like git) to avoid data loss.
*--templates*
Download only the templates provided by the SaaS application you want to
extend.
*--download-migrated-projects*
To be used only after change to @sap/cds-mtxs. Allows to download migrated extension projects.
*--tagRule*

@@ -69,10 +49,8 @@

*cds activate* to activate an extension project.
*cds pull* to get the CDS model of a SaaS app based on current MTX Services (@sap/cds-mtxs).
*cds pull* to get the CDS model of a SaaS app based on current MTX Services
(@sap/cds-mtxs).
https://cap.cloud.sap/docs/guides/multitenancy/mtxs for information on @sap/cds-mtxs.
https://cap.cloud.sap/docs/guides/multitenancy/mtxs for information on
@sap/cds-mtxs.
https://cap.cloud.sap/docs/guides/multitenancy/old-mtx-migration for information on migrating
from the old MTX Services to @sap/cds-mtxs.
`})

@@ -82,28 +60,19 @@

async function extend ([url], options = {}) {
if (options['download-migrated-projects']) {
const [username, password] = options.username?.split(':') ?? [];
delete options.user;
await require('../lib/mtx/download_legacy').run({
...options,
url,
username,
password
});
return
if (!options['download-migrated-projects']) {
console.log('Please provide the --download-migrated-projects flag to enable this command.');
return;
}
if (options.force) {
if (! await require('../lib/mtx/util/question').askBooleanQuestion('Using option --force will overwrite existing files. Continue (yN)? ', false, false)) {
console.log('Extend cancelled')
return
}
}
delete options['download-migrated-projects'];
const [username, password] = options.username?.split(':') ?? [];
delete options.user;
Object.assign(options, { url })
await require('../lib/mtx/extend').run(options);
await require('../lib/mtx/download_legacy').run({
...options,
url,
username,
password
});
}
/* eslint no-console: off */
const cds = require('../lib/cds')
/* eslint-disable no-console */
module.exports = exports = Object.assign (help, {formatHelp, handleCompletion, help:`

@@ -33,3 +33,3 @@ # USAGE

* * | *push* push extension to extensible SaaS app
*t* | *lint* run linter for env or model checks [beta]
*t* | *lint* run linter for env or model checks
*v* | *version* get detailed version information

@@ -36,0 +36,0 @@ * * | *completion* add/remove cli completion for cds commands

@@ -1,12 +0,12 @@

const { URLS } = require('../lib/init/constants');
const { join } = require('path')
const { URLS } = require('../lib/init/constants')
const currentOptions = ['--add', '--java:mvn', '--java:package', '--lint:custom', '--lint:custom:example'];
const legacyOptions = ['--db-technology', '--insecure', '--java-package', '--modules',
'--mta', '--odata-version', '--pipeline', '--skip-install', '--skip-sample-models',
'--srv-memory', '--srv-technology']
// REVISIT: necessary because module is loaded too early -> shouldn't be necessary
const cds = require('../lib/cds')
cds.root = join(process.cwd(), process.argv[3]?.startsWith('--') ? '.' : process.argv[3] ?? '')
module.exports = Object.assign(init, {
handleCompletion,
options: [...currentOptions, ...legacyOptions],
shortcuts: [],
options: ['--add', '--java:mvn', '--java:package', '--lint:custom', '--lint:custom:example'],
shortcuts: ['-a'],
flags: ['--force', '--verbose'],

@@ -28,52 +28,4 @@ help: `

*nodejs* - creates as a Node.js-based project
${require('../lib/init').help()}
*java* - creates as a Java-based project
*hana* - add support for SAP HANA
*sqlite* - add support for SQLite
*postgres* - add support for PostgreSQL
*liquibase* - add support for Liquibase
*xsuaa* - add support for authentication via XSUAA
*multitenancy* - add support for multitenancy
*toggles* - add support for feature toggles
*extensibility* - add support for extensibility
*approuter* - add support for application routing
*local-messaging* - add support for local messaging
*enterprise-messaging* - add support for SAP Event Mesh
*file-based-messaging* - add support for file-based messaging
*redis-messaging* - add support for Redis messaging
*mta* - add support for MTA-based deployment
*cf-manifest* - add support for CF-native deployment
*helm* - add support for Helm-based Kyma deployment
*html5-repo* - add support for the HTML5 repository
*pipeline* - add files for CI/CD pipeline integration
*tiny-sample* - add minimal sample files
*sample* - add sample files including Fiori UI
*data* - add CSV headers for modeled entities
*typer* - add type generation for CDS models
*lint* - add support for CDS Lint
*--java:mvn* <Comma separated maven archetype specific parameters>

@@ -105,3 +57,3 @@

const allOptionsFlags = [
...currentOptions ?? [],
...init.options ?? [],
...init.flags ?? []

@@ -132,24 +84,5 @@ ].filter(e => !argv.includes(e));

async function init(args) {
if (process.argv.some(a => legacyOptions.includes(a))) return _showWarningMessage()
const CDSGenerator = require('../lib/init')
const generator = new CDSGenerator()
await generator.initCmd(args[0])
await generator.init(args[0])
}
// Prints a highlighted warning banner when 'cds init' is invoked with
// legacy command-line parameters (see legacyOptions above) and directs
// the user to 'cds help init' for the current parameter set.
function _showWarningMessage() {
// NOTE: the banner lines are intentionally flush-left inside the template
// literal so the output is not indented on the terminal.
const message = `
************************************************
You are using 'cds init' with older parameters.
To find out more about the current parameters use
cds help init
************************************************
`
// warn() wraps the text in the terminal's warning color codes
const { warn } = require('../lib/util/term');
console.log(warn(message))
}
module.exports = Object.assign(login, {
options: ['--subdomain', '--directory', '--passcode', '--user', '--client', '--mtls'],
shortcuts: ['-s', '-d', '-p', '-u', '-c', '-m'],
flags: ['--plain'],
flags: ['--plain', '--save-refresh-token'],
get help() {

@@ -14,3 +14,3 @@ const config = require('../lib/mtx/settings_manager').SettingsManager.config

*cds push*) by providing them with automatic authentication: fetches and
saves authentication tokens and provides relevant tokens to those commands.
saves Access Tokens and provides relevant tokens to those commands.
Alternatively saves username for apps running locally.

@@ -37,4 +37,5 @@

If a refresh token has been provided with the original token and the
original token has expired, it will be automatically renewed.
If a Refresh Token has been saved with the original token (see option
--save-refresh-token) and the original token has expired, it will be
automatically renewed.

@@ -118,2 +119,7 @@ *Storing Authentication Data*

*--save-refresh-token*
Also store the Refresh Token (if provided) for automatic renewal of the
Access Token upon expiration.
*--plain*

@@ -171,5 +177,7 @@

renewLogin: true,
saveData: true
saveData: true,
saveRefreshToken: options['save-refresh-token'] ?? false
});
delete options.plain;
delete options['save-refresh-token'];

@@ -176,0 +184,0 @@ await require('../lib/mtx/auth_manager').login(options);

@@ -57,3 +57,3 @@ module.exports = Object.assign(migrate, {

async function migrate (param, options = {}) {
// eslint-disable-next-line cds-dk/no-missing-dependencies
await require(_local('@sap/cds-mtxs/bin/cds-mtx-migrate')).run(param[0], options)

@@ -60,0 +60,0 @@ }

@@ -13,2 +13,13 @@ module.exports = Object.assign(pull, {

CDS or Cloud Foundry login.
Before running this command, prepare your extension project from a template
obtained from the SaaS application provider. It must contain a package.json
file for the project metadata, including the 'extends' field, which
specifies the base-model name.
The base model is saved in a sub-folder named '.base' (may be renamed after
the first pull). This folder is automatically included in package.json as a
workspace. Thus, any subsequent *npm install* will install the base model
into node_modules, in a sub-folder named after the base-model name. You can
thus refer to the base model in your extension by this name.

@@ -15,0 +26,0 @@ Authentication data will not be saved.

@@ -0,1 +1,2 @@

module.exports = Object.assign (repl, { options: [], help: `

@@ -35,4 +36,6 @@ # SYNOPSIS

cds.lazify (Object.assign (repl.context,{ cds,
Foo: lazy => new cds.entity ({name:'Foo'}),
Bar: lazy => new cds.entity ({name:'Bar'}),
expect: lazy => cds.test.expect,
}))

@@ -39,0 +42,0 @@

@@ -1,2 +0,1 @@

/* eslint-disable no-console */
const { join, resolve } = require ('path')

@@ -38,3 +37,3 @@ const { execSync, spawnSync } = require('child_process')

function list_versions(args, options) {
const colorsOff = options['no-colors'] ?? !process.stdout.isTTY
const {colors} = require('../lib/util/term')

@@ -47,3 +46,3 @@ if (options['npm-list'] || options['npm-tree']) {

// if (e) console.error(e)
const replacement = (colorsOff ? '$1 $2$3$4' : '\x1b[91m$1 \x1b[32m$2\x1b[0m\x1b[2m$3\x1b[32m$4\x1b[0m');
const replacement = (!colors ? '$1 $2$3$4' : '\x1b[91m$1 \x1b[32m$2\x1b[0m\x1b[2m$3\x1b[32m$4\x1b[0m');
for (let line of stdout.split(/\n/)) if (line.match(re)) console.log(

@@ -60,3 +59,3 @@ line.replace(/(@sap[^@]*)@([\S]+)( -> [\S]+)?(deduped)?/,replacement)

} else {
const mark = colorsOff ? s => s : require('../lib/util/term').info
const mark = !colors ? s => s : require('../lib/util/term').info
for (let each of Object.keys(versions)) console.log(`${mark(each)}: ${versions[each]}`)

@@ -69,4 +68,5 @@ }

const env = require('../lib').env.for('cds', cwd)
const isJava = env['project-nature'] === 'java'
if (isJava && process.stderr.isTTY) {
env.cli ??= {}; env.cli.version ??= {}
const javaInfo = !env.cli.version.skipjava && env['project-nature'] === 'java'
if (javaInfo && process.stderr.isTTY) {
console.error('\x1b[2m'+'This takes a while. Collecting Java versions...', '\x1b[0m')

@@ -89,3 +89,3 @@ }

if (isJava) {
if (javaInfo) {
Object.assign(versions, capJavaVersion(cwd))

@@ -92,0 +92,0 @@ }

@@ -54,14 +54,5 @@ const watchOnlyOptions = ['--ext', '--livereload', '--open']

const {delimiter,dirname,relative,resolve,sep} = require ('path')
const {codes} = require('../lib/util/term'), t = codes
const fs = require ('fs')
const colors = !!process.stdout.isTTY && !!process.stderr.isTTY
const t = module.exports.codes = {
reset: colors ? '\x1b[0m' : '', // Default
bold: colors ? '\x1b[1m' : '', // Bold/Bright
link: colors ? '\x1b[4m' : '', // underline
red: colors ? '\x1b[91m' : '', // Bright Foreground Red
green: colors ? '\x1b[32m' : '', // Foreground Green
yellow: colors ? '\x1b[33m' : '', // Foreground Green
orange: colors ? '\x1b[38;2;255;140;0m' : '' // darker orange, works with bright and dark background
}
const extDefaults = 'cds,csn,csv,ts,mjs,cjs,js,json,properties,edmx,xml,env'

@@ -68,0 +59,0 @@ const ignore = RegExp(

@@ -1,3 +0,3 @@

const yaml = require('@sap/cds-foss')('yaml');
const { exec } = require('../util/exec');
const { yaml } = require('../cds').utils
const { execSync } = require('child_process');
const { bold } = require('../util/term');

@@ -10,3 +10,3 @@

this.config = once( async() => {
const configString = await exec({result: 'stdout'}) `kubectl config view --raw`;
const configString = execSync('kubectl config view --raw').toString();
const config = yaml.parse(configString);

@@ -60,8 +60,9 @@

const {context, namespace, kind, name} = options;
const result = await exec({stdio: ['ignore', 'pipe', 'ignore'], acceptStatuses: [0, 1]}) `kubectl --output json --context ${context} --namespace ${namespace} get ${kind} ${name}`;
if (result.status === 0)
return JSON.parse(result.stdout);
else
return null;
try {
return JSON.parse(execSync(`kubectl --output json --context ${context} --namespace ${namespace} get ${kind} ${name}`).toString());
}
catch (error) {
if (error.status === 1) return null
else throw error
}
}

@@ -68,0 +69,0 @@ }

@@ -74,3 +74,3 @@ const fs = require('fs')

/**
* BuildPlugin#prepare has been deprecated and was never part of the public API.
* Plugin#prepare has been deprecated and was never part of the public API.
* Currently only used by internal FioriBuildPlugin.

@@ -212,7 +212,11 @@ * @deprecated

if (!(plugin instanceof InternalBuildPlugin) && plugin.priority !== 1) {
// Custom build plugins are executed before internal plugins to ensure
// that generated content cannot be overwritten by mistake
throw new Error(`Illegal priority for ${plugin.constructor.name} encountered for custom plugin - in this version only priority value '1' is allowed`)
if (!(plugin instanceof InternalBuildPlugin) && plugin.priority >= 0 && plugin.priority <= 512) {
throw new Error(
`Invalid priority value for ${plugin.constructor.name}.
The valid priority value range for custom plugins is -1024..-1 and 512..1024.
Range 0..512 is blocked for internal plugins.
The higher the value the earlier the plugin is run.`
)
}
this._logTaskHandler(plugin)

@@ -267,3 +271,3 @@ return plugin

DEBUG?.(`plugin ${plugin.constructor.name}`)
DEBUG?.(`details src [${relativePaths(cds.root, plugin.task.src)}], dest [${relativePaths(cds.root, plugin.task.dest)}], use [${plugin.task.use}], options [${JSON.stringify(plugin.task.options)}]`)
DEBUG?.(`details src [${relativePaths(cds.root, plugin.task.src)}], dest [${relativePaths(cds.root, plugin.task.dest)}], options [${JSON.stringify(plugin.task.options)}]`)
}

@@ -318,3 +322,3 @@

* Returns compiler messages and validation messages issued by plugins.
* @param {Array<BuildPlugin>} plugins
* @param {Array} plugins
*/

@@ -321,0 +325,0 @@ static _getMessages(plugins) {

@@ -5,4 +5,4 @@ const fs = require('fs')

const BuildTaskProviderFactory = require('./buildTaskProviderFactory')
const { hasJavaNature, getProperty, flatten, getDefaultModelOptions, hasOptionValue } = require('./util')
const { FILE_EXT_CDS, BUILD_TASK_JAVA_CF, BUILD_TASK_JAVA, CONTENT_WS } = require("./constants")
const { hasJavaNature, getProperty, getDefaultModelOptions, hasOptionValue, BuildError } = require('./util')
const { FILE_EXT_CDS, OPTION_WS } = require("./constants")
const term = require('../util/term')

@@ -52,6 +52,2 @@ const DEBUG = cds.debug('cli|build')

// clearing model cache (details https://github.tools.sap/cap/cds/pull/181) is no longer required
// because of changes https://github.tools.sap/cap/cds/pull/1121
// cds.resolve.cache = {}
let tasks = BuildTaskFactory._getExistingTasks()

@@ -63,3 +59,3 @@ if (tasks.length === 0) {

this._applyCliOptions(tasks)
// 1. apply default values including task.for and task.use and ensure that for all tasks a provider exists - throwing error otherwise
// 1. apply default values including task.for and ensure that for all tasks a provider exists - throwing error otherwise
await this.providerFactory.applyTaskDefaults(tasks)

@@ -90,3 +86,3 @@ // ensure that dependencies get wired up before filtering

this._setDefaultBuildTargetFolder(tasks)
BuildTaskFactory._setDefaultBuildTargetFolder(tasks)
return tasks

@@ -113,3 +109,3 @@ }

// calculate only once
if (cds.cli.options?.ws || tasks.some(task => hasOptionValue(task.options?.[CONTENT_WS], true))) {
if (cds.cli.options?.[OPTION_WS] || tasks.some(task => hasOptionValue(task.options?.[OPTION_WS], true))) {
wsModelPaths = await getDefaultModelOptions(true)

@@ -119,5 +115,5 @@ }

tasks.forEach(task => {
this._setTaskModelOptions(task, cds.cli.options?.ws || hasOptionValue(task.options?.[CONTENT_WS], true) ? wsModelPaths : modelPaths)
this._setTaskModelOptions(task, cds.cli.options?.[OPTION_WS] || hasOptionValue(task.options?.[OPTION_WS], true) ? wsModelPaths : modelPaths)
if (!task.src) {
throw new Error(`Invalid build task definition - value of property 'src' is missing in [${task.for || task.use}].`)
throw new BuildError(`Mandatory property 'src' not defined for build task '${task.for}'.`)
}

@@ -127,8 +123,5 @@ })

_setDefaultBuildTargetFolder(tasks) {
const task = tasks.find(task => task.for === BUILD_TASK_JAVA_CF || task.for === BUILD_TASK_JAVA)
const srv = task ? task.src : BuildTaskFactory._getModuleFolder(flatten([cds.env.folders.srv])) || "srv"
static _setDefaultBuildTargetFolder() {
// Java projects use "." as the default build target folder
if (hasJavaNature([path.join(cds.root, srv), cds.root]) && BuildTaskFactory._adaptBuildTargetSettingForJava()) {
if (hasJavaNature() && this._adaptBuildTargetSettingForJava()) {
DEBUG?.("using inplace build for java project instead of default staging build")

@@ -160,9 +153,8 @@ }

let resultTasks = tasks.filter(task => {
return (!options.use || options.use === task.use)
&& (!options.for || options.for === task.for)
return (!options.for || options.for === task.for)
&& (!options.src || options.src === task.src)
})
if (resultTasks.length === 0) {
if (options.for || options.use) {
const task = this.providerFactory.getTask(options.for ? { for: options.for } : { use: options.use })
if (options.for) {
const task = this.providerFactory.getTask({ for: options.for })
if (options.src) {

@@ -191,5 +183,5 @@ task.src = options.src

}
if (options.opts) {
const opts = BuildTaskFactory._scanTaskOptionParams(options.opts)
task.options = task.options ? Object.assign(task.options, opts) : opts
if (options.taskOptions) {
const taskOptions = JSON.parse(JSON.stringify(options.taskOptions))
task.options = task.options ? Object.assign(task.options, taskOptions) : taskOptions
}

@@ -245,22 +237,2 @@ })

static _scanTaskOptionParams(optsParams) {
// need to create new regex every call since a constant would keep the match state
const quoteRegex = /([\w-]+)=([\w/.-]+|\[([\w/,.-]+)\])/g
// captures a=1 => a:1
// a=[x,y,z] => a:[x,y,z]
// a=1,b=[x,y,z] => a:1 b=[x,y,z]
let match = quoteRegex.exec(optsParams)
const taskOptions = {}
while (match != null) {
const key = match[1]
const value = match[3] || match[2]
const valueArray = value.split(",")
taskOptions[key] = valueArray.length > 1 ? valueArray.map((entry) => entry.trim()) : value
match = quoteRegex.exec(optsParams)
}
return taskOptions
}
/**

@@ -267,0 +239,0 @@ * Determines the module folder from the past list that may represent files or folders w or w/o .cds file extension.

@@ -21,3 +21,2 @@ const fs = require('fs')

})
this._buildOptions = options
this._context = { options, tasks: [] } // resolved tasks

@@ -44,10 +43,6 @@ }

return Promise.all(tasks.map(async (task) => {
if (!task.for && !task.use) {
throw new Error(`Invalid build task definition - property 'for' and property 'use' missing`)
if (!task.for) {
throw new BuildError(`Mandatory property 'for' not defined for build task.`)
}
const provider = this._getProvider(task)
if (provider instanceof DefaultBuildTaskProvider) {
DEBUG?.(`No provider found for build task '${task.use}', using default provider`)
}
return this._applyTaskDefaults(provider, [task])
return this._applyTaskDefaults(this._getProvider(task), [task])
}))

@@ -72,3 +67,3 @@ }

/**
* Create a BuildPlugin instance for the given build task.
* Create a Plugin instance for the given build task.
* The implementation is loaded based on the build task's 'for' or 'use' option.

@@ -78,8 +73,8 @@ * @param {*} task

createPlugin(task) {
const BuildPlugin = this.getPlugin(task)
const Plugin = this.getPlugin(task)
const resolvedTask = this._resolveTask(task)
DEBUG?.(`loaded build plugin [${resolvedTask.use}]`)
DEBUG?.(`loaded build plugin [${resolvedTask.for}]`)
const plugin = new BuildPlugin()
if (!(plugin instanceof BuildPlugin)) {
const plugin = new Plugin()
if (!(plugin instanceof Plugin)) {
throw new Error(`Invalid Build plugin type ${task.for}`)

@@ -90,3 +85,3 @@ }

this.context.tasks.push(resolvedTask)
DEBUG?.(`created build plugin [${resolvedTask.use}]`)
DEBUG?.(`created build plugin [${resolvedTask.for}]`)
return plugin

@@ -106,3 +101,3 @@ }

} catch (e) {
console.error(`Provider failed to load build plugin class - provider: ${provider.constructor.name}, task: ${task.for || task.use} :\n` + e)
console.error(`Provider failed to load build plugin class ${task.for} - provider: ${provider.constructor.name}`)
throw e

@@ -133,3 +128,3 @@ }

} catch (e) {
throw new Error(`Build task [${resolvedTask.for || resolvedTask.use}] could not be resolved - folder src [${path.resolve(cds.root, task.src)}] does not exist`)
throw new BuildError(`The 'src' folder '${path.resolve(cds.root, task.src)}' for build task '${resolvedTask.for}' does not exist`)
}

@@ -146,3 +141,3 @@ resolvedTask.dest = path.resolve(cds.root, cds.env.build.target, task.dest || task.src)

} catch (e) {
console.error(`Build task provider ${provider.constructor.name} returned an error:\n` + e)
console.error(`Build task provider ${provider.constructor.name} returned an error`)
throw e

@@ -152,3 +147,3 @@ }

if (!provider) {
throw new BuildError(`No provider found for build task '${key.for || key.use}'. Ensure that all required dependencies have been added and 'npm install' has been executed.`)
throw new BuildError(`No provider found for build task '${key.for}'. Ensure that all required dependencies have been added and 'npm install' has been executed.`)
}

@@ -167,7 +162,5 @@ return provider

_loadProviders() {
// DefaultBuildTaskProvider must be last entry
return [
new InternalBuildTaskProvider(),
new PluginBuildTaskProvider(),
new DefaultBuildTaskProvider()
new PluginBuildTaskProvider()
]

@@ -179,25 +172,3 @@ }

* Default provider implementation handling fully qualified custom build task declarations.
* Has to be the last entry in the providers list.
*/
class DefaultBuildTaskProvider extends BuildTaskProvider {
constructor() {
super()
}
providesTask(key) {
return !!key.use
}
getPlugin(task) {
try {
return require(require.resolve(task.use, { paths: [cds.root] }))
} catch (e) {
throw new Error(`Build task could not be resolved - module [${task.use}] cannot be loaded:\n` + e)
}
}
}
/**
* Default provider implementation handling fully qualified custom build task declarations.
*/
class PluginBuildTaskProvider extends BuildTaskProvider {

@@ -240,3 +211,4 @@ constructor() {

getTask(key) {
const task = this.plugins.get(key.for).taskDefaults ?? {}
// plugin entry is already validated, a taskDefaults default member exists
const task = JSON.parse(JSON.stringify(this.plugins.get(key.for).taskDefaults ?? {}))
task.for = key.for

@@ -243,0 +215,0 @@ task.src ??= DEFAULT_SRC_FOLDER

@@ -7,4 +7,2 @@ exports.OUTPUT_MODE = "outputMode"

exports.DEFAULT_SRC_FOLDER = '.'
exports.BUILD_TASK_NPM_SCOPE = "@sap"
exports.BUILD_TASK_PREFIX = exports.BUILD_TASK_NPM_SCOPE + "/cds/build"
exports.BUILD_TASK_JAVA = "java"

@@ -20,2 +18,4 @@ exports.BUILD_TASK_JAVA_CF = "java-cf"

exports.BUILD_TASKS = [this.BUILD_TASK_JAVA, this.BUILD_TASK_JAVA_CF, this.BUILD_TASK_NODEJS, this.BUILD_TASK_NODE_CF, this.BUILD_TASK_HANA, this.BUILD_TASK_FIORI, this.BUILD_TASK_MTX, this.BUILD_TASK_MTX_SIDECAR, this.BUILD_TASK_MTX_EXTENSION]
exports.DEFAULT_BUILTIN_PRIORITY_MIN = 0
exports.DEFAULT_BUILTIN_PRIORITY_MAX = 524

@@ -38,4 +38,4 @@ exports.ODATA_VERSION_V2 = "v2"

exports.CONTENT_DEFAULT_ENV_JSON = "contentDefaultEnvJson" // copy default-env.json file into deployment folder - DISABLED by default
exports.CONTENT_WS = "ws" // determine model paths including submodules + CONTENT_SUBMODULES_HANA - DISABLED by default
exports.OPTION_WS = "ws" // run a workspace build, e.g. including model folders of configured npm workspaces
exports.CSV_FILE_TARGET = "csvFileTarget" // target folder when copying CSV files to the deployment target folder ./db/src/gen/*, default is 'data'

@@ -42,0 +42,0 @@ exports.CSV_FILE_DETECTION = "csvFileDetection" // detect CSV files based on CDS model location - ENABLED by default

const fs = require('fs')
const cds = require('../cds')
const BuildTaskEngine = require('./buildTaskEngine')
const BuildPlugin = require('./buildPlugin')
const Plugin = require('./plugin')
const { BuildError } = require('./util')
module.exports = { build, register, Plugin: BuildPlugin, BuildError, BuildPlugin }
module.exports = { build, register, Plugin, BuildError }

@@ -9,0 +9,0 @@ /**

const cds = require('../cds')
const Plugin = require('./plugin')
const DEBUG = cds.debug('cli|build')

@@ -7,2 +8,3 @@

this._plugins = new Map()
this._registerDefaults()
}

@@ -14,24 +16,19 @@ get plugins() {

if (!id || !plugin) {
throw Error("Invalid parameter")
throw new Error(`Invalid parameter passed, build plugin id: ${id}`)
}
if (plugin.impl) {
// REVISIT: remove compat mode - latest cds-dk@8
const taskDefaults = plugin.taskDefaults
plugin = this._loadPlugin(id, plugin.impl)
plugin.taskDefaults = taskDefaults
}
if (this._plugins.has(id)) {
throw Error(`Build plugin ${id} already registered`)
throw new Error(`CDS build plugin ${id} already registered`)
}
if(!(plugin.prototype instanceof Plugin)) {
// with cds-dk version 7 the following registration format was supported:
// cds.build.register("cov2ap", { impl: "@cap-js-community/odata-v2-adapter/src/build.js", taskDefaults: { src: "srv" } })
throw `The cds build plugin '${id}' is not compatible with this @sap/cds-dk version. Update the npm package '${plugin.impl || id}' to the latest version.`
}
DEBUG?.(`Registering build plugin ${id}`)
this._plugins.set(id, plugin)
}
_loadPlugin(id, impl) {
try {
// project-local module is preferred - otherwise from our own dependencies
return require(require.resolve(impl, { paths: [cds.root, __dirname] }))
} catch (e) {
throw new Error(`Custom build plugin implementation '${impl}' for '${id}' cannot be loaded:\n` + e)
}
_registerDefaults() {
this.register('helm', require('./plugins/helm'))
}
})

@@ -5,3 +5,3 @@ const path = require('path')

const InternalBuildPlugin = require('./internalBuildPlugin')
const { getI18nDefaultFolder } = require('../util')
const { getI18nDefaultFolder, filterFeaturePaths } = require('../util')

@@ -20,8 +20,4 @@ class FeatureToggleBuildPlugin extends InternalBuildPlugin {

get ftsName() {
return path.dirname(cds.env.features.folders || 'fts/*')
}
async compileAll(csn, destBase, destFts) {
const sources = await this._resolveSourcePaths(csn)
const sources = filterFeaturePaths(csn, this.task.options.model || this.task.src)
const dictionary = { base: null, features: null }

@@ -52,2 +48,3 @@

const i18nFolder = getI18nDefaultFolder()
const ftsName = path.dirname(cds.env.features.folders || 'fts/*')
// create language bundle for base model

@@ -64,3 +61,3 @@ const i18n = await this.collectLanguageBundles(dictionary.base, path.join(destBase, i18nFolder))

dictionary.features[ftName]['$sources'] = paths.features[ftName]
await this.collectLanguageBundles(dictionary.features[ftName], path.join(destFts, this.ftsName, ftName, i18nFolder))
await this.collectLanguageBundles(dictionary.features[ftName], path.join(destFts, ftsName, ftName, i18nFolder))
}

@@ -70,32 +67,2 @@ }

async _resolveSourcePaths(csn) {
const regex = new RegExp(this.ftsName + '[/|\\\\](?<ftName>[^/|\\\\]*)')
let paths = { base: [] }
// add ROOT source file paths for the base model
paths.base = this._resolveModel().reduce((acc, file) => {
const match = file.match(regex)
if (!match) {
acc.push(file)
}
return acc
}, [])
// add source file paths for the features
paths.features = csn['$sources'].reduce((acc, file) => {
const match = file.match(regex)
if (match) {
const { ftName } = match.groups
//feature
if (!acc[ftName]) {
acc[ftName] = []
}
acc[ftName].push(file)
}
return acc
}, {})
return paths
}
async _compileFeatures(ftsPaths, destBase, destFts, baseCsn) {

@@ -107,2 +74,3 @@ if (!ftsPaths) {

const options = { ...this.options(), flavor: 'parsed' }
const ftsName = path.dirname(cds.env.features.folders || 'fts/*')

@@ -112,3 +80,3 @@ // create feature models

const ftCsn = await cds.load(ftsPaths[ftName], options)
const ftPath = path.join(destFts, this.ftsName, ftName)
const ftPath = path.join(destFts, ftsName, ftName)

@@ -129,3 +97,3 @@ // replace require paths by base model path to ensure precedence of feature annotations

// assuming only core compiler options are relevant for validation scenario
cds.compiler.compileSources({ [ftCsn.requires[0]]: baseCsn, 'feature.csn': ftCsn }, { ...super.options(), ...cds.env.cdsc ?? {}})
cds.compiler.compileSources({ [ftCsn.requires[0]]: baseCsn, 'feature.csn': ftCsn }, { ...super.options(), ...cds.env.cdsc ?? {} })
}

@@ -132,0 +100,0 @@ }

const path = require('path')
const parser = require('./migrationtable')
const { BuildError, hasOptionValue, BuildMessage } = require('../../util')
const { BuildError, BuildMessage } = require('../../util')
const { SEVERITY_WARNING } = require('../../constants')

@@ -8,44 +8,70 @@ const cds = require('../../../cds'), { compiler: cdsc } = cds

const { getArtifactCdsPersistenceName } = cdsc
const ANNO_PERSISTENCE_JOURNAL = '@cds.persistence.journal'
const DEBUG = cds.debug('cli|build')
module.exports = async (model, lastDevVersion, srcPath, options = {}) => {
const { definitions, deletions, migrations, afterImage, journalFileNames } = _toHdiMigration(model, lastDevVersion, options)
const definitionResult = []
module.exports = async (model, beforeImage, srcPath, options = {}) => {
const { definitions, migrations, afterImage, journalFileNames } = _compileToHana(model, options, beforeImage)
const definitionResult = []
for (const { name, suffix, sql } of definitions) {
let definitionEntry = { name, suffix, content: sql }
for (const { name, suffix, sql } of definitions) {
let definitionEntry = { name, suffix, content: sql }
if (suffix === '.hdbtable') {
if (journalFileNames.has(name)) {
const migration = migrations.find(migration => migration.name === name)
definitionEntry = await _2migrationtable(srcPath, migration || _emptyMigration(name), sql)
}
if (suffix === '.hdbtable') {
if (journalFileNames.has(name)) {
const migration = migrations.find(migration => migration.name === name)
definitionEntry = await _2migrationtable(srcPath, migration || _emptyMigration(name), sql)
}
}
if (definitionEntry) {
definitionResult.push(definitionEntry)
}
}
if (definitionEntry) {
definitionResult.push(definitionEntry)
}
}
return { definitions: definitionResult, deletions, afterImage: _filterJournalArtifacts(afterImage) }
return { definitions: definitionResult, afterImage }
}
function _toHdiMigration(model, lastDevVersion, options) {
const result = cdsc.to.hdi.migration(cds.minify(model), options, lastDevVersion);
const journalFileNames = _getJournalFileNames(result.afterImage, options);
if (DEBUG) {
DEBUG('cdsc.to.hdi.migration returned')
for (const { name, suffix, sql } of result.definitions) {
if (suffix === '.hdbtable' || suffix === '.hdbmigrationtable') {
if (journalFileNames.has(name)) {
const migration = result.migrations.find(migration => migration.name === name)
DEBUG(`
File ${name + '.hdbmigrationtable'} - ${migration ? migration.changeset.length : 0} new changes
${sql}
${migration ? migration.changeset.map(change => change.sql).join('\n') : 'Empty changeset'}
`)
function _compileToHana(model, options, beforeImage) {
let definitions = []
let migrations = []
let afterImage
if (cds.compile.to.hana) {
const result = cds.compile.to.hana(model, options, beforeImage);
for (const [content, { file }] of result) {
switch (file) {
case 'deletions.json':
break
case 'migrations.json':
migrations = content
break
case 'afterImage.json':
afterImage = content
break
default: {
const { name, ext: suffix } = path.parse(file)
definitions.push({ name, suffix, sql: content })
}
}
}
}
} else {
// compatibility with cds < 8.0.0
const result = cdsc.to.hdi.migration(cds.minify(model), options, beforeImage)
definitions = result.definitions
migrations = result.migrations
afterImage = result.afterImage
}
}
return { ...result, journalFileNames }
let journalFileNames = new Set()
if (afterImage) {
// 1. determine journal file names before _filterJournalArtifacts is called as getArtifactCdsPersistenceName
// might no longer return the proper file name for the reduced CSN, e.g. for draft entities
journalFileNames = _getJournalFileNames(afterImage, options)
// 2. leave only persisted journal-related entities in afterImage
afterImage = _filterJournalArtifacts(afterImage)
}
_debug(definitions, migrations, journalFileNames)
return {
definitions, migrations, afterImage, journalFileNames
}
}

@@ -62,46 +88,46 @@

async function _2migrationtable(srcPath, migration, tableSql) {
let migrationTableModel = null
const file = path.join(srcPath, migration.name + migration.suffix)
try {
migrationTableModel = await parser.read(file)
} catch (e) {
// abort build in order to ensure consistent afterImage model state / hdbmigrationtable file state
throw new BuildError(`${path.relative(process.cwd(), file)}: ${e.message}`)
}
let migrationTableModel = null
const file = path.join(srcPath, migration.name + migration.suffix)
try {
migrationTableModel = await parser.read(file)
} catch (e) {
// abort build in order to ensure consistent afterImage model state / hdbmigrationtable file state
throw new BuildError(`${path.relative(process.cwd(), file)}: ${e.message}`)
}
if (migrationTableModel) {
// adding new changeset if change exist, ignore otherwise
const migrationEntry = _getNewMigrationEntry(migration.changeset, migrationTableModel.versionNumber)
if (migrationEntry) {
const versionNumber = migrationTableModel.versionNumber + 1
const migrationCount = migrationTableModel.migrations.entries.length
const migrations = `${migrationEntry.content}${migrationCount > 0 ? '\n' : ''}${migrationTableModel.migrations.toString()}${migrationCount > 0 ? '\n' : ''}`
return {
name: migration.name,
suffix: migration.suffix,
content: `== version=${versionNumber}\n${tableSql}\n\n${migrations}`,
changed: true,
dropColumns: migrationEntry.dropColumns
}
} else {
// existing migration file version
return { name: migration.name, suffix: migration.suffix, content: migrationTableModel.toString(), changed: false }
if (migrationTableModel) {
// adding new changeset if change exist, ignore otherwise
const migrationEntry = _getNewMigrationEntry(migration.changeset, migrationTableModel.versionNumber)
if (migrationEntry) {
const versionNumber = migrationTableModel.versionNumber + 1
const migrationCount = migrationTableModel.migrations.entries.length
const migrations = `${migrationEntry.content}${migrationCount > 0 ? '\n' : ''}${migrationTableModel.migrations.toString()}${migrationCount > 0 ? '\n' : ''}`
return {
name: migration.name,
suffix: migration.suffix,
content: `== version=${versionNumber}\n${tableSql}\n\n${migrations}`,
changed: true,
dropColumns: migrationEntry.dropColumns
}
} else {
// existing migration file version
return { name: migration.name, suffix: migration.suffix, content: migrationTableModel.toString(), changed: false }
}
}
}
// initial migration file version
return { name: migration.name, suffix: migration.suffix, content: `== version=1\n${tableSql}\n`, changed: true }
// initial migration file version
return { name: migration.name, suffix: migration.suffix, content: `== version=1\n${tableSql}\n`, changed: true }
}
function _getNewMigrationEntry(changeset, currentVersion) {
if (changeset && changeset.length > 0) {
const dropColumns = changeset.some(e => e.drop)
const manualChange = changeset.some(e => !e.sql)
const enableDrop = cds.env.hana?.journal?.['enable-drop']
const content = changeset.reduce((acc, e) => {
if (!acc) {
acc = `== migration=${currentVersion + 1}\n`
acc += `${cdscVersion}\n`
if (changeset && changeset.length > 0) {
const dropColumns = changeset.some(e => e.drop)
const manualChange = changeset.some(e => !e.sql)
const enableDrop = cds.env.hana?.journal?.['enable-drop']
const content = changeset.reduce((acc, e) => {
if (!acc) {
acc = `== migration=${currentVersion + 1}\n`
acc += `${cdscVersion}\n`
if (dropColumns && enableDrop !== true) {
acc += `>>>>> Manual resolution required - DROP statements causing data loss are disabled by default.
if (dropColumns && enableDrop !== true) {
acc += `>>>>> Manual resolution required - DROP statements causing data loss are disabled by default.
>>>>> You may either:

@@ -111,59 +137,59 @@ >>>>> uncomment statements to allow incompatible changes, or

>>>>> After manual resolution delete all lines starting with >>>>>\n`
} else if (manualChange) {
acc += `>>>>> Manual resolution required - insert ALTER statement(s) as described below.
} else if (manualChange) {
acc += `>>>>> Manual resolution required - insert ALTER statement(s) as described below.
>>>>> After manual resolution delete all lines starting with >>>>>\n`
}
}
if (e.sql) {
if (e.drop && enableDrop !== true) {
acc += `${e.sql.replace(/^/gm, '-- ')}\n`;
} else {
acc += `${e.sql}\n`
}
} else {
acc +=
`>>>>> Insert ALTER statement for: ${e.description}\n`
}
return acc
}, null)
return { dropColumns, content }
}
return null
}
}
if (e.sql) {
if (e.drop && enableDrop !== true) {
acc += `${e.sql.replace(/^/gm, '-- ')}\n`;
} else {
acc += `${e.sql}\n`
}
} else {
acc +=
`>>>>> Insert ALTER statement for: ${e.description}\n`
}
return acc
}, null)
return { dropColumns, content }
}
return null
}
function _emptyMigration(name) {
return { name, suffix: ".hdbmigrationtable", changeset: [] }
return { name, suffix: ".hdbmigrationtable", changeset: [] }
}
function _getJournalFileNames(model, options) {
const journalNames = new Set(cds.reflect(model).all(item => {
if (item.kind === 'entity' && item[ANNO_PERSISTENCE_JOURNAL] === true) {
if (item['@cds.persistence.skip'] === true || item['@cds.persistence.exists'] === true) {
options.messages?.push(new BuildMessage(`[hdbmigrationtable] annotation @cds.persistence.journal skipped for entity '${item.name}' as persistence exists`, SEVERITY_WARNING))
}
return true
}
return false
}).map(entity => getArtifactCdsPersistenceName(entity.name, 'quoted', model, 'hana')))
const journalNames = new Set(cds.reflect(model).all(item => {
if (item.kind === 'entity' && item['@cds.persistence.journal'] === true) {
if (item['@cds.persistence.skip'] === true || item['@cds.persistence.exists'] === true) {
options.messages?.push(new BuildMessage(`[hdbmigrationtable] annotation @cds.persistence.journal skipped for entity '${item.name}' as persistence exists`, SEVERITY_WARNING))
}
return true
}
return false
}).map(entity => getArtifactCdsPersistenceName(entity.name, 'quoted', model, 'hana')))
DEBUG?.(`\n[hdbmigrationtable] found ${journalNames.size} model entities annotated with '@cds.persistence.journal`)
DEBUG?.(`[hdbmigrationtable] ${[...journalNames].join(', ')}\n`)
DEBUG?.(`\n[hdbmigrationtable] found ${journalNames.size} model entities annotated with '@cds.persistence.journal`)
DEBUG?.(`[hdbmigrationtable] ${[...journalNames].join(', ')}\n`)
return journalNames
return journalNames
}
// delete all entities that are not relevant
function _filterJournalArtifacts(csn) {
if (!csn) {
return
}
const dict = csn.definitions
for (const name in dict) {
if (!_isPersistedAsJournalTable(dict[name])) {
delete dict[name]
// only keep journal related entities
function _filterJournalArtifacts(afterImage) {
if (!afterImage) {
return
}
}
// mark CSN as modified by cds build
csn.meta.build = `CDS Build v${cds.version}`
return csn;
const dict = afterImage.definitions
for (const name in dict) {
if (!_isPersistedAsJournalTable(dict[name])) {
delete dict[name]
}
}
// mark CSN as modified by cds
afterImage.meta.build = `CDS Build v${cds.version}`
return afterImage
}

@@ -173,7 +199,25 @@

function _isPersistedAsJournalTable(artifact) {
return artifact.kind === 'entity' && hasOptionValue(artifact['@cds.persistence.journal'], true) &&
!artifact.abstract &&
!hasOptionValue(artifact['@cds.persistence.skip'], true) &&
!hasOptionValue(artifact['@cds.persistence.exists'], true) &&
(!artifact.query && !artifact.projection || hasOptionValue(artifact['@cds.persistence.table'], true))
return (
artifact.kind === 'entity' &&
artifact['@cds.persistence.journal'] &&
!artifact.abstract &&
!artifact['@cds.persistence.skip'] &&
!artifact['@cds.persistence.exists'] &&
((!artifact.query && !artifact.projection) || artifact['@cds.persistence.table'])
)
}
function _debug(definitions, migrations, journalFileNames) {
if (DEBUG) {
DEBUG('compile.to.hana returned')
for (const { name, suffix, sql } of definitions) {
if (suffix === '.hdbtable' && journalFileNames.has(name)) {
const migration = migrations.find(migration => migration.name === name)
DEBUG(`File ${name + '.hdbmigrationtable'} - ${migration ? migration.changeset.length : 0} new changes
${sql}
${migration ? migration.changeset.map(change => change.sql).join('\n') : 'Empty changeset'}
`)
}
}
}
}

@@ -6,6 +6,6 @@ const fs = require('fs')

const InternalBuildPlugin = require('../internalBuildPlugin')
const { BuildError, relativePaths, BuildMessage, getWorkspaces, hasOptionValue } = require('../../util')
const { BuildError, relativePaths, BuildMessage, getWorkspaces, hasOptionValue, getFiles } = require('../../util')
const { OUTPUT_MODE_NONE, OUTPUT_MODE, CONTENT_PACKAGE_JSON, CONTENT_HDBTABLEDATA, CSV_FILE_DETECTION,
CONTENT_ENV, CONTENT_DEFAULT_ENV_JSON, CONTENT_NODE_MODULES, OUTPUT_MODE_RESULT, CONTINUE_UNRESOLVED_SCHEMA_CHANGES,
CSV_FILE_TARGET, CONTENT_WS } = require('../../constants')
CSV_FILE_TARGET, OPTION_WS} = require('../../constants')
const { WARNING } = InternalBuildPlugin

@@ -54,3 +54,3 @@ const DEFAULT_COMPILE_DEST_FOLDER = path.normalize("src/gen")

// copy native hana artifacts from workspace dependencies
if (cds.cli.options?.ws || hasOptionValue(this.task.options?.[CONTENT_WS], true)) {
if (cds.cli.options?.[OPTION_WS] || hasOptionValue(this.task.options?.[OPTION_WS], true)) {
await this._addWorkspaceContent()

@@ -168,5 +168,6 @@ }

// handle *.csv and *.hdbtabledata located in '<dbSrc>/data' and '<dbSrc>/csv' folder
// handle *.csv and *.hdbtabledata located in '<dbSrc>/data' and '<dbSrc>/csv' folder,
// subfolders are not supported
const allFiles = csvDirs.reduce((acc, csvDir) => {
return acc.concat(InternalBuildPlugin._find(csvDir, (entry) => {
return acc.concat(getFiles(csvDir, (entry) => {
if (fs.statSync(entry).isDirectory()) {

@@ -199,3 +200,3 @@ return false

const allCsvFiles = InternalBuildPlugin._find(destSrcDir, (entry) => {
const allCsvFiles = getFiles(destSrcDir, (entry) => {
if (fs.statSync(entry).isDirectory()) {

@@ -267,4 +268,6 @@ return true

let no_migration = cds.env.features.journal === false || format === 'hdbcds' || Object.values(model.definitions).every(def => !def['@cds.persistence.journal'])
if (no_migration) {
if (cds.env.features.journal === false
|| format === 'hdbcds'
|| (!cds.compile.to.hana && Object.values(model.definitions).every(def => !def['@cds.persistence.journal']))) {
// compatibility with existing cds.compile.to.hdbtable plugins cds < 8.0.0
return await this._compileToHdb(model, format)

@@ -326,5 +329,3 @@ } else {

const compilationResult = await to_hdbmigration(model, lastDev, dbSrcDir, options)
const definitions = compilationResult.definitions
const afterImage = compilationResult.afterImage
const { definitions, afterImage } = await to_hdbmigration(model, lastDev, dbSrcDir, options)
let validationError

@@ -363,3 +364,2 @@

// ensure
try {

@@ -373,17 +373,12 @@ if (validationError) {

if (afterImage) {
if (migrationTableFiles.length > 0) {
if (!HanaBuildPlugin._toEqualIgnoreMeta(lastDev, afterImage)) {
await this.write(afterImage).to(lastDevCsnDir)
}
// add src/.hdiconfig if not existing
if (!fs.existsSync(path.join(dbSrcDir, FILE_NAME_HDICONFIG))) {
const template = await HanaBuildPlugin._readTemplateAsJson('.hdiconfig-cloud')
await this.write(template).to(path.join(dbSrcDir, FILE_NAME_HDICONFIG))
}
if (!HanaBuildPlugin._toEqualIgnoreMeta(lastDev, afterImage)) {
await this.write(afterImage).to(lastDevCsnDir)
}
// add src/.hdiconfig if not existing
if (migrationTableFiles.length > 0 && !fs.existsSync(path.join(dbSrcDir, FILE_NAME_HDICONFIG))) {
const template = await HanaBuildPlugin._readTemplateAsJson('.hdiconfig-cloud')
await this.write(template).to(path.join(dbSrcDir, FILE_NAME_HDICONFIG))
}
}
}
if (!afterImage) {
throw new BuildError(`Inconsistent CDS compilation results - file ${lastDevCsnFolder} missing`)
}
return hdiPlugins

@@ -518,3 +513,3 @@ }

const dbSrcDir = path.join(this.task.src, "src")
const migrationTableFiles = InternalBuildPlugin._find(dbSrcDir, (res) => {
const migrationTableFiles = getFiles(dbSrcDir, (res) => {
return fs.statSync(res).isFile() && path.extname(res) === FILE_EXT_HDBMIGRATIONTABLE

@@ -521,0 +516,0 @@ })

{
"name": "deploy",
"dependencies": {
"@sap/hdi-deploy": "^4"
"hdb": "^0",
"@sap/hdi-deploy": "^5"
},
"engines": {
"node": "^16 || ^18 || ^20"
"node": "^18 || ^20"
},
"scripts": {
"start": "node node_modules/@sap/hdi-deploy/deploy.js"
"start": "node node_modules/@sap/hdi-deploy/deploy.js --use-hdb"
}
}
const fs = require('fs')
const path = require('path')
const cds = require('../../cds')
const BuildPlugin = require('../buildPlugin')
const { hasOptionValue } = require('../util')
const { FOLDER_GEN } = require('../constants')
const Plugin = require('../plugin')
const { hasOptionValue, getFiles } = require('../util')
const { FOLDER_GEN, DEFAULT_BUILTIN_PRIORITY_MAX } = require('../constants')
class InternalBuildPlugin extends BuildPlugin {
class InternalBuildPlugin extends Plugin {
/**
* Custom build plugins are executed before internal build plugins in order
* ensure and content cannot be overwritten by mistake.
* By default custom build plugins are executed before internal build plugins
* to ensure built-in plugin content isn't overwritten by mistake.
*/
get priority() {
return InternalBuildPlugin.PRIORITY_MAX_VALUE
return DEFAULT_BUILTIN_PRIORITY_MAX / 2
}
init() {
init() {
// REVISIT: no default gen folder for now

@@ -22,20 +22,11 @@ }

async clean() {
function _isSubDirectory(parent, child) {
return !path.relative(parent, child).startsWith('..')
}
// build results are deleted by the BuildTaskEngine if the build.target !== '.'
// make sure that src is not a subfolder of dest
if (cds.env.build.target === '.' && this.task.src !== this.task.dest && !this._isSubDirectory(this.task.dest, this.task.src)) {
if (cds.env.build.target === '.' && this.task.src !== this.task.dest && !_isSubDirectory(this.task.dest, this.task.src)) {
await fs.promises.rm(this.task.dest, { force: true, recursive: true })
}
}
/**
* Maximum allowed priority for internal build tasks.
*/
static get PRIORITY_MAX_VALUE() {
return 0;
}
/**
* Minimum allowed priority for custom build tasks.
*/
static get PRIORITY_MIN_VALUE() {
return Number.MIN_SAFE_INTEGER;
}

@@ -57,12 +48,2 @@ /**

/**
* Returns whether cds env has a property with the specified value.
* If the value is omitted the existence of the given property name is checked.
* @param {string} qualifiedName
* @param {any=} value
*/
hasCdsEnvOption(qualifiedName, value) {
return hasOptionValue(cds.env.get(qualifiedName), value)
}
/**
* Determines whether the given build option value has been set for this build task.

@@ -86,10 +67,2 @@ * If the value is omitted, the existence of the given property name is checked.

/**
* Returns a list of CDS model files defining the transitive closure of the CDS model based on the model options
* defined for this build task.
*/
resolveModel() {
return this._resolveModel()
}
/**
* Returns whether the build results of this build plugin are created inplace

@@ -103,3 +76,20 @@ * or in a separate staging folder which is not part of the build tasks src folder.

async copyNativeContent(srcDir, destDir, customFilter) {
const files = InternalBuildPlugin._find(srcDir, (src) => {
function commonStagingBuildFilter(src, destDir) {
if (typeof src !== "string" || typeof destDir !== "string") {
return false
}
if (!fs.statSync(src).isDirectory()) {
return true //file
}
if (src === destDir) {
return false
}
const regex = new RegExp(FOLDER_GEN + "\\b")
if (src === path.resolve(cds.root, cds.env.build.target)) {
return false
}
return !regex.exec(path.basename(src))
}
const files = getFiles(srcDir, (src) => {
// do not copy files that:

@@ -109,4 +99,4 @@ // - from 'cds.env.build.target'

// - from some generation folder
// - do not match filter function
return this._commonStagingBuildFilter(src, destDir) && (!customFilter || customFilter.call(this, src))
// - do not match filter function ,
return commonStagingBuildFilter(src, destDir) && (!customFilter || customFilter.call(this, src))
})

@@ -173,58 +163,3 @@ return Promise.all(

}
_isSubDirectory(parent, child) {
return !path.relative(parent, child).startsWith('..')
}
_commonStagingBuildFilter(src, destDir) {
if (typeof src !== "string" || typeof destDir !== "string") {
return false
}
if (!fs.statSync(src).isDirectory()) {
return true //file
}
if (src === destDir) {
return false
}
const regex = new RegExp(FOLDER_GEN + "\\b")
if (src === path.resolve(cds.root, cds.env.build.target)) {
return false
}
return !regex.exec(path.basename(src))
}
static _find(srcDir, filter) {
const files = []
this._traverseFileSystem(srcDir, files, filter)
return files;
}
static _traverseFileSystem(srcDir, files, filter) {
let entries = []
try {
entries = fs.readdirSync(srcDir)
} catch (e) {
// ignore if not existing
}
entries.map(subDirEntry => path.join(srcDir, subDirEntry)).forEach((entry) => {
this._handleResource(entry, files, filter)
})
}
static _handleResource(entry, files, filter) {
if (!filter || filter.call(this, entry)) {
var stats = this._getResourceStatus(entry)
if (stats.isDirectory()) {
this._traverseFileSystem(entry, files, filter)
} else if (stats.isFile() || stats.isSymbolicLink()) {
files.push(entry)
}
}
}
// for testing purposes
static _getResourceStatus(entry) {
return fs.lstatSync(entry)
}
}
module.exports = InternalBuildPlugin

@@ -7,3 +7,3 @@ const fs = require('fs')

const { FILE_EXT_CDS, BUILD_TASK_HANA, BUILD_TASK_FIORI, BUILD_TASK_JAVA, BUILD_TASK_JAVA_CF, BUILD_TASK_NODEJS, BUILD_TASK_NODE_CF, BUILD_TASK_MTX,
BUILD_TASK_PREFIX, BUILD_TASKS, BUILD_TASK_MTX_SIDECAR, MTX_SIDECAR_FOLDER, BUILD_TASK_MTX_EXTENSION, NODEJS_MODEL_EXCLUDE_LIST,
BUILD_TASKS, BUILD_TASK_MTX_SIDECAR, MTX_SIDECAR_FOLDER, BUILD_TASK_MTX_EXTENSION, NODEJS_MODEL_EXCLUDE_LIST,
IGNORE_DEFAULT_MODELS, CONTENT_WS } = require("../constants")

@@ -14,15 +14,4 @@ const ResourcesTarBuilder = require('./mtx/resourcesTarBuilder')

class InternalBuildTaskProvider extends BuildTaskProvider {
constructor() {
super()
// ensure that classic mtx is not used in combination with cds-dk@7
if (InternalBuildTaskProvider._isClassicMtx()) {
throw new BuildError(`@sap/cds-mtx is deprecated and not supported by @sap/cds-dk >= 7.
Upgrade to @sap/cds-mtxs following the migration guide https://cap.cloud.sap/docs/guides/multitenancy/old-mtx-migration or install @sap/cds-dk version 6 instead.`)
}
}
providesTask(key) {
return BUILD_TASKS.includes(key.for) || key.use && key.use.startsWith(BUILD_TASK_PREFIX)
// REVISIT: do not support different build task implementations of the same build task ID defined by 'for'
// return (BUILD_TASKS.includes(key.for) && !key.use) || (key.use && key.use.startsWith(BUILD_TASK_PREFIX))
return BUILD_TASKS.includes(key.for)
}

@@ -35,6 +24,2 @@

async applyTaskDefaults(task) {
const taskFor = InternalBuildTaskProvider._getForValueFromTask(task)
task.for = task.for || taskFor
task.use = task.use || `${BUILD_TASK_PREFIX}/${taskFor}`
// src folder needs to be initialized first

@@ -47,3 +32,3 @@ InternalBuildTaskProvider._setDefaultSrcFolder(task)

getPlugin(task) {
return require(`./${InternalBuildTaskProvider._getForValueFromTask(task)}`)
return require(`./${task.for}`)
}

@@ -209,3 +194,3 @@

// mtxs nodejs app without sidecar
if (!InternalBuildTaskProvider._hasJavaNature(src)) {
if (!hasJavaNature()) {
DEBUG?.("MTX Nodejs app without sidecar")

@@ -226,3 +211,3 @@ return {

let task = null
if (InternalBuildTaskProvider._hasJavaNature(src)) {
if (hasJavaNature()) {
task = this._createJavaTask(src, taskOptions)

@@ -252,2 +237,5 @@ } else {

DEBUG?.("found implementation technology Nodejs")
if (!fs.existsSync(path.join(cds.root, 'package.json'))) {
throw new BuildError(`No 'package.json' found in project root folder '${cds.root}'`)
}
return {

@@ -264,13 +252,2 @@ src: src,

static _isClassicMtx() {
function _hasMtxDependency() {
try {
return !!require(path.join(cds.root, 'package.json'))?.dependencies?.['@sap/cds-mtx']
} catch (e) {
return false
}
}
return /true/i.test(process.env.OLD_MTX) || _hasMtxDependency()
}
/**

@@ -314,3 +291,3 @@ * Distinguishes whether the Nodejs project is a Streamlined MTX (cds >=6) or an old MTX project.

default:
throw new Error(`Unknown build task '${task.use || task.for}'`)
throw new Error(`Unknown build task '${task.for}'`)
}

@@ -322,3 +299,3 @@ }

// REVISIT: filter nodejs bootstrap service models for Java - issues/12770#issuecomment-1805719
const filter = this._hasJavaNature() ? (m) => !NODEJS_MODEL_EXCLUDE_LIST.includes(m) : null
const filter = hasJavaNature() ? (m) => !NODEJS_MODEL_EXCLUDE_LIST.includes(m) : null
let taskModelPaths = task.options?.model

@@ -350,15 +327,2 @@ if (taskModelPaths && !Array.isArray(taskModelPaths)) {

/**
* Returns whether this project is a java project or not.
* @param {string} srv - the folder name of the service module
*/
static _hasJavaNature(srv = cds.env.folders.srv) {
srv = this._getModuleFolder(Array.isArray(srv) ? srv : [srv])
return hasJavaNature([cds.root, srv && path.join(cds.root, srv)])
}
static _getForValueFromTask(task) {
return task.for ? task.for : task.use && task.use.substring(BUILD_TASK_PREFIX.length + 1)
}
/**
* Determines the module folder from the given list that may represent files or folders w or w/o .cds file extension.

@@ -365,0 +329,0 @@ * @param {Array} filesOrFolders

@@ -27,5 +27,4 @@ const fs = require('fs')

if (!this.hasBuildOption(CONTENT_EDMX, false)) {
// generate edmx files containing all features
// REVISIT: localized edmx for backward compatibility
if (!this.hasBuildOption(CONTENT_LOCALIZED_EDMX, false)) {
// generate edmx files containing all features - localized edmx for backward compatibility
if (this.hasBuildOption(CONTENT_LOCALIZED_EDMX, true)) {
await this.compileToEdmx(model, compileDest, { [FLAVOR_LOCALIZED_EDMX]: true })

@@ -32,0 +31,0 @@ }

@@ -136,4 +136,7 @@ const path = require('path')

static _getSaasPkgPath() {
if (!cds.env.extends) {
throw new BuildError(`Missing configuration 'cds.extends' in package.json`)
}
// cds.env.extends holds the SaaS app package name, also if the extends field is defined on root level
let saasPkgPath = this._getWorkspacePath(cds.env.extends || '_base')
let saasPkgPath = this._getWorkspacePath(cds.env.extends)
if (!saasPkgPath) {

@@ -160,3 +163,3 @@ // older projects might not have a workspace definition

static _getNodeModulesPath() {
const saasPkgPath = path.join(cds.root, 'node_modules', cds.env.extends || '_base')
const saasPkgPath = path.join(cds.root, 'node_modules', cds.env.extends)
if (!fs.existsSync(saasPkgPath)) {

@@ -163,0 +166,0 @@ throw new BuildError(`The SaaS application base model is missing. Did you execute 'cds pull' and 'npm install'?`)

@@ -15,4 +15,4 @@ const path = require('path')

get priority() {
// should be scheduled after 'hana' build tasks are finished
return NodejsBuildPlugin.PRIORITY_MIN_VALUE
// must run after 'hana' build task
return super.priority - 10
}

@@ -19,0 +19,0 @@ init() {

@@ -1,2 +0,2 @@

/* eslint-disable no-empty */
const fs = require('fs')

@@ -12,5 +12,6 @@ const path = require('path')

get priority() {
// should be scheduled after 'hana' build tasks are finished
return EdmxBuildPlugin.PRIORITY_MIN_VALUE
// must run after 'hana' build task
return super.priority - 10
}
init() {

@@ -22,3 +23,3 @@ super.init()

throw new BuildError(`@sap/cds-mtx is deprecated and not supported by @sap/cds-dk >= 7. This also applies to the 'mtx' build task in Java projects.
Upgrade to @sap/cds-mtxs following the migration guide https://cap.cloud.sap/docs/guides/multitenancy/old-mtx-migration or install @sap/cds-dk version 6 instead.`)
Upgrade to @sap/cds-mtxs following the migration guide https://cap.cloud.sap/docs/guides/multitenancy/old-mtx-migration.`)
}

@@ -25,0 +26,0 @@ }

@@ -31,14 +31,5 @@ const fs = require('fs')

* Returns whether this project is a java project or not.
* @param {Array} dirs - the absolute path names to check.
*/
function hasJavaNature(dirs) {
return dirs.flat().reduce((acc, dir) => {
if (dir) {
const file = path.join(dir, 'pom.xml')
if (fs.existsSync(file)) {
acc.push(file)
}
}
return acc
}, []).length > 0
function hasJavaNature() {
return [cds.env.folders.srv, '.'].some(f => f && fs.existsSync(path.join(cds.root, f, 'pom.xml')))
}

@@ -180,4 +171,14 @@

async function getWorkspaces(resolve = false, devDependencies = true, wsRoot = cds.root) {
let pgkJsonWsRoot
try {
pgkJsonWsRoot = require(path.join(wsRoot, 'package.json'))
} catch (e) {
if (e.code === 'MODULE_NOT_FOUND') {
DEBUG?.(`No package.json found in project '${wsRoot}', skipping workspace resolution.`)
return []
}
throw e
}
const workspaces = []
const pgkJsonWsRoot = require(path.join(wsRoot, 'package.json'))
let pgkJson = pgkJsonWsRoot

@@ -207,2 +208,40 @@ if (wsRoot !== cds.root) {

/**
* This JavaScript function filterFeaturePaths is used to filter and categorize file paths
* from a given model into two categories: base and features.
* @param {*} model - represents a data model. It's expected to have a $sources property which is an array of file paths.
* @param {*} modelPaths - an array of file paths.
* @returns
*/
function filterFeaturePaths(model, modelPaths) {
const ftsName = path.dirname(cds.env.features.folders || 'fts/*')
const regex = new RegExp(`[/|\\\\]+${ftsName}[/|\\\\]+(?<ftName>[^/|\\\\]*)`)
const sources = { base: [] }
// add ROOT source file paths for the base model
sources.base = cds.resolve(modelPaths).reduce((acc, file) => {
const match = file.match(regex)
if (!match) {
acc.push(file)
}
return acc
}, [])
// add source file paths for the features
sources.features = model['$sources'].reduce((acc, file) => {
const match = file.match(regex)
if (match) {
const { ftName } = match.groups
//feature
if (!acc[ftName]) {
acc[ftName] = []
}
acc[ftName].push(file)
}
return acc
}, {})
return sources
}
/**
* Read installed npm workspaces using 'npm ls -ws --json'.

@@ -287,2 +326,32 @@ * @returns {Array} list of package paths, or an empty array if either

/**
* Returns a set of files found in the given directory and its subdirectories.
* @param {string} srcDir - the directory to search for files
* @param {Function} filter - the filter function to apply to the found files
* @param {Array} files - the set of files to add the found files to
* @returns {Array} files found in the given directory and its subdirectories
*/
function getFiles(srcDir, filter, files = []) {
function handleResource(entry, filter, files) {
if (!filter || filter.call(this, entry)) {
const stats = fs.lstatSync(entry)
if (stats.isDirectory()) {
getFiles(entry, filter, files)
} else if (stats.isFile() || stats.isSymbolicLink()) {
files.push(entry)
}
}
}
let entries = []
try {
entries = fs.readdirSync(srcDir)
} catch (e) {
// ignore if not existing
}
entries.map(subDirEntry => path.join(srcDir, subDirEntry)).forEach((entry) => {
handleResource(entry, filter, files)
})
return files
}
/**
* Return gnu-style error string for location `loc`:

@@ -378,4 +447,6 @@ * - 'File:Line:Col' without `loc.end`

getWorkspaces,
filterFeaturePaths,
getFiles,
BuildMessage,
BuildError
}

@@ -39,3 +39,3 @@ const path = require('path');

async deploy(options = {}) {
let { model, serviceName, tunnelAddress, vcapFile, undeployWhitelist, hdiOptions = {}, dry, storeCredentials } = options;
let { model, serviceName: pServiceName, tunnelAddress, vcapFile, undeployWhitelist, hdiOptions = {}, dry, storeCredentials } = options;

@@ -55,3 +55,3 @@ options.for = options.for || 'hybrid';

if (vcapFile) {
console.log(`Using VCAP_SERVICES from file ${vcapFile} (beta feature).`);
console.log(`Using VCAP_SERVICES from file ${vcapFile}.`);
}

@@ -64,11 +64,10 @@

const warnings = [];
if (serviceName && vcapEnv.VCAP_SERVICES?.hana?.[0].name !== serviceName) {
if (pServiceName && vcapEnv.VCAP_SERVICES?.hana?.[0].name !== pServiceName) {
if (vcapFile) {
warnings.push(`The specified service name '${serviceName}' was used; the information coming from --vcap-service file was ignored.`);
warnings.push(`The specified service name '${pServiceName}' was used; the information coming from --vcap-service file was ignored.`);
} else if (Object.keys(fromEnv).length > 0) {
warnings.push(`The specified service name '${serviceName}' was used; the information coming from environment variable VCAP_SERVICES was ignored.`);
warnings.push(`The specified service name '${pServiceName}' was used; the information coming from environment variable VCAP_SERVICES was ignored.`);
}
}
const projectPath = cds.root;
const buildResults = await this._build();

@@ -80,4 +79,4 @@

if (dry) {
for (const { task } of buildResults) {
for (const { task } of buildResults) {
if (dry) {
const model = await cds.load(task.src);

@@ -90,7 +89,12 @@ const data = cds.compile.to.hdbtable(model);

}
}
} else {
for (const { task } of buildResults) {
} else {
const dest = task.dest;
let serviceName = pServiceName;
let serviceKeyName;
if (!serviceName && cds.env.requires.db?.binding?.instance) {
serviceName = cds.env.requires.db.binding.instance;
serviceKeyName = cds.env.requires.db.binding.key;
}
if (undeployWhitelist) {

@@ -102,3 +106,2 @@ console.log('Writing undeploy.json');

const hasVCAPEnv = Object.keys(vcapEnv).length > 0;
let serviceKeyName
if (!serviceName && hasVCAPEnv) {

@@ -108,3 +111,3 @@ await fs.mkdir(dest, { recursive: true });

const { cfServiceInstanceName, cfServiceInstanceKeyName, serviceKey } =
await this._getOrCreateCFService(projectPath, dest, serviceName, tunnelAddress);
await this._getOrCreateCFService(dest, serviceName, serviceKeyName, tunnelAddress);

@@ -118,3 +121,3 @@ serviceName ??= cfServiceInstanceName;

if (storeCredentials) {
await this._addInstanceToDefaultEnvJson([dest, projectPath], cfServiceInstanceName, serviceKey);
await this._addInstanceToDefaultEnvJson([dest, cds.root], cfServiceInstanceName, serviceKey);
}

@@ -140,3 +143,3 @@ }

if (storeCredentials) {
await gitUtil.ensureFileIsGitignored('default-env.json', projectPath);
await gitUtil.ensureFileIsGitignored('default-env.json', cds.root);
}

@@ -158,3 +161,3 @@

async _getOrCreateCFService(projectPath, currentModelFolder, serviceName, tunnelAddress) {
async _getOrCreateCFService(currentModelFolder, serviceName, serviceKeyName, tunnelAddress) {
const modelName = path.basename(currentModelFolder);

@@ -168,3 +171,3 @@

} else {
const cfServiceDescriptor = await mtaUtil.getHanaDbModuleDescriptor(projectPath, modelName);
const cfServiceDescriptor = await mtaUtil.getHanaDbModuleDescriptor(cds.root, modelName);
cfServiceInstanceName = cfServiceDescriptor.hdiServiceName;

@@ -181,12 +184,12 @@ cfServiceInstanceMta = cfServiceDescriptor.hdiService

const cfServiceInstanceKeyName = `${cfServiceInstanceName}-key`;
let serviceKey = await cfUtil.getOrCreateServiceKey(serviceInstance, cfServiceInstanceKeyName, { permissions: 'development' });
this._validateServiceKey(serviceKey, cfServiceInstanceKeyName);
const cfServiceInstanceKeyName = serviceKeyName ?? `${cfServiceInstanceName}-key`;
let credentials = await cfUtil.getOrCreateServiceKey(serviceInstance, cfServiceInstanceKeyName, { permissions: 'development' });
this._validateCredentials(credentials, cfServiceInstanceKeyName);
if (tunnelAddress) {
console.log(`Using tunnel address ${bold(tunnelAddress)} (beta feature)`);
serviceKey = this._injectTunnelAddress(serviceKey, tunnelAddress)
credentials = this._injectTunnelAddress(credentials, tunnelAddress)
}
return { cfServiceInstanceName, cfServiceInstanceKeyName, serviceKey, serviceInstance }
return { cfServiceInstanceName, cfServiceInstanceKeyName, serviceKey: credentials, serviceInstance }
}

@@ -213,8 +216,8 @@

_validateServiceKey(serviceKey, cfServiceInstanceKey) {
if (!serviceKey) {
_validateCredentials(credentials, cfServiceInstanceKey) {
if (!credentials) {
throw `Could not create service key ${bold(cfServiceInstanceKey)}.`;
}
if (serviceKey['sm_url']) {
if (credentials['sm_url']) {
throw `Service key credentials are matching to a Service Manager instance of a multitenant environment. Make sure there is no conflict with ${cfServiceInstanceKey}.`;

@@ -225,3 +228,3 @@ }

for (const field of fields) {
if (!serviceKey[field]) {
if (!credentials[field]) {
throw `Service key is missing mandatory field '${field}'. Make sure you are ${bold('not')} using a managed service.`;

@@ -283,3 +286,3 @@ }

_injectTunnelAddress(serviceKey, tunnelAddress) {
_injectTunnelAddress(credentials, tunnelAddress) {
if (!/\w+:\d+/.test(tunnelAddress)) {

@@ -289,10 +292,10 @@ throw `Invalid tunnel address '${tunnelAddress}' - must be in form 'host:port'`;

const [tunnelHost, tunnelPort] = tunnelAddress.split(':')
const { host, port } = serviceKey
serviceKey.host = tunnelHost
serviceKey.port = tunnelPort
serviceKey.url = serviceKey.url.replace(`${host}:${port}`, tunnelAddress)
serviceKey.hostname_in_certificate = host // make cert. verification happy, see xs2/hdideploy.js#527
serviceKey.url = serviceKey.url + (serviceKey.url.includes('?') ? '&' : '?') + 'hostNameInCertificate=' + host
return serviceKey
const { host, port } = credentials
credentials.host = tunnelHost
credentials.port = tunnelPort
credentials.url = credentials.url.replace(`${host}:${port}`, tunnelAddress)
credentials.hostname_in_certificate = host // make cert. verification happy, see xs2/hdideploy.js#527
credentials.url = credentials.url + (credentials.url.includes('?') ? '&' : '?') + 'hostNameInCertificate=' + host
return credentials
}
}

@@ -15,4 +15,4 @@ const path = require('path');

const deployerEnv = JSON.parse(JSON.stringify(process.env));
const hdiDeployOptions = deployerEnv.HDI_DEPLOY_OPTIONS ? JSON.parse(deployerEnv.HDI_DEPLOY_OPTIONS) : {};
delete hdiDeployOptions.root;
const options = deployerEnv.HDI_DEPLOY_OPTIONS ? JSON.parse(deployerEnv.HDI_DEPLOY_OPTIONS) : {};
delete options.root;

@@ -33,8 +33,11 @@ clean_env?.(deployerEnv);

console.log(`HDI deployer automatically undeploys deleted resources using --auto-undeploy.`);
hdiDeployOptions.auto_undeploy = true;
options.auto_undeploy = true;
}
if (Object.entries(hdiDeployOptions).length > 0) {
deployerEnv.HDI_DEPLOY_OPTIONS = JSON.stringify(hdiDeployOptions);
try { require.resolve('hdb') } catch (e) {
if (e.code === 'MODULE_NOT_FOUND') options.use_hdb = false
else throw e
}
if (options.use_hdb !== false) options.use_hdb = true
deployerEnv.HDI_DEPLOY_OPTIONS = JSON.stringify(options)

@@ -52,3 +55,2 @@ const asyncDeploy = util.promisify(deploy);

_loadHdiDeployLib(cwd) {

@@ -55,0 +57,0 @@ const searchPaths = [cwd, __dirname];

@@ -109,3 +109,3 @@ module.exports = {

type: "string",
description: "Optional: Prefer defining 'cds.folders'. Use 'src' if you want it only to be valid for this specific build task, which excludes, for example, 'cds watch'.",
description: "Optional: Prefer defining 'cds.env.folders'. Use 'src' if you want it only to be valid for this specific build task, which excludes, for example, 'cds watch'.",
format: "uri-reference"

@@ -112,0 +112,0 @@ },

@@ -78,6 +78,6 @@ /* eslint-disable no-prototype-builtins */

if (oDataVersion === "V2") {
return (element.includes(["cds.Association"]) && !element.includes(['"on":']));
return (element.includes("cds.Association") || element.includes("cds.Composition") && !element.includes(['"on":']));
}
else {
return (['cds.Association'].includes(element.type)) && !element.on && !element.keys;
return (['cds.Association', 'cds.Composition'].includes(element.type)) && !element.on && !element.keys;
}

@@ -84,0 +84,0 @@ }

@@ -80,2 +80,3 @@ /**

parserContext.allowAllNamespaces = false;
parserContext.mockServerUc = true;
}

@@ -142,8 +143,12 @@

function _hasEntityContainer(jsonObj) {
let isEntityContainerPresent = !!jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.EntityContainer;
if (!isEntityContainerPresent) {
throw new Error(messages.MISSING_ENTITY_CONTAINER);
function _settingMockServerUc(jsonObj, parserContext) {
let EntityContainerObj = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.EntityContainer;
// Setting mockerserverUc to false when there is no EntityContainer or when the EntityContainer exists but has no elements inside it
if (!EntityContainerObj || (EntityContainerObj && !EntityContainerObj.EntitySet && !EntityContainerObj.FunctionImport && !EntityContainerObj.AssociationSet)) {
parserContext.mockServerUc = false;
}
return isEntityContainerPresent;
// Importer throws an error if AssociationSet is present without corresponding entitySets
else if ((EntityContainerObj && !EntityContainerObj.EntitySet && EntityContainerObj.AssociationSet)) {
throw new Error(messages.MISSING_ENTITY_SETS);
}
}

@@ -161,9 +166,2 @@

function _validateEDMX(jsonObj) {
return (
_isValidEDMXProvided(jsonObj) &&
_hasEntityContainer(jsonObj)
);
}
function _getServiceNameSpace(jsonObj) {

@@ -189,2 +187,10 @@ let schemaArr = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema;

function _checkAssociation(associationSet, parserContext) {
Object.keys(associationSet).forEach((i) => {
if (!parserContext.allEntitiesMC.includes(associationSet[i]._attributes.EntitySet)) {
throw new Error(messages.UNRESOLVED_TYPE + `'${associationSet[i]._attributes.EntitySet}'`);
}
});
}
function _extractAssociationSet(associationSet, parserContext) {

@@ -196,2 +202,3 @@ let associations = {};

associations.End = associationSet.End;
_checkAssociation(associations.End, parserContext);
parserContext.allAssociationSets[associationSet[key].Association] = associations;

@@ -327,4 +334,19 @@ }

function _checkFunctionImports(functionImports, parserContext) {
if(functionImports.length) {
functionImports.forEach((functionImport) => {
if (functionImport._attributes.EntitySet && !parserContext.allEntitiesMC.includes(functionImport._attributes.EntitySet)) {
throw new Error(messages.UNRESOLVED_TYPE + `'${functionImport._attributes.EntitySet}'`);
}
});
} else {
if (functionImports._attributes.EntitySet && !parserContext.allEntitiesMC.includes(functionImports._attributes.EntitySet)) {
throw new Error(messages.UNRESOLVED_TYPE + `'${functionImports._attributes.EntitySet}'`);
}
}
}
//extracting the function imports from the metadata
function _extractAllFunctionImports(functionImports, parserContext) {
_checkFunctionImports(functionImports, parserContext)
if (functionImports.length) {

@@ -1086,3 +1108,3 @@ //Has many function imports in metadata

function _getAssociatedEntity(associationEnds, toRole, isAssociationSetMissing, mockServerUc, parserContext) {
function _getAssociatedEntity(associationEnds, toRole, isAssociationSetMissing, parserContext) {
let entityName;

@@ -1093,9 +1115,9 @@ let entitySetName;

if (toRole === associationEnds[i]._attributes.Role) {
if (isAssociationSetMissing) {
if (isAssociationSetMissing && parserContext.mockServerUc) {
entityTypeName = associationEnds[i]._attributes.Type;
entitySetName = parserContext.allEntitySetMapMC[entityTypeName][0];
} else {
} else {
entitySetName = associationEnds[i]._attributes.EntitySet;
}
if (mockServerUc) {
if (parserContext.mockServerUc) {
entityName = parserContext.serviceNamespace + "." + entitySetName;

@@ -1110,3 +1132,3 @@ } else {

function _getCSNMultiplicity(associationEnds, toRole, entityName, mockServerUc, parserContext) {
function _getCSNMultiplicity(associationEnds, toRole, entityName, parserContext) {
let csn = "";

@@ -1119,3 +1141,3 @@ let multiplicity;

attributes = associationEnds[i]._attributes;
if (mockServerUc) {
if (parserContext.mockServerUc) {
let entityNameWithOutNS = parserContext.allEntitySetMap[_extractEntityFromNamespace(entityName)];

@@ -1134,3 +1156,3 @@ if (

) {
// eslint-disable-line no-lonely-if
multiplicity = attributes.Multiplicity;

@@ -1154,122 +1176,2 @@ stop = true;

function _getCSNRefrenentialCondition(navPropName, from, to) {
let csn = "{\n";
csn = csn + '"ref": [\n';
csn = csn + '"' + navPropName + '",\n';
csn = csn + '"' + from + '"\n';
csn = csn + "]\n";
csn = csn + "},\n";
csn = csn + '"=",\n';
csn = csn + "{\n";
csn = csn + '"ref": [\n';
csn = csn + '"' + to + '"';
csn = csn + "]\n";
csn = csn + "}\n";
return csn;
}
function _getCSNMultipleRefrenentialConditions(refConstraints, navPropName, role) {
let csn = "";
csn = csn + '"on": [\n';
Object.keys(refConstraints).forEach((i) => {
let roleName = refConstraints[i].Principal._attributes.Role;
let principalProperties = refConstraints[i].Principal.PropertyRef;
let dependentProperties = refConstraints[i].Dependent.PropertyRef;
let from = "";
let to = "";
if (roleName && roleName === role) {
from = principalProperties._attributes.Name;
to = dependentProperties._attributes.Name;
} else {
from = dependentProperties._attributes.Name;
to = principalProperties._attributes.Name;
}
csn = csn + _getCSNRefrenentialCondition(navPropName, from, to);
if (i < refConstraints.length - 1) {
csn = csn + ",\n";
csn = csn + '"and",\n';
}
});
csn = csn + "]\n";
return csn;
}
function _getCSNRefrenentialConditions(refConstraints, navPropName, role) {
let csn = "";
let from;
let to;
let roleName = refConstraints.Principal._attributes.Role;
let principalProperties = refConstraints.Principal.PropertyRef;
let dependentProperties = refConstraints.Dependent.PropertyRef;
if (dependentProperties.length > 0) {
// Multiple principal and dependents within Referential Constraint
csn = csn + '"on": [\n';
Object.keys(dependentProperties).forEach((i) => {
if (roleName && roleName === role) {
from = principalProperties[i]._attributes.Name;
to = dependentProperties[i]._attributes.Name;
} else {
from = dependentProperties[i]._attributes.Name;
to = principalProperties[i]._attributes.Name;
}
csn = csn + _getCSNRefrenentialCondition(navPropName, from, to);
if (i < dependentProperties.length - 1) {
csn = csn + ",\n";
csn = csn + '"and",\n';
}
});
csn = csn + "]\n";
} else {
// Single principal and dependents within Referential Constraint
csn = csn + '"on": [\n';
if (roleName && roleName === role) {
from = principalProperties._attributes.Name;
to = dependentProperties._attributes.Name;
} else {
from = dependentProperties._attributes.Name;
to = principalProperties._attributes.Name;
}
csn = csn + _getCSNRefrenentialCondition(navPropName, from, to);
csn = csn + "]\n";
}
return csn;
}
function _getCSNRefrenentialConstraints(associations, navPropName, toRole) {
let csn = "";
let endProperties;
let matchingRole = "";
let refConstraints = associations.ReferentialConstraint;
if (!refConstraints) {
return csn;
}
endProperties = associations.End;
Object.keys(endProperties).forEach((i) => {
let role = endProperties[i]._attributes.Role;
if (role && role === toRole) {
matchingRole = endProperties[i]._attributes.Role;
}
});
if (refConstraints.length > 0) {
// Multiple referential constraints within Association
csn = csn +
_getCSNMultipleRefrenentialConditions(
refConstraints,
navPropName,
matchingRole
);
} else {
// Single referential constraint within Association
csn = csn +
_getCSNRefrenentialConditions(
refConstraints,
navPropName,
matchingRole
);
}
return csn;
}
function _getCSNAssociatedRefrenentialConstraints(

@@ -1280,3 +1182,2 @@ associations,

navPropName,
mockServerUc,
parserContext

@@ -1286,3 +1187,2 @@ ) {

let multiplicity;
let referentialConstraint;
csn = csn + '"target": "' + entityName + '"';

@@ -1296,3 +1196,2 @@ if (!associations) {

entityName,
mockServerUc,
parserContext

@@ -1303,11 +1202,3 @@ );

}
referentialConstraint = _getCSNRefrenentialConstraints(
associations,
navPropName,
toRole,
mockServerUc
);
if (referentialConstraint) {
csn = csn + ",\n" + referentialConstraint;
}
return csn;

@@ -1320,3 +1211,2 @@ }

navPropName,
mockServerUc,
parserContext

@@ -1333,3 +1223,2 @@ ) {

}
let associationEnd = associationSet ? associationSet.End : association.End;

@@ -1340,3 +1229,2 @@ let entityName = _getAssociatedEntity(

isAssociationSetMissing,
mockServerUc,
parserContext

@@ -1363,3 +1251,2 @@ );

navPropName,
mockServerUc,
parserContext

@@ -1394,3 +1281,3 @@ );

function _getServiceEntityNavigationProperty(navPropAttributes, mockServerUc, parserContext) {
function _getServiceEntityNavigationProperty(navPropAttributes, parserContext) {
let csn = "";

@@ -1416,9 +1303,3 @@ let navPropName;

}
// annotation for on conditions
csn = csn + `"@cds.ambiguous": "missing on condition?",\n`;
//Code for notNull working but change is not reflecting when we use: cds compile {file-path} -2 edmx > {newFileName}
/*if (navPropAttributes.Nullable === "false")
csn = csn + '"notNull": true,\n';*/
// if documentation exists for navigation property

@@ -1434,3 +1315,2 @@ if (navPropDoc)

navPropName,
mockServerUc,
parserContext

@@ -1451,3 +1331,2 @@ );

hasProperties,
mockServerUc,
parserContext

@@ -1474,3 +1353,2 @@ ) {

navPropAttributes,
mockServerUc,
parserContext

@@ -1502,3 +1380,2 @@ );

ignorePersistenceSkip,
mockServerUc,
documentation,

@@ -1576,3 +1453,2 @@ parserContext

hasProperties,
mockServerUc,
parserContext

@@ -1621,3 +1497,3 @@ );

function _parseServiceEntity(entityName, entity, ignorePersistenceSkip, mockServerUc, parserContext) {
function _parseServiceEntity(entityName, entity, ignorePersistenceSkip, parserContext) {
let entityAttributes;

@@ -1673,3 +1549,2 @@ let entityKeysList;

ignorePersistenceSkip,
mockServerUc,
documentation,

@@ -1680,8 +1555,8 @@ parserContext

function _getEntitesWithNamesFromEntitySets(entityJson, mockServerUc, parserContext) {
const getEntityName = mockServerUc
function _getEntitesWithNamesFromEntitySets(entityJson, parserContext) {
const getEntityName = parserContext.mockServerUc
? (entity) => parserContext.allEntitySetMapMC[parserContext.serviceNamespace + "." + _getEntityName(entity)]
: (entity) => [_getEntityName(entity)];
const isNameInEntitySet = mockServerUc
const isNameInEntitySet = parserContext.mockServerUc
? (name) => parserContext.allEntitiesMC.indexOf(name) >= 0

@@ -1726,3 +1601,3 @@ : (name) => parserContext.allEntities.indexOf(name) >= 0;

function _parsingServiceEntities(entitiesWithNames, ignorePersistenceSkip, mockServerUc, parserContext) {
function _parsingServiceEntities(entitiesWithNames, ignorePersistenceSkip, parserContext) {
return entitiesWithNames.map((entityWithName) =>

@@ -1734,3 +1609,2 @@ entityWithName.name.map((entityName) =>

ignorePersistenceSkip,
mockServerUc,
parserContext

@@ -1742,3 +1616,3 @@ )

function _getServiceEntitites(entityJson = [], ignorePersistenceSkip, mockServerUc, parserContext) {
function _getServiceEntitites(entityJson = [], ignorePersistenceSkip, parserContext) {
const entities = Array.isArray(entityJson) ? entityJson : [entityJson];

@@ -1748,3 +1622,3 @@ let entitiesWithNames = []; // [0]-> entity set, [1]-> entity type

entitiesWithNames[0] = _getEntitesWithNamesFromEntitySets(entities, mockServerUc, parserContext);
entitiesWithNames[0] = _getEntitesWithNamesFromEntitySets(entities, parserContext);
entitiesWithNames[1] = _getEntitesWithNamesFromEntityTypes(entities, parserContext);

@@ -1754,3 +1628,3 @@

if (entitiesWithNames[i].length) {
parsedEntities[i] = _parsingServiceEntities(entitiesWithNames[i], ignorePersistenceSkip, mockServerUc, parserContext);
parsedEntities[i] = _parsingServiceEntities(entitiesWithNames[i], ignorePersistenceSkip, parserContext);
}

@@ -1785,3 +1659,13 @@ }

function _generateEDMXJSON2CSN(jsonObj, ignorePersistenceSkip, mockServerUc, parserContext) {
function _checkEntityType(entityJson, parserContext) {
const entity = Object.values(entityJson).map(item => item._attributes.Name);
Object.keys(parserContext.allEntities).forEach((i) => {
if (!entity.includes(parserContext.allEntities[i])) {
throw new Error(messages.UNRESOLVED_TYPE + `'${parserContext.allEntities[i]}'`);
}
});
}
function _generateEDMXJSON2CSN(jsonObj, ignorePersistenceSkip, parserContext) {
const csn = {};

@@ -1793,7 +1677,2 @@ csn.meta = {};

// Extract association sets
let associationSets = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.EntityContainer.AssociationSet;
if (associationSets) {
_extractAllAssociationSets(associationSets, parserContext);
}
// Extract associations

@@ -1804,12 +1683,2 @@ const associations = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.Association;

}
// Extract entities from EntitySets
const entitySets = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.EntityContainer.EntitySet;
if (entitySets) {
_extractAllEntityFromEntitySets(entitySets, parserContext);
}
// Extract function imports
const functionImports = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.EntityContainer.FunctionImport;
if (functionImports) {
_extractAllFunctionImports(functionImports, parserContext);
}
// Extract complex types

@@ -1825,3 +1694,3 @@ const complexTypes = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.ComplexType;

if (mockServerUc) {
if (parserContext.mockServerUc) {
let def = '"' + parserContext.serviceNamespace + '": {"kind": "service"' + ',\n' + '"@cds.external": true';

@@ -1841,6 +1710,22 @@ let serviceDoc = jsonObj["edmx:Edmx"]["edmx:DataServices"]["Schema"].EntityContainer.Documentation;

definitions.push(def);
// Extract entities from EntitySets
const entitySets = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.EntityContainer.EntitySet;
if (entitySets) {
_extractAllEntityFromEntitySets(entitySets, parserContext);
_checkEntityType(entityJson, parserContext);
}
// Extract association sets
let associationSets = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.EntityContainer.AssociationSet;
if (associationSets) {
_extractAllAssociationSets(associationSets, parserContext);
}
// Extract function imports
const functionImports = jsonObj["edmx:Edmx"]["edmx:DataServices"].Schema.EntityContainer.FunctionImport;
if (functionImports) {
_extractAllFunctionImports(functionImports, parserContext);
}
}
if (entityJson) {
definitions.push(_getServiceEntitites(entityJson, ignorePersistenceSkip, mockServerUc, parserContext));
definitions.push(_getServiceEntitites(entityJson, ignorePersistenceSkip, parserContext));
}

@@ -1875,2 +1760,3 @@

_initialize(parserContext);
parserContext.mockServerUc = mockServerUc

@@ -1884,3 +1770,4 @@ if (!context.include_all_namespaces && context.namespaces.length) {

isValid = _validateEDMX(edmx2jsonModel);
isValid = _isValidEDMXProvided(edmx2jsonModel);
_settingMockServerUc(edmx2jsonModel, parserContext)
if (isValid) {

@@ -1898,3 +1785,2 @@ parserContext.serviceNamespace = _getServiceNameSpace(edmx2jsonModel);

ignorePersistenceSkip,
mockServerUc,
parserContext

@@ -1901,0 +1787,0 @@ );

@@ -107,2 +107,4 @@ const {

let bodyParam;
for (let param of globalParameters.concat(operation.parameters || [])) {

@@ -120,3 +122,6 @@ if (param.$ref) {

if (param.in === "body") continue;
if (param.in === "body") {
bodyParam = param;
continue;
}
if (STANDARD_HEADERS.includes(param.name.toLowerCase())) continue;

@@ -165,7 +170,9 @@

const schema = requestBodySchema(context, operation);
if (schema) {
cdsOperation.params.body = cdsType(context, schema);
cdsOperation.params.body["@openapi.in"] = "body";
}
const schema = context.v3
? requestBodySchema(context, operation.requestBody, input.components)
: v2Schema(context, bodyParam?.schema, operation.consumes);
if (schema) {
cdsOperation.params.body = cdsType(context, schema);
cdsOperation.params.body["@openapi.in"] = "body";
}

@@ -192,4 +199,4 @@ if (!operation.responses)

const responseSchema = context.v3
? response.content?.["application/json"]?.schema
: response.schema;
? v3Schema(context, response)
: v2Schema(context, response.schema, operation.produces);
if (responseSchema)

@@ -216,11 +223,62 @@ cdsOperation.returns = cdsType(context, responseSchema);

function requestBodySchema(context, operation) {
const requestBody = context.v3
? operation.requestBody
: operation.parameters?.find((p) => p.in === "body");
return context.v3
? requestBody?.content["application/json"]?.schema
: requestBody?.schema;
function requestBodySchema(context, requestBody, components) {
while (requestBody?.$ref) {
const expectedPrefix = "#/components/requestBodies/";
if (!requestBody.$ref.startsWith(expectedPrefix)) {
requestBody = undefined;
throw new Error(`TODO: unexpected request body reference ${requestBody.$ref}`)
}
else {
requestBody =
components?.requestBodies[
requestBody.$ref.substring(expectedPrefix.length)
];
}
}
return v3Schema(context, requestBody);
}
function v3Schema(context, body) {
if (!body?.content) return undefined;
const contentTypes = Object.keys(body.content);
if (contentTypes.includes("application/json"))
return body.content["application/json"].schema;
if (contentTypes.length === 0) return undefined;
if (contentTypes.length > 1)
context.messages.push({
message: `Multiple requestBody content-types not including application/json`,
input: contentTypes,
});
const contentType = contentTypes[0];
return {
$contentType: contentType,
...body.content[contentType]?.schema,
};
}
function v2Schema(context, schema, contentTypes) {
if (
!schema ||
!contentTypes ||
contentTypes.includes("application/json") ||
contentTypes.includes("application/json;charset=utf-8") ||
contentTypes.includes("application/json;charset=UTF-8")
)
return schema;
// take the first content type if there are multiple
const contentType = contentTypes[0];
return {
$contentType: contentType,
...schema,
};
}
function cdsType(

@@ -238,5 +296,19 @@ context,

if (schema.description) type["@description"] = schema.description;
if (schema.$contentType && schema.$contentType !== '*/*')
type['@openapi.contentType'] = schema.$contentType;
if (schema.$ref) {
type.type = referencedType(context, schema.$ref).name;
const refType = referencedType(context, schema.$ref);
let nRefType = refType;
if (refType.schema?.$ref)
nRefType = indirectlyReferencedType(context, refType.schema);
if (namedType && normalizeSchemaType(nRefType.schema) === "object") {
type.kind = "type";
type.includes = [refType.name];
type.elements = {};
} else {
type.type = refType.name;
if (schema.maxLength) type.length = schema.maxLength;
}
return type;

@@ -250,3 +322,3 @@ }

//TODO: complain if "xml"
{
if (schema.items) {
const itemsType = cdsType(

@@ -268,2 +340,4 @@ context,

}
} else {
type = someJSON(context, schema, arrayItem, type);
}

@@ -365,5 +439,30 @@ break;

schemaType = bestMatchingType(context, schema);
if (
(!namedType || schemaType !== "object") && schema.allOf && schema.allOf.length === 1 &&
Object.keys(schema).filter(
(k) => !["description", "nullable"].includes(k) && !k.startsWith("x-")
).length === 1) {
const normalizedSchema = { ...schema.allOf[0] };
if (schema.description) normalizedSchema.description = schema.description;
type = cdsType(context, normalizedSchema, arrayItem, namedType, forParameter);
break;
}
if (schema.allOf && schema.allOf.length === 2 && schema.allOf[0].$ref && !schema.allOf[1].$ref && normalizeSchemaType(schema.allOf[1]) !== "object") {
const normalizedSchema = Object.assign(
{ ...schema.allOf[0] },
schema.allOf[1]
);
type = cdsType(
context,
normalizedSchema,
arrayItem,
namedType,
forParameter
);
break;
}
if (schemaType) {
schema.type = schemaType;
type = cdsType(context, schema, arrayItem, namedType);
type = cdsType(context, schema, arrayItem, namedType, forParameter);
break;

@@ -375,15 +474,3 @@ }

default:
{
context.JSON = true;
const jsonSchema = minimalSchema(schema);
if (arrayItem && jsonSchema !== "{}") {
type = anonymousType(context, {
type: "common.JSON",
"@JSON.Schema": jsonSchema,
});
} else {
type.type = "common.JSON";
if (jsonSchema !== "{}") type["@JSON.Schema"] = jsonSchema;
}
}
type = someJSON(context, schema, arrayItem, type);
break;

@@ -394,2 +481,18 @@ }

function someJSON(context, schema, arrayItem, type) {
context.JSON = true;
const jsonSchema = minimalSchema(schema);
if (arrayItem && jsonSchema !== "{}") {
type = anonymousType(context, {
type: "common.JSON",
"@openapi.schema": jsonSchema,
});
}
else {
type.type = "common.JSON";
if (jsonSchema !== "{}") type["@openapi.schema"] = jsonSchema;
}
return type;
}
function addDefault(type, schema, forParameter) {

@@ -411,2 +514,3 @@ if (schema.default !== undefined) type.default = { val: schema.default };

delete s.description;
delete s.$contentType;
return JSON.stringify(s);

@@ -435,15 +539,13 @@ }

if (subSchema.$ref) {
let refType = { schema: subSchema };
let limit = 10;
while (refType.schema.$ref && limit-- > 0)
refType = referencedType(context, refType.schema.$ref);
if (
refType.schema.allOf ||
refType.schema.anyOf ||
refType.schema.oneOf
) {
const refType = indirectlyReferencedType(context, subSchema);
if (!refType.schema) return undefined;
if (refType.schema.allOf || refType.schema.anyOf || refType.schema.oneOf) {
const subType = bestMatchingType(context, refType.schema);
type = betterType(type, { type: subType });
} else type = betterType(type, refType.schema);
} else {
}
else type = betterType(type, refType.schema);
}
else {
//TODO: nested xOf - not yet encountered

@@ -457,2 +559,10 @@ type = betterType(type, subSchema);

function indirectlyReferencedType(context, subSchema) {
let refType = { schema: subSchema };
let limit = 10;
while (refType.schema?.$ref && limit-- > 0)
refType = referencedType(context, refType.schema.$ref);
return refType;
}
function referencedType(context, ref) {

@@ -474,4 +584,10 @@ const expectedPrefix = context.v3

function normalizeSchemaType(schema) {
if (!schema) return undefined;
if (!schema.type && schema.items) return "array";
if (!schema.type && schema.maxLength) return "string";
if (!schema.type && schema.pattern) return "string";
if (!schema.type && schema.enum?.every((v) => typeof v === "string"))
return "string";
if (

@@ -494,2 +610,8 @@ !schema.type &&

if (schemaType.length === 1) schemaType = schemaType[0];
if (
schemaType.length === 2 &&
schemaType[0] === "integer" &&
schemaType[1] === "number"
)
return "number";
}

@@ -496,0 +618,0 @@ return schemaType;

@@ -10,8 +10,10 @@ /** @type { import('./cds') & CdsDk & CdsDkAdd & CdsDkBuild} */

/** @type { import('./cds').compile } */
get compile() {
let compile = require(_local('@sap/cds/lib/compile/cds-compile'))
cds.extend (compile.to.constructor) .with (class {
get asyncapi() { return super.asyncapi = require('./compile/asyncapi') }
get openapi() { return super.openapi = require('./compile/openapi') }
get asyncapi() { return super.asyncapi = require('./compile/to_asyncapi') }
get openapi() { return super.openapi = require('./compile/to_openapi') }
get xsuaa(){ return super.xsuaa = require('./compile/to-xsuaa') }
get mermaid(){ return super.mermaid = require('./compile/to-mermaid') }
'edmx-v2'(csn,o) { return cds.compile.to.edmx(csn,{...o,flavor:'v2'}) }

@@ -22,7 +24,2 @@ 'edmx-v4'(csn,o) { return cds.compile.to.edmx(csn,{...o,flavor:'v4'}) }

})
// Compat to cds <=6. Esp. BAS tools need it, see cap/issues#13918
// TODO remove compat w/ cds-dk 8
if (compile.to.constructor.name === 'Object') { // not a class as in cds 7
compile.to.xsuaa = require('./compile/to-xsuaa')
}
return super.compile = compile

@@ -57,11 +54,12 @@ }

const MIN_SUPPORTED_CDS_VERSION = 6
const [cdsVersion] = cds.version.split('.')
if (parseInt(cdsVersion) < MIN_SUPPORTED_CDS_VERSION) {
const REQUIRED_CDS_MAJOR = 7, REQUIRED_CDS_MINOR = 2
const [major, minor] = cds.version.split('.').map(Number)
if (major < REQUIRED_CDS_MAJOR || major === REQUIRED_CDS_MAJOR && minor < REQUIRED_CDS_MINOR) {
const [dkVersion] = require('../package.json').version.split('.')
console.error(`
This application uses '@sap/cds' version ${cdsVersion}, which is not compatible with the installed '@sap/cds-dk' version ${dkVersion}.
This application uses '@sap/cds' version ${major}.${minor}, which is not compatible with the installed '@sap/cds-dk' version ${dkVersion}.
You can either:
- update '@sap/cds' to the latest version (recommended)
- downgrade '@sap/cds-dk' to version ${MIN_SUPPORTED_CDS_VERSION}
- downgrade '@sap/cds-dk' to version ${REQUIRED_CDS_MAJOR}
`)

@@ -68,0 +66,0 @@ process.exit(1)

@@ -18,12 +18,11 @@ module.exports = {

SAMPLE: 'sample',
PIPELINE: 'pipeline',
HELM_UNIFIED_RUNTIME: 'helm-unified-runtime',
HELM: 'helm',
MTA: 'mta',
CONNECTIVITY: 'connectivity',
CF_MANIFEST: 'cf-manifest',
MTX: 'mtx',
HELM: 'helm',
HELM_UNIFIED_RUNTIME: 'helm-unified-runtime',
CONTAINERIZE: 'containerize',
MULTITENANCY: 'multitenancy',
TOGGLES: 'toggles',
EXTENSIBILITY: 'extensibility',
MTX: 'mtx',
XSUAA: 'xsuaa',

@@ -42,11 +41,19 @@ HANA: 'hana',

APPROUTER: 'approuter',
CONNECTIVITY: 'connectivity',
DESTINATION: 'destination',
HTML5_REPO: 'html5-repo',
PORTAL: 'portal',
APPLICATION_LOGGING: 'application-logging',
AUDIT_LOGGING: 'audit-logging',
NOTIFICATIONS: 'notifications',
WORKZONE: 'workzone',
WORKZONE_STANDARD: 'workzone-standard',
DATA: 'data',
APPLICATION_LOGGING: 'application-logging',
HTTP: 'http',
LINT: 'lint',
LINT_GLOBAL: 'lint:global',
LINT_DEV: 'lint:dev',
PIPELINE: 'pipeline',
TYPER: 'typer',
HTML5_REPO: 'html5-repo',
NOTIFICATIONS: 'notifications'
TYPESCRIPT: 'typescript'
}),

@@ -57,6 +64,5 @@

REGEX_JAVA_VERSION: /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(-(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\+[0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*)?$/,
REGEX_PROJECT_NAME: /[^0-9a-zA-Z_-]+/g,
URLS: Object.freeze({
PIPELINE_HELP: 'https://sap.github.io/jenkins-library/',
PIPELINE_HELP: 'https://www.project-piper.io/',
SAMPLES: 'https://github.com/SAP-samples/cloud-cap-samples',

@@ -63,0 +69,0 @@ CAPIRE: 'https://cap.cloud.sap',

@@ -1,102 +0,60 @@

const path = require('path');
const path = require('path')
const term = require('../util/term')
const validate = require('./validate')
const term = require('../util/term');
const { OPTIONS, COMMAND_INIT, PROJECT_FILES } = require('./constants')
const { readdirSync } = require('fs')
const { join } = require('path')
const { NODEJS, JAVA } = OPTIONS
const { OPTIONS, COMMAND_INIT, PROJECT_FILES, REGEX_PROJECT_NAME } = require('./constants')
const {
NODEJS, JAVA,
HANA, APPROUTER, XSUAA, APPLICATION_LOGGING, ENTERPRISE_MESSAGING,
MTA, MULTITENANCY, PIPELINE,
TINY_SAMPLE, SAMPLE
} = OPTIONS
const cds = require('..'), { exists, readdir } = cds.utils
const cds = require('..'), { exists } = cds.utils
const DEBUG = /\b(y|all|cli)\b/.test(process.env.DEBUG) ? console.debug : undefined
module.exports = class CDSGenerator {
constructor() {
this.cwd = process.cwd()
this.uiConfig = [{
title: 'Project Name',
description: 'Enter your project name.',
default: '',
target: 'projectName',
validate: async (value) => {
try {
cds.root = path.resolve(this.cwd, value || '.');
/** @returns {string[]} */
static #findPlugins(excluded) {
// actually needs to be sync to be usable in module.exports of dk/bin/init.js
const fromDk = readdirSync(join(__dirname, 'template'))
const fromPlugins = Object.keys(Object.fromEntries(require('./add').registered))
return [...fromDk, ...fromPlugins]
.filter(plugin => !excluded?.includes(plugin))
}
await this._validateProjectName(value);
await this._validateProjectFolder(path.join(this.cwd, value), this.cwd);
} catch (e) {
return e
}
static help({ exclude = [] } = {}) {
const plugins = this.readPlugins(exclude)
const nameFixedLength = Math.max(...plugins.map(plugin => plugin.name.length))
return plugins
.map(({name, module}) => {
const help = module.help?.() ?? ''
return ` *${name}*${' '.repeat(nameFixedLength - name.length)} - ${help}`
})
.join('\n\n')
}
return true;
},
guiOptions: {
hint: 'This name is restricted to certain characters. A valid project name is compatible across platforms.',
mandatory: true
}
}, {
title: 'Type',
description: `Select your runtime.`,
default: NODEJS,
choices: [
{ name: 'Node.js', value: NODEJS },
{ name: 'Java', value: JAVA }
],
multiple: false,
target: 'add',
guiOptions: {
hint: `The preferred runtime can't be changed afterwards. To use a different runtime, create a new project.`,
mandatory: true
}
}, {
title: 'Features',
description: 'Choose productive runtime capabilities for your application.',
default: '',
choices: [
{ name: 'SAP HANA Cloud', value: HANA },
{ name: 'SAP Application Router', value: APPROUTER },
{ name: 'User Authentication via XSUAA', value: XSUAA },
{ name: 'Application Logging', value: APPLICATION_LOGGING },
{ name: 'SAP BTP Event Mesh', value: ENTERPRISE_MESSAGING }
],
multiple: true,
target: 'add',
guiOptions: {
hint: `Choose any combination of features enriching the capabilities of your project. You can also add features later with ${term.bold('cds add')}`
}
}, {
title: 'Deployment',
description: 'Choose which way to deploy your project.',
default: '',
choices: [
{ name: 'Cloud Foundry: MTA Deployment', value: MTA },
// { name: 'Kyma: Helm Deployment', value: HELM },
{ name: 'Multitenancy', value: MULTITENANCY },
{ name: 'CI/CD Pipeline Integration', value: PIPELINE },
],
multiple: true,
target: 'add',
guiOptions: {
hint: `Choose any combination of features enriching the capabilities of your project. You can also add features later with ${term.bold('cds add')}`
}
}, {
title: 'Samples',
description: `Add sample content to your project.`,
default: '',
choices: [
{ name: 'Minimal Sample', value: TINY_SAMPLE },
{ name: 'Extended Sample with UI', value: SAMPLE }
],
multiple: true,
target: 'add',
guiOptions: {
hint: 'Speed up your ramp-up phase with sample files in your project.'
}
}]
/**
* @returns {{name: string, module: import('module')}[]}
*/
static readPlugins(excluded = []) {
const plugins = this.#findPlugins(excluded)
.map(plugin => ({
name: plugin,
module: cds.add.registered.get(plugin) ?? require('./template/' + plugin)
}))
const cmds = Object.values(OPTIONS)
const byPriority = (lhs, rhs) => {
const a = cmds.indexOf(lhs.name), b = cmds.indexOf(rhs.name)
if (a === -1 && b === -1) return 0
if (a === -1) return 1
if (b === -1) return -1
return a - b
}
return plugins.sort(byPriority)
}
constructor() {
this.cwd = process.cwd()
this.uiConfig = require('./bas') // Used by CAP Generator
}
/**

@@ -116,3 +74,3 @@ * @param {string} facets, comma separated list of facets

const needsProject = facets.some(facet => !nonProjectFacets[facet])
if (needsProject && !PROJECT_FILES.find(exists)) {
if (needsProject && !PROJECT_FILES.some(exists)) {
throw `The current folder doesn't seem to contain a project. None of the following files found: ${PROJECT_FILES.join(', ')}.`

@@ -128,3 +86,3 @@ }

*/
async initCmd(projectName) {
async init(projectName) {
await this.stepInit(projectName);

@@ -134,3 +92,3 @@ await this.stepEnd();

async stepInit(projectName, options) {
async stepInit(projectName, options) { // Also used by CAP Generator
if (options) {

@@ -183,9 +141,14 @@ // called from CAP generator with options

await this._validateOptions();
const { command, options } = cds.cli
const { options, command } = cds.cli
if (command === COMMAND_INIT) {
validate.projectName(this.projectName)
validate.projectFolder(this.cwd)
}
const plugins = command === COMMAND_INIT ? options.add : new Set(facets)
await this._fillTemplateList(plugins)
for (const [i, facet] of Object.entries(Array.from(plugins))) {
const cmds = Object.values(OPTIONS)
const byPriority = (a, b) => cmds.indexOf(a) - cmds.indexOf(b)
const sorted = Array.from(plugins).sort(byPriority)
for (const [i, facet] of Object.entries(sorted)) {
if (!cds.cli.options.dry) console.log(`Adding feature '${facet}'...`);

@@ -216,12 +179,12 @@ const template = this.templateList[i]

if (err.code === 'MODULE_NOT_FOUND') {
const entries = (await readdir(path.join(__dirname, 'template')))
.filter(entry =>
!entry.startsWith('_') &&
!entry.endsWith('.js') &&
entry !== 'nodejs' &&
entry !== 'java'
);
const entries = this.constructor.readPlugins()
const fuzzySearch = require('../../bin/util/fuzzySearch')
const [bestMatch] = fuzzySearch(plugin, entries)
throw `Unknown facet '${term.bold(plugin)}'. Did you mean ${term.bold(`cds add ${bestMatch}`)}?\n\nHaven't found the proper facet yet? Here are all supported facets:\n\n ${term.bold(entries.join('\n '))}\n`
const [bestMatch] = fuzzySearch(plugin, entries.map(e => e.name))
const max = entries.reduce((max, {name}) => Math.max(max, name.length), 0)
const allFacetsText = entries.map(({name, module}) => {
const help = module.help?.() ?? ''
return `${term.bold(name) + ' '.repeat(max - name.length)} ${term.dim(help)}`
}).join('\n')
DEBUG?.(err)
throw `Unknown facet '${term.bold(plugin)}'. Did you mean ${term.bold(`cds add ${bestMatch}`)}?\n\nHaven't found the proper facet yet? Here are all supported facets:\n\n${allFacetsText}\n`
}

@@ -234,3 +197,2 @@ throw err

const templates = new Map
for (let plugin of plugins) {

@@ -271,8 +233,6 @@ // Compat

// check debug to avoid lengthy computation when not in debug mode
DEBUG?.(`Features: ${this.templateList.map(({ name }) => name).join(', ')}`);
}
// CAP Generator API
async stepEnd() {
async stepEnd() { // Also used by CAP Generator
for (const template of this.templateList) {

@@ -317,40 +277,2 @@ await template.finalize();

}
async _validateOptions() {
const { options, command } = cds.cli
if (options.force) {
return;
}
if (command === COMMAND_INIT) {
await this._validateProjectName(this.projectName);
await this._validateProjectFolder(this.cwd);
}
}
async _validateProjectName(projectName) {
if (!projectName) {
throw 'Specify a project name.'
}
// remove duplicates by using a set
const invalidChars = new Set(projectName.match(REGEX_PROJECT_NAME));
if (invalidChars.size > 0) {
throw `Project name '${projectName}' contains at least one character ('${[...invalidChars]}') which is not supported in npm package names.
Specify a different project name by replacing characters with '_' or '-', for example '${projectName.replace(REGEX_PROJECT_NAME, '_')}' or '${projectName.replace(REGEX_PROJECT_NAME, '-')}'.`
}
}
async _validateProjectFolder(cwd) {
const existingProjectFile = PROJECT_FILES.find(exists)
if (existingProjectFile) {
let message;
if (cwd === cds.root) {
message = `You seem to be working in a project which is already initialized. Use ${term.bold('cds add')} to add more features.`;
} else {
message = `You seem to be trying to initialize an existing project. Use a different project name instead.`;
}
throw `${message}\nDetails: File '${existingProjectFile}' exists in the project folder which indicates an existing project. Use ${term.bold('cds add')} to add more features.`
}
}
}

@@ -1,7 +0,8 @@

const YAML = require('@sap/cds-foss').yaml
const { EOL } = require('node:os')
const { inspect } = require('node:util')
const cds = require('../cds')
const { copy, read, write, exists } = cds.utils
const { copy, read, write, exists, path, yaml: YAML } = cds.utils
const { readYAML, writeYAML, readJSON } = require('../util/fs')
const { EOL } = require('node:os')
const { detectIndent } = require('./indent')
const { colors } = require('../util/term')

@@ -31,3 +32,3 @@ function _isObject(item) {

async function sort(path, key) {
module.exports.sort = async function(path, key) {
const json = await read(path)

@@ -45,3 +46,3 @@ json[key] = Object.fromEntries(

*/
async function mergeGitignore (into, from) {
module.exports.mergeGitignore = async function(into, from) {
const source = typeof from === 'string' ? (await read(from)).split(EOL) : from

@@ -201,3 +202,3 @@

collectionStack.unshift(node.value)
} else if (YAML.isScalar(node) && semantics.forceOverwrite && templateNode) {
} else if (YAML.isScalar(node) && semantics.forceOverwrite && templateNode && node.comment != 'cds.noOverwrite') {
node.value = templateNode.value ?? templateNode.items?.[0]?.value?.value

@@ -332,5 +333,7 @@ }

.forEach(item => {
if (!templateEntryMap.get(item)) throw 'Error: did not find entry in template for ' + inspect(item, { colors }) + ' in ' + from
const templateNode = templateEntryMap.get(item).node
item.at !== undefined ? targetNode.items.splice(item.at, 0, templateNode) : targetNode.add(templateNode)
})
}

@@ -379,3 +382,3 @@ },

async function removeFromYAML(name, keyPaths) {
module.exports.removeFromYAML = async function (name, keyPaths) {
const yaml = await readYAML(name)

@@ -397,3 +400,3 @@ for (const keyPath of keyPaths) {

async function removeFromYAMLArray(name, keyPath, aliasToRemove) {
module.exports.removeFromYAMLArray = async function(name, keyPath, aliasToRemove) {
const yaml = await readYAML(name)

@@ -423,5 +426,2 @@

const path = require('path')
/**

@@ -433,3 +433,3 @@ * @param {...(string | Object | YAML.Document)} src The source to merge from.

*/
const merge = (...src) => {
module.exports.merge = (...src) => {
return {

@@ -458,9 +458,1 @@ /**

}
module.exports = {
merge,
sort,
mergeGitignore,
removeFromYAML,
removeFromYAMLArray
}

@@ -0,179 +1,132 @@

const util = require('util')
const { exec } = require('child_process')
const execAsync = util.promisify(exec)
const { URLS, MAVEN_ARCHETYPE_VERSION, OPTIONS: { JAVA_MVN, JAVA_PACKAGE, HANA }, REGEX_JAVA_PACKAGE } = require('./constants');
const term = require('../util/term')
const cmd = require('../util/command')
const cp = require('child_process');
const util = require('util');
const execAsync = util.promisify(cp.exec);
const term = require('../util/term');
const cmd = require('../util/command');
const DEFAULT_GROUP_ID = 'customer';
const LOG = console
const cds = require('../cds')
const DEBUG = cds.debug('cli')
const DEBUG = cds.debug('cli|init|add|mvn')
class MvnArchetypeUtil {
const { URLS, MAVEN_ARCHETYPE_VERSION, OPTIONS: { JAVA_PACKAGE, HANA }, REGEX_JAVA_PACKAGE } = require('./constants')
_scanMvnParams(mvnParams, params) {
// need to create new regex every call since a constant
// would keep the match state
module.exports.add = async function (feature) {
const params = new Map()
const quoteRegex = /([\w\-.]+)=([\w\-.]+|\[([\w.\-,]+)\])/g;
// captures a=1 => a:1
// -Da=[x,y,z] => -Da:x,y,z (Maven specific)
// a=[x,y,z] => a:x,y,z
let match = quoteRegex.exec(mvnParams);
while (match != null) {
const key = (match[1].startsWith('-D') ? match[1] : `-D${match[1]}`);
params.set(key, match[3] || match[2]);
match = quoteRegex.exec(mvnParams);
}
}
_finalize(params) {
params.set(`-Dstyle.color`, `always`);
params.set('-B');
if (DEBUG) {
params.set('-X'); params.set('-e');
}
if (process.env.CI) {
params.set('--batch-mode');
}
return Array.from(params, ([arg, val]) => val !== null && val !== undefined ? `${arg}=${val}` : arg)
}
// REVISIT: make this only public API once mvn:addSample etc. have been moved to generic mvn:add
async add(feature) {
return this.execMvn(this.getAddArgs(feature))
}
getAddArgs(feature) {
const params = new Map();
params.set(`com.sap.cds:cds-maven-plugin:${MAVEN_ARCHETYPE_VERSION}:add`);
const { 'java:mvn': javaMvn, force } = cds.cli.options
if (javaMvn) _scan(javaMvn).forEach(([k,v]) => params.set(k,v))
const archetypeVersion = params.get('-DarchetypeVersion') ?? MAVEN_ARCHETYPE_VERSION
if (feature === HANA) { // REVISIT: compat for HANA when -Dfeature HANA is provided by Java
params.set(`com.sap.cds:cds-maven-plugin:${archetypeVersion}:addTargetPlatform`)
params.set(`-DtargetPlatform`, `cloudfoundry`) // REVISIT: No special handling for HANA
} else {
params.set(`com.sap.cds:cds-maven-plugin:${archetypeVersion}:add`)
params.set(`-Dfeature`, feature)
const profile = cds.cli.options.for
params.set(`-Dprofile`, profile === 'production' ? 'cloud' : profile ?? 'default')
const { options } = cds.cli
if (options[JAVA_MVN]) {
this._scanMvnParams(options[JAVA_MVN], params);
}
if (options.force) {
params.set(`-Doverwrite`, true);
}
return this._finalize(params);
}
getAddHanaCmdArgs() {
const params = new Map();
params.set(`com.sap.cds:cds-maven-plugin:${MAVEN_ARCHETYPE_VERSION}:addTargetPlatform`);
params.set(`-DtargetPlatform`, `cloudfoundry`);
const javaMvn = cds.cli.options[JAVA_MVN]
if (javaMvn) {
this._scanMvnParams(javaMvn, params);
if (force) params.set(`-Doverwrite`, true)
try {
await cmd.spawnCommand('mvn', _finalize(params), { cwd: cds.root })
} catch (err) {
if (err.code === 'ENOENT' && err.path === 'mvn') {
throw `Maven executable 'mvn' not found. Follow ${term.info(URLS.MAVEN_INSTALL_HELP)} and install Maven on your machine.`
}
return this._finalize(params);
throw err
}
}
sanitizePackageName(packageName) {
return packageName.toLowerCase().replace(/-/g, '_').replace(/\.([0-9])/g, '._$1').replace(/^([0-9])/, '_$1').replace(/\.+/g, '.');
}
module.exports.init = async function (projectName, options = cds.cli.options) {
const params = new Map()
async getGenerateCmdArgs(projectName, options = cds.cli.options) {
const params = new Map();
// set header
params.set(`archetype:generate`);
params.set(`-DarchetypeArtifactId`, `cds-services-archetype`);
params.set(`-DarchetypeGroupId`, `com.sap.cds`);
params.set(`-DarchetypeVersion`, MAVEN_ARCHETYPE_VERSION);
params.set('-DinteractiveMode', false);
// set header
params.set(`archetype:generate`);
params.set(`-DarchetypeArtifactId`, `cds-services-archetype`);
params.set(`-DarchetypeGroupId`, `com.sap.cds`);
params.set(`-DarchetypeVersion`, MAVEN_ARCHETYPE_VERSION);
params.set('-DinteractiveMode', false);
// set defaults, might be changed by java:mvn param
params.set(`-DartifactId`, projectName);
params.set(`-DgroupId`, 'customer');
// set defaults, might be changed by java:mvn param
params.set(`-DartifactId`, projectName);
params.set(`-DgroupId`, DEFAULT_GROUP_ID);
const { 'java:mvn': javaMvn } = options
if (javaMvn) _scan(javaMvn).forEach(([k,v]) => params.set(k,v))
const hasHana = !!(options.add && options.add.has(HANA));
if (hasHana) {
params.set(`-DtargetPlatform`, `cloudfoundry`);
// -DjdkVersion=[11|17]
if (!params.get('-DjdkVersion')) {
let javaVersion
try {
javaVersion = await execAsync('java -version');
} catch (err) {
DEBUG?.(err);
}
if (options[JAVA_MVN]) {
this._scanMvnParams(options[JAVA_MVN], params);
}
// -DjdkVersion=[11|17]
if (!params.get('-DjdkVersion')) {
let javaVersion
try {
javaVersion = await execAsync('java -version');
} catch (err) {
DEBUG?.(err);
}
const [,version] = javaVersion?.stderr?.match(/version\s*"?(\d+\.\d+\.\d+)"?/) ?? [];
if (version) {
const [major] = version.split('.').map(Number);
if (major < 17) {
if (process.env.WS_BASE_URL) {
throw `Java version ${version} is not supported. Use Java 17 or higher. You can use command 'Java: Set Default JDK' to switch to Java 17.`
}
throw `Java version ${version} is not supported. Use Java 17 or higher.`
const [,version] = javaVersion?.stderr?.match(/version\s*"?(\d+\.\d+\.\d+)"?/) ?? [];
if (version) {
const [major] = version.split('.').map(Number);
if (major < 17) {
if (process.env.WS_BASE_URL) {
throw `Java version ${version} is not supported. Use Java 17 or higher. You can use command 'Java: Set Default JDK' to switch to Java 17.`
}
throw `Java version ${version} is not supported. Use Java 17 or higher.`
}
}
}
if (options[JAVA_PACKAGE]) {
params.set(`-Dpackage`, options[JAVA_PACKAGE]);
if (options[JAVA_PACKAGE]) {
params.set(`-Dpackage`, options[JAVA_PACKAGE]);
}
const packageFromParam = params.get(`-Dpackage`);
if (packageFromParam) {
// explicitly specified, so do not modify
if (!options.force && !packageFromParam.match(REGEX_JAVA_PACKAGE)) {
throw `Package '${packageFromParam}' is an invalid Java package name. Use --force to use it anyway.`
}
} else {
// potentially overwritten by mvn param
const groupId = params.get(`-DgroupId`);
let packageName = `${groupId}.${projectName}`;
const artifactId = params.get('-DartifactId');
if (!packageName.match(REGEX_JAVA_PACKAGE)) {
packageName = sanitize(packageName);
console.warn(`The derived package name '${groupId}.${projectName}' is not a valid Java package name. Using '${packageName}' instead.`);
}
params.set(`-Dpackage`, packageName);
}
const packageFromParam = params.get(`-Dpackage`);
if (packageFromParam) {
// explicitly specified, so do not modify
if (!options.force && !packageFromParam.match(REGEX_JAVA_PACKAGE)) {
throw `Package '${packageFromParam}' is an invalid Java package name. Use --force to use it anyway.`
}
} else {
// potentially overwritten by mvn param
const groupId = params.get(`-DgroupId`);
let packageName = `${groupId}.${projectName}`;
const artifactId = params.get('-DartifactId')
const archetypeVersion = params.get('-DarchetypeVersion')
return { params: _finalize(params), archetypeVersion, artifactId }
}
if (!packageName.match(REGEX_JAVA_PACKAGE)) {
packageName = this.sanitizePackageName(packageName);
LOG.warn(`The derived package name '${groupId}.${projectName}' is not a valid Java package name. Using '${packageName}' instead.`);
}
params.set(`-Dpackage`, packageName);
}
// Sanitizes an arbitrary name into a valid Java package name:
// lowercased, hyphens turned into underscores, digit-led segments
// prefixed with an underscore, and runs of dots collapsed into one.
// The replacement steps run in order — order matters (e.g. the
// leading-digit rule must see the dot-digit rewrite first).
const sanitize = module.exports.sanitize = name => {
  const steps = [
    [/-/g, '_'],          // hyphens are illegal in package names
    [/\.([0-9])/g, '._$1'], // a segment must not start with a digit
    [/^([0-9])/, '_$1'],    // ...including the very first segment
    [/\.+/g, '.'],          // collapse accidental '..' runs
  ]
  return steps.reduce((acc, [pattern, replacement]) => acc.replace(pattern, replacement), name.toLowerCase())
}
return {
cmdLine: this._finalize(params),
archetypeVersion: params.get('-DarchetypeVersion'),
artifactId
};
}
// Parses a raw Maven parameter string into [key, value] pairs,
// normalizing each key to its '-D' system-property form.
// Examples:
//   'a=1'         -> ['-Da', '1']
//   '-Da=[x,y,z]' -> ['-Da', 'x,y,z']   (Maven list syntax)
// A fresh regex is built per call: a module-level /g constant would
// carry its lastIndex match state across invocations.
function _scan(mvnParams) {
  const paramRegex = /([\w\-.]+)=([\w\-.]+|\[([\w.\-,]+)\])/g
  const entries = []
  for (const [, rawKey, plainValue, listValue] of mvnParams.matchAll(paramRegex)) {
    const normalizedKey = rawKey.startsWith('-D') ? rawKey : `-D${rawKey}`
    entries.push([normalizedKey, listValue || plainValue])
  }
  return entries
}
// Runs `mvn` with the given arguments in the project root (cds.root),
// delegating to cmd.spawnCommand.
// @param {string[]} args - Command-line arguments passed to `mvn`.
// @throws {string} A friendly installation hint when the `mvn` executable
//   itself cannot be found (ENOENT with path 'mvn').
// @throws {*} Any other spawn failure is rethrown unchanged.
async execMvn(args) {
try {
await cmd.spawnCommand('mvn', args, { cwd: cds.root });
} catch (err) {
// ENOENT on path 'mvn' means Maven is not installed or not on PATH —
// point the user at the install guide instead of a raw spawn error.
if (err.code === 'ENOENT' && err.path === 'mvn') {
throw `Maven executable 'mvn' not found, follow ${term.info(URLS.MAVEN_INSTALL_HELP)} and install Maven on your machine.`;
}
// Anything else (non-zero exit code, other spawn errors) propagates to the caller.
throw err;
}
}
// Finalizes the accumulated Maven parameter map into a CLI argument array.
// Adds standard flags (colored output, debug/verbose in DEBUG mode,
// batch mode on CI) and renders each entry as 'key=value', or as the
// bare key when its value is null/undefined (flag-style args like '-X').
// NOTE: mutates the given Map in place (adds the standard flags).
// @param {Map<string, string|null|undefined>} params - Maven parameters.
// @returns {string[]} The finalized argument list for spawning `mvn`.
function _finalize(params) {
  // Always force colored Maven output so logs stay readable.
  params.set('-Dstyle.color', 'always')
  if (DEBUG) {
    params.set('-X') // Maven debug output
    params.set('-e') // full stack traces on errors
  }
  // Non-interactive mode on CI servers avoids download-progress noise.
  if (process.env.CI) params.set('--batch-mode')
  const final = Array.from(params, ([arg, val]) => val !== null && val !== undefined ? `${arg}=${val}` : arg)
  // Fixed: previously concatenated 'mvn' directly to the first argument
  // (printed e.g. 'mvn-Dstyle.color=always …'); add the missing space.
  DEBUG?.(term.highlight('mvn ' + final.join(' ')))
  return final
}
module.exports = new MvnArchetypeUtil();
const { join } = require('path')
const cds = require('../cds')
const { readdir } = cds.utils
const DEBUG = cds.debug('add')
const { env4 } = require('./projectReader')
const DEBUG = cds.debug('cli|add')

@@ -32,2 +31,9 @@ module.exports = class Plugin {

/**
* Gets a brief help text for the plugin.
* Displayed when all plugins are listed with `cds add`.
* @returns {string} The help text.
*/
static help() { return '' }
/**
* Determines if the plugin has already been added to the project.

@@ -38,3 +44,3 @@ * @param {import('@sap/cds').env} env - The env for the 'production' profile.

static hasInProduction(env) { // eslint-disable-line no-unused-vars
return false
return true
}

@@ -53,5 +59,2 @@

/* ––––––––––––

@@ -64,3 +67,3 @@ Yeoman Generator

*/
async finalize() { } // REVISIT: Required for yo-generator, check if needed any more?
async finalize() { }

@@ -76,2 +79,3 @@ /* ––––––––––––

const { registered } = require('./add')
const { env4 } = require('./projectReader')
const processPlugins = async (plugins) => {

@@ -78,0 +82,0 @@ for (const plugin of plugins) {

@@ -12,3 +12,5 @@ const cds = require('../cds')

this.readProject = this.readProject.bind(this)
}
#setPackageInfo() {
// 1. Use project name and static default values

@@ -73,2 +75,4 @@ this.appVersion = cds.cli.options.add?.has('java') ? '1.0.0-SNAPSHOT' : '1.0.0'

* @property {boolean} hasUI Indicates if the project has a UI.
* @property {boolean} hasUI5 Indicates if the project is a UI5 project.
* @property {boolean} isUI5 Indicates if the project is a UI5 project.
* @property {boolean} isJava Indicates if the project is using Java.

@@ -87,5 +91,10 @@ * @property {boolean} isNodejs Indicates if the project is using Node.js.

* @property {boolean} hasEnterpriseMessaging Indicates if the project uses SAP BTP Event Mesh.
* @property {boolean} hasAttachments Indicates if the project uses SAP BTP Object Store Service.
* @property {boolean} hasApprouter Indicates if the project uses the SAP Application Router.
* @property {boolean} hasHtml5Repo Indicates if the project uses the SAP BTP HTML5 Application Repository.
* @property {boolean} hasPortal Indicates if the project uses the SAP Cloud Portal service.
* @property {boolean} hasDestination Indicates if the project uses the SAP BTP Destination service.
* @property {boolean} hasMTXRoute Indicates if the approuter should have a multitenant route.
* @property {boolean} hasMTXRouteJava Indicates if the approuter should have a multitenant subscription route (java).
* @property {boolean} hasMalwareScanner Indicates if the project uses the SAP Malware Scanning service.
*

@@ -101,2 +110,3 @@ */

readProject() {
if (!this.appName) this.#setPackageInfo()
const env = this.env4('production')

@@ -107,2 +117,7 @@ const { appVersion, appName, appId, appDescription, jdkVersion } = this

const _inProd = plugin => require(`./template/${plugin}`).hasInProduction(env)
const _ui5 = () => exists('package.json') && !!JSON.parse(fs.readFileSync(path.resolve(cds.root, 'package.json'))).sapux
const _uiEmbedded = () => exists(join(env.folders.app, 'xs-app.json'))
const _uiModule = () => !_uiEmbedded() && isdir(env.folders.app) && fs.readdirSync(resolve(cds.root, env.folders.app)).length > 0
const _isJava = () => exists('pom.xml') || cds.cli.options?.add?.has('java')
const isApp = name => name !== 'appconfig' && !name.startsWith('_') && name !== 'router' && name !== 'portal'
const reserved = {

@@ -113,8 +128,10 @@ has: x => _inProd(x),

language: () => exists('pom.xml') ? 'java' : 'nodejs',
isJava: () => exists('pom.xml') || cds.cli.options?.add?.has('java'),
isNodejs: () => !exists('pom.xml') && !cds.cli.options?.add?.has('java'),
isJava: () => _isJava(),
isNodejs: () => !_isJava(),
srvPath: () => join(env.build.target, env.folders.srv.replace(/\/$/, '')).replace(/\\/g, '/'),
archiveName: () => {
const pom = parseXml(resolve(cds.root, env.folders.srv, 'pom.xml'))
return (pom?.artifactId?.[0] ?? basename(cds.root)) + '-exec.' + pom?.packaging?.[0]
const pomXmlPath = resolve(cds.root, env.folders.srv, 'pom.xml')
const pom = exists(pomXmlPath) ? parseXml(pomXmlPath) : {}
const { artifactId = [basename(cds.root)], packaging = ['jar'] } = pom
return artifactId[0] + '-exec.' + packaging[0]
},

@@ -128,7 +145,17 @@ db: () => {

configFile: () => exists('pom.xml') ? '.cdsrc.json' : 'package.json',
apps: () => (fs.readdirSync(path.resolve(cds.root, cds.env.folders.app))).filter(e =>
isdir(join(cds.root, cds.env.folders.app, e)) && e !== 'appconfig' && e !== '_i18n' && e !== 'router'
).map(app => ({ app })),
apps: () => {
const apps = fs.readdirSync(path.resolve(cds.root, cds.env.folders.app)).filter(e =>
isdir(path.join(cds.root, cds.env.folders.app, e)) && isApp(e)
)
return apps.map((app, i) => {
const manifestPath = path.resolve(cds.root, cds.env.folders.app, app, 'webapp/manifest.json')
const manifest = exists(manifestPath) ? JSON.parse(fs.readFileSync(manifestPath, 'utf-8')) : {}
const inbounds = manifest?.['sap.app']?.crossNavigation?.inbounds
const [firstInbound] = Object.values(inbounds ?? { intent: { semanticObject: 'Books', action: 'display' }})
const vizId = firstInbound.semanticObject + '-' + firstInbound.action
return { app, strippedApp: app.replace(/-/g, ''), vizId, isNotLast: i < apps.length - 1 }
})
},
appUIPaths: () => (fs.readdirSync(path.resolve(cds.root, cds.env.folders.app))).filter(e =>
isdir(join(cds.root, cds.env.folders.app, e)) && e !== 'appconfig' && e !== '_i18n' && e !== 'router'
isdir(join(cds.root, cds.env.folders.app, e)) && isApp(e)
),

@@ -138,14 +165,28 @@ appPath: () => env.folders.app,

jdkVersion: () => jdkVersion,
approuterPath: () => join(env.folders.app, 'router'),
approuterPath: () => _uiEmbedded() ? join(env.folders.app) : join(env.folders.app, 'router'),
appName: () => appName,
cleanedAppName: () => appName.replaceAll('_', '-'),
strippedAppName: () => appName.replace(/-/g, ''),
appId: () => appId,
appDescription: () => appDescription,
hasUIEmbedded: () => exists(join(env.folders.app, 'xs-app.json')),
hasUIModule: () => !this.hasUIEmbedded && isdir(env.folders.app) && fs.readdirSync(resolve(cds.root, env.folders.app)).length > 0,
hasUI: () => this.hasUIEmbedded || this.hasUIModule
hasUIEmbedded: _uiEmbedded,
hasUIModule: _uiModule,
hasUI: () => _uiEmbedded() || _uiModule(),
isUI5: _ui5,
hasUI5: _ui5,
hasMTXRoute: () => _inProd('extensibility') || (_inProd('multitenancy') && !_isJava()),
hasMTXRouteJava: () => (_inProd('helm-unified-runtime') || _inProd('helm')) && _isJava() && _inProd('multitenancy'),
}
// Automatically creates availability checks for `cds add` commands.
// Maps to the `hasInProduction` implemented in the command.
// E.g. `cds add hana` can be checked using `hasHana` or `isHana`
const project = (() => {
const defined = {}
const _get = property => { const p = property
if (p in reserved) return reserved[p]()
for (const prefix of ['has', 'is']) if (p.startsWith(prefix) && p.length > prefix.length) {
return _inProd(p.slice(prefix.length).replace(/([a-z0-9])([A-Z])/g, '$1-$2').toLowerCase())
}
}
return new Proxy({}, {

@@ -158,9 +199,2 @@ get(_, p) { return _get(p) ?? defined[p] },

const _get = property => { const p = property
if (p in reserved) return reserved[p]()
for (const prefix of ['has', 'is']) if (p.startsWith(prefix) && p.length > prefix.length) {
return _inProd(p.slice(prefix.length).replace(/([a-z0-9])([A-Z])/g, '$1-$2').toLowerCase())
}
}
DEBUG?.({ project })

@@ -167,0 +201,0 @@ return project

@@ -6,4 +6,7 @@ // Registry of sequence matchers in mta.yaml files

const srvJava4 = path => ({ in: 'modules', where: { type: 'java', path } })
const srv4 = cds.env['project-nature'] === 'java' || cds.cli.options?.add?.has('java') ? srvJava4 : srvNode4
// REVISIT: Clean up
const isJava = process.argv[process.argv.indexOf('--add') + 1].split(',').includes('java') || cds.env['project-nature'] === 'java' || cds.cli.options?.add?.has('java')
const srv4 = isJava ? srvJava4 : srvNode4
const approuter = { in: 'modules', where: { type: 'approuter.nodejs' } }

@@ -21,2 +24,7 @@

const destinations = {
in: 'modules',
where: { type: 'com.sap.application.content', 'build-parameters.no-source': true }
}
const connectivity = {

@@ -47,2 +55,12 @@ in: 'resources',

const attachments = {
in: 'resources',
where: { type: 'org.cloudfoundry.managed-service', 'parameters.service': 'objectstore' }
}
const malwareScanner = {
in: 'resources',
where: { type: 'org.cloudfoundry.managed-service', 'parameters.service': 'malware-scanner' }
}
const hdbDeployer = {

@@ -128,3 +146,3 @@ in: 'modules',

const approuterExtensibility = {
const approuterMTXRoute = {
in: 'routes',

@@ -134,2 +152,7 @@ where: { source: '^/-/cds/.*', destination: 'mtx-api', authenticationType: 'none' }

const approuterMTXRouteJava = {
in: 'routes',
where: { source: '^/mt/.*', destination: 'srv-api', authenticationType: 'none' }
}
const html5RepoHost = {

@@ -145,7 +168,22 @@ in: 'resources',

const appContent = {
const appDeployer = {
in: 'modules',
where: { type: 'com.sap.application.content' }
where: { type: 'com.sap.application.content', path: isJava ? '.' : 'gen' }
}
const portal = {
in: 'resources',
where: { 'parameters.service': 'portal' },
}
const portalDeployer = {
in: 'modules',
where: { type: 'com.sap.application.content', path: 'app/portal' }
}
const auditlog = {
in: 'resources',
where: { type: 'org.cloudfoundry.managed-service', 'parameters.service': 'auditlog' }
}
module.exports = {

@@ -156,2 +194,3 @@ srv4,

destination,
destinations,
connectivity,

@@ -178,6 +217,12 @@ enterpriseMessaging,

applicationLogging,
approuterExtensibility,
approuterMTXRoute,
approuterMTXRouteJava,
html5RepoHost,
html5Runtime,
appContent
appDeployer,
portal,
portalDeployer,
auditlog,
attachments,
malwareScanner,
}

@@ -7,4 +7,8 @@ const { readProject } = require('../../projectReader')

static help() {
return 'SAP BTP Application Logging Service'
}
static hasInProduction(env) {
return !!env.features.kibana_formatter
return !!env.requires?.['application-logging']
}

@@ -18,2 +22,10 @@

async run() {
const project = readProject()
const { isNodejs } = project
if (isNodejs) {
await merge(__dirname, 'files/package.json.hbs').into('package.json')
}
}
async combine() {

@@ -20,0 +32,0 @@ const project = readProject()

const cds = require('../../../cds')
const { exists, copy } = cds.utils
const { exists } = cds.utils
const { join } = require('path')

@@ -11,3 +11,4 @@ const { readProject } = require('../../projectReader')

requiredAppApi4, providedAppApi,
approuterExtensibility // xs-app.json config
approuterMTXRoute,
approuterMTXRouteJava // xs-app.json config
} = require('../../registries/mta')

@@ -17,2 +18,6 @@

static help() {
return 'dynamic routing using @sap/approuter'
}
static hasInProduction(env) {

@@ -22,6 +27,2 @@ return exists(join(env.folders.app, 'xs-app.json')) || exists(join(env.folders.app, 'router', 'xs-app.json'))

requires() {
return ['xsuaa']
}
async run() {

@@ -37,3 +38,3 @@ const { approuterPath } = readProject()

const project = readProject(this.options)
const { isNodejs, isJava, hasExtensibility, hasMultitenancy, hasMta, hasHelm, hasHelmUnifiedRuntime, srvPath, approuterPath } = project
const { isNodejs, isJava, hasMTXRoute, hasMultitenancy, hasXsuaa, hasMta, hasHelm, hasHelmUnifiedRuntime, hasMTXRouteJava, hasContainerize, srvPath, approuterPath } = project

@@ -48,14 +49,15 @@ if (hasMta) {

if (hasMultitenancy) modules.push(mtxSidecar)
const additions = [...modules, xsuaa, ...apis]
await merge(__dirname, 'files', 'mta.yaml.hbs').into('mta.yaml', {
with: project,
additions,
relationships: [{
insert: [xsuaa, 'name'],
into: [srv, 'requires', 'name']
}, {
insert: [xsuaa, 'name'],
into: [approuter, 'requires', 'name']
}],
const additions = [...modules, ...apis]
if (hasXsuaa) additions.push(xsuaa)
const relationships = []
if (hasXsuaa) relationships.push({
insert: [xsuaa, 'name'],
into: [srv, 'requires', 'name']
}, {
insert: [xsuaa, 'name'],
into: [approuter, 'requires', 'name']
})
await merge(__dirname, 'files/mta.yaml.hbs').into('mta.yaml', {
with: project, additions, relationships,
})
}

@@ -65,15 +67,17 @@

await merge(__dirname, 'files', 'values.yaml.hbs').into('chart/values.yaml', { with: project })
await merge({ approuter: { envFrom: [{
configMapRef: {
name: "{{ .Release.Name }}-approuter-configmap"
}
}]}}).into('chart/values.yaml', { with: project })
}
await copy(join(__dirname, 'files/approuter-configmap.yaml')).to('chart', 'templates', 'approuter-configmap.yaml')
if (hasContainerize) {
const additions = [{
in: 'modules',
where: { name: `${project.appName}-approuter` }
}]
await merge(__dirname, '/files/containerize.yaml.hbs').into('containerize.yaml', { with: project, additions })
}
const xsAppPath = join(approuterPath, 'xs-app.json')
const additions = hasExtensibility ? [{ ...approuterExtensibility, at: 0 }] : []
const additions = hasMTXRoute ? [{ ...approuterMTXRoute, at: 0 }] : []
if (hasMTXRouteJava) additions.push({ ...approuterMTXRouteJava, at: 0 })
await merge(__dirname, 'files/xs-app.json.hbs').into(xsAppPath, { project, additions })
}
}

@@ -11,2 +11,6 @@ const { join } = require('path')

static help() {
return 'Cloud Foundry deployment using manifest files'
}
async canRun() {

@@ -13,0 +17,0 @@ if (cds.cli.options.force) {

@@ -11,7 +11,13 @@

const SUPPORTED_LINUX_SHELLS = ['bash', 'fish', 'gitbash', 'zsh'];
const SUPPORTED_WINDOWS_SHELLS = ['ps'];
const SUPPORTED_SHELLS = [...SUPPORTED_LINUX_SHELLS, ...SUPPORTED_WINDOWS_SHELLS].sort();
module.exports = new class CompletionSetup {
supportedShells = SUPPORTED_SHELLS
async setup(addEntry, options) {
const shell = options.shell
|| (process.env.MSYSTEM?.includes('MINGW64') ? 'gitbash' : process.env.SHELL?.match(/(bash|zsh)/)?.[0])
|| (process.env.MSYSTEM?.includes('MINGW64') ? 'gitbash' : process.env.SHELL?.match(/(bash|fish|zsh)/)?.[0])
|| (process.env.PSModulePath ? 'ps' : null);

@@ -21,5 +27,6 @@

case 'bash':
case 'fish':
case 'gitbash':
case 'zsh':
return this.setupBashZsh(addEntry, shell);
return this.setupLinuxLike(addEntry, shell);
case 'ps':

@@ -31,5 +38,5 @@ return this.setupPs(addEntry);

if (!shellType) {
throw `Unsupported shell.\nSupported shells are: bash, zsh, gitbash, ps.`;
throw `Unsupported shell.\nSupported shells are: ${SUPPORTED_SHELLS.join(', ')}.`;
}
return this.setupBashZsh(addEntry, shellType);
return this.setupLinuxLike(addEntry, shellType);
} catch (err) {

@@ -39,3 +46,3 @@ throw `Error during completion setup: ${err}`;

default:
throw `Unsupported shell: ${shell}.\nSupported shells are: bash, zsh, gitbash, ps.`;
throw `Unsupported shell: ${shell}.\nSupported shells are: ${SUPPORTED_SHELLS.join(', ')}.`;
}

@@ -74,19 +81,22 @@ }

async setupBashZsh(addEntry, shell) {
const isBash = shell.includes('bash'); // also for gitbash
const isZsh = shell.includes('zsh');
const isGitBash = shell.includes('gitbash');
if (!isBash && !isZsh) {
throw 'Completion is only supported for bash, zsh or Git Bash shell.';
async setupLinuxLike(addEntry, shell) {
let profileFile;
switch (shell) {
case 'bash':
profileFile = os.platform() === 'darwin' ? '.bash_profile' : '.bashrc';
break;
case 'gitbash':
profileFile = '.bash_profile'
break;
case 'zsh':
profileFile = '.zshrc';
break;
case 'fish':
profileFile = '.config/fish/config.fish';
break;
default:
throw `Completion is only supported for ${SUPPORTED_LINUX_SHELLS.join(', ')}.`;
}
const profileFile = path.join(
os.homedir(),
isZsh
? '.zshrc'
: os.platform() === 'darwin' || isGitBash
? '.bash_profile'
: '.bashrc'
);
profileFile = path.join(os.homedir(), profileFile);

@@ -117,2 +127,19 @@ await this.changeProfile(profileFile, shell, addEntry);

switch (shell) {
case 'bash':
case 'gitbash':
return `
CDS_PROFILE=$(cds completion --shell ${shell} --profile 2> /dev/null) || CDS_PROFILE=""
if [ -r "$CDS_PROFILE" ]; then
. "$CDS_PROFILE"
fi
`
case 'fish':
return `
if type "cds" >/dev/null 2>&1
set -l CDS_PROFILE (cds completion --shell fish --profile 2> /dev/null)
if test -e "$CDS_PROFILE"
source "$CDS_PROFILE"
end
end
`
case 'ps':

@@ -123,14 +150,7 @@ return `

if (Test-Path -Path "$CDS_PROFILE" -PathType Leaf) {
. $CDS_PROFILE
. "$CDS_PROFILE"
}
} catch {}
`
case 'bash':
case 'gitbash':
return `
CDS_PROFILE=$(cds completion --shell ${shell} --profile 2> /dev/null) || CDS_PROFILE=""
if [ -r "$CDS_PROFILE" ]; then
. "$CDS_PROFILE"
fi
`
case 'zsh':

@@ -146,3 +166,4 @@ return `

`
default: throw `Unsupported shell: ${shell}`;
default:
throw `Unsupported shell: ${shell}`;
}

@@ -182,9 +203,9 @@ }

await this.createBackup(profileFile);
console.log(`Created backup ${bold(`~/${relProfilePath}.cds.bak`)}.`);
console.log(`Created backup ${bold(`~/${relProfilePath}.cds.bak`)}`);
}
await fsp.writeFile(profileFile, profile);
if (addEntry) {
console.log(`Added entry to ${bold(`~/${relProfilePath}`)}.`);
console.log(`Added entry to ${bold(`~/${relProfilePath}`)}`);
} else {
console.log(`Removed entry from ${bold(`~/${relProfilePath}`)}.`);
console.log(`Removed entry from ${bold(`~/${relProfilePath}`)}`);
}

@@ -191,0 +212,0 @@ }

@@ -5,3 +5,9 @@ const cds = require('../../../cds');

module.exports = class CompletionTemplate extends require('../../plugin') {
static help() {
return 'shell completion for cds commands'
}
options() {
const completionSetup = require('./completionSetup');
return {

@@ -13,3 +19,3 @@ 'shell': {

Usually the shell is determined automatically and this is only for cases where the automatic
detection fails. Valid values: bash, gitbash, zsh, ps`
detection fails. Valid values: ${completionSetup.supportedShells.join(', ')}.`
}

@@ -16,0 +22,0 @@ }

@@ -1,4 +0,1 @@

const cds = require('../../../cds')
const { copy } = cds.utils
const { join } = require('path')
const { readProject } = require('../../projectReader')

@@ -10,2 +7,6 @@ const { merge } = require('../../merge')

static help() {
return 'SAP BTP Connectivity Service'
}
static hasInProduction(env) {

@@ -43,5 +44,4 @@ return !!env.requires.connectivity

})
await copy(join(__dirname, 'files', 'connectivity-proxy-info.yaml')).to('chart', 'templates', 'connectivity-proxy-info.yaml') // REVISIT: Move to build
}
}
}

@@ -12,2 +12,6 @@ const cds = require('../../../cds')

static help() {
return 'add CSV headers for modeled entities'
}
options() {

@@ -14,0 +18,0 @@ return {

const { readProject } = require('../../projectReader')
const { merge } = require('../../merge')
const { srv4, destination } = require('../../registries/mta')
const { srv4, approuter, destination } = require('../../registries/mta')
module.exports = class DestinationTemplate extends require('../../plugin') {
static help() {
return 'SAP BTP Destination Service'
}
static hasInProduction(env) {

@@ -23,3 +27,3 @@ const kinds = { odata: 1, 'odata-v2': 1, 'odata-v4': 1, rest: 1 }

const project = readProject()
const { hasHelm, hasHelmUnifiedRuntime, hasMta, srvPath } = project
const { hasHelm, hasApprouter, hasHelmUnifiedRuntime, hasMta, srvPath } = project

@@ -33,2 +37,8 @@ if (hasMta) {

}]
if (hasApprouter) {
relationships.push({
insert: [destination, 'name'],
into: [approuter, 'requires', 'name']
})
}
await merge(__dirname, 'files/mta.yaml.hbs').into('mta.yaml', { with: project, additions, relationships })

@@ -35,0 +45,0 @@ }

@@ -9,2 +9,6 @@ const cds = require('../../../cds')

static help() {
return 'messaging via shared SAP Enterprise Messaging'
}
options() {

@@ -15,3 +19,3 @@ return {

short: 'c',
help: 'Use CloudEvents formatting',
help: 'Use CloudEvents formatting.',
}

@@ -18,0 +22,0 @@ }

@@ -1,4 +0,3 @@

const YAML = require('@sap/cds-foss')('yaml')
const cds = require('../../../cds')
const { read } = cds.utils
const { read, yaml: YAML } = cds.utils
const { readProject } = require('../../projectReader')

@@ -10,2 +9,6 @@ const { merge } = require('../../merge')

static help() {
return 'messaging via SAP Enterprise Messaging'
}
options() {

@@ -16,3 +19,3 @@ return {

short: 'c',
help: 'Use CloudEvents formatting',
help: 'Use CloudEvents formatting.',
}

@@ -19,0 +22,0 @@ }

@@ -9,2 +9,5 @@ const cds = require('../../../cds')

static help() {
return 'tenant-specific model extensibility'
}

@@ -33,3 +36,2 @@ requires() {

{ in: 'scopes', where: { name: '$XSAPPNAME.cds.ExtensionDeveloper' }},
{ in: 'scopes', where: { name: '$XSAPPNAME.cds.UIFlexDeveloper' }},
{ in: 'role-templates', where: { name: 'ExtensionDeveloper' }}

@@ -36,0 +38,0 @@ ]

@@ -7,2 +7,6 @@ const cds = require('../../../cds'), { read } = cds.utils

static help() {
return 'messaging via file system'
}
async canRun() {

@@ -9,0 +13,0 @@ const { isJava } = readProject()

@@ -7,2 +7,6 @@ const mvn = require('../../mvn')

static help() {
return 'database support for H2'
}
async canRun() {

@@ -9,0 +13,0 @@ const { isNodejs } = readProject()

{
"name": "deploy",
"dependencies": {
"@sap/hdi-deploy": "^4"
"hdb": "^0",
"@sap/hdi-deploy": "^5"
},

@@ -10,5 +11,5 @@ "engines": {

"scripts": {
"start": "node node_modules/@sap/hdi-deploy/deploy.js",
"start": "node node_modules/@sap/hdi-deploy/deploy.js --use-hdb",
"build": "npm i && npx cds build .. --for hana --production"
}
}

@@ -8,4 +8,2 @@ const { join } = require('path')

const { OPTIONS: { JAVA } } = require('../../constants')
const { srv4, hdbDeployer, serviceManager, hdiContainer, mtxSidecar4 } = require('../../registries/mta')

@@ -15,4 +13,8 @@

static help() {
return 'database support for SAP HANA'
}
static hasInProduction(env) {
return env.requires?.db?.kind === 'hana'
return env.requires?.db?.kind === 'hana' || env.dependencies?.['@cap-js/hana']
}

@@ -31,5 +33,3 @@

await copy(join(__dirname, 'files', 'db')).to(db.folder)
// REVISIT: shouldn't behave differently during cds init
// if called during cds init the project generation call to AT will add settings
if (!cds.cli.options.add.has(JAVA)) await mvn.execMvn(mvn.getAddHanaCmdArgs())
await mvn.add('hana')
}

@@ -48,3 +48,3 @@

const project = readProject()
const { hasMta, hasHelm, hasHelmUnifiedRuntime, hasHtml5Repo, isJava, hasMultitenancy, srvPath } = project
const { hasMta, hasHelm, hasHelmUnifiedRuntime, hasHtml5Repo, hasContainerize, isJava, hasMultitenancy, srvPath } = project

@@ -105,3 +105,11 @@ if (hasMta) {

}
if (hasContainerize && !hasMultitenancy) {
const additions = [{
in: 'modules',
where: { name: `${project.appName}-hana-deployer` }
}]
await merge(__dirname, '/files/containerize.yaml.hbs').into('containerize.yaml', { with: project, additions })
}
}
}
const cds = require('../../..')
const { copy, rimraf, exists, fs } = cds.utils
const { rimraf, exists, readFileSync, yaml } = cds.utils
const { join } = require('path')
const { merge, readProject } = cds.add ?? {}
module.exports = class HelmUnifiedRuntimeTemplate extends require('../../plugin') {
module.exports = class HelmUnifiedRuntimeTemplate extends require('../helm') {
static help() {
return 'Kyma deployment using Unified Runtime Helm charts'
}
options() {

@@ -17,5 +21,8 @@ return {

async canRun() {
const hasInProduction = HelmUnifiedRuntimeTemplate.hasInProduction()
if (cds.cli.options.force) {
await rimraf(join('chart'))
return true
await rimraf(join('chart'))
return true
} else if (exists(join(cds.root, 'chart')) && !hasInProduction) {
throw `Chart already exists. Use --force to overwrite.`
}

@@ -26,58 +33,14 @@ return true

static hasInProduction() {
const chart = exists(join('chart/Chart.yaml')) ? fs.readFileSync(join(cds.root, 'chart/Chart.yaml'), 'utf8') : ''
return chart.includes('https://int.repositories.cloud.sap/artifactory/virtual-unified-runtime-helm-dmz') || cds.cli.options.add.has('helm-unified-runtime')
}
if (cds.cli.options?.add?.has('helm-unified-runtime')) return true
async run() {
const project = readProject()
if (exists(join(cds.root, 'chart', 'Chart.yaml'))) {
const chartFile = readFileSync(join(cds.root, 'chart', 'Chart.yaml'), 'utf8')
const chartYaml = yaml.parseDocument(chartFile).toJS()
return chartYaml.annotations?.['app.kubernetes.io/managed-by'] === 'cds-dk/helm'
&& chartYaml.dependencies?.find(dep => dep.name === 'web-application')?.repository !== undefined
}
await merge(__dirname, 'files/Chart.yaml.hbs').into('chart/Chart.yaml', { with: project })
await merge(__dirname, 'files/values.yaml.hbs').into('chart/values.yaml', { with: project })
await copy(join(__dirname, 'files', 'templates')).to('chart/templates') // REVISIT: Move to build
await copy(join(__dirname, 'files', 'values.schema.json')).to('chart/values.schema.json') // REVISIT: Move to build (if possible)
await copy(join(__dirname, 'files', 'values.root.schema.json')).to('chart/values.root.schema.json') // REVISIT: Move to build (if possible)
await this._mergeDependency('web-application', 'srv')
return false
}
async combine() {
const { hasDestination, hasEnterpriseMessaging, hasHana, hasMultitenancy, hasXsuaa, hasHtml5Repo, hasApprouter, hasKafka } = readProject()
if (hasApprouter) {
await this._mergeDependency('web-application', 'approuter')
}
if (hasHtml5Repo) {
await this._mergeDependency('service-instance', 'html5-apps-repo-host')
await this._mergeDependency('content-deployment', 'html5-apps-deployer')
if (hasApprouter) {
await this._mergeDependency('service-instance', 'html5-apps-repo-runtime')
}
}
if (hasMultitenancy) {
await this._mergeDependency('web-application', 'sidecar')
await this._mergeDependency('service-instance', 'saas-registry')
}
if (hasDestination) {
await this._mergeDependency('service-instance', 'destination')
}
if (hasEnterpriseMessaging) {
await this._mergeDependency('service-instance', 'event-mesh')
}
if (hasXsuaa) {
await this._mergeDependency('service-instance', 'xsuaa')
}
if (hasKafka) {
await this._mergeDependency('service-instance', 'kafka')
}
if (hasHana) {
if (hasMultitenancy) {
await this._mergeDependency('service-instance', 'service-manager')
} else {
await this._mergeDependency('service-instance', 'hana')
await this._mergeDependency('content-deployment', 'hana-deployer')
}
}
}
async _mergeDependency(name, alias) {

@@ -84,0 +47,0 @@ const project = readProject()

@@ -1,13 +0,31 @@

const { join } = require('path');
const { join } = require('path')
const cds = require('../../../cds')
const { exists, rimraf, copy } = cds.utils
const { readProject } = require('../../projectReader');
const { exists, rimraf, copy, readFileSync, yaml } = cds.utils
const { readProject } = require('../../projectReader')
const { merge } = require('../../merge')
const { ask4 } = require('../../../util/question')
const { execSync } = require('child_process')
module.exports = class HelmTemplate extends require('../../plugin') {
static help() {
return 'Kyma deployment using Helm charts'
}
options() {
return {
'y': {
type: 'boolean',
help: 'If provided, the default values will be used for all prompts.'
}
}
}
async canRun() {
const hasInProduction = HelmTemplate.hasInProduction()
if (cds.cli.options.force) {
await rimraf(join('chart'))
return true
} else if (exists(join(cds.root, 'chart')) && !hasInProduction) {
throw `Chart already exists. Use --force to overwrite.`
}

@@ -19,4 +37,8 @@ return true

static hasInProduction() {
const { add } = cds.cli.options
return exists(join('chart', 'charts', 'web-application')) && exists(join('chart', 'values.yaml') || add.has('helm'))
if (exists('chart/Chart.yaml')) {
const chartFile = readFileSync(join(cds.root, 'chart', 'Chart.yaml'), 'utf8')
const chartYaml = yaml.parseDocument(chartFile).toJS()
return chartYaml.annotations?.["app.kubernetes.io/managed-by"] === 'cds-dk/helm'
}
return false
}

@@ -26,17 +48,53 @@

const project = readProject()
await copy(join(__dirname, 'files', 'chart')).to('chart')
project.domain = 'abc.com'
project.registry = 'registry-name'
project.imagePullSecret = 'docker-registry'
project.tag = 'latest'
if (cds.cli.command == 'add' && !HelmTemplate.hasInProduction()) {
let registry = 'registry-name', tag = 'latest', domain = 'abc.com'
// read domain using kubectl
try {
const domainCmd = execSync(`kubectl config view --minify --output jsonpath="{.clusters[*].cluster.server}"`, { stdio: "pipe" }).toString()
const domainStartIndex = domainCmd.indexOf('api.')
if (domainStartIndex !== -1) {
domain = domainCmd.substring(domainStartIndex + 4)
}
} catch (error) {
// ignore
}
// read project.registry
if (exists('containerize.yaml')) {
const containerizeYamlFile = readFileSync(join(cds.root, 'containerize.yaml'), 'utf8')
const containerizYaml = yaml.parseDocument(containerizeYamlFile).toJS()
registry = containerizYaml.repository
tag = containerizYaml.tag
}
let imagePullSecretInput = 'docker-registry'
if (!cds.cli.options['y']) {
const [domainAns, imagePullSecretAns, registryAns] = await ask4(`domain: (${domain}) `, 'imagePullSecret: (docker-registry) ', `registry: (${registry}) `)
domain = domainAns || domain
imagePullSecretInput = imagePullSecretAns || imagePullSecretInput
registry = registryAns || registry
}
project.domain = domain
project.imagePullSecret = imagePullSecretInput
project.registry = registry
project.tag = tag
}
await copy(join(__dirname, 'files', 'values.schema.json')).to('chart/values.schema.json')
await merge(__dirname, 'files/Chart.yaml.hbs').into('chart/Chart.yaml', { with: project })
await merge(__dirname, 'files/values.yaml.hbs').into('chart/values.yaml', { with: project })
await this._mergeDependency('web-application', 'srv')
await copy(join(__dirname, 'files', 'web-application')).to('chart/charts/web-application')
}
async combine() {
const { hasDestination, hasEnterpriseMessaging, hasHana, hasMultitenancy, hasXsuaa, hasHtml5Repo, hasApprouter, hasKafka } = readProject()
let hasContentDeployment = false
const { hasDestination, hasEnterpriseMessaging, hasEnterpriseMessagingShared, hasHana, hasMultitenancy, hasXsuaa, hasHtml5Repo, hasApprouter, hasKafka, hasAttachments, hasMalwareScanner } = readProject()
if (!exists('chart/charts/service-instance') && (hasHtml5Repo || hasMultitenancy || hasDestination || hasEnterpriseMessaging || hasXsuaa || hasHana || hasKafka)) {
await copy(join(__dirname, 'files', 'service-instance')).to('chart/charts/service-instance')
}
if (hasApprouter) {

@@ -47,3 +105,2 @@ await this._mergeDependency('web-application', 'approuter')

if (hasHtml5Repo) {
hasContentDeployment = true
await this._mergeDependency('service-instance', 'html5-apps-repo-host')

@@ -62,3 +119,6 @@ await this._mergeDependency('content-deployment', 'html5-apps-deployer')

}
if (hasEnterpriseMessaging) {
if (hasAttachments) {
await this._mergeDependency('service-instance', 'attachments')
}
if (hasEnterpriseMessaging || hasEnterpriseMessagingShared) {
await this._mergeDependency('service-instance', 'event-mesh')

@@ -73,3 +133,2 @@ }

} else {
hasContentDeployment = true
await this._mergeDependency('service-instance', 'hana')

@@ -82,4 +141,4 @@ await this._mergeDependency('content-deployment', 'hana-deployer')

}
if(!exists('chart/charts/content-deployment') && hasContentDeployment) {
await copy(join(__dirname, 'files', 'content-deployment')).to('chart/charts/content-deployment')
if (hasMalwareScanner) {
await this._mergeDependency('service-instance', 'malware-scanner')
}

@@ -86,0 +145,0 @@ }

const { join } = require('path')
const cds = require('../../../cds')
const { copy } = cds.utils
const { readProject } = require('../../projectReader')
const { merge } = require('../../merge')
const { srv4, destination, html5RepoHost, html5Runtime, appContent, approuter } = require('../../registries/mta')
const { srv4, destination, destinations, html5RepoHost, html5Runtime, appDeployer, approuter } = require('../../registries/mta')
module.exports = class Html5RepoTemplate extends require('../../plugin') {
static help() {
return 'SAP BTP HTML5 Application Repository'
}
requires() {

@@ -20,13 +22,26 @@ return ['destination']

const project = readProject()
const { apps, appPath, configFile } = project
await merge(__dirname, 'files', 'package.json.hbs').into(configFile, { with: project })
const { appUIPaths, apps, appPath, configFile, hasUI5 } = project
await merge(__dirname, 'files/package.json.hbs').into(configFile, { project })
await Promise.all(apps.map(async ({app}) => {
if (hasUI5) {
for (const { app } of apps) {
project.app = app
await merge(__dirname, 'files/ui5.yaml.hbs').into(join(appPath, app, 'ui5.yaml'), { project })
}
for (const { app } of apps) {
project.app = app
await merge(__dirname, 'files/ui5-deploy.yaml.hbs').into(join(appPath, app, 'ui5-deploy.yaml'), { project })
}
}
for (const { app } of apps) {
project.app = app
await merge(__dirname, 'files/ui5.yaml.hbs').into(join(appPath, app, 'ui5.yaml'), { project })
}))
await Promise.all(apps.map(async ({app}) => {
project.app = app
await merge(__dirname, 'files/app-package.json.hbs').into(join(appPath, app, 'package.json'), { project })
}))
}
// app deployer requires a manifest.json
await Promise.all(appUIPaths.map(p =>
merge(__dirname, 'files/manifest.json')
.into(join(appPath, p, 'webapp/manifest.json'), { project })
))
}

@@ -38,8 +53,6 @@

if (hasApprouter) {
await Promise.all(apps.map(async ({app}) => {
project.app = app
await merge(__dirname, 'files/xs-app.json.hbs').into(join(appPath, app, 'xs-app.json'), { project })
}))
}
await Promise.all(apps.map(async ({app}) => {
project.app = app
await merge(__dirname, 'files/xs-app.json.hbs').into(join(appPath, app, 'xs-app.json'), { project })
}))

@@ -53,9 +66,9 @@ if (hasMta) {

}))
const additions = [srv, appContent, destination, html5RepoHost, ...appModules]
const additions = [srv, appDeployer, destinations, destination, html5RepoHost, ...appModules]
const relationships = [{
insert: [destination, 'name'],
into: [appContent, 'requires', 'name'],
into: [appDeployer, 'requires', 'name'],
}, {
insert: [html5RepoHost, 'name'],
into: [appContent, 'requires', 'name'],
into: [appDeployer, 'requires', 'name'],
}]

@@ -77,14 +90,4 @@ if (hasApprouter) {

await merge(__dirname, 'files/values.yaml.hbs').into('chart/values.yaml', { with: project })
await merge({
'html5-apps-deployer': {
envFrom: [{
configMapRef: {
name: '{{ .Release.Name }}-html5-apps-deployer-configmap'
}
}]
}
}).into('chart/values.yaml')
await copy(join(__dirname, 'files', 'html5-apps-deployer-configmap.yaml')).to('chart', 'templates', 'html5-apps-deployer-configmap.yaml')
}
}
}

@@ -13,5 +13,9 @@ const https = require('node:https')

#javaHost = "http://localhost:8080"
#defaultOutputDir = "http"
#defaultOutputDir = "test/http"
#defaultAuth = `Authorization: Basic alice:`
// One-line description shown in the `cds add` CLI help listing for this template
static help() {
return 'add .http files for modeled services'
}
options() {

@@ -99,3 +103,3 @@ return {

if (c.name.includes('.texts')) continue
c.kind === 'entity' ? entities.push(this.#addNameAndReadOnly(c)) : c.kind === 'action' ? actions.push(c) : null
c.kind === 'entity' ? entities.push(this.#addEntityInfo(c)) : c.kind === 'action' ? actions.push(c) : null
}

@@ -116,6 +120,8 @@ }

#addNameAndReadOnly(e) {
#addEntityInfo(e) {
return {
name: e.name,
readOnly: (e["@readonly"] || e["@cds.autoexpose"] || e['@odata.singleton']) ?? false,
isDraft: e['@odata.draft.enabled'] ?? false,
keys: e.keys
}

@@ -160,15 +166,24 @@ }

requests.push(`${requestTitle}\nGET ${url}\n${headers}\n`)
if (e.readOnly) {
return
}
if (e.readOnly) return
const entityData = payload[`${e.name}`] ? payload[`${e.name}`][0] : null
if (!entityData) return
// TODO handle ids that are not named ID
if (!Object.keys(entityData).some(k => k === "ID")) return // no need to write requests for entities without ids
const postBody = JSON.stringify(entityData, null, 2)
const postBody = JSON.stringify(entityData, null, 2)
requests.push(`${requestTitle}\nPOST ${url}\n${headers}\n\n${postBody}\n`)
requests.push(`${requestTitle}\nPATCH ${url}/${entityData.ID}\n${headers}\n\n${postBody}\n`)
requests.push(`${requestTitle}\nDELETE ${url}/${entityData.ID}\n${headers}\n`)
let id
const compositeKey = Object.keys(e.keys).length > 1
if (compositeKey) {
id = e.keys.map(k => k.type === 'cds.String' ? `${k.name}='${entityData[k.name]}'` : `${k.name}=${entityData[k.name]}`).join(',')
} else {
id = entityData[Object.keys(e.keys)[0]]
}
if (e.isDraft) {
generateDraftRequests(requests, requestTitle, url, headers, postBody, id)
} else {
compositeKey ? id = `(${id})` : id = `/${id}`
requests.push(`${requestTitle}\nPOST ${url}\n${headers}\n\n${postBody}\n`)
requests.push(`${requestTitle}\nPATCH ${url}${id}\n${headers}\n\n${postBody}\n`)
requests.push(`${requestTitle}\nDELETE ${url}${id}\n${headers}\n`)
}
})

@@ -195,2 +210,11 @@

}
/**
 * Appends draft-flavored requests for one draft-enabled entity to the list of
 * .http request snippets: query drafts, create draft, a placeholder for the
 * server-generated draft id, then patch / prepare / activate of that draft.
 *
 * @param {string[]} requests - accumulator; request snippets are pushed onto it
 * @param {string} requestTitle - the `### ...` title line prefixed to each request
 * @param {string} url - entity-set URL the requests target
 * @param {string} headers - header line(s), e.g. an Authorization header
 * @param {string} postBody - JSON payload used as POST/PATCH body
 * @param {string|number} id - example key value(s) pre-filled into the @draftID variable
 */
function generateDraftRequests(requests, requestTitle, url, headers, postBody, id) {
  requests.push(`${requestTitle} Drafts \nGET ${url}?$filter=(IsActiveEntity eq false)\n${headers}\n`) // unencoded URL for better readability (client can handle this)
  requests.push(`${requestTitle} Drafts \nPOST ${url}\n${headers}\n\n${postBody}\n`)
  // typo fix in generated instruction text: 'secodKey' -> 'secondKey'
  requests.push(`\n### Please paste the server-generated draft id here e.g. ID=101 or ID=101,secondKey='stringValue' \n@draftID = ${id}\n`)
  requests.push(`${requestTitle} Patch Draft \nPATCH ${url}({{draftID}},IsActiveEntity=false)\n${headers}\n\n${postBody}\n`)
  // NOTE(review): the service name 'AdminService' is hard-coded in the bound draft
  // action paths below — presumably it should be derived from the entity's service;
  // confirm behavior for services other than AdminService.
  requests.push(`${requestTitle} Prepare Draft \nPOST ${url}({{draftID}},IsActiveEntity=false)/AdminService.draftPrepare\n${headers}\n\n{}\n`)
  requests.push(`${requestTitle} Activate Draft \nPOST ${url}({{draftID}},IsActiveEntity=false)/AdminService.draftActivate\n${headers}\n\n{}\n`)
}
}

@@ -197,0 +221,0 @@

const os = require('os')
const cds = require('../../../cds')
const { exists, copy, rimraf, fs, path } = cds.utils, { join } = path
const { exists, copy, rimraf, fs:{promises:{ mkdtemp }}, path } = cds.utils, { join } = path
const cmd = require('../../../util/command')
const mvn = require('../../mvn')
const term = require('../../../util/term')
const { bold, info, link } = require('../../../util/term')
const { URLS, OPTIONS: { NODEJS, JAVA }, COMMAND_ADD } = require('../../constants')
const { command, options } = cds.cli
module.exports = class JavaTemplate extends require('../../plugin') {
static help() {
return 'creates a Java-based project'
}
static hasInProduction() {
return exists('pom.xml') || cds.cli.options?.add?.has(JAVA)
return exists('pom.xml') || options?.add?.has(JAVA)
}
async canRun() {
if (cds.cli.command === COMMAND_ADD) {
if (command === COMMAND_ADD) {
throw `You can't change the runtime of an existing project.`
}
if (cds.cli.options.add?.has(NODEJS)) {
throw `Only one runtime per project is supported. Specify either ${term.bold(JAVA)} or ${term.bold(NODEJS)}.`
if (options.add?.has(NODEJS)) {
throw `Only one runtime per project is supported. Specify either ${bold(JAVA)} or ${bold(NODEJS)}.`
}

@@ -26,12 +31,13 @@ return true

async run() {
const { cmdLine, artifactId, archetypeVersion } = await mvn.getGenerateCmdArgs(path.basename(cds.root))
const projectName = path.basename(cds.root)
const { params, artifactId, archetypeVersion } = await mvn.init(projectName)
console.log(`Using Maven archetype version ${archetypeVersion}`)
const temp = await fs.promises.mkdtemp(join(os.tmpdir(), `${path.basename(cds.root)}_`))
const temp = await mkdtemp(join(os.tmpdir(), `${path.basename(cds.root)}_`))
try {
await cmd.spawnCommand('mvn', cmdLine, { cwd: temp })
await copy(join(temp, artifactId), cds.root);
await cmd.spawnCommand('mvn', params, { cwd: temp })
await copy(join(temp, artifactId)).to(cds.root)
} catch (err) {
if (err.code === 'ENOENT' && err.path === 'mvn') {
throw `Maven executable 'mvn' not found, follow ${term.info(URLS.MAVEN_INSTALL_HELP)} and install Maven on your machine.`
throw `Maven executable 'mvn' not found. Follow ${info(URLS.MAVEN_INSTALL_HELP)} and install Maven on your machine.`
}

@@ -45,4 +51,4 @@ throw err;

async finalize() {
console.log(`Learn about next steps at ${term.link(URLS.CAPIRE)}`)
console.log(`Learn about next steps at ${link(URLS.CAPIRE)}`)
}
}

@@ -10,2 +10,6 @@ const cds = require('../../../cds')

static help() {
return 'messaging via Apache Kafka'
}
static hasInProduction(env) {

@@ -12,0 +16,0 @@ if (exists('pom.xml')) {

@@ -18,2 +18,7 @@ const os = require("os");

module.exports = class LintTemplate extends require('../../plugin') {
// One-line description shown in the `cds add` CLI help listing for this template
static help() {
return 'configure cds lint'
}
constructor() {

@@ -141,3 +146,3 @@ super();

if (!eslintPluginLocal) {
this.missingNpmDependencies["names"].push("@sap/eslint-plugin-cds@^3.0.0");
this.missingNpmDependencies["names"].push("@sap/eslint-plugin-cds@^3");
this.missingNpmDependencies["msgs"].push("ESLint plugin for CDS v>=3.0.0");

@@ -158,3 +163,3 @@ }

if (!configPath) {
configPath = path.join(cds.root, "eslint.config.js");
configPath = path.join(cds.root, "eslint.config.mjs");
}

@@ -161,0 +166,0 @@ await io.sanitizeEslintConfig(configPath, this.customRuleExample);

@@ -6,2 +6,6 @@ const mvn = require('../../mvn')

static help() {
return 'database migration using Liquibase'
}
async canRun() {

@@ -8,0 +12,0 @@ const { isNodejs } = readProject()

@@ -8,2 +8,6 @@ const cds = require('../../../cds')

static help() {
return 'messaging via local event bus'
}
async canRun() {

@@ -10,0 +14,0 @@ const { isJava } = readProject()

{
"devDependencies": {
"@sap/cds-dk": "^7"
"@sap/cds-dk": ">=7"
}
}

@@ -11,2 +11,6 @@ const { join } = require('path')

static help() {
return 'Cloud Foundry deployment using mta.yaml'
}
static hasInProduction() {

@@ -13,0 +17,0 @@ return exists('mta.yaml')

@@ -5,2 +5,6 @@ const { MULTITENANCY, TOGGLES, EXTENSIBILITY } = require('../../constants').OPTIONS

static help() {
return 'multitenancy + toggles + extensibility'
}
requires() {

@@ -7,0 +11,0 @@ return [MULTITENANCY, TOGGLES, EXTENSIBILITY]

const { join } = require('path')
const cds = require('../../../cds')
const { read, write, copy } = cds.utils
const { read, write } = cds.utils
const mvn = require('../../mvn')
const { readProject } = require('../../projectReader')
const { merge, sort, removeFromYAML } = require('../../merge')
const { merge, sort } = require('../../merge')
const { copyRenderedJSON } = require('../../../util/fs')

@@ -17,2 +17,6 @@ const {

static help() {
return 'schema-based multitenancy support'
}
static hasInProduction(env) {

@@ -32,3 +36,3 @@ const { options } = cds.cli.options

await write('package.json', packageJson, { spaces: 2 })
await mvn.execMvn(mvn.getAddArgs('mtx'))
await mvn.add('mtx')
}

@@ -41,3 +45,3 @@ if (isNodejs) await sort('package.json', 'dependencies')

const project = readProject()
const { isNodejs, isJava, hasMta, hasHelm, hasHelmUnifiedRuntime, hasXsuaa, hasHana, srvPath, hasApprouter } = project
const { isNodejs, isJava, hasMta, hasHelm, hasHelmUnifiedRuntime, hasXsuaa, hasHana, hasContainerize, srvPath, hasApprouter } = project

@@ -90,31 +94,49 @@ if (hasMta) {

}] : []
await merge(__dirname, 'files/values.yaml.hbs').into('chart/values.yaml', { with: project, overwrites })
const envFromObject = {
envFrom: [{
configMapRef: {
name: "{{ .Release.Name }}-mtxs-configmap"
const domain = hasApprouter ? 'svc.cluster.local:8080' : '{{ .Values.global.domain }}'
const applicationWorkload = hasApprouter ? 'approuter' : 'srv'
const serviceCommunicatingWithSaasReg = hasApprouter ? 'approuter' : isNodejs ? 'sidecar' : 'srv';
await merge({
'saas-registry': {
'parameters': {
xsappname: `${project.appName}-{{ .Release.Namespace }}`,
appName: `${project.appName}-{{ .Release.Namespace }}`,
appUrls: {
getDependencies: `https://{{ .Release.Name }}-${serviceCommunicatingWithSaasReg}-{{ .Release.Namespace }}.{{ .Values.global.domain }}${isNodejs ? '/-/cds/saas-provisioning' : '/mt/v1.0/subscriptions'}/dependencies`,
onSubscription: `https://{{ .Release.Name }}-${serviceCommunicatingWithSaasReg}-{{ .Release.Namespace }}.{{ .Values.global.domain }}${isNodejs ? '/-/cds/saas-provisioning/tenant/{tenantId}' : '/mt/v1.0/subscriptions/tenants/{tenantId}'}`,
}
}
}]
},
...(isNodejs) && { sidecar: {
env: {
SUBSCRIPTION_URL: `https://\${tenant_subdomain}-{{ .Release.Name }}-${applicationWorkload}-{{ .Release.Namespace }}.{{ .Values.global.domain }}`
}
}},
...(isJava) && { srv: {
env: {
CDS_MULTITENANCY_APPUI_URL: `https://{{ .Release.Name }}-${applicationWorkload}-{{ .Release.Namespace }}.{{ .Values.global.domain }}`,
CDS_MULTITENANCY_SIDECAR_URL: `http${hasApprouter ? '' : 's'}://{{ .Release.Name }}-sidecar${hasApprouter ? '.' : '-'}{{ .Release.Namespace }}.${domain}`
}
}}
}).into('chart/values.yaml',{ forceOverwrite: true })
}
if (hasContainerize) {
const deletions = []
if (hasHana) {
deletions.push({
item: {
in: 'modules',
where: { name: `${project.appName}-hana-deployer` }
}
})
}
await merge({
...(isNodejs) && { sidecar: envFromObject },
...(isJava) && { srv: envFromObject },
'saas-registry': {
parametersFrom: [
{
secretKeyRef: {
name: "{{ .Release.Name }}-saas-registry-secret",
key: "parameters"
}
}
]
}
}).into('chart/values.yaml', { with: project })
await removeFromYAML(join('chart', 'values.yaml'), ['srv.expose', 'srv.networkSecurity'])
await copy(join(__dirname, 'files', `mtxs-configmap-${isJava ? 'java' : 'nodejs'}${hasApprouter ? '-approuter' : ''}.yaml`)).to('chart', 'templates', 'mtxs-configmap.yaml') // REVISIT: Move to build task
await copy(join(__dirname, 'files', 'saas-registry-secret.yaml')).to('chart', 'templates', 'saas-registry-secret.yaml') // REVISIT: Move to build task
const additions = [{
in: 'modules',
where: { name: `${project.appName}-sidecar` }
}]
await merge(__dirname, '/files/containerize.yaml.hbs').into('containerize.yaml', { with: project, additions, deletions })
}

@@ -121,0 +143,0 @@

@@ -11,2 +11,6 @@ const { join } = require('path')

static help() {
return 'creates a Node.js-based project'
}
static hasInProduction() {

@@ -13,0 +17,0 @@ return !exists('pom.xml') && !cds.cli.options?.add?.has(JAVA)

@@ -9,2 +9,6 @@ const { readProject } = require('../../projectReader')

static help() {
return 'SAP BTP Notification Service'
}
requires() {

@@ -11,0 +15,0 @@ return [DESTINATION]

@@ -8,2 +8,6 @@ const { join } = require('path');

static help() {
return 'CI/CD pipeline integration'
}
async canRun() {

@@ -10,0 +14,0 @@ if (cds.cli.options.force) {

@@ -10,2 +10,6 @@ const cds = require('../../../cds')

static help() {
return 'database support for PostgreSQL'
}
async canRun() {

@@ -25,3 +29,3 @@ const { hasMta, hasHelm, hasHelmUnifiedRuntime } = readProject()

project.shortcut = !(await read(configFile)).cds?.requires?.db?.kind
await merge(__dirname, 'files/package.json.hbs').into(configFile, { project })
await merge(__dirname, 'files/package.json.hbs').into('package.json', { project })
if (isJava) await mvn.add('postgresql')

@@ -28,0 +32,0 @@ }

@@ -9,2 +9,6 @@ const cds = require('../../../cds')

static help() {
return 'messaging via Redis'
}
async canRun() {

@@ -11,0 +15,0 @@ const { hasMta, hasHelm, hasHelmUnifiedRuntime, isJava } = readProject()

@@ -14,3 +14,3 @@ {

"AdminService": {
"uri": "/odata/v4/admin/",
"uri": "odata/v4/admin/",
"type": "OData",

@@ -21,2 +21,14 @@ "settings": {

}
},
"crossNavigation": {
"inbounds": {
"intent-Books-manage": {
"signature": {
"parameters": {},
"additionalParameters": "allowed"
},
"semanticObject": "Books",
"action": "manage"
}
}
}

@@ -23,0 +35,0 @@ },

@@ -14,3 +14,3 @@ {

"CatalogService": {
"uri": "/odata/v4/catalog/",
"uri": "odata/v4/catalog/",
"type": "OData",

@@ -24,3 +24,3 @@ "settings": {

"inbounds": {
"intent1": {
"Books-display": {
"signature": {

@@ -35,3 +35,3 @@ "parameters": {

},
"additionalParameters": "ignored"
"additionalParameters": "allowed"
},

@@ -38,0 +38,0 @@ "semanticObject": "Books",

@@ -19,4 +19,4 @@ const cds = require('@sap/cds')

// Add some discount for overstocked books
this.after ('READ','ListOfBooks', each => {
if (each.stock > 111) each.title += ` -- 11% discount!`
this.after ('each','ListOfBooks', book => {
if (book.stock > 111) book.title += ` -- 11% discount!`
})

@@ -23,0 +23,0 @@

const { join } = require('path')
const cds = require('../../../cds'), { exists, read, write, copy } = cds.utils
const { env4, readProject } = require('../../projectReader')
const { merge } = require('../../merge')
const mvn = require('../../mvn')

@@ -8,2 +9,6 @@

static help() {
return 'add sample files including Fiori UI'
}
async run() {

@@ -19,15 +24,17 @@ const { db, srv, app } = env4('production').folders

await copy(join(__dirname, 'files', language)).to(srv)
await copy(join(__dirname, 'files', 'app')).to(app)
const { appName, appPath, appUIPaths } = readProject()
// manifest.json must be unique // REVISIT: Check if relevant for `cds add sample` in Java scenario
await Promise.all(appUIPaths.map(async p => {
const manifest = await read(join(appPath, p, 'webapp/manifest.json'))
manifest['sap.app'].id = appName + '.' + p
await write(join(appPath, p, 'webapp/manifest.json'), manifest, { spaces: 2 })
}))
await copy(join(__dirname, 'files/app')).to(app)
} else if (language === 'java') {
await mvn.add('sample')
}
const { appName, appPath, appUIPaths } = readProject()
await merge(__dirname, 'files/package.json.hbs').into('package.json', { with: { appPath }})
// manifest.json must be unique
await Promise.all(appUIPaths.map(async p => {
const manifest = await read(join(appPath, p, 'webapp/manifest.json'))
manifest['sap.app'].id = appName + '.' + p
await write(join(appPath, p, 'webapp/manifest.json'), manifest, { spaces: 2 })
}))
}
}

@@ -7,2 +7,6 @@ const mvn = require('../../mvn')

static help() {
return 'database support for SQLite'
}
async run() {

@@ -9,0 +13,0 @@ const project = readProject()

@@ -10,2 +10,6 @@ const { join } = require('path')

static help() {
return 'add minimal sample files'
}
async run() {

@@ -12,0 +16,0 @@ const { isJava } = readProject()

@@ -6,2 +6,6 @@ const { readProject } = require('../../projectReader')

static help() {
return 'allow dynamically toggled features'
}
static hasInProduction(env) {

@@ -8,0 +12,0 @@ return !!env.requires?.toggles

@@ -9,2 +9,6 @@ const { join } = require('path')

module.exports = class TyperTemplate extends require('../../plugin') {
// One-line description shown in the `cds add` CLI help listing for this template
static help() {
return 'type generation for CDS models'
}
async canRun() {

@@ -11,0 +15,0 @@ const { isJava } = readProject()

const cds = require('../../../cds')
const { read } = cds.utils
const term = require('../../../util/term')
const { readProject } = require('../../projectReader')

@@ -10,2 +9,6 @@ const { merge } = require('../../merge')

static help() {
return 'authentication via XSUAA'
}
static hasInProduction(env) {

@@ -54,11 +57,12 @@ return env.requires?.auth?.kind === 'xsuaa'

await merge(__dirname, 'files/values.yaml.hbs').into('chart/values.yaml', { with: project })
}
}
finalize() {
const { hasUI, hasApprouter } = readProject()
if (hasUI && !hasApprouter) {
console.log(`\nAlso run ${term.bold('cds add approuter --for production')} to route and authenticate the paths accessed via the UI.`)
await merge({
xsuaa: {
parameters: {
xsappname: `${project.appName}-{{ .Release.Namespace }}`
}
}
}).into('chart/values.yaml')
}
}
}

@@ -37,3 +37,3 @@ const os = require("os");

${cdslintCmd} [beta]
${cdslintCmd}

@@ -40,0 +40,0 @@ Runs environment checks and/or checks the specified models

@@ -0,6 +1,5 @@

const { read, write, exists, yaml } = require('../cds').utils;
const { readJSONC } = require("../util/fs");
const path = require("path");
const yaml = require("@sap/cds-foss")("yaml");
const term = require("../util/term");
const { read, write, exists } = require('../cds').utils;
const { readJSONC } = require("../util/fs");
const JSONC = require('../util/jsonc');

@@ -229,3 +228,2 @@

case "eslint.config.cjs":
case "eslint.config.mjs":
if (exists(configPath)) {

@@ -249,8 +247,8 @@ logger.log(

plugins: {
"@sap/cds": cds
'@sap/cds': cds
},
"files": [
files: [
...cds.configs.recommended.files
],
"rules": {
rules: {
...cds.configs.recommended.rules

@@ -263,2 +261,22 @@ }

break;
case "eslint.config.mjs":
if (exists(configPath)) {
logger.log(
`${term.warn(
`\n\nPlease add the following to your "${configFile}":` +
'\n\n 1. At the top of your file, import the following:')}` +
`\n\n ${term.bold('import cdsPlugin from \'@sap/eslint-plugin-cds\'')}` +
`${term.warn(
'\n\n 2. In your export, add the following to the front of the array:')}` +
`\n\n ${term.bold('cdsPlugin.configs.recommended')}`
);
} else {
await write(configPath, `
import cds from '@sap/cds/eslint.config.mjs'
import cdsPlugin from '@sap/eslint-plugin-cds'
export default [...cds.recommended, cdsPlugin.configs.recommended]
`)
}
break;
default:

@@ -265,0 +283,0 @@ break;

@@ -15,5 +15,9 @@ const axios = require('axios');

if (authData?.access_token) {
let message = 'Retrieved access token';
params.set('token', authData.access_token);
if (authData.expires_in) {
params.set('tokenExpirationDate', Date.now() + authData.expires_in * 1000);
if (params.get('saveData')) {
message += ` (expires on ${(new Date(params.get('tokenExpirationDate'))).toLocaleString()})`;
}
} else {

@@ -27,2 +31,3 @@ params.delete('tokenExpirationDate');

}
console.log(message + '.');
}

@@ -36,2 +41,3 @@ if (authData?.passcode_url) {

const d = {};
const ignored = {};
if (params.has('subdomain')) {

@@ -42,6 +48,9 @@ d.subdomain = params.get('subdomain');

d.refresh_token = params.get('refreshToken');
const param = ['passcode', 'clientid'].find(param => params.has(param));
if (param) {
ignored[param] = 'refresh token is present';
}
} else if (params.has('passcode')) {
d.passcode = params.get('passcode');
}
if (params.has('clientsecret')) {
} else if (params.has('clientsecret')) {
d.clientid = params.get('clientid');

@@ -55,8 +64,8 @@ d.clientsecret = params.get('clientsecret');

return method === 'post'
? { url, data }
: { url: `${url}?${data}`, data: undefined };
? { url, data, ignored }
: { url: `${url}?${data}`, data: undefined, ignored };
}
async function retrieveTokenOrPasscodeUrl(params) {
if (params.has('token') || !params.has('refreshToken') && !params.has('passcode') && params.has('passcodeUrl')) {
async function fetchToken(params) {
if (params.has('token') || !(params.has('refreshToken') || params.has('passcode') || !params.has('passcodeUrl'))) {
return;

@@ -66,29 +75,46 @@ }

let response, error;
reqParams: for (const method of ['post', 'get']) {
do {
const { url, data } = reqParams(method, params);
try {
DEBUG?.(`Getting authentication token or passcode URL from ${method.toUpperCase()} ${params.obfuscateQueryParams(url)}`);
response = await axios[method](url, data);
} catch (e) {
if (e.status === 404) { // may represent 405 in case of wrong method
continue reqParams;
} else if (params.has('refreshToken')) {
DEBUG?.('Discarding invalid refresh token');
params.delete('refreshToken');
} else {
error = e;
break reqParams;
}
const method = 'post';
do {
const { url, data, ignored } = reqParams(method, params);
if (Object.keys(ignored).length) {
console.log(`Ignoring parameters for fetching token: ${Object.entries(ignored).map(([param, reason]) => `${param} (${reason})`).join(', ')}.`);
}
DEBUG?.(`Getting authentication token from ${method.toUpperCase()} ${params.obfuscateQueryParams(url)}`);
error = undefined;
try {
response = await axios[method](url, data);
break;
} catch (e) {
error = e;
DEBUG?.(`HTTP status ${e.status}`);
if (params.has('refreshToken')) {
DEBUG?.('Discarding invalid refresh token');
params.delete('refreshToken');
} else {
break;
}
} while (!response);
}
}
} while (!response);
const data = response?.data ?? error?.auth;
assign(params, data);
if (error) {
throw new Error(REASONS.invalid_scope.test(error.message)
? 'Token has invalid scope. Check if your user has the required roles'
: `No valid passcode or token provided. Get a passcode${
params.get('passcodeUrl') ? ' from ' + params.get('passcodeUrl') : ''
} and re-run the command with '-p <passcode>'`, { cause: error });
let message;
if (REASONS.invalid_scope.test(error.message)) {
message = 'token has invalid scope. Check if your user has the required roles';
} else if (error.status === 404) {
message = 'token endpoint not found. Check if MTX is running with extensibility enabled';
} else if (error.message.includes('<html')) { // should already be caught when fetching passcode URL
message = 'HTML response received. Check if route to MTX is configured correctly in App Router';
} else if (error.status === 401) {
message = `invalid credentials`;
if (params.has('passcode')) {
message += `. Check if passcode is correct`;
if (params.has('passcodeUrl')) {
message += `. Passcode URL: ${params.get('passcodeUrl')}`;
}
}
} else {
message = 'error on token request';
}
throw new Error(message + '.', { cause: error });
}

@@ -107,3 +133,3 @@ }

try {
await retrieveTokenOrPasscodeUrl(params);
await fetchToken(params);
} catch (error) {

@@ -116,2 +142,10 @@ if (params.get('saveData')) {

params.set('reqAuth', { headers: { Authorization: 'Bearer ' + params.get('token') } });
if (params.has('refreshToken') && params.get('saveData')) {
if (params.get('saveRefreshToken')) {
console.log('Refresh Token saved. Token will be refreshed automatically.');
} else {
console.log(`Note: Refresh Token is not saved by default for security reasons. Use '--save-refresh-token' to override.`);
params.delete('refreshToken');
}
}
}

@@ -118,0 +152,0 @@

@@ -26,3 +26,3 @@ const fs = require('fs');

console.log(`Downloading migrated @sap/mtx extension project(s)`);
const downloadedTgz = await this.getTgz(params);
const downloadedTgz = await this.pullTgz(params);
await fs.promises.writeFile(downloadPath, downloadedTgz);

@@ -33,3 +33,3 @@

static async getTgz(params) {
static async pullTgz(params) {
const tagRule = params.get('tagRule');

@@ -36,0 +36,0 @@ const defaultTag = params.get('defaultTag');

@@ -1,4 +0,2 @@

const Activate = require('./activate');
const AuthManager = require('./auth_manager');
const Extend = require('./extend');
const { SettingsManager } = require('./settings_manager');

@@ -8,4 +6,2 @@ const Question = require('./util/question');

module.exports = {
extend: Extend.run,
activate: Activate.run,
login: AuthManager.login,

@@ -12,0 +8,0 @@ logout: AuthManager.logout,

@@ -39,3 +39,3 @@ const cds = require('../cds');

constructor(name,
val,
val = undefined,
options = {}) {

@@ -48,2 +48,7 @@

this.allowedValues = null;
this.type = val === undefined
? this.allowedValues
? typeof this.allowedValues[0]
: 'string' // default
: typeof val;

@@ -107,9 +112,5 @@ this.name = name;

}
let expectedType = typeof this.val;
if (expectedType === 'undefined' && this.allowedValues) {
expectedType = typeof this.allowedValues[0];
if (typeof val !== this.type) {
throw `Failed to set '${(this.name)}' = ${JSON.stringify(val)}: expected value of type ${this.type}`;
}
if (typeof val !== expectedType) {
throw `Failed to set '${(this.name)}' = ${JSON.stringify(val)}: expected value of type ${expectedType}`;
}
if (this.allowedValues && !(this.allowedValues).includes(val)) {

@@ -131,3 +132,3 @@ throw `Failed to set '${(this.name)}' = ${JSON.stringify(val)}: expected one of ${JSON.stringify(this.allowedValues)}`;

? this.val
: this.obfuscate === ObfuscationLevel.partial && process.env.DEBUG && typeof this.val === 'string'
: this.obfuscate === ObfuscationLevel.partial && process.env.DEBUG && this.type === 'string'
? this.val.slice(0, DISCLOSURE_LENGTH) + '...' + this.val.slice(-DISCLOSURE_LENGTH)

@@ -168,14 +169,7 @@ : '...';

// - non-persisted
this.projectFolder = new Param('projectFolder', '');
this.projectFolder = new Param('projectFolder');
this.directory = this.projectFolder.newAlias('directory');
this.tagRule = new Param('tagRule', '');
this.defaultTag = new Param('defaultTag', '');
// - non-persisted, internal
this.reqAuth = new Param('reqAuth', null, { obfuscate: ObfuscationLevel.full, internal: true });
this.clearOtherTokenStorage = new Param('clearOtherTokenStorage', false, { internal: true });
// - persisted
this.appUrl = new Param('appUrl', '', { persist: Persistence.setting }); // any trailing slash (/) is removed
this.appUrl = new Param('appUrl', undefined, { persist: Persistence.setting }); // any trailing slash (/) is removed
this.url = this.appUrl.newAlias('url');

@@ -185,28 +179,31 @@ this.from = this.appUrl.newAlias('from');

this.at = this.appUrl.newAlias('at');
this.subdomain = new Param('subdomain', '', { persist: Persistence.setting }); // only needed to fetch token
this.passcodeUrl = new Param('passcodeUrl', '', { persist: Persistence.setting, abbreviate: true });
this.subdomain = new Param('subdomain', undefined, { persist: Persistence.setting }); // only needed to fetch token
this.passcodeUrl = new Param('passcodeUrl', undefined, { persist: Persistence.setting, abbreviate: true });
this.passcode_url = this.passcodeUrl.newAlias('passcode_url');
this.tokenUrl = new Param('tokenUrl', '', { persist: Persistence.setting, abbreviate: true })
this.tokenUrl = new Param('tokenUrl', undefined, { persist: Persistence.setting, abbreviate: true })
// AUTH PARAMS
// - non-persisted
this.passcode = new Param('passcode', '', { obfuscate: ObfuscationLevel.partial });
this.passcode = new Param('passcode', undefined, { obfuscate: ObfuscationLevel.partial });
this.password = new Param('password', undefined, { obfuscate: ObfuscationLevel.full });
this.clientid = new Param('clientid', undefined);
this.clientsecret = new Param('clientsecret', undefined, { obfuscate: ObfuscationLevel.partial });
this.key = new Param('key', undefined, { obfuscate: ObfuscationLevel.partial }); // private key of client certificate
this.tokenStorage = new Param('tokenStorage', undefined, { allowedValues: ['plain', 'keyring'] });
this.clearOtherTokenStorage = new Param('clearOtherTokenStorage', false, { internal: true });
this.plain = new Param('plain', false, { internal: true }); // for backward compatibility - superseded by tokenStorage
this.saveData = new Param('saveData', false, { internal: true });
this.renewLogin = new Param('renewLogin', false);
this.password = new Param('password', '', { obfuscate: ObfuscationLevel.full });
this.clientid = new Param('clientid', '');
this.clientsecret = new Param('clientsecret', '', { obfuscate: ObfuscationLevel.partial });
this.key = new Param('key', '', { obfuscate: ObfuscationLevel.partial }); // private key of client certificate
this.renewLogin = new Param('renewLogin', false, { internal: true });
this.reqAuth = new Param('reqAuth', undefined, { internal: true, type: 'object', obfuscate: ObfuscationLevel.full });
// - persisted
this.username = new Param('username', '', { persist: Persistence.setting }); // only saved against localhost (non-productive); as setting due to ambiguous app URL
this.username = new Param('username', undefined, { persist: Persistence.setting }); // only saved against localhost (non-productive); as setting due to ambiguous app URL
this.isEmptyPassword = new Param('isEmptyPassword', false, { internal: true, persist: Persistence.setting }); // extra param so we can ensure we never save a password
this.token = new Param('token', '', { persist: Persistence.auth, obfuscate: ObfuscationLevel.partial });
this.token = new Param('token', undefined, { persist: Persistence.auth, obfuscate: ObfuscationLevel.partial });
this.tokenExpirationDate = new Param('tokenExpirationDate', Number.MAX_SAFE_INTEGER, { persist: Persistence.auth });
this.refreshToken = new Param('refreshToken', '', { persist: Persistence.auth, obfuscate: ObfuscationLevel.partial });
this.refreshToken = new Param('refreshToken', undefined, { persist: Persistence.auth, obfuscate: ObfuscationLevel.partial });
}
}
// TODO pass these as extra arguments to lib functions
/**

@@ -220,9 +217,6 @@ * All known params including those needed for specific commands.

// `extend` PARAMS
this.force = new Param('force', false);
this.templates = new Param('templates', false);
this["download-migrated-projects"] = new Param('download-migrated-projects', false)
this.defaultTag = new Param('defaultTag', undefined);
this.tagRule = new Param('tagRule', undefined);
// `activate` PARAMS
this.undeploy = new Param('undeploy', false);
this.wsdl = new Param('wsdl', false);
// `push` PARAMS
this.sync = new Param('sync', false);

@@ -232,6 +226,7 @@ this.async = new Param('async', false);

// `subscribe` PARAMS
this.body = new Param('body', '', { persist: Persistence.none });
this.body = new Param('body', undefined, { internal: true, persist: Persistence.none });
// `login` PARAMS
this.skipToken = new Param('skipToken', false, { internal: true });
this.saveRefreshToken = new Param('saveRefreshToken', false, { internal: true });

@@ -243,3 +238,2 @@ // `logout` PARAMS

this["clear-invalid"] = this.clearInvalid.newAlias('clear-invalid');
}

@@ -423,3 +417,3 @@ }

/**
* Return an object mapping each of the contained params' names to its corresponding value.
* Return an object mapping the names of each of the contained params to their corresponding values.
* @param persistence the type of persistence to filter for (omit to disable filtering)

@@ -426,0 +420,0 @@ */

@@ -5,3 +5,3 @@ const cds = require ('../cds')

const fs = require('fs');
const { join } = require('path');
const { join, dirname } = require('path');
const axios = require('axios');

@@ -14,71 +14,179 @@

const { handleHttpError } = require('./util/errors');
const { grepQR } = require('./util/fs');
const { escapeRegex } = require('./util/strings');
function tabSize(jsonString, defaultSize) {
return /^ +/m.exec(jsonString)?.[0].length ?? defaultSize;
}
function getPackageContentsAndTabSize(pkgPath, defaultContents = undefined, defaultTabSize = 0) {
try {
const contents = fs.readFileSync(pkgPath, { encoding: 'utf-8' });
const tab = tabSize(contents, defaultTabSize);
return [JSON.parse(contents), tab];
} catch (error) {
if (defaultContents) {
return [defaultContents, defaultTabSize];
}
throw getMessage('package.json missing or unreadable', { error });
}
}
function writeObj(obj, objPath, tabSize) {
if (!fs.existsSync(dirname(objPath))) {
fs.mkdirSync(dirname(objPath), { recursive: true });
}
fs.writeFileSync(objPath, JSON.stringify(obj, undefined, tabSize), { encoding: 'utf-8' });
}
const INVALID_BASE_MODEL_NAME = /^[._]/; // See https://docs.npmjs.com/cli/v10/configuring-npm/package-json#name
module.exports = class Pull {
static async run(paramValues) {
if (!tar) {
throw `cds pull requires @sap/cds version >= 6.2. Current version is ${cds.version}\n`;
}
const params = await login(paramValues);
const url = params.get('appUrl')
const projectFolder = params.get('projectFolder');
const env = cds.env.for('cds', projectFolder);
const target = join(projectFolder, 'node_modules', this.getAppPackageName(env));
const subdomain = params.get('subdomain') // REVISIT: Why are subdomains not transparently encoded in URLs?
return new Pull(params).run();
}
this.amendPackageJson(projectFolder, env);
params;
projectFolder;
env;
baseModelName;
packagePath;
baseModelFolder;
#amendPackage = {};
#baseModelWorkspace;
#packageContentsAndTabSize;
console.log(`Pulling app base model`, Object.assign({ from: url, to: local(target) }, subdomain && {subdomain}))
static get DEFAULTS() {
return {
tabSize: 2,
baseModelFolder: '.base',
baseModelName: 'base-model'
};
}
fs.rmSync(target, { force: true, recursive: true });
fs.mkdirSync(target, { recursive: true });
constructor(params) {
this.params = params;
this.projectFolder = params.get('projectFolder');
this.env = cds.env.for('cds', this.projectFolder);
this.baseModelName = this.env.extends;
this.packagePath = join(this.projectFolder, 'package.json');
this.baseModelFolder = join(this.projectFolder, this.baseModelWorkspace);
}
const baseModelTgz = await this.getTgz(params);
await tar.xz(baseModelTgz).to(target);
async run() {
console.log(`Pulling app base model`, {
from: this.params.get('appUrl'),
to: local(this.baseModelFolder),
...(this.params.has('subdomain') && { subdomain: this.params.get('subdomain') })
});
console.log(`Finished. Refer to the base model like so: using from '${this.getAppPackageName(env)}'`);
this.validateBaseModelName();
fs.rmSync(this.baseModelFolder, { force: true, recursive: true });
fs.mkdirSync(this.baseModelFolder, { recursive: true });
const baseModelTgz = await this.pullTgz();
await tar.xz(baseModelTgz).to(this.baseModelFolder);
Pull.transformBasePackage(this.baseModelFolder, this.baseModelName);
this.amendExtPackage();
console.log(`Finished. Refer to the base model like so: using from '${this.baseModelName}'`);
const baseModelSymlink = join(this.projectFolder, 'node_modules', this.baseModelName);
if (!fs.statSync(baseModelSymlink, { throwIfNoEntry: false })?.isSymbolicLink()) {
console.log(`Note: the base model has been pulled to an NPM workspace in folder '${local(this.baseModelFolder)}'. Run 'npm install' to install it.`);
}
}
static getAppPackageName(env) {
return env.extends || '_base';
get baseModelWorkspace() {
const defaultFolder = Pull.DEFAULTS.baseModelFolder;
if (!this.#baseModelWorkspace) {
if (this.packageContents.workspaces) {
if (this.packageContents.workspaces.includes(defaultFolder)) {
this.#baseModelWorkspace = defaultFolder;
} else {
this.#baseModelWorkspace = this.packageContents.workspaces
.map(ws => String(ws))
.find(ws => {
try {
return require(join(this.projectFolder, ws, 'package.json')).name === this.baseModelName;
} catch (e) { /* ignore */ }
}) ?? defaultFolder;
}
} else {
this.#amendPackage.workspaces = [defaultFolder];
this.#baseModelWorkspace = defaultFolder;
}
}
return this.#baseModelWorkspace;
}
static async getTgz(params) {
const pullUrl = `${(params.get('appUrl'))}/-/cds/extensibility/pull`;
const options = { ...params.get('reqAuth'), responseType: 'arraybuffer' };
validateBaseModelName() {
if (!this.baseModelName) {
throw getMessage(`Missing configuration 'cds.extends' in package.json`);
}
if (this.baseModelName.match(INVALID_BASE_MODEL_NAME)) {
console.warn(`Invalid base-model name '${this.baseModelName}': must not start with '_' or '.'. ` +
`Using '${Pull.DEFAULTS.baseModelName}' instead.`);
const invalidNameInUse = grepQR(this.projectFolder, ['.cds', '.csn'], new RegExp(`\\b${escapeRegex(this.baseModelName)}\\b`));
if (invalidNameInUse) {
console.error(`Some model files seem to reference the invalid name. Please update them manually.`);
}
this.baseModelName = Pull.DEFAULTS.baseModelName;
this.#amendPackage.extends = this.baseModelName;
}
}
async pullTgz() {
const pullUrl = `${(this.params.get('appUrl'))}/-/cds/extensibility/pull`;
const options = { ...this.params.get('reqAuth'), responseType: 'arraybuffer' };
return axios.post(pullUrl, {}, options)
.then(response => Buffer.from(response.data))
.catch(error => handleHttpError(error, params, { url: pullUrl }));
.catch(error => handleHttpError(error, this.params, { url: pullUrl }));
}
static getPackageJsonIndentation(packageContents) {
return /^ +/m.exec(packageContents)?.[0].length ?? 2;
}
static transformBasePackage(baseModelFolder, baseModelName) {
const baseRcPath = join(baseModelFolder, '.cdsrc.json');
const baseRc = getPackageContentsAndTabSize(baseRcPath, {}, this.DEFAULTS.tabSize)[0];
fs.rmSync(baseRcPath, { force: true });
static formatPackageJson(newObj, oldContents) {
return JSON.stringify(newObj, undefined, this.getPackageJsonIndentation(oldContents));
const basePkgPath = join(baseModelFolder, 'package.json');
const basePkg = {
...getPackageContentsAndTabSize(basePkgPath, {}, this.DEFAULTS.tabSize)[0],
name: baseModelName,
cds: baseRc
};
writeObj(basePkg, basePkgPath, this.DEFAULTS.tabSize);
}
static amendPackageJson(projectFolder, env) {
if ('extends' in env) {
amendExtPackage() {
if (Object.keys(this.#amendPackage).length === 0) {
return;
}
console.log(`Amending extension package.json with project configuration`);
const packageJson = join(projectFolder, 'package.json');
let packageContents;
try {
packageContents = JSON.parse(fs.readFileSync(packageJson, { encoding: 'utf-8' }));
} catch (error) {
throw getMessage('package.json missing or unreadable', { error });
fs.writeFileSync(this.packagePath, JSON.stringify({
...this.packageContents,
...this.#amendPackage
}, undefined, this.tabSize));
this.#packageContentsAndTabSize = undefined;
}
get packageContents() {
if (!this.#packageContentsAndTabSize) {
this.#packageContentsAndTabSize = getPackageContentsAndTabSize(this.packagePath, undefined, Pull.DEFAULTS.tabSize);
}
fs.writeFileSync(packageJson, this.formatPackageJson({
...packageContents,
cds: {
...(packageContents.cds ?? {}),
extends: this.getAppPackageName(projectFolder) // Default unless configured through current process.env
}
}));
return this.#packageContentsAndTabSize[0];
}
get tabSize() {
if (!this.#packageContentsAndTabSize) {
this.#packageContentsAndTabSize = getPackageContentsAndTabSize(this.packagePath, undefined, Pull.DEFAULTS.tabSize);
}
return this.#packageContentsAndTabSize[1];
}
}

@@ -1,2 +0,2 @@

const cds = require ('../cds'), { local } = cds.utils;
const { utils: { local }} = require ('../cds');
const { join } = require('path');

@@ -36,3 +36,3 @@ const { readFileSync, existsSync } = require('fs');

const subdomain = params.get('subdomain');
const target = Object.assign({ url }, subdomain && { subdomain });
const target = { url, ...subdomain && { subdomain } };

@@ -55,3 +55,3 @@ console.log(`\nPushing extension '${extensionName}' from`, { src }, 'to', target);

if (!schemaRegex.test(src)) {
throw getMessage(`Nonexistent path: ${src}`, { command: 'push' });
throw getMessage(`Non-existent path: ${src}`, { command: 'push' });
}

@@ -58,0 +58,0 @@ try {

@@ -11,2 +11,3 @@ const fs = require('fs');

const isBas = require('./util/env');
const { getMessage } = require('./util/logging');
const Question = require('./util/question');

@@ -38,4 +39,5 @@ const { httpSchema, httpsSchema, schemaRegex, localhostRegex } = require('./util/urls');

const MTX_FULLY_QUALIFIED = 'com.sap.cds.mtx';
const OAUTH_PATH_OLD_MTX = '/mtx/v1/oauth/token';
const OAUTH_PATH_STREAMLINED_MTX = '/-/cds/login/token';
const OAUTH_PATH_LEGACY = '/mtx/v1/oauth/token';
const OAUTH_PATH = '/-/cds/login/token';
const OAUTH_META_PATH = '/-/cds/login/authorization-metadata';

@@ -97,3 +99,9 @@ const SETTINGS_DIR = path.join(os.homedir(), CONFIG_SUBDIRS[os.platform()] || '', MTX_FULLY_QUALIFIED);

async function getTokenBaseUrl(params, renewUrl) {
function appUrlWith(pathname, params) {
const url = new URL(params.get('appUrl'));
url.pathname = url.pathname.replace(/\/*$/, pathname);
return url.toString();
}
async function getTokenUrl(params, renewUrl) {
if (params.has('tokenUrl') && !renewUrl) {

@@ -105,19 +113,29 @@ return params.get('tokenUrl');

}
const appUrlWith = pathname => {
const url = new URL(params.get('appUrl'));
url.pathname = url.pathname.replace(/\/*$/, pathname);
return url.toString();
};
return appUrlWith(OAUTH_PATH, params);
}
let path = OAUTH_PATH_OLD_MTX;
async function fetchPasscodeUrl(params) {
const url = appUrlWith(OAUTH_META_PATH, params);
DEBUG?.(`Trying to get passcode URL from GET`, url);
let responseData;
try {
await axios.head(appUrlWith(OAUTH_PATH_STREAMLINED_MTX));
path = OAUTH_PATH_STREAMLINED_MTX;
const response = await axios.get(url /* no auth required */);
responseData = response.data;
} catch (error) {
if (![401, 404].includes(error.status)) {
throw error;
if (error.status === 404) {
DEBUG?.(`Request unsupported by server, ignoring (${error.message.replace(/ *Details:.*/s, '')})`);
} else {
DEBUG?.(getMessage(`Getting passcode URL failed`, { error }));
}
DEBUG?.(`Falling back to legacy token URL: ${error.message}`);
return undefined;
}
return appUrlWith(path);
DEBUG?.(`Received ${JSON.stringify(responseData)}`);
if (responseData.includes?.('<html')) {
throw 'HTML response received. Check if route to MTX is configured correctly in App Router.';
}
if (responseData.signed_metadata) {
DEBUG?.(`Unsupported signed metadata received. Since these should take precedence, ignoring response.`);
return undefined;
}
return responseData.passcode_url;
}

@@ -142,3 +160,3 @@

function notifyLoggedIn(params) {
function notifyLogin(params) {
return params.get('renewLogin')

@@ -209,4 +227,9 @@ ? console.log.bind(console)

const clashing = ['passcode', 'clientid', 'username'].filter(param => params.has(param));
if (clashing.length > 1) {
throw `Conflicting parameters: ${clashing.join(', ')}. Please provide only one of them.`;
}
if (params.has('passcode') && loadedSettings.has('username')) {
console.log(`Unsetting username${(loadedSettings.has('isEmptyPassword') ? ' and empty-password hint' : '')}`);
console.log(`Discarding saved username${(loadedSettings.has('isEmptyPassword') ? ' and empty-password hint' : '')}: passcode given`);
loadedSettings.delete('username');

@@ -228,2 +251,19 @@ loadedSettings.delete('isEmptyPassword');

static async updateUrls(params, logout, loadedSettings) {
function secure(url, label) {
if (!url) {
return url;
}
if (url.startsWith(httpSchema) && !localhostRegex.test(url)) {
url = url.replace(httpSchema, httpsSchema);
DEBUG?.(`Replaced HTTP with HTTPS in ${label}: ${url}`);
} else if (!schemaRegex.test(url)) {
if (localhostRegex.test(url)) {
url = httpSchema + url;
} else {
url = httpsSchema + url;
}
DEBUG?.(`Added schema to ${label}: ${url}`);
}
return url;
}
async function updateAppUrl() {

@@ -241,3 +281,3 @@ if (!logout && !params.has('appUrl')) {

// Prefix URL schema
let appUrl = params.get('appUrl');
let appUrl = secure(params.get('appUrl'), 'app URL');
if (appUrl.endsWith('/')) {

@@ -247,21 +287,14 @@ appUrl = appUrl.replace(/\/+$/, '');

}
if (!schemaRegex.test(appUrl)) {
if (localhostRegex.test(appUrl)) {
appUrl = httpSchema + appUrl;
} else {
appUrl = httpsSchema + appUrl;
}
DEBUG?.(`Added schema to app URL: ${appUrl}`);
}
params.set('appUrl', appUrl);
if (params.get('appUrl') !== loadedSettings.get('appUrl')) {
DEBUG?.(`Updated app URL from '${loadedSettings.get('appUrl')}' (loaded) to '${appUrl}'`);
DEBUG?.(`Updated app URL from loaded value '${loadedSettings.get('appUrl')}'`);
}
console.log(`App URL: ${params.get('appUrl')}`);
}
async function updateTokenUrl() {
const tokenUrl = params.get('tokenUrl');
const tokenUrl = secure(params.get('tokenUrl'), 'token URL');
const renewUrl = !logout && !params.get('skipToken')
&& (!tokenUrl || tokenUrl.includes('//-/cds') || tokenUrl.includes(OAUTH_PATH_OLD_MTX) || params.get('appUrl') !== loadedSettings.get('appUrl'));
&& (!tokenUrl || tokenUrl.includes('//-/cds') || tokenUrl.includes(OAUTH_PATH_LEGACY) || params.get('appUrl') !== loadedSettings.get('appUrl'));
if (renewUrl) {
const tokenUrl = await getTokenBaseUrl(params, renewUrl);
const tokenUrl = await getTokenUrl(params, renewUrl);
params.set('tokenUrl', tokenUrl);

@@ -329,3 +362,9 @@ DEBUG?.(`Updated token URL from '${loadedSettings.get('tokenUrl')}' (loaded) to '${tokenUrl}'`);

async function addPasscode() {
const prompt = `Passcode${params.get('passcodeUrl') ? ' (visit ' + params.get('passcodeUrl') + ' to generate)' : ''}: `;
if (!params.has('passcodeUrl')) {
const passcodeUrl = await fetchPasscodeUrl(params);
if (passcodeUrl) {
params.set('passcodeUrl', passcodeUrl);
}
}
const prompt = `Passcode${params.has('passcodeUrl') ? ' (visit ' + params.get('passcodeUrl') + ' to generate)' : ''}: `;
params.set('passcode', (await Question.askQuestion(prompt, undefined, true)).trim());

@@ -528,3 +567,3 @@ console.log();

await fs.promises.writeFile(CONFIG.paths.auth, JSON.stringify(allAuthValues, null, 2));
notifyLoggedIn(params)?.(`Saved authentication data to ${location}`);
notifyLogin(params)?.(`Saved authentication data to ${location}`);
} else if (allAuthValues[appUrl]) {

@@ -539,3 +578,3 @@ delete allAuthValues[appUrl][params.get('subdomain')];

} else {
notifyLoggedIn(params)?.(`No authentication data to delete from ${location}`);
notifyLogin(params)?.(`No authentication data to delete from ${location}`);
}

@@ -553,3 +592,3 @@ }

await keytar.setPassword(MTX_FULLY_QUALIFIED, getKeyringAccountName(params), JSON.stringify(auth));
notifyLoggedIn(params)?.(`Saved authentication data to ${location}`);
notifyLogin(params)?.(`Saved authentication data to ${location}`);
} else {

@@ -679,3 +718,4 @@ await keytar.deletePassword(MTX_FULLY_QUALIFIED, getKeyringAccountName(params));

SettingsManager,
other
other,
notifyLogin
};

@@ -26,3 +26,3 @@ const cds = require('../cds');

if (!(command in COMMANDS)) {
throw new Error(`invalid command: ${command}`);
throw `invalid command: ${command}`;
}

@@ -29,0 +29,0 @@ this.command = command;

@@ -12,3 +12,3 @@ const os = require('os');

function runCf(args, callback) {
const env = Object.assign({}, process.env, { LC_ALL: 'en_US.UTF-8' }); // enforce English headings
const env = { ...process.env, LC_ALL: 'en_US.UTF-8' }; // enforce English headings
return execFile('cf', args, { env }, callback);

@@ -15,0 +15,0 @@ }

@@ -75,3 +75,27 @@ const fs = require('fs');

/**
* Searches for a regular expression in a directory tree
* @param folder directory to search in
* @param extensions file extensions to search for (use lower-case strings)
* @param regex regular expression to search for
* @return {boolean} true if the regular expression was found in a file
*/
function grepQR(folder, extensions, regex) {
for (const dirent of fs.readdirSync(folder, { withFileTypes: true })) {
if (dirent.isFile() &&
extensions.some(x => dirent.name.toLowerCase().endsWith(x))) {
try {
if (regex.test(fs.readFileSync(path.join(folder, dirent.name), { encoding: 'utf-8' }))) {
return true;
}
} catch (error) { /* ignore */ }
}
if (dirent.isDirectory() && grepQR(path.join(folder, dirent.name), extensions, regex)) {
return true;
}
}
return false;
}
module.exports = {

@@ -82,3 +106,4 @@ collectFileContent,

normalizePath,
isParent
isParent,
grepQR
};

@@ -1,2 +0,2 @@

/* eslint-disable no-console */

@@ -19,4 +19,8 @@ function getMessage(errorText, { error, command, help = command && getCommandHelp(command) } = {}) {

}
if (debug && error.cause) {
message += '\nCaused by: ' + getMessage('', { error: error.cause }).replace(/^(?= *at)/gm, ' ');
if (error.cause) {
if (debug) {
message += '\nCaused by: ' + getMessage('', { error: error.cause }).replace(/^(?= *at)/gm, ' ');
} else if (error.cause.status) {
message += `\nHTTP status: ${error.cause.status}`;
}
}

@@ -23,0 +27,0 @@ }

@@ -1,3 +0,1 @@

// REVISIT: Consolidate with lib/util/exec.js
const cp = require('child_process');

@@ -4,0 +2,0 @@ const os = require('os');

@@ -1,4 +0,3 @@

const { yaml: YAML } = require('@sap/cds-foss')
const { read, write, yaml: YAML } = require('../cds').utils
const JSONC = require('./jsonc');
const { read, write } = require('../cds').utils;

@@ -31,3 +30,3 @@ // REVISIT: Consider merging with template util

async writeYAML(dst, yaml) {
const content = YAML.stringify(yaml, { collectionStyle: 'block', lineWidth: 120 })
const content = YAML.stringify(yaml, { collectionStyle: 'block', lineWidth: 150 })
await write(content).to(dst)

@@ -34,0 +33,0 @@ }

@@ -12,9 +12,11 @@ const DEBUG = /\b(y|all|cli)\b/.test(process.env.DEBUG) ? console.debug : undefined

blue: '\x1b[34m', // Foreground Blue
yellow:'\x1b[33m', // Foreground Yellow
orange: '\x1b[38;2;255;140;0m' // darker orange, works with bright and dark background
}
Object.defineProperty(module.exports, 'isTTY', { get: () => process.stdin.isTTY && process.stderr.isTTY })
Object.defineProperty(module.exports, 'isTTY', { get: () => process.stdout.isTTY && process.stderr.isTTY })
const useColor = module.exports.isTTY && process.env.NO_COLOR !== 'true' || (process.env.FORCE_COLOR && process.env.FORCE_COLOR !== 'false')
module.exports.colors = useColor
const as = module.exports.as = (codes, o) => {
return module.exports.isTTY && !process.env.NO_COLOR ? (codes + o + t.reset) : ('' + o)
return useColor ? (codes + o + t.reset) : ('' + o)
}

@@ -21,0 +23,0 @@

{
"name": "@sap/cds-dk",
"version": "7.9.5",
"version": "8.0.2",
"description": "Command line client and development toolkit for the SAP Cloud Application Programming Model",

@@ -19,14 +19,16 @@ "homepage": "https://cap.cloud.sap/",

"dependencies": {
"@sap/cds": "^7",
"@cap-js/asyncapi": "^1.0.0",
"@cap-js/openapi": "^1.0.0",
"@sap/cds": ">=7",
"@sap/cds-foss": "^5.0.0",
"@sap/cds-mtxs": "^1.9.0",
"@sap/cds-mtxs": ">=1.9.0",
"@sap/eslint-plugin-cds": "^3.0.1",
"@sap/hdi-deploy": "^4.8.0",
"@sap/hdi-deploy": "^5",
"axios": "^1",
"eslint": "^8",
"eslint": "^9",
"express": "^4.17.3",
"hdb": "^0",
"livereload-js": "^4.0.1",
"mustache": "^4.0.1",
"node-watch": ">=0.7",
"pluralize": "^8.0.0",
"ws": "^8.4.2",

@@ -36,3 +38,3 @@ "xml-js": "^1.6.11"

"optionalDependencies": {
"sqlite3": "^5.0.4"
"@cap-js/sqlite": "^1"
},

@@ -42,6 +44,9 @@ "files": [

"lib/",
"CHANGELOG.md",
"LICENSE",
"npm-shrinkwrap.json",
"LICENSE"
"package.json",
"readme.md"
],
"_hasShrinkwrap": true
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc