@sap/cds-dk

Comparing version 6.5.2 to 6.6.0

lib/compile/asyncapi/channels.js


bin/activate.js

@@ -10,2 +10,4 @@ module.exports = Object.assign(activate, {

**Deprecated - for use with previous MTX Services (@sap/cds-mtx).**
Activate an extension project.

@@ -48,2 +50,7 @@ By default, the current working directory is used as project directory.

# SEE ALSO
*cds push* to push an extension to a SaaS app based on current MTX Services (@sap/cds-mtxs).
https://cap.cloud.sap/docs/guides/multitenancy/mtxs for information on @sap/cds-mtxs.
`})

@@ -50,0 +57,0 @@


bin/add.js

@@ -47,4 +47,3 @@

To add a specific feature in the helm chart you can use the command 'cds add helm:<featureName>'.
For example cds add helm:xsuaa and cds add helm:html5_apps_deployer.
*html5-repo* - adds configuration for HTML5 Application Deployer to Helm Chart (Kyma Only).

@@ -51,0 +50,0 @@

@@ -51,3 +51,3 @@ module.exports = Object.assign(build, {

function build([project], options = {}) {
async function build([project], options = {}) {
options.project = options.project || project

@@ -70,12 +70,17 @@ // switch-off error logging as CLI is logging any exceptions by default

const cds = require('../../lib/cds')
if (typeof cds.build === 'function') {
return cds.build(options)
try {
if (typeof cds.build === 'function') {
return await cds.build(options)
}
console.warn(`
> WARNING: This application uses a very old version ${cds.version} of package '@sap/cds',
> which will no longer be supported with the next major version of '@sap/cds-dk'.
> We strongly recommend an upgrade to latest version 6 of '@sap/cds'.
`)
return await cds.build.build(options)
} catch (e) {
process.exitCode = 1
cds._log(e.errors || e, { 'log-level': cds.env.log.levels.cli })
}
console.warn(`
> WARNING: This application uses a very old version ${cds.version} of package '@sap/cds',
> which will no longer be supported with the next major version of '@sap/cds-dk'.
> We strongly recommend an upgrade to latest version 6 of '@sap/cds'.
`)
return cds.build.build(options)
}
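
The hunk above makes build asynchronous, awaits cds.build, and on failure sets process.exitCode instead of letting the exception escape. A minimal standalone sketch of that pattern, not the actual @sap/cds-dk code:

async function runTask(task, options = {}) {
  try {
    return await task(options)
  } catch (e) {
    process.exitCode = 1          // mark failure, but let pending I/O drain before exiting
    console.error(e.errors || e)  // print aggregated errors when present
  }
}

// exits with code 1 after the error has been printed
runTask(async () => { throw new Error('compile failed') })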

@@ -28,3 +28,3 @@ #!/usr/bin/env node

exec (cmd = process.argv[2], ...argv) {
async exec (cmd = process.argv[2], ...argv) {
if (!argv.length) argv = process.argv.slice(3)

@@ -36,3 +36,2 @@ if (!cmd) cmd = process.stdin.isTTY ? 'help' : 'compile'

const cds_cli = require (_local('@sap/cds/bin/cds'))
if (cmd !== 'repl' && process.env.NODE_ENV !== 'test') cds_cli.errorHandlers()
DEBUG && DEBUG (`[cds] - @sap/cds ${require('../lib/cds').version} loaded: ${require('../lib/cds').home}`)

@@ -49,7 +48,11 @@

}
const args = cds_cli.args(task,argv)
args[0].push(...appendArgs)
const resolveBindings = cmd in { run: 1, serve: 1, migrate: 1 } && argv.includes('--resolve-bindings')
if (resolveBindings) return _resolveBindings().then(() => task.apply(this, args))
else return task.apply(this, args)
try {
const args = cds_cli.args(task,argv)
args[0].push(...appendArgs)
if (args[1]?.['resolve-bindings']) await _resolveBindings()
return task.apply(this, args)
} catch (e) {
process.exitCode = 1
console.error (e)
}
}

@@ -141,3 +144,3 @@ },

process.exit(1)
process.exitCode = 1
}

@@ -144,0 +147,0 @@

@@ -0,1 +1,3 @@

const path = require("path");
module.exports = Object.assign ( compile, {

@@ -9,3 +11,3 @@ options: [

flags: [
'--parse', '--plain', '--clean', '--files', '--sources', '--resolved', '--all', '--beta', '--conceptual', '--min', '--docs', '--locations', '--openapi:diagram'
'--parse', '--plain', '--clean', '--files', '--sources', '--resolved', '--all', '--beta', '--conceptual', '--min', '--docs', '--locations', '--openapi:diagram', '--asyncapi:merged'
],

@@ -45,2 +47,3 @@ help: `

- openapi
- asyncapi

@@ -59,2 +62,3 @@ *-4* | *--for* <target>

Chooses a specific service or _all_ to force output for all services.
The service name must be fully qualified, including the namespace, if any.

@@ -120,2 +124,7 @@ *-l* | *--lang* <languages> | all

*--asyncapi:merged*
A single AsyncAPI document is generated using the details of all input services. Information of _title_
and _version_ should be provided as preset.
# EXAMPLES

@@ -126,2 +135,3 @@

*cds* srv -s all -l all -2 edmx -o _out
*cds* compile srv -s sap.sample.TestService -2 asyncapi -o _out

@@ -204,3 +214,2 @@ `})

}
// add output processor

@@ -210,3 +219,3 @@ const write = require ('../../lib/util/write')

folder: options.dest,
file: options.file || options.service === 'all' ? src : options.service,
file: options.file || options.service === 'all' ? path.basename(src) : options.service,
suffix: options.suffix || suffix4(_suffix),

@@ -284,2 +293,3 @@ [options.dest ? 'foreach' : 'log']: options.log || consoleLog

edm: '.json',
asyncapi: '.json',
xsuaa: '.json'

@@ -286,0 +296,0 @@ }[x] || '.'+x) }

@@ -6,6 +6,5 @@ module.exports = Object.assign (env, {

*cds env*
*cds env* <ls | list> [<key>]
*cds env* set <key> <value>
*cds env* get <key>
*cds env* [get] [<key>]
*cds env* list [<key>]
*cds env* ls [<key>]
*cds env* src

@@ -15,8 +14,6 @@

Lists the effective configuration of the current environment in
_.properties_ format if no command, *ls*, or *list* is given.
Displays the effective configuration for the given key, or all of the
current environment. By default and when using *get* the output is in
_object_ format. When using *list* or *ls* it's in _.properties_ format.
The other commands either get or set a single property or display
the sources from which the effective configuration has been loaded.
# OPTIONS

@@ -50,9 +47,8 @@

const cds = require('../lib/cds')
const fn = (options["process-env"] ? processEnvCommands[cmd] : commands[cmd]) || (()=>{
this.help ('env')
throw new Error (`Don't understand '${cmd}' here...`)
})
const fn = (
options["process-env"] ? processEnvCommands[cmd] :
commands[cmd] || ((key = cmd), commands.get)
)
let env = options.for ? cds.env.for(options.for) : cds.env
if (options["resolve-bindings"]) {

@@ -65,9 +61,6 @@ const BindingManager = require('../lib/bind/bindingManager')

}
env['_home_cds-dk'] = path.resolve(__dirname, '..') // to help tools find the DK
if (!options.for && options.mocked) await cds.service.bindings
fn (env, key, value)
return fn (env, key, value)
}

@@ -78,9 +71,7 @@

get [undefined]() { return commands.list },
get ''() { return commands.list },
get ls() { return commands.list },
get ls() { return this.list },
// eslint-disable-next-line no-unused-vars
list (conf, _key, val) { // NOSONAR
(function _list (o = _key ? conf.get(_key) : conf, key = _key || '') { // NOSONAR, ...tzefix!!
(function _list (o = _key ? conf.get(_key.replace(/^cds\./,'')) : conf, key = _key || '') { // NOSONAR, ...tzefix!!
if (o && typeof o === 'object' && !Array.isArray(o)) for (let p of Object.keys(o).sort()) {

@@ -104,3 +95,3 @@ const d = Reflect.getOwnPropertyDescriptor (o,p)

// tty: display formatted object
const obj = (key ? conf.get(key) : conf)
const obj = key ? conf.get(key.replace(/^cds\./,'')) : conf
console.log (inspect (obj, {depth:22,colors:true}).replace(/\s+use:.*\[Getter\][^,]+,?/g,''))

@@ -111,3 +102,3 @@ },

json (conf, key, val) {
const obj = (key ? conf.get(key) : conf)
const obj = (key ? conf.get(key.replace(/^cds\./,'')) : conf)
const str = JSON.stringify(obj, null, 2)

@@ -118,3 +109,3 @@ return console.log (str ? str.replace(/\s+use:.*\[Getter\][^,]+,?/g,'') : str)

set () {
throw new Error ('cds config set is not yet implemented, sorry')
console.error ('cds env set is not yet implemented, sorry.')
}

@@ -121,0 +112,0 @@ }
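
Two small patterns recur in the env.js hunks above: getter-based command aliases (ls and the empty command resolve to list, and an unrecognized command now falls back to get with the word treated as the key), and stripping an optional cds. prefix before a key lookup. A rough, self-contained sketch of both, using plain property access where the real code resolves dotted paths via conf.get:

const commands = {
  get ''() { return this.list },   // default when no command is given
  get ls() { return this.list },   // alias
  list (conf, key) {
    const k = key && key.replace(/^cds\./, '')   // strip an optional cds. prefix
    console.log(k ? conf[k] : conf)
  }
}

commands.list({ requires: { db: 'sqlite' } }, 'cds.requires')   // → { db: 'sqlite' }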

@@ -10,2 +10,4 @@ module.exports = Object.assign(extend, {

**Deprecated - for use with previous MTX Services (@sap/cds-mtx).**
Create an extension project from an extensible SaaS app.

@@ -49,2 +51,8 @@ Obtain the app URL from the SaaS application provider.

*cds activate* to activate an extension project.
# SEE ALSO
*cds pull* to get the CDS model of a SaaS app based on current MTX Services (@sap/cds-mtxs).
https://cap.cloud.sap/docs/guides/multitenancy/mtxs for information on @sap/cds-mtxs.
`})

@@ -51,0 +59,0 @@

@@ -55,5 +55,2 @@ const { URLS } = require('../lib/init/constants')

To add a specific feature in the helm chart you can use the command 'cds add helm:<featureName>'.
For example cds add helm:xsuaa and cds add helm:html5_apps_deployer.
*--java:mvn* <Comma separated maven archetype specific parameters>

@@ -60,0 +57,0 @@

@@ -1,3 +0,1 @@

const { bold } = require('../lib/util/term');
module.exports = Object.assign(login, {

@@ -9,4 +7,2 @@ options: ['--subdomain', '--directory', '--passcode', '--user', '--client'],

const config = require('../lib/client/settings_manager').SettingsManager.config
const cdsExtendAndCdsActivate = `${bold('cds extend')} and ${bold('cds activate')}`
const keyring = config.keyringDesignation
return `

@@ -17,3 +13,3 @@ # SYNOPSIS

Simplifies usage of ${cdsExtendAndCdsActivate} commands related to multitenant
Simplifies usage of *cds extend* and *cds activate* commands related to multitenant
SaaS applications by providing them with automatic authentication: fetches and

@@ -30,3 +26,3 @@ saves authentication tokens and provides relevant tokens to those commands.

on SAP Business Technology Platform, Cloud Foundry environment.
${cdsExtendAndCdsActivate} will be authenticated according to the given target.
*cds extend* and *cds activate* will be authenticated according to the given target.

@@ -47,3 +43,3 @@ If the Cloud-Foundry command-line client is installed, the SaaS-app URL <app-url>

Authentication data for all logins is saved by <app-url> and <tenant-subdomain> in the
desktop keyring (${keyring} on your platform) or, if requested, in config
desktop keyring (${(config.keyringDesignation)} on your platform) or, if requested, in config
file ${config.paths.auth}.

@@ -50,0 +46,0 @@

@@ -1,3 +0,1 @@

const { bold } = require('../lib/util/term');
module.exports = Object.assign(logout, {

@@ -8,5 +6,2 @@ options: ['--url', '--subdomain'],

get help() {
const cdsLogin = `${bold('cds login')}`
const cdsExtend = bold('cds extend')
const cdsActivate = bold('cds activate')
return `

@@ -17,4 +12,4 @@ # SYNOPSIS

Removes authentication data saved locally by ${cdsLogin} for use by ${cdsExtend}
and ${cdsActivate}. In default mode, deletes only data for a specific app URL
Removes authentication data saved locally by *cds login* for use by *cds extend*
and *cds activate*. In default mode, deletes only data for a specific app URL
and subdomain, optionally including relevant project settings.

@@ -21,0 +16,0 @@

@@ -112,4 +112,4 @@ const watchOnlyOptions = ['--ext', '--livereload', '--open']

log (`${t.bold}cds ${args.join(' ')}`)
log (`watching: ${ext}...`)
DEBUG && log (`also watching: ${includes.source}`)
log (process.pid, 'watching:', ext, '...')
if (includes) DEBUG && log (`also watching: ${includes.source}`)
if (liveReload) log (`live reload enabled for browsers`)

@@ -129,3 +129,2 @@

log (`\n${t.bold} ___________________________\n\n`)
DEBUG && DEBUG (files)
if (liveReload) liveReload.markPending(files)

@@ -142,3 +141,3 @@ if (this.delayed) clearTimeout (this.delayed)

FileHandlers.changed (files,cwd)
}, 111)
}, 111) .unref()
}).on('message', async (msg) => { switch (msg.code) { // messages from child process

@@ -216,3 +215,3 @@ case 'listening':

case 'EADDRINUSE': return process.send(e) //> tell cds watch
default: { throw e } // cli error handling will catch and format
default: console.error('\n❗️ Error on server start: ❗️'); throw e
}}

@@ -219,0 +218,0 @@ function _no_models_found(e) { console.log (`

@@ -10,2 +10,26 @@

## Version 6.6.0 - 2023-02-28
### Added
- `cds compile` added a new target format `asyncapi` to convert CDS models to AsyncAPI documents.
- `cds pull` now hints at base-model name for `using` statement.
### Changed
- `cds env` now allows inspecting entries with optional `get` command. E.g. `cds env requires.db`.
- `cds add multitenancy` now uses async SaaS Provisioning Service onboarding by default.
- `cds add multitenancy` for Java will now add `sqlite3` to `devDependencies` in the sidecar `package.json`.
- `cds add extensibility` now works for Java projects out-of-the-box.
- `cds import` now captures the Edm Primitive types without [CDS mapping](https://github.tools.sap/cap/cds-dk/blob/main/edm2cdsTypeMapping.md) with annotation `@odata.Type` and marks the type as `cds.String`.
- `cds add helm` connectivity service instance is no longer created.
- `cds init` uses latest Maven Java archetype version 1.32.0 for creating Java projects.
### Fixed
- `cds unsubscribe --from` flag now recognized
- `cds import` now adds `cds.Boolean` as dummy return type if `ReturnType` for `FunctionImport` is missing in the OData V2 edmx.
- `cds import` resolves the `$Cast` construct in the CSN for OData V4 files.
- `cds lint` now reports like ESLint in case of missing plugin `@sap/eslint-plugin-cds`
## Version 6.5.2 - 2023-02-10

@@ -12,0 +36,0 @@

@@ -201,3 +201,3 @@ const fs = require('fs').promises;

};
finisher.timer = setInterval(checkFinished.bind(undefined, jobId, statusUrl, params, finisher), 250);
finisher.timer = setInterval(checkFinished.bind(undefined, jobId, statusUrl, params, finisher), 250).unref();
});

@@ -204,0 +204,0 @@ });
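
Both the watch debounce timer and the job-status polling interval above gain .unref(), so a pending timer no longer keeps the Node.js process alive on its own. A short illustration:

const timer = setInterval(() => console.log('polling job status...'), 250)
timer.unref()                // the process may exit even while this interval is pending

setTimeout(() => {           // some finite piece of real work keeps the loop alive
  clearInterval(timer)
  console.log('done')
}, 600)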

@@ -7,3 +7,4 @@ const { getMessage } = require('./logging');

const help = command && getCommandHelp(command);
throw getMessage(message, { error, help });
Error.stackTraceLimit = 0
throw new Error(getMessage(message, { error, help }));
}

@@ -10,0 +11,0 @@ }
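
The CliError hunk above sets Error.stackTraceLimit to 0 right before constructing the error, so CLI users see only the message and help text rather than a stack trace. A sketch of that technique with hypothetical message and help values (note that stackTraceLimit is a process-global setting):

function cliError(message, help) {
  Error.stackTraceLimit = 0    // errors created from here on carry no stack frames
  return new Error(help ? message + '\n\n' + help : message)
}

try { throw cliError('Unknown command', 'See cds help for available commands.') }
catch (e) { console.error(e.message) }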

@@ -179,4 +179,5 @@ const log = require('./helper/logging');

this.appUrl = new Param('appUrl', '', { persist: Persistence.setting });
this.url = this.appUrl.newAlias('url');
this.from = this.appUrl.newAlias('from');
this.to = this.appUrl.newAlias('to');
this.url = this.appUrl.newAlias('url');
this.subdomain = new Param('subdomain', '', { persist: Persistence.setting }); // only needed to fetch token

@@ -402,3 +403,3 @@ this.passcodeUrl = new Param('passcodeUrl', '', { persist: Persistence.setting, abbreviate: true });

*/
toValueMap(persistence) {
toEntries(persistence) {
return this.toArray()

@@ -414,3 +415,3 @@ .filter(param => param.persist === persistence || persistence === undefined)

toJSON() {
return this.toValueMap();
return this.toEntries();
}

@@ -417,0 +418,0 @@

@@ -21,5 +21,9 @@ const cds = require ('../cds')

const url = params.get('appUrl')
const home = params.get('projectFolder');
const target = join(home, 'node_modules', this.getAppPackageName(home));
const projectFolder = params.get('projectFolder');
const env = cds.env.for('cds', projectFolder);
const target = join(projectFolder, 'node_modules', this.getAppPackageName(env));
const subdomain = params.get('subdomain') // REVISIT: Why are subdomains not transparently encoded in URLs?
this.amendPackageJson(projectFolder, env);
console.log(`Pulling app base model`, Object.assign({ from: url, to: local(target) }, subdomain && {subdomain}))

@@ -31,7 +35,8 @@

const baseModelTgz = await this.getTgz(params);
return tar.xz(baseModelTgz).to(target);
await tar.xz(baseModelTgz).to(target);
console.log(`Finished. Refer to the base model like so: using from '${this.getAppPackageName(env)}'`);
}
static getAppPackageName(projectFolder) {
const env = cds.env.for('cds', projectFolder);
static getAppPackageName(env) {
return env.extends || '_base';

@@ -48,2 +53,23 @@ }

}
static amendPackageJson(projectFolder, env) {
if ('extends' in env) {
return;
}
console.log(`Amending extension package.json with project configuration`);
const packageJson = join(projectFolder, 'package.json');
let packageContents;
try {
packageContents = JSON.parse(fs.readFileSync(packageJson, { encoding: 'utf-8' }));
} catch (error) {
throw new CliError('package.json missing or unreadable', { error });
}
fs.writeFileSync(packageJson, JSON.stringify({
...packageContents,
cds: {
...(packageContents.cds ?? {}),
extends: this.getAppPackageName(projectFolder) // Default unless configured through current process.env
}
}));
}
}
const cds = require ('../cds'), { local } = cds.utils;
const fs = require('fs');
const { join } = require('path');
const { readFileSync, existsSync } = require('fs');
const axios = require('axios');
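
cds pull above now amends the extension's package.json with a cds.extends entry (defaulting to _base) so that the hinted using from '...' statement resolves. A standalone sketch of that amendment, with a hypothetical project folder and without the real env and CliError handling:

const fs = require('fs')
const { join } = require('path')

function amendPackageJson(projectFolder, baseModelName = '_base') {
  const file = join(projectFolder, 'package.json')
  const pkg = JSON.parse(fs.readFileSync(file, 'utf-8'))
  if (pkg.cds && pkg.cds.extends) return pkg            // already configured, leave untouched
  pkg.cds = { ...pkg.cds, extends: baseModelName }      // later referenced as: using from '_base'
  fs.writeFileSync(file, JSON.stringify(pkg, null, 2))
  return pkg
}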

@@ -36,4 +36,4 @@

let content;
if (fs.existsSync(src)) {
content = fs.readFileSync(src, { encoding: 'base64' });
if (existsSync(src)) {
content = readFileSync(src, { encoding: 'base64' });
src = local(src);

@@ -40,0 +40,0 @@ } else {

@@ -161,3 +161,3 @@ const fs = require('fs');

const paramsMap = params.toValueMap(Persistence.setting);
const paramsMap = params.toEntries(Persistence.setting);
if (params.has('username')) {

@@ -186,6 +186,6 @@ // Save username only for localhost for security reasons.

if (params.get('tokenStorage') === 'plain') {
await this._saveAuthToFile(params, params.toValueMap(Persistence.auth));
await this._saveAuthToFile(params, params.toEntries(Persistence.auth));
} else if (params.get('tokenStorage') === 'keyring') {
await this.checkKeytar(params);
await this._saveAuthToKeyring(params, params.toValueMap(Persistence.auth));
await this.setKeytar(params);
await this._saveAuthToKeyring(params, params.toEntries(Persistence.auth));
} else {

@@ -216,3 +216,3 @@ console.log('Note: the authentication token is not saved by default. To save the token for later commands, please run `cds login`.');

await this.updateUrls(params, logout, loadedAppUrl);
await this.checkKeytar(params, logout);
await this.setKeytar(params, logout);
await this.addAuth(params, logout);

@@ -288,3 +288,3 @@ log.debug(`Effective project settings: ${params.format()}`);

static async checkKeytar(params, logout = false) {
static async setKeytar(params, logout = false) {
if (params.get('skipToken')) {

@@ -291,0 +291,0 @@ return;

@@ -32,3 +32,3 @@ let cds;

// Connect to running MTXS server
params = await AuthManager.login(params.toValueMap());
params = await AuthManager.login(params.toEntries());
if (!params.has('username')) {

@@ -35,0 +35,0 @@ throw new CliError('Username is required if app URL is given.', { command: this.command });

@@ -7,2 +7,3 @@ const cds = require('../cds')

'openapi' in comp_to || defineProperty (comp_to, 'openapi', {enumerable:true, get:()=> require('./openapi')})
'asyncapi' in comp_to || defineProperty (comp_to, 'asyncapi', {enumerable:true, get:()=> require('./asyncapi')})

@@ -9,0 +10,0 @@ 'edmx-v2' in comp_to || defineProperty (comp_to, 'edmx-v2', {enumerable:true, value: (csn,o) => comp_to.edmx(csn,{...o,flavor:'v2'})})
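
The compile/to hunk above registers the new asyncapi backend the same lazy way as openapi: an enumerable getter that only loads the implementation when the target is actually used. A minimal sketch of that registration pattern with placeholder backends instead of the real require calls:

const targets = {}
const register = (name, load) =>
  name in targets || Object.defineProperty(targets, name, { enumerable: true, get: load })

register('openapi',  () => csn => ({ openapi: '3.0.0' }))    // stand-in for require('./openapi')
register('asyncapi', () => csn => ({ asyncapi: '2.0.0' }))   // stand-in for require('./asyncapi')

console.log(Object.keys(targets))     // [ 'openapi', 'asyncapi' ]
console.log(targets.asyncapi({}))     // backend loaded (and invoked) on first access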

@@ -18,3 +18,5 @@ "use strict";

COLLECTION_IN_KEY: 'Key Property of an Entity cannot be of the type Collection.',
INVALID_OPENAPI_FILE: 'The OpenAPI file is not valid. Specify the correct OpenAPI file.'
INVALID_OPENAPI_FILE: 'The OpenAPI file is not valid. Specify the correct OpenAPI file.',
UNRESOLVED_TYPE: 'Could not resolve the type: ',
NO_RETURN_TYPE: 'No ReturnType found in Function: '
};

@@ -21,0 +23,0 @@

@@ -12,3 +12,3 @@ /**

let edmxncdsdatatype = {
const edmxncdsdatatype = {
"Edm.String": "cds.LargeString",

@@ -43,2 +43,22 @@ "Edm.Boolean": "cds.Boolean",

const extendedPrimitiveTypes = [
"Edm.Duration",
"Edm.Geography",
"Edm.GeographyPoint",
"Edm.GeographyLineString",
"Edm.GeographyPolygon",
"Edm.GeographyMultiPoint",
"Edm.GeographyMultiLineString",
"Edm.GeographyMultiPolygon",
"Edm.GeographyCollection",
"Edm.Geometry",
"Edm.GeometryPoint",
"Edm.GeometryLineString",
"Edm.GeometryPolygon",
"Edm.GeometryMultiPoint",
"Edm.GeometryMultiLineString",
"Edm.GeometryMultiPolygon",
"Edm.GeometryCollection"
];
function _initialize(parserContext) {

@@ -59,3 +79,3 @@ parserContext.serviceNamespace = "";

parserContext.allInheritedComplexTypes = {};
parserContext.allInheritedEntityTypes = {};
parserContext.allInheritedEntityTypes = [];
parserContext.allowedNamespaces = [];

@@ -386,2 +406,3 @@ parserContext.allowAllNamespaces = false;

nullable,
defaultValue,
collectionKind,

@@ -448,10 +469,12 @@ parserContext

let hasCdsTypeMapping = false;
if (extendedPrimitiveTypes.includes(dataType)) {
cdsDataType = dataType;
hasCdsTypeMapping = true;
}
//if the cdsDataType is undefined/ not supported
if (cdsDataType === undefined) {
let message = '"' + dataType + '" is not supported';
if (propertyName)
message += ' (in element:"' + propertyName + '")';
else
message += ' (in return type)'
console.log(warn(message));
console.log(warn('"' + dataType + '" is not supported (in element:"' + propertyName + '")'));
return "";

@@ -481,6 +504,11 @@ }

}
if (hasCdsTypeMapping) {
propertyJson = propertyJson + '"@odata.Type": "' + dataType + '"';
propertyJson = propertyJson + ',\n"items": { \n'
propertyJson = propertyJson + '"type": "cds.String"';
} else {
propertyJson = propertyJson + '"items": { \n'
propertyJson = propertyJson + '"type":"' + cdsDataType + '"';
}
propertyJson = propertyJson + '"items": { \n'
propertyJson = propertyJson + '"type":"' + cdsDataType + '"';
if (length && length > 0) {

@@ -501,4 +529,9 @@ propertyJson = propertyJson + ',\n"length":' + length;

else {
propertyJson = propertyJson + '"type":"' + cdsDataType + '"';
//if annotations to be added
if (hasCdsTypeMapping) {
propertyJson = propertyJson + '"type": "cds.String"';
propertyJson = propertyJson + ',\n"@odata.Type": "' + dataType + '"';
} else {
propertyJson = propertyJson + '"type":"' + cdsDataType + '"';
}
// if annotations to be added
if (edmxncdsdatatype[dataType + "_a"] && cdsDataType != "cds.Date") {

@@ -511,3 +544,3 @@ if (dataType === 'Edm.Stream')

if (length && length > 0) {
if (length && length > 0 && !hasCdsTypeMapping) {
propertyJson = propertyJson + ',\n"length":' + length;

@@ -524,3 +557,3 @@ } else if (precision && precision > 0 && hasInvalidPrecision === false) {

//adding precision in case of DateTime datatypes
if ((dataType === "Edm.DateTimeOffset" || dataType === "Edm.DateTime") && cdsDataType != "cds.Date") {
if ((dataType === "Edm.DateTimeOffset" || dataType === "Edm.DateTime") && cdsDataType !== "cds.Date" && !hasCdsTypeMapping) {
if (precision && precision > 0){

@@ -544,2 +577,6 @@ propertyJson = propertyJson + ',\n"@odata.Precision": ' + precision;

if (defaultValue !== -1 && defaultValue) {
propertyJson = propertyJson + ', \n"default": {\n"val": "' + defaultValue + '"\n}';
}
if (propertyName)

@@ -555,3 +592,3 @@ propertyJson = propertyJson + "\n}";

// checking if the ComplexType is marked open or acts as a BaseType
if (isOpenType === true || Object.values(parserContext.allInheritedComplexTypes).indexOf(complexTypeKey) > -1) {
if (isOpenType || Object.values(parserContext.allInheritedComplexTypes).indexOf(complexTypeKey) > -1) {
complexTypeCSN = complexTypeCSN + '"@open": true,\n';

@@ -578,3 +615,3 @@ }

let namespaceAttributes = namespaceAttributeFilter(complexTypeProperty, parserContext);
const complexTypePropertyNamespace = namespaceAttributes.length ? namespaceAttributes : -1
const complexTypePropertyNamespace = namespaceAttributes.length ? namespaceAttributes : -1;

@@ -593,2 +630,3 @@ let complexProperty =

complexTypeProperty.Nullable,
complexTypeProperty.DefaultValue,
complexType.CollectionKind,

@@ -629,2 +667,3 @@ parserContext

complexTypeProperty.Nullable,
complexTypeProperty.DefaultValue,
complexTypeProperty.CollectionKind,

@@ -640,6 +679,12 @@ parserContext

complexTypeCSN = complexTypeCSN + "}\n";
if (parserContext.allInheritedComplexTypes[complexTypeKey]) {
complexTypeCSN = complexTypeCSN + ",\n";
complexTypeCSN = complexTypeCSN +
'"includes": ["' + parserContext.allInheritedComplexTypes[complexTypeKey] + '"]\n';
let baseType = parserContext.allInheritedComplexTypes[complexTypeKey];
if (parserContext.allComplexTypes[baseType]) {
complexTypeCSN = complexTypeCSN + ',\n"includes": ["' + baseType + '"]\n';
}
else {
console.log(warn("BaseType " + baseType + " couldn't be resolved"));
}
}

@@ -691,2 +736,3 @@ complexTypeCSN = complexTypeCSN + "}\n";

parameter["_attributes"].Nullable,
-1,
parameter["_attributes"].CollectionKind,

@@ -767,13 +813,24 @@ parserContext

}
// if return type exist
if (functionImport[2] !== -1) {
let returnCsn = _getServiceEntityProperty(noValue, functionImport[2], noValue, noValue, noValue,
noValue, noValue, noValue, -1, noValue, noValue, parserContext);
if (returnCsn != "") {
let returnValue = _getServiceEntityProperty(noValue, functionImport[2], noValue, noValue, noValue, noValue,
noValue, noValue, noValue, noValue, -1, noValue, parserContext);
if (returnValue !== "") {
csn = csn + ", \n";
csn = csn + '"returns": { \n';
//return type
csn = csn + returnCsn;
csn = csn + returnValue;
csn = csn + '\n }';
} else {
throw new Error(messages.UNRESOLVED_TYPE + `'${functionImport[2]}'`);
}
}
// if return type is missing, add boolean only for functions
if (functionImport[2] === -1 && functionImport[1] === "GET") {
csn = csn + ", \n";
csn = csn + '"returns": { \n';
csn = csn + '"type": "cds.Boolean"';
csn = csn + '\n }';
}
if (functionImport[4] !== -1 && functionImport[4]) {

@@ -888,2 +945,8 @@ csn = csn + ',\n "doc": "' + _replaceSpecialCharacters(functionImport[4]) + '"';

}
if (propAttributes.DefaultValue) {
propOthers.push(propAttributes.DefaultValue);
} else {
propOthers.push(-1);
}

@@ -954,3 +1017,3 @@ if (propAttributes.CollectionKind === 'List' || propAttributes.CollectionKind === 'Bag') {

propAttributes = entityPropertiesMap[entityKeysList[i]];
if (propAttributes[7] == "true") {
if (propAttributes[7] === "true") {
console.log(warn("Expected key element to be not nullable"));

@@ -972,2 +1035,3 @@ }

propAttributes[8],
propAttributes[9],
parserContext

@@ -1007,12 +1071,13 @@ );

property,
propAttributes[0],
propAttributes[1],
propAttributes[2],
propAttributes[3],
propAttributes[4],
false,
propAttributes[5],
propAttributes[6],
propAttributes[7],
propAttributes[8],
propAttributes[0], // dataType
propAttributes[1], // length
propAttributes[2], // precision
propAttributes[3], // scale
propAttributes[4], // display
false, // isKey
propAttributes[5], // doc
propAttributes[6], // allowed namespace
propAttributes[7], // nullable
propAttributes[8], // defaultVal
propAttributes[9], // collection
parserContext

@@ -1450,3 +1515,3 @@ );

if ((entityAttributes.Abstract && entityAttributes.Abstract.toUpperCase() === "TRUE") || (entityAttributes.OpenType && entityAttributes.OpenType.toUpperCase() === "TRUE")) {
if ((entityAttributes.Abstract?.toUpperCase() === "TRUE") || (entityAttributes.OpenType?.toUpperCase() === "TRUE")) {
csnEntity = csnEntity + '"@open": true,\n'

@@ -1513,8 +1578,13 @@ }

if (entityAttributes.BaseType) {
let baseTypeName = _getBaseTypeEntityName(entityAttributes.BaseType, parserContext);
csnEntity = csnEntity + ",\n";
csnEntity = csnEntity + '"includes": ["' + baseTypeName + '"]\n';
let baseTypeName = _getBaseTypeEntityName(entityAttributes.BaseType, parserContext).replace(parserContext.serviceNamespace + '.', '');
if (parserContext.allEntitiesMC.includes(baseTypeName) || parserContext.allEntities.includes(baseTypeName)) {
csnEntity = csnEntity + ',\n"includes": ["' + parserContext.serviceNamespace + '.' + baseTypeName + '"]\n';
}
else {
throw new Error(messages.UNRESOLVED_TYPE + `'${parserContext.serviceNamespace}.${baseTypeName}'`);
}
}
//if the function import is bounded to entitySet
// if the function import is bounded to entitySet
if (parserContext.allFunctionImportsMap[entityName]) {

@@ -1580,4 +1650,8 @@ csnEntity = csnEntity + ', \n "actions": {\n';

if (entityAttributes.BaseType) {
parserContext.allEntities.push(_extractEntityFromNamespace(entityAttributes.BaseType));
parserContext.allInheritedEntityTypes[_extractEntityFromNamespace(entityAttributes.BaseType)] = entity["_attributes"].Name;
let entityName;
if (parserContext.allEntitySetMapMC && parserContext.allEntitySetMapMC[entityAttributes.BaseType]) {
entityName = parserContext.allEntitySetMapMC[entityAttributes.BaseType][0];
}
else entityName = _extractEntityFromNamespace(entityAttributes.BaseType);
if (!parserContext.allInheritedEntityTypes.includes(entityName)) parserContext.allInheritedEntityTypes.push(entityName);
}

@@ -1670,8 +1744,16 @@ // adding blob element if entity has m:HasStream as true

if (entitiesWithNames[i].length) {
parsedEntities[i] = _parsingServiceEntities(entitiesWithNames[i],
ignorePersistenceSkip, mockServerUc, parserContext);
// adding @open annotation to BaseTypes
for (let j = 0; j < parsedEntities[i].length; j++) {
if (parserContext.allInheritedEntityTypes[_extractEntityFromNamespace(parserContext.allEntitySetMap[entitiesWithNames[i][j].name])] != undefined || parserContext.allInheritedEntityTypes[entitiesWithNames[i][j].name]) {
parsedEntities[i][j] = parsedEntities[i][j].replace(/}$/, ',"@open": true' + '}');
parsedEntities[i] = _parsingServiceEntities(entitiesWithNames[i], ignorePersistenceSkip, mockServerUc, parserContext);
}
}
// adding @open annotation to BaseTypes
for (let i = 0; i < parsedEntities.length; i++) {
for (let j = 0; j < parserContext.allInheritedEntityTypes.length ; j++) {
let entityName = parserContext.allInheritedEntityTypes[j];
const regEx = RegExp(`"${parserContext.serviceNamespace}.${entityName}": {`, 'g');
for (let k = 0; k < parsedEntities[i].length; k++) {
let entityString = parsedEntities[i][k];
if (entityString.match(regEx)) {
parsedEntities[i][k] = parsedEntities[i][k].replace(/}$/, ',"@open": true' + '}');
break;
}

@@ -1681,2 +1763,3 @@ }

}
// if entities from entity set and entity type both are parsed

@@ -1748,5 +1831,3 @@ if (parsedEntities[0] && parsedEntities[1])

if (entityJson) {
definitions.push(
_getServiceEntitites(entityJson, ignorePersistenceSkip, mockServerUc, parserContext)
);
definitions.push(_getServiceEntitites(entityJson, ignorePersistenceSkip, mockServerUc, parserContext));
}

@@ -1753,0 +1834,0 @@
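
The importer hunks above fall back to cds.String for Edm primitive types that have no direct CDS mapping and record the original type in an @odata.Type annotation (see also the 6.6.0 changelog entry). A small sketch of the resulting element shape, not the importer itself:

function mapPrimitive(dataType, mapping, extendedTypes) {
  if (mapping[dataType]) return { type: mapping[dataType] }
  if (extendedTypes.includes(dataType)) return { type: 'cds.String', '@odata.Type': dataType }
  return null    // unsupported type → warning in the real importer
}

const mapping = { 'Edm.Boolean': 'cds.Boolean' }
const extended = ['Edm.Geography', 'Edm.Duration']
console.log(mapPrimitive('Edm.Geography', mapping, extended))
// → { type: 'cds.String', '@odata.Type': 'Edm.Geography' }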

@@ -132,2 +132,8 @@ const {

// only add the annotation "@openapi.explode" for true scenario
if (param.explode || param.style === "form")
cdsParam["@openapi.explode"] = true;
if (!param.explode)
delete cdsParam["@openapi.explode"];
if ((param.in === "path" && param.style && param.style !== "simple") ||

@@ -137,13 +143,2 @@ (param.in === "query" && param.style && param.style !== "form")

cdsParam["@openapi.style"] = param.style;
if (param.explode && param.style !== "form")
cdsParam["@openapi.explode"] = true;
if (param.explode === false &&
(param.style === "form" || (!param.style && param.in === "query"))
)
cdsParam["@openapi.explode"] = false;
if (param.in === "query" && param.collectionFormat === "csv")
cdsParam["@openapi.explode"] = false;
if (param.in === "query" && param.collectionFormat === "ssv")

@@ -153,9 +148,14 @@ cdsParam["@openapi.style"] = "spaceDelimited";

cdsParam["@openapi.style"] = "pipeDelimited";
if (param.in === "query" && param.allowReserved === true)
cdsParam["@openapi.allowReserved"] = true;
if (param.required && param.in !== "path")
cdsParam["@openapi.required"] = true;
if (!param.required)
delete cdsParam.notNull;
if (!param.required) {
delete param.notNull;
if (param.default !== undefined)
cdsParam["default"] = { "val": param.default };
}
const name = cdsName(param.name);

@@ -388,4 +388,4 @@ if (name !== param.name) cdsParam["@openapi.name"] = param.name;

function addDefault(type, schema, forParameter) {
if (schema.default !== undefined) type.default = { val: schema.default };
if (forParameter) return;
if (schema.default) type.default = { val: schema.default };
}
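
The addDefault hunk above stops emitting defaults for parameters and keeps schema defaults in the CSN notation default: { val: ... }. A small, self-contained illustration of that notation:

function addDefault(type, schema, forParameter) {
  if (forParameter) return                              // parameter defaults are no longer emitted
  if (schema.default) type.default = { val: schema.default }
}

const element = { type: 'cds.String' }
addDefault(element, { type: 'string', default: 'EUR' }, false)
console.log(element)    // { type: 'cds.String', default: { val: 'EUR' } }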

@@ -392,0 +392,0 @@

const os = require('os');
const MAVEN_ARCHETYPE_VERSION = '1.31.1';
const MAVEN_ARCHETYPE_VERSION = '1.32.0';

@@ -5,0 +5,0 @@ const constants = {

@@ -136,3 +136,3 @@ const path = require('path');

if (!(await this._findProjectFile(this.projectPath))) {
if (!PROJECT_FILES.find(exists)) {
throw new Error(`The current folder doesn't seem to contain a project. None of the following files found: ${PROJECT_FILES.join(', ')}.`);

@@ -163,12 +163,2 @@ }

async _findProjectFile(projectPath) {
for (const file of PROJECT_FILES) {
if (exists(path.join(projectPath, file))) {
return file;
}
}
return null;
}
_initialize(projectName, options = {}) {

@@ -181,4 +171,4 @@ // deprecated, only for compatibility

this.cwd = options.cwd || process.cwd();
this.projectPath = path.resolve(this.cwd, projectName || '.');
this.projectName = path.basename(this.projectPath);
cds.root = path.resolve(this.cwd, projectName || '.');
this.projectName = path.basename(cds.root);

@@ -192,3 +182,3 @@ this.options = options;

case COMMAND_INIT: {
const relativeProjectPath = path.relative(this.cwd, this.projectPath);
const relativeProjectPath = path.relative(this.cwd, cds.root);
const folderName = (relativeProjectPath ? `.${path.sep}${relativeProjectPath}` : 'current folder');

@@ -211,3 +201,3 @@ LOG.info(`Creating new cap project in ${folderName}`);

async _process() {
DEBUG && DEBUG(`Project path: ${this.projectPath}`);
DEBUG && DEBUG(`Project path: ${cds.root}`);

@@ -230,3 +220,3 @@ await this._validateOptions();

const TemplateClass = require(`./template/${filename}`);
return new TemplateClass(this.projectPath, this);
return new TemplateClass(this);
} catch (err) {

@@ -243,3 +233,3 @@ if (err.code === 'MODULE_NOT_FOUND') {

const [bestMatch] = fuzzySearch(filename, entries);
console.log(`Unknown facet '${term.bold(filename)}'. Did you mean ${term.bold(`cds add ${bestMatch}`)}?\n\nHaven't found the proper facet yet? Here are all supported facets:\n\n ${term.bold(entries.join('\n '))}\n`);
LOG.info(`Unknown facet '${term.bold(filename)}'. Did you mean ${term.bold(`cds add ${bestMatch}`)}?\n\nHaven't found the proper facet yet? Here are all supported facets:\n\n ${term.bold(entries.join('\n '))}\n`);
process.exit(1);

@@ -340,3 +330,3 @@ }

await this._validateProjectName(this.projectName);
await this._validateProjectFolder(this.projectPath, this.cwd);
await this._validateProjectFolder(this.cwd);
}

@@ -362,7 +352,7 @@ }

async _validateProjectFolder(projectPath, cwd) {
const existingProjectFile = await this._findProjectFile(projectPath);
async _validateProjectFolder(cwd) {
const existingProjectFile = PROJECT_FILES.find(exists)
if (existingProjectFile) {
let message;
if (cwd === projectPath) {
if (cwd === cds.root) {
message = `You seem to be working in a project which is already initialized. Use ${term.bold('cds add')} to add more features.`;

@@ -369,0 +359,0 @@ } else {
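
The generator hunks above replace this.projectPath with cds.root and the asynchronous _findProjectFile loop with a plain PROJECT_FILES.find(exists). Assuming exists resolves relative paths against the project root (which dropping the explicit path.join implies), the detection boils down to something like the following, with an illustrative file list:

const { existsSync } = require('fs')
const { join } = require('path')

const PROJECT_FILES = ['package.json', 'pom.xml', '.cdsrc.json']   // illustrative list only

function findProjectFile(root) {
  return PROJECT_FILES.find(f => existsSync(join(root, f))) ?? null
}

console.log(findProjectFile(process.cwd()))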

// Registry of sequence matchers in mta.yaml files
const srvNode = {
ref: 'srv',
in: 'modules',

@@ -13,3 +12,2 @@ where: [{

const srvJava = {
ref: 'srv',
in: 'modules',

@@ -23,3 +21,2 @@ where: [{

const approuter = {
ref: 'approuter',
in: 'modules',

@@ -33,3 +30,2 @@ where: [{

const xsuaa = {
ref: 'uaa-resource',
in: 'resources',

@@ -49,3 +45,2 @@ where: [{

const auditlog = {
ref: 'auditlog-resource',
in: 'resources',

@@ -62,3 +57,2 @@ where: [{

const enterpriseMessaging = {
ref: 'enterprise-messaging-resource',
in: 'resources',

@@ -75,3 +69,2 @@ where: [{

const kibanaLogging = {
ref: 'cf-logging',
in: 'resources',

@@ -91,3 +84,2 @@ where: [{

const hdbDeployer = {
ref: 'deployer',
in: 'modules',

@@ -101,3 +93,2 @@ where: [{

const hdiContainer = {
ref: 'hdi-container-resource',
in: 'resources',

@@ -111,3 +102,2 @@ where: [{

const serviceManager = {
ref: 'service-manager-resource',
in: 'resources',

@@ -127,3 +117,2 @@ where: [{

const saasRegistry = {
ref: 'registry-resource',
in: 'resources',

@@ -140,3 +129,2 @@ where: [{

const srvAPI = {
ref: 'srvAPI',
in: 'provides',

@@ -153,3 +141,2 @@ where: [{

const mtxSidecar = {
ref: 'sidecar',
in: 'modules',

@@ -165,5 +152,4 @@ where: [{

const providedMtxAPI = {
ref: 'provided-mtx-api',
in: 'provides',
const providedMtxAPISrv = {
in: [srvNode, 'provides'],
where: [{

@@ -178,4 +164,14 @@ property: 'name',

const providedMtxAPISidecar = {
in: [mtxSidecar, 'provides'],
where: [{
property: 'name',
isEqualTo: 'mtx-api'
}, {
property: 'properties.mtx-url',
isEqualTo: '${default-url}'
}]
}
const requiredMtxAPI = {
ref: 'required-mtx-api',
in: [approuter, 'requires'],

@@ -192,3 +188,2 @@ where: [{

const providedAppAPI = {
ref: 'provided-app-api',
in: [approuter, 'provides'],

@@ -208,3 +203,2 @@ where: [{

const requiredAppAPI = {
ref: 'required-app-api',
in: [srvNode, 'requires'],

@@ -221,3 +215,2 @@ where: [{

const providedMtxSidecarAPI = {
ref: 'provided-mtx-sidecar-api',
in: [mtxSidecar, 'provides'],

@@ -231,3 +224,2 @@ where: [{

const requiredMtxSidecarAPI = {
ref: 'required-mtx-sidecar-api',
in: [srvJava, 'requires'],

@@ -241,3 +233,2 @@ where: [{

const requiredJavaApprouterAPI = {
ref: 'required-java-approuter-api',
in: [srvJava, 'requires'],

@@ -251,3 +242,2 @@ where: [{

const providedJavaApprouterAPI = {
ref: 'provided-java-approuter-api',
in: [approuter, 'requires'],

@@ -261,3 +251,2 @@ where: [{

const approuterExtensibility = {
ref: 'approuter-extensibility',
in: 'routes',

@@ -277,3 +266,2 @@ where: [{

const approuterExtensibilityJava = {
ref: 'approuter-extensibility-java',
in: 'routes',

@@ -305,3 +293,4 @@ where: [{

mtxSidecar,
providedMtxAPI,
providedMtxAPISrv,
providedMtxAPISidecar,
requiredMtxAPI,

@@ -308,0 +297,0 @@ providedMtxSidecarAPI,

{
"name": "approuter",
"dependencies": {
"@sap/approuter": "^13.0.0"
"@sap/approuter": "^14.0.0"
},
"engines": {
"node": "^16"
"node": "^18.0.0"
},

@@ -9,0 +9,0 @@ "scripts": {

const { join } = require('path')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeJSON, mergeYAML, sortDependencies } = require('../../util/merge')

@@ -16,7 +16,2 @@ const { OPTION_XSUAA, PROJECT_TYPE } = require('../../constants')

constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}
static hasFacet(env) {

@@ -31,5 +26,5 @@ return !!env.requires?.approuter

async run() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { for: forProfile } = projectDescriptor.cap
const appPath = join(this.projectPath, projectDescriptor.ui.appPath || 'app')
const appPath = join(this.projectPath, projectDescriptor.ui.appPath ?? 'app')
const appPackageJSONPath = join(appPath, 'package.json')

@@ -42,3 +37,3 @@

const projectType = await this.getProjectType()
const projectType = this.getProjectType()
switch (projectType) {

@@ -65,3 +60,3 @@ case PROJECT_TYPE.java: {

async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { isNodejs, isJava, isExtensible, isMultitenant, hasMta, hasHelm } = projectDescriptor.cap

@@ -107,3 +102,2 @@

{ additions: [{
ref: `web-application-approuter`,
in: `dependencies`,

@@ -144,3 +138,3 @@ where: [{

const appPath = join(this.projectPath, projectDescriptor.ui.appPath || 'app')
const appPath = join(this.projectPath, projectDescriptor.ui.appPath ?? 'app')
const additions = isExtensible && isJava ? [{ ...approuterExtensibilityJava, at: 0 }] :

@@ -147,0 +141,0 @@ isExtensible && isNodejs ? [{ ...approuterExtensibility, at: 0 }] : []

const { join } = require('path')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeYAML } = require('../../util/merge')
const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { srvNode, srvJava, auditlog } = require('../_merging/registry-mta')
module.exports = class AuditlogTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}

@@ -22,3 +18,3 @@ static hasFacet() {

async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { isNodejs, hasMta, hasHelm } = projectDescriptor.cap

@@ -64,3 +60,22 @@

)
await copyAndTrack(join(__dirname, 'files', 'auditlog.yaml'), join(this.projectPath, 'chart', 'templates', 'auditlog.yaml'), context)
// copy service instance chart if it is not present
const HelmTemplate = require(`../helm`);
const helmTemplate = new HelmTemplate(this.generator, context.oldTrackingData, context.newTrackingData);
isIndependentCommand = isIndependentCommand | await helmTemplate.addSubChart('service-instance');
// add dependency entry in chart.yaml
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'auditlog.yaml'),
projectDescriptor,
{ additions: [{
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'auditlog'
}]
}]}
)
if(isIndependentCommand) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData)

@@ -67,0 +82,0 @@ }
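
The auditlog change above, and the analogous destination, event-mesh, and hana hunks further down, replace copying a standalone template file with two steps: ensure the shared service-instance subchart exists, then merge a dependency entry, matched by its alias, into chart/Chart.yaml. A sketched flow with placeholder helpers standing in for the real HelmTemplate and mergeYAML utilities:

async function addServiceInstance(helmTemplate, mergeYAML, projectDescriptor, alias, sourceYaml) {
  const copied = await helmTemplate.addSubChart('service-instance')   // copies only if not present yet
  await mergeYAML('chart/Chart.yaml', sourceYaml, projectDescriptor, {
    additions: [{ in: 'dependencies', where: [{ property: 'alias', isEqualTo: alias }] }]
  })
  return copied    // feeds the caller's decision to rewrite the md5 tracking file
}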

const { join } = require('path')
const { exists } = require('../../../cds').utils
const ProjectReader = require('../../util/projectReader');
const { readProject } = require('../../util/projectReader')
const { copyFiles } = require('../../util/templateUtil')
const { OPTION_CF_MANIFEST } = require('../../constants')
const MANIFEST_FILE = 'manifest.yml'
const SERVICES_MANIFEST_FILE = 'services-manifest.yml'
module.exports = class ManifestTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}
module.exports = class CfManifestTemplate extends require('../templateBase') {
async canRun() {
if (!this.options.add.has(OPTION_CF_MANIFEST)) {
return false;
}
if (this.options.force) {
return true;
}
if (exists(join(this.projectPath, MANIFEST_FILE)) || exists(join(this.projectPath, SERVICES_MANIFEST_FILE))) {
if (exists(MANIFEST_FILE) || exists(SERVICES_MANIFEST_FILE)) {
throw new Error(`File ${MANIFEST_FILE} or ${SERVICES_MANIFEST_FILE} already exist in current folder. Use --force to overwrite.`);

@@ -31,5 +21,5 @@ }

async run() {
const projectDescriptor = await this.projectReader.read()
const projectDescriptor = await readProject()
await copyFiles(join(__dirname, 'files'), this.projectPath, projectDescriptor, this.options.force);
}
}
const { join } = require('path')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeYAML } = require('../../util/merge')

@@ -7,6 +7,2 @@ const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')

module.exports = class ConnectivityTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}

@@ -18,3 +14,3 @@ async run() {

async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { hasHelm } = projectDescriptor.cap

@@ -36,3 +32,2 @@

{ additions: [{
ref: `srv-additional-volumes`,
in: `srv.additionalVolumes`,

@@ -47,5 +42,3 @@ where: [{

const templatesPath = join(this.projectPath, 'chart', 'templates')
await copyAndTrack(join(filesPath, 'connectivity-binding.yaml'), join(templatesPath, 'connectivity-binding.yaml'), context)
await copyAndTrack(join(filesPath, 'connectivity-proxy-info.yaml'), join(templatesPath, 'connectivity-proxy-info.yaml'), context)
await copyAndTrack(join(filesPath, 'connectivity.yaml'), join(templatesPath, 'connectivity.yaml'), context)
if(isIndependentCommand) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData)

@@ -52,0 +45,0 @@ }

@@ -9,5 +9,2 @@

module.exports = class DataTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
}

@@ -25,3 +22,2 @@ async run() {

cds.root = this.projectPath;
let csn = await cds.compile.to.csn(cds.env.roots); // normal CSN

@@ -28,0 +24,0 @@

const { join } = require('path')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeYAML } = require('../../util/merge')
const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { parseMd5File, writeMd5File } = require('../../util/md5Tracking')
module.exports = class DestinationsTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}
async canRun() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { hasMta, hasHelm } = projectDescriptor.cap

@@ -31,3 +27,3 @@ if (!hasHelm && hasMta) {

async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { hasHelm, hasDestination, hasHtml5Repo } = projectDescriptor.cap

@@ -37,3 +33,3 @@

//const env = await this.getEnv()
const binding = hasDestination && { srv: { bindings: { destination: { serviceInstanceName: 'destinations' }}}}
const binding = hasDestination && { srv: { bindings: { destination: { serviceInstanceName: 'destination' }}}}
const html5Binding = hasHtml5Repo && { 'html5-apps-deployer': { bindings: { destination: { serviceInstanceName: 'destination' }}}}

@@ -63,3 +59,22 @@

)
await copyAndTrack(join(__dirname, 'files', 'destination.yaml'), join(this.projectPath, 'chart', 'templates', 'destination.yaml'), context)
// copy service instance chart if it is not present
const HelmTemplate = require(`../helm`);
const helmTemplate = new HelmTemplate(this.generator, context.oldTrackingData, context.newTrackingData);
isIndependentCommand = isIndependentCommand | await helmTemplate.addSubChart('service-instance');
// add dependency entry in chart.yaml
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'destination.yaml'),
projectDescriptor,
{ additions: [{
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'destination'
}]
}]}
)
if(isIndependentCommand) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData)

@@ -66,0 +81,0 @@ }

const { join } = require('path')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeJSON, mergeYAML } = require('../../util/merge')
const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { srvNode, srvJava, enterpriseMessaging } = require('../_merging/registry-mta')
module.exports = class EnterpriseMessagingTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}
static hasFacet(env) {
return env.requires?.messaging === 'enterprise-messaging'
return env.requires?.messaging?.kind === 'enterprise-messaging-http'
}
async run() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { for: forProfile, isNodejs } = projectDescriptor.cap

@@ -25,2 +22,3 @@ const templatePath = join(__dirname, 'files')

await mergeJSON(configPath, cdsTemplatePath, projectDescriptor)
await mergeJSON('event-mesh.json', join(__dirname, 'files', 'event-mesh.json.hbs'), projectDescriptor)
await this.runDependentMerging()

@@ -30,10 +28,9 @@ }

async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { hasHelm, hasMta, isNodejs } = projectDescriptor.cap
if (hasMta) {
const mtaYAMLPath = join(this.projectPath, 'mta.yaml')
const srv = isNodejs ? srvNode : srvJava
await mergeYAML(
mtaYAMLPath,
'mta.yaml',
join(__dirname, 'files', 'mta.yaml.hbs'),

@@ -49,7 +46,2 @@ projectDescriptor,

)
await mergeJSON(
join(this.projectPath, 'event-mesh.json'),
join(__dirname, 'files', 'event-mesh.json.hbs'),
projectDescriptor
)
}

@@ -65,2 +57,3 @@

}
await mergeYAML(

@@ -70,7 +63,22 @@ join(this.projectPath, 'chart', 'values.yaml'),

)
await copyAndTrack(
join(__dirname, 'files', 'event-mesh.yaml'),
join(this.projectPath, 'chart', 'templates', 'event-mesh.yaml'),
context
// copy service instance chart if it is not present
const HelmTemplate = require(`../helm`);
const helmTemplate = new HelmTemplate(this.generator, context.oldTrackingData, context.newTrackingData);
isIndependentCommand = isIndependentCommand | await helmTemplate.addSubChart('service-instance');
// add dependency entry in chart.yaml
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'event-mesh.yaml'),
projectDescriptor,
{ additions: [{
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'event-mesh'
}]
}]}
)
if(isIndependentCommand) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData)

@@ -77,0 +85,0 @@ }

@@ -0,10 +1,13 @@

const cds = require('../../../cds')
const { read, write, exists } = cds.utils
const { join } = require('path')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeJSON } = require('../../util/merge')
const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { OPTION_MULTITENANCY } = require('../../constants')
module.exports = class ExtensibilityTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
getDependencies() {
return exists('pom.xml') ? [OPTION_MULTITENANCY] : [] // REVISIT: Remove this dependency
}

@@ -17,16 +20,24 @@

async run() {
const projectDescriptor = await this.projectReader.read(this.options)
const { for: forProfile, isJava } = projectDescriptor.cap
const projectDescriptor = await readProject(this.options)
const { for: forProfile, isJava, hasSidecar } = projectDescriptor.cap
const cdsConfigPath = join(this.projectPath, isJava ? '.cdsrc.json' : 'package.json')
const cdsTemplateFile = forProfile ? (isJava ? 'cdsrc.json.hbs' : 'cds.package.json.hbs') : (isJava ? 'cdsrc.json' : 'cds.package.json')
await mergeJSON(cdsConfigPath, join(__dirname, 'files', cdsTemplateFile), projectDescriptor)
if (hasSidecar) {
const out = join('mtx', 'sidecar', 'package.json')
const config = await read(out)
if (!('cds.xt.ExtensibilityService' in config.cds.requires)) {
config.cds.requires['cds.xt.ExtensibilityService'] = true
await write(out, config, { spaces: 2 })
}
}
await this.runDependentMerging()
}
async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read()
const { hasXsuaa, hasHelm, hasApprouter } = projectDescriptor.cap
async runDependentMerging() {
const projectDescriptor = await readProject()
const { hasXsuaa, hasApprouter } = projectDescriptor.cap
if (hasXsuaa) {
await mergeJSON(
join(this.projectPath, 'xs-security.json'),
'xs-security.json',
join(__dirname, 'files', 'xs-security.json.hbs'),

@@ -36,11 +47,8 @@ projectDescriptor,

additions: [{
ref: 'scope-extension-developer',
in: 'scopes',
where: [{ property: 'name', isEqualTo: '$XSAPPNAME.cds.ExtensionDeveloper' }],
}, {
ref: 'scope-uiflex',
in: 'scopes',
where: [{ property: 'name', isEqualTo: '$XSAPPNAME.cds.UIFlexDeveloper' }],
}, {
ref: 'template-extension-developer',
in: 'role-templates',

@@ -54,22 +62,6 @@ where: [{ property: 'name', isEqualTo: 'ExtensionDeveloper' }],

const ApprouterTemplate = require(`../approuter`)
const approuterTemplate = new ApprouterTemplate(this.projectPath, this.generator)
const approuterTemplate = new ApprouterTemplate(this.generator)
await approuterTemplate.runDependentMerging()
}
if (hasXsuaa && hasHelm) {
//in case facet is being added to the already exisiting charts folder
let isIndependentCommand = false
if(Object.keys(context).length == 0){
isIndependentCommand = true
context.projectPath = this.projectPath
context.oldTrackingData = context.newTrackingData = await parseMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'))
}
// REVISIT: Shared xs-security.json location for Helm and MTA?
await copyAndTrack(
join(this.projectPath, 'xs-security.json'),
join(this.projectPath, 'chart', 'xs-security.json'),
context
)
if(isIndependentCommand) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData)
}
}
}

@@ -8,5 +8,5 @@ const { join } = require('path');

const { mergeJSON, mergeYAML, removeFromYAML, removeFromYAMLArray } = require('../../util/merge')
const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { copyFiles } = require('../../util/templateUtil')
const ProjectReader = require('../../util/projectReader');
const { readProject } = require('../../util/projectReader');
const versionCompare = require('../../util/versionCompare');

@@ -19,6 +19,2 @@

module.exports = class HanaTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
this.projectReader = new ProjectReader(projectPath);
}

@@ -48,3 +44,3 @@ static hasFacet(env) {

const projectType = await this.getProjectType();
const projectType = this.getProjectType();
switch (projectType) {

@@ -91,5 +87,5 @@ case PROJECT_TYPE.java: {

}
async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { hasMta, hasHelm, hasHtml5Repo, isNodejs, isJava, isMultitenant } = projectDescriptor.cap

@@ -143,3 +139,3 @@

mtaYAMLPath,
`${__dirname}/files/mta.yaml.hbs`,
join(__dirname, 'files', 'mta.yaml.hbs'),
projectDescriptor,

@@ -186,2 +182,17 @@ { additions: [...modules, db].filter(a => a), deletions, relationships }

}
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'hana.yaml'),
projectDescriptor,
{ deletions: [{
item: {
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'hana'
}]
}
}]}
)
} else {

@@ -193,14 +204,7 @@ await mergeYAML(

)
await copyAndTrack(
join(__dirname, 'files', 'hana.yaml'),
join(this.projectPath, 'chart', 'templates', 'hana.yaml'),
context
)
// copy content deployment chart if it is not present
const contentDeploymentSubchartPath = join(this.projectPath, 'chart', 'charts', 'content-deployment');
if(!exists(contentDeploymentSubchartPath)) {
const contentDeploymentResourcePath = join(__dirname, '..', 'helm', 'subcharts', 'content-deployment');
await copyAndTrack(contentDeploymentResourcePath, contentDeploymentSubchartPath, context);
}
const HelmTemplate = require(`../helm`);
const helmTemplate = new HelmTemplate(this.generator, context.oldTrackingData, context.newTrackingData);
shouldUpdateTrackingFile = shouldUpdateTrackingFile | await helmTemplate.addSubChart('content-deployment');

@@ -210,6 +214,5 @@ // add dependency entry in chart.yaml

join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'chart.yaml'),
join(__dirname, 'files', 'hana-deployer.yaml'),
projectDescriptor,
{ additions: [{
ref: `content-deployment-hana-deployer`,
in: `dependencies`,

@@ -222,2 +225,19 @@ where: [{

)
// add service instance chart if it is not present
shouldUpdateTrackingFile = shouldUpdateTrackingFile | await helmTemplate.addSubChart('service-instance');
// add dependency entry in chart.yaml
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'hana.yaml'),
projectDescriptor,
{ additions: [{
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'hana'
}]
}]}
)
}

@@ -260,6 +280,6 @@ if(shouldUpdateTrackingFile) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData);

async _updateCdsConfiguration(configPath) {
const projectType = await this.getProjectType();
const projectType = this.getProjectType();
const configFile = projectType === PROJECT_TYPE.nodejs ? 'package.json' : '.cdsrc.json';
configPath = configPath ?? join(this.projectPath, configFile)
const projectDescriptor = await this.projectReader.read(this.options);
const projectDescriptor = await readProject(this.options);
const { for: forProfile } = projectDescriptor.cap;

@@ -266,0 +286,0 @@ let json; try { json = await read(configPath, 'utf8') } catch { /* ignore */ }

@@ -24,4 +24,44 @@ {

"$ref": "./charts/content-deployment/values.schema.json"
},
"event-mesh": {
"$id": "#/properties/event-mesh",
"description": "Service Instance",
"$ref": "./charts/service-instance/values.schema.json"
},
"html5-apps-repo-host": {
"$id": "#/properties/html5-apps-repo-host",
"description": "Service Instance",
"$ref": "./charts/service-instance/values.schema.json"
},
"xsuaa": {
"$id": "#/properties/xsuaa",
"description": "Service Instance",
"$ref": "./charts/service-instance/values.schema.json"
},
"auditlog": {
"$id": "#/properties/auditlog",
"description": "Service Instance",
"$ref": "./charts/service-instance/values.schema.json"
},
"destination": {
"$id": "#/properties/destination",
"description": "Service Instance",
"$ref": "./charts/service-instance/values.schema.json"
},
"hana": {
"$id": "#/properties/hana",
"description": "Service Instance",
"$ref": "./charts/service-instance/values.schema.json"
},
"saas-registry": {
"$id": "#/properties/saas-registry",
"description": "Service Instance",
"$ref": "./charts/service-instance/values.schema.json"
},
"service-manager": {
"$id": "#/properties/service-manager",
"description": "Service Instance",
"$ref": "./charts/service-instance/values.schema.json"
}
}
}

@@ -57,2 +57,34 @@ {

},
"event-mesh": {
"$id": "#/properties/event-mesh",
"description": "Service Instance"
},
"html5-apps-repo-host": {
"$id": "#/properties/html5-apps-repo-host",
"description": "Service Instance"
},
"xsuaa": {
"$id": "#/properties/xsuaa",
"description": "Service Instance"
},
"auditlog": {
"$id": "#/properties/auditlog",
"description": "Service Instance"
},
"destination": {
"$id": "#/properties/destination",
"description": "Service Instance"
},
"hana": {
"$id": "#/properties/hana",
"description": "Service Instance"
},
"saas-registry": {
"$id": "#/properties/saas-registry",
"description": "Service Instance"
},
"service-manager": {
"$id": "#/properties/service-manager",
"description": "Service Instance"
},
"backendDestinations": {

@@ -80,71 +112,2 @@ "$id": "#/properties/backendDestinations",

}
},
"auditlog": {
"$id": "#/properties/auditlog",
"$ref": "#/definitions/ServiceInstance"
},
"destinations": {
"$id": "#/properties/destinations",
"$ref": "#/definitions/ServiceInstance"
},
"connectivity": {
"type": "object",
"additionalProperties": false,
"required": [
"serviceOfferingName",
"servicePlanName",
"configMapName",
"secretName"
],
"properties": {
"fullnameOverride": {
"type": "string",
"pattern": "[0-9a-z][0-9a-z-.]*",
"maxLength": 63,
"description": "If present then this will be used instead of the generated name"
},
"enabled": {
"type": "boolean",
"default": true,
"description": "Service instance will be created (default: true)"
},
"serviceOfferingName": {
"type": "string",
"default": "connectivity",
"description": "Technical service offering name from service catalog"
},
"servicePlanName": {
"type": "string",
"default": "connectivity_proxy",
"description": "Technical service plan name from service catalog"
},
"parameters": {
"type": "object",
"description": "Some services support the provisioning of additional configuration parameters during the instance creation. For the list of supported parameters, check the documentation of the particular service offering."
},
"config": {
"type": "string",
"description": "File name of JSON configuration file in chart folder. Values from the file will be copied and placed in the parameters for the service instance. If both parameters and config are specified, the values in parameters override those read from config."
},
"configMapName": {
"type": "string",
"description": "Required to add additional connection information, compared to what is available from the service binding."
},
"secretName": {
"type": "string",
"description": "Required to add additional connection information."
}
}
},
"event_mesh": {
"$id": "#/properties/event_mesh",
"$ref": "#/definitions/ServiceInstance"
},
"html5_apps_repo_host": {
"$id": "#/properties/html5_apps_repo_host",
"$ref": "#/definitions/ServiceInstance"
},
"xsuaa": {
"$id": "#/properties/xsuaa",
"$ref": "#/definitions/ServiceInstance"
}

@@ -151,0 +114,0 @@ },

const { join } = require('path');
const { exists, rimraf } = require('../../../cds').utils;
const ProjectReader = require('../../util/projectReader');
const { readProject } = require('../../util/projectReader');
const { mergeYAML } = require('../../util/merge')

@@ -9,13 +9,12 @@ const { parseMd5File, writeMd5File, copyAndTrack } = require('../../util/md5Tracking')

module.exports = class HelmTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
this.projectReader = new ProjectReader(projectPath);
this.oldTrackingData = undefined;
this.newTrackingData = {};
constructor(generator, oldTrackingData = undefined, newTrackingData = {}) {
super(generator)
this.oldTrackingData = oldTrackingData;
this.newTrackingData = newTrackingData;
}
static hasFacet(_, projectPath, options) {
static hasFacet(_, options) {
// REVISIT: this works, but could be semantically nicer with a 'shouldRunDependentMerging' or the like
if (options?.add.has('mta') && !options?.add.has('helm')) return false
return exists(join(projectPath, 'chart', 'values.yaml'))
return exists(join('chart', 'values.yaml'))
}

@@ -31,12 +30,23 @@

async addSubChart(subchart) {
const subchartPath = join(this.projectPath, 'chart', 'charts', subchart);
if(!exists(subchartPath)) {
await copyAndTrack(
join(__dirname, 'subcharts', subchart),
subchartPath,
this
)
return true;
}
return false;
}
async run() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
this.oldTrackingData = await parseMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'));
await this.addSubChart('web-application');
await copyAndTrack(
join(__dirname, 'subcharts', 'web-application'),
join(this.projectPath, 'chart', 'charts', 'web-application'),
this
)
await copyAndTrack(
join(__dirname, 'chart'),

@@ -67,6 +77,6 @@ join(this.projectPath, 'chart'),

const Template = require('../'+facet)
const template = new Template(this.projectPath, this.generator)
if (Template.hasFacet(await this.getEnv(), this.projectPath, this.options)) await template.runDependentMerging(this)
const template = new Template(this.generator)
if (Template.hasFacet(await this.getEnv(), this.options)) await template.runDependentMerging(this)
}
}
};
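The new `HelmTemplate.addSubChart` centralizes the copy-only-if-missing handling of subcharts that the hana, html5-repo, multitenancy and xsuaa templates previously inlined, and returns whether anything was copied. A minimal sketch of how a dependent template might use it, assuming the constructor shape shown above:

// Sketch: reuse HelmTemplate.addSubChart from a dependent template.
const HelmTemplate = require('../helm')

async function ensureSubcharts (generator, context) {
  const helm = new HelmTemplate(generator, context.oldTrackingData, context.newTrackingData)
  let changed = false
  changed = changed | await helm.addSubChart('content-deployment')  // copied only if missing
  changed = changed | await helm.addSubChart('service-instance')
  return Boolean(changed)  // caller then decides whether to rewrite the .md5 tracking file
}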

@@ -87,2 +87,8 @@ {

},
"serviceAccountName": {
"$id": "#/properties/serviceAccountName",
"title": "Service Account name",
"description": "Name of the Service Account assigned to pods.",
"type": "string"
},
"image": {

@@ -228,3 +234,3 @@ "$id": "#/properties/image",

"description": "Name of a ConfigMap.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -244,3 +250,3 @@ "key": {

"description": "Name of a Secret.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -260,3 +266,3 @@ "key": {

"description": "Name of a Container.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -313,3 +319,3 @@ "resource": {

"description": "Name of a ConfigMap.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -329,3 +335,3 @@ "key": {

"description": "Name of a Secret.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -345,3 +351,3 @@ "key": {

"description": "Name of a Container.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -398,7 +404,2 @@ "resource": {

"type": "string"
},
"tpl": {
"description": "Flag which tells whether the name is a template string or not.",
"type": "boolean",
"default": false
}

@@ -422,7 +423,2 @@ },

"type": "string"
},
"tpl": {
"description": "Flag which tells whether the name is a template string or not.",
"type": "boolean",
"default": false
}

@@ -466,3 +462,3 @@ },

"description": "Name of a Kubernetes Secret, with the binding content, compliant to the SAP Kubernetes Service Binding spec https://github.tools.sap/Kubernetes-Service-Bindings/doc/",
"$ref": "#/definitions/KubernetesName"
"type": "string"
}

@@ -477,7 +473,7 @@ }

"description": "Name of a BTP Operator Service Instance, created by the `service-instance` Helm chart. Can't be used with the `serviceInstanceFullname` option.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},
"serviceInstanceFullname": {
"description": "Full name of a BTP Operator Service Instance. Can't be used with the `serviceInstanceName` option.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -490,3 +486,3 @@ "externalName": {

"description": "The name of the secret where the credentials are stored.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -517,3 +513,3 @@ "parameters": {

"description": "Name of a Secret.",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -543,3 +539,3 @@ "key": {

"description": "Name of a Config Map",
"$ref": "#/definitions/KubernetesName"
"type": "string"
},

@@ -546,0 +542,0 @@ "key": {

const { join } = require('path')
const { exists } = require('../../../cds').utils
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeYAML } = require('../../util/merge')

@@ -9,7 +9,4 @@ const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')

module.exports = class Html5RepoTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}
getDependencies() {

@@ -20,3 +17,3 @@ return [OPTION_DESTINATIONS]

async canRun() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { hasMta, hasHelm } = projectDescriptor.cap

@@ -29,5 +26,5 @@ if (!hasHelm && hasMta) {

static hasFacet(_, projectPath) {
static hasFacet() {
// REVISIT: Should be detectable without helm charts
return exists(join(projectPath, 'chart', 'templates', 'html5-apps-repo-host.yaml'))
return exists(join('chart', 'templates', 'html5-apps-deployer-configmap.yaml'))
}

@@ -40,3 +37,3 @@

async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { appName, hasHelm } = projectDescriptor.cap

@@ -55,3 +52,3 @@

{
html5_apps_repo_host: {
'html5-apps-repo-host': {
serviceOfferingName: 'html5-apps-repo',

@@ -67,4 +64,3 @@ servicePlanName: 'app-host'

configMapRef: {
name: "{{ .Release.Name }}-html5-apps-deployer-configmap",
tpl: true
name: "{{ .Release.Name }}-html5-apps-deployer-configmap"
}

@@ -90,9 +86,7 @@ }

)
// copy content deployment chart if it is not present
const contentDeploymentSubchartPath = join(this.projectPath, 'chart', 'charts', 'content-deployment');
if(!exists(contentDeploymentSubchartPath)) {
const contentDeploymentResourcePath = join(__dirname, '..', 'helm', 'subcharts', 'content-deployment');
await copyAndTrack(contentDeploymentResourcePath, contentDeploymentSubchartPath, context);
}
const HelmTemplate = require(`../helm`);
const helmTemplate = new HelmTemplate(this.generator, context.oldTrackingData, context.newTrackingData);
isIndependentCommand = isIndependentCommand | await helmTemplate.addSubChart('content-deployment');

@@ -102,6 +96,5 @@ // add dependency entry in chart.yaml

join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'chart.yaml'),
join(__dirname, 'files', 'html5-apps-deployer.yaml'),
projectDescriptor,
{ additions: [{
ref: `content-deployment-html5-apps-deployer`,
in: `dependencies`,

@@ -115,4 +108,20 @@ where: [{

// copy service-instance chart
isIndependentCommand = isIndependentCommand | await helmTemplate.addSubChart('service-instance');
// add dependency entry in chart.yaml
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'html5-apps-repo-host.yaml'),
projectDescriptor,
{ additions: [{
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'html5-apps-repo-host'
}]
}]}
)
await copyAndTrack(join(__dirname, 'files', 'html5-apps-deployer-configmap.yaml'), join(this.projectPath, 'chart', 'templates', 'html5-apps-deployer-configmap.yaml'), context)
await copyAndTrack(join(__dirname, 'files', 'html5-apps-repo-host.yaml'), join(this.projectPath, 'chart', 'templates', 'html5-apps-repo-host.yaml'), context)
if(isIndependentCommand) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData)

@@ -119,0 +128,0 @@ }

@@ -15,8 +15,5 @@ const fs = require('fs').promises;

module.exports = class JavaTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
}
static hasFacet(_, projectPath) {
return exists(join(projectPath, 'pom.xml'))
static hasFacet() {
return exists('pom.xml')
}

@@ -23,0 +20,0 @@

const { join } = require('path')
const cds = require('../../../cds')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeJSON, mergeYAML } = require('../../util/merge')

@@ -8,10 +8,7 @@ const { PROJECT_TYPE, } = require('../../constants')

module.exports = class KibanaTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}
module.exports = class KibanaLoggingTemplate extends require('../templateBase') {
async canRun() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { hasMta, hasHelm } = projectDescriptor.cap

@@ -25,3 +22,3 @@ if (hasHelm && !hasMta) {

async run() {
const projectType = await this.getProjectType()
const projectType = this.getProjectType()

@@ -35,3 +32,3 @@ switch (projectType) {

case PROJECT_TYPE.nodejs: {
const projectDescriptor = await this.projectReader.read(this.options);
const projectDescriptor = await readProject(this.options);
const { for: forProfile } = projectDescriptor.cap

@@ -47,3 +44,3 @@ const cdsTemplateFile = forProfile ? 'cds.profile.package.json.hbs' : 'cds.package.json'

async runDependentMerging() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { hasMta, isNodejs } = projectDescriptor.cap

@@ -50,0 +47,0 @@

@@ -22,5 +22,4 @@ const os = require("os");

module.exports = class LintTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
this.projectPath = projectPath;
constructor(generator) {
super(generator);
this.customPath = path.join(this.projectPath, ".eslint");

@@ -27,0 +26,0 @@ this.docsPath = path.join(this.customPath, "docs");

const { join } = require('path');
const { exists } = require('../../../cds').utils
const { copyFiles } = require('../../util/templateUtil')
const ProjectReader = require('../../util/projectReader');
const { readProject } = require('../../util/projectReader');
module.exports = class MtaTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
this.projectReader = new ProjectReader(projectPath);
}
static hasFacet(_, projectPath, options) {
static hasFacet(_, options) {
if (!options?.add.has('mta') && options?.add.has('helm')) return false
return exists(join(projectPath, 'mta.yaml'))
return exists('mta.yaml')
}
async run() {
const projectDescriptor = await this.projectReader.read()
if (!exists(join(this.projectPath, 'mta.yaml'))) {
const projectDescriptor = await readProject()
if (!exists('mta.yaml')) {
await copyFiles(join(__dirname, 'files'), this.projectPath, projectDescriptor);

@@ -27,6 +23,6 @@ }

const Template = require('../'+facet)
const template = new Template(this.projectPath, this.generator)
if (Template.hasFacet(await this.getEnv(), this.projectPath, this.options)) await template.runDependentMerging()
const template = new Template(this.generator)
if (Template.hasFacet(await this.getEnv(), this.options)) await template.runDependentMerging()
}
}
}

@@ -1,9 +0,4 @@

const ProjectReader = require('../../util/projectReader')
const { OPTION_MULTITENANCY, OPTION_TOGGLES, OPTION_EXTENSIBILITY } = require('../../constants')
module.exports = class MtxTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}

@@ -10,0 +5,0 @@ getDependencies() {

{
"name": "cap-mtx-sidecar",
"dependencies": {
"@sap/cds": "^6.3.1",
"@sap/cds-mtxs": "^1.3.1",
"@sap/hdi-deploy": "^4.5.1",
"@sap/xssec": "^3.2.14",
"express": "^4.18.2",
"hdb": "^0.19.5",
"@sap/cds": "^6",
"@sap/cds-mtxs": "^1",
"@sap/xssec": "^3",
"express": "^4",
"hdb": "^0",
"passport": "^0.6.0"
},
"devDependencies": {
"sqlite3": "^5"
},
"scripts": {

@@ -18,3 +20,10 @@ "start": "cds run",

"requires": {
"db": "sql-mt",
"[production]": {
"db": "hana-mt",
"auth": "xsuaa"
},
"[development]": {
"db": "sqlite",
"auth": "dummy"
},
"cds.xt.ModelProviderService": "in-sidecar",

@@ -21,0 +30,0 @@ "cds.xt.DeploymentService": true,

const { join } = require('path')
const cds = require('../../../cds')
const { copy, exists } = cds.utils
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeJSON, mergeYAML, sortDependencies } = require('../../util/merge')

@@ -11,20 +11,16 @@ const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')

saasRegistry, serviceManager, xsuaa, // BTP Services
providedMtxAPI, srvAPI, providedMtxSidecarAPI, requiredMtxSidecarAPI // APIs
providedMtxAPISrv, providedMtxAPISidecar, srvAPI, providedMtxSidecarAPI, requiredMtxSidecarAPI // APIs
} = require('../_merging/registry-mta');
module.exports = class MultitenancyTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}
static hasFacet(env, projectPath, options) {
static hasFacet(env, options) {
// REVISIT: Check if this can be simplified
return options?.add?.has('mtx') || options?.add?.has('multitenancy') ||
!!env.requires?.multitenancy || !!env.requires?.db?.multiTenant ||
this.isJava && exists(join(projectPath, 'mtx', 'sidecar')) || false
this.isJava && exists(join('mtx', 'sidecar')) || false
}
async run() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { for: forProfile, isNodejs, isJava } = projectDescriptor.cap

@@ -46,3 +42,3 @@ if (isNodejs) {

async runDependentMerging(context = {}) {
const projectDescriptor = await this.projectReader.read()
const projectDescriptor = await readProject()
const { isNodejs, isJava, hasMta, hasHelm, hasApprouter, hasXsuaa, hasHana } = projectDescriptor.cap

@@ -55,3 +51,3 @@

if (hasXsuaa) services.push(xsuaa)
const apis = isNodejs ? [providedMtxAPI] : [srvAPI, providedMtxAPI, providedMtxSidecarAPI, requiredMtxSidecarAPI]
const apis = isNodejs ? [providedMtxAPISrv] : [srvAPI, providedMtxAPISidecar, providedMtxSidecarAPI, requiredMtxSidecarAPI]
const additions = [...modules, ...services, ...apis]

@@ -112,2 +108,12 @@

]
},
"saas-registry": {
parametersFrom: [
{
secretKeyRef: {
name: "{{ .Release.Name }}-saas-registry-secret",
key: "parameters"
}
}
]
}

@@ -121,7 +127,6 @@ },

join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'chart.yaml'),
join(__dirname, 'files', 'sidecar.yaml'),
projectDescriptor,
{
additions: [{
ref: `web-application-sidecar`,
in: `dependencies`,

@@ -137,2 +142,34 @@ where: [{

// copy service-instance chart
const HelmTemplate = require(`../helm`);
const helmTemplate = new HelmTemplate(this.generator, context.oldTrackingData, context.newTrackingData);
isIndependentCommand = isIndependentCommand | await helmTemplate.addSubChart('service-instance');
// add dependency entry in chart.yaml
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'saas-registry.yaml'),
projectDescriptor,
{ additions: [{
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'saas-registry'
}]
}]}
);
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'service-manager.yaml'),
projectDescriptor,
{ additions: [{
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'service-manager'
}]
}]}
);
const mtxsConfigmapPath = isNodejs

@@ -149,13 +186,9 @@ ? join(__dirname, 'files', 'mtxs-configmap.yaml')

// add saas registry secret
await copyAndTrack(
join(__dirname, 'files', 'service-manager.yaml'),
join(this.projectPath, 'chart', 'templates', 'service-manager.yaml'),
join(__dirname, 'files', 'saas-registry-secret.yaml'),
join(this.projectPath, 'chart', 'templates', 'saas-registry-secret.yaml'),
context
)
);
await copyAndTrack(
join(__dirname, 'files', 'saas-registry.yaml'),
join(this.projectPath, 'chart', 'templates', 'saas-registry.yaml'),
context
)
if (isIndependentCommand) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData)

@@ -166,3 +199,3 @@ }

const ApprouterTemplate = require(`../approuter`)
const template = new ApprouterTemplate(this.projectPath, this.generator)
const template = new ApprouterTemplate(this.generator)
await template.runDependentMerging(context)

@@ -173,3 +206,3 @@ }

const HanaTemplate = require(`../hana`)
const template = new HanaTemplate(this.projectPath, this.generator)
const template = new HanaTemplate(this.generator)
await template.runDependentMerging(context)

@@ -185,3 +218,2 @@ }

additions: [{
ref: 'scope-mtcallback',
in: 'scopes',

@@ -195,9 +227,2 @@ where: [{ property: 'name', isEqualTo: '$XSAPPNAME.mtcallback' }],

if (hasXsuaa && hasHelm) {
// REVISIT: Shared xs-security.json location for Helm and MTA?
await copyAndTrack(
join(this.projectPath, 'xs-security.json'),
join(this.projectPath, 'chart', 'xs-security.json'),
context
)
await mergeYAML(

@@ -204,0 +229,0 @@ join(this.projectPath, 'chart', 'values.yaml'),

@@ -7,6 +7,3 @@ const { join, relative } = require('path');

module.exports = class NodeJSTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
}
module.exports = class NodejsTemplate extends require('../templateBase') {

@@ -35,5 +32,5 @@ static hasFacet() {

await mkdirp(projectPath, env.folders.db);
await mkdirp(projectPath, env.folders.srv);
await mkdirp(projectPath, env.folders.app);
await mkdirp(env.folders.db);
await mkdirp(env.folders.srv);
await mkdirp(env.folders.app);
}

@@ -40,0 +37,0 @@

const term = require('../../../util/term');
module.exports = class NotebookTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
}
async canRun() {

@@ -9,0 +7,0 @@ console.log(term.warn('CAP Jupyter Notebooks have been replaced by Custom CAP Notebooks for VS Code!'));

@@ -8,6 +8,4 @@ const { join } = require('path');

module.exports = class PipelineTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
}
async canRun() {

@@ -18,4 +16,3 @@ if (this.options.force) {

if (exists(join(this.projectPath, 'Jenkinsfile')) ||
exists(join(this.projectPath, '.pipeline/config.yml'))) {
if (exists('Jenkinsfile') || exists(join('.pipeline', 'config.yml'))) {
throw new Error(`Pipeline support file exists. Use --force to overwrite.`);

@@ -22,0 +19,0 @@ }

@@ -9,8 +9,6 @@ const { join } = require('path');

module.exports = class SamplesTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname);
}
async run() {
switch (await this.getProjectType()) {
switch (this.getProjectType()) {
case PROJECT_TYPE.java:

@@ -53,14 +51,11 @@ return await this._addJavaSamples();

async _addNodejsSamples() {
const folders = (await this.getEnv()).folders;
const dbFolder = join(this.projectPath, folders.db);
const { db, srv } = (await this.getEnv()).folders;
const dbFolder = join(this.projectPath, db);
await copyFiles(join(__dirname, 'files', 'nodejs', 'db'), dbFolder, {}, this.options.force);
const srvFolder = join(this.projectPath, folders.srv);
const srvFolder = join(this.projectPath, srv);
await copyFiles(join(__dirname, 'files', 'nodejs', 'srv'), srvFolder, {
dbFolder: folders.db.replace(/[\\/]+$/, '')
dbFolder: db.replace(/[\\/]+$/, '')
}, this.options.force);
}
async finalize() {
}
}

@@ -11,15 +11,14 @@ const cds = require('../../cds');

*/
constructor(projectPath, generator, dirName) {
if (generator.options && generator.options.for && typeof generator.options.for !== 'string') {
constructor(generator) {
if (generator.options?.for && typeof generator.options?.for !== 'string') {
throw new Error('The --for argument must not be empty.')
}
this.projectPath = path.resolve(projectPath);
this.projectPath = cds.root;
this.options = generator.options || {};
this.projectPath = path.resolve(projectPath);
this.generator = generator;
this.name = this.constructor.name.replace(/template/i, '').replace(/([a-z0-9])([A-Z])/g, '$1-$2').toLowerCase()
this.cwd = this.options.cwd || process.cwd();
this.name = (dirName ? path.basename(dirName) : this.constructor.name.replace(/template/i, '').toLowerCase());
this.projectName = path.basename(this.projectPath);
this.projectName = path.basename(cds.root);
}

@@ -44,3 +43,3 @@

// eslint-disable-next-line no-unused-vars
static hasFacet(env, projectPath, options) {
static hasFacet(env, options) {
return true;

@@ -57,3 +56,3 @@ }

try {
return cds.env.for('cds', exists(this.projectPath) ? this.projectPath : this.cwd);
return cds.env.for('cds');
} finally {

@@ -99,13 +98,7 @@ cdsEnvVar ? process.env.CDS_ENV = cdsEnvVar : delete process.env.CDS_ENV

*/
async getProjectType() {
if (exists(path.join(this.projectPath, 'pom.xml'))) {
return PROJECT_TYPE.java;
}
if (exists(path.join(this.projectPath, 'package.json'))) {
return PROJECT_TYPE.nodejs;
}
getProjectType() {
if (exists('pom.xml')) return PROJECT_TYPE.java;
if (exists('package.json')) return PROJECT_TYPE.nodejs;
return PROJECT_TYPE.unknown;
}
}
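`getProjectType` is now synchronous and probes marker files with project-relative paths. The sketch below spells out the same decision with explicit path resolution so it runs standalone; that the template's relative `exists` calls resolve against `cds.root` is an assumption suggested by the surrounding changes, not stated here.

// Rough standalone equivalent of the simplified getProjectType().
const { existsSync } = require('fs')
const { join } = require('path')

function getProjectType (root = process.cwd()) {
  if (existsSync(join(root, 'pom.xml'))) return 'java'        // Maven build file => CAP Java
  if (existsSync(join(root, 'package.json'))) return 'nodejs' // plain Node.js project
  return 'unknown'
}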
const { join } = require('path')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeJSON, sortDependencies } = require('../../util/merge')
module.exports = class TogglesTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}

@@ -16,3 +12,3 @@ static hasFacet(env) {

async run() {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { for: forProfile, isNodejs, isJava } = projectDescriptor.cap

@@ -19,0 +15,0 @@ if (isNodejs) {

const { join } = require('path')
const ProjectReader = require('../../util/projectReader')
const { readProject } = require('../../util/projectReader')
const { mergeJSON, mergeYAML } = require('../../util/merge')
const { copyAndTrack, parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { parseMd5File, writeMd5File } = require('../../util/md5Tracking')
const { srvNode, srvJava, xsuaa, mtxSidecar } = require('../_merging/registry-mta')

@@ -9,6 +9,2 @@ const { PROJECT_TYPE } = require('../../constants')

module.exports = class XsuaaTemplate extends require('../templateBase') {
constructor(projectPath, generator) {
super(projectPath, generator, __dirname)
this.projectReader = new ProjectReader(projectPath)
}

@@ -22,4 +18,4 @@ static hasFacet(env) {

const templatePath = join(__dirname, 'files')
const projectDescriptor = await this.projectReader.read(this.options)
const projectType = await this.getProjectType();
const projectDescriptor = await readProject(this.options)
const projectType = this.getProjectType();
const { for: forProfile } = projectDescriptor.cap

@@ -45,3 +41,3 @@

async runDependentMerging( context = {}) {
const projectDescriptor = await this.projectReader.read(this.options)
const projectDescriptor = await readProject(this.options)
const { isMultitenant, isExtensible, hasMta, hasHelm, isNodejs, isJava } = projectDescriptor.cap

@@ -56,3 +52,2 @@

const mergingSemantics = { additions: xsSecurity.scopes.map(scope => ({
ref: scope.name,
in: 'scopes',

@@ -84,3 +79,3 @@ where: [{

mtaYAMLPath,
`${__dirname}/files/mta.yaml.hbs`,
join(__dirname, 'files', 'mta.yaml.hbs'),
projectDescriptor,

@@ -102,7 +97,22 @@ {

}
await copyAndTrack(
// copy service-instance chart
const HelmTemplate = require(`../helm`);
const helmTemplate = new HelmTemplate(this.generator, context.oldTrackingData, context.newTrackingData);
isIndependentCommand = isIndependentCommand | await helmTemplate.addSubChart('service-instance');
// add dependency entry in chart.yaml
await mergeYAML(
join(this.projectPath, 'chart', 'Chart.yaml'),
join(__dirname, 'files', 'xsuaa.yaml'),
join(this.projectPath, 'chart', 'templates', 'xsuaa.yaml'),
context
)
projectDescriptor,
{ additions: [{
in: `dependencies`,
where: [{
property: 'alias',
isEqualTo: 'xsuaa'
}]
}]}
);
await mergeYAML(

@@ -113,8 +123,2 @@ join(this.projectPath, 'chart', 'values.yaml'),

)
// REVISIT: Shared xs-security.json location for Helm and MTA?
await copyAndTrack(
join(this.projectPath, 'xs-security.json'),
join(this.projectPath, 'chart', 'xs-security.json'),
context
)
if(isIndependentCommand) await writeMd5File(join(this.projectPath, 'chart', '.cds-add-helm-files.md5'), context.newTrackingData);

@@ -128,3 +132,3 @@ }

const MultitenancyTemplate = require(`../multitenancy`)
const multitenancyTemplate = new MultitenancyTemplate(this.projectPath, this.generator)
const multitenancyTemplate = new MultitenancyTemplate(this.generator)
await multitenancyTemplate.runDependentMerging(context)

@@ -135,3 +139,3 @@ }

const ExtensibilityTemplate = require(`../extensibility`)
const extensibilityTemplate = new ExtensibilityTemplate(this.projectPath, this.generator)
const extensibilityTemplate = new ExtensibilityTemplate(this.generator)
await extensibilityTemplate.runDependentMerging(context)

@@ -138,0 +142,0 @@ }

@@ -30,82 +30,1 @@

}
/**
* This is a helper interface to provide support for different file system implementations.
* Depending on the scenario different file systems will be used, e.g. Yeoman vs. native FS.
*/
export interface FsUtil {
/**
* Writes the given string into a file.
* @param filepath the absolute file path
* @param content the string to serialize to JSON
*/
writeFile(filePath: string, content: string, skipLogFile?: boolean): Promise<void>;
/**
* Writes the given object as JSON into a file.
* @param filepath the absolute file path
* @param object the object to serialize to JSON
*/
writeJSON(filepath: string, object: any, options?: any): Promise<void>;
/**
* Writes YAML object
* @param filepath the absolute file path
* @param yaml the yaml object
* @param skipFileLog do not log files if true
*/
writeYAML(filepath: string, yaml: any, skipFileLog?: boolean): Promise<void>;
/**
* Reads a file from given path.
* @param filePath the absolute file path
* @param options optional read options
* @returns the file content as string
*/
readFile(filePath: string, options?: any): Promise<string>;
/**
* Reads a JSON object from a file.
* @param filePath the absolute file path
* @param projectDescriptor a project descriptor
* @returns the file content as object
*/
readJSON(filePath: string, projectDescriptor: any): Promise<any>;
/**
* Reads a JSON object from a file.
* @param filePath the absolute file path
* @returns the file content as object
*/
readJSONC(filePath: string): Promise<any>;
/**
* Reads a JSON object from a file.
* @param filePath the absolute file path
* @param projectDescriptor a project descriptor
* @returns the file content as yaml object
*/
readYAML(filePath: string, projectDescriptor: any): Promise<any>;
/**
* Copies a file or folder from given source to destination.
* @param source the source path
* @param destination the destination path
* @param options optional copy options
* @param skipFileLog do not log files if true
*/
copy(source: string, destination: string, options?: any, skipFileLog?: boolean): Promise<void>;
}
/**
* Helper for managing npm related tasks
* @deprecated
*/
export interface NpmUtil {
/**
* Installs npm packages
* @param cwd working folder
* @param options npm install options, e.g. force
*/
install(cwd?: string, options?: any): Promise<void>
}

@@ -79,7 +79,7 @@ const YAML = require('@sap/cds-foss').yaml

additions?.forEach(existence => {
existenceMap.set(existence.ref, undefined)
templateExistenceMap.set(existence.ref, undefined)
existenceMap.set(existence, undefined)
templateExistenceMap.set(existence, undefined)
})
deletions?.forEach(deletion => {
existenceMap.set(deletion.item.ref, undefined)
existenceMap.set(deletion.item, undefined)
})

@@ -89,4 +89,4 @@ relationships?.forEach(relationship => {

const [insertExistence] = relationship.insert
const ref = insertExistence.ref + ' -> ' + existence.ref
existenceMap.set(ref, undefined)
const hash = insertExistence + ' -> ' + existence
existenceMap.set(hash, undefined)
})

@@ -104,8 +104,8 @@

else where = where[0] // a more complex list of constraints, a sequence/array is involved
const neededParent = dict.get(where.ref)?.node
const neededParent = dict.get(where)?.node
return collectionStack.includes(neededParent) // lookbehind in parent collection stack
// REVISIT: Only look behind until sequence is reached?
}).forEach(({ where, ref }) => {
}).forEach(item => {
const json = JSON.parse(String(node))
const whereFulfilled = where.every(constraint =>
const whereFulfilled = item.where.every(constraint =>
constraint.isEqualTo === _getProperty(json, constraint.property)

@@ -115,3 +115,3 @@ )

const [collection] = collectionStack
dict.set(ref, { json, node, index, collection })
dict.set(item, { json, node, index, collection })
}

@@ -219,3 +219,3 @@ })

const inExistence = Array.isArray(item) && typeof item[0] === 'object' ? item[0] : typeof item === 'object' ? item : item[1]
const node = inExistence && existenceMap.get(inExistence.ref) ? existenceMap.get(inExistence.ref).node : collectionStack[collectionStack.length - 1]
const node = inExistence && existenceMap.get(inExistence) ? existenceMap.get(inExistence).node : collectionStack[collectionStack.length - 1]
const keys = keyPath.split('.')

@@ -228,4 +228,4 @@ if (!node.getIn(keys)) return

// 4. Delete existences from the project (e.g. separate deployer module when adding mtx)
deletions?.forEach(({ item: { ref }, relationships }) => {
const existence = existenceMap.get(ref)
deletions?.forEach(({ item, relationships }) => {
const existence = existenceMap.get(item)
if (!existence) return

@@ -259,3 +259,3 @@ existence.collection.delete(existence.index)

if (typeof addition.in === 'string' || Array.isArray(addition.in) && typeof addition.in[0] === 'string') return true
const inExistence = Array.isArray(addition.in) ? existenceMap.get(addition.in[0].ref) : existenceMap.get(addition.in.ref)
const inExistence = Array.isArray(addition.in) ? existenceMap.get(addition.in[0]) : existenceMap.get(addition.in)
return inExistence?.node === parent

@@ -270,9 +270,7 @@ })

})
.filter(({ ref }) => {
return !existenceMap.get(ref)
.filter(item => !existenceMap.get(item))
.forEach(item => {
const templateNode = templateExistenceMap.get(item).node
item.at !== undefined ? targetNode.items.splice(item.at, 0, templateNode) : targetNode.add(templateNode)
})
.forEach(({ ref, at }) => {
const templateNode = templateExistenceMap.get(ref).node
at !== undefined ? targetNode.items.splice(at, 0, templateNode) : targetNode.add(templateNode)
})
}

@@ -297,4 +295,4 @@ },

const [existence, keyPath] = relationship.into
if (!existenceMap.get(existence.ref)) return false
const existingNode = _getYAMLProperty(existenceMap.get(existence.ref).node, keyPath)
if (!existenceMap.get(existence)) return false
const existingNode = _getYAMLProperty(existenceMap.get(existence).node, keyPath)
return targetNode === existingNode

@@ -307,8 +305,8 @@ })

const missingPairs = [existence]
.filter(({ref}) =>
!targetJSON.some(item =>
_getProperty(existenceMap.get(ref).json, existenceKeyPath) === item[intoKey]
.filter(item =>
!targetJSON.some(targetItem =>
_getProperty(existenceMap.get(item).json, existenceKeyPath) === targetItem[intoKey]
)
)
.map(({ref}) => existenceMap.get(ref).node.get(intoKey))
.map(item => existenceMap.get(item).node.get(intoKey))
missingPairs.forEach(pair => {

@@ -315,0 +313,0 @@ targetNode.add({ [intoKey]: pair })
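In the merge utility, additions and deletions no longer need a `ref` string; the option objects themselves serve as Map keys, so an existence is identified by object reference. A small sketch of that keying (the addition object is illustrative):

// Sketch: existences keyed by the option object itself rather than by a 'ref' string.
const addition = { in: 'dependencies', where: [{ property: 'alias', isEqualTo: 'hana' }] }

const existenceMap = new Map()
existenceMap.set(addition, undefined)                 // reserve a slot, keyed by identity
existenceMap.set(addition, { node: null, index: 0 })  // later: record the matched YAML node
console.log(existenceMap.has(addition))               // true -- same object, same key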

@@ -1,5 +0,4 @@

const path = require('path')
const cds = require('../../cds')
const { path, read, exists } = cds.utils
const { BuildTaskFactory } = cds.build
const { read, exists } = cds.utils
const P_LANGUAGE_JAVA = 'java'

@@ -19,16 +18,17 @@ const P_LANGUAGE_NODEJS = 'nodejs'

module.exports = class ProjectReader {
module.exports = new class ProjectReader {
constructor(projectPath) {
this.projectPath = projectPath
constructor() {
this.readProject = this.readProject.bind(this)
}
// REVISIT: There should be a better (and parallelizable) API provided by cds.env
/**
* Returns cds.env using 'production' profile by default as mta deployment is executed with having production profile set.
*/
async getEnv(profile = 'production') {
getEnv(profile = 'production') {
const cdsEnvVar = process.env.CDS_ENV
process.env.CDS_ENV = profile
try {
return cds.env.for('cds', exists(this.projectPath) ? this.projectPath : this.cwd);
return cds.env.for('cds')
} finally {

@@ -39,8 +39,10 @@ cdsEnvVar ? process.env.CDS_ENV = cdsEnvVar : delete process.env.CDS_ENV

async read(options) {
const env = await this.getEnv();
async readProject(options) {
if (options?.cwd) cds.root = options.cwd
const env = this.getEnv()
DEBUG && DEBUG({ env })
const cap = await this._getCapDescriptor(env, options)
const ui = await this._getUiDescriptor(env)
const ui = this._getUiDescriptor(env)
const projectDescriptor = { cap, ui }

@@ -54,5 +56,5 @@ this._validateDbFolders(cap)

_validateDbFolders(cap) {
const { root, db: dbs, isJava } = cap
const { db: dbs, isJava } = cap
if (isJava) {
for (const db of dbs) if (!exists(path.join(root, db.path, 'package.json'))) {
for (const db of dbs) if (!exists(path.join(db.path, 'package.json'))) {
LOG.error(`Missing 'package.json' in folder ${db.path} - 'cds add hana' adds configuration for SAP HANA`)

@@ -64,4 +66,3 @@ }

async _getCapDescriptor(env, options) {
const projectPath = this.projectPath
const _hasFacet = template => require(`../template/${template}`).hasFacet(env, projectPath, options)
const _hasFacet = template => require(`../template/${template}`).hasFacet(env, options)
const cap = {

@@ -73,3 +74,2 @@ for: options?.for,

requires: [],
get root() { return projectPath },
get hasRequires() { return this.requires.length > 0 },

@@ -79,2 +79,3 @@ get needsSidecar() { return (options?.add.has('hana') || this.db.length > 0) && (this.isJava || !this.isMultitenant) },

get isJava() { return this.pLanguage === P_LANGUAGE_JAVA },
get hasSidecar() { return exists('mtx', 'sidecar') },
get isMultitenant() { return _hasFacet('multitenancy') },

@@ -101,9 +102,11 @@ get isExtensible() { return _hasFacet('extensibility') },

// no file path resolving as the directories might not exist - e.g. cds add will create the mtx/sidecar folder
const buildTasks = await new BuildTaskFactory(nullLogger, cds).getTasks({ root: this.projectPath, resolve: false, mta: false })
// REVISIT: Leaner API from cds.build
const buildTasks = await new BuildTaskFactory(nullLogger, cds).getTasks({ root: cds.root, resolve: false, mta: false })
await Promise.all(buildTasks.map(async (task) => {
const srcPath = path.resolve(this.projectPath, task.src)
const srcPath = path.resolve(cds.root, task.src)
const srcFolder = path.basename(srcPath)
const relDestPath = path.join(env?.build?.target ?? "gen", task.dest || task.src)?.replace(/\\/g, '/')
switch (task.for) {

@@ -168,3 +171,3 @@ case BUILD_TASK_HANA:

const descriptor = { archiveName: path.basename(this.projectPath) + "-exec.jar" }
const descriptor = { archiveName: path.basename(cds.root) + "-exec.jar" }
const archiveName = pomJson?.project?.artifactId?.[0]

@@ -177,4 +180,4 @@ const suffix = pomJson?.project?.packaging?.[0]

async _getUiDescriptor(env) {
const absoluteAppPath = path.join(this.projectPath, env.folders.app)
_getUiDescriptor(env) {
const absoluteAppPath = path.join(cds.root, env.folders.app)
return {

@@ -192,3 +195,3 @@ appPath: exists(absoluteAppPath) ? env.folders.app : null

if (cap.pLanguage === P_LANGUAGE_JAVA) {
const pomXmlPath = path.join(this.projectPath, 'pom.xml')
const pomXmlPath = path.join(cds.root, 'pom.xml')
try {

@@ -226,3 +229,3 @@ if (exists(pomXmlPath)) {

if (!cap.appName) {
const packageJsonPath = path.join(this.projectPath, 'package.json')
const packageJsonPath = path.join(cds.root, 'package.json')
if (exists(packageJsonPath)) {

@@ -247,3 +250,3 @@ try {

// 2. use project name and static default values
cap.appName = cap.appName ?? path.basename(this.projectPath)
cap.appName = cap.appName ?? path.basename(cds.root)
cap.appDescription = cap.appDescription ?? cap.appName + " application"

@@ -319,3 +322,3 @@ cap.appId = cap.appId ?? cap.appName

const xmljs = require('xml-js');
let xmljsParse = {};
const xmljsParse = {};
try {

@@ -322,0 +325,0 @@ const xml = await read(pomXmlPath, 'utf-8')
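`ProjectReader` is now exported as a singleton and the templates destructure its bound `readProject` function; the project root comes from `cds.root`, optionally re-rooted via `options.cwd`. A minimal usage sketch mirroring the call sites above:

// Sketch: consuming the singleton project reader as the templates now do.
const { readProject } = require('../../util/projectReader')

async function describeProject (options) {
  const projectDescriptor = await readProject(options)   // options.cwd may re-root cds.root
  const { isNodejs, isJava, hasMta, hasHelm } = projectDescriptor.cap
  return { isNodejs, isJava, hasMta, hasHelm }
}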

@@ -73,10 +73,13 @@ const term = require("../util/term");

const overwriteRules = checks.hasEslintConfigContent(this.configContents, "rules");
if (overwriteRules) {
await this._overwriteRuleSeverities();
if (this.pluginPath) {
const overwriteRules = checks.hasEslintConfigContent(this.configContents, "rules");
// Overwrite rules severities
if (overwriteRules) {
await this._overwriteRuleSeverities();
}
// Limit to CDS file extensions
this._addExtensions();
}
// Limit to CDS file extensions
this._addExtensions();
// Run ESLint with collected options

@@ -217,3 +220,14 @@ try {

} catch (err) {
console.log(err);
// Report identically to ESLint CLI
if (typeof err.messageTemplate === "string") {
try {
const eslintBase = `${require.resolve('eslint').split('eslint')[0]}/eslint`;
const template = require(path.join(eslintBase, `messages/${err.messageTemplate}.js`));
console.log(template(err.messageData || {}));
exit(1)
} catch {
// Ignore template error then fallback to use `error.stack`.
}
console.log(err.stack);
}
exit(1)
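The lint runner now mirrors the ESLint CLI when an error carries a `messageTemplate`, rendering ESLint's bundled message file with `messageData` before falling back to the stack trace. A hedged sketch of that lookup; that ESLint ships renderable `messages/<template>.js` modules is an assumption taken from the code above:

// Sketch: render an ESLint error roughly the way the CLI would.
const path = require('path')

function renderEslintError (err) {
  if (typeof err.messageTemplate === 'string') {
    try {
      const eslintBase = path.dirname(require.resolve('eslint/package.json'))
      const template = require(path.join(eslintBase, 'messages', `${err.messageTemplate}.js`))
      return template(err.messageData || {})   // e.g. the 'all-files-ignored' message
    } catch { /* no template found -- fall back to the stack trace */ }
  }
  return err.stack
}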

@@ -255,6 +269,6 @@ }

});
this.pluginApi = require(this.pluginPath.replace("index.js", "api"));
} catch (err) {
// Do nothing
// CLI will report (no locally installed plugin)
}
this.pluginApi = require(this.pluginPath.replace("index.js", "api"));
}

@@ -265,6 +279,10 @@ }

if (!this.pluginPath) {
this.pluginPath = require.resolve("@sap/eslint-plugin-cds", {
paths: [this.cdsdkPath],
});
this.pluginApi = require(this.pluginPath.replace("index.js", "api"));
try {
this.pluginPath = require.resolve("@sap/eslint-plugin-cds", {
paths: [this.cdsdkPath],
});
this.pluginApi = require(this.pluginPath.replace("index.js", "api"));
} catch (err) {
// CLI will report (no globally installed plugin)
}
}

@@ -271,0 +289,0 @@ // Project path is directory of ESLint config file

@@ -1,2 +0,4 @@

const DEBUG = /\b(y|all|cli|livereload)\b/.test(process.env.DEBUG) && console.debug
const cds = require('../cds')
const DEBUG = cds.debug('cli|livereload')
const axios = require('axios')
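Debug logging switches from a hand-rolled `DEBUG` regex to `cds.debug('cli|livereload')`, which returns a logger only when the corresponding `DEBUG` flag is set, so call sites can use optional chaining. A minimal sketch of the pattern:

// Sketch: cds.debug(...) yields a logger only when DEBUG matches, so calls become no-ops otherwise.
const cds = require('@sap/cds')
const DEBUG = cds.debug('cli|livereload')

DEBUG?.('live reload available at', 'http://localhost:35729')  // prints only with DEBUG=cli (or livereload)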

@@ -22,3 +24,3 @@ const ws = require('ws')

this.url = await livereloadURL(address, port)
DEBUG && DEBUG(`[cds] - live reload available at ${this.url}`)
DEBUG?.(`live reload available at ${this.url}`)
resolve(this.url)

@@ -43,9 +45,9 @@ }

this.wsServer.on('connection', (socket) => {
DEBUG && DEBUG('Client connected')
DEBUG?.('Client connected')
socket.on('message', (msg) => {
DEBUG && DEBUG('Client message', msg)
DEBUG?.('Client message', msg)
const request = JSON.parse(msg)
if (request.command === 'hello') {
DEBUG && DEBUG('Client handshake')
DEBUG?.('Client handshake')
const data = JSON.stringify({

@@ -64,12 +66,17 @@ command: 'hello',

} else if (request.command === 'info') {
DEBUG && DEBUG('Server received client data. Not sending response.')
DEBUG?.('Server received client data. Not sending response.')
}
})
})
this.wsServer.on('close', () => { DEBUG && DEBUG('Socket closed') })
this.wsServer.on('error', (e) => DEBUG && DEBUG(e))
if (DEBUG) {
server.on('close', () => DEBUG('⚡️','cds watch - livereload closed'))
this.wsServer.on('close', () => DEBUG('⚡️','cds watch - web socket closed'))
this.wsServer.on('error', (e) => DEBUG(e))
}
}
const close = () => { this.wsServer.close(); server.close(); DEBUG && DEBUG('Reload server closed') }
process.once('SIGTERM', close)
process.once('SIGINT', close)
process.once('shutdown', ()=>{
server.close()
this.wsServer.close()
})
})

@@ -87,3 +94,3 @@ }

})
DEBUG && DEBUG(`[cds] - live reload for ${this.files}. ${this.wsServer.clients.size} ws clients`)
DEBUG?.(`live reload for ${this.files}. ${this.wsServer.clients.size} ws clients`)
broadcast(data, this.wsServer.clients)

@@ -100,4 +107,4 @@ this.files = []

sockets.forEach(socket => {
DEBUG && DEBUG(`Sending ${data}`)
socket.send(data, err=> DEBUG && DEBUG(err))
DEBUG?.(`Sending ${data}`)
socket.send(data, err=> DEBUG?.(err))
})

@@ -104,0 +111,0 @@ }

@@ -1,2 +0,3 @@

const DEBUG = /\b(y|all|cli|watch)\b/.test(process.env.DEBUG) && console.debug
const cds = require ('../cds')
const DEBUG = cds.debug('cli|watch')

@@ -11,8 +12,9 @@ module.exports = ({ cwd, script, ext, includes=RegExp(), ignore=RegExp(), env={}, delay=200, options={} })=>{

const killAndExit = (sig) => {
DEBUG && DEBUG('\nreceived signal:', sig)
_kill (child, process.exit)
process.on('SIGINT', ()=>console.log()) //> newline after ^C
process.on('SIGINT', _shutdown)
process.on('SIGTERM', _shutdown)
function _shutdown (signal,n) {
DEBUG?.('⚡️', signal, n, 'received by cds watch')
process.emit('shutdown')
}
process.on('SIGTERM', killAndExit)
process.on('SIGINT', killAndExit)
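Instead of every module installing its own SIGINT/SIGTERM handlers that call `process.exit`, the watcher now turns those signals into a single process-level `'shutdown'` event, and the livereload server, file watcher, readline interface and child process each close themselves in their own listener. A small self-contained sketch of the pattern (the interval stands in for any such resource):

// Sketch: fan OS signals out to one 'shutdown' event; each resource cleans up itself.
process.on('SIGINT', () => process.emit('shutdown'))
process.on('SIGTERM', () => process.emit('shutdown'))

const timer = setInterval(() => {}, 1000)           // stand-in for a server, watcher or socket
process.on('shutdown', () => clearInterval(timer))  // resource-specific cleanup
process.on('shutdown', () => console.log('bye'))    // multiple listeners are fine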

@@ -22,5 +24,4 @@ // Re-starting child process...

const restart = _coalesceEvents(delay, (events) => {
DEBUG && DEBUG('>> Events:', events)
DEBUG?.('Restart', {events})
_kill (child, () => {
DEBUG && console.trace('Restart')
const execArgv = [...process.execArgv]

@@ -43,2 +44,5 @@ events.forEach(evt => {

})
// child.channel.unref()
// child.unref()
// child.on('exit', process.exit)
})

@@ -49,2 +53,3 @@

restart()
process.on('shutdown', ()=> _kill(child))

@@ -55,7 +60,9 @@ // Watching for touched files...

const filter = f => !ignore.test(f) && include.test(f)
watch (cwd||process.cwd(),{ recursive:true, filter, delay:0 }, restart)
const watcher = watch (cwd||process.cwd(),{ recursive:true, filter, delay:0 })
if (DEBUG) watcher.on('close', ()=> DEBUG('⚡️', 'cds watch - file watcher closed'))
watcher.on('change', restart)
process.on('shutdown', ()=> watcher.close())
// Live commands...
const readline = require('readline')
readline.createInterface(process.stdin).on('line', (input) => {
const readline = require('readline').createInterface(process.stdin).on('line', (input) => {
if (input === '') restart()

@@ -67,8 +74,10 @@ else if (input === 'restart' || input === 'rs' || input.match(/^y$/i)) restart()

else if (input === 'ps') ps(child,env)
else if (input === 'bye' || input.match(/^n$/i)) { _kill (child, () => process.exit()) }
else if (input === 'bye' || input.match(/^n$/i)) { _kill (child) }
else console.log ('?\n')
})
if (DEBUG) readline.on('close', ()=> DEBUG('⚡️', 'cds watch - readline closed'))
process.on('shutdown', ()=> readline.close())
emitter.restart = restart
emitter.stop = () => _kill(child, () => false)
emitter.stop = () => _kill(child)
emitter.setEnv = (name, value) => {

@@ -78,3 +87,3 @@ if (typeof value === "undefined") {

} else {
env[name]
env[name] // REVISIT: no value assigned ???
}

@@ -95,12 +104,9 @@ }

let timer, cache = []
function handle() {
fn(cache)
timer = null
cache = []
}
return function(type, name) { // node-watch callback signature
cache.push({type, name})
if (!timer) {
timer = setTimeout(handle, delay);
}
if (type) cache.push({type, name})
if (!timer) timer = setTimeout(() => {
fn(cache)
timer = null
cache = []
}, delay).unref();
}
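The restart trigger is a debounce: change events arriving within `delay` milliseconds are cached and flushed to the callback as one batch, and the timer is `unref()`ed so a pending flush never keeps the process alive. A standalone sketch of the same idea:

// Sketch of the event-coalescing debounce used to batch restarts.
function coalesce (delay, fn) {
  let timer, cache = []
  return (type, name) => {                  // node-watch callback signature
    if (type) cache.push({ type, name })
    if (!timer) timer = setTimeout(() => {
      fn(cache); timer = null; cache = []   // flush the whole batch once
    }, delay).unref()
  }
}

const restart = coalesce(200, events => console.log(`restarting after ${events.length} changes`))
restart('update', 'srv/service.cds')
restart('update', 'db/schema.cds')          // both changes land in a single restart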

@@ -112,40 +118,36 @@ }

*/
function _kill (proc, cb=()=>{}) {
if (!proc) return cb()
if (proc[_pending]) return // safeguard against repeated calls for the same process
proc[_pending] = true
const fn = () => { delete proc[_pending]; return cb(); }
function _kill (child, cb=()=>{}) {
if (!child) return cb()
if (child[_pending]) return // safeguard against repeated calls for the same process
child[_pending] = true
const _done = () => { delete child[_pending]; return cb(); }
const waitTime = parseInt(process.env.CDS_WATCH_KILL_DELAY) || 500
const waitTime = parseInt(process.env.CDS_WATCH_KILL_DELAY) || 2222
// On Windows, child_process.kill() abruptly kills the process (https://nodejs.org/api/child_process.html#subprocesskillsignal)
// Server has cleanup code to run, so send a custom message and give it a bit time.
if (process.platform === 'win32') {
proc.send({ close: true }, () => {}) // see @sap/cds/bin/serve.js
child.send({ close: true }, () => {}) // see @sap/cds/bin/serve.js
setTimeout(()=> {
proc.kill()
DEBUG && DEBUG('Killed process', proc.pid)
fn()
}, waitTime)
child.kill()
DEBUG?.('Killed process', child.pid)
_done()
}, waitTime).unref()
return
}
// fallback for misbehaving processes: SIGKILL it after some time
const forced = setTimeout(()=> {
DEBUG?.('⚡️', 'cds watch - killing child forcefully!')
child.kill('SIGKILL')
}, waitTime).unref()
// first kill normally (using SIGTERM)
let killed = false
proc.on('exit',() => {
if (!killed) { killed = true
DEBUG && DEBUG('Killed process', proc.pid)
fn()
}
}).kill()
child.on('exit',() => { clearTimeout(forced)
DEBUG?.('⚡️', 'cds watch - child exited', child.pid)
_done()
})
// fallback for misbehaving processes: SIGKILL it after some time
setTimeout(()=> {
if (!killed) { killed = true
proc.kill('SIGKILL')
DEBUG && DEBUG('Killed process (forcefully)', proc.pid)
fn()
}
}, waitTime)
child.kill()
}
const _pending = Symbol.for('sap.cds.watch.pendingKill')
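The rewritten `_kill` sends SIGTERM, waits for the child's `'exit'` event, and only escalates to SIGKILL through an `unref()`ed fallback timer (default now 2222 ms); Windows keeps the message-then-kill path. A condensed sketch of the non-Windows flow with a throwaway child process:

// Sketch: graceful child termination with a SIGKILL fallback (non-Windows path).
const { spawn } = require('child_process')

function kill (child, done = () => {}, waitTime = 2222) {
  if (!child) return done()
  const forced = setTimeout(() => child.kill('SIGKILL'), waitTime).unref()  // misbehaving child
  child.on('exit', () => { clearTimeout(forced); done() })                  // normal case
  child.kill()                                                              // SIGTERM first
}

const child = spawn(process.execPath, ['-e', 'setInterval(()=>{},1000)'])
kill(child, () => console.log('child terminated'))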
{
"name": "@sap/cds-dk",
"version": "6.5.2",
"version": "6.6.0",
"description": "Command line client and development toolkit for the SAP Cloud Application Programming Model",

@@ -26,3 +26,3 @@ "homepage": "https://cap.cloud.sap/",

"express": "^4.17.1",
"livereload-js": "^3.3.1",
"livereload-js": "^4.0.0",
"md5": "^2.3.0",

@@ -29,0 +29,0 @@ "mustache": "^4.0.1",

@@ -7,3 +7,8 @@ # @sap/cds-dk

## How to Obtain Support
In case you find a bug, please report an [incident](https://cap.cloud.sap/docs/resources/#reporting-incidents) on SAP Support Portal.
## License
This package is provided under the terms of the [SAP Developer License Agreement](https://tools.hana.ondemand.com/developer-license-3_1.txt).

