@sap/cds-compiler
Advanced tools
Comparing version 1.43.0 to 1.45.0
@@ -118,2 +118,3 @@ #!/usr/bin/env node | ||
'subElemRedirections': true, | ||
'addTextsLanguageAssoc': true, | ||
'technicalConfig': true, | ||
@@ -120,0 +121,0 @@ 'keyRefError': true, |
@@ -12,2 +12,4 @@ #!/usr/bin/env node | ||
'use strict'; | ||
const compiler = require('../lib/main'); | ||
@@ -14,0 +16,0 @@ const fs = require('fs'); |
@@ -15,2 +15,4 @@ #!/usr/bin/env node | ||
'use strict'; | ||
const commands = { | ||
@@ -17,0 +19,0 @@ complete, find, lint |
@@ -9,2 +9,50 @@ # ChangeLog for cdx compiler and backends | ||
## Version 1.45.0 - 2020-10-30 | ||
### Added | ||
- OData: Warn about non-applicable annotations. | ||
### Changed | ||
- A warning is emitted for annotation definitions inside services/contexts as this won't be | ||
allowed in the next major cds-compiler release. | ||
- OData: Update vocabularies 'Analytics' and 'Common'. | ||
### Fixed | ||
- Association to Join translation: Fill empty select blocks with aliased columns. | ||
- to.edm(x): | ||
+ Some EDM(x) warnings were not properly passed to the user. | ||
+ Don't render references and annotations for unexposed associations. | ||
- to.hdbcds: Warnings during rendering of the hdbcds were not raised to the user. | ||
- Issue which led to wrong on-conditions for `hdbcds` naming mode. | ||
## Version 1.44.4 - 2020-10-16 | ||
### Fixed | ||
- to.hdbcds/hdi/sql: The processing of managed associations as foreign keys now works regardless of the order in which the possible chains are resolved. | ||
- OData: Namespaces are brought back into the exposed types. Dots are replaced with underscores in the name. | ||
## Version 1.44.2 - 2020-10-09 | ||
### Added | ||
- OData: The annotations `@assert.format` and `@assert.range` now result in adding the | ||
`@Validation.Pattern` and `@Validation.AllowedValues` in the resulting EDMX. | ||
- A new compiler option `newResolve` is added (`--new-resolve` for `cdsc`). When set to `true` a new | ||
module resolver is used which needs fewer file lookups. It will become the default in the future. | ||
- Event definitions can be typed with a reference to an event. | ||
- When the new option `withLocation` is set, | ||
the property `$location` in the CSN is enumerable instead of non-enumerable; | ||
the value of `$location` is an object with sub properties `file`, `line` and `col` | ||
which describes the source position of all definitions, elements and other members. | ||
### Changed | ||
- OData: | ||
+ The `namespace` is now not part of the exposed type's name. | ||
+ Update vocabularies 'Aggregation', 'UI' and 'Validation'. | ||
## Version 1.43.0 - 2020-10-02 | ||
@@ -11,0 +59,0 @@ |
@@ -10,2 +10,11 @@ # ChangeLog of Beta Features for cdx compiler and backends | ||
## Version 1.44.0 | ||
### Added `addTextsLanguageAssoc` | ||
When the beta option `addTextsLanguageAssoc` is set to true and | ||
the model contains an entity `sap.common.Languages` with an element `code`, | ||
all generated texts entities additionally contain an element `language` | ||
which is an association to `sap.common.Languages` using element `locale`. | ||
## Version 1.43.0 | ||
@@ -12,0 +21,0 @@ |
@@ -264,2 +264,6 @@ # ODATA Transformation | ||
| `@important` | `@UI.Importance` | | ||
| `@assert.format` | `@Validation.Pattern: value` | | ||
| `@assert.range` | `@Validation.Minimum/Maximum: value` | | ||
| | or `@Validation.AllowedValues: {` | | ||
| | `@Core.SymbolicName: enum-name }` | | ||
@@ -266,0 +270,0 @@ |
@@ -11,3 +11,3 @@ /** @module API */ | ||
const { | ||
sortMessages, messageString, CompilationError, getMessageFunction, handleMessages, | ||
sortMessages, messageString, CompilationError, | ||
} = require('../base/messages'); | ||
@@ -88,15 +88,19 @@ const { compactModel } = require('../json/to-csn'); | ||
} | ||
const message = getMessageFunction(csn, options); | ||
const { error, warning, throwWithError } = alerts.makeMessageFunction(csn, options); | ||
for (const name of relevantOptionNames ) { | ||
if (options[name] !== csn.meta.options[name]) | ||
message('wrong-pretransformed-csn', null, null, null, 'error', `Expected pre-processed CSN to have option "${ name }" set to "${ options[name] }". Found: "${ csn.meta.options[name] }".`); | ||
if (options[name] !== csn.meta.options[name]) { | ||
error('wrong-pretransformed-csn', null, | ||
`Expected pre-processed CSN to have option "${ name }" set to "${ options[name] }". Found: "${ csn.meta.options[name] }".`); | ||
} | ||
} | ||
for (const name of warnAboutMismatch ) { | ||
if (options[name] !== csn.meta.options[name]) | ||
message('options-mismatch-pretransformed-csn', null, null, null, 'warning', `Expected pre-processed CSN to have option "${ name }" set to "${ options[name] }". Found: "${ csn.meta.options[name] }".`); | ||
if (options[name] !== csn.meta.options[name]) { | ||
warning('options-mismatch-pretransformed-csn', null, | ||
`Expected pre-processed CSN to have option "${ name }" set to "${ options[name] }". Found: "${ csn.meta.options[name] }".`); | ||
} | ||
} | ||
handleMessages(csn, options); | ||
throwWithError(); | ||
} | ||
@@ -269,2 +273,3 @@ | ||
const { alterTable, ...hdiArtifacts } = toSqlDdl(diff, internalOptions); | ||
delete targetState.extensions; | ||
return { | ||
@@ -603,7 +608,7 @@ hdiArtifacts: flattenResultStructure(hdiArtifacts), | ||
if (err instanceof CompilationError || options.noRecompile) | ||
// options.testMode && err instanceof RangeError) // stack overflow | ||
throw err; | ||
const message = getMessageFunction( csn, options ); | ||
message( 'api-recompiled-csn', emptyLocation('csn.json'), | ||
null, {}, 'Info', 'CSN input had to be recompiled' ); | ||
const { info } = alerts.makeMessageFunction( csn, options ); | ||
info( 'api-recompiled-csn', emptyLocation('csn.json'), 'CSN input had to be recompiled' ); | ||
// next line to be replaced by CSN parser call which reads the CSN object | ||
@@ -610,0 +615,0 @@ const xsn = getXsn(csn, options); |
@@ -25,2 +25,3 @@ 'use strict'; | ||
'odataProxies', | ||
'odataXServiceRefs', | ||
'odataDefaultValues', | ||
@@ -39,2 +40,3 @@ 'service', | ||
'hanaFlavor', | ||
'withLocations', | ||
'testMode', | ||
@@ -41,0 +43,0 @@ 'noRecompile', |
'use strict'; | ||
const { getMessageFunction, handleMessages } = require('../base/messages'); | ||
const { makeMessageFunction, handleMessages } = require('../base/messages'); | ||
@@ -113,3 +113,3 @@ /* eslint-disable arrow-body-style */ | ||
const messageCollector = { messages: [] }; | ||
const message = getMessageFunction(messageCollector); | ||
const { error } = makeMessageFunction(messageCollector); | ||
@@ -121,3 +121,3 @@ for (const optionName of Object.keys(options)) { | ||
if (!validator.validate(optionValue)) | ||
message('invalid-option', null, null, null, 'error', `Expected option "${ optionName }" to have "${ validator.expected(optionValue) }". Found: "${ validator.found(optionValue) }"`); | ||
error('invalid-option', null, {}, `Expected option "${ optionName }" to have "${ validator.expected(optionValue) }". Found: "${ validator.found(optionValue) }"`); | ||
} | ||
@@ -127,10 +127,8 @@ handleMessages(messageCollector); | ||
const message = getMessageFunction(options); | ||
const message = makeMessageFunction(options); | ||
for (const combinationValidatorName of combinationValidators.concat([ 'beta-no-test' ])) { | ||
const combinationValidator = allCombinationValidators[combinationValidatorName]; | ||
if (combinationValidator.validate(options)) { | ||
message('invalid-option-combination', null, null, null, | ||
combinationValidator.severity, combinationValidator.message(options)); | ||
} | ||
if (combinationValidator.validate(options)) | ||
message[combinationValidator.severity]('invalid-option-combination', null, {}, combinationValidator.message(options)); | ||
} | ||
@@ -137,0 +135,0 @@ |
@@ -22,2 +22,3 @@ 'use strict'; | ||
const timetrace = require('./utils/timetrace'); | ||
const { makeMessageFunction } = require('./base/messages'); | ||
@@ -149,6 +150,6 @@ function deprecated( model, options, signal, backend ) { | ||
const { warning, signal } = alerts(csn, options); | ||
const { warning } = makeMessageFunction(csn, options); | ||
// Verify options | ||
optionProcessor.verifyOptions(options, 'toHana', true).map(complaint => signal(warning`${complaint}`)); | ||
optionProcessor.verifyOptions(options, 'toHana', true).map(complaint => warning(null, null, `${complaint}`)); | ||
@@ -167,3 +168,11 @@ // Special case: For naming variant 'hdbcds' in combination with 'toHana' (and only there!), 'forHana' | ||
// Assemble result | ||
let result = {}; | ||
let result = { | ||
messages: [] | ||
}; | ||
// Transfer warnings (errors would have resulted in an exception before we come here) | ||
if (forHanaCsn.messages && forHanaCsn.messages.length > 0) { | ||
result.messages = forHanaCsn.messages; | ||
} | ||
if (options.toHana.src) { | ||
@@ -174,7 +183,10 @@ if(options.testMode){ | ||
setProp(sorted, "messages", forHanaCsn.messages); | ||
options.messages = result.messages; | ||
result.hdbcds = toCdsSourceCsn(sorted, options); | ||
} else { | ||
options.messages = result.messages; | ||
result.hdbcds = toCdsSourceCsn(forHanaCsn, options); | ||
} | ||
} | ||
if (options.toHana.csn) { | ||
@@ -184,6 +196,2 @@ result.csn = options.testMode ? sortCsn(forHanaCsn) : forHanaCsn; | ||
// Transfer warnings (errors would have resulted in an exception before we come here) | ||
if (forHanaCsn.messages && forHanaCsn.messages.length > 0) { | ||
result.messages = forHanaCsn.messages; | ||
} | ||
timetrace.stop(); | ||
@@ -352,6 +360,6 @@ return result; | ||
const { error, warning, signal } = alerts(csn, options); | ||
const { error, warning } = makeMessageFunction(csn, options); | ||
// Verify options | ||
optionProcessor.verifyOptions(options, 'toOdata', true).map(complaint => signal(warning`${complaint}`)); | ||
optionProcessor.verifyOptions(options, 'toOdata', true).map(complaint => warning(null, null, `${complaint}`)); | ||
@@ -389,3 +397,3 @@ // Prepare model for ODATA processing | ||
if (options.toOdata.version === 'v2') { | ||
signal(error`ODATA JSON output is not available for ODATA V2`); | ||
error(null, null, `ODATA JSON output is not available for ODATA V2`); | ||
} | ||
@@ -500,6 +508,6 @@ // FIXME: Why only metadata_json - isn't this rather a 'combined_json' ? If so, rename it! | ||
const { warning, signal } = alerts(model, options); | ||
const { warning } = makeMessageFunction(model, options); | ||
// Verify options | ||
optionProcessor.verifyOptions(options, 'toCdl', silent).map(complaint => signal(warning`${complaint}`)); | ||
optionProcessor.verifyOptions(options, 'toCdl', silent).map(complaint => warning(null, null, `${complaint}`)); | ||
@@ -783,6 +791,6 @@ return options; | ||
const { warning, error, signal } = alerts(model, options); | ||
const { warning, error } = makeMessageFunction(model, options); | ||
// Verify options | ||
optionProcessor.verifyOptions(options, 'toSql', true).map(complaint => signal(warning`${complaint}`)); | ||
optionProcessor.verifyOptions(options, 'toSql', true).map(complaint => warning(null, null, `${complaint}`)); | ||
@@ -800,3 +808,3 @@ // FIXME: Currently, '--to-sql' implies transformation for HANA (transferring the options to forHana) | ||
if (options.toSql.dialect === 'sqlite' && options.toSql.associations !== 'joins') { | ||
signal(warning`Option "{ toSql.dialect: 'sqlite' }" should always be combined with "{ toSql.assocs: 'joins' }"`); | ||
warning(null, null, `Option "{ toSql.dialect: 'sqlite' }" should always be combined with "{ toSql.assocs: 'joins' }"`); | ||
} | ||
@@ -807,7 +815,7 @@ | ||
if(['quoted', 'hdbcds'].includes(options.toSql.names)) { | ||
signal(error`Option "{ toSql.dialect: '${options.toSql.dialect}' }" cannot be combined with "{ toSql.names: '${options.toSql.names}' }"`); | ||
error(null, null, `Option "{ toSql.dialect: '${options.toSql.dialect}' }" cannot be combined with "{ toSql.names: '${options.toSql.names}' }"`); | ||
} | ||
// No non-HANA SQL for HDI | ||
if(options.toSql.src === 'hdi') { | ||
signal(error`Option "{ toSql.dialect: '${options.toSql.dialect}' }" cannot be combined with "{ toSql.src: '${options.toSql.src}' }"`); | ||
error(null, null, `Option "{ toSql.dialect: '${options.toSql.dialect}' }" cannot be combined with "{ toSql.src: '${options.toSql.src}' }"`); | ||
} | ||
@@ -883,3 +891,3 @@ } | ||
function toRename(model, options) { | ||
const { error, warning, signal } = alerts(model); | ||
const { error, warning } = makeMessageFunction(model, options); | ||
@@ -899,7 +907,7 @@ // In case of API usage the options are in the 'options' argument | ||
if (options.toRename.names === 'flat') { | ||
signal(warning`Option "{ toRename.names: 'flat' }" is deprecated, use "{ toRename.names: 'plain' }" instead`); | ||
warning(null, null, `Option "{ toRename.names: 'flat' }" is deprecated, use "{ toRename.names: 'plain' }" instead`); | ||
options.toRename.names = 'plain'; | ||
} | ||
else if (options.toRename.names === 'deep') { | ||
signal(warning`Option "{ toRename.names: 'deep' }" is deprecated, use "{ toRename.names: 'quoted' }" instead`); | ||
warning(null, null, `Option "{ toRename.names: 'deep' }" is deprecated, use "{ toRename.names: 'quoted' }" instead`); | ||
options.toRename.names = 'quoted'; | ||
@@ -909,7 +917,7 @@ } | ||
// Verify options | ||
optionProcessor.verifyOptions(options, 'toRename').map(complaint => signal(warning`${complaint}`)); | ||
optionProcessor.verifyOptions(options, 'toRename').map(complaint => warning(null, null, `${complaint}`)); | ||
// Requires beta mode | ||
if (!isBetaEnabled(options, 'toRename')) { | ||
signal(error`Generation of SQL rename statements is not supported yet (only in beta mode)`); | ||
error(null, null, `Generation of SQL rename statements is not supported yet (only in beta mode)`); | ||
} | ||
@@ -952,3 +960,3 @@ | ||
function toCsn(model, options) { | ||
const { warning, signal } = alerts(model); | ||
const { warning } = makeMessageFunction(model); | ||
// In case of API usage the options are in the 'options' argument | ||
@@ -965,3 +973,3 @@ // put the OData specific options under the 'options.toCsn' wrapper | ||
// Verify options | ||
optionProcessor.verifyOptions(options, 'toCsn').map(complaint => signal(warning`${complaint}`)); | ||
optionProcessor.verifyOptions(options, 'toCsn').map(complaint => warning(null, null, `${complaint}`)); | ||
@@ -968,0 +976,0 @@ return compactModel(model); |
@@ -17,3 +17,4 @@ // Implementation of alerts | ||
const messageBuilder = require('./message-builder'); | ||
const { buildMessage } = require('./message-builder'); | ||
const { handleMessages } = require('./messages'); | ||
@@ -95,3 +96,3 @@ /** | ||
function signal(msg, location, severity, message_id='') { | ||
const err = messageBuilder(model, msg, location, message_id in config ? config[message_id] : severity, message_id, options.internalMsg); | ||
const err = buildMessage(model, msg, location, message_id in config ? config[message_id] : severity, message_id, options.internalMsg); | ||
if (err.severity) | ||
@@ -109,2 +110,32 @@ // don't collect stuff that doesn't have a severity | ||
/** | ||
* Wrapper function around the old alerts(). | ||
* Used to make backporting from v2 easier. | ||
* | ||
* Differences to v2: Text arguments are IGNORED. | ||
* | ||
* @param {CSN.Model} model | ||
* @param {CSN.Options} options | ||
*/ | ||
alerts.makeMessageFunction = function makeMessageFunction( model, options = model.options || {}) { | ||
const { signal } = alerts(model, options); | ||
function makeWrapper(severity) { | ||
return (id, loc, params, texts) => { | ||
if (typeof params === 'string') // in new function: params is optional | ||
return signal(params, loc, severity, id); | ||
return signal(texts, loc, severity, id); | ||
}; | ||
} | ||
return { | ||
error: makeWrapper('Error'), | ||
warning: makeWrapper('Warning'), | ||
info: makeWrapper('Info'), | ||
debug: makeWrapper('Debug'), | ||
messages: options.messages || model.messages, | ||
throwWithError: () => handleMessages(model, options), | ||
}; | ||
} | ||
module.exports = alerts; |
@@ -7,3 +7,3 @@ // Deep copy an object structure | ||
// kept in the model itself, so after cloning the model you will have two alert message arrays, but alert still | ||
// points to the alert messages in the original model. You have to reinstantiate alert in the clone to refer | ||
// points to the alert messages in the original model. You have to re-instantiate alert in the clone to refer | ||
// to the cloned messages. | ||
@@ -10,0 +10,0 @@ // In order not to forget such functions that would still alter the original model it's advised to freeze the |
@@ -7,3 +7,3 @@ // Functions for dictionaries (Objects without prototype) | ||
// (called `found`) with the same name is already defined, call | ||
// `messagerCallback` with arguments `name` and `loc` assigned to | ||
// `messageCallback` with arguments `name` and `loc` assigned to | ||
// `entry.name.location`. If this is the first duplicate entry and if the | ||
@@ -10,0 +10,0 @@ // `filename`s are different, call the callback again on `found.name.location`. |
@@ -133,2 +133,5 @@ 'use strict'; | ||
if (!currentThing) | ||
return result; | ||
if (query) | ||
@@ -135,0 +138,0 @@ query = queryDepth(currentThing.query, query); |
@@ -70,3 +70,3 @@ const { CompileMessage, DebugCompileMessage } = require('../base/messages'); | ||
module.exports = buildMessage; | ||
module.exports = { buildMessage }; | ||
@@ -88,2 +88,3 @@ // Functions and classes for syntax messages | ||
* @param {CSN.Message[]} messages | ||
* @returns {boolean} | ||
*/ | ||
@@ -186,2 +187,3 @@ function hasErrors( messages ) { | ||
this.location = normalizeLocation( location ); | ||
this.$location = dollarLocation( this.location ); | ||
this.validNames = null; | ||
@@ -219,2 +221,3 @@ if (home) // semantic location, e.g. 'entity:"E"/element:"x"' | ||
this.location = normalizeLocation( location ); | ||
this.$location = dollarLocation( this.location ); | ||
this.validNames = null; | ||
@@ -234,2 +237,19 @@ if (home) // semantic location, e.g. 'entity:"E"/element:"x"' | ||
/** | ||
* Temporary v1 function to convert an "old-style" location to "new-style". | ||
*/ | ||
function dollarLocation( location ) { | ||
const file = location && location.filename || undefined; | ||
const start = file && location.start || { line: undefined, column: undefined }; | ||
const end = file && !location.$weak && location.end || { line: undefined, column: undefined }; | ||
return { | ||
file, | ||
line: start.line, | ||
col: start.column, | ||
endLine: end.line, | ||
endCol: end.column, | ||
address: undefined, | ||
}; | ||
} | ||
/** | ||
* Handle compiler messages, i.e. throw a compiler exception if there are | ||
@@ -242,3 +262,3 @@ * errors, otherwise sort the messages (see compareMessage()). | ||
function handleMessages( model, options = model.options || {} ) { | ||
let messages = options.messages || model.messages; | ||
const messages = options.messages || model.messages; | ||
if (messages && messages.length) { | ||
@@ -256,9 +276,13 @@ messages.sort( compareMessage ); | ||
info: { name: 'Info', level: 2 }, | ||
debug: { name: 'Debug', level: 3 } | ||
} | ||
debug: { name: 'Debug', level: 3 }, | ||
}; | ||
/** | ||
* @param {string} severity | ||
* @deprecated | ||
*/ | ||
function normalizedSeverity( severity ) { | ||
if (typeof severity !== 'string') | ||
return (severity === null) ? 'Debug' : 'Error'; | ||
let s = severitySpecs[ severity.toLowerCase() ]; | ||
const s = severitySpecs[severity.toLowerCase()]; | ||
return s && s.name || 'Error'; | ||
@@ -270,3 +294,3 @@ } | ||
// If those do not exist, define a non-enumerable property `messages` in `model`. | ||
function getMessageFunction( model, options = model.options || {}, transform ) { | ||
function getMessageFunction( model, options = model.options || {}, transform = null ) { | ||
let messages = options.messages || model.messages || | ||
@@ -287,7 +311,10 @@ Object.defineProperty( model, 'messages', | ||
: messageText( texts || standardTexts[id], params, transform ); | ||
home = (typeof home === 'string') ? home : homeName(home); | ||
const homename = (typeof home === 'string') ? home : homeName(home); | ||
let msg = (options.internalMsg) | ||
? new DebugCompileMessage( location, text, s, id, home ) | ||
: new CompileMessage( location, text, s, id, home ); | ||
? new DebugCompileMessage( location, text, s, id, homename ) | ||
: new CompileMessage( location, text, s, id, homename ); | ||
messages.push( msg ); | ||
const definition = typeof home !== 'string' && homeName( home, true ); | ||
if (definition) | ||
msg.$location.address = { definition }; | ||
return msg; | ||
@@ -297,2 +324,35 @@ } | ||
/** | ||
* Wrapper function around the old getMessageFunction(). | ||
* Used to make backporting from v2 easier. | ||
* | ||
* Differences to v2: Last argument "severity" was added because no central message registry exists. | ||
* | ||
* @param {XSN.Model} model | ||
* @param {CSN.Options} [options] | ||
* @param {any} [transform] | ||
*/ | ||
function makeMessageFunction( model, options = model.options || {}, transform = null ) { | ||
const message = getMessageFunction(model, options, transform); | ||
function makeWrapper(defaultSeverity) { | ||
return (id, loc, params, texts, severity = defaultSeverity) => { | ||
const isTuple = Array.isArray(loc); // [ location, user ] | ||
if (isTuple) | ||
return message(id, loc[0], loc[1], params, severity, texts); | ||
return message(id, loc, null, params, severity, texts); | ||
}; | ||
} | ||
return { | ||
message: makeWrapper(null), | ||
error: makeWrapper('Error'), | ||
warning: makeWrapper('Warning'), | ||
info: makeWrapper('Info'), | ||
debug: makeWrapper('Debug'), | ||
messages: options.messages || model.messages, | ||
throwWithError: () => handleMessages(model, options), | ||
}; | ||
} | ||
const paramsTransform = { | ||
@@ -549,6 +609,8 @@ alias: quoted, | ||
c( aend.column, bend.column ) || | ||
c( homeSortName( a ), homeSortName( b ) ) || | ||
c( a.message, b.message ) ); | ||
} | ||
else if (!a.location === !b.location) | ||
return c( a.message, b.message ) | ||
return ( c( homeSortName( a ), homeSortName( b ) ) || | ||
c( a.message, b.message ) ); | ||
else | ||
@@ -562,2 +624,11 @@ return (!a.location) ? 1 : -1; | ||
// Return sort-relevant part of semantic location (after the ':'). | ||
// Messages without semantic locations are considered smaller (for syntax errors) | ||
// and (currently - should not happen in v2) larger for other messages. | ||
function homeSortName( { home, messageId } ) { | ||
return (!home) | ||
? (messageId && messageId.startsWith( 'syntax-' ) ? '' : '~') | ||
: home.substring( home.indexOf( ':' ) + 1 ); // i.e. starting with the \" | ||
} | ||
/** | ||
@@ -613,7 +684,7 @@ * Removes duplicate messages from the given messages array without destroying | ||
function homeName( art ) { | ||
function homeName( art, absoluteOnly ) { | ||
if (!art) | ||
return art; | ||
if (art._outer) // in returns / items property | ||
return homeName( art._outer ); | ||
return homeName( art._outer, absoluteOnly ); | ||
else if (art.kind === 'source' || !art.name) // error reported in parser or on source level | ||
@@ -624,5 +695,7 @@ return null; | ||
else if (art.kind === 'extend') | ||
return homeNameForExtend ( art ); | ||
return !absoluteOnly && homeNameForExtend ( art ); | ||
else if (art.name._artifact) // block, extend, annotate | ||
return homeName( art.name._artifact ); // use corresponding definition | ||
return homeName( art.name._artifact, absoluteOnly ); // use corresponding definition | ||
else if (absoluteOnly) | ||
return art.name.absolute; | ||
else | ||
@@ -635,2 +708,4 @@ return (art._main ? art._main.kind : art.kind) + ':' + artName( art ); | ||
function homeNameForExtend( art ) { | ||
// TODO: fix the following - do like in collectArtifactExtensions() or | ||
// basically resolveUncheckedPath() | ||
const absoluteName = (art.name.id ? art.name.id : | ||
@@ -716,2 +791,5 @@ art.name.path.map(s => s && s.id).join('.')); | ||
* Returns an array of message IDs that have an explanation text. | ||
* Only used for testing. | ||
* | ||
* @private | ||
*/ | ||
@@ -731,2 +809,3 @@ function messageIdsWithExplanation() { | ||
getMessageFunction, | ||
makeMessageFunction, | ||
artName, | ||
@@ -743,3 +822,2 @@ handleMessages, | ||
messageIdsWithExplanation, | ||
} | ||
}; |
@@ -1,3 +0,1 @@ | ||
// | ||
'use strict'; | ||
@@ -147,5 +145,11 @@ | ||
// Like `obj.prop = value`, but not contained in JSON / CSN | ||
// It's important to set enumerable explicitly to false (although 'false' is the default), | ||
// as else, if the property already exists, it keeps the old setting for enumerable. | ||
/** | ||
* Like `obj.prop = value`, but not contained in JSON / CSN | ||
* It's important to set enumerable explicitly to false (although 'false' is the default), | ||
* as else, if the property already exists, it keeps the old setting for enumerable. | ||
* | ||
* @param {object} obj | ||
* @param {string} prop | ||
* @param {any} value | ||
*/ | ||
function setProp (obj, prop, value) { | ||
@@ -152,0 +156,0 @@ let descriptor = { value, configurable: true, writable: true, enumerable: false }; |
@@ -15,3 +15,3 @@ 'use strict' | ||
// .option('-y --y-in-long-form') | ||
// .option(' --bar-wiz <w>', ['bla', 'fasel']) | ||
// .option(' --bar-wiz <w>', ['bla', 'foo']) | ||
// Options *must* have a long form, can have at most one <param>, and optionally | ||
@@ -18,0 +18,0 @@ // an array of valid param values as strings. Commands and param values must not |
'use strict'; | ||
const alerts = require('../base/alerts'); | ||
const { getMessageFunction } = require('../base/messages'); | ||
const { makeMessageFunction } = require('../base/messages'); | ||
const { hasArtifactTypeInformation } = require('../model/modelUtils') | ||
@@ -19,15 +19,15 @@ const builtins = require('../compiler/builtins'); | ||
const location = art.name && art.name.location || art.location; | ||
const message = getMessageFunction(model); | ||
const { message } = makeMessageFunction(model); | ||
if (art.kind === 'entity') { | ||
message('empty-entity', location, art, { '#': isEmpty ? 'empty' : 'virtual' }, 'Info', { | ||
message('empty-entity', [location, art], { '#': isEmpty ? 'std' : 'virtual' }, { | ||
virtual: 'An entity without non-virtual elements cannot be deployed or used in a service', | ||
empty: 'An entity without elements cannot be deployed or used in a service' | ||
}); | ||
std: 'An entity without elements cannot be deployed or used in a service' | ||
}, 'Info'); | ||
} | ||
else if (art.kind === 'type') { | ||
message('empty-type', location, art, { '#': isEmpty ? 'empty' : 'virtual' }, 'Info', { | ||
message('empty-type', [location, art], { '#': isEmpty ? 'std' : 'virtual' }, { | ||
virtual: 'Structured type without non-virtual elements', | ||
empty: 'Structured type without elements' | ||
}); | ||
std: 'Structured type without elements' | ||
}, 'Info'); | ||
} | ||
@@ -105,8 +105,7 @@ } | ||
// not happen in CDL. | ||
const message = getMessageFunction(model); | ||
message('check-proper-type', art.location, art, { art: artifact }, | ||
['Error'], { | ||
std: 'Dubious type $(ART) without type information', | ||
element: 'Dubious element $(MEMBER) of $(ART) without type information', | ||
}); | ||
const { message } = makeMessageFunction(model); | ||
message('check-proper-type', [art.location, art], { art: artifact }, { | ||
std: 'Dubious type $(ART) without type information', | ||
element: 'Dubious element $(MEMBER) of $(ART) without type information', | ||
}, ['Error']); | ||
} | ||
@@ -151,8 +150,7 @@ | ||
if (finalType && !hasArtifactTypeInformation(finalType)) { | ||
const message = getMessageFunction(model); | ||
message('check-proper-type-of', artifact.type.location, artifact, { art: artifact.type }, | ||
'Info', { | ||
std: 'Referred type of $(ART) does not contain proper type information', | ||
element: 'Referred element $(MEMBER) of $(ART) does not contain proper type information', | ||
}); | ||
const { message } = makeMessageFunction(model); | ||
message('check-proper-type-of', [artifact.type.location, artifact], { art: artifact.type }, { | ||
std: 'Referred type of $(ART) does not contain proper type information', | ||
element: 'Referred element $(MEMBER) of $(ART) does not contain proper type information', | ||
}, 'Info'); | ||
return; | ||
@@ -180,5 +178,6 @@ } | ||
const message = getMessageFunction(model); | ||
message('enum-missing-value', enumNode.enum[failedAt].location, enumNode.enum[failedAt], | ||
{ name: failedAt }, 'Warning', 'Missing value for integer enum element $(NAME)'); | ||
const { warning } = makeMessageFunction(model); | ||
const failedEnum = enumNode.enum[failedAt]; | ||
warning('enum-missing-value', [failedEnum.location, failedEnum], | ||
{ name: failedAt }, 'Missing value for integer enum element $(NAME)'); | ||
} | ||
@@ -214,3 +213,3 @@ | ||
const message = getMessageFunction(model); | ||
const { warning } = makeMessageFunction(model); | ||
@@ -226,4 +225,4 @@ for (const key of Object.keys(enumNode.enum)) { | ||
message('enum-value-type', element.value.location, element, | ||
{ '#': expectedType, name: key, prop: actualType }, 'Warning', { | ||
warning('enum-value-type', [element.value.location, element], | ||
{ '#': expectedType, name: key, prop: actualType }, { | ||
number: 'Expected integer value for enum element $(NAME) but was $(PROP)', | ||
@@ -254,6 +253,5 @@ string: 'Expected string value for enum element $(NAME) but was $(PROP)' | ||
if (!isInteger && !isString) { | ||
const message = getMessageFunction(model); | ||
message('enum-invalid-type', enumNode.type.location, enumNode, | ||
{}, 'Warning', | ||
'Only string- or integer-like types are allowed for enums'); | ||
const { warning } = makeMessageFunction(model); | ||
warning('enum-invalid-type', [enumNode.type.location, enumNode], | ||
{}, 'Only string- or integer-like types are allowed for enums'); | ||
return; | ||
@@ -260,0 +258,0 @@ } |
'use strict'; | ||
const has = Object.prototype.hasOwnProperty; | ||
const alerts = require('../base/alerts'); | ||
const { checkArtifactHasProperType } = require('./checkArtifacts'); | ||
const { isComposition } = require('../model/modelUtils.js') | ||
const { getMessageFunction } = require('../base/messages'); | ||
const { makeMessageFunction } = require('../base/messages'); | ||
@@ -13,3 +12,3 @@ /** | ||
function checkPrimaryKey(elem, model) { | ||
const message = getMessageFunction(model); | ||
const { error, message } = makeMessageFunction(model); | ||
let type = ''; | ||
@@ -27,3 +26,3 @@ // apparently this is the resolved type (over an derived type chain) | ||
if(element.onCond) { | ||
message('unmanaged-as-key', elem.key.location, elem, {}, ['Error'], 'Unmanaged associations cannot be used as primary key'); | ||
message('unmanaged-as-key', [ elem.key.location, elem ], {}, 'Unmanaged associations cannot be used as primary key', ['Error']); | ||
} | ||
@@ -47,3 +46,3 @@ | ||
if(element.items){ | ||
message(null, element.location, element, {}, 'Error', 'Array-like types cannot be used as primary key'); | ||
error(null, [ element.location, element ], {}, 'Array-like types cannot be used as primary key'); | ||
} | ||
@@ -62,3 +61,3 @@ | ||
if(['cds.hana.ST_POINT', 'cds.hana.ST_GEOMETRY'].includes(type)){ | ||
message(null, elem.location, elem, { type }, 'Error', 'Type $(TYPE) cannot be used as primary key'); | ||
error(null, [ elem.location, elem ], { type }, 'Type $(TYPE) cannot be used as primary key'); | ||
} | ||
@@ -112,8 +111,8 @@ | ||
const message = getMessageFunction(model); | ||
const { warning } = makeMessageFunction(model); | ||
const isSubElement = element._parent && element._parent.kind === 'element'; | ||
if (element.localized && element.localized.val === true && isSubElement) { | ||
message('localized-sub-element', element.localized.location, element, {}, | ||
'Warning', 'Keyword "localized" is ignored for sub elements'); | ||
warning('localized-sub-element', [ element.localized.location, element ], {}, | ||
'Keyword "localized" is ignored for sub elements'); | ||
return; | ||
@@ -126,4 +125,4 @@ } | ||
if ((isTypeLocalized(element.type) && isSubElement) || hasTypeLocalizedElements(element._finalType)) { | ||
message('localized-sub-element', element.type.location, element, { type: element.type }, | ||
'Warning', 'Keyword "localized" in type $(TYPE) is ignored for sub elements'); | ||
warning('localized-sub-element', [ element.type.location, element ], { type: element.type }, | ||
'Keyword "localized" in type $(TYPE) is ignored for sub elements'); | ||
} | ||
@@ -138,3 +137,3 @@ } | ||
function checkManagedAssoc(elem, model) { | ||
const { error, warning, signal } = alerts(model); | ||
const { error, warning } = makeMessageFunction(model); | ||
const target = elem.target; | ||
@@ -148,3 +147,3 @@ // Not a managed assoc at all, inferred elem or redirected => nothing to check | ||
if (!foreignKeys) { | ||
signal(error`The target "${target._artifact.name.absolute}" of the managed association "${elem.name.id}" does not have keys`, target.location); | ||
error(null, target.location, `The target "${target._artifact.name.absolute}" of the managed association "${elem.name.id}" does not have keys`); | ||
} | ||
@@ -157,5 +156,4 @@ const targetMax = (elem.cardinality && elem.cardinality.targetMax && elem.cardinality.targetMax.val); | ||
let assocType = isComposition(elem.type) ? 'composition' : 'association'; | ||
signal(warning`The ${assocType} "${elem.name.id}" has cardinality "to many" but no ON-condition`, | ||
elem.cardinality.location || elem.name.location, undefined, | ||
isNoDb ? 'to-many-no-on-noDB' : 'to-many-no-on'); | ||
warning(isNoDb ? 'to-many-no-on-noDB' : 'to-many-no-on', elem.cardinality.location || elem.name.location, {}, | ||
`The ${assocType} "${elem.name.id}" has cardinality "to many" but no ON-condition`); | ||
} | ||
@@ -212,13 +210,13 @@ } | ||
function checkVirtualElement(elem, model) { | ||
const { error, signal } = alerts(model); | ||
const { error } = makeMessageFunction(model); | ||
// when the element itself is declared as virtual | ||
if (elem.virtual) { | ||
if (elem.key && elem.key.val) { | ||
signal(error`Element cannot be virtual and key`, elem.location); | ||
error(null, elem.location, `Element cannot be virtual and key`); | ||
} | ||
if (isStruct(elem)) { | ||
signal(error`Element cannot be virtual and structured`, elem.location); | ||
error(null, elem.location, `Element cannot be virtual and structured`); | ||
} | ||
if (isAssoc(elem)) { | ||
signal(error`Element cannot be virtual and an association`, elem.location); | ||
error(null, elem.location, `Element cannot be virtual and an association`); | ||
} | ||
@@ -233,10 +231,10 @@ } | ||
if (elem.type && !elem.type.$inferred) { | ||
const message = getMessageFunction( model ); | ||
const { error } = makeMessageFunction( model ); | ||
const loc = elem.type.location || elem.location; | ||
if (elem._finalType && elem._finalType.elements) | ||
message('type-cast-structured', loc, elem, {}, 'Error', `Cannot cast to structured element`); | ||
error('type-cast-structured', [ loc, elem ], {}, `Cannot cast to structured element`); | ||
else if (elem.value && elem.value._artifact && elem.value._artifact._finalType && elem.value._artifact._finalType.elements) | ||
message('type-cast-structured', loc, elem, {}, 'Error', `Structured element cannot be casted to a different type`); | ||
error('type-cast-structured', [ loc, elem ], {}, `Structured element cannot be casted to a different type`); | ||
} | ||
@@ -249,3 +247,3 @@ if (elem.value && elem.value.args) { | ||
function checkAssociation(elem, model) { | ||
const { error, signal } = alerts(model); | ||
const { error } = makeMessageFunction(model); | ||
// when virtual element is part of association | ||
@@ -256,3 +254,3 @@ if (elem.foreignKeys) { | ||
if (key && key._artifact && key._artifact.virtual && key._artifact.virtual.val === true) { | ||
signal(error`Virtual elements cannot be used as a foreign key for a managed association`, key.location); | ||
error(null, key.location, `Virtual elements cannot be used as a foreign key for a managed association`); | ||
} | ||
@@ -315,4 +313,4 @@ } | ||
if(!same) { | ||
const { error, signal } = alerts(model); | ||
signal(error`Unmanaged association condition can not follow another unmanaged association.`, path0.location); | ||
const { error } = makeMessageFunction(model); | ||
error(null, path0.location, `Unmanaged association condition can not follow another unmanaged association.`); | ||
} | ||
@@ -326,4 +324,4 @@ } | ||
if(op && op.val!=='=') { | ||
const { error, signal } = alerts(model); | ||
signal(error`$self comparison is only allowed with '='`, op.location); | ||
const { error } = makeMessageFunction(model); | ||
error(null, op.location, `$self comparison is only allowed with '='`); | ||
} | ||
@@ -351,8 +349,7 @@ } | ||
function checkTypeParameters(node, model) { | ||
const { error, signal } = alerts(model); | ||
const { error } = makeMessageFunction(model); | ||
if (node.type) { | ||
if (node.typeArguments) { | ||
signal(error`Too many parameters in type reference to '${node.type._artifact.name.absolute}'`, | ||
node.typeArguments[0].location); | ||
error(null, node.typeArguments[0].location, `Too many parameters in type reference to '${node.type._artifact.name.absolute}'`); | ||
} | ||
@@ -369,3 +366,3 @@ let parameters = (node.type._artifact && node.type._artifact.parameters) || []; | ||
// Are all type factes provided? | ||
// Are all type facets provided? | ||
if(absolute !== 'cds.Decimal' || hasTypeFacets) { | ||
@@ -375,4 +372,3 @@ for (let name in parameters) { | ||
if (!node[param] && !['cds.hana.ST_POINT', 'cds.hana.ST_GEOMETRY'].includes(absolute)) | ||
signal(error`Actual value for type parameter '${param}' missing in reference to type '${absolute}'`, | ||
node.type.location ); | ||
error(null, node.type.location, `Actual value for type parameter '${param}' missing in reference to type '${absolute}'`); | ||
} | ||
@@ -418,4 +414,3 @@ } | ||
if (!(Number.isInteger(paramValue) && paramValue >= 0)) { | ||
signal(error`Actual parameter '${paramName}' for '${node.type._artifact.name.absolute}' must be positive integer`, | ||
node[paramName].location); | ||
error(null, node[paramName].location, `Actual parameter '${paramName}' for '${node.type._artifact.name.absolute}' must be positive integer`); | ||
return false; | ||
@@ -429,9 +424,7 @@ } | ||
if (range.max && paramValue > range.max) { | ||
signal(error`Actual parameter '${paramName}' for '${node.type._artifact.name.absolute}' is larger than allowed (max: ${range.max})`, | ||
node[paramName].location); | ||
error(null, node[paramName].location, `Actual parameter '${paramName}' for '${node.type._artifact.name.absolute}' is larger than allowed (max: ${range.max})`); | ||
return false; | ||
} | ||
if (range.min && paramValue < range.min) { | ||
signal(error`Actual parameter '${paramName}' for '${node.type._artifact.name.absolute}' is smaller than allowed (min: ${range.min})`, | ||
node[paramName].location); | ||
error(null, node[paramName].location, `Actual parameter '${paramName}' for '${node.type._artifact.name.absolute}' is smaller than allowed (min: ${range.min})`); | ||
return false; | ||
@@ -446,3 +439,3 @@ } | ||
function checkCardinality(elem, model) { | ||
const { error, signal } = alerts(model); | ||
const { error } = makeMessageFunction(model); | ||
if (!elem.cardinality) { | ||
@@ -457,3 +450,3 @@ return; | ||
|| elem.cardinality[prop].literal === 'string' && elem.cardinality[prop].val === '*')) { | ||
signal(error`Element "${elem.name.absolute}.${elem.name.id}": Illegal value "${elem.cardinality[prop].val}" for max cardinality (must be a positive number or "*")`, elem.cardinality[prop].location); | ||
error(null, elem.cardinality[prop].location, `Element "${elem.name.absolute}.${elem.name.id}": Illegal value "${elem.cardinality[prop].val}" for max cardinality (must be a positive number or "*")`); | ||
} | ||
@@ -467,3 +460,3 @@ } | ||
if (!(elem.cardinality[prop].literal === 'number' && elem.cardinality[prop].val >= 0)) { | ||
signal(error`Element "${elem.name.absolute}.${elem.name.id}": Illegal value "${elem.cardinality[prop].val}" for min cardinality (must be a non-negative number)`, elem.cardinality[prop].location); | ||
error(null, elem.cardinality[prop].location, `Element "${elem.name.absolute}.${elem.name.id}": Illegal value "${elem.cardinality[prop].val}" for min cardinality (must be a non-negative number)`); | ||
} | ||
@@ -478,3 +471,3 @@ } | ||
&& elem.cardinality[p[0]].val > elem.cardinality[p[1]].val) { | ||
signal(error`Element "${elem.name.absolute}.${elem.name.id}": ${p[2]} minimum cardinality must not be greater than ${p[2].toLowerCase()} maximum cardinality`, elem.cardinality.location); | ||
error(null, elem.cardinality.location, `Element "${elem.name.absolute}.${elem.name.id}": ${p[2]} minimum cardinality must not be greater than ${p[2].toLowerCase()} maximum cardinality`); | ||
} | ||
@@ -485,3 +478,3 @@ }); | ||
function checkLocalizedElement (elem, model) { | ||
const { signal, warning } = alerts(model); | ||
const { warning } = makeMessageFunction(model); | ||
// if it is directly a localized element | ||
@@ -493,3 +486,3 @@ if (elem.localized && elem.localized.val) { | ||
if (!type || type.name.absolute !== 'cds.String') { | ||
signal(warning`Element "${elem.name.absolute}.${elem.name.id}": "localized" may only be used in combination with type "String"`); | ||
warning(null, [ elem.localized.location, elem ], {}, 'Keyword "localized" may only be used in combination with type "String"'); | ||
} | ||
@@ -531,4 +524,4 @@ } | ||
if (itemsType.items){ | ||
const message = getMessageFunction( model ); | ||
message('chained-array-of', obj.location, obj, { art: itemsType }, 'Warning', | ||
const { warning } = makeMessageFunction( model ); | ||
warning('chained-array-of', [ obj.location, obj ], { art: itemsType }, | ||
'"Array of"/"many" must not be chained with $(ART)'); | ||
@@ -557,7 +550,7 @@ } | ||
if (artifact.type && artifact.type._artifact) { | ||
const message = getMessageFunction(model); | ||
const { warning } = makeMessageFunction(model); | ||
if (artifact.items && !finalType.items) { | ||
message('type-items-mismatch', artifact.type.location, artifact, | ||
{ type: artifact.type, prop: 'items' }, 'Warning', | ||
warning('type-items-mismatch', [ artifact.type.location, artifact ], | ||
{ type: artifact.type, prop: 'items' }, | ||
'Used type $(TYPE) is not arrayed and conflicts with $(PROP) property' | ||
@@ -567,4 +560,4 @@ ); | ||
} else if (artifact.elements && !finalType.elements) { | ||
message('type-elements-mismatch', artifact.type.location, artifact, | ||
{ type: artifact.type, prop: 'elements' }, 'Warning', | ||
warning('type-elements-mismatch', [ artifact.type.location, artifact ], | ||
{ type: artifact.type, prop: 'elements' }, | ||
'Used type $(TYPE) is not structured and conflicts with $(PROP) property' | ||
@@ -571,0 +564,0 @@ ); |
@@ -12,9 +12,4 @@ 'use strict'; | ||
// TODO: On removal of the beta flag, filter out all illegal OData default values that are legal in the database | ||
// These are all sorts of expressions, functions, $now and turn the message into appropriate warnings | ||
// Or leave them as errors and get through the spec meeting. | ||
if(member.default && this.csn.options.toOdata && isBetaEnabled(this.csn.options, 'odataDefaultValues')) { | ||
// unary minus is xpr: [ "-", { val: ... } ] | ||
let def = member.default; | ||
if(member.default.xpr) { | ||
@@ -26,7 +21,3 @@ let i = 0; | ||
this.signal(this.error`Illegal number of unary '+/-' operators`, path); | ||
def = member.default.xpr[i]; | ||
} | ||
if(!(def.val !== undefined || def['#'])) { | ||
this.signal(this.error`Default value must be a simple value for OData exposure`, path); | ||
} | ||
} | ||
@@ -33,0 +24,0 @@ } |
@@ -7,3 +7,3 @@ // This is very similar to lib/model/enrichCsn - but the goal and the execution differ a bit: | ||
const csnRefs = require('../../model/csnRefs'); | ||
const { csnRefs } = require('../../model/csnRefs'); | ||
const { setProp } = require('../../base/model'); | ||
@@ -104,3 +104,3 @@ /** | ||
const { links, art, scope } = inspectRef( csnPath ); | ||
if (links) { | ||
if (links) { | ||
setProp(node, '_links', links); | ||
@@ -107,0 +107,0 @@ cleanupCallbacks.push(() => delete node._links); |
@@ -564,3 +564,3 @@ // Consistency checker on model (XSN = augmented CSN) | ||
_origin: { kind: [ 'entity' ], test: TODO }, // origin composition aspect for entity | ||
$from: { kind: true, test: TODO }, // all table refs necesary to compute elements | ||
$from: { kind: true, test: TODO }, // all table refs necessary to compute elements | ||
_redirected: { kind: true, test: TODO }, // for REDIRECTED TO: | ||
@@ -594,3 +594,3 @@ // ...array of table aliases for targets from orig to new | ||
redirected: { kind: true, test: TODO }, // TODO: do it with not-$inferred | ||
$extra: { parser: true, test: TODO }, // for unexpectex properties in CSN | ||
$extra: { parser: true, test: TODO }, // for unexpected properties in CSN | ||
$withLocalized: { test: isBoolean }, | ||
@@ -597,0 +597,0 @@ }; |
@@ -7,2 +7,3 @@ // The builtin artifacts of CDS | ||
const { builtinLocation } = require('../base/location'); | ||
const { setProp } = require('../base/model'); | ||
@@ -246,8 +247,2 @@ const core = { | ||
// Like `obj.prop = value`, but not contained in JSON / CSN | ||
function setProp( obj, prop, value ) { | ||
Object.defineProperty( obj, prop, { value, configurable: true, writable: true } ); | ||
return value; | ||
} | ||
module.exports = { | ||
@@ -254,0 +249,0 @@ initBuiltins, |
@@ -19,2 +19,4 @@ // Detect cycles in the dependencies between nodes (artifacts and elements) | ||
const { setProp } = require('../base/model'); | ||
// Detect cyclic dependencies between all nodes reachable from `definitions`. | ||
@@ -137,9 +139,2 @@ // If such a dependency is found, call `reportCycle` with arguments `dep.art` | ||
// Like `obj.prop = value`, but not contained in JSON / CSN | ||
function setProp( obj, prop, value ) { | ||
Object.defineProperty( obj, prop, { value, configurable: true } ); // not enumerable! | ||
} | ||
module.exports = detectCycles; |
@@ -6,2 +6,3 @@ // Module handling, layers and packages | ||
const detectCycles = require('./cycle-detector'); | ||
const { setProp } = require('../base/model'); | ||
@@ -64,8 +65,2 @@ function setLayers( sources ) { | ||
// Like `obj.prop = value`, but not contained in JSON / CSN | ||
function setProp( obj, prop, value ) { | ||
Object.defineProperty( obj, prop, { value, configurable: true, writable: true } ); | ||
return value; | ||
} | ||
module.exports = { | ||
@@ -72,0 +67,0 @@ setLayers, |
@@ -8,2 +8,3 @@ // Compiler functions and utilities shared across all phases | ||
const { addToDict, addToDictWithIndexNo, pushToDict } = require('../base/dictionaries'); | ||
const { setProp } = require('../base/model'); | ||
@@ -56,3 +57,3 @@ const dictKinds = { | ||
builtin: {}, // = CURRENT_DATE, TODO: improve | ||
$parameters: {}, // $parameters in query entitis | ||
$parameters: {}, // $parameters in query entities | ||
}; | ||
@@ -179,6 +180,6 @@ | ||
function rejectNonType( art ) { | ||
return ([ 'type', 'entity', 'view' ].includes( art.kind ) || | ||
return ([ 'type', 'entity', 'view', 'event' ].includes( art.kind ) || | ||
// art.kind === 'type' || // too strong for positive/BoundFunctions | ||
// art._main && art._main.kind === 'type') // currently too strong | ||
art._main && [ 'type', 'entity', 'view' ].includes( art._main.kind )) | ||
art._main && [ 'type', 'entity', 'view', 'event' ].includes( art._main.kind )) | ||
? undefined | ||
@@ -618,3 +619,3 @@ : 'expected-type'; | ||
} | ||
else if (art.$uncheckedElements) { | ||
else if (art && art.$uncheckedElements) { | ||
// do not check any elements of the path, e.g. $session | ||
@@ -833,8 +834,2 @@ return art; | ||
// Like `obj.prop = value`, but not contained in JSON / CSN | ||
function setProp( obj, prop, value ) { | ||
Object.defineProperty( obj, prop, { value, configurable: true, writable: true } ); | ||
return value; | ||
} | ||
// The link (_artifact,_finalType,...) usually has the artifact as value. | ||
@@ -841,0 +836,0 @@ // Falsy values are: |
@@ -8,3 +8,2 @@ 'use strict'; | ||
/* Vocabulary overview as of January 2020: | ||
@@ -139,3 +138,4 @@ | ||
const { signal } = alerts(csn); | ||
const messageFunctions = alerts.makeMessageFunction(csn, options); | ||
const { info, warning, error } = messageFunctions; | ||
@@ -174,7 +174,7 @@ const Edm = require('../edm.js')(options); | ||
if (dictTerm['$experimental'] && !experimental[termName]) { | ||
message(signal.warning, context, 'Term "' + termName + '" is experimental and can be changed or removed at any time, do not use productively!'); | ||
message(warning, context, 'Term "' + termName + '" is experimental and can be changed or removed at any time, do not use productively!'); | ||
experimental[termName] = true; | ||
} | ||
if (dictTerm['$deprecated'] && !deprecated[termName]) { | ||
message(signal.info, context, 'Term "' + termName + '" is deprecated. ' + dictTerm['$deprecationText']); | ||
message(info, context, 'Term "' + termName + '" is deprecated. ' + dictTerm['$deprecationText']); | ||
deprecated[termName] = true; | ||
@@ -259,3 +259,3 @@ } | ||
function message(severity, context, message) { | ||
let fullMessage = 'in annotation translation: ' + message; | ||
let fullMessage = 'In annotation translation: ' + message; | ||
if (context) { | ||
@@ -268,3 +268,3 @@ let loc = 'target: ' + context.target + ', annotation: ' + context.term; | ||
} | ||
signal(severity`${fullMessage}`); | ||
severity(null, null, `${fullMessage}`); | ||
} | ||
@@ -338,3 +338,3 @@ | ||
if (Object.keys(element).filter( x => x.substr(0,1) === '@' ).filter(filterKnownVocabularies).length > 0) { | ||
message(signal.warning, null, 'annotations at nested elements are not yet supported, object ' + objname + ', element ' + baseElemName + '.' + elemName); | ||
message(warning, null, 'annotations at nested elements are not yet supported, object ' + objname + ', element ' + baseElemName + '.' + elemName); | ||
} | ||
@@ -403,3 +403,3 @@ | ||
let mapType = (p) => (isBuiltinType(p.type)) ? | ||
edmUtils.mapCdsToEdmType(p, signal, false /*is only called for v4*/) : p.type; | ||
edmUtils.mapCdsToEdmType(p, messageFunctions, false /*is only called for v4*/) : p.type; | ||
for (let n in action.params) { | ||
@@ -492,3 +492,3 @@ let p = action.params[n]; | ||
alternativeEdmTargetName = (carrier.entitySetName || edmTargetName).replace(/\.(?=[^.]*$)/, '.EntityContainer/'); | ||
hasAlternativeCarrier = carrier.hasEntitySet; | ||
hasAlternativeCarrier = carrier.$hasEntitySet; | ||
} | ||
@@ -565,6 +565,9 @@ else if (carrier.kind === 'service') { | ||
} | ||
return true; | ||
} | ||
else if(testToStandardEdmTarget(appliesTo)) { | ||
newAnnosStd.append(annotation); | ||
return true; | ||
} | ||
return false; | ||
} | ||
@@ -637,3 +640,6 @@ }(); | ||
let dictTerm = getDictTerm(fullTermName, context); // message for unknown term was already issued in handleTerm | ||
addAnnotationFunc(anno, dictTerm && dictTerm.AppliesTo); | ||
if(!addAnnotationFunc(anno, dictTerm && dictTerm.AppliesTo)) { | ||
if(dictTerm && dictTerm.AppliesTo) | ||
message(warning, context, 'Term "' + fullTermName + '" is not applied (AppliesTo="' + dictTerm.AppliesTo.join(' ') + '")'); | ||
} | ||
} | ||
@@ -660,3 +666,3 @@ } | ||
if(!edmUtils.isSimpleIdentifier(identifier)){ | ||
message(signal.error, context, | ||
message(error, context, | ||
`OData annotation term "${identifier}" must consist of one or more dot separated simple identifiers (each starting with a letter or underscore, followed by at most 127 letters)`) | ||
@@ -674,3 +680,3 @@ } | ||
if (!edmUtils.isSimpleIdentifier(p[1])) { | ||
message(signal.error, context, | ||
message(error, context, | ||
`OData annotation qualifier "${p[1]}" must start with a letter or underscore, followed by at most 127 letters, underscores or digits`); | ||
@@ -682,3 +688,3 @@ } | ||
if (p.length>2) { | ||
message(signal.warning, context, 'multiple qualifiers (' + p[1] + ',' + p[2] + (p.length>3?',...':'') + ')') | ||
message(warning, context, 'multiple qualifiers (' + p[1] + ',' + p[2] + (p.length>3?',...':'') + ')') | ||
} | ||
@@ -693,3 +699,3 @@ | ||
else { | ||
message(signal.info, context, 'unknown term ' + termNameWithoutQualifiers); | ||
message(info, context, 'unknown term ' + termNameWithoutQualifiers); | ||
} | ||
@@ -730,3 +736,3 @@ | ||
if (Object.keys(cAnnoValue).length === 0) { | ||
message(signal.warning, context, 'empty record'); | ||
message(warning, context, 'empty record'); | ||
} | ||
@@ -758,3 +764,3 @@ else if ('=' in cAnnoValue) { | ||
if (!k || k.length === 0) { | ||
message(signal.warning, context, 'pseudo-struct without nested annotation'); | ||
message(warning, context, 'pseudo-struct without nested annotation'); | ||
} | ||
@@ -772,3 +778,3 @@ for (let nestedAnnoName of k) { | ||
// object consists only of properties starting with "@" | ||
message(signal.warning, context, 'nested annotations without corresponding base annotation'); | ||
message(warning, context, 'nested annotations without corresponding base annotation'); | ||
} | ||
@@ -799,12 +805,12 @@ else { | ||
if (!expectedType && !isPrimitiveType(dTypeName)) { | ||
message(signal.warning, context, "internal error: dictionary inconsistency: type '" + dTypeName + "' not found"); | ||
message(warning, context, "internal error: dictionary inconsistency: type '" + dTypeName + "' not found"); | ||
} | ||
else if (isComplexType(dTypeName)) { | ||
message(signal.warning, context, 'found enum value, but expected complex type ' + dTypeName); | ||
message(warning, context, 'found enum value, but expected complex type ' + dTypeName); | ||
} | ||
else if (isPrimitiveType(dTypeName) || expectedType['$kind'] !== 'EnumType') { | ||
message(signal.warning, context, 'found enum value, but expected non-enum type ' + dTypeName); | ||
message(warning, context, 'found enum value, but expected non-enum type ' + dTypeName); | ||
} | ||
else if (!expectedType['Members'].includes(enumValue)) { | ||
message(signal.warning, context, 'enumeration type ' + dTypeName + ' has no value ' + enumValue); | ||
message(warning, context, 'enumeration type ' + dTypeName + ' has no value ' + enumValue); | ||
} | ||
@@ -821,3 +827,3 @@ return; | ||
if (!type || type['IsFlags'] !== 'true') { | ||
message(signal.warning, context, "enum type '" + dTypeName + "' doesn't allow multiple values"); | ||
message(warning, context, "enum type '" + dTypeName + "' doesn't allow multiple values"); | ||
} | ||
@@ -833,3 +839,3 @@ | ||
// TODO improve message: but found ... | ||
message(signal.warning, context, 'expected an enum value'); | ||
message(warning, context, 'expected an enum value'); | ||
} | ||
@@ -859,3 +865,3 @@ context.stack.pop(); | ||
if (!expr) { | ||
message(signal.warning, context, 'empty expression value'); | ||
message(warning, context, 'empty expression value'); | ||
} | ||
@@ -891,3 +897,3 @@ else { | ||
if (!['true','false'].includes(val)) { | ||
message(signal.warning, context, 'found String, but expected type ' + dTypeName); | ||
message(warning, context, 'found String, but expected type ' + dTypeName); | ||
} | ||
@@ -898,3 +904,3 @@ } | ||
if (isNaN(Number(val)) || isNaN(parseFloat(val))) { | ||
message(signal.warning, context, 'found non-numeric string, but expected type ' + dTypeName); | ||
message(warning, context, 'found non-numeric string, but expected type ' + dTypeName); | ||
} | ||
@@ -905,10 +911,10 @@ } | ||
if (isNaN(Number(val)) || isNaN(parseFloat(val))) { | ||
message(signal.warning, context, 'found non-numeric string, but expected type ' + dTypeName); | ||
message(warning, context, 'found non-numeric string, but expected type ' + dTypeName); | ||
} | ||
} | ||
else if (isComplexType(dTypeName)) { | ||
message(signal.warning, context, 'found String, but expected complex type ' + dTypeName); | ||
message(warning, context, 'found String, but expected complex type ' + dTypeName); | ||
} | ||
else if (isEnumType(dTypeName)) { | ||
message(signal.warning, context, 'found String, but expected enum type ' + dTypeName); | ||
message(warning, context, 'found String, but expected enum type ' + dTypeName); | ||
typeName = 'EnumMember'; | ||
@@ -924,3 +930,3 @@ } | ||
// TODO | ||
//message(signal.warning, context, "type is not yet handled: found String, expected type: " + dTypeName); | ||
//message(warning, context, "type is not yet handled: found String, expected type: " + dTypeName); | ||
} | ||
@@ -940,3 +946,3 @@ } | ||
else { | ||
message(signal.warning, context, 'found Boolean, but expected type ' + dTypeName); | ||
message(warning, context, 'found Boolean, but expected type ' + dTypeName); | ||
} | ||
@@ -946,3 +952,3 @@ } | ||
if (isComplexType(dTypeName)) { | ||
message(signal.warning, context, 'found number, but expected complex type ' + dTypeName); | ||
message(warning, context, 'found number, but expected complex type ' + dTypeName); | ||
} | ||
@@ -953,6 +959,6 @@ else if (dTypeName === 'Edm.String') { | ||
else if (dTypeName === 'Edm.PropertyPath') { | ||
message(signal.warning, context, 'found number, but expected type ' + dTypeName); | ||
message(warning, context, 'found number, but expected type ' + dTypeName); | ||
} | ||
else if (dTypeName === 'Edm.Boolean') { | ||
message(signal.warning, context, 'found number, but expected type ' + dTypeName); | ||
message(warning, context, 'found number, but expected type ' + dTypeName); | ||
} | ||
@@ -982,3 +988,3 @@ else if (dTypeName === 'Edm.Decimal') { | ||
} else { | ||
message(signal.warning, context, "expected simple value, but found value '" + val + "' with type '" + typeof val + "'"); | ||
message(warning, context, "expected simple value, but found value '" + val + "' with type '" + typeof val + "'"); | ||
} | ||
@@ -1008,5 +1014,5 @@ | ||
if (!getDictType(dTypeName) && !isPrimitiveType(dTypeName) && !isCollection(dTypeName)) | ||
message(signal.warning, context, "internal error: dictionary inconsistency: type '" + dTypeName + "' not found"); | ||
message(warning, context, "internal error: dictionary inconsistency: type '" + dTypeName + "' not found"); | ||
else | ||
message(signal.warning, context, "found complex type, but expected type '" + dTypeName + "'"); | ||
message(warning, context, "found complex type, but expected type '" + dTypeName + "'"); | ||
return newRecord; | ||
@@ -1020,7 +1026,7 @@ } | ||
// this type doesn't exist | ||
message(signal.warning, context, "explicitly specified type '" + actualTypeName + "' not found in vocabulary"); | ||
message(warning, context, "explicitly specified type '" + actualTypeName + "' not found in vocabulary"); | ||
} | ||
else if (dTypeName && !isDerivedFrom(actualTypeName, dTypeName)) { | ||
// this type doesn't fit the expected one | ||
message(signal.warning, context, "explicitly specified type '" + actualTypeName | ||
message(warning, context, "explicitly specified type '" + actualTypeName | ||
+ "' is not derived from expected type '" + dTypeName + "'"); | ||
@@ -1031,3 +1037,3 @@ actualTypeName = dTypeName; | ||
// this type is abstract | ||
message(signal.warning, context, "explicitly specified type '" + actualTypeName + "' is abstract, specify a concrete type"); | ||
message(warning, context, "explicitly specified type '" + actualTypeName + "' is abstract, specify a concrete type"); | ||
actualTypeName = dTypeName; | ||
@@ -1051,3 +1057,3 @@ } | ||
if (isAbstractType(actualTypeName)) { | ||
message(signal.warning, context, "type '" + dTypeName + "' is abstract, use '$Type' to specify a concrete type"); | ||
message(warning, context, "type '" + dTypeName + "' is abstract, use '$Type' to specify a concrete type"); | ||
} | ||
@@ -1082,3 +1088,3 @@ | ||
if (!dictPropertyTypeName){ | ||
message(signal.warning, context, "record type '" + actualTypeName + "' doesn't have a property '" + i + "'"); | ||
message(warning, context, "record type '" + actualTypeName + "' doesn't have a property '" + i + "'"); | ||
} | ||
@@ -1112,3 +1118,3 @@ } | ||
else { | ||
message(signal.warning, context, 'found collection value, but expected non-collection type ' + dTypeName); | ||
message(warning, context, 'found collection value, but expected non-collection type ' + dTypeName); | ||
} | ||
@@ -1125,3 +1131,3 @@ } | ||
if (Array.isArray(value)) { | ||
message(signal.warning, context, 'nested collections are not supported'); | ||
message(warning, context, 'nested collections are not supported'); | ||
} | ||
@@ -1136,3 +1142,3 @@ else if (value && typeof value === 'object') { | ||
else if (value['#']) { | ||
message(signal.warning, context, 'enum inside collection is not yet supported'); | ||
message(warning, context, 'enum inside collection is not yet supported'); | ||
} | ||
@@ -1169,3 +1175,3 @@ else { | ||
if(subset.length > 1) { // doesn't work for three or more... | ||
message(signal.warning, context, 'edmJson code contains more than one special property: ' + subset); | ||
message(warning, context, 'edmJson code contains more than one special property: ' + subset); | ||
return null; | ||
@@ -1179,3 +1185,3 @@ } | ||
} | ||
message(signal.warning, context, 'edmJson code contains no special property out of: ' + specialProperties); | ||
message(warning, context, 'edmJson code contains no special property out of: ' + specialProperties); | ||
return null; | ||
@@ -1198,3 +1204,3 @@ } | ||
else { | ||
message(signal.warning, context, 'unexpected element without $: ' + p); | ||
message(warning, context, 'unexpected element without $: ' + p); | ||
} | ||
@@ -1211,3 +1217,3 @@ } | ||
else if (Array.isArray(a)) { | ||
message(signal.warning, context, 'verbatim code contains nested array'); | ||
message(warning, context, 'verbatim code contains nested array'); | ||
} | ||
@@ -1214,0 +1220,0 @@ else { |
@@ -36,3 +36,5 @@ 'use strict'; | ||
const { signal } = alerts(csn); | ||
// use original options for messages; cloned CSN for semantic location | ||
const messageFunctions = alerts.makeMessageFunction(csn, _options); | ||
const { info, warning, error } = messageFunctions; | ||
checkCSNVersion(csn, _options); | ||
@@ -45,6 +47,5 @@ | ||
let v = options.v; | ||
if(services.length === 0) { | ||
signal(signal.info`No Services in model`); | ||
info(null, null, `No Services in model`); | ||
return rc; | ||
@@ -56,3 +57,3 @@ } | ||
if(serviceCsn == undefined) { | ||
signal(signal.warning`No service definition with name "${serviceName}" found in the model`); | ||
warning(null, null, `No service definition with name "${serviceName}" found in the model`); | ||
} | ||
@@ -75,3 +76,2 @@ else { | ||
function createEdm(serviceCsn) { | ||
let navigationProperties = []; | ||
@@ -83,546 +83,680 @@ function baseName(str, del) { let l = str.lastIndexOf(del); // eslint-disable-line no-unused-vars | ||
// let alias = serviceCsn.alias || baseName(baseName(serviceCsn.name, '::'), '.'); | ||
// FIXME: UI5 cannot deal with spec conforming simpleid alias names | ||
let serviceName = serviceCsn.name; | ||
let alias = serviceName; | ||
/** @type {object} */ | ||
let Schema = new Edm.Schema(v, serviceName, undefined /* unset alias */, serviceCsn ); | ||
const service = new Edm.DataServices(v); | ||
const edm = new Edm.Edm(v, service); | ||
// now namespace and alias are used to create the fullQualified(name) | ||
const namespace = serviceName + '.' | ||
alias += '.' | ||
/* ------------------------------------------------- | ||
Multi Schema generation in V4: | ||
let service = new Edm.DataServices(v, Schema); | ||
let edm = new Edm.Edm(v, service); | ||
If a service contains nested contexts (exactly one level)! | ||
then these contexts are interpreted as additional schemas: | ||
/* create the entitytypes and sets | ||
Do not create an entity set if: | ||
V4 containment: _containerEntity is set and not equal with the artifact name | ||
Entity starts with 'localserviceNameized.' or ends with '_localized' | ||
*/ | ||
edmUtils.foreach(csn.definitions, | ||
a => edmUtils.isEntityOrView(a) && !a.abstract && a.name.startsWith(serviceName + '.'), | ||
createEntityTypeAndSet | ||
); | ||
// create unbound actions/functions | ||
edmUtils.foreach(csn.definitions, a => edmUtils.isActionOrFunction(a) && a.name.startsWith(serviceName + '.'), | ||
(options.isV4()) ? createActionV4 : createActionV2); | ||
service MainSchema { | ||
entity A { toD: association to SideSchema1.D; }; | ||
context SideSchema1 { | ||
entity D {}; | ||
} | ||
context SideSchema2 { | ||
... | ||
} | ||
}; | ||
// create the complex types (don't render aspects by using $syntax hack until kind='aspect' is available) | ||
edmUtils.foreach(csn.definitions, a => !(['aspect','event'].includes(a.kind) || a.$syntax === 'aspect') && edmUtils.isStructuredType(a) && a.name.startsWith(serviceName + '.'), createComplexType); | ||
Only the main schema has an entity container | ||
Nested definitions are identified by their name in | ||
definitions: | ||
if(options.isV4()) | ||
{ | ||
edmUtils.foreach(csn.definitions, | ||
artifact => edmUtils.isDerivedType(artifact) && | ||
!edmUtils.isAssociationOrComposition(artifact) && | ||
artifact.name.startsWith(serviceName + '.'), | ||
createTypeDefinition); | ||
} | ||
MainSchema.A: {}, | ||
MainSchema.SideSchema1.D: {}, | ||
MainSchema.SideSchema2.... | ||
// fetch all exising children names in a map | ||
let NamesInSchemaXRef = Schema._children.reduce((acc, cur) => { | ||
if(acc[cur.Name] === undefined) { | ||
acc[cur.Name] = [ cur ]; | ||
} else { | ||
acc[cur.Name].push(cur); | ||
} | ||
return acc; | ||
}, Object.create(null) ); | ||
This requires that the names of all members | ||
of the side elements must be stripped to reflect the | ||
schema local name (with single schema prefix). | ||
Also all schema members need to be grouped into | ||
their respective schemas. | ||
navigationProperties.forEach(np => { | ||
if(options.isV4()) { | ||
// V4: No referential constraints for Containment Relationships | ||
if((!np.isContainment() || (options.renderForeignKeys)) && !np.isToMany()) | ||
np.addReferentialConstraintNodes(); | ||
} | ||
else | ||
addAssociation(np); | ||
}); | ||
All type references inside the EDM sub nodes must | ||
also be rewritten to address the individual schema | ||
entries. | ||
-----------------------------------------------*/ | ||
createAnnotations(edm); | ||
if(options.isV4()) { | ||
// tunnel schema xref and servicename in options to edm.Typebase to rectify | ||
// type references that are eventually also prefixed with the service schema name. | ||
options.serviceName = serviceCsn.name; | ||
options.fqSchemaXRef = [serviceCsn.name]; | ||
for(let name in NamesInSchemaXRef) { | ||
if(NamesInSchemaXRef[name].length > 1) { | ||
let artifactName = `${Schema.Namespace}.${name}`; | ||
signal(signal.error`Duplicate name "${name}" in Namespace "${Schema.Namespace}"`, ['definitions',artifactName]); | ||
} | ||
} | ||
// remove EntityContainer if empty | ||
if(Schema._ec._children.length === 0) { | ||
let pos = Schema._children.indexOf(Schema._ec); | ||
Schema._children.splice(pos, 1); | ||
} | ||
if(Schema._children.length === 0) { | ||
signal(signal.warning`Schema is empty`, ['definitions',Schema.Namespace]); | ||
} | ||
const schemas = { [serviceCsn.name]: { name: serviceCsn.name, fqName: serviceCsn.name, _csn: serviceCsn, container: true, definitions: Object.create(null) } }; | ||
Object.keys(csn.definitions).reduce((schemas, fqName) => { | ||
const art = csn.definitions[fqName]; | ||
// add sub schemas | ||
if(fqName.startsWith(serviceCsn.name + '.') && art.kind === 'context') { | ||
options.fqSchemaXRef.push(fqName); | ||
// strip the toplevel service schema name | ||
const name = fqName.replace(serviceCsn.name + '.', ''); | ||
schemas[name] = { name, fqName, _csn: art, container: false, definitions: Object.create(null) }; | ||
} | ||
return schemas; | ||
}, schemas); | ||
return edm | ||
// fill the schemas, unfortunately this can't be done in one step | ||
// as all possible prefix combinations must be known in fqSchemaXRef | ||
Object.keys(csn.definitions).reduce((schemas, name) => { | ||
const art = csn.definitions[name]; | ||
function createEntityTypeAndSet(entityCsn) | ||
{ | ||
// EntityType attributes are: Name, BaseType, Abstract, OpenType, HasStream | ||
// Sub Elements are: Key, Property, NavigationProperty | ||
// Identify service members by their definition name only, this allows | ||
// to let the internal object.name have the sub-schema name. | ||
let schemaName = options.fqSchemaXRef.reduce((a, n) => { | ||
if(name.startsWith( n + '.')) a = n; | ||
return a; }, undefined); | ||
let EntityTypeName = entityCsn.name.replace(namespace, ''); | ||
let EntitySetName = (entityCsn.entitySetName || entityCsn.name).replace(namespace, ''); | ||
if(schemaName && art.kind !== 'context') { | ||
// strip the toplevel serviceName from object.name except if the schema name is the service name itself | ||
// proxy names are not prefixed, as they need to be reused. | ||
if(schemaName !== serviceCsn.name) { | ||
name = art.name = name.replace(serviceCsn.name + '.', ''); | ||
schemaName = schemaName.replace(serviceCsn.name + '.', ''); | ||
} | ||
schemas[schemaName].definitions[name] = art; | ||
} | ||
return schemas; | ||
}, schemas); | ||
let [ properties, hasStream ] = createProperties(entityCsn); | ||
if(properties.length === 0) { | ||
signal(signal.warning`EntityType "${serviceName}.${EntityTypeName}" has no properties`, ['definitions',entityCsn.name]); | ||
} else if(entityCsn.$edmKeyPaths.length === 0) { | ||
signal(signal.warning`EntityType "${serviceName}.${EntityTypeName}" has no primary key`, ['definitions',entityCsn.name]); | ||
} | ||
/* | ||
References into other Schemas | ||
// construct EntityType attributes | ||
let attributes = { Name : EntityTypeName }; | ||
References are top level elements in an EDM. However, | ||
they are valid per service only, so a special link | ||
object needs to be created that link into the target | ||
schema. | ||
// CDXCORE-CDXCORE-173 | ||
if(options.isV2() && hasStream) | ||
attributes['m:HasStream'] = hasStream; | ||
Technically these are also contexts but with kind='reference' | ||
Schema.append(new Edm.EntityType(v, attributes, properties, entityCsn)); | ||
As they are not part of the official CSN spec, they are created | ||
transiently in the type/proxy exposure. | ||
if (entityCsn.hasEntitySet) | ||
{ | ||
/** @type {object} */ | ||
let containerEntry; | ||
let singleton = entityCsn['@odata.singleton']; | ||
let hasNullable = entityCsn['@odata.singleton.nullable'] !== undefined && | ||
entityCsn['@odata.singleton.nullable'] !== null; | ||
ref = { kind: 'reference', | ||
name: targetSchemaName, | ||
ref: { Uri }, | ||
inc: { Namespace: targetSchemaName, optionalAlias }, | ||
$mySchemaName: targetSchemaName, | ||
$proxy: true | ||
}; | ||
if(singleton || ((singleton === undefined || singleton === null) && hasNullable)) { | ||
containerEntry = new Edm.Singleton(v, { Name: EntitySetName, Type: fullQualified(EntityTypeName) }, entityCsn); | ||
if(entityCsn['@odata.singleton.nullable']) | ||
containerEntry.Nullable= true; | ||
*/ | ||
const references = Object.keys(csn.definitions).reduce((references, fqName) => { | ||
const art = csn.definitions[fqName]; | ||
// add references | ||
if(fqName.startsWith(serviceCsn.name + '.') && art.kind === 'reference') { | ||
options.fqSchemaXRef.push(fqName); | ||
references.push(art); | ||
} | ||
else { | ||
containerEntry = new Edm.EntitySet(v, { Name: EntitySetName, EntityType: fullQualified(EntityTypeName) }, entityCsn); | ||
} | ||
return references; | ||
}, []); | ||
// V4: Create NavigationPropertyBinding in EntitySet | ||
// if NavigationProperty is not a Containment and if the target is not a containee | ||
if(options.isV4()) | ||
properties.filter(np => | ||
np instanceof Edm.NavigationProperty && | ||
// @ts-ignore TypeScript does not recognize these properties on type NavigationProperty | ||
!np.isContainment() && !edmUtils.isContainee(np._targetCsn) && !np._targetCsn.$proxy | ||
). forEach(np => | ||
containerEntry.append(np.createNavigationPropertyBinding(namespace))); | ||
// bring the schemas in alphabetical order, service first, root last | ||
const schemaNames = Object.keys(schemas).filter(n => n !== 'root' && n !== serviceCsn.name).sort(); | ||
schemaNames.splice(0,0, serviceCsn.name); | ||
if(schemas.root) | ||
schemaNames.push('root'); | ||
Schema._ec.append(containerEntry); | ||
} | ||
// finally create the schemas and register them in the service. | ||
schemaNames.forEach(name => { | ||
const schema = schemas[name]; | ||
service.append(createSchema(schema)); | ||
}); | ||
// put actions behind entity types in Schema/EntityContainer | ||
edmUtils.forAll(entityCsn.actions, (a, n) => { | ||
(options.isV4()) ? createActionV4(a, n, entityCsn) | ||
: createActionV2(a, n, entityCsn) | ||
references.forEach(ref => { | ||
let r = new Edm.Reference(v, ref.ref); | ||
r.append(new Edm.Include(v, ref.inc)) | ||
edm._defaultRefs.push(r); | ||
}); | ||
} | ||
else { | ||
const schema = { name: serviceCsn.name, _csn: serviceCsn, container: true, definitions: csn.definitions }; | ||
const LeadSchema = createSchema(schema); | ||
service.append(LeadSchema); | ||
} | ||
// add bound/unbound actions/functions for V4 | ||
function createActionV4(actionCsn, name, entityCsn=undefined) | ||
{ | ||
let iAmAnAction = actionCsn.kind === 'action'; | ||
createAnnotations(edm); | ||
return edm | ||
let actionName = actionCsn.name.replace(namespace, ''); | ||
function createSchema(schema) { | ||
let attributes = { Name: actionName, IsBound : false }; | ||
const Schema = new Edm.Schema(v, schema.name, undefined /* unset alias */, schema._csn, /* annotations */ [], schema.container); | ||
if(!iAmAnAction) | ||
attributes.IsComposable = false; | ||
// now namespace and alias are used to create the fullQualified(name) | ||
const schemaNamePrefix = schema.name + '.' | ||
const schemaAliasPrefix = schemaNamePrefix; | ||
const schemaCsn = schema; | ||
let navigationProperties = []; | ||
/** @type {object} */ | ||
let actionNode = (iAmAnAction) ? new Edm.Action(v, attributes) | ||
: new Edm.FunctionDefinition(v, attributes); | ||
/* create the entitytypes and sets | ||
Do not create an entity set if: | ||
V4 containment: _containerEntity is set and not equal with the artifact name | ||
Entity starts with 'localserviceNameized.' or ends with '_localized' | ||
*/ | ||
edmUtils.foreach(schemaCsn.definitions, | ||
a => edmUtils.isEntityOrView(a) && !a.abstract && a.name.startsWith(schemaNamePrefix), | ||
createEntityTypeAndSet | ||
); | ||
// create unbound actions/functions | ||
edmUtils.foreach(schemaCsn.definitions, a => edmUtils.isActionOrFunction(a) && a.name.startsWith(schemaNamePrefix), | ||
(options.isV4()) ? createActionV4 : createActionV2); | ||
// bpName is eventually used later for EntitySetPath | ||
let bpNameAnno = actionCsn['@cds.odata.bindingparameter.name']; | ||
let bpName = bpNameAnno !== undefined ? (bpNameAnno['='] || bpNameAnno) : 'in'; | ||
// create the complex types (don't render aspects by using $syntax hack until kind='aspect' is available) | ||
edmUtils.foreach(schemaCsn.definitions, a => !(['aspect','event'].includes(a.kind) || a.$syntax === 'aspect') && edmUtils.isStructuredType(a) && a.name.startsWith(schemaNamePrefix), createComplexType); | ||
if(entityCsn != undefined) | ||
if(options.isV4()) | ||
{ | ||
actionNode.IsBound = true; | ||
let bpType = fullQualified(entityCsn.name); | ||
// Binding Parameter: 'in' at first position in sequence, this is decisive! | ||
if(actionCsn['@cds.odata.bindingparameter.collection']) | ||
actionNode.append(new Edm.Parameter(v, { Name: bpName, Type: bpType, Collection:true } )); | ||
else | ||
actionNode.append(new Edm.Parameter(v, { Name: bpName, Type: bpType } )); | ||
edmUtils.foreach(schemaCsn.definitions, | ||
artifact => edmUtils.isDerivedType(artifact) && | ||
!edmUtils.isAssociationOrComposition(artifact) && | ||
artifact.name.startsWith(schemaNamePrefix), | ||
createTypeDefinition); | ||
} | ||
else // unbound => produce Action/FunctionImport | ||
{ | ||
/** @type {object} */ | ||
let actionImport = iAmAnAction | ||
? new Edm.ActionImport(v, { Name: actionName, Action : fullQualified(actionName) }) | ||
: new Edm.FunctionImport(v, { Name: actionName, Function : fullQualified(actionName) }); | ||
let rt = actionCsn.returns && ((actionCsn.returns.items && actionCsn.returns.items.type) || actionCsn.returns.type); | ||
if(rt) // add EntitySet attribute only if return type is a non abstract entity | ||
{ | ||
let definition = csn.definitions[rt]; | ||
if(definition && definition.kind === 'entity' && !definition.abstract) | ||
{ | ||
actionImport.EntitySet = rt.replace(namespace, ''); | ||
} | ||
// fetch all exising children names in a map | ||
let NamesInSchemaXRef = Schema._children.reduce((acc, cur) => { | ||
if(acc[cur.Name] === undefined) { | ||
acc[cur.Name] = [ cur ]; | ||
} else { | ||
acc[cur.Name].push(cur); | ||
} | ||
Schema._ec.append(actionImport); | ||
} | ||
return acc; | ||
}, Object.create(null) ); | ||
// Parameter Nodes | ||
edmUtils.forAll(actionCsn.params, (parameterCsn, parameterName) => { | ||
actionNode.append(new Edm.Parameter(v, { Name: parameterName }, parameterCsn )); | ||
navigationProperties.forEach(np => { | ||
if(options.isV4()) { | ||
// V4: No referential constraints for Containment Relationships | ||
if((!np.isContainment() || (options.renderForeignKeys)) && !np.isToMany()) | ||
np.addReferentialConstraintNodes(); | ||
} | ||
else | ||
addAssociation(np); | ||
}); | ||
// return type if any | ||
if(actionCsn.returns) { | ||
actionNode._returnType = new Edm.ReturnType(v, actionCsn.returns); | ||
// if binding type matches return type add attribute EntitySetPath | ||
if(entityCsn != undefined && fullQualified(entityCsn.name) === actionNode._returnType._type) { | ||
actionNode.EntitySetPath = bpName; | ||
// remove EntityContainer if empty | ||
if(Schema._ec && Schema._ec._children.length === 0) { | ||
let pos = Schema._children.indexOf(Schema._ec); | ||
Schema._children.splice(pos, 1); | ||
} | ||
if(Schema._children.length === 0) { | ||
// FIXME: Location for sub schemas? | ||
warning(null, ['definitions', Schema.Namespace], `Schema "${Schema.Namespace}" is empty`); | ||
} | ||
for(let name in NamesInSchemaXRef) { | ||
if(NamesInSchemaXRef[name].length > 1) { | ||
let artifactName = `${Schema.Namespace}.${name}`; | ||
error(null, ['definitions', artifactName], `Duplicate name "${name}" in Schema "${Schema.Namespace}"`); | ||
} | ||
} | ||
Schema.addAction(actionNode); | ||
} | ||
return Schema; | ||
// add bound/unbound actions/functions for V2 | ||
function createActionV2(actionCsn, name, entityCsn=undefined) | ||
{ | ||
/** @type {object} */ | ||
let functionImport = new Edm.FunctionImport(v, { Name: name.replace(namespace, '') } ); | ||
function createEntityTypeAndSet(entityCsn) | ||
{ | ||
let EntityTypeName = entityCsn.name.replace(schemaNamePrefix, ''); | ||
let EntitySetName = (entityCsn.entitySetName || entityCsn.name).replace(schemaNamePrefix, ''); | ||
// inserted now to maintain attribute order with old odata generator... | ||
/* | ||
V2 says (p33): | ||
* If the return type of FunctionImport is a collection of entities, the EntitySet | ||
attribute is defined. | ||
* If the return type of FunctionImport is of ComplexType or scalar type, | ||
the EntitySet attribute cannot be defined. | ||
The spec doesn't mention single ET: Ralf Handls confirmed that there is a gap | ||
in the spec and advised mention it as in V4 | ||
*/ | ||
let [ properties, hasStream ] = createProperties(entityCsn); | ||
let rt = actionCsn.returns && ((actionCsn.returns.items && actionCsn.returns.items.type) || actionCsn.returns.type); | ||
if(rt) // add EntitySet attribute only if return type is an entity | ||
{ | ||
let defintion = csn.definitions[rt]; | ||
if(defintion && edmUtils.isEntityOrView(defintion)) | ||
{ | ||
functionImport.EntitySet = rt.replace(namespace, ''); | ||
if(properties.length === 0) { | ||
warning(null, ['definitions', entityCsn.name], `EntityType "${schema.name}.${EntityTypeName}" has no properties`); | ||
} else if(entityCsn.$edmKeyPaths.length === 0) { | ||
warning(null, ['definitions', entityCsn.name], `EntityType "${schema.name}.${EntityTypeName}" has no primary key`); | ||
} | ||
} | ||
if(actionCsn.returns) | ||
functionImport.ReturnType = getReturnType(actionCsn); | ||
// construct EntityType attributes | ||
let attributes = { Name : EntityTypeName }; | ||
if(actionCsn.kind === 'function') | ||
functionImport.setXml( {'m:HttpMethod': 'GET' }); | ||
else if(actionCsn.kind === 'action') | ||
functionImport.setXml( {'m:HttpMethod': 'POST'}); | ||
else | ||
throw Error('Please debug me: Neither function nor action'); | ||
// CDXCORE-CDXCORE-173 | ||
if(options.isV2() && hasStream) | ||
attributes['m:HasStream'] = hasStream; | ||
if(entityCsn != undefined) | ||
{ | ||
// Make bound function names always unique as per Ralf's recommendation | ||
functionImport.setXml( {'sap:action-for': fullQualified(entityCsn.name) } ); | ||
functionImport.Name = entityCsn.name.replace(namespace, '') + '_' + functionImport.Name; | ||
Schema.append(new Edm.EntityType(v, attributes, properties, entityCsn)); | ||
// Binding Parameter: Primary Keys at first position in sequence, this is decisive! | ||
// V2 XML: Nullable=false is set because we reuse the primary key property for the parameter | ||
edmUtils.foreach(entityCsn.elements, | ||
elementCsn => elementCsn.key && !edmUtils.isAssociationOrComposition(elementCsn), | ||
(elementCsn, elementName) => { | ||
functionImport.append(new Edm.Parameter(v, { Name: elementName }, elementCsn, 'In' )); | ||
if (Schema._ec && entityCsn.$hasEntitySet) | ||
{ | ||
/** @type {object} */ | ||
let containerEntry; | ||
let singleton = entityCsn['@odata.singleton']; | ||
let hasNullable = entityCsn['@odata.singleton.nullable'] !== undefined && | ||
entityCsn['@odata.singleton.nullable'] !== null; | ||
if(singleton || ((singleton === undefined || singleton === null) && hasNullable)) { | ||
containerEntry = new Edm.Singleton(v, { Name: EntitySetName, Type: fullQualified(EntityTypeName) }, entityCsn); | ||
if(entityCsn['@odata.singleton.nullable']) | ||
containerEntry.Nullable= true; | ||
} | ||
); | ||
else { | ||
containerEntry = new Edm.EntitySet(v, { Name: EntitySetName, EntityType: fullQualified(EntityTypeName) }, entityCsn); | ||
} | ||
// V4: Create NavigationPropertyBinding in EntitySet | ||
// if NavigationProperty is not a Containment and if the target is not a containee | ||
if(options.isV4()) | ||
properties.filter(np => | ||
np instanceof Edm.NavigationProperty && | ||
// @ts-ignore TypeScript does not recognize these properties on type NavigationProperty | ||
!np.isContainment() && !edmUtils.isContainee(np._targetCsn) && !np._targetCsn.$proxy && !np._targetCsn.$externalRef | ||
). forEach(np => | ||
containerEntry.append(np.createNavigationPropertyBinding(schemaNamePrefix))); | ||
Schema._ec.append(containerEntry); | ||
} | ||
// put actions behind entity types in Schema/EntityContainer | ||
edmUtils.forAll(entityCsn.actions, (a, n) => { | ||
(options.isV4()) ? createActionV4(a, n, entityCsn) | ||
: createActionV2(a, n, entityCsn) | ||
}); | ||
} | ||
// is this still required? | ||
for (let p in actionCsn) | ||
if (p.match(/^@sap\./)) | ||
functionImport.setXml( { ['sap:' + p.slice(5).replace(/\./g, '-')] : actionCsn[p] }); | ||
// add bound/unbound actions/functions for V4 | ||
function createActionV4(actionCsn, name, entityCsn=undefined) | ||
{ | ||
let iAmAnAction = actionCsn.kind === 'action'; | ||
// then append all other parameters | ||
// V2 XML: Parameters that are not explicitly marked as Nullable or NotNullable in the CSN must become Nullable=true | ||
// V2 XML spec does only mention default Nullable=true for Properties not for Parameters so omitting Nullable=true let | ||
// the client assume that Nullable is false.... Correct Nullable Handling is done inside Parameter constructor | ||
edmUtils.forAll(actionCsn.params, (parameterCsn, parameterName) => { | ||
functionImport.append(new Edm.Parameter(v, { Name: parameterName }, parameterCsn, 'In' )); | ||
}); | ||
let actionName = actionCsn.name.replace(schemaNamePrefix, ''); | ||
Schema._ec.append(functionImport); | ||
} | ||
let attributes = { Name: actionName, IsBound : false }; | ||
function getReturnType(action) | ||
{ | ||
// it is safe to assume that either type or items.type are set | ||
let returns = action.returns.items || action.returns; | ||
let type = returns.type; | ||
if(type) | ||
type = edmUtils.mapCdsToEdmType(returns, signal, options.isV2()); | ||
if(!iAmAnAction) | ||
attributes.IsComposable = false; | ||
if(type && action.returns.items) | ||
type = `Collection(${type})` | ||
/** @type {object} */ | ||
let actionNode = (iAmAnAction) ? new Edm.Action(v, attributes) | ||
: new Edm.FunctionDefinition(v, attributes); | ||
return type; | ||
} | ||
// bpName is eventually used later for EntitySetPath | ||
let bpNameAnno = actionCsn['@cds.odata.bindingparameter.name']; | ||
let bpName = bpNameAnno !== undefined ? (bpNameAnno['='] || bpNameAnno) : 'in'; | ||
/** | ||
* @param {object} parentCsn | ||
* @returns {[object[], boolean]} Returns a [ [ Edm Properties ], boolean hasStream ]: | ||
* array of Edm Properties | ||
* boolean hasStream : true if at least one element has @Core.MediaType assignment | ||
*/ | ||
function createProperties(parentCsn) | ||
{ | ||
let props = []; | ||
let hasStream = false; | ||
edmUtils.forAll(parentCsn.elements, (elementCsn, elementName) => | ||
{ | ||
if(elementCsn._parent == undefined) | ||
setProp(elementCsn, '_parent', parentCsn); | ||
if(entityCsn != undefined) | ||
{ | ||
actionNode.IsBound = true; | ||
let bpType = fullQualified(entityCsn.name); | ||
// Binding Parameter: 'in' at first position in sequence, this is decisive! | ||
if(actionCsn['@cds.odata.bindingparameter.collection']) | ||
actionNode.append(new Edm.Parameter(v, { Name: bpName, Type: bpType, Collection:true } )); | ||
else | ||
actionNode.append(new Edm.Parameter(v, { Name: bpName, Type: bpType } )); | ||
} | ||
else if(Schema._ec)// unbound => produce Action/FunctionImport | ||
{ | ||
/** @type {object} */ | ||
let actionImport = iAmAnAction | ||
? new Edm.ActionImport(v, { Name: actionName, Action : fullQualified(actionName) }) | ||
: new Edm.FunctionImport(v, { Name: actionName, Function : fullQualified(actionName) }); | ||
if(!elementCsn._ignore) { | ||
if(edmUtils.isAssociationOrComposition(elementCsn)) | ||
let rt = actionCsn.returns && ((actionCsn.returns.items && actionCsn.returns.items.type) || actionCsn.returns.type); | ||
if(rt) // add EntitySet attribute only if return type is a non abstract entity | ||
{ | ||
// Foreign keys are part of the generic elementCsn.elements property creation | ||
// This is the V4 edmx:NavigationProperty | ||
// gets rewritten for V2 in addAssociations() | ||
// suppress navprop creation only if @odata.navigable:false is not annotated. | ||
// (undefined !== false) still evaluates to true | ||
if (!elementCsn._target.abstract && elementCsn['@odata.navigable'] !== false) | ||
let definition = schemaCsn.definitions[rt]; | ||
if(definition && definition.kind === 'entity' && !definition.abstract) | ||
{ | ||
let navProp = new Edm.NavigationProperty(v, { | ||
Name: elementName, | ||
Type: fullQualified(elementCsn._target.name) | ||
}, elementCsn); | ||
props.push(navProp); | ||
// save the navProp in the global array for late constraint building | ||
navigationProperties.push(navProp); | ||
actionImport.EntitySet = rt.replace(schemaNamePrefix, ''); | ||
} | ||
} | ||
// render ordinary property if element is NOT ... | ||
// 1) ... annotated @cds.api.ignore | ||
// 2) ... annotated @odata.foreignKey4 and odataFormat: structured | ||
Schema._ec.append(actionImport); | ||
} | ||
else if(isEdmPropertyRendered(elementCsn, options)) | ||
{ | ||
// CDXCORE-CDXCORE-173 | ||
// V2: filter @Core.MediaType | ||
if ( options.isV2() && elementCsn['@Core.MediaType']) { | ||
// CDXCORE-CDXCORE-177: | ||
// V2: don't render element but add attribute 'm:HasStream="true' to EntityType | ||
// V4: render property type 'Edm.Stream' | ||
hasStream = true; | ||
signal(signal.info`"${parentCsn.name}: Property "${elementName}" annotated with '@Core.MediaType' is removed from EDM in Odata V2`, ['definitions', parentCsn.name]); | ||
// Parameter Nodes | ||
edmUtils.forAll(actionCsn.params, (parameterCsn, parameterName) => { | ||
actionNode.append(new Edm.Parameter(v, { Name: parameterName }, parameterCsn )); | ||
}); | ||
} else | ||
props.push(new Edm.Property(v, { Name: elementName }, elementCsn)); | ||
// return type if any | ||
if(actionCsn.returns) { | ||
actionNode._returnType = new Edm.ReturnType(v, actionCsn.returns); | ||
// if binding type matches return type add attribute EntitySetPath | ||
if(entityCsn != undefined && fullQualified(entityCsn.name) === actionNode._returnType._type) { | ||
actionNode.EntitySetPath = bpName; | ||
} | ||
} | ||
Schema.addAction(actionNode); | ||
} | ||
}); | ||
return [ props, hasStream ]; | ||
} | ||
// add bound/unbound actions/functions for V2 | ||
function createActionV2(actionCsn, name, entityCsn=undefined) | ||
{ | ||
/** @type {object} */ | ||
let functionImport = new Edm.FunctionImport(v, { Name: name.replace(schemaNamePrefix, '') } ); | ||
function createComplexType(structuredTypeCsn) | ||
{ | ||
// V4 attributes: Name, BaseType, Abstract, OpenType | ||
let attributes = { Name: structuredTypeCsn.name.replace(namespace, '') }; | ||
// inserted now to maintain attribute order with old odata generator... | ||
/* | ||
V2 says (p33): | ||
* If the return type of FunctionImport is a collection of entities, the EntitySet | ||
attribute is defined. | ||
* If the return type of FunctionImport is of ComplexType or scalar type, | ||
the EntitySet attribute cannot be defined. | ||
The spec doesn't mention single ET: Ralf Handls confirmed that there is a gap | ||
in the spec and advised mention it as in V4 | ||
*/ | ||
let complexType = new Edm.ComplexType(v, attributes, structuredTypeCsn); | ||
let elementsCsn = structuredTypeCsn.items || structuredTypeCsn; | ||
let properties = createProperties(elementsCsn)[0]; | ||
let rt = actionCsn.returns && ((actionCsn.returns.items && actionCsn.returns.items.type) || actionCsn.returns.type); | ||
if(rt) // add EntitySet attribute only if return type is an entity | ||
{ | ||
let defintion = schemaCsn.definitions[rt]; | ||
if(defintion && edmUtils.isEntityOrView(defintion)) | ||
{ | ||
functionImport.EntitySet = rt.replace(schemaNamePrefix, ''); | ||
} | ||
} | ||
if(properties.length === 0) { | ||
signal(signal.warning`ComplexType "${structuredTypeCsn.name}" has no properties`, ['definitions', structuredTypeCsn.name]); | ||
} | ||
complexType.append(...(properties)); | ||
if(actionCsn.returns) | ||
functionImport.ReturnType = getReturnType(actionCsn); | ||
Schema.append(complexType); | ||
} | ||
if(actionCsn.kind === 'function') | ||
functionImport.setXml( {'m:HttpMethod': 'GET' }); | ||
else if(actionCsn.kind === 'action') | ||
functionImport.setXml( {'m:HttpMethod': 'POST'}); | ||
else | ||
throw Error('Please debug me: Neither function nor action'); | ||
// V4 <TypeDefintion> | ||
function createTypeDefinition(typeCsn) | ||
{ | ||
// derived types are already resolved to base types | ||
let typeDef; | ||
let props = { Name: typeCsn.name.replace(namespace, '') }; | ||
if((typeCsn.items && typeCsn.items.enum) || typeCsn.enum) { | ||
if (!builtins.isIntegerTypeName(typeCsn.type)) { | ||
signal(signal.warning`Only integer enums are allowed in OData`, ['definitions', typeCsn.name]); | ||
if(entityCsn != undefined) | ||
{ | ||
// Make bound function names always unique as per Ralf's recommendation | ||
functionImport.setXml( {'sap:action-for': fullQualified(entityCsn.name) } ); | ||
functionImport.Name = entityCsn.name.replace(schemaNamePrefix, '') + '_' + functionImport.Name; | ||
// Binding Parameter: Primary Keys at first position in sequence, this is decisive! | ||
// V2 XML: Nullable=false is set because we reuse the primary key property for the parameter | ||
edmUtils.foreach(entityCsn.elements, | ||
elementCsn => elementCsn.key && !edmUtils.isAssociationOrComposition(elementCsn), | ||
(elementCsn, elementName) => { | ||
functionImport.append(new Edm.Parameter(v, { Name: elementName }, elementCsn, 'In' )); | ||
} | ||
); | ||
} | ||
typeDef = new Edm.EnumType(v, props, typeCsn); | ||
} else { | ||
typeDef = new Edm.TypeDefinition(v, props, typeCsn ); | ||
} | ||
Schema.append(typeDef); | ||
} | ||
/* | ||
* addAssociation() constructs a V2 association. | ||
* In V4 all this has been simplified very much, the only thing actually left over is | ||
* <ReferentialConstriant> that is then a sub element to <NavigationProperty>. | ||
* However, referential constraints are substantially different to its V2 counterpart, | ||
* so it is better to reimplement proper V4 construction of<NavigationProperty> in a separate | ||
* function. | ||
* | ||
* This method does: | ||
* rewrite <NavigationProperty> attributes to be V2 compliant | ||
* add <Association> elements to the schema | ||
* add <End>, <ReferentialConstraint>, <Dependent> and <Principal> sub elements to <Association> | ||
* add <AssociationSet> to the EntityContainer for each <Association> | ||
*/ | ||
function addAssociation(navigationProperty) | ||
{ | ||
let constraints = navigationProperty._csn._constraints; | ||
let parentName = navigationProperty._csn._parent.name.replace(namespace, ''); | ||
let plainAssocName = parentName + NAVPROP_TRENNER + navigationProperty.Name.replace(VALUELIST_NAVPROP_PREFIX, ''); | ||
let assocName = plainAssocName; | ||
let i = 1; | ||
while(NamesInSchemaXRef[assocName] !== undefined) { | ||
assocName = plainAssocName + '_' + i++; | ||
} | ||
// is this still required? | ||
for (let p in actionCsn) | ||
if (p.match(/^@sap\./)) | ||
functionImport.setXml( { ['sap:' + p.slice(5).replace(/\./g, '-')] : actionCsn[p] }); | ||
let fromRole = parentName; | ||
let toRole = navigationProperty.Type.replace(alias, ''); // <= navprops type should be prefixed with alias | ||
// then append all other parameters | ||
// V2 XML: Parameters that are not explicitly marked as Nullable or NotNullable in the CSN must become Nullable=true | ||
// V2 XML spec does only mention default Nullable=true for Properties not for Parameters so omitting Nullable=true let | ||
// the client assume that Nullable is false.... Correct Nullable Handling is done inside Parameter constructor | ||
edmUtils.forAll(actionCsn.params, (parameterCsn, parameterName) => { | ||
functionImport.append(new Edm.Parameter(v, { Name: parameterName }, parameterCsn, 'In' )); | ||
}); | ||
let fromEntityType = fromRole; | ||
let toEntityType = toRole; | ||
if(Schema._ec) | ||
Schema._ec.append(functionImport); | ||
// The entity set name may not be the same as the type name (parameterized entities have | ||
// differing set names (<T>Parameters => <T>, <T>Type => <T>Set) | ||
let fromEntitySet = ( navigationProperty._csn._parent.entitySetName || fromEntityType).replace(namespace, ''); | ||
let toEntitySet = (navigationProperty._targetCsn.entitySetName || toEntityType).replace(namespace, ''); | ||
function getReturnType(action) | ||
{ | ||
// it is safe to assume that either type or items.type are set | ||
let returns = action.returns.items || action.returns; | ||
let type = returns.type; | ||
if(type) | ||
type = edmUtils.mapCdsToEdmType(returns, messageFunctions, options.isV2()); | ||
// from and to roles must be distinguishable (in case of self association entity E { toE: association to E; ... }) | ||
if(type && action.returns.items) | ||
type = `Collection(${type})` | ||
if(fromRole === toRole) { | ||
if(constraints._partnerCsn) | ||
fromRole += '1'; | ||
else | ||
toRole += '1'; | ||
return type; | ||
} | ||
} | ||
// add V2 attributes to navigationProperty | ||
navigationProperty.Relationship = fullQualified(assocName); | ||
navigationProperty.FromRole = fromRole; | ||
navigationProperty.ToRole = toRole; | ||
/** | ||
* @param {object} parentCsn | ||
* @returns {[object[], boolean]} Returns a [ [ Edm Properties ], boolean hasStream ]: | ||
* array of Edm Properties | ||
* boolean hasStream : true if at least one element has @Core.MediaType assignment | ||
*/ | ||
function createProperties(parentCsn) | ||
{ | ||
let props = []; | ||
let hasStream = false; | ||
edmUtils.forAll(parentCsn.elements, (elementCsn, elementName) => | ||
{ | ||
if(elementCsn._parent == undefined) | ||
setProp(elementCsn, '_parent', parentCsn); | ||
// remove V4 attributes | ||
if(navigationProperty.Type != undefined) | ||
delete navigationProperty.Type; | ||
if(navigationProperty.Partner != undefined) | ||
delete navigationProperty.Partner; | ||
if(navigationProperty.ContainsTarget != undefined) | ||
delete navigationProperty.ContainsTarget; | ||
if(!elementCsn._ignore) { | ||
if(edmUtils.isAssociationOrComposition(elementCsn)) | ||
{ | ||
// Foreign keys are part of the generic elementCsn.elements property creation | ||
/* | ||
If NavigationProperty is a backlink association (constraints._partnerCsn is set), then there are two options: | ||
1) Counterpart NavigationProperty exists and is responsible to create the edm:Association element which needs to | ||
be reused by this backlink association. This is save because at this point of the processing all NavProps are created. | ||
2) Counterpart NavigationProperty does not exist (@odata.navigable:false), then the missing edm:Association element | ||
of the origin association needs to be created as if it would have been already available in case (1). | ||
*/ | ||
// This is the V4 edmx:NavigationProperty | ||
// gets rewritten for V2 in addAssociations() | ||
let reuseAssoc = false; | ||
let forwardAssocCsn = constraints._partnerCsn; | ||
if(forwardAssocCsn) | ||
// suppress navprop creation only if @odata.navigable:false is not annotated. | ||
// (undefined !== false) still evaluates to true | ||
if (!elementCsn._target.abstract && elementCsn['@odata.navigable'] !== false) | ||
{ | ||
let navProp = new Edm.NavigationProperty(v, { | ||
Name: elementName, | ||
Type: elementCsn._target.name | ||
}, elementCsn); | ||
props.push(navProp); | ||
// save the navProp in the global array for late constraint building | ||
navigationProperties.push(navProp); | ||
} | ||
} | ||
// render ordinary property if element is NOT ... | ||
// 1) ... annotated @cds.api.ignore | ||
// 2) ... annotated @odata.foreignKey4 and odataFormat: structured | ||
else if(isEdmPropertyRendered(elementCsn, options)) | ||
{ | ||
// CDXCORE-CDXCORE-173 | ||
// V2: filter @Core.MediaType | ||
if ( options.isV2() && elementCsn['@Core.MediaType']) { | ||
// CDXCORE-CDXCORE-177: | ||
// V2: don't render element but add attribute 'm:HasStream="true' to EntityType | ||
// V4: render property type 'Edm.Stream' | ||
hasStream = true; | ||
info(null, ['definitions', parentCsn.name], | ||
`"${parentCsn.name}: Property "${elementName}" annotated with '@Core.MediaType' is removed from EDM in Odata V2`); | ||
} else | ||
props.push(new Edm.Property(v, { Name: elementName }, elementCsn)); | ||
} | ||
} | ||
}); | ||
return [ props, hasStream ]; | ||
} | ||
function createComplexType(structuredTypeCsn) | ||
{ | ||
// This is a backlink, swap the roles and types, rewrite assocName | ||
[ fromRole, toRole ] = [ toRole, fromRole ]; | ||
[ fromEntityType, toEntityType ] = [ toEntityType, fromEntityType ]; | ||
[ fromEntitySet, toEntitySet ] = [ toEntitySet, fromEntitySet ]; | ||
// V4 attributes: Name, BaseType, Abstract, OpenType | ||
let attributes = { Name: structuredTypeCsn.name.replace(schemaNamePrefix, '') }; | ||
parentName = forwardAssocCsn._parent.name.replace(namespace, ''); | ||
assocName = plainAssocName = parentName + NAVPROP_TRENNER + forwardAssocCsn.name.replace(VALUELIST_NAVPROP_PREFIX, ''); | ||
i = 1; | ||
while(NamesInSchemaXRef[assocName] !== undefined && !(NamesInSchemaXRef[assocName][0] instanceof Edm.Association)) { | ||
assocName = plainAssocName + '_' + i++; | ||
let complexType = new Edm.ComplexType(v, attributes, structuredTypeCsn); | ||
let elementsCsn = structuredTypeCsn.items || structuredTypeCsn; | ||
let properties = createProperties(elementsCsn)[0]; | ||
if(properties.length === 0) { | ||
warning(null, structuredTypeCsn.$path, `ComplexType "${structuredTypeCsn.name}" has no properties`); | ||
} | ||
complexType.append(...(properties)); | ||
navigationProperty.Relationship = fullQualified(assocName) | ||
Schema.append(complexType); | ||
} | ||
reuseAssoc = !!forwardAssocCsn._NavigationProperty; | ||
constraints = forwardAssocCsn._constraints; | ||
constraints._multiplicity = edmUtils.determineMultiplicity(forwardAssocCsn); | ||
// V4 <TypeDefintion> | ||
function createTypeDefinition(typeCsn) | ||
{ | ||
// derived types are already resolved to base types | ||
let typeDef; | ||
let props = { Name: typeCsn.name.replace(schemaNamePrefix, '') }; | ||
if((typeCsn.items && typeCsn.items.enum) || typeCsn.enum) { | ||
if (!builtins.isIntegerTypeName(typeCsn.type)) { | ||
warning(null, ['definitions', typeCsn.name], `Only integer enums are allowed in OData`); | ||
} | ||
typeDef = new Edm.EnumType(v, props, typeCsn); | ||
} else { | ||
typeDef = new Edm.TypeDefinition(v, props, typeCsn ); | ||
} | ||
Schema.append(typeDef); | ||
} | ||
if(reuseAssoc) | ||
/* | ||
* addAssociation() constructs a V2 association. | ||
* In V4 all this has been simplified very much, the only thing actually left over is | ||
* <ReferentialConstriant> that is then a sub element to <NavigationProperty>. | ||
* However, referential constraints are substantially different to its V2 counterpart, | ||
* so it is better to reimplement proper V4 construction of<NavigationProperty> in a separate | ||
* function. | ||
* | ||
* This method does: | ||
* rewrite <NavigationProperty> attributes to be V2 compliant | ||
* add <Association> elements to the schema | ||
* add <End>, <ReferentialConstraint>, <Dependent> and <Principal> sub elements to <Association> | ||
* add <AssociationSet> to the EntityContainer for each <Association> | ||
*/ | ||
function addAssociation(navigationProperty) | ||
{ | ||
// Example: | ||
// entity E { key id: Integer; toF: association to F; }; | ||
// entity F { key id: Integer; toE: composition of E on toE.toF = $self; }; | ||
// | ||
// Consider we're in NavigationProperty 'toE' which is the backlink to F. | ||
// Then forwardAssocCsn is 'E_toF' with two Ends: E, F. | ||
// Backlink F.toE is a composition, making E existentially dependant on F. | ||
// So End E of Association E_toF (which is End[0]) receives Edm.OnDelete. | ||
// Depending on the order of the navigation properties it might be that the | ||
// forward Edm.Association has not yet been produced. In this case Edm.OnDelete | ||
// is parked at the forward NavigationProperty. | ||
let constraints = navigationProperty._csn._constraints; | ||
let parentName = navigationProperty._csn._parent.name.replace(schemaNamePrefix, ''); | ||
let plainAssocName = parentName + NAVPROP_TRENNER + navigationProperty.Name.replace(VALUELIST_NAVPROP_PREFIX, ''); | ||
let assocName = plainAssocName; | ||
let i = 1; | ||
while(NamesInSchemaXRef[assocName] !== undefined) { | ||
assocName = plainAssocName + '_' + i++; | ||
} | ||
if(!forwardAssocCsn._NavigationProperty._edmAssociation && navigationProperty._csn.type === 'cds.Composition') | ||
let fromRole = parentName; | ||
let toRole = navigationProperty.Type.replace(schemaAliasPrefix, ''); // <= navprops type should be prefixed with alias | ||
let fromEntityType = fromRole; | ||
let toEntityType = toRole; | ||
// The entity set name may not be the same as the type name (parameterized entities have | ||
// differing set names (<T>Parameters => <T>, <T>Type => <T>Set) | ||
let fromEntitySet = ( navigationProperty._csn._parent.entitySetName || fromEntityType).replace(schemaNamePrefix, ''); | ||
let toEntitySet = (navigationProperty._targetCsn.entitySetName || toEntityType).replace(schemaNamePrefix, ''); | ||
// from and to roles must be distinguishable (in case of self association entity E { toE: association to E; ... }) | ||
if(fromRole === toRole) { | ||
if(constraints._partnerCsn) | ||
fromRole += '1'; | ||
else | ||
toRole += '1'; | ||
} | ||
// add V2 attributes to navigationProperty | ||
navigationProperty.Relationship = fullQualified(assocName); | ||
navigationProperty.FromRole = fromRole; | ||
navigationProperty.ToRole = toRole; | ||
// remove V4 attributes | ||
if(navigationProperty.Type != undefined) | ||
delete navigationProperty.Type; | ||
if(navigationProperty.Partner != undefined) | ||
delete navigationProperty.Partner; | ||
if(navigationProperty.ContainsTarget != undefined) | ||
delete navigationProperty.ContainsTarget; | ||
/* | ||
If NavigationProperty is a backlink association (constraints._originAssocCsn is set), then there are two options: | ||
1) Counterpart NavigationProperty exists and is responsible to create the edm:Association element which needs to | ||
be reused by this backlink association. This is save because at this point of the processing all NavProps are created. | ||
2) Counterpart NavigationProperty does not exist (@odata.navigable:false), then the missing edm:Association element | ||
of the origin association needs to be created as if it would have been already available in case (1). | ||
*/ | ||
let reuseAssoc = false; | ||
let forwardAssocCsn = constraints._partnerCsn; | ||
if(forwardAssocCsn) | ||
{ | ||
// TODO: to be specified via @sap.on.delete | ||
forwardAssocCsn._NavigationProperty.set( { _OnDeleteSrcEnd: new Edm.OnDelete(v, { Action: 'Cascade' }) } ); | ||
// This is a backlink, swap the roles and types, rewrite assocName | ||
[ fromRole, toRole ] = [ toRole, fromRole ]; | ||
[ fromEntityType, toEntityType ] = [ toEntityType, fromEntityType ]; | ||
[ fromEntitySet, toEntitySet ] = [ toEntitySet, fromEntitySet ]; | ||
parentName = forwardAssocCsn._parent.name.replace(schemaNamePrefix, ''); | ||
assocName = plainAssocName = parentName + NAVPROP_TRENNER + forwardAssocCsn.name.replace(VALUELIST_NAVPROP_PREFIX, ''); | ||
i = 1; | ||
while(NamesInSchemaXRef[assocName] !== undefined && !(NamesInSchemaXRef[assocName][0] instanceof Edm.Association)) { | ||
assocName = plainAssocName + '_' + i++; | ||
} | ||
navigationProperty.Relationship = fullQualified(assocName) | ||
reuseAssoc = !!forwardAssocCsn._NavigationProperty; | ||
constraints = forwardAssocCsn._constraints; | ||
constraints._multiplicity = edmUtils.determineMultiplicity(forwardAssocCsn); | ||
} | ||
return; | ||
} | ||
// Create Association and AssociationSet if this is not a backlink association. | ||
// Store association at navigation property because in case the Ends must be modified | ||
// later by the partner (backlink) association | ||
navigationProperty._edmAssociation = new Edm.Association(v, { Name: assocName }, navigationProperty, | ||
[ fromRole, fullQualified(fromEntityType) ], | ||
[ toRole, fullQualified(toEntityType) ], | ||
constraints._multiplicity ); | ||
if(NamesInSchemaXRef[assocName] === undefined) { | ||
NamesInSchemaXRef[assocName] = [ navigationProperty._edmAssociation ]; | ||
} | ||
else { | ||
navigationProperty._edmAssociation.push(navigationProperty._edmAssociation); | ||
} | ||
// Add ReferentialConstraints if any | ||
if(!navigationProperty._isCollection && Object.keys(constraints.constraints).length > 0) { | ||
// A managed composition is treated as association | ||
if(navigationProperty._csn.type === 'cds.Composition' && (navigationProperty._csn.on || navigationProperty._csn.onCond)) { | ||
navigationProperty._edmAssociation.append(Edm.ReferentialConstraint.createV2(v, | ||
toRole, fromRole, constraints.constraints)); | ||
if(reuseAssoc) | ||
{ | ||
// Example: | ||
// entity E { key id: Integer; toF: association to F; }; | ||
// entity F { key id: Integer; toE: composition of E on toE.toF = $self; }; | ||
// | ||
// Consider we're in NavigationProperty 'toE' which is the backlink to F. | ||
// Then forwardAssocCsn is 'E_toF' with two Ends: E, F. | ||
// Backlink F.toE is a composition, making E existentially dependant on F. | ||
// So End E of Association E_toF (which is End[0]) receives Edm.OnDelete. | ||
// Depending on the order of the navigation properties it might be that the | ||
// forward Edm.Association has not yet been produced. In this case Edm.OnDelete | ||
// is parked at the forward NavigationProperty. | ||
if(!forwardAssocCsn._NavigationProperty._edmAssociation && navigationProperty._csn.type === 'cds.Composition') | ||
{ | ||
// TODO: to be specified via @sap.on.delete | ||
forwardAssocCsn._NavigationProperty.set( { _OnDeleteSrcEnd: new Edm.OnDelete(v, { Action: 'Cascade' }) } ); | ||
} | ||
return; | ||
} | ||
// Create Association and AssociationSet if this is not a backlink association. | ||
// Store association at navigation property because in case the Ends must be modified | ||
// later by the partner (backlink) association | ||
navigationProperty._edmAssociation = new Edm.Association(v, { Name: assocName }, navigationProperty, | ||
[ fromRole, fullQualified(fromEntityType) ], | ||
[ toRole, fullQualified(toEntityType) ], | ||
constraints._multiplicity ); | ||
if(NamesInSchemaXRef[assocName] === undefined) { | ||
NamesInSchemaXRef[assocName] = [ navigationProperty._edmAssociation ]; | ||
} | ||
else { | ||
navigationProperty._edmAssociation.append(Edm.ReferentialConstraint.createV2(v, | ||
fromRole, toRole, constraints.constraints)); | ||
navigationProperty._edmAssociation.push(navigationProperty._edmAssociation); | ||
} | ||
// Add ReferentialConstraints if any | ||
if(!navigationProperty._isCollection && Object.keys(constraints.constraints).length > 0) { | ||
// A managed composition is treated as association | ||
if(navigationProperty._csn.type === 'cds.Composition' && (navigationProperty._csn.on || navigationProperty._csn.onCond)) { | ||
navigationProperty._edmAssociation.append(Edm.ReferentialConstraint.createV2(v, | ||
toRole, fromRole, constraints.constraints)); | ||
} | ||
else { | ||
navigationProperty._edmAssociation.append(Edm.ReferentialConstraint.createV2(v, | ||
fromRole, toRole, constraints.constraints)); | ||
} | ||
} | ||
Schema.append(navigationProperty._edmAssociation); | ||
if(Schema._ec && !navigationProperty._targetCsn.$proxy) { | ||
let assocSet = new Edm.AssociationSet(v, { Name: assocName, Association: fullQualified(assocName) }, | ||
fromRole, toRole, fromEntitySet, toEntitySet); | ||
if(navigationProperty._csn._SetAttributes) | ||
assocSet.setSapVocabularyAsAttributes(navigationProperty._csn._SetAttributes); | ||
Schema._ec.append(assocSet); | ||
} | ||
} | ||
Schema.append(navigationProperty._edmAssociation); | ||
if(!navigationProperty._targetCsn.$proxy) { | ||
let assocSet = new Edm.AssociationSet(v, { Name: assocName, Association: fullQualified(assocName) }, | ||
fromRole, toRole, fromEntitySet, toEntitySet); | ||
if(navigationProperty._csn._SetAttributes) | ||
assocSet.setSapVocabularyAsAttributes(navigationProperty._csn._SetAttributes); | ||
Schema._ec.append(assocSet); | ||
// produce a full qualified name replacing the namespace with the alias (if provided) | ||
function fullQualified(name) | ||
{ | ||
return schemaAliasPrefix + name.replace(schemaNamePrefix, '') | ||
} | ||
} | ||
// produce a full qualified name replacing the namespace with the alias (if provided) | ||
function fullQualified(name) | ||
{ | ||
if (name == serviceCsn.name) | ||
return Schema.Alias | ||
else | ||
return alias + name.replace(namespace, '') | ||
} | ||
function createAnnotations(edm) | ||
{ | ||
/** @type {object} */ | ||
let annoEdm = translate.csn2annotationEdm(csn, serviceName, options); | ||
let annoEdm = translate.csn2annotationEdm(csn, serviceCsn.name, options); | ||
for(let i = 0; i < annoEdm.getSchemaCount(); i++) | ||
@@ -632,3 +766,3 @@ { | ||
} | ||
edm._defaultRefs = annoEdm._defaultRefs; | ||
edm._defaultRefs.push(...annoEdm._defaultRefs); | ||
} | ||
@@ -635,0 +769,0 @@ } |
@@ -600,2 +600,13 @@ // @ts-nocheck | ||
if(options.fqSchemaXRef && this[typeName]) { | ||
let schemaName = options.fqSchemaXRef.reduce((a, n) => | ||
{ | ||
if(this[typeName].startsWith(n+'.')) a=n; | ||
return a; | ||
}, undefined); | ||
if(schemaName && schemaName !== options.serviceName) { | ||
this[typeName] = this[typeName].replace(options.serviceName + '.', ''); | ||
} | ||
} | ||
// store undecorated type for JSON | ||
@@ -836,7 +847,10 @@ this.set( { _type : this[typeName] }); | ||
// OData only allows simple values, no complex expressions or function calls | ||
// This is a poor man's expr renderer, assuming that this value has passed | ||
// the defaultValues validator | ||
if (csn.default && isBetaEnabled(options, 'odataDefaultValues')) { | ||
let defVal = csn.default.val; | ||
// This is a poor man's expr renderer, assuming that edmPreprocessor has | ||
// added a @Core.ComputedDefaultValue for complex defaults | ||
if (csn.default && !csn['@Core.ComputedDefaultValue'] && isBetaEnabled(options, 'odataDefaultValues')) { | ||
let def = csn.default; | ||
// if def has a value, it's a simple value | ||
let defVal = def.val; | ||
// if it's a simple value with signs, produce a string representation | ||
if(csn.default.xpr) { | ||
@@ -852,2 +866,3 @@ defVal = csn.default.xpr.map(i => { | ||
} | ||
// complex values should be marked with @Core.ComputedDefaultValue already in the edmPreprocessor | ||
if(defVal !== undefined) { | ||
@@ -1143,3 +1158,3 @@ this[`Default${this.v4 ? 'Value' : ''}`] = ['cds.Boolean', 'cds.Binary', 'cds.LargeBinary', 'cds.Integer64', 'cds.Integer'].includes(csn.type) | ||
// (EnumMember & EnumMember@odata.type) then the value properties must | ||
// be separated by using setJSON(attibute) and setXML(attribute). | ||
// be separated by using setJSON(attribute) and setXML(attribute). | ||
// See genericTranslation::handleValue() for details (especially the code | ||
@@ -1146,0 +1161,0 @@ // that sets the EnumMember code). All this has been done because the |
@@ -24,3 +24,3 @@ 'use strict'; | ||
// V2/V4 flat: yes | ||
// V4/struct: dending on odataForeignKyes | ||
// V4/struct: depending on odataForeignKeys | ||
options.renderForeignKeys = | ||
@@ -80,3 +80,3 @@ options.version === 'v4' ? options.odataFormat === 'structured' && !!options.odataForeignKeys : true; | ||
function isContainee(artifact) { | ||
// if _containerEntity is present, it is guarranteed that it has at least one entry | ||
// if _containerEntity is present, it is guaranteed that it has at least one entry | ||
return (artifact._containerEntity && (artifact._containerEntity.length > 1 || artifact._containerEntity[0] != artifact.name)); | ||
@@ -149,6 +149,8 @@ } | ||
function resolveOnConditionAndPrepareConstraints(assocCsn, signal) { | ||
function resolveOnConditionAndPrepareConstraints(assocCsn, messageFunctions) { | ||
if(!assocCsn._constraints) | ||
throw Error('Please debug me: need _constraints'); | ||
const { info, warning } = messageFunctions; | ||
if(assocCsn.on) | ||
@@ -179,3 +181,4 @@ { | ||
isBacklink = false; | ||
signal(signal.info`"${originAssocCsn._parent.name}:${partner}" with target "${originAssocCsn._target.name}" is compared with $self which represents "${parentArtifactName}"`, ['definitions', parentArtifactName, 'elements', assocCsn.name]); | ||
info(null, ['definitions', parentArtifactName, 'elements', assocCsn.name], | ||
`"${originAssocCsn._parent.name}:${partner}" with target "${originAssocCsn._target.name}" is compared with $self which represents "${parentArtifactName}"`); | ||
} | ||
@@ -202,3 +205,3 @@ if(isAssociationOrComposition(originAssocCsn)) { | ||
key id : Integer; | ||
toMe: associaton to E on toMe.id = $self; }; | ||
toMe: association to E on toMe.id = $self; }; | ||
*/ | ||
@@ -210,3 +213,3 @@ throw Error('Backlink association element is not an association or composition: "' + originAssocCsn.name); | ||
{ | ||
signal(signal.warning`Cannot resolve backlink to ${assocCsn._target.name}/${partner}" from "${parentArtifactName}/${assocCsn.name}"`, ['definitions', parentArtifactName]); | ||
warning(null, ['definitions', parentArtifactName], `Cannot resolve backlink to ${assocCsn._target.name}/${partner}" from "${parentArtifactName}/${assocCsn.name}"`); | ||
} | ||
@@ -457,7 +460,8 @@ }); | ||
function mapCdsToEdmType(csn, signal, isV2=false, isMediaType=false) | ||
function mapCdsToEdmType(csn, messageFunctions, isV2=false, isMediaType=false) | ||
{ | ||
const { warning, error } = messageFunctions; | ||
let cdsType = csn.type; | ||
if(cdsType === undefined) { | ||
signal(signal.error`no type found`, csn.$location); | ||
error(null, csn.$location, `no type found`); | ||
return '<NOTYPE>'; | ||
@@ -517,3 +521,3 @@ } | ||
if (edmType == undefined) { | ||
signal(signal.error`No Edm type available for "${cdsType}"`, csn.$location); | ||
error(null, csn.$location, `No Edm type available for "${cdsType}"`); | ||
} | ||
@@ -527,6 +531,6 @@ if(isV2) | ||
if(['cds.hana.ST_POINT', 'cds.hana.ST_GEOMETRY'].includes(cdsType)) { | ||
signal(signal.error`"OData V2 does not support Geometry data types, ${cdsType}" cannot be mapped`, csn.$location); | ||
error(null, csn.$location, `"OData V2 does not support Geometry data types, ${cdsType}" cannot be mapped`); | ||
} | ||
if(cdsType === 'cds.DecimalFloat' || cdsType === 'cds.hana.SMALLDECIMAL') | ||
signal(signal.warning`"OData V2 does not support ${cdsType}"`, csn.$location); | ||
warning(null, csn.$location, `"OData V2 does not support ${cdsType}"`); | ||
} | ||
@@ -575,3 +579,3 @@ else // isV4 | ||
// From the odata specification: [Simple Identifier] starts with a letter or underscore, followed by at most 127 letters, underscores or digits. | ||
// From the odata specification: [Simple Identifier] starts with a letter or underscore, followed by at most 127 letters, underscores or digits. | ||
function isSimpleIdentifier(identifier){ | ||
@@ -578,0 +582,0 @@ return identifier && identifier.match(/^[a-zA-Z_]{1}[a-zA-Z0-9_]{0,127}$/); |
@@ -16,2 +16,3 @@ // Transform augmented CSN into compact "official" CSN | ||
let strictMode = false; // whether to dump with unknown properties (in standard) | ||
let withLocations = false; | ||
@@ -132,8 +133,5 @@ // IMPORTANT: the order of these properties determine the order of properties | ||
'some', 'any', 'distinct', // 'all' explicitly listed | ||
'ref', '_links', '_art', '_scope', | ||
'ref', | ||
'param', 'val', 'literal', 'SELECT', 'SET', | ||
], | ||
type: [ '_type' ], | ||
target: [ '_target' ], | ||
includes: [ '_includes' ], | ||
foreignKeys: [ 'keys' ], | ||
@@ -150,3 +148,6 @@ exclude: [ 'excluding' ], | ||
generatedFieldName: [ '$generatedFieldName' ], | ||
location: [ '$location' ], | ||
location: [ '$env', '$location' ], // --enrich-csn | ||
_typeIsExplicit: [ | ||
'_type', '_targetAspect', '_target', '_includes', '_links', '_art', '_scope', | ||
], // --enrich-csn | ||
}; | ||
@@ -205,3 +206,4 @@ | ||
else if (csnDictionaries.includes(n)) | ||
else if (csnDictionaries.includes(n) && !Array.isArray(val)) | ||
// Array check for property `args` which may either be a dictionary or an array. | ||
r[n] = csnDictionary( val, n === 'definitions', keepHidden ); | ||
@@ -213,3 +215,3 @@ | ||
if (keepHidden && typeof csn === 'object') { | ||
if (csn.$location) | ||
if (csn.$location && !r.$location) | ||
setHidden(r, '$location', csn.$location); | ||
@@ -248,2 +250,3 @@ if (csn.$env) | ||
strictMode = options.testMode; | ||
withLocations = options.withLocations; | ||
const csn = {}; | ||
@@ -272,4 +275,7 @@ const sources = model.sources || Object.create(null); | ||
const file = src && model.sources[src].filename; | ||
if (file) | ||
setHidden( csn, '$location', { file } ); // no line | ||
if (file) { | ||
Object.defineProperty( csn, '$location', { | ||
value: { file }, configurable: true, writable: true, enumerable: withLocations, | ||
} ); | ||
} | ||
if (!options.testMode) { | ||
@@ -454,3 +460,3 @@ csn.meta = Object.assign( {}, model.meta, { creator } ); | ||
return (val.elements) ? standard( val ) : artifactRef( val, true ); | ||
// For compatibilty, put aspect in 'target' with parse.cdl and csn flavor 'gensrc' | ||
// For compatibility, put aspect in 'target' with parse.cdl and csn flavor 'gensrc' | ||
csn.target = (val.elements) ? standard( val ) : artifactRef( val, true ); | ||
@@ -463,3 +469,3 @@ return undefined; | ||
// target._artifact is different to _artifact from path with explicit target | ||
// to model entity with @cds.autoexpose (TODO: remove, unncessary complication) | ||
// to model entity with @cds.autoexpose (TODO: remove, unnecessary complication) | ||
return val._artifact.name.absolute; | ||
@@ -516,3 +522,5 @@ else if (!val.elements) | ||
const val = { file: l.filename, line: l.start.line, col: l.start.column }; | ||
setHidden( csn, '$location', val ); | ||
Object.defineProperty( csn, '$location', { | ||
value: val, configurable: true, writable: true, enumerable: withLocations, | ||
} ); | ||
} | ||
@@ -1054,2 +1062,3 @@ } | ||
strictMode = false; | ||
withLocations = false; | ||
return q && query( q ); | ||
@@ -1061,2 +1070,3 @@ } | ||
strictMode = false; | ||
withLocations = false; | ||
return e && expression( e ); | ||
@@ -1063,0 +1073,0 @@ } |
@@ -7,10 +7,12 @@ // Wrapper around generated ANTLR parser | ||
var antlr4 = require('antlr4'); | ||
'use strict'; | ||
var { getMessageFunction, CompileMessage, DebugCompileMessage } = require('../base/messages'); | ||
var errorStrategy = require('./errorStrategy'); | ||
const antlr4 = require('antlr4'); | ||
var Parser = require('../gen/languageParser').languageParser; | ||
var Lexer = require('../gen/languageLexer').languageLexer; | ||
const { getMessageFunction, CompileMessage, DebugCompileMessage } = require('../base/messages'); | ||
const errorStrategy = require('./errorStrategy'); | ||
const Parser = require('../gen/languageParser').languageParser; | ||
const Lexer = require('../gen/languageLexer').languageLexer; | ||
// Error listener used for ANTLR4-generated parser | ||
@@ -33,8 +35,9 @@ class ErrorListener extends antlr4.error.ErrorListener { | ||
} | ||
LT( k ) { | ||
let t = super.LT(k); | ||
const t = super.LT(k); | ||
if (!t || !t.type) | ||
return t; | ||
if (t.type === this.DOT) { | ||
let n = super.LT(k+1); | ||
const n = super.LT(k + 1); | ||
if (n && n.type === this.BRACE) | ||
@@ -44,5 +47,5 @@ t.type = this.DOTbeforeBRACE; | ||
else if (t.type === this.NEW) { | ||
let n = super.LT(k+1); | ||
const n = super.LT(k + 1); | ||
if (n && n.type === this.Identifier && /^st_/i.test( n.text )) { | ||
let o = super.LT(k+2); | ||
const o = super.LT(k + 2); | ||
if (o && o.type === this.PAREN) | ||
@@ -55,2 +58,3 @@ return t; | ||
} | ||
getHiddenTokenToLeft( type ) { | ||
@@ -81,3 +85,3 @@ this.lazyInit(); | ||
if (ts.DOT && ts.DOTbeforeBRACE) | ||
recognizer.tokenRewrite[ ts.DOTbeforeBRACE - Parser.Identifier] = ts.DOT; | ||
recognizer.tokenRewrite[ts.DOTbeforeBRACE - Parser.Identifier] = ts.DOT; | ||
} | ||
@@ -88,4 +92,4 @@ // console.log( ts.DOTbeforeBRACE, ts.BRACE, ts.DOT, recognizer.tokenRewrite ); | ||
function tokenTypeOf( recognizer, literalName ) { | ||
let r = recognizer.literalNames.indexOf( literalName ); | ||
return (r>0) ? r : 0; | ||
const r = recognizer.literalNames.indexOf( literalName ); | ||
return (r > 0) ? r : 0; | ||
} | ||
@@ -101,11 +105,11 @@ | ||
query: { func: 'queryEOF', returns: 'query' }, | ||
expr: { func: 'conditionEOF', returns: 'cond' } // yes, condition | ||
} | ||
expr: { func: 'conditionEOF', returns: 'cond' }, // yes, condition | ||
}; | ||
function parse( source, filename = '<undefined>.cds', options = {}, rule = 'cdl' ) { | ||
var lexer = new Lexer( new antlr4.InputStream(source) ); | ||
var tokenStream = new RewriteTypeTokenStream(lexer); | ||
const lexer = new Lexer( new antlr4.InputStream(source) ); | ||
const tokenStream = new RewriteTypeTokenStream(lexer); | ||
/** @type {object} */ | ||
var parser = new Parser( tokenStream ); | ||
var errorListener = new ErrorListener(); | ||
const parser = new Parser( tokenStream ); | ||
const errorListener = new ErrorListener(); | ||
@@ -133,4 +137,4 @@ parser.filename = filename; | ||
else if (options.traceParserAmb) { | ||
let listener = new antlr4.error.DiagnosticErrorListener(); | ||
//listener.exactOnly = false; | ||
const listener = new antlr4.error.DiagnosticErrorListener(); | ||
// listener.exactOnly = false; | ||
parser.addErrorListener( listener ); | ||
@@ -145,9 +149,9 @@ parser._interp.predictionMode = antlr4.atn.PredictionMode.LL_EXACT_AMBIG_DETECTION; | ||
if (options.parseListener) { | ||
if (options.parseListener) | ||
parser.addParseListener(options.parseListener); | ||
} | ||
var rulespec = rules[rule]; | ||
var tree = rule && parser[ rulespec.func ](); | ||
var ast = tree && tree[ rulespec.returns ] || {}; | ||
const rulespec = rules[rule]; | ||
const tree = rule && parser[rulespec.func](); | ||
const ast = tree && tree[rulespec.returns] || {}; | ||
ast.options = options; | ||
@@ -154,0 +158,0 @@ if (rulespec.$frontend) |
@@ -19,3 +19,3 @@ 'use strict'; | ||
if (comment.length <= 5) // at least "/***/" | ||
return null | ||
return null; | ||
@@ -35,5 +35,5 @@ let lines = splitLines(comment); | ||
if (isFencedComment(lines)) { | ||
lines = lines.map((line, index) => (index === 0) ? line : removeFence(line)); | ||
} else if (lines.length == 2) { | ||
lines = lines.map((line, index) => ((index === 0) ? line : removeFence(line))); | ||
} | ||
else if (lines.length === 2) { | ||
// Comment that is essentially just a header + footer. | ||
@@ -49,4 +49,4 @@ // First line, i.e. header, is always trimmed from left. | ||
lines[1] = lines[1].trimLeft(); | ||
} else { | ||
} | ||
else { | ||
const firstNonEmptyLine = lines.find((line, index) => index !== 0 && /[^\s]/.test(line)) || ''; | ||
@@ -56,3 +56,3 @@ // Tabs are regarded as one space. | ||
if (spacesAtBeginning > 0) | ||
lines = lines.map((line) => removeWhitespace(line, spacesAtBeginning)); | ||
lines = lines.map(line => removeWhitespace(line, spacesAtBeginning)); | ||
} | ||
@@ -62,3 +62,3 @@ | ||
const startIndex = (lines[0] === '') ? 1 : 0; | ||
const endIndex = (lines[lines.length-1] === '') ? lines.length-1 : lines.length; | ||
const endIndex = (lines[lines.length - 1] === '') ? lines.length - 1 : lines.length; | ||
@@ -101,3 +101,3 @@ const content = lines.slice(startIndex, endIndex).join('\n'); | ||
function removeWhitespace(line, spaces) { | ||
return line.replace(new RegExp(`^\\s{0,${spaces}}`), ''); // Trailing spaces with '*'? => .replace(/\s+[*]$/, ''); | ||
return line.replace(new RegExp(`^\\s{0,${ spaces }}`), ''); // Trailing spaces with '*'? => .replace(/\s+[*]$/, ''); | ||
} | ||
@@ -138,3 +138,3 @@ | ||
const exclude = (index === 0 || index === lines.length - 1); | ||
return !exclude && !(/^\s*[*]/.test(line)) | ||
return !exclude && !(/^\s*[*]/.test(line)); | ||
}); | ||
@@ -145,3 +145,3 @@ return index === -1 && lines.length > 2; | ||
module.exports = { | ||
parseDocComment | ||
parseDocComment, | ||
}; |
@@ -29,14 +29,16 @@ // Error strategy with special handling for (non-reserved) keywords | ||
var antlr4 = require('antlr4'); | ||
var IntervalSet = require('antlr4/IntervalSet'); | ||
var antlr4_error = require('antlr4/error/ErrorStrategy'); | ||
var antlr4_LL1Analyzer = require('antlr4/LL1Analyzer.js').LL1Analyzer; | ||
var predictionContext = require('antlr4/PredictionContext').predictionContextFromRuleContext; | ||
var ATNState = require('antlr4/atn/ATNState').ATNState; | ||
var InputMismatchException = antlr4.error.InputMismatchException; | ||
'use strict'; | ||
const antlr4 = require('antlr4'); | ||
const IntervalSet = require('antlr4/IntervalSet'); | ||
const antlr4_error = require('antlr4/error/ErrorStrategy'); | ||
const antlr4_LL1Analyzer = require('antlr4/LL1Analyzer.js').LL1Analyzer; | ||
const predictionContext = require('antlr4/PredictionContext').predictionContextFromRuleContext; | ||
const { ATNState } = require('antlr4/atn/ATNState'); | ||
const { InputMismatchException } = antlr4.error; | ||
const keywordRegexp = /^[a-zA-Z]+$/; | ||
var SEMI = null; | ||
var RBRACE = null; | ||
let SEMI = null; | ||
let RBRACE = null; | ||
@@ -47,7 +49,7 @@ // Match current token against token type `ttype` and consume it if successful. | ||
function match( ttype ) { | ||
var identType = this.constructor.Identifier; | ||
const identType = this.constructor.Identifier; | ||
if (ttype !== identType) | ||
return antlr4.Parser.prototype.match.call( this, ttype ); | ||
var reserved = this.getCurrentToken(); | ||
const reserved = this.getCurrentToken(); | ||
if (reserved.type !== identType && !keywordRegexp.test( reserved.text )) | ||
@@ -87,4 +89,5 @@ return antlr4.Parser.prototype.match.call( this, ttype ); | ||
getTokenDisplay, | ||
constructor: KeywordErrorStrategy | ||
}); | ||
constructor: KeywordErrorStrategy, | ||
} | ||
); | ||
@@ -95,12 +98,12 @@ // Attemp to recover from problems in subrules, except if rule has defined a | ||
// If already recovering, don't try to sync | ||
if (this.inErrorRecoveryMode(recognizer)) { | ||
if (this.inErrorRecoveryMode(recognizer)) | ||
return; | ||
} | ||
var token = recognizer.getCurrentToken(); | ||
const token = recognizer.getCurrentToken(); | ||
if (!token) | ||
return; | ||
var s = recognizer._interp.atn.states[recognizer.state]; | ||
const s = recognizer._interp.atn.states[recognizer.state]; | ||
// try cheaper subset first; might get lucky. seems to shave a wee bit off | ||
var nextTokens = recognizer.atn.nextTokens(s); | ||
const nextTokens = recognizer.atn.nextTokens(s); | ||
// console.log('SYNC:', recognizer._ctx._sync, s.stateType, token.text, intervalSetToArray( recognizer, nextTokens )) | ||
@@ -138,3 +141,3 @@ | ||
// report error and recover if possible | ||
if( token.text !== '}' && // do not just delete a '}' | ||
if ( token.text !== '}' && // do not just delete a '}' | ||
this.singleTokenDeletion(recognizer) !== null) { // also calls reportUnwantedToken | ||
@@ -148,14 +151,14 @@ return; | ||
} | ||
else { | ||
throw new InputMismatchException(recognizer); | ||
} | ||
throw new InputMismatchException(recognizer); | ||
case ATNState.PLUS_LOOP_BACK: | ||
case ATNState.STAR_LOOP_BACK: | ||
case ATNState.STAR_LOOP_BACK: { | ||
// TODO: do not delete a '}' | ||
this.reportUnwantedToken(recognizer); | ||
var expecting = new IntervalSet.IntervalSet(); | ||
const expecting = new IntervalSet.IntervalSet(); | ||
expecting.addSet(recognizer.getExpectedTokens()); | ||
var whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer)); | ||
const whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer)); | ||
this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule); | ||
break; | ||
} | ||
default: | ||
@@ -187,5 +190,5 @@ // do nothing if we can't identify the exact kind of ATN state | ||
function reportInputMismatch( recognizer, e, deadEnds ) { | ||
var expecting = deadEnds !== true && // true: cannot compute expecting | ||
const expecting = deadEnds !== true && // true: cannot compute expecting | ||
this.getExpectedTokensForMessage( recognizer, e.offendingToken, deadEnds ); | ||
let offending = this.getTokenDisplay( e.offendingToken, recognizer ); | ||
const offending = this.getTokenDisplay( e.offendingToken, recognizer ); | ||
let err; | ||
@@ -212,8 +215,8 @@ if (expecting && expecting.length) { | ||
var token = recognizer.getCurrentToken(); | ||
var expecting = this.getExpectedTokensForMessage( recognizer, token ); | ||
var offending = this.getTokenDisplay( token, recognizer ); | ||
let err = recognizer.message( 'syntax-extraneous-token', token, | ||
{ offending, expecting: expecting.join(', ') }, | ||
'Error', 'Extraneous $(OFFENDING), expecting $(EXPECTING)' ); | ||
const token = recognizer.getCurrentToken(); | ||
const expecting = this.getExpectedTokensForMessage( recognizer, token ); | ||
const offending = this.getTokenDisplay( token, recognizer ); | ||
const err = recognizer.message( 'syntax-extraneous-token', token, | ||
{ offending, expecting: expecting.join(', ') }, | ||
'Error', 'Extraneous $(OFFENDING), expecting $(EXPECTING)' ); | ||
err.expectedTokens = expecting; // TODO: remove next token? | ||
@@ -230,9 +233,9 @@ if (!recognizer.avoidErrorListeners) // with --trace-parser or --trace-parser-ambig | ||
var token = recognizer.getCurrentToken(); | ||
var expecting = this.getExpectedTokensForMessage( recognizer, token ); | ||
var offending = this.getTokenDisplay( token, recognizer ); | ||
const token = recognizer.getCurrentToken(); | ||
const expecting = this.getExpectedTokensForMessage( recognizer, token ); | ||
const offending = this.getTokenDisplay( token, recognizer ); | ||
// TODO: if non-reserved keyword will not been parsed as keyword, use Identifier for offending | ||
let err = recognizer.message( 'syntax-missing-token', token, | ||
{ offending, expecting: expecting.join(', ') }, | ||
'Error', 'Missing $(EXPECTING) before $(OFFENDING)' ); | ||
const err = recognizer.message( 'syntax-missing-token', token, | ||
{ offending, expecting: expecting.join(', ') }, | ||
'Error', 'Missing $(EXPECTING) before $(OFFENDING)' ); | ||
err.expectedTokens = expecting; | ||
@@ -244,8 +247,8 @@ if (!recognizer.avoidErrorListeners) // with --trace-parser or --trace-parser-ambig | ||
function reportIgnoredWith( recognizer, t ) { | ||
let next = recognizer._interp.atn.states[ recognizer.state ].transitions[0].target; | ||
const next = recognizer._interp.atn.states[recognizer.state].transitions[0].target; | ||
recognizer.state = next.stateNumber; // previous match() does not set the state | ||
let expecting = this.getExpectedTokensForMessage( recognizer, t ); | ||
let m = recognizer.message( 'syntax-ignored-with', t, | ||
{ offending: "';'", expecting: expecting.join(', ') }, | ||
'Warning', 'Unexpected $(OFFENDING), expecting $(EXPECTING) - ignored previous WITH' ); | ||
const expecting = this.getExpectedTokensForMessage( recognizer, t ); | ||
const m = recognizer.message( 'syntax-ignored-with', t, | ||
{ offending: "';'", expecting: expecting.join(', ') }, | ||
'Warning', 'Unexpected $(OFFENDING), expecting $(EXPECTING) - ignored previous WITH' ); | ||
m.expectedTokens = expecting; | ||
@@ -270,3 +273,3 @@ } | ||
// DO NOT modify input param `set`, as the set might be cached in the ATN | ||
let stop = new IntervalSet.IntervalSet(); | ||
const stop = new IntervalSet.IntervalSet(); | ||
stop.addSet( set ); | ||
@@ -301,7 +304,7 @@ stop.removeOne( recognizer.constructor.Identifier ); | ||
function recoverInline( recognizer ) { | ||
var identType = recognizer.constructor.Identifier; | ||
const identType = recognizer.constructor.Identifier; | ||
if (!identType || !recognizer.isExpectedToken( identType )) | ||
return super1.recoverInline.call( this, recognizer ); | ||
var reserved = recognizer.getCurrentToken(); | ||
const reserved = recognizer.getCurrentToken(); | ||
if (!keywordRegexp.test( reserved.text )) | ||
@@ -321,8 +324,9 @@ return super1.recoverInline.call( this, recognizer ); | ||
function getMissingSymbol( recognizer ) { | ||
var expectedTokenType = this.getExpectedTokens(recognizer).first(); // get any element | ||
var current = recognizer.getCurrentToken(); | ||
const expectedTokenType = this.getExpectedTokens(recognizer).first(); // get any element | ||
const current = recognizer.getCurrentToken(); | ||
return recognizer.getTokenFactory().create( | ||
current.source, // do s/th special if EOF like in DefaultErrorStrategy ? | ||
expectedTokenType, '', antlr4.Token.DEFAULT_CHANNEL, // empty string as token text | ||
-1, -1, current.line, current.column); | ||
-1, -1, current.line, current.column | ||
); | ||
} | ||
@@ -332,12 +336,11 @@ | ||
// similar to `IntervalSet#toTokenString` | ||
var names = []; | ||
for (let v of expected.intervals) { | ||
for (var j = v.start; j < v.stop; j++) { | ||
names.push( expected.elementName (recognizer.literalNames, recognizer.symbolicNames, j ) ); | ||
} | ||
let names = []; | ||
for (const v of expected.intervals) { | ||
for (let j = v.start; j < v.stop; j++) | ||
names.push( expected.elementName(recognizer.literalNames, recognizer.symbolicNames, j ) ); | ||
} | ||
if (recognizer.$adaptExpectedToken && recognizer.$nextTokensToken === recognizer.$adaptExpectedToken) { | ||
let excludes = (excludesForNextToken && recognizer.$adaptExpectedExcludes[0] instanceof Array) | ||
? recognizer.$adaptExpectedExcludes[0] | ||
: recognizer.$adaptExpectedExcludes; | ||
const excludes = (excludesForNextToken && recognizer.$adaptExpectedExcludes[0] instanceof Array) | ||
? recognizer.$adaptExpectedExcludes[0] | ||
: recognizer.$adaptExpectedExcludes; | ||
names = names.filter( n => !excludes.includes( n ) ); | ||
@@ -348,3 +351,3 @@ } | ||
} | ||
names.sort( (a, b) => tokenPrecedence(a) < tokenPrecedence(b) ? -1 : 1 ); | ||
names.sort( (a, b) => (tokenPrecedence(a) < tokenPrecedence(b) ? -1 : 1) ); | ||
return names; | ||
@@ -365,12 +368,11 @@ } | ||
// 9: <EOF> | ||
} | ||
}; | ||
function tokenPrecedence( name ) { | ||
if (name.length < 2 || name === '<EOF>') | ||
return '9' + name; | ||
let prec = token1sort[ name.charAt(1) ]; | ||
return `9${ name }`; | ||
const prec = token1sort[name.charAt(1)]; | ||
if (prec) | ||
return '' + prec + name; | ||
else | ||
return (name.charAt(1) < 'a' ? '8' : '0') + name; | ||
return `${ prec }${ name }`; | ||
return (name.charAt(1) < 'a' ? '8' : '0') + name; | ||
} | ||
@@ -381,3 +383,3 @@ | ||
return '<EOF>'; | ||
let t = token.type; | ||
const t = token.type; | ||
if (t === antlr4.Token.EOF || t === antlr4.Token.EPSILON ) | ||
@@ -387,4 +389,3 @@ return '<EOF>'; | ||
return "'.'"; | ||
else | ||
return recognizer.literalNames[t] || recognizer.symbolicNames[t]; | ||
return recognizer.literalNames[t] || recognizer.symbolicNames[t]; | ||
} | ||
@@ -401,29 +402,31 @@ | ||
function getExpectedTokensForMessage( recognizer, offendingToken, deadEnds ) { | ||
var atn = recognizer._interp.atn; | ||
const { atn } = recognizer._interp; | ||
if (recognizer.state < 0) | ||
return []; | ||
if (recognizer.state >= atn.states.length) | ||
throw( 'Invalid state number ' + recognizer.state + ' for ' + | ||
this.getTokenErrorDisplay( offendingToken )); | ||
if (recognizer.state >= atn.states.length) { | ||
throw ( `Invalid state number ${ recognizer.state } for ${ | ||
this.getTokenErrorDisplay( offendingToken ) }`); | ||
} | ||
var identType = recognizer.constructor.Identifier; | ||
var hideAltsType = recognizer.constructor.HideAlternatives; | ||
var beforeUnreserved = recognizer.constructor.Number; | ||
const identType = recognizer.constructor.Identifier; | ||
const hideAltsType = recognizer.constructor.HideAlternatives; | ||
const beforeUnreserved = recognizer.constructor.Number; | ||
if (!identType || !beforeUnreserved || beforeUnreserved + 2 > identType) | ||
return intervalSetToArray( recognizer, super1.getExpectedTokens.call( this, recognizer ) ); | ||
var ll1 = new antlr4_LL1Analyzer(atn); | ||
var expected = new IntervalSet.IntervalSet(); | ||
var orig_addInterval = expected.addInterval; | ||
var orig_addSet = expected.addSet; | ||
const ll1 = new antlr4_LL1Analyzer(atn); | ||
const expected = new IntervalSet.IntervalSet(); | ||
const orig_addInterval = expected.addInterval; | ||
const orig_addSet = expected.addSet; | ||
expected.addInterval = addInterval; | ||
expected.addSet = addSet; | ||
let lookBusy = new antlr4.Utils.Set(); | ||
let calledRules = new antlr4.Utils.BitSet(); | ||
const lookBusy = new antlr4.Utils.Set(); | ||
const calledRules = new antlr4.Utils.BitSet(); | ||
if (deadEnds) { | ||
// "No viable alternative" by adaptivePredict() not on first token | ||
for (let trans of deadEnds) | ||
for (const trans of deadEnds) { | ||
ll1._LOOK( trans.state, null, predictionContext( atn, recognizer._ctx ), | ||
expected, lookBusy, calledRules, true, true ); | ||
} | ||
return intervalSetToArray( recognizer, expected, true ); | ||
@@ -434,3 +437,3 @@ } | ||
// We have a state (via sync()) with more "expecting" for the same token | ||
ll1._LOOK( atn.states[ recognizer.$nextTokensState ], null, | ||
ll1._LOOK( atn.states[recognizer.$nextTokensState], null, | ||
predictionContext( atn, recognizer.$nextTokensContext ), | ||
@@ -441,3 +444,3 @@ expected, lookBusy, calledRules, true, true ); | ||
// Use current state to compute "expecting" | ||
ll1._LOOK( atn.states[ recognizer.state ], null, | ||
ll1._LOOK( atn.states[recognizer.state], null, | ||
predictionContext( atn, recognizer._ctx ), | ||
@@ -458,10 +461,11 @@ expected, lookBusy, calledRules, true, true ); | ||
if (v.stop <= identType) { | ||
orig_addInterval.call(this,v); | ||
orig_addInterval.call(this, v); | ||
} | ||
else if (v.start >= identType) { | ||
if (v.stop === identType+1 || !recognizer.tokenRewrite) | ||
orig_addInterval.call(this,v); | ||
if (v.stop === identType + 1 || !recognizer.tokenRewrite) { | ||
orig_addInterval.call(this, v); | ||
} | ||
else { | ||
for (let j = v.start; j < v.stop; j++) | ||
addRange( this, recognizer.tokenRewrite[ j - identType ] || j ); | ||
addRange( this, recognizer.tokenRewrite[j - identType] || j ); | ||
} | ||
@@ -477,3 +481,3 @@ } | ||
function addRange( interval, start, stop ) { | ||
orig_addInterval.call( interval, new IntervalSet.Interval( start, stop || start+1 ) ); | ||
orig_addInterval.call( interval, new IntervalSet.Interval( start, stop || start + 1 ) ); | ||
} | ||
@@ -484,3 +488,3 @@ } | ||
match, | ||
KeywordErrorStrategy | ||
KeywordErrorStrategy, | ||
}; |
@@ -7,5 +7,7 @@ // Generic ANTLR parser class with AST-building functions | ||
var antlr4 = require('antlr4'); | ||
var ATNState = require('antlr4/atn/ATNState').ATNState; | ||
var { addToDictWithIndexNo } = require('../base/dictionaries'); | ||
'use strict'; | ||
const antlr4 = require('antlr4'); | ||
const { ATNState } = require('antlr4/atn/ATNState'); | ||
const { addToDictWithIndexNo } = require('../base/dictionaries'); | ||
const locUtils = require('../base/location'); | ||
@@ -62,4 +64,5 @@ const { parseDocComment } = require('./docCommentParser'); | ||
isStraightBefore, | ||
constructor: GenericAntlrParser // keep this last | ||
}); | ||
constructor: GenericAntlrParser, // keep this last | ||
} | ||
); | ||
@@ -78,3 +81,3 @@ // Patterns for literal token tests and creation. The value is a map from the | ||
// TODO: think about laxer regexp for date/time/timestamp - normalization? | ||
var quotedLiteralPatterns = { | ||
const quotedLiteralPatterns = { | ||
x: { | ||
@@ -85,16 +88,16 @@ test_msg: 'A binary literal must have an even number of characters', | ||
unexpected_char: /[^0-9a-f]/i, | ||
literal: 'hex' | ||
literal: 'hex', | ||
}, | ||
time: { | ||
test_msg: 'Expected time\'HH:MM:SS\' where H, M and S are numbers and \':SS\' is optional', | ||
test_re: /^[0-9]{1,2}:[0-9]{1,2}(:[0-9]{1,2})?$/ | ||
test_re: /^[0-9]{1,2}:[0-9]{1,2}(:[0-9]{1,2})?$/, | ||
}, | ||
date: { | ||
test_msg: 'Expected date\'YYYY-MM-DD\' where Y, M and D are numbers', | ||
test_re: /^[0-9]{1,4}-[0-9]{1,2}-[0-9]{1,2}$/ | ||
test_re: /^[0-9]{1,4}-[0-9]{1,2}-[0-9]{1,2}$/, | ||
}, | ||
timestamp: { | ||
test_msg: 'Expected timestamp\'YYYY-MM-DD HH:MM:SS.u…u\' where Y, M, D, H, S and u are numbers (optional 1-7×u)', | ||
test_re: /^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}(:[0-9]{2}(\.[0-9]{1,7})?)?$/ | ||
} | ||
test_re: /^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}(:[0-9]{2}(\.[0-9]{1,7})?)?$/, | ||
}, | ||
}; | ||
@@ -121,5 +124,5 @@ | ||
tokens = [ text, ...tokens ]; | ||
text = tokens.map( t => t.text.toUpperCase() ).join(' ') + ' is not supported'; | ||
text = `${ tokens.map( t => t.text.toUpperCase() ).join(' ') } is not supported`; | ||
} | ||
this.message( null, this.tokenLocation( tokens[0], tokens[ tokens.length-1 ] ), text ); | ||
this.message( null, this.tokenLocation( tokens[0], tokens[tokens.length - 1] ), text ); | ||
} | ||
@@ -135,18 +138,17 @@ | ||
tokens = [ text, ...tokens ]; | ||
text = tokens.map( t => t.text.toUpperCase() ).join(' ') + ' is not supported'; | ||
text = `${ tokens.map( t => t.text.toUpperCase() ).join(' ') } is not supported`; | ||
} | ||
this.message( null, this.tokenLocation( tokens[0], tokens[ tokens.length-1 ] ), text ); | ||
this.message( null, this.tokenLocation( tokens[0], tokens[tokens.length - 1] ), text ); | ||
} | ||
function noSemicolonHere() { | ||
let handler = this._errHandler; | ||
var t = this.getCurrentToken(); | ||
const handler = this._errHandler; | ||
const t = this.getCurrentToken(); | ||
this.$adaptExpectedToken = t; | ||
this.$adaptExpectedExcludes = ["';'", "'}'"]; | ||
this.$adaptExpectedExcludes = [ "';'", "'}'" ]; | ||
this.$nextTokensToken = t; | ||
this.$nextTokensContext = null; // match() of WITH does not reset | ||
this.$nextTokensState = ATNState.INVALID_STATE_NUMBER; | ||
if (t.text === ';' && handler && handler.reportIgnoredWith ) { | ||
if (t.text === ';' && handler && handler.reportIgnoredWith ) | ||
handler.reportIgnoredWith( this, t ); | ||
} | ||
} | ||
@@ -159,5 +161,5 @@ | ||
if (excludes) { | ||
var t = this.getCurrentToken(); | ||
const t = this.getCurrentToken(); | ||
this.$adaptExpectedToken = t; | ||
this.$adaptExpectedExcludes = (excludes instanceof Array) ? excludes : [excludes]; | ||
this.$adaptExpectedExcludes = (excludes instanceof Array) ? excludes : [ excludes ]; | ||
this.$nextTokensToken = t; | ||
@@ -194,6 +196,7 @@ this.$nextTokensContext = null; | ||
function noAssignmentInSameLine() { | ||
var t = this.getCurrentToken(); | ||
if (t.text === '@' && t.line <= this._input.LT(-1).line) | ||
const t = this.getCurrentToken(); | ||
if (t.text === '@' && t.line <= this._input.LT(-1).line) { | ||
this.message( 'syntax-anno-same-line', t, {}, | ||
'Warning', 'Annotation assignment belongs to next statement' ); | ||
} | ||
} | ||
@@ -220,8 +223,8 @@ | ||
art.location = this.startLocation(); | ||
let stop = this._ctx.stop; | ||
const { stop } = this._ctx; | ||
art.location.end = { | ||
offset: stop.stop + 1, // after the last char (special for EOF?) | ||
line: stop.line, | ||
column: stop.stop - stop.start + stop.column + 2 | ||
} | ||
column: stop.stop - stop.start + stop.column + 2, | ||
}; | ||
return art; | ||
@@ -239,3 +242,3 @@ } | ||
filename: this.filename, | ||
start: { offset: token.start, line: token.line, column: token.column + 1} | ||
start: { offset: token.start, line: token.line, column: token.column + 1 }, | ||
}; | ||
@@ -249,10 +252,10 @@ } | ||
return undefined; | ||
let r = { | ||
const r = { | ||
filename: this.filename, | ||
start: { offset: token.start, line: token.line, column: token.column + 1}, | ||
start: { offset: token.start, line: token.line, column: token.column + 1 }, | ||
end: { // we only have single-line tokens | ||
offset: endToken.stop + 1, // after the last char (special for EOF?) | ||
line: endToken.line, | ||
column: endToken.stop - endToken.start + endToken.column + 2 | ||
} | ||
column: endToken.stop - endToken.start + endToken.column + 2, | ||
}, | ||
}; | ||
@@ -276,3 +279,3 @@ if (val !== undefined) | ||
function docComment( node ) { | ||
if(!this.options.docComment) | ||
if (!this.options.docComment) | ||
return; | ||
@@ -290,3 +293,3 @@ const token = this._input.getHiddenTokenToLeft( this.constructor.DocComment ); | ||
// Classify token (identifier category) for implicit names, | ||
// to be used in the empty alternative to AS <explitName>. | ||
// to be used in the empty alternative to AS <explicitName>. | ||
function classifyImplicitName( category, ref ) { | ||
@@ -334,4 +337,4 @@ if (!ref || ref.path) { | ||
const args = xprToken | ||
? [ { op: { location, val: 'xpr' }, args: [], location: this.tokenLocation( xprToken ) } ] | ||
: []; | ||
? [ { op: { location, val: 'xpr' }, args: [], location: this.tokenLocation( xprToken ) } ] | ||
: []; | ||
return { | ||
@@ -350,6 +353,6 @@ op: { location, val: 'call' }, | ||
function numberLiteral( token, sign, text = token.text ) { | ||
var location = this.tokenLocation( token ); | ||
let location = this.tokenLocation( token ); | ||
if (sign) { | ||
// TODO: warning for space in between | ||
let end = location.end; | ||
const { end } = location; | ||
location = this.startLocation( sign ); | ||
@@ -359,3 +362,3 @@ location.end = end; | ||
} | ||
var num = Number.parseFloat( text||'0' ); // not Number.parseInt() ! | ||
const num = Number.parseFloat( text || '0' ); // not Number.parseInt() ! | ||
if (!Number.isSafeInteger(num)) { | ||
@@ -374,9 +377,9 @@ if (sign != null) | ||
function quotedLiteral( token, literal ) { | ||
var location = this.tokenLocation( token ); | ||
var pos = token.text.search( '\'' ) + 1; // pos of char after quote | ||
var val = token.text.slice( pos, -1 ).replace( /''/g, '\'' ); | ||
const location = this.tokenLocation( token ); | ||
const pos = token.text.search( '\'' ) + 1; // pos of char after quote | ||
const val = token.text.slice( pos, -1 ).replace( /''/g, '\'' ); | ||
if (!literal) | ||
literal = token.text.slice( 0, pos-1 ).toLowerCase(); | ||
var p = quotedLiteralPatterns[ literal ] || {}; | ||
literal = token.text.slice( 0, pos - 1 ).toLowerCase(); | ||
const p = quotedLiteralPatterns[literal] || {}; | ||
@@ -388,5 +391,4 @@ // TODO: make tests available for CSN parser | ||
if (p.unexpected_char) | ||
{ | ||
let idx = val.search(p.unexpected_char); | ||
if (p.unexpected_char) { | ||
const idx = val.search(p.unexpected_char); | ||
if (~idx) { | ||
@@ -396,3 +398,3 @@ this.message( null, { // TODO: message id | ||
start: atChar( idx ), | ||
end: atChar( idx + (val[idx] === '\'' ? 2 : 1) ) | ||
end: atChar( idx + (val[idx] === '\'' ? 2 : 1) ), | ||
}, p.unexpected_msg ); | ||
@@ -404,3 +406,3 @@ } | ||
val: p.normalize && p.normalize(val) || val, | ||
location | ||
location, | ||
}; | ||
@@ -412,3 +414,3 @@ | ||
column: location.start.column + pos + i, | ||
offset: location.start.offset + pos + i | ||
offset: location.start.offset + pos + i, | ||
}; | ||
@@ -461,10 +463,13 @@ } | ||
if (name instanceof Array) { | ||
let last = name.length && name[ name.length-1 ]; | ||
if (last && last.id) // // A.B.C -> 'C' | ||
name = { id: last.id, location: last.location, calculated: true, $inferred: 'as' }; | ||
const last = name.length && name[name.length - 1]; | ||
if (last && last.id) { // // A.B.C -> 'C' | ||
name = { | ||
id: last.id, location: last.location, calculated: true, $inferred: 'as', | ||
}; | ||
} | ||
} | ||
else if (name && name.id == null) { | ||
name.id = pathName (name.path ); // A.B.C -> 'A.B.C' | ||
name.id = pathName(name.path ); // A.B.C -> 'A.B.C' | ||
} | ||
var art = this.assignProps( { name }, annos, props, location ); | ||
const art = this.assignProps( { name }, annos, props, location ); | ||
if (kind) | ||
@@ -476,5 +481,5 @@ art.kind = kind; | ||
// no id was parsed, but with error recovery: no further error | ||
env = env + '_'; // could be tested in name search | ||
env += '_'; // could be tested in name search | ||
if (!parent[env]) | ||
parent[env] = [art]; | ||
parent[env] = [ art ]; | ||
else | ||
@@ -489,11 +494,14 @@ parent[env].push(art); | ||
// do not use function(), otherwise `this` is wrong: | ||
if (kind === 0) | ||
if (kind === 0) { | ||
this.message( 'duplicate-argument', loc, { name }, | ||
'Error', 'Duplicate value for parameter $(NAME)' ); | ||
else if (kind === '') | ||
} | ||
else if (kind === '') { | ||
this.message( 'duplicate-excluding', loc, { name }, | ||
'Error', 'Duplicate EXCLUDING for source element $(NAME)' ); | ||
else | ||
} | ||
else { | ||
this.message( 'duplicate-prop', loc, { name }, | ||
'Error', 'Duplicate value for structure property $(NAME)' ); | ||
} | ||
} ); | ||
@@ -516,3 +524,3 @@ } | ||
function addItem( parent, env, kind, annos, props, location ) { | ||
var art = this.assignProps( {}, annos, props, location ); | ||
const art = this.assignProps( {}, annos, props, location ); | ||
if (kind) | ||
@@ -552,4 +560,4 @@ art.kind = kind; | ||
// - token instanceof antlr4.CommonToken => location of token | ||
for (var key in props) { | ||
var val = props[key]; | ||
for (const key in props) { | ||
let val = props[key]; | ||
if (val instanceof antlr4.CommonToken) | ||
@@ -560,5 +568,4 @@ val = this.tokenLocation( val, undefined, true); | ||
(typeof val !== 'object' || | ||
(val instanceof Array ? val.length : Object.getOwnPropertyNames(val).length) ) ) { | ||
(val instanceof Array ? val.length : Object.getOwnPropertyNames(val).length) ) ) | ||
target[key] = val; | ||
} | ||
} | ||
@@ -572,4 +579,4 @@ if (annos) | ||
function createPrefixOp( token, args ) { | ||
let op = this.tokenLocation( token, undefined, token.text.toLowerCase() ); | ||
return { op, args, location: this.combinedLocation( op, args[ args.length-1 ] ) }; | ||
const op = this.tokenLocation( token, undefined, token.text.toLowerCase() ); | ||
return { op, args, location: this.combinedLocation( op, args[args.length - 1] ) }; | ||
} | ||
@@ -581,4 +588,4 @@ | ||
function setOnce( target, prop, value, ...tokens ) { | ||
var loc = this.tokenLocation( tokens[0], tokens[tokens.length-1] ); | ||
var prev = target[prop]; | ||
const loc = this.tokenLocation( tokens[0], tokens[tokens.length - 1] ); | ||
const prev = target[prop]; | ||
if (prev) { | ||
@@ -598,5 +605,5 @@ this.message( 'syntax-repeated-option', loc, { option: prev.option }, | ||
function setMaxCardinality( art, token, max, inferred ) { | ||
let location = this.tokenLocation( token ); | ||
const location = this.tokenLocation( token ); | ||
if (!art.cardinality) { | ||
art.cardinality = { targetMax: Object.assign( {location}, max ), location }; | ||
art.cardinality = { targetMax: Object.assign( { location }, max ), location }; | ||
if (inferred) | ||
@@ -619,4 +626,4 @@ art.cardinality.$inferred = inferred; | ||
const brace1 = (isComposition) ? 'COMPOSITIONofBRACE' : "'{'"; | ||
const manyOne = (cardinality) ? ['MANY', 'ONE'] : []; | ||
this.excludeExpected( [["'}'", 'COMPOSITIONofBRACE'], brace1, ...manyOne] ); | ||
const manyOne = (cardinality) ? [ 'MANY', 'ONE' ] : []; | ||
this.excludeExpected( [ [ "'}'", 'COMPOSITIONofBRACE' ], brace1, ...manyOne ] ); | ||
} | ||
@@ -626,3 +633,3 @@ | ||
module.exports = { | ||
genericAntlrParser: GenericAntlrParser | ||
genericAntlrParser: GenericAntlrParser, | ||
}; |
@@ -33,3 +33,3 @@ // Main entry point for the Research Vanilla CDS Compiler | ||
hasErrors, | ||
getMessageFunction, | ||
makeMessageFunction, | ||
explainMessage, | ||
@@ -51,13 +51,8 @@ hasMessageExplanation | ||
const fs = require('fs'); | ||
const emdx2csn = require('./edm/annotations/edmx2csn'); // translate edmx annotations into csn | ||
const edmx2csn = require('./edm/annotations/edmx2csn'); // translate edmx annotations into csn | ||
const path = require('path'); | ||
const moduleResolve = require('resolve'); | ||
const { resolveCDS, isLocalFile, extensions, packageFilter } = require('./utils/resolve'); | ||
const extensions = ['.cds', '.csn', '.csn.json', '.json']; | ||
function packageFilter( pkg ) { | ||
return { main: pkg.cds && pkg.cds.main || pkg.main }; | ||
} | ||
/** | ||
@@ -75,3 +70,3 @@ * Parse the given source with the correct parser based on the file name's | ||
if (ext === '.xml') { | ||
return emdx2csn( source, filename, options ); | ||
return edmx2csn( source, filename, options ); | ||
} | ||
@@ -84,7 +79,5 @@ else if (['.json', '.csn'].includes(ext)) { | ||
const model = {}; | ||
const message = getMessageFunction( model, options ); | ||
message( 'file-unknown-ext', | ||
emptyWeakLocation(filename), null, | ||
{ file: ext && ext.slice(1), '#': !ext && 'none' }, | ||
'Error', { | ||
const { error } = makeMessageFunction( model, options ); | ||
error( 'file-unknown-ext', emptyWeakLocation(filename), | ||
{ file: ext && ext.slice(1), '#': !ext && 'none' }, { | ||
std: 'Unknown file extension $(FILE)', | ||
@@ -144,4 +137,8 @@ none: 'No file extension' | ||
// For the transition phase, have the option to use the new OR old | ||
// resolve functionality. | ||
const resolveFunction = options.newResolve ? resolveCDS : moduleResolve; | ||
const model = { sources: a.sources, options }; | ||
const message = getMessageFunction( model ); | ||
const { error } = makeMessageFunction( model ); | ||
const parseOptions = optionsWithMessages( options, model ); | ||
@@ -340,3 +337,3 @@ let all = promiseAllDoNotRejectImmediately( a.files.map(readAndParse) ); | ||
: dep.module; | ||
moduleResolve( path, opts, function (err, res) { | ||
resolveFunction( path, opts, function (err, res) { | ||
// console.log('RESOLVE', dep, res, err) | ||
@@ -381,9 +378,9 @@ if (err) | ||
for (let from of dep.usingFroms) | ||
message( 'file-not-readable', from.location, null, { file: resolved }, | ||
'Error', 'Cannot read file $(FILE)' ); | ||
error( 'file-not-readable', from.location, { file: resolved }, | ||
'Cannot read file $(FILE)' ); | ||
} | ||
else if (/^\.\.?\//.test( dep.module ) ) { | ||
else if (isLocalFile( dep.module ) ) { | ||
for (let from of dep.usingFroms) | ||
message( 'file-unknown-local', from.location, null, { file: dep.module }, | ||
'Error', 'Cannot find local module $(FILE)' ); | ||
error( 'file-unknown-local', from.location, { file: dep.module }, | ||
'Cannot find local module $(FILE)' ); | ||
} | ||
@@ -393,5 +390,4 @@ else { | ||
for (let from of dep.usingFroms) | ||
message( 'file-unknown-package', from.location, null, | ||
{ file: dep.module, '#': internal }, | ||
'Error', { | ||
error( 'file-unknown-package', from.location, | ||
{ file: dep.module, '#': internal }, { | ||
std: 'Cannot find package $(FILE)', | ||
@@ -460,3 +456,3 @@ internal: 'Cannot find package module $(FILE)' | ||
const model = { sources, options }; | ||
getMessageFunction( model ); // make sure that we have a "central" messages array | ||
makeMessageFunction( model ); // make sure that we have a "central" messages array | ||
const parseOptions = optionsWithMessages( options, model ); | ||
@@ -463,0 +459,0 @@ |
// CSN functionality for resolving references | ||
// | ||
// The functions in this module expect a well-formed CSN with valid references. | ||
@@ -9,7 +9,7 @@ // If that is not the case, it simply throws an error (which might even be a | ||
// internalDoc/CoreCompiler.md#use-of-the-core-compiler-for-csn-processors. | ||
// | ||
// For details about the name resolution in CSN, see | ||
// internalDoc/CsnSyntax.md#helper-property-for-simplified-name-resolution | ||
// and doc/NameResolution.md. | ||
// | ||
// Terminology used in this file: | ||
@@ -25,4 +25,6 @@ // | ||
'use strict' | ||
'use strict'; | ||
const { setProp: setLink } = require('../base/model'); | ||
const BUILTIN_TYPE = {}; | ||
@@ -33,5 +35,8 @@ | ||
// dictionary) is handled extra here. | ||
const artifactProperties | ||
= ['elements', 'columns', 'keys', 'mixin', 'enum', 'params', 'actions', 'definitions', 'extensions']; | ||
const artifactProperties = [ 'elements', 'columns', 'keys', 'mixin', 'enum', | ||
'params', 'actions', 'definitions', 'extensions' ]; | ||
/** | ||
* @param {CSN.Model} csn | ||
*/ | ||
function csnRefs( csn ) { | ||
@@ -41,8 +46,14 @@ const views = Object.create(null); // cache for views - OK to add it to CSN? | ||
let refLinks = null; | ||
return { effectiveType, artifactRef, inspectRef, queryOrMain }; | ||
return { | ||
effectiveType, artifactRef, inspectRef, queryOrMain, | ||
}; | ||
// Return the type relevant for name resolution, i.e. the object which has a | ||
// `target`, `elements`, `enum` property, or no `type` property. To avoid | ||
// confusion with the "base type", we do not use the term "final type". | ||
// (This function could be omitted if we would use JS prototypes for type refs.) | ||
/** | ||
* Return the type relevant for name resolution, i.e. the object which has a | ||
* `target`, `elements`, `enum` property, or no `type` property. To avoid | ||
* confusion with the "base type", we do not use the term "final type". | ||
* (This function could be omitted if we would use JS prototypes for type refs.) | ||
* | ||
* @param {CSN.ArtifactWithRefs} art | ||
*/ | ||
function effectiveType( art ) { | ||
@@ -68,2 +79,5 @@ if (art._effectiveType) | ||
/** | ||
* @param {CSN.Artifact} art | ||
*/ | ||
function navigationEnv( art ) { | ||
@@ -77,10 +91,16 @@ let type = effectiveType( art ); | ||
// cannot navigate along targetAspect! | ||
return (type.target) ? csn.definitions[ type.target ] : type; | ||
return (type.target) ? csn.definitions[type.target] : type; | ||
} | ||
// Return the object pointed to by the artifact reference (in 'type', | ||
// 'includes', 'target', raw 'from'). | ||
/** | ||
* Return the object pointed to by the artifact reference (in 'type', | ||
* 'includes', 'target', raw 'from'). | ||
* | ||
* @param {CSN.ArtifactReferencePath|string} ref | ||
* @param {any} [notFound] Value that is returned in case the artifact reference | ||
* could not be found. | ||
*/ | ||
function artifactRef( ref, notFound ) { | ||
if (typeof ref === 'string') { | ||
const art = csn.definitions[ ref ] || notFound; | ||
const art = csn.definitions[ref] || notFound; | ||
if (art !== undefined) | ||
@@ -90,7 +110,7 @@ return art; | ||
else { | ||
const [head, ...tail] = ref.ref; | ||
let art = csn.definitions[ pathId( head ) ]; | ||
for (const elem of tail) { | ||
art = navigationEnv( art ).elements[ pathId( elem ) ]; | ||
} | ||
const [ head, ...tail ] = ref.ref; | ||
let art = csn.definitions[pathId( head )]; | ||
for (const elem of tail) | ||
art = navigationEnv( art ).elements[pathId( elem )]; | ||
if (art) | ||
@@ -104,11 +124,20 @@ return art; | ||
// Return the entity we select from | ||
/** | ||
* Return the entity we select from | ||
* | ||
* @param {CSN.ArtifactReferencePath} ref | ||
* @returns {CSN.Definition} | ||
*/ | ||
function fromRef( ref ) { | ||
const path = ref.ref; | ||
const name = (path.length === 1) | ||
? pathId( path[0] ) | ||
: effectiveType( artifactRef( ref ) ).target; | ||
return csn.definitions[ name ]; | ||
? pathId( path[0] ) | ||
: effectiveType( artifactRef( ref ) ).target; | ||
return csn.definitions[name]; | ||
} | ||
/** | ||
* @param {CSN.Path} csnPath | ||
* @param {number} refCsnPathIndex | ||
*/ | ||
function whereEntity( csnPath, refCsnPathIndex ) { | ||
@@ -119,14 +148,19 @@ if (refCsnPath.length !== refCsnPathIndex || | ||
const path = csnPath.slice( 0, refCsnPathIndex ); | ||
const links = inspectRef( path ).links; | ||
const { links } = inspectRef( path ); | ||
refCsnPath = path; | ||
refLinks = links; | ||
} | ||
return refLinks[ csnPath[ refCsnPathIndex + 1 ] ].env; | ||
return refLinks[csnPath[refCsnPathIndex + 1]].env; | ||
} | ||
/** | ||
* @param {CSN.Path} csnPath | ||
*/ | ||
function inspectRef( csnPath ) { | ||
const { obj, parent, query, scope, refCsnPathIndex } = analyseCsnPath( csnPath, csn ); | ||
const { | ||
obj, parent, query, scope, refCsnPathIndex, | ||
} = analyseCsnPath( csnPath, csn ); | ||
const name = csnPath[1]; | ||
const main = csn.definitions[ name ]; | ||
const queries = views[ name ] || main.query && allQueries( name, main ); | ||
const main = csn.definitions[name]; | ||
const queries = views[name] || main.query && allQueries( name, main ); | ||
@@ -139,9 +173,11 @@ const path = (typeof obj === 'string') ? [ obj ] : obj.ref; | ||
// 1,2: with 'param' or 'global' property, in `keys` | ||
if (obj.param) | ||
return expandRefPath( path, main.params[ head ], 'param' ); | ||
else if (obj.global || ['type', 'includes', 'target', 'from'].includes( scope )) | ||
return expandRefPath( path, csn.definitions[ head ], scope ); | ||
if (obj.param) { | ||
return expandRefPath( path, main.params[head], 'param' ); | ||
} | ||
else if (obj.global || [ 'type', 'includes', 'target', 'from' ].includes( scope )) { | ||
return expandRefPath( path, csn.definitions[head], scope ); | ||
} | ||
else if (scope === 'keys') { | ||
const target = csn.definitions[ parent.target || parent.cast.target ]; | ||
return expandRefPath( path, target.elements[ head ], 'keys' ); | ||
const target = csn.definitions[parent.target || parent.cast.target]; | ||
return expandRefPath( path, target.elements[head], 'keys' ); | ||
} | ||
@@ -151,8 +187,7 @@ // 3: $magic | ||
if (head === '$self' || head === '$projection') { | ||
let self = query ? queryOrMain( query, main ) : main; | ||
const self = query ? queryOrMain( query, main ) : main; | ||
return expandRefPath( path, self, '$self' ); | ||
} | ||
else { | ||
return { scope: '$magic' }; | ||
} | ||
return { scope: '$magic' }; | ||
} | ||
@@ -162,27 +197,26 @@ // 4: where inside ref | ||
const { elements } = whereEntity( csnPath, refCsnPathIndex ); | ||
return expandRefPath( path, elements[ head ], scope ); | ||
return expandRefPath( path, elements[head], scope ); | ||
} | ||
// 5,6,7: outside queries, in queries where inferred elements are referred to | ||
if (!query) | ||
return expandRefPath( path, (parent || main).elements[ head ] ); | ||
return expandRefPath( path, (parent || main).elements[head] ); | ||
const select = query.SELECT; | ||
if (!select || obj.$env === true) | ||
// TODO: do not do this if current query has a parent query (except with obj.$env) | ||
return expandRefPath( path, queryOrMain( query, main ).elements[ head ] ); | ||
return expandRefPath( path, queryOrMain( query, main ).elements[head] ); | ||
// With explicitly provided $env: | ||
if (typeof obj.$env === 'number') { // head is mixin or table alias name | ||
const s = (obj.$env) ? queries[ obj.$env - 1 ].SELECT : select; | ||
const m = s.mixin && s.mixin[ head ]; | ||
return expandRefPath( path, m || s._sources[ head ], (m ? 'mixin' : 'alias') ); | ||
const s = (obj.$env) ? queries[obj.$env - 1].SELECT : select; | ||
const m = s.mixin && s.mixin[head]; | ||
return expandRefPath( path, m || s._sources[head], (m ? 'mixin' : 'alias') ); | ||
} | ||
else if (typeof obj.$env === 'string') { | ||
const source = select._sources[ obj.$env ]; | ||
const source = select._sources[obj.$env]; | ||
// Had a case where an obj.$env was the name of a mixin | ||
if(source) | ||
return expandRefPath( path, source.elements[ head ], 'source' ); | ||
else if(select.mixin && select.mixin[obj.$env]) | ||
return expandRefPath( path, select.mixin[ head ], 'source' ); | ||
else | ||
throw new Error('No source found!'); | ||
if (source) | ||
return expandRefPath( path, source.elements[head], 'source' ); | ||
else if (select.mixin && select.mixin[obj.$env]) | ||
return expandRefPath( path, select.mixin[head], 'source' ); | ||
throw new Error('No source found!'); | ||
} | ||
@@ -192,3 +226,3 @@ | ||
if (scope !== 'from-on' && select.mixin) { | ||
const art = select.mixin[ head ]; | ||
const art = select.mixin[head]; | ||
if (art) | ||
@@ -199,3 +233,3 @@ return expandRefPath( path, art, 'mixin' ); | ||
if (path.length > 1 && (select.$alias || scope !== 'from-on')) { | ||
const art = select._sources[ head ]; | ||
const art = select._sources[head]; | ||
if (art) | ||
@@ -206,24 +240,31 @@ return expandRefPath( path, art, 'alias' ); | ||
// TODO: do not do this if current query has a parent query !!! | ||
if (scope === 'on' || scope === 'orderBy') { | ||
return expandRefPath( path, queryOrMain( query, main ).elements[ head ] ); | ||
} | ||
if (scope === 'on' || scope === 'orderBy') | ||
return expandRefPath( path, queryOrMain( query, main ).elements[head] ); | ||
if (typeof select.$alias === 'string') { // with unique source | ||
const source = select._sources[ select.$alias ]; | ||
return expandRefPath( path, source.elements[ head ], 'source' ); | ||
const source = select._sources[select.$alias]; | ||
return expandRefPath( path, source.elements[head], 'source' ); | ||
} | ||
throw new Error( 'Missing helper property $env: ' + scope ); | ||
throw new Error( `Missing helper property $env: ${ scope }` ); | ||
} | ||
/** | ||
* @param {CSN.Path} path | ||
* @param {CSN.Artifact} art | ||
* @param {string | null} [scope] | ||
*/ | ||
function expandRefPath( path, art, scope = null ) { | ||
/** @type {{idx, art?, env?}[]} */ | ||
const links = path.map( (_v, idx) => ({ idx }) ); | ||
links[0].art = art; | ||
for (let i = 1; i < links.length; ++i) { // yes, starting at 1 | ||
for (let i = 1; i < links.length; ++i) { // yes, starting at 1, links[0] is set above | ||
art = navigationEnv( art ); | ||
links[ i - 1 ].env = art; | ||
art = art.elements[ pathId( path[i] ) ]; | ||
links[ i ].art = art; | ||
links[i - 1].env = art; | ||
art = art.elements[pathId( path[i] )]; | ||
links[i].art = art; | ||
} | ||
if (scope === 'from') { | ||
art = navigationEnv( art ); | ||
links[ links.length - 1 ].env = art; | ||
links[links.length - 1].env = art; | ||
} | ||
@@ -233,2 +274,9 @@ return { links, art, scope }; | ||
/** | ||
* Get the array of all (sub-)queries inside the given `main` artifact (of `main.query`). | ||
* | ||
* @param {CSN.PathSegment} name Name of the view (as a string) | ||
* @param {CSN.Definition} main | ||
* @returns {CSN.Query[]} | ||
*/ | ||
function allQueries( name, main ) { | ||
@@ -239,10 +287,10 @@ const all = []; | ||
const as = query.as || implicitAs( query.ref ); | ||
select._sources[ as ] = fromRef( query ); | ||
select.$alias = (select.$alias != null) ? typeof select.$alias === 'string' : as; | ||
select._sources[as] = fromRef( query ); | ||
select.$alias = (select.$alias !== null) ? typeof select.$alias === 'string' : as; | ||
} | ||
else if (select && query.as) { // sub query in FROM | ||
const as = query.as; | ||
select._sources[ as ] = queryOrMain( query, main ); | ||
const { as } = query; | ||
select._sources[as] = queryOrMain( query, main ); | ||
setLink( select, '$alias', | ||
(select.$alias != null) ? typeof select.$alias === 'string' : as ); | ||
(select.$alias !== null) ? typeof select.$alias === 'string' : as ); | ||
} | ||
@@ -260,2 +308,6 @@ if (query.SELECT) { // every SELECT query -- TODO: remember number? | ||
/** | ||
* @param {CSN.Query} query | ||
* @param {CSN.Definition} main | ||
*/ | ||
function queryOrMain( query, main ) { | ||
@@ -272,5 +324,10 @@ while (query.SET) | ||
return main; | ||
throw new Error( 'Query elements not available: ' + Object.keys( query ).join('+')); | ||
throw new Error( `Query elements not available: ${ Object.keys( query ).join('+') }`); | ||
} | ||
/** | ||
* @param {CSN.Query} query | ||
* @param {CSN.QuerySelect} select | ||
* @param {(query: CSN.Query&CSN.QueryFrom, select: CSN.QuerySelectEnriched) => void} callback | ||
*/ | ||
function traverseQuery( query, select, callback ) { | ||
@@ -286,3 +343,3 @@ if (query.SELECT) { | ||
} | ||
for (const prop of ['args', 'xpr', 'columns', 'where', 'having']) { | ||
for (const prop of [ 'args', 'xpr', 'columns', 'where', 'having' ]) { | ||
// all properties which could have sub queries (directly or indirectly) | ||
@@ -297,5 +354,11 @@ const expr = query[prop]; | ||
/** | ||
* @param {CSN.QueryFrom} from | ||
* @param {CSN.QuerySelect} select | ||
* @param {(from: CSN.QueryFrom, select: CSN.QuerySelect) => void} callback | ||
*/ | ||
function traverseFrom( from, select, callback ) { | ||
if (from.ref) | ||
if (from.ref) { | ||
callback( from, select ); | ||
} | ||
else if (from.args) { // join | ||
@@ -306,10 +369,7 @@ from.args.forEach( arg => traverseFrom( arg, select, callback ) ); | ||
} | ||
else | ||
traverseQuery( from, select, callback ); // sub query in FROM | ||
else { | ||
traverseQuery( from, select, callback ); | ||
} // sub query in FROM | ||
} | ||
/**
 * Attach `value` to `obj` under `prop` as a non-enumerable "link" property.
 * The property is configurable and writable, so it can later be overwritten
 * or redefined, but it stays hidden from `Object.keys`/`JSON.stringify`.
 *
 * @param {object} obj Object to attach the property to
 * @param {string} prop Property name
 * @param {any} value Property value
 */
function setLink( obj, prop, value ) {
  const descriptor = { configurable: true, writable: true, value };
  Object.defineProperty( obj, prop, descriptor );
}
function pathId( item ) { | ||
@@ -320,9 +380,16 @@ return (typeof item === 'string') ? item : item.id; | ||
/**
 * Compute the implicit alias for a reference path: the id of the last path
 * segment, with any dotted prefix stripped (e.g. `['ns.Books']` -> `'Books'`).
 *
 * @param {Array} ref Reference path; segments are strings or `{ id }` objects
 *                    (resolved via the sibling `pathId` helper)
 * @returns {string} The implicit alias
 */
function implicitAs( ref ) {
  const id = pathId( ref[ref.length - 1] );
  // Keep only the part after the last '.'; if there is no dot,
  // lastIndexOf returns -1 and the whole id is returned unchanged.
  return id.substring( id.lastIndexOf('.') + 1 );
}
function analyseCsnPath( csnPath, obj ) { | ||
/** | ||
* @param {CSN.Path} csnPath | ||
* @param {CSN.Model} csn | ||
*/ | ||
function analyseCsnPath( csnPath, csn ) { | ||
if (csnPath[0] !== 'definitions') | ||
throw new Error( 'References outside definitions not supported yet'); | ||
/** @type {object} */ | ||
let obj = csn; | ||
let parent = null; | ||
@@ -332,14 +399,16 @@ let query = null; | ||
let art = null; | ||
/** @type {boolean|string|number} */ | ||
let isName = false; | ||
let refCsnPathIndex = 0; | ||
csnPath.forEach( function loop( prop, index ) { | ||
if (isName || Array.isArray( obj )) { // array item, name/index of artifact/member, (named) argument | ||
csnPath.forEach( ( prop, index ) => { | ||
// array item, name/index of artifact/member, (named) argument | ||
if (isName || Array.isArray( obj )) { | ||
if (typeof isName === 'string') { | ||
parent = art; | ||
art = obj[ prop ]; | ||
art = obj[prop]; | ||
} | ||
isName = false; | ||
} | ||
else if (artifactProperties.includes( prop )) { | ||
else if (artifactProperties.includes( String(prop) )) { | ||
isName = prop; | ||
@@ -349,3 +418,3 @@ scope = prop; | ||
else if (prop === 'items' || prop === 'returns') { | ||
art = obj[ prop ]; | ||
art = obj[prop]; | ||
} | ||
@@ -379,14 +448,17 @@ else if (prop === 'args') { | ||
} | ||
obj = obj[ prop ]; | ||
obj = obj[prop]; | ||
} ); | ||
// console.log( 'CPATH:', csnPath, scope, obj, parent.$location ); | ||
return { obj, parent, query, scope, refCsnPathIndex }; | ||
return { | ||
obj, parent, query, scope, refCsnPathIndex, | ||
}; | ||
} | ||
csnRefs.traverseQuery = traverseQuery; | ||
csnRefs.artifactProperties = artifactProperties; | ||
csnRefs.implicitAs = implicitAs; | ||
csnRefs.analyseCsnPath = analyseCsnPath; | ||
csnRefs.pathId = pathId; | ||
module.exports = csnRefs; | ||
module.exports = { | ||
csnRefs, | ||
traverseQuery, | ||
artifactProperties, | ||
implicitAs, | ||
analyseCsnPath, | ||
pathId, | ||
}; |
@@ -1,5 +0,5 @@ | ||
'use strict' | ||
'use strict'; | ||
const { setProp } = require('../base/model'); | ||
const csnRefs = require('../model/csnRefs'); | ||
const { csnRefs } = require('../model/csnRefs'); | ||
const { sortCsn } = require('../json/to-csn'); | ||
@@ -71,3 +71,3 @@ | ||
/** | ||
* Check if an identififer has already been visited and | ||
* Check if an identifier has already been visited and | ||
* add it to the list of visited identifiers. | ||
@@ -717,3 +717,3 @@ * @param {string} id unique identifier | ||
* | ||
* @param {string} elemName Name of the elemnt | ||
* @param {string} elemName Name of the element | ||
* @param {('plain'|'quoted'|'hdbcds')} namingConvention The naming convention to use | ||
@@ -720,0 +720,0 @@ * @returns {string} The resulting database element name for 'elemName', depending on the current naming convention. |
@@ -28,5 +28,5 @@ // For testing: reveal non-enumerable properties in CSN, display result of csnRefs | ||
'use strict' | ||
'use strict'; | ||
const csnRefs = require('./csnRefs'); | ||
const { csnRefs, artifactProperties } = require('./csnRefs'); | ||
const { locationString } = require('../base/messages'); | ||
@@ -161,3 +161,3 @@ | ||
return; | ||
const isMember = csnRefs.artifactProperties.includes( prop ); | ||
const isMember = artifactProperties.includes( prop ); | ||
if (!isMember && node.$location) { | ||
@@ -164,0 +164,0 @@ const value = locationString( node.$location ); |
@@ -1,2 +0,2 @@ | ||
'use strict' | ||
'use strict'; | ||
@@ -3,0 +3,0 @@ // Low-level utility functions to work with augmented CSN. |
@@ -12,2 +12,4 @@ // Make internal properties of the XSN / augmented CSN visible | ||
'use strict'; | ||
const msg = require('../base/messages'); | ||
@@ -14,0 +16,0 @@ |
@@ -0,1 +1,3 @@ | ||
'use strict'; | ||
const { | ||
@@ -7,3 +9,2 @@ forEachDefinition, | ||
// TODO clarify API | ||
@@ -10,0 +11,0 @@ function compareModels(beforeModel, afterModel, deltaMode=false) { |
@@ -0,1 +1,3 @@ | ||
'use strict'; | ||
const { createOptionProcessor } = require('./base/optionProcessorHelper'); | ||
@@ -28,2 +30,3 @@ | ||
.option(' --old-transformers') | ||
.option(' --new-resolve') | ||
.option(' --dependent-autoexposed') | ||
@@ -91,2 +94,3 @@ .option(' --long-autoexposed') | ||
Valid values are: | ||
addTextsLanguageAssoc | ||
subElemRedirections | ||
@@ -101,2 +105,3 @@ keyRefError | ||
--old-transformers Use the old transformers that work on XSN instead of CSN | ||
--new-resolve Use new module resolver. Will become the default in the future. | ||
--hana-flavor Compile with backward compatibility for HANA CDS (incomplete) | ||
@@ -175,2 +180,3 @@ --parse-only Stop compilation after parsing and write result to <stdout> | ||
.option(' --odata-proxies') | ||
.option(' --odata-x-service-refs') | ||
.option(' --odata-foreign-keys') | ||
@@ -202,2 +208,4 @@ .option('-c, --csn') | ||
--odata-proxies (highly experimental) Generate Proxies for out-of-service navigation targets. | ||
--odata-x-service-refs (highly experimental) Generate schema references and external proxies, | ||
is overruled by --odata-proxies. | ||
--odata-foreign-keys Render foreign keys in structured format (V4 only) | ||
@@ -204,0 +212,0 @@ -n, --names <style> Annotate artifacts and elements with "@cds.persistence.name", which is |
@@ -9,2 +9,4 @@ /** | ||
'use strict'; | ||
const walker = require('../json/walker') | ||
@@ -11,0 +13,0 @@ |
// Common render functions for toCdl.js and toSql.js | ||
'use strict'; | ||
const functionsWithoutParams = { | ||
@@ -4,0 +6,0 @@ hana: { |
@@ -42,3 +42,3 @@ | ||
'cds.Time': 'TIME', | ||
'cds.DateTime': 'TIMESTAMP', // https://github.wdf.sap.corp/cdx/cds-compiler/issues/2758 | ||
'cds.DateTime': 'TIMESTAMP', // cds-compiler#2758 | ||
'cds.Timestamp': 'TIMESTAMP', | ||
@@ -268,3 +268,4 @@ 'cds.Boolean': 'BOOLEAN', | ||
} else { | ||
signal(error`"${artifactName}": Entity must have at least one element that is non-virtual`, art.location); | ||
// TODO: Already be handled by 'empty-entity' reclassification; better location | ||
signal(error`"${artifactName}": Entity must have at least one element that is non-virtual`, ['definitions', artifactName]); | ||
} | ||
@@ -623,3 +624,3 @@ let primaryKeys = Object.keys(art.elements).filter(name => art.elements[name].key) | ||
// Even the first step might have parameters and/or a filter | ||
// Render the actual parameter list. If the path has no actual parameters, | ||
// Render the actual parameter list. If the path has no actual parameters, | ||
// the ref is not rendered as { id: ...; args: } but as short form of ref[0] ;) | ||
@@ -839,3 +840,4 @@ // An empty actual parameter list is rendered as `()`. | ||
if (options.toSql.dialect === 'sqlite') { | ||
signal(info`"${artifactName}": HANA table types are not supported in SQLite`, art.location); | ||
// TODO: Signal is not covered by tests + better location | ||
signal(info`"${artifactName}": HANA table types are not supported in SQLite`, ['definitions', artifactName]); | ||
return ''; | ||
@@ -856,7 +858,9 @@ } | ||
} else { | ||
signal(error`"${artifactName}": HANA table type must have at least one element that is non-virtual`, art.location); | ||
// TODO: Signal is not covered by tests + better location | ||
signal(error`"${artifactName}": HANA table type must have at least one element that is non-virtual`, ['definitions', artifactName]); | ||
} | ||
} else { | ||
// TODO: Signal is not covered by tests + better location | ||
// Non-structured HANA table type | ||
signal(error`"${artifactName}": HANA table types must have structured types for conversion to SQL`, art.location); | ||
signal(error`"${artifactName}": HANA table types must have structured types for conversion to SQL`, ['definitions', artifactName]); | ||
return ''; | ||
@@ -876,3 +880,4 @@ } | ||
} | ||
signal(error`"${artifactName}.${elementName}": Anonymous structured types are not supported for conversion to SQL`, elm.location); | ||
// TODO: Signal is not covered by tests + better location | ||
signal(error`"${artifactName}.${elementName}": Anonymous structured types are not supported for conversion to SQL`, ['definitions',artifactName, 'elements', elementName]); | ||
return result; | ||
@@ -883,4 +888,5 @@ } | ||
if (elm.target) { | ||
// TODO: Signal is not covered by tests + better location | ||
// We can't do associations yet | ||
signal(error`"${artifactName}.${elementName}": Association and composition types are not yet supported for conversion to SQL`, elm.location); | ||
signal(error`"${artifactName}.${elementName}": Association and composition types are not yet supported for conversion to SQL`, ['definitions',artifactName, 'elements', elementName]); | ||
return result; | ||
@@ -1021,4 +1027,5 @@ } | ||
// #foo | ||
// TODO: Signal is not covered by tests + better location | ||
// FIXME: We can't do enums yet because they are not resolved (and we don't bother finding their value by hand) | ||
signal(error`Enum values are not yet supported for conversion to SQL`, x.location); | ||
signal(error`Enum values are not yet supported for conversion to SQL`, x.$location); | ||
return ''; | ||
@@ -1025,0 +1032,0 @@ } |
@@ -62,3 +62,3 @@ const schemaObjects = require('./swaggerSchemaObjects'); | ||
// the response in the swagger model has a content with schema pointing to the corresponding array declaration in the data model | ||
// and in the headers of the response a 'x-next' entry is inluded (the pointer to the next page) | ||
// and in the headers of the response a 'x-next' entry is included (the pointer to the next page) | ||
let arrayedResponse = action.returns && action.returns.items; | ||
@@ -74,6 +74,6 @@ | ||
'Swagger.DELETE', 'Swagger.delete', 'Swagger.Delete', | ||
// 'Swagger.OPTIONS', 'Swagger.options', 'Swagget.Options', | ||
// 'Swagger.OPTIONS', 'Swagger.options', 'Swagger.Options', | ||
'Swagger.HEAD', 'Swagger.head', 'Swagger.Head', | ||
'Swagger.PATCH', 'Swagger.patch', 'Swagger.Patch' | ||
// 'Swagger.TRACE', 'Swagger.trace', 'Swagget.Trace' | ||
// 'Swagger.TRACE', 'Swagger.trace', 'Swagger.Trace' | ||
].includes(a.name.path.map(p => p.id).join('.'))); | ||
@@ -97,3 +97,3 @@ actMethods.forEach(m => { | ||
// the desired path where the operations serves can be spcified via the annotation @Swagger.path, if not we are taking the name | ||
// the desired path where the operations serves can be specified via the annotation @Swagger.path, if not we are taking the name | ||
function getPathString(action) { | ||
@@ -100,0 +100,0 @@ if (action['@Swagger.path'] && action['@Swagger.path'].val) |
@@ -1,67 +0,69 @@ | ||
function isAlreadyBraced(expression, start, end){ | ||
const isBraced = start - 1 > -1 && end + 1 < expression.length && expression[start-1] === '(' && expression[end+1] === ')'; | ||
return isBraced; | ||
} | ||
'use strict'; | ||
function binarycomparison(expression, token, index){ | ||
if(!isAlreadyBraced(expression, index-1, index+1)){ | ||
expression.splice(index+2 > expression.length ? expression.length : index +2 ,0,')'); | ||
expression.splice(index-1 > -1 ? index - 1 : 0,0,'('); | ||
} | ||
function isAlreadyBraced(expression, start, end){ | ||
const isBraced = start - 1 > -1 && end + 1 < expression.length && expression[start-1] === '(' && expression[end+1] === ')'; | ||
return isBraced; | ||
} | ||
return index + 3; | ||
function binarycomparison(expression, token, index){ | ||
if(!isAlreadyBraced(expression, index-1, index+1)){ | ||
expression.splice(index+2 > expression.length ? expression.length : index +2 ,0,')'); | ||
expression.splice(index-1 > -1 ? index - 1 : 0,0,'('); | ||
} | ||
function beetween(expression, token, index){ | ||
let start = index-1, end = index+4; | ||
if(expression[index-1] === 'not'){ | ||
start -= 1; | ||
} | ||
return index + 3; | ||
} | ||
if(!isAlreadyBraced(expression, start, end)){ | ||
expression.splice(end > expression.length ? expression.length : end ,0,')'); | ||
expression.splice(start > -1 ? start : 0,0,'('); | ||
} | ||
function beetween(expression, token, index){ | ||
let start = index-1, end = index+4; | ||
if(expression[index-1] === 'not'){ | ||
start -= 1; | ||
} | ||
return index + 4; | ||
if(!isAlreadyBraced(expression, start, end)){ | ||
expression.splice(end > expression.length ? expression.length : end ,0,')'); | ||
expression.splice(start > -1 ? start : 0,0,'('); | ||
} | ||
function like(expression, token, index){ | ||
let start = index-1, end = index+2; | ||
if(expression[index-1] === 'not'){ | ||
start -= 1; | ||
} | ||
return index + 4; | ||
} | ||
if(!isAlreadyBraced(expression, start, end)){ | ||
expression.splice(end > expression.length ? expression.length : end ,0,')'); | ||
expression.splice(start > -1 ? start : 0,0,'('); | ||
} | ||
return index + 3; | ||
function like(expression, token, index){ | ||
let start = index-1, end = index+2; | ||
if(expression[index-1] === 'not'){ | ||
start -= 1; | ||
} | ||
const bracers = { | ||
'=' : binarycomparison, | ||
'>' : binarycomparison, | ||
'<' : binarycomparison, | ||
'>=': binarycomparison, | ||
'<=': binarycomparison, | ||
'!=': binarycomparison, | ||
'between': beetween, | ||
'like': like | ||
if(!isAlreadyBraced(expression, start, end)){ | ||
expression.splice(end > expression.length ? expression.length : end ,0,')'); | ||
expression.splice(start > -1 ? start : 0,0,'('); | ||
} | ||
function braceExpression(expr){ | ||
for(let i = 0; i < expr.length; i++){ | ||
const token = expr[i]; | ||
if(token && token.xpr){ | ||
token.xpr = braceExpression(token.xpr); | ||
} | ||
if(bracers[token]){ | ||
i = bracers[token](expr, token, i); | ||
} | ||
return index + 3; | ||
} | ||
const bracers = { | ||
'=' : binarycomparison, | ||
'>' : binarycomparison, | ||
'<' : binarycomparison, | ||
'>=': binarycomparison, | ||
'<=': binarycomparison, | ||
'!=': binarycomparison, | ||
'between': beetween, | ||
'like': like | ||
} | ||
function braceExpression(expr){ | ||
for(let i = 0; i < expr.length; i++){ | ||
const token = expr[i]; | ||
if(token && token.xpr){ | ||
token.xpr = braceExpression(token.xpr); | ||
} | ||
if(bracers[token]){ | ||
i = bracers[token](expr, token, i); | ||
} | ||
} | ||
return expr; | ||
} | ||
module.exports = braceExpression; | ||
return expr; | ||
} | ||
module.exports = braceExpression; |
@@ -274,3 +274,3 @@ 'use strict'; | ||
// https://github.wdf.sap.corp/cdx/cds-compiler/issues/837 | ||
// cds-compiler#837 | ||
// add check here for @Analytics.Measure and @Aggregation.default | ||
@@ -277,0 +277,0 @@ // @Analytics has scope element |
@@ -92,2 +92,3 @@ 'use strict'; | ||
const { error, warning, info, signal } = alerts(csn, options); | ||
const { error: _error, warning: _warning, info: _info } = alerts.makeMessageFunction(csn, options); | ||
@@ -160,3 +161,3 @@ // the new transformer works only with new CSN | ||
if (validKey.length && !(validFrom.length && validTo.length)) { | ||
signal(error`@cds.valid.key was used but @cds.valid.from and @cds.valid.to are missing`, path); | ||
_error(null, path, '@cds.valid.key was used but @cds.valid.from and @cds.valid.to are missing'); | ||
} | ||
@@ -205,3 +206,3 @@ }); | ||
if (!finalTypeDef.type) { | ||
signal(error`"${defName}" has no final type`, ['definitions', defName]); | ||
_error(null, ['definitions', defName], { name: defName }, `${ defName } has no final type`); | ||
return; | ||
@@ -214,3 +215,3 @@ } | ||
} catch (ex) { | ||
signal(error`"${defName}" final base type not found`, ['definitions', defName]); | ||
_error(null, ['definitions', defName], { name: defName }, `Final base type of ${ defName } not found`); | ||
return | ||
@@ -311,3 +312,4 @@ } | ||
if (!isArtifactInSomeService(defName, services)) { | ||
signal(warning`Ignoring annotation "@odata.draft.enabled" - artifact "${defName}" is not part of a service`, ['definitions', defName]); | ||
_warning(null, ['definitions', defName], { art: defName }, | ||
`Ignoring annotation "@odata.draft.enabled" because artifact "${ defName }" is not part of a service`); | ||
} | ||
@@ -337,3 +339,4 @@ else { | ||
if (def.kind === 'type' && options.toOdata.version === 'v2') { | ||
signal(warning`"${defName}.${memberName}": Structured types must not contain associations for OData V2`, path); | ||
_warning(null, path, | ||
`"${defName}.${memberName}": Structured types must not contain associations for OData V2`); | ||
} | ||
@@ -344,3 +347,4 @@ } | ||
if (options.toOdata.version === 'v2') { | ||
signal(error`"${defName}.${memberName}": Element must not be an "array of" for OData V2`, path); | ||
_error(null, path, | ||
`"${defName}.${memberName}": Element must not be an "array of" for OData V2`); | ||
} | ||
@@ -366,6 +370,4 @@ else if (['entity', 'view'].includes(def.kind)) { | ||
if (member['@Analytics.Measure'] && !member['@Aggregation.default']) { | ||
signal( | ||
info`'@Analytics.Measure' expects '@Aggregation.default' to be assigned as well in element '${defName}.${memberName}'`, | ||
// ['definitions', defName, 'elements', memberName] | ||
path | ||
_info(null, path, // ['definitions', defName, 'elements', memberName] | ||
`'@Analytics.Measure' expects '@Aggregation.default' to be assigned as well in element '${defName}.${memberName}'`, | ||
); | ||
@@ -415,3 +417,4 @@ } | ||
if (illV2Prefix.test(elemName)) { | ||
signal(error`"${defName}.${elemName}: Element name must not begin with '${elemName[0]}' for OData V2`, ['definitions', defName, 'elements', elemName]); | ||
_error(null, ['definitions', defName, 'elements', elemName], | ||
`"${defName}.${elemName}: Element name must not begin with '${elemName[0]}' for OData V2`); | ||
} | ||
@@ -430,3 +433,3 @@ } | ||
if (mediaTypes.length > 1) { | ||
signal(error`"${defName}: Multiple elements [${mediaTypes.map(e => e[0]).join(', ')}] annotated with '@Core.MediaType', OData V2 allows only one`, ['definitions', defName]); | ||
_error(null, ['definitions', defName], `"${defName}: Multiple elements [${mediaTypes.map(e => e[0]).join(', ')}] annotated with '@Core.MediaType', OData V2 allows only one`); | ||
} | ||
@@ -439,3 +442,3 @@ } | ||
if (!allowedTypes.includes(e[1].type)) { | ||
signal(error`"${defName}.${e[0]}": Element annotated with '@Core.MediaType' must be of either type "${allowedTypes.join(', ')}"`, ['definitions', defName, 'elements', e[0]]); | ||
_error(null, ['definitions', defName, 'elements', e[0]], `"${defName}.${e[0]}": Element annotated with '@Core.MediaType' must be of either type "${allowedTypes.join(', ')}"`); | ||
} | ||
@@ -521,3 +524,3 @@ }); | ||
rewriteCapabilities = false; | ||
signal(warning`"@readonly" and "@insertonly" cannot be assigned in combination`, path); | ||
_warning(null, path, '"@readonly" and "@insertonly" cannot be assigned in combination'); | ||
} | ||
@@ -561,2 +564,18 @@ for (let name in node) { | ||
} | ||
if (name === '@assert.format') setAnnotation(node, '@Validation.Pattern', node['@assert.format']); | ||
if (name === '@assert.range') { | ||
if (Array.isArray(node['@assert.range']) && node['@assert.range'].length === 2) { | ||
setAnnotation(node, '@Validation.Minimum', node['@assert.range'][0]); | ||
setAnnotation(node, '@Validation.Maximum', node['@assert.range'][1]); | ||
} else if (node.enum) { | ||
let enumValue = Object.keys(node.enum).map( enumName => { | ||
let result = { '@Core.SymbolicName': enumName }; | ||
if (node.enum[enumName].val) result.Value = node.enum[enumName].val; | ||
return result; | ||
}); | ||
setAnnotation(node, '@Validation.AllowedValues', enumValue); | ||
} | ||
} | ||
} | ||
@@ -573,3 +592,3 @@ } | ||
if (typeof node[name] !== 'boolean' && typeof node[name] !== 'string') { | ||
signal(warning`Annotation "${name}" must have a string or boolean value`, defPath); | ||
_warning(null, defPath, { name }, `Annotation "${ name }" must have a string or boolean value`); | ||
} | ||
@@ -648,3 +667,3 @@ } | ||
if (keys.length !== 1) { | ||
signal(warning`"${artifactName}": "@odata.draft.enabled" - Entity should expose exactly one key element`, ['definitions', artifactName]); | ||
_warning(null, ['definitions', artifactName], `"${artifactName}": "@odata.draft.enabled" - Entity should expose exactly one key element`); | ||
} | ||
@@ -656,3 +675,3 @@ | ||
if (uuidCount === 0) { | ||
signal(warning`"${artifactName}": "@odata.draft.enabled" - Entity key element should be of type "cds.UUID"`, ['definitions', artifactName]); | ||
_warning(null, ['definitions', artifactName], `"${artifactName}": "@odata.draft.enabled" - Entity key element should be of type "cds.UUID"`); | ||
} | ||
@@ -668,3 +687,4 @@ | ||
if (draftAdminDataProjection.kind !== 'entity' || !draftAdminDataProjection.elements['DraftUUID']) { | ||
signal(error`Generated entity "${draftAdminDataProjectionName}" conflicts with existing artifact`, ['definitions', draftAdminDataProjectionName]); | ||
_error(null, ['definitions', draftAdminDataProjectionName], { name: draftAdminDataProjectionName }, | ||
`Generated entity "${ draftAdminDataProjectionName }" conflicts with existing artifact`); | ||
} | ||
@@ -749,7 +769,7 @@ // Generate the annotations describing the draft actions (only draft roots can be activated/edited) | ||
if (hasBoolAnnotation(draftNode, '@odata.draft.enabled', true)) { | ||
signal(error`"${artifactName}.${elemName}": Composition in draft-enabled entity cannot lead to another entity with "@odata.draft.enabled"`, ['definitions', artifactName, 'elements', elemName]); | ||
_error(null, ['definitions', artifactName, 'elements', elemName], `"${artifactName}.${elemName}": Composition in draft-enabled entity cannot lead to another entity with "@odata.draft.enabled"`); | ||
} | ||
// Ignore composition if not part of a service | ||
else if (!getServiceName(elem.target)) { | ||
signal(warning`Target "${elem.target}" of composition "${artifactName}.${elemName}" cannot be a draft node because it is not part of a service`, ['definitions', artifactName, 'elements', elemName]); | ||
_warning(null, ['definitions', artifactName, 'elements', elemName], `Target "${elem.target}" of composition "${artifactName}.${elemName}" cannot be a draft node because it is not part of a service`); | ||
continue; | ||
@@ -789,3 +809,3 @@ } | ||
// This must be done before foreign keys are calculated and the annotations are propagated | ||
// to them. This will make sure that association and all its foreing keys are annotated with | ||
// to them. This will make sure that association and all its foreign keys are annotated with | ||
// Common.ValueList in the final EDM. | ||
@@ -792,0 +812,0 @@ // Do this only if the association is navigable and the enclosing artifact is |
@@ -24,6 +24,5 @@ 'use strict'; | ||
forEach(foreignKeyElements, (_name, foreignKeyElement) => { | ||
transformers.toFinalBaseType(foreignKeyElements); | ||
copyAnnotations(element, foreignKeyElement, true) | ||
copyAnnotations(element, foreignKeyElement, true); | ||
}) | ||
arrayOfGeneratedForeignKeyNames.push(...Object.keys(foreignKeyElements)) | ||
arrayOfGeneratedForeignKeyNames.push(...Object.keys(foreignKeyElements)); | ||
} | ||
@@ -30,0 +29,0 @@ return arrayOfGeneratedForeignKeyNames; |
@@ -8,3 +8,3 @@ 'use strict'; | ||
const { setProp } = require('../../base/model'); | ||
const { setProp, isBetaEnabled } = require('../../base/model'); | ||
const { defNameWithoutServiceName, getServiceOfArtifact, isArtifactInService, isArtifactInSomeService } = require('./utils'); | ||
@@ -139,2 +139,7 @@ const { cloneCsn, isBuiltinType, forEachDefinition, forEachMember, forEachMemberRecursively } = require('../../model/csnUtils'); | ||
let newTypeId = node.type ? `${node.type.replace(/\./g, '_')}` : artificialName; | ||
let newTypeFullName = | ||
(structuredOData && (isBetaEnabled(options, 'odataProxies') && (options.toOdata.odataProxies || options.toOdata.odataXServiceRefs))) | ||
? getNewTypeName(node.type || artificialNameWitoutService(artificialName, service), !node.type) | ||
: `${service}.${newTypeId}`; | ||
// With the redirection of sub elements, the element which is of named type with an association is now expanded and contains the association | ||
@@ -144,3 +149,2 @@ // and the new target. Consequently, we now have both type and elements properties in this case, and the elements should be taken as a priority | ||
let newTypeElements = (node.type && node.elements) ? node.elements : typeDef.elements; | ||
let newTypeFullName = `${service}.${newTypeId}`; | ||
@@ -179,2 +183,32 @@ let newType = exposeStructType(newTypeFullName, newTypeElements, memberName, path); | ||
function artificialNameWitoutService(name, service) { | ||
return name.replace(`${service}_`, ''); | ||
} | ||
function getNewTypeName(typeName, isAnonym = false) { | ||
if (isArtifactInSomeService(typeName, services) && !isAnonym) { | ||
// what is the name of the cross service references by the type | ||
let crossServiceName = getServiceOfArtifact(typeName, services); | ||
let typeWithoutServiceName = defNameWithoutServiceName(typeName, crossServiceName); | ||
if (typeWithoutServiceName.startsWith('external.')) { | ||
if (!csn.definitions[`${service}.external`]) | ||
csn.definitions[`${service}.external`] = { kind: 'context' }; | ||
return `${service}.${typeWithoutServiceName}`; | ||
} | ||
let crossServTypeDefName = `${service}.${crossServiceName}`; | ||
// is there such subContext already, if not -> create one | ||
if (!csn.definitions[crossServTypeDefName]) | ||
csn.definitions[crossServTypeDefName] = { kind: 'context' }; | ||
// return the new type name | ||
return `${crossServTypeDefName}.${typeWithoutServiceName.replace(/\./g, '_')}`; | ||
} else { | ||
let typeNamespace = csnUtils.getNamespaceOfArtifact(typeName); | ||
let contextName = typeNamespace ?`${service}.${typeNamespace}` : `${service}.root`; | ||
if (!csn.definitions[`${contextName}`]) | ||
csn.definitions[`${contextName}`] = { kind: 'context' }; | ||
// return the new type name | ||
return `${contextName}.${nameWithoutNamespace(typeName).replace(/\./g, '_')}`; | ||
} | ||
} | ||
/** | ||
@@ -243,3 +277,3 @@ * Expose a new type definition in the 'definitions' of the CSN and return that type(reusing such a type | ||
if (!isArtifactInService(node.type, service)) { | ||
let typeId = `${service}.${node.type}`; | ||
let typeId = `${service}.${node.type.replace(/\./g, '_')}`; | ||
let newType = exposeArrayedType(node.items || csnUtils.getFinalTypeDef(node.type).items, typeId); | ||
@@ -293,2 +327,7 @@ // When we have in the model something like: | ||
} | ||
function nameWithoutNamespace(name) { | ||
let namespace = csnUtils.getNamespaceOfArtifact(name); | ||
return name.replace(`${namespace}.`, ''); | ||
} | ||
} |
@@ -0,1 +1,3 @@ | ||
'use strict'; | ||
const { setProp, forEachDefinition, forEachGeneric, forEachMemberRecursively } = require('../base/model'); | ||
@@ -2,0 +4,0 @@ const { CompilationError, hasErrors, sortMessages } = require('../base/messages'); |
@@ -5,3 +5,3 @@ 'use strict'; | ||
// different backends. | ||
// The sibling of model/tranform/TransformUtil.js whixh works with compacted new CSN. | ||
// The sibling of model/transform/TransformUtil.js which works with compacted new CSN. | ||
@@ -11,7 +11,7 @@ const alerts = require('../base/alerts'); | ||
const { setProp } = require('../base/model'); | ||
const csnRefs = require('../model/csnRefs'); | ||
const { csnRefs } = require('../model/csnRefs'); | ||
// eslint-disable-next-line no-unused-vars | ||
const { copyAnnotations, printableName, hasBoolAnnotation, forEachDefinition } = require('../model/modelUtils'); | ||
const { cloneCsn, forEachMemberRecursively, forEachGeneric, forAllQueries, | ||
const { cloneCsn, forEachMemberRecursively, forEachGeneric, forAllQueries, | ||
forEachRef, getUtils, isBuiltinType } = require('../model/csnUtils'); | ||
@@ -1232,3 +1232,3 @@ | ||
if(art) { | ||
// items in ON conds are illegal but this should be checked elsewere | ||
// items in ON conds are illegal but this should be checked elsewhere | ||
const elements = art.elements || (art.items && art.items.elements); | ||
@@ -1244,3 +1244,3 @@ return (elements || art.target && art.keys) | ||
const type = art.type || (art.items && art.items.type); | ||
// items in ON conds are illegal but this should be checked elsewere | ||
// items in ON conds are illegal but this should be checked elsewhere | ||
const elements = art.elements || (art.items && art.items.elements); | ||
@@ -1247,0 +1247,0 @@ // @Core.Computed has no type |
@@ -0,1 +1,3 @@ | ||
'use strict'; | ||
/** | ||
@@ -2,0 +4,0 @@ * dictionary utils |
{ | ||
"root": true, | ||
"extends": "../../.eslintrc-ydkjsi.json" | ||
"extends": "../../.eslintrc-ydkjsi.json", | ||
"rules": { | ||
"prefer-const": "warn" | ||
} | ||
} |
@@ -8,7 +8,7 @@ // | ||
// | ||
// node myapp.js # stdout.isTTY: true, stderr.isTTY: true | ||
// node myapp.js | cat # stdout.isTTY: undefined, stderr.isTTY: true | ||
// node myapp.js |& cat # stdout.isTTY: undefined, stderr.isTTY: undefined | ||
// node myapp.js > out.txt # stdout.isTTY: undefined, stderr.isTTY: true | ||
// node myapp.js 2> out.txt # stdout.isTTY: true, stderr.isTTY: undefined | ||
// node myApp.js # stdout.isTTY: true, stderr.isTTY: true | ||
// node myApp.js | cat # stdout.isTTY: undefined, stderr.isTTY: true | ||
// node myApp.js |& cat # stdout.isTTY: undefined, stderr.isTTY: undefined | ||
// node myApp.js > out.txt # stdout.isTTY: undefined, stderr.isTTY: true | ||
// node myApp.js 2> out.txt # stdout.isTTY: true, stderr.isTTY: undefined | ||
// | ||
@@ -15,0 +15,0 @@ |
{ | ||
"name": "@sap/cds-compiler", | ||
"version": "1.43.0", | ||
"version": "1.45.0", | ||
"description": "CDS (Core Data Services) compiler and backends", | ||
@@ -5,0 +5,0 @@ "homepage": "https://cap.cloud.sap/", |
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
3790997
127
72619
10