jsonld - npm Package Compare versions

Comparing version 1.3.0 to 1.4.0


CHANGELOG.md
# jsonld ChangeLog
## 1.4.0 - 2019-01-05
### Changed
- PhantomJS is deprecated, now using headless Chrome with Karma.
- **NOTE**: Using headless Chrome vs PhantomJS may cause newer JS features to
slip into releases without proper support for older runtimes and browsers.
Please report such issues and they will be addressed.
- Update webpack and babel.
- Use CommonJS style in main file.
- **NOTE**: This change *might* cause problems if code was somehow still
  using the long-deprecated `jsonldjs` global. Anything that depended on that
  global should switch to a bundler such as webpack, or use the `jsonld`
  global from the distributed bundle.
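
For anyone still depending on that global, here is a minimal migration sketch (the `doc`/`context` values and the `dist/jsonld.min.js` path are illustrative, not taken from the changelog):

```js
// After 1.4.0: load jsonld through a bundler (webpack, etc.) or Node.js
// instead of relying on the long-deprecated `jsonldjs` global.
const jsonld = require('jsonld');

async function compactExample(doc, context) {
  // jsonld.compact returns a Promise when no callback is passed.
  return jsonld.compact(doc, context);
}

// Alternatively, the distributed browser bundle (e.g. dist/jsonld.min.js)
// exposes a `jsonld` global, so the same call works from a <script> tag:
//   jsonld.compact(doc, context).then(compacted => { /* ... */ });
```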
## 1.3.0 - 2019-01-04

@@ -4,0 +18,0 @@


dist/node6/lib/ActiveContextCache.js

@@ -6,7 +6,6 @@ /*

-var _require = require('./util');
-const clone = _require.clone;
+const {
+  clone
+} = require('./util');
 module.exports = class ActiveContextCache {

@@ -18,5 +17,3 @@ /**

 */
-constructor() {
-  let size = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 100;
+constructor(size = 100) {
 this.order = [];

@@ -29,2 +26,3 @@ this.cache = new Map();

 const level1 = this.cache.get(activeCtx);
 if (level1) {

@@ -35,2 +33,3 @@ const key = JSON.stringify(localCtx);

 }
 return null;

@@ -44,5 +43,10 @@ }

 }
 const key = JSON.stringify(localCtx);
-this.order.push({ activeCtx, localCtx: key });
+this.order.push({
+  activeCtx,
+  localCtx: key
+});
 let level1 = this.cache.get(activeCtx);
 if (!level1) {

@@ -52,4 +56,6 @@ level1 = new Map();

 }
 level1.set(key, clone(result));
 }
};

@@ -6,40 +6,36 @@ /*

-function _toArray(arr) { return Array.isArray(arr) ? arr : Array.from(arr); }
 const JsonLdError = require('./JsonLdError');
-var _require = require('./types');
-const _isArray = _require.isArray,
-  _isObject = _require.isObject,
-  _isString = _require.isString;
+const {
+  isArray: _isArray,
+  isObject: _isObject,
+  isString: _isString
+} = require('./types');
-var _require2 = require('./graphTypes');
-const _isList = _require2.isList,
-  _isValue = _require2.isValue,
-  _isGraph = _require2.isGraph,
-  _isSimpleGraph = _require2.isSimpleGraph,
-  _isSubjectReference = _require2.isSubjectReference;
+const {
+  isList: _isList,
+  isValue: _isValue,
+  isGraph: _isGraph,
+  isSimpleGraph: _isSimpleGraph,
+  isSubjectReference: _isSubjectReference
+} = require('./graphTypes');
-var _require3 = require('./context');
-const _expandIri = _require3.expandIri,
-  _getContextValue = _require3.getContextValue,
-  _isKeyword = _require3.isKeyword,
-  _processContext = _require3.process;
+const {
+  expandIri: _expandIri,
+  getContextValue: _getContextValue,
+  isKeyword: _isKeyword,
+  process: _processContext
+} = require('./context');
-var _require4 = require('./url');
-const _removeBase = _require4.removeBase;
+const {
+  removeBase: _removeBase
+} = require('./url');
-var _require5 = require('./util');
-const _addValue = _require5.addValue,
-  _compareShortestLeast = _require5.compareShortestLeast;
+const {
+  addValue: _addValue,
+  compareShortestLeast: _compareShortestLeast
+} = require('./util');
 const api = {};
 module.exports = api;
/**

@@ -58,15 +54,14 @@ * Recursively compacts an element using the given active context. All values

 */
-api.compact = (_ref) => {
-  let activeCtx = _ref.activeCtx;
-  var _ref$activeProperty = _ref.activeProperty;
-  let activeProperty = _ref$activeProperty === undefined ? null : _ref$activeProperty,
-    element = _ref.element;
-  var _ref$options = _ref.options;
-  let options = _ref$options === undefined ? {} : _ref$options;
-  var _ref$compactionMap = _ref.compactionMap;
-  let compactionMap = _ref$compactionMap === undefined ? () => undefined : _ref$compactionMap;
+api.compact = ({
+  activeCtx,
+  activeProperty = null,
+  element,
+  options = {},
+  compactionMap = () => undefined
+}) => {
 // recursively compact array
 if (_isArray(element)) {
 let rval = [];
 for (let i = 0; i < element.length; ++i) {

@@ -81,2 +76,3 @@ // compact, dropping any null values unless custom mapped

});
if (compacted === null) {

@@ -92,2 +88,3 @@ // TODO: use `await` to support async

});
if (compacted === undefined) {

@@ -97,7 +94,10 @@ continue;

}
rval.push(compacted);
}
if (options.compactArrays && rval.length === 1) {
// use single element if no container is specified
const container = _getContextValue(activeCtx, activeProperty, '@container') || [];
if (container.length === 0) {

@@ -107,12 +107,18 @@ rval = rval[0];

}
return rval;
}
} // use any scoped context on activeProperty
// use any scoped context on activeProperty
const ctx = _getContextValue(activeCtx, activeProperty, '@context');
if (ctx) {
activeCtx = _processContext({ activeCtx, localCtx: ctx, options });
}
activeCtx = _processContext({
activeCtx,
localCtx: ctx,
options
});
} // recursively compact object
// recursively compact object
if (_isObject(element)) {

@@ -122,2 +128,3 @@ if (options.link && '@id' in element && element['@id'] in options.link) {

const linked = options.link[element['@id']];
for (let i = 0; i < linked.length; ++i) {

@@ -128,7 +135,12 @@ if (linked[i].expanded === element) {

}
}
} // do value compaction on @values and subject references
// do value compaction on @values and subject references
if (_isValue(element) || _isSubjectReference(element)) {
const rval = api.compactValue({ activeCtx, activeProperty, value: element });
const rval = api.compactValue({
activeCtx,
activeProperty,
value: element
});
if (options.link && _isSubjectReference(element)) {

@@ -139,10 +151,14 @@ // store linked element

}
options.link[element['@id']].push({ expanded: element, compacted: rval });
options.link[element['@id']].push({
expanded: element,
compacted: rval
});
}
return rval;
}
} // FIXME: avoid misuse of active property as an expanded property?
// FIXME: avoid misuse of active property as an expanded property?
const insideReverse = activeProperty === '@reverse';
const rval = {};

@@ -155,28 +171,44 @@

}
options.link[element['@id']].push({ expanded: element, compacted: rval });
}
// apply any context defined on an alias of @type
options.link[element['@id']].push({
expanded: element,
compacted: rval
});
} // apply any context defined on an alias of @type
// if key is @type and any compacted value is a term having a local
// context, overlay that context
let types = element['@type'] || [];
if (types.length > 1) {
types = Array.from(types).sort();
}
for (const type of types) {
const compactedType = api.compactIri({ activeCtx, iri: type, relativeTo: { vocab: true } });
const compactedType = api.compactIri({
activeCtx,
iri: type,
relativeTo: {
vocab: true
}
}); // Use any scoped context defined on this value
// Use any scoped context defined on this value
const ctx = _getContextValue(activeCtx, compactedType, '@context');
if (ctx) {
activeCtx = _processContext({ activeCtx, localCtx: ctx, options });
activeCtx = _processContext({
activeCtx,
localCtx: ctx,
options
});
}
}
} // process element keys in order
// process element keys in order
const keys = Object.keys(element).sort();
for (const expandedProperty of keys) {
const expandedValue = element[expandedProperty];
const expandedValue = element[expandedProperty]; // compact @id and @type(s)
// compact @id and @type(s)
if (expandedProperty === '@id' || expandedProperty === '@type') {

@@ -190,14 +222,25 @@ let compactedValue = [].concat(expandedValue).map(expandedIri => api.compactIri({

}));
if (compactedValue.length === 1) {
compactedValue = compactedValue[0];
}
} // use keyword alias and add value
// use keyword alias and add value
const alias = api.compactIri({ activeCtx, iri: expandedProperty, relativeTo: { vocab: true } });
const alias = api.compactIri({
activeCtx,
iri: expandedProperty,
relativeTo: {
vocab: true
}
});
const isArray = _isArray(compactedValue) && expandedValue.length === 0;
_addValue(rval, alias, compactedValue, { propertyIsArray: isArray });
_addValue(rval, alias, compactedValue, {
propertyIsArray: isArray
});
continue;
}
} // handle @reverse
// handle @reverse
if (expandedProperty === '@reverse') {

@@ -211,5 +254,4 @@ // recursively compact expanded value

compactionMap
});
}); // handle double-reversed properties
// handle double-reversed properties
for (const compactedProperty in compactedValue) {

@@ -220,3 +262,7 @@ if (activeCtx.mappings[compactedProperty] && activeCtx.mappings[compactedProperty].reverse) {

const useArray = container.includes('@set') || !options.compactArrays;
_addValue(rval, compactedProperty, value, { propertyIsArray: useArray });
_addValue(rval, compactedProperty, value, {
propertyIsArray: useArray
});
delete compactedValue[compactedProperty];

@@ -231,4 +277,7 @@ }

iri: expandedProperty,
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
});
_addValue(rval, alias, compactedValue);

@@ -247,3 +296,4 @@ }

options,
compactionMap });
compactionMap
});

@@ -253,24 +303,30 @@ if (!(_isArray(compactedValue) && compactedValue.length === 0)) {

}
continue;
}
} // handle @index property
// handle @index property
if (expandedProperty === '@index') {
// drop @index if inside an @index container
const container = _getContextValue(activeCtx, activeProperty, '@container') || [];
if (container.includes('@index')) {
continue;
}
} // use keyword alias and add value
// use keyword alias and add value
const alias = api.compactIri({
activeCtx,
iri: expandedProperty,
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
});
_addValue(rval, alias, expandedValue);
continue;
}
} // skip array processing for keywords that aren't @graph or @list
// skip array processing for keywords that aren't @graph or @list
if (expandedProperty !== '@graph' && expandedProperty !== '@list' && _isKeyword(expandedProperty)) {

@@ -281,14 +337,18 @@ // use keyword alias and add value as is

iri: expandedProperty,
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
});
_addValue(rval, alias, expandedValue);
continue;
}
} // Note: expanded value must be an array due to expansion algorithm.
// Note: expanded value must be an array due to expansion algorithm.
if (!_isArray(expandedValue)) {
throw new JsonLdError('JSON-LD expansion error; expanded value must be an array.', 'jsonld.SyntaxError');
}
} // preserve empty arrays
// preserve empty arrays
if (expandedValue.length === 0) {

@@ -299,3 +359,5 @@ const itemActiveProperty = api.compactIri({

value: expandedValue,
relativeTo: { vocab: true },
relativeTo: {
vocab: true
},
reverse: insideReverse

@@ -305,15 +367,19 @@ });

let nestResult = rval;
if (nestProperty) {
_checkNestProperty(activeCtx, nestProperty);
if (!_isObject(rval[nestProperty])) {
rval[nestProperty] = {};
}
nestResult = rval[nestProperty];
}
_addValue(nestResult, itemActiveProperty, expandedValue, {
propertyIsArray: true
});
}
} // recusively process array values
// recusively process array values
for (const expandedItem of expandedValue) {

@@ -325,24 +391,30 @@ // compact property and get container type

value: expandedItem,
relativeTo: { vocab: true },
relativeTo: {
vocab: true
},
reverse: insideReverse
});
}); // if itemActiveProperty is a @nest property, add values to nestResult,
// otherwise rval
// if itemActiveProperty is a @nest property, add values to nestResult,
// otherwise rval
const nestProperty = itemActiveProperty in activeCtx.mappings ? activeCtx.mappings[itemActiveProperty]['@nest'] : null;
let nestResult = rval;
if (nestProperty) {
_checkNestProperty(activeCtx, nestProperty);
if (!_isObject(rval[nestProperty])) {
rval[nestProperty] = {};
}
nestResult = rval[nestProperty];
}
const container = _getContextValue(activeCtx, itemActiveProperty, '@container') || [];
const container = _getContextValue(activeCtx, itemActiveProperty, '@container') || []; // get simple @graph or @list value if appropriate
// get simple @graph or @list value if appropriate
const isGraph = _isGraph(expandedItem);
const isList = _isList(expandedItem);
let inner;
if (isList) {

@@ -352,5 +424,5 @@ inner = expandedItem['@list'];

inner = expandedItem['@graph'];
}
} // recursively compact expanded item
// recursively compact expanded item
let compactedItem = api.compact({

@@ -362,5 +434,4 @@ activeCtx,

compactionMap
});
}); // handle @list
// handle @list
if (isList) {

@@ -378,7 +449,8 @@ // ensure @list value is an array

iri: '@list',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})]: compactedItem
};
}; // include @index from expanded @list, if any
// include @index from expanded @list, if any
if ('@index' in expandedItem) {

@@ -388,3 +460,5 @@ compactedItem[api.compactIri({

iri: '@index',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})] = expandedItem['@index'];

@@ -394,7 +468,9 @@ }

// can't use @list container for more than 1 list
throw new JsonLdError('JSON-LD compact error; property has a "@list" @container ' + 'rule but there is more than a single @list that matches ' + 'the compacted term in the document. Compaction might mix ' + 'unwanted items into the list.', 'jsonld.SyntaxError', { code: 'compaction to list of lists' });
throw new JsonLdError('JSON-LD compact error; property has a "@list" @container ' + 'rule but there is more than a single @list that matches ' + 'the compacted term in the document. Compaction might mix ' + 'unwanted items into the list.', 'jsonld.SyntaxError', {
code: 'compaction to list of lists'
});
}
}
} // Graph object compaction cases
// Graph object compaction cases
if (isGraph) {

@@ -404,2 +480,3 @@ if (container.includes('@graph') && (container.includes('@id') || container.includes('@index') && _isSimpleGraph(expandedItem))) {

let mapObject;
if (itemActiveProperty in nestResult) {

@@ -409,7 +486,10 @@ mapObject = nestResult[itemActiveProperty];

nestResult[itemActiveProperty] = mapObject = {};
}
} // index on @id or @index or alias of @none
// index on @id or @index or alias of @none
const key = (container.includes('@id') ? expandedItem['@id'] : expandedItem['@index']) || api.compactIri({ activeCtx, iri: '@none', vocab: true });
// add compactedItem to map, using value of `@id` or a new blank
const key = (container.includes('@id') ? expandedItem['@id'] : expandedItem['@index']) || api.compactIri({
activeCtx,
iri: '@none',
vocab: true
}); // add compactedItem to map, using value of `@id` or a new blank
// node identifier

@@ -432,2 +512,3 @@

}
compactedItem = {

@@ -437,7 +518,8 @@ [api.compactIri({

iri: '@graph',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})]: compactedItem
};
}; // include @id from expanded graph, if any
// include @id from expanded graph, if any
if ('@id' in expandedItem) {

@@ -447,7 +529,9 @@ compactedItem[api.compactIri({

iri: '@id',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})] = expandedItem['@id'];
}
} // include @index from expanded graph, if any
// include @index from expanded graph, if any
if ('@index' in expandedItem) {

@@ -457,5 +541,8 @@ compactedItem[api.compactIri({

iri: '@index',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})] = expandedItem['@index'];
}
_addValue(nestResult, itemActiveProperty, compactedItem, {

@@ -469,2 +556,3 @@ propertyIsArray: !options.compactArrays || container.includes('@set')

let mapObject;
if (itemActiveProperty in nestResult) {

@@ -477,2 +565,3 @@ mapObject = nestResult[itemActiveProperty];

let key;
if (container.includes('@language')) {

@@ -484,2 +573,3 @@ // if container is a language map, simplify compacted value to

}
key = expandedItem['@language'];

@@ -489,3 +579,7 @@ } else if (container.includes('@index')) {

} else if (container.includes('@id')) {
const idKey = api.compactIri({ activeCtx, iri: '@id', vocab: true });
const idKey = api.compactIri({
activeCtx,
iri: '@id',
vocab: true
});
key = compactedItem[idKey];

@@ -500,10 +594,4 @@ delete compactedItem[idKey];

 let types;
-var _concat = [].concat(compactedItem[typeKey] || []);
-var _concat2 = _toArray(_concat);
-key = _concat2[0];
-types = _concat2.slice(1);
+[key, ...types] = [].concat(compactedItem[typeKey] || []);
switch (types.length) {

@@ -513,5 +601,7 @@ case 0:

break;
case 1:
compactedItem[typeKey] = types[0];
break;
default:

@@ -521,10 +611,15 @@ compactedItem[typeKey] = types;

}
}
} // if compacting this value which has no key, index on @none
// if compacting this value which has no key, index on @none
if (!key) {
key = api.compactIri({ activeCtx, iri: '@none', vocab: true });
}
// add compact value to map object using key from expanded value
key = api.compactIri({
activeCtx,
iri: '@none',
vocab: true
});
} // add compact value to map object using key from expanded value
// based on the container type
_addValue(mapObject, key, compactedItem, {

@@ -537,6 +632,7 @@ propertyIsArray: container.includes('@set')

// array, or key is @graph
const isArray = !options.compactArrays || container.includes('@set') || container.includes('@list') || _isArray(compactedItem) && compactedItem.length === 0 || expandedProperty === '@list' || expandedProperty === '@graph';
const isArray = !options.compactArrays || container.includes('@set') || container.includes('@list') || _isArray(compactedItem) && compactedItem.length === 0 || expandedProperty === '@list' || expandedProperty === '@graph'; // add compact value
// add compact value
_addValue(nestResult, itemActiveProperty, compactedItem, { propertyIsArray: isArray });
_addValue(nestResult, itemActiveProperty, compactedItem, {
propertyIsArray: isArray
});
}

@@ -547,8 +643,7 @@ }

return rval;
}
} // only primitives remain which are already compact
// only primitives remain which are already compact
return element;
};
/**

@@ -567,12 +662,13 @@ * Compacts an IRI or keyword into a term or prefix if it can be. If the

*/
-api.compactIri = (_ref2) => {
-  let activeCtx = _ref2.activeCtx,
-    iri = _ref2.iri;
-  var _ref2$value = _ref2.value;
-  let value = _ref2$value === undefined ? null : _ref2$value;
-  var _ref2$relativeTo = _ref2.relativeTo;
-  let relativeTo = _ref2$relativeTo === undefined ? { vocab: false } : _ref2$relativeTo;
-  var _ref2$reverse = _ref2.reverse;
-  let reverse = _ref2$reverse === undefined ? false : _ref2$reverse;
+api.compactIri = ({
+  activeCtx,
+  iri,
+  value = null,
+  relativeTo = {
+    vocab: false
+  },
+  reverse = false
+}) => {
// can't compact null

@@ -583,26 +679,25 @@ if (iri === null) {

const inverseCtx = activeCtx.getInverse();
const inverseCtx = activeCtx.getInverse(); // if term is a keyword, it may be compacted to a simple alias
// if term is a keyword, it may be compacted to a simple alias
if (_isKeyword(iri) && iri in inverseCtx && '@none' in inverseCtx[iri] && '@type' in inverseCtx[iri]['@none'] && '@none' in inverseCtx[iri]['@none']['@type']) {
return inverseCtx[iri]['@none']['@type']['@none'];
}
} // use inverse context to pick a term if iri is relative to vocab
// use inverse context to pick a term if iri is relative to vocab
if (relativeTo.vocab && iri in inverseCtx) {
const defaultLanguage = activeCtx['@language'] || '@none';
const defaultLanguage = activeCtx['@language'] || '@none'; // prefer @index if available in value
// prefer @index if available in value
const containers = [];
if (_isObject(value) && '@index' in value && !('@graph' in value)) {
containers.push('@index', '@index@set');
}
} // if value is a preserve object, use its value
// if value is a preserve object, use its value
if (_isObject(value) && '@preserve' in value) {
value = value['@preserve'][0];
}
} // prefer most specific container including @graph, prefering @set
// variations
// prefer most specific container including @graph, prefering @set
// variations
if (_isGraph(value)) {

@@ -612,13 +707,16 @@ // favor indexmap if the graph is indexed

containers.push('@graph@index', '@graph@index@set', '@index', '@index@set');
}
// favor idmap if the graph is has an @id
} // favor idmap if the graph is has an @id
if ('@id' in value) {
containers.push('@graph@id', '@graph@id@set');
}
containers.push('@graph', '@graph@set', '@set');
// allow indexmap if the graph is not indexed
containers.push('@graph', '@graph@set', '@set'); // allow indexmap if the graph is not indexed
if (!('@index' in value)) {
containers.push('@graph@index', '@graph@index@set', '@index', '@index@set');
}
// allow idmap if the graph does not have an @id
} // allow idmap if the graph does not have an @id
if (!('@id' in value)) {

@@ -629,5 +727,5 @@ containers.push('@graph@id', '@graph@id@set');

containers.push('@id', '@id@set', '@type', '@set@type');
}
} // defaults for term selection based on type/language
// defaults for term selection based on type/language
let typeOrLanguage = '@language';

@@ -646,3 +744,5 @@ let typeOrLanguageValue = '@null';

}
const list = value['@list'];
if (list.length === 0) {

@@ -656,2 +756,3 @@ // any empty list can be matched against any term that uses the

let commonType = null;
for (let i = 0; i < list.length; ++i) {

@@ -661,2 +762,3 @@ const item = list[i];

let itemType = '@none';
if (_isValue(item)) {

@@ -674,2 +776,3 @@ if ('@language' in item) {

}
if (commonLanguage === null) {

@@ -680,2 +783,3 @@ commonLanguage = itemLanguage;

}
if (commonType === null) {

@@ -685,5 +789,6 @@ commonType = itemType;

commonType = '@none';
}
// there are different languages and types in the list, so choose
} // there are different languages and types in the list, so choose
// the most generic term, no need to keep iterating the list
if (commonLanguage === '@none' && commonType === '@none') {

@@ -693,4 +798,6 @@ break;

}
commonLanguage = commonLanguage || '@none';
commonType = commonType || '@none';
if (commonType !== '@none') {

@@ -716,16 +823,16 @@ typeOrLanguage = '@type';

}
containers.push('@set');
}
} // do term selection
// do term selection
containers.push('@none');
// an index map can be used to index values using @none, so add as a low
containers.push('@none'); // an index map can be used to index values using @none, so add as a low
// priority
if (_isObject(value) && !('@index' in value)) {
// allow indexing even if no @index present
containers.push('@index', '@index@set');
}
} // values without type or language can use @language map
// values without type or language can use @language map
if (_isValue(value) && Object.keys(value).length === 1) {

@@ -737,8 +844,9 @@ // allow indexing even if no @index present

const term = _selectTerm(activeCtx, iri, value, containers, typeOrLanguage, typeOrLanguageValue);
if (term !== null) {
return term;
}
}
} // no term match, use @vocab if available
// no term match, use @vocab if available
if (relativeTo.vocab) {

@@ -748,5 +856,7 @@ if ('@vocab' in activeCtx) {

const vocab = activeCtx['@vocab'];
if (iri.indexOf(vocab) === 0 && iri !== vocab) {
// use suffix as relative iri if it is not a term in the active context
const suffix = iri.substr(vocab.length);
if (!(suffix in activeCtx.mappings)) {

@@ -757,22 +867,26 @@ return suffix;

}
}
} // no term or @vocab match, check for possible CURIEs
// no term or @vocab match, check for possible CURIEs
let choice = null;
// TODO: make FastCurieMap a class with a method to do this lookup
let choice = null; // TODO: make FastCurieMap a class with a method to do this lookup
const partialMatches = [];
let iriMap = activeCtx.fastCurieMap;
// check for partial matches of against `iri`, which means look until
let iriMap = activeCtx.fastCurieMap; // check for partial matches of against `iri`, which means look until
// iri.length - 1, not full length
const maxPartialLength = iri.length - 1;
for (let i = 0; i < maxPartialLength && iri[i] in iriMap; ++i) {
iriMap = iriMap[iri[i]];
if ('' in iriMap) {
partialMatches.push(iriMap[''][0]);
}
}
// check partial matches in reverse order to prefer longest ones first
} // check partial matches in reverse order to prefer longest ones first
for (let i = partialMatches.length - 1; i >= 0; --i) {
const entry = partialMatches[i];
const terms = entry.terms;
for (const term of terms) {

@@ -784,6 +898,5 @@ // a CURIE is usable if:

const curie = term + ':' + iri.substr(entry.iri.length);
const isUsableCurie = activeCtx.mappings[term]._prefix && (!(curie in activeCtx.mappings) || value === null && activeCtx.mappings[curie]['@id'] === iri);
const isUsableCurie = activeCtx.mappings[term]._prefix && (!(curie in activeCtx.mappings) || value === null && activeCtx.mappings[curie]['@id'] === iri); // select curie if it is shorter or the same length but lexicographically
// less than the current choice
// select curie if it is shorter or the same length but lexicographically
// less than the current choice
if (isUsableCurie && (choice === null || _compareShortestLeast(curie, choice) < 0)) {

@@ -793,18 +906,17 @@ choice = curie;

}
}
} // return chosen curie
// return chosen curie
if (choice !== null) {
return choice;
}
} // compact IRI relative to base
// compact IRI relative to base
if (!relativeTo.vocab) {
return _removeBase(activeCtx['@base'], iri);
}
} // return IRI as is
// return IRI as is
return iri;
};
/**

@@ -820,7 +932,9 @@ * Performs value compaction on an object with '@value' or '@id' as the only

*/
-api.compactValue = (_ref3) => {
-  let activeCtx = _ref3.activeCtx,
-    activeProperty = _ref3.activeProperty,
-    value = _ref3.value;
+api.compactValue = ({
+  activeCtx,
+  activeProperty,
+  value
+}) => {
// value is a @value

@@ -830,9 +944,9 @@ if (_isValue(value)) {

const type = _getContextValue(activeCtx, activeProperty, '@type');
const language = _getContextValue(activeCtx, activeProperty, '@language');
const container = _getContextValue(activeCtx, activeProperty, '@container') || [];
// whether or not the value has an @index that must be preserved
const preserveIndex = '@index' in value && !container.includes('@index');
const container = _getContextValue(activeCtx, activeProperty, '@container') || []; // whether or not the value has an @index that must be preserved
// if there's no @index to preserve ...
const preserveIndex = '@index' in value && !container.includes('@index'); // if there's no @index to preserve ...
if (!preserveIndex) {

@@ -843,13 +957,16 @@ // matching @type or @language specified in context, compact value

}
}
// return just the value of @value if all are true:
} // return just the value of @value if all are true:
// 1. @value is the only key or @index isn't being preserved
// 2. there is no default language or @value is not a string or
// the key has a mapping with a null @language
const keyCount = Object.keys(value).length;
const isValueOnlyKey = keyCount === 1 || keyCount === 2 && '@index' in value && !preserveIndex;
const hasDefaultLanguage = '@language' in activeCtx;
const isValueString = _isString(value['@value']);
const hasNullMapping = activeCtx.mappings[activeProperty] && activeCtx.mappings[activeProperty]['@language'] === null;
if (isValueOnlyKey && (!hasDefaultLanguage || !isValueString || hasNullMapping)) {

@@ -859,5 +976,4 @@ return value['@value'];

const rval = {};
const rval = {}; // preserve @index
// preserve @index
if (preserveIndex) {

@@ -867,3 +983,5 @@ rval[api.compactIri({

iri: '@index',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})] = value['@index'];

@@ -877,4 +995,12 @@ }

iri: '@type',
relativeTo: { vocab: true }
})] = api.compactIri({ activeCtx, iri: value['@type'], relativeTo: { vocab: true } });
relativeTo: {
vocab: true
}
})] = api.compactIri({
activeCtx,
iri: value['@type'],
relativeTo: {
vocab: true
}
});
} else if ('@language' in value) {

@@ -885,22 +1011,34 @@ // alias @language

iri: '@language',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})] = value['@language'];
}
} // alias @value
// alias @value
rval[api.compactIri({
activeCtx,
iri: '@value',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})] = value['@value'];
return rval;
}
} // value is a subject reference
// value is a subject reference
const expandedProperty = _expandIri(activeCtx, activeProperty, { vocab: true });
const expandedProperty = _expandIri(activeCtx, activeProperty, {
vocab: true
});
const type = _getContextValue(activeCtx, activeProperty, '@type');
const compacted = api.compactIri({ activeCtx, iri: value['@id'], relativeTo: { vocab: type === '@vocab' } });
// compact to scalar
const compacted = api.compactIri({
activeCtx,
iri: value['@id'],
relativeTo: {
vocab: type === '@vocab'
}
}); // compact to scalar
if (type === '@id' || type === '@vocab' || expandedProperty === '@graph') {

@@ -914,7 +1052,8 @@ return compacted;

iri: '@id',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
})]: compacted
};
};
/**

@@ -930,2 +1069,4 @@ * Removes the @preserve keywords as the last step of the compaction

*/
api.removePreserve = (ctx, input, options) => {

@@ -935,5 +1076,6 @@ // recurse through arrays

const output = [];
for (let i = 0; i < input.length; ++i) {
const result = api.removePreserve(ctx, input[i], options);
// drop nulls from arrays
const result = api.removePreserve(ctx, input[i], options); // drop nulls from arrays
if (result !== null) {

@@ -943,2 +1085,3 @@ output.push(result);

}
input = output;

@@ -951,31 +1094,38 @@ } else if (_isObject(input)) {

}
return input['@preserve'];
}
} // skip @values
// skip @values
if (_isValue(input)) {
return input;
}
} // recurse through @lists
// recurse through @lists
if (_isList(input)) {
input['@list'] = api.removePreserve(ctx, input['@list'], options);
return input;
}
} // handle in-memory linked nodes
// handle in-memory linked nodes
const idAlias = api.compactIri({
activeCtx: ctx,
iri: '@id',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
});
if (idAlias in input) {
const id = input[idAlias];
if (id in options.link) {
const idx = options.link[id].indexOf(input);
if (idx !== -1) {
// already visited
return options.link[id][idx];
}
// prevent circular visitation
} // prevent circular visitation
options.link[id].push(input);

@@ -986,10 +1136,13 @@ } else {

}
}
} // recurse through properties
// recurse through properties
const graphAlias = api.compactIri({
activeCtx: ctx,
iri: '@graph',
relativeTo: { vocab: true }
relativeTo: {
vocab: true
}
});
for (const prop in input) {

@@ -1004,11 +1157,13 @@ // potentially remove the id, if it is an unreference bnode

const container = _getContextValue(ctx, prop, '@container') || [];
if (options.compactArrays && _isArray(result) && result.length === 1 && container.length === 0 && prop !== graphAlias) {
result = result[0];
}
input[prop] = result;
}
}
return input;
};
/**

@@ -1026,11 +1181,12 @@ * Picks the preferred compaction term from the given inverse context entry.

*/
function _selectTerm(activeCtx, iri, value, containers, typeOrLanguage, typeOrLanguageValue) {
if (typeOrLanguageValue === null) {
typeOrLanguageValue = '@null';
}
} // preferences for the value of @type or @language
// preferences for the value of @type or @language
const prefs = [];
// determine prefs for @id based on whether or not value compacts to a term
const prefs = []; // determine prefs for @id based on whether or not value compacts to a term
if ((typeOrLanguageValue === '@id' || typeOrLanguageValue === '@reverse') && _isSubjectReference(value)) {

@@ -1040,5 +1196,13 @@ // prefer @reverse first

prefs.push('@reverse');
}
// try to compact value to a term
const term = api.compactIri({ activeCtx, iri: value['@id'], relativeTo: { vocab: true } });
} // try to compact value to a term
const term = api.compactIri({
activeCtx,
iri: value['@id'],
relativeTo: {
vocab: true
}
});
if (term in activeCtx.mappings && activeCtx.mappings[term] && activeCtx.mappings[term]['@id'] === value['@id']) {

@@ -1054,8 +1218,10 @@ // prefer @vocab

}
prefs.push('@none');
const containerMap = activeCtx.inverse[iri];
const containerMap = activeCtx.inverse[iri];
for (let ci = 0; ci < containers.length; ++ci) {
// if container not available in the map, continue
const container = containers[ci];
if (!(container in containerMap)) {

@@ -1066,10 +1232,12 @@ continue;

const typeOrLanguageValueMap = containerMap[container][typeOrLanguage];
for (let pi = 0; pi < prefs.length; ++pi) {
// if type/language option not available in the map, continue
const pref = prefs[pi];
if (!(pref in typeOrLanguageValueMap)) {
continue;
}
} // select term
// select term
return typeOrLanguageValueMap[pref];

@@ -1081,3 +1249,2 @@ }

}
/**

@@ -1090,6 +1257,12 @@ * The value of `@nest` in the term definition must either be `@nest`, or a term

*/
function _checkNestProperty(activeCtx, nestProperty) {
if (_expandIri(activeCtx, nestProperty, { vocab: true }) !== '@nest') {
throw new JsonLdError('JSON-LD compact error; nested property must have an @nest value ' + 'resolving to @nest.', 'jsonld.SyntaxError', { code: 'invalid @nest value' });
if (_expandIri(activeCtx, nestProperty, {
vocab: true
}) !== '@nest') {
throw new JsonLdError('JSON-LD compact error; nested property must have an @nest value ' + 'resolving to @nest.', 'jsonld.SyntaxError', {
code: 'invalid @nest value'
});
}
}

@@ -8,6 +8,4 @@ /*

const XSD = 'http://www.w3.org/2001/XMLSchema#';
module.exports = {
LINK_HEADER_REL: 'http://www.w3.org/ns/json-ld#context',
RDF,

@@ -23,3 +21,2 @@ RDF_LIST: RDF + 'List',

RDF_LANGSTRING: RDF + 'langString',
XSD,

@@ -26,0 +23,0 @@ XSD_BOOLEAN: XSD + 'boolean',

@@ -6,143 +6,32 @@ /*

let _retrieveContextUrls = (() => {
var _ref3 = _asyncToGenerator(function* (input, options) {
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
// recursive function that will retrieve all @context URLs in documents
let retrieve = (() => {
var _ref4 = _asyncToGenerator(function* (doc, cycles, documentLoader) {
if (cycles.size > MAX_CONTEXT_URLS) {
throw new JsonLdError('Maximum number of @context URLs exceeded.', 'jsonld.ContextUrlError', { code: 'loading remote context failed', max: MAX_CONTEXT_URLS });
}
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
// find all URLs in the given document
const urls = new Map();
_findContextUrls(doc, urls, false, options.base);
const util = require('./util');
// queue all unretrieved URLs
const queue = [...urls.keys()].filter(function (u) {
return urls.get(u) === false;
});
const ActiveContextCache = require('./ActiveContextCache');
// retrieve URLs in queue
return Promise.all(queue.map((() => {
var _ref5 = _asyncToGenerator(function* (url) {
// check for context URL cycle
if (cycles.has(url)) {
throw new JsonLdError('Cyclical @context URLs detected.', 'jsonld.ContextUrlError', { code: 'recursive context inclusion', url });
}
const _cycles = new Set(cycles);
_cycles.add(url);
let remoteDoc;
let ctx;
try {
remoteDoc = yield documentLoader(url);
ctx = remoteDoc.document || null;
// parse string context as JSON
if (_isString(ctx)) {
ctx = JSON.parse(ctx);
}
} catch (e) {
throw new JsonLdError('Dereferencing a URL did not result in a valid JSON-LD object. ' + 'Possible causes are an inaccessible URL perhaps due to ' + 'a same-origin policy (ensure the server uses CORS if you are ' + 'using client-side JavaScript), too many redirects, a ' + 'non-JSON response, or more than one HTTP Link Header was ' + 'provided for a remote context.', 'jsonld.InvalidUrl', { code: 'loading remote context failed', url, cause: e });
}
// ensure ctx is an object
if (!_isObject(ctx)) {
throw new JsonLdError('Dereferencing a URL did not result in a JSON object. The ' + 'response was valid JSON, but it was not a JSON object.', 'jsonld.InvalidUrl', { code: 'invalid remote context', url });
}
// use empty context if no @context key is present
if (!('@context' in ctx)) {
ctx = { '@context': {} };
} else {
ctx = { '@context': ctx['@context'] };
}
// append @context URL to context if given
if (remoteDoc.contextUrl) {
if (!_isArray(ctx['@context'])) {
ctx['@context'] = [ctx['@context']];
}
ctx['@context'].push(remoteDoc.contextUrl);
}
// recurse
yield retrieve(ctx, _cycles, documentLoader);
// store retrieved context w/replaced @context URLs
urls.set(url, ctx['@context']);
// replace all @context URLs in the document
_findContextUrls(doc, urls, true, options.base);
});
return function (_x8) {
return _ref5.apply(this, arguments);
};
})()));
});
return function retrieve(_x5, _x6, _x7) {
return _ref4.apply(this, arguments);
};
})();
const documentLoader = util.normalizeDocumentLoader(options.documentLoader);
// retrieve all @context URLs in input
yield retrieve(input, new Set(), documentLoader);
return input;
});
return function _retrieveContextUrls(_x3, _x4) {
return _ref3.apply(this, arguments);
};
})();
/**
* Finds all @context URLs in the given JSON-LD input.
*
* @param input the JSON-LD input.
* @param urls a map of URLs (url => false/@contexts).
* @param replace true to replace the URLs in the given input with the
* @contexts from the urls map, false not to.
* @param base the base IRI to use to resolve relative IRIs.
*
* @return true if new URLs to retrieve were found, false if not.
*/
function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
const util = require('./util');
const ActiveContextCache = require('./ActiveContextCache');
const JsonLdError = require('./JsonLdError');
-var _require = require('./types');
-const _isArray = _require.isArray,
-  _isObject = _require.isObject,
-  _isString = _require.isString,
-  _isUndefined = _require.isUndefined;
+const {
+  isArray: _isArray,
+  isObject: _isObject,
+  isString: _isString,
+  isUndefined: _isUndefined
+} = require('./types');
-var _require2 = require('./url');
-const _isAbsoluteIri = _require2.isAbsolute,
-  _isRelativeIri = _require2.isRelative,
-  prependBase = _require2.prependBase,
-  parseUrl = _require2.parse;
+const {
+  isAbsolute: _isAbsoluteIri,
+  isRelative: _isRelativeIri,
+  prependBase,
+  parse: parseUrl
+} = require('./url');
const MAX_CONTEXT_URLS = 10;
const INITIAL_CONTEXT_CACHE = new Map();
const INITIAL_CONTEXT_CACHE_MAX_SIZE = 10000;
const api = {};
module.exports = api;
api.cache = new ActiveContextCache();
/**

@@ -157,7 +46,8 @@ * Processes a local context and returns a new active context.

*/
-api.process = (_ref) => {
-  let activeCtx = _ref.activeCtx,
-    localCtx = _ref.localCtx,
-    options = _ref.options;
+api.process = ({
+  activeCtx,
+  localCtx,
+  options
+}) => {
// normalize local context to an array of @context objects

@@ -167,34 +57,38 @@ if (_isObject(localCtx) && '@context' in localCtx && _isArray(localCtx['@context'])) {

}
const ctxs = _isArray(localCtx) ? localCtx : [localCtx];
// no contexts in array, return current active context w/o changes
const ctxs = _isArray(localCtx) ? localCtx : [localCtx]; // no contexts in array, return current active context w/o changes
if (ctxs.length === 0) {
return activeCtx;
}
} // process each context in order, update active context
// on each iteration to ensure proper caching
// process each context in order, update active context
// on each iteration to ensure proper caching
let rval = activeCtx;
for (let i = 0; i < ctxs.length; ++i) {
let ctx = ctxs[i];
let ctx = ctxs[i]; // reset to initial context
// reset to initial context
if (ctx === null) {
rval = activeCtx = api.getInitialContext(options);
continue;
}
} // dereference @context key if present
// dereference @context key if present
if (_isObject(ctx) && '@context' in ctx) {
ctx = ctx['@context'];
}
} // context must be an object by now, all URLs retrieved before this call
// context must be an object by now, all URLs retrieved before this call
if (!_isObject(ctx)) {
throw new JsonLdError('Invalid JSON-LD syntax; @context must be an object.', 'jsonld.SyntaxError', { code: 'invalid local context', context: ctx });
}
throw new JsonLdError('Invalid JSON-LD syntax; @context must be an object.', 'jsonld.SyntaxError', {
code: 'invalid local context',
context: ctx
});
} // get context from cache if available
// get context from cache if available
if (api.cache) {
const cached = api.cache.get(activeCtx, ctx);
if (cached) {

@@ -204,33 +98,37 @@ rval = activeCtx = cached;

}
}
} // update active context and clone new one before updating
// update active context and clone new one before updating
activeCtx = rval;
rval = rval.clone();
rval = rval.clone(); // define context mappings for keys in local context
// define context mappings for keys in local context
const defined = {};
const defined = {}; // handle @version
// handle @version
if ('@version' in ctx) {
if (ctx['@version'] !== 1.1) {
throw new JsonLdError('Unsupported JSON-LD version: ' + ctx['@version'], 'jsonld.UnsupportedVersion', { code: 'invalid @version value', context: ctx });
throw new JsonLdError('Unsupported JSON-LD version: ' + ctx['@version'], 'jsonld.UnsupportedVersion', {
code: 'invalid @version value',
context: ctx
});
}
if (activeCtx.processingMode && activeCtx.processingMode === 'json-ld-1.0') {
throw new JsonLdError('@version: ' + ctx['@version'] + ' not compatible with ' + activeCtx.processingMode, 'jsonld.ProcessingModeConflict', { code: 'processing mode conflict', context: ctx });
throw new JsonLdError('@version: ' + ctx['@version'] + ' not compatible with ' + activeCtx.processingMode, 'jsonld.ProcessingModeConflict', {
code: 'processing mode conflict',
context: ctx
});
}
rval.processingMode = 'json-ld-1.1';
rval['@version'] = ctx['@version'];
defined['@version'] = true;
}
} // if not set explicitly, set processingMode to "json-ld-1.0"
// if not set explicitly, set processingMode to "json-ld-1.0"
rval.processingMode = rval.processingMode || activeCtx.processingMode || 'json-ld-1.0';
// handle @base
rval.processingMode = rval.processingMode || activeCtx.processingMode || 'json-ld-1.0'; // handle @base
if ('@base' in ctx) {
let base = ctx['@base'];
if (base === null) {
// no action
if (base === null) {// no action
} else if (_isAbsoluteIri(base)) {

@@ -241,3 +139,6 @@ base = parseUrl(base);

} else {
throw new JsonLdError('Invalid JSON-LD syntax; the value of "@base" in a ' + '@context must be an absolute IRI, a relative IRI, or null.', 'jsonld.SyntaxError', { code: 'invalid base IRI', context: ctx });
throw new JsonLdError('Invalid JSON-LD syntax; the value of "@base" in a ' + '@context must be an absolute IRI, a relative IRI, or null.', 'jsonld.SyntaxError', {
code: 'invalid base IRI',
context: ctx
});
}

@@ -247,38 +148,51 @@

defined['@base'] = true;
}
} // handle @vocab
// handle @vocab
if ('@vocab' in ctx) {
const value = ctx['@vocab'];
if (value === null) {
delete rval['@vocab'];
} else if (!_isString(value)) {
throw new JsonLdError('Invalid JSON-LD syntax; the value of "@vocab" in a ' + '@context must be a string or null.', 'jsonld.SyntaxError', { code: 'invalid vocab mapping', context: ctx });
throw new JsonLdError('Invalid JSON-LD syntax; the value of "@vocab" in a ' + '@context must be a string or null.', 'jsonld.SyntaxError', {
code: 'invalid vocab mapping',
context: ctx
});
} else if (!_isAbsoluteIri(value)) {
throw new JsonLdError('Invalid JSON-LD syntax; the value of "@vocab" in a ' + '@context must be an absolute IRI.', 'jsonld.SyntaxError', { code: 'invalid vocab mapping', context: ctx });
throw new JsonLdError('Invalid JSON-LD syntax; the value of "@vocab" in a ' + '@context must be an absolute IRI.', 'jsonld.SyntaxError', {
code: 'invalid vocab mapping',
context: ctx
});
} else {
rval['@vocab'] = value;
}
defined['@vocab'] = true;
}
} // handle @language
// handle @language
if ('@language' in ctx) {
const value = ctx['@language'];
if (value === null) {
delete rval['@language'];
} else if (!_isString(value)) {
throw new JsonLdError('Invalid JSON-LD syntax; the value of "@language" in a ' + '@context must be a string or null.', 'jsonld.SyntaxError', { code: 'invalid default language', context: ctx });
throw new JsonLdError('Invalid JSON-LD syntax; the value of "@language" in a ' + '@context must be a string or null.', 'jsonld.SyntaxError', {
code: 'invalid default language',
context: ctx
});
} else {
rval['@language'] = value.toLowerCase();
}
defined['@language'] = true;
}
} // process all other keys
// process all other keys
for (const key in ctx) {
api.createTermDefinition(rval, ctx, key, defined);
}
} // cache result
// cache result
if (api.cache) {

@@ -291,3 +205,2 @@ api.cache.set(activeCtx, ctx, rval);

};
/**

@@ -302,2 +215,4 @@ * Creates a term definition during context processing.

*/
api.createTermDefinition = (activeCtx, localCtx, term, defined) => {

@@ -308,27 +223,38 @@ if (term in defined) {

return;
}
// cycle detected
throw new JsonLdError('Cyclical context definition detected.', 'jsonld.CyclicalContext', { code: 'cyclic IRI mapping', context: localCtx, term: term });
}
} // cycle detected
// now defining term
throw new JsonLdError('Cyclical context definition detected.', 'jsonld.CyclicalContext', {
code: 'cyclic IRI mapping',
context: localCtx,
term: term
});
} // now defining term
defined[term] = false;
if (api.isKeyword(term)) {
throw new JsonLdError('Invalid JSON-LD syntax; keywords cannot be overridden.', 'jsonld.SyntaxError', { code: 'keyword redefinition', context: localCtx, term: term });
throw new JsonLdError('Invalid JSON-LD syntax; keywords cannot be overridden.', 'jsonld.SyntaxError', {
code: 'keyword redefinition',
context: localCtx,
term: term
});
}
if (term === '') {
throw new JsonLdError('Invalid JSON-LD syntax; a term cannot be an empty string.', 'jsonld.SyntaxError', { code: 'invalid term definition', context: localCtx });
}
throw new JsonLdError('Invalid JSON-LD syntax; a term cannot be an empty string.', 'jsonld.SyntaxError', {
code: 'invalid term definition',
context: localCtx
});
} // remove old mapping
// remove old mapping
if (activeCtx.mappings[term]) {
delete activeCtx.mappings[term];
}
} // get context term value
// get context term value
let value = localCtx[term];
// clear context entry
let value = localCtx[term]; // clear context entry
if (value === null || _isObject(value) && value['@id'] === null) {

@@ -338,23 +264,27 @@ activeCtx.mappings[term] = null;

return;
}
} // convert short-hand value to object w/@id
// convert short-hand value to object w/@id
let simpleTerm = false;
if (_isString(value)) {
simpleTerm = true;
value = { '@id': value };
value = {
'@id': value
};
}
if (!_isObject(value)) {
throw new JsonLdError('Invalid JSON-LD syntax; @context term values must be ' + 'strings or objects.', 'jsonld.SyntaxError', { code: 'invalid term definition', context: localCtx });
}
throw new JsonLdError('Invalid JSON-LD syntax; @context term values must be ' + 'strings or objects.', 'jsonld.SyntaxError', {
code: 'invalid term definition',
context: localCtx
});
} // create new mapping
// create new mapping
const mapping = activeCtx.mappings[term] = {};
mapping.reverse = false;
mapping.reverse = false; // make sure term definition only has expected keywords
// make sure term definition only has expected keywords
const validKeys = ['@container', '@id', '@language', '@reverse', '@type'];
const validKeys = ['@container', '@id', '@language', '@reverse', '@type']; // JSON-LD 1.1 support
// JSON-LD 1.1 support
if (api.processingMode(activeCtx, 1.1)) {

@@ -366,8 +296,11 @@ validKeys.push('@context', '@nest', '@prefix');

if (!validKeys.includes(kw)) {
throw new JsonLdError('Invalid JSON-LD syntax; a term definition must not contain ' + kw, 'jsonld.SyntaxError', { code: 'invalid term definition', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; a term definition must not contain ' + kw, 'jsonld.SyntaxError', {
code: 'invalid term definition',
context: localCtx
});
}
}
} // always compute whether term has a colon as an optimization for
// _compactIri
// always compute whether term has a colon as an optimization for
// _compactIri
const colon = term.indexOf(':');

@@ -378,17 +311,37 @@ mapping._termHasColon = colon !== -1;

if ('@id' in value) {
throw new JsonLdError('Invalid JSON-LD syntax; a @reverse term definition must not ' + 'contain @id.', 'jsonld.SyntaxError', { code: 'invalid reverse property', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; a @reverse term definition must not ' + 'contain @id.', 'jsonld.SyntaxError', {
code: 'invalid reverse property',
context: localCtx
});
}
if ('@nest' in value) {
throw new JsonLdError('Invalid JSON-LD syntax; a @reverse term definition must not ' + 'contain @nest.', 'jsonld.SyntaxError', { code: 'invalid reverse property', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; a @reverse term definition must not ' + 'contain @nest.', 'jsonld.SyntaxError', {
code: 'invalid reverse property',
context: localCtx
});
}
const reverse = value['@reverse'];
if (!_isString(reverse)) {
throw new JsonLdError('Invalid JSON-LD syntax; a @context @reverse value must be a string.', 'jsonld.SyntaxError', { code: 'invalid IRI mapping', context: localCtx });
}
throw new JsonLdError('Invalid JSON-LD syntax; a @context @reverse value must be a string.', 'jsonld.SyntaxError', {
code: 'invalid IRI mapping',
context: localCtx
});
} // expand and add @id mapping
// expand and add @id mapping
const id = api.expandIri(activeCtx, reverse, { vocab: true, base: false }, localCtx, defined);
const id = api.expandIri(activeCtx, reverse, {
vocab: true,
base: false
}, localCtx, defined);
if (!_isAbsoluteIri(id)) {
throw new JsonLdError('Invalid JSON-LD syntax; a @context @reverse value must be an ' + 'absolute IRI or a blank node identifier.', 'jsonld.SyntaxError', { code: 'invalid IRI mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; a @context @reverse value must be an ' + 'absolute IRI or a blank node identifier.', 'jsonld.SyntaxError', {
code: 'invalid IRI mapping',
context: localCtx
});
}
mapping['@id'] = id;

@@ -398,13 +351,26 @@ mapping.reverse = true;

let id = value['@id'];
if (!_isString(id)) {
throw new JsonLdError('Invalid JSON-LD syntax; a @context @id value must be an array ' + 'of strings or a string.', 'jsonld.SyntaxError', { code: 'invalid IRI mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; a @context @id value must be an array ' + 'of strings or a string.', 'jsonld.SyntaxError', {
code: 'invalid IRI mapping',
context: localCtx
});
}
if (id !== term) {
// expand and add @id mapping
id = api.expandIri(activeCtx, id, { vocab: true, base: false }, localCtx, defined);
id = api.expandIri(activeCtx, id, {
vocab: true,
base: false
}, localCtx, defined);
if (!_isAbsoluteIri(id) && !api.isKeyword(id)) {
throw new JsonLdError('Invalid JSON-LD syntax; a @context @id value must be an ' + 'absolute IRI, a blank node identifier, or a keyword.', 'jsonld.SyntaxError', { code: 'invalid IRI mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; a @context @id value must be an ' + 'absolute IRI, a blank node identifier, or a keyword.', 'jsonld.SyntaxError', {
code: 'invalid IRI mapping',
context: localCtx
});
}
mapping['@id'] = id;
// indicate if this term may be used as a compact IRI prefix
mapping['@id'] = id; // indicate if this term may be used as a compact IRI prefix
mapping._prefix = !mapping._termHasColon && id.match(/[:\/\?#\[\]@]$/) && (simpleTerm || api.processingMode(activeCtx, 1.0));

@@ -418,2 +384,3 @@ }

const prefix = term.substr(0, colon);
if (prefix in localCtx) {

@@ -435,10 +402,15 @@ // define parent prefix

if (!('@vocab' in activeCtx)) {
throw new JsonLdError('Invalid JSON-LD syntax; @context terms must define an @id.', 'jsonld.SyntaxError', { code: 'invalid IRI mapping', context: localCtx, term: term });
}
// prepend vocab to term
throw new JsonLdError('Invalid JSON-LD syntax; @context terms must define an @id.', 'jsonld.SyntaxError', {
code: 'invalid IRI mapping',
context: localCtx,
term: term
});
} // prepend vocab to term
mapping['@id'] = activeCtx['@vocab'] + term;
}
}
} // IRI mapping now defined
// IRI mapping now defined
defined[term] = true;

@@ -448,4 +420,8 @@

let type = value['@type'];
if (!_isString(type)) {
throw new JsonLdError('Invalid JSON-LD syntax; an @context @type values must be a string.', 'jsonld.SyntaxError', { code: 'invalid type mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; an @context @type values must be a string.', 'jsonld.SyntaxError', {
code: 'invalid type mapping',
context: localCtx
});
}

@@ -455,12 +431,23 @@

// expand @type to full IRI
type = api.expandIri(activeCtx, type, { vocab: true, base: false }, localCtx, defined);
type = api.expandIri(activeCtx, type, {
vocab: true,
base: false
}, localCtx, defined);
if (!_isAbsoluteIri(type)) {
throw new JsonLdError('Invalid JSON-LD syntax; an @context @type value must be an ' + 'absolute IRI.', 'jsonld.SyntaxError', { code: 'invalid type mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; an @context @type value must be an ' + 'absolute IRI.', 'jsonld.SyntaxError', {
code: 'invalid type mapping',
context: localCtx
});
}
if (type.indexOf('_:') === 0) {
throw new JsonLdError('Invalid JSON-LD syntax; an @context @type values must be an IRI, ' + 'not a blank node identifier.', 'jsonld.SyntaxError', { code: 'invalid type mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; an @context @type values must be an IRI, ' + 'not a blank node identifier.', 'jsonld.SyntaxError', {
code: 'invalid type mapping',
context: localCtx
});
}
}
} // add @type to mapping
// add @type to mapping
mapping['@type'] = type;

@@ -474,16 +461,20 @@ }

let isValid = true;
const hasSet = container.includes('@set');
const hasSet = container.includes('@set'); // JSON-LD 1.1 support
// JSON-LD 1.1 support
if (api.processingMode(activeCtx, 1.1)) {
validContainers.push('@graph', '@id', '@type');
validContainers.push('@graph', '@id', '@type'); // check container length
// check container length
if (container.includes('@list')) {
if (container.length !== 1) {
throw new JsonLdError('Invalid JSON-LD syntax; @context @container with @list must ' + 'have no other values', 'jsonld.SyntaxError', { code: 'invalid container mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; @context @container with @list must ' + 'have no other values', 'jsonld.SyntaxError', {
code: 'invalid container mapping',
context: localCtx
});
}
} else if (container.includes('@graph')) {
if (container.some(key => key !== '@graph' && key !== '@id' && key !== '@index' && key !== '@set')) {
throw new JsonLdError('Invalid JSON-LD syntax; @context @container with @graph must ' + 'have no other values other than @id, @index, and @set', 'jsonld.SyntaxError', { code: 'invalid container mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; @context @container with @graph must ' + 'have no other values other than @id, @index, and @set', 'jsonld.SyntaxError', {
code: 'invalid container mapping',
context: localCtx
});
}

@@ -497,27 +488,31 @@ } else {

// which is one of the validContainers)
isValid &= !_isArray(value['@container']);
isValid &= !_isArray(value['@container']); // check container length
// check container length
isValid &= container.length <= 1;
}
} // check against valid containers
// check against valid containers
isValid &= container.every(c => validContainers.includes(c));
// @set not allowed with @list
isValid &= container.every(c => validContainers.includes(c)); // @set not allowed with @list
isValid &= !(hasSet && container.includes('@list'));
if (!isValid) {
throw new JsonLdError('Invalid JSON-LD syntax; @context @container value must be ' + 'one of the following: ' + validContainers.join(', '), 'jsonld.SyntaxError', { code: 'invalid container mapping', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; @context @container value must be ' + 'one of the following: ' + validContainers.join(', '), 'jsonld.SyntaxError', {
code: 'invalid container mapping',
context: localCtx
});
}
if (mapping.reverse && !container.every(c => ['@index', '@set'].includes(c))) {
throw new JsonLdError('Invalid JSON-LD syntax; @context @container value for a @reverse ' + 'type definition must be @index or @set.', 'jsonld.SyntaxError', { code: 'invalid reverse property', context: localCtx });
}
throw new JsonLdError('Invalid JSON-LD syntax; @context @container value for a @reverse ' + 'type definition must be @index or @set.', 'jsonld.SyntaxError', {
code: 'invalid reverse property',
context: localCtx
});
} // add @container to mapping
// add @container to mapping
mapping['@container'] = container;
}
} // scoped contexts
// scoped contexts
if ('@context' in value) {

@@ -529,22 +524,34 @@ mapping['@context'] = value['@context'];

let language = value['@language'];
if (language !== null && !_isString(language)) {
throw new JsonLdError('Invalid JSON-LD syntax; @context @language value must be ' + 'a string or null.', 'jsonld.SyntaxError', { code: 'invalid language mapping', context: localCtx });
}
throw new JsonLdError('Invalid JSON-LD syntax; @context @language value must be ' + 'a string or null.', 'jsonld.SyntaxError', {
code: 'invalid language mapping',
context: localCtx
});
} // add @language to mapping
// add @language to mapping
if (language !== null) {
language = language.toLowerCase();
}
mapping['@language'] = language;
}
} // term may be used as a prefix
// term may be used as a prefix
if ('@prefix' in value) {
if (mapping._termHasColon) {
throw new JsonLdError('Invalid JSON-LD syntax; @context @prefix used on a compact IRI term', 'jsonld.SyntaxError', { code: 'invalid term definition', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; @context @prefix used on a compact IRI term', 'jsonld.SyntaxError', {
code: 'invalid term definition',
context: localCtx
});
}
if (typeof value['@prefix'] === 'boolean') {
mapping._prefix = value['@prefix'] === true;
} else {
throw new JsonLdError('Invalid JSON-LD syntax; @context value for @prefix must be boolean', 'jsonld.SyntaxError', { code: 'invalid @prefix value', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; @context value for @prefix must be boolean', 'jsonld.SyntaxError', {
code: 'invalid @prefix value',
context: localCtx
});
}

@@ -555,15 +562,23 @@ }

const nest = value['@nest'];
if (!_isString(nest) || nest !== '@nest' && nest.indexOf('@') === 0) {
throw new JsonLdError('Invalid JSON-LD syntax; @context @nest value must be ' + 'a string which is not a keyword other than @nest.', 'jsonld.SyntaxError', { code: 'invalid @nest value', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; @context @nest value must be ' + 'a string which is not a keyword other than @nest.', 'jsonld.SyntaxError', {
code: 'invalid @nest value',
context: localCtx
});
}
mapping['@nest'] = nest;
}
} // disallow aliasing @context and @preserve
// disallow aliasing @context and @preserve
const id = mapping['@id'];
if (id === '@context' || id === '@preserve') {
throw new JsonLdError('Invalid JSON-LD syntax; @context and @preserve cannot be aliased.', 'jsonld.SyntaxError', { code: 'invalid keyword alias', context: localCtx });
throw new JsonLdError('Invalid JSON-LD syntax; @context and @preserve cannot be aliased.', 'jsonld.SyntaxError', {
code: 'invalid keyword alias',
context: localCtx
});
}
};
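For reference, the @container validation above surfaces through the public API as errors with code 'invalid container mapping'. A minimal sketch using jsonld.expand (the term names and IRIs below are illustrative, not taken from this package's tests):

const jsonld = require('jsonld');

// valid: a single '@list' container on a term
jsonld.expand({
  '@context': {items: {'@id': 'http://example.org/items', '@container': '@list'}},
  items: ['a', 'b']
}).then(expanded => console.log(JSON.stringify(expanded)));

// invalid: '@set' may not be combined with '@list', so context processing
// rejects with a JsonLdError whose details.code is 'invalid container mapping'
jsonld.expand({
  '@context': {bad: {'@id': 'http://example.org/bad', '@container': ['@list', '@set']}},
  bad: []
}).catch(err => console.error(err.details.code));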
/**

@@ -586,2 +601,4 @@ * Expands a string to a full IRI. The string may be a term, a prefix, a

*/
api.expandIri = (activeCtx, value, relativeTo, localCtx, defined) => {

@@ -591,5 +608,5 @@ // already expanded

return value;
}
} // define term dependency if not defined
// define term dependency if not defined
if (localCtx && value in localCtx && defined[value] !== true) {

@@ -600,6 +617,6 @@ api.createTermDefinition(activeCtx, localCtx, value, defined);

relativeTo = relativeTo || {};
if (relativeTo.vocab) {
const mapping = activeCtx.mappings[value];
const mapping = activeCtx.mappings[value]; // value is explicitly ignored with a null mapping
// value is explicitly ignored with a null mapping
if (mapping === null) {

@@ -613,37 +630,38 @@ return null;

}
}
} // split value into prefix:suffix
// split value into prefix:suffix
const colon = value.indexOf(':');
if (colon !== -1) {
const prefix = value.substr(0, colon);
const suffix = value.substr(colon + 1);
const suffix = value.substr(colon + 1); // do not expand blank nodes (prefix of '_') or already-absolute
// IRIs (suffix of '//')
// do not expand blank nodes (prefix of '_') or already-absolute
// IRIs (suffix of '//')
if (prefix === '_' || suffix.indexOf('//') === 0) {
return value;
}
} // prefix dependency not defined, define it
// prefix dependency not defined, define it
if (localCtx && prefix in localCtx) {
api.createTermDefinition(activeCtx, localCtx, prefix, defined);
}
} // use mapping if prefix is defined
// use mapping if prefix is defined
const mapping = activeCtx.mappings[prefix];
if (mapping) {
return mapping['@id'] + suffix;
}
} // already absolute IRI
// already absolute IRI
return value;
}
} // prepend vocab
// prepend vocab
if (relativeTo.vocab && '@vocab' in activeCtx) {
return activeCtx['@vocab'] + value;
}
} // prepend base
// prepend base
if (relativeTo.base) {

@@ -655,3 +673,2 @@ return prependBase(activeCtx['@base'], value);

};
/**

@@ -665,6 +682,12 @@ * Gets the initial context.

*/
api.getInitialContext = options => {
const base = parseUrl(options.base || '');
const key = JSON.stringify({ base, processingMode: options.processingMode });
const key = JSON.stringify({
base,
processingMode: options.processingMode
});
const cached = INITIAL_CONTEXT_CACHE.get(key);
if (cached) {

@@ -681,4 +704,4 @@ return cached;

clone: _cloneActiveContext
};
// TODO: consider using LRU cache instead
}; // TODO: consider using LRU cache instead
if (INITIAL_CONTEXT_CACHE.size === INITIAL_CONTEXT_CACHE_MAX_SIZE) {

@@ -689,5 +712,5 @@ // clear whole cache -- assumes scenario where the cache fills means

}
INITIAL_CONTEXT_CACHE.set(key, initialContext);
return initialContext;
/**

@@ -699,25 +722,25 @@ * Generates an inverse context for use in the compaction algorithm, if

*/
function _createInverseContext() {
const activeCtx = this;
const activeCtx = this; // lazily create inverse
// lazily create inverse
if (activeCtx.inverse) {
return activeCtx.inverse;
}
const inverse = activeCtx.inverse = {};
// variables for building fast CURIE map
const inverse = activeCtx.inverse = {}; // variables for building fast CURIE map
const fastCurieMap = activeCtx.fastCurieMap = {};
const irisToTerms = {};
const irisToTerms = {}; // handle default language
// handle default language
const defaultLanguage = activeCtx['@language'] || '@none';
const defaultLanguage = activeCtx['@language'] || '@none'; // create term selections for each mapping in the context, ordered by
// shortest and then lexicographically least
// create term selections for each mapping in the context, ordered by
// shortest and then lexicographically least
const mappings = activeCtx.mappings;
const terms = Object.keys(mappings).sort(util.compareShortestLeast);
for (let i = 0; i < terms.length; ++i) {
const term = terms[i];
const mapping = mappings[term];
if (mapping === null) {

@@ -728,6 +751,6 @@ continue;

let container = mapping['@container'] || '@none';
container = [].concat(container).sort().join('');
container = [].concat(container).sort().join(''); // iterate over every IRI in the mapping
// iterate over every IRI in the mapping
const ids = [].concat(mapping['@id']);
for (let ii = 0; ii < ids.length; ++ii) {

@@ -745,3 +768,7 @@ const iri = ids[ii];

irisToTerms[iri] = [term];
const fastCurieEntry = { iri: iri, terms: irisToTerms[iri] };
const fastCurieEntry = {
iri: iri,
terms: irisToTerms[iri]
};
if (iri[0] in fastCurieMap) {

@@ -756,5 +783,5 @@ fastCurieMap[iri[0]].push(fastCurieEntry);

irisToTerms[iri].push(term);
}
} // add new entry
// add new entry
if (!entry[container]) {

@@ -767,3 +794,5 @@ entry[container] = {

}
entry = entry[container];
_addPreferredTerm(term, entry['@any'], '@none');

@@ -780,2 +809,3 @@

const language = mapping['@language'] || '@null';
_addPreferredTerm(term, entry['@language'], language);

@@ -786,12 +816,13 @@ } else {

// add an entry for the default language
_addPreferredTerm(term, entry['@language'], defaultLanguage);
_addPreferredTerm(term, entry['@language'], defaultLanguage); // add entries for no type and no language
// add entries for no type and no language
_addPreferredTerm(term, entry['@type'], '@none');
_addPreferredTerm(term, entry['@language'], '@none');
}
}
}
} // build fast CURIE map
// build fast CURIE map
for (const key in fastCurieMap) {

@@ -803,3 +834,2 @@ _buildIriMap(fastCurieMap, key, 1);

}
/**

@@ -813,10 +843,13 @@ * Runs a recursive algorithm to build a lookup map for quickly finding

*/
function _buildIriMap(iriMap, key, idx) {
const entries = iriMap[key];
const next = iriMap[key] = {};
let iri;
let letter;
for (let i = 0; i < entries.length; ++i) {
iri = entries[i].iri;
if (idx >= iri.length) {

@@ -827,2 +860,3 @@ letter = '';

}
if (letter in next) {

@@ -839,6 +873,6 @@ next[letter].push(entries[i]);

}
_buildIriMap(next, key, idx + 1);
}
}
/**

@@ -851,2 +885,4 @@ * Adds the term for the given entry if not already added.

*/
function _addPreferredTerm(term, entry, typeOrLanguageValue) {

@@ -857,3 +893,2 @@ if (!(typeOrLanguageValue in entry)) {

}
/**

@@ -864,2 +899,4 @@ * Clones an active context, creating a child active context.

*/
function _cloneActiveContext() {

@@ -872,12 +909,14 @@ const child = {};

child.getInverse = this.getInverse;
if ('@language' in this) {
child['@language'] = this['@language'];
}
if ('@vocab' in this) {
child['@vocab'] = this['@vocab'];
}
return child;
}
};
/**

@@ -894,2 +933,4 @@ * Gets the value for the given active context key and type, null if none is

*/
api.getContextValue = (ctx, key, type) => {

@@ -899,5 +940,5 @@ // return null for invalid key

return null;
}
} // get specific entry information
// get specific entry information
if (ctx.mappings[key]) {

@@ -910,2 +951,3 @@ const entry = ctx.mappings[key];

}
if (type in entry) {

@@ -915,5 +957,5 @@ // return entry value for type

}
}
} // get default language
// get default language
if (type === '@language' && type in ctx) {

@@ -925,3 +967,2 @@ return ctx[type];

};
/**

@@ -937,4 +978,8 @@ * Retrieves external @context URLs using the given document loader. Every

*/
api.getAllContexts = (() => {
var _ref2 = _asyncToGenerator(function* (input, options) {
api.getAllContexts =
/*#__PURE__*/
function () {
var _ref = _asyncToGenerator(function* (input, options) {
return _retrieveContextUrls(input, options);

@@ -944,6 +989,5 @@ });

return function (_x, _x2) {
return _ref2.apply(this, arguments);
return _ref.apply(this, arguments);
};
})();
}();
/**

@@ -957,2 +1001,4 @@ * Processing Mode check.

*/
api.processingMode = (activeCtx, version) => {

@@ -965,3 +1011,2 @@ if (version.toString() >= '1.1') {

};
/**

@@ -974,2 +1019,4 @@ * Returns whether or not the given value is a keyword.

*/
api.isKeyword = v => {

@@ -979,2 +1026,3 @@ if (!_isString(v)) {

}
switch (v) {

@@ -1006,5 +1054,131 @@ case '@base':

}
return false;
};
function _retrieveContextUrls(_x3, _x4) {
return _retrieveContextUrls2.apply(this, arguments);
}
/**
* Finds all @context URLs in the given JSON-LD input.
*
* @param input the JSON-LD input.
* @param urls a map of URLs (url => false/@contexts).
* @param replace true to replace the URLs in the given input with the
* @contexts from the urls map, false not to.
* @param base the base IRI to use to resolve relative IRIs.
*
* @return true if new URLs to retrieve were found, false if not.
*/
function _retrieveContextUrls2() {
_retrieveContextUrls2 = _asyncToGenerator(function* (input, options) {
const documentLoader = util.normalizeDocumentLoader(options.documentLoader); // retrieve all @context URLs in input
yield retrieve(input, new Set(), documentLoader);
return input; // recursive function that will retrieve all @context URLs in documents
function retrieve(_x5, _x6, _x7) {
return _retrieve.apply(this, arguments);
}
function _retrieve() {
_retrieve = _asyncToGenerator(function* (doc, cycles, documentLoader) {
if (cycles.size > MAX_CONTEXT_URLS) {
throw new JsonLdError('Maximum number of @context URLs exceeded.', 'jsonld.ContextUrlError', {
code: 'loading remote context failed',
max: MAX_CONTEXT_URLS
});
} // find all URLs in the given document
const urls = new Map();
_findContextUrls(doc, urls, false, options.base); // queue all unretrieved URLs
const queue = [...urls.keys()].filter(u => urls.get(u) === false); // retrieve URLs in queue
return Promise.all(queue.map(
/*#__PURE__*/
function () {
var _ref2 = _asyncToGenerator(function* (url) {
// check for context URL cycle
if (cycles.has(url)) {
throw new JsonLdError('Cyclical @context URLs detected.', 'jsonld.ContextUrlError', {
code: 'recursive context inclusion',
url
});
}
const _cycles = new Set(cycles);
_cycles.add(url);
let remoteDoc;
let ctx;
try {
remoteDoc = yield documentLoader(url);
ctx = remoteDoc.document || null; // parse string context as JSON
if (_isString(ctx)) {
ctx = JSON.parse(ctx);
}
} catch (e) {
throw new JsonLdError('Dereferencing a URL did not result in a valid JSON-LD object. ' + 'Possible causes are an inaccessible URL perhaps due to ' + 'a same-origin policy (ensure the server uses CORS if you are ' + 'using client-side JavaScript), too many redirects, a ' + 'non-JSON response, or more than one HTTP Link Header was ' + 'provided for a remote context.', 'jsonld.InvalidUrl', {
code: 'loading remote context failed',
url,
cause: e
});
} // ensure ctx is an object
if (!_isObject(ctx)) {
throw new JsonLdError('Dereferencing a URL did not result in a JSON object. The ' + 'response was valid JSON, but it was not a JSON object.', 'jsonld.InvalidUrl', {
code: 'invalid remote context',
url
});
} // use empty context if no @context key is present
if (!('@context' in ctx)) {
ctx = {
'@context': {}
};
} else {
ctx = {
'@context': ctx['@context']
};
} // append @context URL to context if given
if (remoteDoc.contextUrl) {
if (!_isArray(ctx['@context'])) {
ctx['@context'] = [ctx['@context']];
}
ctx['@context'].push(remoteDoc.contextUrl);
} // recurse
yield retrieve(ctx, _cycles, documentLoader); // store retrieved context w/replaced @context URLs
urls.set(url, ctx['@context']); // replace all @context URLs in the document
_findContextUrls(doc, urls, true, options.base);
});
return function (_x8) {
return _ref2.apply(this, arguments);
};
}()));
});
return _retrieve.apply(this, arguments);
}
});
return _retrieveContextUrls2.apply(this, arguments);
}
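The retrieval logic above is driven entirely by the documentLoader option: a loader only has to map a URL to {contextUrl, documentUrl, document}. A minimal in-memory sketch (the context URL and term below are illustrative):

const jsonld = require('jsonld');

const contexts = {
  'http://example.org/context.jsonld': {'@context': {name: 'http://schema.org/name'}}
};

// serve known contexts from memory; refuse anything else instead of fetching it
const documentLoader = async url => {
  if (url in contexts) {
    return {contextUrl: null, documentUrl: url, document: contexts[url]};
  }
  throw new Error('refusing to load unknown URL: ' + url);
};

jsonld.expand(
  {'@context': 'http://example.org/context.jsonld', name: 'Jane'},
  {documentLoader}
).then(expanded => console.log(JSON.stringify(expanded)));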
function _findContextUrls(input, urls, replace, base) {

@@ -1015,2 +1189,3 @@ if (_isArray(input)) {

}
return;

@@ -1022,12 +1197,13 @@ }

return;
}
} // input is an object
// input is an object
for (const key in input) {
if (key !== '@context') {
_findContextUrls(input[key], urls, replace, base);
continue;
}
} // get @context
// get @context
const ctx = input[key];

@@ -1038,8 +1214,10 @@

let length = ctx.length;
for (let i = 0; i < length; ++i) {
const _ctx = ctx[i];
if (_isString(_ctx)) {
const prepended = prependBase(base, _ctx);
const resolved = urls.get(prepended);
// replace w/@context if requested
const resolved = urls.get(prepended); // replace w/@context if requested
if (replace) {

@@ -1070,4 +1248,4 @@ if (_isArray(resolved)) {

const prepended = prependBase(base, ctx);
const resolved = urls.get(prepended);
// replace w/@context if requested
const resolved = urls.get(prepended); // replace w/@context if requested
if (replace) {

@@ -1074,0 +1252,0 @@ if (resolved !== false) {

@@ -17,8 +17,6 @@ /*

*/
constructor(_ref) {
var _ref$size = _ref.size;
let size = _ref$size === undefined ? 50 : _ref$size;
var _ref$expires = _ref.expires;
let expires = _ref$expires === undefined ? 30000 : _ref$expires;
constructor({
size = 50,
expires = 30000
}) {
this.order = [];

@@ -33,8 +31,11 @@ this.cache = {};

const entry = this.cache[url];
if (entry.expires >= Date.now()) {
return entry.ctx;
}
delete this.cache[url];
this.order.splice(this.order.indexOf(url), 1);
}
return null;

@@ -47,5 +48,10 @@ }

}
this.order.push(url);
this.cache[url] = { ctx: ctx, expires: Date.now() + this.expires };
this.cache[url] = {
ctx: ctx,
expires: Date.now() + this.expires
};
}
};

@@ -6,16 +6,18 @@ /*

function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
var _require = require('../util');
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
const parseLinkHeader = _require.parseLinkHeader,
buildHeaders = _require.buildHeaders;
const {
parseLinkHeader,
buildHeaders
} = require('../util');
var _require2 = require('../constants');
const {
LINK_HEADER_REL
} = require('../constants');
const LINK_HEADER_REL = _require2.LINK_HEADER_REL;
const JsonLdError = require('../JsonLdError');
const JsonLdError = require('../JsonLdError');
const RequestQueue = require('../RequestQueue');
/**

@@ -37,13 +39,51 @@ * Creates a built-in node document loader.

*/
module.exports = function () {
let loadDocument = (() => {
var _ref2 = _asyncToGenerator(function* (url, redirects) {
module.exports = ({
secure,
strictSSL = true,
maxRedirects = -1,
request,
headers = {}
} = {
strictSSL: true,
maxRedirects: -1,
headers: {}
}) => {
headers = buildHeaders(headers); // TODO: use `r2`
request = request || require('request');
const http = require('http'); // TODO: disable cache until HTTP caching implemented
//const cache = new DocumentCache();
const queue = new RequestQueue();
return queue.wrapLoader(function (url) {
return loadDocument(url, []);
});
function loadDocument(_x, _x2) {
return _loadDocument.apply(this, arguments);
}
function _loadDocument() {
_loadDocument = _asyncToGenerator(function* (url, redirects) {
if (url.indexOf('http:') !== 0 && url.indexOf('https:') !== 0) {
throw new JsonLdError('URL could not be dereferenced; only "http" and "https" URLs are ' + 'supported.', 'jsonld.InvalidUrl', { code: 'loading document failed', url: url });
throw new JsonLdError('URL could not be dereferenced; only "http" and "https" URLs are ' + 'supported.', 'jsonld.InvalidUrl', {
code: 'loading document failed',
url: url
});
}
if (secure && url.indexOf('https') !== 0) {
throw new JsonLdError('URL could not be dereferenced; secure mode is enabled and ' + 'the URL\'s scheme is not "https".', 'jsonld.InvalidUrl', { code: 'loading document failed', url: url });
}
// TODO: disable cache until HTTP caching implemented
throw new JsonLdError('URL could not be dereferenced; secure mode is enabled and ' + 'the URL\'s scheme is not "https".', 'jsonld.InvalidUrl', {
code: 'loading document failed',
url: url
});
} // TODO: disable cache until HTTP caching implemented
let doc = null; //cache.get(url);
if (doc !== null) {

@@ -54,2 +94,3 @@ return doc;

let result;
try {

@@ -63,14 +104,21 @@ result = yield _request(request, {

} catch (e) {
throw new JsonLdError('URL could not be dereferenced, an error occurred.', 'jsonld.LoadDocumentError', { code: 'loading document failed', url: url, cause: e });
throw new JsonLdError('URL could not be dereferenced, an error occurred.', 'jsonld.LoadDocumentError', {
code: 'loading document failed',
url: url,
cause: e
});
}
var _result = result;
const res = _result.res,
body = _result.body;
const {
res,
body
} = result;
doc = {
contextUrl: null,
documentUrl: url,
document: body || null
}; // handle error
const statusText = http.STATUS_CODES[res.statusCode];
doc = { contextUrl: null, documentUrl: url, document: body || null };
// handle error
const statusText = http.STATUS_CODES[res.statusCode];
if (res.statusCode >= 400) {

@@ -82,17 +130,22 @@ throw new JsonLdError('URL could not be dereferenced: ' + statusText, 'jsonld.InvalidUrl', {

});
}
} // handle Link Header
// handle Link Header
if (res.headers.link && res.headers['content-type'] !== 'application/ld+json') {
// only 1 related link header permitted
const linkHeader = parseLinkHeader(res.headers.link)[LINK_HEADER_REL];
if (Array.isArray(linkHeader)) {
throw new JsonLdError('URL could not be dereferenced, it has more than one associated ' + 'HTTP Link Header.', 'jsonld.InvalidUrl', { code: 'multiple context link headers', url: url });
throw new JsonLdError('URL could not be dereferenced, it has more than one associated ' + 'HTTP Link Header.', 'jsonld.InvalidUrl', {
code: 'multiple context link headers',
url: url
});
}
if (linkHeader) {
doc.contextUrl = linkHeader.target;
}
}
} // handle redirect
// handle redirect
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {

@@ -107,2 +160,3 @@ if (redirects.length === maxRedirects) {

}
if (redirects.indexOf(url) !== -1) {

@@ -116,9 +170,10 @@ throw new JsonLdError('URL could not be dereferenced; infinite redirection was detected.', 'jsonld.InfiniteRedirectDetected', {

}
redirects.push(url);
return loadDocument(res.headers.location, redirects);
}
} // cache for each redirected URL
// cache for each redirected URL
redirects.push(url);
// TODO: disable cache until HTTP caching implemented
redirects.push(url); // TODO: disable cache until HTTP caching implemented
/*

@@ -134,30 +189,4 @@ for(let i = 0; i < redirects.length; ++i) {

});
return function loadDocument(_x2, _x3) {
return _ref2.apply(this, arguments);
};
})();
var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : { strictSSL: true, maxRedirects: -1, headers: {} };
let secure = _ref.secure;
var _ref$strictSSL = _ref.strictSSL;
let strictSSL = _ref$strictSSL === undefined ? true : _ref$strictSSL;
var _ref$maxRedirects = _ref.maxRedirects;
let maxRedirects = _ref$maxRedirects === undefined ? -1 : _ref$maxRedirects,
request = _ref.request;
var _ref$headers = _ref.headers;
let headers = _ref$headers === undefined ? {} : _ref$headers;
headers = buildHeaders(headers);
// TODO: use `r2`
request = request || require('request');
const http = require('http');
// TODO: disable cache until HTTP caching implemented
//const cache = new DocumentCache();
const queue = new RequestQueue();
return queue.wrapLoader(function (url) {
return loadDocument(url, []);
});
return _loadDocument.apply(this, arguments);
}
};

@@ -171,3 +200,6 @@

} else {
resolve({ res: res, body: body });
resolve({
res: res,
body: body
});
}

@@ -174,0 +206,0 @@ });
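This module is what jsonld.documentLoaders.node() returns; a sketch of wiring it up with the options destructured above (the header value and context URL are placeholders):

const jsonld = require('jsonld');

// refuse plain-http URLs, follow up to 5 redirects, send an extra request header
const nodeLoader = jsonld.documentLoaders.node({
  secure: true,
  strictSSL: true,
  maxRedirects: 5,
  headers: {'User-Agent': 'example-agent/1.0'}
});

// install as the default loader, or pass it per call via options.documentLoader
jsonld.documentLoader = nodeLoader;

jsonld.expand(
  {'@context': 'https://example.org/context.jsonld', name: 'Jane'}, // placeholder URL; a real call fetches it over HTTPS
  {documentLoader: nodeLoader}
).catch(err => console.error(err.name, err.details && err.details.code));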

@@ -6,18 +6,20 @@ /*

function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
var _require = require('../util');
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
const parseLinkHeader = _require.parseLinkHeader,
buildHeaders = _require.buildHeaders;
const {
parseLinkHeader,
buildHeaders
} = require('../util');
var _require2 = require('../constants');
const {
LINK_HEADER_REL
} = require('../constants');
const LINK_HEADER_REL = _require2.LINK_HEADER_REL;
const JsonLdError = require('../JsonLdError');
const JsonLdError = require('../JsonLdError');
const RequestQueue = require('../RequestQueue');
const REGEX_LINK_HEADER = /(^|(\r\n))link:/i;
/**

@@ -34,17 +36,44 @@ * Creates a built-in XMLHttpRequest document loader.

*/
module.exports = function () {
let loader = (() => {
var _ref2 = _asyncToGenerator(function* (url) {
module.exports = ({
secure,
headers = {},
xhr
} = {
headers: {}
}) => {
headers = buildHeaders(headers);
const queue = new RequestQueue();
return queue.wrapLoader(loader);
function loader(_x) {
return _loader.apply(this, arguments);
}
function _loader() {
_loader = _asyncToGenerator(function* (url) {
if (url.indexOf('http:') !== 0 && url.indexOf('https:') !== 0) {
throw new JsonLdError('URL could not be dereferenced; only "http" and "https" URLs are ' + 'supported.', 'jsonld.InvalidUrl', { code: 'loading document failed', url: url });
throw new JsonLdError('URL could not be dereferenced; only "http" and "https" URLs are ' + 'supported.', 'jsonld.InvalidUrl', {
code: 'loading document failed',
url: url
});
}
if (secure && url.indexOf('https') !== 0) {
throw new JsonLdError('URL could not be dereferenced; secure mode is enabled and ' + 'the URL\'s scheme is not "https".', 'jsonld.InvalidUrl', { code: 'loading document failed', url: url });
throw new JsonLdError('URL could not be dereferenced; secure mode is enabled and ' + 'the URL\'s scheme is not "https".', 'jsonld.InvalidUrl', {
code: 'loading document failed',
url: url
});
}
let req;
try {
req = yield _get(xhr, url, headers);
} catch (e) {
throw new JsonLdError('URL could not be dereferenced, an error occurred.', 'jsonld.LoadDocumentError', { code: 'loading document failed', url: url, cause: e });
throw new JsonLdError('URL could not be dereferenced, an error occurred.', 'jsonld.LoadDocumentError', {
code: 'loading document failed',
url: url,
cause: e
});
}

@@ -60,16 +89,26 @@

const doc = { contextUrl: null, documentUrl: url, document: req.response };
const doc = {
contextUrl: null,
documentUrl: url,
document: req.response
}; // handle Link Header (avoid unsafe header warning by existence testing)
// handle Link Header (avoid unsafe header warning by existence testing)
const contentType = req.getResponseHeader('Content-Type');
let linkHeader;
if (REGEX_LINK_HEADER.test(req.getAllResponseHeaders())) {
linkHeader = req.getResponseHeader('Link');
}
if (linkHeader && contentType !== 'application/ld+json') {
// only 1 related link header permitted
linkHeader = parseLinkHeader(linkHeader)[LINK_HEADER_REL];
if (Array.isArray(linkHeader)) {
throw new JsonLdError('URL could not be dereferenced, it has more than one ' + 'associated HTTP Link Header.', 'jsonld.InvalidUrl', { code: 'multiple context link headers', url: url });
throw new JsonLdError('URL could not be dereferenced, it has more than one ' + 'associated HTTP Link Header.', 'jsonld.InvalidUrl', {
code: 'multiple context link headers',
url: url
});
}
if (linkHeader) {

@@ -82,18 +121,4 @@ doc.contextUrl = linkHeader.target;

});
return function loader(_x2) {
return _ref2.apply(this, arguments);
};
})();
var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : { headers: {} };
let secure = _ref.secure;
var _ref$headers = _ref.headers;
let headers = _ref$headers === undefined ? {} : _ref$headers,
xhr = _ref.xhr;
headers = buildHeaders(headers);
const queue = new RequestQueue();
return queue.wrapLoader(loader);
return _loader.apply(this, arguments);
}
};

@@ -106,9 +131,13 @@

req.onload = () => resolve(req);
req.onerror = err => reject(err);
req.open('GET', url, true);
for (const k in headers) {
req.setRequestHeader(k, headers[k]);
}
req.send();
});
}

@@ -8,36 +8,34 @@ /*

var _require = require('./types');
const {
isArray: _isArray,
isObject: _isObject,
isEmptyObject: _isEmptyObject,
isString: _isString
} = require('./types');
const _isArray = _require.isArray,
_isObject = _require.isObject,
_isEmptyObject = _require.isEmptyObject,
_isString = _require.isString;
const {
isList: _isList,
isValue: _isValue,
isGraph: _isGraph
} = require('./graphTypes');
var _require2 = require('./graphTypes');
const {
expandIri: _expandIri,
getContextValue: _getContextValue,
isKeyword: _isKeyword,
process: _processContext
} = require('./context');
const _isList = _require2.isList,
_isValue = _require2.isValue,
_isGraph = _require2.isGraph;
const {
isAbsolute: _isAbsoluteIri
} = require('./url');
var _require3 = require('./context');
const {
addValue: _addValue,
validateTypeValue: _validateTypeValue,
getValues: _getValues
} = require('./util');
const _expandIri = _require3.expandIri,
_getContextValue = _require3.getContextValue,
_isKeyword = _require3.isKeyword,
_processContext = _require3.process;
var _require4 = require('./url');
const _isAbsoluteIri = _require4.isAbsolute;
var _require5 = require('./util');
const _addValue = _require5.addValue,
_validateTypeValue = _require5.validateTypeValue,
_getValues = _require5.getValues;
const api = {};
module.exports = api;
/**

@@ -60,22 +58,21 @@ * Recursively expands an element using the given context. Any context in

*/
api.expand = (_ref) => {
let activeCtx = _ref.activeCtx;
var _ref$activeProperty = _ref.activeProperty;
let activeProperty = _ref$activeProperty === undefined ? null : _ref$activeProperty,
element = _ref.element;
var _ref$options = _ref.options;
let options = _ref$options === undefined ? {} : _ref$options;
var _ref$insideList = _ref.insideList;
let insideList = _ref$insideList === undefined ? false : _ref$insideList;
var _ref$expansionMap = _ref.expansionMap;
let expansionMap = _ref$expansionMap === undefined ? () => undefined : _ref$expansionMap;
api.expand = ({
activeCtx,
activeProperty = null,
element,
options = {},
insideList = false,
expansionMap = () => undefined
}) => {
// nothing to expand
if (element === null || element === undefined) {
return null;
}
} // disable framing if activeProperty is @default
// disable framing if activeProperty is @default
if (activeProperty === '@default') {
options = Object.assign({}, options, { isFrame: false });
options = Object.assign({}, options, {
isFrame: false
});
}

@@ -85,3 +82,5 @@

// drop free-floating scalars that are not in lists unless custom mapped
if (!insideList && (activeProperty === null || _expandIri(activeCtx, activeProperty, { vocab: true }) === '@graph')) {
if (!insideList && (activeProperty === null || _expandIri(activeCtx, activeProperty, {
vocab: true
}) === '@graph')) {
// TODO: use `await` to support async

@@ -95,13 +94,19 @@ const mapped = expansionMap({

});
if (mapped === undefined) {
return null;
}
return mapped;
}
} // expand element according to value expansion rules
// expand element according to value expansion rules
return _expandValue({ activeCtx, activeProperty, value: element });
}
// recursively expand array
return _expandValue({
activeCtx,
activeProperty,
value: element
});
} // recursively expand array
if (_isArray(element)) {

@@ -111,2 +116,3 @@ let rval = [];

insideList = insideList || container.includes('@list');
for (let i = 0; i < element.length; ++i) {

@@ -121,5 +127,8 @@ // expand element

});
if (insideList && (_isArray(e) || _isList(e))) {
// lists of lists are illegal
throw new JsonLdError('Invalid JSON-LD syntax; lists of lists are not permitted.', 'jsonld.SyntaxError', { code: 'list of lists' });
throw new JsonLdError('Invalid JSON-LD syntax; lists of lists are not permitted.', 'jsonld.SyntaxError', {
code: 'list of lists'
});
}

@@ -139,2 +148,3 @@

});
if (e === undefined) {

@@ -151,33 +161,50 @@ continue;

}
return rval;
}
} // recursively expand object:
// if element has a context, process it
// recursively expand object:
// if element has a context, process it
if ('@context' in element) {
activeCtx = _processContext({ activeCtx, localCtx: element['@context'], options });
}
activeCtx = _processContext({
activeCtx,
localCtx: element['@context'],
options
});
} // look for scoped context on @type
// look for scoped context on @type
let keys = Object.keys(element).sort();
for (const key of keys) {
const expandedProperty = _expandIri(activeCtx, key, { vocab: true });
const expandedProperty = _expandIri(activeCtx, key, {
vocab: true
});
if (expandedProperty === '@type') {
// set scoped contexts from @type
const types = [].concat(element[key]).sort();
for (const type of types) {
const ctx = _getContextValue(activeCtx, type, '@context');
if (ctx) {
activeCtx = _processContext({ activeCtx, localCtx: ctx, options });
activeCtx = _processContext({
activeCtx,
localCtx: ctx,
options
});
}
}
}
}
} // expand the active property
// expand the active property
const expandedActiveProperty = _expandIri(activeCtx, activeProperty, { vocab: true });
// process each key and value in element, ignoring @nest content
const expandedActiveProperty = _expandIri(activeCtx, activeProperty, {
vocab: true
}); // process each key and value in element, ignoring @nest content
let rval = {};
_expandObject({

@@ -191,5 +218,6 @@ activeCtx,

insideList,
expansionMap });
expansionMap
}); // get property count on expanded output
// get property count on expanded output
keys = Object.keys(rval);

@@ -201,21 +229,34 @@ let count = keys.length;

if ('@type' in rval && '@language' in rval) {
throw new JsonLdError('Invalid JSON-LD syntax; an element containing "@value" may not ' + 'contain both "@type" and "@language".', 'jsonld.SyntaxError', { code: 'invalid value object', element: rval });
throw new JsonLdError('Invalid JSON-LD syntax; an element containing "@value" may not ' + 'contain both "@type" and "@language".', 'jsonld.SyntaxError', {
code: 'invalid value object',
element: rval
});
}
let validCount = count - 1;
if ('@type' in rval) {
validCount -= 1;
}
if ('@index' in rval) {
validCount -= 1;
}
if ('@language' in rval) {
validCount -= 1;
}
if (validCount !== 0) {
throw new JsonLdError('Invalid JSON-LD syntax; an element containing "@value" may only ' + 'have an "@index" property and at most one other property ' + 'which can be "@type" or "@language".', 'jsonld.SyntaxError', { code: 'invalid value object', element: rval });
throw new JsonLdError('Invalid JSON-LD syntax; an element containing "@value" may only ' + 'have an "@index" property and at most one other property ' + 'which can be "@type" or "@language".', 'jsonld.SyntaxError', {
code: 'invalid value object',
element: rval
});
}
const values = rval['@value'] === null ? [] : [].concat(rval['@value']);
const types = _getValues(rval, '@type');
// drop null @values unless custom mapped
const types = _getValues(rval, '@type'); // drop null @values unless custom mapped
if (values.length === 0) {

@@ -231,2 +272,3 @@ // TODO: use `await` to support async

});
if (mapped !== undefined) {

@@ -239,5 +281,11 @@ rval = mapped;

// if @language is present, @value must be a string
throw new JsonLdError('Invalid JSON-LD syntax; only strings may be language-tagged.', 'jsonld.SyntaxError', { code: 'invalid language-tagged value', element: rval });
throw new JsonLdError('Invalid JSON-LD syntax; only strings may be language-tagged.', 'jsonld.SyntaxError', {
code: 'invalid language-tagged value',
element: rval
});
} else if (!types.every(t => _isAbsoluteIri(t) && !(_isString(t) && t.indexOf('_:') === 0) || _isEmptyObject(t))) {
throw new JsonLdError('Invalid JSON-LD syntax; an element containing "@value" and "@type" ' + 'must have an absolute IRI for the value of "@type".', 'jsonld.SyntaxError', { code: 'invalid typed value', element: rval });
throw new JsonLdError('Invalid JSON-LD syntax; an element containing "@value" and "@type" ' + 'must have an absolute IRI for the value of "@type".', 'jsonld.SyntaxError', {
code: 'invalid typed value',
element: rval
});
}

@@ -250,5 +298,9 @@ } else if ('@type' in rval && !_isArray(rval['@type'])) {

if (count > 1 && !(count === 2 && '@index' in rval)) {
throw new JsonLdError('Invalid JSON-LD syntax; if an element has the property "@set" ' + 'or "@list", then it can have at most one other property that is ' + '"@index".', 'jsonld.SyntaxError', { code: 'invalid set or list object', element: rval });
}
// optimize away @set
throw new JsonLdError('Invalid JSON-LD syntax; if an element has the property "@set" ' + 'or "@list", then it can have at most one other property that is ' + '"@index".', 'jsonld.SyntaxError', {
code: 'invalid set or list object',
element: rval
});
} // optimize away @set
if ('@set' in rval) {

@@ -270,2 +322,3 @@ rval = rval['@set'];

});
if (mapped !== undefined) {

@@ -276,6 +329,6 @@ rval = mapped;

}
}
} // drop certain top-level objects that do not occur in lists, unless custom
// mapped
// drop certain top-level objects that do not occur in lists, unless custom
// mapped
if (_isObject(rval) && !options.keepFreeFloatingNodes && !insideList && (activeProperty === null || expandedActiveProperty === '@graph')) {

@@ -293,2 +346,3 @@ // drop empty object, top-level @value/@list, or object with only @id

});
if (mapped !== undefined) {

@@ -304,3 +358,2 @@ rval = mapped;

};
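As a quick illustration of the expansion entry point above, @reverse maps are kept intact apart from IRI expansion (the IRIs below are illustrative):

const jsonld = require('jsonld');

jsonld.expand({
  '@context': {knows: 'http://xmlns.com/foaf/0.1/knows'},
  '@id': 'http://example.org/alice',
  '@reverse': {knows: {'@id': 'http://example.org/bob'}}
}).then(expanded => console.log(JSON.stringify(expanded, null, 2)));
// roughly: [{'@id': 'http://example.org/alice',
//   '@reverse': {'http://xmlns.com/foaf/0.1/knows': [{'@id': 'http://example.org/bob'}]}}]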
/**

@@ -321,28 +374,31 @@ * Expand each key and value of element adding to result

*/
function _expandObject(_ref2) {
let activeCtx = _ref2.activeCtx,
activeProperty = _ref2.activeProperty,
expandedActiveProperty = _ref2.expandedActiveProperty,
element = _ref2.element,
expandedParent = _ref2.expandedParent;
var _ref2$options = _ref2.options;
let options = _ref2$options === undefined ? {} : _ref2$options,
insideList = _ref2.insideList,
expansionMap = _ref2.expansionMap;
function _expandObject({
activeCtx,
activeProperty,
expandedActiveProperty,
element,
expandedParent,
options = {},
insideList,
expansionMap
}) {
const keys = Object.keys(element).sort();
const nests = [];
for (const key of keys) {
let value = element[key];
let expandedValue;
let expandedValue; // skip @context
// skip @context
if (key === '@context') {
continue;
}
} // expand property
// expand property
let expandedProperty = _expandIri(activeCtx, key, { vocab: true });
// drop non-absolute IRI keys that aren't keywords unless custom mapped
let expandedProperty = _expandIri(activeCtx, key, {
vocab: true
}); // drop non-absolute IRI keys that aren't keywords unless custom mapped
if (expandedProperty === null || !(_isAbsoluteIri(expandedProperty) || _isKeyword(expandedProperty))) {

@@ -360,2 +416,3 @@ // TODO: use `await` to support async

});
if (expandedProperty === undefined) {

@@ -368,30 +425,55 @@ continue;

if (expandedActiveProperty === '@reverse') {
throw new JsonLdError('Invalid JSON-LD syntax; a keyword cannot be used as a @reverse ' + 'property.', 'jsonld.SyntaxError', { code: 'invalid reverse property map', value: value });
throw new JsonLdError('Invalid JSON-LD syntax; a keyword cannot be used as a @reverse ' + 'property.', 'jsonld.SyntaxError', {
code: 'invalid reverse property map',
value: value
});
}
if (expandedProperty in expandedParent) {
throw new JsonLdError('Invalid JSON-LD syntax; colliding keywords detected.', 'jsonld.SyntaxError', { code: 'colliding keywords', keyword: expandedProperty });
throw new JsonLdError('Invalid JSON-LD syntax; colliding keywords detected.', 'jsonld.SyntaxError', {
code: 'colliding keywords',
keyword: expandedProperty
});
}
}
} // syntax error if @id is not a string
// syntax error if @id is not a string
if (expandedProperty === '@id') {
if (!_isString(value)) {
if (!options.isFrame) {
throw new JsonLdError('Invalid JSON-LD syntax; "@id" value must a string.', 'jsonld.SyntaxError', { code: 'invalid @id value', value: value });
throw new JsonLdError('Invalid JSON-LD syntax; "@id" value must a string.', 'jsonld.SyntaxError', {
code: 'invalid @id value',
value: value
});
}
if (_isObject(value)) {
// empty object is a wildcard
if (!_isEmptyObject(value)) {
throw new JsonLdError('Invalid JSON-LD syntax; "@id" value an empty object or array ' + 'of strings, if framing', 'jsonld.SyntaxError', { code: 'invalid @id value', value: value });
throw new JsonLdError('Invalid JSON-LD syntax; "@id" value an empty object or array ' + 'of strings, if framing', 'jsonld.SyntaxError', {
code: 'invalid @id value',
value: value
});
}
} else if (_isArray(value)) {
if (!value.every(v => _isString(v))) {
throw new JsonLdError('Invalid JSON-LD syntax; "@id" value an empty object or array ' + 'of strings, if framing', 'jsonld.SyntaxError', { code: 'invalid @id value', value: value });
throw new JsonLdError('Invalid JSON-LD syntax; "@id" value an empty object or array ' + 'of strings, if framing', 'jsonld.SyntaxError', {
code: 'invalid @id value',
value: value
});
}
} else {
throw new JsonLdError('Invalid JSON-LD syntax; "@id" value an empty object or array ' + 'of strings, if framing', 'jsonld.SyntaxError', { code: 'invalid @id value', value: value });
throw new JsonLdError('Invalid JSON-LD syntax; "@id" value an empty object or array ' + 'of strings, if framing', 'jsonld.SyntaxError', {
code: 'invalid @id value',
value: value
});
}
}
_addValue(expandedParent, '@id', [].concat(value).map(v => _isString(v) ? _expandIri(activeCtx, v, { base: true }) : v), { propertyIsArray: options.isFrame });
_addValue(expandedParent, '@id', [].concat(value).map(v => _isString(v) ? _expandIri(activeCtx, v, {
base: true
}) : v), {
propertyIsArray: options.isFrame
});
continue;

@@ -402,22 +484,38 @@ }

_validateTypeValue(value);
_addValue(expandedParent, '@type', [].concat(value).map(v => _isString(v) ? _expandIri(activeCtx, v, { base: true, vocab: true }) : v), { propertyIsArray: options.isFrame });
_addValue(expandedParent, '@type', [].concat(value).map(v => _isString(v) ? _expandIri(activeCtx, v, {
base: true,
vocab: true
}) : v), {
propertyIsArray: options.isFrame
});
continue;
}
} // @graph must be an array or an object
// @graph must be an array or an object
if (expandedProperty === '@graph' && !(_isObject(value) || _isArray(value))) {
throw new JsonLdError('Invalid JSON-LD syntax; "@graph" value must not be an ' + 'object or an array.', 'jsonld.SyntaxError', { code: 'invalid @graph value', value: value });
}
throw new JsonLdError('Invalid JSON-LD syntax; "@graph" value must not be an ' + 'object or an array.', 'jsonld.SyntaxError', {
code: 'invalid @graph value',
value: value
});
} // @value must not be an object or an array (unless framing)
// @value must not be an object or an array (unless framing)
if (expandedProperty === '@value') {
if ((_isObject(value) || _isArray(value)) && !options.isFrame) {
throw new JsonLdError('Invalid JSON-LD syntax; "@value" value must not be an ' + 'object or an array.', 'jsonld.SyntaxError', { code: 'invalid value object value', value: value });
throw new JsonLdError('Invalid JSON-LD syntax; "@value" value must not be an ' + 'object or an array.', 'jsonld.SyntaxError', {
code: 'invalid value object value',
value: value
});
}
_addValue(expandedParent, '@value', value, { propertyIsArray: options.isFrame });
_addValue(expandedParent, '@value', value, {
propertyIsArray: options.isFrame
});
continue;
}
} // @language must be a string
// @language must be a string
if (expandedProperty === '@language') {

@@ -428,25 +526,41 @@ if (value === null) {

}
if (!_isString(value) && !options.isFrame) {
throw new JsonLdError('Invalid JSON-LD syntax; "@language" value must be a string.', 'jsonld.SyntaxError', { code: 'invalid language-tagged string', value: value });
}
// ensure language value is lowercase
throw new JsonLdError('Invalid JSON-LD syntax; "@language" value must be a string.', 'jsonld.SyntaxError', {
code: 'invalid language-tagged string',
value: value
});
} // ensure language value is lowercase
value = [].concat(value).map(v => _isString(v) ? v.toLowerCase() : v);
_addValue(expandedParent, '@language', value, { propertyIsArray: options.isFrame });
_addValue(expandedParent, '@language', value, {
propertyIsArray: options.isFrame
});
continue;
}
} // @index must be a string
// @index must be a string
if (expandedProperty === '@index') {
if (!_isString(value)) {
throw new JsonLdError('Invalid JSON-LD syntax; "@index" value must be a string.', 'jsonld.SyntaxError', { code: 'invalid @index value', value: value });
throw new JsonLdError('Invalid JSON-LD syntax; "@index" value must be a string.', 'jsonld.SyntaxError', {
code: 'invalid @index value',
value: value
});
}
_addValue(expandedParent, '@index', value);
continue;
}
} // @reverse must be an object
// @reverse must be an object
if (expandedProperty === '@reverse') {
if (!_isObject(value)) {
throw new JsonLdError('Invalid JSON-LD syntax; "@reverse" value must be an object.', 'jsonld.SyntaxError', { code: 'invalid @reverse value', value: value });
throw new JsonLdError('Invalid JSON-LD syntax; "@reverse" value must be an object.', 'jsonld.SyntaxError', {
code: 'invalid @reverse value',
value: value
});
}

@@ -460,13 +574,16 @@

expansionMap
});
// properties double-reversed
}); // properties double-reversed
if ('@reverse' in expandedValue) {
for (const property in expandedValue['@reverse']) {
_addValue(expandedParent, property, expandedValue['@reverse'][property], { propertyIsArray: true });
_addValue(expandedParent, property, expandedValue['@reverse'][property], {
propertyIsArray: true
});
}
}
} // FIXME: can this be merged with code below to simplify?
// merge in all reversed properties
// FIXME: can this be merged with code below to simplify?
// merge in all reversed properties
let reverseMap = expandedParent['@reverse'] || null;
for (const property in expandedValue) {

@@ -476,13 +593,26 @@ if (property === '@reverse') {

}
if (reverseMap === null) {
reverseMap = expandedParent['@reverse'] = {};
}
_addValue(reverseMap, property, [], { propertyIsArray: true });
_addValue(reverseMap, property, [], {
propertyIsArray: true
});
const items = expandedValue[property];
for (let ii = 0; ii < items.length; ++ii) {
const item = items[ii];
if (_isValue(item) || _isList(item)) {
throw new JsonLdError('Invalid JSON-LD syntax; "@reverse" value must not be a ' + '@value or an @list.', 'jsonld.SyntaxError', { code: 'invalid reverse property value', value: expandedValue });
throw new JsonLdError('Invalid JSON-LD syntax; "@reverse" value must not be a ' + '@value or an @list.', 'jsonld.SyntaxError', {
code: 'invalid reverse property value',
value: expandedValue
});
}
_addValue(reverseMap, property, item, { propertyIsArray: true });
_addValue(reverseMap, property, item, {
propertyIsArray: true
});
}

@@ -492,15 +622,21 @@ }

continue;
}
} // nested keys
// nested keys
if (expandedProperty === '@nest') {
nests.push(key);
continue;
}
} // use potential scoped context for key
// use potential scoped context for key
let termCtx = activeCtx;
const ctx = _getContextValue(activeCtx, key, '@context');
if (ctx) {
termCtx = _processContext({ activeCtx, localCtx: ctx, options });
termCtx = _processContext({
activeCtx,
localCtx: ctx,
options
});
}

@@ -551,7 +687,10 @@

const isList = expandedProperty === '@list';
if (isList || expandedProperty === '@set') {
let nextActiveProperty = activeProperty;
if (isList && expandedActiveProperty === '@graph') {
nextActiveProperty = null;
}
expandedValue = api.expand({

@@ -565,4 +704,7 @@ activeCtx: termCtx,

});
if (isList && _isList(expandedValue)) {
throw new JsonLdError('Invalid JSON-LD syntax; lists of lists are not permitted.', 'jsonld.SyntaxError', { code: 'list of lists' });
throw new JsonLdError('Invalid JSON-LD syntax; lists of lists are not permitted.', 'jsonld.SyntaxError', {
code: 'list of lists'
});
}

@@ -580,5 +722,5 @@ } else {

}
}
} // drop null values if property is not @value
// drop null values if property is not @value
if (expandedValue === null && expandedProperty !== '@value') {

@@ -597,54 +739,77 @@ // TODO: use `await` to support async

});
if (expandedValue === undefined) {
continue;
}
}
} // convert expanded value to @list if container specifies it
// convert expanded value to @list if container specifies it
if (expandedProperty !== '@list' && !_isList(expandedValue) && container.includes('@list')) {
// ensure expanded value is an array
expandedValue = _isArray(expandedValue) ? expandedValue : [expandedValue];
expandedValue = { '@list': expandedValue };
}
// convert expanded value to @graph if container specifies it
expandedValue = {
'@list': expandedValue
};
} // convert expanded value to @graph if container specifies it
// and value is not, itself, a graph
// index cases handled above
if (container.includes('@graph') && !container.some(key => key === '@id' || key === '@index')) {
// ensure expanded values are arrays
expandedValue = [].concat(expandedValue).map(v => _isGraph(v) ? v : { '@graph': [].concat(v) });
}
expandedValue = [].concat(expandedValue).map(v => _isGraph(v) ? v : {
'@graph': [].concat(v)
});
} // FIXME: can this be merged with code above to simplify?
// merge in reverse properties
// FIXME: can this be merged with code above to simplify?
// merge in reverse properties
if (termCtx.mappings[key] && termCtx.mappings[key].reverse) {
const reverseMap = expandedParent['@reverse'] = expandedParent['@reverse'] || {};
if (!_isArray(expandedValue)) {
expandedValue = [expandedValue];
}
for (let ii = 0; ii < expandedValue.length; ++ii) {
const item = expandedValue[ii];
if (_isValue(item) || _isList(item)) {
throw new JsonLdError('Invalid JSON-LD syntax; "@reverse" value must not be a ' + '@value or an @list.', 'jsonld.SyntaxError', { code: 'invalid reverse property value', value: expandedValue });
throw new JsonLdError('Invalid JSON-LD syntax; "@reverse" value must not be a ' + '@value or an @list.', 'jsonld.SyntaxError', {
code: 'invalid reverse property value',
value: expandedValue
});
}
_addValue(reverseMap, expandedProperty, item, { propertyIsArray: true });
_addValue(reverseMap, expandedProperty, item, {
propertyIsArray: true
});
}
continue;
}
} // add value for property
// use an array except for certain keywords
// add value for property
// use an array except for certain keywords
const useArray = !['@index', '@id', '@type', '@value', '@language'].includes(expandedProperty);
_addValue(expandedParent, expandedProperty, expandedValue, {
propertyIsArray: useArray
});
}
} // expand each nested key
// expand each nested key
for (const key of nests) {
const nestedValues = _isArray(element[key]) ? element[key] : [element[key]];
for (const nv of nestedValues) {
if (!_isObject(nv) || Object.keys(nv).some(k => _expandIri(activeCtx, k, { vocab: true }) === '@value')) {
throw new JsonLdError('Invalid JSON-LD syntax; nested value must be a node object.', 'jsonld.SyntaxError', { code: 'invalid @nest value', value: nv });
if (!_isObject(nv) || Object.keys(nv).some(k => _expandIri(activeCtx, k, {
vocab: true
}) === '@value')) {
throw new JsonLdError('Invalid JSON-LD syntax; nested value must be a node object.', 'jsonld.SyntaxError', {
code: 'invalid @nest value',
value: nv
});
}
_expandObject({

@@ -658,7 +823,7 @@ activeCtx,

insideList,
expansionMap });
expansionMap
});
}
}
}
/**

@@ -674,33 +839,53 @@ * Expands the given value by using the coercion and keyword rules in the

*/
function _expandValue(_ref3) {
let activeCtx = _ref3.activeCtx,
activeProperty = _ref3.activeProperty,
value = _ref3.value;
function _expandValue({
activeCtx,
activeProperty,
value
}) {
// nothing to expand
if (value === null || value === undefined) {
return null;
}
} // special-case expand @id and @type (skips '@id' expansion)
// special-case expand @id and @type (skips '@id' expansion)
const expandedProperty = _expandIri(activeCtx, activeProperty, { vocab: true });
const expandedProperty = _expandIri(activeCtx, activeProperty, {
vocab: true
});
if (expandedProperty === '@id') {
return _expandIri(activeCtx, value, { base: true });
return _expandIri(activeCtx, value, {
base: true
});
} else if (expandedProperty === '@type') {
return _expandIri(activeCtx, value, { vocab: true, base: true });
}
return _expandIri(activeCtx, value, {
vocab: true,
base: true
});
} // get type definition from context
// get type definition from context
const type = _getContextValue(activeCtx, activeProperty, '@type');
// do @id expansion (automatic for @graph)
const type = _getContextValue(activeCtx, activeProperty, '@type'); // do @id expansion (automatic for @graph)
if ((type === '@id' || expandedProperty === '@graph') && _isString(value)) {
return { '@id': _expandIri(activeCtx, value, { base: true }) };
}
// do @id expansion w/vocab
return {
'@id': _expandIri(activeCtx, value, {
base: true
})
};
} // do @id expansion w/vocab
if (type === '@vocab' && _isString(value)) {
return { '@id': _expandIri(activeCtx, value, { vocab: true, base: true }) };
}
return {
'@id': _expandIri(activeCtx, value, {
vocab: true,
base: true
})
};
} // do not expand keyword values
// do not expand keyword values
if (_isKeyword(expandedProperty)) {

@@ -718,15 +903,16 @@ return value;

const language = _getContextValue(activeCtx, activeProperty, '@language');
if (language !== null) {
rval['@language'] = language;
}
}
// do conversion of values that aren't basic JSON types to strings
} // do conversion of values that aren't basic JSON types to strings
if (!['boolean', 'number', 'string'].includes(typeof value)) {
value = value.toString();
}
rval['@value'] = value;
return rval;
}
/**

@@ -740,11 +926,19 @@ * Expands a language map.

*/
function _expandLanguageMap(activeCtx, languageMap) {
const rval = [];
const keys = Object.keys(languageMap).sort();
for (const key of keys) {
const expandedKey = _expandIri(activeCtx, key, { vocab: true });
const expandedKey = _expandIri(activeCtx, key, {
vocab: true
});
let val = languageMap[key];
if (!_isArray(val)) {
val = [val];
}
for (const item of val) {

@@ -755,43 +949,65 @@ if (item === null) {

}
if (!_isString(item)) {
throw new JsonLdError('Invalid JSON-LD syntax; language map values must be strings.', 'jsonld.SyntaxError', { code: 'invalid language map value', languageMap: languageMap });
throw new JsonLdError('Invalid JSON-LD syntax; language map values must be strings.', 'jsonld.SyntaxError', {
code: 'invalid language map value',
languageMap: languageMap
});
}
const val = { '@value': item };
const val = {
'@value': item
};
if (expandedKey !== '@none') {
val['@language'] = key.toLowerCase();
}
rval.push(val);
}
}
return rval;
}
function _expandIndexMap(_ref4) {
let activeCtx = _ref4.activeCtx,
options = _ref4.options,
activeProperty = _ref4.activeProperty,
value = _ref4.value,
expansionMap = _ref4.expansionMap,
asGraph = _ref4.asGraph,
indexKey = _ref4.indexKey;
function _expandIndexMap({
activeCtx,
options,
activeProperty,
value,
expansionMap,
asGraph,
indexKey
}) {
const rval = [];
const keys = Object.keys(value).sort();
for (let key of keys) {
// if indexKey is @type, there may be a context defined for it
const ctx = _getContextValue(activeCtx, key, '@context');
if (ctx) {
activeCtx = _processContext({ activeCtx, localCtx: ctx, options });
activeCtx = _processContext({
activeCtx,
localCtx: ctx,
options
});
}
let val = value[key];
if (!_isArray(val)) {
val = [val];
}
} // expand for @type, but also for @none
// expand for @type, but also for @none
const expandedKey = _expandIri(activeCtx, key, { vocab: true });
const expandedKey = _expandIri(activeCtx, key, {
vocab: true
});
if (indexKey === '@id') {
// expand document relative
key = _expandIri(activeCtx, key, { base: true });
key = _expandIri(activeCtx, key, {
base: true
});
} else if (indexKey === '@type') {

@@ -809,10 +1025,13 @@ key = expandedKey;

});
for (let item of val) {
// If this is also a @graph container, turn items into graphs
if (asGraph && !_isGraph(item)) {
item = { '@graph': [item] };
item = {
'@graph': [item]
};
}
if (indexKey === '@type') {
if (expandedKey === '@none') {
// ignore @none
if (expandedKey === '@none') {// ignore @none
} else if (item['@type']) {

@@ -826,6 +1045,8 @@ item['@type'] = [key].concat(item['@type']);

}
rval.push(item);
}
}
return rval;
}
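A small sketch of the language-map handling in _expandLanguageMap above (the term and IRI are illustrative):

const jsonld = require('jsonld');

jsonld.expand({
  '@context': {label: {'@id': 'http://example.org/label', '@container': '@language'}},
  label: {en: 'Queen', de: ['Königin', 'Regentin']}
}).then(expanded => console.log(JSON.stringify(expanded, null, 2)));
// each entry becomes {'@value': ..., '@language': ...}; keys are processed in
// sorted order, so the 'de' values appear before the 'en' value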

@@ -6,14 +6,12 @@ /*

var _require = require('./graphTypes');
const {
isSubjectReference: _isSubjectReference
} = require('./graphTypes');
const _isSubjectReference = _require.isSubjectReference;
const {
createMergedNodeMap: _createMergedNodeMap
} = require('./nodeMap');
var _require2 = require('./nodeMap');
const _createMergedNodeMap = _require2.createMergedNodeMap;
const api = {};
module.exports = api;
/**

@@ -26,11 +24,13 @@ * Performs JSON-LD flattening.

*/
api.flatten = input => {
const defaultGraph = _createMergedNodeMap(input);
const defaultGraph = _createMergedNodeMap(input); // produce flattened output
// produce flattened output
const flattened = [];
const keys = Object.keys(defaultGraph).sort();
for (let ki = 0; ki < keys.length; ++ki) {
const node = defaultGraph[keys[ki]];
// only add full subjects to top-level
const node = defaultGraph[keys[ki]]; // only add full subjects to top-level
if (!_isSubjectReference(node)) {

@@ -40,3 +40,4 @@ flattened.push(node);

}
return flattened;
};

@@ -6,20 +6,21 @@ /*

var _require = require('./context');
const {
isKeyword
} = require('./context');
const isKeyword = _require.isKeyword;
const graphTypes = require('./graphTypes');
const graphTypes = require('./graphTypes');
const types = require('./types');
const util = require('./util');
const JsonLdError = require('./JsonLdError');
var _require2 = require('./nodeMap');
const {
createNodeMap: _createNodeMap,
mergeNodeMapGraphs: _mergeNodeMapGraphs
} = require('./nodeMap');
const _createNodeMap = _require2.createNodeMap,
_mergeNodeMapGraphs = _require2.mergeNodeMapGraphs;
const api = {};
module.exports = api;
/**

@@ -34,2 +35,3 @@ * Performs JSON-LD `merged` framing.

*/
api.frameMergedOrDefault = (input, frame, options) => {

@@ -40,3 +42,5 @@ // create framing state

graph: '@default',
graphMap: { '@default': {} },
graphMap: {
'@default': {}
},
graphStack: [],

@@ -46,8 +50,9 @@ subjectStack: [],

bnodeMap: {}
};
}; // produce a map of all graphs and name each bnode
// FIXME: currently uses subjects from @merged graph only
// produce a map of all graphs and name each bnode
// FIXME: currently uses subjects from @merged graph only
const issuer = new util.IdentifierIssuer('_:b');
_createNodeMap(input, state.graphMap, '@default', issuer);
if (options.merged) {

@@ -57,9 +62,8 @@ state.graphMap['@merged'] = _mergeNodeMapGraphs(state.graphMap);

}
state.subjects = state.graphMap[state.graph];
// frame the subjects
state.subjects = state.graphMap[state.graph]; // frame the subjects
const framed = [];
api.frame(state, Object.keys(state.subjects).sort(), frame, framed);
api.frame(state, Object.keys(state.subjects).sort(), frame, framed); // If pruning blank nodes, find those to prune
// If pruning blank nodes, find those to prune
if (options.pruneBlankNodeIdentifiers) {

@@ -72,3 +76,2 @@ // remove all blank nodes appearing only once, done in compaction

};
/**

@@ -83,10 +86,10 @@ * Frames subjects according to the given frame.

*/
api.frame = function (state, subjects, frame, parent) {
let property = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : null;
api.frame = (state, subjects, frame, parent, property = null) => {
// validate the frame
_validateFrame(frame);
frame = frame[0];
// get flags for current frame
frame = frame[0]; // get flags for current frame
const options = state.options;

@@ -97,9 +100,9 @@ const flags = {

requireAll: _getFrameFlag(frame, options, 'requireAll')
};
}; // filter out subjects that match the frame
// filter out subjects that match the frame
const matches = _filterSubjects(state, subjects, frame, flags);
const matches = _filterSubjects(state, subjects, frame, flags); // add matches to output
// add matches to output
const ids = Object.keys(matches).sort();
for (const id of ids) {

@@ -112,35 +115,42 @@ const subject = matches[id];

// subjects that are only shared in-memory when the frames are the same
// add existing linked subject
_addFrameOutput(parent, property, state.link[id]);
continue;
}
/* Note: In order to treat each top-level match as a compartmentalized
result, clear the unique embedded subjects map when the property is null,
which only occurs at the top-level. */
if (property === null) {
state.uniqueEmbeds = { [state.graph]: {} };
state.uniqueEmbeds = {
[state.graph]: {}
};
} else {
state.uniqueEmbeds[state.graph] = state.uniqueEmbeds[state.graph] || {};
}
} // start output for subject
// start output for subject
const output = {};
output['@id'] = id;
if (id.indexOf('_:') === 0) {
util.addValue(state.bnodeMap, id, output, { propertyIsArray: true });
util.addValue(state.bnodeMap, id, output, {
propertyIsArray: true
});
}
state.link[id] = output;
// if embed is @never or if a circular reference would be created by an
state.link[id] = output; // if embed is @never or if a circular reference would be created by an
// embed, the subject cannot be embedded, just add the reference;
// note that a circular reference won't occur when the embed flag is
// `@link` as the above check will short-circuit before reaching this point
if (flags.embed === '@never' || _createsCircularReference(subject, state.graph, state.subjectStack)) {
_addFrameOutput(parent, property, output);
continue;
}
} // if only the last match should be embedded
// if only the last match should be embedded
if (flags.embed === '@last') {

@@ -151,12 +161,19 @@ // remove any existing embed

}
state.uniqueEmbeds[state.graph][id] = { parent: parent, property: property };
}
// push matching subject onto stack to enable circular embed checks
state.subjectStack.push({ subject: subject, graph: state.graph });
state.uniqueEmbeds[state.graph][id] = {
parent: parent,
property: property
};
} // push matching subject onto stack to enable circular embed checks
// subject is also the name of a graph
state.subjectStack.push({
subject: subject,
graph: state.graph
}); // subject is also the name of a graph
if (id in state.graphMap) {
let recurse = false;
let subframe = null;
if (!('@graph' in frame)) {

@@ -167,5 +184,7 @@ recurse = state.graph !== '@merged';

subframe = frame['@graph'][0];
if (!types.isObject(subframe)) {
subframe = {};
}
recurse = !(id === '@merged' || id === '@default');

@@ -176,10 +195,10 @@ }

state.graphStack.push(state.graph);
state.graph = id;
// recurse into graph
state.graph = id; // recurse into graph
api.frame(state, Object.keys(state.graphMap[id]).sort(), [subframe], output, '@graph');
        state.graph = state.graphStack.pop();
}
}
} // iterate over subject properties
// iterate over subject properties
for (const prop of Object.keys(subject).sort()) {

@@ -194,31 +213,38 @@ // copy keywords to output

if (type.indexOf('_:') === 0) {
util.addValue(state.bnodeMap, type, output, { propertyIsArray: true });
util.addValue(state.bnodeMap, type, output, {
propertyIsArray: true
});
}
}
}
continue;
}
} // explicit is on and property isn't in the frame, skip processing
// explicit is on and property isn't in the frame, skip processing
if (flags.explicit && !(prop in frame)) {
continue;
}
} // add objects
// add objects
for (let o of subject[prop]) {
const subframe = prop in frame ? frame[prop] : _createImplicitFrame(flags);
const subframe = prop in frame ? frame[prop] : _createImplicitFrame(flags); // recurse into list
// recurse into list
if (graphTypes.isList(o)) {
// add empty list
const list = { '@list': [] };
_addFrameOutput(output, prop, list);
const list = {
'@list': []
};
// add list objects
_addFrameOutput(output, prop, list); // add list objects
const src = o['@list'];
for (const n in src) {
o = src[n];
if (graphTypes.isSubjectReference(o)) {
const subframe = prop in frame ? frame[prop][0]['@list'] : _createImplicitFrame(flags);
// recurse into subject reference
const subframe = prop in frame ? frame[prop][0]['@list'] : _createImplicitFrame(flags); // recurse into subject reference
api.frame(state, [o['@id']], subframe, list, '@list');

@@ -230,2 +256,3 @@ } else {

}
continue;

@@ -242,5 +269,5 @@ }

}
}
} // handle defaults
// handle defaults
for (const prop of Object.keys(frame).sort()) {

@@ -250,31 +277,42 @@ // skip keywords

continue;
}
} // if omit default is off, then include default values for properties
// that appear in the next frame but are not in the matching subject
// if omit default is off, then include default values for properties
// that appear in the next frame but are not in the matching subject
const next = frame[prop][0] || {};
const omitDefaultOn = _getFrameFlag(next, options, 'omitDefault');
if (!omitDefaultOn && !(prop in output)) {
let preserve = '@null';
if ('@default' in next) {
preserve = util.clone(next['@default']);
}
if (!types.isArray(preserve)) {
preserve = [preserve];
}
output[prop] = [{ '@preserve': preserve }];
output[prop] = [{
'@preserve': preserve
}];
}
}
} // if embed reverse values by finding nodes having this subject as a value
// of the associated property
// if embed reverse values by finding nodes having this subject as a value
// of the associated property
if ('@reverse' in frame) {
for (const reverseProp of Object.keys(frame['@reverse']).sort()) {
const subframe = frame['@reverse'][reverseProp];
for (const subject of Object.keys(state.subjects)) {
const nodeValues = util.getValues(state.subjects[subject], reverseProp);
if (nodeValues.some(v => v['@id'] === id)) {
// node has property referencing this subject, recurse
output['@reverse'] = output['@reverse'] || {};
util.addValue(output['@reverse'], reverseProp, [], { propertyIsArray: true });
util.addValue(output['@reverse'], reverseProp, [], {
propertyIsArray: true
});
api.frame(state, [subject], subframe, output['@reverse'][reverseProp], property);

@@ -284,12 +322,11 @@ }

}
}
} // add output to parent
// add output to parent
_addFrameOutput(parent, property, output);
// pop matching subject from circular ref-checking stack
_addFrameOutput(parent, property, output); // pop matching subject from circular ref-checking stack
state.subjectStack.pop();
}
};
/**

@@ -305,4 +342,7 @@ * Creates an implicit frame when recursing through subject matches. If

*/
function _createImplicitFrame(flags) {
const frame = {};
for (const key in flags) {

@@ -313,5 +353,5 @@ if (flags[key] !== undefined) {

}
return [frame];
}
/**

@@ -327,5 +367,8 @@ * Checks the current subject stack to see if embedding the given subject

*/
function _createsCircularReference(subjectToEmbed, graph, subjectStack) {
for (let i = subjectStack.length - 1; i >= 0; --i) {
const subject = subjectStack[i];
if (subject.graph === graph && subject.subject['@id'] === subjectToEmbed['@id']) {

@@ -335,5 +378,5 @@ return true;

}
return false;
}
/**

@@ -348,5 +391,8 @@ * Gets the frame flag value for the given flag name.

*/
function _getFrameFlag(frame, options, name) {
const flag = '@' + name;
let rval = flag in frame ? frame[flag][0] : options[name];
if (name === 'embed') {

@@ -365,5 +411,5 @@ // default is "@last"

}
return rval;
}
/**

@@ -374,8 +420,11 @@ * Validates a JSON-LD frame, throwing an exception if the frame is invalid.

*/
function _validateFrame(frame) {
if (!types.isArray(frame) || frame.length !== 1 || !types.isObject(frame[0])) {
throw new JsonLdError('Invalid JSON-LD syntax; a JSON-LD frame must be a single object.', 'jsonld.SyntaxError', { frame: frame });
throw new JsonLdError('Invalid JSON-LD syntax; a JSON-LD frame must be a single object.', 'jsonld.SyntaxError', {
frame: frame
});
}
}
/**

@@ -391,7 +440,11 @@ * Returns a map of all of the subjects that match a parsed frame.

*/
function _filterSubjects(state, subjects, frame, flags) {
// filter subjects in @id order
const rval = {};
for (const id of subjects) {
const subject = state.graphMap[state.graph][id];
if (_filterSubject(state, subject, frame, flags)) {

@@ -401,5 +454,5 @@ rval[id] = subject;

}
return rval;
}
/**

@@ -423,2 +476,4 @@ * Returns true if the given subject matches the given frame.

*/
function _filterSubject(state, subject, frame, flags) {

@@ -439,5 +494,5 @@ // check ducktype

}
wildcard = false;
// check @id for a specific @id value
wildcard = false; // check @id for a specific @id value
if (key === '@id') {

@@ -449,7 +504,8 @@ // if @id is not a wildcard and is not empty, then match or not on

}
matchThis = true;
continue;
}
} // check @type (object value means 'any' type, fall through to ducktyping)
// check @type (object value means 'any' type, fall through to ducktyping)
if ('@type' in frame) {

@@ -461,2 +517,3 @@ if (isEmpty) {

}
matchThis = true;

@@ -473,25 +530,27 @@ } else if (frame['@type'].length === 1 && types.isEmptyObject(frame['@type'][0])) {

}
return false;
}
}
}
      } // Force a copy of this frame entry so it can be manipulated
      // Force a copy of this frame entry so it can be manipulated
const thisFrame = util.getValues(frame, key)[0];
let hasDefault = false;
if (thisFrame) {
_validateFrame([thisFrame]);
hasDefault = '@default' in thisFrame;
}
} // no longer a wildcard pattern if frame has any non-keyword properties
// no longer a wildcard pattern if frame has any non-keyword properties
wildcard = false;
// skip, but allow match if node has no value for property, and frame has a
wildcard = false; // skip, but allow match if node has no value for property, and frame has a
// default value
if (nodeValues.length === 0 && hasDefault) {
continue;
}
} // if frame value is empty, don't match if subject has any value
// if frame value is empty, don't match if subject has any value
if (nodeValues.length > 0 && isEmpty) {

@@ -507,2 +566,3 @@ return false;

}
matchThis = true;

@@ -521,2 +581,3 @@ } else if (types.isObject(thisFrame)) {

const listValue = thisFrame['@list'][0];
if (graphTypes.isList(nodeValues[0])) {

@@ -536,5 +597,5 @@ const nodeListValues = nodeValues[0]['@list'];

}
}
} // all non-defaulted values must match if requireAll is set
// all non-defaulted values must match if requireAll is set
if (!matchThis && flags.requireAll) {

@@ -545,8 +606,7 @@ return false;

matchesSome = matchesSome || matchThis;
}
} // return true if wildcard or subject matches some properties
// return true if wildcard or subject matches some properties
return wildcard || matchesSome;
}
/**

@@ -558,2 +618,4 @@ * Removes an existing embed.

*/
function _removeEmbed(state, id) {

@@ -564,8 +626,8 @@ // get existing embed

const parent = embed.parent;
const property = embed.property;
const property = embed.property; // create reference to replace embed
// create reference to replace embed
const subject = { '@id': id };
const subject = {
'@id': id
}; // remove existing embed
// remove existing embed
if (types.isArray(parent)) {

@@ -582,10 +644,15 @@ // replace subject with reference

const useArray = types.isArray(parent[property]);
util.removeValue(parent, property, subject, { propertyIsArray: useArray });
util.addValue(parent, property, subject, { propertyIsArray: useArray });
}
util.removeValue(parent, property, subject, {
propertyIsArray: useArray
});
util.addValue(parent, property, subject, {
propertyIsArray: useArray
});
} // recursively remove dependent dangling embeds
// recursively remove dependent dangling embeds
const removeDependents = id => {
// get embed keys as a separate array to enable deleting keys in map
const ids = Object.keys(embeds);
for (const next of ids) {

@@ -598,5 +665,5 @@ if (next in embeds && types.isObject(embeds[next].parent) && embeds[next].parent['@id'] === id) {

};
removeDependents(id);
}
/**

@@ -609,5 +676,9 @@ * Adds framing output to the given parent.

*/
function _addFrameOutput(parent, property, output) {
if (types.isObject(parent)) {
util.addValue(parent, property, output, { propertyIsArray: true });
util.addValue(parent, property, output, {
propertyIsArray: true
});
} else {

@@ -617,3 +688,2 @@ parent.push(output);

}
/**

@@ -627,2 +697,4 @@ * Node matches if it is a node, and matches the pattern as a frame.

*/
function _nodeMatch(state, pattern, value, flags) {

@@ -632,6 +704,6 @@ if (!('@id' in value)) {

}
const nodeObject = state.subjects[value['@id']];
return nodeObject && _filterSubject(state, nodeObject, pattern, flags);
}
/**

@@ -652,2 +724,4 @@ * Value matches if it is a value and matches the value pattern

*/
function _valueMatch(pattern, value) {

@@ -664,12 +738,16 @@ const v1 = value['@value'];

}
if (!(v2.includes(v1) || types.isEmptyObject(v2[0]))) {
return false;
}
if (!(!t1 && t2.length === 0 || t2.includes(t1) || t1 && types.isEmptyObject(t2[0]))) {
return false;
}
if (!(!l1 && l2.length === 0 || l2.includes(l1) || l1 && types.isEmptyObject(l2[0]))) {
return false;
}
return true;
}

@@ -6,26 +6,33 @@ /*

function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
const graphTypes = require('./graphTypes');
const types = require('./types');
const util = require('./util');
// constants
const util = require('./util'); // constants
var _require = require('./constants');
const RDF_LIST = _require.RDF_LIST,
RDF_FIRST = _require.RDF_FIRST,
RDF_REST = _require.RDF_REST,
RDF_NIL = _require.RDF_NIL,
RDF_TYPE = _require.RDF_TYPE,
XSD_BOOLEAN = _require.XSD_BOOLEAN,
XSD_DOUBLE = _require.XSD_DOUBLE,
XSD_INTEGER = _require.XSD_INTEGER,
XSD_STRING = _require.XSD_STRING;
const {
// RDF,
RDF_LIST,
RDF_FIRST,
RDF_REST,
RDF_NIL,
RDF_TYPE,
// RDF_PLAIN_LITERAL,
// RDF_XML_LITERAL,
// RDF_OBJECT,
// RDF_LANGSTRING,
// XSD,
XSD_BOOLEAN,
XSD_DOUBLE,
XSD_INTEGER,
XSD_STRING
} = require('./constants');
const api = {};
module.exports = api;
/**

@@ -39,11 +46,14 @@ * Converts an RDF dataset to JSON-LD.

*/
api.fromRDF = (() => {
var _ref2 = _asyncToGenerator(function* (dataset, _ref) {
var _ref$useRdfType = _ref.useRdfType;
let useRdfType = _ref$useRdfType === undefined ? false : _ref$useRdfType;
var _ref$useNativeTypes = _ref.useNativeTypes;
let useNativeTypes = _ref$useNativeTypes === undefined ? false : _ref$useNativeTypes;
api.fromRDF =
/*#__PURE__*/
function () {
var _ref = _asyncToGenerator(function* (dataset, {
useRdfType = false,
useNativeTypes = false
}) {
const defaultGraph = {};
const graphMap = { '@default': defaultGraph };
const graphMap = {
'@default': defaultGraph
};
const referencedOnce = {};

@@ -54,12 +64,15 @@

const name = quad.graph.termType === 'DefaultGraph' ? '@default' : quad.graph.value;
if (!(name in graphMap)) {
graphMap[name] = {};
}
if (name !== '@default' && !(name in defaultGraph)) {
defaultGraph[name] = { '@id': name };
defaultGraph[name] = {
'@id': name
};
}
const nodeMap = graphMap[name];
const nodeMap = graphMap[name]; // get subject, predicate, object
// get subject, predicate, object
const s = quad.subject.value;

@@ -70,13 +83,20 @@ const p = quad.predicate.value;

if (!(s in nodeMap)) {
nodeMap[s] = { '@id': s };
nodeMap[s] = {
'@id': s
};
}
const node = nodeMap[s];
const objectIsNode = o.termType.endsWith('Node');
const objectIsNode = o.termType.endsWith('Node');
if (objectIsNode && !(o.value in nodeMap)) {
nodeMap[o.value] = { '@id': o.value };
nodeMap[o.value] = {
'@id': o.value
};
}
if (p === RDF_TYPE && !useRdfType && objectIsNode) {
util.addValue(node, '@type', o.value, { propertyIsArray: true });
util.addValue(node, '@type', o.value, {
propertyIsArray: true
});
continue;

@@ -86,6 +106,8 @@ }

const value = _RDFToObject(o, useNativeTypes);
util.addValue(node, p, value, { propertyIsArray: true });
// object may be an RDF list/partial list node but we can't know easily
util.addValue(node, p, value, {
propertyIsArray: true
}); // object may be an RDF list/partial list node but we can't know easily
// until all triples are read
if (objectIsNode) {

@@ -95,5 +117,7 @@ if (o.value === RDF_NIL) {

const object = nodeMap[o.value];
if (!('usages' in object)) {
object.usages = [];
}
object.usages.push({

@@ -117,3 +141,2 @@ node: node,

}
/*

@@ -177,17 +200,19 @@ for(let name in dataset) {

}*/
// convert linked lists to @list arrays
// convert linked lists to @list arrays
for (const name in graphMap) {
const graphObject = graphMap[name];
const graphObject = graphMap[name]; // no @lists to be converted, continue
// no @lists to be converted, continue
if (!(RDF_NIL in graphObject)) {
continue;
}
} // iterate backwards through each RDF list
// iterate backwards through each RDF list
const nil = graphObject[RDF_NIL];
if (!nil.usages) {
continue;
}
for (let usage of nil.usages) {

@@ -198,5 +223,3 @@ let node = usage.node;

const list = [];
const listNodes = [];
// ensure node is a well-formed list node; it must:
const listNodes = []; // ensure node is a well-formed list node; it must:
// 1. Be referenced only once.

@@ -207,8 +230,9 @@ // 2. Have an array for rdf:first that has 1 item.

// optionally, @type where the value is rdf:List.
let nodeKeyCount = Object.keys(node).length;
while (property === RDF_REST && types.isObject(referencedOnce[node['@id']]) && types.isArray(node[RDF_FIRST]) && node[RDF_FIRST].length === 1 && types.isArray(node[RDF_REST]) && node[RDF_REST].length === 1 && (nodeKeyCount === 3 || nodeKeyCount === 4 && types.isArray(node['@type']) && node['@type'].length === 1 && node['@type'][0] === RDF_LIST)) {
list.push(node[RDF_FIRST][0]);
listNodes.push(node['@id']);
listNodes.push(node['@id']); // get next node, moving backwards through list
// get next node, moving backwards through list
usage = referencedOnce[node['@id']];

@@ -218,11 +242,10 @@ node = usage.node;

head = usage.value;
nodeKeyCount = Object.keys(node).length;
nodeKeyCount = Object.keys(node).length; // if node is not a blank node, then list head found
// if node is not a blank node, then list head found
if (!graphTypes.isBlankNode(node)) {
break;
}
}
} // the list is nested in another list
// the list is nested in another list
if (property === RDF_FIRST) {

@@ -234,13 +257,14 @@ // empty list

continue;
}
} // preserve list head
// preserve list head
head = graphObject[head['@id']][RDF_REST][0];
list.pop();
listNodes.pop();
}
} // transform list into @list object
// transform list into @list object
delete head['@id'];
head['@list'] = list.reverse();
for (const listNode of listNodes) {

@@ -256,4 +280,6 @@ delete graphObject[listNode];

const subjects = Object.keys(defaultGraph).sort();
for (const subject of subjects) {
const node = defaultGraph[subject];
if (subject in graphMap) {

@@ -263,5 +289,6 @@ const graph = node['@graph'] = [];

const graphSubjects = Object.keys(graphObject).sort();
for (const graphSubject of graphSubjects) {
const node = graphObject[graphSubject];
// only add full subjects to top-level
const node = graphObject[graphSubject]; // only add full subjects to top-level
if (!graphTypes.isSubjectReference(node)) {

@@ -271,4 +298,5 @@ graph.push(node);

}
}
// only add full subjects to top-level
} // only add full subjects to top-level
if (!graphTypes.isSubjectReference(node)) {

@@ -283,6 +311,5 @@ result.push(node);

return function (_x, _x2) {
return _ref2.apply(this, arguments);
return _ref.apply(this, arguments);
};
})();
}();
/**

@@ -296,12 +323,17 @@ * Converts an RDF triple object to a JSON-LD object.

*/
function _RDFToObject(o, useNativeTypes) {
// convert NamedNode/BlankNode object to JSON-LD
if (o.termType.endsWith('Node')) {
return { '@id': o.value };
}
return {
'@id': o.value
};
} // convert literal to JSON-LD
// convert literal to JSON-LD
const rval = { '@value': o.value };
// add language
const rval = {
'@value': o.value
}; // add language
if (o.language) {

@@ -311,6 +343,8 @@ rval['@language'] = o.language;

let type = o.datatype.value;
if (!type) {
type = XSD_STRING;
}
// use native types for certain xsd types
} // use native types for certain xsd types
if (useNativeTypes) {

@@ -326,2 +360,3 @@ if (type === XSD_BOOLEAN) {

const i = parseInt(rval['@value'], 10);
if (i.toFixed(0) === rval['@value']) {

@@ -333,4 +368,5 @@ rval['@value'] = i;

}
}
// do not add native type
} // do not add native type
if (![XSD_BOOLEAN, XSD_INTEGER, XSD_DOUBLE, XSD_STRING].includes(type)) {

@@ -337,0 +373,0 @@ rval['@type'] = type;

@@ -10,3 +10,2 @@ /*

module.exports = api;
/**

@@ -19,2 +18,3 @@ * Returns true if the given value is a subject with properties.

*/
api.isSubject = v => {

@@ -29,5 +29,5 @@ // Note: A value is a subject if all of these hold true:

}
return false;
};
/**

@@ -40,8 +40,8 @@ * Returns true if the given value is a subject reference.

*/
api.isSubjectReference = v =>
// Note: A value is a subject reference if all of these hold true:
api.isSubjectReference = v => // Note: A value is a subject reference if all of these hold true:
// 1. It is an Object.
// 2. It has a single key: @id.
types.isObject(v) && Object.keys(v).length === 1 && '@id' in v;
/**

@@ -54,8 +54,8 @@ * Returns true if the given value is a @value.

*/
api.isValue = v =>
// Note: A value is a @value if all of these hold true:
api.isValue = v => // Note: A value is a @value if all of these hold true:
// 1. It is an Object.
// 2. It has the @value property.
types.isObject(v) && '@value' in v;
/**

@@ -68,8 +68,8 @@ * Returns true if the given value is a @list.

*/
api.isList = v =>
// Note: A value is a @list if all of these hold true:
api.isList = v => // Note: A value is a @list if all of these hold true:
// 1. It is an Object.
// 2. It has the @list property.
types.isObject(v) && '@list' in v;
/**

@@ -80,2 +80,4 @@ * Returns true if the given value is a @graph.

*/
api.isGraph = v => {

@@ -88,3 +90,2 @@ // Note: A value is a graph if all of these hold true:

};
/**

@@ -95,2 +96,4 @@ * Returns true if the given value is a simple @graph.

*/
api.isSimpleGraph = v => {

@@ -103,3 +106,2 @@ // Note: A value is a simple graph if all of these hold true:

};
/**

@@ -112,2 +114,4 @@ * Returns true if the given value is a blank node.

*/
api.isBlankNode = v => {

@@ -122,5 +126,7 @@ // Note: A value is a blank node if all of these hold true:

}
return Object.keys(v).length === 0 || !('@value' in v || '@set' in v || '@list' in v);
}
return false;
};
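// Illustrative node shapes for the predicates above (plain values, not from
// the diff): a subject reference has only '@id'; a value object has '@value';
// a list object has '@list'; a blank node uses a '_:' identifier.
const subjectReference = {'@id': 'http://example.com/a'};
const valueObject = {'@value': 'hi', '@language': 'en'};
const listObject = {'@list': [{'@value': 1}]};
const blankNode = {'@id': '_:b0', 'http://example.com/p': [valueObject]};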

@@ -1,2 +0,2 @@

'use strict';
"use strict";

@@ -3,0 +3,0 @@ /**

@@ -1,7 +0,11 @@

'use strict';
"use strict";
function _objectWithoutProperties(obj, keys) { var target = {}; for (var i in obj) { if (keys.indexOf(i) >= 0) continue; if (!Object.prototype.hasOwnProperty.call(obj, i)) continue; target[i] = obj[i]; } return target; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
/**

@@ -42,1135 +46,1192 @@ * A JavaScript implementation of the JSON-LD API.

*/
(function () {
const canonize = require('rdf-canonize');
const canonize = require('rdf-canonize');
const util = require('./util');
const IdentifierIssuer = util.IdentifierIssuer;
const JsonLdError = require('./JsonLdError');
const NQuads = require('./NQuads');
const Rdfa = require('./Rdfa');
const util = require('./util');
var _require = require('./expand');
const IdentifierIssuer = util.IdentifierIssuer;
const _expand = _require.expand;
const JsonLdError = require('./JsonLdError');
var _require2 = require('./flatten');
const NQuads = require('./NQuads');
const _flatten = _require2.flatten;
const Rdfa = require('./Rdfa');
var _require3 = require('./fromRdf');
const {
expand: _expand
} = require('./expand');
const _fromRDF = _require3.fromRDF;
const {
flatten: _flatten
} = require('./flatten');
var _require4 = require('./toRdf');
const {
fromRDF: _fromRDF
} = require('./fromRdf');
const _toRDF = _require4.toRDF;
const {
toRDF: _toRDF
} = require('./toRdf');
var _require5 = require('./frame');
const {
frameMergedOrDefault: _frameMergedOrDefault
} = require('./frame');
const _frameMergedOrDefault = _require5.frameMergedOrDefault;
const {
isArray: _isArray,
isObject: _isObject,
isString: _isString
} = require('./types');
var _require6 = require('./types');
const {
isSubjectReference: _isSubjectReference
} = require('./graphTypes');
const _isArray = _require6.isArray,
_isObject = _require6.isObject,
_isString = _require6.isString;
const {
getInitialContext: _getInitialContext,
process: _processContext,
getAllContexts: _getAllContexts,
expandIri: _expandIri
} = require('./context');
var _require7 = require('./graphTypes');
const {
compact: _compact,
compactIri: _compactIri,
removePreserve: _removePreserve
} = require('./compact');
const _isSubjectReference = _require7.isSubjectReference;
const {
createNodeMap: _createNodeMap,
createMergedNodeMap: _createMergedNodeMap,
mergeNodeMaps: _mergeNodeMaps
} = require('./nodeMap'); // determine if in-browser or using node.js
var _require8 = require('./context');
const _getInitialContext = _require8.getInitialContext,
_processContext = _require8.process,
_getAllContexts = _require8.getAllContexts,
_expandIri = _require8.expandIri;
const _nodejs = typeof process !== 'undefined' && process.versions && process.versions.node;
var _require9 = require('./compact');
const _browser = !_nodejs && (typeof window !== 'undefined' || typeof self !== 'undefined'); // attaches jsonld API to the given object
const _compact = _require9.compact,
_compactIri = _require9.compactIri,
_removePreserve = _require9.removePreserve;
var _require10 = require('./nodeMap');
const wrapper = function (jsonld) {
/* Core API */
const _createNodeMap = _require10.createNodeMap,
_createMergedNodeMap = _require10.createMergedNodeMap,
_mergeNodeMaps = _require10.mergeNodeMaps;
/**
* Performs JSON-LD compaction.
*
* @param input the JSON-LD input to compact.
* @param ctx the context to compact with.
* @param [options] options to use:
* [base] the base IRI to use.
* [compactArrays] true to compact arrays to single values when
* appropriate, false not to (default: true).
* [compactToRelative] true to compact IRIs to be relative to document base,
* false to keep absolute (default: true)
* [graph] true to always output a top-level graph (default: false).
* [expandContext] a context to expand with.
* [skipExpansion] true to assume the input is expanded and skip
* expansion, false not to, defaults to false.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* [expansionMap(info)] a function that can be used to custom map
* unmappable values (or to throw an error when they are detected);
* if this function returns `undefined` then the default behavior
* will be used.
   *          [framing] true if compaction is occurring during a framing operation.
* [compactionMap(info)] a function that can be used to custom map
* unmappable values (or to throw an error when they are detected);
* if this function returns `undefined` then the default behavior
* will be used.
* @param [callback(err, compacted)] called once the operation completes.
*
* @return a Promise that resolves to the compacted output.
*/
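// Usage sketch for the compaction API documented above (illustrative; the
// IRIs and context are made up, and the Promise form of jsonld 1.x is assumed):
const jsonld = require('jsonld');
const expandedDoc = [{'http://schema.org/name': [{'@value': 'Jane Doe'}]}];
const ctx = {'@context': {name: 'http://schema.org/name'}};
jsonld.compact(expandedDoc, ctx).then(compacted => {
  // compacted is roughly {'@context': {name: ...}, name: 'Jane Doe'}
});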
jsonld.compact = util.callbackify(
/*#__PURE__*/
function () {
var _ref = _asyncToGenerator(function* (input, ctx, options) {
if (arguments.length < 2) {
throw new TypeError('Could not compact, too few arguments.');
}
// determine if in-browser or using node.js
if (ctx === null) {
throw new JsonLdError('The compaction context must not be null.', 'jsonld.CompactError', {
code: 'invalid local context'
});
} // nothing to compact
const _nodejs = typeof process !== 'undefined' && process.versions && process.versions.node;
const _browser = !_nodejs && (typeof window !== 'undefined' || typeof self !== 'undefined');
// attaches jsonld API to the given object
const wrapper = function (jsonld) {
if (input === null) {
return null;
} // set default options
/* Core API */
/**
* Performs JSON-LD compaction.
*
* @param input the JSON-LD input to compact.
* @param ctx the context to compact with.
* @param [options] options to use:
* [base] the base IRI to use.
* [compactArrays] true to compact arrays to single values when
* appropriate, false not to (default: true).
* [compactToRelative] true to compact IRIs to be relative to document base,
* false to keep absolute (default: true)
* [graph] true to always output a top-level graph (default: false).
* [expandContext] a context to expand with.
* [skipExpansion] true to assume the input is expanded and skip
* expansion, false not to, defaults to false.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* [expansionMap(info)] a function that can be used to custom map
* unmappable values (or to throw an error when they are detected);
* if this function returns `undefined` then the default behavior
* will be used.
     *          [framing] true if compaction is occurring during a framing operation.
* [compactionMap(info)] a function that can be used to custom map
* unmappable values (or to throw an error when they are detected);
* if this function returns `undefined` then the default behavior
* will be used.
* @param [callback(err, compacted)] called once the operation completes.
*
* @return a Promise that resolves to the compacted output.
*/
jsonld.compact = util.callbackify((() => {
var _ref = _asyncToGenerator(function* (input, ctx, options) {
if (arguments.length < 2) {
throw new TypeError('Could not compact, too few arguments.');
}
options = _setDefaults(options, {
base: _isString(input) ? input : '',
compactArrays: true,
compactToRelative: true,
graph: false,
skipExpansion: false,
link: false,
issuer: new IdentifierIssuer('_:b')
});
if (ctx === null) {
throw new JsonLdError('The compaction context must not be null.', 'jsonld.CompactError', { code: 'invalid local context' });
}
if (options.link) {
// force skip expansion when linking, "link" is not part of the public
// API, it should only be called from framing
options.skipExpansion = true;
}
// nothing to compact
if (input === null) {
return null;
}
if (!options.compactToRelative) {
delete options.base;
} // expand input
// set default options
options = _setDefaults(options, {
base: _isString(input) ? input : '',
compactArrays: true,
compactToRelative: true,
graph: false,
skipExpansion: false,
link: false,
issuer: new IdentifierIssuer('_:b')
});
if (options.link) {
// force skip expansion when linking, "link" is not part of the public
// API, it should only be called from framing
options.skipExpansion = true;
let expanded;
if (options.skipExpansion) {
expanded = input;
} else {
expanded = yield jsonld.expand(input, options);
} // process context
const activeCtx = yield jsonld.processContext(_getInitialContext(options), ctx, options); // do compaction
let compacted = _compact({
activeCtx,
element: expanded,
options,
compactionMap: options.compactionMap
}); // perform clean up
if (options.compactArrays && !options.graph && _isArray(compacted)) {
if (compacted.length === 1) {
// simplify to a single item
compacted = compacted[0];
} else if (compacted.length === 0) {
// simplify to an empty object
compacted = {};
}
if (!options.compactToRelative) {
delete options.base;
}
} else if (options.graph && _isObject(compacted)) {
// always use array if graph option is on
compacted = [compacted];
} // follow @context key
// expand input
let expanded;
if (options.skipExpansion) {
expanded = input;
} else {
expanded = yield jsonld.expand(input, options);
if (_isObject(ctx) && '@context' in ctx) {
ctx = ctx['@context'];
} // build output context
ctx = util.clone(ctx);
if (!_isArray(ctx)) {
ctx = [ctx];
} // remove empty contexts
const tmp = ctx;
ctx = [];
for (let i = 0; i < tmp.length; ++i) {
if (!_isObject(tmp[i]) || Object.keys(tmp[i]).length > 0) {
ctx.push(tmp[i]);
}
} // remove array if only one context
// process context
const activeCtx = yield jsonld.processContext(_getInitialContext(options), ctx, options);
// do compaction
let compacted = _compact({
const hasContext = ctx.length > 0;
if (ctx.length === 1) {
ctx = ctx[0];
} // add context and/or @graph
if (_isArray(compacted)) {
// use '@graph' keyword
const graphAlias = _compactIri({
activeCtx,
element: expanded,
options,
compactionMap: options.compactionMap
iri: '@graph',
relativeTo: {
vocab: true
}
});
// perform clean up
if (options.compactArrays && !options.graph && _isArray(compacted)) {
if (compacted.length === 1) {
// simplify to a single item
compacted = compacted[0];
} else if (compacted.length === 0) {
// simplify to an empty object
compacted = {};
}
} else if (options.graph && _isObject(compacted)) {
// always use array if graph option is on
compacted = [compacted];
}
const graph = compacted;
compacted = {};
// follow @context key
if (_isObject(ctx) && '@context' in ctx) {
ctx = ctx['@context'];
if (hasContext) {
compacted['@context'] = ctx;
}
// build output context
ctx = util.clone(ctx);
if (!_isArray(ctx)) {
ctx = [ctx];
}
// remove empty contexts
const tmp = ctx;
ctx = [];
for (let i = 0; i < tmp.length; ++i) {
if (!_isObject(tmp[i]) || Object.keys(tmp[i]).length > 0) {
ctx.push(tmp[i]);
}
}
compacted[graphAlias] = graph;
} else if (_isObject(compacted) && hasContext) {
// reorder keys so @context is first
const graph = compacted;
compacted = {
'@context': ctx
};
// remove array if only one context
const hasContext = ctx.length > 0;
if (ctx.length === 1) {
ctx = ctx[0];
for (let key in graph) {
compacted[key] = graph[key];
}
}
// add context and/or @graph
if (_isArray(compacted)) {
// use '@graph' keyword
const graphAlias = _compactIri({ activeCtx, iri: '@graph', relativeTo: { vocab: true } });
const graph = compacted;
compacted = {};
if (hasContext) {
compacted['@context'] = ctx;
if (options.framing) {
// get graph alias
const graph = _compactIri({
activeCtx,
iri: '@graph',
relativeTo: {
vocab: true
}
compacted[graphAlias] = graph;
} else if (_isObject(compacted) && hasContext) {
// reorder keys so @context is first
const graph = compacted;
compacted = { '@context': ctx };
for (let key in graph) {
compacted[key] = graph[key];
}
}
}); // remove @preserve from results
if (options.framing) {
// get graph alias
const graph = _compactIri({ activeCtx, iri: '@graph', relativeTo: { vocab: true } });
// remove @preserve from results
options.link = {};
compacted[graph] = _removePreserve(activeCtx, compacted[graph], options);
}
return compacted;
options.link = {};
compacted[graph] = _removePreserve(activeCtx, compacted[graph], options);
}
return compacted;
});
return function (_x, _x2, _x3) {
return _ref.apply(this, arguments);
};
}());
/**
* Performs JSON-LD expansion.
*
* @param input the JSON-LD input to expand.
* @param [options] the options to use:
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [keepFreeFloatingNodes] true to keep free-floating nodes,
* false not to, defaults to false.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* [expansionMap(info)] a function that can be used to custom map
* unmappable values (or to throw an error when they are detected);
* if this function returns `undefined` then the default behavior
* will be used.
* @param [callback(err, expanded)] called once the operation completes.
*
* @return a Promise that resolves to the expanded output.
*/
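// Usage sketch for the expansion API documented above (illustrative values):
const jsonld = require('jsonld');
const doc = {
  '@context': {name: 'http://schema.org/name'},
  name: 'Jane Doe'
};
jsonld.expand(doc).then(expanded => {
  // expanded is roughly [{'http://schema.org/name': [{'@value': 'Jane Doe'}]}]
});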
jsonld.expand = util.callbackify(
/*#__PURE__*/
function () {
var _ref2 = _asyncToGenerator(function* (input, options) {
if (arguments.length < 1) {
throw new TypeError('Could not expand, too few arguments.');
} // set default options
options = _setDefaults(options, {
keepFreeFloatingNodes: false
});
return function (_x, _x2, _x3) {
return _ref.apply(this, arguments);
};
})());
if (options.expansionMap === false) {
options.expansionMap = undefined;
} // build set of objects that may have @contexts to resolve
/**
* Performs JSON-LD expansion.
*
* @param input the JSON-LD input to expand.
* @param [options] the options to use:
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [keepFreeFloatingNodes] true to keep free-floating nodes,
* false not to, defaults to false.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* [expansionMap(info)] a function that can be used to custom map
* unmappable values (or to throw an error when they are detected);
* if this function returns `undefined` then the default behavior
* will be used.
* @param [callback(err, expanded)] called once the operation completes.
*
* @return a Promise that resolves to the expanded output.
*/
jsonld.expand = util.callbackify((() => {
var _ref2 = _asyncToGenerator(function* (input, options) {
if (arguments.length < 1) {
throw new TypeError('Could not expand, too few arguments.');
}
// set default options
options = _setDefaults(options, {
keepFreeFloatingNodes: false
});
if (options.expansionMap === false) {
options.expansionMap = undefined;
}
const toResolve = {}; // build set of contexts to process prior to expansion
// build set of objects that may have @contexts to resolve
const toResolve = {};
const contextsToProcess = []; // if an `expandContext` has been given ensure it gets resolved
// build set of contexts to process prior to expansion
const contextsToProcess = [];
if ('expandContext' in options) {
const expandContext = util.clone(options.expandContext);
// if an `expandContext` has been given ensure it gets resolved
if ('expandContext' in options) {
const expandContext = util.clone(options.expandContext);
if (_isObject(expandContext) && '@context' in expandContext) {
toResolve.expandContext = expandContext;
} else {
toResolve.expandContext = { '@context': expandContext };
}
contextsToProcess.push(toResolve.expandContext);
}
// if input is a string, attempt to dereference remote document
let defaultBase;
if (!_isString(input)) {
// input is not a URL, do not need to retrieve it first
toResolve.input = util.clone(input);
if (_isObject(expandContext) && '@context' in expandContext) {
toResolve.expandContext = expandContext;
} else {
// load remote doc
const remoteDoc = yield jsonld.get(input, options);
defaultBase = remoteDoc.documentUrl;
toResolve.input = remoteDoc.document;
if (remoteDoc.contextUrl) {
// context included in HTTP link header and must be resolved
toResolve.remoteContext = { '@context': remoteDoc.contextUrl };
contextsToProcess.push(toResolve.remoteContext);
}
toResolve.expandContext = {
'@context': expandContext
};
}
// set default base
if (!('base' in options)) {
options.base = defaultBase || '';
contextsToProcess.push(toResolve.expandContext);
} // if input is a string, attempt to dereference remote document
let defaultBase;
if (!_isString(input)) {
// input is not a URL, do not need to retrieve it first
toResolve.input = util.clone(input);
} else {
// load remote doc
const remoteDoc = yield jsonld.get(input, options);
defaultBase = remoteDoc.documentUrl;
toResolve.input = remoteDoc.document;
if (remoteDoc.contextUrl) {
// context included in HTTP link header and must be resolved
toResolve.remoteContext = {
'@context': remoteDoc.contextUrl
};
contextsToProcess.push(toResolve.remoteContext);
}
} // set default base
// get all contexts in `toResolve`
yield _getAllContexts(toResolve, options);
// process any additional contexts
let activeCtx = _getInitialContext(options);
contextsToProcess.forEach(function (localCtx) {
activeCtx = _processContext({ activeCtx, localCtx, options });
});
if (!('base' in options)) {
options.base = defaultBase || '';
} // get all contexts in `toResolve`
// expand resolved input
let expanded = _expand({
yield _getAllContexts(toResolve, options); // process any additional contexts
let activeCtx = _getInitialContext(options);
contextsToProcess.forEach(localCtx => {
activeCtx = _processContext({
activeCtx,
element: toResolve.input,
options,
expansionMap: options.expansionMap
localCtx,
options
});
}); // expand resolved input
// optimize away @graph with no other properties
if (_isObject(expanded) && '@graph' in expanded && Object.keys(expanded).length === 1) {
expanded = expanded['@graph'];
} else if (expanded === null) {
expanded = [];
}
let expanded = _expand({
activeCtx,
element: toResolve.input,
options,
expansionMap: options.expansionMap
}); // optimize away @graph with no other properties
// normalize to an array
if (!_isArray(expanded)) {
expanded = [expanded];
}
return expanded;
});
if (_isObject(expanded) && '@graph' in expanded && Object.keys(expanded).length === 1) {
expanded = expanded['@graph'];
} else if (expanded === null) {
expanded = [];
} // normalize to an array
return function (_x4, _x5) {
return _ref2.apply(this, arguments);
};
})());
/**
* Performs JSON-LD flattening.
*
* @param input the JSON-LD to flatten.
* @param ctx the context to use to compact the flattened output, or null.
* @param [options] the options to use:
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, flattened)] called once the operation completes.
*
* @return a Promise that resolves to the flattened output.
*/
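// Usage sketch for the flattening API documented above (illustrative values):
const jsonld = require('jsonld');
const doc = {
  '@context': {knows: {'@id': 'http://schema.org/knows', '@type': '@id'}},
  '@id': 'http://example.com/a',
  knows: {'@id': 'http://example.com/b'}
};
jsonld.flatten(doc, doc['@context']).then(flattened => {
  // with a non-null ctx the result is compacted with that context and the
  // flattened node objects appear under '@graph'
});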
jsonld.flatten = util.callbackify((() => {
var _ref3 = _asyncToGenerator(function* (input, ctx, options) {
if (arguments.length < 1) {
return new TypeError('Could not flatten, too few arguments.');
}
if (!_isArray(expanded)) {
expanded = [expanded];
}
if (typeof ctx === 'function') {
ctx = null;
} else {
ctx = ctx || null;
}
return expanded;
});
// set default options
options = _setDefaults(options, {
base: _isString(input) ? input : ''
});
return function (_x4, _x5) {
return _ref2.apply(this, arguments);
};
}());
/**
* Performs JSON-LD flattening.
*
* @param input the JSON-LD to flatten.
* @param ctx the context to use to compact the flattened output, or null.
* @param [options] the options to use:
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, flattened)] called once the operation completes.
*
* @return a Promise that resolves to the flattened output.
*/
// expand input
const expanded = yield jsonld.expand(input, options);
jsonld.flatten = util.callbackify(
/*#__PURE__*/
function () {
var _ref3 = _asyncToGenerator(function* (input, ctx, options) {
if (arguments.length < 1) {
return new TypeError('Could not flatten, too few arguments.');
}
// do flattening
const flattened = _flatten(expanded);
if (typeof ctx === 'function') {
ctx = null;
} else {
ctx = ctx || null;
} // set default options
if (ctx === null) {
// no compaction required
return flattened;
}
// compact result (force @graph option to true, skip expansion)
options.graph = true;
options.skipExpansion = true;
const compacted = yield jsonld.compact(flattened, ctx, options);
options = _setDefaults(options, {
base: _isString(input) ? input : ''
}); // expand input
return compacted;
});
const expanded = yield jsonld.expand(input, options); // do flattening
return function (_x6, _x7, _x8) {
return _ref3.apply(this, arguments);
};
})());
const flattened = _flatten(expanded);
/**
* Performs JSON-LD framing.
*
* @param input the JSON-LD input to frame.
* @param frame the JSON-LD frame to use.
* @param [options] the framing options.
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [embed] default @embed flag: '@last', '@always', '@never', '@link'
* (default: '@last').
* [explicit] default @explicit flag (default: false).
* [requireAll] default @requireAll flag (default: true).
* [omitDefault] default @omitDefault flag (default: false).
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, framed)] called once the operation completes.
*
* @return a Promise that resolves to the framed output.
*/
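// Usage sketch for the framing API documented above (illustrative frame/data):
const jsonld = require('jsonld');
const doc = {
  '@context': {name: 'http://schema.org/name'},
  '@id': 'http://example.com/a',
  name: 'Jane Doe'
};
const frame = {
  '@context': {name: 'http://schema.org/name'},
  '@id': 'http://example.com/a'
};
jsonld.frame(doc, frame).then(framed => {
  // framed contains the matching subject(s) shaped according to the frame,
  // typically under '@graph'
});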
jsonld.frame = util.callbackify((() => {
var _ref4 = _asyncToGenerator(function* (input, frame, options) {
if (arguments.length < 2) {
throw new TypeError('Could not frame, too few arguments.');
}
if (ctx === null) {
// no compaction required
return flattened;
} // compact result (force @graph option to true, skip expansion)
// set default options
options = _setDefaults(options, {
base: _isString(input) ? input : '',
embed: '@last',
explicit: false,
requireAll: true,
omitDefault: false,
pruneBlankNodeIdentifiers: true,
bnodesToClear: []
});
// if frame is a string, attempt to dereference remote document
if (_isString(frame)) {
// load remote doc
const remoteDoc = yield jsonld.get(frame, options);
frame = remoteDoc.document;
options.graph = true;
options.skipExpansion = true;
const compacted = yield jsonld.compact(flattened, ctx, options);
return compacted;
});
if (remoteDoc.contextUrl) {
// inject link header @context into frame
let ctx = frame['@context'];
if (!ctx) {
ctx = remoteDoc.contextUrl;
} else if (_isArray(ctx)) {
ctx.push(remoteDoc.contextUrl);
} else {
ctx = [ctx, remoteDoc.contextUrl];
}
frame['@context'] = ctx;
return function (_x6, _x7, _x8) {
return _ref3.apply(this, arguments);
};
}());
/**
* Performs JSON-LD framing.
*
* @param input the JSON-LD input to frame.
* @param frame the JSON-LD frame to use.
* @param [options] the framing options.
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [embed] default @embed flag: '@last', '@always', '@never', '@link'
* (default: '@last').
* [explicit] default @explicit flag (default: false).
* [requireAll] default @requireAll flag (default: true).
* [omitDefault] default @omitDefault flag (default: false).
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, framed)] called once the operation completes.
*
* @return a Promise that resolves to the framed output.
*/
jsonld.frame = util.callbackify(
/*#__PURE__*/
function () {
var _ref4 = _asyncToGenerator(function* (input, frame, options) {
if (arguments.length < 2) {
throw new TypeError('Could not frame, too few arguments.');
} // set default options
options = _setDefaults(options, {
base: _isString(input) ? input : '',
embed: '@last',
explicit: false,
requireAll: true,
omitDefault: false,
pruneBlankNodeIdentifiers: true,
bnodesToClear: []
}); // if frame is a string, attempt to dereference remote document
if (_isString(frame)) {
// load remote doc
const remoteDoc = yield jsonld.get(frame, options);
frame = remoteDoc.document;
if (remoteDoc.contextUrl) {
// inject link header @context into frame
let ctx = frame['@context'];
if (!ctx) {
ctx = remoteDoc.contextUrl;
} else if (_isArray(ctx)) {
ctx.push(remoteDoc.contextUrl);
} else {
ctx = [ctx, remoteDoc.contextUrl];
}
frame['@context'] = ctx;
}
}
let frameContext = frame ? frame['@context'] || {} : {};
let frameContext = frame ? frame['@context'] || {} : {}; // expand input
// expand input
const expanded = yield jsonld.expand(input, options);
const expanded = yield jsonld.expand(input, options); // expand frame
// expand frame
const opts = util.clone(options);
opts.isFrame = true;
opts.keepFreeFloatingNodes = true;
const expandedFrame = yield jsonld.expand(frame, opts);
const opts = util.clone(options);
opts.isFrame = true;
opts.keepFreeFloatingNodes = true;
const expandedFrame = yield jsonld.expand(frame, opts); // if the unexpanded frame includes a key expanding to @graph, frame the default graph, otherwise, the merged graph
// if the unexpanded frame includes a key expanding to @graph, frame the default graph, otherwise, the merged graph
let framed;
// FIXME should look for aliases of @graph
opts.merged = !('@graph' in frame);
// do framing
framed = _frameMergedOrDefault(expanded, expandedFrame, opts);
let framed; // FIXME should look for aliases of @graph
// compact result (force @graph option to true, skip expansion,
// check for linked embeds)
opts.graph = true;
opts.skipExpansion = true;
opts.link = {};
opts.framing = true;
const compacted = yield jsonld.compact(framed, frameContext, opts);
opts.merged = !('@graph' in frame); // do framing
return compacted;
});
framed = _frameMergedOrDefault(expanded, expandedFrame, opts); // compact result (force @graph option to true, skip expansion,
// check for linked embeds)
return function (_x9, _x10, _x11) {
return _ref4.apply(this, arguments);
};
})());
opts.graph = true;
opts.skipExpansion = true;
opts.link = {};
opts.framing = true;
const compacted = yield jsonld.compact(framed, frameContext, opts);
return compacted;
});
/**
* **Experimental**
*
* Links a JSON-LD document's nodes in memory.
*
* @param input the JSON-LD document to link.
* @param [ctx] the JSON-LD context to apply.
* @param [options] the options to use:
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, linked)] called once the operation completes.
*
* @return a Promise that resolves to the linked output.
*/
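// Usage sketch for the experimental link API documented above (illustrative):
const jsonld = require('jsonld');
const doc = {
  '@context': {knows: {'@id': 'http://schema.org/knows', '@type': '@id'}},
  '@id': 'http://example.com/a',
  knows: {'@id': 'http://example.com/b'}
};
jsonld.link(doc, doc['@context']).then(linked => {
  // equivalent to framing with a wildcard frame and '@embed': '@link', so
  // nodes that reference each other share the same in-memory objects
});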
jsonld.link = util.callbackify((() => {
var _ref5 = _asyncToGenerator(function* (input, ctx, options) {
// API matches running frame with a wildcard frame and embed: '@link'
// get arguments
const frame = {};
if (ctx) {
frame['@context'] = ctx;
}
frame['@embed'] = '@link';
return jsonld.frame(input, frame, options);
});
return function (_x9, _x10, _x11) {
return _ref4.apply(this, arguments);
};
}());
/**
* **Experimental**
*
* Links a JSON-LD document's nodes in memory.
*
* @param input the JSON-LD document to link.
* @param [ctx] the JSON-LD context to apply.
* @param [options] the options to use:
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, linked)] called once the operation completes.
*
* @return a Promise that resolves to the linked output.
*/
return function (_x12, _x13, _x14) {
return _ref5.apply(this, arguments);
};
})());
jsonld.link = util.callbackify(
/*#__PURE__*/
function () {
var _ref5 = _asyncToGenerator(function* (input, ctx, options) {
// API matches running frame with a wildcard frame and embed: '@link'
// get arguments
const frame = {};
/**
* Performs RDF dataset normalization on the given input. The input is JSON-LD
* unless the 'inputFormat' option is used. The output is an RDF dataset
* unless the 'format' option is used.
*
* @param input the input to normalize as JSON-LD or as a format specified by
* the 'inputFormat' option.
* @param [options] the options to use:
* [algorithm] the normalization algorithm to use, `URDNA2015` or
* `URGNA2012` (default: `URGNA2012`).
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [skipExpansion] true to assume the input is expanded and skip
* expansion, false not to, defaults to false.
* [inputFormat] the format if input is not JSON-LD:
* 'application/n-quads' for N-Quads.
* [format] the format if output is a string:
* 'application/n-quads' for N-Quads.
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, normalized)] called once the operation completes.
*
* @return a Promise that resolves to the normalized output.
*/
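// Usage sketch for canonization as documented above (illustrative; note that
// the implementation below sets the default algorithm to URDNA2015):
const jsonld = require('jsonld');
const doc = {
  '@context': {name: 'http://schema.org/name'},
  '@id': 'http://example.com/a',
  name: 'Jane Doe'
};
jsonld.canonize(doc, {algorithm: 'URDNA2015', format: 'application/n-quads'}).then(canonical => {
  // canonical is a deterministic N-Quads string suitable for hashing/signing
});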
jsonld.normalize = jsonld.canonize = util.callbackify(
/*#__PURE__*/
function () {
  var _ref6 = _asyncToGenerator(function* (input, options) {
    if (arguments.length < 1) {
      throw new TypeError('Could not canonize, too few arguments.');
    } // set default options


    options = _setDefaults(options, {
      base: _isString(input) ? input : '',
      algorithm: 'URDNA2015',
      skipExpansion: false
    });

    if ('inputFormat' in options) {
      if (options.inputFormat !== 'application/n-quads' && options.inputFormat !== 'application/nquads') {
        throw new JsonLdError('Unknown canonicalization input format.', 'jsonld.CanonizeError');
      } // TODO: `await` for async parsers


      const parsedInput = NQuads.parse(input); // do canonicalization

      return canonize.canonize(parsedInput, options);
    } // convert to RDF dataset then do normalization


    const opts = util.clone(options);
    delete opts.format;
    opts.produceGeneralizedRdf = false;
    const dataset = yield jsonld.toRDF(input, opts); // do canonicalization

    return canonize.canonize(dataset, options);
  });

  return function (_x15, _x16) {
    return _ref6.apply(this, arguments);
  };
}());
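// Usage sketch (illustrative; `doc` is any JSON-LD document). URDNA2015 is the
// default algorithm set above, and requesting N-Quads output yields a string:
//
//   const canonical = await jsonld.canonize(doc, {
//     algorithm: 'URDNA2015',
//     format: 'application/n-quads'
//   });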
/**
* Converts an RDF dataset to JSON-LD.
*
* @param dataset a serialized string of RDF in a format specified by the
* format option or an RDF dataset to convert.
* @param [options] the options to use:
* [format] the format if dataset param must first be parsed:
* 'application/n-quads' for N-Quads (default).
* [rdfParser] a custom RDF-parser to use to parse the dataset.
* [useRdfType] true to use rdf:type, false to use @type
* (default: false).
* [useNativeTypes] true to convert XSD types into native types
* (boolean, integer, double), false not to (default: false).
* @param [callback(err, output)] called once the operation completes.
*
* @return a Promise that resolves to the JSON-LD document.
*/
jsonld.fromRDF = util.callbackify(
/*#__PURE__*/
function () {
  var _ref7 = _asyncToGenerator(function* (dataset, options) {
    if (arguments.length < 1) {
      throw new TypeError('Could not convert from RDF, too few arguments.');
    } // set default options


    options = _setDefaults(options, {
      format: _isString(dataset) ? 'application/n-quads' : undefined
    });

    let {
      format,
      rdfParser
    } = options; // handle special format

    if (format) {
      // check supported formats
      rdfParser = rdfParser || _rdfParsers[format];

      if (!rdfParser) {
        throw new JsonLdError('Unknown input format.', 'jsonld.UnknownFormat', {
          format
        });
      }
    } else {
      // no-op parser, assume dataset already parsed
      rdfParser = () => dataset;
    } // TODO: call `normalizeAsyncFn` on parser fn
    // rdfParser can be callback, promise-based, or synchronous


    let parsedDataset;

    if (rdfParser.length > 1) {
      // convert callback-based rdf parser to promise-based
      parsedDataset = new Promise((resolve, reject) => {
        rdfParser(dataset, (err, dataset) => {
          if (err) {
            reject(err);
          } else {
            resolve(dataset);
          }
        });
      });
    } else {
      parsedDataset = Promise.resolve(rdfParser(dataset));
    }

    parsedDataset = yield parsedDataset; // back-compat with old parsers that produced legacy dataset format

    if (!Array.isArray(parsedDataset)) {
      parsedDataset = NQuads.legacyDatasetToQuads(parsedDataset);
    }

    return _fromRDF(parsedDataset, options);
  });

  return function (_x17, _x18) {
    return _ref7.apply(this, arguments);
  };
}());
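// Usage sketch (illustrative; the N-Quads string is a stand-in for real data):
//
//   const nquads = '<http://example.org/a> <http://example.org/b> "c" .\n';
//   const doc = await jsonld.fromRDF(nquads, {format: 'application/n-quads'});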
/**
* Outputs the RDF dataset found in the given JSON-LD object.
*
* @param input the JSON-LD input.
* @param [options] the options to use:
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [skipExpansion] true to assume the input is expanded and skip
* expansion, false not to, defaults to false.
* [format] the format to use to output a string:
* 'application/n-quads' for N-Quads.
* [produceGeneralizedRdf] true to output generalized RDF, false
* to produce only standard RDF (default: false).
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, dataset)] called once the operation completes.
*
* @return a Promise that resolves to the RDF dataset.
*/
jsonld.toRDF = util.callbackify(
/*#__PURE__*/
function () {
  var _ref8 = _asyncToGenerator(function* (input, options) {
    if (arguments.length < 1) {
      throw new TypeError('Could not convert to RDF, too few arguments.');
    } // set default options


    options = _setDefaults(options, {
      base: _isString(input) ? input : '',
      skipExpansion: false
    }); // TODO: support toRDF custom map?

    let expanded;

    if (options.skipExpansion) {
      expanded = input;
    } else {
      // expand input
      expanded = yield jsonld.expand(input, options);
    } // output RDF dataset


    const dataset = _toRDF(expanded, options);

    if (options.format) {
      if (options.format === 'application/n-quads' || options.format === 'application/nquads') {
        return yield NQuads.serialize(dataset);
      }

      throw new JsonLdError('Unknown output format.', 'jsonld.UnknownFormat', {
        format: options.format
      });
    }

    return dataset;
  });

  return function (_x19, _x20) {
    return _ref8.apply(this, arguments);
  };
}());
/**
 * **Experimental**
 *
 * Recursively flattens the nodes in the given JSON-LD input into a merged
 * map of node ID => node. All graphs will be merged into the default graph.
 *
 * @param input the JSON-LD input.
 * @param [options] the options to use:
 *          [base] the base IRI to use.
 *          [expandContext] a context to expand with.
 *          [issuer] a jsonld.IdentifierIssuer to use to label blank nodes.
 *          [documentLoader(url, callback(err, remoteDoc))] the document loader.
 * @param [callback(err, nodeMap)] called once the operation completes.
 *
 * @return a Promise that resolves to the merged node map.
 */

jsonld.createNodeMap = util.callbackify(
/*#__PURE__*/
function () {
  var _ref9 = _asyncToGenerator(function* (input, options) {
    if (arguments.length < 1) {
      throw new TypeError('Could not create node map, too few arguments.');
    } // set default options


    options = _setDefaults(options, {
      base: _isString(input) ? input : ''
    }); // expand input

    const expanded = yield jsonld.expand(input, options);
    return _createMergedNodeMap(expanded, options);
  });

  return function (_x21, _x22) {
    return _ref9.apply(this, arguments);
  };
}());
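// Usage sketch (illustrative): the resolved value maps each node's @id to its
// flattened node object, with all graphs merged into the default graph:
//
//   const nodeMap = await jsonld.createNodeMap(doc, {});
//   // nodeMap['http://example.org/a'] => {'@id': 'http://example.org/a', ...}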
/**
* **Experimental**
*
* Merges two or more JSON-LD documents into a single flattened document.
*
* @param docs the JSON-LD documents to merge together.
* @param ctx the context to use to compact the merged result, or null.
* @param [options] the options to use:
* [base] the base IRI to use.
* [expandContext] a context to expand with.
* [issuer] a jsonld.IdentifierIssuer to use to label blank nodes.
* [mergeNodes] true to merge properties for nodes with the same ID,
* false to ignore new properties for nodes with the same ID once
* the ID has been defined; note that this may not prevent merging
* new properties where a node is in the `object` position
* (default: true).
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, merged)] called once the operation completes.
*
* @return a Promise that resolves to the merged output.
*/
jsonld.merge = util.callbackify(
/*#__PURE__*/
function () {
  var _ref10 = _asyncToGenerator(function* (docs, ctx, options) {
    if (arguments.length < 1) {
      throw new TypeError('Could not merge, too few arguments.');
    }

    if (!_isArray(docs)) {
      throw new TypeError('Could not merge, "docs" must be an array.');
    }

    if (typeof ctx === 'function') {
      ctx = null;
    } else {
      ctx = ctx || null;
    } // set default options


    options = _setDefaults(options, {}); // expand all documents

    const expanded = yield Promise.all(docs.map(doc => {
      const opts = Object.assign({}, options);
      return jsonld.expand(doc, opts);
    }));
    let mergeNodes = true;

    if ('mergeNodes' in options) {
      mergeNodes = options.mergeNodes;
    }

    const issuer = options.issuer || new IdentifierIssuer('_:b');
    const graphs = {
      '@default': {}
    };

    for (let i = 0; i < expanded.length; ++i) {
      // uniquely relabel blank nodes
      const doc = util.relabelBlankNodes(expanded[i], {
        issuer: new IdentifierIssuer('_:b' + i + '-')
      }); // add nodes to the shared node map graphs if merging nodes, to a
      // separate graph set if not

      const _graphs = mergeNodes || i === 0 ? graphs : {
        '@default': {}
      };

      _createNodeMap(doc, _graphs, '@default', issuer);

      if (_graphs !== graphs) {
        // merge document graphs but don't merge existing nodes
        for (let graphName in _graphs) {
          const _nodeMap = _graphs[graphName];

          if (!(graphName in graphs)) {
            graphs[graphName] = _nodeMap;
            continue;
          }

          const nodeMap = graphs[graphName];

          for (let key in _nodeMap) {
            if (!(key in nodeMap)) {
              nodeMap[key] = _nodeMap[key];
            }
          }
        }
      }
    } // add all non-default graphs to default graph


    const defaultGraph = _mergeNodeMaps(graphs); // produce flattened output

    const flattened = [];
    const keys = Object.keys(defaultGraph).sort();

    for (let ki = 0; ki < keys.length; ++ki) {
      const node = defaultGraph[keys[ki]]; // only add full subjects to top-level

      if (!_isSubjectReference(node)) {
        flattened.push(node);
      }
    }

    if (ctx === null) {
      return flattened;
    } // compact result (force @graph option to true, skip expansion)


    options.graph = true;
    options.skipExpansion = true;
    const compacted = yield jsonld.compact(flattened, ctx, options);
    return compacted;
  });

  return function (_x23, _x24, _x25) {
    return _ref10.apply(this, arguments);
  };
}());
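// Usage sketch (illustrative; `doc1`/`doc2` are any JSON-LD documents):
//
//   const merged = await jsonld.merge([doc1, doc2], context, {mergeNodes: true});
//   // nodes with the same @id across the inputs are combined in the output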
/**
* The default document loader for external documents. If the environment
* is node.js, a callback-continuation-style document loader is used; otherwise,
* a promises-style document loader is used.
*
* @param url the URL to load.
* @param callback(err, remoteDoc) called once the operation completes,
* if using a non-promises API.
*
* @return a promise, if using a promises API.
*/
Object.defineProperty(jsonld, 'documentLoader', {
  get: () => jsonld._documentLoader,
  set: v => jsonld._documentLoader = util.normalizeDocumentLoader(v)
}); // default document loader not implemented

jsonld.documentLoader =
/*#__PURE__*/
function () {
  var _ref11 = _asyncToGenerator(function* (url) {
    throw new JsonLdError('Could not retrieve a JSON-LD document from the URL. URL ' + 'dereferencing not implemented.', 'jsonld.LoadDocumentError', {
      code: 'loading document failed',
      url: url
    });
  });

  return function (_x26) {
    return _ref11.apply(this, arguments);
  };
}();
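// A custom loader can be assigned through the property defined above; the
// setter normalizes it. Sketch only, with a hypothetical preloaded-context
// table `CONTEXTS`:
//
//   jsonld.documentLoader = async url => {
//     if (url in CONTEXTS) {
//       return {contextUrl: null, documentUrl: url, document: CONTEXTS[url]};
//     }
//     throw new Error('Refused to load ' + url);
//   };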
/**
* Deprecated default document loader. Do not use or override.
*/
jsonld.loadDocument = util.callbackify(
/*#__PURE__*/
_asyncToGenerator(function* () {
  return jsonld.documentLoader.apply(null, arguments);
}));
/**
* Gets a remote JSON-LD document using the default document loader or
* one given in the passed options.
*
* @param url the URL to fetch.
* @param [options] the options to use:
* [documentLoader] the document loader to use.
* @param [callback(err, remoteDoc)] called once the operation completes.
*
* @return a Promise that resolves to the retrieved remote document.
*/
jsonld.get = util.callbackify(
/*#__PURE__*/
function () {
  var _ref13 = _asyncToGenerator(function* (url, options) {
    let load;

    if (typeof options.documentLoader === 'function') {
      load = util.normalizeDocumentLoader(options.documentLoader);
    } else {
      load = jsonld.documentLoader;
    }

    const remoteDoc = yield load(url); // TODO: can this be moved into `normalizeDocumentLoader`?

    try {
      if (!remoteDoc.document) {
        throw new JsonLdError('No remote document found at the given URL.', 'jsonld.NullRemoteDocument');
      }

      if (_isString(remoteDoc.document)) {
        remoteDoc.document = JSON.parse(remoteDoc.document);
      }
    } catch (e) {
      throw new JsonLdError('Could not retrieve a JSON-LD document from the URL.', 'jsonld.LoadDocumentError', {
        code: 'loading document failed',
        cause: e,
        remoteDoc: remoteDoc
      });
    }

    return remoteDoc;
  });

  return function (_x27, _x28) {
    return _ref13.apply(this, arguments);
  };
}());
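// Usage sketch (illustrative; the URL is a placeholder):
//
//   const remoteDoc = await jsonld.get('https://example.org/doc.jsonld', {});
//   // remoteDoc.document holds the parsed JSON-LD; a custom
//   // options.documentLoader is used when provided.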
/**
* Processes a local context, resolving any URLs as necessary, and returns a
* new active context in its callback.
*
* @param activeCtx the current active context.
* @param localCtx the local context to process.
* @param [options] the options to use:
* [documentLoader(url, callback(err, remoteDoc))] the document loader.
* @param [callback(err, activeCtx)] called once the operation completes.
*
* @return a Promise that resolves to the new active context.
*/
jsonld.processContext = util.callbackify(
/*#__PURE__*/
function () {
  var _ref14 = _asyncToGenerator(function* (activeCtx, localCtx, options) {
    // set default options
    options = _setDefaults(options, {
      base: ''
    }); // return initial context early for null context

    if (localCtx === null) {
      return _getInitialContext(options);
    } // get URLs in localCtx


    localCtx = util.clone(localCtx);

    if (!(_isObject(localCtx) && '@context' in localCtx)) {
      localCtx = {
        '@context': localCtx
      };
    }

    let ctx = yield _getAllContexts(localCtx, options);
    return _processContext({
      activeCtx,
      localCtx: ctx,
      options
    });
  });

  return function (_x29, _x30, _x31) {
    return _ref14.apply(this, arguments);
  };
}()); // backwards compatibility
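// Usage sketch (illustrative): start from the initial context (null localCtx)
// and then extend it with an application-supplied context:
//
//   const initialCtx = await jsonld.processContext(null, null, {});
//   const activeCtx = await jsonld.processContext(
//     initialCtx, {'@vocab': 'http://example.org/'}, {});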

jsonld.getContextValue = require('./context').getContextValue;
/**
 * Document loaders.
 */

jsonld.documentLoaders = {};
jsonld.documentLoaders.node = require('./documentLoaders/node');
jsonld.documentLoaders.xhr = require('./documentLoaders/xhr');
/**
 * Assigns the default document loader for external document URLs to a built-in
 * default. Supported types currently include: 'xhr' and 'node'.
 *
 * @param type the type to set.
 * @param [params] the parameters required to use the document loader.
 */

jsonld.useDocumentLoader = function (type) {
  if (!(type in jsonld.documentLoaders)) {
    throw new JsonLdError('Unknown document loader type: "' + type + '"', 'jsonld.UnknownDocumentLoader', {
      type: type
    });
  } // set document loader


  jsonld.documentLoader = jsonld.documentLoaders[type].apply(jsonld, Array.prototype.slice.call(arguments, 1));
};
/** Registered RDF dataset parsers hashed by content-type. */


const _rdfParsers = {};
/**
 * Registers an RDF dataset parser by content-type, for use with
 * jsonld.fromRDF. An RDF dataset parser will always be given two parameters,
 * a string of input and a callback. An RDF dataset parser can be synchronous
 * or asynchronous.
 *
 * If the parser function returns undefined or null then it will be assumed to
 * be asynchronous w/a continuation-passing style and the callback parameter
 * given to the parser MUST be invoked.
 *
 * If it returns a Promise, then it will be assumed to be asynchronous, but the
 * callback parameter MUST NOT be invoked. It should instead be ignored.
 *
 * If it returns an RDF dataset, it will be assumed to be synchronous and the
 * callback parameter MUST NOT be invoked. It should instead be ignored.
 *
 * @param contentType the content-type for the parser.
 * @param parser(input, callback(err, dataset)) the parser function (takes a
 *          string as a parameter and either returns null/undefined and uses
 *          the given callback, returns a Promise, or returns an RDF dataset).
 */

jsonld.registerRDFParser = function (contentType, parser) {
  _rdfParsers[contentType] = parser;
};
/**
 * Unregisters an RDF dataset parser by content-type.
 *
 * @param contentType the content-type for the parser.
 */

jsonld.unregisterRDFParser = function (contentType) {
  delete _rdfParsers[contentType];
}; // register the N-Quads RDF parser


jsonld.registerRDFParser('application/n-quads', NQuads.parse);
jsonld.registerRDFParser('application/nquads', NQuads.parse); // register the RDFa API RDF parser

jsonld.registerRDFParser('rdfa-api', Rdfa.parse);
/* URL API */

jsonld.url = require('./url');
/* Utility API */

jsonld.util = util; // backwards compatibility

Object.assign(jsonld, util); // reexpose API as jsonld.promises for backwards compatability

jsonld.promises = jsonld; // backwards compatibility

jsonld.RequestQueue = require('./RequestQueue');
/* WebIDL API */

jsonld.JsonLdProcessor = require('./JsonLdProcessor')(jsonld); // setup browser global JsonLdProcessor

if (_browser && typeof global.JsonLdProcessor === 'undefined') {
  Object.defineProperty(global, 'JsonLdProcessor', {
    writable: true,
    enumerable: false,
    configurable: true,
    value: jsonld.JsonLdProcessor
  });
} // set platform-specific defaults/APIs


if (_nodejs) {
  // use node document loader by default
  jsonld.useDocumentLoader('node');
} else if (typeof XMLHttpRequest !== 'undefined') {
  // use xhr document loader by default
  jsonld.useDocumentLoader('xhr');
}

function _setDefaults(options, _ref15) {
  let {
    documentLoader = jsonld.documentLoader
  } = _ref15,
      defaults = _objectWithoutProperties(_ref15, ["documentLoader"]);

  if (typeof options === 'function') {
    options = {};
  }

  options = options || {};
  return Object.assign({}, {
    documentLoader
  }, defaults, options);
} // end of jsonld API `wrapper` factory


return jsonld;
}; // external APIs:
// used to generate a new jsonld API instance

const factory = function () {
  return wrapper(function () {
    return factory();
  });
}; // wrap the main jsonld API instance


wrapper(factory); // export API

module.exports = factory;
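// Usage sketch for jsonld.registerRDFParser (illustrative; `parseTurtle` is a
// hypothetical synchronous parser returning an RDF dataset):
//
//   jsonld.registerRDFParser('text/turtle', parseTurtle);
//   const doc = await jsonld.fromRDF(turtleString, {format: 'text/turtle'});
//   jsonld.unregisterRDFParser('text/turtle');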

@@ -14,7 +14,3 @@ /*

*/
constructor(message = 'An unspecified JSON-LD error occurred.', name = 'jsonld.Error', details = {}) {
super(message);

@@ -25,2 +21,3 @@ this.name = name;

}
};

@@ -11,3 +11,5 @@ /*

}
}
Object.defineProperty(JsonLdProcessor, 'prototype', {

@@ -22,5 +24,3 @@ writable: false,

value: JsonLdProcessor
}); // The Web IDL test harness will check the number of parameters defined in
// the functions below. The number of parameters must exactly match the

@@ -35,4 +35,6 @@ // required (non-optional) parameters of the JsonLdProcessor interface as

}
return jsonld.compact(input, ctx);
};
JsonLdProcessor.expand = function (input) {

@@ -42,4 +44,6 @@ if (arguments.length < 1) {

}
return jsonld.expand(input);
};
JsonLdProcessor.flatten = function (input) {

@@ -49,2 +53,3 @@ if (arguments.length < 1) {

}
return jsonld.flatten(input);

@@ -51,0 +56,0 @@ };

@@ -6,9 +6,12 @@ /*

const {
  isKeyword
} = require('./context');

const graphTypes = require('./graphTypes');
const types = require('./types');
const util = require('./util');
const JsonLdError = require('./JsonLdError');

@@ -18,3 +21,2 @@

module.exports = api;
/**

@@ -29,14 +31,14 @@ * Creates a merged JSON-LD node map (node ID => node).

*/
api.createMergedNodeMap = (input, options) => {
  options = options || {}; // produce a map of all subjects and name each bnode

  const issuer = options.issuer || new util.IdentifierIssuer('_:b');
  const graphs = {
    '@default': {}
  };
  api.createNodeMap(input, graphs, '@default', issuer); // add all non-default graphs to default graph

return api.mergeNodeMaps(graphs);
};
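// Sketch of the resulting shape (illustrative, assuming expanded input):
//   createMergedNodeMap([{'@id': 'http://example.org/a',
//     'http://example.org/knows': [{'@id': 'http://example.org/b'}]}])
// yields roughly
//   {'http://example.org/a': {'@id': 'http://example.org/a', ...},
//    'http://example.org/b': {'@id': 'http://example.org/b'}}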
/**

@@ -53,2 +55,4 @@ * Recursively flattens the subjects in the given JSON-LD expanded input

*/
api.createNodeMap = (input, graphs, graph, issuer, name, list) => {

@@ -60,6 +64,7 @@ // recurse through array

}
return;
  } // add non-object to list

if (!types.isObject(input)) {

@@ -69,10 +74,11 @@ if (list) {

}
return;
  } // add values to list

if (graphTypes.isValue(input)) {
if ('@type' in input) {
      let type = input['@type']; // rename @type blank node

if (type.indexOf('_:') === 0) {

@@ -82,15 +88,18 @@ input['@type'] = type = issuer.getId(type);

}
if (list) {
list.push(input);
}
return;
  } // Note: At this point, input must be a subject.
  // spec requires @type to be named first, so assign names early

if ('@type' in input) {
const types = input['@type'];
for (let i = 0; i < types.length; ++i) {
const type = types[i];
if (type.indexOf('_:') === 0) {

@@ -100,15 +109,17 @@ issuer.getId(type);

}
}
  } // get name for subject

if (types.isUndefined(name)) {
name = graphTypes.isBlankNode(input) ? issuer.getId(input['@id']) : input['@id'];
  } // add subject reference to list

if (list) {
    list.push({
      '@id': name
    });
  } // create new subject or merge into existing one

const subjects = graphs[graph];

@@ -118,30 +129,40 @@ const subject = subjects[name] = subjects[name] || {};

const properties = Object.keys(input).sort();
for (let pi = 0; pi < properties.length; ++pi) {
    let property = properties[pi]; // skip @id

if (property === '@id') {
continue;
    } // handle reverse properties

if (property === '@reverse') {
      const referencedNode = {
        '@id': name
      };
const reverseMap = input['@reverse'];
for (const reverseProperty in reverseMap) {
const items = reverseMap[reverseProperty];
for (let ii = 0; ii < items.length; ++ii) {
const item = items[ii];
let itemName = item['@id'];
if (graphTypes.isBlankNode(item)) {
itemName = issuer.getId(itemName);
}
api.createNodeMap(item, graphs, graph, issuer, itemName);
          util.addValue(subjects[itemName], reverseProperty, referencedNode, {
            propertyIsArray: true,
            allowDuplicate: false
          });
}
}
continue;
    } // recurse into graph

if (property === '@graph') {

@@ -152,28 +173,35 @@ // add graph subjects map entry

}
api.createNodeMap(input[property], graphs, name, issuer);
continue;
    } // copy non-@type keywords

if (property !== '@type' && isKeyword(property)) {
if (property === '@index' && property in subject && (input[property] !== subject[property] || input[property]['@id'] !== subject[property]['@id'])) {
        throw new JsonLdError('Invalid JSON-LD syntax; conflicting @index property detected.', 'jsonld.SyntaxError', {
          code: 'conflicting indexes',
          subject: subject
        });
}
subject[property] = input[property];
continue;
    } // iterate over objects


    const objects = input[property]; // if property is a bnode, assign it a new id

if (property.indexOf('_:') === 0) {
property = issuer.getId(property);
    } // ensure property is added for empty arrays

if (objects.length === 0) {
      util.addValue(subject, property, [], {
        propertyIsArray: true
      });
continue;
}
for (let oi = 0; oi < objects.length; ++oi) {

@@ -185,11 +213,15 @@ let o = objects[oi];

o = o.indexOf('_:') === 0 ? issuer.getId(o) : o;
      } // handle embedded subject or subject reference

if (graphTypes.isSubject(o) || graphTypes.isSubjectReference(o)) {
// relabel blank node @id
        const id = graphTypes.isBlankNode(o) ? issuer.getId(o['@id']) : o['@id']; // add reference and recurse

        util.addValue(subject, property, {
          '@id': id
        }, {
          propertyIsArray: true,
          allowDuplicate: false
        });
api.createNodeMap(o, graphs, graph, issuer, id);

@@ -200,8 +232,16 @@ } else if (graphTypes.isList(o)) {

api.createNodeMap(o['@list'], graphs, graph, issuer, name, _list);
        o = {
          '@list': _list
        };
        util.addValue(subject, property, o, {
          propertyIsArray: true,
          allowDuplicate: false
        });
} else {
// handle @value
api.createNodeMap(o, graphs, graph, issuer, name);
        util.addValue(subject, property, o, {
          propertyIsArray: true,
          allowDuplicate: false
        });
}

@@ -211,3 +251,2 @@ }

};
/**

@@ -221,10 +260,17 @@ * Merge separate named graphs into a single merged graph including

*/
api.mergeNodeMapGraphs = graphs => {
const merged = {};
for (const name of Object.keys(graphs).sort()) {
for (const id of Object.keys(graphs[name]).sort()) {
const node = graphs[name][id];
if (!(id in merged)) {
        merged[id] = {
          '@id': id
        };
}
const mergedNode = merged[id];

@@ -239,3 +285,6 @@

for (const value of node[property]) {
        util.addValue(mergedNode, property, util.clone(value), {
          propertyIsArray: true,
          allowDuplicate: false
        });
}

@@ -254,9 +303,13 @@ }

const graphNames = Object.keys(graphs).sort();
for (let i = 0; i < graphNames.length; ++i) {
const graphName = graphNames[i];
if (graphName === '@default') {
continue;
}
const nodeMap = graphs[graphName];
let subject = defaultGraph[graphName];
if (!subject) {

@@ -270,7 +323,9 @@ defaultGraph[graphName] = subject = {

}
const graph = subject['@graph'];
const ids = Object.keys(nodeMap).sort();
for (let ii = 0; ii < ids.length; ++ii) {
      const node = nodeMap[ids[ii]]; // only add full subjects

if (!graphTypes.isSubjectReference(node)) {

@@ -281,3 +336,4 @@ graph.push(node);

}
return defaultGraph;
};
/*
* Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved.
*/
'use strict'; // TODO: move `NQuads` to its own package

module.exports = require('rdf-canonize').NQuads;
/*
* Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved.
*/
/* global Node, XMLSerializer */
'use strict';
const {
  RDF_LANGSTRING,
  RDF_PLAIN_LITERAL,
  RDF_OBJECT,
  RDF_XML_LITERAL,
  XSD_STRING
} = require('./constants');

let _Node;
if (typeof Node !== 'undefined') {

@@ -47,41 +48,57 @@ _Node = Node;

dataset['@default'] = [];
  const subjects = data.getSubjects();

for (let si = 0; si < subjects.length; ++si) {
const subject = subjects[si];
if (subject === null) {
continue;
    } // get all related triples

const triples = data.getSubjectTriples(subject);
if (triples === null) {
continue;
}
const predicates = triples.predicates;
for (const predicate in predicates) {
// iterate over objects
const objects = predicates[predicate].objects;
for (let oi = 0; oi < objects.length; ++oi) {
        const object = objects[oi]; // create RDF triple

        const triple = {}; // add subject

if (subject.indexOf('_:') === 0) {
          triple.subject = {
            type: 'blank node',
            value: subject
          };
        } else {
          triple.subject = {
            type: 'IRI',
            value: subject
          };
        } // add predicate


        if (predicate.indexOf('_:') === 0) {
          triple.predicate = {
            type: 'blank node',
            value: predicate
          };
        } else {
          triple.predicate = {
            type: 'IRI',
            value: predicate
          };
        } // serialize XML literal

let value = object.value;
if (object.type === RDF_XML_LITERAL) {

@@ -92,2 +109,3 @@ // initialize XMLSerializer

value = '';
for (let x = 0; x < object.value.length; x++) {

@@ -100,8 +118,7 @@ if (object.value[x].nodeType === _Node.ELEMENT_NODE) {

}
}
        } // add object


        triple.object = {}; // object is an IRI

if (object.type === RDF_OBJECT) {

@@ -116,2 +133,3 @@ if (object.value.indexOf('_:') === 0) {

triple.object.type = 'literal';
if (object.type === RDF_PLAIN_LITERAL) {

@@ -128,5 +146,5 @@ if (object.language) {

}
        triple.object.value = value; // add triple to dataset in default graph

dataset['@default'].push(triple);

@@ -139,2 +157,3 @@ }

}
};

@@ -146,3 +165,4 @@

}
return XMLSerializer;
}

@@ -6,10 +6,11 @@ /*

function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }

function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }

const {
  callbackify,
  normalizeDocumentLoader
} = require('./util');
module.exports = class RequestQueue {

@@ -27,3 +28,5 @@ /**

self._loader = normalizeDocumentLoader(loader);
    return function ()
    /* url */
    {
return self.add.apply(self, arguments);

@@ -38,10 +41,10 @@ };

const self = _this;
      let promise = self._requests[url];

if (promise) {
// URL already queued, wait for it to load
return Promise.resolve(promise);
      } // queue URL and load it

promise = self._requests[url] = self._loader(url);

@@ -56,2 +59,3 @@

}
};

@@ -6,34 +6,40 @@ /*

const {
  createNodeMap
} = require('./nodeMap');

const {
  isKeyword
} = require('./context');

const graphTypes = require('./graphTypes');

const types = require('./types');

const util = require('./util');

const {
  // RDF,
  // RDF_LIST,
  RDF_FIRST,
  RDF_REST,
  RDF_NIL,
  RDF_TYPE,
  // RDF_PLAIN_LITERAL,
  // RDF_XML_LITERAL,
  // RDF_OBJECT,
  RDF_LANGSTRING,
  // XSD,
  XSD_BOOLEAN,
  XSD_DOUBLE,
  XSD_INTEGER,
  XSD_STRING
} = require('./constants');

const {
  isAbsolute: _isAbsoluteIri
} = require('./url');
const api = {};
module.exports = api;
/**

@@ -47,20 +53,32 @@ * Outputs an RDF dataset for the expanded JSON-LD input.

*/
api.toRDF = (input, options) => {
// create node map for default graph (and any named graphs)
const issuer = new util.IdentifierIssuer('_:b');
  const nodeMap = {
    '@default': {}
  };
createNodeMap(input, nodeMap, '@default', issuer);
const dataset = [];
const graphNames = Object.keys(nodeMap).sort();
for (const graphName of graphNames) {
let graphTerm;
if (graphName === '@default') {
      graphTerm = {
        termType: 'DefaultGraph',
        value: ''
      };
} else if (_isAbsoluteIri(graphName)) {
if (graphName.startsWith('_:')) {
        graphTerm = {
          termType: 'BlankNode'
        };
      } else {
        graphTerm = {
          termType: 'NamedNode'
        };
}
graphTerm.value = graphName;

@@ -71,2 +89,3 @@ } else {

}
_graphToRDF(dataset, nodeMap[graphName], graphTerm, issuer, options);

@@ -77,3 +96,2 @@ }

};
/**

@@ -90,4 +108,7 @@ * Adds RDF quads for a particular graph to the given dataset.

*/
function _graphToRDF(dataset, graph, graphTerm, issuer, options) {
const ids = Object.keys(graph).sort();
for (let i = 0; i < ids.length; ++i) {

@@ -97,4 +118,6 @@ const id = ids[i];

const properties = Object.keys(node).sort();
for (let property of properties) {
const items = node[property];
if (property === '@type') {

@@ -111,26 +134,24 @@ property = RDF_TYPE;

value: id
      }; // skip relative IRI subjects (not valid RDF)

      if (!_isAbsoluteIri(id)) {
        continue;
      } // RDF predicate


      const predicate = {
        termType: property.startsWith('_:') ? 'BlankNode' : 'NamedNode',
        value: property
      }; // skip relative IRI predicates (not valid RDF)

      if (!_isAbsoluteIri(property)) {
        continue;
      } // skip blank node predicates unless producing generalized RDF


      if (predicate.termType === 'BlankNode' && !options.produceGeneralizedRdf) {
        continue;
      } // convert @list to triples

if (graphTypes.isList(item)) {

@@ -140,4 +161,5 @@ _listToRDF(item['@list'], issuer, subject, predicate, dataset, graphTerm);

// convert value or node object to triple
        const object = _objectToRDF(item); // skip null objects (they are relative IRIs)

if (object) {

@@ -156,3 +178,2 @@ dataset.push({

}
/**

@@ -169,9 +190,23 @@ * Converts a @list value into linked list of blank node RDF quads

*/
function _listToRDF(list, issuer, subject, predicate, dataset, graphTerm) {
  const first = {
    termType: 'NamedNode',
    value: RDF_FIRST
  };
  const rest = {
    termType: 'NamedNode',
    value: RDF_REST
  };
  const nil = {
    termType: 'NamedNode',
    value: RDF_NIL
  };

  for (const item of list) {
    const blankNode = {
      termType: 'BlankNode',
      value: issuer.getId()
    };
dataset.push({

@@ -183,8 +218,8 @@ subject: subject,

});
subject = blankNode;
predicate = first;
    const object = _objectToRDF(item); // skip null objects (they are relative IRIs)

if (object) {

@@ -209,3 +244,2 @@ dataset.push({

}
/**

@@ -219,6 +253,7 @@ * Converts a JSON-LD value object to an RDF literal or a JSON-LD string or

*/
function _objectToRDF(item) {
  const object = {}; // convert value object to RDF

if (graphTypes.isValue(item)) {

@@ -231,5 +266,4 @@ object.termType = 'Literal';

let value = item['@value'];
    const datatype = item['@type'] || null; // convert to XSD datatypes as appropriate

if (types.isBoolean(value)) {

@@ -241,4 +275,5 @@ object.value = value.toString();

value = parseFloat(value);
      } // canonical double representation

object.value = value.toExponential(15).replace(/(\d)0*e\+?/, '$1E');
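// For example (assuming this code path), the double 1.1 gives
// (1.1).toExponential(15) === '1.100000000000000e+0', which the replace()
// above rewrites to the canonical form '1.1E0'.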

@@ -262,5 +297,5 @@ object.datatype.value = datatype || XSD_DOUBLE;

object.value = id;
  } // skip relative IRIs, not valid RDF

if (object.termType === 'NamedNode' && !_isAbsoluteIri(object.value)) {

@@ -267,0 +302,0 @@ return null;

@@ -8,3 +8,2 @@ /*

module.exports = api;
/**

@@ -17,4 +16,4 @@ * Returns true if the given value is an Array.

*/
api.isArray = Array.isArray;
/**

@@ -27,4 +26,4 @@ * Returns true if the given value is a Boolean.

*/
api.isBoolean = v => typeof v === 'boolean' || Object.prototype.toString.call(v) === '[object Boolean]';
/**

@@ -37,4 +36,5 @@ * Returns true if the given value is a double.

*/
api.isDouble = v => api.isNumber(v) && String(v).indexOf('.') !== -1;
/**

@@ -47,4 +47,5 @@ * Returns true if the given value is an empty Object.

*/
api.isEmptyObject = v => api.isObject(v) && Object.keys(v).length === 0;
/**

@@ -57,4 +58,5 @@ * Returns true if the given value is a Number.

*/
api.isNumber = v => typeof v === 'number' || Object.prototype.toString.call(v) === '[object Number]';
/**

@@ -67,4 +69,5 @@ * Returns true if the given value is numeric.

*/
api.isNumeric = v => !isNaN(parseFloat(v)) && isFinite(v);
/**

@@ -77,4 +80,5 @@ * Returns true if the given value is an Object.

*/
api.isObject = v => Object.prototype.toString.call(v) === '[object Object]';
/**

@@ -87,4 +91,5 @@ * Returns true if the given value is a String.

*/
api.isString = v => typeof v === 'string' || Object.prototype.toString.call(v) === '[object String]';
/**

@@ -97,2 +102,4 @@ * Returns true if the given value is undefined.

*/
api.isUndefined = v => typeof v === 'undefined';

@@ -9,5 +9,3 @@ /*

const api = {};
module.exports = api; // define URL parser

// parseUri 1.2.2

@@ -17,2 +15,3 @@ // (c) Steven Levithan <stevenlevithan.com>

// with local jsonld.js modifications
api.parsers = {

@@ -29,2 +28,3 @@ simple: {

};
api.parse = (str, parser) => {

@@ -35,7 +35,8 @@ const parsed = {};

let i = o.keys.length;
while (i--) {
parsed[o.keys[i]] = m[i] === undefined ? null : m[i];
  } // remove default ports found in URLs

if (parsed.scheme === 'https' && parsed.port === '443' || parsed.scheme === 'http' && parsed.port === '80') {

@@ -50,3 +51,2 @@ parsed.href = parsed.href.replace(':' + parsed.port, '');

};
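// For example (sketch): parsing 'https://example.com:443/x' drops the default
// port, so the normalized href reads 'https://example.com/x'.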
/**

@@ -60,2 +60,4 @@ * Prepends a base IRI to the given relative IRI.

*/
api.prependBase = (base, iri) => {

@@ -65,17 +67,17 @@ // skip IRI processing

return iri;
  } // already an absolute IRI


  if (iri.indexOf(':') !== -1) {
    return iri;
  } // parse base if it is a string


  if (types.isString(base)) {
    base = api.parse(base || '');
  } // parse given IRI


  const rel = api.parse(iri); // per RFC3986 5.2.2

const transform = {

@@ -94,2 +96,3 @@ protocol: base.protocol || ''

transform.path = base.path;
if (rel.query !== null) {

@@ -106,13 +109,14 @@ transform.query = rel.query;

// merge paths
let path = base.path;
let path = base.path; // append relative path to the end of the last directory from base
// append relative path to the end of the last directory from base
path = path.substr(0, path.lastIndexOf('/') + 1);
if (path.length > 0 && path.substr(-1) !== '/') {
path += '/';
}
path += rel.path;
transform.path = path;
}
transform.query = rel.query;

@@ -125,18 +129,22 @@ }

transform.path = api.removeDotSegments(transform.path);
}
} // construct URL
// construct URL
let rval = transform.protocol;
if (transform.authority !== null) {
rval += '//' + transform.authority;
}
rval += transform.path;
if (transform.query !== null) {
rval += '?' + transform.query;
}
if (rel.fragment !== null) {
rval += '#' + rel.fragment;
}
} // handle empty base
// handle empty base
if (rval === '') {

@@ -148,3 +156,2 @@ rval = './';

};
/**

@@ -158,2 +165,4 @@ * Removes a base IRI from the given absolute IRI.

*/
api.removeBase = (base, iri) => {

@@ -167,6 +176,7 @@ // skip IRI processing

base = api.parse(base || '');
}
} // establish base root
// establish base root
let root = '';
if (base.href !== '') {

@@ -177,17 +187,17 @@ root += (base.protocol || '') + '//' + (base.authority || '');

root += '//';
}
} // IRI not relative to base
// IRI not relative to base
if (iri.indexOf(root) !== 0) {
return iri;
}
} // remove root from IRI and parse remainder
// remove root from IRI and parse remainder
const rel = api.parse(iri.substr(root.length));
// remove path segments that match (do not remove last segment unless there
const rel = api.parse(iri.substr(root.length)); // remove path segments that match (do not remove last segment unless there
// is a hash or query)
const baseSegments = base.normalizedPath.split('/');
const iriSegments = rel.normalizedPath.split('/');
const last = rel.fragment || rel.query ? 0 : 1;
while (baseSegments.length > 0 && iriSegments.length > last) {

@@ -197,8 +207,10 @@ if (baseSegments[0] !== iriSegments[0]) {

}
baseSegments.shift();
iriSegments.shift();
}
} // use '../' for each non-matching base segment
// use '../' for each non-matching base segment
let rval = '';
if (baseSegments.length > 0) {

@@ -208,19 +220,20 @@ // don't count the last segment (if it ends with '/' last path doesn't

baseSegments.pop();
for (let i = 0; i < baseSegments.length; ++i) {
rval += '../';
}
}
} // prepend remaining segments
// prepend remaining segments
rval += iriSegments.join('/');
// add query and hash
rval += iriSegments.join('/'); // add query and hash
if (rel.query !== null) {
rval += '?' + rel.query;
}
if (rel.fragment !== null) {
rval += '#' + rel.fragment;
}
} // handle empty base
// handle empty base
if (rval === '') {

@@ -232,3 +245,2 @@ rval = './';

};
/**

@@ -239,5 +251,6 @@ * Removes dot segments from a URL path.

*/
api.removeDotSegments = path => {
// RFC 3986 5.2.4 (reworked)
// empty path shortcut

@@ -260,2 +273,3 @@ if (path.length === 0) {

}
continue;

@@ -266,2 +280,3 @@ }

output.pop();
if (done) {

@@ -271,2 +286,3 @@ // ensure output has trailing /

}
continue;

@@ -276,8 +292,9 @@ }

output.push(next);
}
} // ensure output has leading /
// ensure output has leading /
if (output.length > 0 && output[0] !== '') {
output.unshift('');
}
if (output.length === 1 && output[0] === '') {

@@ -288,10 +305,8 @@ return '/';

return output.join('/');
};
// TODO: time better isAbsolute/isRelative checks using full regexes:
}; // TODO: time better isAbsolute/isRelative checks using full regexes:
// http://jmrware.com/articles/2009/uri_regexp/URI_regex.html
// regex to check for absolute IRI (starting scheme and ':') or blank node IRI
// regex to check for absolute IRI (starting scheme and ':') or blank node IRI
const isAbsoluteRegex = /^([A-Za-z][A-Za-z0-9+-.]*|_):/;
/**

@@ -306,4 +321,4 @@ * Returns true if the given value is an absolute IRI or blank node IRI, false

*/
api.isAbsolute = v => types.isString(v) && isAbsoluteRegex.test(v);
/**

@@ -317,2 +332,4 @@ * Returns true if the given value is a relative IRI, false if not.

*/
api.isRelative = v => types.isString(v);
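
For context, prependBase and removeBase above implement RFC 3986 relative-reference resolution and its inverse; a rough sketch of the expected behavior (the require path and the example IRIs are illustrative assumptions, not taken from the diff):

const url = require('jsonld/lib/url'); // hypothetical direct require of the internal module

// resolve a relative IRI against a base (RFC 3986 5.2.2)
url.prependBase('http://example.com/a/b', 'c/d');
// -> 'http://example.com/a/c/d'

// make an absolute IRI relative to the same base again
url.removeBase('http://example.com/a/b', 'http://example.com/a/c/d');
// -> 'c/d'

// absolute IRIs pass through prependBase unchanged
url.prependBase('http://example.com/', 'urn:example:1');
// -> 'urn:example:1'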

@@ -6,15 +6,19 @@ /*

function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
const graphTypes = require('./graphTypes');
const types = require('./types');
// TODO: move `IdentifierIssuer` to its own package
const types = require('./types'); // TODO: move `IdentifierIssuer` to its own package
const IdentifierIssuer = require('rdf-canonize').IdentifierIssuer;
const JsonLdError = require('./JsonLdError');
// constants
const JsonLdError = require('./JsonLdError'); // constants
const REGEX_LINK_HEADERS = /(?:<[^>]*?>|"[^"]*?"|[^,])+/g;
const REGEX_LINK_HEADER = /\s*<([^>]*?)>\s*(?:;\s*(.*))?/;
const REGEX_LINK_HEADER_PARAMS = /(.*?)=(?:(?:"([^"]*?)")|([^"]*?))\s*(?:(?:;\s*)|$)/g;
const DEFAULTS = {

@@ -25,16 +29,12 @@ headers: {

};
const api = {};
module.exports = api;
api.IdentifierIssuer = IdentifierIssuer;
// define setImmediate and nextTick
api.IdentifierIssuer = IdentifierIssuer; // define setImmediate and nextTick
// // nextTick implementation with browser-compatible fallback // //
// from https://github.com/caolan/async/blob/master/lib/async.js
// capture the global reference to guard against fakeTimer mocks
// capture the global reference to guard against fakeTimer mocks
const _setImmediate = typeof setImmediate === 'function' && setImmediate;
const _delay = _setImmediate ?
// not a direct alias (for IE10 compatibility)
const _delay = _setImmediate ? // not a direct alias (for IE10 compatibility)
fn => _setImmediate(fn) : fn => setTimeout(fn, 0);

@@ -47,4 +47,4 @@

}
api.setImmediate = _setImmediate ? _delay : api.nextTick;
/**

@@ -58,7 +58,10 @@ * Clones an object, array, or string/number. If a typed JavaScript object

*/
api.clone = function (value) {
if (value && typeof value === 'object') {
let rval;
if (types.isArray(value)) {
rval = [];
for (let i = 0; i < value.length; ++i) {

@@ -69,2 +72,3 @@ rval[i] = api.clone(value[i]);

rval = {};
for (const key in value) {

@@ -76,7 +80,8 @@ rval[key] = api.clone(value[key]);

}
return rval;
}
return value;
};
/**

@@ -91,5 +96,5 @@ * Builds an HTTP headers object for making a JSON-LD request from custom

*/
api.buildHeaders = function () {
let headers = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
api.buildHeaders = (headers = {}) => {
const hasAccept = Object.keys(headers).some(h => h.toLowerCase() === 'accept');

@@ -101,5 +106,6 @@

return Object.assign({ 'Accept': DEFAULTS.headers.accept }, headers);
return Object.assign({
'Accept': DEFAULTS.headers.accept
}, headers);
};
/**

@@ -123,17 +129,27 @@ * Parses a link header. The results will be key'd by the value of "rel".

*/
api.parseLinkHeader = header => {
const rval = {};
// split on unbracketed/unquoted commas
const rval = {}; // split on unbracketed/unquoted commas
const entries = header.match(REGEX_LINK_HEADERS);
for (let i = 0; i < entries.length; ++i) {
let match = entries[i].match(REGEX_LINK_HEADER);
if (!match) {
continue;
}
const result = { target: match[1] };
const result = {
target: match[1]
};
const params = match[2];
while (match = REGEX_LINK_HEADER_PARAMS.exec(params)) {
result[match[1]] = match[2] === undefined ? match[3] : match[2];
}
const rel = result['rel'] || '';
if (Array.isArray(rval[rel])) {

@@ -147,5 +163,5 @@ rval[rel].push(result);

}
return rval;
};
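
As a rough illustration of the link-header parser above (the header value is invented for the example, and the single-entry result shape relies on branches elided by this diff):

const util = require('jsonld/lib/util'); // hypothetical direct require of the internal module

const header =
  '<http://example.com/context.jsonld>; ' +
  'rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"';

util.parseLinkHeader(header);
// -> {
//   'http://www.w3.org/ns/json-ld#context': {
//     target: 'http://example.com/context.jsonld',
//     rel: 'http://www.w3.org/ns/json-ld#context',
//     type: 'application/ld+json'
//   }
// }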
/**

@@ -156,2 +172,4 @@ * Throws an exception if the given value is not a valid @type value.

*/
api.validateTypeValue = v => {

@@ -161,9 +179,11 @@ // can be a string or an empty object

return;
}
} // must be an array
// must be an array
let isValid = false;
if (types.isArray(v)) {
// must contain only strings
isValid = true;
for (let i = 0; i < v.length; ++i) {

@@ -178,6 +198,8 @@ if (!types.isString(v[i])) {

if (!isValid) {
throw new JsonLdError('Invalid JSON-LD syntax; "@type" value must a string, an array of ' + 'strings, or an empty object.', 'jsonld.SyntaxError', { code: 'invalid type value', value: v });
throw new JsonLdError('Invalid JSON-LD syntax; "@type" value must a string, an array of ' + 'strings, or an empty object.', 'jsonld.SyntaxError', {
code: 'invalid type value',
value: v
});
}
};
/**

@@ -191,2 +213,4 @@ * Returns true if the given subject has the given property.

*/
api.hasProperty = (subject, property) => {

@@ -197,5 +221,5 @@ if (property in subject) {

}
return false;
};
/**

@@ -210,2 +234,4 @@ * Determines if the given value is a property of the given subject.

*/
api.hasValue = (subject, property, value) => {

@@ -215,2 +241,3 @@ if (api.hasProperty(subject, property)) {

const isList = graphTypes.isList(val);
if (types.isArray(val) || isList) {

@@ -220,2 +247,3 @@ if (isList) {

}
for (let i = 0; i < val.length; ++i) {

@@ -231,5 +259,5 @@ if (api.compareValues(value, val[i])) {

}
return false;
};
/**

@@ -248,7 +276,11 @@ * Adds a value to a subject. If the value is an array, all values in the

*/
api.addValue = (subject, property, value, options) => {
options = options || {};
if (!('propertyIsArray' in options)) {
options.propertyIsArray = false;
}
if (!('allowDuplicate' in options)) {

@@ -262,2 +294,3 @@ options.allowDuplicate = true;

}
for (let i = 0; i < value.length; ++i) {

@@ -268,10 +301,9 @@ api.addValue(subject, property, value[i], options);

// check if subject already has value if duplicates not allowed
const hasValue = !options.allowDuplicate && api.hasValue(subject, property, value);
const hasValue = !options.allowDuplicate && api.hasValue(subject, property, value); // make property an array if value not present or always an array
// make property an array if value not present or always an array
if (!types.isArray(subject[property]) && (!hasValue || options.propertyIsArray)) {
subject[property] = [subject[property]];
}
} // add new value
// add new value
if (!hasValue) {

@@ -285,3 +317,2 @@ subject[property].push(value);

};
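
A sketch of the addValue semantics above (the subject and values are made up, and branches elided by the diff are assumed to behave as in the published library):

const util = require('jsonld/lib/util'); // hypothetical direct require of the internal module

const subject = {'@id': 'urn:example:1'};
util.addValue(subject, 'label', 'a');                          // subject.label === 'a'
util.addValue(subject, 'label', 'b');                          // promoted to an array: ['a', 'b']
util.addValue(subject, 'label', 'b', {allowDuplicate: false}); // no change, 'b' is already present
util.addValue(subject, 'alt', 'x', {propertyIsArray: true});   // stored as ['x'] even for a single value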
/**

@@ -295,4 +326,5 @@ * Gets all of the values for a subject's property as an array.

*/
api.getValues = (subject, property) => [].concat(subject[property] || []);
/**

@@ -304,6 +336,7 @@ * Removes a property from a subject.

*/
api.removeProperty = (subject, property) => {
delete subject[property];
};
/**

@@ -319,9 +352,12 @@ * Removes a value from a subject.

*/
api.removeValue = (subject, property, value, options) => {
options = options || {};
if (!('propertyIsArray' in options)) {
options.propertyIsArray = false;
}
} // filter out value
// filter out value
const values = api.getValues(subject, property).filter(e => !api.compareValues(e, value));

@@ -337,3 +373,2 @@

};
/**

@@ -346,2 +381,4 @@ * Relabels all blank nodes in the given JSON-LD input.

*/
api.relabelBlankNodes = (input, options) => {

@@ -352,3 +389,2 @@ options = options || {};

};
/**

@@ -368,2 +404,4 @@ * Compares two JSON-LD values for equality. Two JSON-LD values will be

*/
api.compareValues = (v1, v2) => {

@@ -373,10 +411,10 @@ // 1. equal primitives

return true;
}
} // 2. equal @values
// 2. equal @values
if (graphTypes.isValue(v1) && graphTypes.isValue(v2) && v1['@value'] === v2['@value'] && v1['@type'] === v2['@type'] && v1['@language'] === v2['@language'] && v1['@index'] === v2['@index']) {
return true;
}
} // 3. equal @ids
// 3. equal @ids
if (types.isObject(v1) && '@id' in v1 && types.isObject(v2) && '@id' in v2) {

@@ -388,3 +426,2 @@ return v1['@id'] === v2['@id'];

};
/**

@@ -398,2 +435,4 @@ * Compares two strings first based on length and then lexicographically.

*/
api.compareShortestLeast = (a, b) => {

@@ -403,8 +442,11 @@ if (a.length < b.length) {

}
if (b.length < a.length) {
return 1;
}
if (a === b) {
return 0;
}
return a < b ? -1 : 1;

@@ -418,58 +460,70 @@ };

return (() => {
var _ref = _asyncToGenerator(function* (url) {
const callback = arguments[1];
return new Promise(function (resolve, reject) {
try {
fn(url, function (err, remoteDoc) {
return (
/*#__PURE__*/
function () {
var _ref = _asyncToGenerator(function* (url) {
const callback = arguments[1];
return new Promise((resolve, reject) => {
try {
fn(url, (err, remoteDoc) => {
if (typeof callback === 'function') {
return _invokeCallback(callback, err, remoteDoc);
} else if (err) {
reject(err);
} else {
resolve(remoteDoc);
}
});
} catch (e) {
if (typeof callback === 'function') {
return _invokeCallback(callback, err, remoteDoc);
} else if (err) {
reject(err);
} else {
resolve(remoteDoc);
return _invokeCallback(callback, e);
}
});
} catch (e) {
if (typeof callback === 'function') {
return _invokeCallback(callback, e);
reject(e);
}
reject(e);
}
});
});
});
return function (_x2) {
return _ref.apply(this, arguments);
};
})();
return function (_x) {
return _ref.apply(this, arguments);
};
}()
);
};
api.callbackify = fn => {
return _asyncToGenerator(function* () {
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
return (
/*#__PURE__*/
function () {
var _ref2 = _asyncToGenerator(function* (...args) {
const callback = args[args.length - 1];
const callback = args[args.length - 1];
if (typeof callback === 'function') {
args.pop();
}
if (typeof callback === 'function') {
args.pop();
}
let result;
try {
result = yield fn.apply(null, args);
} catch (e) {
if (typeof callback === 'function') {
return _invokeCallback(callback, e);
}
throw e;
}
let result;
if (typeof callback === 'function') {
return _invokeCallback(callback, null, result);
}
try {
result = yield fn.apply(null, args);
} catch (e) {
if (typeof callback === 'function') {
return _invokeCallback(callback, e);
}
return result;
});
throw e;
}
if (typeof callback === 'function') {
return _invokeCallback(callback, null, result);
}
return result;
});
return function () {
return _ref2.apply(this, arguments);
};
}()
);
};
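
The callbackify wrapper above lets a public API call be driven either as a promise or with a trailing node-style callback; a minimal usage sketch (jsonld.expand is part of the public API, the document itself is made up):

const jsonld = require('jsonld');

const doc = {'@context': {name: 'http://schema.org/name'}, name: 'Jane'};

// promise style: no trailing function, so the wrapped async function's promise is returned
jsonld.expand(doc).then(expanded => console.log(expanded));

// callback style: the trailing function is detected, popped off the argument
// list, and invoked with (err, result)
jsonld.expand(doc, (err, expanded) => {
  if(err) { throw err; }
  console.log(expanded);
});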

@@ -482,3 +536,2 @@

}
/**

@@ -492,2 +545,4 @@ * Labels the blank nodes in the given value using the given IdentifierIssuer.

*/
function _labelBlankNodes(issuer, element) {

@@ -504,8 +559,10 @@ if (types.isArray(element)) {

element['@id'] = issuer.getId(element['@id']);
}
} // recursively apply to all keys
// recursively apply to all keys
const keys = Object.keys(element).sort();
for (let ki = 0; ki < keys.length; ++ki) {
const key = keys[ki];
if (key !== '@id') {

@@ -512,0 +569,0 @@ element[key] = _labelBlankNodes(issuer, element[key]);

@@ -36,4 +36,2 @@ /**

*/
(function() {
const canonize = require('rdf-canonize');

@@ -1073,31 +1071,5 @@ const util = require('./util');

if(!_nodejs && (typeof define === 'function' && define.amd)) {
// export AMD API
define([], function() {
// now that module is defined, wrap main jsonld API instance
wrapper(factory);
return factory;
});
} else {
// wrap the main jsonld API instance
wrapper(factory);
if(typeof require === 'function' &&
typeof module !== 'undefined' && module.exports) {
// export CommonJS/nodejs API
module.exports = factory;
}
if(_browser) {
// export simple browser API
if(typeof jsonld === 'undefined') {
jsonld = jsonldjs = factory;
} else {
jsonldjs = factory;
}
}
}
return factory;
})();
// wrap the main jsonld API instance
wrapper(factory);
// export API
module.exports = factory;
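
With the AMD and browser-global export paths removed above, the module now exports only via CommonJS; a minimal sketch of how dependants are expected to pull it in (the script path is an assumption about the distributed bundle, not something shown in this diff):

// Node.js or any CommonJS-aware bundler (webpack, browserify, ...)
const jsonld = require('jsonld');

// browser without a bundler: load the distributed bundle rather than relying
// on the removed `jsonldjs` global (path is illustrative)
// <script src="node_modules/jsonld/dist/jsonld.min.js"></script>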
{
"name": "jsonld",
"version": "1.3.0",
"version": "1.4.0",
"description": "A JSON-LD Processor and API implementation in JavaScript.",

@@ -26,3 +26,2 @@ "homepage": "https://github.com/digitalbazaar/jsonld.js",

"files": [
"browser/*.js",
"dist/*.js",

@@ -41,12 +40,16 @@ "dist/*.js.map",

"devDependencies": {
"babel-cli": "^6.26.0",
"babel-loader": "^7.1.5",
"babel-plugin-transform-object-rest-spread": "^6.26.0",
"babel-preset-env": "^1.6.1",
"babel-preset-node6-es6": "^11.2.5",
"@babel/cli": "^7.2.3",
"@babel/core": "^7.2.2",
"@babel/plugin-proposal-object-rest-spread": "^7.2.0",
"@babel/plugin-transform-modules-commonjs": "^7.2.0",
"@babel/plugin-transform-runtime": "^7.2.0",
"@babel/preset-env": "^7.2.3",
"@babel/register": "^7.0.0",
"@babel/runtime": "^7.2.0",
"babel-loader": "^8.0.5",
"benchmark": "^2.1.4",
"browserify": "^15.2.0",
"browserify": "^16.2.3",
"chai": "^4.2.0",
"commander": "^2.19.0",
"core-js": "^2.5.7",
"core-js": "^2.6.1",
"cors": "^2.7.1",

@@ -61,4 +64,4 @@ "cross-env": "^5.2.0",

"karma": "^3.1.1",
"karma-babel-preprocessor": "^7.0.0",
"karma-browserify": "^5.2.0",
"karma-babel-preprocessor": "^8.0.0-beta.0",
"karma-browserify": "^6.0.0",
"karma-chrome-launcher": "^2.2.0",

@@ -70,3 +73,2 @@ "karma-edge-launcher": "^0.4.2",

"karma-mocha-reporter": "^2.2.5",
"karma-phantomjs-launcher": "^1.0.4",
"karma-safari-launcher": "^1.0.0",

@@ -80,5 +82,5 @@ "karma-server-side": "^1.7.0",

"nyc": "^13.1.0",
"regenerator-runtime": "^0.12.1",
"webpack": "^3.12.0",
"webpack-merge": "^4.1.4"
"webpack": "^4.28.3",
"webpack-cli": "^3.2.0",
"webpack-merge": "^4.2.1"
},

@@ -100,3 +102,3 @@ "engines": {

"build-webpack": "webpack",
"build-node6": "babel --no-babelrc --out-dir dist/node6 --presets=node6-es6 lib/*.js lib/*/*.js",
"build-node6": "BROWSERSLIST='node 6' babel --no-babelrc lib --out-dir dist/node6/lib --presets=@babel/preset-env",
"fetch-test-suites": "npm run fetch-json-ld-wg-test-suite && npm run fetch-json-ld-org-test-suite && npm run fetch-normalization-test-suite",

@@ -124,10 +126,10 @@ "fetch-json-ld-wg-test-suite": "npm run fetch-json-ld-api-test-suite && npm run fetch-json-ld-framing-test-suite",

"lib/index.js": "./lib/jsonld.js",
"crypto": "./browser/ignore.js",
"http": "./browser/ignore.js",
"jsonld-request": "./browser/ignore.js",
"request": "./browser/ignore.js",
"url": "./browser/ignore.js",
"util": "./browser/ignore.js",
"xmldom": "./browser/ignore.js"
"crypto": false,
"http": false,
"jsonld-request": false,
"request": false,
"url": false,
"util": false,
"xmldom": false
}
}
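
For context on the browser-field change above: mapping a module name to false asks bundlers that honor the package.json "browser" field to substitute an empty stub, replacing the old ./browser/ignore.js shims; a rough sketch of the observable effect (assuming a bundler that supports the field):

// inside a browser bundle built by webpack or browserify from this package,
// the node-only modules listed as `false` resolve to an empty stub
const crypto = require('crypto'); // -> {} (empty module) in the browser build
const http = require('http');     // -> {} (empty module) in the browser build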

