@zazuko/query-rdf-data-cube
Comparing version 0.0.2 to 0.0.3
@@ -0,1 +1,27 @@
<a name="0.0.3"></a>
## [0.0.3](https://github.com/zazuko/query-rdf-data-cube/compare/v0.0.2...v0.0.3) (2019-09-09)

### Bug Fixes

* **datacube:** querying dataset by IRI does not fetch all datasets ([7ba4936](https://github.com/zazuko/query-rdf-data-cube/commit/7ba4936))
* **dataset:** #graphs only return graphs related to a dataset ([7df53f5](https://github.com/zazuko/query-rdf-data-cube/commit/7df53f5))
* **dataset:** graphIri is a string, not a namedNode ([2ec850c](https://github.com/zazuko/query-rdf-data-cube/commit/2ec850c))
* **lang:** labels default to '' instead of defaulting to an IRI ([a7a37f7](https://github.com/zazuko/query-rdf-data-cube/commit/a7a37f7))
* **query:** binding/comp maps use Map interface ([4979630](https://github.com/zazuko/query-rdf-data-cube/commit/4979630))
* **query:** orderBy is always taken into account ([b0b0eb5](https://github.com/zazuko/query-rdf-data-cube/commit/b0b0eb5))

### Features

* **component:** components can have one label per language ([9b659be](https://github.com/zazuko/query-rdf-data-cube/commit/9b659be))
* **cube:** implement languages for datasets fetching ([e1411a2](https://github.com/zazuko/query-rdf-data-cube/commit/e1411a2))
* **datacube:** fetch datasets by an IRI or a graph IRI ([42f67b2](https://github.com/zazuko/query-rdf-data-cube/commit/42f67b2))
* **dataset:** datasets can have one label per language ([edd7879](https://github.com/zazuko/query-rdf-data-cube/commit/edd7879))
* **lang:** dataset passes down lang info to queries ([ea9bf11](https://github.com/zazuko/query-rdf-data-cube/commit/ea9bf11))
* **query:** get labels with language preferences ([1602a43](https://github.com/zazuko/query-rdf-data-cube/commit/1602a43))
* **serialization:** implement #toJSON and .fromJSON ([04d0e39](https://github.com/zazuko/query-rdf-data-cube/commit/04d0e39))

<a name="0.0.2"></a>
@@ -2,0 +28,0 @@ ## 0.0.2 (2019-09-04)
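Taken together, the 0.0.3 entries above add language handling, direct IRI lookups and JSON serialization. A minimal sketch of the new surface, assuming the package root resolves to the `DataCube` named export shown in the builds below (endpoint and dataset IRIs are placeholders):

```js
import { DataCube } from "@zazuko/query-rdf-data-cube";

async function main() {
  // Labels are resolved in this order of preference (new `languages` option).
  const cube = new DataCube("https://example.org/sparql", { languages: ["de", "en"] });

  // New in 0.0.3: fetch a single dataset directly by its IRI.
  const dataset = await cube.datasetByIri("https://example.org/dataset/population");
  console.log(dataset.labels);

  // New in 0.0.3: serialize the cube (including cached datasets) and restore it later.
  const restored = DataCube.fromJSON(cube.toJSON());
  console.log(restored instanceof DataCube); // true
}

main().catch(console.error);
```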
import { Term } from "rdf-js"; | ||
import { Label } from "../dataset"; | ||
import BaseExpr from "../expressions/base"; | ||
import { IExpr } from "../expressions/utils"; | ||
declare class Component extends BaseExpr { | ||
label: Term; | ||
/** | ||
* Deserializes a Component from JSON generated by Component#toJSON | ||
*/ | ||
static fromJSON(json: string): Component; | ||
labels: Label[]; | ||
iri: Term; | ||
@@ -11,6 +16,11 @@ aggregateType: string; | ||
componentType: string; | ||
constructor({ label, iri }: { | ||
label: string | Term; | ||
constructor({ labels, iri }: { | ||
labels: Label[]; | ||
iri: string | Term; | ||
}); | ||
/** | ||
* Serializes a Component to JSON in a way that makes it deserializable | ||
* by calling Component#fromJSON | ||
*/ | ||
toJSON(): string; | ||
clone(): any; | ||
@@ -17,0 +27,0 @@ avg(): any; |
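The declaration above replaces the single `label` with a `labels` array and adds the JSON round-trip. A sketch of what that looks like for components obtained from a fetched DataSet (hypothetical setup, reusing the placeholder endpoint from the first example):

```js
import { DataCube } from "@zazuko/query-rdf-data-cube";

async function inspectDimensions() {
  const cube = new DataCube("https://example.org/sparql", { languages: ["en", "de"] });
  const [dataset] = await cube.datasets();
  const dimensions = await dataset.dimensions();

  for (const dimension of dimensions) {
    // Since 0.0.3 each component carries one label per language instead of a single Term.
    console.log(dimension.iri.value, dimension.labels);
  }

  if (dimensions.length) {
    // toJSON records the componentType ("dimension", "measure" or "attribute"),
    // so Component.fromJSON can rebuild an instance of the right subclass.
    const serialized = dimensions[0].toJSON();
    console.log(serialized);
  }
}
```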
@@ -1,6 +0,7 @@ | ||
import { literal, namedNode } from "@rdfjs/data-model"; | ||
import { namedNode } from "@rdfjs/data-model"; | ||
import clone from "clone"; | ||
import BaseExpr from "../expressions/base"; | ||
import Binding from "../expressions/binding"; | ||
class Component extends BaseExpr { | ||
constructor({ label, iri }) { | ||
constructor({ labels, iri }) { | ||
super(); | ||
@@ -10,8 +11,3 @@ this.isDistinct = false; | ||
this.componentType = ""; | ||
if (typeof label === "string") { | ||
this.label = literal(label); | ||
} | ||
else { | ||
this.label = label; | ||
} | ||
this.labels = labels || []; | ||
if (typeof iri === "string") { | ||
@@ -24,5 +20,33 @@ this.iri = namedNode(iri); | ||
} | ||
/** | ||
* Deserializes a Component from JSON generated by Component#toJSON | ||
*/ | ||
static fromJSON(json) { | ||
const obj = JSON.parse(json); | ||
obj.iri = namedNode(obj.iri); | ||
switch (obj.componentType) { | ||
case "measure": | ||
return new Measure(obj); | ||
case "dimension": | ||
return new Dimension(obj); | ||
case "attribute": | ||
return new Attribute(obj); | ||
} | ||
throw new Error(`Unknown component type '${obj.componentType}'`); | ||
} | ||
/** | ||
* Serializes a Component to JSON in a way that makes it deserializable | ||
* by calling Component#fromJSON | ||
*/ | ||
toJSON() { | ||
const obj = { | ||
componentType: this.componentType, | ||
iri: this.iri.value, | ||
labels: this.labels, | ||
}; | ||
return JSON.stringify(obj); | ||
} | ||
clone() { | ||
const Constructor = Object.getPrototypeOf(this).constructor; | ||
const state = { label: this.label, iri: this.iri }; | ||
const state = { labels: clone(this.labels), iri: this.iri }; | ||
const instance = new Constructor(state); | ||
@@ -53,2 +77,5 @@ instance.aggregateType = this.aggregateType; | ||
export default Component; | ||
import Attribute from "./attribute"; | ||
import Dimension from "./dimension"; | ||
import Measure from "./measure"; | ||
//# sourceMappingURL=component.js.map |
import { NamedNode } from "rdf-js"; | ||
import DataSet from "./dataset"; | ||
export interface ICubeOptions { | ||
languages?: string[]; | ||
} | ||
export declare class DataCube { | ||
/** | ||
* Deserializes a DataCube from JSON generated by DataCube#toJSON | ||
*/ | ||
static fromJSON(json: string): DataCube; | ||
private endpoint; | ||
private languages; | ||
private fetcher; | ||
private cachedDatasets; | ||
private datasetsLoaded; | ||
private allDatasetsLoaded; | ||
private cachedGraphs; | ||
private graphsLoaded; | ||
/** | ||
* A DataCube queries a SPARQL endpoint and retrieves [[DataSet]]s and | ||
* their [[Dimension]]s, [[Measure]]s and [[Attribute]]s. | ||
* @class DataCube | ||
* @param endpoint SPARQL endpoint | ||
* @param options Options | ||
* @param options.languages Languages in which to get the labels, by priority, e.g. `["de", "en"]`. | ||
* Passed down to [[DataSet]]s and [[DataSetQuery]]. | ||
*/ | ||
constructor(endpoint: string); | ||
constructor(endpoint: string, options?: ICubeOptions); | ||
/** | ||
* Serializes a DataCube to JSON in a way that makes it deserializable | ||
* by calling DataCube#fromJSON | ||
*/ | ||
toJSON(): string; | ||
/** | ||
* Fetch all [[DataSet]]s from the endpoint. | ||
@@ -19,5 +38,19 @@ */ | ||
/** | ||
* Fetch a [[DataSet]] by its IRI. | ||
* | ||
* @param iri IRI of the DataSet to return. | ||
*/ | ||
datasetByIri(dataSetIri: string): Promise<DataSet>; | ||
/** | ||
* Fetch [[DataSet]]s by their graph IRI. | ||
* | ||
* @param graphIri IRI of the graph to look for in all DataSets. | ||
*/ | ||
datasetsByGraphIri(iri: string): Promise<DataSet[]>; | ||
/** | ||
* Fetch all graphs from the endpoint. | ||
*/ | ||
graphs(): Promise<NamedNode[]>; | ||
private cacheDatasets; | ||
private generateQuery; | ||
} |
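A sketch of the graph-aware lookups declared above; `graphs()` and `datasetsByGraphIri()` are new in 0.0.3 (the endpoint is a placeholder):

```js
import { DataCube } from "@zazuko/query-rdf-data-cube";

async function listDatasetsPerGraph() {
  const cube = new DataCube("https://example.org/sparql", { languages: ["de", "en"] });

  // Per the 0.0.3 fix, graphs() only returns graphs that actually contain a qb:DataSet.
  const graphs = await cube.graphs();

  for (const graph of graphs) {
    // graph is an RDF/JS NamedNode; datasetsByGraphIri takes the plain IRI string.
    const datasets = await cube.datasetsByGraphIri(graph.value);
    console.log(graph.value, datasets.map((ds) => ds.iri));
  }
}
```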
@@ -0,37 +1,108 @@ | ||
import { namedNode, variable } from "@rdfjs/data-model"; | ||
import { Generator as SparqlGenerator } from "sparqljs"; | ||
import DataSet from "./dataset"; | ||
import { generateLangCoalesce, generateLangOptionals, prefixes } from "./query/utils"; | ||
import SparqlFetcher from "./sparqlfetcher"; | ||
export class DataCube { | ||
/** | ||
* A DataCube queries a SPARQL endpoint and retrieves [[DataSet]]s and | ||
* their [[Dimension]]s, [[Measure]]s and [[Attribute]]s. | ||
* @class DataCube | ||
* @param endpoint SPARQL endpoint | ||
* @param options Options | ||
* @param options.languages Languages in which to get the labels, by priority, e.g. `["de", "en"]`. | ||
* Passed down to [[DataSet]]s and [[DataSetQuery]]. | ||
*/ | ||
constructor(endpoint) { | ||
this.datasetsLoaded = false; | ||
constructor(endpoint, options = {}) { | ||
this.allDatasetsLoaded = false; | ||
this.graphsLoaded = false; | ||
this.endpoint = endpoint; | ||
this.languages = options.languages || ["de", "it"]; | ||
this.fetcher = new SparqlFetcher(endpoint); | ||
this.cachedDatasets = []; | ||
this.cachedDatasets = new Map(); | ||
this.cachedGraphs = []; | ||
} | ||
/** | ||
* Deserializes a DataCube from JSON generated by DataCube#toJSON | ||
*/ | ||
static fromJSON(json) { | ||
const obj = JSON.parse(json); | ||
const datacube = new DataCube(obj.endpoint, { | ||
languages: obj.languages, | ||
}); | ||
datacube.cachedDatasets = obj.datasets.reduce((map, str) => { | ||
const dataset = DataSet.fromJSON(str); | ||
map.set(dataset.iri, dataset); | ||
return map; | ||
}, new Map()); | ||
return datacube; | ||
} | ||
/** | ||
* Serializes a DataCube to JSON in a way that makes it deserializable | ||
* by calling DataCube#fromJSON | ||
*/ | ||
toJSON() { | ||
const obj = { | ||
endpoint: this.endpoint, | ||
languages: this.languages, | ||
datasets: Array.from(this.cachedDatasets.values()).map((dataset) => dataset.toJSON()), | ||
}; | ||
return JSON.stringify(obj); | ||
} | ||
/** | ||
* Fetch all [[DataSet]]s from the endpoint. | ||
*/ | ||
async datasets() { | ||
if (this.datasetsLoaded) { | ||
return this.cachedDatasets; | ||
if (this.allDatasetsLoaded) { | ||
return Array.from(this.cachedDatasets.values()); | ||
} | ||
const query = ` | ||
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> | ||
PREFIX qb: <http://purl.org/linked-data/cube#> | ||
SELECT ?dataSetIri ?dataSetLabel ?graphIri WHERE { | ||
GRAPH ?graphIri { | ||
?dataSetIri a qb:DataSet ; | ||
rdfs:label ?dataSetLabel . | ||
const sparql = this.generateQuery(); | ||
const queryResult = await this.fetcher.select(sparql); | ||
this.cacheDatasets(queryResult); | ||
this.allDatasetsLoaded = true; | ||
return Array.from(this.cachedDatasets.values()); | ||
} | ||
/** | ||
* Fetch a [[DataSet]] by its IRI. | ||
* | ||
* @param iri IRI of the DataSet to return. | ||
*/ | ||
async datasetByIri(dataSetIri) { | ||
const found = Array.from(this.cachedDatasets.values()).find((dataset) => dataset.iri === dataSetIri); | ||
if (found) { | ||
return found; | ||
} | ||
} | ||
`; | ||
const datasets = await this.fetcher.select(query); | ||
this.datasetsLoaded = true; | ||
return this.cachedDatasets = datasets.map(({ dataSetIri, dataSetLabel, graphIri }) => new DataSet(this.endpoint, { dataSetIri, dataSetLabel, graphIri })); | ||
const sparql = this.generateQuery({ dataSetIri: namedNode(dataSetIri) }); | ||
const queryResult = await this.fetcher.select(sparql); | ||
if (!queryResult.length) { | ||
throw new Error(`No dataset with iri <${dataSetIri}> on ${this.endpoint}`); | ||
} | ||
this.cacheDatasets(queryResult); | ||
return this.datasetByIri(dataSetIri); | ||
} | ||
/** | ||
* Fetch [[DataSet]]s by their graph IRI. | ||
* | ||
* @param graphIri IRI of the graph to look for in all DataSets. | ||
*/ | ||
async datasetsByGraphIri(iri) { | ||
const datasets = Array.from(this.cachedDatasets.values()) | ||
.filter((ds) => ds.graphIri === iri); | ||
if (datasets.length) { | ||
return datasets; | ||
} | ||
const graphIri = namedNode(iri); | ||
const sparql = this.generateQuery({ graphIri }); | ||
const queryResult = await this.fetcher.select(sparql); | ||
if (!queryResult.length) { | ||
// avoid infinite recursion | ||
throw new Error(`Cannot find graph <${iri}> in ${this.endpoint}`); | ||
} | ||
this.cacheDatasets(queryResult.map((result) => { | ||
result.graphIri = graphIri; | ||
return result; | ||
})); | ||
return this.datasetsByGraphIri(iri); | ||
} | ||
/** | ||
* Fetch all graphs from the endpoint. | ||
@@ -43,3 +114,10 @@ */ | ||
} | ||
const query = "SELECT DISTINCT ?graph WHERE { GRAPH ?graph {?s ?p ?o}}"; | ||
const query = ` | ||
PREFIX qb: <http://purl.org/linked-data/cube#> | ||
SELECT DISTINCT ?graph WHERE { | ||
GRAPH ?graph { | ||
?dataset a qb:DataSet . | ||
} | ||
} | ||
`; | ||
const graphs = await this.fetcher.select(query); | ||
@@ -49,3 +127,74 @@ this.graphsLoaded = true; | ||
} | ||
cacheDatasets(datasets) { | ||
const datasetsByIri = datasets.reduce((acc, { iri, label, graphIri }) => { | ||
if (!acc[iri.value]) { | ||
acc[iri.value] = { | ||
iri, | ||
labels: [], | ||
graphIri, | ||
languages: this.languages, | ||
}; | ||
} | ||
acc[iri.value].labels.push({ | ||
value: label.value, | ||
language: label.language, | ||
}); | ||
return acc; | ||
}, {}); | ||
Object.entries(datasetsByIri) | ||
.forEach(([iri, dataset]) => { | ||
this.cachedDatasets.set(iri, new DataSet(this.endpoint, dataset)); | ||
}); | ||
} | ||
generateQuery({ graphIri, dataSetIri } = {}) { | ||
const graphIriBinding = variable("graphIri"); | ||
const iriBinding = variable("iri"); | ||
const labelBinding = variable("label"); | ||
const query = { | ||
prefixes, | ||
queryType: "SELECT", | ||
variables: [ | ||
iriBinding, | ||
graphIriBinding, | ||
labelBinding, | ||
], | ||
where: [ | ||
{ | ||
type: "graph", | ||
name: graphIri || graphIriBinding, | ||
patterns: [ | ||
{ | ||
type: "bgp", | ||
triples: [ | ||
{ | ||
subject: iriBinding, | ||
predicate: namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), | ||
object: namedNode("http://purl.org/linked-data/cube#DataSet"), | ||
}, | ||
], | ||
}, | ||
...generateLangOptionals(iriBinding, labelBinding, this.languages), | ||
], | ||
}, | ||
generateLangCoalesce(labelBinding, this.languages), | ||
], | ||
type: "query", | ||
}; | ||
if (dataSetIri) { | ||
query.where.push({ | ||
type: "filter", | ||
expression: { | ||
type: "operation", | ||
operator: "=", | ||
args: [ | ||
iriBinding, | ||
dataSetIri, | ||
], | ||
}, | ||
}); | ||
} | ||
const generator = new SparqlGenerator({ allPrefixes: true }); | ||
return generator.stringify(query); | ||
} | ||
} | ||
//# sourceMappingURL=datacube.js.map |
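Both lookups above now throw when nothing matches (instead of recursing or silently returning everything), so callers may want to handle that case explicitly. A sketch with a placeholder endpoint:

```js
import { DataCube } from "@zazuko/query-rdf-data-cube";

async function findDataset(iri) {
  const cube = new DataCube("https://example.org/sparql");
  try {
    return await cube.datasetByIri(iri);
  } catch (err) {
    // 0.0.3 throws "No dataset with iri <...> on <endpoint>" when the IRI is unknown.
    console.warn(err.message);
    return null;
  }
}
```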
@@ -1,27 +0,44 @@ | ||
import { NamedNode, Term } from "rdf-js"; | ||
import Attribute from "./components/attribute"; | ||
import Dimension from "./components/dimension"; | ||
import Measure from "./components/measure"; | ||
import DataSetQuery from "./datasetquery"; | ||
import { NamedNode } from "rdf-js"; | ||
import { Attribute, Dimension, Measure } from "./components"; | ||
import { ICubeOptions } from "./datacube"; | ||
import DataSetQuery from "./query/datasetquery"; | ||
import { IQueryOptions } from "./query/utils"; | ||
export declare type Label = { | ||
value: string; | ||
language?: string; | ||
}; | ||
export interface IDataSetOptions extends ICubeOptions { | ||
iri: NamedNode; | ||
labels?: Label[]; | ||
graphIri: NamedNode; | ||
} | ||
declare class DataSet { | ||
label: any; | ||
/** | ||
* Deserializes a DataSet from JSON generated by DataSet#toJSON | ||
*/ | ||
static fromJSON(json: string): DataSet; | ||
labels: Label[]; | ||
iri: string; | ||
endpoint: string; | ||
graphIri: NamedNode | undefined; | ||
graphIri?: string; | ||
private languages; | ||
private fetcher; | ||
private metadataLoaded; | ||
private cachedMetadata; | ||
private componentsLoaded; | ||
private cachedComponents; | ||
/** | ||
* @param endpoint SPARQL endpoint where the DataSet lives. | ||
* @param options Additional info about the DataSet. | ||
* @param options.dataSetIri The IRI of the DataSet. | ||
* @param options.dataSetLabel (Optional) A label for the DataSet. | ||
* @param options.iri The IRI of the DataSet. | ||
* @param options.graphIri The IRI of the graph from which the data will be fetched. | ||
* @param options.labels (Optional) A list of labels for the DataSet in the following form: | ||
* `[ { value: "Something", language: "en" }, { value: "Etwas", language: "de" }, … ]` | ||
* @param options.languages Languages in which to get the labels, by priority, e.g. `["de", "en"]`. | ||
*/ | ||
constructor(endpoint: string, options: { | ||
dataSetIri: NamedNode; | ||
dataSetLabel?: Term; | ||
graphIri: NamedNode; | ||
}); | ||
constructor(endpoint: string, options: IDataSetOptions); | ||
/** | ||
* Serializes a DataSet to JSON in a way that makes it deserializable | ||
* by calling DataSet#fromJSON | ||
*/ | ||
toJSON(): string; | ||
/** | ||
* Fetch all [[Attribute]]s from the DataSet. | ||
@@ -41,5 +58,5 @@ */ | ||
*/ | ||
query(): DataSetQuery; | ||
private metadata; | ||
query(opts?: IQueryOptions): DataSetQuery; | ||
private components; | ||
} | ||
export default DataSet; |
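A sketch of the reworked DataSet interface above: labels per language, a string graphIri, component accessors, and per-query language overrides (IRIs are placeholders; DataSetQuery itself is outside this diff):

```js
import { DataCube } from "@zazuko/query-rdf-data-cube";

async function describeDataset() {
  const cube = new DataCube("https://example.org/sparql", { languages: ["de", "en"] });
  const dataset = await cube.datasetByIri("https://example.org/dataset/population");

  console.log(dataset.labels);   // e.g. [{ value: "Bevölkerung", language: "de" }]
  console.log(dataset.graphIri); // a plain IRI string since 0.0.3, no longer a NamedNode

  const dimensions = await dataset.dimensions();
  const measures = await dataset.measures();
  console.log(dimensions.length, measures.length);

  // Per-query language override; without it the dataset's own languages are used.
  const query = dataset.query({ languages: ["en"] });
  console.log(query.constructor.name);

  // DataSet.fromJSON(dataset.toJSON()) rebuilds the dataset together with its cached components.
}
```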
@@ -1,5 +0,6 @@ | ||
import Attribute from "./components/attribute"; | ||
import Dimension from "./components/dimension"; | ||
import Measure from "./components/measure"; | ||
import DataSetQuery from "./datasetquery"; | ||
import { namedNode, variable } from "@rdfjs/data-model"; | ||
import { Generator as SparqlGenerator } from "sparqljs"; | ||
import Component, { Attribute, Dimension, Measure } from "./components"; | ||
import DataSetQuery from "./query/datasetquery"; | ||
import { generateLangCoalesce, generateLangOptionals, prefixes } from "./query/utils"; | ||
import SparqlFetcher from "./sparqlfetcher"; | ||
@@ -10,21 +11,75 @@ class DataSet { | ||
* @param options Additional info about the DataSet. | ||
* @param options.dataSetIri The IRI of the DataSet. | ||
* @param options.dataSetLabel (Optional) A label for the DataSet. | ||
* @param options.iri The IRI of the DataSet. | ||
* @param options.graphIri The IRI of the graph from which the data will be fetched. | ||
* @param options.labels (Optional) A list of labels for the DataSet in the following form: | ||
* `[ { value: "Something", language: "en" }, { value: "Etwas", language: "de" }, … ]` | ||
* @param options.languages Languages in which to get the labels, by priority, e.g. `["de", "en"]`. | ||
*/ | ||
constructor(endpoint, options) { | ||
this.metadataLoaded = false; | ||
const { dataSetIri, dataSetLabel, graphIri } = options; | ||
this.componentsLoaded = false; | ||
const { iri, labels, graphIri } = options; | ||
this.fetcher = new SparqlFetcher(endpoint); | ||
this.iri = dataSetIri.value; | ||
this.label = (dataSetLabel && dataSetLabel.value) || ""; | ||
this.graphIri = graphIri; | ||
this.endpoint = endpoint; | ||
this.iri = iri.value; | ||
this.graphIri = graphIri.value; | ||
this.labels = labels || []; | ||
this.languages = options.languages || ["de", "it"]; | ||
this.cachedComponents = { | ||
dimensions: new Map(), | ||
measures: new Map(), | ||
attributes: new Map(), | ||
}; | ||
} | ||
/** | ||
* Deserializes a DataSet from JSON generated by DataSet#toJSON | ||
*/ | ||
static fromJSON(json) { | ||
const obj = JSON.parse(json); | ||
const dataset = new DataSet(obj.endpoint, { | ||
iri: namedNode(obj.iri), | ||
graphIri: namedNode(obj.graphIri), | ||
labels: obj.labels, | ||
languages: obj.languages, | ||
}); | ||
["dimensions", "measures", "attributes"].forEach((componentTypes) => { | ||
dataset.cachedComponents[componentTypes] = obj.components[componentTypes] | ||
.map(Component.fromJSON) | ||
.reduce((cache, component) => { | ||
cache.set(component.iri.value, component); | ||
return cache; | ||
}, new Map()); | ||
}); | ||
return dataset; | ||
} | ||
/** | ||
* Serializes a DataSet to JSON in a way that makes it deserializable | ||
* by calling DataSet#fromJSON | ||
*/ | ||
toJSON() { | ||
const dimensions = Array.from(this.cachedComponents.dimensions.values()) | ||
.map((component) => component.toJSON()); | ||
const measures = Array.from(this.cachedComponents.measures.values()) | ||
.map((component) => component.toJSON()); | ||
const attributes = Array.from(this.cachedComponents.attributes.values()) | ||
.map((component) => component.toJSON()); | ||
const obj = { | ||
endpoint: this.endpoint, | ||
iri: this.iri, | ||
graphIri: this.graphIri, | ||
labels: this.labels, | ||
languages: this.languages, | ||
components: { | ||
dimensions, | ||
measures, | ||
attributes, | ||
}, | ||
}; | ||
return JSON.stringify(obj); | ||
} | ||
/** | ||
* Fetch all [[Attribute]]s from the DataSet. | ||
*/ | ||
async attributes() { | ||
await this.metadata(); | ||
return this.cachedMetadata.attributes; | ||
await this.components(); | ||
return Array.from(this.cachedComponents.attributes.values()); | ||
} | ||
@@ -35,4 +90,4 @@ /** | ||
async dimensions() { | ||
await this.metadata(); | ||
return this.cachedMetadata.dimensions; | ||
await this.components(); | ||
return Array.from(this.cachedComponents.dimensions.values()); | ||
} | ||
@@ -43,4 +98,4 @@ /** | ||
async measures() { | ||
await this.metadata(); | ||
return this.cachedMetadata.measures; | ||
await this.components(); | ||
return Array.from(this.cachedComponents.measures.values()); | ||
} | ||
@@ -50,35 +105,100 @@ /** | ||
*/ | ||
query() { | ||
return new DataSetQuery(this); | ||
query(opts = {}) { | ||
if (!opts.languages) { | ||
opts.languages = this.languages; | ||
} | ||
return new DataSetQuery(this, opts); | ||
} | ||
async metadata() { | ||
if (this.metadataLoaded) { | ||
async components() { | ||
if (this.componentsLoaded) { | ||
return; | ||
} | ||
const query = ` | ||
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> | ||
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> | ||
PREFIX qb: <http://purl.org/linked-data/cube#> | ||
PREFIX skos: <http://www.w3.org/2004/02/skos/core#> | ||
SELECT ?label ?kind ?iri | ||
${this.graphIri ? `FROM <${this.graphIri.value}>` : ""} | ||
WHERE { | ||
<${this.iri}> a qb:DataSet ; | ||
qb:structure/qb:component ?componentSpec . | ||
?componentSpec ?kind ?iri . | ||
?iri rdfs:label|skos:prefLabel ?label . | ||
}`; | ||
const metadata = await this.fetcher.select(query); | ||
this.cachedMetadata = metadata.reduce((metadataProp, { kind, label, iri }) => { | ||
const binding = variable("iri"); | ||
const labelBinding = variable("label"); | ||
const query = { | ||
prefixes, | ||
queryType: "SELECT", | ||
variables: [ | ||
binding, | ||
variable("kind"), | ||
labelBinding, | ||
], | ||
from: { default: [namedNode(this.graphIri)], named: [] }, | ||
where: [ | ||
{ | ||
type: "bgp", | ||
triples: [ | ||
{ | ||
subject: namedNode(this.iri), | ||
predicate: namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), | ||
object: namedNode("http://purl.org/linked-data/cube#DataSet"), | ||
}, | ||
{ | ||
subject: namedNode(this.iri), | ||
predicate: { | ||
type: "path", | ||
pathType: "/", | ||
items: [ | ||
namedNode("http://purl.org/linked-data/cube#structure"), | ||
namedNode("http://purl.org/linked-data/cube#component"), | ||
], | ||
}, | ||
object: variable("componentSpec"), | ||
}, | ||
{ | ||
subject: variable("componentSpec"), | ||
predicate: variable("kind"), | ||
object: binding, | ||
}, | ||
], | ||
}, | ||
{ | ||
type: "filter", | ||
expression: { | ||
type: "operation", | ||
operator: "in", | ||
args: [ | ||
variable("kind"), | ||
[ | ||
namedNode("http://purl.org/linked-data/cube#attribute"), | ||
namedNode("http://purl.org/linked-data/cube#dimension"), | ||
namedNode("http://purl.org/linked-data/cube#measure"), | ||
], | ||
], | ||
}, | ||
}, | ||
...generateLangOptionals(binding, labelBinding, this.languages), | ||
generateLangCoalesce(labelBinding, this.languages), | ||
], | ||
type: "query", | ||
}; | ||
const generator = new SparqlGenerator({ allPrefixes: true }); | ||
const sparql = generator.stringify(query); | ||
const components = await this.fetcher.select(sparql); | ||
const componentsByIri = components.reduce((acc, { kind, label, iri }) => { | ||
if (!acc[iri.value]) { | ||
acc[iri.value] = { | ||
kind, | ||
labels: [], | ||
iri, | ||
}; | ||
} | ||
acc[iri.value].labels.push({ | ||
value: label.value, | ||
language: label.language, | ||
}); | ||
return acc; | ||
}, {}); | ||
const groupedComponents = Object.values(componentsByIri); | ||
this.cachedComponents = groupedComponents | ||
.reduce((componentsProp, { kind, labels, iri }) => { | ||
switch (kind.value) { | ||
case "http://purl.org/linked-data/cube#attribute": | ||
metadataProp.attributes.push(new Attribute({ label, iri })); | ||
componentsProp.attributes.set(iri.value, new Attribute({ labels, iri })); | ||
break; | ||
case "http://purl.org/linked-data/cube#dimension": | ||
metadataProp.dimensions.push(new Dimension({ label, iri })); | ||
componentsProp.dimensions.set(iri.value, new Dimension({ labels, iri })); | ||
break; | ||
case "http://purl.org/linked-data/cube#measure": | ||
metadataProp.measures.push(new Measure({ label, iri })); | ||
componentsProp.measures.set(iri.value, new Measure({ labels, iri })); | ||
break; | ||
@@ -88,9 +208,9 @@ default: | ||
} | ||
return metadataProp; | ||
return componentsProp; | ||
}, { | ||
attributes: [], | ||
dimensions: [], | ||
measures: [], | ||
attributes: new Map(), | ||
dimensions: new Map(), | ||
measures: new Map(), | ||
}); | ||
this.metadataLoaded = true; | ||
this.componentsLoaded = true; | ||
} | ||
@@ -97,0 +217,0 @@ } |
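All three component kinds go through the same grouping logic above, so they can be handled uniformly once fetched. A small sketch for a DataSet obtained as in the earlier examples:

```js
async function logComponents(dataset) {
  const components = [
    ...(await dataset.attributes()),
    ...(await dataset.dimensions()),
    ...(await dataset.measures()),
  ];

  for (const component of components) {
    // componentType is the discriminator also used by Component.fromJSON:
    // "attribute", "dimension" or "measure".
    console.log(component.componentType, component.iri.value, component.labels);
  }
}
```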
@@ -1,2 +0,2 @@ | ||
import { Term } from "rdf-js"; | ||
import { Literal, Term } from "rdf-js"; | ||
export interface IExpr { | ||
@@ -18,1 +18,2 @@ resolve(mapping: Map<string, string>): IExpr; | ||
export declare function into(what: IntoExpr): IExpr; | ||
export declare function toLiteral(arg: any): Literal; |
// tslint:disable: max-classes-per-file | ||
import { blankNode, defaultGraph, literal, namedNode, variable } from "@rdfjs/data-model"; | ||
import { toLiteral } from "../toLiteral"; | ||
import namespace from "@rdfjs/namespace"; | ||
import Binding from "./binding"; | ||
import Operator from "./operator"; | ||
const xsd = namespace("http://www.w3.org/2001/XMLSchema#"); | ||
const dateTime = /^\d{4}(-[01]\d(-[0-3]\d(T[0-2]\d:[0-5]\d:?([0-5]\d(\.\d+)?)?([+-][0-2]\d:[0-5]\d)?Z?)?)?)$/; | ||
const bool = /^(true|false)$/; | ||
const numb = /^[\-+]?(?:\d+\.?\d*([eE](?:[\-\+])?\d+)|\d*\.?\d+)$/; | ||
export class TermExpr { | ||
@@ -60,2 +64,41 @@ constructor(term) { | ||
} | ||
export function toLiteral(arg) { | ||
if (isLiteral(arg)) { | ||
return arg; | ||
} | ||
if (arg === true || arg === false) { | ||
return literal(String(arg), xsd("boolean")); | ||
} | ||
if (bool.test(arg)) { | ||
return literal(arg, xsd("boolean")); | ||
} | ||
if (arg instanceof Date) { | ||
return literal(arg.toISOString(), xsd("dateTime")); | ||
} | ||
if (dateTime.test(arg)) { | ||
const date = new Date(arg); | ||
return literal(date.toISOString(), xsd("dateTime")); | ||
} | ||
if (/^[0-9+-]/.test(arg)) { | ||
const match = numb.exec(arg); | ||
if (match) { | ||
const value = match[0]; | ||
let type; | ||
if (match[1]) { | ||
type = xsd("double"); | ||
} | ||
else if (/^[+\-]?\d+$/.test(match[0])) { | ||
type = xsd("integer"); | ||
} | ||
else { | ||
type = xsd("decimal"); | ||
} | ||
return literal(String(value), type); | ||
} | ||
} | ||
return literal(arg); | ||
} | ||
function isLiteral(term) { | ||
return (term instanceof literal("").constructor); | ||
} | ||
//# sourceMappingURL=utils.js.map |
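The new `toLiteral` helper shown above maps plain JavaScript values to typed RDF literals. The expected datatypes below follow directly from the regular expressions in the code; the deep-import path is an assumption, not a documented entry point:

```js
// Assumed path into the ES build; adjust to wherever your build exposes expressions/utils.
import { toLiteral } from "@zazuko/query-rdf-data-cube/dist/es/src/expressions/utils";

toLiteral(true);                    // "true"^^xsd:boolean
toLiteral("false");                 // "false"^^xsd:boolean
toLiteral(new Date("2019-09-09"));  // "2019-09-09T00:00:00.000Z"^^xsd:dateTime
toLiteral("2019-09-09");            // matched by the date regexp -> xsd:dateTime
toLiteral("42");                    // xsd:integer
toLiteral("3.14");                  // xsd:decimal
toLiteral("1e6");                   // exponent present -> xsd:double
toLiteral("hello");                 // plain string literal (no special datatype)
```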
import { Term } from "rdf-js"; | ||
import { Label } from "../dataset"; | ||
import BaseExpr from "../expressions/base"; | ||
import { IExpr } from "../expressions/utils"; | ||
declare class Component extends BaseExpr { | ||
label: Term; | ||
/** | ||
* Deserializes a Component from JSON generated by Component#toJSON | ||
*/ | ||
static fromJSON(json: string): Component; | ||
labels: Label[]; | ||
iri: Term; | ||
@@ -11,6 +16,11 @@ aggregateType: string; | ||
componentType: string; | ||
constructor({ label, iri }: { | ||
label: string | Term; | ||
constructor({ labels, iri }: { | ||
labels: Label[]; | ||
iri: string | Term; | ||
}); | ||
/** | ||
* Serializes a Component to JSON in a way that makes it deserializable | ||
* by calling Component#fromJSON | ||
*/ | ||
toJSON(): string; | ||
clone(): any; | ||
@@ -17,0 +27,0 @@ avg(): any; |
@@ -7,6 +7,7 @@ "use strict"; | ||
const data_model_1 = require("@rdfjs/data-model"); | ||
const clone_1 = __importDefault(require("clone")); | ||
const base_1 = __importDefault(require("../expressions/base")); | ||
const binding_1 = __importDefault(require("../expressions/binding")); | ||
class Component extends base_1.default { | ||
constructor({ label, iri }) { | ||
constructor({ labels, iri }) { | ||
super(); | ||
@@ -16,8 +17,3 @@ this.isDistinct = false; | ||
this.componentType = ""; | ||
if (typeof label === "string") { | ||
this.label = data_model_1.literal(label); | ||
} | ||
else { | ||
this.label = label; | ||
} | ||
this.labels = labels || []; | ||
if (typeof iri === "string") { | ||
@@ -30,5 +26,33 @@ this.iri = data_model_1.namedNode(iri); | ||
} | ||
/** | ||
* Deserializes a Component from JSON generated by Component#toJSON | ||
*/ | ||
static fromJSON(json) { | ||
const obj = JSON.parse(json); | ||
obj.iri = data_model_1.namedNode(obj.iri); | ||
switch (obj.componentType) { | ||
case "measure": | ||
return new measure_1.default(obj); | ||
case "dimension": | ||
return new dimension_1.default(obj); | ||
case "attribute": | ||
return new attribute_1.default(obj); | ||
} | ||
throw new Error(`Unknown component type '${obj.componentType}'`); | ||
} | ||
/** | ||
* Serializes a Component to JSON in a way that makes it deserializable | ||
* by calling Component#fromJSON | ||
*/ | ||
toJSON() { | ||
const obj = { | ||
componentType: this.componentType, | ||
iri: this.iri.value, | ||
labels: this.labels, | ||
}; | ||
return JSON.stringify(obj); | ||
} | ||
clone() { | ||
const Constructor = Object.getPrototypeOf(this).constructor; | ||
const state = { label: this.label, iri: this.iri }; | ||
const state = { labels: clone_1.default(this.labels), iri: this.iri }; | ||
const instance = new Constructor(state); | ||
@@ -59,2 +83,5 @@ instance.aggregateType = this.aggregateType; | ||
exports.default = Component; | ||
const attribute_1 = __importDefault(require("./attribute")); | ||
const dimension_1 = __importDefault(require("./dimension")); | ||
const measure_1 = __importDefault(require("./measure")); | ||
//# sourceMappingURL=component.js.map |
import { NamedNode } from "rdf-js"; | ||
import DataSet from "./dataset"; | ||
export interface ICubeOptions { | ||
languages?: string[]; | ||
} | ||
export declare class DataCube { | ||
/** | ||
* Deserializes a DataCube from JSON generated by DataCube#toJSON | ||
*/ | ||
static fromJSON(json: string): DataCube; | ||
private endpoint; | ||
private languages; | ||
private fetcher; | ||
private cachedDatasets; | ||
private datasetsLoaded; | ||
private allDatasetsLoaded; | ||
private cachedGraphs; | ||
private graphsLoaded; | ||
/** | ||
* A DataCube queries a SPARQL endpoint and retrieves [[DataSet]]s and | ||
* their [[Dimension]]s, [[Measure]]s and [[Attribute]]s. | ||
* @class DataCube | ||
* @param endpoint SPARQL endpoint | ||
* @param options Options | ||
* @param options.languages Languages in which to get the labels, by priority, e.g. `["de", "en"]`. | ||
* Passed down to [[DataSet]]s and [[DataSetQuery]]. | ||
*/ | ||
constructor(endpoint: string); | ||
constructor(endpoint: string, options?: ICubeOptions); | ||
/** | ||
* Serializes a DataCube to JSON in a way that makes it deserializable | ||
* by calling DataCube#fromJSON | ||
*/ | ||
toJSON(): string; | ||
/** | ||
* Fetch all [[DataSet]]s from the endpoint. | ||
@@ -19,5 +38,19 @@ */ | ||
/** | ||
* Fetch a [[DataSet]] by its IRI. | ||
* | ||
* @param iri IRI of the DataSet to return. | ||
*/ | ||
datasetByIri(dataSetIri: string): Promise<DataSet>; | ||
/** | ||
* Fetch [[DataSet]]s by their graph IRI. | ||
* | ||
* @param graphIri IRI of the graph to look for in all DataSets. | ||
*/ | ||
datasetsByGraphIri(iri: string): Promise<DataSet[]>; | ||
/** | ||
* Fetch all graphs from the endpoint. | ||
*/ | ||
graphs(): Promise<NamedNode[]>; | ||
private cacheDatasets; | ||
private generateQuery; | ||
} |
@@ -6,38 +6,109 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const data_model_1 = require("@rdfjs/data-model"); | ||
const sparqljs_1 = require("sparqljs"); | ||
const dataset_1 = __importDefault(require("./dataset")); | ||
const utils_1 = require("./query/utils"); | ||
const sparqlfetcher_1 = __importDefault(require("./sparqlfetcher")); | ||
class DataCube { | ||
/** | ||
* A DataCube queries a SPARQL endpoint and retrieves [[DataSet]]s and | ||
* their [[Dimension]]s, [[Measure]]s and [[Attribute]]s. | ||
* @class DataCube | ||
* @param endpoint SPARQL endpoint | ||
* @param options Options | ||
* @param options.languages Languages in which to get the labels, by priority, e.g. `["de", "en"]`. | ||
* Passed down to [[DataSet]]s and [[DataSetQuery]]. | ||
*/ | ||
constructor(endpoint) { | ||
this.datasetsLoaded = false; | ||
constructor(endpoint, options = {}) { | ||
this.allDatasetsLoaded = false; | ||
this.graphsLoaded = false; | ||
this.endpoint = endpoint; | ||
this.languages = options.languages || ["de", "it"]; | ||
this.fetcher = new sparqlfetcher_1.default(endpoint); | ||
this.cachedDatasets = []; | ||
this.cachedDatasets = new Map(); | ||
this.cachedGraphs = []; | ||
} | ||
/** | ||
* Deserializes a DataCube from JSON generated by DataCube#toJSON | ||
*/ | ||
static fromJSON(json) { | ||
const obj = JSON.parse(json); | ||
const datacube = new DataCube(obj.endpoint, { | ||
languages: obj.languages, | ||
}); | ||
datacube.cachedDatasets = obj.datasets.reduce((map, str) => { | ||
const dataset = dataset_1.default.fromJSON(str); | ||
map.set(dataset.iri, dataset); | ||
return map; | ||
}, new Map()); | ||
return datacube; | ||
} | ||
/** | ||
* Serializes a DataCube to JSON in a way that makes it deserializable | ||
* by calling DataCube#fromJSON | ||
*/ | ||
toJSON() { | ||
const obj = { | ||
endpoint: this.endpoint, | ||
languages: this.languages, | ||
datasets: Array.from(this.cachedDatasets.values()).map((dataset) => dataset.toJSON()), | ||
}; | ||
return JSON.stringify(obj); | ||
} | ||
/** | ||
* Fetch all [[DataSet]]s from the endpoint. | ||
*/ | ||
async datasets() { | ||
if (this.datasetsLoaded) { | ||
return this.cachedDatasets; | ||
if (this.allDatasetsLoaded) { | ||
return Array.from(this.cachedDatasets.values()); | ||
} | ||
const query = ` | ||
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> | ||
PREFIX qb: <http://purl.org/linked-data/cube#> | ||
SELECT ?dataSetIri ?dataSetLabel ?graphIri WHERE { | ||
GRAPH ?graphIri { | ||
?dataSetIri a qb:DataSet ; | ||
rdfs:label ?dataSetLabel . | ||
const sparql = this.generateQuery(); | ||
const queryResult = await this.fetcher.select(sparql); | ||
this.cacheDatasets(queryResult); | ||
this.allDatasetsLoaded = true; | ||
return Array.from(this.cachedDatasets.values()); | ||
} | ||
/** | ||
* Fetch a [[DataSet]] by its IRI. | ||
* | ||
* @param iri IRI of the DataSet to return. | ||
*/ | ||
async datasetByIri(dataSetIri) { | ||
const found = Array.from(this.cachedDatasets.values()).find((dataset) => dataset.iri === dataSetIri); | ||
if (found) { | ||
return found; | ||
} | ||
} | ||
`; | ||
const datasets = await this.fetcher.select(query); | ||
this.datasetsLoaded = true; | ||
return this.cachedDatasets = datasets.map(({ dataSetIri, dataSetLabel, graphIri }) => new dataset_1.default(this.endpoint, { dataSetIri, dataSetLabel, graphIri })); | ||
const sparql = this.generateQuery({ dataSetIri: data_model_1.namedNode(dataSetIri) }); | ||
const queryResult = await this.fetcher.select(sparql); | ||
if (!queryResult.length) { | ||
throw new Error(`No dataset with iri <${dataSetIri}> on ${this.endpoint}`); | ||
} | ||
this.cacheDatasets(queryResult); | ||
return this.datasetByIri(dataSetIri); | ||
} | ||
/** | ||
* Fetch [[DataSet]]s by their graph IRI. | ||
* | ||
* @param graphIri IRI of the graph to look for in all DataSets. | ||
*/ | ||
async datasetsByGraphIri(iri) { | ||
const datasets = Array.from(this.cachedDatasets.values()) | ||
.filter((ds) => ds.graphIri === iri); | ||
if (datasets.length) { | ||
return datasets; | ||
} | ||
const graphIri = data_model_1.namedNode(iri); | ||
const sparql = this.generateQuery({ graphIri }); | ||
const queryResult = await this.fetcher.select(sparql); | ||
if (!queryResult.length) { | ||
// avoid infinite recursion | ||
throw new Error(`Cannot find graph <${iri}> in ${this.endpoint}`); | ||
} | ||
this.cacheDatasets(queryResult.map((result) => { | ||
result.graphIri = graphIri; | ||
return result; | ||
})); | ||
return this.datasetsByGraphIri(iri); | ||
} | ||
/** | ||
* Fetch all graphs from the endpoint. | ||
@@ -49,3 +120,10 @@ */ | ||
} | ||
const query = "SELECT DISTINCT ?graph WHERE { GRAPH ?graph {?s ?p ?o}}"; | ||
const query = ` | ||
PREFIX qb: <http://purl.org/linked-data/cube#> | ||
SELECT DISTINCT ?graph WHERE { | ||
GRAPH ?graph { | ||
?dataset a qb:DataSet . | ||
} | ||
} | ||
`; | ||
const graphs = await this.fetcher.select(query); | ||
@@ -55,4 +133,75 @@ this.graphsLoaded = true; | ||
} | ||
cacheDatasets(datasets) { | ||
const datasetsByIri = datasets.reduce((acc, { iri, label, graphIri }) => { | ||
if (!acc[iri.value]) { | ||
acc[iri.value] = { | ||
iri, | ||
labels: [], | ||
graphIri, | ||
languages: this.languages, | ||
}; | ||
} | ||
acc[iri.value].labels.push({ | ||
value: label.value, | ||
language: label.language, | ||
}); | ||
return acc; | ||
}, {}); | ||
Object.entries(datasetsByIri) | ||
.forEach(([iri, dataset]) => { | ||
this.cachedDatasets.set(iri, new dataset_1.default(this.endpoint, dataset)); | ||
}); | ||
} | ||
generateQuery({ graphIri, dataSetIri } = {}) { | ||
const graphIriBinding = data_model_1.variable("graphIri"); | ||
const iriBinding = data_model_1.variable("iri"); | ||
const labelBinding = data_model_1.variable("label"); | ||
const query = { | ||
prefixes: utils_1.prefixes, | ||
queryType: "SELECT", | ||
variables: [ | ||
iriBinding, | ||
graphIriBinding, | ||
labelBinding, | ||
], | ||
where: [ | ||
{ | ||
type: "graph", | ||
name: graphIri || graphIriBinding, | ||
patterns: [ | ||
{ | ||
type: "bgp", | ||
triples: [ | ||
{ | ||
subject: iriBinding, | ||
predicate: data_model_1.namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), | ||
object: data_model_1.namedNode("http://purl.org/linked-data/cube#DataSet"), | ||
}, | ||
], | ||
}, | ||
...utils_1.generateLangOptionals(iriBinding, labelBinding, this.languages), | ||
], | ||
}, | ||
utils_1.generateLangCoalesce(labelBinding, this.languages), | ||
], | ||
type: "query", | ||
}; | ||
if (dataSetIri) { | ||
query.where.push({ | ||
type: "filter", | ||
expression: { | ||
type: "operation", | ||
operator: "=", | ||
args: [ | ||
iriBinding, | ||
dataSetIri, | ||
], | ||
}, | ||
}); | ||
} | ||
const generator = new sparqljs_1.Generator({ allPrefixes: true }); | ||
return generator.stringify(query); | ||
} | ||
} | ||
exports.DataCube = DataCube; | ||
//# sourceMappingURL=datacube.js.map |
@@ -1,27 +0,44 @@ | ||
import { NamedNode, Term } from "rdf-js"; | ||
import Attribute from "./components/attribute"; | ||
import Dimension from "./components/dimension"; | ||
import Measure from "./components/measure"; | ||
import DataSetQuery from "./datasetquery"; | ||
import { NamedNode } from "rdf-js"; | ||
import { Attribute, Dimension, Measure } from "./components"; | ||
import { ICubeOptions } from "./datacube"; | ||
import DataSetQuery from "./query/datasetquery"; | ||
import { IQueryOptions } from "./query/utils"; | ||
export declare type Label = { | ||
value: string; | ||
language?: string; | ||
}; | ||
export interface IDataSetOptions extends ICubeOptions { | ||
iri: NamedNode; | ||
labels?: Label[]; | ||
graphIri: NamedNode; | ||
} | ||
declare class DataSet { | ||
label: any; | ||
/** | ||
* Deserializes a DataSet from JSON generated by DataSet#toJSON | ||
*/ | ||
static fromJSON(json: string): DataSet; | ||
labels: Label[]; | ||
iri: string; | ||
endpoint: string; | ||
graphIri: NamedNode | undefined; | ||
graphIri?: string; | ||
private languages; | ||
private fetcher; | ||
private metadataLoaded; | ||
private cachedMetadata; | ||
private componentsLoaded; | ||
private cachedComponents; | ||
/** | ||
* @param endpoint SPARQL endpoint where the DataSet lives. | ||
* @param options Additional info about the DataSet. | ||
* @param options.dataSetIri The IRI of the DataSet. | ||
* @param options.dataSetLabel (Optional) A label for the DataSet. | ||
* @param options.iri The IRI of the DataSet. | ||
* @param options.graphIri The IRI of the graph from which the data will be fetched. | ||
* @param options.labels (Optional) A list of labels for the DataSet in the following form: | ||
* `[ { value: "Something", language: "en" }, { value: "Etwas", language: "de" }, … ]` | ||
* @param options.languages Languages in which to get the labels, by priority, e.g. `["de", "en"]`. | ||
*/ | ||
constructor(endpoint: string, options: { | ||
dataSetIri: NamedNode; | ||
dataSetLabel?: Term; | ||
graphIri: NamedNode; | ||
}); | ||
constructor(endpoint: string, options: IDataSetOptions); | ||
/** | ||
* Serializes a DataSet to JSON in a way that makes it deserializable | ||
* by calling DataSet#fromJSON | ||
*/ | ||
toJSON(): string; | ||
/** | ||
* Fetch all [[Attribute]]s from the DataSet. | ||
@@ -41,5 +58,5 @@ */ | ||
*/ | ||
query(): DataSetQuery; | ||
private metadata; | ||
query(opts?: IQueryOptions): DataSetQuery; | ||
private components; | ||
} | ||
export default DataSet; |
"use strict"; | ||
var __importStar = (this && this.__importStar) || function (mod) { | ||
if (mod && mod.__esModule) return mod; | ||
var result = {}; | ||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; | ||
result["default"] = mod; | ||
return result; | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
@@ -6,6 +13,7 @@ return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const attribute_1 = __importDefault(require("./components/attribute")); | ||
const dimension_1 = __importDefault(require("./components/dimension")); | ||
const measure_1 = __importDefault(require("./components/measure")); | ||
const datasetquery_1 = __importDefault(require("./datasetquery")); | ||
const data_model_1 = require("@rdfjs/data-model"); | ||
const sparqljs_1 = require("sparqljs"); | ||
const components_1 = __importStar(require("./components")); | ||
const datasetquery_1 = __importDefault(require("./query/datasetquery")); | ||
const utils_1 = require("./query/utils"); | ||
const sparqlfetcher_1 = __importDefault(require("./sparqlfetcher")); | ||
@@ -16,21 +24,75 @@ class DataSet { | ||
* @param options Additional info about the DataSet. | ||
* @param options.dataSetIri The IRI of the DataSet. | ||
* @param options.dataSetLabel (Optional) A label for the DataSet. | ||
* @param options.iri The IRI of the DataSet. | ||
* @param options.graphIri The IRI of the graph from which the data will be fetched. | ||
* @param options.labels (Optional) A list of labels for the DataSet in the following form: | ||
* `[ { value: "Something", language: "en" }, { value: "Etwas", language: "de" }, … ]` | ||
* @param options.languages Languages in which to get the labels, by priority, e.g. `["de", "en"]`. | ||
*/ | ||
constructor(endpoint, options) { | ||
this.metadataLoaded = false; | ||
const { dataSetIri, dataSetLabel, graphIri } = options; | ||
this.componentsLoaded = false; | ||
const { iri, labels, graphIri } = options; | ||
this.fetcher = new sparqlfetcher_1.default(endpoint); | ||
this.iri = dataSetIri.value; | ||
this.label = (dataSetLabel && dataSetLabel.value) || ""; | ||
this.graphIri = graphIri; | ||
this.endpoint = endpoint; | ||
this.iri = iri.value; | ||
this.graphIri = graphIri.value; | ||
this.labels = labels || []; | ||
this.languages = options.languages || ["de", "it"]; | ||
this.cachedComponents = { | ||
dimensions: new Map(), | ||
measures: new Map(), | ||
attributes: new Map(), | ||
}; | ||
} | ||
/** | ||
* Deserializes a DataSet from JSON generated by DataSet#toJSON | ||
*/ | ||
static fromJSON(json) { | ||
const obj = JSON.parse(json); | ||
const dataset = new DataSet(obj.endpoint, { | ||
iri: data_model_1.namedNode(obj.iri), | ||
graphIri: data_model_1.namedNode(obj.graphIri), | ||
labels: obj.labels, | ||
languages: obj.languages, | ||
}); | ||
["dimensions", "measures", "attributes"].forEach((componentTypes) => { | ||
dataset.cachedComponents[componentTypes] = obj.components[componentTypes] | ||
.map(components_1.default.fromJSON) | ||
.reduce((cache, component) => { | ||
cache.set(component.iri.value, component); | ||
return cache; | ||
}, new Map()); | ||
}); | ||
return dataset; | ||
} | ||
/** | ||
* Serializes a DataSet to JSON in a way that makes it deserializable | ||
* by calling DataSet#fromJSON | ||
*/ | ||
toJSON() { | ||
const dimensions = Array.from(this.cachedComponents.dimensions.values()) | ||
.map((component) => component.toJSON()); | ||
const measures = Array.from(this.cachedComponents.measures.values()) | ||
.map((component) => component.toJSON()); | ||
const attributes = Array.from(this.cachedComponents.attributes.values()) | ||
.map((component) => component.toJSON()); | ||
const obj = { | ||
endpoint: this.endpoint, | ||
iri: this.iri, | ||
graphIri: this.graphIri, | ||
labels: this.labels, | ||
languages: this.languages, | ||
components: { | ||
dimensions, | ||
measures, | ||
attributes, | ||
}, | ||
}; | ||
return JSON.stringify(obj); | ||
} | ||
/** | ||
* Fetch all [[Attribute]]s from the DataSet. | ||
*/ | ||
async attributes() { | ||
await this.metadata(); | ||
return this.cachedMetadata.attributes; | ||
await this.components(); | ||
return Array.from(this.cachedComponents.attributes.values()); | ||
} | ||
@@ -41,4 +103,4 @@ /** | ||
async dimensions() { | ||
await this.metadata(); | ||
return this.cachedMetadata.dimensions; | ||
await this.components(); | ||
return Array.from(this.cachedComponents.dimensions.values()); | ||
} | ||
@@ -49,4 +111,4 @@ /** | ||
async measures() { | ||
await this.metadata(); | ||
return this.cachedMetadata.measures; | ||
await this.components(); | ||
return Array.from(this.cachedComponents.measures.values()); | ||
} | ||
@@ -56,35 +118,100 @@ /** | ||
*/ | ||
query() { | ||
return new datasetquery_1.default(this); | ||
query(opts = {}) { | ||
if (!opts.languages) { | ||
opts.languages = this.languages; | ||
} | ||
return new datasetquery_1.default(this, opts); | ||
} | ||
async metadata() { | ||
if (this.metadataLoaded) { | ||
async components() { | ||
if (this.componentsLoaded) { | ||
return; | ||
} | ||
const query = ` | ||
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> | ||
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> | ||
PREFIX qb: <http://purl.org/linked-data/cube#> | ||
PREFIX skos: <http://www.w3.org/2004/02/skos/core#> | ||
SELECT ?label ?kind ?iri | ||
${this.graphIri ? `FROM <${this.graphIri.value}>` : ""} | ||
WHERE { | ||
<${this.iri}> a qb:DataSet ; | ||
qb:structure/qb:component ?componentSpec . | ||
?componentSpec ?kind ?iri . | ||
?iri rdfs:label|skos:prefLabel ?label . | ||
}`; | ||
const metadata = await this.fetcher.select(query); | ||
this.cachedMetadata = metadata.reduce((metadataProp, { kind, label, iri }) => { | ||
const binding = data_model_1.variable("iri"); | ||
const labelBinding = data_model_1.variable("label"); | ||
const query = { | ||
prefixes: utils_1.prefixes, | ||
queryType: "SELECT", | ||
variables: [ | ||
binding, | ||
data_model_1.variable("kind"), | ||
labelBinding, | ||
], | ||
from: { default: [data_model_1.namedNode(this.graphIri)], named: [] }, | ||
where: [ | ||
{ | ||
type: "bgp", | ||
triples: [ | ||
{ | ||
subject: data_model_1.namedNode(this.iri), | ||
predicate: data_model_1.namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), | ||
object: data_model_1.namedNode("http://purl.org/linked-data/cube#DataSet"), | ||
}, | ||
{ | ||
subject: data_model_1.namedNode(this.iri), | ||
predicate: { | ||
type: "path", | ||
pathType: "/", | ||
items: [ | ||
data_model_1.namedNode("http://purl.org/linked-data/cube#structure"), | ||
data_model_1.namedNode("http://purl.org/linked-data/cube#component"), | ||
], | ||
}, | ||
object: data_model_1.variable("componentSpec"), | ||
}, | ||
{ | ||
subject: data_model_1.variable("componentSpec"), | ||
predicate: data_model_1.variable("kind"), | ||
object: binding, | ||
}, | ||
], | ||
}, | ||
{ | ||
type: "filter", | ||
expression: { | ||
type: "operation", | ||
operator: "in", | ||
args: [ | ||
data_model_1.variable("kind"), | ||
[ | ||
data_model_1.namedNode("http://purl.org/linked-data/cube#attribute"), | ||
data_model_1.namedNode("http://purl.org/linked-data/cube#dimension"), | ||
data_model_1.namedNode("http://purl.org/linked-data/cube#measure"), | ||
], | ||
], | ||
}, | ||
}, | ||
...utils_1.generateLangOptionals(binding, labelBinding, this.languages), | ||
utils_1.generateLangCoalesce(labelBinding, this.languages), | ||
], | ||
type: "query", | ||
}; | ||
const generator = new sparqljs_1.Generator({ allPrefixes: true }); | ||
const sparql = generator.stringify(query); | ||
const components = await this.fetcher.select(sparql); | ||
const componentsByIri = components.reduce((acc, { kind, label, iri }) => { | ||
if (!acc[iri.value]) { | ||
acc[iri.value] = { | ||
kind, | ||
labels: [], | ||
iri, | ||
}; | ||
} | ||
acc[iri.value].labels.push({ | ||
value: label.value, | ||
language: label.language, | ||
}); | ||
return acc; | ||
}, {}); | ||
const groupedComponents = Object.values(componentsByIri); | ||
this.cachedComponents = groupedComponents | ||
.reduce((componentsProp, { kind, labels, iri }) => { | ||
switch (kind.value) { | ||
case "http://purl.org/linked-data/cube#attribute": | ||
metadataProp.attributes.push(new attribute_1.default({ label, iri })); | ||
componentsProp.attributes.set(iri.value, new components_1.Attribute({ labels, iri })); | ||
break; | ||
case "http://purl.org/linked-data/cube#dimension": | ||
metadataProp.dimensions.push(new dimension_1.default({ label, iri })); | ||
componentsProp.dimensions.set(iri.value, new components_1.Dimension({ labels, iri })); | ||
break; | ||
case "http://purl.org/linked-data/cube#measure": | ||
metadataProp.measures.push(new measure_1.default({ label, iri })); | ||
componentsProp.measures.set(iri.value, new components_1.Measure({ labels, iri })); | ||
break; | ||
@@ -94,9 +221,9 @@ default: | ||
} | ||
return metadataProp; | ||
return componentsProp; | ||
}, { | ||
attributes: [], | ||
dimensions: [], | ||
measures: [], | ||
attributes: new Map(), | ||
dimensions: new Map(), | ||
measures: new Map(), | ||
}); | ||
this.metadataLoaded = true; | ||
this.componentsLoaded = true; | ||
} | ||
@@ -103,0 +230,0 @@ } |
@@ -1,2 +0,2 @@ | ||
import { Term } from "rdf-js"; | ||
import { Literal, Term } from "rdf-js"; | ||
export interface IExpr { | ||
@@ -18,1 +18,2 @@ resolve(mapping: Map<string, string>): IExpr; | ||
export declare function into(what: IntoExpr): IExpr; | ||
export declare function toLiteral(arg: any): Literal; |
@@ -8,5 +8,9 @@ "use strict"; | ||
const data_model_1 = require("@rdfjs/data-model"); | ||
const toLiteral_1 = require("../toLiteral"); | ||
const namespace_1 = __importDefault(require("@rdfjs/namespace")); | ||
const binding_1 = __importDefault(require("./binding")); | ||
const operator_1 = __importDefault(require("./operator")); | ||
const xsd = namespace_1.default("http://www.w3.org/2001/XMLSchema#"); | ||
const dateTime = /^\d{4}(-[01]\d(-[0-3]\d(T[0-2]\d:[0-5]\d:?([0-5]\d(\.\d+)?)?([+-][0-2]\d:[0-5]\d)?Z?)?)?)$/; | ||
const bool = /^(true|false)$/; | ||
const numb = /^[\-+]?(?:\d+\.?\d*([eE](?:[\-\+])?\d+)|\d*\.?\d+)$/; | ||
class TermExpr { | ||
@@ -41,3 +45,3 @@ constructor(term) { | ||
case "number": | ||
return new TermExpr(toLiteral_1.toLiteral(what)); | ||
return new TermExpr(toLiteral(what)); | ||
case "string": | ||
@@ -48,3 +52,3 @@ const iriRegExp = new RegExp("^https?://"); | ||
} | ||
return new TermExpr(toLiteral_1.toLiteral(what)); | ||
return new TermExpr(toLiteral(what)); | ||
case "object": | ||
@@ -72,2 +76,42 @@ if (what instanceof operator_1.default || | ||
exports.into = into; | ||
function toLiteral(arg) { | ||
if (isLiteral(arg)) { | ||
return arg; | ||
} | ||
if (arg === true || arg === false) { | ||
return data_model_1.literal(String(arg), xsd("boolean")); | ||
} | ||
if (bool.test(arg)) { | ||
return data_model_1.literal(arg, xsd("boolean")); | ||
} | ||
if (arg instanceof Date) { | ||
return data_model_1.literal(arg.toISOString(), xsd("dateTime")); | ||
} | ||
if (dateTime.test(arg)) { | ||
const date = new Date(arg); | ||
return data_model_1.literal(date.toISOString(), xsd("dateTime")); | ||
} | ||
if (/^[0-9+-]/.test(arg)) { | ||
const match = numb.exec(arg); | ||
if (match) { | ||
const value = match[0]; | ||
let type; | ||
if (match[1]) { | ||
type = xsd("double"); | ||
} | ||
else if (/^[+\-]?\d+$/.test(match[0])) { | ||
type = xsd("integer"); | ||
} | ||
else { | ||
type = xsd("decimal"); | ||
} | ||
return data_model_1.literal(String(value), type); | ||
} | ||
} | ||
return data_model_1.literal(arg); | ||
} | ||
exports.toLiteral = toLiteral; | ||
function isLiteral(term) { | ||
return (term instanceof data_model_1.literal("").constructor); | ||
} | ||
//# sourceMappingURL=utils.js.map |
{ | ||
"name": "@zazuko/query-rdf-data-cube", | ||
"version": "0.0.2", | ||
"version": "0.0.3", | ||
"description": "Query (or introspect) [RDF Data Cubes](https://www.w3.org/TR/vocab-data-cube/) with a JavaScript API, without writing SPARQL.", | ||
@@ -15,6 +15,7 @@ "scripts": { | ||
"prepare": "npm run build", | ||
"version": "conventional-changelog -p angular -i CHANGELOG.md -s && git add CHANGELOG.md" | ||
"version": "conventional-changelog -p angular -i CHANGELOG.md -s && git add CHANGELOG.md", | ||
"postversion": "npm run docs:compile && npm run docs:publish" | ||
}, | ||
"main": "dist/node/datacube.js", | ||
"module": "dist/es/index.js", | ||
"main": "dist/node/src/datacube.js", | ||
"module": "dist/es/src/datacube.js", | ||
"files": [ | ||
@@ -21,0 +22,0 @@ "dist" |
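The entry points moved under a src/ directory in the build output. Consumers that resolve through the package name are unaffected; only hand-written deep imports (if there are any) need the new segment. A brief sketch:

```js
// Resolved through "main"/"module", so it follows the relocated build automatically.
import { DataCube } from "@zazuko/query-rdf-data-cube";
console.log(typeof DataCube); // "function"

// A deep import would now need the extra src/ segment, e.g.
// "@zazuko/query-rdf-data-cube/dist/node/src/datacube" instead of ".../dist/node/datacube".
```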
# [@zazuko/query-rdf-data-cube](https://github.com/zazuko/query-rdf-data-cube)

[![Build Status](https://travis-ci.org/zazuko/query-rdf-data-cube.svg?branch=master)](https://travis-ci.org/zazuko/query-rdf-data-cube) [![Coverage Status](https://coveralls.io/repos/github/zazuko/query-rdf-data-cube/badge.svg?branch=master)](https://coveralls.io/github/zazuko/query-rdf-data-cube?branch=master)
[![npm version](https://img.shields.io/npm/v/@zazuko/query-rdf-data-cube.svg?style=flat)](https://npmjs.org/package/@zazuko/query-rdf-data-cube "View this project on npm")
[![Build Status](https://travis-ci.org/zazuko/query-rdf-data-cube.svg?branch=master)](https://travis-ci.org/zazuko/query-rdf-data-cube)
[![Coverage Status](https://coveralls.io/repos/github/zazuko/query-rdf-data-cube/badge.svg?branch=master)](https://coveralls.io/github/zazuko/query-rdf-data-cube?branch=master)

@@ -16,2 +18,4 @@ Query and explore [RDF Data Cubes](https://www.w3.org/TR/vocab-data-cube/) with a JavaScript API,

Changelog: <https://github.com/zazuko/query-rdf-data-cube/blob/master/CHANGELOG.md>

## Examples

@@ -18,0 +22,0 @@
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package