@malloydata/malloy - npm Package Compare versions

Comparing version 0.0.22-dev230117165007 to 0.0.22-dev230117200639

dist/run_sql_options.d.ts

dist/dialect/dialect_map.js

@@ -26,3 +26,3 @@ "use strict";
 exports.registerDialect = exports.getDialect = void 0;
-const _1 = require(".");
+const duckdb_1 = require("./duckdb");
 const postgres_1 = require("./postgres");
@@ -45,3 +45,3 @@ const standardsql_1 = require("./standardsql");
 registerDialect(new standardsql_1.StandardSQLDialect());
-registerDialect(new _1.DuckDBDialect());
+registerDialect(new duckdb_1.DuckDBDialect());
 //# sourceMappingURL=dialect_map.js.map

@@ -1,3 +0,2 @@
-import { AtomicFieldTypeInner, TimeFieldType, TimestampUnit, ExtractUnit, DialectFragment, TimeValue } from "..";
-import { Expr, Sampling, StructDef, TypecastFragment } from "../model/malloy_types";
+import { AtomicFieldType as AtomicFieldTypeInner, TimeFieldType, TimestampUnit, ExtractUnit, DialectFragment, TimeValue, Expr, Sampling, StructDef, TypecastFragment } from "../model/malloy_types";
 interface DialectField {

@@ -4,0 +3,0 @@ type: string;

@@ -26,6 +26,2 @@ "use strict";
 exports.Dialect = void 0;
-// Can't get these from "../model" because model includes this file
-// and that can create a circular reference problem. This is a patch
-// and really indicates a problem in the relationship between
-// dialect and model, it's going to come up again some time.
 const malloy_types_1 = require("../model/malloy_types");

@@ -32,0 +28,0 @@ class Dialect {
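The four comment lines removed above explain why dialect.js already imported "../model/malloy_types" directly: pulling types through the "../model" barrel creates a circular reference between model and dialect. This release makes the direct import the rule across every dialect file (the model_1 → malloy_types_1 changes that follow), so the apologetic comment is no longer needed. A minimal sketch of the cycle and the workaround, using simplified file names and exports rather than Malloy's actual module layout:

// model/index.ts -- a barrel that re-exports both the plain type
// definitions and the query builder, which itself depends on dialect.
export * from "./malloy_types";
export * from "./malloy_query";

// dialect/dialect.ts
// Importing via the barrel would close the cycle
//   dialect -> model/index -> malloy_query -> dialect:
//   import { Expr } from "../model";
// Importing the concrete module keeps the dependency graph acyclic:
import { Expr } from "../model/malloy_types";

export abstract class Dialect {
  // each SQL dialect renders "now" as an expression fragment
  abstract sqlNow(): Expr;
}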

@@ -1,2 +0,2 @@
-import { DateUnit, Expr, ExtractUnit, Sampling, StructDef, TimeFieldType, TimestampUnit, TimeValue, TypecastFragment } from "../model";
+import { DateUnit, Expr, ExtractUnit, Sampling, StructDef, TimeFieldType, TimestampUnit, TimeValue, TypecastFragment } from "../model/malloy_types";
 import { Dialect, DialectFieldList, FunctionInfo } from "./dialect";

@@ -3,0 +3,0 @@ export declare class DuckDBDialect extends Dialect {

@@ -26,3 +26,3 @@ "use strict";
 exports.DuckDBDialect = void 0;
-const model_1 = require("../model");
+const malloy_types_1 = require("../model/malloy_types");
 const utils_1 = require("../model/utils");

@@ -224,3 +224,3 @@ const dialect_1 = require("./dialect");
 return `SELECT LIST(ROW(${structDef.fields
-.map((fieldDef) => this.sqlMaybeQuoteIdentifier((0, model_1.getIdentifier)(fieldDef)))
+.map((fieldDef) => this.sqlMaybeQuoteIdentifier((0, malloy_types_1.getIdentifier)(fieldDef)))
 .join(",")})) FROM ${lastStageName}\n`;

@@ -256,24 +256,24 @@ }
 if (inSeconds[units]) {
-lVal = (0, model_1.mkExpr) `EXTRACT(EPOCH FROM ${lVal})`;
-rVal = (0, model_1.mkExpr) `EXTRACT(EPOCH FROM ${rVal})`;
-const duration = (0, model_1.mkExpr) `(${rVal} - ${lVal})`;
+lVal = (0, malloy_types_1.mkExpr) `EXTRACT(EPOCH FROM ${lVal})`;
+rVal = (0, malloy_types_1.mkExpr) `EXTRACT(EPOCH FROM ${rVal})`;
+const duration = (0, malloy_types_1.mkExpr) `(${rVal} - ${lVal})`;
 return units == "second"
 ? duration
-: (0, model_1.mkExpr) `FLOOR(${duration}/${inSeconds[units].toString()})`;
+: (0, malloy_types_1.mkExpr) `FLOOR(${duration}/${inSeconds[units].toString()})`;
 }
 if (from.valueType != "date") {
-lVal = (0, model_1.mkExpr) `CAST((${lVal}) AS DATE)`;
+lVal = (0, malloy_types_1.mkExpr) `CAST((${lVal}) AS DATE)`;
 }
 if (to.valueType != "date") {
-rVal = (0, model_1.mkExpr) `CAST((${rVal}) AS DATE)`;
+rVal = (0, malloy_types_1.mkExpr) `CAST((${rVal}) AS DATE)`;
 }
 if (units == "week") {
 // DuckDB's weeks start on Monday, but Malloy's weeks start on Sunday
-lVal = (0, model_1.mkExpr) `(${lVal} + INTERVAL 1 DAY)`;
-rVal = (0, model_1.mkExpr) `(${rVal} + INTERVAL 1 DAY)`;
+lVal = (0, malloy_types_1.mkExpr) `(${lVal} + INTERVAL 1 DAY)`;
+rVal = (0, malloy_types_1.mkExpr) `(${rVal} + INTERVAL 1 DAY)`;
 }
-return (0, model_1.mkExpr) `DATE_DIFF('${units}', ${lVal}, ${rVal})`;
+return (0, malloy_types_1.mkExpr) `DATE_DIFF('${units}', ${lVal}, ${rVal})`;
 }
 sqlNow() {
-return (0, model_1.mkExpr) `CURRENT_TIMESTAMP::TIMESTAMP`;
+return (0, malloy_types_1.mkExpr) `CURRENT_TIMESTAMP::TIMESTAMP`;
 }

@@ -284,11 +284,11 @@ sqlTrunc(sqlTime, units) {
 const truncThis = week
-? (0, model_1.mkExpr) `${sqlTime.value} + INTERVAL 1 DAY`
+? (0, malloy_types_1.mkExpr) `${sqlTime.value} + INTERVAL 1 DAY`
 : sqlTime.value;
-const trunced = (0, model_1.mkExpr) `DATE_TRUNC('${units}', ${truncThis})`;
-return week ? (0, model_1.mkExpr) `(${trunced} - INTERVAL 1 DAY)` : trunced;
+const trunced = (0, malloy_types_1.mkExpr) `DATE_TRUNC('${units}', ${truncThis})`;
+return week ? (0, malloy_types_1.mkExpr) `(${trunced} - INTERVAL 1 DAY)` : trunced;
 }
 sqlExtract(from, units) {
 const pgUnits = pgExtractionMap[units] || units;
-const extracted = (0, model_1.mkExpr) `EXTRACT(${pgUnits} FROM ${from.value})`;
-return units == "day_of_week" ? (0, model_1.mkExpr) `(${extracted}+1)` : extracted;
+const extracted = (0, malloy_types_1.mkExpr) `EXTRACT(${pgUnits} FROM ${from.value})`;
+return units == "day_of_week" ? (0, malloy_types_1.mkExpr) `(${extracted}+1)` : extracted;
 }

@@ -298,10 +298,10 @@ sqlAlterTime(op, expr, n, timeframe) {
 timeframe = "month";
-n = (0, model_1.mkExpr) `${n}*3`;
+n = (0, malloy_types_1.mkExpr) `${n}*3`;
 }
 if (timeframe == "week") {
 timeframe = "day";
-n = (0, model_1.mkExpr) `${n}*7`;
+n = (0, malloy_types_1.mkExpr) `${n}*7`;
 }
-const interval = (0, model_1.mkExpr) `INTERVAL (${n}) ${timeframe}`;
-return (0, model_1.mkExpr) `((${expr.value})) ${op} ${interval}`;
+const interval = (0, malloy_types_1.mkExpr) `INTERVAL (${n}) ${timeframe}`;
+return (0, malloy_types_1.mkExpr) `((${expr.value})) ${op} ${interval}`;
 }

@@ -311,3 +311,3 @@ sqlCast(cast) {
 const castTo = castMap[cast.dstType] || cast.dstType;
-return (0, model_1.mkExpr) `cast(${cast.expr} as ${castTo})`;
+return (0, malloy_types_1.mkExpr) `cast(${cast.expr} as ${castTo})`;
 }

@@ -317,3 +317,3 @@ return cast.expr;
 sqlRegexpMatch(expr, regexp) {
-return (0, model_1.mkExpr) `REGEXP_MATCHES(${expr}, ${regexp})`;
+return (0, malloy_types_1.mkExpr) `REGEXP_MATCHES(${expr}, ${regexp})`;
 }

@@ -351,9 +351,9 @@ sqlLiteralTime(timeString, type, _timezone) {
 if (sample !== undefined) {
-if ((0, model_1.isSamplingEnable)(sample) && sample.enable) {
+if ((0, malloy_types_1.isSamplingEnable)(sample) && sample.enable) {
 sample = this.defaultSampling;
 }
-if ((0, model_1.isSamplingRows)(sample)) {
+if ((0, malloy_types_1.isSamplingRows)(sample)) {
 return `(SELECT * FROM ${tableSQL} USING SAMPLE ${sample.rows})`;
 }
-else if ((0, model_1.isSamplingPercent)(sample)) {
+else if ((0, malloy_types_1.isSamplingPercent)(sample)) {
 return `(SELECT * FROM ${tableSQL} USING SAMPLE ${sample.percent} PERCENT (bernoulli))`;

@@ -360,0 +360,0 @@ }

@@ -1,2 +0,2 @@
-import { DateUnit, ExtractUnit, TimeFieldType, TimestampUnit, Expr, TimeValue, TypecastFragment, Sampling } from "../model";
+import { DateUnit, ExtractUnit, TimeFieldType, TimestampUnit, Expr, TimeValue, TypecastFragment, Sampling } from "../model/malloy_types";
 import { Dialect, DialectFieldList, FunctionInfo } from "./dialect";

@@ -3,0 +3,0 @@ export declare class PostgresDialect extends Dialect {

@@ -27,3 +27,3 @@ "use strict";
 const utils_1 = require("../model/utils");
-const model_1 = require("../model");
+const malloy_types_1 = require("../model/malloy_types");
 const dialect_1 = require("./dialect");

@@ -220,3 +220,3 @@ const castMap = {
 sqlNow() {
-return (0, model_1.mkExpr) `CURRENT_TIMESTAMP`;
+return (0, malloy_types_1.mkExpr) `CURRENT_TIMESTAMP`;
 }

@@ -227,11 +227,11 @@ sqlTrunc(sqlTime, units) {
 const truncThis = week
-? (0, model_1.mkExpr) `${sqlTime.value}+interval'1'day`
+? (0, malloy_types_1.mkExpr) `${sqlTime.value}+interval'1'day`
 : sqlTime.value;
-const trunced = (0, model_1.mkExpr) `DATE_TRUNC('${units}', ${truncThis})`;
-return week ? (0, model_1.mkExpr) `(${trunced}-interval'1'day)` : trunced;
+const trunced = (0, malloy_types_1.mkExpr) `DATE_TRUNC('${units}', ${truncThis})`;
+return week ? (0, malloy_types_1.mkExpr) `(${trunced}-interval'1'day)` : trunced;
 }
 sqlExtract(from, units) {
 const pgUnits = pgExtractionMap[units] || units;
-const extracted = (0, model_1.mkExpr) `EXTRACT(${pgUnits} FROM ${from.value})`;
-return units == "day_of_week" ? (0, model_1.mkExpr) `(${extracted}+1)` : extracted;
+const extracted = (0, malloy_types_1.mkExpr) `EXTRACT(${pgUnits} FROM ${from.value})`;
+return units == "day_of_week" ? (0, malloy_types_1.mkExpr) `(${extracted}+1)` : extracted;
 }

@@ -241,6 +241,6 @@ sqlAlterTime(op, expr, n, timeframe) {
 timeframe = "month";
-n = (0, model_1.mkExpr) `${n}*3`;
+n = (0, malloy_types_1.mkExpr) `${n}*3`;
 }
-const interval = (0, model_1.mkExpr) `make_interval(${pgMakeIntervalMap[timeframe]}=>${n})`;
-return (0, model_1.mkExpr) `((${expr.value})${op}${interval})`;
+const interval = (0, malloy_types_1.mkExpr) `make_interval(${pgMakeIntervalMap[timeframe]}=>${n})`;
+return (0, malloy_types_1.mkExpr) `((${expr.value})${op}${interval})`;
 }

@@ -250,3 +250,3 @@ sqlCast(cast) {
 const castTo = castMap[cast.dstType] || cast.dstType;
-return (0, model_1.mkExpr) `cast(${cast.expr} as ${castTo})`;
+return (0, malloy_types_1.mkExpr) `cast(${cast.expr} as ${castTo})`;
 }

@@ -256,3 +256,3 @@ return cast.expr;
 sqlRegexpMatch(expr, regexp) {
-return (0, model_1.mkExpr) `(${expr} ~ ${regexp})`;
+return (0, malloy_types_1.mkExpr) `(${expr} ~ ${regexp})`;
 }

@@ -277,13 +277,13 @@ sqlLiteralTime(timeString, type, _timezone) {
 if (inSeconds[units]) {
-lVal = (0, model_1.mkExpr) `EXTRACT(EPOCH FROM ${lVal})`;
-rVal = (0, model_1.mkExpr) `EXTRACT(EPOCH FROM ${rVal})`;
-const duration = (0, model_1.mkExpr) `(${rVal} - ${lVal})`;
+lVal = (0, malloy_types_1.mkExpr) `EXTRACT(EPOCH FROM ${lVal})`;
+rVal = (0, malloy_types_1.mkExpr) `EXTRACT(EPOCH FROM ${rVal})`;
+const duration = (0, malloy_types_1.mkExpr) `(${rVal} - ${lVal})`;
 return units == "second"
 ? duration
-: (0, model_1.mkExpr) `TRUNC(${duration}/${inSeconds[units].toString()})`;
+: (0, malloy_types_1.mkExpr) `TRUNC(${duration}/${inSeconds[units].toString()})`;
 }
 if (units === "day") {
-return (0, model_1.mkExpr) `${rVal}::date - ${lVal}::date`;
+return (0, malloy_types_1.mkExpr) `${rVal}::date - ${lVal}::date`;
 }
-const yearDiff = (0, model_1.mkExpr) `(DATE_PART('year', ${rVal}) - DATE_PART('year', ${lVal}))`;
+const yearDiff = (0, malloy_types_1.mkExpr) `(DATE_PART('year', ${rVal}) - DATE_PART('year', ${lVal}))`;
 if (units == "year") {

@@ -293,12 +293,12 @@ return yearDiff;
 if (units == "week") {
-const dayDiffForWeekStart = (0, model_1.mkExpr) `(DATE_TRUNC('week', ${rVal} + '1 day'::interval)::date - DATE_TRUNC('week', ${lVal} + '1 day'::interval)::date)`;
-return (0, model_1.mkExpr) `${dayDiffForWeekStart} / 7`;
+const dayDiffForWeekStart = (0, malloy_types_1.mkExpr) `(DATE_TRUNC('week', ${rVal} + '1 day'::interval)::date - DATE_TRUNC('week', ${lVal} + '1 day'::interval)::date)`;
+return (0, malloy_types_1.mkExpr) `${dayDiffForWeekStart} / 7`;
 }
 if (units == "month") {
-const monthDiff = (0, model_1.mkExpr) `DATE_PART('month', ${rVal}) - DATE_PART('month', ${lVal})`;
-return (0, model_1.mkExpr) `${yearDiff} * 12 + ${monthDiff}`;
+const monthDiff = (0, malloy_types_1.mkExpr) `DATE_PART('month', ${rVal}) - DATE_PART('month', ${lVal})`;
+return (0, malloy_types_1.mkExpr) `${yearDiff} * 12 + ${monthDiff}`;
 }
 if (units == "quarter") {
-const qDiff = (0, model_1.mkExpr) `DATE_PART('quarter', ${rVal}) - DATE_PART('quarter', ${lVal})`;
-return (0, model_1.mkExpr) `${yearDiff} * 4 + ${qDiff}`;
+const qDiff = (0, malloy_types_1.mkExpr) `DATE_PART('quarter', ${rVal}) - DATE_PART('quarter', ${lVal})`;
+return (0, malloy_types_1.mkExpr) `${yearDiff} * 4 + ${qDiff}`;
 }

@@ -318,9 +318,9 @@ throw new Error(`Unknown or unhandled postgres time unit: ${units}`);
 if (sample !== undefined) {
-if ((0, model_1.isSamplingEnable)(sample) && sample.enable) {
+if ((0, malloy_types_1.isSamplingEnable)(sample) && sample.enable) {
 sample = this.defaultSampling;
 }
-if ((0, model_1.isSamplingRows)(sample)) {
+if ((0, malloy_types_1.isSamplingRows)(sample)) {
 return `(SELECT * FROM ${tableSQL} TABLESAMPLE SYSTEM_ROWS(${sample.rows}))`;
 }
-else if ((0, model_1.isSamplingPercent)(sample)) {
+else if ((0, malloy_types_1.isSamplingPercent)(sample)) {
 return `(SELECT * FROM ${tableSQL} TABLESAMPLE SYSTEM (${sample.percent}))`;

@@ -327,0 +327,0 @@ }

@@ -1,2 +0,2 @@
-import { TimestampUnit, ExtractUnit, Expr, TimeValue, TypecastFragment, Sampling } from "../model";
+import { TimestampUnit, ExtractUnit, Expr, TimeValue, TypecastFragment, Sampling } from "../model/malloy_types";
 import { Dialect, DialectFieldList, FunctionInfo } from "./dialect";

@@ -3,0 +3,0 @@ export declare class StandardSQLDialect extends Dialect {

@@ -27,3 +27,3 @@ "use strict";
 const utils_1 = require("../model/utils");
-const model_1 = require("../model");
+const malloy_types_1 = require("../model/malloy_types");
 const dialect_1 = require("./dialect");

@@ -274,16 +274,16 @@ const castMap = {
 sqlNow() {
-return (0, model_1.mkExpr) `CURRENT_TIMESTAMP()`;
+return (0, malloy_types_1.mkExpr) `CURRENT_TIMESTAMP()`;
 }
 sqlTrunc(sqlTime, units) {
 if (sqlTime.valueType == "date") {
-if ((0, model_1.isDateUnit)(units)) {
-return (0, model_1.mkExpr) `DATE_TRUNC(${sqlTime.value},${units})`;
+if ((0, malloy_types_1.isDateUnit)(units)) {
+return (0, malloy_types_1.mkExpr) `DATE_TRUNC(${sqlTime.value},${units})`;
 }
-return (0, model_1.mkExpr) `TIMESTAMP(${sqlTime.value})`;
+return (0, malloy_types_1.mkExpr) `TIMESTAMP(${sqlTime.value})`;
 }
-return (0, model_1.mkExpr) `TIMESTAMP_TRUNC(${sqlTime.value},${units})`;
+return (0, malloy_types_1.mkExpr) `TIMESTAMP_TRUNC(${sqlTime.value},${units})`;
 }
 sqlExtract(expr, units) {
 const extractTo = extractMap[units] || units;
-return (0, model_1.mkExpr) `EXTRACT(${extractTo} FROM ${expr.value})`;
+return (0, malloy_types_1.mkExpr) `EXTRACT(${extractTo} FROM ${expr.value})`;
 }

@@ -297,12 +297,12 @@ sqlAlterTime(op, expr, n, timeframe) {
 if (expr.valueType != "timestamp") {
-theTime = (0, model_1.mkExpr) `TIMESTAMP(${theTime})`;
+theTime = (0, malloy_types_1.mkExpr) `TIMESTAMP(${theTime})`;
 }
 }
 else if (expr.valueType == "timestamp") {
-theTime = (0, model_1.mkExpr) `DATETIME(${theTime})`;
+theTime = (0, malloy_types_1.mkExpr) `DATETIME(${theTime})`;
 computeType = "datetime";
 }
 const funcName = computeType.toUpperCase() + (op == "+" ? "_ADD" : "_SUB");
-const newTime = (0, model_1.mkExpr) `${funcName}(${theTime}, INTERVAL ${n} ${timeframe})`;
-return computeType == "datetime" ? (0, model_1.mkExpr) `TIMESTAMP(${newTime})` : newTime;
+const newTime = (0, malloy_types_1.mkExpr) `${funcName}(${theTime}, INTERVAL ${n} ${timeframe})`;
+return computeType == "datetime" ? (0, malloy_types_1.mkExpr) `TIMESTAMP(${newTime})` : newTime;
 }

@@ -316,10 +316,10 @@ ignoreInProject(fieldName) {
 // This just makes the code look a little prettier ...
-if (!cast.safe && cast.srcType && (0, model_1.isTimeFieldType)(cast.srcType)) {
+if (!cast.safe && cast.srcType && (0, malloy_types_1.isTimeFieldType)(cast.srcType)) {
 if (dstType == "date") {
-return (0, model_1.mkExpr) `DATE(${cast.expr})`;
+return (0, malloy_types_1.mkExpr) `DATE(${cast.expr})`;
 }
-return (0, model_1.mkExpr) `TIMESTAMP(${cast.expr})`;
+return (0, malloy_types_1.mkExpr) `TIMESTAMP(${cast.expr})`;
 }
 const castFunc = cast.safe ? "SAFE_CAST" : "CAST";
-return (0, model_1.mkExpr) `${castFunc}(${cast.expr} AS ${dstType})`;
+return (0, malloy_types_1.mkExpr) `${castFunc}(${cast.expr} AS ${dstType})`;
 }

@@ -329,3 +329,3 @@ return cast.expr;
 sqlRegexpMatch(expr, regexp) {
-return (0, model_1.mkExpr) `REGEXP_CONTAINS(${expr}, r${regexp})`;
+return (0, malloy_types_1.mkExpr) `REGEXP_CONTAINS(${expr}, r${regexp})`;
 }

@@ -349,6 +349,6 @@ sqlLiteralTime(timeString, type, timezone) {
 if (from.valueType != "timestamp") {
-lVal = (0, model_1.mkExpr) `TIMESTAMP(${lVal})`;
+lVal = (0, malloy_types_1.mkExpr) `TIMESTAMP(${lVal})`;
 }
 if (to.valueType != "timestamp") {
-rVal = (0, model_1.mkExpr) `TIMESTAMP(${rVal})`;
+rVal = (0, malloy_types_1.mkExpr) `TIMESTAMP(${rVal})`;
 }

@@ -359,19 +359,19 @@ }
 if (from.valueType != "date") {
-lVal = (0, model_1.mkExpr) `DATE(${lVal})`;
+lVal = (0, malloy_types_1.mkExpr) `DATE(${lVal})`;
 }
 if (to.valueType != "date") {
-rVal = (0, model_1.mkExpr) `DATE(${rVal})`;
+rVal = (0, malloy_types_1.mkExpr) `DATE(${rVal})`;
 }
 }
-return (0, model_1.mkExpr) `${diffUsing}(${rVal}, ${lVal}, ${units})`;
+return (0, malloy_types_1.mkExpr) `${diffUsing}(${rVal}, ${lVal}, ${units})`;
 }
 sqlSampleTable(tableSQL, sample) {
 if (sample !== undefined) {
-if ((0, model_1.isSamplingEnable)(sample) && sample.enable) {
+if ((0, malloy_types_1.isSamplingEnable)(sample) && sample.enable) {
 sample = this.defaultSampling;
 }
-if ((0, model_1.isSamplingRows)(sample)) {
+if ((0, malloy_types_1.isSamplingRows)(sample)) {
 throw new Error(`StandardSQL doesn't support sampling by rows only percent`);
 }
-else if ((0, model_1.isSamplingPercent)(sample)) {
+else if ((0, malloy_types_1.isSamplingPercent)(sample)) {
 return `(SELECT * FROM ${tableSQL} TABLESAMPLE SYSTEM (${sample.percent} PERCENT))`;

@@ -378,0 +378,0 @@ }

@@ -6,5 +6,6 @@ export type { QueryDataRow, Fragment, StructDef, StructRelationship, NamedStructDefs, MalloyQueryData, AtomicFieldType as AtomicFieldTypeInner, DateUnit, ExtractUnit, TimestampUnit, TimeFieldType, QueryData, FieldTypeDef, Expr, DialectFragment, TimeValue, FilterExpression, SQLBlock, FieldDef, FilteredAliasedName, PipeSegment, QueryFieldDef, TurtleDef, SearchValueMapResult, SearchIndexResult, ModelDef, Query, NamedQuery, NamedModelObject, ExpressionType, } from "./model";
 export { Malloy, Runtime, AtomicFieldType, ConnectionRuntime, SingleConnectionRuntime, EmptyURLReader, InMemoryURLReader, FixedConnectionMap, MalloyError, JoinRelationship, SourceRelationship, DateTimeframe, TimestampTimeframe, Result, parseTableURI, QueryMaterializer, CSVWriter, JSONWriter, DataWriter, } from "./malloy";
-export type { Explore, Model, PreparedQuery, PreparedResult, Field, AtomicField, ExploreField, QueryField, DataArray, DataRecord, DataColumn, DataArrayOrRecord, ModelMaterializer, DocumentSymbol, DocumentHighlight, ResultJSON, RunSQLOptions, PreparedResultMaterializer, SQLBlockMaterializer, ExploreMaterializer, WriteStream, } from "./malloy";
+export type { Explore, Model, PreparedQuery, PreparedResult, Field, AtomicField, ExploreField, QueryField, DataArray, DataRecord, DataColumn, DataArrayOrRecord, ModelMaterializer, DocumentSymbol, DocumentHighlight, ResultJSON, PreparedResultMaterializer, SQLBlockMaterializer, ExploreMaterializer, WriteStream, } from "./malloy";
+export type { RunSQLOptions } from "./run_sql_options";
 export type { URLReader, InfoConnection, LookupConnection, Connection, QueryString, ModelString, QueryURL, ModelURL, PooledConnection, TestableConnection, PersistSQLResults, StreamingConnection, } from "./runtime_types";
 export type { Loggable } from "./malloy";
 export { toAsyncGenerator } from "./connection_utils";
 /// <reference types="node" />
-import { InfoConnection } from ".";
+import { RunSQLOptions } from "./run_sql_options";
 import { DocumentHighlight as DocumentHighlightDefinition, DocumentSymbol as DocumentSymbolDefinition, DocumentCompletion as DocumentCompletionDefinition, LogMessage, MalloyTranslator } from "./lang";
 import { DocumentHelpContext } from "./lang/parse-tree-walkers/document-help-context-walker";
 import { CompiledQuery, FieldBooleanDef, FieldDateDef, FieldNumberDef, FieldStringDef, FieldTimestampDef, FieldTypeDef, FilterExpression, ModelDef, Query as InternalQuery, QueryData, QueryDataRow, QueryResult, StructDef, TurtleDef, SQLBlock, DocumentReference, DocumentPosition as ModelDocumentPosition, SearchIndexResult, SearchValueMapResult, NamedQuery, SQLBlockStructDef, FieldJSONDef } from "./model";
-import { LookupConnection, ModelString, ModelURL, QueryString, QueryURL, URLReader, Connection } from "./runtime_types";
+import { LookupConnection, ModelString, ModelURL, QueryString, QueryURL, URLReader, Connection, InfoConnection } from "./runtime_types";
 export interface Loggable {

@@ -13,5 +13,2 @@ debug: (message?: any, ...optionalParams: any[]) => void;
 }
-export interface RunSQLOptions {
-rowLimit?: number;
-}
 export declare class Malloy {

@@ -18,0 +15,0 @@ static get version(): string;
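The hunks above, together with the earlier index change, move the RunSQLOptions interface out of malloy.d.ts into its own run_sql_options module while the package index re-exports it, so existing `import { RunSQLOptions } from "@malloydata/malloy"` call sites keep resolving. A short sketch of that extract-and-re-export pattern; the Runner class is hypothetical, and only the interface shape and the re-export line come from this diff:

// run_sql_options.ts -- the option type now lives in its own module
export interface RunSQLOptions {
  rowLimit?: number;
}

// runner.ts -- internal consumers import the concrete module (illustrative only)
import { RunSQLOptions } from "./run_sql_options";

export class Runner {
  runSQL(sql: string, options: RunSQLOptions = {}): string {
    const limit = options.rowLimit ?? Infinity;
    return `would run "${sql}" returning at most ${limit} rows`;
  }
}

// index.ts -- the public surface re-exports the type unchanged
export type { RunSQLOptions } from "./run_sql_options";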

 import { Dialect, DialectFieldList } from "../dialect";
-import { FieldDef, FieldRef, FilterExpression, ModelDef, Query, QueryFieldDef, StructDef, StructRef, OrderBy, ResultMetadataDef, Expr, FieldFragment, AggregateFragment, CompiledQuery, FilterFragment, PipeSegment, TurtleDef, QuerySegment, Filtered, ParameterFragment, Parameter, JoinRelationship, DialectFragment, UngroupFragment, NamedQuery, AnalyticFragment } from "./malloy_types";
+import { FieldDef, FieldRef, FilterExpression, ModelDef, Query, QueryFieldDef, StructDef, StructRef, OrderBy, ResultMetadataDef, Expr, FieldFragment, AggregateFragment, CompiledQuery, FilterFragment, PipeSegment, TurtleDef, QuerySegment, Filtered, ParameterFragment, Parameter, JoinRelationship, DialectFragment, UngroupFragment, NamedQuery, AnalyticFragment, ResultStructMetadataDef, SearchIndexResult } from "./malloy_types";
 import { AndChain } from "./utils";
-import { ResultStructMetadataDef, SearchIndexResult } from ".";
-import { Connection } from "..";
+import { Connection } from "../runtime_types";
 interface TurtleDefPlus extends TurtleDef, Filtered {

@@ -7,0 +6,0 @@ }

@@ -1,3 +0,3 @@
-import { RunSQLOptions } from "./malloy";
-import { MalloyQueryData, QueryDataRow, SQLBlock, StructDef } from "./model";
+import { RunSQLOptions } from "./run_sql_options";
+import { MalloyQueryData, QueryDataRow, SQLBlock, StructDef } from "./model/malloy_types";
 /**

@@ -4,0 +4,0 @@ * The contents of a Malloy query document.

 {
 "name": "@malloydata/malloy",
-"version": "0.0.22-dev230117165007",
+"version": "0.0.22-dev230117200639",
 "license": "MIT",

@@ -5,0 +5,0 @@ "main": "dist/index.js",
