@harmoniclabs/plu-ts - npm Package Compare versions

Comparing version 0.3.0-dev0 to 0.3.0-dev1

dist/onchain/IR/interfaces/IRMetadata.d.ts


dist/onchain/IR/IRNodes/IRApp.d.ts

@@ -6,5 +6,3 @@ import { Cloneable } from "../../../types/interfaces/Cloneable.js";

import { ToJson } from "../../../utils/ts/ToJson.js";
import { ToUPLC } from "../../UPLC/interfaces/ToUPLC.js";
import { UPLCTerm } from "../../UPLC/UPLCTerm/index.js";
export declare class IRApp implements Cloneable<IRApp>, IHash, IIRParent, ToJson, ToUPLC {
export declare class IRApp implements Cloneable<IRApp>, IHash, IIRParent, ToJson {
fn: IRTerm;

@@ -19,3 +17,2 @@ arg: IRTerm;

toJson(): any;
toUPLC(): UPLCTerm;
}


dist/onchain/IR/IRNodes/IRApp.js

@@ -7,3 +7,2 @@ "use strict";

var isIRTerm_1 = require("../utils/isIRTerm.js");
var Application_1 = require("../../UPLC/UPLCTerms/Application.js");
var PlutsIRError_1 = require("../../../errors/PlutsIRError/index.js");

@@ -26,3 +25,3 @@ var IRApp = /** @class */ (function () {

// basically a merkle tree
hash = (0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRApp.tag, fn.hash, arg.hash));
hash = (0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRApp.tag, fn.hash, arg.hash));
}
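This hunk (and the matching ones in the other IR node files below) switches the node-hashing primitive from blake2b_224 to blake2b_128 while keeping the merkle-tree construction: a node's hash is derived from its tag byte and the hashes of its children. A minimal sketch of that construction follows; the helper signatures (blake2b_128, concatUint8Arr) are assumptions that simply mirror the calls shown in the diff, not the package's exported API.

// Hedged sketch: assumed helpers mirroring the diff above, not the package's exports.
declare function blake2b_128(data: Uint8Array): Uint8Array;
declare function concatUint8Arr(...chunks: Uint8Array[]): Uint8Array;

// An IRApp hash depends only on the node tag and the child hashes,
// so changing any descendant invalidates every hash up to the root.
function hashIRApp(tag: Uint8Array, fnHash: Uint8Array, argHash: Uint8Array): Uint8Array {
    return blake2b_128(concatUint8Arr(tag, fnHash, argHash));
}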

@@ -99,7 +98,4 @@ // return a copy

};
IRApp.prototype.toUPLC = function () {
return new Application_1.Application(this.fn.toUPLC(), this.arg.toUPLC());
};
return IRApp;
}());
exports.IRApp = IRApp;

@@ -11,6 +11,4 @@ import { Data } from "../../../types/Data/index.js";

import { ToJson } from "../../../utils/ts/ToJson.js";
import { ToUPLC } from "../../UPLC/interfaces/ToUPLC.js";
import { UPLCConst } from "../../UPLC/UPLCTerms/UPLCConst/index.js";
export type IRConstValue = CanBeUInteger | ByteString | Uint8Array | string | boolean | IRConstValue[] | Pair<IRConstValue, IRConstValue> | Data | undefined;
export declare class IRConst implements Cloneable<IRConst>, IHash, IIRParent, ToJson, ToUPLC {
export declare class IRConst implements Cloneable<IRConst>, IHash, IIRParent, ToJson {
readonly hash: Uint8Array;

@@ -25,3 +23,2 @@ markHashAsInvalid: () => void;

toJson(): any;
toUPLC(): UPLCConst;
static get unit(): IRConst;

@@ -28,0 +25,0 @@ static bool(b: boolean): IRConst;

@@ -49,3 +49,2 @@ "use strict";

var UnexpectedMarkHashInvalidCall_1 = require("../../../errors/PlutsIRError/UnexpectedMarkHashInvalidCall.js");
var UPLCConst_1 = require("../../UPLC/UPLCTerms/UPLCConst/index.js");
var typeExtends_1 = require("../../pluts/type_system/typeExtends.js");

@@ -83,3 +82,3 @@ var utils_1 = require("../../pluts/type_system/utils.js");

if (!(hash instanceof Uint8Array)) {
hash = (0, blake2b_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRConst.tag, new Uint8Array((0, termTyToConstTy_1.termTyToConstTy)(_this.type)), serializeIRConstValue(_this.value, _this.type)));
hash = (0, blake2b_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRConst.tag, new Uint8Array((0, termTyToConstTy_1.termTyToConstTy)(_this.type)), serializeIRConstValue(_this.value, _this.type)));
}

@@ -114,5 +113,2 @@ return hash.slice();

};
IRConst.prototype.toUPLC = function () {
return new UPLCConst_1.UPLCConst((0, termTyToConstTy_1.termTyToConstTy)(this.type), this.value);
};
Object.defineProperty(IRConst, "unit", {

@@ -119,0 +115,0 @@ get: function () {

import { Cloneable } from "../../../types/interfaces/Cloneable.js";
import { ToJson } from "../../../utils/ts/ToJson.js";
import { UPLCTerm } from "../../UPLC/UPLCTerm/index.js";
import { ToUPLC } from "../../UPLC/interfaces/ToUPLC.js";
import { IRTerm } from "../IRTerm.js";
import { IHash } from "../interfaces/IHash.js";
import { IIRParent } from "../interfaces/IIRParent.js";
export declare class IRDelayed implements Cloneable<IRDelayed>, IHash, IIRParent, ToJson, ToUPLC {
export declare class IRDelayed implements Cloneable<IRDelayed>, IHash, IIRParent, ToJson {
delayed: IRTerm;

@@ -17,3 +15,2 @@ readonly hash: Uint8Array;

toJson(): any;
toUPLC(): UPLCTerm;
}

@@ -6,3 +6,2 @@ "use strict";

var BasePlutsError_1 = require("../../../errors/BasePlutsError.js");
var Delay_1 = require("../../UPLC/UPLCTerms/Delay.js");
var concatUint8Arr_1 = require("../utils/concatUint8Arr.js");

@@ -17,3 +16,3 @@ var isIRTerm_1 = require("../utils/isIRTerm.js");

if (!(hash instanceof Uint8Array)) {
hash = (0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRDelayed.tag, _this.delayed.hash));
hash = (0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRDelayed.tag, _this.delayed.hash));
}

@@ -76,8 +75,4 @@ return hash.slice();

};
IRDelayed.prototype.toUPLC = function () {
return new Delay_1.Delay(this.delayed.toUPLC());
};
;
return IRDelayed;
}());
exports.IRDelayed = IRDelayed;
import { Cloneable } from "../../../types/interfaces/Cloneable.js";
import { ToJson } from "../../../utils/ts/ToJson.js";
import { ErrorUPLC } from "../../UPLC/UPLCTerms/ErrorUPLC.js";
import { IRTerm } from "../IRTerm.js";

@@ -19,3 +18,2 @@ import { IHash } from "../interfaces/IHash.js";

};
toUPLC(): ErrorUPLC;
}

@@ -6,6 +6,5 @@ "use strict";

var UnexpectedMarkHashInvalidCall_1 = require("../../../errors/PlutsIRError/UnexpectedMarkHashInvalidCall.js");
var ErrorUPLC_1 = require("../../UPLC/UPLCTerms/ErrorUPLC.js");
var isIRTerm_1 = require("../utils/isIRTerm.js");
var irErrorBitTag = new Uint8Array([7]);
var errorHash = (0, crypto_1.blake2b_224)(irErrorBitTag.slice());
var errorHash = (0, crypto_1.blake2b_128)(irErrorBitTag.slice());
var IRError = /** @class */ (function () {

@@ -52,5 +51,2 @@ function IRError(msg, addInfos) {

};
IRError.prototype.toUPLC = function () {
return new ErrorUPLC_1.ErrorUPLC(this.msg, this.addInfos);
};
return IRError;

@@ -57,0 +53,0 @@ }());

import { Cloneable } from "../../../types/interfaces/Cloneable.js";
import { ToJson } from "../../../utils/ts/ToJson.js";
import { UPLCTerm } from "../../UPLC/UPLCTerm/index.js";
import { ToUPLC } from "../../UPLC/interfaces/ToUPLC.js";
import { IRTerm } from "../IRTerm.js";
import { IHash } from "../interfaces/IHash.js";
import { IIRParent } from "../interfaces/IIRParent.js";
export declare class IRForced implements Cloneable<IRForced>, IHash, IIRParent, ToJson, ToUPLC {
export declare class IRForced implements Cloneable<IRForced>, IHash, IIRParent, ToJson {
forced: IRTerm;

@@ -17,3 +15,2 @@ readonly hash: Uint8Array;

toJson(): any;
toUPLC(): UPLCTerm;
}

@@ -6,3 +6,2 @@ "use strict";

var BasePlutsError_1 = require("../../../errors/BasePlutsError.js");
var Force_1 = require("../../UPLC/UPLCTerms/Force.js");
var concatUint8Arr_1 = require("../utils/concatUint8Arr.js");

@@ -17,3 +16,3 @@ var isIRTerm_1 = require("../utils/isIRTerm.js");

if (!(hash instanceof Uint8Array)) {
hash = (0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRForced.tag, _this.forced.hash));
hash = (0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRForced.tag, _this.forced.hash));
}

@@ -76,7 +75,4 @@ return hash.slice();

};
IRForced.prototype.toUPLC = function () {
return new Force_1.Force(this.forced.toUPLC());
};
return IRForced;
}());
exports.IRForced = IRForced;
import { Cloneable } from "../../../types/interfaces/Cloneable.js";
import { ToJson } from "../../../utils/ts/ToJson.js";
import { Lambda } from "../../UPLC/UPLCTerms/Lambda.js";
import { ToUPLC } from "../../UPLC/interfaces/ToUPLC.js";
import { IRTerm } from "../IRTerm.js";
import { IHash } from "../interfaces/IHash.js";
import { IIRParent } from "../interfaces/IIRParent.js";
export declare class IRFunc implements Cloneable<IRFunc>, IHash, IIRParent, ToJson, ToUPLC {
export declare class IRFunc implements Cloneable<IRFunc>, IHash, IIRParent, ToJson {
readonly arity: number;

@@ -18,3 +16,2 @@ readonly hash: Uint8Array;

toJson(): any;
toUPLC(): Lambda;
}

@@ -10,3 +10,2 @@ "use strict";

var ObjectUtils_1 = __importDefault(require("../../../utils/ObjectUtils/index.js"));
var Lambda_1 = require("../../UPLC/UPLCTerms/Lambda.js");
var concatUint8Arr_1 = require("../utils/concatUint8Arr.js");

@@ -26,3 +25,3 @@ var isIRTerm_1 = require("../utils/isIRTerm.js");

if (!(hash instanceof Uint8Array)) {
hash = (0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRFunc.tag, (0, positiveIntAsBytes_1.positiveIntAsBytes)(_this.arity), _body.hash));
hash = (0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRFunc.tag, (0, positiveIntAsBytes_1.positiveIntAsBytes)(_this.arity), _body.hash));
}

@@ -91,11 +90,4 @@ return hash.slice();

};
IRFunc.prototype.toUPLC = function () {
var lam = new Lambda_1.Lambda(this.body.toUPLC());
for (var i = 1; i < this.arity; i++) {
lam = new Lambda_1.Lambda(lam);
}
return lam;
};
return IRFunc;
}());
exports.IRFunc = IRFunc;

@@ -6,4 +6,3 @@ import { Cloneable } from "../../../types/interfaces/Cloneable.js";

import { ToJson } from "../../../utils/ts/ToJson.js";
import { ToUPLC } from "../../UPLC/interfaces/ToUPLC.js";
import { UPLCTerm } from "../../UPLC/UPLCTerm/index.js";
import { IRMetadata } from "../interfaces/IRMetadata.js";
export type HoistedSetEntry = {

@@ -13,3 +12,14 @@ hoisted: IRHoisted;

};
export declare class IRHoisted implements Cloneable<IRHoisted>, IHash, IIRParent, ToJson, ToUPLC {
export interface IRHoistedMeta {
/**
* force hoisting even if only a single reference is found
*
* useful to hoist terms used once in recursive expressions
**/
forceHoist: boolean;
}
export interface IRHoistedMetadata extends IRMetadata {
meta: IRHoistedMeta;
}
export declare class IRHoisted implements Cloneable<IRHoisted>, IHash, IIRParent, ToJson, IRHoistedMetadata {
readonly hash: Uint8Array;

@@ -21,6 +31,6 @@ markHashAsInvalid: () => void;

clone: () => IRHoisted;
constructor(hoisted: IRTerm);
readonly meta: IRHoistedMeta;
constructor(hoisted: IRTerm, metadata?: Partial<IRHoistedMeta>);
static get tag(): Uint8Array;
toJson(): any;
toUPLC(): UPLCTerm;
}
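A brief usage sketch of the new IRHoisted constructor declared above. The class shape is restated locally and the hoisted term is a placeholder, so this is illustrative only, not the package's documented API.

// Hedged sketch: local stand-ins for the declarations shown above.
interface IRHoistedMeta { forceHoist: boolean; }
declare class IRHoisted {
    constructor(hoisted: unknown, metadata?: Partial<IRHoistedMeta>);
    readonly meta: IRHoistedMeta;
}
declare const recursiveHelper: unknown; // placeholder for an IRTerm

// hoist the term even though it is referenced only once,
// e.g. because it appears inside a recursive expression
const hoisted = new IRHoisted(recursiveHelper, { forceHoist: true });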

@@ -27,0 +37,0 @@ /**

"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __read = (this && this.__read) || function (o, n) {

@@ -42,8 +53,10 @@ var m = typeof Symbol === "function" && o[Symbol.iterator];

var isIRTerm_1 = require("../utils/isIRTerm.js");
var IllegalIRToUPLC_1 = require("../../../errors/PlutsIRError/IRCompilationError/IllegalIRToUPLC.js");
var showIR_1 = require("../utils/showIR.js");
var IRForced_1 = require("./IRForced.js");
var IRDelayed_1 = require("./IRDelayed.js");
var defaultHoistedMeta = ObjectUtils_1.default.freezeAll({
forceHoist: false
});
var IRHoisted = /** @class */ (function () {
function IRHoisted(hoisted) {
function IRHoisted(hoisted, metadata) {
if (metadata === void 0) { metadata = {}; }
var _this = this;

@@ -69,3 +82,3 @@ // unwrap

if (!(hash instanceof Uint8Array)) {
hash = (0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRHoisted.tag, hoisted.hash));
hash = (0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRHoisted.tag, hoisted.hash));
}

@@ -127,6 +140,10 @@ return hash.slice();

});
Object.defineProperty(this, "meta", {
value: __assign(__assign({}, defaultHoistedMeta), metadata),
writable: false,
enumerable: true,
configurable: false
});
ObjectUtils_1.default.defineProperty(this, "clone", function () {
return new IRHoisted(_this.hoisted.clone()
// _getDeps().slice() // as long as `dependecies` getter returns clones this is fine
);
return new IRHoisted(_this.hoisted.clone(), _this.meta);
});

@@ -146,8 +163,2 @@ }

};
IRHoisted.prototype.toUPLC = function () {
// return this.hoisted.toUPLC();
throw new IllegalIRToUPLC_1.IllegalIRToUPLC("Can't convert 'IRHoisted' to valid UPLC;" +
"\nhoisted hash was: " + (0, uint8array_utils_1.toHex)(this.hash) +
"\nhoisted term was: " + (0, showIR_1.showIR)(this.hoisted).text);
};
return IRHoisted;

@@ -193,3 +204,7 @@ }());

else {
set[idxInSet].nReferences += thisHoistedEntry.nReferences;
var entry = set[idxInSet];
entry.nReferences += thisHoistedEntry.nReferences;
entry.hoisted.meta.forceHoist =
entry.hoisted.meta.forceHoist ||
thisHoistedEntry.hoisted.meta.forceHoist;
}

@@ -196,0 +211,0 @@ };

@@ -6,4 +6,3 @@ import { Cloneable } from "../../../types/interfaces/Cloneable.js";

import { ToJson } from "../../../utils/ts/ToJson.js";
import { ToUPLC } from "../../UPLC/interfaces/ToUPLC.js";
import { UPLCTerm } from "../../UPLC/UPLCTerm/index.js";
import { IRMetadata } from "../interfaces/IRMetadata.js";
export type LettedSetEntry = {

@@ -17,3 +16,14 @@ letted: IRLetted;

};
export declare class IRLetted implements Cloneable<IRLetted>, IHash, IIRParent, ToJson, ToUPLC {
export interface IRLettedMeta {
/**
* force hoisting even if only a single reference is found
*
* useful to hoist letted terms used once in recursive expressions
**/
forceHoist: boolean;
}
export interface IRLettedMetadata extends IRMetadata {
meta: IRLettedMeta;
}
export declare class IRLetted implements Cloneable<IRLetted>, IHash, IIRParent, ToJson, IRLettedMetadata {
readonly hash: Uint8Array;

@@ -34,6 +44,6 @@ markHashAsInvalid: () => void;

clone: () => IRLetted;
constructor(DeBruijn: number, toLet: IRTerm);
readonly meta: IRLettedMeta;
constructor(DeBruijn: number, toLet: IRTerm, metadata?: Partial<IRLettedMeta>);
static get tag(): Uint8Array;
toJson(): any;
toUPLC(): UPLCTerm;
}
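IRLetted gains the same optional metadata but keeps its leading DeBruijn argument. A hedged sketch of the three-argument form, again with local stand-ins rather than the package's exported types:

// Hedged sketch: stand-ins mirroring the declaration above.
interface IRLettedMeta { forceHoist: boolean; }
declare class IRLetted {
    constructor(DeBruijn: number, toLet: unknown, metadata?: Partial<IRLettedMeta>);
}
declare const sharedSubterm: unknown; // placeholder for an IRTerm

// let a sub-term at de Bruijn level 2 and force it to be extracted
// even if only a single reference is found
const letted = new IRLetted(2, sharedSubterm, { forceHoist: true });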

@@ -40,0 +50,0 @@ /**

"use strict";
var __read = (this && this.__read) || function (o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o;
var i = m.call(o), r, ar = [], e;
try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
}
catch (error) { e = { error: error }; }
finally {
try {
if (r && !r.done && (m = i["return"])) m.call(i);
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
finally { if (e) throw e.error; }
}
return ar;
return t;
};
return __assign.apply(this, arguments);
};
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
if (ar || !(i in from)) {
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
return to.concat(ar || Array.prototype.slice.call(from));
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -42,3 +28,2 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

var IRDelayed_1 = require("./IRDelayed.js");
var IllegalIRToUPLC_1 = require("../../../errors/PlutsIRError/IRCompilationError/IllegalIRToUPLC.js");
var iterTree_1 = require("../toUPLC/_internal/iterTree.js");

@@ -54,4 +39,8 @@ var IRVar_1 = require("./IRVar.js");

exports.jsonLettedSetEntry = jsonLettedSetEntry;
var defaultLettedMeta = ObjectUtils_1.default.freezeAll({
forceHoist: false
});
var IRLetted = /** @class */ (function () {
function IRLetted(DeBruijn, toLet) {
function IRLetted(DeBruijn, toLet, metadata) {
if (metadata === void 0) { metadata = {}; }
var _this = this;

@@ -66,4 +55,7 @@ if (!(Number.isSafeInteger(DeBruijn) && DeBruijn >= 0))

throw new BasePlutsError_1.BasePlutsError("invalid index for an `IRLetted` instance");
if (newDbn === _dbn)
return; // everything ok
if (newDbn === _dbn) {
// everything ok
// avoid calling `markHashAsInvalid`
return;
}
_this.markHashAsInvalid();

@@ -91,6 +83,6 @@ _dbn = newDbn;

// `IRLetted` dbn instantiation
hash = (0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRLetted.tag, _value.hash));
hash = (0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRLetted.tag, _value.hash));
}
else {
hash = (0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRLetted.tag, (0, positiveIntAsBytes_1.positiveIntAsBytes)(normalized[0]), normalized[1].hash));
hash = (0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRLetted.tag, (0, positiveIntAsBytes_1.positiveIntAsBytes)(normalized[0]), normalized[1].hash));
}

@@ -161,7 +153,10 @@ }

});
Object.defineProperty(this, "meta", {
value: __assign(__assign({}, defaultLettedMeta), metadata),
writable: false,
enumerable: true,
configurable: false
});
ObjectUtils_1.default.defineReadOnlyProperty(this, "clone", function () {
return new IRLetted(_this.dbn, _this.value.clone()
// doesn't work because dependecies need to be bounded to the cloned value
// _getDeps().slice() // as long as `dependecies` getter returns clones this is fine
);
return new IRLetted(_this.dbn, _this.value.clone(), _this.meta);
});

@@ -181,5 +176,2 @@ }

};
IRLetted.prototype.toUPLC = function () {
throw new IllegalIRToUPLC_1.IllegalIRToUPLC("Can't convert 'IRLetted' to valid UPLC");
};
return IRLetted;

@@ -206,7 +198,3 @@ }());

*/
function addToSet() {
var _terms = [];
for (var _i = 0; _i < arguments.length; _i++) {
_terms[_i] = arguments[_i];
}
function addToSet(_terms) {
var _loop_1 = function (i) {

@@ -221,6 +209,9 @@ var thisLettedEntry = _terms[i];

// add dependencies first
// dependencies don't have references
// to the current letted
// dependencies don't have references to the current letted
// (of course, it wouldn't be much of a dependency otherwise)
addToSet.apply(void 0, __spreadArray([], __read(thisLettedEntry.letted.dependencies), false));
//
// don't add dependencies of dependencies since the `dependencies` property
// already calls `getSortedLettedSet( getLettedTerms( _value ) )`
// so repeating it here would count deps twice (exponentially for deps of deps)
addToSet(getLettedTerms(thisLettedEntry.letted.value));
hashesSet.push(thisHash);

@@ -233,3 +224,7 @@ set.push({

else {
set[idxInSet].nReferences += thisLettedEntry.nReferences;
var entry = set[idxInSet];
entry.nReferences += thisLettedEntry.nReferences;
entry.letted.meta.forceHoist =
entry.letted.meta.forceHoist ||
thisLettedEntry.letted.meta.forceHoist;
}

@@ -241,3 +236,3 @@ };

}
addToSet.apply(void 0, __spreadArray([], __read(lettedTerms), false));
addToSet(lettedTerms);
return set;

@@ -248,31 +243,26 @@ }

var lettedTerms = [];
function searchIn(term) {
if (term instanceof IRLetted) {
lettedTerms.push({ letted: term, nReferences: 1 });
return;
var stack = [irTerm];
while (stack.length > 0) {
var t = stack.pop();
if (t instanceof IRLetted) {
lettedTerms.push({ letted: t, nReferences: 1 });
continue;
}
if (term instanceof IRApp_1.IRApp) {
searchIn(term.fn);
searchIn(term.arg);
return;
if (t instanceof IRApp_1.IRApp) {
stack.push(t.fn, t.arg);
continue;
}
if (term instanceof IRFunc_1.IRFunc) {
searchIn(term.body);
return;
if (t instanceof IRFunc_1.IRFunc) {
stack.push(t.body);
continue;
}
if (term instanceof IRForced_1.IRForced) {
searchIn(term.forced);
return;
if (t instanceof IRForced_1.IRForced) {
stack.push(t.forced);
continue;
}
if (term instanceof IRDelayed_1.IRDelayed) {
searchIn(term.delayed);
return;
if (t instanceof IRDelayed_1.IRDelayed) {
stack.push(t.delayed);
continue;
}
// if( term instanceof IRHoisted ) return; // hoisted terms are closed
// if( term instanceof IRNative ) return;
// if( term instanceof IRVar ) return;
// if( term instanceof IRConst ) return;
// if( term instanceof IRError ) return;
}
searchIn(irTerm);
return lettedTerms;

@@ -279,0 +269,0 @@ }

import { Cloneable } from "../../../../types/interfaces/Cloneable.js";
import { ToJson } from "../../../../utils/ts/ToJson.js";
import { Builtin } from "../../../UPLC/UPLCTerms/Builtin/index.js";
import { ToUPLC } from "../../../UPLC/interfaces/ToUPLC.js";
import { IRTerm } from "../../IRTerm.js";

@@ -12,3 +10,3 @@ import { IHash } from "../../interfaces/IHash.js";

**/
export declare class IRNative implements Cloneable<IRNative>, IHash, IIRParent, ToJson, ToUPLC {
export declare class IRNative implements Cloneable<IRNative>, IHash, IIRParent, ToJson {
readonly tag: IRNativeTag;

@@ -25,3 +23,2 @@ readonly hash: Uint8Array;

};
toUPLC(): Builtin;
static get addInteger(): IRNative;

@@ -28,0 +25,0 @@ static get subtractInteger(): IRNative;

@@ -8,6 +8,4 @@ "use strict";

var crypto_1 = require("../../../../crypto/index.js");
var IllegalIRToUPLC_1 = require("../../../../errors/PlutsIRError/IRCompilationError/IllegalIRToUPLC.js");
var UnexpectedMarkHashInvalidCall_1 = require("../../../../errors/PlutsIRError/UnexpectedMarkHashInvalidCall.js");
var UPLCFlatUtils_1 = __importDefault(require("../../../../utils/UPLCFlatUtils/index.js"));
var Builtin_1 = require("../../../UPLC/UPLCTerms/Builtin/index.js");
var concatUint8Arr_1 = require("../../utils/concatUint8Arr.js");

@@ -49,3 +47,3 @@ var isIRTerm_1 = require("../../utils/isIRTerm.js");

if (nativeHashesCache[_this.tag] === undefined) {
nativeHashesCache[_this.tag] = (0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRNative.tag, (0, positiveIntAsBytes_1.positiveBigIntAsBytes)(BigInt("0b" +
nativeHashesCache[_this.tag] = (0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRNative.tag, (0, positiveIntAsBytes_1.positiveBigIntAsBytes)(BigInt("0b" +
UPLCFlatUtils_1.default.zigzagBigint(BigInt(_this.tag))

@@ -84,7 +82,2 @@ // builtin tag takes 7 bits

};
IRNative.prototype.toUPLC = function () {
if (this.tag < 0)
throw new IllegalIRToUPLC_1.IllegalIRToUPLC("Can't translate '" + (0, IRNativeTag_1.nativeTagToString)(this.tag) + "' 'IRNative' to 'UPLCBuiltin'");
return new Builtin_1.Builtin(this.tag);
};
Object.defineProperty(IRNative, "addInteger", {

@@ -91,0 +84,0 @@ get: function () { return new IRNative(0 /* IRNativeTag.addInteger */); },

import { Cloneable } from "../../../types/interfaces/Cloneable.js";
import { ToJson } from "../../../utils/ts/ToJson.js";
import { UPLCVar } from "../../UPLC/UPLCTerms/UPLCVar/index.js";
import { IRTerm } from "../IRTerm.js";

@@ -24,3 +23,2 @@ import { IHash } from "../interfaces/IHash.js";

toJson(): any;
toUPLC(_n?: number | bigint): UPLCVar;
}

@@ -6,3 +6,2 @@ "use strict";

var BasePlutsError_1 = require("../../../errors/BasePlutsError.js");
var UPLCVar_1 = require("../../UPLC/UPLCTerms/UPLCVar/index.js");
var concatUint8Arr_1 = require("../utils/concatUint8Arr.js");

@@ -41,4 +40,5 @@ var isIRTerm_1 = require("../utils/isIRTerm.js");

set: function (newDbn) {
if (!(Number.isSafeInteger(newDbn) && newDbn >= 0))
throw new BasePlutsError_1.BasePlutsError("invalid index for an `IRVar` instance");
if (!(Number.isSafeInteger(newDbn) && newDbn >= 0)) {
throw new BasePlutsError_1.BasePlutsError("invalid index for an `IRVar` instance; new DeBruijn was: " + newDbn);
}
if (newDbn === _dbn)

@@ -79,5 +79,2 @@ return; // everything ok

};
IRVar.prototype.toUPLC = function (_n) {
return new UPLCVar_1.UPLCVar(this.dbn);
};
return IRVar;

@@ -89,5 +86,5 @@ }());

while ((bdnVarHashCache.length - 1) < dbn) {
bdnVarHashCache.push((0, crypto_1.blake2b_224)((0, concatUint8Arr_1.concatUint8Arr)(IRVar.tag, (0, positiveIntAsBytes_1.positiveIntAsBytes)(bdnVarHashCache.length))));
bdnVarHashCache.push((0, crypto_1.blake2b_128)((0, concatUint8Arr_1.concatUint8Arr)(IRVar.tag, (0, positiveIntAsBytes_1.positiveIntAsBytes)(bdnVarHashCache.length))));
}
return bdnVarHashCache[dbn];
}
import { IRTerm } from "../../IRTerm.js";
export declare function _addDepth(_term: IRTerm, initialDepth?: number): void;
export declare function _addDepths(_term: IRTerm, initialDepth?: number): void;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports._addDepth = void 0;
exports._addDepths = void 0;
var IRApp_1 = require("../../IRNodes/IRApp.js");

@@ -11,3 +11,3 @@ var IRDelayed_1 = require("../../IRNodes/IRDelayed.js");

var depth_1 = require("./depth.js");
function _addDepth(_term, initialDepth) {
function _addDepths(_term, initialDepth) {
if (initialDepth === void 0) { initialDepth = 0; }

@@ -48,2 +48,2 @@ var stack = [(0, depth_1.defineDepth)(_term, initialDepth)];

}
exports._addDepth = _addDepth;
exports._addDepths = _addDepths;

@@ -1,2 +0,6 @@

import { IRTerm } from "../../IRTerm.js";
export declare function lowestCommonAncestor(n1: IRTerm | undefined, n2: IRTerm | undefined): IRTerm | undefined;
import type { IRTerm } from "../../IRTerm.js";
type IRWithDept = IRTerm & {
depth: number;
};
export declare function lowestCommonAncestor(n1: IRWithDept | undefined, n2: IRWithDept | undefined): IRTerm | string;
export {};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.lowestCommonAncestor = void 0;
var isIRTerm_1 = require("../../utils/isIRTerm.js");
function lowestCommonAncestor(n1, n2) {
if (n1 === undefined || n2 === undefined)
return undefined;
if (!(0, isIRTerm_1.isIRTerm)(n1) || !(0, isIRTerm_1.isIRTerm)(n2))
return "not IR";
var d1 = n1.depth;

@@ -28,4 +29,4 @@ var d2 = n2.depth;

}
return undefined;
return "no lca";
}
exports.lowestCommonAncestor = lowestCommonAncestor;
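The new lowestCommonAncestor operates on depth-annotated nodes: bring both nodes to the same depth, then climb parent links in lock-step until the paths meet. Below is a standalone sketch of that walk over simplified nodes; the real function additionally validates nodes with isIRTerm and signals failure with a string.

// Hedged sketch of a depth-based LCA walk over parent links.
type WithDepth = { depth: number; parent?: WithDepth };

function lcaSketch(a?: WithDepth, b?: WithDepth): WithDepth | undefined {
    if (!a || !b) return undefined;
    // equalize depths first
    while (a.depth > b.depth) { if (!a.parent) return undefined; a = a.parent; }
    while (b.depth > a.depth) { if (!b.parent) return undefined; b = b.parent; }
    // climb together until the same node is reached
    while (a !== b) {
        if (!a.parent || !b.parent) return undefined; // nodes from different trees
        a = a.parent;
        b = b.parent;
    }
    return a;
}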

@@ -10,2 +10,3 @@ "use strict";

var replaceClosedLettedWithHoisted_1 = require("./subRoutines/replaceClosedLettedWithHoisted.js");
var _irToUplc_1 = require("./_internal/_irToUplc.js");
function compileIRToUPLC(term) {

@@ -45,4 +46,8 @@ ///////////////////////////////////////////////////////////////////////////////

(0, replaceClosedLettedWithHoisted_1.replaceClosedLettedWithHoisted)(term);
// handle letted before hoisted because the tree is smaller
// and we also have fewer letted dependencies to handle
(0, handleLetted_1.handleLetted)(term);
term = (0, handleHoistedAndReturnRoot_1.handleHoistedAndReturnRoot)(term);
// replaced hoisted terms might include new letted terms
(0, handleLetted_1.handleLetted)(term);
///////////////////////////////////////////////////////////////////////////////

@@ -53,4 +58,4 @@ // ------------------------------------------------------------------------- //

///////////////////////////////////////////////////////////////////////////////
return term.toUPLC(0);
return (0, _irToUplc_1._irToUplc)(term);
}
exports.compileIRToUPLC = compileIRToUPLC;

@@ -15,2 +15,3 @@ "use strict";

var showIR_1 = require("../../../utils/showIR.js");
var markRecursiveHoistsAsForced_1 = require("../markRecursiveHoistsAsForced.js");
function toHashArr(arr) {

@@ -22,4 +23,12 @@ return arr.map(function (h) { return (0, uint8array_utils_1.toHex)(h.hash); });

if (term instanceof IRHoisted_1.IRHoisted) {
return handleHoistedAndReturnRoot(term.hoisted.clone());
// we know `handleHoistedAndReturnRoot` modifies the term
// so we are probably ok not cloning here
// top level hoisted terms should be handled in `compileIRToUPLC` anyway
var theTerm = term.hoisted; // .clone()
// we still need to remove this parent, otherwise there will be an unknown hoisted to handle
theTerm.parent = undefined;
return handleHoistedAndReturnRoot(theTerm);
}
// TODO: should probably merge `markRecursiveHoistsAsForced` inside `getHoistedTerms` to iter once
(0, markRecursiveHoistsAsForced_1.markRecursiveHoistsAsForced)(term);
var directHoisteds = (0, IRHoisted_1.getHoistedTerms)(term);

@@ -33,21 +42,25 @@ var allHoisteds = (0, IRHoisted_1.getSortedHoistedSet)(directHoisteds);

var b = 0;
var hoisteds = new Array(n);
var hoistedsToInline = new Array(n);
var toHoist = new Array(n);
var toInline = new Array(n);
// filter out hoisted terms with single reference
for (var i = 0; i < n; i++) {
var thisHoistedEntry = allHoisteds[i];
if (thisHoistedEntry.hoisted.meta.forceHoist === true) {
toHoist[a++] = thisHoistedEntry.hoisted;
continue;
}
if (thisHoistedEntry.nReferences === 1 &&
thisHoistedEntry.hoisted.parent) {
// inline hoisted with single reference
hoistedsToInline[b++] = thisHoistedEntry.hoisted;
toInline[b++] = thisHoistedEntry.hoisted;
}
else
hoisteds[a++] = thisHoistedEntry.hoisted;
toHoist[a++] = thisHoistedEntry.hoisted;
}
// drop unused space
hoisteds.length = a;
hoistedsToInline.length = b;
var hoistedsToInlineHashes = hoistedsToInline.map(function (h) { return h.hash; });
// console.log( "hoisteds", hoisteds.map( h => ({ ...showIR( h.hoisted ), hash: toHex( h.hash ) }) ) );
// console.log( "hoistedsToInline", hoistedsToInline.map( h => ({ ...showIR( h.hoisted ), hash: toHex( h.hash ) }) ) );
toHoist.length = a;
toInline.length = b;
var hoistedsToInlineHashes = toInline.map(function (h) { return h.hash; });
// console.log( "toHoist", toHoist.map( h => ({ ...showIR( h.hoisted ), hash: toHex( h.hash ) }) ) );
// console.log( "toInline", toInline.map( h => ({ ...showIR( h.hoisted ), hash: toHex( h.hash ) }) ) );
var root = term;

@@ -58,5 +71,5 @@ while (root.parent !== undefined)

function getIRVarForHoistedAtLevel(_hoistedHash, level) {
var levelOfTerm = hoisteds.findIndex(function (sortedH) { return (0, uint8array_utils_1.uint8ArrayEq)(sortedH.hash, _hoistedHash); });
var levelOfTerm = toHoist.findIndex(function (sortedH) { return (0, uint8array_utils_1.uint8ArrayEq)(sortedH.hash, _hoistedHash); });
if (levelOfTerm < 0) {
throw new PlutsIRError_1.PlutsIRError("missing hoisted with hash ".concat((0, uint8array_utils_1.toHex)(_hoistedHash), " between hoisteds [\n\t").concat(hoisteds.map(function (h) { return (0, uint8array_utils_1.toHex)(h.hash); })
throw new PlutsIRError_1.PlutsIRError("missing hoisted with hash ".concat((0, uint8array_utils_1.toHex)(_hoistedHash), " between toHoist [\n\t").concat(toHoist.map(function (h) { return (0, uint8array_utils_1.toHex)(h.hash); })
.join(",\n\t"), "\n]; can't replace with IRVar"));

@@ -68,4 +81,4 @@ }

// from last to first
for (var i = hoisteds.length - 1; i >= 0; i--) {
var thisHoisted = hoisteds[i];
for (var i = toHoist.length - 1; i >= 0; i--) {
var thisHoisted = toHoist[i];
prevRoot = root;

@@ -79,2 +92,3 @@ root = new IRApp_1.IRApp(new IRFunc_1.IRFunc(1, root), thisHoisted.hoisted.clone());

var irTermHash = irTerm.hash;
var isHoistedToinline = hoistedsToInlineHashes.some(function (h) { return (0, uint8array_utils_1.uint8ArrayEq)(h, irTermHash); });
if (

@@ -84,3 +98,3 @@ // is hoiseted

// is not one to be inlined
!hoistedsToInlineHashes.some(function (h) { return (0, uint8array_utils_1.uint8ArrayEq)(h, irTermHash); })) {
!isHoistedToinline) {
var irvar = getIRVarForHoistedAtLevel(irTermHash, dbn);

@@ -94,9 +108,9 @@ if (irvar.dbn >= dbn) {

else if (irTerm instanceof IRHoisted_1.IRHoisted) {
if (!hoistedsToInlineHashes.some(function (h) { return (0, uint8array_utils_1.uint8ArrayEq)(h, irTermHash); })) {
if (!isHoistedToinline) {
throw new PlutsIRError_1.PlutsIRError("unexpected hoisted term found with hash: " + (0, uint8array_utils_1.toHex)(irTermHash) +
"\n showIR of the term: " + JSON.stringify((0, showIR_1.showIR)(irTerm), undefined, 2));
}
var toInline = irTerm.hoisted;
(0, _modifyChildFromTo_1._modifyChildFromTo)(irTerm.parent, irTerm, toInline);
stack.push({ irTerm: toInline, dbn: dbn });
var toInline_1 = irTerm.hoisted;
(0, _modifyChildFromTo_1._modifyChildFromTo)(irTerm.parent, irTerm, toInline_1);
stack.push({ irTerm: toInline_1, dbn: dbn });
return "continue";

@@ -103,0 +117,0 @@ }

@@ -77,3 +77,3 @@ "use strict";

if (term instanceof IRVar_1.IRVar) {
if (dbn <= term.dbn) // some val outside
if (term.dbn >= dbn) // some val outside
{

@@ -100,12 +100,9 @@ var outsideDbn = term.dbn - dbn;

}
// closed
// if( term instanceof IRHoisted )
// letted do count too
// letted terms do count too
if (term instanceof IRLetted_1.IRLetted) {
// same stuff as the hoisted terms
// the only difference is that depth is then incremented
// once the letted term reaches its final position
stack.push({ term: term.value, dbn: dbn });
continue;
}
// closed
// if( term instanceof IRHoisted )
}

@@ -112,0 +109,0 @@ return minDbn;

@@ -20,6 +20,6 @@ "use strict";

var IRVar_1 = require("../../../IRNodes/IRVar.js");
var _addDepth_1 = require("../../_internal/_addDepth.js");
var _modifyChildFromTo_1 = require("../../_internal/_modifyChildFromTo.js");
var findAll_1 = require("../../_internal/findAll.js");
var getDebruijnInTerm_1 = require("../../_internal/getDebruijnInTerm.js");
var iterTree_1 = require("../../_internal/iterTree.js");
var groupByScope_1 = require("./groupByScope.js");

@@ -30,12 +30,21 @@ var IRCompilationError_1 = require("../../../../../errors/PlutsIRError/IRCompilationError/index.js");

var IRForced_1 = require("../../../IRNodes/IRForced.js");
var lowestCommonAncestor_1 = require("../../_internal/lowestCommonAncestor.js");
var PlutsIRError_1 = require("../../../../../errors/PlutsIRError/index.js");
var isIRTerm_1 = require("../../../utils/isIRTerm.js");
var markRecursiveHoistsAsForced_1 = require("../markRecursiveHoistsAsForced.js");
function handleLetted(term) {
var e_1, _a;
var _b;
// TODO: should probably merge `markRecursiveHoistsAsForced` inside `getLettedTerms` to iter once
(0, markRecursiveHoistsAsForced_1.markRecursiveHoistsAsForced)(term);
var allLetteds = (0, IRLetted_1.getLettedTerms)(term);
// console.log("direct letted", allLetteds.map( jsonLettedSetEntry ) );
var groupedLetteds = (0, groupByScope_1.groupByScope)(allLetteds);
var _loop_1 = function (maxScope, group) {
if (maxScope === undefined) {
throw new IRCompilationError_1.IRCompilationError("found 'IRLetted' with closed value not replaced by an 'IRHoisted'\n\nclosed letted terms: " +
throw new IRCompilationError_1.IRCompilationError("found 'IRLetted' with closed value not replaced by an 'IRHoisted'\n\nclosed letted terms:\n\n" +
JSON.stringify(group.map(function (entry) { return (0, showIR_1.showIR)(entry.letted.value); }), undefined, 2));
}
var lettedSet = (0, IRLetted_1.getSortedLettedSet)(group);
// console.log( "all group letted", lettedSet.map( jsonLettedSetEntry ) );
var n = lettedSet.length;

@@ -46,11 +55,16 @@ var a = 0;

var toInline = new Array(n);
// console.log( lettedSet.map( jsonLettedSetEntry ) );
// console.log( lettedSet.map( letted => letted.letted.dependencies ) );
// filter out terms with single reference
for (var i = 0; i < n; i++) {
var thisLettedEntry = lettedSet[i];
// console.log( thisHoistedEntry.nReferences, thisHoistedEntry.letted.parent )
if (thisLettedEntry.nReferences === 1 &&
thisLettedEntry.letted.parent) {
// inline with single reference
if (thisLettedEntry.letted.meta.forceHoist === true) {
toLet[a++] = thisLettedEntry.letted;
continue;
}
if (
// inline
// - terms used once (with single reference)
// - letted variables (even if used multiple times)
(thisLettedEntry.nReferences === 1 &&
thisLettedEntry.letted.parent) ||
thisLettedEntry.letted.value instanceof IRVar_1.IRVar) {
toInline[b++] = thisLettedEntry.letted;

@@ -64,52 +78,115 @@ }

toInline.length = b;
/**
* temp varible to hold reference to the letted term we are operating with
*/
var letted;
var toInlineHashes = toInline.map(function (termToInline) { return termToInline.hash; });
var letted;
// inline single references from last to first
// needs to be from last to first so that hashes will not change
for (var i = toInline.length - 1; i >= 0; i--) {
letted = toInline[i];
(0, _modifyChildFromTo_1._modifyChildFromTo)(letted.parent, letted, letted.value);
}
var _loop_2 = function (i) {
var e_2, _c, e_3, _d;
var e_2, _d;
// needs to be at the start of the loop because it could be undefined at first
// but needs also to be below in order to make sure that the reference we have
// is from the tree itself (which has been possibly modified)
letted = lettedSet[i].letted;
// one of the many to be letted
letted = toLet[i];
var lettedSetHashes = lettedSet.map(function (setEntry) { return setEntry.letted.hash; });
var replacedLettedSetEntry = new Array(lettedSet.length).fill(false);
/**
* all the letted corresponding to this value
*
* !!! IMPORTANT !!!
* the `toInline` and `toLet` arrays might include cloned instances
* that are not part of the tree
* we must collect the instances directly from the tree
*
* @type {IRLetted[]}
* we know is an `IRLetted` array
* an not a generic `IRTerm` array
* because that's what the
* filter funciton checks for
* we know this is an `IRLetted` array and not a generic `IRTerm` array
* because that's what the filter function checks for
*/
var refs = (0, findAll_1.findAll)(maxScope, function (elem) {
return elem instanceof IRLetted_1.IRLetted &&
(0, uint8array_utils_1.uint8ArrayEq)(elem.hash, letted.hash);
});
// if letting a plain varible
// just inline the variable as it is more efficient
// and then continue with next group
if (letted.value instanceof IRVar_1.IRVar) {
try {
// inline directly the refereces
for (var refs_1 = (e_2 = void 0, __values(refs)), refs_1_1 = refs_1.next(); !refs_1_1.done; refs_1_1 = refs_1.next()) {
var ref = refs_1_1.value;
(0, _modifyChildFromTo_1._modifyChildFromTo)(ref === null || ref === void 0 ? void 0 : ref.parent, ref, ref.value);
if (!(elem instanceof IRLetted_1.IRLetted))
return false;
var elHash = elem.hash;
/*
little side-effect here
we update the references in the `lettedSet`
with nodes actually present in the tree
so that if (when) the letted node is updated
the update is reflected in the lettedSet automatically
*/
var lettedSetIdx = lettedSetHashes.findIndex(function (h) { return (0, uint8array_utils_1.uint8ArrayEq)(elHash, h); });
var toLetIdx = toLet.findIndex(function (_toLet) { return (0, uint8array_utils_1.uint8ArrayEq)(_toLet.hash, elHash); });
if (lettedSetIdx >= 0) {
if (replacedLettedSetEntry[lettedSetIdx]) {
if (elem.dbn < lettedSet[lettedSetIdx].letted.dbn) {
lettedSet[lettedSetIdx].letted = elem;
if (toLetIdx >= 0) {
toLet[toLetIdx] = elem;
}
else {
var toInlIdx = toInline.findIndex(function (toInl) { return (0, uint8array_utils_1.uint8ArrayEq)(toInl.hash, elHash); });
toInline[toInlIdx] = elem;
}
}
}
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (refs_1_1 && !refs_1_1.done && (_c = refs_1.return)) _c.call(refs_1);
else {
lettedSet[lettedSetIdx].letted = elem;
replacedLettedSetEntry[lettedSetIdx] = true;
if (toLetIdx >= 0) {
toLet[toLetIdx] = elem;
}
else {
var toInlIdx = toInline.findIndex(function (toInl) { return (0, uint8array_utils_1.uint8ArrayEq)(toInl.hash, elHash); });
toInline[toInlIdx] = elem;
}
}
finally { if (e_2) throw e_2.error; }
}
// return true if `elem` is the `letted` being handled in this turn
return (0, uint8array_utils_1.uint8ArrayEq)(elHash, letted.hash);
});
if (refs.length === 0)
return "continue";
// !!! IMPORTANT !!!
// !!! DO NOT REMOVE !!!
// makes sure the reference comes from the tree (possibly modified)
letted = lettedSet[i].letted;
if (letted.value instanceof IRVar_1.IRVar || // always inline vars
// the letted hash is one of the ones to be inlined
toInlineHashes.some(function (h) { return (0, uint8array_utils_1.uint8ArrayEq)(h, letted.hash); })) {
// console.log( "inlining", toHex( letted.hash ) );
// console.log( prettyIRJsonStr( term ) );
// inline single references from last to first
// needs to be from last to first so that hashes will not change
for (var i_1 = refs.length - 1; i_1 >= 0; i_1--) {
letted = refs[i_1];
(0, _modifyChildFromTo_1._modifyChildFromTo)(letted.parent, letted, letted.value);
}
return "continue";
}
// add 1 to every var's DeBruijn that accesses stuff outside the max scope
// maxScope node is non inclusive since the new function is added insite the node
var stack = [{ term: maxScope.body, dbn: 0 }];
var _loop_3 = function () {
// subtree might change so depth will change
// needs to be updated every loop
(0, _addDepth_1._addDepths)(maxScope);
var lca = refs[0];
for (var i_2 = 1; i_2 < refs.length; i_2++) {
lca = (0, lowestCommonAncestor_1.lowestCommonAncestor)(lca, refs[i_2]);
}
if (!(0, isIRTerm_1.isIRTerm)(lca)) {
throw new PlutsIRError_1.PlutsIRError(refs.length + " letting nodes with hash " + (0, uint8array_utils_1.toHex)(letted.hash) + " from different trees; error:" + lca);
}
while (!(lca instanceof IRFunc_1.IRFunc ||
lca instanceof IRDelayed_1.IRDelayed)) {
lca = (_b = lca === null || lca === void 0 ? void 0 : lca.parent) !== null && _b !== void 0 ? _b : "";
if (!(0, isIRTerm_1.isIRTerm)(lca)) {
throw new PlutsIRError_1.PlutsIRError("lowest common ancestor outside the max scope");
}
}
var parentNode = lca;
var parentNodeDirectChild = parentNode instanceof IRFunc_1.IRFunc ? parentNode.body : parentNode.delayed;
// add 1 to every var's DeBruijn that accesses stuff outside the parent node
// maxScope node is non inclusive since the new function is added inside the node
var stack = [{ term: parentNodeDirectChild, dbn: 0 }];
while (stack.length > 0) {
var _e = stack.pop(), t = _e.term, dbn = _e.dbn;
// console.log( prettyIRText( t ), "stack length:", stack.length );
if (t instanceof IRVar_1.IRVar &&

@@ -119,35 +196,38 @@ t.dbn >= dbn) {

t.dbn++;
continue;
}
if (t instanceof IRLetted_1.IRLetted) {
if ( // the letted has is one of the ones to be inlined
toInlineHashes.some(function (h) { return (0, uint8array_utils_1.uint8ArrayEq)(h, t.hash); })) {
// inline
(0, _modifyChildFromTo_1._modifyChildFromTo)(t.parent, t, t.value);
if ((0, uint8array_utils_1.uint8ArrayEq)(t.hash, letted.hash)) {
// don't modify letted to be hoisted
continue;
}
else {
else // other letted to be handled in one of the next cycles
{
// `IRLambdas` DeBruijn are tracking the level of instantiation
// since a new var has been introduced above
// we must increment regardless
t.dbn++;
// we add a new variable so the dbn of instantiation increments
t.dbn += 1;
// DO NOT also increment the dbns of the letted value
// that would change nothing since letted terms are normalized
// relative to the letted dbn
stack.push({ term: t.value, dbn: dbn });
}
continue;
}
if (t instanceof IRApp_1.IRApp) {
stack.push({ term: t.fn, dbn: dbn }, { term: t.arg, dbn: dbn });
return "continue";
stack.push({ term: t.arg, dbn: dbn }, { term: t.fn, dbn: dbn });
continue;
}
if (t instanceof IRDelayed_1.IRDelayed) {
stack.push({ term: t.delayed, dbn: dbn });
return "continue";
continue;
}
if (t instanceof IRForced_1.IRForced) {
stack.push({ term: t.forced, dbn: dbn });
return "continue";
continue;
}
if (t instanceof IRFunc_1.IRFunc) {
stack.push({ term: t.body, dbn: dbn + t.arity });
return "continue";
continue;
}
};
while (stack.length > 0) {
_loop_3();
// skip hoisted since closed
}
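The traversal above increments the de Bruijn index of every variable that refers to a binder outside the subtree about to be wrapped in a new IRFunc. A self-contained sketch of that adjustment over simplified node shapes (not the package's actual IR classes):

// Hedged sketch: simplified node shapes, not the actual IR classes.
type Node =
    | { kind: "var"; dbn: number }
    | { kind: "app"; fn: Node; arg: Node }
    | { kind: "func"; arity: number; body: Node };

function shiftFreeVars(root: Node): void {
    const stack: { term: Node; dbn: number }[] = [{ term: root, dbn: 0 }];
    while (stack.length > 0) {
        const { term, dbn } = stack.pop()!;
        if (term.kind === "var") {
            // dbn >= binders seen so far means the variable is free in `root`,
            // so it must skip over the newly introduced lambda
            if (term.dbn >= dbn) term.dbn++;
            continue;
        }
        if (term.kind === "app") {
            stack.push({ term: term.fn, dbn }, { term: term.arg, dbn });
            continue;
        }
        // entering a function introduces `arity` new binders
        stack.push({ term: term.body, dbn: dbn + term.arity });
    }
}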

@@ -158,7 +238,8 @@ // get the difference in DeBruijn

var diffDbn = 0;
while (tmpNode !== maxScope) {
while (tmpNode !== parentNode) {
tmpNode = tmpNode.parent;
if ( // is an intermediate `IRFunc`
tmpNode instanceof IRFunc_1.IRFunc &&
tmpNode !== maxScope) {
tmpNode !== parentNode // avoid counting parent node arity if IRFunc
) {
// increment differential in DeBruijn by n vars introduced here

@@ -168,3 +249,4 @@ diffDbn += tmpNode.arity;

}
// now we inline
// console.log( "-------------------- adding letted value --------------------\n".repeat(3) );
// now we replace
var clonedLettedVal = letted.value.clone();

@@ -175,11 +257,37 @@ // if there is any actual difference between the letted term

if (diffDbn > 0) {
// adapt the variables in the term to be instantiated
(0, iterTree_1.iterTree)(clonedLettedVal, function (elem) {
if (elem instanceof IRVar_1.IRVar || elem instanceof IRLetted_1.IRLetted) {
elem.dbn -= diffDbn;
var stack_1 = [{ term: clonedLettedVal, dbn: 0 }];
while (stack_1.length > 0) {
var _f = stack_1.pop(), t = _f.term, dbn = _f.dbn;
// console.log( prettyIRText( t ) );
if (t instanceof IRVar_1.IRVar &&
t.dbn > dbn) {
t.dbn -= diffDbn;
}
});
if (t instanceof IRLetted_1.IRLetted) {
t.dbn -= diffDbn;
// reduce dbn in letted value too
stack_1.push({ term: t.value, dbn: dbn });
continue;
}
if (t instanceof IRApp_1.IRApp) {
stack_1.push({ term: t.arg, dbn: dbn }, { term: t.fn, dbn: dbn });
continue;
}
if (t instanceof IRDelayed_1.IRDelayed) {
stack_1.push({ term: t.delayed, dbn: dbn });
continue;
}
if (t instanceof IRForced_1.IRForced) {
stack_1.push({ term: t.forced, dbn: dbn });
continue;
}
if (t instanceof IRFunc_1.IRFunc) {
stack_1.push({ term: t.body, dbn: dbn + t.arity });
continue;
}
// no hoisted
}
}
// save parent so that when replacing we don't create circular refs
var parent_1 = maxScope;
var parent_1 = parentNode;
// keep pointer to the old body

@@ -189,8 +297,9 @@ // so we don't have to count the newly introduced `IRFunc` in `newNode`

// (subtracting 1 works too but this is an operation less)
var oldBody = maxScope.body;
var newNode = new IRApp_1.IRApp(new IRFunc_1.IRFunc(1, maxScope.body), clonedLettedVal);
(0, _modifyChildFromTo_1._modifyChildFromTo)(parent_1, maxScope.body, newNode);
var oldBody = parentNodeDirectChild;
var newNode = new IRApp_1.IRApp(new IRFunc_1.IRFunc(1, parentNodeDirectChild), clonedLettedVal);
(0, _modifyChildFromTo_1._modifyChildFromTo)(parent_1, parentNodeDirectChild, // not really used since we know parent is not `IRApp`
newNode);
try {
for (var refs_2 = (e_3 = void 0, __values(refs)), refs_2_1 = refs_2.next(); !refs_2_1.done; refs_2_1 = refs_2.next()) {
var ref = refs_2_1.value;
for (var refs_1 = (e_2 = void 0, __values(refs)), refs_1_1 = refs_1.next(); !refs_1_1.done; refs_1_1 = refs_1.next()) {
var ref = refs_1_1.value;
(0, _modifyChildFromTo_1._modifyChildFromTo)(ref === null || ref === void 0 ? void 0 : ref.parent, ref,

@@ -201,11 +310,14 @@ // "- 1" is couting the `IRFunc` introduced with `newNode`

}
catch (e_3_1) { e_3 = { error: e_3_1 }; }
catch (e_2_1) { e_2 = { error: e_2_1 }; }
finally {
try {
if (refs_2_1 && !refs_2_1.done && (_d = refs_2.return)) _d.call(refs_2);
if (refs_1_1 && !refs_1_1.done && (_d = refs_1.return)) _d.call(refs_1);
}
finally { if (e_3) throw e_3.error; }
finally { if (e_2) throw e_2.error; }
}
};
for (var i = toLet.length - 1; i >= 0; i--) {
// increase the debruijn index to account for newly introduced (and applied) `IRFunc`
// needs to be from last to first so that hashes will not change
// (i.e. we replace dependents before dependencies)
for (var i = lettedSet.length - 1; i >= 0; i--) {
_loop_2(i);

@@ -216,3 +328,3 @@ }

for (var groupedLetteds_1 = __values(groupedLetteds), groupedLetteds_1_1 = groupedLetteds_1.next(); !groupedLetteds_1_1.done; groupedLetteds_1_1 = groupedLetteds_1.next()) {
var _b = groupedLetteds_1_1.value, maxScope = _b.maxScope, group = _b.group;
var _c = groupedLetteds_1_1.value, maxScope = _c.maxScope, group = _c.group;
_loop_1(maxScope, group);

@@ -219,0 +331,0 @@ }

@@ -12,1 +12,20 @@ import { IRTerm } from "../IRTerm.js";

};
export declare function prettyIRText(_ir: IRTerm, _indent?: number): string | {
text: string;
letted: {
[hash: string]: string;
};
hoisted: {
[hash: string]: string;
};
};
export declare function prettyIR(_ir: IRTerm, _indent?: number): {
text: string;
letted: {
[hash: string]: string;
};
hoisted: {
[hash: string]: string;
};
};
export declare function prettyIRJsonStr(ir: IRTerm, indent?: number): string;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.showIR = exports.showIRText = void 0;
exports.prettyIRJsonStr = exports.prettyIR = exports.prettyIRText = exports.showIR = exports.showIRText = void 0;
var uint8array_utils_1 = require("@harmoniclabs/uint8array-utils");

@@ -34,3 +34,3 @@ var IRApp_1 = require("../IRNodes/IRApp.js");

// addLetted( ir );
return "(letted ".concat((0, uint8array_utils_1.toHex)(ir.hash), ")");
return "(letted ".concat(ir.dbn, " ").concat((0, uint8array_utils_1.toHex)(ir.hash), ")");
}

@@ -87,3 +87,4 @@ if (ir instanceof IRHoisted_1.IRHoisted) {

var deps = l.dependencies;
for (var i = 0; i < 0; i++) {
var nDeps = deps.length;
for (var i = 0; i < nDeps; i++) {
addLetted(deps[i].letted);

@@ -111,3 +112,3 @@ }

addLetted(ir);
return "(letted ".concat((0, uint8array_utils_1.toHex)(ir.hash), ")");
return "(letted ".concat(ir.dbn, " ").concat((0, uint8array_utils_1.toHex)(ir.hash), ")");
}

@@ -145,1 +146,138 @@ if (ir instanceof IRHoisted_1.IRHoisted) {

exports.showIR = showIR;
function prettyIRText(_ir, _indent) {
if (_indent === void 0) { _indent = 2; }
if (!Number.isSafeInteger(_indent) || _indent < 1)
return showIR(_ir);
var indentStr = " ".repeat(_indent);
function _internal(ir, dbn, depth) {
var indent = "\n".concat(indentStr.repeat(depth));
if (ir instanceof IRApp_1.IRApp)
return "".concat(indent, "[").concat(_internal(ir.fn, dbn, depth + 1), " ").concat(_internal(ir.arg, dbn, depth + 1)).concat(indent, "]");
if (ir instanceof IRNative_1.IRNative)
return "".concat(indent, "(native ").concat((0, IRNativeTag_1.nativeTagToString)(ir.tag), ")");
if (ir instanceof IRLetted_1.IRLetted) {
// addLetted( ir );
return "".concat(indent, "(letted ").concat(ir.dbn, " ").concat((0, uint8array_utils_1.toHex)(ir.hash), ")");
}
if (ir instanceof IRHoisted_1.IRHoisted) {
return "".concat(indent, "(hoisted ").concat((0, uint8array_utils_1.toHex)(ir.hash), ")");
}
if (ir instanceof IRVar_1.IRVar)
return indent + getVarNameForDbn(dbn - 1 - ir.dbn);
if (ir instanceof IRConst_1.IRConst)
return "".concat(indent, "(const ").concat((0, utils_1.termTypeToString)(ir.type), " ").concat((0, UPLCTerm_1.showUPLCConstValue)(ir.value), ")");
if (ir instanceof IRDelayed_1.IRDelayed)
return "".concat(indent, "(delay ").concat(_internal(ir.delayed, dbn, depth + 1)).concat(indent, ")");
if (ir instanceof IRForced_1.IRForced)
return "".concat(indent, "(force ").concat(_internal(ir.forced, dbn, depth + 1)).concat(indent, ")");
if (ir instanceof IRError_1.IRError)
return "(error)";
if (ir instanceof IRFunc_1.IRFunc) {
var vars_3 = new Array(ir.arity);
for (var i = 0; i < ir.arity; i++) {
vars_3[i] = getVarNameForDbn(dbn++);
}
return "".concat(indent, "(func ").concat(vars_3.join(" "), " ").concat(_internal(ir.body, dbn, depth + 1)).concat(indent, ")");
}
return "";
}
return _internal(_ir, 0, 0);
}
exports.prettyIRText = prettyIRText;
function prettyIR(_ir, _indent) {
if (_indent === void 0) { _indent = 2; }
if (!Number.isSafeInteger(_indent) || _indent < 1)
return showIR(_ir);
var indentStr = " ".repeat(_indent);
var hoistedHashes = [];
var hoisted = {};
function addHoisted(h) {
var hash = h.hash;
if (!hoistedHashes.some(function (hoistedHash) { return (0, uint8array_utils_1.uint8ArrayEq)(hoistedHash, hash); })) {
var deps = h.dependencies;
for (var i = 0; i < deps.length; i++) {
addHoisted(deps[i].hoisted);
}
hoistedHashes.push(hash.slice());
Object.defineProperty(hoisted, (0, uint8array_utils_1.toHex)(hash), {
value: prettyIRText(h.hoisted, _indent),
writable: false,
enumerable: true,
configurable: false
});
}
}
var lettedHashes = [];
var letted = {};
function addLetted(l) {
var hash = l.hash;
if (!lettedHashes.some(function (lettedHash) { return (0, uint8array_utils_1.uint8ArrayEq)(lettedHash, hash); })) {
var deps = l.dependencies;
var nDeps = deps.length;
for (var i = 0; i < nDeps; i++) {
addLetted(deps[i].letted);
}
lettedHashes.push(hash.slice());
(0, IRHoisted_1.getHoistedTerms)(l.value.clone()).forEach(function (_a) {
var hoisted = _a.hoisted;
return addHoisted(hoisted);
});
Object.defineProperty(letted, (0, uint8array_utils_1.toHex)(hash), {
value: prettyIRText(l.value, _indent),
writable: false,
enumerable: true,
configurable: false
});
}
}
function _internal(ir, dbn, depth) {
var indent = "\n".concat(indentStr.repeat(depth));
if (ir instanceof IRApp_1.IRApp)
return "".concat(indent, "[").concat(_internal(ir.fn, dbn, depth + 1), " ").concat(_internal(ir.arg, dbn, depth + 1)).concat(indent, "]");
if (ir instanceof IRNative_1.IRNative)
return "".concat(indent, "(native ").concat((0, IRNativeTag_1.nativeTagToString)(ir.tag), ")");
if (ir instanceof IRLetted_1.IRLetted) {
addLetted(ir);
return "".concat(indent, "(letted ").concat(ir.dbn, " ").concat((0, uint8array_utils_1.toHex)(ir.hash), ")");
}
if (ir instanceof IRHoisted_1.IRHoisted) {
addHoisted(ir);
return "".concat(indent, "(hoisted ").concat((0, uint8array_utils_1.toHex)(ir.hash), ")");
}
if (ir instanceof IRVar_1.IRVar)
return indent + getVarNameForDbn(dbn - 1 - ir.dbn);
if (ir instanceof IRConst_1.IRConst)
return "".concat(indent, "(const ").concat((0, utils_1.termTypeToString)(ir.type), " ").concat((0, UPLCTerm_1.showUPLCConstValue)(ir.value), ")");
if (ir instanceof IRDelayed_1.IRDelayed)
return "".concat(indent, "(delay ").concat(_internal(ir.delayed, dbn, depth + 1)).concat(indent, ")");
if (ir instanceof IRForced_1.IRForced)
return "".concat(indent, "(force ").concat(_internal(ir.forced, dbn, depth + 1)).concat(indent, ")");
if (ir instanceof IRError_1.IRError)
return "(error)";
if (ir instanceof IRFunc_1.IRFunc) {
var vars_4 = new Array(ir.arity);
for (var i = 0; i < ir.arity; i++) {
vars_4[i] = getVarNameForDbn(dbn++);
}
return "".concat(indent, "(func ").concat(vars_4.join(" "), " ").concat(_internal(ir.body, dbn, depth + 1)).concat(indent, ")");
}
return "";
}
var text = _internal(_ir, 0, 0);
return {
text: text,
letted: letted,
hoisted: hoisted
};
}
exports.prettyIR = prettyIR;
function prettyIRJsonStr(ir, indent) {
if (indent === void 0) { indent = 2; }
return JSON.stringify(prettyIR(ir, indent), function (k, v) {
if ((k === "text" || (typeof k === "string" && k.length === 32)) && typeof v === "string") {
return v.split("\n");
}
return v;
}, indent);
}
exports.prettyIRJsonStr = prettyIRJsonStr;

@@ -5,2 +5,3 @@ import { PType } from "../../PType/index.js";

import { UtilityTermOf } from "../addUtilityForType.js";
export declare function pforce<PInstance extends PType>(toForce: Term<PDelayed<PInstance>> | Term<PInstance>): UtilityTermOf<PInstance>;
export declare function pforce<PInstance extends PType>(toForce: Term<PDelayed<PInstance>>): UtilityTermOf<PInstance>;
export declare function pforce<PInstance extends PType>(toForce: Term<PInstance>): UtilityTermOf<PInstance>;

@@ -8,2 +8,3 @@ "use strict";

var pfn_1 = require("../../pfn.js");
var phoist_1 = require("../../phoist.js");
var plam_1 = require("../../plam.js");

@@ -13,3 +14,3 @@ var pInt_1 = require("../int/pInt.js");

var plength = function (elemsT) {
return (0, precursiveList_1.precursiveList)(type_system_1.int, elemsT)
return (0, phoist_1.phoist)((0, precursiveList_1.precursiveList)(type_system_1.int, elemsT)
.$((0, plam_1.plam)((0, type_system_1.lam)((0, type_system_1.list)(elemsT), type_system_1.int), (0, type_system_1.delayed)(type_system_1.int))(function (_self) { return (0, pdelay_1.pdelay)((0, pInt_1.pInt)(0)); }))

@@ -20,4 +21,4 @@ .$((0, pfn_1.pfn)([

(0, type_system_1.list)(elemsT)
], type_system_1.int)(function (self, _x, xs) { return (0, pInt_1.pInt)(1).add((0, papp_1.papp)(self, xs)); }));
], type_system_1.int)(function (self, _x, xs) { return (0, pInt_1.pInt)(1).add((0, papp_1.papp)(self, xs)); })));
};
exports.plength = plength;

@@ -7,3 +7,4 @@ "use strict";

var types_1 = require("../../../type_system/types.js");
var pmakeUnit = function () { return new Term_1.Term(types_1.unit, function (_dbn) { return IRConst_1.IRConst.unit; }); };
var pmakeUnit = function () { return new Term_1.Term(types_1.unit, function (_dbn) { return IRConst_1.IRConst.unit; }, true // isConstant
); };
exports.pmakeUnit = pmakeUnit;

@@ -25,2 +25,3 @@ "use strict";

var phoist_1 = require("../../phoist.js");
var plet_1 = require("../../plet/index.js");
var subByteString = (0, phoist_1.phoist)((0, pfn_1.pfn)([

@@ -46,4 +47,4 @@ types_1.bs,

function addPByteStringMethods(term) {
ObjectUtils_1.default.definePropertyIfNotPresent(term, "length", __assign({ get: function () { return bs_1.plengthBs.$(term); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(term, "utf8Decoded", __assign({ get: function () { return str_1.pdecodeUtf8.$(term); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(term, "length", __assign({ get: function () { return (0, plet_1.plet)(bs_1.plengthBs.$(term)); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(term, "utf8Decoded", __assign({ get: function () { return (0, plet_1.plet)(str_1.pdecodeUtf8.$(term)); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(term, "concatTerm", __assign({ get: function () { return bs_1.pappendBs.$(term); } }, getterOnly));

@@ -50,0 +51,0 @@ ObjectUtils_1.default.defineReadOnlyProperty(term, "concat", function (other) { return bs_1.pappendBs.$(term).$(other); });

@@ -27,2 +27,3 @@ "use strict";

var phoist_1 = require("../../phoist.js");
var plet_1 = require("../../plet/index.js");
var combinators_1 = require("../combinators/index.js");

@@ -59,7 +60,7 @@ var pevery_1 = require("../list/pevery.js");

ObjectUtils_1.default.definePropertyIfNotPresent(_lst, "head", __assign({ get: function () {
return (0, list_1.phead)(elemsT).$(_lst);
return (0, plet_1.plet)((0, list_1.phead)(elemsT).$(_lst));
} }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_lst, "tail", __assign({ get: function () { return (0, list_1.ptail)(elemsT).$(_lst); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_lst, "length", __assign({ get: function () { return (0, plength_1.plength)(elemsT).$(_lst); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_lst, "reversed", __assign({ get: function () { return (0, preverse_1.preverse)(elemsT).$(_lst); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_lst, "tail", __assign({ get: function () { return (0, plet_1.plet)((0, list_1.ptail)(elemsT).$(_lst)); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_lst, "length", __assign({ get: function () { return (0, plet_1.plet)((0, plength_1.plength)(elemsT).$(_lst)); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_lst, "reversed", __assign({ get: function () { return (0, plet_1.plet)((0, preverse_1.preverse)(elemsT).$(_lst)); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_lst, "atTerm", __assign({ get: function () { return (0, pindexList_1.pindexList)(elemsT).$(_lst); } }, getterOnly));

@@ -66,0 +67,0 @@ ObjectUtils_1.default.defineReadOnlyProperty(_lst, "at", function (index) { return (0, pindexList_1.pindexList)(elemsT).$(_lst).$(index); });

@@ -23,2 +23,3 @@ "use strict";

var builtins_1 = require("../../builtins/index.js");
var plet_1 = require("../../plet/index.js");
var getterOnly = {

@@ -44,7 +45,7 @@ set: function () { },

if ((0, type_system_1.isWellFormedType)(fstT))
ObjectUtils_1.default.definePropertyIfNotPresent(_pair, "fst", __assign({ get: function () { return (0, builtins_1.pfstPair)(fstT, sndT).$(_pair); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_pair, "fst", __assign({ get: function () { return (0, plet_1.plet)((0, builtins_1.pfstPair)(fstT, sndT).$(_pair)); } }, getterOnly));
if ((0, type_system_1.isWellFormedType)(sndT))
ObjectUtils_1.default.definePropertyIfNotPresent(_pair, "snd", __assign({ get: function () { return (0, builtins_1.psndPair)(fstT, sndT).$(_pair); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(_pair, "snd", __assign({ get: function () { return (0, plet_1.plet)((0, builtins_1.psndPair)(fstT, sndT).$(_pair)); } }, getterOnly));
return _pair;
}
exports.addPPairMethods = addPPairMethods;

@@ -20,2 +20,3 @@ "use strict";

var str_1 = require("../../builtins/str/index.js");
var plet_1 = require("../../plet/index.js");
var getterOnly = {

@@ -27,6 +28,6 @@ set: function () { },

function addPStringMethods(term) {
ObjectUtils_1.default.definePropertyIfNotPresent(term, "utf8Encoded", __assign({ get: function () { return str_1.pencodeUtf8.$(term); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(term, "concatTerm", __assign({ get: function () { return str_1.pappendStr.$(term); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(term, "utf8Encoded", __assign({ get: function () { return (0, plet_1.plet)(str_1.pencodeUtf8.$(term)); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(term, "concatTerm", __assign({ get: function () { return (0, plet_1.plet)(str_1.pappendStr.$(term)); } }, getterOnly));
ObjectUtils_1.default.defineReadOnlyProperty(term, "concat", function (other) { return str_1.pappendStr.$(term).$(other); });
ObjectUtils_1.default.definePropertyIfNotPresent(term, "eqTerm", __assign({ get: function () { return str_1.peqStr.$(term); } }, getterOnly));
ObjectUtils_1.default.definePropertyIfNotPresent(term, "eqTerm", __assign({ get: function () { return (0, plet_1.plet)(str_1.peqStr.$(term)); } }, getterOnly));
ObjectUtils_1.default.defineReadOnlyProperty(term, "eq", function (other) { return str_1.peqStr.$(term).$(other); });

@@ -33,0 +34,0 @@ return term;
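The last four hunks above (byte-string, list, pair and string utility terms) apply the same change: property getters such as `length`, `utf8Decoded`, `head`, `tail`, `reversed`, `fst`, `snd` and `utf8Encoded` now wrap the applied builtin in plet. A minimal sketch of the intended effect, assuming a byte-string term `bs` (hypothetical) is already in scope:

    // both reads of `bs.length` are letted, so the underlying builtin application
    // is expected to be bound once in the compiled script rather than duplicated
    const doubledLen = bs.length.add( bs.length );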

@@ -18,2 +18,5 @@ import { Term } from "../../../Term/index.js";

extract: <Fields extends (keyof SDef[keyof SDef])[]>(...fields: Fields) => {
/**
* @deprecated
*/
in: <PExprResult extends PType>(expr: (extracted: RestrictedStructInstance<SDef[keyof SDef], Fields>) => Term<PExprResult>) => UtilityTermOf<PExprResult>;

@@ -20,0 +23,0 @@ };

"use strict";
var __read = (this && this.__read) || function (o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o;
var i = m.call(o), r, ar = [], e;
try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
}
catch (error) { e = { error: error }; }
finally {
try {
if (r && !r.done && (m = i["return"])) m.call(i);
}
finally { if (e) throw e.error; }
}
return ar;
};
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
if (ar || !(i in from)) {
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
return to.concat(ar || Array.prototype.slice.call(from));
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -34,3 +9,2 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

var Term_1 = require("../../../Term/index.js");
var capitalize_1 = require("../../../../../utils/ts/capitalize.js");
// !!! IMPORTANT !!!

@@ -47,2 +21,6 @@ // DO NOT change the order of imports

var IRNative_1 = require("../../../../IR/IRNodes/IRNative/index.js");
var punsafeConvertType_1 = require("../../punsafeConvertType/index.js");
var IRHoisted_1 = require("../../../../IR/IRNodes/IRHoisted.js");
var IRFunc_1 = require("../../../../IR/IRNodes/IRFunc.js");
var IRVar_1 = require("../../../../IR/IRNodes/IRVar.js");
var getterOnly = {

@@ -53,2 +31,4 @@ set: function () { },

};
var hoisted_getFields = new IRHoisted_1.IRHoisted(new IRFunc_1.IRFunc(1, // struct
new IRApp_1.IRApp(IRNative_1.IRNative.sndPair, new IRApp_1.IRApp(IRNative_1.IRNative.unConstrData, new IRVar_1.IRVar(0)))));
function addPStructMethods(struct) {

@@ -64,4 +44,17 @@ var t = struct.type;

if (ctors.length === 1) {
var ctorName_1 = ctors[0];
var ctor = sDef[ctorName_1];
var ctorName = ctors[0];
var ctor = sDef[ctorName];
var fieldsNames = Object.keys(ctor);
var nFields = fieldsNames.length;
var letted_fieldsListData = new Term_1.Term((0, type_system_1.list)(type_system_1.data), function (dbn) { return new IRLetted_1.IRLetted(Number(dbn), new IRApp_1.IRApp(hoisted_getFields.clone(), struct.toIR(dbn))); });
for (var i = 0; i < nFields; i++) {
var thisFieldName = fieldsNames[i];
var thisFieldType = ctor[thisFieldName];
(!ObjectUtils_1.default.hasOwn(struct, thisFieldName)) && Object.defineProperty(struct, thisFieldName, {
value: (0, punsafeConvertType_1.punsafeConvertType)((0, plet_1.plet)((0, fromData_minimal_1._fromData)(thisFieldType)((0, pmatch_1.getElemAtTerm)(i).$(letted_fieldsListData))), thisFieldType),
writable: false,
enumerable: true,
configurable: false
});
}
/**

@@ -76,23 +69,8 @@ * @deprecated

return {
in: function (expr) {
return (0, pmatch_1.pmatch)(struct)[("on" + (0, capitalize_1.capitalize)(ctorName_1))](function (rawFields) { return rawFields.extract.apply(rawFields, __spreadArray([], __read(fields), false)).in(expr); });
}
in: function (expr) { return expr(struct); }
};
});
var fieldsNames = Object.keys(ctor);
var nFields = fieldsNames.length;
var letted_fieldsListData = new Term_1.Term((0, type_system_1.list)(type_system_1.data), function (dbn) { return new IRLetted_1.IRLetted(Number(dbn), new IRApp_1.IRApp(IRNative_1.IRNative.sndPair, data_1.punConstrData.$(struct).toIR(dbn))); });
for (var i = 0; i < nFields; i++) {
var thisFieldName = fieldsNames[i];
var thisFieldType = ctor[thisFieldName];
(!ObjectUtils_1.default.hasOwn(struct, thisFieldName)) && Object.defineProperty(struct, thisFieldName, {
value: (0, plet_1.plet)((0, fromData_minimal_1._fromData)(thisFieldType)((0, pmatch_1.getElemAtTerm)(i).$(letted_fieldsListData))),
writable: false,
enumerable: true,
configurable: false
});
}
}
ObjectUtils_1.default.definePropertyIfNotPresent(struct, "eqTerm", {
get: function () { return data_1.peqData.$(struct); },
get: function () { return (0, plet_1.plet)(data_1.peqData.$(struct)); },
set: function () { },

@@ -99,0 +77,0 @@ configurable: false,
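The addPStructMethods hunk above changes how single-constructor structs expose their fields: each field is now defined as a read-only property on the struct term itself (letted and decoded from data), while `extract` survives only as a deprecated shim that calls the callback with the struct directly. A minimal sketch of the resulting usage; `Coord` and `coord` are hypothetical and only the access pattern reflects this diff:

    import { pstruct, int } from "@harmoniclabs/plu-ts";

    const Coord = pstruct({
        Coord: { x: int, y: int }
    });

    // new: fields of a single-constructor struct term are plain properties
    // const sum = coord.x.add( coord.y );

    // old (still compiles, but deprecated):
    // coord.extract("x", "y").in( ({ x, y }) => x.add( y ) );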

@@ -0,6 +1,11 @@

import { Term } from "../../Term/index.js";
import { TermType } from "../../type_system/index.js";
export declare const matchSingleCtorStruct: (returnT: TermType) => import("../../index.js").Term<import("../index.js").PLam<import("../index.js").PData, import("../index.js").PLam<import("../index.js").PLam<import("../index.js").PList<import("../index.js").PData>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>>> & {
$: (input: import("../../index.js").Term<import("../index.js").PData>) => import("../../index.js").Term<import("../index.js").PLam<import("../index.js").PLam<import("../index.js").PList<import("../index.js").PData>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>> & {
$: (input: import("../../index.js").PappArg<import("../index.js").PLam<import("../index.js").PList<import("../index.js").PData>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>>) => import("../../index.js").UtilityTermOf<import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>;
import { PData } from "../PData/index.js";
import { PLam } from "../PFn/index.js";
import { PList } from "../PList.js";
export declare const getFields: Term<PLam<PData, PList<PData>>>;
export declare const matchSingleCtorStruct: (returnT: TermType) => Term<PLam<PData, PLam<PLam<PList<PData>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>>> & {
$: (input: Term<PData>) => Term<PLam<PLam<PList<PData>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>> & {
$: (input: import("../../index.js").PappArg<PLam<PList<PData>, import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>>) => import("../../index.js").UtilityTermOf<import("../../index.js").PType | import("./pstruct.js").PStruct<import("../../type_system/index.js").StructDefinition>>;
};
};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.matchSingleCtorStruct = void 0;
exports.matchSingleCtorStruct = exports.getFields = void 0;
var IRApp_1 = require("../../../IR/IRNodes/IRApp.js");
var IRFunc_1 = require("../../../IR/IRNodes/IRFunc.js");
var IRHoisted_1 = require("../../../IR/IRNodes/IRHoisted.js");
var IRNative_1 = require("../../../IR/IRNodes/IRNative/index.js");
var IRVar_1 = require("../../../IR/IRNodes/IRVar.js");
var Term_1 = require("../../Term/index.js");
var data_1 = require("../../lib/builtins/data/index.js");

@@ -10,2 +16,5 @@ var noUnwrap_1 = require("../../lib/builtins/pair/noUnwrap.js");

var type_system_1 = require("../../type_system/index.js");
var hoisted_getFields = new IRHoisted_1.IRHoisted(new IRFunc_1.IRFunc(1, // struct
new IRApp_1.IRApp(IRNative_1.IRNative.sndPair, new IRApp_1.IRApp(IRNative_1.IRNative.unConstrData, new IRVar_1.IRVar(0)))));
exports.getFields = new Term_1.Term((0, type_system_1.lam)(type_system_1.data, (0, type_system_1.list)(type_system_1.data)), function (_dbn) { return hoisted_getFields.clone(); });
exports.matchSingleCtorStruct = (function (returnT) { return (0, phoist_1.phoist)((0, pfn_1.pfn)([

@@ -12,0 +21,0 @@ type_system_1.data,
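The new `getFields` export above wraps a hoisted IR function that takes a data-encoded constructor, drops the constructor index and returns the list of fields. Expressed with the exported plu-ts helpers instead of raw IR nodes (an assumed equivalent, not taken from this diff):

    // rough equivalent of the hoisted term: sndPair( unConstrData( d ) )
    const getFieldsSketch = phoist(
        plam( data, list( data ) )
        ( d => psndPair( int, list( data ) ).$( punConstrData.$( d ) ) )
    );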

@@ -1,2 +0,2 @@

import { RestrictedStructInstance, PStruct } from "../pstruct.js";
import { PStruct, StructInstance } from "../pstruct.js";
import { PType } from "../../../PType/index.js";

@@ -11,7 +11,2 @@ import { Term } from "../../../Term/index.js";

export declare function getElemAtTerm(n: number): TermFn<[PList<PData>], PData>;
export type RawFields<CtorDef extends StructCtorDef> = Term<PList<PData>> & {
extract: <Fields extends (keyof CtorDef)[]>(...fields: Fields) => {
in: <PExprResult extends PType>(expr: (extracted: RestrictedStructInstance<CtorDef, Fields>) => Term<PExprResult>) => UtilityTermOf<PExprResult>;
};
};
type EmptyObject = {

@@ -21,11 +16,23 @@ [x: string | number | symbol]: never;

type MatchRest<PReturnT extends PType> = {
_: (continuation: (rawFields: TermList<PData>) => Term<PReturnT>) => UtilityTermOf<PReturnT>;
_: (continuation: (mathcedCtorsFields: TermList<PData>) => Term<PReturnT>) => UtilityTermOf<PReturnT>;
};
type TypedPMatchOptions<SDef extends StructDefinition, PReturnT extends PType> = {
[Ctor in keyof SDef as `on${Capitalize<string & Ctor>}`]: (cb: (rawFields: RawFields<SDef[Ctor]>) => Term<PReturnT>) => Omit<SDef, Ctor> extends EmptyObject ? UtilityTermOf<PReturnT> : TypedPMatchOptions<Omit<SDef, Ctor>, PReturnT>;
[Ctor in keyof SDef as `on${Capitalize<string & Ctor>}`]: (cb: (mathcedCtorsFields: StructInstance<SDef[Ctor]>) => Term<PReturnT>) => Omit<SDef, Ctor> extends EmptyObject ? UtilityTermOf<PReturnT> : TypedPMatchOptions<Omit<SDef, Ctor>, PReturnT>;
} & MatchRest<PReturnT>;
type MathcedCtorsFields<SCtorDef extends StructCtorDef> = StructInstance<SCtorDef> & {
/**
* @deprecated
* you can use simple dot notation instead
**/
extract: (...fields: (keyof SCtorDef)[]) => {
/**
* @deprecated
**/
in: <PExprReturn extends PType>(cb: (mathcedCtorsFields: StructInstance<SCtorDef>) => Term<PExprReturn>) => Term<PExprReturn>;
};
};
export type PMatchOptions<SDef extends StructDefinition> = {
[Ctor in keyof SDef as `on${Capitalize<string & Ctor>}`]: <PReturnT extends PType>(cb: (rawFields: RawFields<SDef[Ctor]>) => Term<PReturnT>) => Omit<SDef, Ctor> extends EmptyObject ? UtilityTermOf<PReturnT> : TypedPMatchOptions<Omit<SDef, Ctor>, PReturnT>;
[Ctor in keyof SDef as `on${Capitalize<string & Ctor>}`]: <PReturnT extends PType>(cb: (mathcedCtorsFields: MathcedCtorsFields<SDef[Ctor]>) => Term<PReturnT>) => Omit<SDef, Ctor> extends EmptyObject ? UtilityTermOf<PReturnT> : TypedPMatchOptions<Omit<SDef, Ctor>, PReturnT>;
} & {
_: <PReturnT extends PType>(continuation: (rawFields: TermList<PData>) => Term<PReturnT>) => UtilityTermOf<PReturnT>;
_: <PReturnT extends PType>(continuation: (mathcedCtorsFields: TermList<PData>) => Term<PReturnT>) => UtilityTermOf<PReturnT>;
};

@@ -32,0 +39,0 @@ export declare function matchNCtorsIdxs(_n: number, returnT: TermType): Term<any>;
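The typings above replace `RawFields` with `StructInstance` in every pmatch continuation: matched fields can now be read with plain dot notation (or destructured), while `extract(...).in(...)` remains only as a deprecated shim. A minimal sketch of the new call shape, assuming `pstruct`, `pmatch`, `pfn` and `int` are re-exported from the package entry point; the `Shape` definition is hypothetical:

    const Shape = pstruct({
        Circle: { radius: int },
        Square: { side:   int }
    });

    const size = pfn([ Shape.type ], int )
    ( shape =>
        pmatch( shape )
        .onCircle( ({ radius }) => radius )
        .onSquare( ({ side }) => side )
    );

    // old (deprecated) equivalent of the first branch:
    // .onCircle( fields => fields.extract("radius").in( ({ radius }) => radius ) )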

@@ -41,3 +41,3 @@ "use strict";

var papp_1 = require("../../../lib/papp.js");
var punsafeConvertType_1 = require("../../../lib/punsafeConvertType/index.js");
var addUtilityForType_1 = require("../../../lib/addUtilityForType.js");
var plam_1 = require("../../../lib/plam.js");

@@ -58,2 +58,3 @@ var types_1 = require("../../../type_system/types.js");

var old_1 = require("../../../lib/plet/old.js");
var minimal_1 = require("../../../lib/plet/minimal.js");
var elemAtCache = {};

@@ -82,40 +83,34 @@ function getElemAtTerm(n) {

exports.getElemAtTerm = getElemAtTerm;
function getExtractedFieldsExpr(fieldsData, ctorDef, allFIndexes, expr, partialExtracted) {
var allFieldsNames = Object.keys(ctorDef);
if (allFIndexes.length === 0) {
return expr(partialExtracted);
function getStructInstance(fieldsList, ctorDef) {
var instance = {};
var fieldNames = Object.keys(ctorDef);
for (var i = 0; i < fieldNames.length; i++) {
var fieldName = fieldNames[i];
Object.defineProperty(instance, fieldName, {
value: (0, addUtilityForType_1.addUtilityForType)(ctorDef[fieldName])((0, minimal_1._plet)((0, fromData_minimal_1._fromData)(ctorDef[fieldName])(getElemAtTerm(i).$(fieldsList)))),
writable: false,
enumerable: true,
configurable: false
});
}
var idx = allFIndexes[0];
var fieldType = ctorDef[allFieldsNames[idx]];
return (0, old_1._old_plet)(
// needs to be minimal
// it MUST not add the fieldType utilities
// it will add the `withAllPairElemsAsData` version utilities
(0, fromData_minimal_1._fromData)(fieldType)(getElemAtTerm(idx).$(fieldsData))).in(function (value) {
ObjectUtils_1.default.defineNormalProperty(partialExtracted, allFieldsNames[idx], (0, punsafeConvertType_1.punsafeConvertType)(value, (0, type_system_1.withAllPairElemsAsData)(fieldType)));
return getExtractedFieldsExpr(fieldsData, ctorDef, allFIndexes.slice(1), expr, partialExtracted);
/**
* to remove in 1.0.0
* @deprecated
*/
Object.defineProperty(instance, "extract", {
value: function () {
var _fields = [];
for (var _i = 0; _i < arguments.length; _i++) {
_fields[_i] = arguments[_i];
}
return {
in: function (cb) { return cb(instance); }
};
},
writable: false,
enumerable: true,
configurable: false
});
return instance;
}
function defineExtract(fieldsList, ctorDef) {
var fieldsNames = Object.keys(ctorDef);
// basically cloning;
var _fieldsList = fieldsList;
return ObjectUtils_1.default.defineReadOnlyProperty(_fieldsList, "extract", function () {
var fields = [];
for (var _i = 0; _i < arguments.length; _i++) {
fields[_i] = arguments[_i];
}
var fieldsIndicies = fields
.map(function (f) { return fieldsNames.findIndex(function (fName) { return fName === f; }); })
// ignore fields not present in the definion or duplicates
.filter(function (idx, i, thisArr) { return idx >= 0 && thisArr.indexOf(idx) === i; })
.sort(function (a, b) { return a < b ? -1 : (a === b ? 0 : 1); });
return ObjectUtils_1.default.defineReadOnlyProperty({}, "in", function (expr) {
if (fieldsIndicies.length === 0)
return expr({});
var res = getExtractedFieldsExpr(_fieldsList, ctorDef, fieldsIndicies, expr, {});
return res;
});
});
}
var matchNCtorsIdxsCache = {};

@@ -173,3 +168,3 @@ function matchNCtorsIdxs(_n, returnT) {

function getReturnTypeFromContinuation(cont, ctorDef) {
return cont(defineExtract(
return cont(
// mock the fields

@@ -180,3 +175,3 @@ // we are not really interested in the result here; only in the type

.fill(new DataI_1.DataI(0)));
}), ctorDef)).type;
})).type;
}

@@ -205,9 +200,7 @@ /**

}
return (0, papp_1.papp)((0, papp_1.papp)((0, matchSingleCtorStruct_1.matchSingleCtorStruct)(cont_1.type[2]), structData), cont_1);
return (0, papp_1.papp)(cont_1, (0, papp_1.papp)(matchSingleCtorStruct_1.getFields, structData));
}
var thisCtor_1 = sDef[ctors[0]];
var returnT_1 = getReturnTypeFromContinuation(cont_1, thisCtor_1);
return (0, papp_1.papp)((0, papp_1.papp)((0, matchSingleCtorStruct_1.matchSingleCtorStruct)(returnT_1), structData), (0, plam_1.plam)((0, types_1.list)(types_1.data), returnT_1)(function (fieldsListData) {
return cont_1(defineExtract(fieldsListData, thisCtor_1));
}));
var thisCtorDef_1 = sDef[ctors[0]];
var returnT_1 = getReturnTypeFromContinuation(cont_1, thisCtorDef_1);
return (0, papp_1.papp)((0, plam_1.plam)((0, types_1.list)(types_1.data), returnT_1)(cont_1), (0, papp_1.papp)(matchSingleCtorStruct_1.getFields, structData));
}

@@ -223,18 +216,12 @@ // multiple ctors struct case

})) !== null && _a !== void 0 ? _a : ctorCbs[0];
var thisCtor = sDef[Object.keys(sDef)[ctorIdx]];
var thisCtorDef = sDef[Object.keys(sDef)[ctorIdx]];
var returnT = cont instanceof Term_1.Term ?
cont.type[2] :
getReturnTypeFromContinuation(cont, thisCtor);
getReturnTypeFromContinuation(cont, thisCtorDef);
var result = (0, papp_1.papp)(matchNCtorsIdxs(ctors.length, returnT), structData);
var _loop_1 = function (i) {
for (var i = ctors.length - 1; i >= 0; i--) {
var thisCont = ctorCbs[i];
var thisCtor_2 = sDef[ctors[i]];
result = (0, papp_1.papp)(result, thisCont instanceof Term_1.Term ?
thisCont :
(0, plam_1.plam)((0, types_1.list)(types_1.data), returnT !== null && returnT !== void 0 ? returnT : getReturnTypeFromContinuation(thisCont, thisCtor_2))(function (fieldsListData) {
return thisCont(defineExtract(fieldsListData, thisCtor_2));
}));
};
for (var i = ctors.length - 1; i >= 0; i--) {
_loop_1(i);
var thisCtorDef_2 = sDef[ctors[i]];
result = (0, papp_1.papp)(result, thisCont instanceof Term_1.Term ? thisCont :
(0, plam_1.plam)((0, types_1.list)(types_1.data), returnT !== null && returnT !== void 0 ? returnT : getReturnTypeFromContinuation(thisCont, thisCtorDef_2))(thisCont));
}

@@ -265,3 +252,4 @@ return result;

return {};
// last permutation reurns the expression
// last permutation
// returns the final expression
if (missingCtors.length === 1) {

@@ -272,5 +260,9 @@ var ctor_1 = missingCtors[0];

var result = {};
result = ObjectUtils_1.default.defineReadOnlyProperty(result, matcher, function (cb) {
ObjectUtils_1.default.defineReadOnlyProperty(result, matcher, function (cb) {
// build the `StructInstance` input from the struct fields
var callback = function (mathcedCtorsFields) {
return cb(getStructInstance(mathcedCtorsFields, sDef[ctor_1]));
};
// same stuff of previous ctors
ctorCbs[idx_1] = cb;
ctorCbs[idx_1] = callback;
return hoistedMatchCtors(struct, sDef, ctorCbs);

@@ -285,3 +277,3 @@ });

ObjectUtils_1.default.defineReadOnlyProperty(remainingCtorsObj, "on" + (0, capitalize_1.capitalize)(ctor), function (cb) {
ctorCbs[idx] = cb;
ctorCbs[idx] = function (fieldsList) { return cb(getStructInstance(fieldsList, sDef[ctor])); };
return permutations(missingCtors.filter(function (c) { return c !== ctor; }));

@@ -301,3 +293,8 @@ });

}
var res = hoistedMatchCtors(struct, sDef, ctorCbs);
var res = hoistedMatchCtors(
/*
Argument of type 'Term<PStruct<SDef>>' is not assignable to parameter of type 'Term<PStruct<StructDefinition>>'.
Type 'PStruct<SDef>' is not assignable to type 'PStruct<StructDefinition>'.
*/
struct, sDef, ctorCbs);
return res;

@@ -304,0 +301,0 @@ });

@@ -9,2 +9,3 @@ import type { TermFn } from "../PFn/index.js";

import { TermStruct } from "../../lib/std/UtilityTerms/TermStruct.js";
import { PType } from "../../PType/index.js";
/**

@@ -20,3 +21,3 @@ * intermediate class useful to reconize structs form primitives

export type StructInstanceAsData<SCtorDef extends StructCtorDef> = {
[Field in keyof SCtorDef]: Term<PAsData<ToPType<SCtorDef[Field]>>>;
[Field in keyof SCtorDef]: Term<PAsData<any>> | Term<PStruct<any>> | Term<PData> | Term<undefined & PType>;
};

@@ -23,0 +24,0 @@ export type PStruct<SDef extends StructDefinition> = {

@@ -19,3 +19,3 @@ import { UPLCVar } from "../UPLCTerms/UPLCVar/index.js";

*/
export declare function isUPLCTerm(t: any): t is UPLCTerm;
export declare function isUPLCTerm(t: object): t is UPLCTerm;
/**

@@ -22,0 +22,0 @@ * **_O(n)_**

@@ -222,3 +222,3 @@ "use strict";

if (t instanceof Lambda_1.Lambda) {
return "\n".concat(indentStr.repeat(depth), "(lam ").concat(getVarNameForDbn(dbn), " ").concat(loop(t.body, dbn + 1, depth + 1)).concat(indent, ")");
return "".concat(indent, "(lam ").concat(getVarNameForDbn(dbn), " ").concat(loop(t.body, dbn + 1, depth + 1)).concat(indent, ")");
}

@@ -225,0 +225,0 @@ if (t instanceof Application_1.Application)

{
"name": "@harmoniclabs/plu-ts",
"version": "0.3.0-dev0",
"version": "0.3.0-dev1",
"description": "An embedded DSL for Cardano smart contracts creation coupled with a library for Cardano transactions, all in Typescript",

@@ -20,2 +20,4 @@ "main": "./dist/index.js",

"build": "tsc --project ./tsconfig.json && tsc-alias -p ./tsconfig.json",
"buidl": "npm run build",
"prerelease": "npm run test && npm run build",
"start": "npm-run-all --parallel build:watch run:watch"

@@ -22,0 +24,0 @@ },
