babelute - npm Package Compare versions

Comparing version 0.5.0 to 0.5.1


CHANGELOG.md

@@ -5,2 +5,16 @@ # Change Log

<a name="0.5.1"></a>
## [0.5.1](https://github.com/nomocas/babelute/compare/v0.5.0...v0.5.1) (2017-04-30)
### Bug Fixes
* **build:** update dev dependencies ([8963497](https://github.com/nomocas/babelute/commit/8963497))
* **facade-pragmas:** remove facade initializer and finalise and clean with more test ([b563383](https://github.com/nomocas/babelute/commit/b563383))
* **index:** remove createFacadeInitializer from index.js ([21aa722](https://github.com/nomocas/babelute/commit/21aa722))
* **package:** update last dev dependencies ([fadce95](https://github.com/nomocas/babelute/commit/fadce95))
* **translation:** add args translation ([c132c16](https://github.com/nomocas/babelute/commit/c132c16))
<a name="0.5.0"></a>

@@ -7,0 +21,0 @@ # [0.5.0](https://github.com/nomocas/babelute/compare/v0.4.4...v0.5.0) (2017-04-18)


dist/bundles/index.js

@@ -754,2 +754,12 @@ (function (global, factory) {

* translation through lexicon (already delcared in Babelute proto)
* @TODO: translation and each and if
* each :
* .each(collec, handler)
* translated to
* .each(collec, wrap(handler, translationInfos))
*
* ==> should translate automatically output from handler
* if ==> same things : wrap handlers
* ==> while translating : when lexem.name === "each" (or "if") (should always be present in target lexicon)
* ==> apply wrapping
*/

@@ -764,7 +774,15 @@

if (!lexicon) return null;
var args = translateArgs(lexem.args, lexicon, firstLevel);
var b = new (firstLevel ? lexicon.FirstLevel : lexicon.Atomic)();
return b[lexem.name] && b[lexem.name].apply(b, toConsumableArray(lexem.args));
return b[lexem.name] && b[lexem.name].apply(b, toConsumableArray(args));
});
};
function translateArgs(args, lexicon, firstLevel) {
var result = [];
for (var i = 0, len = args.length; i < len; ++i) {
if (args[i] && args[i].__babelute__) result.push(args[i]._translateLexemsThrough(lexicon, firstLevel));else result.push(args[i]);
}return result;
}
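The substantive change in this hunk is the new translateArgs helper: lexem arguments that are themselves Babelute sentences are now translated through the target lexicon too, instead of being copied over untouched. A minimal sketch of what that means in practice, using only API visible elsewhere in this compare (the lexicon names 'source' and 'target' and their words are illustrative, not from the package):

// Sketch only: nested sentences passed as lexem arguments are now translated.
const babelute = require('babelute');

const source = babelute.createLexicon('source');
source.addAtoms(['box', 'label']);

const target = babelute.createLexicon('target');
target.addAtoms(['box', 'label']);

// a sentence whose `box` lexem receives another sentence as argument
const sentence = source.initializer().box(source.initializer().label('hi'));

// 0.5.0 kept the inner `label('hi')` sentence bound to the 'source' lexicon;
// 0.5.1 runs arguments through translateArgs, so the inner sentence is
// rewritten against 'target' as well.
const translated = sentence._translateLexemsThrough(target);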
/**

@@ -953,4 +971,3 @@ * _use handeling

*/
function FacadePragmatics(targets) {
var pragmas = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
function FacadePragmatics(targets, pragmas) {
classCallCheck(this, FacadePragmatics);

@@ -980,3 +997,4 @@ return possibleConstructorReturn(this, (FacadePragmatics.__proto__ || Object.getPrototypeOf(FacadePragmatics)).call(this, targets, pragmas));

templ = itemHandler(item, i);
if (templ) this.$output(subject, templ, percolator);
if (!templ) throw new Error('.each function should return a sentence.');
this.$output(subject, templ, percolator);
}
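The other behavioural change shown here: the .each facade pragma no longer silently skips items whose handler returns nothing; it now throws. A minimal sketch mirroring the test added at the bottom of this compare (the 'test' lexicon and the each word are the test's own illustrative names):

// Sketch only: the facade .each pragma now requires the handler to return a sentence.
const babelute = require('babelute');

const Dsl = babelute.Babelute.extends(babelute.Babelute, {
    each(collec, handler) {
        return this._append('test', 'each', [collec, handler]);
    }
});

const pragmas = babelute.createFacadePragmatics({ test: true }, {});

// 0.5.0 ignored items whose handler returned nothing;
// 0.5.1 throws Error('.each function should return a sentence.')
pragmas.$output({}, new Dsl().each(['a', 'b'], function() { /* returns nothing */ }));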

@@ -1026,52 +1044,2 @@ }

/**
* create a facade-ready-to-run initializer function.
* @param {Lexicon} lexicon the lexicon from where take the api
* @param {Object} pragmatics the pragmatics object where to find interpretation method to fire immediatly
* @return {Function} the facade initializer function
* @example
*
* import babelute from 'babelute';
* const myLexicon = babelute.createLexicon('my-lexicon');
* myLexicon.addAtoms(['foo', 'bar']);
*
* const myPragmas = babelute.createFacadePragmatics({
* 'my-lexicon':true
* }, {
* foo(subject, args, percolator){
* // do something
* },
* bar(subject, args, percolator){
* // do something
* }
* });
*
* const mlp = babelute.createFacadeInitializer(myLexicon, myPragmas);
*
* mlp(mySubject).foo(...).bar(...); // apply pragmas immediatly on subject through lexicon api's
*
*/
function createFacadeInitializer(lexicon, pragmatics) {
var Facade = function Facade(subject) {
var percolator = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
lexicon.Atomic.call(this);
this._subject = subject;
this._percolator = percolator;
};
Facade.prototype = Object.create(lexicon.Atomic.prototype);
Facade.prototype.constructor = Facade;
Facade.prototype._lexicon = null;
Facade.prototype._append = function (lexiconName, name, args) {
if ((!pragmatics._targets || pragmatics._targets[lexiconName]) && pragmatics[name]) pragmatics[name](this._subject, args, this._percolator);
return this;
};
return function (subject) {
var percolator = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
return new Facade(subject, percolator);
};
}
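createFacadeInitializer (and its export) is removed in 0.5.1 from the bundle, the compiled lib and the source. The facade behaviour itself stays reachable through createFacadePragmatics: build a sentence from a lexicon, then interpret it with $output. A minimal sketch, mirroring the 'lexicon + pragmatics with percolator' test added at the bottom of this compare (not an official migration guide):

// Sketch only: the two-step flow that remains after createFacadeInitializer is dropped.
const babelute = require('babelute');

const lexicon = babelute.createLexicon('test');
lexicon.addAtoms(['zoo']);

const pragmas = babelute.createFacadePragmatics({ test: true }, {
    zoo(subject, args /*, percolator */) {
        subject.zoo = args[0];
    }
});

const sentence = lexicon.initializer().zoo(true);
const result = pragmas.$output({}, sentence); // -> { zoo: true }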
/**
* create a FacadePragmatics instance

@@ -1108,3 +1076,2 @@ * @param {Object} targets the pragmatics targets DSL

createPragmatics: createPragmatics,
createFacadeInitializer: createFacadeInitializer,
createFacadePragmatics: createFacadePragmatics,

@@ -1111,0 +1078,0 @@ init: init,

@@ -1,1 +0,1 @@

(function(e,t){typeof exports==="object"&&typeof module!=="undefined"?module.exports=t():typeof define==="function"&&define.amd?define(t):e.Babelute=t()})(this,function(){"use strict";var e=function(e,t){if(!(e instanceof t)){throw new TypeError("Cannot call a class as a function")}};var t=function(){function e(e,t){for(var n=0;n<t.length;n++){var i=t[n];i.enumerable=i.enumerable||false;i.configurable=true;if("value"in i)i.writable=true;Object.defineProperty(e,i.key,i)}}return function(t,n,i){if(n)e(t.prototype,n);if(i)e(t,i);return t}}();var n=function(e,t){if(typeof t!=="function"&&t!==null){throw new TypeError("Super expression must either be null or a function, not "+typeof t)}e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,enumerable:false,writable:true,configurable:true}});if(t)Object.setPrototypeOf?Object.setPrototypeOf(e,t):e.__proto__=t};var i=function(e,t){if(!e){throw new ReferenceError("this hasn't been initialised - super() hasn't been called")}return t&&(typeof t==="object"||typeof t==="function")?t:e};var r=function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t<e.length;t++)n[t]=e[t];return n}else{return Array.from(e)}};var o=function t(n,i,r){e(this,t);this.lexicon=n;this.name=i;this.args=r};var u=function(){function n(){var t=arguments.length>0&&arguments[0]!==undefined?arguments[0]:null;e(this,n);this._lexems=t||[];this.__babelute__=true}t(n,[{key:"_append",value:function e(t,n,i){this._lexems.push(new o(t,n,i));return this}},{key:"_if",value:function e(t,n){var i=arguments.length>2&&arguments[2]!==undefined?arguments[2]:null;if(t)this._lexems=this._lexems.concat(n._lexems);else if(i)this._lexems=this._lexems.concat(i._lexems);return this}},{key:"_each",value:function e(t,n){var i=this;if(t)t.forEach(function(e,t){var r=n(e,t);i._lexems.push.apply(i._lexems,r._lexems)});return this}},{key:"_use",value:function e(t){}},{key:"_lexicon",value:function e(t){}},{key:"_translate",value:function e(t){return t(this)}},{key:"_translateLexems",value:function e(t){return this._translate(function(e){var i=new n;e._lexems.forEach(function(e){return i._use(t(e))});return i})}},{key:"_translateLexemsThrough",value:function e(t){var n=arguments.length>1&&arguments[1]!==undefined?arguments[1]:false}}],[{key:"extends",value:function e(t){var n=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;var i=function e(n){t.call(this,n)};i.prototype=Object.create(t.prototype);i.prototype.constructor=i;for(var r in n){i.prototype[r]=n[r]}return i}}]);return n}();function a(e){return JSON.parse(e,function(e,t){if(t&&t.__babelute__)return new u(t._lexems.map(function(e){return new o(e.lexicon,e.name,e.args)}));return t})}var l=function(r){n(u,r);function u(t){e(this,u);var n=i(this,(u.__proto__||Object.getPrototypeOf(u)).call(this,t));n.__first_level_babelute__=true;return n}t(u,null,[{key:"getFirstLevelMethod",value:function e(t,n){return function(){for(var e=arguments.length,i=Array(e),r=0;r<e;r++){i[r]=arguments[r]}this._lexems.push(new o(t,n,i));return this}}}]);return u}(u);var s=function(){function n(){e(this,n)}t(n,null,[{key:"extends",value:function e(t){var n=function e(){};n.prototype=Object.create(t.prototype);n.prototype.constructor=n;return n}}]);return n}();function c(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;var n=e.Initializer=t?s.extends(t):s;e.initializer=new n;e.initializer._empty=function(){return new e};e.initializer.BabeluteClass=e;Object.keys(e).forEach(function(e){f(n,e)});return e.initializer}function 
f(e,t){e.prototype[t]=function(){return this.BabeluteClass.prototype[t].apply(new this.BabeluteClass,arguments)}}["_use","_each","_if","_append","_lexicon"].forEach(function(e){f(s,e)});var h={};var p=function(){function n(t,i){var r=this;e(this,n);this.parent=i;i=i||{};this.name=t;this.Atomic=y(i.Atomic||u);this.FirstLevel=y(i.FirstLevel||l);this.SecondLevel=u.extends(i.SecondLevel||u);this.secondLevel=new this.SecondLevel;if(i.Atomic)Object.keys(i.Atomic.initializer).forEach(function(e){f(r.Atomic.Initializer,e);f(r.FirstLevel.Initializer,e)})}t(n,[{key:"addAtoms",value:function e(t){var n=this;t.forEach(function(e){return v(n,e)});return this}},{key:"addCompounds",value:function e(t){var n=this;var i=t(this.Atomic.initializer);for(var r in i){this.Atomic.prototype[r]=i[r]}var o=t(this.FirstLevel.initializer);for(var u in o){this.SecondLevel.prototype[u]=o[u]}Object.keys(i).forEach(function(e){n.FirstLevel.prototype[e]=l.getFirstLevelMethod(n.name,e);f(n.Atomic.Initializer,e);f(n.FirstLevel.Initializer,e)});return this}},{key:"addAliases",value:function e(t){var n=this;Object.keys(t).forEach(function(e){n.Atomic.prototype[e]=n.FirstLevel.prototype[e]=n.SecondLevel.prototype[e]=t[e];f(n.Atomic.Initializer,e);f(n.FirstLevel.Initializer,e)});return this}},{key:"use",value:function e(t,n,i,r){var o=r?this.FirstLevel.instance:this.Atomic.instance;if(!o[n])throw new Error("Babelute ("+this.name+") : method not found : "+n);o[n].apply(t,i)}},{key:"initializer",value:function e(t){return t?this.FirstLevel.initializer:this.Atomic.initializer}},{key:"createDialect",value:function e(t){return new n(t,this)}}]);return n}();function v(e,t){e.Atomic.prototype[t]=e.FirstLevel.prototype[t]=e.SecondLevel.prototype[t]=l.getFirstLevelMethod(e.name,t);f(e.Atomic.Initializer,t);f(e.FirstLevel.Initializer,t)}function y(e){var t=u.extends(e);c(t,e.Initializer);t.instance=new t;return t}function d(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;return new p(e,t)}function m(e){var t=h[e];if(!t)throw new Error("lexicon not found : "+e);return t}function _(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;h[t||e.name]=e}u.prototype._lexicon=function(e){return new(m(e).Atomic)(this._lexems)};l.prototype._lexicon=function(e){return new(m(e).FirstLevel)(this._lexems)};u.prototype._translateLexemsThrough=function(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:false;var n=e instanceof p?null:e;return this._translateLexems(function(i){if(n)e=n[i.lexicon];if(!e)return null;var o=new(t?e.FirstLevel:e.Atomic);return o[i.name]&&o[i.name].apply(o,r(i.args))})};u.prototype._use=function(e){for(var t=arguments.length,n=Array(t>1?t-1:0),i=1;i<t;i++){n[i-1]=arguments[i]}return e?g(this,e,n,false):this};l.prototype._use=function(e){return e?g(this,e,[].slice.call(arguments,1),true):this};function g(e,t,n,i){if(typeof t==="string"){var r=t.split(":");m(r[0]).use(e,r[1],n,i)}else e._lexems=e._lexems.concat(t._lexems);return e}function x(e,t){if(e)return new(m(e)[t?"FirstLevel":"Atomic"]);else if(t)return new l;return new u}function b(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;t=t||m(e.lexicon);return t.secondLevel[e.name].apply(new t.FirstLevel,e.args)}function L(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;t=t||m(e.lexicon);return t.Atomic.instance[e.name].apply(new t.Atomic,e.args)}function w(e,t){if(!t)return m(e).Atomic.initializer;return m(e).FirstLevel.initializer}var A=function(){function 
n(t,i){e(this,n);this._targets=t;if(i)this.addPragmas(i)}t(n,[{key:"addPragmas",value:function e(t){for(var n in t){this[n]=t[n]}}},{key:"$output",value:function e(){throw new Error("pragmatics.$output should be implemented in subclasses")}}]);return n}();function k(){var e=arguments.length>0&&arguments[0]!==undefined?arguments[0]:{};var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:{};return new A(e,t)}var z=function(r){n(o,r);function o(t){var n=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;e(this,o);return i(this,(o.__proto__||Object.getPrototypeOf(o)).call(this,t,n))}t(o,[{key:"each",value:function e(t,n,i){var r=n[0],o=n[1];if(r&&r.length)for(var u=0,a=r.length,l,s;u<a;++u){l=r[u];s=o(l,u);if(s)this.$output(t,s,i)}}},{key:"if",value:function e(t,n,i){if(n[0])this.$output(t,n[1],i);else if(n[2])this.$output(t,n[2],i)}},{key:"$output",value:function e(t,n){var i=arguments.length>2&&arguments[2]!==undefined?arguments[2]:null;for(var r=0,o,u=n._lexems.length;r<u;++r){o=n._lexems[r];if(this._targets[o.lexicon]&&this[o.name])this[o.name](t,o.args,i)}return t}}]);return o}(A);function F(e,t){var n=function t(n){var i=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;e.Atomic.call(this);this._subject=n;this._percolator=i};n.prototype=Object.create(e.Atomic.prototype);n.prototype.constructor=n;n.prototype._lexicon=null;n.prototype._append=function(e,n,i){if((!t._targets||t._targets[e])&&t[n])t[n](this._subject,i,this._percolator);return this};return function(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;return new n(e,t)}}function O(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;return new z(e,t)}var j={createLexicon:d,createPragmatics:k,createFacadeInitializer:F,createFacadePragmatics:O,init:x,initializer:w,getLexicon:m,registerLexicon:_,developOneLevel:b,developToAtoms:L,fromJSON:a,Babelute:u,Lexem:o,FirstLevel:l,Pragmatics:A,FacadePragmatics:z,Lexicon:p,lexicons:h};return j});
(function(e,t){typeof exports==="object"&&typeof module!=="undefined"?module.exports=t():typeof define==="function"&&define.amd?define(t):e.Babelute=t()})(this,function(){"use strict";var e=function(e,t){if(!(e instanceof t)){throw new TypeError("Cannot call a class as a function")}};var t=function(){function e(e,t){for(var n=0;n<t.length;n++){var r=t[n];r.enumerable=r.enumerable||false;r.configurable=true;if("value"in r)r.writable=true;Object.defineProperty(e,r.key,r)}}return function(t,n,r){if(n)e(t.prototype,n);if(r)e(t,r);return t}}();var n=function(e,t){if(typeof t!=="function"&&t!==null){throw new TypeError("Super expression must either be null or a function, not "+typeof t)}e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,enumerable:false,writable:true,configurable:true}});if(t)Object.setPrototypeOf?Object.setPrototypeOf(e,t):e.__proto__=t};var r=function(e,t){if(!e){throw new ReferenceError("this hasn't been initialised - super() hasn't been called")}return t&&(typeof t==="object"||typeof t==="function")?t:e};var i=function(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t<e.length;t++)n[t]=e[t];return n}else{return Array.from(e)}};var o=function t(n,r,i){e(this,t);this.lexicon=n;this.name=r;this.args=i};var u=function(){function n(){var t=arguments.length>0&&arguments[0]!==undefined?arguments[0]:null;e(this,n);this._lexems=t||[];this.__babelute__=true}t(n,[{key:"_append",value:function e(t,n,r){this._lexems.push(new o(t,n,r));return this}},{key:"_if",value:function e(t,n){var r=arguments.length>2&&arguments[2]!==undefined?arguments[2]:null;if(t)this._lexems=this._lexems.concat(n._lexems);else if(r)this._lexems=this._lexems.concat(r._lexems);return this}},{key:"_each",value:function e(t,n){var r=this;if(t)t.forEach(function(e,t){var i=n(e,t);r._lexems.push.apply(r._lexems,i._lexems)});return this}},{key:"_use",value:function e(t){}},{key:"_lexicon",value:function e(t){}},{key:"_translate",value:function e(t){return t(this)}},{key:"_translateLexems",value:function e(t){return this._translate(function(e){var r=new n;e._lexems.forEach(function(e){return r._use(t(e))});return r})}},{key:"_translateLexemsThrough",value:function e(t){var n=arguments.length>1&&arguments[1]!==undefined?arguments[1]:false}}],[{key:"extends",value:function e(t){var n=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;var r=function e(n){t.call(this,n)};r.prototype=Object.create(t.prototype);r.prototype.constructor=r;for(var i in n){r.prototype[i]=n[i]}return r}}]);return n}();function a(e){return JSON.parse(e,function(e,t){if(t&&t.__babelute__)return new u(t._lexems.map(function(e){return new o(e.lexicon,e.name,e.args)}));return t})}var l=function(i){n(u,i);function u(t){e(this,u);var n=r(this,(u.__proto__||Object.getPrototypeOf(u)).call(this,t));n.__first_level_babelute__=true;return n}t(u,null,[{key:"getFirstLevelMethod",value:function e(t,n){return function(){for(var e=arguments.length,r=Array(e),i=0;i<e;i++){r[i]=arguments[i]}this._lexems.push(new o(t,n,r));return this}}}]);return u}(u);var s=function(){function n(){e(this,n)}t(n,null,[{key:"extends",value:function e(t){var n=function e(){};n.prototype=Object.create(t.prototype);n.prototype.constructor=n;return n}}]);return n}();function c(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;var n=e.Initializer=t?s.extends(t):s;e.initializer=new n;e.initializer._empty=function(){return new e};e.initializer.BabeluteClass=e;Object.keys(e).forEach(function(e){f(n,e)});return e.initializer}function 
f(e,t){e.prototype[t]=function(){return this.BabeluteClass.prototype[t].apply(new this.BabeluteClass,arguments)}}["_use","_each","_if","_append","_lexicon"].forEach(function(e){f(s,e)});var h={};var v=function(){function n(t,r){var i=this;e(this,n);this.parent=r;r=r||{};this.name=t;this.Atomic=m(r.Atomic||u);this.FirstLevel=m(r.FirstLevel||l);this.SecondLevel=u.extends(r.SecondLevel||u);this.secondLevel=new this.SecondLevel;if(r.Atomic)Object.keys(r.Atomic.initializer).forEach(function(e){f(i.Atomic.Initializer,e);f(i.FirstLevel.Initializer,e)})}t(n,[{key:"addAtoms",value:function e(t){var n=this;t.forEach(function(e){return p(n,e)});return this}},{key:"addCompounds",value:function e(t){var n=this;var r=t(this.Atomic.initializer);for(var i in r){this.Atomic.prototype[i]=r[i]}var o=t(this.FirstLevel.initializer);for(var u in o){this.SecondLevel.prototype[u]=o[u]}Object.keys(r).forEach(function(e){n.FirstLevel.prototype[e]=l.getFirstLevelMethod(n.name,e);f(n.Atomic.Initializer,e);f(n.FirstLevel.Initializer,e)});return this}},{key:"addAliases",value:function e(t){var n=this;Object.keys(t).forEach(function(e){n.Atomic.prototype[e]=n.FirstLevel.prototype[e]=n.SecondLevel.prototype[e]=t[e];f(n.Atomic.Initializer,e);f(n.FirstLevel.Initializer,e)});return this}},{key:"use",value:function e(t,n,r,i){var o=i?this.FirstLevel.instance:this.Atomic.instance;if(!o[n])throw new Error("Babelute ("+this.name+") : method not found : "+n);o[n].apply(t,r)}},{key:"initializer",value:function e(t){return t?this.FirstLevel.initializer:this.Atomic.initializer}},{key:"createDialect",value:function e(t){return new n(t,this)}}]);return n}();function p(e,t){e.Atomic.prototype[t]=e.FirstLevel.prototype[t]=e.SecondLevel.prototype[t]=l.getFirstLevelMethod(e.name,t);f(e.Atomic.Initializer,t);f(e.FirstLevel.Initializer,t)}function m(e){var t=u.extends(e);c(t,e.Initializer);t.instance=new t;return t}function y(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;return new v(e,t)}function d(e){var t=h[e];if(!t)throw new Error("lexicon not found : "+e);return t}function _(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;h[t||e.name]=e}u.prototype._lexicon=function(e){return new(d(e).Atomic)(this._lexems)};l.prototype._lexicon=function(e){return new(d(e).FirstLevel)(this._lexems)};u.prototype._translateLexemsThrough=function(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:false;var n=e instanceof v?null:e;return this._translateLexems(function(r){if(n)e=n[r.lexicon];if(!e)return null;var o=g(r.args,e,t);var u=new(t?e.FirstLevel:e.Atomic);return u[r.name]&&u[r.name].apply(u,i(o))})};function g(e,t,n){var r=[];for(var i=0,o=e.length;i<o;++i){if(e[i]&&e[i].__babelute__)r.push(e[i]._translateLexemsThrough(t,n));else r.push(e[i])}return r}u.prototype._use=function(e){for(var t=arguments.length,n=Array(t>1?t-1:0),r=1;r<t;r++){n[r-1]=arguments[r]}return e?x(this,e,n,false):this};l.prototype._use=function(e){return e?x(this,e,[].slice.call(arguments,1),true):this};function x(e,t,n,r){if(typeof t==="string"){var i=t.split(":");d(i[0]).use(e,i[1],n,r)}else e._lexems=e._lexems.concat(t._lexems);return e}function b(e,t){if(e)return new(d(e)[t?"FirstLevel":"Atomic"]);else if(t)return new l;return new u}function L(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;t=t||d(e.lexicon);return t.secondLevel[e.name].apply(new t.FirstLevel,e.args)}function w(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;t=t||d(e.lexicon);return 
t.Atomic.instance[e.name].apply(new t.Atomic,e.args)}function A(e,t){if(!t)return d(e).Atomic.initializer;return d(e).FirstLevel.initializer}var k=function(){function n(t,r){e(this,n);this._targets=t;if(r)this.addPragmas(r)}t(n,[{key:"addPragmas",value:function e(t){for(var n in t){this[n]=t[n]}}},{key:"$output",value:function e(){throw new Error("pragmatics.$output should be implemented in subclasses")}}]);return n}();function z(){var e=arguments.length>0&&arguments[0]!==undefined?arguments[0]:{};var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:{};return new k(e,t)}var F=function(i){n(o,i);function o(t,n){e(this,o);return r(this,(o.__proto__||Object.getPrototypeOf(o)).call(this,t,n))}t(o,[{key:"each",value:function e(t,n,r){var i=n[0],o=n[1];if(i&&i.length)for(var u=0,a=i.length,l,s;u<a;++u){l=i[u];s=o(l,u);if(!s)throw new Error(".each function should return a sentence.");this.$output(t,s,r)}}},{key:"if",value:function e(t,n,r){if(n[0])this.$output(t,n[1],r);else if(n[2])this.$output(t,n[2],r)}},{key:"$output",value:function e(t,n){var r=arguments.length>2&&arguments[2]!==undefined?arguments[2]:null;for(var i=0,o,u=n._lexems.length;i<u;++i){o=n._lexems[i];if(this._targets[o.lexicon]&&this[o.name])this[o.name](t,o.args,r)}return t}}]);return o}(k);function O(e){var t=arguments.length>1&&arguments[1]!==undefined?arguments[1]:null;return new F(e,t)}var E={createLexicon:y,createPragmatics:z,createFacadePragmatics:O,init:b,initializer:A,getLexicon:d,registerLexicon:_,developOneLevel:L,developToAtoms:w,fromJSON:a,Babelute:u,Lexem:o,FirstLevel:l,Pragmatics:k,FacadePragmatics:F,Lexicon:v,lexicons:h};return E});

@@ -24,3 +24,2 @@ 'use strict';

createPragmatics: _pragmaticsCore.createPragmatics,
createFacadeInitializer: _facadePragmatics.createFacadeInitializer,
createFacadePragmatics: _facadePragmatics.createFacadePragmatics,

@@ -27,0 +26,0 @@ init: _lexicon.init,

@@ -293,2 +293,12 @@ 'use strict';

* translation through lexicon (already delcared in Babelute proto)
* @TODO: translation and each and if
* each :
* .each(collec, handler)
* translated to
* .each(collec, wrap(handler, translationInfos))
*
* ==> should translate automatically output from handler
* if ==> same things : wrap handlers
* ==> while translating : when lexem.name === "each" (or "if") (should always be present in target lexicon)
* ==> apply wrapping
*/

@@ -303,7 +313,15 @@

if (!lexicon) return null;
var args = translateArgs(lexem.args, lexicon, firstLevel);
var b = new (firstLevel ? lexicon.FirstLevel : lexicon.Atomic)();
return b[lexem.name] && b[lexem.name].apply(b, _toConsumableArray(lexem.args));
return b[lexem.name] && b[lexem.name].apply(b, _toConsumableArray(args));
});
};
function translateArgs(args, lexicon, firstLevel) {
var result = [];
for (var i = 0, len = args.length; i < len; ++i) {
if (args[i] && args[i].__babelute__) result.push(args[i]._translateLexemsThrough(lexicon, firstLevel));else result.push(args[i]);
}return result;
}
/**

@@ -310,0 +328,0 @@ * _use handeling

@@ -10,3 +10,2 @@ 'use strict';

exports.createFacadeInitializer = createFacadeInitializer;
exports.createFacadePragmatics = createFacadePragmatics;

@@ -44,5 +43,3 @@

*/
function FacadePragmatics(targets) {
var pragmas = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
function FacadePragmatics(targets, pragmas) {
_classCallCheck(this, FacadePragmatics);

@@ -73,3 +70,4 @@

templ = itemHandler(item, i);
if (templ) this.$output(subject, templ, percolator);
if (!templ) throw new Error('.each function should return a sentence.');
this.$output(subject, templ, percolator);
}

@@ -120,54 +118,2 @@ }

/**
* create a facade-ready-to-run initializer function.
* @param {Lexicon} lexicon the lexicon from where take the api
* @param {Object} pragmatics the pragmatics object where to find interpretation method to fire immediatly
* @return {Function} the facade initializer function
* @example
*
* import babelute from 'babelute';
* const myLexicon = babelute.createLexicon('my-lexicon');
* myLexicon.addAtoms(['foo', 'bar']);
*
* const myPragmas = babelute.createFacadePragmatics({
* 'my-lexicon':true
* }, {
* foo(subject, args, percolator){
* // do something
* },
* bar(subject, args, percolator){
* // do something
* }
* });
*
* const mlp = babelute.createFacadeInitializer(myLexicon, myPragmas);
*
* mlp(mySubject).foo(...).bar(...); // apply pragmas immediatly on subject through lexicon api's
*
*/
function createFacadeInitializer(lexicon, pragmatics) {
var Facade = function Facade(subject) {
var percolator = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
lexicon.Atomic.call(this);
this._subject = subject;
this._percolator = percolator;
};
Facade.prototype = Object.create(lexicon.Atomic.prototype);
Facade.prototype.constructor = Facade;
Facade.prototype._lexicon = null;
Facade.prototype._append = function (lexiconName, name, args) {
if ((!pragmatics._targets || pragmatics._targets[lexiconName]) && pragmatics[name]) pragmatics[name](this._subject, args, this._percolator);
return this;
};
return function (subject) {
var percolator = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
return new Facade(subject, percolator);
};
}
/**
* create a FacadePragmatics instance

@@ -189,2 +135,4 @@ * @param {Object} targets the pragmatics targets DSL

*/
function createFacadePragmatics(targets) {

@@ -191,0 +139,0 @@ var pragmas = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;

@@ -14,3 +14,3 @@ 'use strict';

/**
* Inner-sentence-scopes manager : hold array as stacks for inner-scopes of sentences (if needed). It's only avaiable in pragmatics, while traversing, and is dependent of what pragmatics do. See babelute-html-view as an example of usage.
* Inner-sentence-scopes manager : hold array as stacks for inner-scopes of sentences (if needed). It's only avaiable in pragmatics, while traversing, and is dependent of what pragmatics do. See htsl-view as an example of usage.
*

@@ -23,3 +23,3 @@ * So its a simple helper aimed to (while interpreting sentences) :

*
* For certain output types (as in Babelute-html diffing) it has to be "pure".
* For certain output types (as in htsl diffing) it has to be "pure".
* (in a functional way of thinking).

@@ -31,7 +31,7 @@ * It means that it should contains nothing else

*
* So by example, Babelute-html-view use it to keep track (for managing view's life cycle)
* So by example, htsl-view use it to keep track (for managing view's life cycle)
* of views tree while rendering (with the scope facility prodived here).
*
* Views are inner-sentences objects, and so as needed,
* two render on same babelute-html sentence will provide same output.
* two render on same htsl sentence will provide same output.
*

@@ -38,0 +38,0 @@ * For other DSL and outputs types, it depends what you want and implement, but be sure of what your doing

@@ -891,3 +891,3 @@ # Designing a DSL

(see babelute-html-* for workable examples and current implementations of .if and .each)
(see htsl-* for workable examples and current implementations of .if and .each)

@@ -919,3 +919,3 @@

See [babelute-html-dom-diffing-pragmatics](https://github.com/nomocas/babelute-html-dom-diffing-pragmatics) for real world example...
See [htsl-dom-diffing-pragmatics](https://github.com/nomocas/htsl-dom-diffing-pragmatics) for real world example...

@@ -922,0 +922,0 @@

@@ -115,9 +115,9 @@ # Theory part

Precisely, all GPL are Dialects of the generic DSL for handling variables and objects. Nothing more. As dialects, they all give a particular vision (and particular associated concepts) of the same problem : managing pure atomic Code related objects : string, number, bool, object, arrays, var, const, if, loops, ...
Precisely, all GPL are Dialects of the generic DSL for handling variables and objects. Nothing more. As dialects, they all give a particular vision (and particular associated concepts) of the same problem : managing pure atomic Code related objects : string, number, bool, object, arrays, var, const, functions, if, loops, ...
No one, a part programmers, should take a look at it. That's pure esoteric experts driven language.
No one, a part programmers, should take a look at it. That's pure esoteric expert driven language.
GPLs are called GPL because we pretend to be able to Model everything with those primitives. Not because they effectively and efficiently cover every Domains. And so to high level business related consideratins, we try to answer with low level code related stuffs, or at best (in the sens of less purely technical) with UML.
GPLs are called GPL because we pretend to be able to Model everything with those primitives. Not because they effectively and efficiently cover every Domains (without further abstractions). And so to high level business related considerations, we try to answer with low level code related stuffs, or at best (in the sens of less purely technical) with UML, which is by essence to generic, exactly as GPL are.
DSMM, by filling the gap between fuzzy imprecise customer thinking and pure technical considerations, with __Multi-Level of abstractions__ that fits naturally Business Domains Languages inclusions and structurations - __because it mimics how we think__ - provides simply the final form of programmation.
DSMM, by filling the gap between fuzzy imprecise customer thinking and pure technical considerations, with __Multi-Level of abstractions__ that fits naturally Business Domains Languages inclusions and structurations - __because it mimics how we think and by extension how the world is structured__ - provides simply one of the "final" form of programmation.

@@ -281,14 +281,3 @@ https://martinfowler.com/bliki/OneLanguage.html

### Sentences as Optimal Structure Catcher
Babelute's sentences could be seen (structurally) as a super set of XML with better expressivity, which is so much more readable and so much less verbose.
### Pragmatics
#### Facade Patterns
## Disambiguation

@@ -300,11 +289,18 @@

Babelute allows a big step toward WSD's resolution. For two reasons :
Babelute allows a big step toward WSD's resolution. For 3 reasons :
- Lexicon Second Dimension
- Lexicon Second Dimension :
- aka the fact that a word's lexicon reference is always stored in every lexem.
- So sentences that need to be recognized directly contain the missing information (i.e. how each word should be understood), as sketched just after this list.
- Dry & Context Free Grammar
- babelute sentence syntax focuses on structuration (through parentheses) instead of linearisation as in human languages (where lexems are placed side by side in a sentence, with little punctuation). It makes sentences unambiguous in their structure.
- 4 Dimensions full disambiguation : lexicon, word, arguments, pragmas
- In fact, to get the __FULL__ meaning of a concept, we need to know its effects in the real world (which is the foundation of the pragmatist philosophy of Charles Sanders Peirce). So to get the precise meaning of something we need 4 variables :
- the word used (the model name)
- its arguments (the instance parameters)
- the lexicon (the words' abstract semantics (the models))
- the pragmatics (the words' concrete implementations)
=> 3 dimensions disambiguation : lexicon, word, pragmas
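To make the "Lexicon Second Dimension" point above concrete: every lexem of a sentence records the lexicon that defines its word, next to the word name and the call arguments. A small sketch (the lexicon and word names are illustrative):

// Sketch only: each appended lexem carries { lexicon, name, args }.
const babelute = require('babelute');

const sentence = new babelute.Babelute()
    ._append('my-lexicon', 'foo', [42])
    ._append('my-lexicon', 'bar', ['hello']);

// sentence._lexems[0] holds { lexicon: 'my-lexicon', name: 'foo', args: [42] },
// so a consumer always knows through which lexicon each word must be understood.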
## Sentence's Forms

@@ -327,3 +323,2 @@

"Internal AST" should be seen as an AST lite that focus on the important part of your program.

@@ -334,23 +329,42 @@

### Semantic Network
- https://en.wikipedia.org/wiki/Lexical_semantics#Semantic_networks
### Pragmatics
Babelute does not provide any Semantic Network parser out-of-the-box.
Pragmatics are implementations of lexems. "Implement" here means providing __particular behaviour for a specific context__.
Semantic Networks are produced from collections of sentences and are closely linked to the DSLs themselves.
Babelute sentences hold only information, which needs interpretation before becoming useful.
#### Facade Patterns (Introducing Side Effects)
One common way to implement pragmatics is to use the ["Facade" Design Pattern](https://en.wikipedia.org/wiki/Facade_pattern).
It means here that sentences __will be applied to an object__ (modifying its state), one lexem after the other, and so the sentence itself could be seen as a Facade.
For example, with htsl-lexicon, we could use sentences to set the state of DOM elements by applying dom-implementations-of-lexems directly to them.
The htsl sentence is then just a simple way of setting properties and children of DOM Elements (which could look like jQuery).
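A hedged illustration of that facade idea (this is not htsl's actual API; the 'ui' lexicon and its attr/text words are made up): each word of the sentence is interpreted as an immediate side effect on the subject, here a plain object standing in for a DOM element.

// Sketch only: a facade-style pragmatics applying a sentence directly to a subject.
const babelute = require('babelute');

const ui = babelute.createLexicon('ui');
ui.addAtoms(['attr', 'text']);

const domPragmas = babelute.createFacadePragmatics({ ui: true }, {
    attr(el, args) { el.attributes[args[0]] = args[1]; },
    text(el, args) { el.textContent = args[0]; }
});

const el = { attributes: {}, textContent: '' };
domPragmas.$output(el, ui.initializer().attr('class', 'greeting').text('hello'));
// el is now { attributes: { class: 'greeting' }, textContent: 'hello' }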
## Translation
Translation (or transformation) is the fundamental concept behind everything. No translation === no meaning, no actions.
## Serialization
See [babelute-uus](https://github.com/nomocas/babelute-uus) for more infos.
## World Consequences
### Optimal Information Model
## Denotation, Extension, Connotation, Intention
What about the four aspects of Comprehension from Logic Science?
- __Denotation__ : provided by internal grammar/semantics, and translations
- __Connotation__ : provided by other lexicons where the word(s) exist and the links between them
- __Extension__ : the notion of similarity, provided by pattern matching between DSLs (Internal Denotations and/or Data Populations (sets of sentences), or by transitivity)
- __Intention__ : the only aspect that is not stored in the model. There is no easy way to deduce the exact intention behind a simple sentence or even behind a full text (a bunch of sentences). Only an explicit text that focuses on the real (conscious) intention of the data provider could help. And even then, as meaning depends on the receiver, there is no way to be sure that we have grasped the exact intention.
## Optimal Information Model
https://en.wikipedia.org/wiki/Information_model

@@ -371,13 +385,7 @@

#### Denotation, Extension, Connotation, Intention
What about the four aspects of Comprehension from Logic Science?
- __Denotation__ : provided by internal grammar/semantics, and translations
- __Connotation__ : provided by other lexicons where the word(s) exist and the links between them
- __Extension__ : the notion of similarity, provided by pattern matching between DSLs (Internal Denotations and/or Data Populations (sets of sentences), or by transitivity)
- __Intention__ : the only aspect that is not stored in the model. There is no easy way to deduce the exact intention behind a simple sentence or even behind a full text (a bunch of sentences). Only an explicit text that focuses on the real (conscious) intention of the data provider could help. And even then, as meaning depends on the receiver, there is no way to be sure that we have grasped the exact intention.
## World Consequences
### Maximal Shareability

@@ -384,0 +392,0 @@

{
"name": "babelute",
"version": "0.5.0",
"version": "0.5.1",
"description": "Internal Domain Specific (Multi)Modeling javascript framework",

@@ -57,3 +57,3 @@ "main": "dist/bundles/index.js",

"babel-plugin-external-helpers": "^6.22.0",
"babel-plugin-istanbul": "^2.0.2",
"babel-plugin-istanbul": "^4.1.1",
"babel-plugin-transform-object-assign": "^6.22.0",

@@ -66,3 +66,3 @@ "babel-plugin-unassert": "^2.1.2",

"coveralls": "^2.12.0",
"cross-env": "^2.0.1",
"cross-env": "^4.0.0",
"eslint": "^3.15.0",

@@ -75,7 +75,7 @@ "eslint-plugin-import": "^2.2.0",

"karma-mocha": "^1.3.0",
"karma-rollup-preprocessor": "^3.0.3",
"karma-rollup-preprocessor": "^4.0.0",
"karma-safari-launcher": "^1.0.0",
"mocha": "^3.2.0",
"nyc": "^8.1.0",
"rollup": "^0.41.4",
"nyc": "^10.2.0",
"rollup": "^0.41.6",
"rollup-plugin-babel": "^2.7.1",

@@ -82,0 +82,0 @@ "rollup-watch": "^3.2.2",

@@ -35,4 +35,5 @@ # Babelute.js

- babelute (this lib)
- [babelute](https://github.com/nomocas/babelute) (this lib)
- [babelute-uus](https://github.com/nomocas/babelute-uus) : Universal Unambiguous Sentences proposal : Welcome in Sharing Era.
- [babelute-ldl](https://github.com/nomocas/babelute-ldl) : Babelute Lexicon Definition DSL and its generators.

@@ -48,5 +49,4 @@ ## Understanding by examples

Low Level DSLs (Developement related domains) :
- babelute-aright : Objects and types validation DSL (ultra-fast, ultra-modular) (realease in april 2017)
- babelute-lexicon-definition-language : Babelute Lexic Definition DSL and its generators. (realease in april 2017)
- [babelute-html-lexicon](https://github.com/nomocas/babelute-html-lexicon) : HTML5 DSL and its render engines. (modern, __one of the world's fastest__, one-way-binding templating (React philosophy))
- [aright-*](https://github.com/nomocas/aright-lexicon) : Objects and types validation DSL (ultra-fast, ultra-modular)
- [htsl-*](https://github.com/nomocas/htsl-lexicon) : HTML5 DSL and its render engines. (modern, __one of the world's fastest__, one-way-binding templating (React philosophy))

@@ -53,0 +53,0 @@ High Level DSLs (Human related domains) :

@@ -27,3 +27,3 @@ /*

import { Pragmatics, createPragmatics } from './pragmatics/pragmatics-core.js';
import { FacadePragmatics, createFacadeInitializer, createFacadePragmatics } from './pragmatics/facade-pragmatics.js';
import { FacadePragmatics, createFacadePragmatics } from './pragmatics/facade-pragmatics.js';

@@ -33,3 +33,2 @@ export default {

createPragmatics,
createFacadeInitializer,
createFacadePragmatics,

@@ -36,0 +35,0 @@ init,

@@ -265,2 +265,12 @@ /**

* translation through lexicon (already delcared in Babelute proto)
* @TODO: translation and each and if
* each :
* .each(collec, handler)
* translated to
* .each(collec, wrap(handler, translationInfos))
*
* ==> should translate automatically output from handler
* if ==> same things : wrap handlers
* ==> while translating : when lexem.name === "each" (or "if") (should always be present in target lexicon)
* ==> apply wrapping
*/

@@ -275,9 +285,20 @@

return null;
const args = translateArgs(lexem.args, lexicon, firstLevel);
const b = new (firstLevel ? lexicon.FirstLevel : lexicon.Atomic)();
return b[lexem.name] && b[lexem.name](...lexem.args);
return b[lexem.name] && b[lexem.name](...args);
});
};
function translateArgs(args, lexicon, firstLevel){
const result = [];
for(let i = 0, len = args.length; i < len; ++i)
if(args[i] && args[i].__babelute__)
result.push(args[i]._translateLexemsThrough(lexicon, firstLevel));
else
result.push(args[i]);
return result;
}
/**

@@ -284,0 +305,0 @@ * _use handeling

@@ -27,3 +27,3 @@ /**

*/
constructor(targets, pragmas = null) {
constructor(targets, pragmas) {
super(targets, pragmas);

@@ -42,3 +42,3 @@ }

assert(typeof subject === 'object', '.each facade pragma need an object as subject (first argument)');
assert(Array.isArray(args[0]) || args[0].length, '.each facade pragma need an array (or iterable with bracket access) as first args object (first argument passed to lexem)');
assert(!args[0] || Array.isArray(args[0]), '.each facade pragma need an array (or iterable with bracket access) as first args object (first argument passed to lexem)');
assert(typeof args[1] === 'function', '.each facade pragma need a function as second args object (second argument passed to lexem)');

@@ -53,4 +53,5 @@

templ = itemHandler(item, i);
if (templ)
this.$output(subject, templ, percolator);
if (!templ)
throw new Error('.each function should return a sentence.');
this.$output(subject, templ, percolator);
}

@@ -102,49 +103,2 @@ }

/**
* create a facade-ready-to-run initializer function.
* @param {Lexicon} lexicon the lexicon from where take the api
* @param {Object} pragmatics the pragmatics object where to find interpretation method to fire immediatly
* @return {Function} the facade initializer function
* @example
*
* import babelute from 'babelute';
* const myLexicon = babelute.createLexicon('my-lexicon');
* myLexicon.addAtoms(['foo', 'bar']);
*
* const myPragmas = babelute.createFacadePragmatics({
* 'my-lexicon':true
* }, {
* foo(subject, args, percolator){
* // do something
* },
* bar(subject, args, percolator){
* // do something
* }
* });
*
* const mlp = babelute.createFacadeInitializer(myLexicon, myPragmas);
*
* mlp(mySubject).foo(...).bar(...); // apply pragmas immediatly on subject through lexicon api's
*
*/
export function createFacadeInitializer(lexicon, pragmatics) {
const Facade = function(subject, percolator = null) {
lexicon.Atomic.call(this);
this._subject = subject;
this._percolator = percolator;
};
Facade.prototype = Object.create(lexicon.Atomic.prototype);
Facade.prototype.constructor = Facade;
Facade.prototype._lexicon = null;
Facade.prototype._append = function(lexiconName, name, args) {
if ((!pragmatics._targets || pragmatics._targets[lexiconName]) && pragmatics[name])
pragmatics[name](this._subject, args, this._percolator);
return this;
};
return (subject, percolator = null) => {
return new Facade(subject, percolator);
};
}
/**
* create a FacadePragmatics instance

@@ -151,0 +105,0 @@ * @param {Object} targets the pragmatics targets DSL

@@ -14,3 +14,3 @@ /**

/**
* Inner-sentence-scopes manager : hold array as stacks for inner-scopes of sentences (if needed). It's only avaiable in pragmatics, while traversing, and is dependent of what pragmatics do. See babelute-html-view as an example of usage.
* Inner-sentence-scopes manager : hold array as stacks for inner-scopes of sentences (if needed). It's only avaiable in pragmatics, while traversing, and is dependent of what pragmatics do. See htsl-view as an example of usage.
*

@@ -23,3 +23,3 @@ * So its a simple helper aimed to (while interpreting sentences) :

*
* For certain output types (as in Babelute-html diffing) it has to be "pure".
* For certain output types (as in htsl diffing) it has to be "pure".
* (in a functional way of thinking).

@@ -31,7 +31,7 @@ * It means that it should contains nothing else

*
* So by example, Babelute-html-view use it to keep track (for managing view's life cycle)
* So by example, htsl-view use it to keep track (for managing view's life cycle)
* of views tree while rendering (with the scope facility prodived here).
*
* Views are inner-sentences objects, and so as needed,
* two render on same babelute-html sentence will provide same output.
* two render on same htsl sentence will provide same output.
*

@@ -38,0 +38,0 @@ * For other DSL and outputs types, it depends what you want and implement, but be sure of what your doing

@@ -41,3 +41,3 @@ /* global describe, it */

});
describe('simple call', () => {
describe('simple facadePragmas call', () => {

@@ -82,3 +82,3 @@ const pragmas = babelute.createFacadePragmatics({

var Dsl = babelute.Babelute.extends(babelute.Babelute, {
zoo(arg){
zoo(arg) {
return this._append('test', 'zoo', [arg]);

@@ -101,3 +101,3 @@ }

const subject = {};
pragmas.foo(subject, [ new Dsl().zoo('hop') ]);
pragmas.foo(subject, [new Dsl().zoo('hop')]);

@@ -111,6 +111,6 @@ it('should', () => {

var Dsl = babelute.Babelute.extends(babelute.Babelute, {
zoo(arg){
zoo(arg) {
return this._append('test', 'zoo', [arg]);
},
if(condition, s, f){
if (condition, s, f) {
return this._append('test', 'if', [condition, s, f]);

@@ -139,6 +139,6 @@ }

var Dsl = babelute.Babelute.extends(babelute.Babelute, {
zoo(arg){
zoo(arg) {
return this._append('test', 'zoo', [arg]);
},
if(condition, s, f){
if (condition, s, f) {
return this._append('test', 'if', [condition, s, f]);

@@ -161,3 +161,3 @@ }

it('should', () => {
expect(subject).to.deep.equal({ });
expect(subject).to.deep.equal({});
});

@@ -168,6 +168,6 @@ });

var Dsl = babelute.Babelute.extends(babelute.Babelute, {
zoo(arg){
zoo(arg) {
return this._append('test', 'zoo', [arg]);
},
if(condition, s, f){
if (condition, s, f) {
return this._append('test', 'if', [condition, s, f]);

@@ -196,6 +196,6 @@ }

var Dsl = babelute.Babelute.extends(babelute.Babelute, {
zoo(arg){
zoo(arg) {
return this._append('test', 'zoo', [arg]);
},
each(collec, handler){
each(collec, handler) {
return this._append('test', 'each', [collec, handler]);

@@ -215,3 +215,5 @@ }

const subject = {};
pragmas.$output(subject, new Dsl().each(['a', 'b', 'c'], function(item){ return new Dsl().zoo(item); }));
pragmas.$output(subject, new Dsl().each(['a', 'b', 'c'], function(item) {
return new Dsl().zoo(item);
}));

@@ -222,3 +224,98 @@ it('should', () => {

});
describe('facade each throw if nothing is returned from handler', () => {
var Dsl = babelute.Babelute.extends(babelute.Babelute, {
each(collec, handler) {
return this._append('test', 'each', [collec, handler]);
}
});
const pragmas = babelute.createFacadePragmatics({
test: true
}, {
zoo(subject, args) {
subject.zoo = subject.zoo || '';
subject.zoo += args[0];
}
});
const willThrow = function() {
pragmas.$output({}, new Dsl().each(['a', 'b', 'c'], function() {}));
};
it('should', () => {
expect(willThrow).to.throw();
});
});
describe('facade each no collection', () => {
var Dsl = babelute.Babelute.extends(babelute.Babelute, {
zoo(arg) {
return this._append('test', 'zoo', [arg]);
},
each(collec, handler) {
return this._append('test', 'each', [collec, handler]);
}
});
const pragmas = babelute.createFacadePragmatics({
test: true
}, {
zoo(subject, args) {
subject.zoo = subject.zoo || '';
subject.zoo += args[0];
}
});
const subject = {};
pragmas.$output(subject, new Dsl().each(null, function(item) {
return new Dsl().zoo(item);
}));
it('should', () => {
expect(subject).to.deep.equal({});
});
});
describe('lexicon + pragmatics with percolator', () => {
const lexicon = babelute.createLexicon('test');
lexicon.addAtoms(['zoo']);
const pragmas = babelute.createFacadePragmatics({
test: true
}, {
zoo(subject, args) {
subject.zoo = args[0];
}
});
const b = lexicon.initializer().zoo(true),
percolator = {};
const result = pragmas.$output({}, b, percolator);
it('should', () => {
expect(result).to.deep.equal({ zoo: true });
});
});
describe('facade each throw if nothing is returned from handler', () => {
var Dsl = babelute.Babelute.extends(babelute.Babelute, {
foo(title) {
return this._append('test', 'foo', [title]);
}
});
const pragmas = babelute.createFacadePragmatics({
test: true
}, {});
var result = pragmas.$output({}, new Dsl().foo('bar'));
it('should', () => {
expect(result).to.deep.equals({});
});
});
});
