Comparing version 8.0.3 to 10.0.0
@@ -404,3 +404,2 @@ (function() {
       this.settings = this.freeze(settings);
-      this.VNR = require('./vnr');
       this.Datom = Datom;
@@ -407,0 +406,0 @@ return this;
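The hunk above shows that the v10 constructor no longer attaches the VNR sub-module to the instance. A minimal sketch of what that removal means for downstream code, assuming `DATOM.VNR` was reachable on v8 instances (as the removed line suggests; the diff does not show a v10 replacement):

```coffee
DATOM = new ( require 'datom' ).Datom {}

# v8: the constructor assigned `this.VNR = require('./vnr')`, so consuming
# code could reach the sub-module as `DATOM.VNR`.
# v10: that assignment is gone, so any such access must be removed.
console.log DATOM.VNR?    # expected `false` under v10, per the hunk above
```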
@@ -49,18 +49,2 @@ (function() {
-  //-----------------------------------------------------------------------------------------------------------
-  this.declare('datom_vnr_settings', {
-    tests: {
-      "x is an object": function(x) {
-        return this.isa.object(x);
-      },
-      "x.ordering is 'fair', 'total' or 'partial'": function(x) {
-        var ref;
-        return (ref = x.ordering) === 'fair' || ref === 'total' || ref === 'partial';
-      },
-      "x.validate is a ?boolean": function(x) {
-        return (x.validate == null) || this.isa.boolean(x.validate);
-      }
-    }
-  });
   //-----------------------------------------------------------------------------------------------------------
   this.declare('datom_nonempty_list_of_positive_integers', function(x) {
@@ -144,6 +128,2 @@ if (!this.isa.nonempty_list(x)) {
         dirty: false
       },
-      vnr_settings: {
-        validate: true,
-        ordering: 'fair'
-      }
@@ -150,0 +130,0 @@ };
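Both hunks above remove VNR support from the type declarations and from the default settings (`{ validate: true, ordering: 'fair', }`). For reference, the removed `datom_vnr_settings` type amounts to three checks; here is a standalone CoffeeScript restatement (the function name is illustrative, not part of either version's API):

```coffee
# Standalone sketch of the checks performed by the removed
# 'datom_vnr_settings' declaration; `validate_vnr_settings` is a made-up name.
validate_vnr_settings = ( x ) ->
  unless x? and typeof x is 'object'
    throw new Error "x is not an object"
  unless x.ordering in [ 'fair', 'total', 'partial', ]
    throw new Error "x.ordering must be 'fair', 'total' or 'partial'"
  unless ( not x.validate? ) or ( typeof x.validate is 'boolean' )
    throw new Error "x.validate must be a ?boolean (i.e. absent or boolean)"
  return x

validate_vnr_settings { validate: true, ordering: 'fair', }  # the removed v8 defaults
```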
{
  "name": "datom",
-  "version": "8.0.3",
+  "version": "10.0.0",
  "description": "standardized immutable objects in the spirit of datomic, especially suited for use in data pipelines",
  "main": "lib/main.js",
-  "scripts": {
-    "build": "coffee --map -o lib -c src",
-    "test": "echo 'see https://github.com/loveencounterflow/hengist/tree/master/dev/datom'"
-  },
  "repository": {
@@ -36,3 +32,8 @@ "type": "git",
     "multimix": "5.0.0"
-  }
-}
+  },
+  "scripts": {
+    "build": "coffee --map -o lib -c src",
+    "test": "echo 'see https://github.com/loveencounterflow/hengist/tree/master/dev/datom'"
+  },
"readme": "\n\n\n# Datom ⚛\n\n\n<!-- START doctoc generated TOC please keep comment here to allow auto update -->\n<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->\n**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*\n\n- [Export Bound Methods](#export-bound-methods)\n- [Creation of Bespoke Library Instances](#creation-of-bespoke-library-instances)\n- [Configuration Parameters](#configuration-parameters)\n- [Methods](#methods)\n - [Freezing & Thawing](#freezing--thawing)\n - [Stamping](#stamping)\n - [Type Testing](#type-testing)\n - [Value Creation](#value-creation)\n - [Selecting](#selecting)\n- [System Properties](#system-properties)\n- [WIP](#wip)\n - [PipeDreams Datoms (Data Events)](#pipedreams-datoms-data-events)\n - [`select = ( d, selector ) ->`](#select---d-selector---)\n - [The XEmitter (XE) Sub-Module](#the-xemitter-xe-sub-module)\n - [XE Sending API](#xe-sending-api)\n - [XE Receiving API](#xe-receiving-api)\n - [Sample](#sample)\n - [Managing Scope](#managing-scope)\n- [Vectorial NumbeRs (VNRs)](#vectorial-numbers-vnrs)\n- [Cup Of Datom](#cup-of-datom)\n- [Benchmarks](#benchmarks)\n- [To Do](#to-do)\n\n<!-- END doctoc generated TOC please keep comment here to allow auto update -->\n\n\nstandardized immutable objects in the spirit of datomic, especially suited for use in data pipelines\n\n\n**NOTE: Documentation is still fragmentary. WIP.**\n\n# Export Bound Methods\n\nIf you plan on using methods like `new_datom()` or `select()` a lot, consider using `.export()`:\n\n```coffee\nDATOM = require 'datom'\n{ new_datom\n select } = DATOM.export()\n```\n\nNow `new_datom()` and `select()` are methods bound to `DATOM`. (Observe that because of the JavaScript\n'tear-off' effect, when you do `method = DATOM.method`, then `method()` will likely fail as its reference to\n`this` has been lost.)\n\n# Creation of Bespoke Library Instances\n\nIn order to configure a copy of the library, pass in a settings object:\n\n```coffee\n_DATOM = require 'datom'\nsettings = { merge_values: false, }\nDATOM = new _DATOM.Datom settings\n{ new_datom\n select } = DATOM.export()\n```\n\nOr, mode idiomatically:\n\n```coffee\nDATOM = new ( require 'datom' ).Datom { merge_values: false, }\n{ new_datom\n select } = DATOM.export()\n```\n\nThe second form also helps to avoid accidental usage of the result of `require 'datom'`, which is of\ncourse the same library with a different configuration.\n\n# Configuration Parameters\n\n* **`merge_values`** (boolean, default: `true`)—Whether to merge attributes of the second argument to\n `new_datom()` into the resulting value. When set to `false`, `new_datom '^somekey', somevalue` will always\n result in a datom `{ $key: '^somekey', $value: somevalue, }`; when left to the default, and if `somevalue`\n is an object, then its attributes will become attributes of the datom, which may result in name clashes in\n case any attribute name should start with a `$` (dollar sign).\n\n* **`freeze`** (boolean, default: `true`)—Whether to freeze datoms. When set to `false`, no freezing will\n be performed, which may entail slightly improved performance.\n\n* **`dirty`** (boolean, default: `true`)—Whether to automatically set `{ $dirty: true, }` when the copy\n of a datom has been treated with `lets()` and a modifyer function.\n\n\n# Methods\n\n## Freezing & Thawing\n\n* **`@freeze = ( d ) ->`**\n* **`@thaw = ( d ) ->`**\n* **`@lets = ( original, modifier ) ->`**\n* **`@set = ( d, k, P... 
) ->`**\n* **`@unset = ( d, k ) ->`**\n\n## Stamping\n\n* **`@stamp = ( d, P... ) ->`**\n* **`@unstamp = ( d ) ->`**\n\n## Type Testing\n\n* **`@is_system = ( d ) ->`**\n* **`@is_stamped = ( d ) ->`**\n* **`@is_fresh = ( d ) ->`**\n* **`@is_dirty = ( d ) ->`**\n\n## Value Creation\n\n* **`@new_datom = ( $key, $value, other... ) ->`**\n* **`@new_single_datom = ( $key, $value, other... ) ->`**\n* **`@new_open_datom = ( $key, $value, other... ) ->`**\n* **`@new_close_datom = ( $key, $value, other... ) ->`**\n* **`@new_system_datom = ( $key, $value, other... ) ->`**\n* **`@new_text_datom = ( $value, other... ) ->`**\n* **`@new_end_datom = ->`**\n* **`@new_warning = ( ref, message, d, other... ) ->`**\n\n## Selecting\n\n* **`@select = ( d, selector ) ->`**\n\n# System Properties\n\n* **`d.$key`**—key (i.e., type) of a datom.\n* **`d.$value`**—'the' proper value of a datom. This is always used in case `new_datom()` was called with a\n non-object in the value slot (as in `new_datom '^mykey', 123`), or when the library was configured with `{\n merge_values: false, }`.—In case there is no `d.$value`, the datom's proper value is the object that would\n result from deleting all properties whose names start with a `$` (dollar sign).\n* **`d.$dirty`**—whether the object has been (thawed, then) changed (and then frozen again) since its\n `$dirty` property was last cleared or set to `false`.\n* **`d.$stamped`**—whether the object has been marked as 'stamped' (i.e., processed).\n\n-------------------------------------------------------------------------------\n\n# WIP\n\n**The below copied from PipeDreams docs, to be updated**\n\n## PipeDreams Datoms (Data Events)\n\nData streams—of which [pull-streams](https://pull-stream.github.io/),\n[PipeStreams](https://github.com/loveencounterflow/pipestreams), and [NodeJS\nStreams](https://nodejs.org/api/stream.html) are examples—do their work by\nsending pieces of data (that originate from a data source) through a number of\ntransforms (to finally end up in a data sink).<sup>*note*</sup>\n\n> (*note*) I will ignore here alternative ways of dealing with streams, especially\n> the [`EventEmitter` way of dealing with streamed\n> data](https://nodejs.org/api/stream.html#stream_api_for_stream_consumers).\n> When I say 'streams', I also implicitly mean 'pipelines'; when I say\n> 'pipelines', I also implicitly mean 'pipelines to stream data' and 'streams'\n> in general.\n\nWhen NodeJS streams started out, the thinking about those streams was pretty\nmuch confined to saying that ['a stream is a series of\nbytes'](http://dominictarr.com/post/145135293917/history-of-streams). Already back then,\nan alternative view took hold (I'm slightly paraphrasing here):\n\n> The core interpretation was that stream could be buffers or strings - but the\n> userland interpretation was that a stream could be anything that is\n> serializeable [...] it was a sequence of buffers, bytes, strings or objects.\n> Why not use the same api?\n\nI will not repeat here [what I've written about perceived shortcomings of NodeJS\nstreams](https://github.com/loveencounterflow/pipestreams/blob/master/pipestreams-manual/chapter-00-comparison.md);\ninstead, let me iterate a few observations:\n\n* In streaming, data is just data. There's no need for having [a separate\n 'Object Mode'](https://nodejs.org/api/stream.html#stream_object_mode) or\n somesuch.\n\n* There's a single exception to the above rule, and that is when the data item\n being sent down the line is `null`. 
This has historically—by both NodeJS\n streams and pull-streams—been interpreted as a termination signal, and I'm not\n going to change that (although at some point I might as well).\n\n* When starting out with streams and building fairly simple-minded pipelines,\n sending down either raw pieces of business data or else `null` to indicate\n termination is enough to satisfy most needs. However, when one transitions to\n more complex environments, raw data is not sufficient any more: When\n processing text from one format to another, how could a downstream transform\n tell whether a given piece of text is raw data or the output of an upstream\n transform?\n\n Another case where raw data becomes insufficient is circular\n pipelines—pipelines that re-compute (some or all) output values in a recursive\n manner. An example which outputs the integer sequences of the [Collatz\n Conjecture](https://en.wikipedia.org/wiki/Collatz_conjecture) is [in the tests\n folder](https://github.com/loveencounterflow/pipedreams/blob/master/src/tests/circular-pipelines.test.coffee#L36).\n There, whenever we see an even number `n`, we send down that even number `n`\n along with half its value, `n/2`; whenever we see an odd number `n`, we\n send it on, followed by its value tripled plus one, `3*n+1`. No matter whether\n you put the transform for even numbers in front of that for odd numbers or the\n other way round, there will be numbers that come out at the bottom that need\n to be re-input into the top of the pipeline, and since there's no telling in\n advance how long a Collatz sequence will be for a given integer, it is, in the\n general case, insufficient to build a pipeline made from a (necessarily\n finite) repetitive sequence of copies of those individual transforms. Thus,\n classical streams cannot easily model this kind of processing.\n\nThe idea of **datoms**—short for *data atoms*, a term borrowed from [Rich\nHickey's Datomic](https://www.infoq.com/articles/Datomic-Information-Model)—is\nsimply to wrap each piece of raw data in a higher-level structure. This is of\ncourse an old idea, but not one that is very prevalent in NodeJS streams, the\nfundamental assumption (of classical stream processing) being that all stream\ntransforms get to process each piece of data, and that all pieces of data are of\nequal status (with the exception of `null`).\n\nThe PipeDreams sample implementation of Collatz Sequences uses datoms to (1)\nwrap the numerical pieces of data, which allows marking data as processed\n(a.k.a. 'stamped'), to (2) mark data as 'to be recycled', and to (3) inject\nsystem-level `sync`hronization signals into the data stream to make sure that\nrecycled data gets processed before new data is allowed into the stream.\n\nIn PipeDreams datoms, **each piece of data is explicitly labelled for its\ntype**; **each datom may have a different status**: there are **system-level\ndatoms that serve to orchestrate the flow of data within the pipeline**; there\nare **user-level datoms which originate from the application**; there are\n**datoms to indicate the opening and closing of regions (phases) in the data\nstream**; there are **stream transforms that listen to and act on specific\nsystem-level events**.\n\nDatoms are JS objects that must minimally have a `key` property, a string that\nspecifies the datom's category, namespace and name; in addition, they may have a\n`value` property with the payload (where desired), and any number of other\nattributes. The property `$` is used to carry metadata (e.g. 
from which line in\na source file a given datom was generated). Thus, we may give the outline\nof a datom as (in a rather informal notation) `d := { $key, ?$value, ?$stamped,...,\n?$, }`.\n\nThe `key` of a datom must be a string that consists of at least two parts, the\n`sigil` and the `name`. The `sigil`, a single punctuation character, indicates\nthe 'category' of each datom; there are two levels and three elementary\ncategories, giving six types of datoms:\n\n* Application level:\n * `^` for **data datoms** (a.k.a. 'singletons'),\n * `<` for **start-of-region datoms**,\n * `>` for **end-of-region datoms**.\n\n* System level:\n * `~` for **data datoms**,\n * `[` for **start-of-region datoms**,\n * `]` for **end-of-region datoms**.\n\n<!-- System-level events, in particular those without further payload data, are also\ncalled 'signals'; thus, `~collect` is a 'collect signal', and `[data` is a\n'start-of-data signal'. Aggregate transforms such as `$collect()`, `$sort()` and\nso on listen to the signals of the same name, `~collect` and `~sort`: In the\ncase of `$collect()`, a collect signal will trigger the sending of the\ncollection as it looks at that point in time; likewise, `$sort()` will react to\na sort signal by sending all buffered events in the configured ordering.\n -->\n\nNormally, one will probably want to send around business data inside (the\n`value` property of) application-level data datoms (hence their name, also\nshortened to D-datoms); however, one can also set other properties of datom\nobjects, or send data around using properties of start- or end-of-region datoms.\n\nRegion events are intended to be used e.g. when parsing text with markup; say\nyou want to turn a snippet of HTML like this:\n\n```\n<document><div>Helo <em>world!</em></div></document>\n```\n\ninto another textual representation, you may want to turn that into a sequence\nof datoms similar to these, in the order of sending and regions symbolized by\nboxes:<sup>*note*</sup>\n\n```\n--------------------------------------------------------+\n { key: '<document', } # d1 |\n------------------------------------------------------+ |\n { key: '<div', } # d2 | |\n { key: '^text', value: \"Helo \", } # d3 | |\n----------------------------------------------------+ | |\n { key: '<em', } # d4 | | |\n { key: '^text', value: \"world!\", } # d5 | | |\n { key: '>em', } # d6 | | |\n----------------------------------------------------+ | |\n { key: '>div', } # d7 | |\n------------------------------------------------------+ |\n { key: '>document', } # d8 |\n--------------------------------------------------------+\n```\n\n> *note* by 'in the order of sending' I mean you'd have to send datom `d1`\n> first, then `d2` and so on. Trivial until you imagine you write a pipeline and\n> then picture how the events will travel down that pipeline:\n>\n> `pipeline.push $do_this() # s1, might be processing d3 right now`<br>\n> `pipeline.push $do_that() # s2, might be processing d2 right now`<br>\n> `pipeline.push $do_something_else() # s3, might be processing d1 right now`<br>\n>\n> Although there's really no telling whether step `s3` will really process datom\n> `d1` at the 'same point in time' that step `s2` processes datom `d2` and so on\n> (in the strict sense, this is hardly possible in a single-threaded language\n> anyway), the visualization still holds a grain of truth: stream transforms\n> that come 'later' (further down) in the pipeline will see events near the top\n> of your to-do list first, and vice versa. 
This can be mildly confusing.\n\n\n## `select = ( d, selector ) ->`\n\nThe `select` method can be used to determine whether a given event `d` matches a\nset of conditions; typically, one will want to use `select d, selector` to decide\nwhether a given event is suitable for processing by the stream transform at\nhand, or whether it should be passed on unchanged.\n\nThe current implementation of `select()` is much dumber and faster than its predecessors; where previously,\nit was possible to match datoms with multiple selectors that contained multiple sigils and so forth, the new\nversion does little more than check whether the single selector allowed equals the given datom's `key`\nvalue—that's about it, except that one can still `select d, '^somekey#stamped'` to match both unstamped and\nstamped datoms.\n\n\n\n## The XEmitter (XE) Sub-Module\n\n### XE Sending API\n\n* **`XE.emit = ( key, d ) ->`** emit (a.k.a. 'publish', 'send to whom it may concern') an event. To\n be called either as `XE.emit '^mykey', 'myvalue'` or as `XE.emit PD.new_event '^mykey', 'myvalue'` (in\n which latter case the datom's key will become the channel key). When called with await as in\n `return_values = await XE.emit '^foo', ...`, `return_values` will be a list with all values returned by\n all listeners that got called for this event.\n\n* **`XE.delegate = ( key, d ) ->`** like `XE.emit()` but will pick out and unwrap the event value\n from the event contractor (see below). If no event contractor was listening, an error will be raised.\n\n### XE Receiving API\n\n* **`XE.listen_to_all = ( listener ) ->`** Register a listener for all events.\n\n* **`XE.listen_to_unheard = ( listener ) ->`** Register a listener for all events that do not have a\n listener or a contractor.\n\n* **`XE.listen_to = ( key, listener ) ->`** Register a listener for events that match `key`. No\n pattern matching is implemented atm, so you can only listen to all keys or a single key.\n\n* **`XE.contract = ( key, listener ) ->`** Register a contractor (a.k.a. 'result producer') for\n events that match `key`.\n\n<!-- The above methods—`XE.listen_to_all()`, `XE.listen_to()` and `XE.contract()`—will return an `unsubscribe()`\nfunction that, when called once, will unsubscribe the event listener from the event.\n -->\n\n### Sample\n\n```coffee\nPD = require 'pipedreams'\ndefer = setImmediate\nXE = PD.XE.new_scope()\n\n#-----------------------------------------------------------------------------------------------------------\n### Register a 'contractor' (a.k.a. 'result producer') for `^plus-async` events; observe that asynchronous\ncontractors should return a promise: ###\nXE.contract '^plus-async', ( d ) =>\n return new Promise ( resolve, reject ) =>\n defer => resolve d.value.a + d.value.b\n\n############################################################################################################\ndo =>\n info 'µ28823-5', await XE.emit PD.new_event '^plus-async', { a: 42, b: 108, }\n # in case other listeners were registered that returned values like `'listener #1'` and so on, the\n # returned list of values might look like:\n # -> [ 'listener #4', { key: '~xemitter-preferred', value: 150 }, 'listener #1', 'listener #2' ]\n\n ### When using `delegate()` instead of `emit()`, the preferred value (a.k.a. 
'*the* event result')\n will be picked out of the list and unwrapped for you: ###\n info 'µ28823-6', await XE.delegate PD.new_event '^plus-async', { a: 42, b: 108, }\n # -> 150\n\n```\n\nFor a demo with more coverage, have a look at\n[experiments/demo-xemitter.coffee](https://github.com/loveencounterflow/pipedreams/blob/master/blob/master/src/experiments/demo-xemitter.coffee).\n\n### Managing Scope\n\nTypically, you'll start using XEmitter with `XE = PD.XE.new_scope()`; this creates a new 'scope' for events.\nOnly methods that emit and listen to the same scope can exchange messages. When used within an application,\nyou will want to publish that scope to all participating modules; one way to do so is to write a dedicated\nmodule with a single line in it, `module.exports = ( require 'pipedreams' ).XE.new_scope()`.\n\n# Vectorial NumbeRs (VNRs)\n\nWhere a consistent relative ordering of streams of datoms is needed, especially if any number of datoms may\nget deleted and inserted at some mid-stream point, [Vectorial Numbers (VNRs)](./VNRs.md), which\nare implemented as lists of integers, can be used to avoid a re-numbering of elements and still be able to\ninsert arbitrarily many new elements between any two given elements.\n\n# Cup Of Datom\n\nClass `Cupofdatom` is a derivative of [`Cupofjoe`](https://github.com/loveencounterflow/cupofjoe) that is\ngeared towards easy declarative generation of nested sequences of datoms with a\n[teacup](https://github.com/goodeggs/teacup)-like syntax:\n\n```coffee\nc = new DATOM.Cupofdatom()\nc.cram 'helo', 'world'\nc.cram 'foo', ->\n c.cram 'bold', ->\n c.cram null, 'content'\nds = c.expand()\n# `ds` is now a list of datoms:\n[\n { $key: '<helo' },\n { $key: '^text', text: 'world' },\n { $key: '>helo' },\n { $key: '<foo' },\n { $key: '<bold' },\n { $key: '^text', text: 'content' },\n { $key: '>bold' },\n { $key: '>foo' } ]\n```\n\n* First argument to `cram()` becomes key of datom\n* therefore, must be a valid datom name\n* sigil will be `^` if called with no further arguments\n* or else two datoms with sigils `<` and `>` will be generated that surround their contents\n* text arguments will be turned into `^text` datoms\n* as with `Cupofjoe`, functions will be called, may either call `cram()` method or return value\n* return values will *not* be further analyzed but be kept as-is in the list returned by `expand()`\n* also possible to provide (in non-initial positions) objects whose members will become attributes of the\n respective datom:\n\n```coffee\nc = new DATOM.Cupofdatom { absorb: true, } # default value; Note: turn attributes off with { absorb: false, }\nc.cram 'greeting', 'helo', 'world'\nc.cram 'greeting', '早安', { lang: 'zh_CN', }\nc.cram 'greeting', { lang: 'zh_CN', 问候: '早安', time_of_day: 'morning', }\nc.cram 'text', { lang: 'hi', text: 'नमस्ते', }\nc.cram 'greeting', ->\n c.cram 'language', { $value: 'Japanese', }\n c.cram 'time_of_day', { $value: 'morning', }\n c.cram null, 'お早うございます'\n```\n\ngives\n\n```\n{ $key: '<greeting' }\n{ $key: '^text', text: 'helo', }\n{ $key: '^text', text: 'world', }\n{ $key: '>greeting' }\n{ $key: '<greeting', lang: 'zh_CN', }\n{ $key: '^text', text: '早安', }\n{ $key: '>greeting' }\n{ $key: '^greeting', lang: 'zh_CN', '问候': '早安', time_of_day: 'morning', }\n{ $key: '^text', text: 'नमस्ते', lang: 'hi', }\n{ $key: '<greeting' }\n{ $key: '^language', $value: 'Japanese', }\n{ $key: '^time_of_day', $value: 'morning', }\n{ $key: '^text', text: 'お早うございます', }\n{ $key: '>greeting' }\n```\n\nCall patterns:\n\n* first argument is 
always:\n * the **basic name** (the `$key` of the datom minus the sigil) of the datom,\n * or else the **comprehensive name**, where implemented (for example in InterText `CupOfHtml`, this means\n one can give `div#c59.draggable.hilite` as first argument to produce elements with a tag name (`div`),\n an ID (`c59`), and HTML `class` attribute (`draggable hilite`) in one go)\n * or else **`null`** to indicate absence of a specific name\n * when a name has been given\n * and there are content arguments, then a pair of `{ $key: '<name', }`, `{ $key: '>name', }` datoms\n will be produced, with the content arguments coming in between\n * in case no content has been given, a single `{ $key: '^name', }` datom will be produced\n* as for arguments in non-initial positions:\n * objects will be merged with `Object.assign()` and passed on to `DATOM.new_datom()`, so `cram 'foo', {\n id: 'c221', frob: true, x: 1, }, ... { x: 2, }` will produce `{ $key: '^foo', id: 'c221', frob: true, x:\n 2, }`.\n * In case a key/value pair attributes argument conflicts with one set by a comprehensive name (as in\n `cram 'foo#IDA', { id: 'IDB', }`), the one in the attributes argument wins (as it would in a similar\n situation when using `Object.assign()`)\n * functions will be called without arguments\n * if a function itself calls `cram()` from the same instance, its return value will be discarded;\n * in case it does not call `cram()`, its return value will be discarded if it is `null` or `undefined`,\n and otherwise become a content argument *without being processed*, so contained functions will not be\n called and text values will not be wrapped in `{ $key: '^text', }` datoms\n * texts will be wrapped in `{ $key: '^text', 'text': ..., }` datoms\n * other values will be wrapped in `{ $key: '^value', '$value': ..., }` datoms, e.g. `cram null, 42, 'some\n text', true` will emit `{ $key: '^value', $value: 42, }, { $key: '^text', text: 'some text', }, { $key:\n '^value', $value: true, }`\n\n```coffee\ncram name\ncram name, content1, content2, ...\ncram name, content1, ( -> function ), ...\ncram name, content1, ( -> cram ... ), ...\ncram name, { key: value, }, content1, ( -> cram ... ), ...\n```\n\n# Benchmarks\n\nHere is a speed comparison ([code on GitHub](https://github.com/loveencounterflow/hengist/tree/master/dev/datom)) between Datom versions 7 and 8, using two methods of dealing with object freezing\nand two Datom configurations, `f1` standing for the standard configuration (i.e. either `DATOM = require\n'datom'` or `DATOM = ( require 'datom' ).new { freeze: true, }`) and `f0` for the non-freezing configuration\n(obtained by `DATOM = ( require 'datom' ).new { freeze: false, }`). `datom_v7_thaw_freeze_f0` is missing here\nbecause of a bug in the `thaw` method used in v7. Each run involved thawing 100 datoms with 5 key/value\npairs each (ex.: `{ '$key': '^vapeurs', '𤭨': 447, '媑': true, escamote: false, auditionnerais: true,\nexacerbant: true, }`), changing 3 values and freezing the object again. 
Tests marked `...thaw_freeze...` use\nexplicit calls to `d = thaw d; ...; d = freeze d` to do this, the ones marked `...lets...` use a single call\n`d = lets d, ( d ) -> ...` to accomplish the same.\n\nWe see an overall improvement in the performance of v8 as compared to v7 which can be ascribed to the update\nof the [`letsfreezethat`](https://github.com/loveencounterflow/letsfreezethat) dependency which represents a\ncomplete overhaul of that library:\n\n```\ndatom_v8_thaw_freeze_f0 144,938 Hz 100.0 % │████████████▌│\ndatom_v8_lets_f0 128,930 Hz 89.0 % │███████████▏ │\ndatom_v8_thaw_freeze_f1 126,920 Hz 87.6 % │███████████ │\ndatom_v7_lets_f0 92,669 Hz 63.9 % │████████ │\ndatom_v8_lets_f1 81,917 Hz 56.5 % │███████▏ │\ndatom_v7_lets_f1 40,063 Hz 27.6 % │███▌ │\ndatom_v7_thaw_freeze_f1 39,334 Hz 27.1 % │███▍ │\n```\n\nFor best performance, it is recommended to\n\n* prefer `d = thaw d; ...; d = freeze d` over `lets()` although the latter is more elegant and prevents\n one from forgetting to `freeze()` a `thaw()`ed value, and to\n* configure the `DATOM` library to forego actual freezing when moving from development to production, where\n appropriate, for a speed gain of around 10%.\n\n# To Do\n\n* [ ] implement piecemeal structural validation such that on repeated calls to a validator instance's\n `validate()` method an error will be thrown as soon as unbalanced regions (delimited by `{ $key: '<token',\n ..., }` and `{ $key: '>token', ..., }`) are encountered.\n\n* [ ] VNRs:\n * [X] implement Vectorial NumbeRs (VNRs)\n * [ ] document Vectorial NumbeRs (VNRs)\n * [ ] remove either `cmp_total()` or `cmp_partial()` for simplification\n * [ ] assert and document that VNRs may be sorted element-wise lexicographically (e.g. in Postgres, but\n also in JS) by appending a single zero element (or, for that matter, by padding as many zeroes as needed\n to make all VNRs the same length)\n * [ ] consider to disallow giving VNRs a final zero element\n * [ ] consider to store VNRs with an appended zero element\n* [ ] implement & document standard attributes, `$`-prefixed and otherwise (?), such as\n * [ ] `^text`—key for 'text datoms'\n * [ ] `text`—the underlying source text where code, data is parsed\n * [ ] `$`—'produced by', contains short label to point to source position, may be left-chained (most\n recent first) to obtain breadcrumbs path of responsible source locations\n * [ ] `$vnr`—for VNRs, the primary ordering criterion\n * [ ] `$ref`—do we still use this? See DataMill\n * [ ] `$pos`? `$range`? for `[ start, stop, ]` pairs, indices into a source; use inclusive or exclusive\n upper bound?\n * [ ] `$loc`? for `[ line_nr, col_nr, ]` pairs; NB might also want to use stop position of ranges\n* [X] make `{ dirty: false, }` the default setting (i.e. not marking changed datoms)\n* [ ] consider to remove `$dirty` altogether; datoms-as-immutable-values can not be updated anyway, and\n whether an operation like `d2 = lets d1, ( d ) -> ...` has or has not caused any differences between `d1`\n and `d2` (short of a Turing-complete analysis of the function passed in to `lets()`) is only answerable\n by comparing all members of both datoms.\n\n* [ ] Dependency [`emittery@0.7.0`](https://github.com/sindresorhus/emittery/releases/tag/v0.7.0) changed\n behavior: \"Ensure `.emit()` doesn't return a value\" which breaks contracts. The fix currently consists in\n not upgrading from 0.6.0 until a workaround has been implemented.\n\n\n\n"
}
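Since the readme is rendered above as one long escaped string, here is a short runnable restatement of its core examples (CoffeeScript, v8-era API as documented in that readme; the key `'^mykey'` and the values are made up, and it is assumed that `export()` also covers `lets`/`thaw`/`freeze`, which the readme lists as instance methods):

```coffee
# Bespoke instance plus bound exports, per the readme's 'Creation of Bespoke
# Library Instances' and 'Export Bound Methods' sections:
DATOM = new ( require 'datom' ).Datom { freeze: true, }
{ new_datom, select, lets, thaw, freeze } = DATOM.export()

d = new_datom '^mykey', 123      # -> { $key: '^mykey', $value: 123 }
console.log select d, '^mykey'   # -> true

# Update style 1: explicit thaw/freeze (faster, per the readme's benchmarks):
d = thaw d
d.$value = 124
d = freeze d

# Update style 2: a single lets() call with a modifier function (more elegant,
# cannot forget to re-freeze):
d = lets d, ( d ) -> d.$value = 125
```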
@@ -565,2 +565,6 @@
 * [X] make `{ dirty: false, }` the default setting (i.e. not marking changed datoms)
+* [ ] consider to remove `$dirty` altogether; datoms-as-immutable-values can not be updated anyway, and
+  whether an operation like `d2 = lets d1, ( d ) -> ...` has or has not caused any differences between `d1`
+  and `d2` (short of a Turing-complete analysis of the function passed in to `lets()`) is only answerable
+  by comparing all members of both datoms.
@@ -567,0 +571,0 @@ * [ ] Dependency [`emittery@0.7.0`](https://github.com/sindresorhus/emittery/releases/tag/v0.7.0) changed
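The to-do item added in this hunk argues that, once `$dirty` is gone, the only way to tell whether a `lets()` call changed anything is to compare all members of both datoms. A sketch of that comparison for flat datoms (the helper name is illustrative, not part of the library):

```coffee
# Member-wise equality for flat (non-nested) datoms, as alluded to above;
# `datoms_equal` is a made-up name.
datoms_equal = ( d1, d2 ) ->
  keys_1 = Object.keys d1
  keys_2 = Object.keys d2
  return false unless keys_1.length is keys_2.length
  return keys_1.every ( k ) -> d1[ k ] is d2[ k ]

# A lets() call that changes nothing yields an equal copy; v8 with
# `dirty: true` would nonetheless have marked that copy with `$dirty`.
```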
License Policy Violation: this package is not allowed per your license policy. Review the package's license to ensure compliance. (Found 1 instance in 1 package.)