nmr-simulation - npm package: compare versions

Comparing version 0.0.4 to 0.1.4

README.md (diff not shown)

package.json
{
"name": "nmr-simulation",
"version": "0.0.4",
"version": "0.1.4",
"description": "Simulate NMR spectra from spin systems",

@@ -32,11 +32,15 @@ "main": "src/index.js",

"dependencies": {
"ml-binary-search": "^1.0.2",
"binary-search": "^1.3.2",
"ml-hclust": "1.1.0",
"ml-matrix": "^1.1.2",
"ml-simple-clustering": "0.1.0",
"ml-sparse-matrix": "^0.2.1",
"new-array": "^1.0.0"
"new-array": "^1.0.0",
"num-sort": "^1.0.0"
},
"devDependencies": {
"cheminfo-tools": "^1.3.4",
"request": "2.72.0"
"cheminfo-tools": "^1.11.0",
"request": "2.78.0",
"nmr-predictor": "0.1.0"
}
}
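
The most consequential dependency change is the swap from ml-binary-search to binary-search, together with the new num-sort dependency: binary-search expects an explicit comparator, which the source change below supplies via num-sort's asc. A minimal sketch of that call pattern (the sample array and search value are invented for illustration):

// binary-search returns the index of the needle, or the bitwise complement
// of the insertion point when the needle is absent.
const binarySearch = require('binary-search');
const sortAsc = require('num-sort').asc;

const frequencies = [-12.5, -3.2, 0.8, 7.1]; // invented sample data
const valFreq = 1.0;                         // invented search value
const insertIn = binarySearch(frequencies, valFreq, sortAsc);
if (insertIn < 0) {
  // keep the array sorted, the same pattern simulate1d uses below
  frequencies.splice(-1 - insertIn, 0, valFreq);
}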

@@ -5,3 +5,4 @@ 'use strict';

const SparseMatrix = require('ml-sparse-matrix');
- const binarySearch = require('ml-binary-search');
+ const binarySearch = require('binary-search');
+ const sortAsc = require('num-sort').asc;
const newArray = require('new-array');

@@ -11,4 +12,7 @@

- function simulate1d(spinSystem, options = {}) {
- var i;
+ const DEBUG = false;
+ const smallValue = 1e-2;
+ function simulate1d(spinSystem, options) {
+ var i, j;
const frequencyMHz = (options.frequency || 400);

@@ -20,2 +24,3 @@ const from = (options.from || 0) * frequencyMHz;

const maxClusterSize = options.maxClusterSize || 10;
+ const output = options.output || "y";

@@ -28,3 +33,3 @@ const chemicalShifts = spinSystem.chemicalShifts.slice();

let lineWidthPoints = (nbPoints * lineWidth / Math.abs(to - from)) / 2.355;
- let lnPoints = lineWidthPoints * 50;
+ let lnPoints = lineWidthPoints * 20;

@@ -45,6 +50,7 @@ const gaussianLength = lnPoints | 0;

- if (cluster.length > maxClusterSize) {
- throw new Error('too big cluster: ' + cluster.length);
+ var clusterFake = new Array(cluster.length);
+ for (i = 0; i < cluster.length; i++) {
+ clusterFake[i] = cluster[i]<0?-cluster[i]-1:cluster[i];
+ }
+ var weight = 1;

@@ -54,4 +60,30 @@ var sumI = 0;

const intensities = [];
- if (false) {
- // if(tnonZeros.contains(2)){
+ if (cluster.length>maxClusterSize) {
+ //This is a single spin, but the cluster exceeds the maxClusterSize criteria
+ //we use the simple multiplicity algorithm
+ //Add the central peak. It will be split with every single J coupling.
+ var index = 0;
+ while (cluster[index++]<0);
+ index = cluster[index-1];
+ var currentSize, jc;
+ frequencies.push(-chemicalShifts[index]);
+ for (i = 0;i < cluster.length; i++) {
+ if (cluster[i] < 0) {
+ jc = spinSystem.couplingConstants[index][clusterFake[i]];
+ currentSize = frequencies.length;
+ for ( j=0 ; j < currentSize; j++) {
+ frequencies.push(frequencies[j] + jc);
+ frequencies[j] -= jc;
+ }
+ }
+ }
+ frequencies.sort(sortAsc);
+ sumI=frequencies.length;
+ weight=1;
+ for (i=0;i<sumI;i++){
+ intensities.push(1);
+ }
} else {

@@ -63,3 +95,3 @@ const hamiltonian = getHamiltonian(

spinSystem.connectivity,
- cluster
+ clusterFake
);

@@ -75,7 +107,7 @@

for (var n = 0; n < multLen; n++) {
- const L = getPauli(multiplicity[cluster[n]]);
+ const L = getPauli(multiplicity[clusterFake[n]]);
let temp = 1;
- for (var j = 0; j < n; j++) {
- temp *= multiplicity[cluster[j]];
+ for (j = 0; j < n; j++) {
+ temp *= multiplicity[clusterFake[j]];
}

@@ -86,3 +118,3 @@ const A = SparseMatrix.eye(temp);

for (j = n + 1; j < multLen; j++) {
- temp *= multiplicity[cluster[j]];
+ temp *= multiplicity[clusterFake[j]];
}

@@ -95,4 +127,3 @@ const B = SparseMatrix.eye(temp);

} else {
- assignmentMatrix.add(tempMat.mul(0 - (cluster[n] + 1)));
+ assignmentMatrix.add(tempMat.mul(cluster[n]));
}

@@ -129,7 +160,7 @@ }

rhoip = tV.mmul(rhoip);
- rhoip = new SparseMatrix(rhoip, {threshold: 1e-1});
- triuTimesAbs(rhoip, 1e-1);
+ rhoip = new SparseMatrix(rhoip, {threshold: smallValue});
+ triuTimesAbs(rhoip, smallValue);
rhoip2 = tV.mmul(rhoip2);
- rhoip2 = new SparseMatrix(rhoip2, {threshold: 1e-1});
- triuTimesAbs(rhoip2, 1e-1);
+ rhoip2 = new SparseMatrix(rhoip2, {threshold: smallValue});
+ triuTimesAbs(rhoip2, smallValue);

@@ -143,3 +174,3 @@ rhoip2.forEachNonZero((i, j, v) => {

var valFreq = diagB[i] - diagB[j];
- var insertIn = binarySearch(frequencies, valFreq);
+ var insertIn = binarySearch(frequencies, valFreq, sortAsc);
if (insertIn < 0) {

@@ -153,7 +184,7 @@ frequencies.splice(-1 - insertIn, 0, valFreq);

}
const numFreq = frequencies.length;
//console.log("New Spin");
if (numFreq > 0) {
weight = weight / sumI;
- const diff = lineWidth / 16;
+ const diff = lineWidth / 32;
let valFreq = frequencies[0];

@@ -177,7 +208,10 @@ let inte = intensities[0];

}
- return result;
+ if(output==="xy")
+ return {x:_getX(options.from, options.to, nbPoints),y:result};
+ if(output == "y")
+ return result;
}
function addPeak(result, freq, height, from, to, nbPoints, gaussian) {
//console.log(freq, height)
const center = (nbPoints * (-freq-from) / (to - from)) | 0;

@@ -210,6 +244,9 @@ const lnPoints = gaussian.length;

+ if(DEBUG) console.log("Hamiltonian size: "+hamSize);
const clusterHam = new SparseMatrix(hamSize, hamSize);
for (var pos = 0; pos < cluster.length; pos++) {
- const n = cluster[pos];
+ var n = cluster[pos];
const L = getPauli(multiplicity[n]);

@@ -264,2 +301,11 @@

+ function _getX(from, to, nbPoints){
+ const x = new Array(nbPoints);
+ const dx = (to-from)/(nbPoints-1);
+ for (var i = 0 ; i < nbPoints; i++) {
+ x[i]=from+i*dx;
+ }
+ return x;
+ }
module.exports = simulate1d;
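
Taken together, the simulate1d changes in 0.1.4 stop throwing on clusters larger than maxClusterSize (falling back to the first-order multiplicity splitting added above, where the central peak is split once per J coupling), replace the hard-coded 1e-1 sparse-matrix threshold with the shared smallValue constant (1e-2), and add an output option that returns an {x, y} pair built with the new _getX helper. A hedged usage sketch, assuming the export shape, that the constructor accepts plain arrays, and that nbPoints and lineWidth are ordinary options alongside the ones read in the diff (frequency, from, to, maxClusterSize, output):

// Hypothetical two-proton system: shifts in ppm, one 7 Hz coupling, both spins multiplicity 2.
const {SpinSystem, simulate1d} = require('nmr-simulation'); // assumed export shape
const spinSystem = new SpinSystem([1.2, 3.6], [[0, 7], [7, 0]], [2, 2]);

const spectrum = simulate1d(spinSystem, {
  frequency: 400,    // spectrometer frequency in MHz (default 400)
  from: 0,           // ppm
  to: 10,            // ppm
  nbPoints: 16384,   // assumed option name for the number of simulated points
  lineWidth: 1,      // assumed option name, in Hz
  maxClusterSize: 8, // oversized clusters now use the multiplicity algorithm instead of throwing
  output: 'xy'       // new in 0.1.4: {x, y} via _getX instead of the bare intensity array
});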

@@ -5,2 +5,5 @@ 'use strict';

const newArray = require('new-array');
+ const simpleClustering = require('ml-simple-clustering');
+ const hlClust = require("ml-hclust");
+ const DEBUG = false;

@@ -13,4 +16,4 @@ class SpinSystem {

this.nSpins = chemicalShifts.length;
+ this._initConnectivity();
this._initClusters();
- this._initConnectivity();
}

@@ -24,5 +27,4 @@

var ids = {};
- var jc = new Array(nspins);
+ var jc = Matrix.zeros(nspins, nspins);
for (let i = 0; i < nspins; i++) {
- jc[i] = newArray(nspins, 0);
var tokens = lines[i].split('\t');

@@ -39,3 +41,3 @@ cs[i] = +tokens[2];

var idx = ids[withID];
- jc[i][idx] = +tokens[6 + 3 * j];
+ jc[i][idx] = +tokens[6 + 3 * j]/2;
}

@@ -52,11 +54,31 @@ }

- _initClusters() {
- const n = this.chemicalShifts.length;
- const cluster = new Array(n);
- for (var i = 0; i < n; i++) {
- cluster[i] = i;
+ static fromPrediction(result) {
+ const nSpins = result.length;
+ const cs = new Array(nSpins);
+ const jc = Matrix.zeros(nSpins, nSpins);
+ const multiplicity = new Array(nSpins);
+ const ids = {};
+ var i,k,j;
+ for(i=0;i<nSpins;i++) {
+ cs[i] = result[i].delta;
+ ids[result[i].atomIDs[0]] = i;
}
- this.clusters = [cluster];
+ for( i = 0; i < nSpins; i++) {
+ cs[i] = result[i].delta;
+ j = result[i].j;
+ for( k = 0; k < j.length; k++) {
+ //console.log(ids[result[i].atomIDs[0]],ids[j[k].assignment]);
+ jc[ids[result[i].atomIDs[0]]][ids[j[k].assignment]] = j[k].coupling;
+ jc[ids[j[k].assignment]][ids[result[i].atomIDs[0]]] = j[k].coupling;
+ }
+ multiplicity[i] = result[i].integral+1;
+ }
+ return new SpinSystem(cs, jc, multiplicity);
}
+ _initClusters() {
+ this.clusters = simpleClustering(this.connectivity, {out:"indexes"});
+ }
_initConnectivity() {

@@ -75,4 +97,172 @@ const couplings = this.couplingConstants;

}
_calculateBetas(J, frequency){
var betas = Matrix.zeros(J.length, J.length);
//Before clustering, we must add hidden J, we could use molecular information if available
var i,j;
for( i=0;i<J.rows;i++){
for( j=i;j<J.columns;j++){
if((this.chemicalShifts[i]-this.chemicalShifts[j])!=0){
betas[i][j] = 1 - Math.abs(J[i][j]/((this.chemicalShifts[i]-this.chemicalShifts[j])*frequency));
betas[j][i] = betas[i][j];
}
else if( !(i == j || J[i][j] !== 0) ){
betas[i][j] = 1;
betas[j][i] = 1;
}
}
}
return betas;
}
ensureClusterSize(options){
var betas = this._calculateBetas(this.couplingConstants, options.frequency||400);
var cluster = hlClust.agnes(betas, {isDistanceMatrix:true});
var list = [];
this._splitCluster(cluster, list, options.maxClusterSize||8, false);
var clusters = this._mergeClusters(list);
this.nClusters = clusters.length;
//console.log(clusters);
this.clusters = new Array(clusters.length);
//System.out.println(this.conmatrix);
for(var j=0;j<this.nClusters;j++) {
this.clusters[j] = [];
for(var i = 0; i < this.nSpins; i++) {
if(clusters[j][i] !== 0) {
if (clusters[j][i] < 0)
this.clusters[j].push(-(i + 1));
else
this.clusters[j].push(i);
}
}
}
if(DEBUG){
console.log("Cluster list");
console.log(this.clusters);
}
}
/**
* Recursively splits the clusters until the maxClusterSize criterion is met.
* @param cluster
* @param clusterList
* @param maxClusterSize
* @param force
*/
_splitCluster(cluster, clusterList, maxClusterSize, force) {
if(!force && cluster.index.length <= maxClusterSize) {
clusterList.push(this._getMembers(cluster));
}
else{
for(var child of cluster.children){
if (!isNaN(child.index) || child.index.length <= maxClusterSize) {
var members = this._getMembers(child);
//Add the neighbors that share at least 1 coupling with the given cluster
var count = 0;
for (var i = 0; i < this.nSpins; i++) {
if (members[i] == 1) {
count++;
for (var j = 0; j < this.nSpins; j++) {
if (this.connectivity[i][j] == 1 && members[j] == 0) {
members[j] = -1;
count++;
}
}
}
}
if (count <= maxClusterSize)
clusterList.push(members);
else {
if (isNaN(child.index))
this._splitCluster(child, clusterList, maxClusterSize, true);
else {
//We have to treat this spin alone and use the resurrection algorithm instead of the simulation
members[child.index] = 2;
clusterList.push(members);
}
}
}
else{
this._splitCluster(child, clusterList, maxClusterSize, false);
}
}
}
}
/**
* Recursively gets the cluster members
* @param cluster
*/
_getMembers(cluster) {
var members = new Array(this.nSpins);
for(var i = 0; i < this.nSpins; i++) {
members[i]=0;
}
if(!isNaN(cluster.index)) {
members[cluster.index*1] = 1;
}
else{
for(var index of cluster.index) {
members[index.index*1] = 1;
}
}
return members;
}
_mergeClusters(list) {
var nElements = 0;
var clusterA, clusterB, i, j, index, common, count;
for(i = list.length-1; i >=0 ; i--) {
clusterA = list[i];
nElements = clusterA.length;
index=0;
//Is it a candidate to be merged?
while(index < nElements && clusterA[index++] != -1);
if(index < nElements) {
for(j = list.length-1; j>= i+1; j--) {
clusterB = list[j];
//Do they have common elements?
index = 0;
common = 0;
count = 0;
while(index < nElements) {
if(clusterA[index]*clusterB[index] === -1) {
common++;
}
if(clusterA[index] !==0 || clusterB[index] !== 0) {
count++;
}
index++;
}
if(common > 0 && count <= this.maxClusterSize) {
//Then we can merge those 2 clusters
index = 0;
while(index<nElements) {
if(clusterB[index] === 1) {
clusterA[index]=1;
}
else{
if(clusterB[index] === -1 && clusterA[index] !== 1) {
clusterA[index]=-1;
}
}
index++;
}
//list.remove(clusterB);
list.splice(j,1);
j++;
}
}
}
}
return list;
}
}
module.exports = SpinSystem;
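
For reference, the splitting criterion behind ensureClusterSize: _calculateBetas converts the coupling constants into a distance-like matrix, beta[i][j] = 1 - |J[i][j] / ((delta_i - delta_j) * frequency)|, which agnes from ml-hclust then clusters as a distance matrix before _splitCluster and _mergeClusters enforce maxClusterSize. A small worked example with invented numbers:

// Two protons 0.15 ppm apart on a 400 MHz spectrometer, coupled by 7.2 Hz (invented values).
const J = 7.2;            // Hz
const deltaI = 1.20;      // ppm
const deltaJ = 1.35;      // ppm
const frequency = 400;    // MHz, so the shift difference in Hz is (deltaI - deltaJ) * frequency
const beta = 1 - Math.abs(J / ((deltaI - deltaJ) * frequency));
console.log(beta);        // ~0.88: nearly first order, so this pair can be separated when an
                          // oversized cluster is split.
// Strongly coupled spins (J comparable to their separation in Hz) give a beta near 0, i.e. a
// short distance, so agnes keeps them in the same cluster under the maxClusterSize limit.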