nmr-simulation
Comparing version 0.2.2 to 1.0.0
  {
    "name": "nmr-simulation",
-   "version": "0.2.2",
+   "version": "1.0.0",
    "description": "Simulate NMR spectra from spin systems",
@@ -9,7 +9,5 @@ "main": "src/index.js",
    "eslint-fix": "npm run eslint -- --fix",
-   "test": "npm run test-mocha",
-   "test-mocha": "mocha --compilers js:babel-register --require should --reporter mocha-better-spec-reporter --recursive src/**/__tests__/**/*.js",
-   "test-cov": "istanbul cover _mocha -- --compilers js:babel-register --require should --reporter dot --recursive src/**/__tests__/**/*.js",
-   "test-travis": "istanbul cover _mocha --report lcovonly -- --compilers js:babel-register --require should --reporter mocha-better-spec-reporter --recursive src/**/__tests__/**/*.js && npm run eslint",
-   "build": "cheminfo build --no-uglify"
+   "test": "npm run test-mocha && npm run eslint",
+   "test-mocha": "mocha --require should --reporter mocha-better-spec-reporter --recursive",
+   "build": "cheminfo build"
  },
@@ -39,4 +37,4 @@ "files": [
    "binary-search": "^1.3.2",
-   "ml-hclust": "1.1.0",
-   "ml-matrix": "^1.1.2",
+   "ml-hclust": "^1.3.0",
+   "ml-matrix": "^2.3.0",
    "ml-simple-clustering": "0.1.0",
@@ -48,9 +46,2 @@ "ml-sparse-matrix": "^0.2.1",
    "devDependencies": {
-   "babel-cli": "^6.2.0",
-   "babel-core": "^6.2.1",
-   "babel-plugin-transform-es2015-block-scoping": "^6.1.18",
-   "babel-preset-es2015-node4": "^2.0.1",
-   "babel-register": "^6.2.0",
-   "babelify": "^7.2.0",
-   "babili": "0.0.8",
    "cheminfo-tools": "^1.11.0",
@@ -62,5 +53,5 @@ "eslint": "^3.9.1",
    "mocha-better-spec-reporter": "^3.0.1",
-   "should": "^11.1.1",
-   "nmr-predictor": "0.5.2"
+   "nmr-predictor": "^1.0.0",
+   "should": "^11.1.1"
  }
}
@@ -21,3 +21,3 @@ 'use strict';
  const maxClusterSize = options.maxClusterSize || 10;
- const output = options.output || "y";
+ const output = options.output || 'y';
@@ -48,3 +48,3 @@ const chemicalShifts = spinSystem.chemicalShifts.slice();
  for (i = 0; i < cluster.length; i++) {
- clusterFake[i] = cluster[i]<0?-cluster[i]-1:cluster[i];
+ clusterFake[i] = cluster[i] < 0 ? -cluster[i] - 1 : cluster[i];
  }
@@ -60,12 +60,12 @@
  //Add the central peak. It will be split with every single J coupling.
- var index = 0;
- while (cluster[index++]<0);
- index = cluster[index-1];
+ var index = 0;
+ while (cluster[index++] < 0);
+ index = cluster[index - 1];
  var currentSize, jc;
  frequencies.push(-chemicalShifts[index]);
- for (i = 0;i < cluster.length; i++) {
+ for (i = 0; i < cluster.length; i++) {
  if (cluster[i] < 0) {
  jc = spinSystem.couplingConstants[index][clusterFake[i]] / 2;
  currentSize = frequencies.length;
- for ( j=0 ; j < currentSize; j++) {
+ for (j = 0; j < currentSize; j++) {
  frequencies.push(frequencies[j] + jc);
@@ -78,6 +78,6 @@ frequencies[j] -= jc;
  frequencies.sort(sortAsc);
- sumI=frequencies.length;
- weight=1;
+ sumI = frequencies.length;
+ weight = 1;
- for (i=0;i<sumI;i++){
+ for (i = 0; i < sumI; i++) {
  intensities.push(1);
@@ -176,3 +176,2 @@ }
  const numFreq = frequencies.length;
- //console.log("New Spin");
  if (numFreq > 0) {
@@ -199,11 +198,13 @@ weight = weight / sumI;
  }
- if(output==="xy")
- return {x:_getX(options.from, options.to, nbPoints),y:result};
- if(output == "y")
+ if (output === 'xy') {
+ return {x: _getX(options.from, options.to, nbPoints), y: result};
+ }
+ if (output === 'y') {
  return result;
+ }
  throw new RangeError('wrong output option');
  }
  function addPeak(result, freq, height, from, to, nbPoints, gaussian) {
- //console.log(freq, height)
- const center = (nbPoints * (-freq-from) / (to - from)) | 0;
+ const center = (nbPoints * (-freq - from) / (to - from)) | 0;
  const lnPoints = gaussian.length;
@@ -289,7 +290,7 @@ var index = 0;
- function _getX(from, to, nbPoints){
+ function _getX(from, to, nbPoints) {
  const x = new Array(nbPoints);
- const dx = (to-from)/(nbPoints-1);
- for (var i = 0 ; i < nbPoints; i++) {
- x[i]=from+i*dx;
+ const dx = (to - from) / (nbPoints - 1);
+ for (var i = 0; i < nbPoints; i++) {
+ x[i] = from + i * dx;
  }
@@ -296,0 +297,0 @@ return x;
@@ -5,6 +5,6 @@ 'use strict';
- let defOptions = {'H': {frequency: 400, lineWidth: 10}, 'C': {frequency: 100, lineWidth: 10}}
+ let defOptions = {'H': {frequency: 400, lineWidth: 10}, 'C': {frequency: 100, lineWidth: 10}};
  function simule2DNmrSpectrum(table, options) {
- var i, j;
+ var i;
  const fromLabel = table[0].fromAtomLabel;
@@ -14,4 +14,4 @@ const toLabel = table[0].toLabel;
  const frequencyY = options.frequencyY || defOptions[toLabel].frequency;
- var lineWidthX = options.lineWidthX || defOptions[fromLabel].lineWidth;
- var lineWidthY = options.lineWidthY || defOptions[toLabel].lineWidth;
+ var lineWidthX = options.lineWidthX || defOptions[fromLabel].lineWidth;
+ var lineWidthY = options.lineWidthY || defOptions[toLabel].lineWidth;
@@ -26,3 +26,3 @@ var sigmaX = lineWidthX / frequencyX;
  i = 1;
- while(i < table.length) {
+ while (i < table.length) {
  minX = Math.min(minX, table[i].fromChemicalShift);
@@ -35,10 +35,14 @@ maxX = Math.max(maxX, table[i].fromChemicalShift);
- if(options.firstX !== null && !isNaN(options.firstX))
+ if (options.firstX !== null && !isNaN(options.firstX)) {
  minX = options.firstX;
- if(options.firstY !== null && !isNaN(options.firstY))
+ }
+ if (options.firstY !== null && !isNaN(options.firstY)) {
  minY = options.firstY;
- if(options.lastX !== null && !isNaN(options.lastX))
- maxX = options.lastX
- if(options.lastY !== null && !isNaN(options.lastY))
+ }
+ if (options.lastX !== null && !isNaN(options.lastX)) {
+ maxX = options.lastX;
+ }
+ if (options.lastY !== null && !isNaN(options.lastY)) {
  maxY = options.lastY;
+ }
@@ -50,3 +54,3 @@ var nbPointsX = options.nbPointsX || 512;
  i = 0;
- while(i < table.length) {
+ while (i < table.length) {
  //parameters.couplingConstant = table[i].j;
@@ -59,4 +63,4 @@ //parameters.pathLength = table[i].pathLength;
  widthX: unitsToArrayPoints(sigmaX + minX, minX, maxX, nbPointsX),
- widthY: unitsToArrayPoints(sigmaY+ minY, minY, maxY, nbPointsY)
- }
+ widthY: unitsToArrayPoints(sigmaY + minY, minY, maxY, nbPointsY)
+ };
  addPeak(spectraMatrix, peak);
@@ -76,3 +80,3 @@ i++;
  var toX = Math.min(matrix[0].length - 1, Math.round(peak.x + peak.widthX * nSigma));
- var fromY = Math.max(0, Math.round(peak.y - peak.widthY * nSigma));
+ var fromY = Math.max(0, Math.round(peak.y - peak.widthY * nSigma));
  var toY = Math.min(matrix.length - 1, Math.round(peak.y + peak.widthY * nSigma));
@@ -86,3 +90,3 @@
  Math.pow(peak.y - j, 2) / squareSigmaY;
- var result = 10000 * peak.z * Math.exp( - exponent);
+ var result = 10000 * peak.z * Math.exp(-exponent);
  matrix[j][i] += result;
@@ -94,2 +98,2 @@ }
- module.exports = simule2DNmrSpectrum;
+ module.exports = simule2DNmrSpectrum;
@@ -6,4 +6,3 @@ 'use strict';
  const simpleClustering = require('ml-simple-clustering');
- const hlClust = require("ml-hclust");
- const DEBUG = false;
+ const hlClust = require('ml-hclust');
@@ -52,3 +51,3 @@ class SpinSystem {
  static fromPrediction(input) {
- let predictions = SpinSystem._ungroupAtoms(input);
+ let predictions = SpinSystem.ungroupAtoms(input);
  const nSpins = predictions.length;
@@ -59,11 +58,11 @@ const cs = new Array(nSpins);
  const ids = {};
- var i,k,j;
- for(i=0;i<nSpins;i++) {
+ var i, k, j;
+ for (i = 0; i < nSpins; i++) {
  cs[i] = predictions[i].delta;
  ids[predictions[i].atomIDs[0]] = i;
  }
- for( i = 0; i < nSpins; i++) {
+ for (i = 0; i < nSpins; i++) {
  cs[i] = predictions[i].delta;
  j = predictions[i].j;
- for( k = 0; k < j.length; k++) {
+ for (k = 0; k < j.length; k++) {
  //console.log(ids[result[i].atomIDs[0]],ids[j[k].assignment]);
@@ -73,3 +72,3 @@ jc[ids[predictions[i].atomIDs[0]]][ids[j[k].assignment]] = j[k].coupling;
  }
- multiplicity[i] = predictions[i].integral+1;
+ multiplicity[i] = predictions[i].integral + 1;
  }
@@ -81,7 +80,7 @@
- static _ungroupAtoms(prediction) {
+ static ungroupAtoms(prediction) {
  let result = [];
  prediction.forEach(pred => {
- let atomIDs = pred['atomIDs'];
- for(let i = 0; i < atomIDs.length; i++) {
+ let atomIDs = pred.atomIDs;
+ for (let i = 0; i < atomIDs.length; i++) {
  let tempPred = JSON.parse(JSON.stringify(pred));
@@ -99,3 +98,3 @@ tempPred.atomIDs = [atomIDs[i]];
  _initClusters() {
- this.clusters = simpleClustering(this.connectivity, {out:"indexes"});
+ this.clusters = simpleClustering(this.connectivity, {out: 'indexes'});
  }
@@ -118,16 +117,15 @@
- _calculateBetas(J, frequency){
+ _calculateBetas(J, frequency) {
  var betas = Matrix.zeros(J.length, J.length);
  //Before clustering, we must add hidden J, we could use molecular information if available
- var i,j;
- for( i=0;i<J.rows;i++){
- for( j=i;j<J.columns;j++){
- if((this.chemicalShifts[i]-this.chemicalShifts[j])!=0){
- betas[i][j] = 1 - Math.abs(J[i][j]/((this.chemicalShifts[i]-this.chemicalShifts[j])*frequency));
+ var i, j;
+ for (i = 0; i < J.rows; i++) {
+ for (j = i; j < J.columns; j++) {
+ if ((this.chemicalShifts[i] - this.chemicalShifts[j]) !== 0) {
+ betas[i][j] = 1 - Math.abs(J[i][j] / ((this.chemicalShifts[i] - this.chemicalShifts[j]) * frequency));
  betas[j][i] = betas[i][j];
+ } else if (!(i === j || J[i][j] !== 0)) {
+ betas[i][j] = 1;
+ betas[j][i] = 1;
+ }
- else if( !(i == j || J[i][j] !== 0) ){
- betas[i][j] = 1;
- betas[j][i] = 1;
- }
  }
@@ -138,28 +136,24 @@ }
- ensureClusterSize(options){
- var betas = this._calculateBetas(this.couplingConstants, options.frequency||400);
- var cluster = hlClust.agnes(betas, {isDistanceMatrix:true});
+ ensureClusterSize(options) {
+ var betas = this._calculateBetas(this.couplingConstants, options.frequency || 400);
+ var cluster = hlClust.agnes(betas, {isDistanceMatrix: true});
  var list = [];
- this._splitCluster(cluster, list, options.maxClusterSize||8, false);
+ this._splitCluster(cluster, list, options.maxClusterSize || 8, false);
  var clusters = this._mergeClusters(list);
  this.nClusters = clusters.length;
- //console.log(clusters);
- this.clusters = new Array(clusters.length);
+ this.clusters = new Array(clusters.length);
  //System.out.println(this.conmatrix);
- for(var j=0;j<this.nClusters;j++) {
+ for (var j = 0; j < this.nClusters; j++) {
  this.clusters[j] = [];
- for(var i = 0; i < this.nSpins; i++) {
- if(clusters[j][i] !== 0) {
- if (clusters[j][i] < 0)
+ for (var i = 0; i < this.nSpins; i++) {
+ if (clusters[j][i] !== 0) {
+ if (clusters[j][i] < 0) {
  this.clusters[j].push(-(i + 1));
- else
+ } else {
  this.clusters[j].push(i);
+ }
  }
  }
  }
- if(DEBUG){
- console.log("Cluster list");
- console.log(this.clusters);
- }
  }
@@ -173,7 +167,6 @@
  _splitCluster(cluster, clusterList, maxClusterSize, force) {
- if(!force && cluster.index.length <= maxClusterSize) {
+ if (!force && cluster.index.length <= maxClusterSize) {
  clusterList.push(this._getMembers(cluster));
- }
- else{
- for(var child of cluster.children){
+ } else {
+ for (var child of cluster.children) {
  if (!isNaN(child.index) || child.index.length <= maxClusterSize) {
@@ -184,6 +177,6 @@ var members = this._getMembers(child);
  for (var i = 0; i < this.nSpins; i++) {
- if (members[i] == 1) {
+ if (members[i] === 1) {
  count++;
  for (var j = 0; j < this.nSpins; j++) {
- if (this.connectivity[i][j] == 1 && members[j] == 0) {
+ if (this.connectivity[i][j] === 1 && members[j] === 0) {
  members[j] = -1;
@@ -196,8 +189,8 @@ count++;
- if (count <= maxClusterSize)
+ if (count <= maxClusterSize) {
  clusterList.push(members);
- else {
- if (isNaN(child.index))
+ } else {
+ if (isNaN(child.index)) {
  this._splitCluster(child, clusterList, maxClusterSize, true);
- else {
+ } else {
  //We have to threat this spin alone and use the resurrection algorithm instead of the simulation
@@ -208,4 +201,3 @@ members[child.index] = 2;
  }
- }
- else{
+ } else {
  this._splitCluster(child, clusterList, maxClusterSize, false);
@@ -224,11 +216,10 @@ }
  var members = new Array(this.nSpins);
- for(var i = 0; i < this.nSpins; i++) {
- members[i]=0;
+ for (var i = 0; i < this.nSpins; i++) {
+ members[i] = 0;
  }
- if(!isNaN(cluster.index)) {
- members[cluster.index*1] = 1;
- }
- else{
- for(var index of cluster.index) {
- members[index.index*1] = 1;
+ if (!isNaN(cluster.index)) {
+ members[cluster.index * 1] = 1;
+ } else {
+ for (var index of cluster.index) {
+ members[index.index * 1] = 1;
  }
@@ -242,12 +233,12 @@ }
  var clusterA, clusterB, i, j, index, common, count;
- for(i = list.length-1; i >=0 ; i--) {
+ for (i = list.length - 1; i >= 0; i--) {
  clusterA = list[i];
  nElements = clusterA.length;
- index=0;
+ index = 0;
  //Is it a candidate to be merged?
- while(index < nElements && clusterA[index++] != -1);
+ while (index < nElements && clusterA[index++] !== -1);
- if(index < nElements) {
- for(j = list.length-1; j>= i+1; j--) {
+ if (index < nElements) {
+ for (j = list.length - 1; j >= i + 1; j--) {
  clusterB = list[j];
@@ -258,7 +249,7 @@ //Do they have common elements?
  count = 0;
- while(index < nElements) {
- if(clusterA[index]*clusterB[index] === -1) {
+ while (index < nElements) {
+ if (clusterA[index] * clusterB[index] === -1) {
  common++;
  }
- if(clusterA[index] !==0 || clusterB[index] !== 0) {
+ if (clusterA[index] !== 0 || clusterB[index] !== 0) {
  count++;
@@ -269,12 +260,11 @@ }
- if(common > 0 && count <= this.maxClusterSize) {
+ if (common > 0 && count <= this.maxClusterSize) {
  //Then we can merge those 2 clusters
  index = 0;
- while(index<nElements) {
- if(clusterB[index] === 1) {
- clusterA[index]=1;
- }
- else{
- if(clusterB[index] === -1 && clusterA[index] !== 1) {
- clusterA[index]=-1;
+ while (index < nElements) {
+ if (clusterB[index] === 1) {
+ clusterA[index] = 1;
+ } else {
+ if (clusterB[index] === -1 && clusterA[index] !== 1) {
+ clusterA[index] = -1;
  }
@@ -285,3 +275,3 @@ }
  //list.remove(clusterB);
- list.splice(j,1);
+ list.splice(j, 1);
  j++;
@@ -288,0 +278,0 @@ }
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
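For context, a minimal sketch of the pattern this alert refers to; the variable names are hypothetical and this is not necessarily the exact call flagged in this package:

// Static require: the module name is a literal, so audit tooling can see
// exactly which dependency will be loaded.
const hlClust = require('ml-hclust');

// Dynamic require: the module name is computed at runtime, so tooling
// cannot verify ahead of time what code will actually be executed.
const moduleName = process.env.PREDICTOR_MODULE; // hypothetical runtime input
const predictor = require(moduleName);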
No v1
Quality: Package is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
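This alert is resolved by the 1.0.0 release above. As a rough illustration of the caret semantics involved, a sketch assuming the standard npm semver package (not part of this project):

const semver = require('semver');

// Below 1.0.0, a caret range only accepts patch updates within the same minor:
semver.satisfies('0.2.9', '^0.2.2'); // true
semver.satisfies('0.3.0', '^0.2.2'); // false

// At 1.0.0 and above, a caret range accepts any release within the same major:
semver.satisfies('1.4.0', '^1.0.0'); // true
semver.satisfies('2.0.0', '^1.0.0'); // false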
+ Added heap@0.2.7 (transitive)
+ Added ml-array-utils@0.3.0 (transitive)
+ Added ml-distance-matrix@1.0.0 (transitive)
+ Added ml-hclust@1.3.0 (transitive)
+ Added ml-matrix@2.3.0 (transitive)
- Removed ml-array-utils@0.2.4 (transitive)
- Removed ml-binary-search@1.1.2 (transitive)
- Removed ml-hclust@1.1.0 (transitive)
- Removed ml-matrix@1.4.0 (transitive)
Updated ml-hclust@^1.3.0
Updated ml-matrix@^2.3.0