brukerconverter
Comparing version 5.0.1 to 5.0.2-pre.1667228020
@@ -72,3 +72,3 @@ import { DoubleArray } from 'cheminfo-types';
 expno: number;
-fileCollection: FileCollection;
+fileCollectionItems: FileCollectionItem[];
 processedData?: ProcessedData;
@@ -164,5 +164,5 @@ is2D?: boolean;
-export function convertFileList(
+export function convertFileCollection(
 fileCollection: FileCollection,
 options?: ConvertFileListOptions,
 ): Promise<SpectraData[]>;
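The public entry point is renamed from convertFileList to convertFileCollection and now expects a FileCollection. A minimal usage sketch, assuming the collection is built with fileCollectionFromPath from filelist-utils and a local dataset folder (both assumptions, not shown in this diff):

```js
// Minimal sketch: `fileCollectionFromPath` and the folder path are assumptions.
import { fileCollectionFromPath } from 'filelist-utils';
import { convertFileCollection } from 'brukerconverter';

async function run() {
  const fileCollection = await fileCollectionFromPath('./bruker-dataset');
  // convertFileList is replaced by convertFileCollection in 5.0.2-pre
  const spectraData = await convertFileCollection(fileCollection);
  console.log(`converted ${spectraData.length} experiments`);
}

run().catch((error) => console.error(error));
```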
lib/index.js
@@ -11,3 +11,3 @@ 'use strict';
 * Retrieve the list of files for further process
-* @param {FileCollection[]} fileList
+* @param {FileCollection[]} fileCollection
 * @param {object} [options={}]
@@ -22,3 +22,3 @@ * @param {number|number[]} [options.processingNumber] - processing number to select, default the smallest number
 */
-function groupByExperiments(fileList, options = {}) {
+function groupByExperiments(fileCollection, options = {}) {
 let {
@@ -41,3 +41,3 @@ processingNumber,
 const experiments = {};
-for (let file of fileList) {
+for (let file of fileCollection) {
 let currentProcessingNo;
@@ -71,3 +71,3 @@ let currentExperimentNo;
 processedData: {},
-fileList: [],
+fileCollectionItems: [],
 };
@@ -79,3 +79,3 @@ }
 experiment.processedData[currentProcessingNo] = {
-fileList: [],
+fileCollectionItems: [],
 name,
@@ -86,3 +86,3 @@ expno: currentExperimentNo,
 const processedData = experiment.processedData[currentProcessingNo];
-processedData.fileList.push(file);
+processedData.fileCollectionItems.push(file);
 if (file.name.match(/^(1r|1i|2rr|procs|proc2s)$/)) {
@@ -100,3 +100,3 @@ processedData[file.name] = file;
 } else {
-experiment.fileList.push(file);
+experiment.fileCollectionItems.push(file);
 if (file.name.match(/^(ser|fid|acqus|acqu2s)$/)) {
@@ -152,7 +152,7 @@ experiment[file.name] = file;
 if (firstProcessed.procs) {
-experiment.fileList.push(firstProcessed.procs);
+experiment.fileCollectionItems.push(firstProcessed.procs);
 experiment.procs = firstProcessed.procs;
 }
 if (firstProcessed.proc2s) {
-experiment.fileList.push(firstProcessed.proc2s);
+experiment.fileCollectionItems.push(firstProcessed.proc2s);
 experiment.proc2s = firstProcessed.proc2s;
@@ -167,6 +167,6 @@ }
 if (experiment.acqus) {
-oneProcessed.fileList.push(experiment.acqus);
+oneProcessed.fileCollectionItems.push(experiment.acqus);
 }
 if (experiment.acqu2s) {
-oneProcessed.fileList.push(experiment.acqu2s);
+oneProcessed.fileCollectionItems.push(experiment.acqu2s);
 }
@@ -236,2 +236,4 @@ experimentsArray.push({
+const QSEQ = 2;
 async function setFIDSpectrumData(file, spectra) {
@@ -247,11 +249,10 @@ let td = parseInt(spectra.meta.TD[0], 10);
-let SW_H = Number(spectra.meta.SW_h[0]);
+const SW_H = Number(spectra.meta.SW_h[0]);
-let SF = Number(spectra.meta.SFO1[0]);
+const SF = Number(spectra.meta.SFO1[0]);
 spectra.meta.DATATYPE = 'NMR FID';
-let DW = 1 / (2 * SW_H);
-let AQ = td * DW;
+const DW = 1 / (2 * SW_H);
+const AQ = td * DW;
 let endian = parseInt(spectra.meta.BYTORDA, 10);
@@ -272,3 +273,6 @@ endian = endian ? 0 : 1;
 const stopReading = td / 2;
+const aqMode = spectra.meta.AQ_mod;
+const deltaX = aqMode === QSEQ ? DW : 2 * DW;
+const nbPoints = aqMode === QSEQ ? td : (td / 2) >> 0;
 for (let j = 0; j < nbSubSpectra; j++) {
@@ -278,3 +282,3 @@ let toSave = {
 dataTable: '(X++(R..R))',
-nbPoints: td,
+nbPoints,
 firstX: 0,
@@ -286,16 +290,24 @@ lastX: AQ,
 data: {
-x: mlSpectraProcessing.createStepArray({ length: td, step: DW }),
-re: new Float64Array(td),
-im: new Float64Array(td),
+x: mlSpectraProcessing.createStepArray({ length: nbPoints, step: deltaX }),
+re: new Float64Array(nbPoints),
+im: new Float64Array(nbPoints),
 },
 isXYdata: true,
 isFID: true,
 observeFrequency: SF,
 title: spectra.meta.TITLE,
-deltaX: DW,
+deltaX,
 };
 spectra.spectra[j] = toSave;
-for (let i = 0; i < stopReading; i++) {
-spectra.spectra[j].data.re[i] = ioBuffer.readInt32();
-spectra.spectra[j].data.im[i] = ioBuffer.readInt32();
+// const dtypa = spectra.meta.DTYPA;// we should use it for float or double FID data;
+if (aqMode === QSEQ) {
+for (let i = 0; i < nbPoints; i++) {
+spectra.spectra[j].data.re[i] = ioBuffer.readInt32();
+}
+} else {
+for (let i = 0; i < nbPoints; i++) {
+spectra.spectra[j].data.re[i] = ioBuffer.readInt32();
+spectra.spectra[j].data.im[i] = ioBuffer.readInt32();
+}
+}
@@ -305,7 +317,9 @@ }
-async function setXYSpectrumData(file, spectra) {
-let ioBufferReal = file.re ? new iobuffer.IOBuffer(await file.re.arrayBuffer()) : null;
-let ioBufferImaginary = file.im
-? new iobuffer.IOBuffer(await file.im.arrayBuffer())
-: null;
+async function setProcessedSpectrumData(files, spectra) {
+const buffers = {};
+for (const key in files) {
+if (!files[key]) continue;
+buffers[key] = new iobuffer.IOBuffer(await files[key].arrayBuffer());
+}
 let td = getDirectParameter(spectra.meta.SI);
@@ -326,8 +340,7 @@ let swP = getDirectParameter(spectra.meta.SW_p);
 let nbSubSpectra = spectra.meta.nbSubSpectra ? spectra.meta.nbSubSpectra : 1;
 if (endian) {
-if (file.re) ioBufferReal.setLittleEndian();
-if (file.im) ioBufferImaginary.setLittleEndian();
+for (const key in buffers) buffers[key].setLittleEndian();
 } else {
-if (file.re) ioBufferReal.setBigEndian();
-if (file.im) ioBufferImaginary.setBigEndian();
+for (const key in buffers) buffers[key].setBigEndian();
 }
@@ -344,2 +357,3 @@
 isXYdata: true,
+isFT: true,
 nucleus: spectra.meta.NUC1,
@@ -351,21 +365,15 @@ observeFrequency: sf,
-let deltaX = toSave.deltaX;
-let x = new Float64Array(td);
-let re = new Float64Array(td);
-let im = file.im ? new Float64Array(td) : null;
-if (im) {
+const deltaX = toSave.deltaX;
+const x = mlSpectraProcessing.xSequentialFill({ from: offset, step: deltaX });
+const datum = {};
+for (const key in buffers) {
+const buffer = buffers[key];
+const data = new Float64Array(td);
 for (let k = 0; k < td; ++k) {
-x[k] = offset + k * deltaX;
-re[k] = ioBufferReal.readInt32();
-im[k] = ioBufferImaginary.readInt32();
+data[k] = buffer.readInt32();
 }
-} else {
-for (let k = 0; k < td; ++k) {
-x[k] = offset + k * deltaX;
-re[k] = ioBufferReal.readInt32();
-}
+datum[key] = data;
 }
-toSave.data = im ? { x, re, im } : { x, re };
+toSave.data = { ...datum, x };
@@ -386,3 +394,3 @@ spectra.spectra.push(toSave);
 if (files['1r'] || files['1i']) {
-await setXYSpectrumData(
+await setProcessedSpectrumData(
 {
@@ -428,3 +436,5 @@ re: files['1r'],
-result.meta.nbSubSpectra = files['2rr']
+result.meta.nbSubSpectra = ['2rr', '2ri', '2ir', '2ii'].some(
+(key) => key in files,
+)
 ? parseInt(result.meta.SI[1], 10)
@@ -451,3 +461,11 @@ : parseInt(result.meta.TD[1], 10);
 result.meta.lastY = lastY;
-await setXYSpectrumData({ re: files['2rr'] }, result);
+await setProcessedSpectrumData(
+{
+rr: files['2rr'],
+ri: files['2ri'],
+ir: files['2ir'],
+ii: files['2ii'],
+},
+result,
+);
 } else if (files.ser) {
@@ -493,13 +511,15 @@ firstY = 0;
 * @param spectra
-* @returns {{z: Array, minX: *, maxX: *, minY: *, maxY: *, minZ: *, maxZ: *, noise: number}}
+* @returns {{z: Array<Array<number>>, minX: *, maxX: *, minY: *, maxY: *, minZ: *, maxZ: *}}
 */
-function convertTo3DZ(spectra) {
+function convertTo3DZ(datum) {
+const spectra = datum.spectra;
+// if (fileKeys.length === 0) {
+// throw new Error('There is not data to extract');
+// }
 let ySize = spectra.length;
-let xSize = spectra[0].data.re.length;
-let z = new Array(ySize);
+let xSize = spectra[0].data.x.length;
-for (let i = 0; i < ySize; i++) {
-z[i] = new Float64Array(spectra[i].data.re);
-}
 const firstX = spectra[0].data.x[0];
@@ -510,2 +530,27 @@ const lastX = spectra[0].data.x[xSize - 1];
+const minMax = {};
+const keyFiles = Object.keys(spectra[0].data).filter((key) => key !== 'x');
+for (const keyFile of keyFiles) {
+minMax[keyFile] = extractZMatrix(datum, {
+keyFile,
+ySize,
+firstX,
+lastX,
+firstY,
+lastY,
+});
+}
+return minMax;
+}
+function extractZMatrix(datum, options) {
+const spectra = datum.spectra;
+const { keyFile, ySize, firstX, lastX, firstY, lastY } = options;
+let z = new Array(ySize);
+for (let i = 0; i < ySize; i++) {
+z[i] = new Float64Array(spectra[i].data[keyFile]);
+}
 // Because the min / max value are the only information about the matrix if we invert
@@ -554,3 +599,3 @@ // min and max we need to invert the array
 let result;
-if (brukerFiles.ser || brukerFiles['2rr']) {
+if (['ser', '2rr', '2ri', '2ir', '2ii'].some((key) => key in brukerFiles)) {
 result = await convert2D(brukerFiles, options);
@@ -595,3 +640,3 @@ } else if (brukerFiles['1r'] || brukerFiles['1i'] || brukerFiles.fid) {
 if (result.twoD) {
-result.minMax = convertTo3DZ(result.spectra);
+result.minMax = convertTo3DZ(result);
@@ -598,0 +643,0 @@ if (!options.keepSpectra) {
package.json
 {
 "name": "brukerconverter",
-"version": "5.0.1",
+"version": "5.0.2-pre.1667228020",
 "description": "Parse and convert Bruker raw data",
@@ -53,2 +53,2 @@ "main": "lib/index.js",
}
}
}
@@ -39,3 +39,3 @@ import { getCoffee } from 'bruker-data-test';
 dataTable: '(X++(R..R))',
-nbPoints: 65536,
+nbPoints: 32768,
 firstX: 0,
@@ -42,0 +42,0 @@ lastX: 3.984588800000003,
@@ -34,4 +34,4 @@ import { getData } from 'bruker-data-test';
 expect(metadataFt.SI[1]).toBe(1024);
-expect(minMaxFt.z).toHaveLength(1024);
-expect(minMaxFt.z[0]).toHaveLength(1024);
+expect(minMaxFt.rr.z).toHaveLength(1024);
+expect(minMaxFt.rr.z[0]).toHaveLength(1024);
@@ -43,5 +43,5 @@ let minMaxSer = result[0].minMax;
 expect(metadataSer.TD[1]).toBe(128);
-expect(minMaxSer.z).toHaveLength(64);
-expect(minMaxSer.z[0]).toHaveLength(4096);
+expect(minMaxSer.re.z).toHaveLength(64);
+expect(minMaxSer.re.z[0]).toHaveLength(2048);
 });
 });
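The updated assertions show the reshaped minMax object: the matrix now sits under the stored component key ('rr' for the processed 2rr spectrum, 're' for the raw ser FID), and the ser row length drops from 4096 to 2048 because the FID is now read as TD/2 complex points. A hedged sketch of how consuming code can adapt:

```js
// Hedged helper for the new per-component minMax layout; the component names
// mirror the keys asserted above, everything else is an assumption.
function getZMatrix(minMax, component) {
  const entry = minMax[component];
  if (!entry) {
    throw new Error(`component "${component}" not present in minMax`);
  }
  return entry.z; // array of rows, one Float64Array per sub-spectrum
}

// before (5.0.1): minMax.z[0].length
// after (5.0.2-pre): getZMatrix(minMax, 'rr')[0].length for processed 2D data
```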
 /**
 * Retrieve the list of files for further process
-* @param {FileCollection[]} fileList
+* @param {FileCollection[]} fileCollection
 * @param {object} [options={}]
@@ -13,3 +13,3 @@ * @param {number|number[]} [options.processingNumber] - processing number to select, default the smallest number
 */
-export function groupByExperiments(fileList, options = {}) {
+export function groupByExperiments(fileCollection, options = {}) {
 let {
@@ -32,3 +32,3 @@ processingNumber,
 const experiments = {};
-for (let file of fileList) {
+for (let file of fileCollection) {
 let currentProcessingNo;
@@ -62,3 +62,3 @@ let currentExperimentNo;
 processedData: {},
-fileList: [],
+fileCollectionItems: [],
 };
@@ -70,3 +70,3 @@ }
 experiment.processedData[currentProcessingNo] = {
-fileList: [],
+fileCollectionItems: [],
 name,
@@ -77,3 +77,3 @@ expno: currentExperimentNo,
 const processedData = experiment.processedData[currentProcessingNo];
-processedData.fileList.push(file);
+processedData.fileCollectionItems.push(file);
 if (file.name.match(/^(1r|1i|2rr|procs|proc2s)$/)) {
@@ -91,3 +91,3 @@ processedData[file.name] = file;
 } else {
-experiment.fileList.push(file);
+experiment.fileCollectionItems.push(file);
 if (file.name.match(/^(ser|fid|acqus|acqu2s)$/)) {
@@ -143,7 +143,7 @@ experiment[file.name] = file;
 if (firstProcessed.procs) {
-experiment.fileList.push(firstProcessed.procs);
+experiment.fileCollectionItems.push(firstProcessed.procs);
 experiment.procs = firstProcessed.procs;
 }
 if (firstProcessed.proc2s) {
-experiment.fileList.push(firstProcessed.proc2s);
+experiment.fileCollectionItems.push(firstProcessed.proc2s);
 experiment.proc2s = firstProcessed.proc2s;
@@ -158,6 +158,6 @@ }
 if (experiment.acqus) {
-oneProcessed.fileList.push(experiment.acqus);
+oneProcessed.fileCollectionItems.push(experiment.acqus);
 }
 if (experiment.acqu2s) {
-oneProcessed.fileList.push(experiment.acqu2s);
+oneProcessed.fileCollectionItems.push(experiment.acqu2s);
 }
@@ -164,0 +164,0 @@ experimentsArray.push({
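Both the bundled and the source version of groupByExperiments rename the per-experiment and per-processing file arrays from fileList to fileCollectionItems. A minimal sketch of walking the grouped output; the relative import and the array return value (the experimentsArray built above) are assumptions:

```js
// Minimal sketch, assuming a relative import and that groupByExperiments
// returns the experimentsArray assembled above.
import { groupByExperiments } from './groupByExperiments';

function describeExperiments(fileCollection) {
  for (const experiment of groupByExperiments(fileCollection)) {
    // fileCollectionItems replaces the former fileList array
    console.log('acquisition files:', experiment.fileCollectionItems.length);
    for (const processingNo in experiment.processedData) {
      const processed = experiment.processedData[processingNo];
      console.log(`  procno ${processingNo}:`, processed.fileCollectionItems.length, 'files');
    }
  }
}
```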
 import { joinInfoMeta } from './joinMetaInfo.js';
 import { parseData } from './parseData';
 import { setFIDSpectrumData } from './setFIDSpectrumData';
-import { setXYSpectrumData } from './setXYSpectrumData';
+import { setProcessedSpectrumData } from './setProcessedSpectrumData';
@@ -12,3 +12,3 @@ export async function convert1D(files, options) {
 if (files['1r'] || files['1i']) {
-await setXYSpectrumData(
+await setProcessedSpectrumData(
 {
@@ -15,0 +15,0 @@ re: files['1r'],
@@ -5,3 +5,3 @@ import { joinInfoMeta } from './joinMetaInfo.js';
 import { setFIDSpectrumData } from './setFIDSpectrumData';
-import { setXYSpectrumData } from './setXYSpectrumData';
+import { setProcessedSpectrumData } from './setProcessedSpectrumData';
@@ -21,3 +21,5 @@ export async function convert2D(files, options) {
-result.meta.nbSubSpectra = files['2rr']
+result.meta.nbSubSpectra = ['2rr', '2ri', '2ir', '2ii'].some(
+(key) => key in files,
+)
 ? parseInt(result.meta.SI[1], 10)
@@ -44,3 +46,11 @@ : parseInt(result.meta.TD[1], 10);
 result.meta.lastY = lastY;
-await setXYSpectrumData({ re: files['2rr'] }, result, true);
+await setProcessedSpectrumData(
+{
+rr: files['2rr'],
+ri: files['2ri'],
+ir: files['2ir'],
+ii: files['2ii'],
+},
+result,
+);
 } else if (files.ser) {
@@ -59,3 +69,3 @@ firstY = 0;
 yTransmitterFrequencyOffset / yTransmitterFrequency + yWindowSize / 2;
-await setFIDSpectrumData(files.ser, result);
+await setFIDSpectrumData(files.ser, result, true);
 }
@@ -62,0 +72,0 @@
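convert2D now hands every available processed component to setProcessedSpectrumData instead of only 2rr, so each converted sub-spectrum carries one array per component in its data object. A hedged sketch of inspecting the result (the exact key set depends on which files exist in the dataset):

```js
// Hedged sketch: `result` is assumed to be the object filled by convert2D above.
function listProcessedComponents(result) {
  const data = result.spectra[0].data;
  // every key except `x` is a stored component: 'rr', plus 'ri'/'ir'/'ii' when present
  return Object.keys(data).filter((key) => key !== 'x');
}
```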
@@ -17,3 +17,3 @@ import { convert1D } from './convert1D';
 let result;
-if (brukerFiles.ser || brukerFiles['2rr']) {
+if (['ser', '2rr', '2ri', '2ir', '2ii'].some((key) => key in brukerFiles)) {
 result = await convert2D(brukerFiles, options);
@@ -58,3 +58,3 @@ } else if (brukerFiles['1r'] || brukerFiles['1i'] || brukerFiles.fid) {
 if (result.twoD) {
-result.minMax = convertTo3DZ(result.spectra);
+result.minMax = convertTo3DZ(result);
@@ -61,0 +61,0 @@ if (!options.keepSpectra) {
@@ -5,13 +5,15 @@ import { matrixMinMaxZ } from 'ml-spectra-processing';
 * @param spectra
-* @returns {{z: Array, minX: *, maxX: *, minY: *, maxY: *, minZ: *, maxZ: *, noise: number}}
+* @returns {{z: Array<Array<number>>, minX: *, maxX: *, minY: *, maxY: *, minZ: *, maxZ: *}}
 */
-export default function convertTo3DZ(spectra) {
+export default function convertTo3DZ(datum) {
+const spectra = datum.spectra;
+// if (fileKeys.length === 0) {
+// throw new Error('There is not data to extract');
+// }
 let ySize = spectra.length;
-let xSize = spectra[0].data.re.length;
-let z = new Array(ySize);
+let xSize = spectra[0].data.x.length;
-for (let i = 0; i < ySize; i++) {
-z[i] = new Float64Array(spectra[i].data.re);
-}
 const firstX = spectra[0].data.x[0];
@@ -22,2 +24,27 @@ const lastX = spectra[0].data.x[xSize - 1];
+const minMax = {};
+const keyFiles = Object.keys(spectra[0].data).filter((key) => key !== 'x');
+for (const keyFile of keyFiles) {
+minMax[keyFile] = extractZMatrix(datum, {
+keyFile,
+ySize,
+firstX,
+lastX,
+firstY,
+lastY,
+});
+}
+return minMax;
+}
+function extractZMatrix(datum, options) {
+const spectra = datum.spectra;
+const { keyFile, ySize, firstX, lastX, firstY, lastY } = options;
+let z = new Array(ySize);
+for (let i = 0; i < ySize; i++) {
+z[i] = new Float64Array(spectra[i].data[keyFile]);
+}
 // Because the min / max value are the only information about the matrix if we invert
@@ -24,0 +51,0 @@ // min and max we need to invert the array
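convertTo3DZ now returns one matrix descriptor per stored component rather than a single matrix built from data.re. A minimal sketch of iterating that return value; the non-z field names are taken from the JSDoc above and should be treated as an assumption:

```js
// Minimal sketch of walking the per-component object returned by convertTo3DZ;
// minZ/maxZ follow the JSDoc above and are an assumption about each entry.
function summarizeMinMax(minMax) {
  const summary = {};
  for (const component of Object.keys(minMax)) {
    const { z, minZ, maxZ } = minMax[component];
    summary[component] = { rows: z.length, cols: z[0].length, minZ, maxZ };
  }
  return summary;
}
```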
 import { IOBuffer } from 'iobuffer';
 import { createStepArray } from 'ml-spectra-processing';
+import * as aqModeDirect from '../constants/acquisitionModeDirect';
 export async function setFIDSpectrumData(file, spectra) {
@@ -14,11 +16,10 @@ let td = parseInt(spectra.meta.TD[0], 10);
-let SW_H = Number(spectra.meta.SW_h[0]);
+const SW_H = Number(spectra.meta.SW_h[0]);
-let SF = Number(spectra.meta.SFO1[0]);
+const SF = Number(spectra.meta.SFO1[0]);
 spectra.meta.DATATYPE = 'NMR FID';
-let DW = 1 / (2 * SW_H);
-let AQ = td * DW;
+const DW = 1 / (2 * SW_H);
+const AQ = td * DW;
 let endian = parseInt(spectra.meta.BYTORDA, 10);
@@ -39,3 +40,6 @@ endian = endian ? 0 : 1;
 const stopReading = td / 2;
+const aqMode = spectra.meta.AQ_mod;
+const deltaX = aqMode === aqModeDirect.QSEQ ? DW : 2 * DW;
+const nbPoints = aqMode === aqModeDirect.QSEQ ? td : (td / 2) >> 0;
 for (let j = 0; j < nbSubSpectra; j++) {
@@ -45,3 +49,3 @@ let toSave = {
 dataTable: '(X++(R..R))',
-nbPoints: td,
+nbPoints,
 firstX: 0,
@@ -53,18 +57,26 @@ lastX: AQ,
 data: {
-x: createStepArray({ length: td, step: DW }),
-re: new Float64Array(td),
-im: new Float64Array(td),
+x: createStepArray({ length: nbPoints, step: deltaX }),
+re: new Float64Array(nbPoints),
+im: new Float64Array(nbPoints),
 },
 isXYdata: true,
 isFID: true,
 observeFrequency: SF,
 title: spectra.meta.TITLE,
-deltaX: DW,
+deltaX,
 };
 spectra.spectra[j] = toSave;
-for (let i = 0; i < stopReading; i++) {
-spectra.spectra[j].data.re[i] = ioBuffer.readInt32();
-spectra.spectra[j].data.im[i] = ioBuffer.readInt32();
+// const dtypa = spectra.meta.DTYPA;// we should use it for float or double FID data;
+if (aqMode === aqModeDirect.QSEQ) {
+for (let i = 0; i < nbPoints; i++) {
+spectra.spectra[j].data.re[i] = ioBuffer.readInt32();
+}
+} else {
+for (let i = 0; i < nbPoints; i++) {
+spectra.spectra[j].data.re[i] = ioBuffer.readInt32();
+spectra.spectra[j].data.im[i] = ioBuffer.readInt32();
+}
+}
 }
 }
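The new AQ_mod branch changes how the raw FID values map onto the time axis: in QSEQ (sequential) acquisition every stored value is a real point, while the other modes interleave re/im pairs, so TD counts both. A small sketch of the resulting point count and dwell step, mirroring the expressions in the diff (QSEQ = 2 as in the constant added to the bundled lib/index.js; the helper itself is only illustrative):

```js
// Sketch of the nbPoints / deltaX relation used above.
const QSEQ = 2; // same value as the constant added in lib/index.js

function fidLayout(td, swH, aqMod) {
  const dw = 1 / (2 * swH); // dwell time per stored value
  if (aqMod === QSEQ) {
    // sequential acquisition: every value is a real point
    return { nbPoints: td, deltaX: dw };
  }
  // simultaneous / DQD-style modes: values come as re/im pairs
  return { nbPoints: Math.floor(td / 2), deltaX: 2 * dw };
}
```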