music-metadata - npm Package Compare versions

Comparing version 7.12.6 to 8.0.0
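Version 8.0.0 migrates the published JavaScript from CommonJS to native ES modules: the "use strict" / exports.* boilerplate is dropped, every file switches to import / export statements, relative imports gain explicit .js extensions, Buffer is imported explicitly from 'node:buffer', and deep dependency paths such as strtok3/lib/core become strtok3/core. For consumers this generally means loading the package with import rather than require(). A minimal consumer-side sketch (the file name and MIME type below are placeholders; parseBuffer is one of the entry points visible in lib/core.js further down):

// 7.x (CommonJS build): const mm = require('music-metadata');
// 8.x (ESM build): named imports, used from an ES module
import { parseBuffer } from 'music-metadata';
import { readFile } from 'node:fs/promises';

// Read an audio file into memory and parse its metadata (top-level await, ESM only).
const buffer = await readFile('example.mp3');               // placeholder path
const metadata = await parseBuffer(buffer, 'audio/mpeg');   // MIME type passed as the fileInfo hint
console.log(metadata.common.title, metadata.format.container);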


lib/aiff/AiffParser.js

@@ -1,13 +0,10 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AIFFParser = void 0;
const Token = require("token-types");
const debug_1 = require("debug");
const strtok3 = require("strtok3/lib/core");
const ID3v2Parser_1 = require("../id3v2/ID3v2Parser");
const FourCC_1 = require("../common/FourCC");
const BasicParser_1 = require("../common/BasicParser");
const AiffToken = require("./AiffToken");
const iff = require("../iff");
const debug = (0, debug_1.default)('music-metadata:parser:aiff');
import * as Token from 'token-types';
import initDebug from 'debug';
import * as strtok3 from 'strtok3/core';
import { ID3v2Parser } from '../id3v2/ID3v2Parser.js';
import { FourCcToken } from '../common/FourCC.js';
import { BasicParser } from '../common/BasicParser.js';
import * as AiffToken from './AiffToken.js';
import * as iff from '../iff/index.js';
const debug = initDebug('music-metadata:parser:aiff');
const compressionTypes = {

@@ -31,3 +28,3 @@ NONE: 'not compressed PCM Apple Computer',

*/
class AIFFParser extends BasicParser_1.BasicParser {
export class AIFFParser extends BasicParser {
async parse() {

@@ -37,3 +34,3 @@ const header = await this.tokenizer.readToken(iff.Header);

throw new Error('Invalid Chunk-ID, expected \'FORM\''); // Not AIFF format
const type = await this.tokenizer.readToken(FourCC_1.FourCcToken);
const type = await this.tokenizer.readToken(FourCcToken);
switch (type) {

@@ -86,3 +83,3 @@ case 'AIFF':

const rst = strtok3.fromBuffer(id3_data);
await new ID3v2Parser_1.ID3v2Parser().parse(this.metadata, rst, this.options);
await new ID3v2Parser().parse(this.metadata, rst, this.options);
return header.chunkSize;

@@ -99,2 +96,1 @@ case 'SSND': // Sound Data Chunk

}
exports.AIFFParser = AIFFParser;

lib/aiff/AiffToken.js
@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Common = void 0;
const Token = require("token-types");
const FourCC_1 = require("../common/FourCC");
class Common {
import * as Token from 'token-types';
import { FourCcToken } from '../common/FourCC.js';
export class Common {
constructor(header, isAifc) {

@@ -25,3 +22,3 @@ this.isAifc = isAifc;

if (this.isAifc) {
res.compressionType = FourCC_1.FourCcToken.get(buf, off + 18);
res.compressionType = FourCcToken.get(buf, off + 18);
if (this.len > 22) {

@@ -49,2 +46,1 @@ const strLen = buf.readInt8(off + 22);

}
exports.Common = Common;

lib/apev2/APEv2Parser.js
@@ -1,14 +0,12 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.APEv2Parser = void 0;
const debug_1 = require("debug");
const strtok3 = require("strtok3/lib/core");
const token_types_1 = require("token-types");
const util = require("../common/Util");
const BasicParser_1 = require("../common/BasicParser");
const APEv2Token_1 = require("./APEv2Token");
const debug = (0, debug_1.default)('music-metadata:parser:APEv2');
import initDebug from 'debug';
import * as strtok3 from 'strtok3/core';
import { StringType } from 'token-types';
import { Buffer } from 'node:buffer';
import * as util from '../common/Util.js';
import { BasicParser } from '../common/BasicParser.js';
import { DataType, DescriptorParser, Header, TagFooter, TagItemHeader } from './APEv2Token.js';
const debug = initDebug('music-metadata:parser:APEv2');
const tagFormat = 'APEv2';
const preamble = 'APETAGEX';
class APEv2Parser extends BasicParser_1.BasicParser {
export class APEv2Parser extends BasicParser {
constructor() {

@@ -40,5 +38,5 @@ super(...arguments);

// Search for APE footer header at the end of the file
const apeBuf = Buffer.alloc(APEv2Token_1.TagFooter.len);
await reader.randomRead(apeBuf, 0, APEv2Token_1.TagFooter.len, offset - APEv2Token_1.TagFooter.len);
const tagFooter = APEv2Token_1.TagFooter.get(apeBuf, 0);
const apeBuf = Buffer.alloc(TagFooter.len);
await reader.randomRead(apeBuf, 0, TagFooter.len, offset - TagFooter.len);
const tagFooter = TagFooter.get(apeBuf, 0);
if (tagFooter.ID === 'APETAGEX') {

@@ -50,3 +48,3 @@ debug(`APE footer header at offset=${offset}`);

static parseTagFooter(metadata, buffer, options) {
const footer = APEv2Token_1.TagFooter.get(buffer, buffer.length - APEv2Token_1.TagFooter.len);
const footer = TagFooter.get(buffer, buffer.length - TagFooter.len);
if (footer.ID !== preamble)

@@ -63,9 +61,9 @@ throw new Error('Unexpected APEv2 Footer ID preamble value.');

async tryParseApeHeader() {
if (this.tokenizer.fileInfo.size && this.tokenizer.fileInfo.size - this.tokenizer.position < APEv2Token_1.TagFooter.len) {
if (this.tokenizer.fileInfo.size && this.tokenizer.fileInfo.size - this.tokenizer.position < TagFooter.len) {
debug(`No APEv2 header found, end-of-file reached`);
return;
}
const footer = await this.tokenizer.peekToken(APEv2Token_1.TagFooter);
const footer = await this.tokenizer.peekToken(TagFooter);
if (footer.ID === preamble) {
await this.tokenizer.ignore(APEv2Token_1.TagFooter.len);
await this.tokenizer.ignore(TagFooter.len);
return this.parseTags(footer);

@@ -85,7 +83,7 @@ }

async parse() {
const descriptor = await this.tokenizer.readToken(APEv2Token_1.DescriptorParser);
const descriptor = await this.tokenizer.readToken(DescriptorParser);
if (descriptor.ID !== 'MAC ')
throw new Error('Unexpected descriptor ID');
this.ape.descriptor = descriptor;
const lenExp = descriptor.descriptorBytes - APEv2Token_1.DescriptorParser.len;
const lenExp = descriptor.descriptorBytes - DescriptorParser.len;
const header = await (lenExp > 0 ? this.parseDescriptorExpansion(lenExp) : this.parseHeader());

@@ -97,6 +95,6 @@ await this.tokenizer.ignore(header.forwardBytes);

const keyBuffer = Buffer.alloc(256); // maximum tag key length
let bytesRemaining = footer.size - APEv2Token_1.TagFooter.len;
let bytesRemaining = footer.size - TagFooter.len;
debug(`Parse APE tags at offset=${this.tokenizer.position}, size=${bytesRemaining}`);
for (let i = 0; i < footer.fields; i++) {
if (bytesRemaining < APEv2Token_1.TagItemHeader.len) {
if (bytesRemaining < TagItemHeader.len) {
this.metadata.addWarning(`APEv2 Tag-header: ${footer.fields - i} items remaining, but no more tag data to read.`);

@@ -106,12 +104,12 @@ break;

// Only APEv2 tag has tag item headers
const tagItemHeader = await this.tokenizer.readToken(APEv2Token_1.TagItemHeader);
bytesRemaining -= APEv2Token_1.TagItemHeader.len + tagItemHeader.size;
const tagItemHeader = await this.tokenizer.readToken(TagItemHeader);
bytesRemaining -= TagItemHeader.len + tagItemHeader.size;
await this.tokenizer.peekBuffer(keyBuffer, { length: Math.min(keyBuffer.length, bytesRemaining) });
let zero = util.findZero(keyBuffer, 0, keyBuffer.length);
const key = await this.tokenizer.readToken(new token_types_1.StringType(zero, 'ascii'));
const key = await this.tokenizer.readToken(new StringType(zero, 'ascii'));
await this.tokenizer.ignore(1);
bytesRemaining -= key.length + 1;
switch (tagItemHeader.flags.dataType) {
case APEv2Token_1.DataType.text_utf8: { // utf-8 text-string
const value = await this.tokenizer.readToken(new token_types_1.StringType(tagItemHeader.size, 'utf8'));
case DataType.text_utf8: { // utf-8 text-string
const value = await this.tokenizer.readToken(new StringType(tagItemHeader.size, 'utf8'));
const values = value.split(/\x00/g);

@@ -123,3 +121,3 @@ for (const val of values) {

}
case APEv2Token_1.DataType.binary: // binary (probably artwork)
case DataType.binary: // binary (probably artwork)
if (this.options.skipCovers) {

@@ -140,7 +138,7 @@ await this.tokenizer.ignore(tagItemHeader.size);

break;
case APEv2Token_1.DataType.external_info:
case DataType.external_info:
debug(`Ignore external info ${key}`);
await this.tokenizer.ignore(tagItemHeader.size);
break;
case APEv2Token_1.DataType.reserved:
case DataType.reserved:
debug(`Ignore external info ${key}`);

@@ -158,3 +156,3 @@ this.metadata.addWarning(`APEv2 header declares a reserved datatype for "${key}"`);

async parseHeader() {
const header = await this.tokenizer.readToken(APEv2Token_1.Header);
const header = await this.tokenizer.readToken(Header);
// ToDo before

@@ -173,2 +171,1 @@ this.metadata.setFormat('lossless', true);

}
exports.APEv2Parser = APEv2Parser;

lib/apev2/APEv2TagMapper.js
@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.APEv2TagMapper = void 0;
const CaseInsensitiveTagMap_1 = require("../common/CaseInsensitiveTagMap");
import { CaseInsensitiveTagMap } from '../common/CaseInsensitiveTagMap.js';
/**

@@ -81,3 +78,3 @@ * ID3v2.2 tag mappings

};
class APEv2TagMapper extends CaseInsensitiveTagMap_1.CaseInsensitiveTagMap {
export class APEv2TagMapper extends CaseInsensitiveTagMap {
constructor() {

@@ -87,3 +84,2 @@ super(['APEv2'], apev2TagMap);

}
exports.APEv2TagMapper = APEv2TagMapper;
//# sourceMappingURL=APEv2TagMapper.js.map

lib/apev2/APEv2Token.js
@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isBitSet = exports.parseTagFlags = exports.TagField = exports.TagItemHeader = exports.TagFooter = exports.Header = exports.DescriptorParser = exports.DataType = void 0;
const Token = require("token-types");
const FourCC_1 = require("../common/FourCC");
var DataType;
import * as Token from 'token-types';
import { FourCcToken } from '../common/FourCC.js';
export var DataType;
(function (DataType) {

@@ -12,7 +9,7 @@ DataType[DataType["text_utf8"] = 0] = "text_utf8";

DataType[DataType["reserved"] = 3] = "reserved";
})(DataType = exports.DataType || (exports.DataType = {}));
})(DataType = DataType || (DataType = {}));
/**
* APE_DESCRIPTOR: defines the sizes (and offsets) of all the pieces, as well as the MD5 checksum
*/
exports.DescriptorParser = {
export const DescriptorParser = {
len: 52,

@@ -22,3 +19,3 @@ get: (buf, off) => {

// should equal 'MAC '
ID: FourCC_1.FourCcToken.get(buf, off),
ID: FourCcToken.get(buf, off),
// versionIndex number * 1000 (3.81 = 3810) (remember that 4-byte alignment causes this to take 4-bytes)

@@ -48,3 +45,3 @@ version: Token.UINT32_LE.get(buf, off + 4) / 1000,

*/
exports.Header = {
export const Header = {
len: 24,

@@ -76,3 +73,3 @@ get: (buf, off) => {

*/
exports.TagFooter = {
export const TagFooter = {
len: 32,

@@ -97,3 +94,3 @@ get: (buf, off) => {

*/
exports.TagItemHeader = {
export const TagItemHeader = {
len: 8,

@@ -109,7 +106,6 @@ get: (buf, off) => {

};
const TagField = footer => {
return new Token.Uint8ArrayType(footer.size - exports.TagFooter.len);
export const TagField = footer => {
return new Token.Uint8ArrayType(footer.size - TagFooter.len);
};
exports.TagField = TagField;
function parseTagFlags(flags) {
export function parseTagFlags(flags) {
return {

@@ -123,3 +119,2 @@ containsHeader: isBitSet(flags, 31),

}
exports.parseTagFlags = parseTagFlags;
/**

@@ -130,6 +125,5 @@ * @param num {number}

*/
function isBitSet(num, bit) {
export function isBitSet(num, bit) {
return (num & 1 << bit) !== 0;
}
exports.isBitSet = isBitSet;
//# sourceMappingURL=APEv2Token.js.map

lib/asf/AsfObject.js
@@ -1,14 +0,12 @@

"use strict";
// ASF Objects
Object.defineProperty(exports, "__esModule", { value: true });
exports.WmPictureToken = exports.MetadataLibraryObjectState = exports.MetadataObjectState = exports.ExtendedStreamPropertiesObjectState = exports.ExtendedContentDescriptionObjectState = exports.ContentDescriptionObjectState = exports.readCodecEntries = exports.HeaderExtensionObject = exports.StreamPropertiesObject = exports.FilePropertiesObject = exports.IgnoreObjectState = exports.State = exports.HeaderObjectToken = exports.TopLevelHeaderObjectToken = exports.DataType = void 0;
const util = require("../common/Util");
const Token = require("token-types");
const GUID_1 = require("./GUID");
const AsfUtil_1 = require("./AsfUtil");
const ID3v2Token_1 = require("../id3v2/ID3v2Token");
import * as Token from 'token-types';
import { Buffer } from 'node:buffer';
import * as util from '../common/Util.js';
import GUID from './GUID.js';
import { AsfUtil } from './AsfUtil.js';
import { AttachedPictureType } from '../id3v2/ID3v2Token.js';
/**
* Data Type: Specifies the type of information being stored. The following values are recognized.
*/
var DataType;
export var DataType;
(function (DataType) {

@@ -39,3 +37,3 @@ /**

DataType[DataType["Word"] = 5] = "Word";
})(DataType = exports.DataType || (exports.DataType = {}));
})(DataType = DataType || (DataType = {}));
/**

@@ -45,7 +43,7 @@ * Token for: 3. ASF top-level Header Object

*/
exports.TopLevelHeaderObjectToken = {
export const TopLevelHeaderObjectToken = {
len: 30,
get: (buf, off) => {
return {
objectId: GUID_1.default.fromBin(new Token.BufferType(16).get(buf, off)),
objectId: GUID.fromBin(new Token.BufferType(16).get(buf, off)),
objectSize: Number(Token.UINT64_LE.get(buf, off + 16)),

@@ -61,7 +59,7 @@ numberOfHeaderObjects: Token.UINT32_LE.get(buf, off + 24)

*/
exports.HeaderObjectToken = {
export const HeaderObjectToken = {
len: 24,
get: (buf, off) => {
return {
objectId: GUID_1.default.fromBin(new Token.BufferType(16).get(buf, off)),
objectId: GUID.fromBin(new Token.BufferType(16).get(buf, off)),
objectSize: Number(Token.UINT64_LE.get(buf, off + 16))

@@ -71,5 +69,5 @@ };

};
class State {
export class State {
constructor(header) {
this.len = Number(header.objectSize) - exports.HeaderObjectToken.len;
this.len = Number(header.objectSize) - HeaderObjectToken.len;
}

@@ -81,3 +79,3 @@ postProcessTag(tags, name, valueType, data) {

else {
const parseAttr = AsfUtil_1.AsfUtil.getParserForAttr(valueType);
const parseAttr = AsfUtil.getParserForAttr(valueType);
if (!parseAttr) {

@@ -90,5 +88,4 @@ throw new Error('unexpected value headerType: ' + valueType);

}
exports.State = State;
// ToDo: use ignore type
class IgnoreObjectState extends State {
export class IgnoreObjectState extends State {
constructor(header) {

@@ -101,3 +98,2 @@ super(header);

}
exports.IgnoreObjectState = IgnoreObjectState;
/**

@@ -107,3 +103,3 @@ * Token for: 3.2: File Properties Object (mandatory, one only)

*/
class FilePropertiesObject extends State {
export class FilePropertiesObject extends State {
constructor(header) {

@@ -114,3 +110,3 @@ super(header);

return {
fileId: GUID_1.default.fromBin(buf, off),
fileId: GUID.fromBin(buf, off),
fileSize: Token.UINT64_LE.get(buf, off + 16),

@@ -133,4 +129,3 @@ creationDate: Token.UINT64_LE.get(buf, off + 24),

}
exports.FilePropertiesObject = FilePropertiesObject;
FilePropertiesObject.guid = GUID_1.default.FilePropertiesObject;
FilePropertiesObject.guid = GUID.FilePropertiesObject;
/**

@@ -140,3 +135,3 @@ * Token for: 3.3 Stream Properties Object (mandatory, one per stream)

*/
class StreamPropertiesObject extends State {
export class StreamPropertiesObject extends State {
constructor(header) {

@@ -147,4 +142,4 @@ super(header);

return {
streamType: GUID_1.default.decodeMediaType(GUID_1.default.fromBin(buf, off)),
errorCorrectionType: GUID_1.default.fromBin(buf, off + 8)
streamType: GUID.decodeMediaType(GUID.fromBin(buf, off)),
errorCorrectionType: GUID.fromBin(buf, off + 8)
// ToDo

@@ -154,4 +149,3 @@ };

}
exports.StreamPropertiesObject = StreamPropertiesObject;
StreamPropertiesObject.guid = GUID_1.default.StreamPropertiesObject;
StreamPropertiesObject.guid = GUID.StreamPropertiesObject;
/**

@@ -161,3 +155,3 @@ * 3.4: Header Extension Object (mandatory, one only)

*/
class HeaderExtensionObject {
export class HeaderExtensionObject {
constructor() {

@@ -168,3 +162,3 @@ this.len = 22;

return {
reserved1: GUID_1.default.fromBin(buf, off),
reserved1: GUID.fromBin(buf, off),
reserved2: buf.readUInt16LE(off + 16),

@@ -175,4 +169,3 @@ extensionDataSize: buf.readUInt32LE(off + 18)

}
exports.HeaderExtensionObject = HeaderExtensionObject;
HeaderExtensionObject.guid = GUID_1.default.HeaderExtensionObject;
HeaderExtensionObject.guid = GUID.HeaderExtensionObject;
/**

@@ -198,3 +191,3 @@ * 3.5: The Codec List Object provides user-friendly information about the codecs and formats used to encode the content found in the ASF file.

*/
async function readCodecEntries(tokenizer) {
export async function readCodecEntries(tokenizer) {
const codecHeader = await tokenizer.readToken(CodecListObjectHeader);

@@ -207,3 +200,2 @@ const entries = [];

}
exports.readCodecEntries = readCodecEntries;
async function readInformation(tokenizer) {

@@ -235,3 +227,3 @@ const length = await tokenizer.readNumber(Token.UINT16_LE);

*/
class ContentDescriptionObjectState extends State {
export class ContentDescriptionObjectState extends State {
constructor(header) {

@@ -248,3 +240,3 @@ super(header);

const end = pos + length;
tags.push({ id: tagName, value: AsfUtil_1.AsfUtil.parseUnicodeAttr(buf.slice(pos, end)) });
tags.push({ id: tagName, value: AsfUtil.parseUnicodeAttr(buf.slice(pos, end)) });
pos = end;

@@ -256,4 +248,3 @@ }

}
exports.ContentDescriptionObjectState = ContentDescriptionObjectState;
ContentDescriptionObjectState.guid = GUID_1.default.ContentDescriptionObject;
ContentDescriptionObjectState.guid = GUID.ContentDescriptionObject;
ContentDescriptionObjectState.contentDescTags = ['Title', 'Author', 'Copyright', 'Description', 'Rating'];

@@ -264,3 +255,3 @@ /**

*/
class ExtendedContentDescriptionObjectState extends State {
export class ExtendedContentDescriptionObjectState extends State {
constructor(header) {

@@ -276,3 +267,3 @@ super(header);

pos += 2;
const name = AsfUtil_1.AsfUtil.parseUnicodeAttr(buf.slice(pos, pos + nameLen));
const name = AsfUtil.parseUnicodeAttr(buf.slice(pos, pos + nameLen));
pos += nameLen;

@@ -290,4 +281,3 @@ const valueType = buf.readUInt16LE(pos);

}
exports.ExtendedContentDescriptionObjectState = ExtendedContentDescriptionObjectState;
ExtendedContentDescriptionObjectState.guid = GUID_1.default.ExtendedContentDescriptionObject;
ExtendedContentDescriptionObjectState.guid = GUID.ExtendedContentDescriptionObject;
/**

@@ -297,3 +287,3 @@ * 4.1 Extended Stream Properties Object (optional, 1 per media stream)

*/
class ExtendedStreamPropertiesObjectState extends State {
export class ExtendedStreamPropertiesObjectState extends State {
constructor(header) {

@@ -329,4 +319,3 @@ super(header);

}
exports.ExtendedStreamPropertiesObjectState = ExtendedStreamPropertiesObjectState;
ExtendedStreamPropertiesObjectState.guid = GUID_1.default.ExtendedStreamPropertiesObject;
ExtendedStreamPropertiesObjectState.guid = GUID.ExtendedStreamPropertiesObject;
/**

@@ -336,3 +325,3 @@ * 4.7 Metadata Object (optional, 0 or 1)

*/
class MetadataObjectState extends State {
export class MetadataObjectState extends State {
constructor(header) {

@@ -354,3 +343,3 @@ super(header);

pos += 4;
const name = AsfUtil_1.AsfUtil.parseUnicodeAttr(buf.slice(pos, pos + nameLen));
const name = AsfUtil.parseUnicodeAttr(buf.slice(pos, pos + nameLen));
pos += nameLen;

@@ -364,6 +353,5 @@ const data = buf.slice(pos, pos + dataLen);

}
exports.MetadataObjectState = MetadataObjectState;
MetadataObjectState.guid = GUID_1.default.MetadataObject;
MetadataObjectState.guid = GUID.MetadataObject;
// 4.8 Metadata Library Object (optional, 0 or 1)
class MetadataLibraryObjectState extends MetadataObjectState {
export class MetadataLibraryObjectState extends MetadataObjectState {
constructor(header) {

@@ -373,8 +361,7 @@ super(header);

}
exports.MetadataLibraryObjectState = MetadataLibraryObjectState;
MetadataLibraryObjectState.guid = GUID_1.default.MetadataLibraryObject;
MetadataLibraryObjectState.guid = GUID.MetadataLibraryObject;
/**
* Ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd757977(v=vs.85).aspx
*/
class WmPictureToken {
export class WmPictureToken {
constructor(len) {

@@ -403,3 +390,3 @@ this.len = len;

return {
type: ID3v2Token_1.AttachedPictureType[typeId],
type: AttachedPictureType[typeId],
format,

@@ -412,2 +399,1 @@ description,

}
exports.WmPictureToken = WmPictureToken;

lib/asf/AsfParser.js
@@ -1,10 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AsfParser = void 0;
const debug_1 = require("debug");
const type_1 = require("../type");
const GUID_1 = require("./GUID");
const AsfObject = require("./AsfObject");
const BasicParser_1 = require("../common/BasicParser");
const debug = (0, debug_1.default)('music-metadata:parser:ASF');
import initDebug from 'debug';
import { TrackType } from '../type.js';
import GUID from './GUID.js';
import * as AsfObject from './AsfObject.js';
import { BasicParser } from '../common/BasicParser.js';
const debug = initDebug('music-metadata:parser:ASF');
const headerType = 'asf';

@@ -21,6 +18,6 @@ /**

*/
class AsfParser extends BasicParser_1.BasicParser {
export class AsfParser extends BasicParser {
async parse() {
const header = await this.tokenizer.readToken(AsfObject.TopLevelHeaderObjectToken);
if (!header.objectId.equals(GUID_1.default.HeaderObject)) {
if (!header.objectId.equals(GUID.HeaderObject)) {
throw new Error('expected asf header; but was not found; got: ' + header.objectId.str);

@@ -64,7 +61,7 @@ }

break;
case GUID_1.default.CodecListObject.str:
case GUID.CodecListObject.str:
const codecs = await AsfObject.readCodecEntries(this.tokenizer);
codecs.forEach(codec => {
this.metadata.addStreamInfo({
type: codec.type.videoCodec ? type_1.TrackType.video : type_1.TrackType.audio,
type: codec.type.videoCodec ? TrackType.video : TrackType.audio,
codecName: codec.codecName

@@ -76,7 +73,7 @@ });

break;
case GUID_1.default.StreamBitratePropertiesObject.str:
case GUID.StreamBitratePropertiesObject.str:
// ToDo?
await this.tokenizer.ignore(header.objectSize - AsfObject.HeaderObjectToken.len);
break;
case GUID_1.default.PaddingObject.str:
case GUID.PaddingObject.str:
// ToDo: register bytes pad

@@ -118,10 +115,10 @@ debug('Padding: %s bytes', header.objectSize - AsfObject.HeaderObjectToken.len);

break;
case GUID_1.default.PaddingObject.str:
case GUID.PaddingObject.str:
// ToDo: register bytes pad
await this.tokenizer.ignore(remaining);
break;
case GUID_1.default.CompatibilityObject.str:
case GUID.CompatibilityObject.str:
this.tokenizer.ignore(remaining);
break;
case GUID_1.default.ASF_Index_Placeholder_Object.str:
case GUID.ASF_Index_Placeholder_Object.str:
await this.tokenizer.ignore(remaining);

@@ -139,2 +136,1 @@ break;

}
exports.AsfParser = AsfParser;

lib/asf/AsfTagMapper.js
@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AsfTagMapper = void 0;
const GenericTagMapper_1 = require("../common/GenericTagMapper");
import { CommonTagMapper } from '../common/GenericTagMapper.js';
/**

@@ -76,3 +73,3 @@ * ASF Metadata tag mappings.

};
class AsfTagMapper extends GenericTagMapper_1.CommonTagMapper {
export class AsfTagMapper extends CommonTagMapper {
static toRating(rating) {

@@ -96,3 +93,2 @@ return {

}
exports.AsfTagMapper = AsfTagMapper;
//# sourceMappingURL=AsfTagMapper.js.map

lib/asf/AsfUtil.js
@@ -1,7 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AsfUtil = void 0;
const Token = require("token-types");
const util = require("../common/Util");
class AsfUtil {
import * as Token from 'token-types';
import { Buffer } from 'node:buffer';
import * as util from '../common/Util.js';
export class AsfUtil {
static getParserForAttr(i) {

@@ -29,3 +27,2 @@ return AsfUtil.attributeParsers[i];

}
exports.AsfUtil = AsfUtil;
AsfUtil.attributeParsers = [

@@ -32,0 +29,0 @@ AsfUtil.parseUnicodeAttr,

lib/asf/GUID.js
@@ -1,3 +0,1 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**

@@ -17,3 +15,3 @@ * Ref:

*/
class GUID {
export default class GUID {
constructor(str) {

@@ -75,3 +73,2 @@ this.str = str;

}
exports.default = GUID;
// 10.1 Top-level ASF object GUIDs

@@ -78,0 +75,0 @@ GUID.HeaderObject = new GUID("75B22630-668E-11CF-A6D9-00AA0062CE6C");

lib/common/BasicParser.js
@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BasicParser = void 0;
class BasicParser {
export class BasicParser {
/**

@@ -18,2 +15,1 @@ * Initialize parser with output (metadata), input (tokenizer) & parsing options (options).

}
exports.BasicParser = BasicParser;

lib/common/CaseInsensitiveTagMap.js
@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CaseInsensitiveTagMap = void 0;
const GenericTagMapper_1 = require("./GenericTagMapper");
class CaseInsensitiveTagMap extends GenericTagMapper_1.CommonTagMapper {
import { CommonTagMapper } from './GenericTagMapper.js';
export class CaseInsensitiveTagMap extends CommonTagMapper {
constructor(tagTypes, tagMap) {

@@ -21,3 +18,2 @@ const upperCaseMap = {};

}
exports.CaseInsensitiveTagMap = CaseInsensitiveTagMap;
//# sourceMappingURL=CaseInsensitiveTagMap.js.map

lib/common/CombinedTagMapper.js
@@ -1,27 +0,24 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CombinedTagMapper = void 0;
const ID3v1TagMap_1 = require("../id3v1/ID3v1TagMap");
const ID3v24TagMapper_1 = require("../id3v2/ID3v24TagMapper");
const AsfTagMapper_1 = require("../asf/AsfTagMapper");
const ID3v22TagMapper_1 = require("../id3v2/ID3v22TagMapper");
const APEv2TagMapper_1 = require("../apev2/APEv2TagMapper");
const MP4TagMapper_1 = require("../mp4/MP4TagMapper");
const VorbisTagMapper_1 = require("../ogg/vorbis/VorbisTagMapper");
const RiffInfoTagMap_1 = require("../riff/RiffInfoTagMap");
const MatroskaTagMapper_1 = require("../matroska/MatroskaTagMapper");
class CombinedTagMapper {
import { ID3v1TagMapper } from '../id3v1/ID3v1TagMap.js';
import { ID3v24TagMapper } from '../id3v2/ID3v24TagMapper.js';
import { AsfTagMapper } from '../asf/AsfTagMapper.js';
import { ID3v22TagMapper } from '../id3v2/ID3v22TagMapper.js';
import { APEv2TagMapper } from '../apev2/APEv2TagMapper.js';
import { MP4TagMapper } from '../mp4/MP4TagMapper.js';
import { VorbisTagMapper } from '../ogg/vorbis/VorbisTagMapper.js';
import { RiffInfoTagMapper } from '../riff/RiffInfoTagMap.js';
import { MatroskaTagMapper } from '../matroska/MatroskaTagMapper.js';
export class CombinedTagMapper {
constructor() {
this.tagMappers = {};
[
new ID3v1TagMap_1.ID3v1TagMapper(),
new ID3v22TagMapper_1.ID3v22TagMapper(),
new ID3v24TagMapper_1.ID3v24TagMapper(),
new MP4TagMapper_1.MP4TagMapper(),
new MP4TagMapper_1.MP4TagMapper(),
new VorbisTagMapper_1.VorbisTagMapper(),
new APEv2TagMapper_1.APEv2TagMapper(),
new AsfTagMapper_1.AsfTagMapper(),
new RiffInfoTagMap_1.RiffInfoTagMapper(),
new MatroskaTagMapper_1.MatroskaTagMapper()
new ID3v1TagMapper(),
new ID3v22TagMapper(),
new ID3v24TagMapper(),
new MP4TagMapper(),
new MP4TagMapper(),
new VorbisTagMapper(),
new APEv2TagMapper(),
new AsfTagMapper(),
new RiffInfoTagMapper(),
new MatroskaTagMapper()
].forEach(mapper => {

@@ -51,3 +48,2 @@ this.registerTagMapper(mapper);

}
exports.CombinedTagMapper = CombinedTagMapper;
//# sourceMappingURL=CombinedTagMapper.js.map

lib/common/FourCC.js
@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FourCcToken = void 0;
const util = require("./Util");
import * as util from './Util.js';
const validFourCC = /^[\x21-\x7e©][\x20-\x7e\x00()]{3}/;

@@ -10,6 +7,6 @@ /**

*/
exports.FourCcToken = {
export const FourCcToken = {
len: 4,
get: (buf, off) => {
const id = buf.toString('binary', off, off + exports.FourCcToken.len);
const id = buf.toString('binary', off, off + FourCcToken.len);
switch (id) {

@@ -16,0 +13,0 @@ default:

lib/common/GenericTagMapper.js
@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CommonTagMapper = void 0;
class CommonTagMapper {
export class CommonTagMapper {
constructor(tagTypes, tagMap) {

@@ -39,3 +36,3 @@ this.tagTypes = tagTypes;

* Convert native tag key to common tag key
* @tag Native header tag
* @param tag Native header tag
* @return common tag name (alias)

@@ -55,4 +52,3 @@ */

}
exports.CommonTagMapper = CommonTagMapper;
CommonTagMapper.maxRatingScore = 1;
//# sourceMappingURL=GenericTagMapper.js.map

lib/common/GenericTagTypes.js
@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isUnique = exports.isSingleton = exports.commonTags = void 0;
exports.commonTags = {
export const commonTags = {
year: { multiple: false },

@@ -120,6 +117,5 @@ track: { multiple: false },

*/
function isSingleton(alias) {
return exports.commonTags.hasOwnProperty(alias) && !exports.commonTags[alias].multiple;
export function isSingleton(alias) {
return commonTags.hasOwnProperty(alias) && !commonTags[alias].multiple;
}
exports.isSingleton = isSingleton;
/**

@@ -129,6 +125,5 @@ * @param alias Common (generic) tag

*/
function isUnique(alias) {
return !exports.commonTags[alias].multiple || exports.commonTags[alias].unique;
export function isUnique(alias) {
return !commonTags[alias].multiple || commonTags[alias].unique;
}
exports.isUnique = isUnique;
//# sourceMappingURL=GenericTagTypes.js.map

lib/common/MetadataCollector.js
@@ -1,12 +0,9 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.joinArtists = exports.MetadataCollector = void 0;
const type_1 = require("../type");
const debug_1 = require("debug");
const GenericTagTypes_1 = require("./GenericTagTypes");
const CombinedTagMapper_1 = require("./CombinedTagMapper");
const GenericTagMapper_1 = require("./GenericTagMapper");
const Util_1 = require("./Util");
const FileType = require("file-type/core");
const debug = (0, debug_1.default)('music-metadata:collector');
import { TrackType } from '../type.js';
import initDebug from 'debug';
import { isSingleton, isUnique } from './GenericTagTypes.js';
import { CombinedTagMapper } from './CombinedTagMapper.js';
import { CommonTagMapper } from './GenericTagMapper.js';
import { toRatio } from './Util.js';
import { fileTypeFromBuffer } from 'file-type';
const debug = initDebug('music-metadata:collector');
const TagPriority = ['matroska', 'APEv2', 'vorbis', 'ID3v2.4', 'ID3v2.3', 'ID3v2.2', 'exif', 'asf', 'iTunes', 'ID3v1'];

@@ -17,3 +14,3 @@ /**

*/
class MetadataCollector {
export class MetadataCollector {
constructor(opts) {

@@ -42,3 +39,3 @@ this.opts = opts;

this.originPriority = {};
this.tagMapper = new CombinedTagMapper_1.CombinedTagMapper();
this.tagMapper = new CombinedTagMapper();
let priority = 1;

@@ -58,3 +55,3 @@ for (const tagType of TagPriority) {

addStreamInfo(streamInfo) {
debug(`streamInfo: type=${type_1.TrackType[streamInfo.type]}, codec=${streamInfo.codecName}`);
debug(`streamInfo: type=${TrackType[streamInfo.type]}, codec=${streamInfo.codecName}`);
this.format.trackInfo.push(streamInfo);

@@ -117,9 +114,9 @@ }

case 'totaltracks':
this.common.track.of = GenericTagMapper_1.CommonTagMapper.toIntOrNull(tag.value);
this.common.track.of = CommonTagMapper.toIntOrNull(tag.value);
return;
case 'totaldiscs':
this.common.disk.of = GenericTagMapper_1.CommonTagMapper.toIntOrNull(tag.value);
this.common.disk.of = CommonTagMapper.toIntOrNull(tag.value);
return;
case 'movementTotal':
this.common.movementIndex.of = GenericTagMapper_1.CommonTagMapper.toIntOrNull(tag.value);
this.common.movementIndex.of = CommonTagMapper.toIntOrNull(tag.value);
return;

@@ -130,3 +127,3 @@ case 'track':

const of = this.common[tag.id].of; // store of value, maybe maybe overwritten
this.common[tag.id] = GenericTagMapper_1.CommonTagMapper.normalizeTrack(tag.value);
this.common[tag.id] = CommonTagMapper.normalizeTrack(tag.value);
this.common[tag.id].of = of != null ? of : this.common[tag.id].of;

@@ -157,3 +154,3 @@ return;

case 'replaygain_album_peak':
tag.value = (0, Util_1.toRatio)(tag.value);
tag.value = toRatio(tag.value);
break;

@@ -206,3 +203,3 @@ case 'replaygain_track_minmax':

if (!picture.format) {
const fileType = await FileType.fromBuffer(picture.data);
const fileType = await fileTypeFromBuffer(picture.data);
if (fileType) {

@@ -242,3 +239,3 @@ picture.format = fileType.mime;

const prio1 = this.originPriority[tagType];
if ((0, GenericTagTypes_1.isSingleton)(tag.id)) {
if (isSingleton(tag.id)) {
if (prio1 <= prio0) {

@@ -254,3 +251,3 @@ this.common[tag.id] = tag.value;

if (prio1 === prio0) {
if (!(0, GenericTagTypes_1.isUnique)(tag.id) || this.common[tag.id].indexOf(tag.value) === -1) {
if (!isUnique(tag.id) || this.common[tag.id].indexOf(tag.value) === -1) {
this.common[tag.id].push(tag.value);

@@ -277,4 +274,3 @@ }

}
exports.MetadataCollector = MetadataCollector;
function joinArtists(artists) {
export function joinArtists(artists) {
if (artists.length > 2) {

@@ -285,3 +281,2 @@ return artists.slice(0, artists.length - 1).join(', ') + ' & ' + artists[artists.length - 1];

}
exports.joinArtists = joinArtists;
//# sourceMappingURL=MetadataCollector.js.map

lib/common/RandomFileReader.js
@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RandomFileReader = void 0;
const fs = require("fs");
import * as fs from 'fs';
/**
* Provides abstract file access via the IRandomRead interface
*/
class RandomFileReader {
export class RandomFileReader {
constructor(fileHandle, filePath, fileSize) {

@@ -34,2 +31,1 @@ this.fileHandle = fileHandle;

}
exports.RandomFileReader = RandomFileReader;

lib/common/RandomUint8ArrayReader.js
@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RandomUint8ArrayReader = void 0;
/**
* Provides abstract Uint8Array access via the IRandomRead interface
*/
class RandomUint8ArrayReader {
export class RandomUint8ArrayReader {
constructor(uint8Array) {

@@ -25,2 +22,1 @@ this.uint8Array = uint8Array;

}
exports.RandomUint8ArrayReader = RandomUint8ArrayReader;

lib/common/Util.js
@@ -1,8 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.toRatio = exports.dbToRatio = exports.ratioToDb = exports.a2hex = exports.isBitSet = exports.getBitAllignedNumber = exports.stripNulls = exports.decodeString = exports.trimRightNull = exports.findZero = exports.getBit = void 0;
function getBit(buf, off, bit) {
export function getBit(buf, off, bit) {
return (buf[off] & (1 << bit)) !== 0;
}
exports.getBit = getBit;
/**

@@ -16,3 +12,3 @@ * Found delimiting zero in uint8Array

*/
function findZero(uint8Array, start, end, encoding) {
export function findZero(uint8Array, start, end, encoding) {
let i = start;

@@ -36,8 +32,6 @@ if (encoding === 'utf16le') {

}
exports.findZero = findZero;
function trimRightNull(x) {
export function trimRightNull(x) {
const pos0 = x.indexOf('\0');
return pos0 === -1 ? x : x.substr(0, pos0);
}
exports.trimRightNull = trimRightNull;
function swapBytes(uint8Array) {

@@ -57,3 +51,3 @@ const l = uint8Array.length;

*/
function decodeString(uint8Array, encoding) {
export function decodeString(uint8Array, encoding) {
// annoying workaround for a double BOM issue

@@ -72,4 +66,3 @@ // https://github.com/leetreveil/musicmetadata/issues/84

}
exports.decodeString = decodeString;
function stripNulls(str) {
export function stripNulls(str) {
str = str.replace(/^\x00+/g, '');

@@ -79,3 +72,2 @@ str = str.replace(/\x00+$/g, '');

}
exports.stripNulls = stripNulls;
/**

@@ -90,3 +82,3 @@ * Read bit-aligned number start from buffer

*/
function getBitAllignedNumber(source, byteOffset, bitOffset, len) {
export function getBitAllignedNumber(source, byteOffset, bitOffset, len) {
const byteOff = byteOffset + ~~(bitOffset / 8);

@@ -107,3 +99,2 @@ const bitOff = bitOffset % 8;

}
exports.getBitAllignedNumber = getBitAllignedNumber;
/**

@@ -117,7 +108,6 @@ * Read bit-aligned number start from buffer

*/
function isBitSet(source, byteOffset, bitOffset) {
export function isBitSet(source, byteOffset, bitOffset) {
return getBitAllignedNumber(source, byteOffset, bitOffset, 1) === 1;
}
exports.isBitSet = isBitSet;
function a2hex(str) {
export function a2hex(str) {
const arr = [];

@@ -130,3 +120,2 @@ for (let i = 0, l = str.length; i < l; i++) {

}
exports.a2hex = a2hex;
/**

@@ -136,6 +125,5 @@ * Convert power ratio to DB

*/
function ratioToDb(ratio) {
export function ratioToDb(ratio) {
return 10 * Math.log10(ratio);
}
exports.ratioToDb = ratioToDb;
/**

@@ -145,6 +133,5 @@ * Convert dB to ratio

*/
function dbToRatio(dB) {
export function dbToRatio(dB) {
return Math.pow(10, dB / 10);
}
exports.dbToRatio = dbToRatio;
/**

@@ -154,3 +141,3 @@ * Convert replay gain to ratio and Decibel

*/
function toRatio(value) {
export function toRatio(value) {
const ps = value.split(' ').map(p => p.trim().toLowerCase());

@@ -169,3 +156,2 @@ // @ts-ignore

}
exports.toRatio = toRatio;
//# sourceMappingURL=Util.js.map

lib/core.js
@@ -1,10 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.scanAppendingHeaders = exports.selectCover = exports.ratingToStars = exports.orderTags = exports.parseFromTokenizer = exports.parseBuffer = exports.parseStream = void 0;
const strtok3 = require("strtok3/lib/core");
const ParserFactory_1 = require("./ParserFactory");
const RandomUint8ArrayReader_1 = require("./common/RandomUint8ArrayReader");
const APEv2Parser_1 = require("./apev2/APEv2Parser");
const ID3v1Parser_1 = require("./id3v1/ID3v1Parser");
const Lyrics3_1 = require("./lyrics3/Lyrics3");
import * as strtok3 from 'strtok3/core';
import { ParserFactory } from './ParserFactory.js';
import { RandomUint8ArrayReader } from './common/RandomUint8ArrayReader.js';
import { APEv2Parser } from './apev2/APEv2Parser.js';
import { hasID3v1Header } from './id3v1/ID3v1Parser.js';
import { getLyricsHeaderLength } from './lyrics3/Lyrics3.js';
/**

@@ -17,6 +14,5 @@ * Parse audio from Node Stream.Readable

*/
function parseStream(stream, fileInfo, options = {}) {
export function parseStream(stream, fileInfo, options = {}) {
return parseFromTokenizer(strtok3.fromStream(stream, typeof fileInfo === 'string' ? { mimeType: fileInfo } : fileInfo), options);
}
exports.parseStream = parseStream;
/**

@@ -30,4 +26,4 @@ * Parse audio from Node Buffer

*/
async function parseBuffer(uint8Array, fileInfo, options = {}) {
const bufferReader = new RandomUint8ArrayReader_1.RandomUint8ArrayReader(uint8Array);
export async function parseBuffer(uint8Array, fileInfo, options = {}) {
const bufferReader = new RandomUint8ArrayReader(uint8Array);
await scanAppendingHeaders(bufferReader, options);

@@ -37,3 +33,2 @@ const tokenizer = strtok3.fromBuffer(uint8Array, typeof fileInfo === 'string' ? { mimeType: fileInfo } : fileInfo);

}
exports.parseBuffer = parseBuffer;
/**

@@ -45,6 +40,5 @@ * Parse audio from ITokenizer source

*/
function parseFromTokenizer(tokenizer, options) {
return ParserFactory_1.ParserFactory.parseOnContentType(tokenizer, options);
export function parseFromTokenizer(tokenizer, options) {
return ParserFactory.parseOnContentType(tokenizer, options);
}
exports.parseFromTokenizer = parseFromTokenizer;
/**

@@ -55,3 +49,3 @@ * Create a dictionary ordered by their tag id (key)

*/
function orderTags(nativeTags) {
export function orderTags(nativeTags) {
const tags = {};

@@ -63,3 +57,2 @@ for (const tag of nativeTags) {

}
exports.orderTags = orderTags;
/**

@@ -70,6 +63,5 @@ * Convert rating to 1-5 star rating

*/
function ratingToStars(rating) {
export function ratingToStars(rating) {
return rating === undefined ? 0 : 1 + Math.round(rating * 4);
}
exports.ratingToStars = ratingToStars;
/**

@@ -80,3 +72,3 @@ * Select most likely cover image.

*/
function selectCover(pictures) {
export function selectCover(pictures) {
return pictures ? pictures.reduce((acc, cur) => {

@@ -88,12 +80,10 @@ if (cur.name && cur.name.toLowerCase() in ['front', 'cover', 'cover (front)'])

}
exports.selectCover = selectCover;
async function scanAppendingHeaders(randomReader, options = {}) {
export async function scanAppendingHeaders(randomReader, options = {}) {
let apeOffset = randomReader.fileSize;
if (await (0, ID3v1Parser_1.hasID3v1Header)(randomReader)) {
if (await hasID3v1Header(randomReader)) {
apeOffset -= 128;
const lyricsLen = await (0, Lyrics3_1.getLyricsHeaderLength)(randomReader);
const lyricsLen = await getLyricsHeaderLength(randomReader);
apeOffset -= lyricsLen;
}
options.apeHeader = await APEv2Parser_1.APEv2Parser.findApeFooterOffset(randomReader, apeOffset);
options.apeHeader = await APEv2Parser.findApeFooterOffset(randomReader, apeOffset);
}
exports.scanAppendingHeaders = scanAppendingHeaders;

lib/dsdiff/DsdiffParser.js
@@ -1,12 +0,9 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DsdiffParser = void 0;
const Token = require("token-types");
const debug_1 = require("debug");
const strtok3 = require("strtok3/lib/core");
const FourCC_1 = require("../common/FourCC");
const BasicParser_1 = require("../common/BasicParser");
const ID3v2Parser_1 = require("../id3v2/ID3v2Parser");
const DsdiffToken_1 = require("./DsdiffToken");
const debug = (0, debug_1.default)('music-metadata:parser:aiff');
import * as Token from 'token-types';
import initDebug from 'debug';
import * as strtok3 from 'strtok3/core';
import { FourCcToken } from '../common/FourCC.js';
import { BasicParser } from '../common/BasicParser.js';
import { ID3v2Parser } from '../id3v2/ID3v2Parser.js';
import { ChunkHeader64 } from './DsdiffToken.js';
const debug = initDebug('music-metadata:parser:aiff');
/**

@@ -18,8 +15,8 @@ * DSDIFF - Direct Stream Digital Interchange File Format (Phillips)

*/
class DsdiffParser extends BasicParser_1.BasicParser {
export class DsdiffParser extends BasicParser {
async parse() {
const header = await this.tokenizer.readToken(DsdiffToken_1.ChunkHeader64);
const header = await this.tokenizer.readToken(ChunkHeader64);
if (header.chunkID !== 'FRM8')
throw new Error('Unexpected chunk-ID');
const type = (await this.tokenizer.readToken(FourCC_1.FourCcToken)).trim();
const type = (await this.tokenizer.readToken(FourCcToken)).trim();
switch (type) {

@@ -29,3 +26,3 @@ case 'DSD':

this.metadata.setFormat('lossless', true);
return this.readFmt8Chunks(header.chunkSize - BigInt(FourCC_1.FourCcToken.len));
return this.readFmt8Chunks(header.chunkSize - BigInt(FourCcToken.len));
default:

@@ -36,8 +33,8 @@ throw Error(`Unsupported DSDIFF type: ${type}`);

async readFmt8Chunks(remainingSize) {
while (remainingSize >= DsdiffToken_1.ChunkHeader64.len) {
const chunkHeader = await this.tokenizer.readToken(DsdiffToken_1.ChunkHeader64);
while (remainingSize >= ChunkHeader64.len) {
const chunkHeader = await this.tokenizer.readToken(ChunkHeader64);
// If the data is an odd number of bytes in length, a pad byte must be added at the end
debug(`Chunk id=${chunkHeader.chunkID}`);
await this.readData(chunkHeader);
remainingSize -= (BigInt(DsdiffToken_1.ChunkHeader64.len) + chunkHeader.chunkSize);
remainingSize -= (BigInt(ChunkHeader64.len) + chunkHeader.chunkSize);
}

@@ -54,6 +51,6 @@ }

case 'PROP': // 3.2 PROPERTY CHUNK
const propType = await this.tokenizer.readToken(FourCC_1.FourCcToken);
const propType = await this.tokenizer.readToken(FourCcToken);
if (propType !== 'SND ')
throw new Error('Unexpected PROP-chunk ID');
await this.handleSoundPropertyChunks(header.chunkSize - BigInt(FourCC_1.FourCcToken.len));
await this.handleSoundPropertyChunks(header.chunkSize - BigInt(FourCcToken.len));
break;

@@ -63,3 +60,3 @@ case 'ID3': // Unofficial ID3 tag support

const rst = strtok3.fromBuffer(id3_data);
await new ID3v2Parser_1.ID3v2Parser().parse(this.metadata, rst, this.options);
await new ID3v2Parser().parse(this.metadata, rst, this.options);
break;

@@ -83,3 +80,3 @@ default:

while (remainingSize > 0) {
const sndPropHeader = await this.tokenizer.readToken(DsdiffToken_1.ChunkHeader64);
const sndPropHeader = await this.tokenizer.readToken(ChunkHeader64);
debug(`Sound-property-chunk[ID=${sndPropHeader.chunkID}, size=${sndPropHeader.chunkSize}]`);

@@ -98,3 +95,3 @@ const p0 = this.tokenizer.position;

case 'CMPR': // 3.2.3 Compression Type Chunk
const compressionIdCode = (await this.tokenizer.readToken(FourCC_1.FourCcToken)).trim();
const compressionIdCode = (await this.tokenizer.readToken(FourCcToken)).trim();
const count = await this.tokenizer.readToken(Token.UINT8);

@@ -129,3 +126,3 @@ const compressionName = await this.tokenizer.readToken(new Token.StringType(count, 'ascii'));

}
remainingSize -= BigInt(DsdiffToken_1.ChunkHeader64.len) + sndPropHeader.chunkSize;
remainingSize -= BigInt(ChunkHeader64.len) + sndPropHeader.chunkSize;
debug(`Parsing sound-property-chunks, remainingSize=${remainingSize}`);

@@ -141,7 +138,7 @@ }

const channels = [];
while (remainingSize >= FourCC_1.FourCcToken.len) {
const channelId = await this.tokenizer.readToken(FourCC_1.FourCcToken);
while (remainingSize >= FourCcToken.len) {
const channelId = await this.tokenizer.readToken(FourCcToken);
debug(`Channel[ID=${channelId}]`);
channels.push(channelId);
remainingSize -= BigInt(FourCC_1.FourCcToken.len);
remainingSize -= BigInt(FourCcToken.len);
}

@@ -152,2 +149,1 @@ debug(`Channels: ${channels.join(', ')}`);

}
exports.DsdiffParser = DsdiffParser;

lib/dsdiff/DsdiffToken.js
@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChunkHeader64 = void 0;
const Token = require("token-types");
const FourCC_1 = require("../common/FourCC");
import * as Token from 'token-types';
import { FourCcToken } from '../common/FourCC.js';
/**

@@ -11,3 +8,3 @@ * DSDIFF chunk header

*/
exports.ChunkHeader64 = {
export const ChunkHeader64 = {
len: 12,

@@ -17,3 +14,3 @@ get: (buf, off) => {

// Group-ID
chunkID: FourCC_1.FourCcToken.get(buf, off),
chunkID: FourCcToken.get(buf, off),
// Size

@@ -20,0 +17,0 @@ chunkSize: Token.INT64_BE.get(buf, off + 4)

lib/dsf/DsfChunk.js
@@ -1,13 +0,10 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FormatChunk = exports.ChannelType = exports.DsdChunk = exports.ChunkHeader = void 0;
const Token = require("token-types");
const FourCC_1 = require("../common/FourCC");
import * as Token from 'token-types';
import { FourCcToken } from '../common/FourCC.js';
/**
* Common chunk DSD header: the 'chunk name (Four-CC)' & chunk size
*/
exports.ChunkHeader = {
export const ChunkHeader = {
len: 12,
get: (buf, off) => {
return { id: FourCC_1.FourCcToken.get(buf, off), size: Token.UINT64_LE.get(buf, off + 4) };
return { id: FourCcToken.get(buf, off), size: Token.UINT64_LE.get(buf, off + 4) };
}

@@ -18,3 +15,3 @@ };

*/
exports.DsdChunk = {
export const DsdChunk = {
len: 16,

@@ -28,3 +25,3 @@ get: (buf, off) => {

};
var ChannelType;
export var ChannelType;
(function (ChannelType) {

@@ -38,7 +35,7 @@ ChannelType[ChannelType["mono"] = 1] = "mono";

ChannelType[ChannelType["5.1 channels"] = 7] = "5.1 channels";
})(ChannelType = exports.ChannelType || (exports.ChannelType = {}));
})(ChannelType = ChannelType || (ChannelType = {}));
/**
* Common chunk DSD header: the 'chunk name (Four-CC)' & chunk size
*/
exports.FormatChunk = {
export const FormatChunk = {
len: 40,

@@ -45,0 +42,0 @@ get: (buf, off) => {

lib/dsf/DsfParser.js
@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DsfParser = void 0;
const debug_1 = require("debug");
const AbstractID3Parser_1 = require("../id3v2/AbstractID3Parser");
const DsfChunk_1 = require("./DsfChunk");
const ID3v2Parser_1 = require("../id3v2/ID3v2Parser");
const debug = (0, debug_1.default)('music-metadata:parser:DSF');
import initDebug from 'debug';
import { AbstractID3Parser } from '../id3v2/AbstractID3Parser.js';
import { ChunkHeader, DsdChunk, FormatChunk } from './DsfChunk.js';
import { ID3v2Parser } from "../id3v2/ID3v2Parser.js";
const debug = initDebug('music-metadata:parser:DSF');
/**

@@ -13,6 +10,6 @@ * DSF (dsd stream file) File Parser

*/
class DsfParser extends AbstractID3Parser_1.AbstractID3Parser {
export class DsfParser extends AbstractID3Parser {
async postId3v2Parse() {
const p0 = this.tokenizer.position; // mark start position, normally 0
const chunkHeader = await this.tokenizer.readToken(DsfChunk_1.ChunkHeader);
const chunkHeader = await this.tokenizer.readToken(ChunkHeader);
if (chunkHeader.id !== 'DSD ')

@@ -22,3 +19,3 @@ throw new Error('Invalid chunk signature');

this.metadata.setFormat('lossless', true);
const dsdChunk = await this.tokenizer.readToken(DsfChunk_1.DsdChunk);
const dsdChunk = await this.tokenizer.readToken(DsdChunk);
if (dsdChunk.metadataPointer === BigInt(0)) {

@@ -32,12 +29,12 @@ debug(`No ID3v2 tag present`);

await this.tokenizer.ignore(Number(dsdChunk.metadataPointer) - this.tokenizer.position - p0);
return new ID3v2Parser_1.ID3v2Parser().parse(this.metadata, this.tokenizer, this.options);
return new ID3v2Parser().parse(this.metadata, this.tokenizer, this.options);
}
}
async parseChunks(bytesRemaining) {
while (bytesRemaining >= DsfChunk_1.ChunkHeader.len) {
const chunkHeader = await this.tokenizer.readToken(DsfChunk_1.ChunkHeader);
while (bytesRemaining >= ChunkHeader.len) {
const chunkHeader = await this.tokenizer.readToken(ChunkHeader);
debug(`Parsing chunk name=${chunkHeader.id} size=${chunkHeader.size}`);
switch (chunkHeader.id) {
case 'fmt ':
const formatChunk = await this.tokenizer.readToken(DsfChunk_1.FormatChunk);
const formatChunk = await this.tokenizer.readToken(FormatChunk);
this.metadata.setFormat('numberOfChannels', formatChunk.channelNum);

@@ -52,3 +49,3 @@ this.metadata.setFormat('sampleRate', formatChunk.samplingFrequency);

default:
this.tokenizer.ignore(Number(chunkHeader.size) - DsfChunk_1.ChunkHeader.len);
this.tokenizer.ignore(Number(chunkHeader.size) - ChunkHeader.len);
break;

@@ -60,2 +57,1 @@ }

}
exports.DsfParser = DsfParser;

lib/flac/FlacParser.js
@@ -1,13 +0,10 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FlacParser = void 0;
const token_types_1 = require("token-types");
const debug_1 = require("debug");
const util = require("../common/Util");
const Vorbis_1 = require("../ogg/vorbis/Vorbis");
const AbstractID3Parser_1 = require("../id3v2/AbstractID3Parser");
const FourCC_1 = require("../common/FourCC");
const VorbisParser_1 = require("../ogg/vorbis/VorbisParser");
const VorbisDecoder_1 = require("../ogg/vorbis/VorbisDecoder");
const debug = (0, debug_1.default)('music-metadata:parser:FLAC');
import { UINT16_BE, UINT24_BE, Uint8ArrayType } from 'token-types';
import initDebug from 'debug';
import * as util from '../common/Util.js';
import { VorbisPictureToken } from '../ogg/vorbis/Vorbis.js';
import { AbstractID3Parser } from '../id3v2/AbstractID3Parser.js';
import { FourCcToken } from '../common/FourCC.js';
import { VorbisParser } from '../ogg/vorbis/VorbisParser.js';
import { VorbisDecoder } from '../ogg/vorbis/VorbisDecoder.js';
const debug = initDebug('music-metadata:parser:FLAC');
/**

@@ -27,3 +24,3 @@ * FLAC supports up to 128 kinds of metadata blocks; currently the following are defined:

})(BlockType || (BlockType = {}));
class FlacParser extends AbstractID3Parser_1.AbstractID3Parser {
export class FlacParser extends AbstractID3Parser {
constructor() {

@@ -41,7 +38,7 @@ super(...arguments);

super.init(metadata, tokenizer, options);
this.vorbisParser = new VorbisParser_1.VorbisParser(metadata, options);
this.vorbisParser = new VorbisParser(metadata, options);
return this;
}
async postId3v2Parse() {
const fourCC = await this.tokenizer.readToken(FourCC_1.FourCcToken);
const fourCC = await this.tokenizer.readToken(FourCcToken);
if (fourCC.toString() !== 'fLaC') {

@@ -108,4 +105,4 @@ throw new Error('Invalid FLAC preamble');

async parseComment(dataLen) {
const data = await this.tokenizer.readToken(new token_types_1.Uint8ArrayType(dataLen));
const decoder = new VorbisDecoder_1.VorbisDecoder(data, 0);
const data = await this.tokenizer.readToken(new Uint8ArrayType(dataLen));
const decoder = new VorbisDecoder(data, 0);
decoder.readStringUtf8(); // vendor (skip)

@@ -123,3 +120,3 @@ const commentListLength = decoder.readInt32();

else {
const picture = await this.tokenizer.readToken(new Vorbis_1.VorbisPictureToken(dataLen));
const picture = await this.tokenizer.readToken(new VorbisPictureToken(dataLen));
this.vorbisParser.addTag('METADATA_BLOCK_PICTURE', picture);

@@ -129,3 +126,2 @@ }

}
exports.FlacParser = FlacParser;
class Metadata {

@@ -139,3 +135,3 @@ }

type: util.getBitAllignedNumber(buf, off, 1, 7),
length: token_types_1.UINT24_BE.get(buf, off + 1)
length: UINT24_BE.get(buf, off + 1)
};

@@ -153,16 +149,16 @@ }

// The minimum block size (in samples) used in the stream.
minimumBlockSize: token_types_1.UINT16_BE.get(buf, off),
minimumBlockSize: UINT16_BE.get(buf, off),
// The maximum block size (in samples) used in the stream.
// (Minimum blocksize == maximum blocksize) implies a fixed-blocksize stream.
maximumBlockSize: token_types_1.UINT16_BE.get(buf, off + 2) / 1000,
maximumBlockSize: UINT16_BE.get(buf, off + 2) / 1000,
// The minimum frame size (in bytes) used in the stream.
// May be 0 to imply the value is not known.
minimumFrameSize: token_types_1.UINT24_BE.get(buf, off + 4),
minimumFrameSize: UINT24_BE.get(buf, off + 4),
// The maximum frame size (in bytes) used in the stream.
// May be 0 to imply the value is not known.
maximumFrameSize: token_types_1.UINT24_BE.get(buf, off + 7),
maximumFrameSize: UINT24_BE.get(buf, off + 7),
// Sample rate in Hz. Though 20 bits are available,
// the maximum sample rate is limited by the structure of frame headers to 655350Hz.
// Also, a value of 0 is invalid.
sampleRate: token_types_1.UINT24_BE.get(buf, off + 10) >> 4,
sampleRate: UINT24_BE.get(buf, off + 10) >> 4,
// probably slower: sampleRate: common.getBitAllignedNumber(buf, off + 10, 0, 20),

@@ -179,5 +175,5 @@ // (number of channels)-1. FLAC supports from 1 to 8 channels

// the MD5 hash of the file (see notes for usage... it's a littly tricky)
fileMD5: new token_types_1.Uint8ArrayType(16).get(buf, off + 18)
fileMD5: new Uint8ArrayType(16).get(buf, off + 18)
};
}
};

lib/id3v1/ID3v1Parser.js
@@ -1,10 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hasID3v1Header = exports.ID3v1Parser = exports.Genres = void 0;
const debug_1 = require("debug");
const token_types_1 = require("token-types");
const util = require("../common/Util");
const BasicParser_1 = require("../common/BasicParser");
const APEv2Parser_1 = require("../apev2/APEv2Parser");
const debug = (0, debug_1.default)('music-metadata:parser:ID3v1');
import initDebug from 'debug';
import { StringType, UINT8 } from 'token-types';
import * as util from '../common/Util.js';
import { BasicParser } from '../common/BasicParser.js';
import { APEv2Parser } from '../apev2/APEv2Parser.js';
const debug = initDebug('music-metadata:parser:ID3v1');
/**

@@ -14,3 +11,3 @@ * ID3v1 Genre mappings

*/
exports.Genres = [
export const Genres = [
'Blues', 'Classic Rock', 'Country', 'Dance', 'Disco', 'Funk', 'Grunge', 'Hip-Hop',

@@ -66,10 +63,10 @@ 'Jazz', 'Metal', 'New Age', 'Oldies', 'Other', 'Pop', 'R&B', 'Rap', 'Reggae', 'Rock',

// ID3v1.1 separator for track
zeroByte: token_types_1.UINT8.get(buf, off + 127),
zeroByte: UINT8.get(buf, off + 127),
// track: ID3v1.1 field added by Michael Mutschler
track: token_types_1.UINT8.get(buf, off + 126),
genre: token_types_1.UINT8.get(buf, off + 127)
track: UINT8.get(buf, off + 126),
genre: UINT8.get(buf, off + 127)
} : null;
}
};
class Id3v1StringType extends token_types_1.StringType {
class Id3v1StringType extends StringType {
constructor(len) {

@@ -85,6 +82,6 @@ super(len, 'binary');

}
class ID3v1Parser extends BasicParser_1.BasicParser {
export class ID3v1Parser extends BasicParser {
static getGenre(genreIndex) {
if (genreIndex < exports.Genres.length) {
return exports.Genres[genreIndex];
if (genreIndex < Genres.length) {
return Genres[genreIndex];
}

@@ -100,3 +97,3 @@ return undefined; // ToDO: generate warning

this.tokenizer.ignore(this.options.apeHeader.offset - this.tokenizer.position);
const apeParser = new APEv2Parser_1.APEv2Parser();
const apeParser = new APEv2Parser();
apeParser.init(this.metadata, this.tokenizer, this.options);

@@ -129,4 +126,3 @@ await apeParser.parseTags(this.options.apeHeader.footer);

}
exports.ID3v1Parser = ID3v1Parser;
async function hasID3v1Header(reader) {
export async function hasID3v1Header(reader) {
if (reader.fileSize >= 128) {

@@ -139,2 +135,1 @@ const tag = Buffer.alloc(3);

}
exports.hasID3v1Header = hasID3v1Header;

lib/id3v1/ID3v1TagMap.js
@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ID3v1TagMapper = void 0;
const GenericTagMapper_1 = require("../common/GenericTagMapper");
import { CommonTagMapper } from '../common/GenericTagMapper.js';
/**

@@ -17,3 +14,3 @@ * ID3v1 tag mappings

};
class ID3v1TagMapper extends GenericTagMapper_1.CommonTagMapper {
export class ID3v1TagMapper extends CommonTagMapper {
constructor() {

@@ -23,3 +20,2 @@ super(['ID3v1'], id3v1TagMap);

}
exports.ID3v1TagMapper = ID3v1TagMapper;
//# sourceMappingURL=ID3v1TagMap.js.map

lib/id3v2/AbstractID3Parser.js
@@ -1,21 +0,18 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AbstractID3Parser = void 0;
const core_1 = require("strtok3/lib/core");
const debug_1 = require("debug");
const ID3v2Token_1 = require("./ID3v2Token");
const ID3v2Parser_1 = require("./ID3v2Parser");
const ID3v1Parser_1 = require("../id3v1/ID3v1Parser");
const BasicParser_1 = require("../common/BasicParser");
const debug = (0, debug_1.default)('music-metadata:parser:ID3');
import { EndOfStreamError } from 'strtok3/core';
import initDebug from 'debug';
import { ID3v2Header } from './ID3v2Token.js';
import { ID3v2Parser } from './ID3v2Parser.js';
import { ID3v1Parser } from '../id3v1/ID3v1Parser.js';
import { BasicParser } from '../common/BasicParser.js';
const debug = initDebug('music-metadata:parser:ID3');
/**
 * Abstract parser which tries to read ID3v2 and ID3v1 headers.
*/
class AbstractID3Parser extends BasicParser_1.BasicParser {
export class AbstractID3Parser extends BasicParser {
constructor() {
super(...arguments);
this.id3parser = new ID3v2Parser_1.ID3v2Parser();
this.id3parser = new ID3v2Parser();
}
static async startsWithID3v2Header(tokenizer) {
return (await tokenizer.peekToken(ID3v2Token_1.ID3v2Header)).fileIdentifier === 'ID3';
return (await tokenizer.peekToken(ID3v2Header)).fileIdentifier === 'ID3';
}

@@ -27,3 +24,3 @@ async parse() {

catch (err) {
if (err instanceof core_1.EndOfStreamError) {
if (err instanceof EndOfStreamError) {
debug(`End-of-stream`);

@@ -47,3 +44,3 @@ }

else {
const id3v1parser = new ID3v1Parser_1.ID3v1Parser();
const id3v1parser = new ID3v1Parser();
await id3v1parser.init(this.metadata, this.tokenizer, this.options).parse();

@@ -54,3 +51,3 @@ this.finalize();

async tryReadId3v2Headers() {
const id3Header = await this.tokenizer.peekToken(ID3v2Token_1.ID3v2Header);
const id3Header = await this.tokenizer.peekToken(ID3v2Header);
if (id3Header.fileIdentifier === 'ID3') {

@@ -63,2 +60,1 @@ debug('Found ID3v2 header, pos=%s', this.tokenizer.position);

}
exports.AbstractID3Parser = AbstractID3Parser;

@@ -1,12 +0,9 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FrameParser = exports.parseGenre = void 0;
const debug_1 = require("debug");
const Token = require("token-types");
const util = require("../common/Util");
const ID3v2Token_1 = require("./ID3v2Token");
const ID3v1Parser_1 = require("../id3v1/ID3v1Parser");
const debug = (0, debug_1.default)('music-metadata:id3v2:frame-parser');
import initDebug from 'debug';
import * as Token from 'token-types';
import * as util from '../common/Util.js';
import { AttachedPictureType, TextEncodingToken } from './ID3v2Token.js';
import { Genres } from '../id3v1/ID3v1Parser.js';
const debug = initDebug('music-metadata:id3v2:frame-parser');
const defaultEnc = 'latin1'; // latin1 == iso-8859-1;
function parseGenre(origVal) {
export function parseGenre(origVal) {
// match everything inside parentheses

@@ -45,3 +42,3 @@ const genres = [];

if (genres.length === 0 && word.match(/^\d*$/)) {
word = ID3v1Parser_1.Genres[word];
word = Genres[word];
}

@@ -52,3 +49,2 @@ genres.push(word);

}
exports.parseGenre = parseGenre;
function parseGenreCode(code) {

@@ -60,6 +56,6 @@ if (code === 'RX')

if (code.match(/^\d*$/)) {
return ID3v1Parser_1.Genres[code];
return Genres[code];
}
}
class FrameParser {
export class FrameParser {
/**

@@ -79,3 +75,3 @@ * Create id3v2 frame parser

}
const { encoding, bom } = ID3v2Token_1.TextEncodingToken.get(uint8Array, 0);
const { encoding, bom } = TextEncodingToken.get(uint8Array, 0);
const length = uint8Array.length;

@@ -164,3 +160,3 @@ let offset = 0;

pic.format = FrameParser.fixPictureMimeType(pic.format);
pic.type = ID3v2Token_1.AttachedPictureType[uint8Array[offset]];
pic.type = AttachedPictureType[uint8Array[offset]];
offset += 1;

@@ -334,2 +330,1 @@ fzero = util.findZero(uint8Array, offset, length, encoding);

}
exports.FrameParser = FrameParser;

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ID3v22TagMapper = exports.id3v22TagMap = void 0;
const CaseInsensitiveTagMap_1 = require("../common/CaseInsensitiveTagMap");
import { CaseInsensitiveTagMap } from '../common/CaseInsensitiveTagMap.js';
/**
* ID3v2.2 tag mappings
*/
exports.id3v22TagMap = {
export const id3v22TagMap = {
TT2: 'title',

@@ -50,8 +47,7 @@ TP1: 'artist',

};
class ID3v22TagMapper extends CaseInsensitiveTagMap_1.CaseInsensitiveTagMap {
export class ID3v22TagMapper extends CaseInsensitiveTagMap {
constructor() {
super(['ID3v2.2'], exports.id3v22TagMap);
super(['ID3v2.2'], id3v22TagMap);
}
}
exports.ID3v22TagMapper = ID3v22TagMapper;
//# sourceMappingURL=ID3v22TagMapper.js.map

@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ID3v24TagMapper = void 0;
const GenericTagMapper_1 = require("../common/GenericTagMapper");
const CaseInsensitiveTagMap_1 = require("../common/CaseInsensitiveTagMap");
const util = require("../common/Util");
import { CommonTagMapper } from '../common/GenericTagMapper.js';
import { CaseInsensitiveTagMap } from '../common/CaseInsensitiveTagMap.js';
import * as util from '../common/Util.js';
/**

@@ -143,7 +140,7 @@ * ID3v2.3/ID3v2.4 tag mappings

};
class ID3v24TagMapper extends CaseInsensitiveTagMap_1.CaseInsensitiveTagMap {
export class ID3v24TagMapper extends CaseInsensitiveTagMap {
static toRating(popm) {
return {
source: popm.email,
rating: popm.rating > 0 ? (popm.rating - 1) / 254 * GenericTagMapper_1.CommonTagMapper.maxRatingScore : undefined
rating: popm.rating > 0 ? (popm.rating - 1) / 254 * CommonTagMapper.maxRatingScore : undefined
};
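
Worked through, the POPM mapping above compresses the 1-255 rating byte onto the library's normalized scale; this assumes CommonTagMapper.maxRatingScore is 1, which matches how the common rating field is documented elsewhere in the diff ("Normalized rating [0..1]"):

// rating = (popm.rating - 1) / 254 * maxRatingScore
//   POPM 255 -> (255 - 1) / 254 = 1.0   (maximum)
//   POPM 128 -> (128 - 1) / 254 = 0.5
//   POPM   1 -> (  1 - 1) / 254 = 0.0   (lowest explicit rating)
//   POPM   0 -> undefined               (no rating stored)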

@@ -194,3 +191,2 @@ }

}
exports.ID3v24TagMapper = ID3v24TagMapper;
//# sourceMappingURL=ID3v24TagMapper.js.map

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ID3v2Parser = void 0;
const Token = require("token-types");
const util = require("../common/Util");
const FrameParser_1 = require("./FrameParser");
const ID3v2Token_1 = require("./ID3v2Token");
class ID3v2Parser {
import * as Token from 'token-types';
import * as util from '../common/Util.js';
import { FrameParser } from './FrameParser.js';
import { ExtendedHeader, ID3v2Header, UINT32SYNCSAFE } from './ID3v2Token.js';
export class ID3v2Parser {
static removeUnsyncBytes(buffer) {

@@ -52,3 +49,3 @@ let readI = 0;

static readFrameData(uint8Array, frameHeader, majorVer, includeCovers, warningCollector) {
const frameParser = new FrameParser_1.FrameParser(majorVer, warningCollector);
const frameParser = new FrameParser(majorVer, warningCollector);
switch (majorVer) {

@@ -83,3 +80,3 @@ case 2:

this.options = options;
const id3Header = await this.tokenizer.readToken(ID3v2Token_1.ID3v2Header);
const id3Header = await this.tokenizer.readToken(ID3v2Header);
if (id3Header.fileIdentifier !== 'ID3') {

@@ -93,4 +90,4 @@ throw new Error('expected ID3-header file-identifier \'ID3\' was not found');

async parseExtendedHeader() {
const extendedHeader = await this.tokenizer.readToken(ID3v2Token_1.ExtendedHeader);
const dataRemaining = extendedHeader.size - ID3v2Token_1.ExtendedHeader.len;
const extendedHeader = await this.tokenizer.readToken(ExtendedHeader);
const dataRemaining = extendedHeader.size - ExtendedHeader.len;
return dataRemaining > 0 ? this.parseExtendedHeaderData(dataRemaining, extendedHeader.size) : this.parseId3Data(this.id3Header.size - extendedHeader.size);

@@ -172,3 +169,3 @@ }

id: Buffer.from(uint8Array.slice(0, 4)).toString('ascii'),
length: (majorVer === 4 ? ID3v2Token_1.UINT32SYNCSAFE : Token.UINT32_BE).get(uint8Array, 4),
length: (majorVer === 4 ? UINT32SYNCSAFE : Token.UINT32_BE).get(uint8Array, 4),
flags: ID3v2Parser.readFrameFlags(uint8Array.slice(8, 10))

@@ -186,2 +183,1 @@ };

}
exports.ID3v2Parser = ID3v2Parser;

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TextEncodingToken = exports.ExtendedHeader = exports.ID3v2Header = exports.UINT32SYNCSAFE = exports.AttachedPictureType = void 0;
const Token = require("token-types");
const util = require("../common/Util");
import * as Token from 'token-types';
import * as util from '../common/Util.js';
/**

@@ -10,3 +7,3 @@ * The picture type according to the ID3v2 APIC frame

*/
var AttachedPictureType;
export var AttachedPictureType;
(function (AttachedPictureType) {

@@ -34,3 +31,3 @@ AttachedPictureType[AttachedPictureType["Other"] = 0] = "Other";

AttachedPictureType[AttachedPictureType["Publisher/Studio logotype"] = 20] = "Publisher/Studio logotype";
})(AttachedPictureType = exports.AttachedPictureType || (exports.AttachedPictureType = {}));
})(AttachedPictureType = AttachedPictureType || (AttachedPictureType = {}));
/**

@@ -40,3 +37,3 @@ * 28 bits (representing up to 256MB) integer, the msb is 0 to avoid 'false syncsignals'.

*/
exports.UINT32SYNCSAFE = {
export const UINT32SYNCSAFE = {
get: (buf, off) => {

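Syncsafe integers keep the most significant bit of every byte at 0, so each byte carries 7 payload bits. A minimal decoding sketch equivalent to the UINT32SYNCSAFE token above, with a worked example; readSyncSafeUInt32 is an illustrative name only:

// Sketch only: ID3v2 syncsafe 28-bit integer, 7 bits per byte, big-endian.
const readSyncSafeUInt32 = (buf, off = 0) =>
  ((buf[off] & 0x7f) << 21) |
  ((buf[off + 1] & 0x7f) << 14) |
  ((buf[off + 2] & 0x7f) << 7) |
  (buf[off + 3] & 0x7f);

// Example: tag-size bytes 00 00 02 01 -> (2 << 7) | 1 = 257 bytes.
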
@@ -53,3 +50,3 @@ return buf[off + 3] & 0x7f | ((buf[off + 2]) << 7) |

*/
exports.ID3v2Header = {
export const ID3v2Header = {
len: 10,

@@ -75,7 +72,7 @@ get: (buf, off) => {

},
size: exports.UINT32SYNCSAFE.get(buf, off + 6)
size: UINT32SYNCSAFE.get(buf, off + 6)
};
}
};
exports.ExtendedHeader = {
export const ExtendedHeader = {
len: 10,

@@ -95,3 +92,3 @@ get: (buf, off) => {

};
exports.TextEncodingToken = {
export const TextEncodingToken = {
len: 1,

@@ -98,0 +95,0 @@ get: (uint8Array, off) => {

@@ -1,10 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Header = void 0;
const Token = require("token-types");
const FourCC_1 = require("../common/FourCC");
import * as Token from 'token-types';
import { FourCcToken } from '../common/FourCC.js';
/**
* Common AIFF chunk header
*/
exports.Header = {
export const Header = {
len: 8,

@@ -14,3 +11,3 @@ get: (buf, off) => {

// Chunk type ID
chunkID: FourCC_1.FourCcToken.get(buf, off),
chunkID: FourCcToken.get(buf, off),
// Chunk size

@@ -17,0 +14,0 @@ chunkSize: Number(BigInt(Token.UINT32_BE.get(buf, off + 4)))

@@ -1,14 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ratingToStars = exports.orderTags = exports.parseFile = exports.parseStream = exports.selectCover = exports.parseBuffer = exports.parseFromTokenizer = void 0;
const strtok3 = require("strtok3");
const Core = require("./core");
const ParserFactory_1 = require("./ParserFactory");
const debug_1 = require("debug");
const RandomFileReader_1 = require("./common/RandomFileReader");
const debug = (0, debug_1.default)("music-metadata:parser");
var core_1 = require("./core");
Object.defineProperty(exports, "parseFromTokenizer", { enumerable: true, get: function () { return core_1.parseFromTokenizer; } });
Object.defineProperty(exports, "parseBuffer", { enumerable: true, get: function () { return core_1.parseBuffer; } });
Object.defineProperty(exports, "selectCover", { enumerable: true, get: function () { return core_1.selectCover; } });
import * as strtok3 from 'strtok3';
import initDebug from 'debug';
import { parseFromTokenizer, scanAppendingHeaders } from './core.js';
import { ParserFactory } from './ParserFactory.js';
import { RandomFileReader } from './common/RandomFileReader.js';
export { parseFromTokenizer, parseBuffer, selectCover, orderTags, ratingToStars } from './core.js';
const debug = initDebug('music-metadata:parser');
/**

@@ -21,7 +15,6 @@ * Parse audio from Node Stream.Readable

*/
async function parseStream(stream, fileInfo, options = {}) {
export async function parseStream(stream, fileInfo, options = {}) {
const tokenizer = await strtok3.fromStream(stream, typeof fileInfo === 'string' ? { mimeType: fileInfo } : fileInfo);
return Core.parseFromTokenizer(tokenizer, options);
return parseFromTokenizer(tokenizer, options);
}
exports.parseStream = parseStream;
/**

@@ -33,8 +26,8 @@ * Parse audio from Node file

*/
async function parseFile(filePath, options = {}) {
export async function parseFile(filePath, options = {}) {
debug(`parseFile: ${filePath}`);
const fileTokenizer = await strtok3.fromFile(filePath);
const fileReader = await RandomFileReader_1.RandomFileReader.init(filePath, fileTokenizer.fileInfo.size);
const fileReader = await RandomFileReader.init(filePath, fileTokenizer.fileInfo.size);
try {
await Core.scanAppendingHeaders(fileReader, options);
await scanAppendingHeaders(fileReader, options);
}

@@ -45,6 +38,6 @@ finally {

try {
const parserName = ParserFactory_1.ParserFactory.getParserIdForExtension(filePath);
const parserName = ParserFactory.getParserIdForExtension(filePath);
if (!parserName)
debug(' Parser could not be determined by file extension');
return await ParserFactory_1.ParserFactory.parse(fileTokenizer, parserName, options);
return await ParserFactory.parse(fileTokenizer, parserName, options);
}

@@ -55,24 +48,1 @@ finally {

}
exports.parseFile = parseFile;
/**
* Create a dictionary ordered by their tag id (key)
* @param nativeTags - List of tags
* @returns Tags indexed by id
*/
exports.orderTags = Core.orderTags;
/**
* Convert rating to 1-5 star rating
* @param rating - Normalized rating [0..1] (common.rating[n].rating)
* @returns Number of stars: 1, 2, 3, 4 or 5 stars
*/
exports.ratingToStars = Core.ratingToStars;
/**
* Define default module exports
*/
exports.default = {
parseStream,
parseFile,
parseFromTokenizer: Core.parseFromTokenizer,
parseBuffer: Core.parseBuffer,
selectCover: Core.selectCover
};
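
This file is the public entry point, so this hunk is the part of the 8.0.0 diff consumers notice first: the CommonJS `exports.default` convenience object does not survive into the added lines, and parseFile, parseStream, parseFromTokenizer, parseBuffer, selectCover, orderTags and ratingToStars are exposed as named ESM exports instead. A hedged migration sketch (the file name song.mp3 is only an example):

// music-metadata 7.x (CommonJS):
//   const mm = require('music-metadata');
//   const metadata = await mm.parseFile('song.mp3');

// music-metadata 8.x (ESM only):
import { parseFile } from 'music-metadata';

const metadata = await parseFile('song.mp3');
console.log(metadata.common.title, metadata.format.duration);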

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getLyricsHeaderLength = exports.endTag2 = void 0;
exports.endTag2 = 'LYRICS200';
async function getLyricsHeaderLength(reader) {
export const endTag2 = 'LYRICS200';
export async function getLyricsHeaderLength(reader) {
if (reader.fileSize >= 143) {

@@ -11,3 +8,3 @@ const buf = Buffer.alloc(15);

const tag = txt.substr(6);
if (tag === exports.endTag2) {
if (tag === endTag2) {
return parseInt(txt.substr(0, 6), 10) + 15;

@@ -18,2 +15,1 @@ }

}
exports.getLyricsHeaderLength = getLyricsHeaderLength;
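
For orientation, the 15 bytes inspected above form the Lyrics3v2 footer that sits directly in front of the ID3v1 tag; the fileSize >= 143 guard (128 for ID3v1 plus 15 for the footer) implies the read position. A comment-only sketch of the layout, with '000263' as an example size value:

// Lyrics3v2 footer, located 143 bytes before end-of-file:
//   bytes 0..5  : ASCII digits, size of the lyrics block   e.g. '000263'
//   bytes 6..14 : the literal end tag 'LYRICS200'
// getLyricsHeaderLength then returns parseInt('000263', 10) + 15 = 278.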

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.elements = void 0;
const types_1 = require("./types");
import { DataType } from './types.js';
/**

@@ -11,13 +8,13 @@ * Elements of document type description

*/
exports.elements = {
export const elements = {
0x1a45dfa3: {
name: 'ebml',
container: {
0x4286: { name: 'ebmlVersion', value: types_1.DataType.uint },
0x42f7: { name: 'ebmlReadVersion', value: types_1.DataType.uint },
0x42f2: { name: 'ebmlMaxIDWidth', value: types_1.DataType.uint },
0x42f3: { name: 'ebmlMaxSizeWidth', value: types_1.DataType.uint },
0x4282: { name: 'docType', value: types_1.DataType.string },
0x4287: { name: 'docTypeVersion', value: types_1.DataType.uint },
0x4285: { name: 'docTypeReadVersion', value: types_1.DataType.uint } // 5.1.7
0x4286: { name: 'ebmlVersion', value: DataType.uint },
0x42f7: { name: 'ebmlReadVersion', value: DataType.uint },
0x42f2: { name: 'ebmlMaxIDWidth', value: DataType.uint },
0x42f3: { name: 'ebmlMaxSizeWidth', value: DataType.uint },
0x4282: { name: 'docType', value: DataType.string },
0x4287: { name: 'docTypeVersion', value: DataType.uint },
0x4285: { name: 'docTypeReadVersion', value: DataType.uint } // 5.1.7
}

@@ -36,4 +33,4 @@ },

container: {
0x53ab: { name: 'seekId', value: types_1.DataType.binary },
0x53ac: { name: 'seekPosition', value: types_1.DataType.uint }
0x53ab: { name: 'seekId', value: DataType.binary },
0x53ac: { name: 'seekPosition', value: DataType.uint }
}

@@ -47,14 +44,14 @@ }

container: {
0x73a4: { name: 'uid', value: types_1.DataType.uid },
0x7384: { name: 'filename', value: types_1.DataType.string },
0x3cb923: { name: 'prevUID', value: types_1.DataType.uid },
0x3c83ab: { name: 'prevFilename', value: types_1.DataType.string },
0x3eb923: { name: 'nextUID', value: types_1.DataType.uid },
0x3e83bb: { name: 'nextFilename', value: types_1.DataType.string },
0x2ad7b1: { name: 'timecodeScale', value: types_1.DataType.uint },
0x4489: { name: 'duration', value: types_1.DataType.float },
0x4461: { name: 'dateUTC', value: types_1.DataType.uint },
0x7ba9: { name: 'title', value: types_1.DataType.string },
0x4d80: { name: 'muxingApp', value: types_1.DataType.string },
0x5741: { name: 'writingApp', value: types_1.DataType.string }
0x73a4: { name: 'uid', value: DataType.uid },
0x7384: { name: 'filename', value: DataType.string },
0x3cb923: { name: 'prevUID', value: DataType.uid },
0x3c83ab: { name: 'prevFilename', value: DataType.string },
0x3eb923: { name: 'nextUID', value: DataType.uid },
0x3e83bb: { name: 'nextFilename', value: DataType.string },
0x2ad7b1: { name: 'timecodeScale', value: DataType.uint },
0x4489: { name: 'duration', value: DataType.float },
0x4461: { name: 'dateUTC', value: DataType.uint },
0x7ba9: { name: 'title', value: DataType.string },
0x4d80: { name: 'muxingApp', value: DataType.string },
0x5741: { name: 'writingApp', value: DataType.string }
}

@@ -67,6 +64,6 @@ },

container: {
0xe7: { name: 'timecode', value: types_1.DataType.uid },
0xa3: { name: 'unknown', value: types_1.DataType.binary },
0xa7: { name: 'position', value: types_1.DataType.uid },
0xab: { name: 'prevSize', value: types_1.DataType.uid }
0xe7: { name: 'timecode', value: DataType.uid },
0xa3: { name: 'unknown', value: DataType.binary },
0xa7: { name: 'position', value: DataType.uid },
0xab: { name: 'prevSize', value: DataType.uid }
}

@@ -82,23 +79,23 @@ },

container: {
0xd7: { name: 'trackNumber', value: types_1.DataType.uint },
0x73c5: { name: 'uid', value: types_1.DataType.uid },
0x83: { name: 'trackType', value: types_1.DataType.uint },
0xb9: { name: 'flagEnabled', value: types_1.DataType.bool },
0x88: { name: 'flagDefault', value: types_1.DataType.bool },
0x55aa: { name: 'flagForced', value: types_1.DataType.bool },
0x9c: { name: 'flagLacing', value: types_1.DataType.bool },
0x6de7: { name: 'minCache', value: types_1.DataType.uint },
0x6de8: { name: 'maxCache', value: types_1.DataType.uint },
0x23e383: { name: 'defaultDuration', value: types_1.DataType.uint },
0x23314f: { name: 'timecodeScale', value: types_1.DataType.float },
0x536e: { name: 'name', value: types_1.DataType.string },
0x22b59c: { name: 'language', value: types_1.DataType.string },
0x86: { name: 'codecID', value: types_1.DataType.string },
0x63a2: { name: 'codecPrivate', value: types_1.DataType.binary },
0x258688: { name: 'codecName', value: types_1.DataType.string },
0x3a9697: { name: 'codecSettings', value: types_1.DataType.string },
0x3b4040: { name: 'codecInfoUrl', value: types_1.DataType.string },
0x26b240: { name: 'codecDownloadUrl', value: types_1.DataType.string },
0xaa: { name: 'codecDecodeAll', value: types_1.DataType.bool },
0x6fab: { name: 'trackOverlay', value: types_1.DataType.uint },
0xd7: { name: 'trackNumber', value: DataType.uint },
0x73c5: { name: 'uid', value: DataType.uid },
0x83: { name: 'trackType', value: DataType.uint },
0xb9: { name: 'flagEnabled', value: DataType.bool },
0x88: { name: 'flagDefault', value: DataType.bool },
0x55aa: { name: 'flagForced', value: DataType.bool },
0x9c: { name: 'flagLacing', value: DataType.bool },
0x6de7: { name: 'minCache', value: DataType.uint },
0x6de8: { name: 'maxCache', value: DataType.uint },
0x23e383: { name: 'defaultDuration', value: DataType.uint },
0x23314f: { name: 'timecodeScale', value: DataType.float },
0x536e: { name: 'name', value: DataType.string },
0x22b59c: { name: 'language', value: DataType.string },
0x86: { name: 'codecID', value: DataType.string },
0x63a2: { name: 'codecPrivate', value: DataType.binary },
0x258688: { name: 'codecName', value: DataType.string },
0x3a9697: { name: 'codecSettings', value: DataType.string },
0x3b4040: { name: 'codecInfoUrl', value: DataType.string },
0x26b240: { name: 'codecDownloadUrl', value: DataType.string },
0xaa: { name: 'codecDecodeAll', value: DataType.bool },
0x6fab: { name: 'trackOverlay', value: DataType.uint },
// Video

@@ -108,11 +105,11 @@ 0xe0: {

container: {
0x9a: { name: 'flagInterlaced', value: types_1.DataType.bool },
0x53b8: { name: 'stereoMode', value: types_1.DataType.uint },
0xb0: { name: 'pixelWidth', value: types_1.DataType.uint },
0xba: { name: 'pixelHeight', value: types_1.DataType.uint },
0x54b0: { name: 'displayWidth', value: types_1.DataType.uint },
0x54ba: { name: 'displayHeight', value: types_1.DataType.uint },
0x54b3: { name: 'aspectRatioType', value: types_1.DataType.uint },
0x2eb524: { name: 'colourSpace', value: types_1.DataType.uint },
0x2fb523: { name: 'gammaValue', value: types_1.DataType.float }
0x9a: { name: 'flagInterlaced', value: DataType.bool },
0x53b8: { name: 'stereoMode', value: DataType.uint },
0xb0: { name: 'pixelWidth', value: DataType.uint },
0xba: { name: 'pixelHeight', value: DataType.uint },
0x54b0: { name: 'displayWidth', value: DataType.uint },
0x54ba: { name: 'displayHeight', value: DataType.uint },
0x54b3: { name: 'aspectRatioType', value: DataType.uint },
0x2eb524: { name: 'colourSpace', value: DataType.uint },
0x2fb523: { name: 'gammaValue', value: DataType.float }
}

@@ -124,8 +121,8 @@ },

container: {
0xb5: { name: 'samplingFrequency', value: types_1.DataType.float },
0x78b5: { name: 'outputSamplingFrequency', value: types_1.DataType.float },
0x9f: { name: 'channels', value: types_1.DataType.uint },
0x94: { name: 'channels', value: types_1.DataType.uint },
0x7d7b: { name: 'channelPositions', value: types_1.DataType.binary },
0x6264: { name: 'bitDepth', value: types_1.DataType.uint }
0xb5: { name: 'samplingFrequency', value: DataType.float },
0x78b5: { name: 'outputSamplingFrequency', value: DataType.float },
0x9f: { name: 'channels', value: DataType.uint },
0x94: { name: 'channels', value: DataType.uint },
0x7d7b: { name: 'channelPositions', value: DataType.binary },
0x6264: { name: 'bitDepth', value: DataType.uint }
}

@@ -140,10 +137,10 @@ },

container: {
0x5031: { name: 'order', value: types_1.DataType.uint },
0x5032: { name: 'scope', value: types_1.DataType.bool },
0x5033: { name: 'type', value: types_1.DataType.uint },
0x5031: { name: 'order', value: DataType.uint },
0x5032: { name: 'scope', value: DataType.bool },
0x5033: { name: 'type', value: DataType.uint },
0x5034: {
name: 'contentEncoding',
container: {
0x4254: { name: 'contentCompAlgo', value: types_1.DataType.uint },
0x4255: { name: 'contentCompSettings', value: types_1.DataType.binary }
0x4254: { name: 'contentCompAlgo', value: DataType.uint },
0x4255: { name: 'contentCompSettings', value: DataType.binary }
}

@@ -154,11 +151,11 @@ },

container: {
0x47e1: { name: 'contentEncAlgo', value: types_1.DataType.uint },
0x47e2: { name: 'contentEncKeyID', value: types_1.DataType.binary },
0x47e3: { name: 'contentSignature ', value: types_1.DataType.binary },
0x47e4: { name: 'ContentSigKeyID ', value: types_1.DataType.binary },
0x47e5: { name: 'contentSigAlgo ', value: types_1.DataType.uint },
0x47e6: { name: 'contentSigHashAlgo ', value: types_1.DataType.uint }
0x47e1: { name: 'contentEncAlgo', value: DataType.uint },
0x47e2: { name: 'contentEncKeyID', value: DataType.binary },
0x47e3: { name: 'contentSignature ', value: DataType.binary },
0x47e4: { name: 'ContentSigKeyID ', value: DataType.binary },
0x47e5: { name: 'contentSigAlgo ', value: DataType.uint },
0x47e6: { name: 'contentSigHashAlgo ', value: DataType.uint }
}
},
0x6264: { name: 'bitDepth', value: types_1.DataType.uint }
0x6264: { name: 'bitDepth', value: DataType.uint }
}

@@ -179,19 +176,19 @@ }

container: {
0xb3: { name: 'cueTime', value: types_1.DataType.uid },
0xb3: { name: 'cueTime', value: DataType.uid },
0xb7: {
name: 'positions',
container: {
0xf7: { name: 'track', value: types_1.DataType.uint },
0xf1: { name: 'clusterPosition', value: types_1.DataType.uint },
0x5378: { name: 'blockNumber', value: types_1.DataType.uint },
0xea: { name: 'codecState', value: types_1.DataType.uint },
0xf7: { name: 'track', value: DataType.uint },
0xf1: { name: 'clusterPosition', value: DataType.uint },
0x5378: { name: 'blockNumber', value: DataType.uint },
0xea: { name: 'codecState', value: DataType.uint },
0xdb: {
name: 'reference', container: {
0x96: { name: 'time', value: types_1.DataType.uint },
0x97: { name: 'cluster', value: types_1.DataType.uint },
0x535f: { name: 'number', value: types_1.DataType.uint },
0xeb: { name: 'codecState', value: types_1.DataType.uint }
0x96: { name: 'time', value: DataType.uint },
0x97: { name: 'cluster', value: DataType.uint },
0x535f: { name: 'number', value: DataType.uint },
0xeb: { name: 'codecState', value: DataType.uint }
}
},
0xf0: { name: 'relativePosition', value: types_1.DataType.uint } // extended
0xf0: { name: 'relativePosition', value: DataType.uint } // extended
}

@@ -211,7 +208,7 @@ }

container: {
0x467e: { name: 'description', value: types_1.DataType.string },
0x466e: { name: 'name', value: types_1.DataType.string },
0x4660: { name: 'mimeType', value: types_1.DataType.string },
0x465c: { name: 'data', value: types_1.DataType.binary },
0x46ae: { name: 'uid', value: types_1.DataType.uid }
0x467e: { name: 'description', value: DataType.string },
0x466e: { name: 'name', value: DataType.string },
0x4660: { name: 'mimeType', value: DataType.string },
0x465c: { name: 'data', value: DataType.binary },
0x46ae: { name: 'uid', value: DataType.uid }
}

@@ -231,14 +228,14 @@ }

container: {
0x73c4: { name: 'uid', value: types_1.DataType.uid },
0x91: { name: 'timeStart', value: types_1.DataType.uint },
0x92: { name: 'timeEnd', value: types_1.DataType.uid },
0x98: { name: 'hidden', value: types_1.DataType.bool },
0x4598: { name: 'enabled', value: types_1.DataType.uid },
0x73c4: { name: 'uid', value: DataType.uid },
0x91: { name: 'timeStart', value: DataType.uint },
0x92: { name: 'timeEnd', value: DataType.uid },
0x98: { name: 'hidden', value: DataType.bool },
0x4598: { name: 'enabled', value: DataType.uid },
0x8f: { name: 'track', container: {
0x89: { name: 'trackNumber', value: types_1.DataType.uid },
0x89: { name: 'trackNumber', value: DataType.uid },
0x80: {
name: 'display', container: {
0x85: { name: 'string', value: types_1.DataType.string },
0x437c: { name: 'language ', value: types_1.DataType.string },
0x437e: { name: 'country ', value: types_1.DataType.string }
0x85: { name: 'string', value: DataType.string },
0x437c: { name: 'language ', value: DataType.string },
0x437e: { name: 'country ', value: DataType.string }
}

@@ -265,8 +262,8 @@ }

container: {
0x63c5: { name: 'tagTrackUID', value: types_1.DataType.uid },
0x63c4: { name: 'tagChapterUID', value: types_1.DataType.uint },
0x63c6: { name: 'tagAttachmentUID', value: types_1.DataType.uid },
0x63ca: { name: 'targetType', value: types_1.DataType.string },
0x68ca: { name: 'targetTypeValue', value: types_1.DataType.uint },
0x63c9: { name: 'tagEditionUID', value: types_1.DataType.uid } // extended
0x63c5: { name: 'tagTrackUID', value: DataType.uid },
0x63c4: { name: 'tagChapterUID', value: DataType.uint },
0x63c6: { name: 'tagAttachmentUID', value: DataType.uid },
0x63ca: { name: 'targetType', value: DataType.string },
0x68ca: { name: 'targetTypeValue', value: DataType.uint },
0x63c9: { name: 'tagEditionUID', value: DataType.uid } // extended
}

@@ -278,8 +275,8 @@ },

container: {
0x45a3: { name: 'name', value: types_1.DataType.string },
0x4487: { name: 'string', value: types_1.DataType.string },
0x4485: { name: 'binary', value: types_1.DataType.binary },
0x447a: { name: 'language', value: types_1.DataType.string },
0x447b: { name: 'languageIETF', value: types_1.DataType.string },
0x4484: { name: 'default', value: types_1.DataType.bool } // extended
0x45a3: { name: 'name', value: DataType.string },
0x4487: { name: 'string', value: DataType.string },
0x4485: { name: 'binary', value: DataType.binary },
0x447a: { name: 'language', value: DataType.string },
0x447b: { name: 'languageIETF', value: DataType.string },
0x4484: { name: 'default', value: DataType.bool } // extended
}

@@ -286,0 +283,0 @@ }

@@ -1,10 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MatroskaParser = void 0;
const token_types_1 = require("token-types");
const debug_1 = require("debug");
const BasicParser_1 = require("../common/BasicParser");
const types_1 = require("./types");
const matroskaDtd = require("./MatroskaDtd");
const debug = (0, debug_1.default)('music-metadata:parser:matroska');
import { Float32_BE, Float64_BE, StringType, UINT8 } from 'token-types';
import initDebug from 'debug';
import { BasicParser } from '../common/BasicParser.js';
import * as matroskaDtd from './MatroskaDtd.js';
import { DataType, TargetType, TrackType } from './types.js';
const debug = initDebug('music-metadata:parser:matroska');
/**

@@ -17,3 +14,3 @@ * Extensible Binary Meta Language (EBML) parser

*/
class MatroskaParser extends BasicParser_1.BasicParser {
export class MatroskaParser extends BasicParser {
constructor() {

@@ -25,8 +22,8 @@ super();

this.ebmlMaxSizeLength = 8;
this.parserMap.set(types_1.DataType.uint, e => this.readUint(e));
this.parserMap.set(types_1.DataType.string, e => this.readString(e));
this.parserMap.set(types_1.DataType.binary, e => this.readBuffer(e));
this.parserMap.set(types_1.DataType.uid, async (e) => await this.readUint(e) === 1);
this.parserMap.set(types_1.DataType.bool, e => this.readFlag(e));
this.parserMap.set(types_1.DataType.float, e => this.readFloat(e));
this.parserMap.set(DataType.uint, e => this.readUint(e));
this.parserMap.set(DataType.string, e => this.readString(e));
this.parserMap.set(DataType.binary, e => this.readBuffer(e));
this.parserMap.set(DataType.uid, async (e) => await this.readUint(e) === 1);
this.parserMap.set(DataType.bool, e => this.readFlag(e));
this.parserMap.set(DataType.float, e => this.readFloat(e));
}

@@ -73,3 +70,3 @@ /**

.filter(entry => {
return entry.trackType === types_1.TrackType.audio.valueOf();
return entry.trackType === TrackType.audio.valueOf();
})

@@ -96,3 +93,3 @@ .reduce((acc, cur) => {

const target = tag.target;
const targetType = (target === null || target === void 0 ? void 0 : target.targetTypeValue) ? types_1.TargetType[target.targetTypeValue] : ((target === null || target === void 0 ? void 0 : target.targetType) ? target.targetType : 'track');
const targetType = (target === null || target === void 0 ? void 0 : target.targetTypeValue) ? TargetType[target.targetTypeValue] : ((target === null || target === void 0 ? void 0 : target.targetType) ? target.targetType : 'track');
tag.simpleTags.forEach(simpleTag => {

@@ -169,3 +166,3 @@ const value = simpleTag.string ? simpleTag.string : simpleTag.binary;

async readVintData(maxLength) {
const msb = await this.tokenizer.peekNumber(token_types_1.UINT8);
const msb = await this.tokenizer.peekNumber(UINT8);
let mask = 0x80;

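EBML element IDs and sizes are variable-length integers: the position of the first set bit in the first byte tells the parser how many bytes the field occupies, which is what the msb/mask scan above is doing. A small sketch of that length detection; vintLength is an illustrative name, not part of the parser:

// Sketch only: byte length of an EBML variable-size integer,
// derived from the leading zero bits of its first byte.
function vintLength(firstByte) {
  let mask = 0x80;
  let length = 1;
  while (length <= 8 && !(firstByte & mask)) {
    mask >>= 1;
    length++;
  }
  return length;
}

// Example: the EBML header ID 0x1A45DFA3 starts with 0x1a = 0b00011010,
// three leading zeros -> vintLength(0x1a) === 4 (a 4-byte ID).
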
@@ -210,7 +207,7 @@ let oc = 1;

case 4:
return this.tokenizer.readNumber(token_types_1.Float32_BE);
return this.tokenizer.readNumber(Float32_BE);
case 8:
return this.tokenizer.readNumber(token_types_1.Float64_BE);
return this.tokenizer.readNumber(Float64_BE);
case 10:
return this.tokenizer.readNumber(token_types_1.Float64_BE);
return this.tokenizer.readNumber(Float64_BE);
default:

@@ -229,3 +226,3 @@ throw new Error(`Invalid IEEE-754 float length: ${e.len}`);

async readString(e) {
const rawString = await this.tokenizer.readToken(new token_types_1.StringType(e.len, 'utf-8'));
const rawString = await this.tokenizer.readToken(new StringType(e.len, 'utf-8'));
return rawString.replace(/\00.*$/g, '');

@@ -242,2 +239,1 @@ }

}
exports.MatroskaParser = MatroskaParser;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MatroskaTagMapper = void 0;
const CaseInsensitiveTagMap_1 = require("../common/CaseInsensitiveTagMap");
import { CaseInsensitiveTagMap } from '../common/CaseInsensitiveTagMap.js';
/**

@@ -30,3 +27,3 @@ * EBML Tag map

};
class MatroskaTagMapper extends CaseInsensitiveTagMap_1.CaseInsensitiveTagMap {
export class MatroskaTagMapper extends CaseInsensitiveTagMap {
constructor() {

@@ -36,3 +33,2 @@ super(['matroska'], ebmlTagMap);

}
exports.MatroskaTagMapper = MatroskaTagMapper;
//# sourceMappingURL=MatroskaTagMapper.js.map

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TrackType = exports.TargetType = exports.DataType = void 0;
var DataType;
export var DataType;
(function (DataType) {

@@ -12,4 +9,4 @@ DataType[DataType["string"] = 0] = "string";

DataType[DataType["float"] = 5] = "float";
})(DataType = exports.DataType || (exports.DataType = {}));
var TargetType;
})(DataType = DataType || (DataType = {}));
export var TargetType;
(function (TargetType) {

@@ -23,4 +20,4 @@ TargetType[TargetType["shot"] = 10] = "shot";

TargetType[TargetType["collection"] = 70] = "collection";
})(TargetType = exports.TargetType || (exports.TargetType = {}));
var TrackType;
})(TargetType = TargetType || (TargetType = {}));
export var TrackType;
(function (TrackType) {

@@ -34,3 +31,3 @@ TrackType[TrackType["video"] = 1] = "video";

TrackType[TrackType["control"] = 32] = "control";
})(TrackType = exports.TrackType || (exports.TrackType = {}));
})(TrackType = TrackType || (TrackType = {}));
//# sourceMappingURL=types.js.map

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Atom = void 0;
const debug_1 = require("debug");
const AtomToken = require("./AtomToken");
const debug = (0, debug_1.default)('music-metadata:parser:MP4:Atom');
class Atom {
import initDebug from 'debug';
import * as AtomToken from './AtomToken.js';
const debug = initDebug('music-metadata:parser:MP4:Atom');
export class Atom {
constructor(header, extended, parent) {

@@ -70,2 +67,1 @@ this.header = header;

}
exports.Atom = Atom;

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChapterText = exports.StcoAtom = exports.StszAtom = exports.StscAtom = exports.SampleToChunkToken = exports.SttsAtom = exports.TimeToSampleToken = exports.SoundSampleDescriptionV0 = exports.SoundSampleDescriptionVersion = exports.StsdAtom = exports.TrackHeaderAtom = exports.NameAtom = exports.DataAtom = exports.MvhdAtom = exports.MdhdAtom = exports.FixedLengthAtom = exports.mhdr = exports.tkhd = exports.ftyp = exports.ExtendedSize = exports.Header = void 0;
const Token = require("token-types");
const debug_1 = require("debug");
const FourCC_1 = require("../common/FourCC");
const debug = (0, debug_1.default)('music-metadata:parser:MP4:atom');
exports.Header = {
import * as Token from 'token-types';
import initDebug from 'debug';
import { FourCcToken } from '../common/FourCC.js';
const debug = initDebug('music-metadata:parser:MP4:atom');
export const Header = {
len: 8,

@@ -21,3 +18,3 @@ get: (buf, off) => {

Token.UINT32_BE.put(buf, off, Number(hdr.length));
return FourCC_1.FourCcToken.put(buf, off + 4, hdr.name);
return FourCcToken.put(buf, off + 4, hdr.name);
}

@@ -28,4 +25,4 @@ };

*/
exports.ExtendedSize = Token.UINT64_BE;
exports.ftyp = {
export const ExtendedSize = Token.UINT64_BE;
export const ftyp = {
len: 4,

@@ -38,3 +35,3 @@ get: (buf, off) => {

};
exports.tkhd = {
export const tkhd = {
len: 4,

@@ -50,3 +47,3 @@ get: (buf, off) => {

*/
exports.mhdr = {
export const mhdr = {
len: 8,

@@ -66,3 +63,3 @@ get: (buf, off) => {

*/
class FixedLengthAtom {
export class FixedLengthAtom {
/**

@@ -83,3 +80,2 @@ *

}
exports.FixedLengthAtom = FixedLengthAtom;
/**

@@ -101,3 +97,3 @@ * Timestamp stored in seconds since Mac Epoch (1 January 1904)

*/
class MdhdAtom extends FixedLengthAtom {
export class MdhdAtom extends FixedLengthAtom {
constructor(len) {

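The mdhd/mvhd timestamps counted from the 1904 Mac epoch can be shifted onto the Unix epoch for display. A small conversion sketch; the 2,082,844,800-second offset (66 years including 17 leap days) and both names below are assumptions stated here, not values taken from the diff:

// Sketch only: seconds since 1904-01-01 UTC -> JavaScript Date.
const SECONDS_FROM_MAC_TO_UNIX_EPOCH = 2082844800;
const macTimestampToDate = (seconds) =>
  new Date((seconds - SECONDS_FROM_MAC_TO_UNIX_EPOCH) * 1000);

// Example: macTimestampToDate(0) -> 1904-01-01T00:00:00.000Z
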
@@ -120,7 +116,6 @@ super(len, 24, 'mdhd');

}
exports.MdhdAtom = MdhdAtom;
/**
* Token: Movie Header Atom
*/
class MvhdAtom extends FixedLengthAtom {
export class MvhdAtom extends FixedLengthAtom {
constructor(len) {

@@ -152,7 +147,6 @@ super(len, 100, 'mvhd');

}
exports.MvhdAtom = MvhdAtom;
/**
* Data Atom Structure
*/
class DataAtom {
export class DataAtom {
constructor(len) {

@@ -172,3 +166,2 @@ this.len = len;

}
exports.DataAtom = DataAtom;
/**

@@ -178,3 +171,3 @@ * Data Atom Structure

*/
class NameAtom {
export class NameAtom {
constructor(len) {

@@ -191,3 +184,2 @@ this.len = len;

}
exports.NameAtom = NameAtom;
/**

@@ -197,3 +189,3 @@ * Track Header Atoms structure

*/
class TrackHeaderAtom {
export class TrackHeaderAtom {
constructor(len) {

@@ -218,3 +210,2 @@ this.len = len;

}
exports.TrackHeaderAtom = TrackHeaderAtom;
/**

@@ -244,3 +235,3 @@ * Atom: Sample Description Atom ('stsd')

return {
dataFormat: FourCC_1.FourCcToken.get(buf, off),
dataFormat: FourCcToken.get(buf, off),
dataReferenceIndex: Token.UINT16_BE.get(buf, off + 10),

@@ -255,3 +246,3 @@ description: new Token.Uint8ArrayType(this.len - 12).get(buf, off + 12)

*/
class StsdAtom {
export class StsdAtom {
constructor(len) {

@@ -276,3 +267,2 @@ this.len = len;

}
exports.StsdAtom = StsdAtom;
/**

@@ -282,3 +272,3 @@ * Common Sound Sample Description (version & revision)

*/
exports.SoundSampleDescriptionVersion = {
export const SoundSampleDescriptionVersion = {
len: 8,

@@ -297,3 +287,3 @@ get(buf, off) {

*/
exports.SoundSampleDescriptionV0 = {
export const SoundSampleDescriptionV0 = {
len: 12,

@@ -325,3 +315,3 @@ get(buf, off) {

}
exports.TimeToSampleToken = {
export const TimeToSampleToken = {
len: 8,

@@ -340,10 +330,9 @@ get(buf, off) {

*/
class SttsAtom extends SimpleTableAtom {
export class SttsAtom extends SimpleTableAtom {
constructor(len) {
super(len, exports.TimeToSampleToken);
super(len, TimeToSampleToken);
this.len = len;
}
}
exports.SttsAtom = SttsAtom;
exports.SampleToChunkToken = {
export const SampleToChunkToken = {
len: 12,

@@ -362,9 +351,8 @@ get(buf, off) {

*/
class StscAtom extends SimpleTableAtom {
export class StscAtom extends SimpleTableAtom {
constructor(len) {
super(len, exports.SampleToChunkToken);
super(len, SampleToChunkToken);
this.len = len;
}
}
exports.StscAtom = StscAtom;
/**

@@ -374,3 +362,3 @@ * Sample-size ('stsz') atom

*/
class StszAtom {
export class StszAtom {
constructor(len) {

@@ -390,3 +378,2 @@ this.len = len;

}
exports.StszAtom = StszAtom;
/**

@@ -396,3 +383,3 @@ * Chunk offset atom, 'stco'

*/
class StcoAtom extends SimpleTableAtom {
export class StcoAtom extends SimpleTableAtom {
constructor(len) {

@@ -403,7 +390,6 @@ super(len, Token.INT32_BE);

}
exports.StcoAtom = StcoAtom;
/**
* Token used to decode text-track from 'mdat' atom (raw data stream)
*/
class ChapterText {
export class ChapterText {
constructor(len) {

@@ -418,3 +404,2 @@ this.len = len;

}
exports.ChapterText = ChapterText;
function readTokenTable(buf, token, off, remainingLen, numberOfEntries) {

@@ -421,0 +406,0 @@ debug(`remainingLen=${remainingLen}, numberOfEntries=${numberOfEntries} * token-len=${token.len}`);

@@ -1,12 +0,9 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MP4Parser = void 0;
const debug_1 = require("debug");
const Token = require("token-types");
const BasicParser_1 = require("../common/BasicParser");
const ID3v1Parser_1 = require("../id3v1/ID3v1Parser");
const type_1 = require("../type");
const Atom_1 = require("./Atom");
const AtomToken = require("./AtomToken");
const debug = (0, debug_1.default)('music-metadata:parser:MP4');
import initDebug from 'debug';
import * as Token from 'token-types';
import { BasicParser } from '../common/BasicParser.js';
import { Genres } from '../id3v1/ID3v1Parser.js';
import { Atom } from './Atom.js';
import * as AtomToken from './AtomToken.js';
import { TrackType } from '../type.js';
const debug = initDebug('music-metadata:parser:MP4');
const tagFormat = 'iTunes';

@@ -93,3 +90,3 @@ const encoderDict = {

*/
class MP4Parser extends BasicParser_1.BasicParser {
export class MP4Parser extends BasicParser {
constructor() {

@@ -237,3 +234,3 @@ super(...arguments);

}
const rootAtom = await Atom_1.Atom.readAtom(this.tokenizer, (atom, remaining) => this.handleAtom(atom, remaining), null, remainingFileSize);
const rootAtom = await Atom.readAtom(this.tokenizer, (atom, remaining) => this.handleAtom(atom, remaining), null, remainingFileSize);
remainingFileSize -= rootAtom.header.length === BigInt(0) ? remainingFileSize : Number(rootAtom.header.length);

@@ -258,3 +255,3 @@ }

if (description.sampleRate > 0) {
streamInfo.type = type_1.TrackType.audio;
streamInfo.type = TrackType.audio;
streamInfo.audio = {

@@ -374,3 +371,3 @@ samplingFrequency: description.sampleRate,

const genreInt = Token.UINT8.get(dataAtom.value, 1);
const genreStr = ID3v1Parser_1.Genres[genreInt - 1];
const genreStr = Genres[genreInt - 1];
// console.log(" %s[data] = %s", tagKey, genreStr);

@@ -515,2 +512,1 @@ this.addTag(tagKey, genreStr);

}
exports.MP4Parser = MP4Parser;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MP4TagMapper = exports.tagType = void 0;
const CaseInsensitiveTagMap_1 = require("../common/CaseInsensitiveTagMap");
import { CaseInsensitiveTagMap } from '../common/CaseInsensitiveTagMap.js';
/**

@@ -109,9 +106,8 @@ * Ref: https://github.com/sergiomb2/libmp4v2/wiki/iTunesMetadata

};
exports.tagType = 'iTunes';
class MP4TagMapper extends CaseInsensitiveTagMap_1.CaseInsensitiveTagMap {
export const tagType = 'iTunes';
export class MP4TagMapper extends CaseInsensitiveTagMap {
constructor() {
super([exports.tagType], mp4TagMap);
super([tagType], mp4TagMap);
}
}
exports.MP4TagMapper = MP4TagMapper;
//# sourceMappingURL=MP4TagMapper.js.map

@@ -1,10 +0,7 @@

"use strict";
/**
* Extended Lame Header
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExtendedLameHeader = void 0;
const Token = require("token-types");
const common = require("../common/Util");
const ReplayGainDataFormat_1 = require("./ReplayGainDataFormat");
import * as Token from 'token-types';
import * as common from '../common/Util.js';
import { ReplayGain } from './ReplayGainDataFormat.js';
/**

@@ -15,3 +12,3 @@ * Info Tag

*/
exports.ExtendedLameHeader = {
export const ExtendedLameHeader = {
len: 27,

@@ -25,4 +22,4 @@ get: (buf, off) => {

track_peak: track_peak === 0 ? undefined : track_peak / Math.pow(2, 23),
track_gain: ReplayGainDataFormat_1.ReplayGain.get(buf, 6),
album_gain: ReplayGainDataFormat_1.ReplayGain.get(buf, 8),
track_gain: ReplayGain.get(buf, 6),
album_gain: ReplayGain.get(buf, 8),
music_length: Token.UINT32_BE.get(buf, off + 20),

@@ -29,0 +26,0 @@ music_crc: Token.UINT8.get(buf, off + 24),

@@ -1,11 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MpegParser = void 0;
const Token = require("token-types");
const core_1 = require("strtok3/lib/core");
const debug_1 = require("debug");
const common = require("../common/Util");
const AbstractID3Parser_1 = require("../id3v2/AbstractID3Parser");
const XingTag_1 = require("./XingTag");
const debug = (0, debug_1.default)('music-metadata:parser:mpeg');
import * as Token from 'token-types';
import { EndOfStreamError } from 'strtok3/core';
import initDebug from 'debug';
import * as common from '../common/Util.js';
import { AbstractID3Parser } from '../id3v2/AbstractID3Parser.js';
import { InfoTagHeaderTag, LameEncoderVersion, readXingHeader } from './XingTag.js';
const debug = initDebug('music-metadata:parser:mpeg');
/**

@@ -209,3 +206,3 @@ * Cache buffer size used for searching synchronization preabmle

}
class MpegParser extends AbstractID3Parser_1.AbstractID3Parser {
export class MpegParser extends AbstractID3Parser {
constructor() {

@@ -238,3 +235,3 @@ super(...arguments);

catch (err) {
if (err instanceof core_1.EndOfStreamError) {
if (err instanceof EndOfStreamError) {
debug(`End-of-stream`);

@@ -281,3 +278,3 @@ if (this.calculateEofDuration) {

if (this.syncPeek.len <= 163) {
throw new core_1.EndOfStreamError();
throw new EndOfStreamError();
}

@@ -303,3 +300,3 @@ while (true) {

if (this.syncPeek.len < this.syncPeek.buf.length) {
throw new core_1.EndOfStreamError();
throw new EndOfStreamError();
}

@@ -444,4 +441,4 @@ await this.tokenizer.ignore(this.syncPeek.len);

async readXtraInfoHeader() {
const headerTag = await this.tokenizer.readToken(XingTag_1.InfoTagHeaderTag);
this.offset += XingTag_1.InfoTagHeaderTag.len; // 12
const headerTag = await this.tokenizer.readToken(InfoTagHeaderTag);
this.offset += InfoTagHeaderTag.len; // 12
switch (headerTag) {

@@ -460,5 +457,5 @@ case 'Info':

case 'LAME':
const version = await this.tokenizer.readToken(XingTag_1.LameEncoderVersion);
if (this.frame_size >= this.offset + XingTag_1.LameEncoderVersion.len) {
this.offset += XingTag_1.LameEncoderVersion.len;
const version = await this.tokenizer.readToken(LameEncoderVersion);
if (this.frame_size >= this.offset + LameEncoderVersion.len) {
this.offset += LameEncoderVersion.len;
this.metadata.setFormat('tool', 'LAME ' + version);

@@ -490,3 +487,3 @@ await this.skipFrameData(this.frame_size - this.offset);

const offset = this.tokenizer.position;
const infoTag = await (0, XingTag_1.readXingHeader)(this.tokenizer);
const infoTag = await readXingHeader(this.tokenizer);
this.offset += this.tokenizer.position - offset;

@@ -531,2 +528,1 @@ if (infoTag.lame) {

}
exports.MpegParser = MpegParser;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReplayGain = void 0;
const common = require("../common/Util");
import * as common from '../common/Util.js';
/**

@@ -54,3 +51,3 @@ * https://github.com/Borewit/music-metadata/wiki/Replay-Gain-Data-Format#name-code

*/
exports.ReplayGain = {
export const ReplayGain = {
len: 2,

@@ -57,0 +54,0 @@ get: (buf, off) => {

@@ -1,11 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.readXingHeader = exports.XingHeaderFlags = exports.LameEncoderVersion = exports.InfoTagHeaderTag = void 0;
const Token = require("token-types");
const util = require("../common/Util");
const ExtendedLameHeader_1 = require("./ExtendedLameHeader");
import * as Token from 'token-types';
import * as util from '../common/Util.js';
import { ExtendedLameHeader } from './ExtendedLameHeader.js';
/**
* Info Tag: Xing, LAME
*/
exports.InfoTagHeaderTag = new Token.StringType(4, 'ascii');
export const InfoTagHeaderTag = new Token.StringType(4, 'ascii');
/**

@@ -16,3 +13,3 @@ * LAME TAG value

*/
exports.LameEncoderVersion = new Token.StringType(6, 'ascii');
export const LameEncoderVersion = new Token.StringType(6, 'ascii');
/**

@@ -22,3 +19,3 @@ * Info Tag

*/
exports.XingHeaderFlags = {
export const XingHeaderFlags = {
len: 4,

@@ -38,4 +35,4 @@ get: (buf, off) => {

// */
async function readXingHeader(tokenizer) {
const flags = await tokenizer.readToken(exports.XingHeaderFlags);
export async function readXingHeader(tokenizer) {
const flags = await tokenizer.readToken(XingHeaderFlags);
const xingInfoTag = {};

@@ -66,3 +63,3 @@ if (flags.frames) {

if (version[0] >= 3 && version[1] >= 90) {
xingInfoTag.lame.extended = await tokenizer.readToken(ExtendedLameHeader_1.ExtendedLameHeader);
xingInfoTag.lame.extended = await tokenizer.readToken(ExtendedLameHeader);
}

@@ -73,2 +70,1 @@ }

}
exports.readXingHeader = readXingHeader;

@@ -1,10 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const debug_1 = require("debug");
const Token = require("token-types");
const AbstractID3Parser_1 = require("../id3v2/AbstractID3Parser");
const MpcSv8Parser_1 = require("./sv8/MpcSv8Parser");
const MpcSv7Parser_1 = require("./sv7/MpcSv7Parser");
const debug = (0, debug_1.default)('music-metadata:parser:musepack');
class MusepackParser extends AbstractID3Parser_1.AbstractID3Parser {
import initDebug from 'debug';
import * as Token from 'token-types';
import { AbstractID3Parser } from '../id3v2/AbstractID3Parser.js';
import { MpcSv8Parser } from './sv8/MpcSv8Parser.js';
import { MpcSv7Parser } from './sv7/MpcSv7Parser.js';
const debug = initDebug('music-metadata:parser:musepack');
class MusepackParser extends AbstractID3Parser {
async postId3v2Parse() {

@@ -16,3 +14,3 @@ const signature = await this.tokenizer.peekToken(new Token.StringType(3, 'binary'));

debug('Musepack stream-version 7');
mpcParser = new MpcSv7Parser_1.MpcSv7Parser();
mpcParser = new MpcSv7Parser();
break;

@@ -22,3 +20,3 @@ }

debug('Musepack stream-version 8');
mpcParser = new MpcSv8Parser_1.MpcSv8Parser();
mpcParser = new MpcSv8Parser();
break;

@@ -34,2 +32,2 @@ }

}
exports.default = MusepackParser;
export default MusepackParser;

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BitReader = void 0;
const Token = require("token-types");
class BitReader {
import * as Token from 'token-types';
export class BitReader {
constructor(tokenizer) {

@@ -54,2 +51,1 @@ this.tokenizer = tokenizer;

}
exports.BitReader = BitReader;

@@ -1,11 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MpcSv7Parser = void 0;
const debug_1 = require("debug");
const BasicParser_1 = require("../../common/BasicParser");
const APEv2Parser_1 = require("../../apev2/APEv2Parser");
const BitReader_1 = require("./BitReader");
const SV7 = require("./StreamVersion7");
const debug = (0, debug_1.default)('music-metadata:parser:musepack');
class MpcSv7Parser extends BasicParser_1.BasicParser {
import initDebug from 'debug';
import { BasicParser } from '../../common/BasicParser.js';
import { APEv2Parser } from '../../apev2/APEv2Parser.js';
import { BitReader } from './BitReader.js';
import * as SV7 from './StreamVersion7.js';
const debug = initDebug('music-metadata:parser:musepack');
export class MpcSv7Parser extends BasicParser {
constructor() {

@@ -26,3 +23,3 @@ super(...arguments);

this.metadata.setFormat('duration', this.duration);
this.bitreader = new BitReader_1.BitReader(this.tokenizer);
this.bitreader = new BitReader(this.tokenizer);
this.metadata.setFormat('numberOfChannels', header.midSideStereo || header.intensityStereo ? 2 : 1);

@@ -33,3 +30,3 @@ const version = await this.bitreader.read(8);

debug(`End of audio stream, switching to APEv2, offset=${this.tokenizer.position}`);
return APEv2Parser_1.APEv2Parser.tryParseApeHeader(this.metadata, this.tokenizer, this.options);
return APEv2Parser.tryParseApeHeader(this.metadata, this.tokenizer, this.options);
}

@@ -48,2 +45,1 @@ async skipAudioData(frameCount) {

}
exports.MpcSv7Parser = MpcSv7Parser;

@@ -1,10 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Header = void 0;
const Token = require("token-types");
const util = require("../../common/Util");
import * as Token from 'token-types';
import * as util from '../../common/Util.js';
/**
* BASIC STRUCTURE
*/
exports.Header = {
export const Header = {
len: 6 * 4,

@@ -11,0 +8,0 @@ get: (buf, off) => {

@@ -1,11 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MpcSv8Parser = void 0;
const debug_1 = require("debug");
const BasicParser_1 = require("../../common/BasicParser");
const APEv2Parser_1 = require("../../apev2/APEv2Parser");
const FourCC_1 = require("../../common/FourCC");
const SV8 = require("./StreamVersion8");
const debug = (0, debug_1.default)('music-metadata:parser:musepack');
class MpcSv8Parser extends BasicParser_1.BasicParser {
import initDebug from 'debug';
import { BasicParser } from '../../common/BasicParser.js';
import { APEv2Parser } from '../../apev2/APEv2Parser.js';
import { FourCcToken } from '../../common/FourCC.js';
import * as SV8 from './StreamVersion8.js';
const debug = initDebug('music-metadata:parser:musepack');
export class MpcSv8Parser extends BasicParser {
constructor() {

@@ -16,3 +13,3 @@ super(...arguments);

async parse() {
const signature = await this.tokenizer.readToken(FourCC_1.FourCcToken);
const signature = await this.tokenizer.readToken(FourCcToken);
if (signature !== 'MPCK')

@@ -49,3 +46,3 @@ throw new Error('Invalid Magic number');

this.metadata.setFormat('bitrate', this.audioLength * 8 / this.metadata.format.duration);
return APEv2Parser_1.APEv2Parser.tryParseApeHeader(this.metadata, this.tokenizer, this.options);
return APEv2Parser.tryParseApeHeader(this.metadata, this.tokenizer, this.options);
default:

@@ -57,2 +54,1 @@ throw new Error(`Unexpected header: ${header.key}`);

}
exports.MpcSv8Parser = MpcSv8Parser;

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StreamReader = void 0;
const Token = require("token-types");
const debug_1 = require("debug");
const util = require("../../common/Util");
const debug = (0, debug_1.default)('music-metadata:parser:musepack:sv8');
import * as Token from 'token-types';
import initDebug from 'debug';
import * as util from '../../common/Util.js';
const debug = initDebug('music-metadata:parser:musepack:sv8');
const PacketKey = new Token.StringType(2, 'binary');

@@ -38,3 +35,3 @@ /**

};
class StreamReader {
export class StreamReader {
constructor(tokenizer) {

@@ -81,2 +78,1 @@ this.tokenizer = tokenizer;

}
exports.StreamReader = StreamReader;

@@ -1,2 +0,1 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
export {};

@@ -1,16 +0,13 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.OggParser = exports.SegmentTable = void 0;
const Token = require("token-types");
const core_1 = require("strtok3/lib/core");
const debug_1 = require("debug");
const util = require("../common/Util");
const FourCC_1 = require("../common/FourCC");
const BasicParser_1 = require("../common/BasicParser");
const VorbisParser_1 = require("./vorbis/VorbisParser");
const OpusParser_1 = require("./opus/OpusParser");
const SpeexParser_1 = require("./speex/SpeexParser");
const TheoraParser_1 = require("./theora/TheoraParser");
const debug = (0, debug_1.default)('music-metadata:parser:ogg');
class SegmentTable {
import * as Token from 'token-types';
import { EndOfStreamError } from 'strtok3/core';
import initDebug from 'debug';
import * as util from '../common/Util.js';
import { FourCcToken } from '../common/FourCC.js';
import { BasicParser } from '../common/BasicParser.js';
import { VorbisParser } from './vorbis/VorbisParser.js';
import { OpusParser } from './opus/OpusParser.js';
import { SpeexParser } from './speex/SpeexParser.js';
import { TheoraParser } from './theora/TheoraParser.js';
const debug = initDebug('music-metadata:parser:ogg');
export class SegmentTable {
constructor(header) {

@@ -32,7 +29,6 @@ this.len = header.page_segments;

}
exports.SegmentTable = SegmentTable;
/**
* Parser for Ogg logical bitstream framing
*/
class OggParser extends BasicParser_1.BasicParser {
export class OggParser extends BasicParser {
/**

@@ -63,11 +59,11 @@ * Parse page

debug('Set page consumer to Ogg/Vorbis');
this.pageConsumer = new VorbisParser_1.VorbisParser(this.metadata, this.options);
this.pageConsumer = new VorbisParser(this.metadata, this.options);
break;
case 'OpusHea': // Ogg/Opus
debug('Set page consumer to Ogg/Opus');
this.pageConsumer = new OpusParser_1.OpusParser(this.metadata, this.options, this.tokenizer);
this.pageConsumer = new OpusParser(this.metadata, this.options, this.tokenizer);
break;
case 'Speex ': // Ogg/Speex
debug('Set page consumer to Ogg/Speex');
this.pageConsumer = new SpeexParser_1.SpeexParser(this.metadata, this.options, this.tokenizer);
this.pageConsumer = new SpeexParser(this.metadata, this.options, this.tokenizer);
break;

@@ -77,3 +73,3 @@ case 'fishead':

debug('Set page consumer to Ogg/Theora');
this.pageConsumer = new TheoraParser_1.TheoraParser(this.metadata, this.options, this.tokenizer);
this.pageConsumer = new TheoraParser(this.metadata, this.options, this.tokenizer);
break;

@@ -88,3 +84,3 @@ default:

catch (err) {
if (err instanceof core_1.EndOfStreamError) {
if (err instanceof EndOfStreamError) {
this.metadata.addWarning('Last OGG-page is not marked with last-page flag');

@@ -110,3 +106,2 @@ debug(`End-of-stream`);

}
exports.OggParser = OggParser;
OggParser.Header = {

@@ -116,3 +111,3 @@ len: 27,

return {
capturePattern: FourCC_1.FourCcToken.get(buf, off),
capturePattern: FourCcToken.get(buf, off),
version: Token.UINT8.get(buf, off + 4),

@@ -119,0 +114,0 @@ headerType: {

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IdHeader = void 0;
const Token = require("token-types");
import * as Token from 'token-types';
/**

@@ -9,3 +6,3 @@ * Opus ID Header parser

*/
class IdHeader {
export class IdHeader {
constructor(len) {

@@ -29,2 +26,1 @@ this.len = len;

}
exports.IdHeader = IdHeader;

@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpusParser = void 0;
const Token = require("token-types");
const VorbisParser_1 = require("../vorbis/VorbisParser");
const Opus = require("./Opus");
import * as Token from 'token-types';
import { VorbisParser } from '../vorbis/VorbisParser.js';
import * as Opus from './Opus.js';
/**

@@ -12,3 +9,3 @@ * Opus parser

*/
class OpusParser extends VorbisParser_1.VorbisParser {
export class OpusParser extends VorbisParser {
constructor(metadata, options, tokenizer) {

@@ -57,2 +54,1 @@ super(metadata, options);

}
exports.OpusParser = OpusParser;

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Header = void 0;
const Token = require("token-types");
const util = require("../../common/Util");
import * as Token from 'token-types';
import * as util from '../../common/Util.js';
/**

@@ -10,3 +7,3 @@ * Speex Header Packet

*/
exports.Header = {
export const Header = {
len: 80,

@@ -13,0 +10,0 @@ get: (buf, off) => {

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SpeexParser = void 0;
const debug_1 = require("debug");
const VorbisParser_1 = require("../vorbis/VorbisParser");
const Speex = require("./Speex");
const debug = (0, debug_1.default)('music-metadata:parser:ogg:speex');
import initDebug from 'debug';
import { VorbisParser } from '../vorbis/VorbisParser.js';
import * as Speex from './Speex.js';
const debug = initDebug('music-metadata:parser:ogg:speex');
/**

@@ -14,3 +11,3 @@ * Speex, RFC 5574

*/
class SpeexParser extends VorbisParser_1.VorbisParser {
export class SpeexParser extends VorbisParser {
constructor(metadata, options, tokenizer) {

@@ -36,2 +33,1 @@ super(metadata, options);

}
exports.SpeexParser = SpeexParser;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IdentificationHeader = void 0;
const Token = require("token-types");
import * as Token from 'token-types';
/**

@@ -9,3 +6,3 @@ * 6.2 Identification Header

*/
exports.IdentificationHeader = {
export const IdentificationHeader = {
len: 42,

@@ -12,0 +9,0 @@ get: (buf, off) => {

@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TheoraParser = void 0;
const debug_1 = require("debug");
const Theora_1 = require("./Theora");
const debug = (0, debug_1.default)('music-metadata:parser:ogg:theora');
import initDebug from 'debug';
import { IdentificationHeader } from './Theora.js';
const debug = initDebug('music-metadata:parser:ogg:theora');
/**

@@ -11,3 +8,3 @@ * Ref:

*/
class TheoraParser {
export class TheoraParser {
constructor(metadata, options, tokenizer) {

@@ -41,6 +38,5 @@ this.metadata = metadata;

this.metadata.setFormat('codec', 'Theora');
const idHeader = Theora_1.IdentificationHeader.get(pageData, 0);
const idHeader = IdentificationHeader.get(pageData, 0);
this.metadata.setFormat('bitrate', idHeader.nombr);
}
}
exports.TheoraParser = TheoraParser;

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IdentificationHeader = exports.CommonHeader = exports.VorbisPictureToken = void 0;
const Token = require("token-types");
const ID3v2Token_1 = require("../../id3v2/ID3v2Token");
import * as Token from 'token-types';
import { AttachedPictureType } from '../../id3v2/ID3v2Token.js';
/**

@@ -12,3 +9,3 @@ * Parse the METADATA_BLOCK_PICTURE

*/
class VorbisPictureToken {
export class VorbisPictureToken {
constructor(len) {

@@ -25,3 +22,3 @@ this.len = len;

get(buffer, offset) {
const type = ID3v2Token_1.AttachedPictureType[Token.UINT32_BE.get(buffer, offset)];
const type = AttachedPictureType[Token.UINT32_BE.get(buffer, offset)];
const mimeLen = Token.UINT32_BE.get(buffer, offset += 4);

@@ -49,3 +46,2 @@ const format = buffer.toString('utf-8', offset += 4, offset + mimeLen);

}
exports.VorbisPictureToken = VorbisPictureToken;
/**

@@ -55,3 +51,3 @@ * Comment header decoder

*/
exports.CommonHeader = {
export const CommonHeader = {
len: 7,

@@ -69,3 +65,3 @@ get: (buf, off) => {

*/
exports.IdentificationHeader = {
export const IdentificationHeader = {
len: 23,

@@ -72,0 +68,0 @@ get: (uint8Array, off) => {

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VorbisDecoder = void 0;
const Token = require("token-types");
class VorbisDecoder {
import * as Token from 'token-types';
export class VorbisDecoder {
constructor(data, offset) {

@@ -32,2 +29,1 @@ this.data = data;

}
exports.VorbisDecoder = VorbisDecoder;

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VorbisParser = void 0;
const Token = require("token-types");
const debug_1 = require("debug");
const VorbisDecoder_1 = require("./VorbisDecoder");
const Vorbis_1 = require("./Vorbis");
const debug = (0, debug_1.default)('music-metadata:parser:ogg:vorbis1');
import * as Token from 'token-types';
import debugInit from 'debug';
import { VorbisDecoder } from './VorbisDecoder.js';
import { CommonHeader, IdentificationHeader, VorbisPictureToken } from './Vorbis.js';
const debug = debugInit('music-metadata:parser:ogg:vorbis1');
/**

@@ -13,3 +10,3 @@ * Vorbis 1 Parser.

*/
class VorbisParser {
export class VorbisParser {
constructor(metadata, options) {

@@ -32,3 +29,3 @@ this.metadata = metadata;

if (this.pageSegments.length === 0) {
throw new Error("Cannot continue on previous page");
throw new Error('Cannot continue on previous page');
}

@@ -55,3 +52,3 @@ this.pageSegments.push(pageData);

parseUserComment(pageData, offset) {
const decoder = new VorbisDecoder_1.VorbisDecoder(pageData, offset);
const decoder = new VorbisDecoder(pageData, offset);
const tag = decoder.parseUserComment();

@@ -67,3 +64,3 @@ this.addTag(tag.key, tag.value);

}
value = Vorbis_1.VorbisPictureToken.fromBase64(value);
value = VorbisPictureToken.fromBase64(value);
debug(`Push picture: id=${id}, format=${value.format}`);

@@ -85,18 +82,18 @@ }

* Parse first Ogg/Vorbis page
* @param {IPageHeader} header
* @param {Buffer} pageData
* @param header
* @param pageData
*/
parseFirstPage(header, pageData) {
this.metadata.setFormat('codec', 'Vorbis I');
debug("Parse first page");
debug('Parse first page');
// Parse Vorbis common header
const commonHeader = Vorbis_1.CommonHeader.get(pageData, 0);
const commonHeader = CommonHeader.get(pageData, 0);
if (commonHeader.vorbis !== 'vorbis')
throw new Error('Metadata does not look like Vorbis');
if (commonHeader.packetType === 1) {
const idHeader = Vorbis_1.IdentificationHeader.get(pageData, Vorbis_1.CommonHeader.len);
const idHeader = IdentificationHeader.get(pageData, CommonHeader.len);
this.metadata.setFormat('sampleRate', idHeader.sampleRate);
this.metadata.setFormat('bitrate', idHeader.bitrateNominal);
this.metadata.setFormat('numberOfChannels', idHeader.channelMode);
debug("sample-rate=%s[hz], bitrate=%s[b/s], channel-mode=%s", idHeader.sampleRate, idHeader.bitrateNominal, idHeader.channelMode);
debug('sample-rate=%s[hz], bitrate=%s[b/s], channel-mode=%s', idHeader.sampleRate, idHeader.bitrateNominal, idHeader.channelMode);
}

@@ -108,7 +105,7 @@ else

// New page
const commonHeader = Vorbis_1.CommonHeader.get(pageData, 0);
debug("Parse full page: type=%s, byteLength=%s", commonHeader.packetType, pageData.byteLength);
const commonHeader = CommonHeader.get(pageData, 0);
debug('Parse full page: type=%s, byteLength=%s', commonHeader.packetType, pageData.byteLength);
switch (commonHeader.packetType) {
case 3: // type 3: comment header
return this.parseUserCommentList(pageData, Vorbis_1.CommonHeader.len);
return this.parseUserCommentList(pageData, CommonHeader.len);
case 1: // type 1: the identification header

@@ -134,2 +131,1 @@ case 5: // type 5: setup header type

}
exports.VorbisParser = VorbisParser;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VorbisTagMapper = void 0;
const GenericTagMapper_1 = require("../../common/GenericTagMapper");
import { CommonTagMapper } from '../../common/GenericTagMapper.js';
/**

@@ -114,7 +111,7 @@ * Vorbis tag mappings

};
class VorbisTagMapper extends GenericTagMapper_1.CommonTagMapper {
export class VorbisTagMapper extends CommonTagMapper {
static toRating(email, rating) {
return {
source: email ? email.toLowerCase() : email,
rating: parseFloat(rating) * GenericTagMapper_1.CommonTagMapper.maxRatingScore
rating: parseFloat(rating) * CommonTagMapper.maxRatingScore
};

@@ -133,3 +130,2 @@ }

}
exports.VorbisTagMapper = VorbisTagMapper;
//# sourceMappingURL=VorbisTagMapper.js.map

@@ -1,24 +0,22 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ParserFactory = exports.parseHttpContentType = void 0;
const FileType = require("file-type/core");
const ContentType = require("content-type");
const MimeType = require("media-typer");
const debug_1 = require("debug");
const MetadataCollector_1 = require("./common/MetadataCollector");
const AiffParser_1 = require("./aiff/AiffParser");
const APEv2Parser_1 = require("./apev2/APEv2Parser");
const AsfParser_1 = require("./asf/AsfParser");
const FlacParser_1 = require("./flac/FlacParser");
const MP4Parser_1 = require("./mp4/MP4Parser");
const MpegParser_1 = require("./mpeg/MpegParser");
const musepack_1 = require("./musepack");
const OggParser_1 = require("./ogg/OggParser");
const WaveParser_1 = require("./wav/WaveParser");
const WavPackParser_1 = require("./wavpack/WavPackParser");
const DsfParser_1 = require("./dsf/DsfParser");
const DsdiffParser_1 = require("./dsdiff/DsdiffParser");
const MatroskaParser_1 = require("./matroska/MatroskaParser");
const debug = (0, debug_1.default)('music-metadata:parser:factory');
function parseHttpContentType(contentType) {
import { fileTypeFromBuffer } from 'file-type';
import ContentType from 'content-type';
import MimeType from 'media-typer';
import initDebug from 'debug';
import { Buffer } from 'node:buffer';
import { MetadataCollector } from './common/MetadataCollector.js';
import { AIFFParser } from './aiff/AiffParser.js';
import { APEv2Parser } from './apev2/APEv2Parser.js';
import { AsfParser } from './asf/AsfParser.js';
import { FlacParser } from './flac/FlacParser.js';
import { MP4Parser } from './mp4/MP4Parser.js';
import { MpegParser } from './mpeg/MpegParser.js';
import MusepackParser from './musepack/index.js';
import { OggParser } from './ogg/OggParser.js';
import { WaveParser } from './wav/WaveParser.js';
import { WavPackParser } from './wavpack/WavPackParser.js';
import { DsfParser } from './dsf/DsfParser.js';
import { DsdiffParser } from './dsdiff/DsdiffParser.js';
import { MatroskaParser } from './matroska/MatroskaParser.js';
const debug = initDebug('music-metadata:parser:factory');
export function parseHttpContentType(contentType) {
const type = ContentType.parse(contentType);

@@ -33,11 +31,10 @@ const mime = MimeType.parse(type.type);

}
exports.parseHttpContentType = parseHttpContentType;
async function parse(tokenizer, parserId, opts = {}) {
// Parser found, execute parser
const parser = await ParserFactory.loadParser(parserId);
const metadata = new MetadataCollector_1.MetadataCollector(opts);
const metadata = new MetadataCollector(opts);
await parser.init(metadata, tokenizer, opts).parse();
return metadata.toCommonMetadata();
}
class ParserFactory {
export class ParserFactory {
/**

@@ -68,3 +65,3 @@ * Parse metadata from tokenizer

if (!parserId) {
const guessedType = await FileType.fromBuffer(buf);
const guessedType = await fileTypeFromBuffer(buf);
if (!guessedType) {

@@ -147,17 +144,17 @@ throw new Error('Failed to determine audio format');

switch (moduleName) {
case 'aiff': return new AiffParser_1.AIFFParser();
case 'aiff': return new AIFFParser();
case 'adts':
case 'mpeg':
return new MpegParser_1.MpegParser();
case 'apev2': return new APEv2Parser_1.APEv2Parser();
case 'asf': return new AsfParser_1.AsfParser();
case 'dsf': return new DsfParser_1.DsfParser();
case 'dsdiff': return new DsdiffParser_1.DsdiffParser();
case 'flac': return new FlacParser_1.FlacParser();
case 'mp4': return new MP4Parser_1.MP4Parser();
case 'musepack': return new musepack_1.default();
case 'ogg': return new OggParser_1.OggParser();
case 'riff': return new WaveParser_1.WaveParser();
case 'wavpack': return new WavPackParser_1.WavPackParser();
case 'matroska': return new MatroskaParser_1.MatroskaParser();
return new MpegParser();
case 'apev2': return new APEv2Parser();
case 'asf': return new AsfParser();
case 'dsf': return new DsfParser();
case 'dsdiff': return new DsdiffParser();
case 'flac': return new FlacParser();
case 'mp4': return new MP4Parser();
case 'musepack': return new MusepackParser();
case 'ogg': return new OggParser();
case 'riff': return new WaveParser();
case 'wavpack': return new WavPackParser();
case 'matroska': return new MatroskaParser();
default:

@@ -255,2 +252,1 @@ throw new Error(`Unknown parser type: ${moduleName}`);

}
exports.ParserFactory = ParserFactory;

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ListInfoTagValue = exports.Header = void 0;
const Token = require("token-types");
import * as Token from 'token-types';
/**
* Common RIFF chunk header
*/
exports.Header = {
export const Header = {
len: 8,

@@ -22,3 +19,3 @@ get: (buf, off) => {

*/
class ListInfoTagValue {
export class ListInfoTagValue {
constructor(tagHeader) {

@@ -33,2 +30,1 @@ this.tagHeader = tagHeader;

}
exports.ListInfoTagValue = ListInfoTagValue;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RiffInfoTagMapper = exports.riffInfoTagMap = void 0;
const GenericTagMapper_1 = require("../common/GenericTagMapper");
import { CommonTagMapper } from '../common/GenericTagMapper.js';
/**

@@ -9,3 +6,3 @@ * RIFF Info Tags; part of the EXIF 2.3

*/
exports.riffInfoTagMap = {
export const riffInfoTagMap = {
IART: 'artist',

@@ -33,8 +30,7 @@ ICRD: 'date',

};
class RiffInfoTagMapper extends GenericTagMapper_1.CommonTagMapper {
export class RiffInfoTagMapper extends CommonTagMapper {
constructor() {
super(['exif'], exports.riffInfoTagMap);
super(['exif'], riffInfoTagMap);
}
}
exports.RiffInfoTagMapper = RiffInfoTagMapper;
//# sourceMappingURL=RiffInfoTagMap.js.map

@@ -1,6 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TrackType = void 0;
var types_1 = require("./matroska/types");
Object.defineProperty(exports, "TrackType", { enumerable: true, get: function () { return types_1.TrackType; } });
export { TrackType } from './matroska/types.js';
//# sourceMappingURL=type.js.map

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BroadcastAudioExtensionChunk = void 0;
const Token = require("token-types");
const Util_1 = require("../common/Util");
import * as Token from 'token-types';
import { stripNulls } from '../common/Util.js';
/**

@@ -10,11 +7,11 @@ * Broadcast Audio Extension Chunk

*/
exports.BroadcastAudioExtensionChunk = {
export const BroadcastAudioExtensionChunk = {
len: 420,
get: (uint8array, off) => {
return {
description: (0, Util_1.stripNulls)(new Token.StringType(256, 'ascii').get(uint8array, off)).trim(),
originator: (0, Util_1.stripNulls)(new Token.StringType(32, 'ascii').get(uint8array, off + 256)).trim(),
originatorReference: (0, Util_1.stripNulls)(new Token.StringType(32, 'ascii').get(uint8array, off + 288)).trim(),
originationDate: (0, Util_1.stripNulls)(new Token.StringType(10, 'ascii').get(uint8array, off + 320)).trim(),
originationTime: (0, Util_1.stripNulls)(new Token.StringType(8, 'ascii').get(uint8array, off + 330)).trim(),
description: stripNulls(new Token.StringType(256, 'ascii').get(uint8array, off)).trim(),
originator: stripNulls(new Token.StringType(32, 'ascii').get(uint8array, off + 256)).trim(),
originatorReference: stripNulls(new Token.StringType(32, 'ascii').get(uint8array, off + 288)).trim(),
originationDate: stripNulls(new Token.StringType(10, 'ascii').get(uint8array, off + 320)).trim(),
originationTime: stripNulls(new Token.StringType(8, 'ascii').get(uint8array, off + 330)).trim(),
timeReferenceLow: Token.UINT32_LE.get(uint8array, off + 338),

@@ -21,0 +18,0 @@ timeReferenceHigh: Token.UINT32_LE.get(uint8array, off + 342),

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FactChunk = exports.Format = exports.WaveFormat = void 0;
/**
* Ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd317599(v=vs.85).aspx
*/
var WaveFormat;
export var WaveFormat;
(function (WaveFormat) {

@@ -24,3 +21,3 @@ WaveFormat[WaveFormat["PCM"] = 1] = "PCM";

WaveFormat[WaveFormat["MPEG"] = 80] = "MPEG";
})(WaveFormat = exports.WaveFormat || (exports.WaveFormat = {}));
})(WaveFormat = WaveFormat || (WaveFormat = {}));
/**

@@ -30,3 +27,3 @@ * format chunk; chunk-id is "fmt "

*/
class Format {
export class Format {
constructor(header) {

@@ -48,3 +45,2 @@ if (header.chunkSize < 16)

}
exports.Format = Format;
/**

@@ -55,3 +51,3 @@ * Fact chunk; chunk-id is "fact"

*/
class FactChunk {
export class FactChunk {
constructor(header) {

@@ -69,2 +65,1 @@ if (header.chunkSize < 4) {

}
exports.FactChunk = FactChunk;

@@ -1,15 +0,12 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.WaveParser = void 0;
const strtok3 = require("strtok3/lib/core");
const Token = require("token-types");
const debug_1 = require("debug");
const riff = require("../riff/RiffChunk");
const WaveChunk = require("./../wav/WaveChunk");
const ID3v2Parser_1 = require("../id3v2/ID3v2Parser");
const util = require("../common/Util");
const FourCC_1 = require("../common/FourCC");
const BasicParser_1 = require("../common/BasicParser");
const BwfChunk_1 = require("../wav/BwfChunk");
const debug = (0, debug_1.default)('music-metadata:parser:RIFF');
import * as strtok3 from 'strtok3/core';
import * as Token from 'token-types';
import initDebug from 'debug';
import * as riff from '../riff/RiffChunk.js';
import * as WaveChunk from './../wav/WaveChunk.js';
import { ID3v2Parser } from '../id3v2/ID3v2Parser.js';
import * as util from '../common/Util.js';
import { FourCcToken } from '../common/FourCC.js';
import { BasicParser } from '../common/BasicParser.js';
import { BroadcastAudioExtensionChunk } from '../wav/BwfChunk.js';
const debug = initDebug('music-metadata:parser:RIFF');
/**

@@ -26,3 +23,3 @@ * Resource Interchange File Format (RIFF) Parser

*/
class WaveParser extends BasicParser_1.BasicParser {
export class WaveParser extends BasicParser {
async parse() {

@@ -40,7 +37,7 @@ const riffHeader = await this.tokenizer.readToken(riff.Header);

async parseRiffChunk(chunkSize) {
const type = await this.tokenizer.readToken(FourCC_1.FourCcToken);
const type = await this.tokenizer.readToken(FourCcToken);
this.metadata.setFormat('container', type);
switch (type) {
case 'WAVE':
return this.readWaveChunk(chunkSize - FourCC_1.FourCcToken.len);
return this.readWaveChunk(chunkSize - FourCcToken.len);
default:

@@ -85,3 +82,3 @@ throw new Error(`Unsupported RIFF format: RIFF/${type}`);

const rst = strtok3.fromBuffer(id3_data);
await new ID3v2Parser_1.ID3v2Parser().parse(this.metadata, rst, this.options);
await new ID3v2Parser().parse(this.metadata, rst, this.options);
break;

@@ -109,7 +106,7 @@ case 'data': // PCM-data

case 'bext': // Broadcast Audio Extension chunk https://tech.ebu.ch/docs/tech/tech3285.pdf
const bext = await this.tokenizer.readToken(BwfChunk_1.BroadcastAudioExtensionChunk);
const bext = await this.tokenizer.readToken(BroadcastAudioExtensionChunk);
Object.keys(bext).forEach(key => {
this.metadata.addTag('exif', 'bext.' + key, bext[key]);
});
const bextRemaining = header.chunkSize - BwfChunk_1.BroadcastAudioExtensionChunk.len;
const bextRemaining = header.chunkSize - BroadcastAudioExtensionChunk.len;
await this.tokenizer.ignore(bextRemaining);

@@ -162,2 +159,1 @@ break;

}
exports.WaveParser = WaveParser;

@@ -1,15 +0,12 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.WavPackParser = void 0;
const Token = require("token-types");
const APEv2Parser_1 = require("../apev2/APEv2Parser");
const FourCC_1 = require("../common/FourCC");
const BasicParser_1 = require("../common/BasicParser");
const WavPackToken_1 = require("./WavPackToken");
const debug_1 = require("debug");
const debug = (0, debug_1.default)('music-metadata:parser:WavPack');
import * as Token from 'token-types';
import { APEv2Parser } from '../apev2/APEv2Parser.js';
import { FourCcToken } from '../common/FourCC.js';
import { BasicParser } from '../common/BasicParser.js';
import { WavPack } from './WavPackToken.js';
import initDebug from 'debug';
const debug = initDebug('music-metadata:parser:WavPack');
/**
* WavPack Parser
*/
class WavPackParser extends BasicParser_1.BasicParser {
export class WavPackParser extends BasicParser {
async parse() {

@@ -20,13 +17,13 @@ this.audioDataSize = 0;

// try to parse APEv2 header
return APEv2Parser_1.APEv2Parser.tryParseApeHeader(this.metadata, this.tokenizer, this.options);
return APEv2Parser.tryParseApeHeader(this.metadata, this.tokenizer, this.options);
}
async parseWavPackBlocks() {
do {
const blockId = await this.tokenizer.peekToken(FourCC_1.FourCcToken);
const blockId = await this.tokenizer.peekToken(FourCcToken);
if (blockId !== 'wvpk')
break;
const header = await this.tokenizer.readToken(WavPackToken_1.WavPack.BlockHeaderToken);
const header = await this.tokenizer.readToken(WavPack.BlockHeaderToken);
if (header.BlockID !== 'wvpk')
throw new Error('Invalid WavPack Block-ID');
debug(`WavPack header blockIndex=${header.blockIndex}, len=${WavPackToken_1.WavPack.BlockHeaderToken.len}`);
debug(`WavPack header blockIndex=${header.blockIndex}, len=${WavPack.BlockHeaderToken.len}`);
if (header.blockIndex === 0 && !this.metadata.format.container) {

@@ -46,3 +43,3 @@ this.metadata.setFormat('container', 'WavPack');

}
const ignoreBytes = header.blockSize - (WavPackToken_1.WavPack.BlockHeaderToken.len - 8);
const ignoreBytes = header.blockSize - (WavPack.BlockHeaderToken.len - 8);
await (header.blockIndex === 0 ? this.parseMetadataSubBlock(header, ignoreBytes) : this.tokenizer.ignore(ignoreBytes));

@@ -52,3 +49,3 @@ if (header.blockSamples > 0) {

}
} while (!this.tokenizer.fileInfo.size || this.tokenizer.fileInfo.size - this.tokenizer.position >= WavPackToken_1.WavPack.BlockHeaderToken.len);
} while (!this.tokenizer.fileInfo.size || this.tokenizer.fileInfo.size - this.tokenizer.position >= WavPack.BlockHeaderToken.len);
this.metadata.setFormat('bitrate', this.audioDataSize * 8 / this.metadata.format.duration);

@@ -61,4 +58,4 @@ }

async parseMetadataSubBlock(header, remainingLength) {
while (remainingLength > WavPackToken_1.WavPack.MetadataIdToken.len) {
const id = await this.tokenizer.readToken(WavPackToken_1.WavPack.MetadataIdToken);
while (remainingLength > WavPack.MetadataIdToken.len) {
const id = await this.tokenizer.readToken(WavPack.MetadataIdToken);
const dataSizeInWords = await this.tokenizer.readNumber(id.largeBlock ? Token.UINT24_LE : Token.UINT8);

@@ -94,3 +91,3 @@ const data = Buffer.alloc(dataSizeInWords * 2 - (id.isOddSize ? 1 : 0));

}
remainingLength -= WavPackToken_1.WavPack.MetadataIdToken.len + (id.largeBlock ? Token.UINT24_LE.len : Token.UINT8.len) + dataSizeInWords * 2;
remainingLength -= WavPack.MetadataIdToken.len + (id.largeBlock ? Token.UINT24_LE.len : Token.UINT8.len) + dataSizeInWords * 2;
debug(`remainingLength=${remainingLength}`);

@@ -104,2 +101,1 @@ if (id.isOddSize)

}
exports.WavPackParser = WavPackParser;

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.WavPack = void 0;
const Token = require("token-types");
const FourCC_1 = require("../common/FourCC");
import * as Token from 'token-types';
import { FourCcToken } from '../common/FourCC.js';
const SampleRates = [6000, 8000, 9600, 11025, 12000, 16000, 22050, 24000, 32000, 44100,
48000, 64000, 88200, 96000, 192000, -1];
class WavPack {
export class WavPack {
static isBitSet(flags, bitOffset) {

@@ -16,3 +13,2 @@ return WavPack.getBitAllignedNumber(flags, bitOffset, 1) === 1;

}
exports.WavPack = WavPack;
/**

@@ -31,3 +27,3 @@ * WavPack Block Header

// should equal 'wvpk'
BlockID: FourCC_1.FourCcToken.get(buf, off),
BlockID: FourCcToken.get(buf, off),
// 0x402 to 0x410 are valid for decode

@@ -34,0 +30,0 @@ blockSize: Token.UINT32_LE.get(buf, off + 4),

{
"name": "music-metadata",
"description": "Music metadata parser for Node.js, supporting virtual any audio and tag format.",
"version": "7.12.6",
"version": "8.0.0",
"author": {

@@ -13,2 +13,14 @@ "name": "Borewit",

},
"type": "module",
"exports": {
".": {
"node": "./lib/index.js",
"default": "./lib/core.js"
}
},
"types": "lib/index.d.ts",
"files": [
"lib/**/*.js",
"lib/**/*.d.ts"
],
"keywords": [

@@ -64,8 +76,2 @@ "music",

],
"main": "lib/index.js",
"types": "lib/index.d.ts",
"files": [
"lib/**/*.js",
"lib/**/*.d.ts"
],
"scripts": {

@@ -80,7 +86,7 @@ "clean": "del-cli lib/**/*.js lib/**/*.js.map lib/**/*.d.ts src/**/*.d.ts test/**/*.js test/**/*.js.map test/**/*.js test/**/*.js.map doc-gen/**/*.js doc-gen/**/*.js.map",

"lint": "npm run lint-md && npm run eslint",
"test": "mocha --require ts-node/register --require source-map-support/register --full-trace test/test-*.ts",
"test": "mocha",
"build": "npm run clean && npm run compile && npm run doc-gen",
"start": "npm-run-all compile lint cover-test",
"test-coverage": "nyc npm run test",
"send-codacy": "nyc report --reporter=text-lcov | codacy-coverage",
"test-coverage": "c8 npm run test",
"send-codacy": "c8 report --reporter=text-lcov | codacy-coverage",
"doc-gen": "node doc-gen/gen.js"

@@ -92,6 +98,6 @@ },

"debug": "^4.3.4",
"file-type": "^16.5.4",
"file-type": "^17.1.6",
"media-typer": "^1.1.0",
"strtok3": "^6.3.0",
"token-types": "^4.2.0"
"strtok3": "^7.0.0",
"token-types": "^5.0.1"
},

@@ -107,2 +113,3 @@ "devDependencies": {

"@typescript-eslint/parser": "^5.32.0",
"c8": "^7.12.0",
"chai": "^4.3.6",

@@ -119,8 +126,7 @@ "chai-as-promised": "^7.1.1",

"mime": "^3.0.0",
"mocha": "^9.2.2",
"mocha": "^10.0.0",
"npm-run-all": "^4.1.5",
"nyc": "^15.1.0",
"prettier": "^2.5.1",
"remark-cli": "^11.0.0",
"remark-preset-lint-recommended": "^6.1.2",
"source-map-support": "^0.5.21",
"ts-node": "^10.9.1",

@@ -130,3 +136,3 @@ "typescript": "^4.7.4"

"engines": {
"node": ">=10"
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
},

@@ -140,19 +146,3 @@ "repository": {

"url": "https://github.com/Borewit/music-metadata/issues"
},
"nyc": {
"exclude": [
"test/**/*.ts",
"src/**/*.js"
],
"extension": [
".ts"
],
"sourceMap": true,
"instrument": true,
"reporter": [
"lcov",
"text"
],
"report-dir": "coverage"
}
}
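
The new `"type": "module"` field and the conditional `"exports"` map above drive the ESM switch: Node.js resolves the `"node"` condition to `lib/index.js`, while bundlers and other runtimes fall back to `lib/core.js`. A minimal sketch of what that means for a consumer, assuming the published entry points keep their documented exports:

```js
// In Node.js the "node" export condition resolves 'music-metadata' to lib/index.js,
// so file-system helpers such as parseFile are available.
import { parseFile } from 'music-metadata';

// In a browser bundler the "default" condition resolves to lib/core.js instead,
// which still exposes the buffer/stream based API (e.g. parseBuffer).
import { parseBuffer } from 'music-metadata';
```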

@@ -80,4 +80,5 @@ ![Node.js CI](https://github.com/Borewit/music-metadata/workflows/Node.js%20CI/badge.svg)

The JavaScript in runtime is compliant with [ECMAScript 2017 (ES8)](https://en.wikipedia.org/wiki/ECMAScript#8th_Edition_-_ECMAScript_2017).
Requires [Node.js®](https://nodejs.org/) version 6 or higher.
Module: version 8 migrated from [CommonJS](https://en.wikipedia.org/wiki/CommonJS) to [pure ECMAScript Module (ESM)](https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c).
JavaScript is compliant with [ECMAScript 2019 (ES10)](https://en.wikipedia.org/wiki/ECMAScript#10th_Edition_%E2%80%93_ECMAScript_2019).
Requires Node.js ≥ 12.20 engine.
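
Because version 8 is published as a pure ESM package, it can no longer be loaded with `require()`. A minimal sketch of how a CommonJS project could still consume it via dynamic `import()` (the file name below is only a placeholder):

```js
// Hedged sketch: loading the ESM-only music-metadata v8 from a CommonJS module.
// Dynamic import() returns a Promise, so it has to be awaited.
(async () => {
  const { parseFile } = await import('music-metadata');
  const metadata = await parseFile('sample.mp3'); // placeholder path
  console.log(metadata.format.container, metadata.common.title);
})();
```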

@@ -118,12 +119,8 @@ ### Browser Support

Import music-metadata in JavaScript:
Import music-metadata:
```JavaScript
const mm = require('music-metadata');
import { parseFile } from 'music-metadata';
```
Import the methods you need, like `parseFile` in this example.
This is how it's done in TypeScript:
```ts
import * as mm from 'music-metadata';
```
### Module Functions

@@ -141,3 +138,3 @@

```
```ts
parseFile(filePath: string, opts: IOptions = {}): Promise<IAudioMetadata>`

@@ -148,9 +145,9 @@ ```

```js
const mm = require('music-metadata');
const util = require('util');
import { parseFile } from 'music-metadata';
import { inspect } from 'util';
(async () => {
try {
const metadata = await mm.parseFile('../music-metadata/test/samples/MusicBrainz - Beth Hart - Sinner\'s Prayer [id3v2.3].V2.mp3');
console.log(util.inspect(metadata, { showHidden: false, depth: null }));
const metadata = await parseFile('../music-metadata/test/samples/MusicBrainz - Beth Hart - Sinner\'s Prayer [id3v2.3].V2.mp3');
console.log(inspect(metadata, { showHidden: false, depth: null }));
} catch (error) {

@@ -175,7 +172,7 @@ console.error(error.message);

```js
const mm = require('music-metadata');
import { parseStream } from 'music-metadata';
(async () => {
try {
const metadata = await mm.parseStream(someReadStream, {mimeType: 'audio/mpeg', size: 26838});
const metadata = await parseStream(someReadStream, {mimeType: 'audio/mpeg', size: 26838});
console.log(metadata);

@@ -198,5 +195,7 @@ } catch (error) {

```js
import { parseBuffer } from 'music-metadata';
(async () => {
try {
const metadata = mm.parseBuffer(someBuffer, 'audio/mpeg');
const metadata = await parseBuffer(someBuffer, 'audio/mpeg');
console.log(metadata);

@@ -224,2 +223,17 @@ } catch (error) {

```js
import { parseFile, orderTags } from 'music-metadata';
import { inspect } from 'util';
(async () => {
try {
const metadata = await parseFile('../test/samples/MusicBrainz - Beth Hart - Sinner\'s Prayer [id3v2.3].V2.mp3');
const orderedTags = orderTags(metadata.native['ID3v2.3']);
console.log(inspect(orderedTags, { showHidden: false, depth: null }));
} catch (error) {
console.error(error.message);
}
})();
```
#### ratingToStars function

@@ -241,7 +255,7 @@

```js
import * as mm from 'music-metadata';
import { parseFile, selectCover } from 'music-metadata';
(async () => {
const {common} = await mm.parseFile(filePath);
const cover = mm.selectCover(common.picture); // pick the cover image
const {common} = await parseFile(filePath);
const cover = selectCover(common.picture); // pick the cover image
}

@@ -338,7 +352,11 @@ )();

```js
mm.parseStream(someReadStream, {mimeType: 'audio/mpeg', size: 26838}, {duration: true})
.then( function (metadata) {
console.log(util.inspect(metadata, {showHidden: false, depth: null}));
someReadStream.close();
});
import { parseStream } from 'music-metadata';
import { inspect } from 'util';
(async () => {
const metadata = await parseStream(someReadStream, {mimeType: 'audio/mpeg', size: 26838}, {duration: true});
console.log(inspect(metadata, {showHidden: false, depth: null}));
someReadStream.close();
}
)();
```

@@ -391,3 +409,3 @@

```js
const mm = require('music-metadata')
import { parseFile } from 'music-metadata';

@@ -399,3 +417,3 @@ function parseFiles(audioFiles) {

if (audioFile) {
return mm.parseFile(audioFile).then(metadata => {
return parseFile(audioFile).then(metadata => {
// Do great things with the metadata

@@ -405,4 +423,2 @@ return parseFiles(audioFiles); // process rest of the files AFTER we are finished

}
return Promise.resolve();
}

@@ -417,3 +433,3 @@

```js
const mm = require('music-metadata');
import { parseFile } from 'music-metadata';

@@ -426,3 +442,3 @@ // it is required to declare the function 'async' to allow the use of await

// await will ensure the metadata parsing is completed before we move on to the next file
const metadata = await mm.parseFile(audioFile);
const metadata = await parseFile(audioFile);
// Do great things with the metadata

@@ -429,0 +445,0 @@ }
