@remotion/media-utils - npm Package Compare versions

Comparing version 4.0.239 to 4.0.240

dist/combine-float32-arrays.d.ts


dist/fft/get-visualization.d.ts
export type OptimizeFor = 'accuracy' | 'speed';
-export declare const getVisualization: ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor, }: {
+export declare const getVisualization: ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor, dataOffsetInSeconds, }: {
sampleSize: number;

@@ -10,2 +10,3 @@ data: Float32Array;

optimizeFor: OptimizeFor;
+dataOffsetInSeconds: number;
}) => number[];

dist/fft/get-visualization.js

@@ -11,3 +11,3 @@ "use strict";

const to_int_16_1 = require("./to-int-16");
-const getVisualization = ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor = 'accuracy', }) => {
+const getVisualization = ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor, dataOffsetInSeconds, }) => {
const isPowerOfTwo = sampleSize > 0 && (sampleSize & (sampleSize - 1)) === 0;

@@ -23,3 +23,3 @@ if (!isPowerOfTwo) {

}
-const start = Math.floor((frame / fps) * sampleRate);
+const start = Math.floor((frame / fps - dataOffsetInSeconds) * sampleRate);
const actualStart = Math.max(0, start - sampleSize / 2);

@@ -26,0 +26,0 @@ const ints = new Int16Array({
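
The compiled change above comes down to one line of indexing math: the visualization window is now anchored at frame / fps - dataOffsetInSeconds instead of frame / fps, so the waveform passed in no longer has to start at the beginning of the audio. A small worked example of that arithmetic, with values chosen purely for illustration:

// Worked example of the new start calculation (illustrative values only).
const fps = 30;
const frame = 90; // the frame being rendered corresponds to t = 3 s
const sampleRate = 48000;
const dataOffsetInSeconds = 2; // the Float32Array only covers audio from t = 2 s onwards

// New formula: index relative to the start of the (possibly windowed) data.
const start = Math.floor((frame / fps - dataOffsetInSeconds) * sampleRate); // 48000

// Old formula assumed the data always began at t = 0:
const oldStart = Math.floor((frame / fps) * sampleRate); // 144000, out of range for a short window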

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getAudioData = void 0;
+const fetch_with_cors_catch_1 = require("./fetch-with-cors-catch");
const is_remote_asset_1 = require("./is-remote-asset");

@@ -8,24 +9,2 @@ const p_limit_1 = require("./p-limit");

const limit = (0, p_limit_1.pLimit)(3);
-const fetchWithCorsCatch = async (src) => {
-try {
-const response = await fetch(src, {
-mode: 'cors',
-referrerPolicy: 'no-referrer-when-downgrade',
-});
-return response;
-}
-catch (err) {
-const error = err;
-if (
-// Chrome
-error.message.includes('Failed to fetch') ||
-// Safari
-error.message.includes('Load failed') ||
-// Firefox
-error.message.includes('NetworkError when attempting to fetch resource')) {
-throw new TypeError(`Failed to read from ${src}: ${error.message}. Does the resource support CORS?`);
-}
-throw err;
-}
-};
const fn = async (src, options) => {

@@ -42,3 +21,3 @@ var _a;

});
-const response = await fetchWithCorsCatch(src);
+const response = await (0, fetch_with_cors_catch_1.fetchWithCorsCatch)(src);
const arrayBuffer = await response.arrayBuffer();

@@ -45,0 +24,0 @@ const wave = await audioContext.decodeAudioData(arrayBuffer);
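
No behavior change is visible here: the inline fetchWithCorsCatch helper was lifted out of get-audio-data.js into its own module and is now required from ./fetch-with-cors-catch. The new module itself is not part of this diff, but judging from the removed code it presumably looks roughly like this TypeScript sketch:

// Assumed shape of the extracted ./fetch-with-cors-catch module, reconstructed
// from the removed inline code above; the actual file is not shown in this diff.
export const fetchWithCorsCatch = async (src: string): Promise<Response> => {
  try {
    return await fetch(src, {
      mode: 'cors',
      referrerPolicy: 'no-referrer-when-downgrade',
    });
  } catch (err) {
    const error = err as Error;
    if (
      // Chrome
      error.message.includes('Failed to fetch') ||
      // Safari
      error.message.includes('Load failed') ||
      // Firefox
      error.message.includes('NetworkError when attempting to fetch resource')
    ) {
      throw new TypeError(
        `Failed to read from ${src}: ${error.message}. Does the resource support CORS?`,
      );
    }
    throw err;
  }
};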

dist/index.d.ts

@@ -5,7 +5,10 @@ export { audioBufferToDataUrl } from './audio-buffer/audio-url-helpers';

export { getImageDimensions } from './get-image-dimensions';
+export { getPartialWaveData } from './get-partial-wave-data';
export { getVideoMetadata } from './get-video-metadata';
export { getWaveformPortion } from './get-waveform-portion';
+export { WaveProbe, probeWaveFile } from './probe-wave-file';
-export * from './types';
+export type { AudioData, VideoMetadata as VideoData } from './types';
export { useAudioData } from './use-audio-data';
-export { visualizeAudio } from './visualize-audio';
+export { UseWindowedAudioDataOptions, UseWindowedAudioDataReturnValue, useWindowedAudioData, } from './use-windowed-audio-data';
+export { VisualizeAudioOptions, visualizeAudio } from './visualize-audio';
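
The public surface grows in this release: getPartialWaveData, probeWaveFile, useWindowedAudioData and the VisualizeAudioOptions type are exported, and the wildcard re-export of ./types is narrowed to the AudioData and VideoData types. Below is a hedged usage sketch of the new hook together with the new dataOffsetInSeconds option; the option and return shapes of useWindowedAudioData are assumptions inferred from the exported type names (only UseWindowedAudioDataOptions and UseWindowedAudioDataReturnValue appear in this diff), so verify them against the Remotion docs:

// Sketch only. Assumes useWindowedAudioData({src, frame, fps, windowInSeconds})
// returns {audioData, dataOffsetInSeconds}.
import React from 'react';
import {staticFile, useCurrentFrame, useVideoConfig} from 'remotion';
import {useWindowedAudioData, visualizeAudio} from '@remotion/media-utils';

export const AudioBars: React.FC = () => {
  const frame = useCurrentFrame();
  const {fps} = useVideoConfig();

  // Only decode a window of audio around the current frame instead of the whole file.
  const {audioData, dataOffsetInSeconds} = useWindowedAudioData({
    src: staticFile('audio.mp3'),
    frame,
    fps,
    windowInSeconds: 30,
  });

  if (!audioData) {
    return null; // still loading
  }

  const amplitudes = visualizeAudio({
    audioData,
    frame,
    fps,
    numberOfSamples: 32,
    // Tell visualizeAudio where the windowed data starts within the full audio.
    dataOffsetInSeconds,
  });

  return (
    <div style={{display: 'flex', alignItems: 'flex-end', gap: 2}}>
      {amplitudes.map((a, i) => (
        <div key={i} style={{width: 6, height: a * 200, background: 'white'}} />
      ))}
    </div>
  );
};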

dist/index.js

@@ -17,3 +17,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
-exports.visualizeAudio = exports.useAudioData = exports.getWaveformPortion = exports.getVideoMetadata = exports.getImageDimensions = exports.getAudioDurationInSeconds = exports.getAudioDuration = exports.getAudioData = exports.audioBufferToDataUrl = void 0;
+exports.visualizeAudio = exports.useWindowedAudioData = exports.useAudioData = exports.probeWaveFile = exports.getWaveformPortion = exports.getVideoMetadata = exports.getPartialWaveData = exports.getImageDimensions = exports.getAudioDurationInSeconds = exports.getAudioDuration = exports.getAudioData = exports.audioBufferToDataUrl = void 0;
var audio_url_helpers_1 = require("./audio-buffer/audio-url-helpers");

@@ -28,2 +28,4 @@ Object.defineProperty(exports, "audioBufferToDataUrl", { enumerable: true, get: function () { return audio_url_helpers_1.audioBufferToDataUrl; } });

Object.defineProperty(exports, "getImageDimensions", { enumerable: true, get: function () { return get_image_dimensions_1.getImageDimensions; } });
+var get_partial_wave_data_1 = require("./get-partial-wave-data");
+Object.defineProperty(exports, "getPartialWaveData", { enumerable: true, get: function () { return get_partial_wave_data_1.getPartialWaveData; } });
var get_video_metadata_1 = require("./get-video-metadata");

@@ -33,6 +35,10 @@ Object.defineProperty(exports, "getVideoMetadata", { enumerable: true, get: function () { return get_video_metadata_1.getVideoMetadata; } });

Object.defineProperty(exports, "getWaveformPortion", { enumerable: true, get: function () { return get_waveform_portion_1.getWaveformPortion; } });
+var probe_wave_file_1 = require("./probe-wave-file");
+Object.defineProperty(exports, "probeWaveFile", { enumerable: true, get: function () { return probe_wave_file_1.probeWaveFile; } });
-__exportStar(require("./types"), exports);
var use_audio_data_1 = require("./use-audio-data");
Object.defineProperty(exports, "useAudioData", { enumerable: true, get: function () { return use_audio_data_1.useAudioData; } });
+var use_windowed_audio_data_1 = require("./use-windowed-audio-data");
+Object.defineProperty(exports, "useWindowedAudioData", { enumerable: true, get: function () { return use_windowed_audio_data_1.useWindowedAudioData; } });
var visualize_audio_1 = require("./visualize-audio");
Object.defineProperty(exports, "visualizeAudio", { enumerable: true, get: function () { return visualize_audio_1.visualizeAudio; } });

dist/visualize-audio.d.ts

@@ -0,3 +1,4 @@

+import type { OptimizeFor } from './fft/get-visualization';
import type { AudioData } from './types';
-type FnParameters = {
+type MandatoryVisualizeAudioOptions = {
audioData: AudioData;

@@ -7,7 +8,10 @@ frame: number;

numberOfSamples: number;
-optimizeFor?: 'accuracy' | 'speed';
};
-export declare const visualizeAudio: ({ smoothing, ...parameters }: FnParameters & {
-smoothing?: boolean;
-}) => number[];
+type OptionalVisualizeAudioOptions = {
+optimizeFor: OptimizeFor;
+dataOffsetInSeconds: number;
+smoothing: boolean;
+};
+export type VisualizeAudioOptions = MandatoryVisualizeAudioOptions & Partial<OptionalVisualizeAudioOptions>;
+export declare const visualizeAudio: ({ smoothing, optimizeFor, dataOffsetInSeconds, ...parameters }: MandatoryVisualizeAudioOptions & Partial<OptionalVisualizeAudioOptions> & {}) => number[];
export {};
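
The single FnParameters type is split in two: the mandatory options (audioData, frame, fps, numberOfSamples) and the optional ones (optimizeFor, dataOffsetInSeconds, smoothing), recombined and exported as VisualizeAudioOptions. An illustrative sketch of how the newly exported type can be used; the wrapper itself is hypothetical, not part of the package:

// Hypothetical wrapper, shown only to illustrate the exported VisualizeAudioOptions type.
import {visualizeAudio, type VisualizeAudioOptions} from '@remotion/media-utils';

// Defaults to the 'speed' FFT path unless the caller passes optimizeFor explicitly.
const visualizeFast = (options: VisualizeAudioOptions): number[] => {
  return visualizeAudio({optimizeFor: 'speed', ...options});
};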

dist/visualize-audio.js

@@ -13,21 +13,29 @@ "use strict";

*/
-const visualizeAudioFrame = ({ audioData: metadata, frame, fps, numberOfSamples, optimizeFor, }) => {
-const cacheKey = metadata.resultId + frame + fps + numberOfSamples;
+const visualizeAudioFrame = ({ audioData, frame, fps, numberOfSamples, optimizeFor, dataOffsetInSeconds, }) => {
+const cacheKey = audioData.resultId + frame + fps + numberOfSamples;
if (cache[cacheKey]) {
return cache[cacheKey];
}
-const maxInt = (0, max_value_cached_1.getMaxPossibleMagnitude)(metadata);
+const maxInt = (0, max_value_cached_1.getMaxPossibleMagnitude)(audioData);
return (0, get_visualization_1.getVisualization)({
sampleSize: numberOfSamples * 2,
-data: metadata.channelWaveforms[0],
+data: audioData.channelWaveforms[0],
frame,
fps,
-sampleRate: metadata.sampleRate,
+sampleRate: audioData.sampleRate,
maxInt,
-optimizeFor: optimizeFor !== null && optimizeFor !== void 0 ? optimizeFor : (no_react_1.NoReactInternals.ENABLE_V5_BREAKING_CHANGES ? 'speed' : 'accuracy'),
+optimizeFor,
+dataOffsetInSeconds,
});
};
-const visualizeAudio = ({ smoothing = true, ...parameters }) => {
+const visualizeAudio = ({ smoothing = true, optimizeFor = no_react_1.NoReactInternals.ENABLE_V5_BREAKING_CHANGES
+? 'speed'
+: 'accuracy', dataOffsetInSeconds = 0, ...parameters }) => {
if (!smoothing) {
-return visualizeAudioFrame(parameters);
+return visualizeAudioFrame({
+...parameters,
+optimizeFor,
+dataOffsetInSeconds,
+smoothing,
+});
}

@@ -40,3 +48,9 @@ const toSmooth = [

const all = toSmooth.map((s) => {
-return visualizeAudioFrame({ ...parameters, frame: s });
+return visualizeAudioFrame({
+...parameters,
+frame: s,
+dataOffsetInSeconds,
+optimizeFor,
+smoothing,
+});
});

@@ -43,0 +57,0 @@ return new Array(parameters.numberOfSamples).fill(true).map((_x, i) => {
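
The option defaults now live in visualizeAudio itself rather than inside the FFT helper: smoothing still defaults to true, dataOffsetInSeconds defaults to 0, and optimizeFor defaults to 'speed' when Remotion's ENABLE_V5_BREAKING_CHANGES flag is set and to 'accuracy' otherwise, with visualizeAudioFrame receiving all three explicitly. A sketch of what those defaults mean for a caller, assuming the v5 flag is off:

// Illustration of the resolved defaults; audioData stands in for a decoded file.
import {visualizeAudio, type AudioData} from '@remotion/media-utils';

declare const audioData: AudioData;
const frame = 0;
const fps = 30;

// This call...
const short = visualizeAudio({audioData, frame, fps, numberOfSamples: 64});

// ...is equivalent to the fully explicit form (with the v5 breaking-changes flag off):
const explicit = visualizeAudio({
  audioData,
  frame,
  fps,
  numberOfSamples: 64,
  smoothing: true, // average the FFT of neighboring frames
  optimizeFor: 'accuracy', // 'speed' when ENABLE_V5_BREAKING_CHANGES is on
  dataOffsetInSeconds: 0, // audioData is assumed to start at the beginning of the audio
});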

package.json

@@ -6,3 +6,3 @@ {

"name": "@remotion/media-utils",
"version": "4.0.239",
"version": "4.0.240",
"description": "Utilities for working with media files",

@@ -17,3 +17,3 @@ "main": "dist/index.js",

"dependencies": {
"remotion": "4.0.239"
"remotion": "4.0.240"
},

@@ -26,3 +26,3 @@ "peerDependencies": {

"eslint": "9.14.0",
"@remotion/eslint-config-internal": "4.0.239"
"@remotion/eslint-config-internal": "4.0.240"
},

@@ -29,0 +29,0 @@ "keywords": [
