New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

livekit-client

Package Overview
Dependencies
Maintainers
1
Versions
238
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

livekit-client - npm Package Compare versions

Comparing version 0.10.1 to 0.11.0

3

dist/livekit.d.ts

@@ -1,2 +0,2 @@

import { ConnectOptions, CreateAudioTrackOptions, CreateLocalTracksOptions, CreateVideoTrackOptions } from './options';
import { ConnectOptions } from './options';
import Room from './room/Room';

@@ -6,2 +6,3 @@ import LocalAudioTrack from './room/track/LocalAudioTrack';

import LocalVideoTrack from './room/track/LocalVideoTrack';
import { CreateAudioTrackOptions, CreateLocalTracksOptions, CreateVideoTrackOptions } from './room/track/options';
export { version } from './version';

@@ -8,0 +9,0 @@ /**

@@ -22,2 +22,3 @@ "use strict";

const LocalAudioTrack_1 = __importDefault(require("./room/track/LocalAudioTrack"));
const LocalTrack_1 = __importDefault(require("./room/track/LocalTrack"));
const LocalVideoTrack_1 = __importDefault(require("./room/track/LocalVideoTrack"));

@@ -128,3 +129,2 @@ const Track_1 = require("./room/track/Track");

return __awaiter(this, void 0, void 0, function* () {
const constraints = {};
if (!options)

@@ -134,28 +134,17 @@ options = {};

options.audio = {};
// default video options
const videoOptions = Object.assign({}, options_1.VideoPresets.qhd.resolution);
if (typeof options.video === 'object' && options.video) {
Object.assign(videoOptions, options.video);
if (options.video.resolution) {
Object.assign(videoOptions, options.video.resolution);
}
}
if (options.video === false) {
constraints.video = false;
}
else {
// use defaults
constraints.video = videoOptions;
}
constraints.audio = options.audio;
const constraints = LocalTrack_1.default.constraintsForOptions(options);
const stream = yield navigator.mediaDevices.getUserMedia(constraints);
const tracks = [];
stream.getTracks().forEach((mediaStreamTrack) => {
let trackOptions = mediaStreamTrack.kind === 'audio' ? options.audio : options.video;
return stream.getTracks().map((mediaStreamTrack) => {
const isAudio = mediaStreamTrack.kind === 'audio';
let trackOptions = isAudio ? options.audio : options.video;
if (typeof trackOptions === 'boolean' || !trackOptions) {
trackOptions = {};
}
tracks.push(createLocalTrack(mediaStreamTrack, trackOptions));
let trackConstraints;
const conOrBool = isAudio ? constraints.audio : constraints.video;
if (typeof conOrBool !== 'boolean') {
trackConstraints = conOrBool;
}
return createLocalTrack(mediaStreamTrack, trackOptions === null || trackOptions === void 0 ? void 0 : trackOptions.name, trackConstraints);
});
return tracks;
});

@@ -165,12 +154,8 @@ }

/** @internal */
function createLocalTrack(mediaStreamTrack, options) {
let name;
if (options instanceof Object && options.name) {
name = options.name;
}
function createLocalTrack(mediaStreamTrack, name, constraints) {
switch (mediaStreamTrack.kind) {
case 'audio':
return new LocalAudioTrack_1.default(mediaStreamTrack, name, options);
return new LocalAudioTrack_1.default(mediaStreamTrack, name, constraints);
case 'video':
return new LocalVideoTrack_1.default(mediaStreamTrack, name, options);
return new LocalVideoTrack_1.default(mediaStreamTrack, name, constraints);
default:

@@ -177,0 +162,0 @@ throw new errors_1.TrackInvalidError(`unsupported track type: ${mediaStreamTrack.kind}`);

import LocalTrack from './room/track/LocalTrack';
import { CreateLocalTracksOptions, VideoCodec, VideoEncoding } from './room/track/options';
/**

@@ -7,15 +8,5 @@ * if video or audio tracks are created as part of [[connect]], it'll automatically

export interface ConnectOptions extends CreateLocalTracksOptions {
/** see [[TrackPublishOptions.videoEncoding]] */
videoEncoding?: VideoEncoding;
/** see [[TrackPublishOptions.videoCodec]] */
videoCodec?: VideoCodec;
/** see [[TrackPublishOptions.audioBitrate]] */
audioBitrate?: number;
/** see [[TrackPublishOptions.simulcast]] */
simulcast?: boolean;
/** autosubscribe to room tracks upon connect, defaults to true */
autoSubscribe?: boolean;
/**
* configures LiveKit internal log level
*/
/** configures LiveKit internal log level */
logLevel?: LogLevel;

@@ -31,2 +22,10 @@ /**

tracks?: LocalTrack[] | MediaStreamTrack[];
/** see [[TrackPublishOptions.videoEncoding]] */
videoEncoding?: VideoEncoding;
/** see [[TrackPublishOptions.videoCodec]] */
videoCodec?: VideoCodec;
/** see [[TrackPublishOptions.audioBitrate]] */
audioBitrate?: number;
/** see [[TrackPublishOptions.simulcast]] */
simulcast?: boolean;
}

@@ -41,119 +40,1 @@ export declare enum LogLevel {

}
export interface CreateLocalTracksOptions {
/**
* creates audio track with getUserMedia automatically on connect.
* default false
*/
audio?: boolean | CreateAudioTrackOptions;
/**
* creates video track with getUserMedia automatically on connect.
* default false
*/
video?: boolean | CreateVideoTrackOptions;
}
export interface CreateLocalTrackOptions {
/** name of track */
name?: string;
/**
* A ConstrainDOMString object specifying a device ID or an array of device
* IDs which are acceptable and/or required.
*/
deviceId?: ConstrainDOMString;
}
export interface CreateVideoTrackOptions extends CreateLocalTrackOptions {
/**
* a facing or an array of facings which are acceptable and/or required.
* [valid options](https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints/facingMode)
*/
facingMode?: ConstrainDOMString;
resolution?: VideoResolutionConstraint;
}
export interface CreateAudioTrackOptions extends CreateLocalTrackOptions {
/**
* specifies whether automatic gain control is preferred and/or required
*/
autoGainControl?: ConstrainBoolean;
/**
* the channel count or range of channel counts which are acceptable and/or required
*/
channelCount?: ConstrainULong;
/**
* whether or not echo cancellation is preferred and/or required
*/
echoCancellation?: ConstrainBoolean;
/**
* the latency or range of latencies which are acceptable and/or required.
*/
latency?: ConstrainDouble;
/**
* whether noise suppression is preferred and/or required.
*/
noiseSuppression?: ConstrainBoolean;
/**
* the sample rate or range of sample rates which are acceptable and/or required.
*/
sampleRate?: ConstrainULong;
/**
* sample size or range of sample sizes which are acceptable and/or required.
*/
sampleSize?: ConstrainULong;
}
/**
* example
*
* ```typescript
* {
* width: { ideal: 960 },
* height: { ideal: 540 },
* frameRate: {
* ideal: 30,
* max: 60,
* },
* }
* ```
*/
export interface VideoResolutionConstraint {
width: ConstrainULong;
height: ConstrainULong;
frameRate?: ConstrainDouble;
}
export interface VideoEncoding {
maxBitrate: number;
maxFramerate: number;
}
export declare class VideoPreset {
encoding: VideoEncoding;
width: number;
height: number;
constructor(width: number, height: number, maxBitrate: number, maxFramerate: number);
get resolution(): VideoResolutionConstraint;
}
export interface AudioPreset {
maxBitrate: number;
}
export declare type VideoCodec = 'vp8' | 'h264';
export declare namespace AudioPresets {
const telephone: AudioPreset;
const speech: AudioPreset;
const music: AudioPreset;
}
/**
* Sane presets for video resolution/encoding
*/
export declare const VideoPresets: {
qvga: VideoPreset;
vga: VideoPreset;
qhd: VideoPreset;
hd: VideoPreset;
fhd: VideoPreset;
};
/**
* Four by three presets
*/
export declare const VideoPresets43: {
qvga: VideoPreset;
vga: VideoPreset;
qhd: VideoPreset;
hd: VideoPreset;
fhd: VideoPreset;
};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VideoPresets43 = exports.VideoPresets = exports.AudioPresets = exports.VideoPreset = exports.LogLevel = void 0;
exports.LogLevel = void 0;
var LogLevel;

@@ -13,54 +13,2 @@ (function (LogLevel) {

})(LogLevel = exports.LogLevel || (exports.LogLevel = {}));
class VideoPreset {
constructor(width, height, maxBitrate, maxFramerate) {
this.width = width;
this.height = height;
this.encoding = {
maxBitrate,
maxFramerate,
};
}
get resolution() {
return {
width: { ideal: this.width },
height: { ideal: this.height },
frameRate: {
ideal: this.encoding.maxFramerate,
},
};
}
}
exports.VideoPreset = VideoPreset;
var AudioPresets;
(function (AudioPresets) {
AudioPresets.telephone = {
maxBitrate: 12000,
};
AudioPresets.speech = {
maxBitrate: 20000,
};
AudioPresets.music = {
maxBitrate: 32000,
};
})(AudioPresets = exports.AudioPresets || (exports.AudioPresets = {}));
/**
* Sane presets for video resolution/encoding
*/
exports.VideoPresets = {
qvga: new VideoPreset(320, 180, 125000, 15),
vga: new VideoPreset(640, 360, 400000, 30),
qhd: new VideoPreset(960, 540, 800000, 30),
hd: new VideoPreset(1280, 720, 2500000, 30),
fhd: new VideoPreset(1920, 1080, 4000000, 30),
};
/**
* Four by three presets
*/
exports.VideoPresets43 = {
qvga: new VideoPreset(240, 180, 100000, 15),
vga: new VideoPreset(480, 360, 320000, 30),
qhd: new VideoPreset(720, 540, 640000, 30),
hd: new VideoPreset(960, 720, 2000000, 30),
fhd: new VideoPreset(1440, 1080, 3200000, 30),
};
//# sourceMappingURL=options.js.map

@@ -108,3 +108,4 @@ /**

*/
DataReceived = "dataReceived"
DataReceived = "dataReceived",
AudioPlaybackStatusChanged = "audioPlaybackChanged"
}

@@ -141,3 +142,7 @@ export declare enum ParticipantEvent {

/** @internal */
UpdateSubscription = "updateSubscription"
UpdateSubscription = "updateSubscription",
/** @internal */
AudioPlaybackStarted = "audioPlaybackStarted",
/** @internal */
AudioPlaybackFailed = "audioPlaybackFailed"
}

@@ -113,2 +113,3 @@ "use strict";

RoomEvent["DataReceived"] = "dataReceived";
RoomEvent["AudioPlaybackStatusChanged"] = "audioPlaybackChanged";
})(RoomEvent = exports.RoomEvent || (exports.RoomEvent = {}));

@@ -149,3 +150,7 @@ var ParticipantEvent;

TrackEvent["UpdateSubscription"] = "updateSubscription";
/** @internal */
TrackEvent["AudioPlaybackStarted"] = "audioPlaybackStarted";
/** @internal */
TrackEvent["AudioPlaybackFailed"] = "audioPlaybackFailed";
})(TrackEvent = exports.TrackEvent || (exports.TrackEvent = {}));
//# sourceMappingURL=events.js.map

@@ -16,3 +16,2 @@ "use strict";

const loglevel_1 = __importDefault(require("loglevel"));
const options_1 = require("../../options");
const livekit_rtc_1 = require("../../proto/livekit_rtc");

@@ -24,2 +23,3 @@ const errors_1 = require("../errors");

const LocalVideoTrack_1 = __importDefault(require("../track/LocalVideoTrack"));
const options_1 = require("../track/options");
const Track_1 = require("../track/Track");

@@ -171,3 +171,8 @@ const Participant_1 = __importDefault(require("./Participant"));

if (sender.track === mediaStreamTrack) {
(_a = this.engine.publisher) === null || _a === void 0 ? void 0 : _a.pc.removeTrack(sender);
try {
(_a = this.engine.publisher) === null || _a === void 0 ? void 0 : _a.pc.removeTrack(sender);
}
catch (e) {
loglevel_1.default.warn('unpublishTrack', 'failed to remove track', e);
}
}

@@ -174,0 +179,0 @@ });

@@ -38,2 +38,3 @@ /// <reference types="node" />

localParticipant: LocalParticipant;
private audioEnabled;
/** @internal */

@@ -44,2 +45,14 @@ constructor(client: SignalClient, config?: RTCConfiguration);

/**
* Browsers have different policies regarding audio playback. Most requiring
* some form of user interaction (click/tap/etc).
* In those cases, audio will be silent until a click/tap triggering one of the following
* - `startAudio`
* - `getUserMedia`
*/
startAudio(): Promise<void>;
/**
* Returns true if audio playback is enabled
*/
get canPlaybackAudio(): boolean;
/**
* disconnects the room, emits [[RoomEvent.Disconnected]]

@@ -54,2 +67,4 @@ */

private handleDataPacket;
private handleAudioPlaybackStarted;
private handleAudioPlaybackFailed;
private getOrCreateParticipant;

@@ -56,0 +71,0 @@ /** @internal */

@@ -24,2 +24,3 @@ "use strict";

const RTCEngine_1 = __importDefault(require("./RTCEngine"));
const Track_1 = require("./track/Track");
const utils_1 = require("./utils");

@@ -50,2 +51,3 @@ var RoomState;

this.activeSpeakers = [];
this.audioEnabled = true;
/** @internal */

@@ -107,2 +109,3 @@ this.connect = (url, token, opts) => __awaiter(this, void 0, void 0, function* () {

};
// updates are sent only when there's a change to speaker ordering
this.handleSpeakerUpdate = (speakers) => {

@@ -150,2 +153,17 @@ const activeSpeakers = [];

};
this.handleAudioPlaybackStarted = () => {
if (this.canPlaybackAudio) {
return;
}
this.audioEnabled = true;
this.emit(events_2.RoomEvent.AudioPlaybackStatusChanged, true);
};
this.handleAudioPlaybackFailed = (e) => {
loglevel_1.default.warn('could not playback audio', e);
if (!this.canPlaybackAudio) {
return;
}
this.audioEnabled = false;
this.emit(events_2.RoomEvent.AudioPlaybackStatusChanged, false);
};
this.participants = new Map();

@@ -173,2 +191,37 @@ this.engine = new RTCEngine_1.default(client, config);

}
/**
* Browsers have different policies regarding audio playback. Most requiring
* some form of user interaction (click/tap/etc).
* In those cases, audio will be silent until a click/tap triggering one of the following
* - `startAudio`
* - `getUserMedia`
*/
startAudio() {
return __awaiter(this, void 0, void 0, function* () {
const elements = [];
this.participants.forEach((p) => {
p.audioTracks.forEach((t) => {
if (t.track) {
t.track.attachedElements.forEach((e) => {
elements.push(e);
});
}
});
});
try {
yield Promise.all(elements.map((e) => e.play()));
this.handleAudioPlaybackStarted();
}
catch (err) {
this.handleAudioPlaybackFailed(err);
throw err;
}
});
}
/**
* Returns true if audio playback is enabled
*/
get canPlaybackAudio() {
return this.audioEnabled;
}
onTrackAdded(mediaTrack, stream, receiver) {

@@ -258,2 +311,7 @@ const parts = utils_1.unpackStreamId(stream.id);

participant.on(events_2.ParticipantEvent.TrackSubscribed, (track, publication) => {
// monitor playback status
if (track.kind === Track_1.Track.Kind.Audio) {
track.on(events_2.TrackEvent.AudioPlaybackStarted, this.handleAudioPlaybackStarted);
track.on(events_2.TrackEvent.AudioPlaybackFailed, this.handleAudioPlaybackFailed);
}
this.emit(events_2.RoomEvent.TrackSubscribed, track, publication, participant);

@@ -260,0 +318,0 @@ });

@@ -127,3 +127,8 @@ "use strict";

var _a;
(_a = this.publisher) === null || _a === void 0 ? void 0 : _a.pc.removeTrack(sender);
try {
(_a = this.publisher) === null || _a === void 0 ? void 0 : _a.pc.removeTrack(sender);
}
catch (e) {
loglevel_1.default.warn('could not removeTrack', e);
}
});

@@ -130,0 +135,0 @@ this.publisher.close();

import LocalTrack from './LocalTrack';
import { CreateAudioTrackOptions } from './options';
export default class LocalAudioTrack extends LocalTrack {
sender?: RTCRtpSender;
protected constraints: MediaTrackConstraints;
constructor(mediaTrack: MediaStreamTrack, name?: string, constraints?: MediaTrackConstraints);
restartTrack(options?: CreateAudioTrackOptions): Promise<void>;
}
"use strict";
// TypeScript-emitted "__awaiter" helper: drives the generator produced for an
// `async` function, wrapping each awaited value in a Promise so that `await`
// semantics work when targeting pre-ES2017 runtimes. Reuses an already
// installed helper of the same name if one exists on `this`.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Normalize a yielded value to an instance of the Promise implementation P.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with the awaited result; a throw rejects the chain.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Either settle with the generator's final value, or chain the next step.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -10,7 +19,18 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

constructor(mediaTrack, name, constraints) {
super(mediaTrack, Track_1.Track.Kind.Audio, name);
this.constraints = constraints || {};
super(mediaTrack, Track_1.Track.Kind.Audio, name, constraints);
}
restartTrack(options) {
return __awaiter(this, void 0, void 0, function* () {
let constraints;
if (options) {
const streamConstraints = LocalTrack_1.default.constraintsForOptions({ audio: options });
if (typeof streamConstraints.audio !== 'boolean') {
constraints = streamConstraints.audio;
}
}
yield this.restart(constraints);
});
}
}
exports.default = LocalAudioTrack;
//# sourceMappingURL=LocalAudioTrack.js.map

@@ -0,13 +1,15 @@

import { CreateLocalTracksOptions } from './options';
import { Track } from './Track';
export default class LocalTrack extends Track {
protected constraints: MediaTrackConstraints;
/** @internal */
sender?: RTCRtpSender;
constructor(mediaTrack: MediaStreamTrack, kind: Track.Kind, name?: string, constraints?: MediaTrackConstraints);
protected constraints: MediaTrackConstraints;
protected constructor(mediaTrack: MediaStreamTrack, kind: Track.Kind, name?: string, constraints?: MediaTrackConstraints);
get id(): string;
get dimensions(): Track.Dimensions | undefined;
static constraintsForOptions(options: CreateLocalTracksOptions): MediaStreamConstraints;
mute(): LocalTrack;
unmute(): LocalTrack;
restart(constraints?: MediaTrackConstraints): Promise<LocalTrack>;
protected restart(constraints?: MediaTrackConstraints): Promise<LocalTrack>;
protected setTrackMuted(muted: boolean): void;
}

@@ -18,2 +18,3 @@ "use strict";

const events_1 = require("../events");
const options_1 = require("./options");
const Track_1 = require("./Track");

@@ -23,3 +24,4 @@ class LocalTrack extends Track_1.Track {

super(mediaTrack, kind, name);
this.constraints = constraints || {};
this.constraints = constraints !== null && constraints !== void 0 ? constraints : mediaTrack.getConstraints();
loglevel_1.default.debug('track created, constraints', this.constraints);
}

@@ -42,2 +44,22 @@ get id() {

}
// Translates LiveKit track-creation options into a constraints object for
// navigator.mediaDevices.getUserMedia().
// - options.video === false disables video entirely; any other value merges
//   the qhd-preset resolution defaults with caller-provided video options,
//   letting an explicit `resolution` override both.
// - options.audio is passed through unchanged (boolean or constraint object).
static constraintsForOptions(options) {
const constraints = {};
// default video options
const videoOptions = Object.assign({}, options_1.VideoPresets.qhd.resolution);
if (typeof options.video === 'object' && options.video) {
// caller-provided video fields override the preset defaults...
Object.assign(videoOptions, options.video);
if (options.video.resolution) {
// ...and an explicit resolution overrides both
Object.assign(videoOptions, options.video.resolution);
}
}
if (options.video === false) {
constraints.video = false;
}
else {
// use defaults
constraints.video = videoOptions;
}
constraints.audio = options.audio;
return constraints;
}
mute() {

@@ -59,4 +81,3 @@ this.setTrackMuted(true);

}
// copy existing elements and detach
this.mediaStreamTrack.stop();
loglevel_1.default.debug('restarting track with constraints', constraints);
const streamConstraints = {

@@ -77,3 +98,7 @@ audio: false,

loglevel_1.default.info('re-acquired MediaStreamTrack');
this.constraints = constraints;
// detach and reattach
this.mediaStreamTrack.stop();
this.attachedElements.forEach((el) => {
Track_1.detachTrack(this.mediaStreamTrack, el);
});
newTrack.enabled = this.mediaStreamTrack.enabled;

@@ -85,2 +110,3 @@ yield this.sender.replaceTrack(newTrack);

});
this.constraints = constraints;
return this;

@@ -87,0 +113,0 @@ });

@@ -5,2 +5,3 @@ import { SignalClient } from '../../api/SignalClient';

import LocalTrack from './LocalTrack';
import { CreateVideoTrackOptions } from './options';
export default class LocalVideoTrack extends LocalTrack {

@@ -16,6 +17,9 @@ signalClient?: SignalClient;

stop(): void;
mute(): LocalTrack;
unmute(): LocalTrack;
getSenderStats(): Promise<VideoSenderStats[]>;
setPublishingQuality(maxQuality: VideoQuality): void;
restartTrack(options?: CreateVideoTrackOptions): Promise<void>;
private monitorSender;
private checkAndUpdateSimulcast;
}

@@ -20,4 +20,7 @@ "use strict";

const Track_1 = require("./Track");
// upgrade only if smooth sailing for 3 mins;
const MIN_UPGRADE_DELAY = 3 * 60 * 1000;
// upgrade delay for diff qualities
const QUALITY_UPGRADE_DELAY = {
h: 120 * 1000,
q: 60 * 1000,
};
// once it's disabled this number of times, it will be turned off for the rest

@@ -29,3 +32,3 @@ // of the session

constructor(mediaTrack, name, constraints) {
super(mediaTrack, Track_1.Track.Kind.Video, name);
super(mediaTrack, Track_1.Track.Kind.Video, name, constraints);
// keep track of times we had to disable a track

@@ -51,3 +54,2 @@ this.disableCount = {

});
this.constraints = constraints || {};
}

@@ -73,4 +75,14 @@ get isSimulcast() {

this.sender = undefined;
this.mediaStreamTrack.getConstraints();
super.stop();
}
mute() {
// also stop the track, so that camera indicator is turned off
this.mediaStreamTrack.stop();
return super.mute();
}
unmute() {
this.restartTrack();
return super.unmute();
}
getSenderStats() {

@@ -163,5 +175,17 @@ return __awaiter(this, void 0, void 0, function* () {

}
restartTrack(options) {
return __awaiter(this, void 0, void 0, function* () {
let constraints;
if (options) {
const streamConstraints = LocalTrack_1.default.constraintsForOptions({ video: options });
if (typeof streamConstraints.video !== 'boolean') {
constraints = streamConstraints.video;
}
}
yield this.restart(constraints);
});
}
checkAndUpdateSimulcast(statsMap) {
var _a, _b;
if (!this.sender) {
if (!this.sender || this.isMuted) {
return;

@@ -208,6 +232,7 @@ }

return;
if ((new Date()).getTime() - this.lastQualityChange < MIN_UPGRADE_DELAY) {
const nextQuality = currentQuality + 1;
const upgradeDelay = QUALITY_UPGRADE_DELAY[rid];
if (!upgradeDelay || (new Date()).getTime() - this.lastQualityChange < upgradeDelay) {
return;
}
const nextQuality = currentQuality + 1;
if (this.disableCount[nextQuality] >= MAX_QUALITY_ATTEMPTS) {

@@ -214,0 +239,0 @@ return;

@@ -1,2 +0,1 @@

import { VideoCodec, VideoEncoding } from '../../options';
/**

@@ -29,1 +28,119 @@ * Options when publishing tracks

}
export interface CreateLocalTracksOptions {
/**
* audio track options, true to create with defaults. false if audio shouldn't be created
* default true
*/
audio?: boolean | CreateAudioTrackOptions;
/**
* video track options, true to create with defaults. false if video shouldn't be created
* default true
*/
video?: boolean | CreateVideoTrackOptions;
}
export interface CreateLocalTrackOptions {
/** name of track */
name?: string;
/**
* A ConstrainDOMString object specifying a device ID or an array of device
* IDs which are acceptable and/or required.
*/
deviceId?: ConstrainDOMString;
}
export interface CreateVideoTrackOptions extends CreateLocalTrackOptions {
/**
* a facing or an array of facings which are acceptable and/or required.
* [valid options](https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints/facingMode)
*/
facingMode?: ConstrainDOMString;
resolution?: VideoResolutionConstraint;
}
export interface CreateAudioTrackOptions extends CreateLocalTrackOptions {
/**
* specifies whether automatic gain control is preferred and/or required
*/
autoGainControl?: ConstrainBoolean;
/**
* the channel count or range of channel counts which are acceptable and/or required
*/
channelCount?: ConstrainULong;
/**
* whether or not echo cancellation is preferred and/or required
*/
echoCancellation?: ConstrainBoolean;
/**
* the latency or range of latencies which are acceptable and/or required.
*/
latency?: ConstrainDouble;
/**
* whether noise suppression is preferred and/or required.
*/
noiseSuppression?: ConstrainBoolean;
/**
* the sample rate or range of sample rates which are acceptable and/or required.
*/
sampleRate?: ConstrainULong;
/**
* sample size or range of sample sizes which are acceptable and/or required.
*/
sampleSize?: ConstrainULong;
}
/**
* example
*
* ```typescript
* {
* width: { ideal: 960 },
* height: { ideal: 540 },
* frameRate: {
* ideal: 30,
* max: 60,
* },
* }
* ```
*/
export interface VideoResolutionConstraint {
width: ConstrainULong;
height: ConstrainULong;
frameRate?: ConstrainDouble;
}
export interface VideoEncoding {
maxBitrate: number;
maxFramerate: number;
}
export declare class VideoPreset {
encoding: VideoEncoding;
width: number;
height: number;
constructor(width: number, height: number, maxBitrate: number, maxFramerate: number);
get resolution(): VideoResolutionConstraint;
}
export interface AudioPreset {
maxBitrate: number;
}
export declare type VideoCodec = 'vp8' | 'h264';
export declare namespace AudioPresets {
const telephone: AudioPreset;
const speech: AudioPreset;
const music: AudioPreset;
}
/**
* Sane presets for video resolution/encoding
*/
export declare const VideoPresets: {
qvga: VideoPreset;
vga: VideoPreset;
qhd: VideoPreset;
hd: VideoPreset;
fhd: VideoPreset;
};
/**
* Four by three presets
*/
export declare const VideoPresets43: {
qvga: VideoPreset;
vga: VideoPreset;
qhd: VideoPreset;
hd: VideoPreset;
fhd: VideoPreset;
};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VideoPresets43 = exports.VideoPresets = exports.AudioPresets = exports.VideoPreset = void 0;
// A named video capture preset: target pixel dimensions together with the
// encoder limits (max bitrate in bps, max framerate in fps) suggested for them.
class VideoPreset {
    constructor(width, height, maxBitrate, maxFramerate) {
        this.width = width;
        this.height = height;
        this.encoding = { maxBitrate, maxFramerate };
    }
    // Express this preset as getUserMedia-style "ideal" constraints.
    get resolution() {
        const { width, height, encoding } = this;
        return {
            width: { ideal: width },
            height: { ideal: height },
            frameRate: { ideal: encoding.maxFramerate },
        };
    }
}
exports.VideoPreset = VideoPreset;
// Recommended audio encoding presets, keyed by use case. Each entry caps the
// encoder bitrate in bits per second.
var AudioPresets;
(function (AudioPresets) {
// 12 kbps: telephone-quality speech
AudioPresets.telephone = {
maxBitrate: 12000,
};
// 20 kbps: standard speech
AudioPresets.speech = {
maxBitrate: 20000,
};
// 32 kbps: music / higher-fidelity audio
AudioPresets.music = {
maxBitrate: 32000,
};
})(AudioPresets = exports.AudioPresets || (exports.AudioPresets = {}));
/**
 * Sane presets for video resolution/encoding
 *
 * 16:9 presets; constructor args are (width, height, maxBitrate bps,
 * maxFramerate fps).
 */
exports.VideoPresets = {
qvga: new VideoPreset(320, 180, 125000, 15),
vga: new VideoPreset(640, 360, 400000, 30),
qhd: new VideoPreset(960, 540, 800000, 30),
hd: new VideoPreset(1280, 720, 2500000, 30),
fhd: new VideoPreset(1920, 1080, 4000000, 30),
};
/**
 * Four by three presets
 *
 * 4:3 variants of the presets above: same keys and heights, narrower widths,
 * with correspondingly lower bitrate caps.
 */
exports.VideoPresets43 = {
qvga: new VideoPreset(240, 180, 100000, 15),
vga: new VideoPreset(480, 360, 320000, 30),
qhd: new VideoPreset(720, 540, 640000, 30),
hd: new VideoPreset(960, 720, 2000000, 30),
fhd: new VideoPreset(1440, 1080, 3200000, 30),
};
//# sourceMappingURL=options.js.map

@@ -18,2 +18,11 @@ "use strict";

}
this.attachedElements.forEach((element) => {
// detach or attach
if (muted) {
Track_1.detachTrack(this.mediaStreamTrack, element);
}
else {
Track_1.attachToElement(this.mediaStreamTrack, element);
}
});
}

@@ -20,0 +29,0 @@ stop() {

@@ -20,5 +20,8 @@ /// <reference types="node" />

stop(): void;
private recycleElement;
}
/** @internal */
export declare function attachToElement(track: MediaStreamTrack, element: HTMLMediaElement): void;
/** @internal */
export declare function detachTrack(track: MediaStreamTrack, element: HTMLMediaElement): void;
export declare namespace Track {

@@ -25,0 +28,0 @@ enum Kind {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.attachToElement = exports.Track = void 0;
exports.detachTrack = exports.attachToElement = exports.Track = void 0;
const events_1 = require("events");
const livekit_models_1 = require("../../proto/livekit_models");
const events_2 = require("../events");
// keep old audio elements when detached, we would re-use them since on iOS
// Safari tracks which audio elements have been "blessed" by the user.
const recycledElements = [];
class Track extends events_1.EventEmitter {

@@ -21,12 +25,19 @@ constructor(mediaTrack, kind, name) {

if (!element) {
element = document.createElement(elementType);
element.autoplay = true;
if (element instanceof HTMLVideoElement) {
element.playsInline = true;
if (elementType === 'audio') {
recycledElements.forEach((e) => {
if (e.parentElement === null && !element) {
element = e;
}
});
if (element) {
// remove it from pool
recycledElements.splice(recycledElements.indexOf(element), 1);
}
}
if (!element) {
element = document.createElement(elementType);
}
}
else {
if (element instanceof HTMLVideoElement) {
element.playsInline = true;
}
if (element instanceof HTMLVideoElement) {
element.playsInline = true;
element.autoplay = true;

@@ -40,2 +51,12 @@ }

this.attachedElements.push(element);
if (element instanceof HTMLAudioElement) {
// manually play audio to detect audio playback status
element.play()
.then(() => {
this.emit(events_2.TrackEvent.AudioPlaybackStarted);
})
.catch((e) => {
this.emit(events_2.TrackEvent.AudioPlaybackFailed, e);
});
}
return element;

@@ -50,2 +71,3 @@ }

this.attachedElements.splice(idx, 1);
this.recycleElement(element);
}

@@ -58,2 +80,3 @@ return element;

detached.push(elm);
this.recycleElement(elm);
});

@@ -67,2 +90,17 @@ // remove all tracks

}
recycleElement(element) {
if (element instanceof HTMLAudioElement) {
// we only need to re-use a single element
let shouldCache = true;
element.pause();
recycledElements.forEach((e) => {
if (!e.parentElement) {
shouldCache = false;
}
});
if (shouldCache) {
recycledElements.push(element);
}
}
}
}

@@ -99,4 +137,6 @@ exports.Track = Track;

mediaStream.removeTrack(track);
element.srcObject = null;
}
}
exports.detachTrack = detachTrack;
(function (Track) {

@@ -103,0 +143,0 @@ let Kind;

@@ -23,2 +23,3 @@ /// <reference types="node" />

get isMuted(): boolean;
get isEnabled(): boolean;
get isSubscribed(): boolean;

@@ -25,0 +26,0 @@ /**

@@ -37,2 +37,5 @@ "use strict";

}
get isEnabled() {
return true;
}
get isSubscribed() {

@@ -39,0 +42,0 @@ return this.track !== undefined;

@@ -1,2 +0,2 @@

export declare const version = "0.10.1";
export declare const version = "0.11.0";
export declare const protocolVersion = 2;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.protocolVersion = exports.version = void 0;
exports.version = '0.10.1';
exports.version = '0.11.0';
exports.protocolVersion = 2;
//# sourceMappingURL=version.js.map
import {
connect,
createLocalVideoTrack,
connect, CreateVideoTrackOptions,
LocalAudioTrack,

@@ -25,3 +24,2 @@ LocalTrack,

connectToRoom: any;
toggleVideo: any;
shareScreen: any;

@@ -34,2 +32,4 @@ muteVideo: any;

currentRoom: any;
startAudio: any;
flipVideo: any;
}

@@ -206,3 +206,10 @@ }

.on(RoomEvent.Reconnecting, () => appendLog('Reconnecting to room'))
.on(RoomEvent.Reconnected, () => appendLog('Successfully reconnected!'));
.on(RoomEvent.Reconnected, () => appendLog('Successfully reconnected!'))
.on(RoomEvent.AudioPlaybackStatusChanged, () => {
if (room.canPlaybackAudio) {
$('start-audio-button')?.setAttribute('disabled', 'true');
} else {
$('start-audio-button')?.removeAttribute('disabled');
}
});

@@ -259,17 +266,2 @@ appendLog('room participants', room.participants.keys());

window.toggleVideo = async () => {
if (!currentRoom) return;
if (videoTrack) {
appendLog('turning video off');
currentRoom.localParticipant.unpublishTrack(videoTrack);
videoTrack = undefined;
const video = getMyVideo();
if (video) video.remove();
} else {
appendLog('turning video on');
videoTrack = await createLocalVideoTrack();
await publishLocalVideo(videoTrack);
}
};
window.enterText = () => {

@@ -328,2 +320,19 @@ const textField = <HTMLInputElement>$('entry');

window.startAudio = () => {
currentRoom.startAudio();
};
let isFacingForward = true;
window.flipVideo = () => {
if (!videoTrack) {
return;
}
isFacingForward = !isFacingForward;
const options: CreateVideoTrackOptions = {
resolution: VideoPresets.qhd.resolution,
facingMode: isFacingForward ? 'user' : 'environment',
};
videoTrack.restartTrack(options);
};
async function publishLocalVideo(track: LocalVideoTrack) {

@@ -344,2 +353,3 @@ await currentRoom.localParticipant.publishTrack(track);

'disconnect-room-button',
'flip-video-button',
];

@@ -346,0 +356,0 @@ const disconnectedSet = ['connect-button'];

{
"name": "livekit-client",
"version": "0.10.1",
"version": "0.11.0",
"description": "JavaScript/TypeScript client SDK for LiveKit",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

@@ -95,2 +95,4 @@ # JavaScript/TypeScript client SDK for LiveKit

When a video track is muted, the camera indicator will be turned off. When the video is unmuted, the same camera source and capture settings will be re-acquired.
```typescript

@@ -111,2 +113,42 @@ import { createLocalVideoTrack } from 'livekit-client';

### Audio playback
Browsers can be restrictive about whether audio may be played without user interaction. What each browser considers user interaction also differs (with Safari on iOS being the most restrictive). Some browsers consider clicking a button unrelated to audio as interaction; others require the audio element's `play` function to be triggered by an onclick event.
LiveKit will attempt to autoplay all audio tracks when you attach them to audio elements. However, if that fails, we'll notify you via `RoomEvent.AudioPlaybackStatusChanged`. `Room.canPlaybackAudio` will indicate if audio playback is permitted. (Note: LiveKit takes an optimistic approach, so it's possible for this value to change from `true` to `false` when we encounter a browser error.)
In cases where user interaction is required, LiveKit provides `Room.startAudio` to start audio playback. This function must be triggered by an onclick or ontap event handler. Within the same session, once audio playback succeeds, additional audio tracks can be played without further user interaction.
```typescript
room.on(RoomEvent.AudioPlaybackStatusChanged, () => {
if (!room.canPlaybackAudio) {
// UI is necessary.
...
button.onclick = () => {
// this function *must* be triggered in an click/tap handler.
room.startAudio().then(() => {
// successful, UI can be removed now
button.remove();
});
}
}
});
```
### Switching input devices
At any point after publishing, you can switch the input devices and other capture settings on both audio and video tracks. For example, switching between regular and selfie camera or changing microphone inputs. This is performed with `restartTrack` on the `LocalAudioTrack` or `LocalVideoTrack`.
```typescript
await room.localParticipant.publishTrack(videoTrack);
await room.localParticipant.publishTrack(audioTrack);
await videoTrack.restartTrack({
facingMode: 'environment',
});
await audioTrack.restartTrack({
deviceId: 'microphoneId',
});
```
### Configuring logging

@@ -113,0 +155,0 @@

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc