livekit-client
Comparing version 2.5.0 to 2.5.1
@@ -285,3 +285,7 @@ /**
*/
ActiveDeviceChanged = "activeDeviceChanged"
ActiveDeviceChanged = "activeDeviceChanged",
/**
* fired when the first remote participant has subscribed to the localParticipant's track
*/
LocalTrackSubscribed = "localTrackSubscribed"
}
@@ -446,3 +450,7 @@ export declare enum ParticipantEvent {
*/
AttributesChanged = "attributesChanged"
AttributesChanged = "attributesChanged",
/**
* fired on local participant only, when the first remote participant has subscribed to the track specified in the payload
*/
LocalTrackSubscribed = "localTrackSubscribed"
}
@@ -449,0 +457,0 @@ /** @internal */
@@ -1,2 +0,2 @@
import { ParticipantInfo, ParticipantPermission } from '@livekit/protocol';
import { Codec, ParticipantInfo, ParticipantPermission } from '@livekit/protocol';
import type { InternalRoomOptions } from '../../options';
@@ -30,2 +30,3 @@ import type RTCEngine from '../RTCEngine';
private pendingSignalRequests;
private enabledPublishVideoCodecs;
/** @internal */
@@ -156,2 +157,4 @@ constructor(sid: string, identity: string, engine: RTCEngine, options: InternalRoomOptions);
/** @internal */
setEnabledPublishCodecs(codecs: Codec[]): void;
/** @internal */
updateInfo(info: ParticipantInfo): boolean;
@@ -158,0 +161,0 @@ private updateTrackSubscriptionPermissions;
@@ -123,3 +123,4 @@ import { DataPacket_Kind, ParticipantInfo, ParticipantInfo_Kind as ParticipantKind, ParticipantPermission, ConnectionQuality as ProtoQuality, type SipDTMF, SubscriptionError } from '@livekit/protocol';
attributesChanged: (changedAttributes: Record<string, string>) => void;
localTrackSubscribed: (trackPublication: LocalTrackPublication) => void;
};
//# sourceMappingURL=Participant.d.ts.map
@@ -71,2 +71,6 @@ import { DataPacket_Kind, DisconnectReason, ParticipantPermission, SipDTMF, SubscriptionError, TranscriptionSegment as TranscriptionSegmentModel } from '@livekit/protocol';
/**
* map to store first point in time when a particular transcription segment was received
*/
private transcriptionReceivedTimes;
/**
* Creates a new Room, the primary construct for a LiveKit session.
@@ -268,3 +272,4 @@ * @param options
activeDeviceChanged: (kind: MediaDeviceKind, deviceId: string) => void;
localTrackSubscribed: (publication: LocalTrackPublication, participant: LocalParticipant) => void;
};
//# sourceMappingURL=Room.d.ts.map
@@ -7,7 +7,7 @@ /**
export default class CriticalTimers {
static setTimeout: (callback: (args: void) => void, ms?: number | undefined) => NodeJS.Timeout;
static setInterval: (callback: (args: void) => void, ms?: number | undefined) => NodeJS.Timeout;
static clearTimeout: (timeoutId: string | number | NodeJS.Timeout | undefined) => void;
static clearInterval: (intervalId: string | number | NodeJS.Timeout | undefined) => void;
static setTimeout: (...args: Parameters<typeof setTimeout>) => ReturnType<typeof setTimeout>;
static setInterval: (...args: Parameters<typeof setInterval>) => ReturnType<typeof setInterval>;
static clearTimeout: (...args: Parameters<typeof clearTimeout>) => ReturnType<typeof clearTimeout>;
static clearInterval: (...args: Parameters<typeof clearInterval>) => ReturnType<typeof clearInterval>;
}
//# sourceMappingURL=timers.d.ts.map
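The timer declarations above drop the hard-coded NodeJS.Timeout return type and instead mirror the platform's own setTimeout/setInterval signatures. A minimal sketch of what that typing means for code that supplies its own timer implementations (the mySetTimeout name is illustrative, not part of the SDK):

```ts
// Replacement timer with the same shape as the updated CriticalTimers.setTimeout static.
const mySetTimeout: (...args: Parameters<typeof setTimeout>) => ReturnType<typeof setTimeout> = (
  ...args
) => setTimeout(...args);

// The handle type now follows the host environment (number in browsers,
// NodeJS.Timeout in Node) instead of always being NodeJS.Timeout.
const handle = mySetTimeout(() => console.log('tick'), 10);
clearTimeout(handle);
```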
@@ -56,3 +56,3 @@ import type { Track } from './Track';
/**
* scalability mode for svc codecs, defaults to 'L3T3'.
* scalability mode for svc codecs, defaults to 'L3T3_KEY'.
* for svc codecs, simulcast is disabled.
@@ -59,0 +59,0 @@ */
@@ -45,3 +45,5 @@ export type SimulationOptions = {
final: boolean;
firstReceivedTime: number;
lastReceivedTime: number;
}
//# sourceMappingURL=types.d.ts.map
@@ -98,3 +98,3 @@ import { ClientInfo, Transcription as TranscriptionModel } from '@livekit/protocol';
export declare function toHttpUrl(url: string): string;
export declare function extractTranscriptionSegments(transcription: TranscriptionModel): TranscriptionSegment[];
export declare function extractTranscriptionSegments(transcription: TranscriptionModel, firstReceivedTimesMap: Map<string, number>): TranscriptionSegment[];
//# sourceMappingURL=utils.d.ts.map
@@ -285,3 +285,7 @@ /**
*/
ActiveDeviceChanged = "activeDeviceChanged"
ActiveDeviceChanged = "activeDeviceChanged",
/**
* fired when the first remote participant has subscribed to the localParticipant's track
*/
LocalTrackSubscribed = "localTrackSubscribed"
}
@@ -446,3 +450,7 @@ export declare enum ParticipantEvent {
*/
AttributesChanged = "attributesChanged"
AttributesChanged = "attributesChanged",
/**
* fired on local participant only, when the first remote participant has subscribed to the track specified in the payload
*/
LocalTrackSubscribed = "localTrackSubscribed"
}
@@ -449,0 +457,0 @@ /** @internal */
@@ -1,2 +0,2 @@
import { ParticipantInfo, ParticipantPermission } from '@livekit/protocol';
import { Codec, ParticipantInfo, ParticipantPermission } from '@livekit/protocol';
import type { InternalRoomOptions } from '../../options';
@@ -30,2 +30,3 @@ import type RTCEngine from '../RTCEngine';
private pendingSignalRequests;
private enabledPublishVideoCodecs;
/** @internal */
@@ -156,2 +157,4 @@ constructor(sid: string, identity: string, engine: RTCEngine, options: InternalRoomOptions);
/** @internal */
setEnabledPublishCodecs(codecs: Codec[]): void;
/** @internal */
updateInfo(info: ParticipantInfo): boolean;
@@ -158,0 +161,0 @@ private updateTrackSubscriptionPermissions;
@@ -124,3 +124,4 @@ import type { SipDTMF } from '@livekit/protocol';
attributesChanged: (changedAttributes: Record<string, string>) => void;
localTrackSubscribed: (trackPublication: LocalTrackPublication) => void;
};
//# sourceMappingURL=Participant.d.ts.map
@@ -71,2 +71,6 @@ import { DataPacket_Kind, DisconnectReason, ParticipantPermission, SipDTMF, SubscriptionError, TranscriptionSegment as TranscriptionSegmentModel } from '@livekit/protocol';
/**
* map to store first point in time when a particular transcription segment was received
*/
private transcriptionReceivedTimes;
/**
* Creates a new Room, the primary construct for a LiveKit session.
@@ -268,3 +272,4 @@ * @param options
activeDeviceChanged: (kind: MediaDeviceKind, deviceId: string) => void;
localTrackSubscribed: (publication: LocalTrackPublication, participant: LocalParticipant) => void;
};
//# sourceMappingURL=Room.d.ts.map
@@ -7,7 +7,7 @@ /**
export default class CriticalTimers {
static setTimeout: (callback: (args: void) => void, ms?: number | undefined) => NodeJS.Timeout;
static setInterval: (callback: (args: void) => void, ms?: number | undefined) => NodeJS.Timeout;
static clearTimeout: (timeoutId: string | number | NodeJS.Timeout | undefined) => void;
static clearInterval: (intervalId: string | number | NodeJS.Timeout | undefined) => void;
static setTimeout: (...args: Parameters<typeof setTimeout>) => ReturnType<typeof setTimeout>;
static setInterval: (...args: Parameters<typeof setInterval>) => ReturnType<typeof setInterval>;
static clearTimeout: (...args: Parameters<typeof clearTimeout>) => ReturnType<typeof clearTimeout>;
static clearInterval: (...args: Parameters<typeof clearInterval>) => ReturnType<typeof clearInterval>;
}
//# sourceMappingURL=timers.d.ts.map
@@ -56,3 +56,3 @@ import type { Track } from './Track';
/**
* scalability mode for svc codecs, defaults to 'L3T3'.
* scalability mode for svc codecs, defaults to 'L3T3_KEY'.
* for svc codecs, simulcast is disabled.
@@ -59,0 +59,0 @@ */
@@ -45,3 +45,5 @@ export type SimulationOptions = {
final: boolean;
firstReceivedTime: number;
lastReceivedTime: number;
}
//# sourceMappingURL=types.d.ts.map
@@ -98,3 +98,3 @@ import { ClientInfo, Transcription as TranscriptionModel } from '@livekit/protocol';
export declare function toHttpUrl(url: string): string;
export declare function extractTranscriptionSegments(transcription: TranscriptionModel): TranscriptionSegment[];
export declare function extractTranscriptionSegments(transcription: TranscriptionModel, firstReceivedTimesMap: Map<string, number>): TranscriptionSegment[];
//# sourceMappingURL=utils.d.ts.map
{
"name": "livekit-client",
"version": "2.5.0",
"version": "2.5.1",
"description": "JavaScript/TypeScript client SDK for LiveKit",
@@ -39,3 +39,3 @@ "main": "./dist/livekit-client.umd.js",
"dependencies": {
"@livekit/protocol": "1.20.0",
"@livekit/protocol": "1.20.1",
"events": "^3.3.0",
@@ -42,0 +42,0 @@ "loglevel": "^1.8.0",
@@ -78,2 +78,6 @@ <!--BEGIN_BANNER_IMAGE-->
RoomEvent,
VideoPresets,
Track,
LocalTrackPublication,
LocalParticipant
} from 'livekit-client';
@@ -80,0 +84,0 @@
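The README's import list grows to include LocalTrackPublication and LocalParticipant, which match the payload of the new localTrackSubscribed room event declared earlier in this diff. A usage sketch (the room setup is a placeholder; the event name and callback signature are taken from the Room.d.ts and events.ts hunks):

```ts
import { LocalParticipant, LocalTrackPublication, Room, RoomEvent } from 'livekit-client';

const room = new Room();

// Fired on the local participant once the first remote participant has
// subscribed to one of its published tracks.
room.on(
  RoomEvent.LocalTrackSubscribed,
  (publication: LocalTrackPublication, participant: LocalParticipant) => {
    console.log(`remote side subscribed to local track ${publication.trackSid}`);
  },
);
```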
@@ -326,2 +326,7 @@ /**
ActiveDeviceChanged = 'activeDeviceChanged',
/**
* fired when the first remote participant has subscribed to the localParticipant's track
*/
LocalTrackSubscribed = 'localTrackSubscribed',
}
@@ -513,2 +518,7 @@
AttributesChanged = 'attributesChanged',
/**
* fired on local participant only, when the first remote participant has subscribed to the track specified in the payload
*/
LocalTrackSubscribed = 'localTrackSubscribed',
}
@@ -515,0 +525,0 @@
import {
AddTrackRequest,
Codec,
DataPacket,
@@ -12,2 +13,3 @@ DataPacket_Kind,
SubscribedQualityUpdate,
TrackInfo,
TrackUnpublishedResponse,
@@ -114,2 +116,4 @@ UserPacket,
private enabledPublishVideoCodecs: Codec[] = [];
/** @internal */
@@ -780,2 +784,13 @@ constructor(sid: string, identity: string, engine: RTCEngine, options: InternalRoomOptions) {
}
if (this.enabledPublishVideoCodecs.length > 0) {
// fallback to a supported codec if it is not supported
if (
!this.enabledPublishVideoCodecs.some(
(c) => opts.videoCodec === mimeTypeToVideoCodecString(c.mime),
)
) {
opts.videoCodec = mimeTypeToVideoCodecString(this.enabledPublishVideoCodecs[0].mime);
}
}
const videoCodec = opts.videoCodec;
@@ -914,29 +929,83 @@
const ti = await this.engine.addTrack(req);
// server might not support the codec the client has requested, in that case, fallback
// to a supported codec
let primaryCodecMime: string | undefined;
ti.codecs.forEach((codec) => {
if (primaryCodecMime === undefined) {
primaryCodecMime = codec.mimeType;
const negotiate = async () => {
if (!this.engine.pcManager) {
throw new UnexpectedConnectionState('pcManager is not ready');
}
});
if (primaryCodecMime && track.kind === Track.Kind.Video) {
const updatedCodec = mimeTypeToVideoCodecString(primaryCodecMime);
if (updatedCodec !== videoCodec) {
this.log.debug('falling back to server selected codec', {
...this.logContext,
...getLogContextFromTrack(track),
codec: updatedCodec,
});
opts.videoCodec = updatedCodec;
// recompute encodings since bitrates/etc could have changed
encodings = computeVideoEncodings(
track.source === Track.Source.ScreenShare,
req.width,
req.height,
opts,
);
track.sender = await this.engine.createSender(track, opts, encodings);
if (track instanceof LocalVideoTrack) {
opts.degradationPreference ??= getDefaultDegradationPreference(track);
track.setDegradationPreference(opts.degradationPreference);
}
if (encodings) {
if (isFireFox() && track.kind === Track.Kind.Audio) {
/* Refer to RFC https://datatracker.ietf.org/doc/html/rfc7587#section-6.1,
livekit-server uses maxaveragebitrate=510000 in the answer sdp to permit client to
publish high quality audio track. But firefox always uses this value as the actual
bitrates, causing the audio bitrates to rise to 510Kbps in any stereo case unexpectedly.
So the client need to modify maxaverragebitrates in answer sdp to user provided value to
fix the issue.
*/
let trackTransceiver: RTCRtpTransceiver | undefined = undefined;
for (const transceiver of this.engine.pcManager.publisher.getTransceivers()) {
if (transceiver.sender === track.sender) {
trackTransceiver = transceiver;
break;
}
}
if (trackTransceiver) {
this.engine.pcManager.publisher.setTrackCodecBitrate({
transceiver: trackTransceiver,
codec: 'opus',
maxbr: encodings[0]?.maxBitrate ? encodings[0].maxBitrate / 1000 : 0,
});
}
} else if (track.codec && isSVCCodec(track.codec) && encodings[0]?.maxBitrate) {
this.engine.pcManager.publisher.setTrackCodecBitrate({
cid: req.cid,
codec: track.codec,
maxbr: encodings[0].maxBitrate / 1000,
});
}
}
await this.engine.negotiate();
};
let ti: TrackInfo;
if (this.enabledPublishVideoCodecs.length > 0) {
const rets = await Promise.all([this.engine.addTrack(req), negotiate()]);
ti = rets[0];
} else {
ti = await this.engine.addTrack(req);
// server might not support the codec the client has requested, in that case, fallback
// to a supported codec
let primaryCodecMime: string | undefined;
ti.codecs.forEach((codec) => {
if (primaryCodecMime === undefined) {
primaryCodecMime = codec.mimeType;
}
});
if (primaryCodecMime && track.kind === Track.Kind.Video) {
const updatedCodec = mimeTypeToVideoCodecString(primaryCodecMime);
if (updatedCodec !== videoCodec) {
this.log.debug('falling back to server selected codec', {
...this.logContext,
...getLogContextFromTrack(track),
codec: updatedCodec,
});
opts.videoCodec = updatedCodec;
// recompute encodings since bitrates/etc could have changed
encodings = computeVideoEncodings(
track.source === Track.Source.ScreenShare,
req.width,
req.height,
opts,
);
}
}
await negotiate();
}
@@ -952,5 +1021,2 @@
if (!this.engine.pcManager) {
throw new UnexpectedConnectionState('pcManager is not ready');
}
this.log.debug(`publishing ${track.kind} with encodings`, {
@@ -962,44 +1028,3 @@ ...this.logContext,
track.sender = await this.engine.createSender(track, opts, encodings);
if (track instanceof LocalVideoTrack) {
opts.degradationPreference ??= getDefaultDegradationPreference(track);
track.setDegradationPreference(opts.degradationPreference);
}
if (encodings) {
if (isFireFox() && track.kind === Track.Kind.Audio) {
/* Refer to RFC https://datatracker.ietf.org/doc/html/rfc7587#section-6.1,
livekit-server uses maxaveragebitrate=510000 in the answer sdp to permit client to
publish high quality audio track. But firefox always uses this value as the actual
bitrates, causing the audio bitrates to rise to 510Kbps in any stereo case unexpectedly.
So the client need to modify maxaverragebitrates in answer sdp to user provided value to
fix the issue.
*/
let trackTransceiver: RTCRtpTransceiver | undefined = undefined;
for (const transceiver of this.engine.pcManager.publisher.getTransceivers()) {
if (transceiver.sender === track.sender) {
trackTransceiver = transceiver;
break;
}
}
if (trackTransceiver) {
this.engine.pcManager.publisher.setTrackCodecBitrate({
transceiver: trackTransceiver,
codec: 'opus',
maxbr: encodings[0]?.maxBitrate ? encodings[0].maxBitrate / 1000 : 0,
});
}
} else if (track.codec && isSVCCodec(track.codec) && encodings[0]?.maxBitrate) {
this.engine.pcManager.publisher.setTrackCodecBitrate({
cid: req.cid,
codec: track.codec,
maxbr: encodings[0].maxBitrate / 1000,
});
}
}
await this.engine.negotiate();
if (track instanceof LocalVideoTrack) {
track.startMonitor(this.engine.client);
@@ -1090,11 +1115,15 @@ } else if (track instanceof LocalAudioTrack) {
const ti = await this.engine.addTrack(req);
const negotiate = async () => {
const transceiverInit: RTCRtpTransceiverInit = { direction: 'sendonly' };
if (encodings) {
transceiverInit.sendEncodings = encodings;
}
await this.engine.createSimulcastSender(track, simulcastTrack, opts, encodings);
const transceiverInit: RTCRtpTransceiverInit = { direction: 'sendonly' };
if (encodings) {
transceiverInit.sendEncodings = encodings;
}
await this.engine.createSimulcastSender(track, simulcastTrack, opts, encodings);
await this.engine.negotiate();
};
await this.engine.negotiate();
const rets = await Promise.all([this.engine.addTrack(req), negotiate()]);
const ti = rets[0];
this.log.debug(`published ${videoCodec} for track ${track.sid}`, {
@@ -1320,2 +1349,9 @@ ...this.logContext,
/** @internal */
setEnabledPublishCodecs(codecs: Codec[]) {
this.enabledPublishVideoCodecs = codecs.filter(
(c) => c.mime.split('/')[0].toLowerCase() === 'video',
);
}
/** @internal */
updateInfo(info: ParticipantInfo): boolean {
@@ -1322,0 +1358,0 @@ if (info.sid !== this.sid) {
@@ -389,2 +389,3 @@ import {
attributesChanged: (changedAttributes: Record<string, string>) => void;
localTrackSubscribed: (trackPublication: LocalTrackPublication) => void;
};
@@ -26,2 +26,4 @@ import { EventEmitter } from 'events';
const debounceInterval = 20;
export const PCEvents = {
@@ -232,3 +234,3 @@ NegotiationStarted: 'negotiationStarted',
}
}, 100);
}, debounceInterval);
@@ -235,0 +237,0 @@ async createAndSendOffer(options?: RTCOfferOptions) {
@@ -235,3 +235,3 @@ import {
// create offer
if (!this.subscriberPrimary) {
if (!this.subscriberPrimary || joinResponse.fastPublish) {
this.negotiate();
@@ -238,0 +238,0 @@ }
@@ -7,11 +7,20 @@ /**
export default class CriticalTimers {
// eslint-disable-next-line @typescript-eslint/no-implied-eval
static setTimeout = (...args: Parameters<typeof setTimeout>) => setTimeout(...args);
static setTimeout: (...args: Parameters<typeof setTimeout>) => ReturnType<typeof setTimeout> = (
...args: Parameters<typeof setTimeout>
// eslint-disable-next-line @typescript-eslint/no-implied-eval
) => setTimeout(...args);
// eslint-disable-next-line @typescript-eslint/no-implied-eval
static setInterval = (...args: Parameters<typeof setInterval>) => setInterval(...args);
static setInterval: (...args: Parameters<typeof setInterval>) => ReturnType<typeof setInterval> =
// eslint-disable-next-line @typescript-eslint/no-implied-eval
(...args: Parameters<typeof setInterval>) => setInterval(...args);
static clearTimeout = (...args: Parameters<typeof clearTimeout>) => clearTimeout(...args);
static clearTimeout: (
...args: Parameters<typeof clearTimeout>
) => ReturnType<typeof clearTimeout> = (...args: Parameters<typeof clearTimeout>) =>
clearTimeout(...args);
static clearInterval = (...args: Parameters<typeof clearInterval>) => clearInterval(...args);
static clearInterval: (
...args: Parameters<typeof clearInterval>
) => ReturnType<typeof clearInterval> = (...args: Parameters<typeof clearInterval>) =>
clearInterval(...args);
}
@@ -50,2 +50,62 @@ import { describe, expect, it } from 'vitest';
it('returns qualities starting from lowest for SVC', () => {
const layers = videoLayersFromEncodings(
1280,
720,
[
{
/** @ts-ignore */
scalabilityMode: 'L2T2',
},
],
true,
);
expect(layers).toHaveLength(2);
expect(layers[0].quality).toBe(VideoQuality.MEDIUM);
expect(layers[0].width).toBe(1280);
expect(layers[1].quality).toBe(VideoQuality.LOW);
expect(layers[1].width).toBe(640);
});
it('returns qualities starting from lowest for SVC (three layers)', () => {
const layers = videoLayersFromEncodings(
1280,
720,
[
{
/** @ts-ignore */
scalabilityMode: 'L3T3',
},
],
true,
);
expect(layers).toHaveLength(3);
expect(layers[0].quality).toBe(VideoQuality.HIGH);
expect(layers[0].width).toBe(1280);
expect(layers[1].quality).toBe(VideoQuality.MEDIUM);
expect(layers[1].width).toBe(640);
expect(layers[2].quality).toBe(VideoQuality.LOW);
expect(layers[2].width).toBe(320);
});
it('returns qualities starting from lowest for SVC (single layer)', () => {
const layers = videoLayersFromEncodings(
1280,
720,
[
{
/** @ts-ignore */
scalabilityMode: 'L1T2',
},
],
true,
);
expect(layers).toHaveLength(1);
expect(layers[0].quality).toBe(VideoQuality.LOW);
expect(layers[0].width).toBe(1280);
});
it('handles portrait', () => {
@@ -52,0 +112,0 @@ const layers = videoLayersFromEncodings(720, 1280, [
@@ -610,3 +610,3 @@ import {
new VideoLayer({
quality: VideoQuality.HIGH - i,
quality: Math.min(VideoQuality.HIGH, sm.spatial - 1) - i,
width: Math.ceil(width / resRatio ** i),
@@ -613,0 +613,0 @@ height: Math.ceil(height / resRatio ** i),
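The top layer's quality is now derived from the scalability mode's spatial layer count instead of always starting at VideoQuality.HIGH, which is what the new SVC tests above assert. A worked sketch of the mapping, assuming the protocol enum values LOW = 0, MEDIUM = 1, HIGH = 2:

```ts
// Standalone illustration of the new quality assignment (not the SDK's internal function).
enum VideoQuality { LOW = 0, MEDIUM = 1, HIGH = 2 }

const qualityForLayer = (spatialLayers: number, i: number) =>
  Math.min(VideoQuality.HIGH, spatialLayers - 1) - i;

console.log(qualityForLayer(2, 0)); // 1 (MEDIUM): top layer of L2T2
console.log(qualityForLayer(2, 1)); // 0 (LOW)
console.log(qualityForLayer(3, 0)); // 2 (HIGH): top layer of L3T3
console.log(qualityForLayer(1, 0)); // 0 (LOW): single-layer L1T2
```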
@@ -67,3 +67,3 @@ import type { Track } from './Track';
/**
* scalability mode for svc codecs, defaults to 'L3T3'.
* scalability mode for svc codecs, defaults to 'L3T3_KEY'.
* for svc codecs, simulcast is disabled.
@@ -70,0 +70,0 @@ */
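With the documented SVC default moving from 'L3T3' to 'L3T3_KEY', applications that relied on the previous behavior can still pin a mode explicitly. A minimal sketch, assuming the scalabilityMode and videoCodec fields of the publish options documented in the hunk above (connection setup omitted):

```ts
import { Room, createLocalVideoTrack } from 'livekit-client';

async function publishWithExplicitSvcMode(room: Room) {
  const track = await createLocalVideoTrack();
  await room.localParticipant.publishTrack(track, {
    videoCodec: 'vp9',       // an SVC codec, so simulcast is disabled
    scalabilityMode: 'L3T3', // pin the previous default instead of 'L3T3_KEY'
  });
}
```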
@@ -68,2 +68,4 @@ export type SimulationOptions = {
final: boolean;
firstReceivedTime: number;
lastReceivedTime: number;
}
@@ -535,4 +535,12 @@ import { ClientInfo, ClientInfo_SDK, Transcription as TranscriptionModel } from '@livekit/protocol';
transcription: TranscriptionModel,
firstReceivedTimesMap: Map<string, number>,
): TranscriptionSegment[] {
return transcription.segments.map(({ id, text, language, startTime, endTime, final }) => {
const firstReceivedTime = firstReceivedTimesMap.get(id) ?? Date.now();
const lastReceivedTime = Date.now();
if (final) {
firstReceivedTimesMap.delete(id);
} else {
firstReceivedTimesMap.set(id, firstReceivedTime);
}
return {
@@ -545,4 +553,6 @@ id,
language,
firstReceivedTime,
lastReceivedTime,
};
});
}
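Because segments now carry firstReceivedTime and lastReceivedTime, consumers can measure how long a segment kept arriving as interim revisions before it was finalized. A hedged sketch, assuming the RoomEvent.TranscriptionReceived handler delivers TranscriptionSegment[] (that event is part of the SDK but is not shown in this diff):

```ts
import { Room, RoomEvent, type TranscriptionSegment } from 'livekit-client';

const room = new Room();

// firstReceivedTime is stamped when a segment id is first seen;
// lastReceivedTime is refreshed on every revision of that segment.
room.on(RoomEvent.TranscriptionReceived, (segments: TranscriptionSegment[]) => {
  for (const segment of segments) {
    if (segment.final) {
      const interimMs = segment.lastReceivedTime - segment.firstReceivedTime;
      console.log(`segment ${segment.id} finalized after ${interimMs}ms of revisions`);
    }
  }
});
```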
+ Added @livekit/protocol@1.20.1 (transitive)
- Removed @livekit/protocol@1.20.0 (transitive)
Updated @livekit/protocol@1.20.1