@livekit/components-react
Comparing version 2.4.3 to 2.5.0
@@ -25,4 +25,5 @@ export * from './controls/ClearPinButton'; | ||
export * from './participant/ParticipantAudioTile'; | ||
export * from './participant/BarVisualizer'; | ||
export { ConnectionStateToast, type ConnectionStateToastProps } from './ConnectionStateToast'; | ||
export { type MessageFormatter, type ChatEntryProps, ChatEntry, formatChatMessageLinks, } from '../components/ChatEntry'; | ||
//# sourceMappingURL=index.d.ts.map |
import * as React from 'react'; | ||
import { type TrackReference } from '@livekit/components-core'; | ||
/** @public */ | ||
/** | ||
* @public | ||
* @deprecated Use BarVisualizer instead | ||
*/ | ||
export interface AudioVisualizerProps extends React.HTMLAttributes<SVGElement> { | ||
@@ -16,4 +19,5 @@ trackRef?: TrackReference; | ||
* @public | ||
* @deprecated Use BarVisualizer instead | ||
*/ | ||
export declare const AudioVisualizer: (props: AudioVisualizerProps & React.RefAttributes<SVGSVGElement>) => React.ReactNode; | ||
//# sourceMappingURL=AudioVisualizer.d.ts.map |
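The deprecation above points existing AudioVisualizer users to BarVisualizer. A minimal migration sketch, assuming BarVisualizer accepts a trackRef prop and that MicLevels is a hypothetical wrapper component:

```tsx
import { BarVisualizer, useTracks } from '@livekit/components-react';
import { Track } from 'livekit-client';

// Hypothetical component: visualizes the first microphone track in the room.
function MicLevels() {
  const micTrackRef = useTracks([Track.Source.Microphone])[0];
  // BarVisualizer is the suggested replacement for the deprecated AudioVisualizer.
  return <BarVisualizer trackRef={micTrackRef} />;
}
```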
@@ -18,2 +18,9 @@ import * as React from 'react'; | ||
}>): React.JSX.Element; | ||
/** | ||
* Only create a `TrackRefContext` if there is no `TrackRefContext` already. | ||
* @internal | ||
*/ | ||
export declare function TrackRefContextIfNeeded(props: React.PropsWithChildren<{ | ||
trackRef?: TrackReferenceOrPlaceholder; | ||
}>): React.JSX.Element; | ||
/** @public */ | ||
@@ -20,0 +27,0 @@ export interface ParticipantTileProps extends React.HTMLAttributes<HTMLDivElement> { |
@@ -19,3 +19,3 @@ import * as React from 'react'; | ||
*/ | ||
export declare const RoomName: (props: RoomNameProps & React.RefAttributes<HTMLSpanElement>) => React.ReactNode; | ||
export declare const RoomName: React.FC<RoomNameProps & React.RefAttributes<HTMLSpanElement>>; | ||
//# sourceMappingURL=RoomName.d.ts.map |
@@ -9,2 +9,3 @@ export {} from './chat-context'; | ||
export { FeatureFlags, useFeatureContext, LKFeatureContext } from './feature-context'; | ||
export { VoiceAssistantContext } from './voice-assistant-context'; | ||
//# sourceMappingURL=index.d.ts.map |
@@ -44,4 +44,5 @@ export { useAudioPlayback } from './useAudioPlayback'; | ||
export * from './useTrackTranscription'; | ||
export * from './useVoiceAssistant'; | ||
export * from './useParticipantAttributes'; | ||
export * from './useIsRecording'; | ||
//# sourceMappingURL=index.d.ts.map |
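The hook index now re-exports useParticipantAttributes and useIsRecording. A quick sketch of the recording flag, assuming useIsRecording falls back to the room from context when called without arguments:

```tsx
import { useIsRecording } from '@livekit/components-react';

// Hypothetical banner that mirrors the room's recording state.
function RecordingBanner() {
  const isRecording = useIsRecording();
  return isRecording ? <div className="lk-recording-banner">Recording in progress</div> : null;
}
```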
@@ -17,3 +17,3 @@ import type { Participant } from 'livekit-client'; | ||
export declare function useParticipantAttributes(props?: UseParticipantAttributesOptions): { | ||
attributes: Readonly<Record<string, string>>; | ||
attributes: Readonly<Record<string, string>> | undefined; | ||
}; | ||
@@ -20,0 +20,0 @@ /** |
@@ -17,3 +17,3 @@ import type { Participant } from 'livekit-client'; | ||
export declare function useParticipantInfo(props?: UseParticipantInfoOptions): { | ||
identity: string; | ||
identity: string | undefined; | ||
name: string | undefined; | ||
@@ -20,0 +20,0 @@ metadata: string | undefined; |
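Because identity (like name and metadata) can now be undefined when no participant is resolved, callers should provide a fallback. A small sketch with a hypothetical label component:

```tsx
import { useParticipantInfo } from '@livekit/components-react';

// ParticipantLabel tolerates a missing participant context.
function ParticipantLabel() {
  const { identity, name } = useParticipantInfo();
  // identity is string | undefined as of 2.5.0, so guard the value.
  return <span>{name ?? identity ?? 'unknown participant'}</span>;
}
```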
@@ -5,3 +5,3 @@ import { type TrackReferenceOrPlaceholder } from '@livekit/components-core'; | ||
*/ | ||
export declare function useTrackSyncTime({ publication }: TrackReferenceOrPlaceholder): { | ||
export declare function useTrackSyncTime(ref: TrackReferenceOrPlaceholder | undefined): { | ||
timestamp: number; | ||
@@ -8,0 +8,0 @@ rtpTimestamp: number | undefined; |
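useTrackSyncTime now tolerates an undefined track reference, so it can be called unconditionally with whatever is in context. A sketch, assuming the hook is re-exported for application use (it is mainly consumed internally by the transcription hooks):

```tsx
import { useMaybeTrackRefContext, useTrackSyncTime } from '@livekit/components-react';

// Hypothetical debug view: reads sync timing for the track in context, if any.
function SyncTimeDebug() {
  const trackRef = useMaybeTrackRefContext(); // may be undefined
  const { timestamp, rtpTimestamp } = useTrackSyncTime(trackRef);
  return <pre>{JSON.stringify({ timestamp, rtpTimestamp })}</pre>;
}
```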
@@ -19,3 +19,3 @@ import type { ToggleSource } from '@livekit/components-core'; | ||
export declare function useTrackToggle<T extends ToggleSource>({ source, onChange, initialState, captureOptions, publishOptions, onDeviceError, ...rest }: UseTrackToggleProps<T>): { | ||
toggle: (forceState?: boolean | undefined, captureOptions?: import("@livekit/components-core").CaptureOptionsBySource<T> | undefined) => Promise<void>; | ||
toggle: ((forceState?: boolean | undefined) => Promise<void>) | ((forceState?: boolean | undefined, captureOptions?: import("@livekit/components-core").CaptureOptionsBySource<T> | undefined) => Promise<boolean | undefined>); | ||
enabled: boolean; | ||
@@ -22,0 +22,0 @@ pending: boolean; |
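The returned toggle function is now typed as a union of the two call shapes, so its resolved value may be void or boolean | undefined. A sketch of a custom button built on the hook, ignoring the resolved value:

```tsx
import { useTrackToggle } from '@livekit/components-react';
import { Track } from 'livekit-client';

// Hypothetical microphone button; the promise result is intentionally discarded.
function MicToggle() {
  const { toggle, enabled, pending } = useTrackToggle({ source: Track.Source.Microphone });
  return (
    <button disabled={pending} onClick={() => void toggle()}>
      {enabled ? 'Mute' : 'Unmute'}
    </button>
  );
}
```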
import { type ReceivedTranscriptionSegment, type TrackReferenceOrPlaceholder } from '@livekit/components-core'; | ||
import type { TranscriptionSegment } from 'livekit-client'; | ||
/** | ||
@@ -11,2 +12,6 @@ * @alpha | ||
bufferSize?: number; | ||
/** | ||
* optional callback for retrieving newly incoming transcriptions only | ||
*/ | ||
onTranscription?: (newSegments: TranscriptionSegment[]) => void; | ||
} | ||
@@ -17,5 +22,5 @@ /** | ||
*/ | ||
export declare function useTrackTranscription(trackRef: TrackReferenceOrPlaceholder, options?: TrackTranscriptionOptions): { | ||
export declare function useTrackTranscription(trackRef: TrackReferenceOrPlaceholder | undefined, options?: TrackTranscriptionOptions): { | ||
segments: ReceivedTranscriptionSegment[]; | ||
}; | ||
//# sourceMappingURL=useTrackTranscription.d.ts.map |
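With 2.5.0 the hook accepts an undefined track reference and an onTranscription callback that receives only newly arrived segments. A sketch; the debug logging is illustrative:

```tsx
import { useMaybeTrackRefContext, useTrackTranscription } from '@livekit/components-react';

// Hypothetical caption list: renders accumulated segments and logs new ones as they arrive.
function Captions() {
  const trackRef = useMaybeTrackRefContext(); // may be undefined; the hook now tolerates that
  const { segments } = useTrackTranscription(trackRef, {
    bufferSize: 100,
    onTranscription: (newSegments) => console.debug('new transcription segments', newSegments),
  });
  return (
    <ul>
      {segments.map((segment) => (
        <li key={segment.id}>{segment.text}</li>
      ))}
    </ul>
  );
}
```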
@@ -35,2 +35,16 @@ import type { LocalAudioTrack, RemoteAudioTrack, AudioAnalyserOptions } from 'livekit-client'; | ||
export declare function useMultibandTrackVolume(trackOrTrackReference?: LocalAudioTrack | RemoteAudioTrack | TrackReferenceOrPlaceholder, options?: MultiBandTrackVolumeOptions): number[]; | ||
/** | ||
* @alpha | ||
*/ | ||
export interface AudioWaveformOptions { | ||
barCount?: number; | ||
volMultiplier?: number; | ||
updateInterval?: number; | ||
} | ||
/** | ||
* @alpha | ||
*/ | ||
export declare function useAudioWaveform(trackOrTrackReference?: LocalAudioTrack | RemoteAudioTrack | TrackReferenceOrPlaceholder, options?: AudioWaveformOptions): { | ||
bars: number[]; | ||
}; | ||
//# sourceMappingURL=useTrackVolume.d.ts.map |
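The new alpha hook useAudioWaveform returns averaged time-domain bars for an audio track. A rendering sketch; the inline styling and option values are illustrative:

```tsx
import { useAudioWaveform, useMaybeTrackRefContext } from '@livekit/components-react';

// Hypothetical waveform display built on the alpha hook.
function Waveform() {
  const trackRef = useMaybeTrackRefContext();
  const { bars } = useAudioWaveform(trackRef, { barCount: 64, updateInterval: 20 });
  return (
    <div style={{ display: 'flex', alignItems: 'flex-end', gap: 2, height: 48 }}>
      {bars.map((value, i) => (
        <div key={i} style={{ width: 2, height: Math.max(2, value * 48), background: 'currentColor' }} />
      ))}
    </div>
  );
}
```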
@@ -7,2 +7,3 @@ export { Chat, type ChatProps } from './Chat'; | ||
export { AudioConference, type AudioConferenceProps } from './AudioConference'; | ||
export * from './VoiceAssistantControlBar'; | ||
//# sourceMappingURL=index.d.ts.map |
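The prebuilt components now include VoiceAssistantControlBar. A sketch of how it might slot into a room, assuming it renders with default props alongside RoomAudioRenderer; serverUrl and token come from your own backend:

```tsx
import {
  LiveKitRoom,
  RoomAudioRenderer,
  VoiceAssistantControlBar,
} from '@livekit/components-react';

// Hypothetical voice-assistant page.
function AssistantPage({ serverUrl, token }: { serverUrl: string; token: string }) {
  return (
    <LiveKitRoom serverUrl={serverUrl} token={token} audio>
      <RoomAudioRenderer />
      <VoiceAssistantControlBar />
    </LiveKitRoom>
  );
}
```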
{ | ||
"name": "@livekit/components-react", | ||
"version": "2.4.3", | ||
"version": "2.5.0", | ||
"license": "Apache-2.0", | ||
@@ -39,3 +39,3 @@ "author": "LiveKit", | ||
"usehooks-ts": "3.1.0", | ||
"@livekit/components-core": "0.11.2" | ||
"@livekit/components-core": "0.11.3" | ||
}, | ||
@@ -66,3 +66,3 @@ "peerDependencies": { | ||
"vitest": "^2.0.0", | ||
"eslint-config-lk-custom": "0.1.1" | ||
"eslint-config-lk-custom": "0.1.2" | ||
}, | ||
@@ -69,0 +69,0 @@ "engines": { |
@@ -25,2 +25,3 @@ export * from './controls/ClearPinButton'; | ||
export * from './participant/ParticipantAudioTile'; | ||
export * from './participant/BarVisualizer'; | ||
export { ConnectionStateToast, type ConnectionStateToastProps } from './ConnectionStateToast'; | ||
@@ -27,0 +28,0 @@ export { |
@@ -27,1 +27,2 @@ export {} from './chat-context'; | ||
export { FeatureFlags, useFeatureContext, LKFeatureContext } from './feature-context'; | ||
export { VoiceAssistantContext } from './voice-assistant-context'; |
@@ -54,3 +54,4 @@ export { useAudioPlayback } from './useAudioPlayback'; | ||
export * from './useTrackTranscription'; | ||
export * from './useVoiceAssistant'; | ||
export * from './useParticipantAttributes'; | ||
export * from './useIsRecording'; |
@@ -20,2 +20,3 @@ import * as React from 'react'; | ||
const p = useEnsureParticipant(participant); | ||
const room = useEnsureRoom(options.room); | ||
@@ -26,5 +27,5 @@ | ||
observer, | ||
p instanceof LocalParticipant ? p.isE2EEEnabled : p.isEncrypted, | ||
p instanceof LocalParticipant ? p.isE2EEEnabled : !!p?.isEncrypted, | ||
); | ||
return isEncrypted; | ||
} |
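useIsEncrypted now guards against a missing participant with optional chaining; typical usage is unchanged. A sketch with a hypothetical lock indicator for the participant in context:

```tsx
import { useIsEncrypted } from '@livekit/components-react';

// Shows a lock icon when the context participant's tracks are end-to-end encrypted.
function EncryptionBadge() {
  const isEncrypted = useIsEncrypted();
  return isEncrypted ? <span title="end-to-end encrypted">🔒</span> : null;
}
```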
import { participantAttributesObserver } from '@livekit/components-core'; | ||
import type { Participant } from 'livekit-client'; | ||
import * as React from 'react'; | ||
import { useEnsureParticipant } from '../context'; | ||
import { useEnsureParticipant, useMaybeParticipantContext } from '../context'; | ||
import { useObservableState } from './internal'; | ||
@@ -23,9 +23,14 @@ | ||
export function useParticipantAttributes(props: UseParticipantAttributesOptions = {}) { | ||
const p = useEnsureParticipant(props.participant); | ||
const attributeObserver = React.useMemo(() => participantAttributesObserver(p), [p]); | ||
const { attributes } = useObservableState(attributeObserver, { | ||
attributes: p.attributes, | ||
const participantContext = useMaybeParticipantContext(); | ||
const p = props.participant ?? participantContext; | ||
const attributeObserver = React.useMemo( | ||
// weird typescript constraint | ||
() => (p ? participantAttributesObserver(p) : participantAttributesObserver(p)), | ||
[p], | ||
); | ||
const attributeState = useObservableState(attributeObserver, { | ||
attributes: p?.attributes, | ||
}); | ||
return { attributes }; | ||
return attributeState; | ||
} | ||
@@ -51,2 +56,5 @@ | ||
React.useEffect(() => { | ||
if (!p) { | ||
return; | ||
} | ||
const subscription = participantAttributesObserver(p).subscribe((val) => { | ||
@@ -53,0 +61,0 @@ if (val.changed[attributeKey] !== undefined) { |
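Since the hook no longer requires a participant and attributes can be undefined until one is resolved, consumers should handle the empty case; the second hunk applies the same guard to the single-key variant. A sketch, assuming useParticipantAttribute is exported and 'role' is a hypothetical attribute key:

```tsx
import { useParticipantAttribute, useParticipantAttributes } from '@livekit/components-react';

// Hypothetical debug view over the context participant's attributes.
function AttributeDump() {
  const { attributes } = useParticipantAttributes(); // attributes may be undefined in 2.5.0
  const role = useParticipantAttribute('role'); // hypothetical attribute key
  return (
    <div>
      <p>role: {role ?? 'n/a'}</p>
      <pre>{JSON.stringify(attributes ?? {}, null, 2)}</pre>
    </div>
  );
}
```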
import { participantInfoObserver } from '@livekit/components-core'; | ||
import type { Participant } from 'livekit-client'; | ||
import * as React from 'react'; | ||
import { useEnsureParticipant } from '../context'; | ||
import { useMaybeParticipantContext } from '../context'; | ||
import { useObservableState } from './internal'; | ||
@@ -23,8 +23,11 @@ | ||
export function useParticipantInfo(props: UseParticipantInfoOptions = {}) { | ||
const p = useEnsureParticipant(props.participant); | ||
let p = useMaybeParticipantContext(); | ||
if (props.participant) { | ||
p = props.participant; | ||
} | ||
const infoObserver = React.useMemo(() => participantInfoObserver(p), [p]); | ||
const { identity, name, metadata } = useObservableState(infoObserver, { | ||
name: p.name, | ||
identity: p.identity, | ||
metadata: p.metadata, | ||
name: p?.name, | ||
identity: p?.identity, | ||
metadata: p?.metadata, | ||
}); | ||
@@ -31,0 +34,0 @@ |
@@ -8,11 +8,11 @@ import * as React from 'react'; | ||
*/ | ||
export function useTrackSyncTime({ publication }: TrackReferenceOrPlaceholder) { | ||
export function useTrackSyncTime(ref: TrackReferenceOrPlaceholder | undefined) { | ||
const observable = React.useMemo( | ||
() => (publication?.track ? trackSyncTimeObserver(publication.track) : undefined), | ||
[publication?.track], | ||
() => (ref?.publication?.track ? trackSyncTimeObserver(ref?.publication.track) : undefined), | ||
[ref?.publication?.track], | ||
); | ||
return useObservableState(observable, { | ||
timestamp: Date.now(), | ||
rtpTimestamp: publication?.track?.rtpTimestamp, | ||
rtpTimestamp: ref?.publication?.track?.rtpTimestamp, | ||
}); | ||
} |
@@ -68,3 +68,3 @@ import type { ToggleSource } from '@livekit/components-core'; | ||
userInteractionRef.current = true; | ||
toggle().finally(() => (userInteractionRef.current = false)); | ||
toggle().catch(() => (userInteractionRef.current = false)); | ||
rest.onClick?.(evt); | ||
@@ -71,0 +71,0 @@ }, |
@@ -24,2 +24,6 @@ import { | ||
bufferSize?: number; | ||
/** | ||
* optional callback for retrieving newly incoming transcriptions only | ||
*/ | ||
onTranscription?: (newSegments: TranscriptionSegment[]) => void; | ||
/** amount of time (in ms) that the segment is considered `active` past its original segment duration, defaults to 2_000 */ | ||
@@ -39,3 +43,3 @@ // maxAge?: number; | ||
export function useTrackTranscription( | ||
trackRef: TrackReferenceOrPlaceholder, | ||
trackRef: TrackReferenceOrPlaceholder | undefined, | ||
options?: TrackTranscriptionOptions, | ||
@@ -51,2 +55,3 @@ ) { | ||
const handleSegmentMessage = (newSegments: TranscriptionSegment[]) => { | ||
opts.onTranscription?.(newSegments); | ||
setSegments((prevSegments) => | ||
@@ -62,3 +67,3 @@ dedupeSegments( | ||
React.useEffect(() => { | ||
if (!trackRef.publication) { | ||
if (!trackRef?.publication) { | ||
return; | ||
@@ -72,3 +77,3 @@ } | ||
}; | ||
}, [getTrackReferenceId(trackRef), handleSegmentMessage]); | ||
}, [trackRef && getTrackReferenceId(trackRef), handleSegmentMessage]); | ||
@@ -75,0 +80,0 @@ // React.useEffect(() => { |
@@ -99,3 +99,3 @@ import * as React from 'react'; | ||
hiPass: 600, | ||
updateInterval: 10, | ||
updateInterval: 32, | ||
analyserOptions: { fftSize: 2048 }, | ||
@@ -116,4 +116,7 @@ } as const satisfies MultiBandTrackVolumeOptions; | ||
: <LocalAudioTrack | RemoteAudioTrack | undefined>trackOrTrackReference?.publication?.track; | ||
const [frequencyBands, setFrequencyBands] = React.useState<Array<number>>([]); | ||
const opts = { ...multibandDefaults, ...options }; | ||
const [frequencyBands, setFrequencyBands] = React.useState<Array<number>>( | ||
new Array(opts.bands).fill(0), | ||
); | ||
React.useEffect(() => { | ||
@@ -136,4 +139,4 @@ if (!track || !track?.mediaStream) { | ||
const normalizedFrequencies = normalizeFrequencies(frequencies); | ||
const chunkSize = Math.ceil(normalizedFrequencies.length / opts.bands); | ||
const normalizedFrequencies = normalizeFrequencies(frequencies); // is this needed ? | ||
const chunkSize = Math.ceil(normalizedFrequencies.length / opts.bands); // we want logarithmic chunking here | ||
const chunks: Array<number> = []; | ||
@@ -160,1 +163,117 @@ for (let i = 0; i < opts.bands; i++) { | ||
} | ||
/** | ||
* @alpha | ||
*/ | ||
export interface AudioWaveformOptions { | ||
barCount?: number; | ||
volMultiplier?: number; | ||
updateInterval?: number; | ||
} | ||
const waveformDefaults = { | ||
barCount: 120, | ||
volMultiplier: 5, | ||
updateInterval: 20, | ||
} as const satisfies AudioWaveformOptions; | ||
/** | ||
* @alpha | ||
*/ | ||
export function useAudioWaveform( | ||
trackOrTrackReference?: LocalAudioTrack | RemoteAudioTrack | TrackReferenceOrPlaceholder, | ||
options: AudioWaveformOptions = {}, | ||
) { | ||
const track = | ||
trackOrTrackReference instanceof Track | ||
? trackOrTrackReference | ||
: <LocalAudioTrack | RemoteAudioTrack | undefined>trackOrTrackReference?.publication?.track; | ||
const opts = { ...waveformDefaults, ...options }; | ||
const aggregateWave = React.useRef(new Float32Array()); | ||
const timeRef = React.useRef(performance.now()); | ||
const updates = React.useRef(0); | ||
const [bars, setBars] = React.useState<number[]>([]); | ||
const onUpdate = React.useCallback((wave: Float32Array) => { | ||
setBars( | ||
Array.from( | ||
filterData(wave, opts.barCount).map((v) => Math.sqrt(v) * opts.volMultiplier), | ||
// wave.slice(0, opts.barCount).map((v) => sigmoid(v * opts.volMultiplier, 0.08, 0.2)), | ||
), | ||
); | ||
}, []); | ||
React.useEffect(() => { | ||
if (!track || !track?.mediaStream) { | ||
return; | ||
} | ||
const { analyser, cleanup } = createAudioAnalyser(track, { | ||
fftSize: getFFTSizeValue(opts.barCount), | ||
}); | ||
const bufferLength = getFFTSizeValue(opts.barCount); | ||
const dataArray = new Float32Array(bufferLength); | ||
const update = () => { | ||
updateWaveform = requestAnimationFrame(update); | ||
analyser.getFloatTimeDomainData(dataArray); | ||
aggregateWave.current.map((v, i) => v + dataArray[i]); | ||
updates.current += 1; | ||
if (performance.now() - timeRef.current >= opts.updateInterval) { | ||
const newData = dataArray.map((v) => v / updates.current); | ||
onUpdate(newData); | ||
timeRef.current = performance.now(); | ||
updates.current = 0; | ||
} | ||
}; | ||
let updateWaveform = requestAnimationFrame(update); | ||
return () => { | ||
cleanup(); | ||
cancelAnimationFrame(updateWaveform); | ||
}; | ||
}, [track, track?.mediaStream, JSON.stringify(options), onUpdate]); | ||
return { | ||
bars, | ||
}; | ||
} | ||
function getFFTSizeValue(x: number) { | ||
if (x < 32) return 32; | ||
else return pow2ceil(x); | ||
} | ||
// function sigmoid(x: number, k = 2, s = 0) { | ||
// return 1 / (1 + Math.exp(-(x - s) / k)); | ||
// } | ||
function pow2ceil(v: number) { | ||
let p = 2; | ||
while ((v >>= 1)) { | ||
p <<= 1; | ||
} | ||
return p; | ||
} | ||
function filterData(audioData: Float32Array, numSamples: number) { | ||
const blockSize = Math.floor(audioData.length / numSamples); // the number of samples in each subdivision | ||
const filteredData = new Float32Array(numSamples); | ||
for (let i = 0; i < numSamples; i++) { | ||
const blockStart = blockSize * i; // the location of the first sample in the block | ||
let sum = 0; | ||
for (let j = 0; j < blockSize; j++) { | ||
sum = sum + Math.abs(audioData[blockStart + j]); // find the sum of all the samples in the block | ||
} | ||
filteredData[i] = sum / blockSize; // divide the sum by the block size to get the average | ||
} | ||
return filteredData; | ||
} | ||
// function normalizeData(audioData: Float32Array) { | ||
// const multiplier = Math.pow(Math.max(...audioData), -1); | ||
// return audioData.map((n) => n * multiplier); | ||
// } |
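useMultibandTrackVolume now defaults updateInterval to 32 ms and seeds its state with zeroed bands, so the first render already has the expected array length. A sketch; the band count and styling are illustrative:

```tsx
import { useMaybeTrackRefContext, useMultibandTrackVolume } from '@livekit/components-react';

// Hypothetical level meter: one bar per frequency band.
function LevelMeter() {
  const trackRef = useMaybeTrackRefContext();
  const volumes = useMultibandTrackVolume(trackRef, { bands: 7 });
  return (
    <div style={{ display: 'flex', alignItems: 'flex-end', gap: 2 }}>
      {volumes.map((volume, i) => (
        <div key={i} style={{ width: 4, height: 4 + volume * 40, background: 'currentColor' }} />
      ))}
    </div>
  );
}
```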
@@ -7,1 +7,2 @@ export { Chat, type ChatProps } from './Chat'; | ||
export { AudioConference, type AudioConferenceProps } from './AudioConference'; | ||
export * from './VoiceAssistantControlBar'; |
Diffs omitted: 4 additional files were too big to display and 27 additional files are not supported by the diff viewer.
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added @livekit/components-core@0.11.3 (transitive)
- Removed @livekit/components-core@0.11.2 (transitive)