@livepeer/core-web
Comparing version 4.0.0-next.1 with 4.0.0-next.2
@@ -99,5 +99,2 @@ import { MediaControllerStore } from '@livepeer/core/media'; | ||
}; | ||
declare const omittedKeys: readonly ["__initialProps", "__device", "__controls", "__controlsFunctions"]; | ||
declare const sanitizeBroadcastState: (state: BroadcastState) => BroadcastCallbackState; | ||
type BroadcastCallbackState = Omit<BroadcastState, (typeof omittedKeys)[number]>; | ||
type BroadcastAriaText = { | ||
@@ -186,2 +183,2 @@ audioTrigger: string; | ||
export { type AudioDeviceId, type BroadcastAriaText, type BroadcastCallbackState, type BroadcastControlsState, type BroadcastDeviceInformation, type BroadcastState, type BroadcastStore, type InitialBroadcastProps, type MediaDeviceIds, type MediaDeviceInfoExtended, type VideoDeviceId, addBroadcastEventListeners, createBroadcastStore, getBroadcastDeviceInfo, sanitizeBroadcastState }; | ||
export { type AudioDeviceId, type BroadcastAriaText, type BroadcastControlsState, type BroadcastDeviceInformation, type BroadcastState, type BroadcastStore, type InitialBroadcastProps, type MediaDeviceIds, type MediaDeviceInfoExtended, type VideoDeviceId, addBroadcastEventListeners, createBroadcastStore, getBroadcastDeviceInfo }; |
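The export change above drops sanitizeBroadcastState and the BroadcastCallbackState type from the public surface. For consumers that relied on them, an equivalent can be rebuilt from the removed declarations; a minimal sketch, assuming omit behaves as used in the bundle below and assuming a "@livepeer/core-web/broadcast" import path for the state type (the path is not confirmed by this diff):

import { omit } from "@livepeer/core/utils";
import type { BroadcastState } from "@livepeer/core-web/broadcast"; // assumed entrypoint

const omittedKeys = ["__initialProps", "__device", "__controls", "__controlsFunctions"] as const;
type BroadcastCallbackState = Omit<BroadcastState, (typeof omittedKeys)[number]>;

// Same shape as the removed helper: strip internal keys before exposing state to callbacks.
const sanitizeBroadcastState = (state: BroadcastState): BroadcastCallbackState =>
  omit(state, ...omittedKeys) as BroadcastCallbackState;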
Object.defineProperty(exports, '__esModule', { value: true }); | ||
var core = require('@livepeer/core'); | ||
var middleware = require('zustand/middleware'); | ||
var vanilla = require('zustand/vanilla'); | ||
var errors = require('@livepeer/core/errors'); | ||
var utils = require('@livepeer/core/utils'); | ||
require('@livepeer/core/media'); | ||
require('@livepeer/core/utils'); | ||
require('@livepeer/core-web/hls'); | ||
require('@livepeer/core-web/webrtc'); | ||
var utils = require('@livepeer/core-web/utils'); | ||
require('hls.js'); | ||
const isClient = ()=>typeof window !== "undefined"; | ||
const isPictureInPictureSupported = (element)=>{ | ||
if (typeof document === "undefined") { | ||
return true; | ||
} | ||
const videoElement = element ?? document.createElement("video"); | ||
const isPiPDisabled = Boolean(videoElement.disablePictureInPicture); | ||
const { apiType } = getPictureInPictureMode(videoElement); | ||
return Boolean(apiType) && !isPiPDisabled; | ||
}; | ||
const getPictureInPictureMode = (element)=>{ | ||
if (isClient() && element instanceof HTMLVideoElement) { | ||
// missing support in older Safari is handled by the webkit fallback in the next statement | ||
if (document?.pictureInPictureEnabled) { | ||
return { | ||
apiType: "w3c", | ||
element | ||
}; | ||
} | ||
// fallback to trying webkit | ||
if (element?.webkitSupportsPresentationMode?.("picture-in-picture")) { | ||
return { | ||
apiType: "webkit", | ||
element | ||
}; | ||
} | ||
} | ||
return { | ||
apiType: null | ||
}; | ||
}; | ||
/** | ||
@@ -105,3 +72,3 @@ * Checks if WebRTC is supported and returns the appropriate RTCPeerConnection constructor. | ||
if (response.status === 406) { | ||
throw new Error(core.NOT_ACCEPTABLE_ERROR_MESSAGE); | ||
throw new Error(errors.NOT_ACCEPTABLE_ERROR_MESSAGE); | ||
} | ||
@@ -240,2 +207,33 @@ const errorMessage = await response.text(); | ||
const isPictureInPictureSupported = (element)=>{ | ||
if (typeof document === "undefined") { | ||
return true; | ||
} | ||
const videoElement = element ?? document.createElement("video"); | ||
const isPiPDisabled = Boolean(videoElement.disablePictureInPicture); | ||
const { apiType } = getPictureInPictureMode(videoElement); | ||
return Boolean(apiType) && !isPiPDisabled; | ||
}; | ||
const getPictureInPictureMode = (element)=>{ | ||
if (isClient() && element instanceof HTMLVideoElement) { | ||
// missing support in older Safari is handled by the webkit fallback in the next statement | ||
if (document?.pictureInPictureEnabled) { | ||
return { | ||
apiType: "w3c", | ||
element | ||
}; | ||
} | ||
// fallback to trying webkit | ||
if (element?.webkitSupportsPresentationMode?.("picture-in-picture")) { | ||
return { | ||
apiType: "webkit", | ||
element | ||
}; | ||
} | ||
} | ||
return { | ||
apiType: null | ||
}; | ||
}; | ||
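A minimal sketch of how the two picture-in-picture helpers above could be used, assuming they are in scope (they are internal to this bundle, not public exports); the element lookup and the follow-up calls are illustrative:

const video = document.querySelector("video");

if (video instanceof HTMLVideoElement && isPictureInPictureSupported(video)) {
  const { apiType } = getPictureInPictureMode(video);
  if (apiType === "w3c") {
    // Standard API (Chromium, Firefox, newer Safari)
    video.requestPictureInPicture().catch(() => undefined);
  } else if (apiType === "webkit") {
    // Older Safari: presentation-mode API detected by the webkit branch above
    (video as HTMLVideoElement & {
      webkitSetPresentationMode?: (mode: string) => void;
    }).webkitSetPresentationMode?.("picture-in-picture");
  }
}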
const VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE = "data-livepeer-video-whip-initialized"; | ||
@@ -333,2 +331,10 @@ /** | ||
}; | ||
const setMediaStreamTracksStatus = async ({ enableVideo, enableAudio, mediaStream })=>{ | ||
for (const videoTrack of mediaStream.getVideoTracks()){ | ||
videoTrack.enabled = enableVideo; | ||
} | ||
for (const audioTrack of mediaStream.getAudioTracks()){ | ||
audioTrack.enabled = enableAudio; | ||
} | ||
}; | ||
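A small usage sketch of setMediaStreamTracksStatus above, assuming the helper is in scope: toggling `enabled` (rather than stopping tracks) mutes output while keeping the devices attached.

async function muteCameraKeepMicrophone(stream: MediaStream) {
  // Video tracks go dark, audio keeps flowing; no permission re-prompt is needed to turn video back on.
  await setMediaStreamTracksStatus({
    mediaStream: stream,
    enableVideo: false,
    enableAudio: true,
  });
}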
const getUserMedia = (constraints)=>{ | ||
@@ -402,9 +408,2 @@ if (typeof navigator === "undefined") { | ||
}); | ||
const omittedKeys = [ | ||
"__initialProps", | ||
"__device", | ||
"__controls", | ||
"__controlsFunctions" | ||
]; | ||
const sanitizeBroadcastState = (state)=>core.omit(state, ...omittedKeys); | ||
const createBroadcastStore = ({ streamKey, device, storage, initialProps })=>{ | ||
@@ -500,6 +499,2 @@ const initialControls = { | ||
toggleDisplayMedia: ()=>set(({ __controls, mediaDeviceIds, aria })=>{ | ||
console.log({ | ||
prev: __controls.previousVideoInputDeviceId, | ||
mediaDeviceIds | ||
}); | ||
if (mediaDeviceIds.videoinput === "screen") { | ||
@@ -685,3 +680,2 @@ return { | ||
if (mounted) { | ||
console.log("mounted!"); | ||
// we use setState here so it's clear this isn't an external function | ||
@@ -693,8 +687,17 @@ store.setState({ | ||
}); | ||
// Subscribe to sync the error states | ||
const destroyMediaSyncError = mediaStore.subscribe(({ error })=>error, async (error)=>{ | ||
if (error?.type === "permissions") { | ||
// we use setState here so it's clear this isn't an external function | ||
store.setState((state)=>({ | ||
__controls: { | ||
...state.__controls, | ||
requestedVideoInputDeviceId: state.mediaDeviceIds.videoinput | ||
} | ||
})); | ||
} | ||
}); | ||
// Subscribe to media stream changes | ||
const destroyPictureInPictureSupportedMonitor = store.subscribe((state)=>state.mediaStream, async ()=>{ | ||
const isPipSupported = isPictureInPictureSupported(element); | ||
console.log({ | ||
isPipSupported | ||
}); | ||
if (!isPipSupported) { | ||
@@ -758,54 +761,69 @@ mediaStore.setState((state)=>({ | ||
requestedAudioDeviceId: state.__controls.requestedAudioInputDeviceId, | ||
requestedVideoDeviceId: state.__controls.requestedVideoInputDeviceId | ||
}), async ({ hydrated, mounted, audio, video, requestedAudioDeviceId, requestedVideoDeviceId })=>{ | ||
if (!mounted || !hydrated) { | ||
console.log("not mounted..."); | ||
return; | ||
requestedVideoDeviceId: state.__controls.requestedVideoInputDeviceId, | ||
previousMediaStream: state.mediaStream | ||
}), async ({ hydrated, mounted, audio, video, requestedAudioDeviceId, requestedVideoDeviceId, previousMediaStream })=>{ | ||
try { | ||
if (!mounted || !hydrated) { | ||
return; | ||
} | ||
if (!audio && !video) { | ||
utils.warn("Audio and video are both not enabled."); | ||
return; | ||
} | ||
const stream = await (requestedVideoDeviceId === "screen" ? getDisplayMedia({ | ||
// for now, only the microphone audio track is supported - we don't support multiple | ||
// discrete audio tracks | ||
audio: false, | ||
// we assume that if the user is requested to share screen, they want to enable video, | ||
// and we don't listen to the `video` enabled state | ||
video: true | ||
}) : getUserMedia({ | ||
audio: audio && requestedAudioDeviceId && requestedAudioDeviceId !== "default" ? { | ||
deviceId: { | ||
// we pass ideal here, so we don't get overconstrained errors | ||
ideal: requestedAudioDeviceId | ||
} | ||
} : Boolean(audio), | ||
video: video && requestedVideoDeviceId && requestedVideoDeviceId !== "default" ? { | ||
deviceId: { | ||
// we pass ideal here, so we don't get overconstrained errors | ||
ideal: requestedVideoDeviceId | ||
} | ||
} : Boolean(video) | ||
})); | ||
if (stream) { | ||
// we get the device ID from the MediaStream and update those | ||
const allAudioTracks = stream?.getAudioTracks() ?? []; | ||
const allVideoTracks = stream?.getVideoTracks() ?? []; | ||
const allAudioDeviceIds = allAudioTracks.map((track)=>track?.getSettings()?.deviceId); | ||
const allVideoDeviceIds = allVideoTracks.map((track)=>track?.getSettings()?.deviceId); | ||
const firstAudioDeviceId = allAudioDeviceIds?.[0] ?? null; | ||
const firstVideoDeviceId = allVideoDeviceIds?.[0] ?? null; | ||
store.getState().__controlsFunctions.setMediaDeviceIds({ | ||
...firstAudioDeviceId ? { | ||
audioinput: firstAudioDeviceId | ||
} : {}, | ||
...firstVideoDeviceId ? { | ||
videoinput: requestedVideoDeviceId === "screen" ? "screen" : firstVideoDeviceId | ||
} : {} | ||
}); | ||
// merge the new audio and/or video and the old media stream | ||
const mergedMediaStream = new MediaStream(); | ||
const mergedAudioTrack = allAudioTracks?.[0] ?? previousMediaStream?.getAudioTracks?.()?.[0] ?? null; | ||
const mergedVideoTrack = allVideoTracks?.[0] ?? previousMediaStream?.getVideoTracks?.()?.[0] ?? null; | ||
if (mergedAudioTrack) mergedMediaStream.addTrack(mergedAudioTrack); | ||
if (mergedVideoTrack) mergedMediaStream.addTrack(mergedVideoTrack); | ||
store.getState().__controlsFunctions.updateMediaStream(mergedMediaStream); | ||
} | ||
} catch (e) { | ||
if (e?.name === "NotAllowedError") { | ||
mediaStore.getState().__controlsFunctions.onError(new Error(errors.PERMISSIONS_ERROR_MESSAGE)); | ||
} else { | ||
utils.warn(e?.message); | ||
} | ||
} | ||
if (!audio && !video) { | ||
utils.warn("Audio and video are both not enabled."); | ||
return; | ||
} | ||
const stream = await (requestedVideoDeviceId === "screen" ? getDisplayMedia({ | ||
// for now, only the microphone audio track is supported - we don't support multiple | ||
// discrete audio tracks | ||
audio: false, | ||
// we assume that if the user is requested to share screen, they want to enable video, | ||
// and we don't listen to the `video` enabled state | ||
video: true | ||
}) : getUserMedia({ | ||
audio: audio && requestedAudioDeviceId && requestedAudioDeviceId !== "default" ? { | ||
deviceId: { | ||
// we pass ideal here, so we don't get overconstrained errors | ||
ideal: requestedAudioDeviceId | ||
} | ||
} : Boolean(audio), | ||
video: video && requestedVideoDeviceId && requestedVideoDeviceId !== "default" ? { | ||
deviceId: { | ||
// we pass ideal here, so we don't get overconstrained errors | ||
ideal: requestedVideoDeviceId | ||
} | ||
} : Boolean(video) | ||
})); | ||
if (stream) { | ||
// we get the device ID from the MediaStream and update those | ||
const allTracks = stream?.getTracks() ?? []; | ||
const allAudioDeviceIds = allTracks.filter((track)=>track.kind === "audio").map((track)=>track?.getSettings()?.deviceId); | ||
const allVideoDeviceIds = allTracks.filter((track)=>track.kind === "video").map((track)=>track?.getSettings()?.deviceId); | ||
const firstAudioDeviceId = allAudioDeviceIds?.[0] ?? null; | ||
const firstVideoDeviceId = allVideoDeviceIds?.[0] ?? null; | ||
store.getState().__controlsFunctions.setMediaDeviceIds({ | ||
...firstAudioDeviceId ? { | ||
audioinput: firstAudioDeviceId | ||
} : {}, | ||
...firstVideoDeviceId ? { | ||
videoinput: requestedVideoDeviceId === "screen" ? "screen" : firstVideoDeviceId | ||
} : {} | ||
}); | ||
store.getState().__controlsFunctions.updateMediaStream(stream); | ||
} | ||
}, { | ||
equalityFn: (a, b)=>a.hydrated === b.hydrated && a.mounted === b.mounted && a.requestedAudioDeviceId === b.requestedAudioDeviceId && a.requestedVideoDeviceId === b.requestedVideoDeviceId | ||
}); | ||
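The subscription above passes remembered device IDs as `ideal` constraints so a device that has disappeared degrades gracefully instead of throwing OverconstrainedError. A standalone sketch of that constraint shape (the device-ID arguments are placeholders):

async function openDevices(audioDeviceId?: string, videoDeviceId?: string) {
  const constraints: MediaStreamConstraints = {
    // `ideal` lets the browser fall back to another device instead of failing hard
    audio: audioDeviceId && audioDeviceId !== "default" ? { deviceId: { ideal: audioDeviceId } } : true,
    video: videoDeviceId && videoDeviceId !== "default" ? { deviceId: { ideal: videoDeviceId } } : true,
  };
  return navigator.mediaDevices.getUserMedia(constraints);
}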
// Subscribe to audio & video enabled | ||
// Subscribe to audio & video enabled, and media stream | ||
const destroyAudioVideoEnabled = store.subscribe((state)=>({ | ||
@@ -817,10 +835,7 @@ audio: state.audio, | ||
if (mediaStream) { | ||
// we get the device ID from the MediaStream and update those | ||
const allTracks = mediaStream?.getTracks() ?? []; | ||
for (const audioTrack of allTracks.filter((track)=>track.kind === "audio")){ | ||
audioTrack.enabled = Boolean(audio); | ||
} | ||
for (const videoTrack of allTracks.filter((track)=>track.kind === "video")){ | ||
videoTrack.enabled = Boolean(video); | ||
} | ||
await setMediaStreamTracksStatus({ | ||
mediaStream, | ||
enableAudio: Boolean(audio), | ||
enableVideo: Boolean(video) | ||
}); | ||
} | ||
@@ -875,3 +890,3 @@ }, { | ||
if (devices) { | ||
store.getState().__controlsFunctions.updateDeviceList(devices); | ||
store.getState().__controlsFunctions.updateDeviceList(devices.filter((d)=>d.deviceId)); | ||
} | ||
@@ -887,3 +902,3 @@ }, { | ||
if (mediaDevices) { | ||
const extendedDevices = mediaDevices.map((device, i)=>({ | ||
const extendedDevices = mediaDevices.filter((d)=>d.deviceId).map((device, i)=>({ | ||
deviceId: device.deviceId, | ||
@@ -917,2 +932,3 @@ kind: device.kind, | ||
destroyMediaStream?.(); | ||
destroyMediaSyncError?.(); | ||
destroyMediaSyncMounted?.(); | ||
@@ -932,2 +948,1 @@ destroyPeerConnectionAndMediaStream?.(); | ||
exports.getBroadcastDeviceInfo = getBroadcastDeviceInfo; | ||
exports.sanitizeBroadcastState = sanitizeBroadcastState; |
@@ -1,5 +0,8 @@ | ||
import { ControlsOptions as ControlsOptions$1, MediaControllerStore, MediaMetrics, Src } from '@livepeer/core/media'; | ||
import { HlsVideoConfig } from './hls.mjs'; | ||
import { DeviceInformation } from '@livepeer/core'; | ||
import { ControlsOptions as ControlsOptions$1, MediaControllerStore, DeviceInformation, MediaMetrics, Src } from '@livepeer/core/media'; | ||
import { HlsConfig } from 'hls.js'; | ||
type HlsVideoConfig = Partial<HlsConfig> & { | ||
autoPlay?: boolean; | ||
}; | ||
type ControlsOptions = ControlsOptions$1 & { | ||
@@ -6,0 +9,0 @@ /** |
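HlsVideoConfig is now declared inline as Partial<HlsConfig> plus an autoPlay flag, instead of being imported from ./hls.mjs. A small example of a conforming value (the option choices are illustrative):

import type { HlsConfig } from "hls.js";

type HlsVideoConfig = Partial<HlsConfig> & { autoPlay?: boolean };

const hlsConfig: HlsVideoConfig = {
  autoPlay: true,
  // any hls.js option can be layered on top of the library defaults
  liveSyncDurationCount: 3,
  debug: false,
};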
Object.defineProperty(exports, '__esModule', { value: true }); | ||
var media = require('@livepeer/core/media'); | ||
var core = require('@livepeer/core'); | ||
var errors = require('@livepeer/core/errors'); | ||
var utils = require('@livepeer/core/utils'); | ||
var hls = require('@livepeer/core-web/hls'); | ||
var webrtc = require('@livepeer/core-web/webrtc'); | ||
var Hls = require('hls.js'); | ||
var storage = require('@livepeer/core/storage'); | ||
var version = require('@livepeer/core/version'); | ||
function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; } | ||
var Hls__default = /*#__PURE__*/_interopDefault(Hls); | ||
const isClient = ()=>typeof window !== "undefined"; | ||
@@ -20,3 +23,2 @@ const ua = ()=>isClient() ? window?.navigator?.userAgent?.toLowerCase() : ""; | ||
if (isClient() && src?.mime) { | ||
// TODO fix this to better support audio mime types | ||
if (src?.type?.includes("audio")) { | ||
@@ -32,2 +34,443 @@ const audio = document.createElement("audio"); | ||
const VIDEO_HLS_INITIALIZED_ATTRIBUTE = "data-livepeer-video-hls-initialized"; | ||
/** | ||
* Checks if hls.js can play in the browser. | ||
*/ const isHlsSupported = ()=>isClient() ? Hls__default.default.isSupported() : true; | ||
/** | ||
* Create an hls.js instance and attach to the provided media element. | ||
*/ const createNewHls = ({ source, element, callbacks, aspectRatio, config, initialQuality })=>{ | ||
// do not attach twice | ||
if (element.getAttribute(VIDEO_HLS_INITIALIZED_ATTRIBUTE) === "true") { | ||
return { | ||
setQuality: ()=>{ | ||
// | ||
}, | ||
destroy: ()=>{ | ||
// | ||
} | ||
}; | ||
} | ||
element.setAttribute(VIDEO_HLS_INITIALIZED_ATTRIBUTE, "true"); | ||
const hls = new Hls__default.default({ | ||
backBufferLength: 60 * 1.5, | ||
manifestLoadingMaxRetry: 0, | ||
fragLoadingMaxRetry: 0, | ||
levelLoadingMaxRetry: 0, | ||
appendErrorMaxRetry: 0, | ||
...config, | ||
...config?.liveSyncDurationCount ? { | ||
liveSyncDurationCount: config.liveSyncDurationCount | ||
} : { | ||
liveMaxLatencyDurationCount: 7, | ||
liveSyncDurationCount: 3 | ||
} | ||
}); | ||
const onDestroy = ()=>{ | ||
hls?.destroy?.(); | ||
element?.removeAttribute?.(VIDEO_HLS_INITIALIZED_ATTRIBUTE); | ||
}; | ||
if (element) { | ||
hls.attachMedia(element); | ||
} | ||
let redirected = false; | ||
hls.on(Hls__default.default.Events.LEVEL_LOADED, async (_e, data)=>{ | ||
const { live, totalduration: duration, url } = data.details; | ||
if (!redirected) { | ||
callbacks?.onRedirect?.(url ?? null); | ||
redirected = true; | ||
} | ||
callbacks?.onLive?.(Boolean(live)); | ||
callbacks?.onDuration?.(duration ?? 0); | ||
}); | ||
hls.on(Hls__default.default.Events.MEDIA_ATTACHED, async ()=>{ | ||
hls.loadSource(source); | ||
hls.on(Hls__default.default.Events.MANIFEST_PARSED, (_event, _data)=>{ | ||
setQuality({ | ||
hls: hls ?? null, | ||
quality: initialQuality, | ||
aspectRatio | ||
}); | ||
callbacks?.onCanPlay?.(); | ||
element?.play?.(); | ||
}); | ||
}); | ||
hls.on(Hls__default.default.Events.ERROR, async (_event, data)=>{ | ||
const { details, fatal } = data; | ||
const isManifestParsingError = details === "manifestParsingError"; | ||
if (!fatal && !isManifestParsingError) return; | ||
callbacks?.onError?.(data); | ||
if (fatal) { | ||
console.error(`Fatal error : ${data.details}`); | ||
switch(data.type){ | ||
case Hls__default.default.ErrorTypes.MEDIA_ERROR: | ||
hls.recoverMediaError(); | ||
break; | ||
case Hls__default.default.ErrorTypes.NETWORK_ERROR: | ||
console.error(`A network error occurred: ${data.details}`); | ||
break; | ||
default: | ||
console.error(`An unrecoverable error occurred: ${data.details}`); | ||
hls.destroy(); | ||
break; | ||
} | ||
} | ||
}); | ||
function updateOffset() { | ||
const currentDate = Date.now(); | ||
const newDate = hls.playingDate; | ||
if (newDate && currentDate) { | ||
callbacks?.onPlaybackOffsetUpdated?.(currentDate - newDate.getTime()); | ||
} | ||
} | ||
const updateOffsetInterval = setInterval(updateOffset, 2000); | ||
return { | ||
destroy: ()=>{ | ||
onDestroy?.(); | ||
clearInterval?.(updateOffsetInterval); | ||
element?.removeAttribute?.(VIDEO_HLS_INITIALIZED_ATTRIBUTE); | ||
}, | ||
setQuality: (videoQuality)=>{ | ||
setQuality({ | ||
hls: hls ?? null, | ||
quality: videoQuality, | ||
aspectRatio | ||
}); | ||
} | ||
}; | ||
}; | ||
const setQuality = ({ hls, quality, aspectRatio })=>{ | ||
if (hls) { | ||
const { width } = media.calculateVideoQualityDimensions(quality, aspectRatio); | ||
if (!width || quality === "auto") { | ||
hls.currentLevel = -1; // Auto level | ||
return; | ||
} | ||
if (hls.levels && hls.levels.length > 0) { | ||
// Sort levels by the absolute difference between their width and the desired width | ||
const sortedLevels = hls.levels.map((level, index)=>({ | ||
...level, | ||
index | ||
})).sort((a, b)=>Math.abs((width ?? 0) - a.width) - Math.abs((width ?? 0) - b.width)); | ||
// Choose the level with the smallest difference in width | ||
const bestMatchLevel = sortedLevels?.[0]; | ||
if ((bestMatchLevel?.index ?? -1) >= 0) { | ||
hls.currentLevel = bestMatchLevel.index; | ||
} else { | ||
hls.currentLevel = -1; | ||
} | ||
} | ||
} | ||
}; | ||
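A usage sketch of createNewHls as defined above. In 4.0.0-next.1 it was imported from @livepeer/core-web/hls; in this version it is inlined, so the sketch assumes the helper is in scope. The source URL is a placeholder, the aspectRatio shape is assumed, and the quality string assumes the package's VideoQuality union:

const video = document.querySelector("video");

if (video instanceof HTMLVideoElement && isHlsSupported()) {
  const { destroy, setQuality } = createNewHls({
    source: "https://example.com/hls/index.m3u8", // placeholder playback URL
    element: video,
    aspectRatio: 16 / 9, // assumed shape; forwarded to calculateVideoQualityDimensions
    initialQuality: "auto",
    config: { debug: false }, // hls.js options; merged with the retry/back-buffer defaults above
    callbacks: {
      onLive: (live) => console.log("live:", live),
      onError: (err) => console.error("hls error:", err),
    },
  });

  // Later, e.g. from a quality menu: picks the hls.js level closest in width to the request.
  setQuality("720p"); // assumed VideoQuality value
  // destroy(); // on unmount: detaches hls.js and clears the initialized attribute
}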
/** | ||
* Checks if WebRTC is supported and returns the appropriate RTCPeerConnection constructor. | ||
*/ const getRTCPeerConnectionConstructor = ()=>{ | ||
// Check if the current environment is a client (browser) | ||
if (!isClient()) { | ||
return null; // If not a client, WebRTC is not supported | ||
} | ||
// Return the constructor for RTCPeerConnection with any vendor prefixes | ||
return window.RTCPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection || null // Return null if none of the constructors are available | ||
; | ||
}; | ||
function createPeerConnection(host) { | ||
const RTCPeerConnectionConstructor = getRTCPeerConnectionConstructor(); | ||
if (RTCPeerConnectionConstructor) { | ||
// strip non-standard port number if present | ||
const hostNoPort = host?.split(":")[0]; | ||
const iceServers = host ? [ | ||
{ | ||
urls: `stun:${hostNoPort}` | ||
}, | ||
{ | ||
urls: `turn:${hostNoPort}`, | ||
username: "livepeer", | ||
credential: "livepeer" | ||
} | ||
] : []; | ||
return new RTCPeerConnectionConstructor({ | ||
iceServers | ||
}); | ||
} | ||
throw new Error("No RTCPeerConnection constructor found in this browser."); | ||
} | ||
const DEFAULT_TIMEOUT = 10000; | ||
/** | ||
* Performs the actual SDP exchange. | ||
* | ||
* 1. Sends the SDP offer to the server, | ||
* 2. Awaits the server's offer. | ||
* | ||
* SDP describes what kind of media we can send and how the server and client communicate. | ||
* | ||
* https://developer.mozilla.org/en-US/docs/Glossary/SDP | ||
* https://www.ietf.org/archive/id/draft-ietf-wish-whip-01.html#name-protocol-operation | ||
*/ async function negotiateConnectionWithClientOffer(peerConnection, endpoint, ofr, controller, accessControl, sdpTimeout) { | ||
if (peerConnection && endpoint && ofr) { | ||
/** | ||
* This response contains the server's SDP offer. | ||
* This specifies how the client should communicate, | ||
* and what kind of media client and server have negotiated to exchange. | ||
*/ const response = await postSDPOffer(endpoint, ofr.sdp, controller, accessControl, sdpTimeout); | ||
if (response.ok) { | ||
const answerSDP = await response.text(); | ||
await peerConnection.setRemoteDescription(new RTCSessionDescription({ | ||
type: "answer", | ||
sdp: answerSDP | ||
})); | ||
const playheadUtc = response.headers.get("Playhead-Utc"); | ||
return new Date(playheadUtc ?? new Date()); | ||
} | ||
if (response.status === 406) { | ||
throw new Error(errors.NOT_ACCEPTABLE_ERROR_MESSAGE); | ||
} | ||
const errorMessage = await response.text(); | ||
throw new Error(errorMessage); | ||
} | ||
throw new Error("Peer connection not defined."); | ||
} | ||
/** | ||
* Constructs the client's SDP offer | ||
* | ||
* SDP describes what kind of media we can send and how the server and client communicate. | ||
* | ||
* https://developer.mozilla.org/en-US/docs/Glossary/SDP | ||
* https://www.ietf.org/archive/id/draft-ietf-wish-whip-01.html#name-protocol-operation | ||
*/ async function constructClientOffer(peerConnection, endpoint) { | ||
if (peerConnection && endpoint) { | ||
/** https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/createOffer */ const offer = await peerConnection.createOffer(); | ||
/** https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/setLocalDescription */ await peerConnection.setLocalDescription(offer); | ||
/** Wait for ICE gathering to complete */ const ofr = await waitToCompleteICEGathering(peerConnection); | ||
if (!ofr) { | ||
throw Error("failed to gather ICE candidates for offer"); | ||
} | ||
return ofr; | ||
} | ||
return null; | ||
} | ||
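Condensed into one function, the exchange implemented by constructClientOffer and negotiateConnectionWithClientOffer above boils down to the standard WHIP/WHEP handshake. This sketch omits the ICE-gathering wait, the timeout/abort handling, and the access-control headers that the real helpers add:

async function minimalSdpExchange(endpoint: string, peerConnection: RTCPeerConnection) {
  // 1. Create the client's SDP offer and apply it locally.
  const offer = await peerConnection.createOffer();
  await peerConnection.setLocalDescription(offer);

  // 2. POST the offer to the WHIP/WHEP endpoint as application/sdp.
  const response = await fetch(endpoint, {
    method: "POST",
    mode: "cors",
    headers: { "content-type": "application/sdp" },
    body: offer.sdp,
  });
  if (!response.ok) throw new Error(await response.text());

  // 3. Apply the server's SDP answer as the remote description.
  await peerConnection.setRemoteDescription({ type: "answer", sdp: await response.text() });
}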
// Regular expression to match the playback ID at the end of the URL | ||
// It looks for a string that follows the last "+" or "/" and continues to the end of the pathname | ||
const playbackIdPattern = /([/+])([^/+?]+)$/; | ||
const REPLACE_PLACEHOLDER = "PLAYBACK_ID"; | ||
let cachedRedirectUrl = null; | ||
async function postSDPOffer(endpoint, data, controller, accessControl, sdpTimeout) { | ||
const id = setTimeout(()=>controller.abort(), sdpTimeout ?? DEFAULT_TIMEOUT); | ||
const url = new URL(endpoint); | ||
const parsedMatches = url.pathname.match(playbackIdPattern); | ||
// if we both have a cached redirect URL and a match for the playback ID, | ||
// use these to shortcut the typical webrtc redirect flow | ||
if (cachedRedirectUrl && parsedMatches?.[2]) { | ||
const clonedCachedUrl = new URL(cachedRedirectUrl); | ||
url.host = clonedCachedUrl.host; | ||
url.pathname = clonedCachedUrl.pathname.replace(REPLACE_PLACEHOLDER, parsedMatches[2]); | ||
} | ||
const response = await fetch(url.toString(), { | ||
method: "POST", | ||
mode: "cors", | ||
headers: { | ||
"content-type": "application/sdp", | ||
...accessControl?.accessKey ? { | ||
"Livepeer-Access-Key": accessControl.accessKey | ||
} : {}, | ||
...accessControl?.jwt ? { | ||
"Livepeer-Jwt": accessControl.jwt | ||
} : {} | ||
}, | ||
body: data, | ||
signal: controller.signal | ||
}); | ||
clearTimeout(id); | ||
return response; | ||
} | ||
async function getRedirectUrl(endpoint, abortController, timeout) { | ||
try { | ||
if (cachedRedirectUrl) { | ||
const inputUrl = new URL(endpoint); | ||
inputUrl.host = cachedRedirectUrl.host; | ||
return inputUrl; | ||
} | ||
const id = setTimeout(()=>abortController.abort(), timeout ?? DEFAULT_TIMEOUT); | ||
const response = await fetch(endpoint, { | ||
method: "HEAD", | ||
signal: abortController.signal | ||
}); | ||
clearTimeout(id); | ||
const parsedUrl = new URL(response.url); | ||
if (parsedUrl) { | ||
const cachedUrl = new URL(parsedUrl); | ||
cachedUrl.pathname = cachedUrl.pathname.replace(playbackIdPattern, `$1${REPLACE_PLACEHOLDER}`); | ||
cachedRedirectUrl = cachedUrl; | ||
} | ||
return parsedUrl; | ||
} catch (e) { | ||
return null; | ||
} | ||
} | ||
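A worked example of the playback-ID rewrite used by postSDPOffer and getRedirectUrl above; the URL is hypothetical and only illustrates how playbackIdPattern and REPLACE_PLACEHOLDER interact:

const pattern = /([/+])([^/+?]+)$/; // same as playbackIdPattern above
const url = new URL("https://playback.example.com/webrtc/video+abcd1234"); // hypothetical endpoint

const match = url.pathname.match(pattern);
// match?.[1] === "+", match?.[2] === "abcd1234" (the playback ID)

const cached = new URL(url);
cached.pathname = cached.pathname.replace(pattern, "$1PLAYBACK_ID");
// cached.pathname === "/webrtc/video+PLAYBACK_ID"; stored once, then re-expanded
// with the next request's playback ID to skip the redirect round-trip.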
/** | ||
* Receives an RTCPeerConnection and waits until | ||
* the connection is initialized or a timeout passes. | ||
* | ||
* https://www.ietf.org/archive/id/draft-ietf-wish-whip-01.html#section-4.1 | ||
* https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceGatheringState | ||
* https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/icegatheringstatechange_event | ||
*/ async function waitToCompleteICEGathering(peerConnection) { | ||
return new Promise((resolve)=>{ | ||
/** Wait at most five seconds for ICE gathering. */ setTimeout(()=>{ | ||
resolve(peerConnection.localDescription); | ||
}, 5000); | ||
peerConnection.onicegatheringstatechange = (_ev)=>{ | ||
if (peerConnection.iceGatheringState === "complete") { | ||
resolve(peerConnection.localDescription); | ||
} | ||
}; | ||
}); | ||
} /** | ||
* Parses the ICE servers from the `Link` headers returned during SDP negotiation. | ||
*/ // function parseIceServersFromLinkHeader( | ||
// iceString: string | null, | ||
// ): NonNullable<RTCConfiguration['iceServers']> | null { | ||
// try { | ||
// const servers = iceString | ||
// ?.split(', ') | ||
// .map((serverStr) => { | ||
// const parts = serverStr.split('; '); | ||
// const server: NonNullable<RTCConfiguration['iceServers']>[number] = { | ||
// urls: '', | ||
// }; | ||
// for (const part of parts) { | ||
// if (part.startsWith('stun:') || part.startsWith('turn:')) { | ||
// server.urls = part; | ||
// } else if (part.startsWith('username=')) { | ||
// server.username = part.slice('username="'.length, -1); | ||
// } else if (part.startsWith('credential=')) { | ||
// server.credential = part.slice('credential="'.length, -1); | ||
// } | ||
// } | ||
// return server; | ||
// }) | ||
// .filter((server) => server.urls); | ||
// return servers && (servers?.length ?? 0) > 0 ? servers : null; | ||
// } catch (e) { | ||
// console.error(e); | ||
// } | ||
// return null; | ||
// } | ||
const VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE = "data-livepeer-video-whep-initialized"; | ||
/** | ||
* Client that uses WHEP to play back video over WebRTC. | ||
* | ||
* https://www.ietf.org/id/draft-murillo-whep-00.html | ||
*/ const createNewWHEP = ({ source, element, callbacks, accessControl, sdpTimeout })=>{ | ||
// do not attach twice | ||
if (element.getAttribute(VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE) === "true") { | ||
return { | ||
destroy: ()=>{ | ||
// | ||
} | ||
}; | ||
} | ||
element.setAttribute(VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE, "true"); | ||
let destroyed = false; | ||
const abortController = new AbortController(); | ||
let peerConnection = null; | ||
const stream = new MediaStream(); | ||
const errorComposed = (e)=>{ | ||
callbacks?.onError?.(e); | ||
if (element) { | ||
element.srcObject = null; | ||
} | ||
}; | ||
getRedirectUrl(source, abortController, sdpTimeout).then((redirectUrl)=>{ | ||
if (destroyed || !redirectUrl) { | ||
return; | ||
} | ||
const redirectUrlString = redirectUrl.toString(); | ||
callbacks?.onRedirect?.(redirectUrlString ?? null); | ||
/** | ||
* Create a new WebRTC connection, using public STUN servers with ICE, | ||
* allowing the client to discover its own IP address. | ||
* https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API/Protocols#ice | ||
*/ peerConnection = createPeerConnection(redirectUrl.host); | ||
if (peerConnection) { | ||
/** https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addTransceiver */ peerConnection.addTransceiver("video", { | ||
direction: "recvonly" | ||
}); | ||
peerConnection.addTransceiver("audio", { | ||
direction: "recvonly" | ||
}); | ||
/** | ||
* When new tracks are received in the connection, store local references, | ||
* so that they can be added to a MediaStream, and to the <video> element. | ||
* | ||
* https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/track_event | ||
*/ peerConnection.ontrack = async (event)=>{ | ||
try { | ||
if (stream) { | ||
const track = event.track; | ||
const currentTracks = stream.getTracks(); | ||
const streamAlreadyHasVideoTrack = currentTracks.some((track)=>track.kind === "video"); | ||
const streamAlreadyHasAudioTrack = currentTracks.some((track)=>track.kind === "audio"); | ||
switch(track.kind){ | ||
case "video": | ||
if (streamAlreadyHasVideoTrack) { | ||
break; | ||
} | ||
stream.addTrack(track); | ||
break; | ||
case "audio": | ||
if (streamAlreadyHasAudioTrack) { | ||
break; | ||
} | ||
stream.addTrack(track); | ||
break; | ||
default: | ||
console.log(`received unknown track ${track}`); | ||
} | ||
} | ||
} catch (e) { | ||
errorComposed(e); | ||
} | ||
}; | ||
peerConnection.addEventListener("connectionstatechange", async (_ev)=>{ | ||
try { | ||
if (peerConnection?.connectionState === "failed") { | ||
throw new Error("Failed to connect to peer."); | ||
} | ||
if (peerConnection?.connectionState === "connected" && !element.srcObject) { | ||
element.srcObject = stream; | ||
callbacks?.onConnected?.(); | ||
} | ||
} catch (e) { | ||
errorComposed(e); | ||
} | ||
}); | ||
peerConnection.addEventListener("negotiationneeded", async (_ev)=>{ | ||
try { | ||
const ofr = await constructClientOffer(peerConnection, redirectUrlString); | ||
const response = await negotiateConnectionWithClientOffer(peerConnection, source, ofr, abortController, accessControl, sdpTimeout); | ||
const currentDate = Date.now(); | ||
if (response && currentDate) { | ||
callbacks?.onPlaybackOffsetUpdated?.(currentDate - response.getTime()); | ||
} | ||
} catch (e) { | ||
errorComposed(e); | ||
} | ||
}); | ||
} | ||
}).catch((e)=>errorComposed(e)); | ||
return { | ||
destroy: ()=>{ | ||
destroyed = true; | ||
abortController?.abort?.(); | ||
// Remove the WebRTC source | ||
if (element) { | ||
element.srcObject = null; | ||
} | ||
peerConnection?.close?.(); | ||
element?.removeAttribute?.(VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE); | ||
} | ||
}; | ||
}; | ||
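A usage sketch of the WHEP client above, assuming createNewWHEP is in scope (it was exported from @livepeer/core-web/webrtc in the previous version and is inlined here); the endpoint URL is a placeholder:

const video = document.querySelector("video");

if (video instanceof HTMLVideoElement) {
  const { destroy } = createNewWHEP({
    source: "https://playback.example.com/webrtc/video+abcd1234", // placeholder WHEP endpoint
    element: video,
    callbacks: {
      onConnected: () => console.log("WebRTC connected"),
      onError: (err) => console.error("WHEP error:", err),
    },
    // accessControl: { jwt } or { accessKey } may be supplied; forwarded as headers by postSDPOffer
    sdpTimeout: 10000,
  });

  // On unmount: aborts negotiation, detaches srcObject, closes the peer connection,
  // and clears the initialized attribute.
  // destroy();
}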
const methodsList = [ | ||
@@ -340,3 +783,9 @@ // modern browsers | ||
} | ||
const onLoadedMetadata = ()=>store.getState().__controlsFunctions.onCanPlay(); | ||
const onLoadedMetadata = ()=>{ | ||
store.getState().__controlsFunctions.onCanPlay(); | ||
store.getState().__controlsFunctions.requestMeasure(); | ||
}; | ||
const onLoadedData = ()=>{ | ||
store.getState().__controlsFunctions.requestMeasure(); | ||
}; | ||
const onPlay = ()=>{ | ||
@@ -354,5 +803,14 @@ store.getState().__controlsFunctions.onPlay(); | ||
store.getState().__controlsFunctions.updateLastInteraction(); | ||
const isNotBroadcast = store.getState().__initialProps.hotkeys !== "broadcast"; | ||
if (allKeyTriggers.includes(code)) { | ||
if (code === "Space" || code === "KeyK") { | ||
if ((code === "Space" || code === "KeyK") && isNotBroadcast) { | ||
store.getState().__controlsFunctions.togglePlay(); | ||
} else if (code === "ArrowRight" && isNotBroadcast) { | ||
store.getState().__controlsFunctions.requestSeekForward(); | ||
} else if (code === "ArrowLeft" && isNotBroadcast) { | ||
store.getState().__controlsFunctions.requestSeekBack(); | ||
} else if (code === "KeyM" && isNotBroadcast) { | ||
store.getState().__controlsFunctions.requestToggleMute(); | ||
} else if (code === "KeyX" && isNotBroadcast) { | ||
store.getState().__controlsFunctions.requestClip(); | ||
} else if (code === "KeyF") { | ||
@@ -362,10 +820,2 @@ store.getState().__controlsFunctions.requestToggleFullscreen(); | ||
store.getState().__controlsFunctions.requestTogglePictureInPicture(); | ||
} else if (code === "ArrowRight") { | ||
store.getState().__controlsFunctions.requestSeekForward(); | ||
} else if (code === "ArrowLeft") { | ||
store.getState().__controlsFunctions.requestSeekBack(); | ||
} else if (code === "KeyM") { | ||
store.getState().__controlsFunctions.requestToggleMute(); | ||
} else if (code === "KeyX") { | ||
store.getState().__controlsFunctions.requestClip(); | ||
} | ||
@@ -416,7 +866,7 @@ } | ||
console.warn("Video not found"); | ||
return store.getState().__controlsFunctions?.onError?.(new Error(core.STREAM_OFFLINE_ERROR_MESSAGE)); | ||
return store.getState().__controlsFunctions?.onError?.(new Error(errors.STREAM_OFFLINE_ERROR_MESSAGE)); | ||
} | ||
if (response.status === 401) { | ||
console.warn("Unauthorized to view video"); | ||
return store.getState().__controlsFunctions?.onError?.(new Error(core.ACCESS_CONTROL_ERROR_MESSAGE)); | ||
return store.getState().__controlsFunctions?.onError?.(new Error(errors.ACCESS_CONTROL_ERROR_MESSAGE)); | ||
} | ||
@@ -446,20 +896,4 @@ } catch (err) { | ||
const onResize = async ()=>{ | ||
store.getState().__controlsFunctions.setSize({ | ||
...element?.videoHeight && element?.videoWidth ? { | ||
media: { | ||
height: element.videoHeight, | ||
width: element.videoWidth | ||
} | ||
} : {}, | ||
...element?.clientHeight && element?.clientWidth ? { | ||
container: { | ||
height: element.clientHeight, | ||
width: element.clientWidth | ||
} | ||
} : {} | ||
}); | ||
store.getState().__controlsFunctions.requestMeasure(); | ||
}; | ||
if (element) { | ||
onResize(); | ||
} | ||
const parentElementOrElement = element?.parentElement ?? element; | ||
@@ -470,2 +904,3 @@ if (element) { | ||
element.addEventListener("loadedmetadata", onLoadedMetadata); | ||
element.addEventListener("loadeddata", onLoadedData); | ||
element.addEventListener("play", onPlay); | ||
@@ -479,3 +914,2 @@ element.addEventListener("pause", onPause); | ||
element.addEventListener("loadstart", onLoadStart); | ||
element.addEventListener("resize", onResize); | ||
element.addEventListener("ended", onEnded); | ||
@@ -491,2 +925,5 @@ if (autohide) { | ||
} | ||
if (typeof window !== "undefined") { | ||
window?.addEventListener?.("resize", onResize); | ||
} | ||
if (parentElementOrElement) { | ||
@@ -528,2 +965,3 @@ if (store.getState().__initialProps.hotkeys) { | ||
element?.removeEventListener?.("loadedmetadata", onLoadedMetadata); | ||
element?.removeEventListener?.("loadeddata", onLoadedData); | ||
element?.removeEventListener?.("play", onPlay); | ||
@@ -537,4 +975,6 @@ element?.removeEventListener?.("pause", onPause); | ||
element?.removeEventListener?.("loadstart", onLoadStart); | ||
element?.removeEventListener?.("resize", onResize); | ||
element?.removeEventListener?.("ended", onEnded); | ||
if (typeof window !== "undefined") { | ||
window?.removeEventListener?.("resize", onResize); | ||
} | ||
if (autohide) { | ||
@@ -603,6 +1043,6 @@ parentElementOrElement?.removeEventListener?.("mouseover", onMouseUpdate); | ||
if (bframes) { | ||
onErrorComposed(new Error(core.BFRAMES_ERROR_MESSAGE)); | ||
onErrorComposed(new Error(errors.BFRAMES_ERROR_MESSAGE)); | ||
} | ||
}); | ||
const { destroy } = webrtc.createNewWHEP({ | ||
const { destroy } = createNewWHEP({ | ||
source: source.src, | ||
@@ -641,6 +1081,6 @@ element, | ||
const onErrorCleaned = (error)=>{ | ||
const cleanError = new Error(error?.response?.data?.toString?.() ?? (error?.response?.code === 401 ? core.ACCESS_CONTROL_ERROR_MESSAGE : "Error with HLS.js")); | ||
const cleanError = new Error(error?.response?.data?.toString?.() ?? (error?.response?.code === 401 ? errors.ACCESS_CONTROL_ERROR_MESSAGE : "Error with HLS.js")); | ||
onErrorComposed?.(cleanError); | ||
}; | ||
const { destroy, setQuality } = hls.createNewHls({ | ||
const { destroy, setQuality } = createNewHls({ | ||
source: source?.src, | ||
@@ -677,3 +1117,2 @@ element, | ||
cleanupSource = ()=>{ | ||
console.log("cleaning up prev hls"); | ||
unmounted = true; | ||
@@ -732,2 +1171,3 @@ destroy?.(); | ||
const destroyVolume = store.subscribe((state)=>({ | ||
playing: state.playing, | ||
volume: state.volume, | ||
@@ -740,3 +1180,3 @@ isVolumeChangeSupported: state.__device.isVolumeChangeSupported | ||
}, { | ||
equalityFn: (a, b)=>a.volume === b.volume && a.isVolumeChangeSupported === b.isVolumeChangeSupported | ||
equalityFn: (a, b)=>a.volume === b.volume && a.playing === b.playing && a.isVolumeChangeSupported === b.isVolumeChangeSupported | ||
}); | ||
@@ -787,5 +1227,61 @@ // Subscribe to mute changes | ||
}); | ||
// Subscribe to sizing requests | ||
const destroyRequestSizing = store.subscribe((state)=>({ | ||
lastTime: state.__controls.requestedMeasureLastTime, | ||
fullscreen: state.fullscreen | ||
}), async ()=>{ | ||
store.getState().__controlsFunctions.setSize({ | ||
...element?.videoHeight && element?.videoWidth ? { | ||
media: { | ||
height: element.videoHeight, | ||
width: element.videoWidth | ||
} | ||
} : {}, | ||
...element?.clientHeight && element?.clientWidth ? { | ||
container: { | ||
height: element.clientHeight, | ||
width: element.clientWidth | ||
} | ||
} : {}, | ||
...typeof window !== "undefined" && window?.innerHeight && window?.innerWidth ? { | ||
window: { | ||
height: window.innerHeight, | ||
width: window.innerWidth | ||
} | ||
} : {} | ||
}); | ||
}, { | ||
equalityFn: (a, b)=>a?.fullscreen === b?.fullscreen && a?.lastTime === b?.lastTime | ||
}); | ||
// Subscribe to media sizing changes | ||
const destroyMediaSizing = store.subscribe((state)=>state.__controls.size?.media, async (media)=>{ | ||
const parentElementOrElement = element?.parentElement ?? element; | ||
if (parentElementOrElement) { | ||
if (media?.height && media?.width) { | ||
const elementStyle = parentElementOrElement.style; | ||
elementStyle.setProperty("--livepeer-media-height", `${media.height}px`); | ||
elementStyle.setProperty("--livepeer-media-width", `${media.width}px`); | ||
} | ||
} | ||
}, { | ||
equalityFn: (a, b)=>a?.height === b?.height && a?.width === b?.width | ||
}); | ||
// Subscribe to container sizing changes | ||
const destroyContainerSizing = store.subscribe((state)=>state.__controls.size?.container, async (container)=>{ | ||
const parentElementOrElement = element?.parentElement ?? element; | ||
if (parentElementOrElement) { | ||
if (container?.height && container?.width) { | ||
const elementStyle = parentElementOrElement.style; | ||
elementStyle.setProperty("--livepeer-container-height", `${container.height}px`); | ||
elementStyle.setProperty("--livepeer-container-width", `${container.width}px`); | ||
} | ||
} | ||
}, { | ||
equalityFn: (a, b)=>a?.height === b?.height && a?.width === b?.width | ||
}); | ||
return ()=>{ | ||
destroyAutohide?.(); | ||
destroyContainerSizing?.(); | ||
destroyFullscreen?.(); | ||
destroyMediaSizing?.(); | ||
destroyMute?.(); | ||
@@ -796,2 +1292,3 @@ destroyPictureInPicture?.(); | ||
destroyPosterImage?.(); | ||
destroyRequestSizing?.(); | ||
destroySeeking?.(); | ||
@@ -805,42 +1302,2 @@ destroyVolume?.(); | ||
/** | ||
* Checks if WebRTC is supported and returns the appropriate RTCPeerConnection constructor. | ||
*/ const getRTCPeerConnectionConstructor = ()=>{ | ||
// Check if the current environment is a client (browser) | ||
if (!isClient()) { | ||
return null; // If not a client, WebRTC is not supported | ||
} | ||
// Return the constructor for RTCPeerConnection with any vendor prefixes | ||
return window.RTCPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection || null // Return null if none of the constructors are available | ||
; | ||
}; | ||
// iceString: string | null, | ||
// ): NonNullable<RTCConfiguration['iceServers']> | null { | ||
// try { | ||
// const servers = iceString | ||
// ?.split(', ') | ||
// .map((serverStr) => { | ||
// const parts = serverStr.split('; '); | ||
// const server: NonNullable<RTCConfiguration['iceServers']>[number] = { | ||
// urls: '', | ||
// }; | ||
// for (const part of parts) { | ||
// if (part.startsWith('stun:') || part.startsWith('turn:')) { | ||
// server.urls = part; | ||
// } else if (part.startsWith('username=')) { | ||
// server.username = part.slice('username="'.length, -1); | ||
// } else if (part.startsWith('credential=')) { | ||
// server.credential = part.slice('credential="'.length, -1); | ||
// } | ||
// } | ||
// return server; | ||
// }) | ||
// .filter((server) => server.urls); | ||
// return servers && (servers?.length ?? 0) > 0 ? servers : null; | ||
// } catch (e) { | ||
// console.error(e); | ||
// } | ||
// return null; | ||
// } | ||
const getDeviceInfo = (version)=>({ | ||
@@ -856,3 +1313,3 @@ version, | ||
isPictureInPictureSupported: isPictureInPictureSupported(), | ||
isHlsSupported: hls.isHlsSupported(), | ||
isHlsSupported: isHlsSupported(), | ||
isVolumeChangeSupported: true | ||
@@ -859,0 +1316,0 @@ }); |
import { ErrorData, HlsConfig } from 'hls.js'; | ||
import { VideoQuality } from './index.mjs'; | ||
import { VideoQuality } from '@livepeer/core/media'; | ||
@@ -4,0 +4,0 @@ declare const VIDEO_HLS_INITIALIZED_ATTRIBUTE = "data-livepeer-video-hls-initialized"; |
@@ -1,1 +0,6 @@ | ||
export { ACCESS_CONTROL_ERROR_MESSAGE, AccessControlParams, Address, AriaText, AudioSrc, AudioTrackSelector, BFRAMES_ERROR_MESSAGE, Base64Src, ClipLength, ClipParams, ControlsOptions, ControlsState, DeviceInformation, ElementSize, Hash, HlsSrc, InitialProps, MediaControllerCallbackState, MediaControllerState, MediaControllerStore, MediaMetrics, MediaSizing, Metadata, MetricsStatus, NOT_ACCEPTABLE_ERROR_MESSAGE, ObjectFit, PlaybackError, PlaybackMonitor, PlaybackRate, STREAM_OFFLINE_ERROR_MESSAGE, STREAM_OPEN_ERROR_MESSAGE, SingleAudioTrackSelector, SingleTrackSelector, SingleVideoTrackSelector, Src, Storage, VideoQuality, VideoSrc, VideoTrackSelector, WebRTCSrc, b64Decode, b64Encode, b64UrlDecode, b64UrlEncode, createControllerStore, createStorage, deepMerge, getMediaSourceType, isAccessControlError, isBframesError, isNotAcceptableError, isStreamOfflineError, noopStorage, omit, pick, sanitizeMediaControllerState, version } from '@livepeer/core'; | ||
export { ACCESS_CONTROL_ERROR_MESSAGE, BFRAMES_ERROR_MESSAGE, NOT_ACCEPTABLE_ERROR_MESSAGE, PERMISSIONS_ERROR_MESSAGE, STREAM_OFFLINE_ERROR_MESSAGE, STREAM_OPEN_ERROR_MESSAGE, isAccessControlError, isBframesError, isNotAcceptableError, isPermissionsError, isStreamOfflineError } from '@livepeer/core/errors'; | ||
export { AccessControlParams, AriaText, AudioSrc, AudioTrackSelector, Base64Src, ClipLength, ClipParams, ControlsOptions, ControlsState, DeviceInformation, ElementSize, HlsSrc, InitialProps, MediaControllerState, MediaControllerStore, MediaMetrics, MediaSizing, Metadata, MetricsStatus, ObjectFit, PlaybackError, PlaybackMonitor, PlaybackRate, SingleAudioTrackSelector, SingleTrackSelector, SingleVideoTrackSelector, Src, VideoQuality, VideoSrc, VideoTrackSelector, WebRTCSrc, createControllerStore, getMediaSourceType } from '@livepeer/core/media'; | ||
export { ClientStorage, createStorage, noopStorage } from '@livepeer/core/storage'; | ||
export { Address, Hash } from '@livepeer/core/types'; | ||
export { b64Decode, b64Encode, b64UrlDecode, b64UrlEncode, deepMerge, omit, pick } from '@livepeer/core/utils'; | ||
export { version } from '@livepeer/core/version'; |
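With the re-exports now sourced from @livepeer/core subpaths, the same names remain reachable through @livepeer/core-web, but importing them directly from the subpath entrypoints also works; a sketch using names taken from the export list above:

import { ACCESS_CONTROL_ERROR_MESSAGE, isPermissionsError } from "@livepeer/core/errors";
import { getMediaSourceType } from "@livepeer/core/media";
import { createStorage, noopStorage } from "@livepeer/core/storage";
import { omit, deepMerge } from "@livepeer/core/utils";
import { version } from "@livepeer/core/version";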
Object.defineProperty(exports, '__esModule', { value: true }); | ||
var core = require('@livepeer/core'); | ||
var errors = require('@livepeer/core/errors'); | ||
var media = require('@livepeer/core/media'); | ||
var storage = require('@livepeer/core/storage'); | ||
var utils = require('@livepeer/core/utils'); | ||
var version = require('@livepeer/core/version'); | ||
@@ -9,87 +13,91 @@ | ||
enumerable: true, | ||
get: function () { return core.ACCESS_CONTROL_ERROR_MESSAGE; } | ||
get: function () { return errors.ACCESS_CONTROL_ERROR_MESSAGE; } | ||
}); | ||
Object.defineProperty(exports, "BFRAMES_ERROR_MESSAGE", { | ||
enumerable: true, | ||
get: function () { return core.BFRAMES_ERROR_MESSAGE; } | ||
get: function () { return errors.BFRAMES_ERROR_MESSAGE; } | ||
}); | ||
Object.defineProperty(exports, "NOT_ACCEPTABLE_ERROR_MESSAGE", { | ||
enumerable: true, | ||
get: function () { return core.NOT_ACCEPTABLE_ERROR_MESSAGE; } | ||
get: function () { return errors.NOT_ACCEPTABLE_ERROR_MESSAGE; } | ||
}); | ||
Object.defineProperty(exports, "PERMISSIONS_ERROR_MESSAGE", { | ||
enumerable: true, | ||
get: function () { return errors.PERMISSIONS_ERROR_MESSAGE; } | ||
}); | ||
Object.defineProperty(exports, "STREAM_OFFLINE_ERROR_MESSAGE", { | ||
enumerable: true, | ||
get: function () { return core.STREAM_OFFLINE_ERROR_MESSAGE; } | ||
get: function () { return errors.STREAM_OFFLINE_ERROR_MESSAGE; } | ||
}); | ||
Object.defineProperty(exports, "STREAM_OPEN_ERROR_MESSAGE", { | ||
enumerable: true, | ||
get: function () { return core.STREAM_OPEN_ERROR_MESSAGE; } | ||
get: function () { return errors.STREAM_OPEN_ERROR_MESSAGE; } | ||
}); | ||
Object.defineProperty(exports, "b64Decode", { | ||
Object.defineProperty(exports, "isAccessControlError", { | ||
enumerable: true, | ||
get: function () { return core.b64Decode; } | ||
get: function () { return errors.isAccessControlError; } | ||
}); | ||
Object.defineProperty(exports, "b64Encode", { | ||
Object.defineProperty(exports, "isBframesError", { | ||
enumerable: true, | ||
get: function () { return core.b64Encode; } | ||
get: function () { return errors.isBframesError; } | ||
}); | ||
Object.defineProperty(exports, "b64UrlDecode", { | ||
Object.defineProperty(exports, "isNotAcceptableError", { | ||
enumerable: true, | ||
get: function () { return core.b64UrlDecode; } | ||
get: function () { return errors.isNotAcceptableError; } | ||
}); | ||
Object.defineProperty(exports, "b64UrlEncode", { | ||
Object.defineProperty(exports, "isPermissionsError", { | ||
enumerable: true, | ||
get: function () { return core.b64UrlEncode; } | ||
get: function () { return errors.isPermissionsError; } | ||
}); | ||
Object.defineProperty(exports, "isStreamOfflineError", { | ||
enumerable: true, | ||
get: function () { return errors.isStreamOfflineError; } | ||
}); | ||
Object.defineProperty(exports, "createControllerStore", { | ||
enumerable: true, | ||
get: function () { return core.createControllerStore; } | ||
get: function () { return media.createControllerStore; } | ||
}); | ||
Object.defineProperty(exports, "createStorage", { | ||
Object.defineProperty(exports, "getMediaSourceType", { | ||
enumerable: true, | ||
get: function () { return core.createStorage; } | ||
get: function () { return media.getMediaSourceType; } | ||
}); | ||
Object.defineProperty(exports, "deepMerge", { | ||
Object.defineProperty(exports, "createStorage", { | ||
enumerable: true, | ||
get: function () { return core.deepMerge; } | ||
get: function () { return storage.createStorage; } | ||
}); | ||
Object.defineProperty(exports, "getMediaSourceType", { | ||
Object.defineProperty(exports, "noopStorage", { | ||
enumerable: true, | ||
get: function () { return core.getMediaSourceType; } | ||
get: function () { return storage.noopStorage; } | ||
}); | ||
Object.defineProperty(exports, "isAccessControlError", { | ||
Object.defineProperty(exports, "b64Decode", { | ||
enumerable: true, | ||
get: function () { return core.isAccessControlError; } | ||
get: function () { return utils.b64Decode; } | ||
}); | ||
Object.defineProperty(exports, "isBframesError", { | ||
Object.defineProperty(exports, "b64Encode", { | ||
enumerable: true, | ||
get: function () { return core.isBframesError; } | ||
get: function () { return utils.b64Encode; } | ||
}); | ||
Object.defineProperty(exports, "isNotAcceptableError", { | ||
Object.defineProperty(exports, "b64UrlDecode", { | ||
enumerable: true, | ||
get: function () { return core.isNotAcceptableError; } | ||
get: function () { return utils.b64UrlDecode; } | ||
}); | ||
Object.defineProperty(exports, "isStreamOfflineError", { | ||
Object.defineProperty(exports, "b64UrlEncode", { | ||
enumerable: true, | ||
get: function () { return core.isStreamOfflineError; } | ||
get: function () { return utils.b64UrlEncode; } | ||
}); | ||
Object.defineProperty(exports, "noopStorage", { | ||
Object.defineProperty(exports, "deepMerge", { | ||
enumerable: true, | ||
get: function () { return core.noopStorage; } | ||
get: function () { return utils.deepMerge; } | ||
}); | ||
Object.defineProperty(exports, "omit", { | ||
enumerable: true, | ||
get: function () { return core.omit; } | ||
get: function () { return utils.omit; } | ||
}); | ||
Object.defineProperty(exports, "pick", { | ||
enumerable: true, | ||
get: function () { return core.pick; } | ||
get: function () { return utils.pick; } | ||
}); | ||
Object.defineProperty(exports, "sanitizeMediaControllerState", { | ||
enumerable: true, | ||
get: function () { return core.sanitizeMediaControllerState; } | ||
}); | ||
Object.defineProperty(exports, "version", { | ||
enumerable: true, | ||
get: function () { return core.version; } | ||
get: function () { return version.version; } | ||
}); |
@@ -1,1 +0,1 @@ | ||
export { AccessControlParams, AriaText, AudioSrc, AudioTrackSelector, Base64Src, ClipLength, ClipParams, ControlsOptions, ControlsState, DEFAULT_AUTOHIDE_TIME, DEFAULT_VOLUME_LEVEL, DeviceInformation, ElementSize, HlsSrc, InitialProps, MediaControllerCallbackState, MediaControllerState, MediaControllerStore, MediaMetrics, MediaSizing, Metadata, MetricsStatus, ObjectFit, PlaybackError, PlaybackMonitor, PlaybackRate, SingleAudioTrackSelector, SingleTrackSelector, SingleVideoTrackSelector, Src, VideoQuality, VideoSrc, VideoTrackSelector, WebRTCSrc, addMediaMetricsToStore, createControllerStore, getMediaSourceType, sanitizeMediaControllerState } from '@livepeer/core/media'; | ||
export { AccessControlParams, AriaText, AudioSrc, AudioTrackSelector, Base64Src, ClipLength, ClipParams, ControlsOptions, ControlsState, DeviceInformation, ElementSize, HlsSrc, InitialProps, MediaControllerState, MediaControllerStore, MediaMetrics, MediaSizing, Metadata, MetricsStatus, ObjectFit, PlaybackError, PlaybackMonitor, PlaybackRate, SingleAudioTrackSelector, SingleTrackSelector, SingleVideoTrackSelector, Src, VideoQuality, VideoSrc, VideoTrackSelector, WebRTCSrc, createControllerStore, getMediaSourceType } from '@livepeer/core/media'; |
@@ -7,14 +7,2 @@ Object.defineProperty(exports, '__esModule', { value: true }); | ||
Object.defineProperty(exports, "DEFAULT_AUTOHIDE_TIME", { | ||
enumerable: true, | ||
get: function () { return media.DEFAULT_AUTOHIDE_TIME; } | ||
}); | ||
Object.defineProperty(exports, "DEFAULT_VOLUME_LEVEL", { | ||
enumerable: true, | ||
get: function () { return media.DEFAULT_VOLUME_LEVEL; } | ||
}); | ||
Object.defineProperty(exports, "addMediaMetricsToStore", { | ||
enumerable: true, | ||
get: function () { return media.addMediaMetricsToStore; } | ||
}); | ||
Object.defineProperty(exports, "createControllerStore", { | ||
@@ -28,5 +16,1 @@ enumerable: true, | ||
}); | ||
Object.defineProperty(exports, "sanitizeMediaControllerState", { | ||
enumerable: true, | ||
get: function () { return media.sanitizeMediaControllerState; } | ||
}); |
@@ -1,2 +0,2 @@ | ||
import { AccessControlParams } from '@livepeer/core'; | ||
import { AccessControlParams } from '@livepeer/core/media'; | ||
@@ -3,0 +3,0 @@ /** |
Object.defineProperty(exports, '__esModule', { value: true }); | ||
var core = require('@livepeer/core'); | ||
require('@livepeer/core/utils'); | ||
var utils = require('@livepeer/core-web/utils'); | ||
var errors = require('@livepeer/core/errors'); | ||
var utils = require('@livepeer/core/utils'); | ||
@@ -69,3 +68,3 @@ const isClient = ()=>typeof window !== "undefined"; | ||
if (response.status === 406) { | ||
throw new Error(core.NOT_ACCEPTABLE_ERROR_MESSAGE); | ||
throw new Error(errors.NOT_ACCEPTABLE_ERROR_MESSAGE); | ||
} | ||
@@ -72,0 +71,0 @@ const errorMessage = await response.text(); |
@@ -5,3 +5,3 @@ { | ||
"license": "MIT", | ||
"version": "4.0.0-next.1", | ||
"version": "4.0.0-next.2", | ||
"repository": { | ||
@@ -91,22 +91,2 @@ "type": "git", | ||
}, | ||
"./types": { | ||
"import": { | ||
"types": "./dist/es/types.d.mts", | ||
"default": "./dist/es/types.mjs" | ||
}, | ||
"require": { | ||
"types": "./dist/cjs/types.d.ts", | ||
"default": "./dist/cjs/types.js" | ||
} | ||
}, | ||
"./utils": { | ||
"import": { | ||
"types": "./dist/es/utils.d.mts", | ||
"default": "./dist/es/utils.mjs" | ||
}, | ||
"require": { | ||
"types": "./dist/cjs/utils.d.ts", | ||
"default": "./dist/cjs/utils.js" | ||
} | ||
}, | ||
"./webrtc": { | ||
@@ -134,3 +114,3 @@ "import": { | ||
"zustand": "^4.5.0", | ||
"@livepeer/core": "3.0.0-next.1" | ||
"@livepeer/core": "3.0.0-next.2" | ||
}, | ||
@@ -146,4 +126,5 @@ "keywords": [ | ||
"clean": "rimraf .turbo node_modules dist", | ||
"dev": "bunchee --watch" | ||
"dev": "bunchee --watch", | ||
"lint": "tsc --noEmit" | ||
} | ||
} |
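The exports-map change above also removes the "./types" and "./utils" subpaths, so deep imports such as @livepeer/core-web/utils no longer resolve in 4.0.0-next.2. Equivalent generic helpers and types are re-exported from @livepeer/core, as the index re-exports earlier in this diff show; a sketch:

// Previously: import { ... } from "@livepeer/core-web/utils" (subpath removed above)
import { deepMerge, omit, pick } from "@livepeer/core/utils";
import type { Address, Hash } from "@livepeer/core/types";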
Diffs of the remaining 11 files are not supported yet.