castable-video
Comparing version 0.3.0 to 0.4.0
/* global globalThis, chrome, cast */
/**
 * CastableVideoMixin
 *
 * Because there can only be one custom built-in (is="my-video") this mixin function
 * provides a way to compose multiple classes to create one custom built-in class.
 * @see https://justinfagnani.com/2015/12/21/real-mixins-with-javascript-classes/
 *
 * @param {HTMLVideoElement} superclass - HTMLVideoElement or an extended class of it.
 * @return {CastableVideo}
 */
export const CastableVideoMixin = (superclass) =>
class CastableVideo extends superclass {
static observedAttributes = ['cast-src'];
static observedAttributes = [
'cast-src',
'cast-content-type',
'cast-stream-type',
];
static instances = new Set();
@@ -31,4 +45,4 @@
if (!this.#isChromeCastAvailable) {
window.__onGCastApiAvailable = () => {
// The window.__onGCastApiAvailable callback alone is not reliable for
globalThis.__onGCastApiAvailable = () => {
// The globalThis.__onGCastApiAvailable callback alone is not reliable for
// the added cast.framework. It's loaded in a separate JS file.
@@ -54,5 +68,16 @@ // http://www.gstatic.com/eureka/clank/101/cast_sender.js
for (const video of this.instances) {
video.#init();
}
const { CAST_STATE_CHANGED } = cast.framework.CastContextEventType;
CastableVideo.#castContext.addEventListener(CAST_STATE_CHANGED, (e) => {
this.instances.forEach((video) => video.#onCastStateChanged(e));
});
const { SESSION_STATE_CHANGED } = cast.framework.CastContextEventType;
CastableVideo.#castContext.addEventListener(
SESSION_STATE_CHANGED,
(e) => {
this.instances.forEach((video) => video.#onSessionStateChanged(e));
}
);
this.instances.forEach((video) => video.#init());
}
@@ -82,12 +107,52 @@ };
#castAvailable = false;
static get #currentMedia() {
return CastableVideo.#currentSession?.getSessionObj().media[0];
}
static #editTracksInfo(request) {
return new Promise((resolve, reject) => {
CastableVideo.#currentMedia.editTracksInfo(request, resolve, reject);
});
}
static #getMediaStatus(request) {
return new Promise((resolve, reject) => {
CastableVideo.#currentMedia.getStatus(request, resolve, reject);
});
}
static #setOptions(options) {
return CastableVideo.#castContext.setOptions({
// Set the receiver application ID to your own (created in the
// Google Cast Developer Console), or optionally
// use the chrome.cast.media.DEFAULT_MEDIA_RECEIVER_APP_ID
receiverApplicationId: chrome.cast.media.DEFAULT_MEDIA_RECEIVER_APP_ID,
// Auto join policy can be one of the following three:
// ORIGIN_SCOPED - Auto connect from same appId and page origin
// TAB_AND_ORIGIN_SCOPED - Auto connect from same appId, page origin, and tab
// PAGE_SCOPED - No auto connect
autoJoinPolicy: chrome.cast.AutoJoinPolicy.ORIGIN_SCOPED,
// The following flag enables Cast Connect (requires Chrome 87 or higher)
// https://developers.googleblog.com/2020/08/introducing-cast-connect-android-tv.html
androidReceiverCompatible: false,
language: 'en-US',
resumeSavedSession: true,
...options,
});
}
castEnabled = false;
#localState = { paused: false };
#remoteState = { paused: false, currentTime: 0, muted: false };
#remotePlayer;
#remoteListeners = [];
#textTrackState = new Map();
#remoteListeners = {};
#enterCastCallback;
#leaveCastCallback;
#castChangeCallback;
constructor() {
super();
this.castEnabled = false;
@@ -104,3 +169,3 @@ CastableVideo.instances.add(this);
get #isMediaLoaded() {
return this.#remotePlayer?.isMediaLoaded;
return this.castPlayer?.isMediaLoaded;
}
@@ -112,2 +177,3 @@
switch (attrName) {
case 'cast-stream-type':
case 'cast-src':
@@ -122,3 +188,3 @@ this.load();
this.#remoteListeners.forEach(([event, listener]) => {
Object.entries(this.#remoteListeners).forEach(([event, listener]) => {
this.#remotePlayer.controller.removeEventListener(event, listener);
@@ -129,5 +195,6 @@ });
this.muted = this.#remoteState.muted;
this.currentTime = this.#remoteState.currentTime;
if (this.#remoteState.paused === false) {
// isMuted is not in savedPlayerState. should we sync this back to local?
this.muted = this.#remotePlayer.isMuted;
this.currentTime = this.#remotePlayer.savedPlayerState.currentTime;
if (this.#remotePlayer.savedPlayerState.isPaused === false) {
this.play();
@@ -137,23 +204,5 @@ }
#init() {
if (!CastableVideo.#isCastFrameworkAvailable || this.#castAvailable) return;
this.#castAvailable = true;
this.#setOptions();
this.textTracks.addEventListener(
'change',
this.#onLocalTextTracksChange.bind(this)
);
#onCastStateChanged() {
// Cast state: NO_DEVICES_AVAILABLE, NOT_CONNECTED, CONNECTING, CONNECTED
// https://developers.google.com/cast/docs/reference/web_sender/cast.framework#.CastState
const { CAST_STATE_CHANGED } = cast.framework.CastContextEventType;
CastableVideo.#castContext.addEventListener(CAST_STATE_CHANGED, () => {
this.dispatchEvent(
new CustomEvent('castchange', {
detail: CastableVideo.#castContext.getCastState(),
})
);
});
this.dispatchEvent(
@@ -164,150 +213,136 @@ new CustomEvent('castchange', {
);
this.#remotePlayer = new cast.framework.RemotePlayer();
new cast.framework.RemotePlayerController(this.#remotePlayer);
this.#remoteListeners = [
[
cast.framework.RemotePlayerEventType.IS_CONNECTED_CHANGED,
({ value }) => {
if (value === false) {
this.#disconnect();
}
this.dispatchEvent(new Event(value ? 'entercast' : 'leavecast'));
},
],
[
cast.framework.RemotePlayerEventType.DURATION_CHANGED,
() => this.dispatchEvent(new Event('durationchange')),
],
[
cast.framework.RemotePlayerEventType.VOLUME_LEVEL_CHANGED,
() => this.dispatchEvent(new Event('volumechange')),
],
[
cast.framework.RemotePlayerEventType.IS_MUTED_CHANGED,
() => {
this.#remoteState.muted = this.muted;
this.dispatchEvent(new Event('volumechange'));
},
],
[
cast.framework.RemotePlayerEventType.CURRENT_TIME_CHANGED,
() => {
if (this.#isMediaLoaded) {
this.#remoteState.currentTime = this.currentTime;
this.dispatchEvent(new Event('timeupdate'));
}
},
],
[
cast.framework.RemotePlayerEventType.VIDEO_INFO_CHANGED,
() => this.dispatchEvent(new Event('resize')),
],
[
cast.framework.RemotePlayerEventType.IS_PAUSED_CHANGED,
() => {
this.#remoteState.paused = this.paused;
this.dispatchEvent(new Event(this.paused ? 'pause' : 'play'));
},
],
[
cast.framework.RemotePlayerEventType.PLAYER_STATE_CHANGED,
() => {
// pause event is handled above.
if (
this.castPlayer?.playerState ===
chrome.cast.media.PlayerState.PAUSED
) {
return;
}
this.dispatchEvent(
new Event(
{
[chrome.cast.media.PlayerState.PLAYING]: 'playing',
[chrome.cast.media.PlayerState.BUFFERING]: 'waiting',
[chrome.cast.media.PlayerState.IDLE]: 'emptied',
}[this.castPlayer?.playerState]
)
);
},
],
];
}
#setOptions(options) {
return CastableVideo.#castContext.setOptions({
// Set the receiver application ID to your own (created in the
// Google Cast Developer Console), or optionally
// use the chrome.cast.media.DEFAULT_MEDIA_RECEIVER_APP_ID
receiverApplicationId: chrome.cast.media.DEFAULT_MEDIA_RECEIVER_APP_ID,
async #onSessionStateChanged() {
// Session states: NO_SESSION, SESSION_STARTING, SESSION_STARTED, SESSION_START_FAILED,
// SESSION_ENDING, SESSION_ENDED, SESSION_RESUMED
// https://developers.google.com/cast/docs/reference/web_sender/cast.framework#.SessionState
// Auto join policy can be one of the following three:
// ORIGIN_SCOPED - Auto connect from same appId and page origin
// TAB_AND_ORIGIN_SCOPED - Auto connect from same appId, page origin, and tab
// PAGE_SCOPED - No auto connect
autoJoinPolicy: chrome.cast.AutoJoinPolicy.ORIGIN_SCOPED,
const { SESSION_RESUMED } = cast.framework.SessionState;
if (CastableVideo.#castContext.getSessionState() === SESSION_RESUMED) {
/**
* Figure out if this was the video that started the resumed session.
* @TODO make this more specific than just checking against the video src!! (WL)
*
* If this video element can get the same unique id on each browser refresh
* it would be possible to pass this unique id w/ `LoadRequest.customData`
* and verify against CastableVideo.#currentMedia.customData below.
*/
if (this.castSrc === CastableVideo.#currentMedia?.media.contentId) {
CastableVideo.#castElement = this;
// The following flag enables Cast Connect (requires Chrome 87 or higher)
// https://developers.googleblog.com/2020/08/introducing-cast-connect-android-tv.html
androidReceiverCompatible: false,
Object.entries(this.#remoteListeners).forEach(([event, listener]) => {
this.#remotePlayer.controller.addEventListener(event, listener);
});
language: 'en-US',
resumeSavedSession: false,
/**
* There is a cast framework resume-session bug: if you refresh the page a few
* times, this.#remotePlayer.currentTime will not be in sync with the receiver :(
* The status request below syncs it back up.
*/
try {
await CastableVideo.#getMediaStatus(
new chrome.cast.media.GetStatusRequest()
);
} catch (error) {
console.error(error);
}
...options,
});
// Dispatch the play, playing events manually to sync remote playing state.
this.#remoteListeners[
cast.framework.RemotePlayerEventType.IS_PAUSED_CHANGED
]();
this.#remoteListeners[
cast.framework.RemotePlayerEventType.PLAYER_STATE_CHANGED
]();
}
}
}
#getTrackId(track) {
return this.#textTrackState.get(track)?.trackId;
}
#init() {
if (!CastableVideo.#isCastFrameworkAvailable || this.castEnabled) return;
this.castEnabled = true;
CastableVideo.#setOptions();
#onLocalTextTracksChange() {
if (!this.castPlayer) return;
/**
* @TODO add listeners for addtrack, removetrack (WL)
* This only has an impact on <track> with a `src` because these have to be
* loaded manually in the load() method. This will require a new load() call
* for each added/removed track w/ src.
*/
this.textTracks.addEventListener(
'change',
this.#updateRemoteTextTrack.bind(this)
);
// Note this could also include audio or video tracks, diff against local state.
const activeTrackIds =
CastableVideo.#currentSession?.getSessionObj().media[0].activeTrackIds;
this.#onCastStateChanged();
const subtitles = [...this.textTracks].filter(
({ kind }) => kind === 'subtitles' || kind === 'captions'
);
const hiddenSubtitles = subtitles.filter(
({ mode }) => mode !== 'showing'
);
const hiddenTrackIds = hiddenSubtitles.map(this.#getTrackId, this);
const showingSubtitle = subtitles.find(({ mode }) => mode === 'showing');
this.#remotePlayer = new cast.framework.RemotePlayer();
new cast.framework.RemotePlayerController(this.#remotePlayer);
let requestTrackIds = activeTrackIds;
this.#remoteListeners = {
[cast.framework.RemotePlayerEventType.IS_CONNECTED_CHANGED]: ({
value,
}) => {
if (value === false) {
this.#disconnect();
}
this.dispatchEvent(new Event(value ? 'entercast' : 'leavecast'));
},
[cast.framework.RemotePlayerEventType.DURATION_CHANGED]: () => {
this.dispatchEvent(new Event('durationchange'));
},
[cast.framework.RemotePlayerEventType.VOLUME_LEVEL_CHANGED]: () => {
this.dispatchEvent(new Event('volumechange'));
},
[cast.framework.RemotePlayerEventType.IS_MUTED_CHANGED]: () => {
this.dispatchEvent(new Event('volumechange'));
},
[cast.framework.RemotePlayerEventType.CURRENT_TIME_CHANGED]: () => {
if (!this.#isMediaLoaded) return;
this.dispatchEvent(new Event('timeupdate'));
},
[cast.framework.RemotePlayerEventType.VIDEO_INFO_CHANGED]: () => {
this.dispatchEvent(new Event('resize'));
},
[cast.framework.RemotePlayerEventType.IS_PAUSED_CHANGED]: () => {
this.dispatchEvent(new Event(this.paused ? 'pause' : 'play'));
},
[cast.framework.RemotePlayerEventType.PLAYER_STATE_CHANGED]: () => {
// Player states: IDLE, PLAYING, PAUSED, BUFFERING
// https://developers.google.com/cast/docs/reference/web_sender/chrome.cast.media#.PlayerState
if (activeTrackIds.length) {
// Filter out all local hidden subtitle trackId's.
requestTrackIds = requestTrackIds.filter(
(id) => !hiddenTrackIds.includes(id)
);
}
// pause event is handled above.
if (
this.castPlayer?.playerState ===
chrome.cast.media.PlayerState.PAUSED
) {
return;
}
this.dispatchEvent(
new Event(
{
[chrome.cast.media.PlayerState.PLAYING]: 'playing',
[chrome.cast.media.PlayerState.BUFFERING]: 'waiting',
[chrome.cast.media.PlayerState.IDLE]: 'emptied',
}[this.castPlayer?.playerState]
)
);
},
[cast.framework.RemotePlayerEventType.IS_MEDIA_LOADED_CHANGED]:
async () => {
if (!this.#isMediaLoaded) return;
if (!requestTrackIds.includes(showingSubtitle)) {
const showingTrackId = this.#getTrackId(showingSubtitle);
if (showingTrackId) {
requestTrackIds = [...requestTrackIds, showingTrackId];
}
}
const request = new chrome.cast.media.EditTracksInfoRequest(
requestTrackIds
);
CastableVideo.#currentSession?.getSessionObj().media[0].editTracksInfo(
request,
() => {},
(error) => console.error(error)
);
// mediaInfo is not immediately available due to a bug? wait one tick
await Promise.resolve();
this.#onRemoteMediaLoaded();
},
};
}
async requestCast(options = {}) {
this.#setOptions(options);
CastableVideo.#setOptions(options);
CastableVideo.#castElement = this;
this.#remoteListeners.forEach(([event, listener]) => {
Object.entries(this.#remoteListeners).forEach(([event, listener]) => {
this.#remotePlayer.controller.addEventListener(event, listener);
@@ -347,30 +382,20 @@ });
mediaInfo.textTrackStyle = new chrome.cast.media.TextTrackStyle();
mediaInfo.textTrackStyle.backgroundColor = '#00000000';
mediaInfo.textTrackStyle.edgeColor = '#000000FF';
mediaInfo.textTrackStyle.edgeType =
chrome.cast.media.TextTrackEdgeType.OUTLINE;
mediaInfo.textTrackStyle.fontScale = 1.0;
mediaInfo.textTrackStyle.foregroundColor = '#FFFFFF';
// First give all text tracks a unique ID and save them in a Map().
[...this.textTracks]
.filter(({ kind }) => kind === 'subtitles' || kind === 'captions')
.forEach((track) => {
if (!this.#textTrackState.has(track)) {
const trackId = this.#textTrackState.size + 1;
this.#textTrackState.set(track, { trackId });
}
});
// Manually add text tracks with a `src` attribute.
// M3U8's load text tracks in the receiver, handle these in the media loaded event.
const subtitles = [...this.querySelectorAll('track')].filter(
({ kind }) => {
return kind === 'subtitles' || kind === 'captions';
({ kind, src }) => {
return src && (kind === 'subtitles' || kind === 'captions');
}
);
const activeTrackIds = [];
let textTrackIdCount = 0;
if (subtitles.length) {
mediaInfo.tracks = subtitles.map((trackEl, i) => {
const trackId = this.#getTrackId(subtitles[i].track);
if (!trackId) return;
mediaInfo.tracks = subtitles.map((trackEl) => {
const trackId = ++textTrackIdCount;
// only activate 1 subtitle text track.
if (activeTrackIds.length === 0 && trackEl.track.mode === 'showing') {
activeTrackIds.push(trackId);
}
@@ -393,3 +418,3 @@ const track = new chrome.cast.media.Track(
if (this.castStreamType?.includes('live')) {
if (this.castStreamType === 'live') {
mediaInfo.streamType = chrome.cast.media.StreamType.LIVE;
@@ -411,11 +436,4 @@ } else {
request.autoplay = !this.#localState.paused;
request.activeTrackIds = activeTrackIds;
for (let i = 0; i < subtitles.length; i++) {
const trackId = this.#getTrackId(subtitles[i].track);
if (subtitles[i].track.mode === 'showing' && trackId) {
request.activeTrackIds = [trackId];
break;
}
}
await CastableVideo.#currentSession?.loadMedia(request);
@@ -426,2 +444,72 @@
#onRemoteMediaLoaded() {
this.#updateRemoteTextTrack();
}
async #updateRemoteTextTrack() {
if (!this.castPlayer) return;
// Get the tracks w/ trackId's that have been loaded; manually or via a playlist like a M3U8 or MPD.
const remoteTracks = this.#remotePlayer.mediaInfo?.tracks ?? [];
const remoteSubtitles = remoteTracks.filter(
({ type }) => type === chrome.cast.media.TrackType.TEXT
);
const localSubtitles = [...this.textTracks].filter(
({ kind }) => kind === 'subtitles' || kind === 'captions'
);
// Create a new array from the local subs w/ the trackId's from the remote subs.
const subtitles = remoteSubtitles
.map(({ language, name, trackId }) => {
// Find the corresponding local text track and assign the trackId.
const { mode } =
localSubtitles.find(
(local) => local.language === language && local.label === name
) ?? {};
if (mode) return { mode, trackId };
return false;
})
.filter(Boolean);
const hiddenSubtitles = subtitles.filter(
({ mode }) => mode !== 'showing'
);
const hiddenTrackIds = hiddenSubtitles.map(({ trackId }) => trackId);
const showingSubtitle = subtitles.find(({ mode }) => mode === 'showing');
// Note this could also include audio or video tracks, diff against local state.
const activeTrackIds =
CastableVideo.#currentSession?.getSessionObj().media[0]
?.activeTrackIds ?? [];
let requestTrackIds = activeTrackIds;
if (activeTrackIds.length) {
// Filter out all local hidden subtitle trackId's.
requestTrackIds = requestTrackIds.filter(
(id) => !hiddenTrackIds.includes(id)
);
}
if (showingSubtitle?.trackId) {
requestTrackIds = [...requestTrackIds, showingSubtitle.trackId];
}
// Remove duplicate ids.
requestTrackIds = [...new Set(requestTrackIds)];
const arrayEquals = (a, b) =>
a.length === b.length && a.every((a) => b.includes(a));
if (!arrayEquals(activeTrackIds, requestTrackIds)) {
try {
const request = new chrome.cast.media.EditTracksInfoRequest(
requestTrackIds
);
await CastableVideo.#editTracksInfo(request);
} catch (error) {
console.error(error);
}
}
}
play() {
@@ -434,3 +522,3 @@ if (this.castPlayer) {
}
super.play();
return super.play();
}
@@ -553,5 +641,50 @@
}
get onentercast() {
return this.#enterCastCallback;
}
set onentercast(callback) {
if (this.#enterCastCallback) {
this.removeEventListener('entercast', this.#enterCastCallback);
this.#enterCastCallback = null;
}
if (typeof callback == 'function') {
this.#enterCastCallback = callback;
this.addEventListener('entercast', callback);
}
}
get onleavecast() {
return this.#leaveCastCallback;
}
set onleavecast(callback) {
if (this.#leaveCastCallback) {
this.removeEventListener('leavecast', this.#leaveCastCallback);
this.#leaveCastCallback = null;
}
if (typeof callback == 'function') {
this.#leaveCastCallback = callback;
this.addEventListener('leavecast', callback);
}
}
get oncastchange() {
return this.#castChangeCallback;
}
set oncastchange(callback) {
if (this.#castChangeCallback) {
this.removeEventListener('castchange', this.#castChangeCallback);
this.#castChangeCallback = null;
}
if (typeof callback == 'function') {
this.#castChangeCallback = callback;
this.addEventListener('castchange', callback);
}
}
};
class CastableVideoElement extends CastableVideoMixin(HTMLVideoElement) {}
export const CastableVideoElement = CastableVideoMixin(HTMLVideoElement);
@@ -566,3 +699,1 @@ if (!customElements.get('castable-video')) {
CastableVideoElement.initCast();
export { CastableVideoElement };
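
To put the new API surface in context, here is a rough usage sketch in plain JavaScript. It is illustrative only, not taken from the package's documentation: the exports, events, and attribute names come from the diff above, while the other names (MyVideoMixin, #cast-button) and selectors are assumptions.

// --- Usage sketch (illustrative, not from the package docs) ---
import { CastableVideoMixin, CastableVideoElement } from 'castable-video';

// Compose the mixin with another HTMLVideoElement class if you need a single
// custom built-in that combines several behaviors (MyVideoMixin is hypothetical):
// class MyVideo extends CastableVideoMixin(MyVideoMixin(HTMLVideoElement)) {}

// Or use the ready-made element; the module registers it as is="castable-video"
// when that name is still free (see the customElements.get() check above).
const video = document.querySelector('video[is="castable-video"]');

// 0.4.0 adds onentercast / onleavecast / oncastchange handler properties.
video.oncastchange = (event) => console.log('cast state:', event.detail);
video.onentercast = () => console.log('started casting');
video.onleavecast = () => console.log('stopped casting');

// requestCast(options) forwards options to CastContext.setOptions(); the
// receiver application ID defaults to DEFAULT_MEDIA_RECEIVER_APP_ID.
document.querySelector('#cast-button')?.addEventListener('click', () => {
  video.requestCast().catch((error) => console.error(error));
});
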
{
"name": "castable-video",
"version": "0.3.0",
"version": "0.4.0",
"description": "Cast your video element to the big screen with ease!",
@@ -5,0 +5,0 @@ "main": "castable-video.js",
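
Finally, a sketch of setting the cast-* attributes from script. The attribute names come from static observedAttributes in the diff; the URLs and content type are placeholder values.

// --- Attribute sketch (illustrative values only) ---
const castable = document.createElement('video', { is: 'castable-video' });
castable.src = 'https://example.com/local.mp4';
// Played on the Cast receiver instead of the local src:
castable.setAttribute('cast-src', 'https://example.com/remote.m3u8');
castable.setAttribute('cast-content-type', 'application/x-mpegurl');
// Note: 0.4.0 compares cast-stream-type against the exact string 'live'
// (previously the check used includes('live')).
castable.setAttribute('cast-stream-type', 'live');
castable.controls = true;
document.body.append(castable);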