@types/webxr - npm Package Compare versions

Comparing version 0.3.0 to 0.4.0


webxr/index.d.ts

@@ -1,2 +0,2 @@

// Type definitions for non-npm package webxr 0.3
// Type definitions for non-npm package webxr 0.4
// Project: https://www.w3.org/TR/webxr/

@@ -17,3 +17,11 @@ // Definitions by: Rob Rohan <https://github.com/robrohan>

/**
* ref: https://immersive-web.github.io/webxr/#navigator-xr-attribute
*/
interface Navigator {
/**
* An XRSystem object is the entry point to the API, used to query for XR features
* available to the user agent and initiate communication with XR hardware via the
* creation of XRSessions.
*/
xr?: XRSystem | undefined;

@@ -23,3 +31,18 @@ }

/**
* WebGL Context Compatability
*
* ref: https://immersive-web.github.io/webxr/#contextcompatibility
*/
interface WebGLContextAttributes {
xrCompatible?: boolean | undefined;
}
interface WebGLRenderingContextBase {
makeXRCompatible(): Promise<void>;
}
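A minimal sketch of how these context-compatibility members might be used, assuming a WebXR-capable browser and a `<canvas id="xr-canvas">` element on the page (the element id is illustrative):

```ts
// Sketch only: obtain an XR-compatible WebGL context.
const canvas = document.getElementById('xr-canvas') as HTMLCanvasElement;

// Option 1: ask for compatibility up front via the xrCompatible attribute.
const gl = canvas.getContext('webgl2', { xrCompatible: true }) as WebGL2RenderingContext;

// Option 2: upgrade an existing context before handing it to a session.
async function ensureXRCompatible(
    ctx: WebGLRenderingContext | WebGL2RenderingContext,
): Promise<void> {
    // makeXRCompatible() may recreate the underlying context (e.g. on multi-GPU systems).
    await ctx.makeXRCompatible();
}
```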
/**
* Available session modes
*
* ref: https://immersive-web.github.io/webxr/#xrsessionmode-enum
*/

@@ -35,2 +58,5 @@ type XRSessionMode = 'inline' | 'immersive-vr' | 'immersive-ar';

/**
* ref: https://immersive-web.github.io/webxr/#xrsession-interface
*/
type XRVisibilityState = 'visible' | 'visible-blurred' | 'hidden';

@@ -53,70 +79,69 @@

/**
* Type of XR events available
*/
type XREventType =
| 'devicechange'
| 'visibilitychange'
| 'end'
| 'inputsourceschange'
| 'select'
| 'selectstart'
| 'selectend'
| 'squeeze'
| 'squeezestart'
| 'squeezeend'
| 'reset';
type XRFrameRequestCallback = (time: DOMHighResTimeStamp, frame: XRFrame) => void;
type XRPlaneSet = Set<XRPlane>;
type XRAnchorSet = Set<XRAnchor>;
interface XREventHandler {
(event: Event): any;
}
// tslint:disable-next-line no-empty-interface
interface XRLayer extends EventTarget { }
interface XRSessionInit {
optionalFeatures?: string[] | undefined;
requiredFeatures?: string[] | undefined;
}
interface XRSessionEvent extends Event {
readonly session: XRSession;
}
interface XRSystemDeviceChangeEvent extends Event {
type: "devicechange";
type: 'devicechange';
}
interface XRSessionGrant {
mode: XRSessionMode;
interface XRSystemDeviceChangeEventHandler {
(event: XRSystemDeviceChangeEvent): any;
}
interface XRSystemSessionGrantedEvent extends Event {
type: "sessiongranted";
session: XRSessionGrant;
interface XRSystemEventMap {
devicechange: XRSystemDeviceChangeEvent;
}
interface XRSystemEventMap extends HTMLMediaElementEventMap {
"devicechange": XRSystemDeviceChangeEvent;
"sessiongranted": XRSystemSessionGrantedEvent;
}
/**
* An XRSystem object is the entry point to the API, used to query for XR features available
* to the user agent and initiate communication with XR hardware via the creation of
* XRSessions.
*
* ref: https://immersive-web.github.io/webxr/#xrsystem-interface
*/
interface XRSystem extends EventTarget {
/**
* Attempts to initialize an XRSession for the given mode if possible, entering immersive
* mode if necessary.
* @param mode
* @param options
*/
requestSession(mode: XRSessionMode, options?: XRSessionInit): Promise<XRSession>;
/**
* Queries if a given mode may be supported by the user agent and device capabilities.
* @param mode
*/
isSessionSupported(mode: XRSessionMode): Promise<boolean>;
ondevicechange: ((this: XRSystem, ev: XRSystemDeviceChangeEvent) => any) | null;
onsessiongranted: ((this: XRSystem, ev: XRSystemSessionGrantedEvent) => any) | null;
ondevicechange: XRSystemDeviceChangeEventHandler | null;
addEventListener<K extends keyof XRSystemEventMap>(type: K, listener: (this: XRSystem, ev: XRSystemEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void;
removeEventListener<K extends keyof XRSystemEventMap>(type: K, listener: (this: XRSystem, ev: XRSystemEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
removeEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions): void;
addEventListener<K extends keyof XRSystemEventMap>(
type: K,
listener: (this: XRSystem, ev: XRSystemEventMap[K]) => any,
options?: boolean | AddEventListenerOptions,
): void;
addEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | AddEventListenerOptions,
): void;
removeEventListener<K extends keyof XRSystemEventMap>(
type: K,
listener: (this: XRSystem, ev: XRSystemEventMap[K]) => any,
options?: boolean | EventListenerOptions,
): void;
removeEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | EventListenerOptions,
): void;
}
declare abstract class XRSystem implements XRSystem {}
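A short usage sketch for the XRSystem entry point declared above, assuming a WebXR-capable browser and that a user gesture has already occurred (most browsers require one before an immersive requestSession call):

```ts
async function startImmersiveVR(): Promise<XRSession | null> {
    const xr = navigator.xr;
    if (!xr) return null; // WebXR not exposed by this browser

    if (!(await xr.isSessionSupported('immersive-vr'))) return null;

    // Feature names are plain strings per XRSessionInit.
    return xr.requestSession('immersive-vr', {
        optionalFeatures: ['local-floor', 'bounded-floor'],
    });
}

// The typed event map gives a strongly typed 'devicechange' listener.
navigator.xr?.addEventListener('devicechange', () => {
    console.log('XR device availability changed');
});
```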
/**
* Describes a viewport, or rectangular region, of a graphics surface.
*
* ref: https://immersive-web.github.io/webxr/#xrviewport-interface
*/
interface XRViewport {

@@ -129,38 +154,24 @@ readonly x: number;

interface XRWebGLLayerInit {
antialias?: boolean | undefined;
depth?: boolean | undefined;
stencil?: boolean | undefined;
alpha?: boolean | undefined;
ignoreDepthValues?: boolean | undefined;
framebufferScaleFactor?: number | undefined;
}
declare abstract class XRViewport implements XRViewport {}
declare class XRWebGLLayer implements XRLayer {
static getNativeFramebufferScaleFactor(session: XRSession): number;
/**
* Represents a virtual coordinate system with an origin that corresponds to a physical location.
* Spatial data that is requested from the API or given to the API is always expressed in relation
* to a specific XRSpace at the time of a specific XRFrame. Numeric values such as pose positions
* are coordinates in that space relative to its origin. The interface is intentionally opaque.
*
* ref: https://immersive-web.github.io/webxr/#xrspace-interface
*/
// tslint:disable-next-line no-empty-interface
interface XRSpace extends EventTarget {}
constructor(
session: XRSession,
context: WebGLRenderingContext | WebGL2RenderingContext,
layerInit?: XRWebGLLayerInit,
);
declare abstract class XRSpace implements XRSpace {}
readonly antialias: boolean;
readonly ignoreDepthValues: boolean;
fixedFoveation?: number | undefined;
readonly framebuffer: WebGLFramebuffer;
readonly framebufferWidth: number;
readonly framebufferHeight: number;
getViewport(view: XRView): XRViewport | undefined;
addEventListener(type: string, listener: EventListenerOrEventListenerObject | null, options?: boolean | AddEventListenerOptions): void;
removeEventListener(type: string, callback: EventListenerOrEventListenerObject | null, options?: boolean | EventListenerOptions): void;
dispatchEvent(event: Event): boolean;
interface XRRenderStateInit {
baseLayer?: XRWebGLLayer | undefined;
depthFar?: number | undefined;
depthNear?: number | undefined;
inlineVerticalFieldOfView?: number | undefined;
}
// tslint:disable-next-line no-empty-interface
interface XRSpace extends EventTarget { }
interface XRRenderState {

@@ -171,20 +182,75 @@ readonly baseLayer?: XRWebGLLayer | undefined;

readonly inlineVerticalFieldOfView?: number | undefined;
}
// https://immersive-web.github.io/layers/#xrrenderstatechanges
readonly layers?: XRLayer[] | undefined;
declare abstract class XRRenderState implements XRRenderState {}
interface XRReferenceSpaceEventInit extends EventInit {
referenceSpace?: XRReferenceSpace | undefined;
transform?: XRRigidTransform | undefined;
}
interface XRRenderStateInit {
baseLayer?: XRWebGLLayer | undefined;
depthFar?: number | undefined;
depthNear?: number | undefined;
inlineVerticalFieldOfView?: number | undefined;
layers?: XRLayer[] | undefined;
/**
* XRReferenceSpaceEvents are fired to indicate changes to the state of an XRReferenceSpace.
*
* ref: https://immersive-web.github.io/webxr/#xrreferencespaceevent-interface
*/
interface XRReferenceSpaceEvent extends Event {
readonly type: 'reset';
readonly referenceSpace: XRReferenceSpace;
readonly transform?: XRRigidTransform | undefined;
}
// tslint:disable-next-line no-unnecessary-class
declare class XRReferenceSpaceEvent implements XRReferenceSpaceEvent {
constructor(type: 'reset', eventInitDict?: XRReferenceSpaceEventInit);
}
interface XRReferenceSpaceEventHandler {
(event: XRReferenceSpaceEvent): any;
}
interface XRReferenceSpaceEventMap {
reset: XRReferenceSpaceEvent;
}
/**
* One of several common XRSpaces that applications can use to establish a spatial relationship
* with the user's physical environment.
*
* ref: https://immersive-web.github.io/webxr/#xrreferencespace-interface
*/
interface XRReferenceSpace extends XRSpace {
getOffsetReferenceSpace(originOffset: XRRigidTransform): XRReferenceSpace;
onreset: XREventHandler;
onreset: XRReferenceSpaceEventHandler;
addEventListener<K extends keyof XRReferenceSpaceEventMap>(
type: K,
listener: (this: XRReferenceSpace, ev: XRReferenceSpaceEventMap[K]) => any,
options?: boolean | AddEventListenerOptions,
): void;
addEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | AddEventListenerOptions,
): void;
removeEventListener<K extends keyof XRReferenceSpaceEventMap>(
type: K,
listener: (this: XRReferenceSpace, ev: XRReferenceSpaceEventMap[K]) => any,
options?: boolean | EventListenerOptions,
): void;
removeEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | EventListenerOptions,
): void;
}
declare abstract class XRReferenceSpace implements XRReferenceSpace {}
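A sketch of working with a reference space, assuming an active XRSession whose requestReferenceSpace() method (declared elsewhere in this file, outside the hunks shown) resolves the requested space type:

```ts
async function setupReferenceSpace(session: XRSession): Promise<XRReferenceSpace> {
    const refSpace = await session.requestReferenceSpace('local-floor');

    // Derive a new space whose origin is shifted one metre forward.
    const offsetSpace = refSpace.getOffsetReferenceSpace(
        new XRRigidTransform({ x: 0, y: 0, z: -1 }),
    );

    // 'reset' fires when the space's origin is recalibrated by the system.
    refSpace.addEventListener('reset', (ev: XRReferenceSpaceEvent) => {
        console.log('reference space reset', ev.transform);
    });

    return offsetSpace;
}
```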
/**
* Extends XRReferenceSpace to include boundsGeometry, indicating the pre-configured boundaries
* of the user's space.
*
* ref: https://immersive-web.github.io/webxr/#xrboundedreferencespace-interface
*/
interface XRBoundedReferenceSpace extends XRReferenceSpace {

@@ -194,2 +260,13 @@ readonly boundsGeometry: DOMPointReadOnly[];

declare abstract class XRBoundedReferenceSpace implements XRBoundedReferenceSpace {}
/**
* Represents an XR input source, which is any input mechanism which allows the user to perform
* targeted actions in the same virtual space as the viewer. Example XR input sources include,
* but are not limited to, handheld controllers, optically tracked hands, and gaze-based input
* methods that operate on the viewer's pose. Input mechanisms which are not explicitly associated
* with the XR device, such as traditional gamepads, mice, or keyboards SHOULD NOT be considered
* XR input sources.
* ref: https://immersive-web.github.io/webxr/#xrinputsource-interface
*/
interface XRInputSource {

@@ -205,2 +282,29 @@ readonly handedness: XRHandedness;

declare abstract class XRInputSource implements XRInputSource {}
/**
* Represents a list of XRInputSources. It is used in favor of a frozen array type when the contents
* of the list are expected to change over time, such as with the XRSession inputSources attribute.
* ref: https://immersive-web.github.io/webxr/#xrinputsourcearray-interface
*/
interface XRInputSourceArray {
[Symbol.iterator](): IterableIterator<XRInputSource>;
[n: number]: XRInputSource;
length: number;
entries(): IterableIterator<[number, XRInputSource]>;
keys(): IterableIterator<number>;
values(): IterableIterator<XRInputSource>;
forEach(callbackfn: (value: XRInputSource, index: number, array: XRInputSource[]) => void, thisArg?: any): void;
}
declare abstract class XRInputSourceArray implements XRInputSourceArray {}
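For illustration, XRInputSourceArray is both iterable and array-like, so an active session's inputSources can be walked either way (a sketch, assuming an active session):

```ts
function logInputSources(session: XRSession): void {
    for (const source of session.inputSources) {
        console.log('handedness:', source.handedness);
    }
    session.inputSources.forEach((source, index) => {
        console.log(`input source #${index}`, source);
    });
}
```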
/**
* Describes a position and orientation in space relative to an XRSpace.
*
* ref: https://immersive-web.github.io/webxr/#xrpose-interface
*/
interface XRPose {

@@ -211,48 +315,111 @@ readonly transform: XRRigidTransform;

declare abstract class XRPose implements XRPose {}
/**
* Represents a snapshot of the state of all of the tracked objects for an XRSession. Applications
* can acquire an XRFrame by calling requestAnimationFrame() on an XRSession with an
* XRFrameRequestCallback. When the callback is called it will be passed an XRFrame.
* Events which need to communicate tracking state, such as the select event, will also provide an
* XRFrame.
*
* ref: https://immersive-web.github.io/webxr/#xrframe-interface
*/
interface XRFrame {
readonly session: XRSession;
readonly predictedDisplayTime: DOMHighResTimeStamp;
/**
* Provides the pose of space relative to baseSpace as an XRPose, at the time represented by
* the XRFrame.
*
* @param space
* @param baseSpace
*/
getPose(space: XRSpace, baseSpace: XRSpace): XRPose | undefined;
/**
* Provides the pose of the viewer relative to referenceSpace as an XRViewerPose, at the
* XRFrame's time.
*
* @param referenceSpace
*/
getViewerPose(referenceSpace: XRReferenceSpace): XRViewerPose | undefined;
}
// AR
getHitTestResults(hitTestSource: XRHitTestSource): XRHitTestResult[];
getHitTestResultsForTransientInput(
hitTestSource: XRTransientInputHitTestSource
): XRTransientInputHitTestResult[];
declare abstract class XRFrame implements XRFrame {}
// Anchors
trackedAnchors?: XRAnchorSet | undefined;
createAnchor?: (pose: XRRigidTransform, space: XRSpace) => Promise<XRAnchor> | undefined;
/**
* Type of XR events available
*/
type XRInputSourceEventType = 'select' | 'selectend' | 'selectstart' | 'squeeze' | 'squeezeend' | 'squeezestart';
// Planes
worldInformation?: {
detectedPlanes?: XRPlaneSet | undefined;
} | undefined;
interface XRInputSourceEventInit extends EventInit {
frame?: XRFrame | undefined;
inputSource?: XRInputSource | undefined;
}
// Hand tracking
getJointPose?: (joint: XRJointSpace, baseSpace: XRSpace) => XRJointPose | undefined;
/**
* XRInputSourceEvents are fired to indicate changes to the state of an XRInputSource.
* ref: https://immersive-web.github.io/webxr/#xrinputsourceevent-interface
*/
declare class XRInputSourceEvent extends Event {
readonly type: XRInputSourceEventType;
readonly frame: XRFrame;
readonly inputSource: XRInputSource;
constructor(type: XRInputSourceEventType, eventInitDict?: XRInputSourceEventInit);
}
declare class XRFrame {
prototype: XRFrame;
interface XRInputSourceEventHandler {
(evt: XRInputSourceEvent): any;
}
interface XRInputSourceEvent extends Event {
readonly frame: XRFrame;
readonly inputSource: XRInputSource;
type XRSessionEventType = 'end' | 'visibilitychange' | 'frameratechange';
interface XRSessionEventInit extends EventInit {
session: XRSession;
}
/**
* XRSessionEvents are fired to indicate changes to the state of an XRSession.
* ref: https://immersive-web.github.io/webxr/#xrsessionevent-interface
*/
declare class XRSessionEvent extends Event {
readonly session: XRSession;
constructor(type: XRSessionEventType, eventInitDict?: XRSessionEventInit);
}
interface XRSessionEventHandler {
(evt: XRSessionEvent): any;
}
/**
* ref: https://immersive-web.github.io/webxr/#feature-dependencies
*/
interface XRSessionInit {
optionalFeatures?: string[] | undefined;
requiredFeatures?: string[] | undefined;
}
interface XRSessionEventMap {
"end": XREventHandler;
"inputsourceschange": XREventHandler;
"select": XREventHandler;
"selectstart": XREventHandler;
"selectend": XREventHandler;
"squeeze": XREventHandler;
"squeezestart": XREventHandler;
"squeezeend": XREventHandler;
"visibilitychange": XREventHandler;
"frameratechange": XREventHandler;
inputsourceschange: XRInputSourceChangeEvent;
end: XRSessionEvent;
visibilitychange: XRSessionEvent;
frameratechange: XRSessionEvent;
select: XRInputSourceEvent;
selectstart: XRInputSourceEvent;
selectend: XRInputSourceEvent;
squeeze: XRInputSourceEvent;
squeezestart: XRInputSourceEvent;
squeezeend: XRInputSourceEvent;
}
/**
* Any interaction with XR hardware is done via an XRSession object, which can only be
* retrieved by calling requestSession() on the XRSystem object. Once a session has been
* successfully acquired, it can be used to poll the viewer pose, query information about
* the user's environment, and present imagery to the user.
*
* ref: https://immersive-web.github.io/webxr/#xrsession-interface
*/
interface XRSession extends EventTarget {

@@ -263,3 +430,3 @@ /**

*/
readonly inputSources: XRInputSource[];
readonly inputSources: XRInputSourceArray;
/**

@@ -305,51 +472,78 @@ * object which contains options affecting how the imagery is rendered.

updateRenderState(renderStateInit: XRRenderStateInit): Promise<void>;
updateRenderState(renderStateInit?: XRRenderStateInit): Promise<void>;
updateTargetFrameRate(rate: number): Promise<void>;
onend: XREventHandler;
oninputsourceschange: XREventHandler;
onselect: XREventHandler;
onselectstart: XREventHandler;
onselectend: XREventHandler;
onsqueeze: XREventHandler;
onsqueezestart: XREventHandler;
onsqueezeend: XREventHandler;
onvisibilitychange: XREventHandler;
onframeratechange: XREventHandler;
onend: XRSessionEventHandler;
oninputsourceschange: XRInputSourceChangeEventHandler;
onselect: XRInputSourceEventHandler;
onselectstart: XRInputSourceEventHandler;
onselectend: XRInputSourceEventHandler;
onsqueeze: XRInputSourceEventHandler;
onsqueezestart: XRInputSourceEventHandler;
onsqueezeend: XRInputSourceEventHandler;
onvisibilitychange: XRSessionEventHandler;
onframeratechange: XRSessionEventHandler;
addEventListener<K extends keyof XRSessionEventMap>(type: K, listener: (this: XRSession, ev: XRSessionEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void;
removeEventListener<K extends keyof XRSessionEventMap>(type: K, listener: (this: XRSession, ev: XRSessionEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
removeEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions): void;
// hit test
requestHitTestSource?: (options: XRHitTestOptionsInit) => Promise<XRHitTestSource> | undefined;
requestHitTestSourceForTransientInput?: (
options: XRTransientInputHitTestOptionsInit,
) => Promise<XRTransientInputHitTestSource> | undefined;
// legacy AR hit test
requestHitTest?: (ray: XRRay, referenceSpace: XRReferenceSpace) => Promise<XRHitResult[]> | undefined;
// legacy plane detection
updateWorldTrackingState?: (options: { planeDetectionState?: { enabled: boolean } | undefined }) => void | undefined;
addEventListener<K extends keyof XRSessionEventMap>(
type: K,
listener: (this: XRSession, ev: XRSessionEventMap[K]) => any,
options?: boolean | AddEventListenerOptions,
): void;
addEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | AddEventListenerOptions,
): void;
removeEventListener<K extends keyof XRSessionEventMap>(
type: K,
listener: (this: XRSession, ev: XRSessionEventMap[K]) => any,
options?: boolean | EventListenerOptions,
): void;
removeEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | EventListenerOptions,
): void;
}
declare class XRSession {
prototype: XRSession;
}
declare abstract class XRSession implements XRSession {}
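A minimal frame-loop sketch tying XRSession and XRFrame together, assuming the session is active, refSpace came from requestReferenceSpace(), and actual rendering is stubbed out:

```ts
function startFrameLoop(session: XRSession, refSpace: XRReferenceSpace): void {
    const onFrame: XRFrameRequestCallback = (_time, frame) => {
        // Like the DOM API, each callback must schedule the next frame itself.
        session.requestAnimationFrame(onFrame);

        const viewerPose = frame.getViewerPose(refSpace);
        if (!viewerPose) return; // tracking lost for this frame

        for (const view of viewerPose.views) {
            // view.transform and view.projectionMatrix drive per-eye camera
            // setup; the draw calls themselves are omitted here.
            void view;
        }
    };
    session.requestAnimationFrame(onFrame);

    session.addEventListener('end', () => console.log('XR session ended'));
}
```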
/**
* An XRPose describing the state of a viewer of the XR scene as tracked by the XR
* device. A viewer may represent a tracked piece of hardware, the observed position
* of a user's head relative to the hardware, or some other means of computing a series
* of viewpoints into the XR scene. XRViewerPoses can only be queried relative to an
* XRReferenceSpace. It provides, in addition to the XRPose values, an array of views
* which include rigid transforms to indicate the viewpoint and projection matrices.
* These values should be used by the application when rendering a frame of an XR scene.
*
* ref: https://immersive-web.github.io/webxr/#xrviewerpose-interface
*/
interface XRViewerPose extends XRPose {
readonly views: XRView[];
readonly views: ReadonlyArray<XRView>;
}
declare abstract class XRViewerPose implements XRViewerPose {}
/**
* A transform described by a position and orientation. When interpreting an
* XRRigidTransform the orientation is always applied prior to the position.
*
* ref: https://immersive-web.github.io/webxr/#xrrigidtransform-interface
*/
declare class XRRigidTransform {
readonly position: DOMPointReadOnly;
readonly orientation: DOMPointReadOnly;
readonly matrix: Float32Array;
readonly inverse: XRRigidTransform;
constructor(position?: DOMPointInit, direction?: DOMPointInit);
position: DOMPointReadOnly;
orientation: DOMPointReadOnly;
matrix: Float32Array;
inverse: XRRigidTransform;
}
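A small illustration of the constructor above; the second argument is a unit quaternion (despite the parameter name `direction` in this declaration), and the orientation is applied before the position:

```ts
// Translate two metres backwards along +Z.
const moveBack = new XRRigidTransform({ x: 0, y: 0, z: 2 });

// Quarter turn about the Y axis: quaternion (x, y, z, w).
const quarterTurn = new XRRigidTransform(
    { x: 0, y: 0, z: 0 },
    { x: 0, y: Math.sqrt(0.5), z: 0, w: Math.sqrt(0.5) },
);

// matrix is a 16-element column-major Float32Array; inverse undoes the transform.
console.log(moveBack.matrix.length, quarterTurn.inverse.orientation);
```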
/**
* Describes a single view into an XR scene for a given frame.
*
* ref: https://immersive-web.github.io/webxr/#xrview-interface
*/
interface XRView {

@@ -363,29 +557,55 @@ readonly eye: XREye;

declare abstract class XRView implements XRView {}
/**
* XRInputSourcesChangeEvents are fired to indicate changes to the XRInputSources that are
* available to an XRSession.
* ref: https://immersive-web.github.io/webxr/#xrinputsourceschangeevent-interface
*/
interface XRInputSourceChangeEvent extends XRSessionEvent {
removed: XRInputSource[];
added: XRInputSource[];
readonly removed: ReadonlyArray<XRInputSource>;
readonly added: ReadonlyArray<XRInputSource>;
}
interface XRInputSourceChangeEventHandler {
(evt: XRInputSourceChangeEvent): any;
}
// Experimental/Draft features
// Anchors
type XRAnchorSet = Set<XRAnchor>;
interface XRAnchor {
anchorSpace: XRSpace;
delete(): void;
}
declare abstract class XRAnchor implements XRAnchor {}
interface XRFrame {
trackedAnchors?: XRAnchorSet | undefined;
createAnchor?: (pose: XRRigidTransform, space: XRSpace) => Promise<XRAnchor> | undefined;
}
// AR Hit testing
declare class XRRay {
constructor(transformOrOrigin: XRRigidTransform | DOMPointInit, direction?: DOMPointInit);
readonly origin: DOMPointReadOnly;
readonly direction: DOMPointReadOnly;
readonly matrix: Float32Array;
constructor(transformOrOrigin?: XRRigidTransform | DOMPointInit, direction?: DOMPointInit);
}
type XRHitTestTrackableType =
| 'point'
| 'plane'
| 'mesh';
type XRHitTestTrackableType = 'point' | 'plane' | 'mesh';
interface XRHitResult {
hitMatrix: Float32Array;
}
interface XRTransientInputHitTestResult {
readonly inputSource: XRInputSource;
readonly results: XRHitTestResult[];
readonly results: ReadonlyArray<XRHitTestResult>;
}
declare class XRTransientInputHitTestResult {
prototype: XRTransientInputHitTestResult;
}
interface XRHitTestResult {

@@ -397,2 +617,4 @@ getPose(baseSpace: XRSpace): XRPose | undefined;

declare abstract class XRHitTestResult implements XRHitTestResult {}
interface XRHitTestSource {

@@ -402,2 +624,4 @@ cancel(): void;

declare abstract class XRHitTestSource implements XRHitTestSource {}
interface XRTransientInputHitTestSource {

@@ -407,2 +631,4 @@ cancel(): void;

declare abstract class XRTransientInputHitTestSource implements XRTransientInputHitTestSource {}
interface XRHitTestOptionsInit {

@@ -420,9 +646,29 @@ space: XRSpace;

interface XRAnchor {
anchorSpace: XRSpace;
delete(): void;
interface XRSession {
requestHitTestSource?: (options: XRHitTestOptionsInit) => Promise<XRHitTestSource> | undefined;
requestHitTestSourceForTransientInput?: (
options: XRTransientInputHitTestOptionsInit,
) => Promise<XRTransientInputHitTestSource> | undefined;
// Legacy
requestHitTest?: (ray: XRRay, referenceSpace: XRReferenceSpace) => Promise<XRHitResult[]> | undefined;
}
interface XRFrame {
getHitTestResults(hitTestSource: XRHitTestSource): XRHitTestResult[];
getHitTestResultsForTransientInput(hitTestSource: XRTransientInputHitTestSource): XRTransientInputHitTestResult[];
}
// Legacy
interface XRHitResult {
hitMatrix: Float32Array;
}
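A sketch of the AR hit-test flow these optional members describe, assuming an 'immersive-ar' session created with the 'hit-test' feature and a viewer space obtained via requestReferenceSpace('viewer'):

```ts
async function createHitTestSource(
    session: XRSession,
    viewerSpace: XRSpace,
): Promise<XRHitTestSource | null> {
    // requestHitTestSource is optional, so guard before calling it.
    if (!session.requestHitTestSource) return null;
    const source = await session.requestHitTestSource({ space: viewerSpace });
    return source ?? null;
}

function pollHitTest(frame: XRFrame, source: XRHitTestSource, refSpace: XRReferenceSpace): void {
    const results = frame.getHitTestResults(source);
    if (results.length === 0) return;

    const pose = results[0].getPose(refSpace);
    if (pose) console.log('hit at', pose.transform.position);
}
```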
// Plane detection
type XRPlaneSet = Set<XRPlane>;
type XRPlaneOrientation = 'horizontal' | 'vertical';
interface XRPlane {
orientation: 'horizontal' | 'vertical';
orientation: XRPlaneOrientation;
planeSpace: XRSpace;

@@ -433,2 +679,20 @@ polygon: DOMPointReadOnly[];

declare abstract class XRPlane implements XRPlane {}
interface XRSession {
// Legacy
updateWorldTrackingState?: (options: {
planeDetectionState?: { enabled: boolean } | undefined;
}) => void | undefined;
}
interface XRFrame {
worldInformation?:
| {
detectedPlanes?: XRPlaneSet | undefined;
}
| undefined;
}
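A sketch of reading the draft/legacy plane-detection surface declared above; everything is feature-checked since both members are optional:

```ts
function logDetectedPlanes(frame: XRFrame, refSpace: XRReferenceSpace): void {
    const planes = frame.worldInformation?.detectedPlanes;
    if (!planes) return;

    planes.forEach(plane => {
        const pose = frame.getPose(plane.planeSpace, refSpace);
        console.log(
            `${plane.orientation} plane, ${plane.polygon.length} polygon points`,
            pose?.transform.position,
        );
    });
}
```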
// Hand Tracking
type XRHandJoint =

@@ -465,2 +729,4 @@ | 'wrist'

declare abstract class XRJointSpace implements XRJointSpace {}
interface XRJointPose extends XRPose {

@@ -470,7 +736,5 @@ readonly radius: number | undefined;

interface XRHand extends Iterable<XRJointSpace> {
readonly length: number;
declare abstract class XRJointPose implements XRJointPose {}
[index: number]: XRJointSpace;
interface XRHand extends Map<number, XRJointSpace> {
readonly WRIST: number;

@@ -508,9 +772,65 @@

declare abstract class XRHand implements XRHand {}
interface XRFrame {
getJointPose?: (joint: XRJointSpace, baseSpace: XRSpace) => XRJointPose | undefined;
}
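A hand-tracking sketch based on these declarations, where XRHand is a Map<number, XRJointSpace> with numeric joint constants such as WRIST. It assumes the session was created with the 'hand-tracking' feature and that XRInputSource exposes an optional `hand` member (declared in the full file, outside the hunks shown):

```ts
function logWristPose(frame: XRFrame, inputSource: XRInputSource, refSpace: XRSpace): void {
    const hand = inputSource.hand;
    if (!hand || !frame.getJointPose) return; // no hand data or no joint-pose support

    const wristSpace = hand.get(hand.WRIST);
    if (!wristSpace) return;

    const jointPose = frame.getJointPose(wristSpace, refSpace);
    if (jointPose) {
        console.log('wrist radius (m):', jointPose.radius, jointPose.transform.position);
    }
}
```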
// WebXR Layers
interface XRLayerEventInit extends EventInit {
layer: XRLayer;
/**
* The base class for XRWebGLLayer and other layer types introduced by future extensions.
* ref: https://immersive-web.github.io/webxr/#xrlayer-interface
*/
// tslint:disable-next-line no-empty-interface
interface XRLayer extends EventTarget {}
declare abstract class XRLayer implements XRLayer {}
interface XRWebGLLayerInit {
antialias?: boolean | undefined;
depth?: boolean | undefined;
stencil?: boolean | undefined;
alpha?: boolean | undefined;
ignoreDepthValues?: boolean | undefined;
framebufferScaleFactor?: number | undefined;
}
declare class XRLayerEvent extends Event {
constructor(type: string, eventInitDict: XRLayerEventInit);
/**
* A layer which provides a WebGL framebuffer to render into, enabling hardware accelerated
* rendering of 3D graphics to be presented on the XR device.
* ref: https://immersive-web.github.io/webxr/#xrwebgllayer-interface
*/
declare class XRWebGLLayer extends XRLayer {
static getNativeFramebufferScaleFactor(session: XRSession): number;
constructor(
session: XRSession,
context: WebGLRenderingContext | WebGL2RenderingContext,
layerInit?: XRWebGLLayerInit,
);
readonly antialias: boolean;
readonly ignoreDepthValues: boolean;
fixedFoveation?: number | undefined;
readonly framebuffer: WebGLFramebuffer;
readonly framebufferWidth: number;
readonly framebufferHeight: number;
getViewport(view: XRView): XRViewport | undefined;
}
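A sketch of wiring an XRWebGLLayer into the session's render state and binding it per view, assuming an XR-compatible WebGL2 context (see the makeXRCompatible sketch earlier) and an active session:

```ts
async function attachWebGLLayer(session: XRSession, gl: WebGL2RenderingContext): Promise<XRWebGLLayer> {
    const scale = XRWebGLLayer.getNativeFramebufferScaleFactor(session);
    const layer = new XRWebGLLayer(session, gl, {
        antialias: true,
        framebufferScaleFactor: scale,
    });

    await session.updateRenderState({ baseLayer: layer });
    return layer;
}

// Per frame and per view: bind the layer's framebuffer and set its viewport.
function bindView(gl: WebGL2RenderingContext, layer: XRWebGLLayer, view: XRView): void {
    gl.bindFramebuffer(gl.FRAMEBUFFER, layer.framebuffer);
    const vp = layer.getViewport(view);
    if (vp) gl.viewport(vp.x, vp.y, vp.width, vp.height);
}
```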
interface XRRenderStateInit {
layers?: XRLayer[] | undefined;
}
interface XRRenderState {
readonly layers?: XRLayer[] | undefined;
}
type XRLayerEventType = 'redraw';
interface XRLayerEvent extends Event {
readonly type: XRLayerEventType;
readonly layer: XRLayer;

@@ -520,3 +840,3 @@ }

interface XRCompositionLayerEventMap {
"redraw": XRLayerEvent;
redraw: XRLayerEvent;
}

@@ -535,3 +855,3 @@

// Events
onredraw: (evt: XRCompositionLayerEventMap["redraw"]) => any;
onredraw: (evt: XRCompositionLayerEventMap['redraw']) => any;

@@ -542,3 +862,3 @@ addEventListener<K extends keyof XRCompositionLayerEventMap>(

callback: (evt: XRCompositionLayerEventMap[K]) => any,
options?: boolean | AddEventListenerOptions
options?: boolean | AddEventListenerOptions,
): void;

@@ -548,3 +868,3 @@ addEventListener(

listener: EventListenerOrEventListenerObject,
options?: boolean | AddEventListenerOptions
options?: boolean | AddEventListenerOptions,
): void;

@@ -555,3 +875,3 @@

type: K,
callback: (evt: XRCompositionLayerEventMap[K]) => any
callback: (evt: XRCompositionLayerEventMap[K]) => any,
): void;

@@ -561,15 +881,12 @@ removeEventListener(

listener: EventListenerOrEventListenerObject,
options?: boolean | EventListenerOptions
options?: boolean | EventListenerOptions,
): void;
}
type XRTextureType = "texture" | "texture-array";
declare abstract class XRCompositionLayer implements XRCompositionLayer {}
type XRLayerLayout =
| "default"
| "mono"
| "stereo"
| "stereo-left-right"
| "stereo-top-bottom";
type XRTextureType = 'texture' | 'texture-array';
type XRLayerLayout = 'default' | 'mono' | 'stereo' | 'stereo-left-right' | 'stereo-top-bottom';
interface XRProjectionLayerInit {

@@ -590,2 +907,4 @@ scaleFactor?: number | undefined;

declare abstract class XRProjectionLayer implements XRProjectionLayer {}
interface XRLayerInit {

@@ -630,2 +949,4 @@ mipLevels?: number | undefined;

declare abstract class XRCylinderLayer implements XRCylinderLayer {}
interface XRQuadLayerInit extends XRLayerInit {

@@ -650,2 +971,4 @@ textureType?: XRTextureType | undefined;

declare abstract class XRQuadLayer implements XRQuadLayer {}
interface XREquirectLayerInit extends XRLayerInit {

@@ -676,2 +999,4 @@ textureType?: XRTextureType | undefined;

declare abstract class XREquirectLayer implements XREquirectLayer {}
interface XRCubeLayerInit extends XRLayerInit {

@@ -685,2 +1010,4 @@ orientation?: DOMPointReadOnly | undefined;

declare abstract class XRCubeLayer implements XRCubeLayer {}
interface XRSubImage {

@@ -690,2 +1017,4 @@ readonly viewport: XRViewport;

declare abstract class XRSubImage implements XRSubImage {}
interface XRWebGLSubImage extends XRSubImage {

@@ -699,7 +1028,9 @@ readonly colorTexture: WebGLTexture;

declare abstract class XRWebGLSubImage implements XRWebGLSubImage {}
declare class XRWebGLBinding {
readonly nativeProjectionScaleFactor: number;
constructor(session: XRSession, context: WebGLRenderingContext);
readonly nativeProjectionScaleFactor: number;
createProjectionLayer(init?: XRProjectionLayerInit): XRProjectionLayer;

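A sketch of the composition-layers path using XRWebGLBinding as declared here, assuming an active session created with the 'layers' feature and an XR-compatible WebGL context; only layer creation and registration are shown:

```ts
async function useProjectionLayer(
    session: XRSession,
    gl: WebGLRenderingContext,
): Promise<XRProjectionLayer> {
    const binding = new XRWebGLBinding(session, gl);
    const layer = binding.createProjectionLayer();

    // With composition layers, XRRenderStateInit.layers is used instead of baseLayer.
    await session.updateRenderState({ layers: [layer] });
    return layer;
}
```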
@@ -724,58 +1055,6 @@ createQuadLayer(init?: XRQuadLayerInit): XRQuadLayer;

// WebGL extensions
interface WebGLRenderingContext {
makeXRCompatible(): Promise<void>;
getExtension(extensionName: "OCULUS_multiview"): OCULUS_multiview | null;
getExtension(extensionName: "EXT_blend_minmax"): EXT_blend_minmax | null;
getExtension(extensionName: "EXT_texture_filter_anisotropic"): EXT_texture_filter_anisotropic | null;
getExtension(extensionName: "EXT_frag_depth"): EXT_frag_depth | null;
getExtension(extensionName: "EXT_shader_texture_lod"): EXT_shader_texture_lod | null;
getExtension(extensionName: "EXT_sRGB"): EXT_sRGB | null;
getExtension(extensionName: "OES_vertex_array_object"): OES_vertex_array_object | null;
getExtension(extensionName: "WEBGL_color_buffer_float"): WEBGL_color_buffer_float | null;
getExtension(extensionName: "WEBGL_compressed_texture_astc"): WEBGL_compressed_texture_astc | null;
getExtension(extensionName: "WEBGL_compressed_texture_s3tc_srgb"): WEBGL_compressed_texture_s3tc_srgb | null;
getExtension(extensionName: "WEBGL_debug_shaders"): WEBGL_debug_shaders | null;
getExtension(extensionName: "WEBGL_draw_buffers"): WEBGL_draw_buffers | null;
getExtension(extensionName: "WEBGL_lose_context"): WEBGL_lose_context | null;
getExtension(extensionName: "WEBGL_depth_texture"): WEBGL_depth_texture | null;
getExtension(extensionName: "WEBGL_debug_renderer_info"): WEBGL_debug_renderer_info | null;
getExtension(extensionName: "WEBGL_compressed_texture_s3tc"): WEBGL_compressed_texture_s3tc | null;
getExtension(extensionName: "OES_texture_half_float_linear"): OES_texture_half_float_linear | null;
getExtension(extensionName: "OES_texture_half_float"): OES_texture_half_float | null;
getExtension(extensionName: "OES_texture_float_linear"): OES_texture_float_linear | null;
getExtension(extensionName: "OES_texture_float"): OES_texture_float | null;
getExtension(extensionName: "OES_standard_derivatives"): OES_standard_derivatives | null;
getExtension(extensionName: "OES_element_index_uint"): OES_element_index_uint | null;
getExtension(extensionName: "ANGLE_instanced_arrays"): ANGLE_instanced_arrays | null;
getExtension(extensionName: string): any;
interface WebGLRenderingContextBase {
getExtension(extensionName: 'OCULUS_multiview'): OCULUS_multiview | null;
}
interface WebGL2RenderingContext {
makeXRCompatible(): Promise<void>;
getExtension(extensionName: "OCULUS_multiview"): OCULUS_multiview | null;
getExtension(extensionName: "EXT_blend_minmax"): EXT_blend_minmax | null;
getExtension(extensionName: "EXT_texture_filter_anisotropic"): EXT_texture_filter_anisotropic | null;
getExtension(extensionName: "EXT_frag_depth"): EXT_frag_depth | null;
getExtension(extensionName: "EXT_shader_texture_lod"): EXT_shader_texture_lod | null;
getExtension(extensionName: "EXT_sRGB"): EXT_sRGB | null;
getExtension(extensionName: "OES_vertex_array_object"): OES_vertex_array_object | null;
getExtension(extensionName: "WEBGL_color_buffer_float"): WEBGL_color_buffer_float | null;
getExtension(extensionName: "WEBGL_compressed_texture_astc"): WEBGL_compressed_texture_astc | null;
getExtension(extensionName: "WEBGL_compressed_texture_s3tc_srgb"): WEBGL_compressed_texture_s3tc_srgb | null;
getExtension(extensionName: "WEBGL_debug_shaders"): WEBGL_debug_shaders | null;
getExtension(extensionName: "WEBGL_draw_buffers"): WEBGL_draw_buffers | null;
getExtension(extensionName: "WEBGL_lose_context"): WEBGL_lose_context | null;
getExtension(extensionName: "WEBGL_depth_texture"): WEBGL_depth_texture | null;
getExtension(extensionName: "WEBGL_debug_renderer_info"): WEBGL_debug_renderer_info | null;
getExtension(extensionName: "WEBGL_compressed_texture_s3tc"): WEBGL_compressed_texture_s3tc | null;
getExtension(extensionName: "OES_texture_half_float_linear"): OES_texture_half_float_linear | null;
getExtension(extensionName: "OES_texture_half_float"): OES_texture_half_float | null;
getExtension(extensionName: "OES_texture_float_linear"): OES_texture_float_linear | null;
getExtension(extensionName: "OES_texture_float"): OES_texture_float | null;
getExtension(extensionName: "OES_standard_derivatives"): OES_standard_derivatives | null;
getExtension(extensionName: "OES_element_index_uint"): OES_element_index_uint | null;
getExtension(extensionName: "ANGLE_instanced_arrays"): ANGLE_instanced_arrays | null;
getExtension(extensionName: string): any;
}
declare enum XOVR_multiview2 {

@@ -785,3 +1064,3 @@ FRAMEBUFFER_ATTACHMENT_TEXTURE_NUM_VIEWS_OVR = 0x9630,

MAX_VIEWS_OVR = 0x9631,
FRAMEBUFFER_INCOMPLETE_VIEW_TARGETS_OVR = 0x9633
FRAMEBUFFER_INCOMPLETE_VIEW_TARGETS_OVR = 0x9633,
}

@@ -801,6 +1080,31 @@

baseViewIndex: number,
numViews: number
numViews: number,
): WebGLRenderbuffer;
}
declare abstract class OVR_multiview2 implements OVR_multiview2 {}
// Oculus extensions
interface XRSessionGrant {
mode: XRSessionMode;
}
interface XRSystemSessionGrantedEvent extends Event {
type: 'sessiongranted';
session: XRSessionGrant;
}
interface XRSystemSessionGrantedEventHandler {
(event: XRSystemSessionGrantedEvent): any;
}
interface XRSystemEventMap {
// Session Grant events are a Meta Oculus Browser extension
sessiongranted: XRSystemSessionGrantedEvent;
}
interface XRSystem {
onsessiongranted: XRSystemSessionGrantedEventHandler | null;
}
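A sketch of listening for the Meta/Oculus Browser 'sessiongranted' extension typed above; the event indicates the browser has pre-authorized an immersive session (for example when navigating between VR pages), so the page may enter XR without a fresh user gesture:

```ts
navigator.xr?.addEventListener('sessiongranted', (ev: XRSystemSessionGrantedEvent) => {
    console.log('session granted for mode:', ev.session.mode);
    // A page would typically call navigator.xr!.requestSession(ev.session.mode) here.
});
```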
interface OCULUS_multiview extends OVR_multiview2 {

@@ -814,4 +1118,6 @@ framebufferTextureMultisampleMultiviewOVR(

baseViewIndex: GLint,
numViews: GLsizei
numViews: GLsizei,
): void;
}
declare abstract class OCULUS_multiview implements OCULUS_multiview {}
webxr/package.json

{
"name": "@types/webxr",
"version": "0.3.0",
"version": "0.4.0",
"description": "TypeScript definitions for webxr",

@@ -33,4 +33,4 @@ "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/webxr",

"dependencies": {},
"typesPublisherContentHash": "58eae7058b15df7d9bce991439ebca9b07bc5666e2af0bf74fe6581a557fbc5c",
"typesPublisherContentHash": "ef462a172f814285b672a6714277041d458fc94a7b8d9ff925aae19f9af98728",
"typeScriptVersion": "3.9"
}

webxr/README.md

@@ -11,7 +11,7 @@ # Installation

### Additional Details
* Last updated: Sat, 14 May 2022 08:01:35 GMT
* Last updated: Wed, 18 May 2022 14:01:35 GMT
* Dependencies: none
* Global values: `XOVR_multiview2`, `XRFrame`, `XRLayerEvent`, `XRMediaBinding`, `XRRay`, `XRRigidTransform`, `XRSession`, `XRWebGLBinding`, `XRWebGLLayer`
* Global values: `OCULUS_multiview`, `OVR_multiview2`, `XOVR_multiview2`, `XRAnchor`, `XRBoundedReferenceSpace`, `XRCompositionLayer`, `XRCubeLayer`, `XRCylinderLayer`, `XREquirectLayer`, `XRFrame`, `XRHand`, `XRHitTestResult`, `XRHitTestSource`, `XRInputSource`, `XRInputSourceArray`, `XRInputSourceEvent`, `XRJointPose`, `XRJointSpace`, `XRLayer`, `XRMediaBinding`, `XRPlane`, `XRPose`, `XRProjectionLayer`, `XRQuadLayer`, `XRRay`, `XRReferenceSpace`, `XRReferenceSpaceEvent`, `XRRenderState`, `XRRigidTransform`, `XRSession`, `XRSessionEvent`, `XRSpace`, `XRSubImage`, `XRSystem`, `XRTransientInputHitTestResult`, `XRTransientInputHitTestSource`, `XRView`, `XRViewerPose`, `XRViewport`, `XRWebGLBinding`, `XRWebGLLayer`, `XRWebGLSubImage`
# Credits
These definitions were written by [Rob Rohan](https://github.com/robrohan), [Raanan Weber](https://github.com/RaananW), and [Sean T. McBeth](https://github.com/capnmidnight).