@luma.gl/webgpu - npm package version comparison

Comparing versions 9.1.0-beta.9 and 9.2.0-alpha.1

dist/adapter/resources/webgpu-command-buffer.d.ts


dist/adapter/helpers/accessor-to-format.js

@@ -0,1 +1,2 @@

"use strict";
// luma.gl

@@ -2,0 +3,0 @@ // SPDX-License-Identifier: MIT


dist/adapter/helpers/get-bind-group.d.ts

@@ -11,3 +11,5 @@ import type { ComputeShaderLayout, BindingDeclaration, Binding } from '@luma.gl/core';

export declare function getBindGroup(device: GPUDevice, bindGroupLayout: GPUBindGroupLayout, shaderLayout: ComputeShaderLayout, bindings: Record<string, Binding>): GPUBindGroup;
export declare function getShaderLayoutBinding(shaderLayout: ComputeShaderLayout, bindingName: string): BindingDeclaration | null;
export declare function getShaderLayoutBinding(shaderLayout: ComputeShaderLayout, bindingName: string, options?: {
ignoreWarnings?: boolean;
}): BindingDeclaration | null;
//# sourceMappingURL=get-bind-group.d.ts.map

@@ -21,10 +21,17 @@ // luma.gl

const entries = getBindGroupEntries(bindings, shaderLayout);
return device.createBindGroup({
device.pushErrorScope('validation');
const bindGroup = device.createBindGroup({
layout: bindGroupLayout,
entries
});
device.popErrorScope().then((error) => {
if (error) {
log.error(`createBindGroup validation failed: ${error.message}`)();
}
});
return bindGroup;
}
export function getShaderLayoutBinding(shaderLayout, bindingName) {
export function getShaderLayoutBinding(shaderLayout, bindingName, options) {
const bindingLayout = shaderLayout.bindings.find(binding => binding.name === bindingName || `${binding.name}uniforms` === bindingName.toLocaleLowerCase());
if (!bindingLayout) {
if (!bindingLayout && !options?.ignoreWarnings) {
log.warn(`Binding ${bindingName} not set: Not found in shader layout.`)();
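The change above wraps createBindGroup in a WebGPU error scope so validation failures are logged rather than surfacing as uncaptured device errors. A minimal sketch of the pattern against a raw GPUDevice (names are illustrative, not part of this package):

// Hedged sketch of the error-scope pattern used in the hunk above.
function createBindGroupChecked(
  device: GPUDevice,
  descriptor: GPUBindGroupDescriptor
): GPUBindGroup {
  device.pushErrorScope('validation');
  const bindGroup = device.createBindGroup(descriptor);
  // popErrorScope() resolves with the captured GPUError, or null if none occurred
  device.popErrorScope().then(error => {
    if (error) {
      console.error(`createBindGroup validation failed: ${error.message}`);
    }
  });
  return bindGroup;
}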

@@ -43,8 +50,18 @@ }

if (bindingLayout) {
entries.push(getBindGroupEntry(value, bindingLayout.location));
const entry = getBindGroupEntry(value, bindingLayout.location);
if (entry) {
entries.push(entry);
}
}
// TODO - hack to automatically bind samplers to supplied texture default samplers
bindingLayout = getShaderLayoutBinding(shaderLayout, `${bindingName}Sampler`);
if (bindingLayout) {
entries.push(getBindGroupEntry(value, bindingLayout.location, { sampler: true }));
if (value instanceof Texture) {
bindingLayout = getShaderLayoutBinding(shaderLayout, `${bindingName}Sampler`, {
ignoreWarnings: true
});
if (bindingLayout) {
const entry = getBindGroupEntry(value, bindingLayout.location, { sampler: true });
if (entry) {
entries.push(entry);
}
}
}
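With this change a texture binding also satisfies a matching <name>Sampler entry in the shader layout, and the lookup now passes ignoreWarnings so that layouts without such an entry stay silent. A hedged usage sketch (the binding name, pipeline and texture are illustrative):

// Assumes a pipeline whose shader layout declares 'colorTexture' and,
// optionally, 'colorTextureSampler'.
renderPipeline.setBindings({
  // Supplying just the texture is enough: its default sampler is bound to
  // 'colorTextureSampler' when that binding exists, silently skipped otherwise.
  colorTexture: texture
});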

@@ -69,3 +86,3 @@ }

}
else if (binding instanceof Texture) {
if (binding instanceof Texture) {
if (options?.sampler) {

@@ -79,7 +96,8 @@ return {

binding: index,
resource: binding.handle.createView({ label: 'bind-group-auto-created' })
resource: binding.view.handle
};
}
throw new Error('invalid binding');
log.warn(`invalid binding ${name}`, binding);
return null;
}
//# sourceMappingURL=get-bind-group.js.map
// luma.gl
// SPDX-License-Identifier: MIT
// Copyright (c) vis.gl contributors
import { log, decodeVertexFormat } from '@luma.gl/core';
import { log, getVertexFormatInfo } from '@luma.gl/core';
// import {getAttributeInfosFromLayouts} from '@luma.gl/core';

@@ -48,3 +48,3 @@ /** Throw error on any WebGL-only vertex formats */

});
byteStride += decodeVertexFormat(format).byteLength;
byteStride += getVertexFormatInfo(format).byteLength;
}
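decodeVertexFormat is replaced by getVertexFormatInfo throughout this file; both return per-format metadata including byteLength. A minimal sketch, assuming the @luma.gl/core export used in the new import above:

import {getVertexFormatInfo} from '@luma.gl/core';

// 'float32x3' is 3 components x 4 bytes = 12 bytes per vertex
const arrayStride = getVertexFormatInfo('float32x3').byteLength;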

@@ -58,3 +58,3 @@ // non-interleaved mapping (just set offset and stride)

}
byteStride = decodeVertexFormat(format).byteLength;
byteStride = getVertexFormatInfo(format).byteLength;
stepMode =

@@ -81,3 +81,3 @@ attributeLayout.stepMode ||

vertexBufferLayouts.push({
arrayStride: decodeVertexFormat('float32x3').byteLength,
arrayStride: getVertexFormatInfo('float32x3').byteLength,
stepMode: attribute.stepMode || (attribute.name.startsWith('instance') ? 'instance' : 'vertex'),

@@ -130,7 +130,7 @@ attributes: [

if (!attribute) {
log.warn(`Unknown attribute ${name}`)();
log.warn(`Supplied attribute not present in shader layout: ${name}`)();
return null;
}
if (attributeNames.has(name)) {
throw new Error(`Duplicate attribute ${name}`);
throw new Error(`Found multiple entries for attribute: ${name}`);
}

@@ -137,0 +137,0 @@ attributeNames.add(name);

@@ -33,7 +33,7 @@ // luma.gl

// RASTERIZATION PARAMETERS
cullMode: (parameter, value, descriptor) => {
cullMode: (_, value, descriptor) => {
descriptor.primitive = descriptor.primitive || {};
descriptor.primitive.cullMode = value;
},
frontFace: (parameter, value, descriptor) => {
frontFace: (_, value, descriptor) => {
descriptor.primitive = descriptor.primitive || {};

@@ -43,23 +43,25 @@ descriptor.primitive.frontFace = value;

// DEPTH
depthWriteEnabled: (parameter, value, descriptor) => {
const depthStencil = addDepthStencil(descriptor);
depthStencil.depthWriteEnabled = value;
depthWriteEnabled: (_, value, descriptor) => {
if (value) {
const depthStencil = addDepthStencil(descriptor);
depthStencil.depthWriteEnabled = value;
}
},
depthCompare: (parameter, value, descriptor) => {
depthCompare: (_, value, descriptor) => {
const depthStencil = addDepthStencil(descriptor);
depthStencil.depthCompare = value;
},
depthFormat: (parameter, value, descriptor) => {
depthFormat: (_, value, descriptor) => {
const depthStencil = addDepthStencil(descriptor);
depthStencil.format = value;
},
depthBias: (parameter, value, descriptor) => {
depthBias: (_, value, descriptor) => {
const depthStencil = addDepthStencil(descriptor);
depthStencil.depthBias = value;
},
depthBiasSlopeScale: (parameter, value, descriptor) => {
depthBiasSlopeScale: (_, value, descriptor) => {
const depthStencil = addDepthStencil(descriptor);
depthStencil.depthBiasSlopeScale = value;
},
depthBiasClamp: (parameter, value, descriptor) => {
depthBiasClamp: (_, value, descriptor) => {
const depthStencil = addDepthStencil(descriptor);

@@ -69,11 +71,11 @@ depthStencil.depthBiasClamp = value;

// STENCIL
stencilReadMask: (parameter, value, descriptor) => {
stencilReadMask: (_, value, descriptor) => {
const depthStencil = addDepthStencil(descriptor);
depthStencil.stencilReadMask = value;
},
stencilWriteMask: (parameter, value, descriptor) => {
stencilWriteMask: (_, value, descriptor) => {
const depthStencil = addDepthStencil(descriptor);
depthStencil.stencilWriteMask = value;
},
stencilCompare: (parameter, value, descriptor) => {
stencilCompare: (_, value, descriptor) => {
const stencilFront = addDepthStencilFront(descriptor);

@@ -84,3 +86,3 @@ const stencilBack = addDepthStencilBack(descriptor);

},
stencilPassOperation: (parameter, value, descriptor) => {
stencilPassOperation: (_, value, descriptor) => {
const stencilFront = addDepthStencilFront(descriptor);

@@ -91,3 +93,3 @@ const stencilBack = addDepthStencilBack(descriptor);

},
stencilFailOperation: (parameter, value, descriptor) => {
stencilFailOperation: (_, value, descriptor) => {
const stencilFront = addDepthStencilFront(descriptor);

@@ -98,3 +100,3 @@ const stencilBack = addDepthStencilBack(descriptor);

},
stencilDepthFailOperation: (parameter, value, descriptor) => {
stencilDepthFailOperation: (_, value, descriptor) => {
const stencilFront = addDepthStencilFront(descriptor);

@@ -106,11 +108,11 @@ const stencilBack = addDepthStencilBack(descriptor);

// MULTISAMPLE
sampleCount: (parameter, value, descriptor) => {
sampleCount: (_, value, descriptor) => {
descriptor.multisample = descriptor.multisample || {};
descriptor.multisample.count = value;
},
sampleMask: (parameter, value, descriptor) => {
sampleMask: (_, value, descriptor) => {
descriptor.multisample = descriptor.multisample || {};
descriptor.multisample.mask = value;
},
sampleAlphaToCoverageEnabled: (parameter, value, descriptor) => {
sampleAlphaToCoverageEnabled: (_, value, descriptor) => {
descriptor.multisample = descriptor.multisample || {};

@@ -120,50 +122,40 @@ descriptor.multisample.alphaToCoverageEnabled = value;

// COLOR
colorMask: (parameter, value, descriptor) => {
const targets = addColorState(descriptor);
targets[0].writeMask = value;
colorMask: (_, value, descriptor) => {
const target = addColorState(descriptor, 0);
target.writeMask = value;
},
blendColorOperation: (parameter, value, descriptor) => {
addColorState(descriptor);
// const targets = addColorState(descriptor);
// const target = targets[0];
// const blend: GPUBlendState = target.blend || {color: {alpha: 0}};
// blend.color = blend.color || {};
// target.blend.color.operation = value;
}
/*
blendColorSrcTarget: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.color = targets[0].blend.color || {};
targets[0].blend.color.srcTarget = value;
blend: (_, value, descriptor) => {
if (value) {
addBlendState(descriptor, 0);
}
},
blendColorDstTarget: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.color = targets[0].blend.color || {};
targets[0].blend.color.dstTarget = value;
blendColorOperation: (_, value, descriptor) => {
const blend = addBlendState(descriptor, 0);
blend.color = blend.color || {};
blend.color.operation = value;
},
blendAlphaOperation: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.alpha = targets[0].blend.alpha || {};
targets[0].blend.alpha.operation = value;
blendColorSrcFactor: (_, value, descriptor) => {
const blend = addBlendState(descriptor, 0);
blend.color = blend.color || {};
blend.color.srcFactor = value;
},
blendAlphaSrcTarget: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.alpha = targets[0].blend.alpha || {};
targets[0].blend.alpha.srcTarget = value;
blendColorDstFactor: (_, value, descriptor) => {
const blend = addBlendState(descriptor, 0);
blend.color.dstFactor = value;
},
blendAlphaDstTarget: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.alpha = targets[0].blend.alpha || {};
targets[0].blend.alpha.dstTarget = value;
blendAlphaOperation: (_, value, descriptor) => {
const blend = addBlendState(descriptor, 0);
blend.alpha = blend.alpha || {};
blend.alpha.operation = value;
},
*/
blendAlphaSrcFactor: (_, value, descriptor) => {
const blend = addBlendState(descriptor, 0);
blend.alpha = blend.alpha || {};
blend.alpha.srcFactor = value;
},
blendAlphaDstFactor: (_, value, descriptor) => {
const blend = addBlendState(descriptor, 0);
blend.alpha = blend.alpha || {};
blend.alpha.dstFactor = value;
}
};

@@ -210,3 +202,4 @@ const DEFAULT_PIPELINE_DESCRIPTOR = {

}
function addColorState(descriptor) {
/** @todo - support multiple color targets... */
function addColorState(descriptor, attachment) {
// @ts-ignore

@@ -220,4 +213,9 @@ descriptor.fragment.targets = descriptor.fragment?.targets || [];

}
return descriptor.fragment?.targets;
return descriptor.fragment?.targets[0];
}
function addBlendState(descriptor, attachment) {
const target = addColorState(descriptor, attachment);
target.blend = target.blend || { color: {}, alpha: {} };
return target.blend;
}
//# sourceMappingURL=webgpu-parameters.js.map
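The rewritten setters map luma.gl blend parameters onto a standard GPUBlendState on the first color target. A hedged sketch of the mapping (parameter names are the ones handled above; the values show standard alpha blending):

// Input parameters (illustrative values):
const parameters = {
  blend: true,
  blendColorOperation: 'add',
  blendColorSrcFactor: 'src-alpha',
  blendColorDstFactor: 'one-minus-src-alpha',
  blendAlphaOperation: 'add',
  blendAlphaSrcFactor: 'one',
  blendAlphaDstFactor: 'one-minus-src-alpha'
};

// After applyParametersToRenderPipelineDescriptor, the first target holds roughly:
// descriptor.fragment.targets[0].blend = {
//   color: {operation: 'add', srcFactor: 'src-alpha', dstFactor: 'one-minus-src-alpha'},
//   alpha: {operation: 'add', srcFactor: 'one', dstFactor: 'one-minus-src-alpha'}
// };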

@@ -19,2 +19,4 @@ // luma.gl

const size = Math.ceil(this.byteLength / 4) * 4;
this.device.handle.pushErrorScope('out-of-memory');
this.device.handle.pushErrorScope('validation');
this.handle =

@@ -29,2 +31,12 @@ this.props.handle ||

});
this.device.handle.popErrorScope().then((error) => {
if (error) {
this.device.reportError(new Error(`Buffer validation failed: ${error.message}`), this);
}
});
this.device.handle.popErrorScope().then((error) => {
if (error) {
this.device.reportError(new Error(`Buffer out of memory: ${error.message}`), this);
}
});
if (props.data) {

@@ -31,0 +43,0 @@ this._writeMapped(props.data);
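WebGPU error scopes form a stack: popErrorScope resolves the most recently pushed scope first, which is why the validation handler appears before the out-of-memory handler in the buffer constructor above. A minimal sketch against a raw GPUDevice (descriptor values are illustrative):

gpuDevice.pushErrorScope('out-of-memory');
gpuDevice.pushErrorScope('validation');
const handle = gpuDevice.createBuffer({
  size: 256,
  usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST
});
// LIFO: the first pop reports validation errors, the second reports out-of-memory
gpuDevice.popErrorScope().then(error => error && console.error(`validation: ${error.message}`));
gpuDevice.popErrorScope().then(error => error && console.error(`out-of-memory: ${error.message}`));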

@@ -0,4 +1,7 @@

import type { RenderPassProps, ComputePassProps, CopyTextureToTextureOptions, CopyTextureToBufferOptions } from '@luma.gl/core';
import { CommandEncoder, CommandEncoderProps, Buffer, Texture } from '@luma.gl/core';
import type { CopyTextureToTextureOptions, CopyTextureToBufferOptions } from '@luma.gl/core';
import { WebGPUDevice } from "../webgpu-device.js";
import { WebGPUCommandBuffer } from "./webgpu-command-buffer.js";
import { WebGPURenderPass } from "./webgpu-render-pass.js";
import { WebGPUComputePass } from "./webgpu-compute-pass.js";
import { WebGPUQuerySet } from "./webgpu-query-set.js";

@@ -8,7 +11,11 @@ export declare class WebGPUCommandEncoder extends CommandEncoder {

readonly handle: GPUCommandEncoder;
constructor(device: WebGPUDevice, props: CommandEncoderProps);
constructor(device: WebGPUDevice, props?: CommandEncoderProps);
destroy(): void;
finish(options?: {
id?: string;
}): GPUCommandBuffer;
finish(props?: CommandEncoderProps): WebGPUCommandBuffer;
/**
* Allows a render pass to begin against a canvas context
* @todo need to support a "Framebuffer" equivalent (aka preconfigured RenderPassDescriptors?).
*/
beginRenderPass(props: RenderPassProps): WebGPURenderPass;
beginComputePass(props: ComputePassProps): WebGPUComputePass;
copyBufferToBuffer(options: {

@@ -15,0 +22,0 @@ sourceBuffer: Buffer;

@@ -5,6 +5,9 @@ // luma.gl

import { CommandEncoder } from '@luma.gl/core';
import { WebGPUCommandBuffer } from "./webgpu-command-buffer.js";
import { WebGPURenderPass } from "./webgpu-render-pass.js";
import { WebGPUComputePass } from "./webgpu-compute-pass.js";
export class WebGPUCommandEncoder extends CommandEncoder {
device;
handle;
constructor(device, props) {
constructor(device, props = {}) {
super(device, props);

@@ -15,4 +18,5 @@ this.device = device;

this.device.handle.createCommandEncoder({
// TODO was this removed in standard?
// measureExecutionTime: this.props.measureExecutionTime
label: this.props.id
// TODO was this removed in standard?
// measureExecutionTime: this.props.measureExecutionTime
});

@@ -22,5 +26,17 @@ this.handle.label = this.props.id;

destroy() { }
finish(options) {
return this.finish(options);
finish(props) {
return new WebGPUCommandBuffer(this, {
id: props?.id || 'unnamed-command-buffer'
});
}
/**
* Allows a render pass to begin against a canvas context
* @todo need to support a "Framebuffer" equivalent (aka preconfigured RenderPassDescriptors?).
*/
beginRenderPass(props) {
return new WebGPURenderPass(this.device, props);
}
beginComputePass(props) {
return new WebGPUComputePass(this.device, props);
}
// beginRenderPass(GPURenderPassDescriptor descriptor): GPURenderPassEncoder;

@@ -27,0 +43,0 @@ // beginComputePass(optional GPUComputePassDescriptor descriptor = {}): GPUComputePassEncoder;
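finish() now returns a luma.gl WebGPUCommandBuffer rather than a raw GPUCommandBuffer, and the encoder can open render and compute passes directly. A hedged usage sketch based on the signatures in this diff (pass props and ids are illustrative):

const commandEncoder = device.createCommandEncoder({id: 'frame-encoder'});
const renderPass = commandEncoder.beginRenderPass({clearColor: [0, 0, 0, 1]});
// ...issue draws against renderPass...
renderPass.end();
const commandBuffer = commandEncoder.finish({id: 'frame-commands'});
device.submit(commandBuffer);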

@@ -26,3 +26,3 @@ // luma.gl

this.props.handle ||
device.commandEncoder?.beginComputePass({
device.commandEncoder.handle.beginComputePass({
label: this.props.id,

@@ -29,0 +29,0 @@ timestampWrites

@@ -33,3 +33,4 @@ // luma.gl

this.device.handle.pushErrorScope('validation');
this.handle = this.props.handle || device.commandEncoder.beginRenderPass(renderPassDescriptor);
this.handle =
this.props.handle || device.commandEncoder.handle.beginRenderPass(renderPassDescriptor);
this.device.handle.popErrorScope().then((error) => {

@@ -36,0 +37,0 @@ if (error) {

@@ -15,2 +15,3 @@ import type { Binding, RenderPass, VertexArray } from '@luma.gl/core';

private _bindGroup;
get [Symbol.toStringTag](): string;
constructor(device: WebGPUDevice, props: RenderPipelineProps);

@@ -17,0 +18,0 @@ destroy(): void;

@@ -18,2 +18,5 @@ // luma.gl MIT license

_bindGroup = null;
get [Symbol.toStringTag]() {
return 'WebGPURenderPipeline';
}
constructor(device, props) {

@@ -52,2 +55,8 @@ super(device, props);

setBindings(bindings) {
// Invalidate the cached bind group if any value has changed
for (const [name, binding] of Object.entries(bindings)) {
if (this._bindings[name] !== binding) {
this._bindGroup = null;
}
}
Object.assign(this._bindings, bindings);

@@ -110,2 +119,13 @@ }

};
// Populate color targets
// TODO - at the moment blend and write mask are only set on the first target
const targets = [];
if (this.props.colorAttachmentFormats) {
for (const format of this.props.colorAttachmentFormats) {
targets.push(format ? { format: getWebGPUTextureFormat(format) } : null);
}
}
else {
targets.push({ format: getWebGPUTextureFormat(this.device.preferredColorFormat) });
}
// Set up the fragment stage

@@ -115,8 +135,3 @@ const fragment = {

entryPoint: this.props.fragmentEntryPoint || 'main',
targets: [
{
// TODO exclamation mark hack!
format: getWebGPUTextureFormat(this.device.getCanvasContext().format)
}
]
targets
};

@@ -132,2 +147,9 @@ // Create a partially populated descriptor

};
// Set depth format if required, defaulting to the preferred depth format
const depthFormat = this.props.depthStencilAttachmentFormat || this.device.preferredDepthFormat;
if (this.props.parameters.depthWriteEnabled) {
descriptor.depthStencil = {
format: getWebGPUTextureFormat(depthFormat)
};
}
// Set parameters on the descriptor

@@ -134,0 +156,0 @@ applyParametersToRenderPipelineDescriptor(descriptor, this.props.parameters);
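Color targets are now derived from props.colorAttachmentFormats (falling back to the device's preferred canvas format), and a depthStencil state is only added when depthWriteEnabled is set. A hedged sketch of the new props (prop names come from this diff; formats and parameters are illustrative):

const pipeline = device.createRenderPipeline({
  vs,
  fs,
  // one entry per color attachment; null leaves a slot empty
  colorAttachmentFormats: ['rgba8unorm', 'rgba16float'],
  depthStencilAttachmentFormat: 'depth24plus',
  parameters: {depthWriteEnabled: true, depthCompare: 'less-equal'}
});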

@@ -26,3 +26,3 @@ // luma.gl, MIT license

}
this.handle = this.handle || this.device.handle.createSampler(samplerDescriptor);
this.handle = props.handle || this.device.handle.createSampler(samplerDescriptor);
this.handle.label = this.props.id;

@@ -29,0 +29,0 @@ }

@@ -16,13 +16,19 @@ // luma.gl

this.texture = props.texture;
this.device.pushErrorScope('validation');
this.handle =
this.handle ||
this.texture.handle.createView({
format: (props.format || this.texture.format),
dimension: props.dimension || this.texture.dimension,
aspect: props.aspect,
baseMipLevel: props.baseMipLevel,
mipLevelCount: props.mipLevelCount, // GPUIntegerCoordinate;
baseArrayLayer: props.baseArrayLayer, // GPUIntegerCoordinate;
arrayLayerCount: props.arrayLayerCount // GPUIntegerCoordinate;
});
// props.handle ||
this.texture.handle.createView({
format: (this.props.format || this.texture.format),
dimension: this.props.dimension || this.texture.dimension,
aspect: this.props.aspect,
baseMipLevel: this.props.baseMipLevel,
mipLevelCount: this.props.mipLevelCount,
baseArrayLayer: this.props.baseArrayLayer,
arrayLayerCount: this.props.arrayLayerCount
});
this.device.handle.popErrorScope().then((error) => {
if (error) {
this.device.reportError(new Error(`TextureView validation failed: ${error.message}`), this);
}
});
this.handle.label = this.props.id;

@@ -29,0 +35,0 @@ }

@@ -1,2 +0,2 @@

import type { TextureProps, TextureViewProps, Sampler, SamplerProps, Texture1DData, Texture2DData, Texture3DData, TextureCubeData, TextureArrayData, TextureCubeArrayData, ExternalImage } from '@luma.gl/core';
import type { TextureProps, TextureViewProps, CopyExternalImageOptions, CopyImageDataOptions } from '@luma.gl/core';
import { Texture } from '@luma.gl/core';

@@ -14,42 +14,9 @@ import type { WebGPUDevice } from "../webgpu-device.js";

createView(props: TextureViewProps): WebGPUTextureView;
protected initialize(props: TextureProps): void;
protected createHandle(): GPUTexture;
/** @deprecated - intention is to use the createView public API */
createGPUTextureView(): GPUTextureView;
/**
* Set default sampler
* Accept a sampler instance or set of props;
*/
setSampler(sampler: Sampler | SamplerProps): this;
setTexture1DData(data: Texture1DData): void;
setTexture2DData(lodData: Texture2DData, depth?: number, target?: number): void;
setTexture3DData(lodData: Texture3DData, depth?: number, target?: number): void;
setTextureCubeData(data: TextureCubeData, depth?: number): void;
setTextureArrayData(data: TextureArrayData): void;
setTextureCubeArrayData(data: TextureCubeArrayData): void;
setData(options: {
data: any;
}): {
copyImageData(options_: CopyImageDataOptions): void;
copyExternalImage(options_: CopyExternalImageOptions): {
width: number;
height: number;
};
copyExternalImage(options: {
image: ExternalImage;
width?: number;
height?: number;
depth?: number;
sourceX?: number;
sourceY?: number;
mipLevel?: number;
x?: number;
y?: number;
z?: number;
aspect?: 'all' | 'stencil-only' | 'depth-only';
colorSpace?: 'srgb';
premultipliedAlpha?: boolean;
}): {
width: number;
height: number;
};
generateMipmapsWebGL(): void;
}
//# sourceMappingURL=webgpu-texture.d.ts.map

@@ -1,13 +0,5 @@

import { Texture } from '@luma.gl/core';
import { Texture, log } from '@luma.gl/core';
import { getWebGPUTextureFormat } from "../helpers/convert-texture-format.js";
import { WebGPUSampler } from "./webgpu-sampler.js";
import { WebGPUTextureView } from "./webgpu-texture-view.js";
const BASE_DIMENSIONS = {
'1d': '1d',
'2d': '2d',
'2d-array': '2d',
cube: '2d',
'cube-array': '2d',
'3d': '3d'
};
export class WebGPUTexture extends Texture {

@@ -21,40 +13,39 @@ device;

this.device = device;
// Texture base class strips out the data prop, so we need to add it back in
const propsWithData = { ...this.props };
if (props.data) {
propsWithData.data = props.data;
if (this.dimension === 'cube') {
this.depth = 6;
}
this.initialize(propsWithData);
}
destroy() {
this.handle?.destroy();
// @ts-expect-error readonly
this.handle = null;
}
createView(props) {
return new WebGPUTextureView(this.device, { ...props, texture: this });
}
initialize(props) {
// @ts-expect-error
this.handle = this.props.handle || this.createHandle();
this.handle.label ||= this.id;
if (this.props.data) {
if (Texture.isExternalImage(this.props.data)) {
this.copyExternalImage({ image: this.props.data });
this.device.handle.pushErrorScope('out-of-memory');
this.device.handle.pushErrorScope('validation');
this.handle =
this.props.handle ||
this.device.handle.createTexture({
label: this.id,
size: {
width: this.width,
height: this.height,
depthOrArrayLayers: this.depth
},
usage: this.props.usage || Texture.TEXTURE | Texture.COPY_DST,
dimension: this.baseDimension,
format: getWebGPUTextureFormat(this.format),
mipLevelCount: this.mipLevels,
sampleCount: this.props.samples
});
this.device.handle.popErrorScope().then((error) => {
if (error) {
this.device.reportError(new Error(`Texture validation failed: ${error.message}`), this);
}
else {
this.setData({ data: this.props.data });
});
this.device.handle.popErrorScope().then((error) => {
if (error) {
this.device.reportError(new Error(`Texture out of memory: ${error.message}`), this);
}
});
// Update props if external handle was supplied - used mainly by CanvasContext.getDefaultFramebuffer()
// TODO - Read all properties directly from the supplied handle?
if (this.props.handle) {
this.handle.label ||= this.id;
this.width = this.handle.width;
this.height = this.handle.height;
}
this.width = this.handle.width;
this.height = this.handle.height;
// Why not just read all properties directly from the texture
// this.depthOrArrayLayers = this.handle.depthOrArrayLayers;
// this.mipLevelCount = this.handle.mipLevelCount;
// this.sampleCount = this.handle.sampleCount;
// this.dimension = this.handle.dimension;
// this.format = this.handle.format;
// this.usage = this.handle.usage;
// Create a default sampler. This mimics the WebGL1 API where sampler props are stored on the texture
// this.setSampler(props.sampler);
this.sampler =

@@ -64,85 +55,59 @@ props.sampler instanceof WebGPUSampler

: new WebGPUSampler(this.device, props.sampler || {});
// TODO - To support texture arrays we need to create custom views...
// But we are not ready to expose TextureViews to the public API.
// @ts-expect-error
this.view = new WebGPUTextureView(this.device, { ...this.props, texture: this });
// format: this.props.format,
// dimension: this.props.dimension,
// aspect = "all";
// baseMipLevel: 0;
// mipLevelCount;
// baseArrayLayer = 0;
// arrayLayerCount;
}
createHandle() {
// Deduce size from data - TODO this is a hack
// @ts-expect-error
const width = this.props.width || this.props.data?.width || 1;
// @ts-expect-error
const height = this.props.height || this.props.data?.height || 1;
return this.device.handle.createTexture({
label: this.id,
size: {
width,
height,
depthOrArrayLayers: this.depth
},
usage: this.props.usage || Texture.TEXTURE | Texture.COPY_DST,
dimension: BASE_DIMENSIONS[this.dimension],
format: getWebGPUTextureFormat(this.format),
this.view = new WebGPUTextureView(this.device, {
...this.props,
texture: this,
mipLevelCount: this.mipLevels,
sampleCount: this.props.samples
arrayLayerCount: this.depth
});
// Set initial data
// Texture base class strips out the data prop from this.props, so we need to handle it here
this._initializeData(props.data);
}
/** @deprecated - intention is to use the createView public API */
createGPUTextureView() {
return this.handle.createView({ label: this.id });
destroy() {
this.handle?.destroy();
// @ts-expect-error readonly
this.handle = null;
}
/**
* Set default sampler
* Accept a sampler instance or set of props;
*/
setSampler(sampler) {
this.sampler =
sampler instanceof WebGPUSampler ? sampler : new WebGPUSampler(this.device, sampler);
return this;
createView(props) {
return new WebGPUTextureView(this.device, { ...props, texture: this });
}
setTexture1DData(data) {
throw new Error('not implemented');
copyImageData(options_) {
const { width, height, depth } = this;
const options = this._normalizeCopyImageDataOptions(options_);
this.device.handle.pushErrorScope('validation');
this.device.handle.queue.writeTexture(
// destination: GPUImageCopyTexture
{
// texture subresource
texture: this.handle,
mipLevel: options.mipLevel,
aspect: options.aspect,
// origin to write to
origin: [options.x, options.y, options.z]
},
// data
options.data,
// dataLayout: GPUImageDataLayout
{
offset: options.byteOffset,
bytesPerRow: options.bytesPerRow,
rowsPerImage: options.rowsPerImage
},
// size: GPUExtent3D - extents of the content to write
[width, height, depth]);
this.device.handle.popErrorScope().then((error) => {
if (error) {
this.device.reportError(new Error(`copyImageData validation failed: ${error.message}`));
}
});
}
setTexture2DData(lodData, depth, target) {
throw new Error('not implemented');
}
setTexture3DData(lodData, depth, target) {
throw new Error('not implemented');
}
setTextureCubeData(data, depth) {
throw new Error('not implemented');
}
setTextureArrayData(data) {
throw new Error('not implemented');
}
setTextureCubeArrayData(data) {
throw new Error('not implemented');
}
setData(options) {
if (ArrayBuffer.isView(options.data)) {
const clampedArray = new Uint8ClampedArray(options.data.buffer);
// TODO - pass through src data color space as ImageData Options?
const image = new ImageData(clampedArray, this.width, this.height);
return this.copyExternalImage({ image });
}
throw new Error('Texture.setData: Use CommandEncoder to upload data to texture in WebGPU');
}
copyExternalImage(options) {
const size = Texture.getExternalImageSize(options.image);
const opts = { ...Texture.defaultCopyExternalImageOptions, ...size, ...options };
const { image, sourceX, sourceY, width, height, depth, mipLevel, x, y, z, aspect, colorSpace, premultipliedAlpha, flipY } = opts;
// TODO - max out width
copyExternalImage(options_) {
const options = this._normalizeCopyExternalImageOptions(options_);
this.device.handle.pushErrorScope('validation');
this.device.handle.queue.copyExternalImageToTexture(
// source: GPUImageCopyExternalImage
{
source: image,
origin: [sourceX, sourceY],
flipY
source: options.image,
origin: [options.sourceX, options.sourceY],
flipY: options.flipY
},

@@ -152,13 +117,22 @@ // destination: GPUImageCopyTextureTagged

texture: this.handle,
origin: [x, y, z],
mipLevel,
aspect,
colorSpace,
premultipliedAlpha
origin: [options.x, options.y, options.depth],
mipLevel: options.mipLevel,
aspect: options.aspect,
colorSpace: options.colorSpace,
premultipliedAlpha: options.premultipliedAlpha
},
// copySize: GPUExtent3D
[width, height, depth]);
return { width, height };
[options.width, options.height, 1]);
this.device.handle.popErrorScope().then((error) => {
if (error) {
this.device.reportError(new Error(`copyExternalImage validation failed: ${error.message}`));
}
});
// TODO - should these be clipped to the texture size minus x,y,z?
return { width: options.width, height: options.height };
}
generateMipmapsWebGL() {
log.warn(`${this}: generateMipmaps not supported in WebGPU`)();
}
}
//# sourceMappingURL=webgpu-texture.js.map
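The many setTexture*Data/setData methods are consolidated into two upload paths: copyImageData for CPU typed arrays (queue.writeTexture under the hood) and copyExternalImage for browser image sources (queue.copyExternalImageToTexture). A hedged sketch (option values are illustrative; imageBitmap is assumed to be an ImageBitmap obtained elsewhere):

const texture = device.createTexture({format: 'rgba8unorm', width: 2, height: 2});

// Typed-array upload
texture.copyImageData({data: new Uint8Array(2 * 2 * 4)});

// External image upload
texture.copyExternalImage({image: imageBitmap});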

@@ -8,4 +8,4 @@ import type { Device, Buffer, VertexArrayProps, RenderPass } from '@luma.gl/core';

readonly device: WebGPUDevice;
/** Vertex Array is a helper class under WebGPU */
readonly handle: never;
/** Vertex Array is just a helper class under WebGPU */
readonly handle: null;
constructor(device: WebGPUDevice, props: VertexArrayProps);

@@ -12,0 +12,0 @@ destroy(): void;

@@ -12,4 +12,4 @@ // luma.gl

device;
/** Vertex Array is a helper class under WebGPU */
handle;
/** Vertex Array is just a helper class under WebGPU */
handle = null;
// Create a VertexArray

@@ -16,0 +16,0 @@ constructor(device, props) {

import { Adapter, DeviceProps } from '@luma.gl/core';
import { WebGPUDevice } from "./webgpu-device.js";
import type { WebGPUDevice } from "./webgpu-device.js";
export declare class WebGPUAdapter extends Adapter {
/** type of device's created by this adapter */
readonly type = "webgpu";
constructor();
/** Check if WebGPU is available */
isSupported(): boolean;
isDeviceHandle(handle: unknown): boolean;
create(props: DeviceProps): Promise<WebGPUDevice>;

@@ -10,0 +9,0 @@ attach(handle: GPUDevice): Promise<WebGPUDevice>;

// luma.gl
// SPDX-License-Identifier: MIT
// Copyright (c) vis.gl contributors
// prettier-ignore
// / <reference types="@webgpu/types" />
import { Adapter, log } from '@luma.gl/core';
import { WebGPUDevice } from "./webgpu-device.js";
// / <reference types="@webgpu/types" />
export class WebGPUAdapter extends Adapter {
/** type of device's created by this adapter */
type = 'webgpu';
constructor() {
super();
// @ts-ignore For backwards compatibility luma.registerDevices
WebGPUDevice.adapter = this;
}
/** Check if WebGPU is available */
isSupported() {
// Check if WebGPU is available
return Boolean(typeof navigator !== 'undefined' && navigator.gpu);
}
isDeviceHandle(handle) {
if (typeof GPUDevice !== 'undefined' && handle instanceof GPUDevice) {
return true;
}
// TODO - WebGPU does not yet seem to have a stable in-browser API, so we "sniff" for members instead
if (handle?.queue) {
return true;
}
return false;
}
async create(props) {
if (!navigator.gpu) {
throw new Error('WebGPU not available. Open in Chrome Canary and turn on chrome://flags/#enable-unsafe-webgpu');
throw new Error('WebGPU not available. Recent Chrome browsers should work.');
}

@@ -31,3 +36,6 @@ log.groupCollapsed(1, 'WebGPUDevice created')();

}
const adapterInfo = await adapter.requestAdapterInfo();
// Note: adapter.requestAdapterInfo() has been replaced with adapter.info. Fall back in case adapter.info is not available
const adapterInfo = adapter.info ||
// @ts-ignore
(await adapter.requestAdapterInfo?.());
log.probe(2, 'Adapter available', adapterInfo)();

@@ -55,2 +63,3 @@ const requiredFeatures = [];

log.probe(1, 'GPUDevice available')();
const { WebGPUDevice } = await import('./webgpu-device');
const device = new WebGPUDevice(props, gpuDevice, adapter, adapterInfo);

@@ -57,0 +66,0 @@ log.probe(1, 'Device created. For more info, set chrome://flags/#enable-webgpu-developer-features')();
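The adapter now prefers GPUAdapter.info and only falls back to the deprecated requestAdapterInfo(). For context, a hedged sketch of creating a device through this adapter with the luma.gl v9 API (assumes the usual webgpuAdapter export and luma.createDevice entry point):

import {luma} from '@luma.gl/core';
import {webgpuAdapter} from '@luma.gl/webgpu';

const device = await luma.createDevice({
  type: 'webgpu',
  adapters: [webgpuAdapter]
});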

@@ -1,2 +0,2 @@

import type { Texture, TextureFormat, CanvasContextProps } from '@luma.gl/core';
import type { DepthStencilTextureFormat, CanvasContextProps } from '@luma.gl/core';
import { CanvasContext } from '@luma.gl/core';

@@ -13,7 +13,3 @@ import { WebGPUDevice } from "./webgpu-device.js";

readonly device: WebGPUDevice;
readonly gpuCanvasContext: GPUCanvasContext;
/** Format of returned textures: "bgra8unorm", "rgba8unorm", "rgba16float". */
readonly format: TextureFormat;
/** Default stencil format for depth textures */
readonly depthStencilFormat: TextureFormat;
readonly handle: GPUCanvasContext;
private depthStencilAttachment;

@@ -25,15 +21,11 @@ get [Symbol.toStringTag](): string;

/** Update framebuffer with properly resized "swap chain" texture views */
getCurrentFramebuffer(): WebGPUFramebuffer;
/** Resizes and updates render targets if necessary */
update(): void;
resize(options?: {
width?: number;
height?: number;
useDevicePixels?: boolean | number;
}): void;
getCurrentFramebuffer(options?: {
depthStencilFormat?: DepthStencilTextureFormat | false;
}): WebGPUFramebuffer;
_updateDevice(): void;
/** Wrap the current canvas context texture in a luma.gl texture */
getCurrentTexture(): WebGPUTexture;
/** We build render targets on demand (i.e. not when size changes but when about to render) */
_createDepthStencilAttachment(): Texture;
_createDepthStencilAttachment(depthStencilFormat: DepthStencilTextureFormat): WebGPUTexture;
}
//# sourceMappingURL=webgpu-canvas-context.d.ts.map
// luma.gl
// SPDX-License-Identifier: MIT
// Copyright (c) vis.gl contributors
import { CanvasContext, log } from '@luma.gl/core';
import { getWebGPUTextureFormat } from "./helpers/convert-texture-format.js";
import { CanvasContext, Texture, log } from '@luma.gl/core';
import { WebGPUFramebuffer } from "./resources/webgpu-framebuffer.js";

@@ -14,7 +13,3 @@ /**

device;
gpuCanvasContext;
/** Format of returned textures: "bgra8unorm", "rgba8unorm", "rgba16float". */
format = navigator.gpu.getPreferredCanvasFormat();
/** Default stencil format for depth textures */
depthStencilFormat = 'depth24plus';
handle;
depthStencilAttachment = null;

@@ -26,35 +21,34 @@ get [Symbol.toStringTag]() {

super(props);
const context = this.canvas.getContext('webgpu');
if (!context) {
throw new Error(`${this}: Failed to create WebGPU canvas context`);
}
this.device = device;
// TODO - ugly hack to trigger first resize
this.width = -1;
this.height = -1;
this.handle = context;
// Base class constructor cannot access derived methods/fields, so we need to call these functions in the subclass constructor
this._setAutoCreatedCanvasId(`${this.device.id}-canvas`);
// @ts-ignore TODO - we don't handle OffscreenRenderingContext.
this.gpuCanvasContext = this.canvas.getContext('webgpu');
// TODO this has been replaced
// this.format = this.gpuCanvasContext.getPreferredFormat(adapter);
this.format = 'bgra8unorm';
this._updateDevice();
}
/** Destroy any textures produced while configured and remove the context configuration. */
destroy() {
this.gpuCanvasContext.unconfigure();
this.handle.unconfigure();
}
/** Update framebuffer with properly resized "swap chain" texture views */
getCurrentFramebuffer() {
// Ensure the canvas context size is updated
this.update();
getCurrentFramebuffer(options = {
depthStencilFormat: 'depth24plus'
}) {
// Wrap the current canvas context texture in a luma.gl texture
// const currentColorAttachment = this.device.createTexture({
// id: 'default-render-target',
// handle: this.gpuCanvasContext.getCurrentTexture(),
// format: this.format,
// width: this.width,
// height: this.height
// });
// Wrap the current canvas context texture in a luma.gl texture
const currentColorAttachment = this.getCurrentTexture();
this.width = currentColorAttachment.width;
this.height = currentColorAttachment.height;
// TODO - temporary debug code
if (currentColorAttachment.width !== this.drawingBufferWidth ||
currentColorAttachment.height !== this.drawingBufferHeight) {
const [oldWidth, oldHeight] = this.getDrawingBufferSize();
this.drawingBufferWidth = currentColorAttachment.width;
this.drawingBufferHeight = currentColorAttachment.height;
log.log(1, `${this}: Resized to compensate for initial canvas size mismatch ${oldWidth}x${oldHeight} => ${this.drawingBufferWidth}x${this.drawingBufferHeight}px`)();
}
// Resize the depth stencil attachment
this._createDepthStencilAttachment();
if (options?.depthStencilFormat) {
this._createDepthStencilAttachment(options?.depthStencilFormat);
}
return new WebGPUFramebuffer(this.device, {

@@ -65,56 +59,39 @@ colorAttachments: [currentColorAttachment],

}
/** Resizes and updates render targets if necessary */
update() {
const oldWidth = this.width;
const oldHeight = this.height;
const [newWidth, newHeight] = this.getPixelSize();
const sizeChanged = newWidth !== oldWidth || newHeight !== oldHeight;
if (sizeChanged) {
this.width = newWidth;
this.height = newHeight;
if (this.depthStencilAttachment) {
this.depthStencilAttachment.destroy();
this.depthStencilAttachment = null;
}
// Reconfigure the canvas size.
// https://www.w3.org/TR/webgpu/#canvas-configuration
this.gpuCanvasContext.configure({
device: this.device.handle,
format: getWebGPUTextureFormat(this.format),
// Can be used to define e.g. -srgb views
// viewFormats: [...]
colorSpace: this.props.colorSpace,
alphaMode: this.props.alphaMode
});
log.log(1, `${this} Resized ${oldWidth}x${oldHeight} => ${newWidth}x${newHeight}px`)();
// IMPLEMENTATION OF ABSTRACT METHODS
_updateDevice() {
if (this.depthStencilAttachment) {
this.depthStencilAttachment.destroy();
this.depthStencilAttachment = null;
}
// Reconfigure the canvas size.
// https://www.w3.org/TR/webgpu/#canvas-configuration
this.handle.configure({
device: this.device.handle,
format: this.device.preferredColorFormat,
// Can be used to define e.g. -srgb views
// viewFormats: [...]
colorSpace: this.props.colorSpace,
alphaMode: this.props.alphaMode
});
}
resize(options) {
this.update();
if (!this.device.handle)
return;
// Resize browser context .
if (this.canvas) {
const devicePixelRatio = this.getDevicePixelRatio(options?.useDevicePixels);
this.setDevicePixelRatio(devicePixelRatio, options);
return;
}
}
/** Wrap the current canvas context texture in a luma.gl texture */
getCurrentTexture() {
const handle = this.handle.getCurrentTexture();
return this.device.createTexture({
id: `${this.id}#color-texture`,
handle: this.gpuCanvasContext.getCurrentTexture(),
format: this.format
handle,
format: this.device.preferredColorFormat,
width: handle.width,
height: handle.height
});
}
/** We build render targets on demand (i.e. not when size changes but when about to render) */
_createDepthStencilAttachment() {
_createDepthStencilAttachment(depthStencilFormat) {
if (!this.depthStencilAttachment) {
this.depthStencilAttachment = this.device.createTexture({
id: `${this.id}#depth-stencil-texture`,
format: this.depthStencilFormat,
width: this.width,
height: this.height,
usage: GPUTextureUsage.RENDER_ATTACHMENT
usage: Texture.RENDER_ATTACHMENT,
format: depthStencilFormat,
width: this.drawingBufferWidth,
height: this.drawingBufferHeight
});

@@ -121,0 +98,0 @@ }
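getCurrentFramebuffer now takes an options object so callers can pick a depth/stencil format or skip the attachment entirely. A hedged usage sketch (the option name comes from the d.ts above; the rest is illustrative):

const canvasContext = device.getCanvasContext();

// Default: a 'depth24plus' depth/stencil attachment is created on demand
const framebuffer = canvasContext.getCurrentFramebuffer();

// Color-only pass: skip the depth/stencil attachment
const colorOnlyFramebuffer = canvasContext.getCurrentFramebuffer({depthStencilFormat: false});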

@@ -1,2 +0,2 @@

import type { DeviceInfo, DeviceLimits, DeviceTextureFormatCapabilities, CanvasContextProps, BufferProps, SamplerProps, ShaderProps, Texture, TextureProps, ExternalTextureProps, FramebufferProps, RenderPipelineProps, ComputePipelineProps, RenderPassProps, ComputePassProps, VertexArrayProps, TransformFeedback, TransformFeedbackProps, QuerySet, QuerySetProps, DeviceProps } from '@luma.gl/core';
import type { DeviceInfo, DeviceLimits, DeviceTextureFormatCapabilities, CanvasContextProps, BufferProps, SamplerProps, ShaderProps, TextureProps, ExternalTextureProps, FramebufferProps, RenderPipelineProps, ComputePipelineProps, VertexArrayProps, TransformFeedback, TransformFeedbackProps, QuerySet, QuerySetProps, DeviceProps, CommandEncoderProps } from '@luma.gl/core';
import { Device, DeviceFeatures } from '@luma.gl/core';

@@ -11,14 +11,14 @@ import { WebGPUBuffer } from "./resources/webgpu-buffer.js";

import { WebGPUComputePipeline } from "./resources/webgpu-compute-pipeline.js";
import { WebGPURenderPass } from "./resources/webgpu-render-pass.js";
import { WebGPUComputePass } from "./resources/webgpu-compute-pass.js";
import { WebGPUVertexArray } from "./resources/webgpu-vertex-array.js";
import { WebGPUCanvasContext } from "./webgpu-canvas-context.js";
import { WebGPUCommandEncoder } from "./resources/webgpu-command-encoder.js";
import { WebGPUCommandBuffer } from "./resources/webgpu-command-buffer.js";
/** WebGPU Device implementation */
export declare class WebGPUDevice extends Device {
/** The underlying WebGPU device */
readonly handle: GPUDevice;
/** type of this device */
readonly type = "webgpu";
/** The underlying WebGPU device */
readonly handle: GPUDevice;
readonly adapter: GPUAdapter;
readonly adapterInfo: GPUAdapterInfo;
readonly preferredColorFormat: "rgba8unorm" | "bgra8unorm";
readonly preferredDepthFormat = "depth24plus";
readonly features: DeviceFeatures;

@@ -33,4 +33,5 @@ readonly info: DeviceInfo;

private _isLost;
commandEncoder: GPUCommandEncoder | null;
renderPass: WebGPURenderPass | null;
commandEncoder: WebGPUCommandEncoder;
readonly adapter: GPUAdapter;
readonly adapterInfo: GPUAdapterInfo;
constructor(props: DeviceProps, device: GPUDevice, adapter: GPUAdapter, adapterInfo: GPUAdapterInfo);

@@ -48,29 +49,13 @@ destroy(): void;

createVertexArray(props: VertexArrayProps): WebGPUVertexArray;
/**
* Allows a render pass to begin against a canvas context
* @todo need to support a "Framebuffer" equivalent (aka preconfigured RenderPassDescriptors?).
*/
beginRenderPass(props: RenderPassProps): WebGPURenderPass;
beginComputePass(props: ComputePassProps): WebGPUComputePass;
createCommandEncoder(props?: CommandEncoderProps): WebGPUCommandEncoder;
createTransformFeedback(props: TransformFeedbackProps): TransformFeedback;
createQuerySet(props: QuerySetProps): QuerySet;
createCanvasContext(props: CanvasContextProps): WebGPUCanvasContext;
submit(): void;
submit(commandBuffer?: WebGPUCommandBuffer): void;
pushErrorScope(scope: 'validation' | 'out-of-memory'): void;
popErrorScope(handler: (message: string) => void): void;
protected _getInfo(): DeviceInfo;
protected _getFeatures(): DeviceFeatures;
_getDeviceSpecificTextureFormatCapabilities(capabilities: DeviceTextureFormatCapabilities): DeviceTextureFormatCapabilities;
copyExternalImageToTexture(options: {
texture: Texture;
mipLevel?: number;
aspect?: 'all' | 'stencil-only' | 'depth-only';
colorSpace?: 'display-p3' | 'srgb';
premultipliedAlpha?: boolean;
source: ImageBitmap | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
sourceX?: number;
sourceY?: number;
width?: number;
height?: number;
depth?: number;
}): void;
}
//# sourceMappingURL=webgpu-device.d.ts.map

@@ -13,18 +13,14 @@ // luma.gl

import { WebGPUComputePipeline } from "./resources/webgpu-compute-pipeline.js";
import { WebGPURenderPass } from "./resources/webgpu-render-pass.js";
import { WebGPUComputePass } from "./resources/webgpu-compute-pass.js";
// import {WebGPUCommandEncoder} from './resources/webgpu-command-encoder';
import { WebGPUVertexArray } from "./resources/webgpu-vertex-array.js";
import { WebGPUCanvasContext } from "./webgpu-canvas-context.js";
import { WebGPUCommandEncoder } from "./resources/webgpu-command-encoder.js";
import { WebGPUQuerySet } from "./resources/webgpu-query-set.js";
/** WebGPU Device implementation */
export class WebGPUDevice extends Device {
/** The underlying WebGPU device */
handle;
/** type of this device */
type = 'webgpu';
/** The underlying WebGPU device */
handle;
/* The underlying WebGPU adapter */
adapter;
/* The underlying WebGPU adapter's info */
adapterInfo;
preferredColorFormat = navigator.gpu.getPreferredCanvasFormat();
preferredDepthFormat = 'depth24plus';
features;

@@ -36,4 +32,7 @@ info;

_isLost = false;
commandEncoder = null;
renderPass = null;
commandEncoder;
/* The underlying WebGPU adapter */
adapter;
/* The underlying WebGPU adapter's info */
adapterInfo;
constructor(props, device, adapter, adapterInfo) {

@@ -49,2 +48,3 @@ super({ ...props, id: props.id || 'webgpu-device' });

device.addEventListener('uncapturederror', (event) => {
event.preventDefault();
// TODO is this the right way to make sure the error is an Error instance?

@@ -57,3 +57,2 @@ const errorMessage = event instanceof GPUUncapturedErrorEvent ? event.error.message : 'Unknown WebGPU error';

}
event.preventDefault();
});

@@ -71,2 +70,3 @@ // "Context" loss handling

}
this.commandEncoder = this.createCommandEncoder({});
}

@@ -111,18 +111,6 @@ // TODO

}
createCommandEncoder(props) {
return new WebGPUCommandEncoder(this, props);
}
// WebGPU specifics
/**
* Allows a render pass to begin against a canvas context
* @todo need to support a "Framebuffer" equivalent (aka preconfigured RenderPassDescriptors?).
*/
beginRenderPass(props) {
this.commandEncoder = this.commandEncoder || this.handle.createCommandEncoder();
return new WebGPURenderPass(this, props);
}
beginComputePass(props) {
this.commandEncoder = this.commandEncoder || this.handle.createCommandEncoder();
return new WebGPUComputePass(this, props);
}
// createCommandEncoder(props: CommandEncoderProps): WebGPUCommandEncoder {
// return new WebGPUCommandEncoder(this, props);
// }
createTransformFeedback(props) {

@@ -137,15 +125,27 @@ throw new Error('Transform feedback not supported in WebGPU');

}
submit() {
const commandBuffer = this.commandEncoder?.finish();
if (commandBuffer) {
this.handle.pushErrorScope('validation');
this.handle.queue.submit([commandBuffer]);
this.handle.popErrorScope().then((error) => {
if (error) {
this.reportError(new Error(`WebGPU command submission failed: ${error.message}`));
}
});
submit(commandBuffer) {
if (!commandBuffer) {
commandBuffer = this.commandEncoder.finish();
this.commandEncoder.destroy();
this.commandEncoder = this.createCommandEncoder({ id: `${this.id}-default-encoder` });
}
this.commandEncoder = null;
this.handle.pushErrorScope('validation');
this.handle.queue.submit([commandBuffer.handle]);
this.handle.popErrorScope().then((error) => {
if (error) {
this.reportError(new Error(`WebGPU command submission failed: ${error.message}`));
}
});
}
// WebGPU specific
pushErrorScope(scope) {
this.handle.pushErrorScope(scope);
}
popErrorScope(handler) {
this.handle.popErrorScope().then((error) => {
if (error) {
handler(error.message);
}
});
}
// PRIVATE METHODS

@@ -210,30 +210,3 @@ _getInfo() {

}
// DEPRECATED METHODS
// @deprecated
copyExternalImageToTexture(options) {
const { source, sourceX = 0, sourceY = 0, texture, mipLevel = 0, aspect = 'all', colorSpace = 'display-p3', premultipliedAlpha = false,
// destinationX,
// destinationY,
// desitnationZ,
width = texture.width, height = texture.height, depth = 1 } = options;
const webGpuTexture = texture;
this.handle?.queue.copyExternalImageToTexture(
// source: GPUImageCopyExternalImage
{
source,
origin: [sourceX, sourceY]
},
// destination: GPUImageCopyTextureTagged
{
texture: webGpuTexture.handle,
origin: [0, 0, 0], // [x, y, z],
mipLevel,
aspect,
colorSpace,
premultipliedAlpha
},
// copySize: GPUExtent3D
[width, height, depth]);
}
}
//# sourceMappingURL=webgpu-device.js.map
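The device now exposes pushErrorScope/popErrorScope directly, and submit() accepts an optional command buffer. A hedged sketch of the new error-scope helpers (the buffer props are illustrative):

import {Buffer} from '@luma.gl/core';

device.pushErrorScope('validation');
const buffer = device.createBuffer({
  byteLength: 256,
  usage: Buffer.VERTEX | Buffer.COPY_DST
});
device.popErrorScope(message => {
  console.error(`Buffer creation failed: ${message}`);
});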

@@ -7,8 +7,8 @@ (function webpackUniversalModuleDefinition(root, factory) {

else root['luma'] = factory();})(globalThis, function () {
var __exports__=(()=>{var Se=Object.create;var W=Object.defineProperty;var Ce=Object.getOwnPropertyDescriptor;var Ee=Object.getOwnPropertyNames;var Pe=Object.getPrototypeOf,Be=Object.prototype.hasOwnProperty;var Ae=(n,e)=>()=>(e||n((e={exports:{}}).exports,e),e.exports),Ge=(n,e)=>{for(var t in e)W(n,t,{get:e[t],enumerable:!0})},R=(n,e,t,r)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of Ee(e))!Be.call(n,i)&&i!==t&&W(n,i,{get:()=>e[i],enumerable:!(r=Ce(e,i))||r.enumerable});return n},F=(n,e,t)=>(R(n,e,"default"),t&&R(t,e,"default")),h=(n,e,t)=>(t=n!=null?Se(Pe(n)):{},R(e||!n||!n.__esModule?W(t,"default",{value:n,enumerable:!0}):t,n)),Te=n=>R(W({},"__esModule",{value:!0}),n);var o=Ae((Oe,te)=>{te.exports=globalThis.luma});var I={};Ge(I,{WebGPUBuffer:()=>b,WebGPUDevice:()=>S,WebGPUSampler:()=>p,WebGPUShader:()=>P,WebGPUTexture:()=>E,webgpuAdapter:()=>be});F(I,h(o(),1));var f=h(o(),1);var X=h(o(),1);var O=h(o(),1);function Le(n){return n.byteLength||n.data?.byteLength||0}var b=class extends O.Buffer{device;handle;byteLength;constructor(e,t){super(e,t),this.device=e,this.byteLength=Le(t);let r=Boolean(t.data),i=Math.ceil(this.byteLength/4)*4;this.handle=this.props.handle||this.device.handle.createBuffer({size:i,usage:this.props.usage||GPUBufferUsage.VERTEX|GPUBufferUsage.COPY_DST,mappedAtCreation:this.props.mappedAtCreation||r,label:this.props.id}),t.data&&this._writeMapped(t.data),r&&!t.mappedAtCreation&&this.handle.unmap()}destroy(){this.handle?.destroy(),this.handle=null}write(e,t=0){this.device.handle.queue.writeBuffer(this.handle,t,e.buffer,e.byteOffset,e.byteLength)}async readAsync(e=0,t=this.byteLength){let r=new b(this.device,{usage:O.Buffer.MAP_READ|O.Buffer.COPY_DST,byteLength:t}),i=this.device.handle.createCommandEncoder();i.copyBufferToBuffer(this.handle,e,r.handle,0,t),this.device.handle.queue.submit([i.finish()]),await r.handle.mapAsync(GPUMapMode.READ,e,t);let s=r.handle.getMappedRange().slice(0);return r.handle.unmap(),r.destroy(),new Uint8Array(s)}_writeMapped(e){let t=this.handle.getMappedRange();new e.constructor(t).set(e)}mapAsync(e,t=0,r){return this.handle.mapAsync(e,t,r)}getMappedRange(e=0,t){return this.handle.getMappedRange(e,t)}unmap(){this.handle.unmap()}};var v=h(o(),1);function C(n){if(n.includes("webgl"))throw new Error("webgl-only format");return n}var re=h(o(),1),p=class extends re.Sampler{device;handle;constructor(e,t){super(e,t),this.device=e;let r={...this.props,mipmapFilter:void 0};t.type!=="comparison-sampler"&&delete r.compare,t.mipmapFilter&&t.mipmapFilter!=="none"&&(r.mipmapFilter=t.mipmapFilter),this.handle=this.handle||this.device.handle.createSampler(r),this.handle.label=this.props.id}destroy(){this.handle=null}};var ne=h(o(),1),T=class extends ne.TextureView{device;handle;texture;constructor(e,t){super(e,t),this.device=e,this.texture=t.texture,this.handle=this.handle||this.texture.handle.createView({format:t.format||this.texture.format,dimension:t.dimension||this.texture.dimension,aspect:t.aspect,baseMipLevel:t.baseMipLevel,mipLevelCount:t.mipLevelCount,baseArrayLayer:t.baseArrayLayer,arrayLayerCount:t.arrayLayerCount}),this.handle.label=this.props.id}destroy(){this.handle=null}};var De={"1d":"1d","2d":"2d","2d-array":"2d",cube:"2d","cube-array":"2d","3d":"3d"},E=class extends v.Texture{device;handle;sampler;view;constructor(e,t){super(e,t),this.device=e;let r={...this.props};t.data&&(r.data=t.data),this.initialize(r)}destroy(){this.handle?.destroy(),this.handle=null}createView(e){return new 
T(this.device,{...e,texture:this})}initialize(e){this.handle=this.props.handle||this.createHandle(),this.handle.label||=this.id,this.props.data&&(v.Texture.isExternalImage(this.props.data)?this.copyExternalImage({image:this.props.data}):this.setData({data:this.props.data})),this.width=this.handle.width,this.height=this.handle.height,this.sampler=e.sampler instanceof p?e.sampler:new p(this.device,e.sampler||{}),this.view=new T(this.device,{...this.props,texture:this})}createHandle(){let e=this.props.width||this.props.data?.width||1,t=this.props.height||this.props.data?.height||1;return this.device.handle.createTexture({label:this.id,size:{width:e,height:t,depthOrArrayLayers:this.depth},usage:this.props.usage||v.Texture.TEXTURE|v.Texture.COPY_DST,dimension:De[this.dimension],format:C(this.format),mipLevelCount:this.mipLevels,sampleCount:this.props.samples})}createGPUTextureView(){return this.handle.createView({label:this.id})}setSampler(e){return this.sampler=e instanceof p?e:new p(this.device,e),this}setTexture1DData(e){throw new Error("not implemented")}setTexture2DData(e,t,r){throw new Error("not implemented")}setTexture3DData(e,t,r){throw new Error("not implemented")}setTextureCubeData(e,t){throw new Error("not implemented")}setTextureArrayData(e){throw new Error("not implemented")}setTextureCubeArrayData(e){throw new Error("not implemented")}setData(e){if(ArrayBuffer.isView(e.data)){let t=new Uint8ClampedArray(e.data.buffer),r=new ImageData(t,this.width,this.height);return this.copyExternalImage({image:r})}throw new Error("Texture.setData: Use CommandEncoder to upload data to texture in WebGPU")}copyExternalImage(e){let t=v.Texture.getExternalImageSize(e.image),r={...v.Texture.defaultCopyExternalImageOptions,...t,...e},{image:i,sourceX:s,sourceY:a,width:l,height:c,depth:d,mipLevel:g,x:u,y:G,z:J,aspect:xe,colorSpace:we,premultipliedAlpha:ye,flipY:ve}=r;return this.device.handle.queue.copyExternalImageToTexture({source:i,origin:[s,a],flipY:ve},{texture:this.handle,origin:[u,G,J],mipLevel:g,aspect:xe,colorSpace:we,premultipliedAlpha:ye},[l,c,d]),{width:l,height:c}}};var ie=h(o(),1);var U=class extends ie.ExternalTexture{device;handle;sampler;constructor(e,t){super(e,t),this.device=e,this.handle=this.props.handle||this.device.handle.importExternalTexture({source:t.source,colorSpace:t.colorSpace}),this.sampler=null}destroy(){this.handle=null}setSampler(e){return this.sampler=e instanceof p?e:new p(this.device,e),this}};var L=h(o(),1),P=class extends L.Shader{device;handle;constructor(e,t){super(e,t),this.device=e;let r=t.source.includes("#version");if(this.props.language==="glsl"||r)throw new Error("GLSL shaders are not supported in WebGPU");this.device.handle.pushErrorScope("validation"),this.handle=this.props.handle||this.device.handle.createShaderModule({code:t.source}),this.device.handle.popErrorScope().then(i=>{i&&L.log.error(`${this} creation failed:
"${i.message}"`,this,this.props.source)()}),this.handle.label=this.props.id,this._checkCompilationError()}get asyncCompilationStatus(){return this.getCompilationInfo().then(()=>this.compilationStatus)}async _checkCompilationError(){let e=await this.getCompilationInfo(),t=Boolean(e.find(r=>r.type==="error"));this.compilationStatus=t?"error":"success",this.debugShader(),this.compilationStatus==="error"&&L.log.error("Shader compilation error",e)()}destroy(){this.handle=null}async getCompilationInfo(){return(await this.handle.getCompilationInfo()).messages}};var w=h(o(),1);function m(n){return n.depthStencil=n.depthStencil||{format:"depth24plus",stencilFront:{},stencilBack:{},depthWriteEnabled:!1,depthCompare:"less-equal"},n.depthStencil}function M(n){return m(n).stencilFront}function k(n){return m(n).stencilBack}var _e={cullMode:(n,e,t)=>{t.primitive=t.primitive||{},t.primitive.cullMode=e},frontFace:(n,e,t)=>{t.primitive=t.primitive||{},t.primitive.frontFace=e},depthWriteEnabled:(n,e,t)=>{let r=m(t);r.depthWriteEnabled=e},depthCompare:(n,e,t)=>{let r=m(t);r.depthCompare=e},depthFormat:(n,e,t)=>{let r=m(t);r.format=e},depthBias:(n,e,t)=>{let r=m(t);r.depthBias=e},depthBiasSlopeScale:(n,e,t)=>{let r=m(t);r.depthBiasSlopeScale=e},depthBiasClamp:(n,e,t)=>{let r=m(t);r.depthBiasClamp=e},stencilReadMask:(n,e,t)=>{let r=m(t);r.stencilReadMask=e},stencilWriteMask:(n,e,t)=>{let r=m(t);r.stencilWriteMask=e},stencilCompare:(n,e,t)=>{let r=M(t),i=k(t);r.compare=e,i.compare=e},stencilPassOperation:(n,e,t)=>{let r=M(t),i=k(t);r.passOp=e,i.passOp=e},stencilFailOperation:(n,e,t)=>{let r=M(t),i=k(t);r.failOp=e,i.failOp=e},stencilDepthFailOperation:(n,e,t)=>{let r=M(t),i=k(t);r.depthFailOp=e,i.depthFailOp=e},sampleCount:(n,e,t)=>{t.multisample=t.multisample||{},t.multisample.count=e},sampleMask:(n,e,t)=>{t.multisample=t.multisample||{},t.multisample.mask=e},sampleAlphaToCoverageEnabled:(n,e,t)=>{t.multisample=t.multisample||{},t.multisample.alphaToCoverageEnabled=e},colorMask:(n,e,t)=>{let r=se(t);r[0].writeMask=e},blendColorOperation:(n,e,t)=>{se(t)}},Ie={primitive:{cullMode:"back",topology:"triangle-list"},vertex:{module:void 0,entryPoint:"main"},fragment:{module:void 0,entryPoint:"main",targets:[]},layout:"auto"};function ae(n,e={}){Object.assign(n,{...Ie,...n}),Re(n,e)}function Re(n,e){for(let[t,r]of Object.entries(e)){let i=_e[t];if(!i)throw new Error(`Illegal parameter ${t}`);i(t,r,n)}}function se(n){if(n.fragment.targets=n.fragment?.targets||[],!Array.isArray(n.fragment?.targets))throw new Error("colorstate");return n.fragment?.targets?.length===0&&n.fragment.targets?.push({}),n.fragment?.targets}var x=h(o(),1);function V(n,e,t,r){let i=We(r,t);return n.createBindGroup({layout:e,entries:i})}function oe(n,e){let t=n.bindings.find(r=>r.name===e||`${r.name}uniforms`===e.toLocaleLowerCase());return t||x.log.warn(`Binding ${e} not set: Not found in shader layout.`)(),t||null}function We(n,e){let t=[];for(let[r,i]of Object.entries(n)){let s=oe(e,r);s&&t.push(he(i,s.location)),s=oe(e,`${r}Sampler`),s&&t.push(he(i,s.location,{sampler:!0}))}return t}function he(n,e,t){if(n instanceof x.Buffer)return{binding:e,resource:{buffer:n.handle}};if(n instanceof x.Sampler)return{binding:e,resource:n.handle};if(n instanceof x.Texture)return t?.sampler?{binding:e,resource:n.sampler.handle}:{binding:e,resource:n.handle.createView({label:"bind-group-auto-created"})};throw new Error("invalid binding")}var B=h(o(),1);function Z(n){if(n.endsWith("-webgl"))throw new Error(`WebGPU does not support vertex format ${n}`);return 
n}function le(n,e){let t=[],r=new Set;for(let i of e){let s=[],a="vertex",l=0,c=i.format;if(i.attributes)for(let d of i.attributes){let g=d.attribute,u=de(n,g,r),G=u?.location;a=u?.stepMode||(u?.name.startsWith("instance")?"instance":"vertex"),s.push({format:Z(d.format||i.format),offset:d.byteOffset,shaderLocation:G}),l+=(0,B.decodeVertexFormat)(c).byteLength}else{let d=de(n,i.name,r);if(!d)continue;l=(0,B.decodeVertexFormat)(c).byteLength,a=d.stepMode||(d.name.startsWith("instance")?"instance":"vertex"),s.push({format:Z(c),offset:0,shaderLocation:d.location})}t.push({arrayStride:i.byteStride||l,stepMode:a,attributes:s})}for(let i of n.attributes)r.has(i.name)||t.push({arrayStride:(0,B.decodeVertexFormat)("float32x3").byteLength,stepMode:i.stepMode||(i.name.startsWith("instance")?"instance":"vertex"),attributes:[{format:Z("float32x3"),offset:0,shaderLocation:i.location}]});return t}function de(n,e,t){let r=n.attributes.find(i=>i.name===e);if(!r)return B.log.warn(`Unknown attribute ${e}`)(),null;if(t.has(e))throw new Error(`Duplicate attribute ${e}`);return t.add(e),r}var $=class extends w.RenderPipeline{device;handle;vs;fs=null;_bindings;_bindGroupLayout=null;_bindGroup=null;constructor(e,t){if(super(e,t),this.device=e,this.handle=this.props.handle,!this.handle){let r=this._getRenderPipelineDescriptor();w.log.groupCollapsed(1,`new WebGPURenderPipeline(${this.id})`)(),w.log.probe(1,JSON.stringify(r,null,2))(),w.log.groupEnd(1)(),this.device.handle.pushErrorScope("validation"),this.handle=this.device.handle.createRenderPipeline(r),this.device.handle.popErrorScope().then(i=>{i&&w.log.error(`${this} creation failed:
"${i.message}"`,this,this.props.vs?.source)()})}this.handle.label=this.props.id,this.vs=t.vs,this.fs=t.fs,this._bindings={...this.props.bindings}}destroy(){this.handle=null}setBindings(e){Object.assign(this._bindings,e)}draw(e){let t=e.renderPass;this.device.handle.pushErrorScope("validation"),t.handle.setPipeline(this.handle),this.device.handle.popErrorScope().then(i=>{i&&w.log.error(`${this} setPipeline failed:
"${i.message}"`,this)()});let r=this._getBindGroup();return r&&t.handle.setBindGroup(0,r),e.vertexArray.bindBeforeRender(e.renderPass),e.indexCount?t.handle.drawIndexed(e.indexCount,e.instanceCount,e.firstIndex,e.baseVertex,e.firstInstance):t.handle.draw(e.vertexCount||0,e.instanceCount||1,e.firstInstance),e.vertexArray.unbindAfterRender(e.renderPass),!0}_getBindGroup(){return this.shaderLayout.bindings.length===0?null:(this._bindGroupLayout=this._bindGroupLayout||this.handle.getBindGroupLayout(0),this._bindGroup=this._bindGroup||V(this.device.handle,this._bindGroupLayout,this.shaderLayout,this._bindings),this._bindGroup)}_getRenderPipelineDescriptor(){let e={module:this.props.vs.handle,entryPoint:this.props.vertexEntryPoint||"main",buffers:le(this.shaderLayout,this.props.bufferLayout)},t={module:this.props.fs.handle,entryPoint:this.props.fragmentEntryPoint||"main",targets:[{format:C(this.device.getCanvasContext().format)}]},r={vertex:e,fragment:t,primitive:{topology:this.props.topology},layout:"auto"};return ae(r,this.props.parameters),r}};var pe=h(o(),1),A=class extends pe.Framebuffer{device;colorAttachments=[];depthStencilAttachment=null;constructor(e,t){super(e,t),this.device=e,this.autoCreateAttachmentTextures()}updateAttachments(){}};var ce=h(o(),1);var q=class extends ce.ComputePipeline{device;handle;_bindGroupLayout=null;_bindGroup=null;_bindings={};constructor(e,t){super(e,t),this.device=e;let r=this.props.shader;this.handle=this.props.handle||this.device.handle.createComputePipeline({label:this.props.id,compute:{module:r.handle,entryPoint:this.props.entryPoint,constants:this.props.constants},layout:"auto"})}setBindings(e){Object.assign(this._bindings,e)}_getBindGroup(){return this._bindGroupLayout=this._bindGroupLayout||this.handle.getBindGroupLayout(0),this._bindGroup=this._bindGroup||V(this.device.handle,this._bindGroupLayout,this.shaderLayout,this._bindings),this._bindGroup}};var y=h(o(),1),z=class extends y.RenderPass{device;handle;pipeline=null;constructor(e,t={}){super(e,t),this.device=e;let r=t.framebuffer||e.getCanvasContext().getCurrentFramebuffer(),i=this.getRenderPassDescriptor(r),s=t.timestampQuerySet;if(s&&(i.occlusionQuerySet=s.handle),e.features.has("timestamp-query")){let a=t.timestampQuerySet;i.timestampWrites=a?{querySet:a.handle,beginningOfPassWriteIndex:t.beginTimestampIndex,endOfPassWriteIndex:t.endTimestampIndex}:void 0}if(!e.commandEncoder)throw new Error("commandEncoder not available");this.device.handle.pushErrorScope("validation"),this.handle=this.props.handle||e.commandEncoder.beginRenderPass(i),this.device.handle.popErrorScope().then(a=>{a&&y.log.error(`${this} creation failed:
"${a.message}"`,this)()}),this.handle.label=this.props.id,y.log.groupCollapsed(3,`new WebGPURenderPass(${this.id})`)(),y.log.probe(3,JSON.stringify(i,null,2))(),y.log.groupEnd(3)()}destroy(){}end(){this.handle.end()}setPipeline(e){this.pipeline=e,this.handle.setPipeline(this.pipeline.handle)}setBindings(e){this.pipeline?.setBindings(e);let t=this.pipeline?._getBindGroup();t&&this.handle.setBindGroup(0,t)}setIndexBuffer(e,t,r=0,i){this.handle.setIndexBuffer(e.handle,t,r,i)}setVertexBuffer(e,t,r=0){this.handle.setVertexBuffer(e,t.handle,r)}draw(e){e.indexCount?this.handle.drawIndexed(e.indexCount,e.instanceCount,e.firstIndex,e.baseVertex,e.firstInstance):this.handle.draw(e.vertexCount||0,e.instanceCount||1,e.firstIndex,e.firstInstance)}drawIndirect(){}setParameters(e){let{blendConstant:t,stencilReference:r,scissorRect:i,viewport:s}=e;t&&this.handle.setBlendConstant(t),r&&this.handle.setStencilReference(r),i&&this.handle.setScissorRect(i[0],i[1],i[2],i[3]),s&&this.handle.setViewport(s[0],s[1],s[2],s[3],s[4],s[5])}pushDebugGroup(e){this.handle.pushDebugGroup(e)}popDebugGroup(){this.handle.popDebugGroup()}insertDebugMarker(e){this.handle.insertDebugMarker(e)}beginOcclusionQuery(e){this.handle.beginOcclusionQuery(e)}endOcclusionQuery(){this.handle.endOcclusionQuery()}getRenderPassDescriptor(e){let t={colorAttachments:[]};if(t.colorAttachments=e.colorAttachments.map((r,i)=>({loadOp:this.props.clearColor!==!1?"clear":"load",colorClearValue:this.props.clearColors?.[i]||this.props.clearColor||y.RenderPass.defaultClearColor,storeOp:this.props.discard?"discard":"store",view:r.handle})),e.depthStencilAttachment){t.depthStencilAttachment={view:e.depthStencilAttachment.handle};let{depthStencilAttachment:r}=t;this.props.depthReadOnly&&(r.depthReadOnly=!0),this.props.clearDepth!==!1&&(r.depthClearValue=this.props.clearDepth),!0&&(r.depthLoadOp=this.props.clearDepth!==!1?"clear":"load",r.depthStoreOp="store"),!1&&(r.stencilLoadOp=this.props.clearStencil!==!1?"clear":"load",r.stencilStoreOp="store")}return t}};var ue=h(o(),1),Q=class extends ue.ComputePass{device;handle;_webgpuPipeline=null;constructor(e,t){super(e,t),this.device=e;let r;if(e.features.has("timestamp-query")){let i=t.timestampQuerySet;i&&(r={querySet:i.handle,beginningOfPassWriteIndex:t.beginTimestampIndex,endOfPassWriteIndex:t.endTimestampIndex})}this.handle=this.props.handle||e.commandEncoder?.beginComputePass({label:this.props.id,timestampWrites:r})}destroy(){}end(){this.handle.end()}setPipeline(e){let t=e;this.handle.setPipeline(t.handle),this._webgpuPipeline=t,this.setBindings([])}setBindings(e){let t=this._webgpuPipeline._getBindGroup();this.handle.setBindGroup(0,t)}dispatch(e,t,r){this.handle.dispatchWorkgroups(e,t,r)}dispatchIndirect(e,t=0){let r=e;this.handle.dispatchWorkgroupsIndirect(r.handle,t)}pushDebugGroup(e){this.handle.pushDebugGroup(e)}popDebugGroup(){this.handle.popDebugGroup()}insertDebugMarker(e){this.handle.insertDebugMarker(e)}};var D=h(o(),1);var ht=globalThis.document||{},dt=globalThis.process||{},lt=globalThis.console,me=globalThis.navigator||{};function N(n){if(typeof window<"u"&&window.process?.type==="renderer"||typeof process<"u"&&Boolean(process.versions?.electron))return!0;let e=typeof navigator<"u"&&navigator.userAgent,t=n||e;return Boolean(t&&t.indexOf("Electron")>=0)}function fe(){return!(typeof process=="object"&&String(process)==="[object process]"&&!process?.browser)||N()}function 
K(n){return!n&&!fe()?"Node":N(n)?"Electron":(n||me.userAgent||"").indexOf("Edge")>-1?"Edge":globalThis.chrome?"Chrome":globalThis.safari?"Safari":globalThis.mozInnerScreenX?"Firefox":"Unknown"}var j=class extends D.VertexArray{get[Symbol.toStringTag](){return"WebGPUVertexArray"}device;handle;constructor(e,t){super(e,t),this.device=e}destroy(){}setIndexBuffer(e){this.indexBuffer=e}setBuffer(e,t){this.attributes[e]=t}bindBeforeRender(e,t,r){let i=e,s=this.indexBuffer;s?.handle&&(D.log.info(3,"setting index buffer",s?.handle,s?.indexType)(),i.handle.setIndexBuffer(s?.handle,s?.indexType));for(let a=0;a<this.maxVertexAttributes;a++){let l=this.attributes[a];l?.handle&&(D.log.info(3,`setting vertex buffer ${a}`,l?.handle)(),i.handle.setVertexBuffer(a,l?.handle))}}unbindAfterRender(e){}static isConstantAttributeZeroSupported(e){return K()==="Chrome"}};var Y=h(o(),1);var _=class extends Y.CanvasContext{device;gpuCanvasContext;format=navigator.gpu.getPreferredCanvasFormat();depthStencilFormat="depth24plus";depthStencilAttachment=null;get[Symbol.toStringTag](){return"WebGPUCanvasContext"}constructor(e,t,r){super(r),this.device=e,this.width=-1,this.height=-1,this._setAutoCreatedCanvasId(`${this.device.id}-canvas`),this.gpuCanvasContext=this.canvas.getContext("webgpu"),this.format="bgra8unorm"}destroy(){this.gpuCanvasContext.unconfigure()}getCurrentFramebuffer(){this.update();let e=this.getCurrentTexture();return this.width=e.width,this.height=e.height,this._createDepthStencilAttachment(),new A(this.device,{colorAttachments:[e],depthStencilAttachment:this.depthStencilAttachment})}update(){let e=this.width,t=this.height,[r,i]=this.getPixelSize();(r!==e||i!==t)&&(this.width=r,this.height=i,this.depthStencilAttachment&&(this.depthStencilAttachment.destroy(),this.depthStencilAttachment=null),this.gpuCanvasContext.configure({device:this.device.handle,format:C(this.format),colorSpace:this.props.colorSpace,alphaMode:this.props.alphaMode}),Y.log.log(1,`${this} Resized ${e}x${t} => ${r}x${i}px`)())}resize(e){if(this.update(),!!this.device.handle&&this.canvas){let t=this.getDevicePixelRatio(e?.useDevicePixels);this.setDevicePixelRatio(t,e);return}}getCurrentTexture(){return this.device.createTexture({id:`${this.id}#color-texture`,handle:this.gpuCanvasContext.getCurrentTexture(),format:this.format})}_createDepthStencilAttachment(){return this.depthStencilAttachment||(this.depthStencilAttachment=this.device.createTexture({id:`${this.id}#depth-stencil-texture`,format:this.depthStencilFormat,width:this.width,height:this.height,usage:GPUTextureUsage.RENDER_ATTACHMENT})),this.depthStencilAttachment}};var ge=h(o(),1),H=class extends ge.QuerySet{device;handle;constructor(e,t){super(e,t),this.device=e,this.handle=this.props.handle||this.device.handle.createQuerySet({type:this.props.type,count:this.props.count}),this.handle.label=this.props.id}destroy(){this.handle?.destroy(),this.handle=null}};var S=class extends X.Device{type="webgpu";handle;adapter;adapterInfo;features;info;limits;lost;canvasContext=null;_isLost=!1;commandEncoder=null;renderPass=null;constructor(e,t,r,i){if(super({...e,id:e.id||"webgpu-device"}),this.handle=t,this.adapter=r,this.adapterInfo=i,this.info=this._getInfo(),this.features=this._getFeatures(),this.limits=this.handle.limits,t.addEventListener("uncapturederror",s=>{let a=s instanceof GPUUncapturedErrorEvent?s.error.message:"Unknown WebGPU error";if(this.reportError(new Error(a)),this.props.debug)debugger;s.preventDefault()}),this.lost=new Promise(async s=>{let a=await 
this.handle.lost;this._isLost=!0,s({reason:"destroyed",message:a.message})}),e.createCanvasContext){let s=e.createCanvasContext===!0?{}:e.createCanvasContext;this.canvasContext=new _(this,this.adapter,s)}}destroy(){this.handle.destroy()}get isLost(){return this._isLost}createBuffer(e){let t=this._normalizeBufferProps(e);return new b(this,t)}createTexture(e){return new E(this,e)}createExternalTexture(e){return new U(this,e)}createShader(e){return new P(this,e)}createSampler(e){return new p(this,e)}createRenderPipeline(e){return new $(this,e)}createFramebuffer(e){return new A(this,e)}createComputePipeline(e){return new q(this,e)}createVertexArray(e){return new j(this,e)}beginRenderPass(e){return this.commandEncoder=this.commandEncoder||this.handle.createCommandEncoder(),new z(this,e)}beginComputePass(e){return this.commandEncoder=this.commandEncoder||this.handle.createCommandEncoder(),new Q(this,e)}createTransformFeedback(e){throw new Error("Transform feedback not supported in WebGPU")}createQuerySet(e){return new H(this,e)}createCanvasContext(e){return new _(this,this.adapter,e)}submit(){let e=this.commandEncoder?.finish();e&&(this.handle.pushErrorScope("validation"),this.handle.queue.submit([e]),this.handle.popErrorScope().then(t=>{t&&this.reportError(new Error(`WebGPU command submission failed: ${t.message}`))})),this.commandEncoder=null}_getInfo(){let[e,t]=(this.adapterInfo.driver||"").split(" Version "),r=this.adapterInfo.vendor||this.adapter.__brand||"unknown",i=e||"",s=t||"",a=r==="apple"?"apple":"unknown",l=this.adapterInfo.architecture||"unknown",c=this.adapterInfo.backend||"unknown",d=(this.adapterInfo.type||"").split(" ")[0].toLowerCase()||"unknown";return{type:"webgpu",vendor:r,renderer:i,version:s,gpu:a,gpuType:d,gpuBackend:c,gpuArchitecture:l,shadingLanguage:"wgsl",shadingLanguageVersion:100}}_getFeatures(){let e=new Set(this.handle.features);e.has("depth-clamping")&&(e.delete("depth-clamping"),e.add("depth-clip-control")),e.has("texture-compression-bc")&&e.add("texture-compression-bc5-webgl");let t=["timer-query-webgl","compilation-status-async-webgl","float32-renderable-webgl","float16-renderable-webgl","norm16-renderable-webgl","texture-filterable-anisotropic-webgl","shader-noperspective-interpolation-webgl"];for(let r of t)e.add(r);return new X.DeviceFeatures(Array.from(e),this.props._disabledFeatures)}_getDeviceSpecificTextureFormatCapabilities(e){let{format:t}=e;return t.includes("webgl")?{format:t,create:!1,render:!1,filter:!1,blend:!1,store:!1}:e}copyExternalImageToTexture(e){let{source:t,sourceX:r=0,sourceY:i=0,texture:s,mipLevel:a=0,aspect:l="all",colorSpace:c="display-p3",premultipliedAlpha:d=!1,width:g=s.width,height:u=s.height,depth:G=1}=e,J=s;this.handle?.queue.copyExternalImageToTexture({source:t,origin:[r,i]},{texture:J.handle,origin:[0,0,0],mipLevel:a,aspect:l,colorSpace:c,premultipliedAlpha:d},[g,u,G])}};var ee=class extends f.Adapter{type="webgpu";constructor(){super(),S.adapter=this}isSupported(){return Boolean(typeof navigator<"u"&&navigator.gpu)}async create(e){if(!navigator.gpu)throw new Error("WebGPU not available. 
Open in Chrome Canary and turn on chrome://flags/#enable-unsafe-webgpu");f.log.groupCollapsed(1,"WebGPUDevice created")();let t=await navigator.gpu.requestAdapter({powerPreference:"high-performance"});if(!t)throw new Error("Failed to request WebGPU adapter");let r=await t.requestAdapterInfo();f.log.probe(2,"Adapter available",r)();let i=[],s={};if(e._requestMaxLimits){i.push(...Array.from(t.features));let c=Object.keys(t.limits).filter(d=>!["minSubgroupSize","maxSubgroupSize"].includes(d));for(let d of c){let g=d,u=t.limits[g];typeof u=="number"&&(s[g]=u)}}let a=await t.requestDevice({requiredFeatures:i,requiredLimits:s});f.log.probe(1,"GPUDevice available")();let l=new S(e,a,t,r);return f.log.probe(1,"Device created. For more info, set chrome://flags/#enable-webgpu-developer-features")(),f.log.table(1,l.info)(),f.log.groupEnd(1)(),l}async attach(e){throw new Error("WebGPUAdapter.attach() not implemented")}},be=new ee;return Te(I);})();
"use strict";var __exports__=(()=>{var Je=Object.create;var U=Object.defineProperty;var Ze=Object.getOwnPropertyDescriptor;var Ke=Object.getOwnPropertyNames;var et=Object.getPrototypeOf,tt=Object.prototype.hasOwnProperty;var a=(n,e)=>()=>(n&&(e=n(n=0)),e);var rt=(n,e)=>()=>(e||n((e={exports:{}}).exports,e),e.exports),ue=(n,e)=>{for(var t in e)U(n,t,{get:e[t],enumerable:!0})},M=(n,e,t,r)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of Ke(e))!tt.call(n,i)&&i!==t&&U(n,i,{get:()=>e[i],enumerable:!(r=Ze(e,i))||r.enumerable});return n},k=(n,e,t)=>(M(n,e,"default"),t&&M(t,e,"default")),d=(n,e,t)=>(t=n!=null?Je(et(n)):{},M(e||!n||!n.__esModule?U(t,"default",{value:n,enumerable:!0}):t,n)),nt=n=>M(U({},"__esModule",{value:!0}),n);var h=rt((lt,fe)=>{fe.exports=globalThis.luma});function it(n){return n.byteLength||n.data?.byteLength||0}var $,b,re=a(()=>{"use strict";$=d(h(),1);b=class extends $.Buffer{device;handle;byteLength;constructor(e,t){super(e,t),this.device=e,this.byteLength=it(t);let r=Boolean(t.data),i=Math.ceil(this.byteLength/4)*4;this.device.handle.pushErrorScope("out-of-memory"),this.device.handle.pushErrorScope("validation"),this.handle=this.props.handle||this.device.handle.createBuffer({size:i,usage:this.props.usage||GPUBufferUsage.VERTEX|GPUBufferUsage.COPY_DST,mappedAtCreation:this.props.mappedAtCreation||r,label:this.props.id}),this.device.handle.popErrorScope().then(s=>{s&&this.device.reportError(new Error(`Buffer validation failed: ${s.message}`),this)}),this.device.handle.popErrorScope().then(s=>{s&&this.device.reportError(new Error(`Buffer out of memory: ${s.message}`),this)}),t.data&&this._writeMapped(t.data),r&&!t.mappedAtCreation&&this.handle.unmap()}destroy(){this.handle?.destroy(),this.handle=null}write(e,t=0){this.device.handle.queue.writeBuffer(this.handle,t,e.buffer,e.byteOffset,e.byteLength)}async readAsync(e=0,t=this.byteLength){let r=new b(this.device,{usage:$.Buffer.MAP_READ|$.Buffer.COPY_DST,byteLength:t}),i=this.device.handle.createCommandEncoder();i.copyBufferToBuffer(this.handle,e,r.handle,0,t),this.device.handle.queue.submit([i.finish()]),await r.handle.mapAsync(GPUMapMode.READ,e,t);let s=r.handle.getMappedRange().slice(0);return r.handle.unmap(),r.destroy(),new Uint8Array(s)}_writeMapped(e){let t=this.handle.getMappedRange();new e.constructor(t).set(e)}mapAsync(e,t=0,r){return this.handle.mapAsync(e,t,r)}getMappedRange(e=0,t){return this.handle.getMappedRange(e,t)}unmap(){this.handle.unmap()}}});function S(n){if(n.includes("webgl"))throw new Error("webgl-only format");return n}var ne=a(()=>{"use strict"});var me,p,T=a(()=>{"use strict";me=d(h(),1),p=class extends me.Sampler{device;handle;constructor(e,t){super(e,t),this.device=e;let r={...this.props,mipmapFilter:void 0};t.type!=="comparison-sampler"&&delete r.compare,t.mipmapFilter&&t.mipmapFilter!=="none"&&(r.mipmapFilter=t.mipmapFilter),this.handle=t.handle||this.device.handle.createSampler(r),this.handle.label=this.props.id}destroy(){this.handle=null}}});var ge,L,be=a(()=>{"use strict";ge=d(h(),1),L=class extends 
ge.TextureView{device;handle;texture;constructor(e,t){super(e,t),this.device=e,this.texture=t.texture,this.device.pushErrorScope("validation"),this.handle=this.texture.handle.createView({format:this.props.format||this.texture.format,dimension:this.props.dimension||this.texture.dimension,aspect:this.props.aspect,baseMipLevel:this.props.baseMipLevel,mipLevelCount:this.props.mipLevelCount,baseArrayLayer:this.props.baseArrayLayer,arrayLayerCount:this.props.arrayLayerCount}),this.device.handle.popErrorScope().then(r=>{r&&this.device.reportError(new Error(`TextureView validation failed: ${r.message}`),this)}),this.handle.label=this.props.id}destroy(){this.handle=null}}});var E,C,ie=a(()=>{"use strict";E=d(h(),1);ne();T();be();C=class extends E.Texture{device;handle;sampler;view;constructor(e,t){super(e,t),this.device=e,this.dimension==="cube"&&(this.depth=6),this.device.handle.pushErrorScope("out-of-memory"),this.device.handle.pushErrorScope("validation"),this.handle=this.props.handle||this.device.handle.createTexture({label:this.id,size:{width:this.width,height:this.height,depthOrArrayLayers:this.depth},usage:this.props.usage||E.Texture.TEXTURE|E.Texture.COPY_DST,dimension:this.baseDimension,format:S(this.format),mipLevelCount:this.mipLevels,sampleCount:this.props.samples}),this.device.handle.popErrorScope().then(r=>{r&&this.device.reportError(new Error(`Texture validation failed: ${r.message}`),this)}),this.device.handle.popErrorScope().then(r=>{r&&this.device.reportError(new Error(`Texture out of memory: ${r.message}`),this)}),this.props.handle&&(this.handle.label||=this.id,this.width=this.handle.width,this.height=this.handle.height),this.sampler=t.sampler instanceof p?t.sampler:new p(this.device,t.sampler||{}),this.view=new L(this.device,{...this.props,texture:this,mipLevelCount:this.mipLevels,arrayLayerCount:this.depth}),this._initializeData(t.data)}destroy(){this.handle?.destroy(),this.handle=null}createView(e){return new L(this.device,{...e,texture:this})}copyImageData(e){let{width:t,height:r,depth:i}=this,s=this._normalizeCopyImageDataOptions(e);this.device.handle.pushErrorScope("validation"),this.device.handle.queue.writeTexture({texture:this.handle,mipLevel:s.mipLevel,aspect:s.aspect,origin:[s.x,s.y,s.z]},s.data,{offset:s.byteOffset,bytesPerRow:s.bytesPerRow,rowsPerImage:s.rowsPerImage},[t,r,i]),this.device.handle.popErrorScope().then(o=>{o&&this.device.reportError(new Error(`copyImageData validation failed: ${o.message}`))})}copyExternalImage(e){let t=this._normalizeCopyExternalImageOptions(e);return this.device.handle.pushErrorScope("validation"),this.device.handle.queue.copyExternalImageToTexture({source:t.image,origin:[t.sourceX,t.sourceY],flipY:t.flipY},{texture:this.handle,origin:[t.x,t.y,t.depth],mipLevel:t.mipLevel,aspect:t.aspect,colorSpace:t.colorSpace,premultipliedAlpha:t.premultipliedAlpha},[t.width,t.height,1]),this.device.handle.popErrorScope().then(r=>{r&&this.device.reportError(new Error(`copyExternalImage validation failed: ${r.message}`))}),{width:t.width,height:t.height}}generateMipmapsWebGL(){E.log.warn(`${this}: generateMipmaps not supported in WebGPU`)()}}});var we,V,xe=a(()=>{"use strict";we=d(h(),1);T();V=class extends we.ExternalTexture{device;handle;sampler;constructor(e,t){super(e,t),this.device=e,this.handle=this.props.handle||this.device.handle.importExternalTexture({source:t.source,colorSpace:t.colorSpace}),this.sampler=null}destroy(){this.handle=null}setSampler(e){return this.sampler=e instanceof p?e:new p(this.device,e),this}}});var D,P,se=a(()=>{"use 
strict";D=d(h(),1),P=class extends D.Shader{device;handle;constructor(e,t){super(e,t),this.device=e;let r=t.source.includes("#version");if(this.props.language==="glsl"||r)throw new Error("GLSL shaders are not supported in WebGPU");this.device.handle.pushErrorScope("validation"),this.handle=this.props.handle||this.device.handle.createShaderModule({code:t.source}),this.device.handle.popErrorScope().then(i=>{i&&D.log.error(`${this} creation failed:
"${i.message}"`,this,this.props.source)()}),this.handle.label=this.props.id,this._checkCompilationError()}get asyncCompilationStatus(){return this.getCompilationInfo().then(()=>this.compilationStatus)}async _checkCompilationError(){let e=await this.getCompilationInfo(),t=Boolean(e.find(r=>r.type==="error"));this.compilationStatus=t?"error":"success",this.debugShader(),this.compilationStatus==="error"&&D.log.error("Shader compilation error",e)()}destroy(){this.handle=null}async getCompilationInfo(){return(await this.handle.getCompilationInfo()).messages}}});function f(n){return n.depthStencil=n.depthStencil||{format:"depth24plus",stencilFront:{},stencilBack:{},depthWriteEnabled:!1,depthCompare:"less-equal"},n.depthStencil}function q(n){return f(n).stencilFront}function Q(n){return f(n).stencilBack}function ve(n,e={}){Object.assign(n,{...ot,...n}),at(n,e)}function at(n,e){for(let[t,r]of Object.entries(e)){let i=st[t];if(!i)throw new Error(`Illegal parameter ${t}`);i(t,r,n)}}function ye(n,e){if(n.fragment.targets=n.fragment?.targets||[],!Array.isArray(n.fragment?.targets))throw new Error("colorstate");return n.fragment?.targets?.length===0&&n.fragment.targets?.push({}),n.fragment?.targets[0]}function v(n,e){let t=ye(n,e);return t.blend=t.blend||{color:{},alpha:{}},t.blend}var st,ot,Se=a(()=>{"use strict";st={cullMode:(n,e,t)=>{t.primitive=t.primitive||{},t.primitive.cullMode=e},frontFace:(n,e,t)=>{t.primitive=t.primitive||{},t.primitive.frontFace=e},depthWriteEnabled:(n,e,t)=>{if(e){let r=f(t);r.depthWriteEnabled=e}},depthCompare:(n,e,t)=>{let r=f(t);r.depthCompare=e},depthFormat:(n,e,t)=>{let r=f(t);r.format=e},depthBias:(n,e,t)=>{let r=f(t);r.depthBias=e},depthBiasSlopeScale:(n,e,t)=>{let r=f(t);r.depthBiasSlopeScale=e},depthBiasClamp:(n,e,t)=>{let r=f(t);r.depthBiasClamp=e},stencilReadMask:(n,e,t)=>{let r=f(t);r.stencilReadMask=e},stencilWriteMask:(n,e,t)=>{let r=f(t);r.stencilWriteMask=e},stencilCompare:(n,e,t)=>{let r=q(t),i=Q(t);r.compare=e,i.compare=e},stencilPassOperation:(n,e,t)=>{let r=q(t),i=Q(t);r.passOp=e,i.passOp=e},stencilFailOperation:(n,e,t)=>{let r=q(t),i=Q(t);r.failOp=e,i.failOp=e},stencilDepthFailOperation:(n,e,t)=>{let r=q(t),i=Q(t);r.depthFailOp=e,i.depthFailOp=e},sampleCount:(n,e,t)=>{t.multisample=t.multisample||{},t.multisample.count=e},sampleMask:(n,e,t)=>{t.multisample=t.multisample||{},t.multisample.mask=e},sampleAlphaToCoverageEnabled:(n,e,t)=>{t.multisample=t.multisample||{},t.multisample.alphaToCoverageEnabled=e},colorMask:(n,e,t)=>{let r=ye(t,0);r.writeMask=e},blend:(n,e,t)=>{e&&v(t,0)},blendColorOperation:(n,e,t)=>{let r=v(t,0);r.color=r.color||{},r.color.operation=e},blendColorSrcFactor:(n,e,t)=>{let r=v(t,0);r.color=r.color||{},r.color.srcFactor=e},blendColorDstFactor:(n,e,t)=>{let r=v(t,0);r.color.dstFactor=e},blendAlphaOperation:(n,e,t)=>{let r=v(t,0);r.alpha=r.alpha||{},r.alpha.operation=e},blendAlphaSrcFactor:(n,e,t)=>{let r=v(t,0);r.alpha=r.alpha||{},r.alpha.srcFactor=e},blendAlphaDstFactor:(n,e,t)=>{let r=v(t,0);r.alpha=r.alpha||{},r.alpha.dstFactor=e}},ot={primitive:{cullMode:"back",topology:"triangle-list"},vertex:{module:void 0,entryPoint:"main"},fragment:{module:void 0,entryPoint:"main",targets:[]},layout:"auto"}});function z(n,e,t,r){let i=ht(r,t);n.pushErrorScope("validation");let s=n.createBindGroup({layout:e,entries:i});return n.popErrorScope().then(o=>{o&&u.log.error(`createBindGroup validation failed: ${o.message}`)()}),s}function Ee(n,e,t){let 
r=n.bindings.find(i=>i.name===e||`${i.name}uniforms`===e.toLocaleLowerCase());return!r&&!t?.ignoreWarnings&&u.log.warn(`Binding ${e} not set: Not found in shader layout.`)(),r||null}function ht(n,e){let t=[];for(let[r,i]of Object.entries(n)){let s=Ee(e,r);if(s){let o=Ce(i,s.location);o&&t.push(o)}if(i instanceof u.Texture&&(s=Ee(e,`${r}Sampler`,{ignoreWarnings:!0}),s)){let o=Ce(i,s.location,{sampler:!0});o&&t.push(o)}}return t}function Ce(n,e,t){return n instanceof u.Buffer?{binding:e,resource:{buffer:n.handle}}:n instanceof u.Sampler?{binding:e,resource:n.handle}:n instanceof u.Texture?t?.sampler?{binding:e,resource:n.sampler.handle}:{binding:e,resource:n.view.handle}:(u.log.warn(`invalid binding ${name}`,n),null)}var u,oe=a(()=>{"use strict";u=d(h(),1)});function ae(n){if(n.endsWith("-webgl"))throw new Error(`WebGPU does not support vertex format ${n}`);return n}function Be(n,e){let t=[],r=new Set;for(let i of e){let s=[],o="vertex",c=0,g=i.format;if(i.attributes)for(let l of i.attributes){let A=l.attribute,y=Pe(n,A,r),R=y?.location;o=y?.stepMode||(y?.name.startsWith("instance")?"instance":"vertex"),s.push({format:ae(l.format||i.format),offset:l.byteOffset,shaderLocation:R}),c+=(0,B.getVertexFormatInfo)(g).byteLength}else{let l=Pe(n,i.name,r);if(!l)continue;c=(0,B.getVertexFormatInfo)(g).byteLength,o=l.stepMode||(l.name.startsWith("instance")?"instance":"vertex"),s.push({format:ae(g),offset:0,shaderLocation:l.location})}t.push({arrayStride:i.byteStride||c,stepMode:o,attributes:s})}for(let i of n.attributes)r.has(i.name)||t.push({arrayStride:(0,B.getVertexFormatInfo)("float32x3").byteLength,stepMode:i.stepMode||(i.name.startsWith("instance")?"instance":"vertex"),attributes:[{format:ae("float32x3"),offset:0,shaderLocation:i.location}]});return t}function Pe(n,e,t){let r=n.attributes.find(i=>i.name===e);if(!r)return B.log.warn(`Supplied attribute not present in shader layout: ${e}`)(),null;if(t.has(e))throw new Error(`Found multiple entries for attribute: ${e}`);return t.add(e),r}var B,_e=a(()=>{"use strict";B=d(h(),1)});var w,j,Ge=a(()=>{"use strict";w=d(h(),1);Se();ne();oe();_e();j=class extends w.RenderPipeline{device;handle;vs;fs=null;_bindings;_bindGroupLayout=null;_bindGroup=null;get[Symbol.toStringTag](){return"WebGPURenderPipeline"}constructor(e,t){if(super(e,t),this.device=e,this.handle=this.props.handle,!this.handle){let r=this._getRenderPipelineDescriptor();w.log.groupCollapsed(1,`new WebGPURenderPipeline(${this.id})`)(),w.log.probe(1,JSON.stringify(r,null,2))(),w.log.groupEnd(1)(),this.device.handle.pushErrorScope("validation"),this.handle=this.device.handle.createRenderPipeline(r),this.device.handle.popErrorScope().then(i=>{i&&w.log.error(`${this} creation failed:
"${i.message}"`,this,this.props.vs?.source)()})}this.handle.label=this.props.id,this.vs=t.vs,this.fs=t.fs,this._bindings={...this.props.bindings}}destroy(){this.handle=null}setBindings(e){for(let[t,r]of Object.entries(e))this._bindings[t]!==r&&(this._bindGroup=null);Object.assign(this._bindings,e)}draw(e){let t=e.renderPass;this.device.handle.pushErrorScope("validation"),t.handle.setPipeline(this.handle),this.device.handle.popErrorScope().then(i=>{i&&w.log.error(`${this} setPipeline failed:
"${i.message}"`,this)()});let r=this._getBindGroup();return r&&t.handle.setBindGroup(0,r),e.vertexArray.bindBeforeRender(e.renderPass),e.indexCount?t.handle.drawIndexed(e.indexCount,e.instanceCount,e.firstIndex,e.baseVertex,e.firstInstance):t.handle.draw(e.vertexCount||0,e.instanceCount||1,e.firstInstance),e.vertexArray.unbindAfterRender(e.renderPass),!0}_getBindGroup(){return this.shaderLayout.bindings.length===0?null:(this._bindGroupLayout=this._bindGroupLayout||this.handle.getBindGroupLayout(0),this._bindGroup=this._bindGroup||z(this.device.handle,this._bindGroupLayout,this.shaderLayout,this._bindings),this._bindGroup)}_getRenderPipelineDescriptor(){let e={module:this.props.vs.handle,entryPoint:this.props.vertexEntryPoint||"main",buffers:Be(this.shaderLayout,this.props.bufferLayout)},t=[];if(this.props.colorAttachmentFormats)for(let o of this.props.colorAttachmentFormats)t.push(o?{format:S(o)}:null);else t.push({format:S(this.device.preferredColorFormat)});let r={module:this.props.fs.handle,entryPoint:this.props.fragmentEntryPoint||"main",targets:t},i={vertex:e,fragment:r,primitive:{topology:this.props.topology},layout:"auto"},s=this.props.depthStencilAttachmentFormat||this.device.preferredDepthFormat;return this.props.parameters.depthWriteEnabled&&(i.depthStencil={format:S(s)}),ve(i,this.props.parameters),i}}});var Ae,_,he=a(()=>{"use strict";Ae=d(h(),1),_=class extends Ae.Framebuffer{device;colorAttachments=[];depthStencilAttachment=null;constructor(e,t){super(e,t),this.device=e,this.autoCreateAttachmentTextures()}updateAttachments(){}}});var Te,N,Le=a(()=>{"use strict";Te=d(h(),1);oe();N=class extends Te.ComputePipeline{device;handle;_bindGroupLayout=null;_bindGroup=null;_bindings={};constructor(e,t){super(e,t),this.device=e;let r=this.props.shader;this.handle=this.props.handle||this.device.handle.createComputePipeline({label:this.props.id,compute:{module:r.handle,entryPoint:this.props.entryPoint,constants:this.props.constants},layout:"auto"})}setBindings(e){Object.assign(this._bindings,e)}_getBindGroup(){return this._bindGroupLayout=this._bindGroupLayout||this.handle.getBindGroupLayout(0),this._bindGroup=this._bindGroup||z(this.device.handle,this._bindGroupLayout,this.shaderLayout,this._bindings),this._bindGroup}}});var Ft,It,Wt,De,Fe=a(()=>{Ft=globalThis.document||{},It=globalThis.process||{},Wt=globalThis.console,De=globalThis.navigator||{}});function H(n){if(typeof window<"u"&&window.process?.type==="renderer"||typeof process<"u"&&Boolean(process.versions?.electron))return!0;let e=typeof navigator<"u"&&navigator.userAgent,t=n||e;return Boolean(t&&t.indexOf("Electron")>=0)}var de=a(()=>{});function Ie(){return!(typeof process=="object"&&String(process)==="[object process]"&&!process?.browser)||H()}var We=a(()=>{de()});function le(n){return!n&&!Ie()?"Node":H(n)?"Electron":(n||De.userAgent||"").indexOf("Edge")>-1?"Edge":globalThis.chrome?"Chrome":globalThis.safari?"Safari":globalThis.mozInnerScreenX?"Firefox":"Unknown"}var Oe=a(()=>{We();de();Fe()});var Re=a(()=>{Oe()});var F,Y,Me=a(()=>{"use strict";F=d(h(),1);Re();Y=class extends F.VertexArray{get[Symbol.toStringTag](){return"WebGPUVertexArray"}device;handle=null;constructor(e,t){super(e,t),this.device=e}destroy(){}setIndexBuffer(e){this.indexBuffer=e}setBuffer(e,t){this.attributes[e]=t}bindBeforeRender(e,t,r){let i=e,s=this.indexBuffer;s?.handle&&(F.log.info(3,"setting index buffer",s?.handle,s?.indexType)(),i.handle.setIndexBuffer(s?.handle,s?.indexType));for(let o=0;o<this.maxVertexAttributes;o++){let 
c=this.attributes[o];c?.handle&&(F.log.info(3,`setting vertex buffer ${o}`,c?.handle)(),i.handle.setVertexBuffer(o,c?.handle))}}unbindAfterRender(e){}static isConstantAttributeZeroSupported(e){return le()==="Chrome"}}});var G,I,Ue=a(()=>{"use strict";G=d(h(),1);he();I=class extends G.CanvasContext{device;handle;depthStencilAttachment=null;get[Symbol.toStringTag](){return"WebGPUCanvasContext"}constructor(e,t,r){super(r);let i=this.canvas.getContext("webgpu");if(!i)throw new Error(`${this}: Failed to create WebGPU canvas context`);this.device=e,this.handle=i,this._setAutoCreatedCanvasId(`${this.device.id}-canvas`),this._updateDevice()}destroy(){this.handle.unconfigure()}getCurrentFramebuffer(e={depthStencilFormat:"depth24plus"}){let t=this.getCurrentTexture();if(t.width!==this.drawingBufferWidth||t.height!==this.drawingBufferHeight){let[r,i]=this.getDrawingBufferSize();this.drawingBufferWidth=t.width,this.drawingBufferHeight=t.height,G.log.log(1,`${this}: Resized to compensate for initial canvas size mismatch ${r}x${i} => ${this.drawingBufferWidth}x${this.drawingBufferHeight}px`)()}return e?.depthStencilFormat&&this._createDepthStencilAttachment(e?.depthStencilFormat),new _(this.device,{colorAttachments:[t],depthStencilAttachment:this.depthStencilAttachment})}_updateDevice(){this.depthStencilAttachment&&(this.depthStencilAttachment.destroy(),this.depthStencilAttachment=null),this.handle.configure({device:this.device.handle,format:this.device.preferredColorFormat,colorSpace:this.props.colorSpace,alphaMode:this.props.alphaMode})}getCurrentTexture(){let e=this.handle.getCurrentTexture();return this.device.createTexture({id:`${this.id}#color-texture`,handle:e,format:this.device.preferredColorFormat,width:e.width,height:e.height})}_createDepthStencilAttachment(e){return this.depthStencilAttachment||(this.depthStencilAttachment=this.device.createTexture({id:`${this.id}#depth-stencil-texture`,usage:G.Texture.RENDER_ATTACHMENT,format:e,width:this.drawingBufferWidth,height:this.drawingBufferHeight})),this.depthStencilAttachment}}});var ke,X,$e=a(()=>{"use strict";ke=d(h(),1),X=class extends ke.CommandBuffer{device;handle;constructor(e,t){super(e.device,{}),this.device=e.device,this.handle=this.props.handle||e.handle.finish({label:t?.id||"unnamed-command-buffer"})}}});var x,J,Ve=a(()=>{"use strict";x=d(h(),1),J=class extends x.RenderPass{device;handle;pipeline=null;constructor(e,t={}){super(e,t),this.device=e;let r=t.framebuffer||e.getCanvasContext().getCurrentFramebuffer(),i=this.getRenderPassDescriptor(r),s=t.timestampQuerySet;if(s&&(i.occlusionQuerySet=s.handle),e.features.has("timestamp-query")){let o=t.timestampQuerySet;i.timestampWrites=o?{querySet:o.handle,beginningOfPassWriteIndex:t.beginTimestampIndex,endOfPassWriteIndex:t.endTimestampIndex}:void 0}if(!e.commandEncoder)throw new Error("commandEncoder not available");this.device.handle.pushErrorScope("validation"),this.handle=this.props.handle||e.commandEncoder.handle.beginRenderPass(i),this.device.handle.popErrorScope().then(o=>{o&&x.log.error(`${this} creation failed:
"${o.message}"`,this)()}),this.handle.label=this.props.id,x.log.groupCollapsed(3,`new WebGPURenderPass(${this.id})`)(),x.log.probe(3,JSON.stringify(i,null,2))(),x.log.groupEnd(3)()}destroy(){}end(){this.handle.end()}setPipeline(e){this.pipeline=e,this.handle.setPipeline(this.pipeline.handle)}setBindings(e){this.pipeline?.setBindings(e);let t=this.pipeline?._getBindGroup();t&&this.handle.setBindGroup(0,t)}setIndexBuffer(e,t,r=0,i){this.handle.setIndexBuffer(e.handle,t,r,i)}setVertexBuffer(e,t,r=0){this.handle.setVertexBuffer(e,t.handle,r)}draw(e){e.indexCount?this.handle.drawIndexed(e.indexCount,e.instanceCount,e.firstIndex,e.baseVertex,e.firstInstance):this.handle.draw(e.vertexCount||0,e.instanceCount||1,e.firstIndex,e.firstInstance)}drawIndirect(){}setParameters(e){let{blendConstant:t,stencilReference:r,scissorRect:i,viewport:s}=e;t&&this.handle.setBlendConstant(t),r&&this.handle.setStencilReference(r),i&&this.handle.setScissorRect(i[0],i[1],i[2],i[3]),s&&this.handle.setViewport(s[0],s[1],s[2],s[3],s[4],s[5])}pushDebugGroup(e){this.handle.pushDebugGroup(e)}popDebugGroup(){this.handle.popDebugGroup()}insertDebugMarker(e){this.handle.insertDebugMarker(e)}beginOcclusionQuery(e){this.handle.beginOcclusionQuery(e)}endOcclusionQuery(){this.handle.endOcclusionQuery()}getRenderPassDescriptor(e){let t={colorAttachments:[]};if(t.colorAttachments=e.colorAttachments.map((r,i)=>({loadOp:this.props.clearColor!==!1?"clear":"load",colorClearValue:this.props.clearColors?.[i]||this.props.clearColor||x.RenderPass.defaultClearColor,storeOp:this.props.discard?"discard":"store",view:r.handle})),e.depthStencilAttachment){t.depthStencilAttachment={view:e.depthStencilAttachment.handle};let{depthStencilAttachment:r}=t;this.props.depthReadOnly&&(r.depthReadOnly=!0),this.props.clearDepth!==!1&&(r.depthClearValue=this.props.clearDepth),!0&&(r.depthLoadOp=this.props.clearDepth!==!1?"clear":"load",r.depthStoreOp="store"),!1&&(r.stencilLoadOp=this.props.clearStencil!==!1?"clear":"load",r.stencilStoreOp="store")}return t}}});var qe,Z,Qe=a(()=>{"use strict";qe=d(h(),1),Z=class extends qe.ComputePass{device;handle;_webgpuPipeline=null;constructor(e,t){super(e,t),this.device=e;let r;if(e.features.has("timestamp-query")){let i=t.timestampQuerySet;i&&(r={querySet:i.handle,beginningOfPassWriteIndex:t.beginTimestampIndex,endOfPassWriteIndex:t.endTimestampIndex})}this.handle=this.props.handle||e.commandEncoder.handle.beginComputePass({label:this.props.id,timestampWrites:r})}destroy(){}end(){this.handle.end()}setPipeline(e){let t=e;this.handle.setPipeline(t.handle),this._webgpuPipeline=t,this.setBindings([])}setBindings(e){let t=this._webgpuPipeline._getBindGroup();this.handle.setBindGroup(0,t)}dispatch(e,t,r){this.handle.dispatchWorkgroups(e,t,r)}dispatchIndirect(e,t=0){let r=e;this.handle.dispatchWorkgroupsIndirect(r.handle,t)}pushDebugGroup(e){this.handle.pushDebugGroup(e)}popDebugGroup(){this.handle.popDebugGroup()}insertDebugMarker(e){this.handle.insertDebugMarker(e)}}});var ze,K,je=a(()=>{"use strict";ze=d(h(),1);$e();Ve();Qe();K=class extends ze.CommandEncoder{device;handle;constructor(e,t={}){super(e,t),this.device=e,this.handle=t.handle||this.device.handle.createCommandEncoder({label:this.props.id}),this.handle.label=this.props.id}destroy(){}finish(e){return new X(this,{id:e?.id||"unnamed-command-buffer"})}beginRenderPass(e){return new J(this.device,e)}beginComputePass(e){return new Z(this.device,e)}copyBufferToBuffer(e){let 
t=e.sourceBuffer,r=e.destinationBuffer;this.handle.copyBufferToBuffer(t.handle,e.sourceOffset??0,r.handle,e.destinationOffset??0,e.size??0)}copyBufferToTexture(e){let t=e.sourceBuffer,r=e.destinationTexture;this.handle.copyBufferToTexture({buffer:t.handle,offset:e.offset??0,bytesPerRow:e.bytesPerRow,rowsPerImage:e.rowsPerImage},{texture:r.handle,mipLevel:e.mipLevel??0,origin:e.origin??{}},{width:e.extent?.[0],height:e.extent?.[1],depthOrArrayLayers:e.extent?.[2]})}copyTextureToBuffer(e){}copyTextureToTexture(e){}pushDebugGroup(e){this.handle.pushDebugGroup(e)}popDebugGroup(){this.handle.popDebugGroup()}insertDebugMarker(e){this.handle.insertDebugMarker(e)}resolveQuerySet(e,t,r){let i=e,s=t;this.handle.resolveQuerySet(i.handle,r?.firstQuery||0,r?.queryCount||e.props.count-(r?.firstQuery||0),s.handle,r?.destinationOffset||0)}}});var Ne,ee,He=a(()=>{"use strict";Ne=d(h(),1),ee=class extends Ne.QuerySet{device;handle;constructor(e,t){super(e,t),this.device=e,this.handle=this.props.handle||this.device.handle.createQuerySet({type:this.props.type,count:this.props.count}),this.handle.label=this.props.id}destroy(){this.handle?.destroy(),this.handle=null}}});var Ye={};ue(Ye,{WebGPUDevice:()=>W});var te,W,pe=a(()=>{"use strict";te=d(h(),1);re();ie();xe();T();se();Ge();he();Le();Me();Ue();je();He();W=class extends te.Device{handle;type="webgpu";preferredColorFormat=navigator.gpu.getPreferredCanvasFormat();preferredDepthFormat="depth24plus";features;info;limits;lost;canvasContext=null;_isLost=!1;commandEncoder;adapter;adapterInfo;constructor(e,t,r,i){if(super({...e,id:e.id||"webgpu-device"}),this.handle=t,this.adapter=r,this.adapterInfo=i,this.info=this._getInfo(),this.features=this._getFeatures(),this.limits=this.handle.limits,t.addEventListener("uncapturederror",s=>{s.preventDefault();let o=s instanceof GPUUncapturedErrorEvent?s.error.message:"Unknown WebGPU error";if(this.reportError(new Error(o)),this.props.debug)debugger}),this.lost=new Promise(async s=>{let o=await this.handle.lost;this._isLost=!0,s({reason:"destroyed",message:o.message})}),e.createCanvasContext){let s=e.createCanvasContext===!0?{}:e.createCanvasContext;this.canvasContext=new I(this,this.adapter,s)}this.commandEncoder=this.createCommandEncoder({})}destroy(){this.handle.destroy()}get isLost(){return this._isLost}createBuffer(e){let t=this._normalizeBufferProps(e);return new b(this,t)}createTexture(e){return new C(this,e)}createExternalTexture(e){return new V(this,e)}createShader(e){return new P(this,e)}createSampler(e){return new p(this,e)}createRenderPipeline(e){return new j(this,e)}createFramebuffer(e){return new _(this,e)}createComputePipeline(e){return new N(this,e)}createVertexArray(e){return new Y(this,e)}createCommandEncoder(e){return new K(this,e)}createTransformFeedback(e){throw new Error("Transform feedback not supported in WebGPU")}createQuerySet(e){return new ee(this,e)}createCanvasContext(e){return new I(this,this.adapter,e)}submit(e){e||(e=this.commandEncoder.finish(),this.commandEncoder.destroy(),this.commandEncoder=this.createCommandEncoder({id:`${this.id}-default-encoder`})),this.handle.pushErrorScope("validation"),this.handle.queue.submit([e.handle]),this.handle.popErrorScope().then(t=>{t&&this.reportError(new Error(`WebGPU command submission failed: ${t.message}`))})}pushErrorScope(e){this.handle.pushErrorScope(e)}popErrorScope(e){this.handle.popErrorScope().then(t=>{t&&e(t.message)})}_getInfo(){let[e,t]=(this.adapterInfo.driver||"").split(" Version 
"),r=this.adapterInfo.vendor||this.adapter.__brand||"unknown",i=e||"",s=t||"",o=r==="apple"?"apple":"unknown",c=this.adapterInfo.architecture||"unknown",g=this.adapterInfo.backend||"unknown",l=(this.adapterInfo.type||"").split(" ")[0].toLowerCase()||"unknown";return{type:"webgpu",vendor:r,renderer:i,version:s,gpu:o,gpuType:l,gpuBackend:g,gpuArchitecture:c,shadingLanguage:"wgsl",shadingLanguageVersion:100}}_getFeatures(){let e=new Set(this.handle.features);e.has("depth-clamping")&&(e.delete("depth-clamping"),e.add("depth-clip-control")),e.has("texture-compression-bc")&&e.add("texture-compression-bc5-webgl");let t=["timer-query-webgl","compilation-status-async-webgl","float32-renderable-webgl","float16-renderable-webgl","norm16-renderable-webgl","texture-filterable-anisotropic-webgl","shader-noperspective-interpolation-webgl"];for(let r of t)e.add(r);return new te.DeviceFeatures(Array.from(e),this.props._disabledFeatures)}_getDeviceSpecificTextureFormatCapabilities(e){let{format:t}=e;return t.includes("webgl")?{format:t,create:!1,render:!1,filter:!1,blend:!1,store:!1}:e}}});var O={};ue(O,{WebGPUBuffer:()=>b,WebGPUDevice:()=>W,WebGPUSampler:()=>p,WebGPUShader:()=>P,WebGPUTexture:()=>C,webgpuAdapter:()=>Xe});k(O,d(h(),1));var m=d(h(),1),ce=class extends m.Adapter{type="webgpu";isSupported(){return Boolean(typeof navigator<"u"&&navigator.gpu)}isDeviceHandle(e){return!!(typeof GPUDevice<"u"&&e instanceof GPUDevice||e?.queue)}async create(e){if(!navigator.gpu)throw new Error("WebGPU not available. Recent Chrome browsers should work.");m.log.groupCollapsed(1,"WebGPUDevice created")();let t=await navigator.gpu.requestAdapter({powerPreference:"high-performance"});if(!t)throw new Error("Failed to request WebGPU adapter");let r=t.info||await t.requestAdapterInfo?.();m.log.probe(2,"Adapter available",r)();let i=[],s={};if(e._requestMaxLimits){i.push(...Array.from(t.features));let l=Object.keys(t.limits).filter(A=>!["minSubgroupSize","maxSubgroupSize"].includes(A));for(let A of l){let y=A,R=t.limits[y];typeof R=="number"&&(s[y]=R)}}let o=await t.requestDevice({requiredFeatures:i,requiredLimits:s});m.log.probe(1,"GPUDevice available")();let{WebGPUDevice:c}=await Promise.resolve().then(()=>(pe(),Ye)),g=new c(e,o,t,r);return m.log.probe(1,"Device created. For more info, set chrome://flags/#enable-webgpu-developer-features")(),m.log.table(1,g.info)(),m.log.groupEnd(1)(),g}async attach(e){throw new Error("WebGPUAdapter.attach() not implemented")}},Xe=new ce;pe();re();ie();T();se();return nt(O);})();
return __exports__;
});
{
"name": "@luma.gl/webgpu",
"version": "9.1.0-beta.9",
"version": "9.2.0-alpha.1",
"description": "WebGPU adapter for the luma.gl core API",

@@ -40,3 +40,3 @@ "type": "module",

"peerDependencies": {
"@luma.gl/core": "^9.1.0-beta.1"
"@luma.gl/core": "9.2.0-alpha.0"
},

@@ -47,3 +47,3 @@ "dependencies": {

},
"gitHead": "ce90c649e27184c399eaeab814ec21a46f45677b"
"gitHead": "d6d2f791f2ce96f4b5acb68e05faea62c35440fb"
}

@@ -37,6 +37,13 @@ // luma.gl

const entries = getBindGroupEntries(bindings, shaderLayout);
return device.createBindGroup({
device.pushErrorScope('validation');
const bindGroup = device.createBindGroup({
layout: bindGroupLayout,
entries
});
device.popErrorScope().then((error: GPUError | null) => {
if (error) {
log.error(`createBindGroup validation failed: ${error.message}`)();
}
});
return bindGroup;
}
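
For readers unfamiliar with the pattern introduced throughout this release, WebGPU reports creation failures asynchronously through error scopes rather than thrown exceptions. A minimal sketch of the pattern using only the standard GPUDevice API (the resource created here is purely illustrative, and the types assume the ambient @webgpu/types declarations):

function createBindGroupLayoutWithValidation(device: GPUDevice): GPUBindGroupLayout {
  device.pushErrorScope('validation');
  // Creation never throws synchronously; errors are captured by the scope.
  const layout = device.createBindGroupLayout({entries: []});
  device.popErrorScope().then((error: GPUError | null) => {
    if (error) {
      // The promise resolves with the first error captured by the scope, or null.
      console.error(`createBindGroupLayout validation failed: ${error.message}`);
    }
  });
  return layout;
}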

@@ -46,3 +53,4 @@

shaderLayout: ComputeShaderLayout,
bindingName: string
bindingName: string,
options?: {ignoreWarnings?: boolean}
): BindingDeclaration | null {

@@ -53,3 +61,3 @@ const bindingLayout = shaderLayout.bindings.find(

);
if (!bindingLayout) {
if (!bindingLayout && !options?.ignoreWarnings) {
log.warn(`Binding ${bindingName} not set: Not found in shader layout.`)();

@@ -73,9 +81,19 @@ }

if (bindingLayout) {
entries.push(getBindGroupEntry(value, bindingLayout.location));
const entry = getBindGroupEntry(value, bindingLayout.location);
if (entry) {
entries.push(entry);
}
}
// TODO - hack to automatically bind samplers to supplied texture default samplers
bindingLayout = getShaderLayoutBinding(shaderLayout, `${bindingName}Sampler`);
if (bindingLayout) {
entries.push(getBindGroupEntry(value, bindingLayout.location, {sampler: true}));
if (value instanceof Texture) {
bindingLayout = getShaderLayoutBinding(shaderLayout, `${bindingName}Sampler`, {
ignoreWarnings: true
});
if (bindingLayout) {
const entry = getBindGroupEntry(value, bindingLayout.location, {sampler: true});
if (entry) {
entries.push(entry);
}
}
}
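
The block above means that supplying only a texture binding can also populate a sampler slot that follows the `${bindingName}Sampler` naming convention, with the shader-layout lookup warning suppressed when no such slot exists. A hedged sketch of what this looks like from the caller's side; the binding names and WGSL are illustrative, not taken from this package:

// Assumed WGSL for this sketch:
//   @group(0) @binding(0) var colorTexture: texture_2d<f32>;
//   @group(0) @binding(1) var colorTextureSampler: sampler;
import type {RenderPipeline, Texture} from '@luma.gl/core';

function bindSceneTexture(pipeline: RenderPipeline, texture: Texture): void {
  // Supplying only the texture is expected to fill both slots: the texture view at
  // the `colorTexture` location and the texture's default sampler at the
  // `colorTextureSampler` location, without a "not found in shader layout" warning.
  pipeline.setBindings({colorTexture: texture});
}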

@@ -91,3 +109,3 @@ }

options?: {sampler?: boolean}
): GPUBindGroupEntry {
): GPUBindGroupEntry | null {
if (binding instanceof Buffer) {

@@ -106,3 +124,4 @@ return {

};
} else if (binding instanceof Texture) {
}
if (binding instanceof Texture) {
if (options?.sampler) {

@@ -116,6 +135,7 @@ return {

binding: index,
resource: (binding as WebGPUTexture).handle.createView({label: 'bind-group-auto-created'})
resource: (binding as WebGPUTexture).view.handle
};
}
throw new Error('invalid binding');
log.warn(`invalid binding ${name}`, binding);
return null;
}

@@ -6,3 +6,3 @@ // luma.gl

import type {ShaderLayout, BufferLayout, AttributeDeclaration, VertexFormat} from '@luma.gl/core';
import {log, decodeVertexFormat} from '@luma.gl/core';
import {log, getVertexFormatInfo} from '@luma.gl/core';
// import {getAttributeInfosFromLayouts} from '@luma.gl/core';

@@ -62,3 +62,3 @@

byteStride += decodeVertexFormat(format).byteLength;
byteStride += getVertexFormatInfo(format).byteLength;
}

@@ -71,3 +71,3 @@ // non-interleaved mapping (just set offset and stride)

}
byteStride = decodeVertexFormat(format).byteLength;
byteStride = getVertexFormatInfo(format).byteLength;

@@ -97,3 +97,3 @@ stepMode =

vertexBufferLayouts.push({
arrayStride: decodeVertexFormat('float32x3').byteLength,
arrayStride: getVertexFormatInfo('float32x3').byteLength,
stepMode:

@@ -159,7 +159,7 @@ attribute.stepMode || (attribute.name.startsWith('instance') ? 'instance' : 'vertex'),

if (!attribute) {
log.warn(`Unknown attribute ${name}`)();
log.warn(`Supplied attribute not present in shader layout: ${name}`)();
return null;
}
if (attributeNames.has(name)) {
throw new Error(`Duplicate attribute ${name}`);
throw new Error(`Found multiple entries for attribute: ${name}`);
}

@@ -166,0 +166,0 @@ attributeNames.add(name);
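
The rename from decodeVertexFormat to getVertexFormatInfo is mechanical; the helper still reports the byte size used for the stride computations above. A small sketch, assuming the @luma.gl/core export shown in this diff:

import {getVertexFormatInfo} from '@luma.gl/core';

// 'float32x3' is three 4-byte floats, so the derived array stride is 12 bytes.
const info = getVertexFormatInfo('float32x3');
console.log(info.byteLength); // 12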

@@ -40,3 +40,3 @@ // luma.gl

cullMode: (parameter: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
cullMode: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
descriptor.primitive = descriptor.primitive || {};

@@ -46,3 +46,3 @@ descriptor.primitive.cullMode = value;

frontFace: (parameter: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
frontFace: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
descriptor.primitive = descriptor.primitive || {};

@@ -54,16 +54,10 @@ descriptor.primitive.frontFace = value;

depthWriteEnabled: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
const depthStencil = addDepthStencil(descriptor);
depthStencil.depthWriteEnabled = value;
depthWriteEnabled: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
if (value) {
const depthStencil = addDepthStencil(descriptor);
depthStencil.depthWriteEnabled = value;
}
},
depthCompare: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
depthCompare: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const depthStencil = addDepthStencil(descriptor);

@@ -73,7 +67,3 @@ depthStencil.depthCompare = value;

depthFormat: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
depthFormat: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const depthStencil = addDepthStencil(descriptor);

@@ -83,3 +73,3 @@ depthStencil.format = value;

depthBias: (parameter: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
depthBias: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const depthStencil = addDepthStencil(descriptor);

@@ -90,3 +80,3 @@ depthStencil.depthBias = value;

depthBiasSlopeScale: (
parameter: keyof Parameters,
_: keyof Parameters,
value: any,

@@ -99,7 +89,3 @@ descriptor: GPURenderPipelineDescriptor

depthBiasClamp: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
depthBiasClamp: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const depthStencil = addDepthStencil(descriptor);

@@ -111,7 +97,3 @@ depthStencil.depthBiasClamp = value;

stencilReadMask: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
stencilReadMask: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const depthStencil = addDepthStencil(descriptor);

@@ -121,7 +103,3 @@ depthStencil.stencilReadMask = value;

stencilWriteMask: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
stencilWriteMask: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const depthStencil = addDepthStencil(descriptor);

@@ -131,7 +109,3 @@ depthStencil.stencilWriteMask = value;

stencilCompare: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
stencilCompare: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const stencilFront = addDepthStencilFront(descriptor);

@@ -144,3 +118,3 @@ const stencilBack = addDepthStencilBack(descriptor);

stencilPassOperation: (
parameter: keyof Parameters,
_: keyof Parameters,
value: any,

@@ -156,3 +130,3 @@ descriptor: GPURenderPipelineDescriptor

stencilFailOperation: (
parameter: keyof Parameters,
_: keyof Parameters,
value: any,

@@ -168,3 +142,3 @@ descriptor: GPURenderPipelineDescriptor

stencilDepthFailOperation: (
parameter: keyof Parameters,
_: keyof Parameters,
value: any,

@@ -181,7 +155,3 @@ descriptor: GPURenderPipelineDescriptor

sampleCount: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
sampleCount: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
descriptor.multisample = descriptor.multisample || {};

@@ -191,7 +161,3 @@ descriptor.multisample.count = value;

sampleMask: (
parameter: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
sampleMask: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
descriptor.multisample = descriptor.multisample || {};

@@ -202,3 +168,3 @@ descriptor.multisample.mask = value;

sampleAlphaToCoverageEnabled: (
parameter: keyof Parameters,
_: keyof Parameters,
value: any,

@@ -213,56 +179,71 @@ descriptor: GPURenderPipelineDescriptor

colorMask: (parameter: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const targets = addColorState(descriptor);
targets[0].writeMask = value;
colorMask: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
const target = addColorState(descriptor, 0);
target.writeMask = value;
},
blend: (_: keyof Parameters, value: any, descriptor: GPURenderPipelineDescriptor) => {
if (value) {
addBlendState(descriptor, 0);
}
},
blendColorOperation: (
parameter: keyof Parameters,
_: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
addColorState(descriptor);
// const targets = addColorState(descriptor);
// const target = targets[0];
// const blend: GPUBlendState = target.blend || {color: {alpha: 0}};
// blend.color = blend.color || {};
// target.blend.color.operation = value;
}
const blend = addBlendState(descriptor, 0);
blend.color = blend.color || {};
blend.color.operation = value;
},
/*
blendColorSrcTarget: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.color = targets[0].blend.color || {};
targets[0].blend.color.srcTarget = value;
blendColorSrcFactor: (
_: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
const blend = addBlendState(descriptor, 0);
blend.color = blend.color || {};
blend.color.srcFactor = value;
},
blendColorDstTarget: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.color = targets[0].blend.color || {};
targets[0].blend.color.dstTarget = value;
blendColorDstFactor: (
_: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
const blend = addBlendState(descriptor, 0);
blend.color.dstFactor = value;
},
blendAlphaOperation: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.alpha = targets[0].blend.alpha || {};
targets[0].blend.alpha.operation = value;
blendAlphaOperation: (
_: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
const blend = addBlendState(descriptor, 0);
blend.alpha = blend.alpha || {};
blend.alpha.operation = value;
},
blendAlphaSrcTarget: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.alpha = targets[0].blend.alpha || {};
targets[0].blend.alpha.srcTarget = value;
blendAlphaSrcFactor: (
_: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
const blend = addBlendState(descriptor, 0);
blend.alpha = blend.alpha || {};
blend.alpha.srcFactor = value;
},
blendAlphaDstTarget: (parameter, value, descriptor: GPURenderPipelineDescriptor) => {
addColorState(descriptor);
targets[0].blend = targets[0].blend || {};
targets[0].blend.alpha = targets[0].blend.alpha || {};
targets[0].blend.alpha.dstTarget = value;
},
*/
blendAlphaDstFactor: (
_: keyof Parameters,
value: any,
descriptor: GPURenderPipelineDescriptor
) => {
const blend = addBlendState(descriptor, 0);
blend.alpha = blend.alpha || {};
blend.alpha.dstFactor = value;
}
};

@@ -323,5 +304,9 @@

function addColorState(descriptor: GPURenderPipelineDescriptor): GPUColorTargetState[] {
/** @todo - support multiple color targets... */
function addColorState(
descriptor: GPURenderPipelineDescriptor,
attachment: number
): GPUColorTargetState {
// @ts-ignore
descriptor.fragment.targets = descriptor.fragment?.targets || [];
descriptor.fragment.targets = descriptor.fragment?.targets || ([] as GPUColorTargetState[]);
if (!Array.isArray(descriptor.fragment?.targets)) {

@@ -333,3 +318,9 @@ throw new Error('colorstate');

}
return descriptor.fragment?.targets as GPUColorTargetState[];
return descriptor.fragment?.targets[0] as GPUColorTargetState;
}
function addBlendState(descriptor: GPURenderPipelineDescriptor, attachment: number): GPUBlendState {
const target = addColorState(descriptor, attachment);
target.blend = target.blend || {color: {}, alpha: {}};
return target.blend;
}
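For orientation, the parameter setters above all funnel into the blend state of color target 0 on the GPURenderPipelineDescriptor. A minimal sketch of the color-target state they produce, assuming a typical alpha-blending setup (the parameter values in the comment are illustrative, not defaults confirmed by this diff):

// Illustrative GPUColorTargetState after applying e.g.
//   blend: true, blendColorOperation: 'add',
//   blendColorSrcFactor: 'src-alpha', blendColorDstFactor: 'one-minus-src-alpha',
//   blendAlphaOperation: 'add', blendAlphaSrcFactor: 'one', blendAlphaDstFactor: 'one-minus-src-alpha'
const colorTarget: GPUColorTargetState = {
  format: 'bgra8unorm',         // comes from the pipeline's color attachment format
  writeMask: GPUColorWrite.ALL, // set by the colorMask setter
  blend: {
    color: {operation: 'add', srcFactor: 'src-alpha', dstFactor: 'one-minus-src-alpha'},
    alpha: {operation: 'add', srcFactor: 'one', dstFactor: 'one-minus-src-alpha'}
  }
};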

@@ -27,2 +27,4 @@ // luma.gl

this.device.handle.pushErrorScope('out-of-memory');
this.device.handle.pushErrorScope('validation');
this.handle =

@@ -37,2 +39,12 @@ this.props.handle ||

});
this.device.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.device.reportError(new Error(`Buffer validation failed: ${error.message}`), this);
}
});
this.device.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.device.reportError(new Error(`Buffer out of memory: ${error.message}`), this);
}
});
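The buffer constructor change above brackets the allocation with two nested WebGPU error scopes; scopes resolve in LIFO order, so the first popErrorScope reports validation errors and the second reports out-of-memory errors. A self-contained sketch of the same pattern against the raw WebGPU API (size and usage flags are placeholder values):

// Sketch: wrap a resource allocation in nested error scopes
function createBufferChecked(device: GPUDevice, byteLength: number): GPUBuffer {
  device.pushErrorScope('out-of-memory');
  device.pushErrorScope('validation');

  const buffer = device.createBuffer({
    size: byteLength,
    usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST
  });

  // Scopes pop innermost-first: validation, then out-of-memory
  device.popErrorScope().then(error => {
    if (error) console.error(`Buffer validation failed: ${error.message}`);
  });
  device.popErrorScope().then(error => {
    if (error) console.error(`Buffer out of memory: ${error.message}`);
  });

  return buffer;
}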

@@ -39,0 +51,0 @@ if (props.data) {

@@ -5,6 +5,14 @@ // luma.gl

import type {
RenderPassProps,
ComputePassProps,
CopyTextureToTextureOptions,
CopyTextureToBufferOptions
} from '@luma.gl/core';
import {CommandEncoder, CommandEncoderProps, Buffer, Texture} from '@luma.gl/core';
import type {CopyTextureToTextureOptions, CopyTextureToBufferOptions} from '@luma.gl/core';
import {WebGPUDevice} from '../webgpu-device';
import {WebGPUCommandBuffer} from './webgpu-command-buffer';
import {WebGPUBuffer} from './webgpu-buffer';
import {WebGPURenderPass} from './webgpu-render-pass';
import {WebGPUComputePass} from './webgpu-compute-pass';
import {WebGPUTexture} from './webgpu-texture';

@@ -17,3 +25,3 @@ import {WebGPUQuerySet} from './webgpu-query-set';

constructor(device: WebGPUDevice, props: CommandEncoderProps) {
constructor(device: WebGPUDevice, props: CommandEncoderProps = {}) {
super(device, props);

@@ -24,2 +32,3 @@ this.device = device;

this.device.handle.createCommandEncoder({
label: this.props.id
// TODO was this removed in standard?

@@ -33,6 +42,20 @@ // measureExecutionTime: this.props.measureExecutionTime

finish(options?: {id?: string}): GPUCommandBuffer {
return this.finish(options);
finish(props?: CommandEncoderProps): WebGPUCommandBuffer {
return new WebGPUCommandBuffer(this, {
id: props?.id || 'unnamed-command-buffer'
});
}
/**
* Allows a render pass to begin against a canvas context
* @todo need to support a "Framebuffer" equivalent (aka preconfigured RenderPassDescriptors?).
*/
beginRenderPass(props: RenderPassProps): WebGPURenderPass {
return new WebGPURenderPass(this.device, props);
}
beginComputePass(props: ComputePassProps): WebGPUComputePass {
return new WebGPUComputePass(this.device, props);
}
// beginRenderPass(GPURenderPassDescriptor descriptor): GPURenderPassEncoder;

@@ -39,0 +62,0 @@ // beginComputePass(optional GPUComputePassDescriptor descriptor = {}): GPUComputePassEncoder;
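Putting the new encoder API together: finish() now returns a WebGPUCommandBuffer, and render/compute passes are begun from the encoder. A hypothetical usage sketch based only on the signatures shown above; prop names such as clearColor are assumptions from luma.gl's core types, not confirmed here:

// Hypothetical usage of the command-encoder flow introduced above
const encoder = device.createCommandEncoder({id: 'frame-encoder'});

const renderPass = encoder.beginRenderPass({clearColor: [0, 0, 0, 1]});
// ... record draw calls against renderPass ...
renderPass.end();

const commandBuffer = encoder.finish({id: 'frame-commands'});
device.submit(commandBuffer);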

@@ -36,3 +36,3 @@ // luma.gl

this.props.handle ||
device.commandEncoder?.beginComputePass({
device.commandEncoder.handle.beginComputePass({
label: this.props.id,

@@ -39,0 +39,0 @@ timestampWrites

@@ -50,3 +50,4 @@ // luma.gl

this.device.handle.pushErrorScope('validation');
this.handle = this.props.handle || device.commandEncoder.beginRenderPass(renderPassDescriptor);
this.handle =
this.props.handle || device.commandEncoder.handle.beginRenderPass(renderPassDescriptor);
this.device.handle.popErrorScope().then((error: GPUError | null) => {

@@ -53,0 +54,0 @@ if (error) {

@@ -33,2 +33,6 @@ // luma.gl MIT license

override get [Symbol.toStringTag]() {
return 'WebGPURenderPipeline';
}
constructor(device: WebGPUDevice, props: RenderPipelineProps) {

@@ -72,2 +76,8 @@ super(device, props);

setBindings(bindings: Record<string, Binding>): void {
// Invalidate the cached bind group if any value has changed
for (const [name, binding] of Object.entries(bindings)) {
if (this._bindings[name] !== binding) {
this._bindGroup = null;
}
}
Object.assign(this._bindings, bindings);

@@ -161,2 +171,13 @@ }

// Populate color targets
// TODO - at the moment blend and write mask are only set on the first target
const targets: (GPUColorTargetState | null)[] = [];
if (this.props.colorAttachmentFormats) {
for (const format of this.props.colorAttachmentFormats) {
targets.push(format ? {format: getWebGPUTextureFormat(format)} : null);
}
} else {
targets.push({format: getWebGPUTextureFormat(this.device.preferredColorFormat)});
}
// Set up the fragment stage

@@ -166,8 +187,3 @@ const fragment: GPUFragmentState = {

entryPoint: this.props.fragmentEntryPoint || 'main',
targets: [
{
// TODO exclamation mark hack!
format: getWebGPUTextureFormat(this.device.getCanvasContext().format)
}
]
targets
};
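The target construction above preserves attachment indices: a null entry in colorAttachmentFormats becomes a null color target rather than shifting later attachments. A small sketch of that mapping with hypothetical formats:

// Sketch: colorAttachmentFormats → GPUFragmentState.targets (null holes keep indices stable)
const colorAttachmentFormats = ['rgba8unorm', null, 'rgba16float'] as const;
const targets: (GPUColorTargetState | null)[] = colorAttachmentFormats.map(format =>
  format ? {format} : null
);
// targets === [{format: 'rgba8unorm'}, null, {format: 'rgba16float'}]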

@@ -185,2 +201,11 @@

// Set depth format if required, defaulting to the preferred depth format
const depthFormat = this.props.depthStencilAttachmentFormat || this.device.preferredDepthFormat;
if (this.props.parameters.depthWriteEnabled) {
descriptor.depthStencil = {
format: getWebGPUTextureFormat(depthFormat)
};
}
// Set parameters on the descriptor

@@ -187,0 +212,0 @@ applyParametersToRenderPipelineDescriptor(descriptor, this.props.parameters);

@@ -38,3 +38,3 @@ // luma.gl, MIT license

this.handle = this.handle || this.device.handle.createSampler(samplerDescriptor);
this.handle = props.handle || this.device.handle.createSampler(samplerDescriptor);
this.handle.label = this.props.id;

@@ -41,0 +41,0 @@ }

@@ -41,13 +41,20 @@ // luma.gl

this.device.pushErrorScope('validation');
this.handle =
this.handle ||
// props.handle ||
this.texture.handle.createView({
format: (props.format || this.texture.format) as GPUTextureFormat,
dimension: props.dimension || this.texture.dimension,
aspect: props.aspect,
baseMipLevel: props.baseMipLevel,
mipLevelCount: props.mipLevelCount, // GPUIntegerCoordinate;
baseArrayLayer: props.baseArrayLayer, // GPUIntegerCoordinate;
arrayLayerCount: props.arrayLayerCount // GPUIntegerCoordinate;
format: (this.props.format || this.texture.format) as GPUTextureFormat,
dimension: this.props.dimension || this.texture.dimension,
aspect: this.props.aspect,
baseMipLevel: this.props.baseMipLevel,
mipLevelCount: this.props.mipLevelCount,
baseArrayLayer: this.props.baseArrayLayer,
arrayLayerCount: this.props.arrayLayerCount
});
this.device.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.device.reportError(new Error(`TextureView validation failed: ${error.message}`), this);
}
});
this.handle.label = this.props.id;

@@ -54,0 +61,0 @@ }

// luma.gl, MIT license
import type {
// Device,
TextureProps,
TextureViewProps,
Sampler,
SamplerProps,
// TextureFormat,
// TextureCubeFace,
// ExternalImage,
// TextureLevelData,
Texture1DData,
Texture2DData,
Texture3DData,
TextureCubeData,
TextureArrayData,
TextureCubeArrayData,
ExternalImage
CopyExternalImageOptions,
CopyImageDataOptions
} from '@luma.gl/core';
import {Texture} from '@luma.gl/core';
import {Texture, log} from '@luma.gl/core';

@@ -27,15 +15,5 @@ import {getWebGPUTextureFormat} from '../helpers/convert-texture-format';

const BASE_DIMENSIONS: Record<string, '1d' | '2d' | '3d'> = {
'1d': '1d',
'2d': '2d',
'2d-array': '2d',
cube: '2d',
'cube-array': '2d',
'3d': '3d'
};
export class WebGPUTexture extends Texture {
readonly device: WebGPUDevice;
readonly handle: GPUTexture;
sampler: WebGPUSampler;

@@ -48,46 +26,43 @@ view: WebGPUTextureView;

// Texture base class strips out the data prop, so we need to add it back in
const propsWithData = {...this.props};
if (props.data) {
propsWithData.data = props.data;
if (this.dimension === 'cube') {
this.depth = 6;
}
this.initialize(propsWithData);
}
this.device.handle.pushErrorScope('out-of-memory');
this.device.handle.pushErrorScope('validation');
override destroy(): void {
this.handle?.destroy();
// @ts-expect-error readonly
this.handle = null;
}
this.handle =
this.props.handle ||
this.device.handle.createTexture({
label: this.id,
size: {
width: this.width,
height: this.height,
depthOrArrayLayers: this.depth
},
usage: this.props.usage || Texture.TEXTURE | Texture.COPY_DST,
dimension: this.baseDimension,
format: getWebGPUTextureFormat(this.format),
mipLevelCount: this.mipLevels,
sampleCount: this.props.samples
});
this.device.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.device.reportError(new Error(`Texture validation failed: ${error.message}`), this);
}
});
this.device.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.device.reportError(new Error(`Texture out of memory: ${error.message}`), this);
}
});
createView(props: TextureViewProps): WebGPUTextureView {
return new WebGPUTextureView(this.device, {...props, texture: this});
}
protected initialize(props: TextureProps): void {
// @ts-expect-error
this.handle = this.props.handle || this.createHandle();
this.handle.label ||= this.id;
if (this.props.data) {
if (Texture.isExternalImage(this.props.data)) {
this.copyExternalImage({image: this.props.data});
} else {
this.setData({data: this.props.data});
}
// Update props if external handle was supplied - used mainly by CanvasContext.getDefaultFramebuffer()
// TODO - Read all properties directly from the supplied handle?
if (this.props.handle) {
this.handle.label ||= this.id;
this.width = this.handle.width;
this.height = this.handle.height;
}
this.width = this.handle.width;
this.height = this.handle.height;
// Why not just read all properties directly from the texture
// this.depthOrArrayLayers = this.handle.depthOrArrayLayers;
// this.mipLevelCount = this.handle.mipLevelCount;
// this.sampleCount = this.handle.sampleCount;
// this.dimension = this.handle.dimension;
// this.format = this.handle.format;
// this.usage = this.handle.usage;
// Create a default sampler. This mimics the WebGL1 API where sampler props are stored on the texture
// this.setSampler(props.sampler);
this.sampler =

@@ -98,130 +73,66 @@ props.sampler instanceof WebGPUSampler

// TODO - To support texture arrays we need to create custom views...
// But we are not ready to expose TextureViews to the public API.
// @ts-expect-error
this.view = new WebGPUTextureView(this.device, {...this.props, texture: this});
// format: this.props.format,
// dimension: this.props.dimension,
// aspect = "all";
// baseMipLevel: 0;
// mipLevelCount;
// baseArrayLayer = 0;
// arrayLayerCount;
}
protected createHandle(): GPUTexture {
// Deduce size from data - TODO this is a hack
// @ts-expect-error
const width = this.props.width || this.props.data?.width || 1;
// @ts-expect-error
const height = this.props.height || this.props.data?.height || 1;
return this.device.handle.createTexture({
label: this.id,
size: {
width,
height,
depthOrArrayLayers: this.depth
},
usage: this.props.usage || Texture.TEXTURE | Texture.COPY_DST,
dimension: BASE_DIMENSIONS[this.dimension],
format: getWebGPUTextureFormat(this.format),
this.view = new WebGPUTextureView(this.device, {
...this.props,
texture: this,
mipLevelCount: this.mipLevels,
sampleCount: this.props.samples
arrayLayerCount: this.depth
});
}
/** @deprecated - intention is to use the createView public API */
createGPUTextureView(): GPUTextureView {
return this.handle.createView({label: this.id});
// Set initial data
// Texture base class strips out the data prop from this.props, so we need to handle it here
this._initializeData(props.data);
}
/**
* Set default sampler
* Accept a sampler instance or set of props;
*/
setSampler(sampler: Sampler | SamplerProps): this {
this.sampler =
sampler instanceof WebGPUSampler ? sampler : new WebGPUSampler(this.device, sampler);
return this;
override destroy(): void {
this.handle?.destroy();
// @ts-expect-error readonly
this.handle = null;
}
setTexture1DData(data: Texture1DData): void {
throw new Error('not implemented');
createView(props: TextureViewProps): WebGPUTextureView {
return new WebGPUTextureView(this.device, {...props, texture: this});
}
setTexture2DData(lodData: Texture2DData, depth?: number, target?: number): void {
throw new Error('not implemented');
copyImageData(options_: CopyImageDataOptions): void {
const {width, height, depth} = this;
const options = this._normalizeCopyImageDataOptions(options_);
this.device.handle.pushErrorScope('validation');
this.device.handle.queue.writeTexture(
// destination: GPUImageCopyTexture
{
// texture subresource
texture: this.handle,
mipLevel: options.mipLevel,
aspect: options.aspect,
// origin to write to
origin: [options.x, options.y, options.z]
},
// data
options.data,
// dataLayout: GPUImageDataLayout
{
offset: options.byteOffset,
bytesPerRow: options.bytesPerRow,
rowsPerImage: options.rowsPerImage
},
// size: GPUExtent3D - extents of the content to write
[width, height, depth]
);
this.device.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.device.reportError(new Error(`copyImageData validation failed: ${error.message}`));
}
});
}
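copyImageData above forwards CPU-side data to GPUQueue.writeTexture with an explicit layout. A hedged usage sketch, assuming an existing WebGPUTexture named texture with an rgba8unorm format (the sizes are placeholders; the option names follow the code above):

// Sketch: upload tightly packed RGBA8 pixels into mip level 0
const width = 256;
const height = 256;
const data = new Uint8Array(width * height * 4); // 4 bytes per rgba8unorm texel

texture.copyImageData({
  data,
  bytesPerRow: width * 4, // tightly packed rows
  rowsPerImage: height,
  mipLevel: 0,
  x: 0,
  y: 0,
  z: 0
});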
setTexture3DData(lodData: Texture3DData, depth?: number, target?: number): void {
throw new Error('not implemented');
}
copyExternalImage(options_: CopyExternalImageOptions): {width: number; height: number} {
const options = this._normalizeCopyExternalImageOptions(options_);
setTextureCubeData(data: TextureCubeData, depth?: number): void {
throw new Error('not implemented');
}
setTextureArrayData(data: TextureArrayData): void {
throw new Error('not implemented');
}
setTextureCubeArrayData(data: TextureCubeArrayData): void {
throw new Error('not implemented');
}
setData(options: {data: any}): {width: number; height: number} {
if (ArrayBuffer.isView(options.data)) {
const clampedArray = new Uint8ClampedArray(options.data.buffer);
// TODO - pass through src data color space as ImageData Options?
const image = new ImageData(clampedArray, this.width, this.height);
return this.copyExternalImage({image});
}
throw new Error('Texture.setData: Use CommandEncoder to upload data to texture in WebGPU');
}
copyExternalImage(options: {
image: ExternalImage;
width?: number;
height?: number;
depth?: number;
sourceX?: number;
sourceY?: number;
mipLevel?: number;
x?: number;
y?: number;
z?: number;
aspect?: 'all' | 'stencil-only' | 'depth-only';
colorSpace?: 'srgb';
premultipliedAlpha?: boolean;
}): {width: number; height: number} {
const size = Texture.getExternalImageSize(options.image);
const opts = {...Texture.defaultCopyExternalImageOptions, ...size, ...options};
const {
image,
sourceX,
sourceY,
width,
height,
depth,
mipLevel,
x,
y,
z,
aspect,
colorSpace,
premultipliedAlpha,
flipY
} = opts;
// TODO - max out width
this.device.handle.pushErrorScope('validation');
this.device.handle.queue.copyExternalImageToTexture(
// source: GPUImageCopyExternalImage
{
source: image,
origin: [sourceX, sourceY],
flipY
source: options.image,
origin: [options.sourceX, options.sourceY],
flipY: options.flipY
},

@@ -231,14 +142,25 @@ // destination: GPUImageCopyTextureTagged

texture: this.handle,
origin: [x, y, z],
mipLevel,
aspect,
colorSpace,
premultipliedAlpha
origin: [options.x, options.y, options.depth],
mipLevel: options.mipLevel,
aspect: options.aspect,
colorSpace: options.colorSpace,
premultipliedAlpha: options.premultipliedAlpha
},
// copySize: GPUExtent3D
[width, height, depth]
[options.width, options.height, 1]
);
return {width, height};
this.device.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.device.reportError(new Error(`copyExternalImage validation failed: ${error.message}`));
}
});
// TODO - should these be clipped to the texture size minus x,y,z?
return {width: options.width, height: options.height};
}
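copyExternalImage above wraps GPUQueue.copyExternalImageToTexture in a validation scope. A hedged usage sketch, assuming an existing WebGPUTexture and a hypothetical image URL:

// Sketch: decode an image and copy it into an existing texture
async function uploadImage(texture: WebGPUTexture): Promise<void> {
  const response = await fetch('texture.png'); // hypothetical asset URL
  const image = await createImageBitmap(await response.blob());
  texture.copyExternalImage({image, flipY: false, x: 0, y: 0, mipLevel: 0});
}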
override generateMipmapsWebGL(): void {
log.warn(`${this}: generateMipmaps not supported in WebGPU`)();
}
// WebGPU specific

@@ -245,0 +167,0 @@

@@ -21,4 +21,4 @@ // luma.gl

readonly device: WebGPUDevice;
/** Vertex Array is a helper class under WebGPU */
readonly handle: never;
/** Vertex Array is just a helper class under WebGPU */
readonly handle: null = null;

@@ -25,0 +25,0 @@ // Create a VertexArray

@@ -5,7 +5,8 @@ // luma.gl

// prettier-ignore
// / <reference types="@webgpu/types" />
import {Adapter, DeviceProps, log} from '@luma.gl/core';
import {WebGPUDevice} from './webgpu-device';
import type {WebGPUDevice} from './webgpu-device';
// / <reference types="@webgpu/types" />
export class WebGPUAdapter extends Adapter {

@@ -15,19 +16,25 @@ /** type of device's created by this adapter */

constructor() {
super();
// @ts-ignore For backwards compatibility luma.registerDevices
WebGPUDevice.adapter = this;
}
/** Check if WebGPU is available */
isSupported(): boolean {
// Check if WebGPU is available
return Boolean(typeof navigator !== 'undefined' && navigator.gpu);
}
isDeviceHandle(handle: unknown): boolean {
if (typeof GPUDevice !== 'undefined' && handle instanceof GPUDevice) {
return true;
}
// TODO - WebGPU does not yet seem to have a stable in-browser API, so we "sniff" for members instead
if ((handle as any)?.queue) {
return true;
}
return false;
}
async create(props: DeviceProps): Promise<WebGPUDevice> {
if (!navigator.gpu) {
throw new Error(
'WebGPU not available. Open in Chrome Canary and turn on chrome://flags/#enable-unsafe-webgpu'
);
throw new Error('WebGPU not available. Recent Chrome browsers should work.');
}
log.groupCollapsed(1, 'WebGPUDevice created')();

@@ -43,3 +50,7 @@ const adapter = await navigator.gpu.requestAdapter({

const adapterInfo = await adapter.requestAdapterInfo();
// Note: adapter.requestAdapterInfo() has been replaced with adapter.info. Fall back in case adapter.info is not available
const adapterInfo =
adapter.info ||
// @ts-ignore
(await adapter.requestAdapterInfo?.());
log.probe(2, 'Adapter available', adapterInfo)();
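The fallback above handles the WebGPU API change where GPUAdapter.requestAdapterInfo() was replaced by the synchronous GPUAdapter.info attribute. A standalone sketch of the same detection:

// Sketch: read adapter info on both newer and older WebGPU implementations
async function getAdapterInfo(adapter: GPUAdapter): Promise<GPUAdapterInfo | undefined> {
  if (adapter.info) {
    return adapter.info; // current spec: synchronous attribute
  }
  // @ts-ignore - older implementations exposed an async method instead
  return adapter.requestAdapterInfo?.();
}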

@@ -75,2 +86,4 @@

const {WebGPUDevice} = await import('./webgpu-device');
const device = new WebGPUDevice(props, gpuDevice, adapter, adapterInfo);

@@ -77,0 +90,0 @@

@@ -5,7 +5,7 @@ // luma.gl

// prettier-ignore
// / <reference types="@webgpu/types" />
import type {Texture, TextureFormat, CanvasContextProps} from '@luma.gl/core';
import {CanvasContext, log} from '@luma.gl/core';
import {getWebGPUTextureFormat} from './helpers/convert-texture-format';
import type {DepthStencilTextureFormat, CanvasContextProps} from '@luma.gl/core';
import {CanvasContext, Texture, log} from '@luma.gl/core';
import {WebGPUDevice} from './webgpu-device';

@@ -22,9 +22,5 @@ import {WebGPUFramebuffer} from './resources/webgpu-framebuffer';

readonly device: WebGPUDevice;
readonly gpuCanvasContext: GPUCanvasContext;
/** Format of returned textures: "bgra8unorm", "rgba8unorm", "rgba16float". */
readonly format: TextureFormat = navigator.gpu.getPreferredCanvasFormat() as TextureFormat;
/** Default stencil format for depth textures */
readonly depthStencilFormat: TextureFormat = 'depth24plus';
readonly handle: GPUCanvasContext;
private depthStencilAttachment: Texture | null = null;
private depthStencilAttachment: WebGPUTexture | null = null;

@@ -37,13 +33,13 @@ get [Symbol.toStringTag](): string {

super(props);
const context = this.canvas.getContext('webgpu');
if (!context) {
throw new Error(`${this}: Failed to create WebGPU canvas context`);
}
this.device = device;
// TODO - ugly hack to trigger first resize
this.width = -1;
this.height = -1;
this.handle = context;
// Base class constructor cannot access derived methods/fields, so we need to call these functions in the subclass constructor
this._setAutoCreatedCanvasId(`${this.device.id}-canvas`);
// @ts-ignore TODO - we don't handle OffscreenRenderingContext.
this.gpuCanvasContext = this.canvas.getContext('webgpu');
// TODO this has been replaced
// this.format = this.gpuCanvasContext.getPreferredFormat(adapter);
this.format = 'bgra8unorm';
this._updateDevice();
}

@@ -53,26 +49,31 @@

destroy(): void {
this.gpuCanvasContext.unconfigure();
this.handle.unconfigure();
}
/** Update framebuffer with properly resized "swap chain" texture views */
getCurrentFramebuffer(): WebGPUFramebuffer {
// Ensure the canvas context size is updated
this.update();
getCurrentFramebuffer(
options: {depthStencilFormat?: DepthStencilTextureFormat | false} = {
depthStencilFormat: 'depth24plus'
}
): WebGPUFramebuffer {
// Wrap the current canvas context texture in a luma.gl texture
// const currentColorAttachment = this.device.createTexture({
// id: 'default-render-target',
// handle: this.gpuCanvasContext.getCurrentTexture(),
// format: this.format,
// width: this.width,
// height: this.height
// });
// Wrap the current canvas context texture in a luma.gl texture
const currentColorAttachment = this.getCurrentTexture();
this.width = currentColorAttachment.width;
this.height = currentColorAttachment.height;
// TODO - temporary debug code
if (
currentColorAttachment.width !== this.drawingBufferWidth ||
currentColorAttachment.height !== this.drawingBufferHeight
) {
const [oldWidth, oldHeight] = this.getDrawingBufferSize();
this.drawingBufferWidth = currentColorAttachment.width;
this.drawingBufferHeight = currentColorAttachment.height;
log.log(
1,
`${this}: Resized to compensate for initial canvas size mismatch ${oldWidth}x${oldHeight} => ${this.drawingBufferWidth}x${this.drawingBufferHeight}px`
)();
}
// Resize the depth stencil attachment
this._createDepthStencilAttachment();
if (options?.depthStencilFormat) {
this._createDepthStencilAttachment(options?.depthStencilFormat);
}

@@ -85,45 +86,20 @@ return new WebGPUFramebuffer(this.device, {

/** Resizes and updates render targets if necessary */
update() {
const oldWidth = this.width;
const oldHeight = this.height;
const [newWidth, newHeight] = this.getPixelSize();
// IMPLEMENTATION OF ABSTRACT METHODS
const sizeChanged = newWidth !== oldWidth || newHeight !== oldHeight;
if (sizeChanged) {
this.width = newWidth;
this.height = newHeight;
if (this.depthStencilAttachment) {
this.depthStencilAttachment.destroy();
this.depthStencilAttachment = null;
}
// Reconfigure the canvas size.
// https://www.w3.org/TR/webgpu/#canvas-configuration
this.gpuCanvasContext.configure({
device: this.device.handle,
format: getWebGPUTextureFormat(this.format),
// Can be used to define e.g. -srgb views
// viewFormats: [...]
colorSpace: this.props.colorSpace,
alphaMode: this.props.alphaMode
});
log.log(1, `${this} Resized ${oldWidth}x${oldHeight} => ${newWidth}x${newHeight}px`)();
_updateDevice(): void {
if (this.depthStencilAttachment) {
this.depthStencilAttachment.destroy();
this.depthStencilAttachment = null;
}
}
resize(options?: {width?: number; height?: number; useDevicePixels?: boolean | number}): void {
this.update();
if (!this.device.handle) return;
// Resize browser context.
if (this.canvas) {
const devicePixelRatio = this.getDevicePixelRatio(options?.useDevicePixels);
this.setDevicePixelRatio(devicePixelRatio, options);
return;
}
// Reconfigure the canvas size.
// https://www.w3.org/TR/webgpu/#canvas-configuration
this.handle.configure({
device: this.device.handle,
format: this.device.preferredColorFormat,
// Can be used to define e.g. -srgb views
// viewFormats: [...]
colorSpace: this.props.colorSpace,
alphaMode: this.props.alphaMode
});
}

@@ -133,6 +109,9 @@

getCurrentTexture(): WebGPUTexture {
const handle = this.handle.getCurrentTexture();
return this.device.createTexture({
id: `${this.id}#color-texture`,
handle: this.gpuCanvasContext.getCurrentTexture(),
format: this.format
handle,
format: this.device.preferredColorFormat,
width: handle.width,
height: handle.height
});

@@ -142,10 +121,10 @@ }

/** We build render targets on demand (i.e. not when size changes but when about to render) */
_createDepthStencilAttachment() {
_createDepthStencilAttachment(depthStencilFormat: DepthStencilTextureFormat): WebGPUTexture {
if (!this.depthStencilAttachment) {
this.depthStencilAttachment = this.device.createTexture({
id: `${this.id}#depth-stencil-texture`,
format: this.depthStencilFormat,
width: this.width,
height: this.height,
usage: GPUTextureUsage.RENDER_ATTACHMENT
usage: Texture.RENDER_ATTACHMENT,
format: depthStencilFormat,
width: this.drawingBufferWidth,
height: this.drawingBufferHeight
});

@@ -152,0 +131,0 @@ }
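Taken together, the canvas-context changes mean a frame renders against whatever texture getCurrentFramebuffer() wraps for the current swap-chain image, with the depth attachment created lazily. A hypothetical per-frame sketch based on the methods shown in this diff (the framebuffer render-pass prop is an assumption from luma.gl's core API):

// Hypothetical frame loop using the canvas-context API shown above
function renderFrame(device: WebGPUDevice): void {
  const framebuffer = device.canvasContext!.getCurrentFramebuffer({
    depthStencilFormat: 'depth24plus'
  });

  const renderPass = device.commandEncoder.beginRenderPass({framebuffer});
  // ... record draw calls ...
  renderPass.end();

  device.submit(); // finishes the default command encoder and submits the queue
}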

@@ -17,3 +17,2 @@ // luma.gl

ShaderProps,
Texture,
TextureProps,

@@ -24,5 +23,2 @@ ExternalTextureProps,

ComputePipelineProps,
RenderPassProps,
ComputePassProps,
// CommandEncoderProps,
VertexArrayProps,

@@ -33,3 +29,4 @@ TransformFeedback,

QuerySetProps,
DeviceProps
DeviceProps,
CommandEncoderProps,
} from '@luma.gl/core';

@@ -45,8 +42,7 @@ import {Device, DeviceFeatures} from '@luma.gl/core';

import {WebGPUComputePipeline} from './resources/webgpu-compute-pipeline';
import {WebGPURenderPass} from './resources/webgpu-render-pass';
import {WebGPUComputePass} from './resources/webgpu-compute-pass';
// import {WebGPUCommandEncoder} from './resources/webgpu-command-encoder';
import {WebGPUVertexArray} from './resources/webgpu-vertex-array';
import {WebGPUCanvasContext} from './webgpu-canvas-context';
import {WebGPUCommandEncoder} from './resources/webgpu-command-encoder';
import {WebGPUCommandBuffer} from './resources/webgpu-command-buffer';
import {WebGPUQuerySet} from './resources/webgpu-query-set';

@@ -56,11 +52,11 @@

export class WebGPUDevice extends Device {
/** The underlying WebGPU device */
readonly handle: GPUDevice;
/** type of this device */
readonly type = 'webgpu';
/** The underlying WebGPU device */
readonly handle: GPUDevice;
/* The underlying WebGPU adapter */
readonly adapter: GPUAdapter;
/* The underlying WebGPU adapter's info */
readonly adapterInfo: GPUAdapterInfo;
readonly preferredColorFormat = navigator.gpu.getPreferredCanvasFormat() as
| 'rgba8unorm'
| 'bgra8unorm';
readonly preferredDepthFormat = 'depth24plus';

@@ -72,7 +68,11 @@ readonly features: DeviceFeatures;

readonly lost: Promise<{reason: 'destroyed'; message: string}>;
canvasContext: WebGPUCanvasContext | null = null;
override canvasContext: WebGPUCanvasContext | null = null;
private _isLost: boolean = false;
commandEncoder: GPUCommandEncoder | null = null;
renderPass: WebGPURenderPass | null = null;
commandEncoder: WebGPUCommandEncoder;
/* The underlying WebGPU adapter */
readonly adapter: GPUAdapter;
/* The underlying WebGPU adapter's info */
readonly adapterInfo: GPUAdapterInfo;

@@ -96,2 +96,3 @@ constructor(

device.addEventListener('uncapturederror', (event: Event) => {
event.preventDefault();
// TODO is this the right way to make sure the error is an Error instance?

@@ -105,3 +106,2 @@ const errorMessage =

}
event.preventDefault();
});

@@ -122,2 +122,4 @@

}
this.commandEncoder = this.createCommandEncoder({});
}

@@ -175,22 +177,8 @@

// WebGPU specifics
/**
* Allows a render pass to begin against a canvas context
* @todo need to support a "Framebuffer" equivalent (aka preconfigured RenderPassDescriptors?).
*/
beginRenderPass(props: RenderPassProps): WebGPURenderPass {
this.commandEncoder = this.commandEncoder || this.handle.createCommandEncoder();
return new WebGPURenderPass(this, props);
override createCommandEncoder(props?: CommandEncoderProps): WebGPUCommandEncoder {
return new WebGPUCommandEncoder(this, props);
}
beginComputePass(props: ComputePassProps): WebGPUComputePass {
this.commandEncoder = this.commandEncoder || this.handle.createCommandEncoder();
return new WebGPUComputePass(this, props);
}
// WebGPU specifics
// createCommandEncoder(props: CommandEncoderProps): WebGPUCommandEncoder {
// return new WebGPUCommandEncoder(this, props);
// }
createTransformFeedback(props: TransformFeedbackProps): TransformFeedback {

@@ -208,16 +196,32 @@ throw new Error('Transform feedback not supported in WebGPU');

submit(): void {
const commandBuffer = this.commandEncoder?.finish();
if (commandBuffer) {
this.handle.pushErrorScope('validation');
this.handle.queue.submit([commandBuffer]);
this.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.reportError(new Error(`WebGPU command submission failed: ${error.message}`));
}
});
submit(commandBuffer?: WebGPUCommandBuffer): void {
if (!commandBuffer) {
commandBuffer = this.commandEncoder.finish();
this.commandEncoder.destroy();
this.commandEncoder = this.createCommandEncoder({id: `${this.id}-default-encoder`});
}
this.commandEncoder = null;
this.handle.pushErrorScope('validation');
this.handle.queue.submit([commandBuffer.handle]);
this.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
this.reportError(new Error(`WebGPU command submission failed: ${error.message}`));
}
});
}
// WebGPU specific
pushErrorScope(scope: 'validation' | 'out-of-memory'): void {
this.handle.pushErrorScope(scope);
}
popErrorScope(handler: (message: string) => void): void {
this.handle.popErrorScope().then((error: GPUError | null) => {
if (error) {
handler(error.message);
}
});
}
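The pushErrorScope/popErrorScope helpers above expose WebGPU error scopes on the device wrapper without leaking GPUError objects. A hedged usage sketch; the createBuffer prop name is an assumption from luma.gl's core API:

// Sketch: bracket a batch of resource creations with a validation scope
device.pushErrorScope('validation');

const buffer = device.createBuffer({byteLength: 1024});
// ... create more resources ...

device.popErrorScope(message => {
  console.error(`Validation error while creating resources: ${message}`);
});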
// PRIVATE METHODS

@@ -294,61 +298,2 @@

}
// DEPRECATED METHODS
// @deprecated
copyExternalImageToTexture(options: {
texture: Texture;
mipLevel?: number;
aspect?: 'all' | 'stencil-only' | 'depth-only';
colorSpace?: 'display-p3' | 'srgb';
premultipliedAlpha?: boolean;
source: ImageBitmap | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
sourceX?: number;
sourceY?: number;
width?: number;
height?: number;
depth?: number;
}): void {
const {
source,
sourceX = 0,
sourceY = 0,
texture,
mipLevel = 0,
aspect = 'all',
colorSpace = 'display-p3',
premultipliedAlpha = false,
// destinationX,
// destinationY,
// destinationZ,
width = texture.width,
height = texture.height,
depth = 1
} = options;
const webGpuTexture = texture as WebGPUTexture;
this.handle?.queue.copyExternalImageToTexture(
// source: GPUImageCopyExternalImage
{
source,
origin: [sourceX, sourceY]
},
// destination: GPUImageCopyTextureTagged
{
texture: webGpuTexture.handle,
origin: [0, 0, 0], // [x, y, z],
mipLevel,
aspect,
colorSpace,
premultipliedAlpha
},
// copySize: GPUExtent3D
[width, height, depth]
);
}
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display
