Merge pull request #289 from webgpu/worker · webgpu/webgpu-samples@8c86ccf · GitHub

Commit 8c86ccf
Merge pull request #289 from webgpu/worker
Added a sample that shows WebGPU running from a web worker
2 parents 914e885 + ae3e65f commit 8c86ccf
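
In outline, the new sample works like this: the page creates a Worker, transfers an OffscreenCanvas to it, and the worker makes all WebGPU calls against that canvas, posting messages back for anything that needs to reach the page. A condensed sketch of that handshake (a paraphrase of the files in the diff below, not the exact committed code):

// Main thread (see src/sample/worker/main.ts below)
const worker = new Worker(new URL('./worker.ts', import.meta.url));
const offscreenCanvas = canvas.transferControlToOffscreen();
worker.postMessage({ type: 'init', offscreenCanvas }, [offscreenCanvas]);

// Worker thread (see src/sample/worker/worker.ts below)
self.addEventListener('message', (ev) => {
  if (ev.data.type === 'init') {
    // ev.data.offscreenCanvas.getContext('webgpu'), requestAdapter(), render loop, ...
  }
});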

File tree

5 files changed: +316 -7 lines changed

package-lock.json

Lines changed: 7 additions & 6 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 2 additions & 1 deletion
@@ -10,6 +10,7 @@
   },
   "scripts": {
     "lint": "eslint --ext .ts,.tsx src/",
+    "fix": "eslint --fix --ext .ts,.tsx src/",
     "start": "next dev",
     "build": "next build",
     "serve": "next start",
@@ -44,6 +45,6 @@
     "eslint-plugin-react": "^7.31.10",
     "prettier": "^2.7.1",
     "raw-loader": "^4.0.2",
-    "typescript": "^4.8.4"
+    "typescript": "^4.9.5"
   }
 }
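
The new "fix" script is the auto-fixing counterpart of the existing "lint" script. Assuming the repository's usual npm workflow, it would be run as:

npm run fix   # runs: eslint --fix --ext .ts,.tsx src/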

src/pages/samples/[slug].tsx

Lines changed: 1 addition & 0 deletions
@@ -44,6 +44,7 @@ export const pages: PageComponentType = {
   cornell: dynamic(() => import('../../sample/cornell/main')),
   gameOfLife: dynamic(() => import('../../sample/gameOfLife/main')),
   renderBundles: dynamic(() => import('../../sample/renderBundles/main')),
+  worker: dynamic(() => import('../../sample/worker/main')),
 };
 
 function Page({ slug }: Props): JSX.Element {

src/sample/worker/main.ts

Lines changed: 91 additions & 0 deletions
@@ -0,0 +1,91 @@
+import { makeSample, SampleInit } from '../../components/SampleLayout';
+
+const init: SampleInit = async ({ canvas, pageState }) => {
+  if (!pageState.active) return;
+
+  // The web worker is created by passing a path to the worker's source file, which will then be
+  // executed on a separate thread.
+  const worker = new Worker(new URL('./worker.ts', import.meta.url));
+
+  // The primary way to communicate with the worker is to send and receive messages.
+  worker.addEventListener('message', (ev) => {
+    // The format of the message can be whatever you'd like, but it's helpful to decide on a
+    // consistent convention so that you can tell the message types apart as your apps grow in
+    // complexity. Here we establish a convention that all messages to and from the worker will
+    // have a `type` field that we can use to determine the content of the message.
+    switch (ev.data.type) {
+      case 'log': {
+        // Workers don't have a built-in mechanism for logging to the console, so it's useful to
+        // create a way to echo console messages.
+        console.log(ev.data.message);
+        break;
+      }
+      default: {
+        console.error(`Unknown Message Type: ${ev.data.type}`);
+      }
+    }
+  });
+
+  try {
+    // In order for the worker to display anything on the page, an OffscreenCanvas must be used.
+    // Here we can create one from our normal canvas by calling transferControlToOffscreen().
+    // Anything drawn to the OffscreenCanvas that call returns will automatically be displayed on
+    // the source canvas on the page.
+    const offscreenCanvas = canvas.transferControlToOffscreen();
+    const devicePixelRatio = window.devicePixelRatio || 1;
+    offscreenCanvas.width = canvas.clientWidth * devicePixelRatio;
+    offscreenCanvas.height = canvas.clientHeight * devicePixelRatio;
+
+    // Send a message to the worker telling it to initialize WebGPU with the OffscreenCanvas. The
+    // array passed as the second argument here indicates that the OffscreenCanvas is to be
+    // transferred to the worker, meaning this main thread will lose access to it and it will be
+    // fully owned by the worker.
+    worker.postMessage({ type: 'init', offscreenCanvas }, [offscreenCanvas]);
+  } catch (err) {
+    // TODO: This catch is added here because React will call init twice with the same canvas, and
+    // the second time will fail the transferControlToOffscreen() because it's already been
+    // transferred. I'd love to know how to get around that.
+    console.warn(err.message);
+    worker.terminate();
+  }
+};
+
+const WebGPUWorker: () => JSX.Element = () =>
+  makeSample({
+    name: 'WebGPU in a Worker',
+    description: `This example shows one method of using WebGPU in a web worker and presenting to
+    the main thread. It uses canvas.transferControlToOffscreen() to produce an offscreen canvas
+    which is then transferred to the worker where all the WebGPU calls are made.`,
+    init,
+    sources: [
+      {
+        name: __filename.substring(__dirname.length + 1),
+        contents: __SOURCE__,
+      },
+      {
+        name: './worker.ts',
+        // eslint-disable-next-line @typescript-eslint/no-var-requires
+        contents: require('!!raw-loader!./worker.ts').default,
+      },
+      {
+        name: '../../shaders/basic.vert.wgsl',
+        // eslint-disable-next-line @typescript-eslint/no-var-requires
+        contents: require('!!raw-loader!../../shaders/basic.vert.wgsl').default,
+      },
+      {
+        name: '../../shaders/vertexPositionColor.frag.wgsl',
+        contents:
+          // eslint-disable-next-line @typescript-eslint/no-var-requires
+          require('!!raw-loader!../../shaders/vertexPositionColor.frag.wgsl')
+            .default,
+      },
+      {
+        name: '../../meshes/cube.ts',
+        // eslint-disable-next-line @typescript-eslint/no-var-requires
+        contents: require('!!raw-loader!../../meshes/cube.ts').default,
+      },
+    ],
+    filename: __filename,
+  });
+
+export default WebGPUWorker;
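
The `type` field convention described in the comments above maps naturally onto a TypeScript discriminated union. The sketch below is not part of the sample; it only illustrates how the messages exchanged by main.ts and worker.ts could be typed (the InitMessage, LogMessage, and WorkerMessage names are hypothetical):

// Hypothetical typings for the message convention used by main.ts and worker.ts.
interface InitMessage {
  type: 'init';
  offscreenCanvas: OffscreenCanvas; // transferred from the page to the worker
}

interface LogMessage {
  type: 'log';
  message: string; // echoed with console.log() on the main thread
}

type WorkerMessage = InitMessage | LogMessage;

// Switching on `type` narrows the union, so each branch sees the right payload shape.
function handleMessage(data: WorkerMessage) {
  switch (data.type) {
    case 'init':
      // data.offscreenCanvas is available here
      break;
    case 'log':
      console.log(data.message);
      break;
  }
}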

src/sample/worker/worker.ts

Lines changed: 215 additions & 0 deletions
@@ -0,0 +1,215 @@
+import { mat4, vec3 } from 'wgpu-matrix';
+
+import {
+  cubeVertexArray,
+  cubeVertexSize,
+  cubeUVOffset,
+  cubePositionOffset,
+  cubeVertexCount,
+} from '../../meshes/cube';
+
+import basicVertWGSL from '../../shaders/basic.vert.wgsl';
+import vertexPositionColorWGSL from '../../shaders/vertexPositionColor.frag.wgsl';
+
+// The worker process can instantiate a WebGPU device immediately, but it still needs an
+// OffscreenCanvas to be able to display anything. Here we listen for an 'init' message from the
+// main thread that will contain an OffscreenCanvas transferred from the page, and use that as the
+// signal to begin WebGPU initialization.
+self.addEventListener('message', (ev) => {
+  switch (ev.data.type) {
+    case 'init': {
+      try {
+        init(ev.data.offscreenCanvas);
+      } catch (err) {
+        self.postMessage({
+          type: 'log',
+          message: `Error while initializing WebGPU in worker process: ${err.message}`,
+        });
+      }
+      break;
+    }
+  }
+});
+
+// Once we receive the OffscreenCanvas this init() function is called, which functions similarly
+// to the init() method for all the other samples. The remainder of this file is largely identical
+// to the rotatingCube sample.
+async function init(canvas) {
+  const adapter = await navigator.gpu.requestAdapter();
+  const device = await adapter.requestDevice();
+  const context = canvas.getContext('webgpu');
+
+  const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
+
+  context.configure({
+    device,
+    format: presentationFormat,
+    alphaMode: 'premultiplied',
+  });
+
+  // Create a vertex buffer from the cube data.
+  const verticesBuffer = device.createBuffer({
+    size: cubeVertexArray.byteLength,
+    usage: GPUBufferUsage.VERTEX,
+    mappedAtCreation: true,
+  });
+  new Float32Array(verticesBuffer.getMappedRange()).set(cubeVertexArray);
+  verticesBuffer.unmap();
+
+  const pipeline = device.createRenderPipeline({
+    layout: 'auto',
+    vertex: {
+      module: device.createShaderModule({
+        code: basicVertWGSL,
+      }),
+      entryPoint: 'main',
+      buffers: [
+        {
+          arrayStride: cubeVertexSize,
+          attributes: [
+            {
+              // position
+              shaderLocation: 0,
+              offset: cubePositionOffset,
+              format: 'float32x4',
+            },
+            {
+              // uv
+              shaderLocation: 1,
+              offset: cubeUVOffset,
+              format: 'float32x2',
+            },
+          ],
+        },
+      ],
+    },
+    fragment: {
+      module: device.createShaderModule({
+        code: vertexPositionColorWGSL,
+      }),
+      entryPoint: 'main',
+      targets: [
+        {
+          format: presentationFormat,
+        },
+      ],
+    },
+    primitive: {
+      topology: 'triangle-list',
+
+      // Backface culling since the cube is a solid piece of geometry.
+      // Faces pointing away from the camera will be occluded by faces
+      // pointing toward the camera.
+      cullMode: 'back',
+    },
+
+    // Enable depth testing so that the fragment closest to the camera
+    // is rendered in front.
+    depthStencil: {
+      depthWriteEnabled: true,
+      depthCompare: 'less',
+      format: 'depth24plus',
+    },
+  });
+
+  const depthTexture = device.createTexture({
+    size: [canvas.width, canvas.height],
+    format: 'depth24plus',
+    usage: GPUTextureUsage.RENDER_ATTACHMENT,
+  });
+
+  const uniformBufferSize = 4 * 16; // 4x4 matrix
+  const uniformBuffer = device.createBuffer({
+    size: uniformBufferSize,
+    usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
+  });
+
+  const uniformBindGroup = device.createBindGroup({
+    layout: pipeline.getBindGroupLayout(0),
+    entries: [
+      {
+        binding: 0,
+        resource: {
+          buffer: uniformBuffer,
+        },
+      },
+    ],
+  });
+
+  const renderPassDescriptor: GPURenderPassDescriptor = {
+    colorAttachments: [
+      {
+        view: undefined, // Assigned later
+
+        clearValue: { r: 0.5, g: 0.5, b: 0.5, a: 1.0 },
+        loadOp: 'clear',
+        storeOp: 'store',
+      },
+    ],
+    depthStencilAttachment: {
+      view: depthTexture.createView(),
+
+      depthClearValue: 1.0,
+      depthLoadOp: 'clear',
+      depthStoreOp: 'store',
+    },
+  };
+
+  const aspect = canvas.width / canvas.height;
+  const projectionMatrix = mat4.perspective(
+    (2 * Math.PI) / 5,
+    aspect,
+    1,
+    100.0
+  );
+  const modelViewProjectionMatrix = mat4.create();
+
+  function getTransformationMatrix() {
+    const viewMatrix = mat4.identity();
+    mat4.translate(viewMatrix, vec3.fromValues(0, 0, -4), viewMatrix);
+    const now = Date.now() / 1000;
+    mat4.rotate(
+      viewMatrix,
+      vec3.fromValues(Math.sin(now), Math.cos(now), 0),
+      1,
+      viewMatrix
+    );
+
+    mat4.multiply(projectionMatrix, viewMatrix, modelViewProjectionMatrix);
+
+    return modelViewProjectionMatrix as Float32Array;
+  }
+
+  function frame() {
+    const transformationMatrix = getTransformationMatrix();
+    device.queue.writeBuffer(
+      uniformBuffer,
+      0,
+      transformationMatrix.buffer,
+      transformationMatrix.byteOffset,
+      transformationMatrix.byteLength
+    );
+    renderPassDescriptor.colorAttachments[0].view = context
+      .getCurrentTexture()
+      .createView();
+
+    const commandEncoder = device.createCommandEncoder();
+    const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
+    passEncoder.setPipeline(pipeline);
+    passEncoder.setBindGroup(0, uniformBindGroup);
+    passEncoder.setVertexBuffer(0, verticesBuffer);
+    passEncoder.draw(cubeVertexCount, 1, 0, 0);
+    passEncoder.end();
+    device.queue.submit([commandEncoder.finish()]);
+
+    requestAnimationFrame(frame);
+  }
+
+  // Note: It is important to return control to the browser regularly in order for the worker to
+  // process events. You shouldn't simply loop infinitely with while(true) or similar! Using a
+  // traditional requestAnimationFrame() loop in the worker is one way to ensure that events are
+  // handled correctly by the worker.
+  requestAnimationFrame(frame);
+}
+
+export {};

0 commit comments