WebGPU基础(三)-场景和相机
内容纲要
物体
在引擎开发中,我们会添加一个一个的场景,然后使用不同的相机查看不同的物体。
首先物体的生成:
//...
import { mat4, vec3 } from 'gl-matrix';
import fragShaderCode from './shaders/triangle.frag.wgsl?raw';
import vertexShadowCode from './shaders/triangle.vert.wgsl?raw';
//...
// Creates a renderable object: builds the render pipeline, the uniform
// buffer + bind group for the MVP matrix, and uploads the vertex data.
// verticesArray is interleaved per vertex: position (float32x4) at
// positionOffset and color (float32x4) at colorOffset, vertexSize apart.
// parameter optionally supplies the initial translation/rotation.
constructor(device: GPUDevice, verticesArray: Float32Array, vertexCount: number, parameter?: RenderObjectParameter) {
this.vertexCount = vertexCount;
// Pipeline with a single interleaved vertex buffer and two attributes
// (locations 0/1 match the WGSL vertex shader inputs).
this.renderPipeline = device.createRenderPipeline({
vertex: {
module: device.createShaderModule({
code: wgslShaders.vertex,
}),
entryPoint: 'main',
buffers: [
{
arrayStride: vertexSize,
attributes: [
{
shaderLocation: 0,
offset: positionOffset,
format: 'float32x4',
},
{
shaderLocation: 1,
offset: colorOffset,
format: 'float32x4',
},
],
} as GPUVertexBufferLayout,
],
},
fragment: {
module: device.createShaderModule({
code: wgslShaders.fragment,
}),
entryPoint: 'main',
targets: [
{
// NOTE(review): hard-coded swapchain format; must match the canvas
// configuration — consider navigator.gpu.getPreferredCanvasFormat().
format: 'bgra8unorm' as GPUTextureFormat,
},
],
},
primitive: {
topology: 'triangle-list',
cullMode: 'back', // back faces are culled
},
// Standard depth test: closer fragments win, depth written each draw.
depthStencil: {
depthWriteEnabled: true,
depthCompare: 'less',
format: 'depth24plus-stencil8',
},
});
// Uniform buffer holding the model-view-projection matrix; rewritten
// every frame via queue.writeBuffer (hence COPY_DST).
this.uniformBuffer = device.createBuffer({
size: this.uniformBufferSize,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});
// Expose the first matrixSize bytes of the uniform buffer at
// @group(0) @binding(0), matching the vertex shader declaration.
this.uniformBindGroup = device.createBindGroup({
layout: this.renderPipeline.getBindGroupLayout(0),
entries: [
{
binding: 0,
resource: {
buffer: this.uniformBuffer,
offset: 0,
size: this.matrixSize,
},
},
],
});
// Vertex buffer filled through mappedAtCreation: copy the data into the
// mapped range, then unmap to hand it to the GPU.
this.verticesBuffer = device.createBuffer({
size: verticesArray.byteLength,
usage: GPUBufferUsage.VERTEX,
mappedAtCreation: true,
});
new Float32Array(this.verticesBuffer.getMappedRange()).set(verticesArray);
this.verticesBuffer.unmap();
// Apply the optional initial position/rotation.
this.setTransformation(parameter);
}
//...
然后是物体的位置和角度:
//...
// Applies the optional initial transform to this object.
// Missing fields fall back to 0 via nullish coalescing (`??`), the
// idiomatic replacement for the previous truthiness ternaries
// (`p.x ? p.x : 0`), which needlessly re-tested the value itself.
private setTransformation(parameter?: RenderObjectParameter) {
  if (parameter == null) {
    return;
  }
  this.x = parameter.x ?? 0;
  this.y = parameter.y ?? 0;
  this.z = parameter.z ?? 0;
  this.rotX = parameter.rotX ?? 0;
  this.rotY = parameter.rotY ?? 0;
  this.rotZ = parameter.rotZ ?? 0;
}
//...
接着是物体的绘制:
//...
// Draws this object into the current render pass:
// 1) recompute the MVP matrix from the camera's view-projection matrix,
// 2) upload it into the uniform buffer,
// 3) bind pipeline / vertex buffer / bind group and issue the draw call.
this.updateTransformationMatrix(camera.getCameraViewProjMatrix())
passEncoder.setPipeline(this.renderPipeline);
// Copy the freshly computed matrix bytes into the GPU uniform buffer.
device.queue.writeBuffer(
this.uniformBuffer,
0,
this.modelViewProjectionMatrix.buffer,
this.modelViewProjectionMatrix.byteOffset,
this.modelViewProjectionMatrix.byteLength
);
passEncoder.setVertexBuffer(0, this.verticesBuffer);
passEncoder.setBindGroup(0, this.uniformBindGroup);
passEncoder.draw(this.vertexCount, 1, 0, 0); // vertexCount vertices, 1 instance
//...
接着是物体的更新:
//...
// Rebuilds the model matrix from this object's transform: translate
// first, then rotate about X, Y, Z (applied in that order).
const modelMatrix = mat4.create();
mat4.translate(modelMatrix, modelMatrix, vec3.fromValues(this.x, this.y, this.z))
mat4.rotateX(modelMatrix, modelMatrix, this.rotX);
mat4.rotateY(modelMatrix, modelMatrix, this.rotY);
mat4.rotateZ(modelMatrix, modelMatrix, this.rotZ);
// MVP = (projection * view) * model — cameraProjectionMatrix is the
// camera's combined view-projection matrix.
mat4.multiply(this.modelViewProjectionMatrix, cameraProjectionMatrix, modelMatrix);
//...
接着就可以生成物体了😄:
//...
/** Factory: builds a cube from the shared cube vertex data, using the
 *  module-level device (set earlier via setDevice). */
public static cube(parameter?: RenderObjectParameter): IObject {
return new IObject(device, cubeVertexArray, cubeVertexCount, parameter)
}
/** Factory: builds a pyramid from the shared triangle vertex data. */
public static pyramid(parameter?: RenderObjectParameter): IObject {
return new IObject(device, triangleVertexArray, triangleVertexCount, parameter)
}
//...
这里需要用到device,创建一个common:
/*
* @Author: indeex
* @Date: 2021-03-01 19:58:41
* @Email: indeex@qq.com
*/
/**
 * Shared GPUDevice handle for the whole engine. Assigned once by the
 * engine after requestDevice(); read by the object factories.
 */
export let device: GPUDevice;

/** Publishes the device obtained by the engine so other modules can use it. */
export function setDevice(gpuDevice: GPUDevice) {
  device = gpuDevice;
}
在engine中设置后,就可以在其他地方使用:
//...
// Request the logical device from the adapter and publish it through the
// shared common module so factories (IObject.cube / pyramid) can reach it.
let _device = await this.adapter.requestDevice();
setDevice(_device);
//...
还有Shader,这里使用WGSL:
顶点着色器vert:
// Combined model-view-projection matrix, rewritten from the CPU each frame.
struct Uniforms {
  modelViewProjectionMatrix : mat4x4<f32>,
}

@binding(0) @group(0) var<uniform> uniforms : Uniforms;

struct VertexOutput {
  @builtin(position) Position : vec4<f32>,
  @location(0) fragColor : vec4<f32>,
}

// Note: `@stage(vertex)` was deprecated and removed from WGSL; the current
// attribute is `@vertex`. Struct members are now separated by commas, not
// semicolons.
@vertex
fn main(@location(0) position : vec4<f32>,
        @location(1) color : vec4<f32>) -> VertexOutput {
  // Transform the position into clip space and pass the color through.
  return VertexOutput(uniforms.modelViewProjectionMatrix * position, color);
}
片段着色器(像素着色器)frag:
// Note: `@stage(fragment)` was removed from WGSL; current syntax is `@fragment`.
// Outputs the interpolated vertex color unchanged.
@fragment
fn main(@location(0) fragColor : vec4<f32>) -> @location(0) vec4<f32> {
  return fragColor;
}
相机
然后是相机的设置,相机是跟随物体的,所以跟物体是相对的:
/*
* @Author: indeex
* @Date: 2021-03-01 19:14:17
* @Email: indeex@qq.com
*/
import { mat4, vec3 } from 'gl-matrix';
/**
 * Simple perspective camera. Position (x/y/z) and Euler rotation
 * (rotX/rotY/rotZ) are public and mutated directly by the input handlers.
 * The view matrix always looks from the camera position toward the world
 * origin, with the Euler rotations applied on top.
 */
export class Camera {
  public x: number = 0;
  public y: number = 0;
  public z: number = 0;
  public rotX: number = 0;
  public rotY: number = 0;
  public rotZ: number = 0;
  /** Vertical field of view in radians (72 degrees). */
  public fovy: number = (2 * Math.PI) / 5;
  public aspect: number = 16 / 9;
  public near: number = 1;
  public far: number = 1000;

  constructor (aspect: number) {
    this.aspect = aspect;
  }

  /** View matrix: look from (x, y, z) toward the origin (Y up), then apply
   *  the Euler rotations about X, Y, Z in that order. */
  public getViewMatrix () : mat4 {
    const view = mat4.create();
    const eye = vec3.fromValues(this.x, this.y, this.z);
    const target = vec3.fromValues(0, 0, 0);
    const up = vec3.fromValues(0, 1, 0);
    mat4.lookAt(view, eye, target, up);
    mat4.rotateX(view, view, this.rotX);
    mat4.rotateY(view, view, this.rotY);
    mat4.rotateZ(view, view, this.rotZ);
    return view;
  }

  /** Perspective projection built from fovy / aspect / near / far. */
  public getProjectionMatrix () : mat4 {
    const projection = mat4.create();
    mat4.perspective(projection, this.fovy, this.aspect, this.near, this.far);
    return projection;
  }

  /** Combined projection * view matrix, ready for the uniform upload. */
  public getCameraViewProjMatrix () : mat4 {
    const viewProj = mat4.create();
    mat4.multiply(viewProj, this.getProjectionMatrix(), this.getViewMatrix());
    return viewProj;
  }
}
场景
场景可以理解为装物体的一间间无限大的屋子,也可以在初始时规定屋子的大小,在以前的Flash时代不管是2D还是3D,场景都是最常用的,比如最常用的Sprite,还有装了无数小Sprite的MovieClip组成的画面等:
//...
/** A container for the renderable objects that make up one scene. */
export class Scene {
  // Registered objects, kept in the order they were added.
  private objects: IObject[] = [];

  /** Registers an object so the engine renders it each frame. */
  public add (object: IObject) {
    this.objects.push(object);
  }

  /** Returns every registered object, in insertion order. */
  public getObjects () : IObject[] {
    return this.objects;
  }
}
//...
Engine
把之前的代码封装成Engine,引擎负责更新渲染每一帧:
// Engine: owns the render pass descriptor and drives per-frame rendering.
export default class Engine {
constructor() {}
//...
// Refreshes the depth attachment view before rendering — presumably the
// depth texture can be re-created (e.g. on resize), so the descriptor
// must point at the current view. TODO(review): confirm with
// depthTextureView()'s implementation, which is outside this excerpt.
update() {
(this.renderPassDescriptor!.depthStencilAttachment as GPURenderPassDepthStencilAttachment).view = this.depthTextureView();
}
// Renders one frame: points the color attachment at the swapchain's
// current texture, then records and submits a render pass that draws
// every object in the scene with the given camera.
frame(camera: Camera, scene: Scene) {
(this.renderPassDescriptor!.colorAttachments as [GPURenderPassColorAttachment])[0].view = this.context!
.getCurrentTexture()
.createView();
const commandEncoder = device!.createCommandEncoder();
// NOTE(review): the `as any` casts here and below paper over imprecise
// typing of renderPassDescriptor/device — consider typing them properly.
const passEncoder = commandEncoder.beginRenderPass((this.renderPassDescriptor as any));
for (let object of scene.getObjects()) {
object.draw(passEncoder, (device as any), camera)
}
passEncoder.end();
device!.queue.submit([commandEncoder.finish()]);
}
//...
}
控制
然后通过键盘、手势、鼠标等控制:
// Mouse-wheel zoom: move the camera along Z, scaled so one wheel step is
// a small increment.
canvas.onwheel = (e: WheelEvent) => {
  camera.z += e.deltaY / 100;
};

// Drag state — declared before the handlers that read it (the original
// declared lastMouseX/lastMouseY with `var` *after* onmousedown used them,
// working only via hoisting). -1 marks "no previous sample yet".
let mouseDown = false;
let lastMouseX = -1;
let lastMouseY = -1;

canvas.onmousedown = (e: MouseEvent) => {
  if (e.button !== 0) return; // left button only
  mouseDown = true;
  lastMouseX = e.pageX;
  lastMouseY = e.pageY;
};

canvas.onmouseup = () => {
  mouseDown = false;
};

// While dragging, convert pointer deltas into camera rotation
// (1/100 radian per pixel).
canvas.onmousemove = (e: MouseEvent) => {
  if (!mouseDown) {
    return;
  }
  const mouseX = e.pageX;
  const mouseY = e.pageY;
  if (lastMouseX > 0 && lastMouseY > 0) {
    camera.rotY += (mouseX - lastMouseX) / 100;
    camera.rotX += (mouseY - lastMouseY) / 100;
  }
  lastMouseX = mouseX;
  lastMouseY = mouseY;
};
这里例子只用到了立方体和锥体,还有很多其他的物体,可以参考W3C的官方文档自行设置。
效果预览地址:

code enjoy! 🦖🦖🦖
作者:indeex
著作权归作者所有。商业转载请联系作者获得授权,非商业转载请注明出处。