[英]WebGL change uv buffer using the same shader attribute
I'm trying to render two textures with different UV maps on a 3D plane. 我正在尝试在 3D 平面上渲染两个具有不同 UV 贴图的纹理。 But when the application draws the textures, it uses only the last UV buffer (with its associated UV coordinates).
但是当应用程序绘制纹理时,它只使用最后一个 uv 缓冲区(及其关联的 uv 坐标)。 I have looked at almost all the similar cases in this forum, but none of them gives me the solution.
我一直在寻找这个论坛的几乎所有案例,但没有一个能给我解决方案。
This is my app (it is the smallest version I could write): 这是我的应用程序(是我可以编程的最小版本):
// Geometry for a unit quad plus two independent UV channels (one per texture).
const PICTURES = {
  // Two CCW triangles (indices into `vertices`) forming the quad.
  indices: [
    0, 1, 2,
    0, 3, 1,
  ],
  // Corner positions as flat (x, y, z) triples.
  vertices: [
     1, -1, 0,
    -1,  1, 0,
    -1, -1, 0,
     1,  1, 0,
  ],
  // Each layer supplies one (u, v) pair per vertex.
  uv_layers: {
    'uv_1': [
      1, 1.077309,
      0, -0.077309,
      0, 1.077309,
      1, -0.077309,
    ],
    'uv_2': [
      1.5, 1.5,
      -0.5, -0.5,
      -0.5, 1.5,
      1.5, -0.5,
    ],
  },
};
// GLSL vertex shader: transforms each vertex by projection * view * model and
// forwards a single UV channel to the fragment stage.
// NOTE(review): there is only ONE `uv` attribute here, so both samplers end up
// reading through whichever UV buffer was bound to it last before drawing —
// this is the cause of the reported problem.
const vertexShaderCode = `
precision mediump float;
attribute vec4 vertex;
attribute vec2 uv;
varying vec2 UV;
uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
void main()
{
UV = uv;
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vertex;
}`;
// GLSL fragment shader: samples both textures at the SAME interpolated UV
// coordinate and blends them 50/50.
const fragmentShaderCode = `
precision mediump float;
varying vec2 UV;
uniform sampler2D bitmap_1;
uniform sampler2D bitmap_2;
void main() {
// Mix the bitmaps in equal proportions
vec4 px1 = texture2D(bitmap_1, UV) * 0.5;
vec4 px2 = texture2D(bitmap_2, UV) * 0.5;
gl_FragColor = px1 + px2;
}`;
/**
 * Compiles a vertex/fragment shader pair from GLSL source.
 *
 * On compile failure an alert with the shader info log is shown, the shaders
 * created so far are deleted, and null is returned.
 *
 * @param {WebGLRenderingContext} gl
 * @param {string} vertexShaderCode   GLSL source for the vertex shader
 * @param {string} fragmentShaderCode GLSL source for the fragment shader
 * @returns {?[WebGLShader, WebGLShader]} [vertexShader, fragmentShader] or null
 */
function loadShader(gl, vertexShaderCode, fragmentShaderCode) {
  // Compile one shader of the given type; returns null (after cleanup) on failure.
  function compile(type, source) {
    const shader = gl.createShader(type);
    gl.shaderSource(shader, source);
    gl.compileShader(shader);
    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
      alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
      gl.deleteShader(shader);
      return null;
    }
    return shader;
  }
  const vertexShader = compile(gl.VERTEX_SHADER, vertexShaderCode);
  if (vertexShader === null) {
    return null;
  }
  const fragmentShader = compile(gl.FRAGMENT_SHADER, fragmentShaderCode);
  if (fragmentShader === null) {
    // Fix: the original leaked the already-compiled vertex shader here.
    gl.deleteShader(vertexShader);
    return null;
  }
  return [vertexShader, fragmentShader];
}
/**
 * Creates a texture and asynchronously fills it from an image URL.
 *
 * Because images download asynchronously, the texture is seeded with a single
 * placeholder pixel so it is usable immediately; once the image arrives, the
 * onload handler re-uploads the texture with the real contents.
 *
 * @param {WebGLRenderingContext} gl
 * @param {string} url location of the image to load
 * @returns {WebGLTexture} the texture (initially a 1x1 magenta placeholder)
 */
function loadTexture(gl, url) {
  // WebGL1 only allows mipmaps (and REPEAT wrapping) on power-of-two textures.
  function isPowerOf2(value) { return (value & (value - 1)) == 0; }
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  const level = 0;
  const internalFormat = gl.RGBA;
  const width = 1;
  const height = 1;
  const border = 0;
  const srcFormat = gl.RGBA;
  const srcType = gl.UNSIGNED_BYTE;
  // FIX: UNSIGNED_BYTE channels range 0-255, not 0.0-1.0. The original
  // [1.0, 0.0, 1.0, 1.0] produced a nearly-black pixel instead of the
  // intended warning color.
  const pixel = new Uint8Array([255, 0, 255, 255]); // magenta to warn if there is no texture
  gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border, srcFormat, srcType, pixel);
  const image = new Image();
  image.onload = function () {
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, srcFormat, srcType, image);
    if (isPowerOf2(image.width) && isPowerOf2(image.height)) {
      // Power of 2 in both dimensions: mipmaps are allowed.
      gl.generateMipmap(gl.TEXTURE_2D);
    } else {
      // Not a power of 2: turn off mips and clamp wrapping to the edge.
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    }
  };
  image.src = url;
  return texture;
}
/**
 * Constructor: links a shader program from an already-compiled shader pair.
 *
 * @param {WebGLRenderingContext} gl
 * @param {[WebGLShader, WebGLShader]} shaders [vertexShader, fragmentShader]
 */
function Material(gl, shaders) {
  const program = gl.createProgram();
  this.shaderProgram = program;
  gl.attachShader(program, shaders[0]); // Vertex shader
  gl.attachShader(program, shaders[1]); // Fragment shader
  gl.linkProgram(program);
  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(program));
    // NOTE(review): returning a primitive from a `new`-invoked constructor is
    // ignored by JavaScript — the caller still receives this instance.
    return null;
  }
}
/**
 * Entry point: creates the GL context, uploads the quad geometry and both UV
 * channels, loads the two textures and the shared material, then starts the
 * render loop.
 */
function main() {
  const canvas = document.getElementById('glcanvas');
  const gl = canvas.getContext('webgl');
  if (!gl) {
    alert('Unable to initialize WebGL. Your browser or machine may not support it.');
    return;
  }
  // Uploads `data` into a freshly created buffer bound to `target`.
  const makeBuffer = (target, data, usage) => {
    const buffer = gl.createBuffer();
    gl.bindBuffer(target, buffer);
    gl.bufferData(target, data, usage);
    return buffer;
  };
  // -------------------------------------mesh-----------------------------------------
  const indexBuffer = makeBuffer(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(PICTURES.indices), gl.STATIC_DRAW);
  const vertexBuffer = makeBuffer(gl.ARRAY_BUFFER, new Float32Array(PICTURES.vertices), gl.STATIC_DRAW);
  // Two UV channels, one per bitmap.
  const uvBuffer_1 = makeBuffer(gl.ARRAY_BUFFER, new Float32Array(PICTURES.uv_layers['uv_1']), gl.DYNAMIC_DRAW);
  const uvBuffer_2 = makeBuffer(gl.ARRAY_BUFFER, new Float32Array(PICTURES.uv_layers['uv_2']), gl.DYNAMIC_DRAW);
  // Textures for the respective UV buffers.
  const texture_1 = loadTexture(gl, TEXTURES_FOLDER + 'old_fashioned_portrait.jpg');
  const texture_2 = loadTexture(gl, TEXTURES_FOLDER + 'demon.jpg');
  // One material shared by both textures, each mapped with its own UV layer.
  const shaders = loadShader(gl, vertexShaderCode, fragmentShaderCode);
  const material = new Material(gl, shaders);
  const modelMatrix = mat4.create();
  mat4.translate(modelMatrix, modelMatrix, [0.0, 0.0, 0.0]);
  // Everything render() needs for one frame.
  const meshData = {
    indices: PICTURES.indices,
    indexBuffer,
    vertexBuffer,
    uvBuffer_1,
    uvBuffer_2,
    material,
    texture_1,
    texture_2,
    modelMatrix,
  };
  requestAnimationFrame(() => render(gl, meshData, Date.now() * 0.001));
}
var rotation = 0.0; // accumulated rotation (radians); module-level so it persists across frames
/**
 * Draws one frame of the rotating textured quad and schedules the next frame.
 *
 * @param {WebGLRenderingContext} gl
 * @param {Object} meshData buffers, textures, material and model matrix (built in main())
 * @param {number} before   timestamp of the previous frame, in seconds
 */
function render(gl, meshData, before)
{
  const now = Date.now() * 0.001;
  const delta = now - before;
  rotation += delta;
  // ------------------------------------canvas----------------------------------------
  gl.clearColor(0.0, 0.5, 0.5, 1.0);
  gl.clearDepth(1.0);
  gl.enable(gl.DEPTH_TEST);
  gl.depthFunc(gl.LEQUAL);
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
  const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
  const fieldOfView = 45 * Math.PI / 180;
  const zNear = 0.1;
  const zFar = 100.0;
  const projectionMatrix = mat4.create();
  mat4.perspective(projectionMatrix, fieldOfView, aspect, zNear, zFar);
  const viewMatrix = mat4.create();
  mat4.translate(viewMatrix, viewMatrix, [0.0, 0.0, -5.0]);
  mat4.rotate(viewMatrix, viewMatrix, rotation * 2, [1, 0, 0]);
  mat4.rotate(viewMatrix, viewMatrix, rotation, [0, 1, 0]);
  // -------------------------------------mesh-----------------------------------------
  const program = meshData.material.shaderProgram;
  gl.useProgram(program);
  // Perf: look each location up once per frame — the original re-queried the
  // same uniform/attribute locations several times in every frame.
  const projectionLoc = gl.getUniformLocation(program, 'projectionMatrix');
  const viewLoc = gl.getUniformLocation(program, 'viewMatrix');
  const modelLoc = gl.getUniformLocation(program, 'modelMatrix');
  const bitmap1Loc = gl.getUniformLocation(program, 'bitmap_1');
  const bitmap2Loc = gl.getUniformLocation(program, 'bitmap_2');
  const vertexLoc = gl.getAttribLocation(program, 'vertex');
  const uvLoc = gl.getAttribLocation(program, 'uv');
  // Vertices transformations
  gl.uniformMatrix4fv(projectionLoc, false, projectionMatrix);
  gl.uniformMatrix4fv(viewLoc, false, viewMatrix);
  gl.uniformMatrix4fv(modelLoc, false, meshData.modelMatrix);
  // Give the GPU the order of the points to form the triangles
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, meshData.indexBuffer);
  // Give the GPU the vertex positions
  gl.bindBuffer(gl.ARRAY_BUFFER, meshData.vertexBuffer);
  gl.vertexAttribPointer(vertexLoc, 3, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(vertexLoc);
  // First texture with the first uv coordinates.
  // NOTE(review): both vertexAttribPointer calls below target the SAME 'uv'
  // attribute, so this binding is overwritten by uvBuffer_2 before the draw —
  // the shaders need two attributes (uv1/uv2) to use both channels at once.
  gl.bindBuffer(gl.ARRAY_BUFFER, meshData.uvBuffer_1);
  gl.vertexAttribPointer(uvLoc, 2, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(uvLoc);
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, meshData.texture_1);
  gl.uniform1i(bitmap1Loc, 0);
  // Second texture with the second uv coordinates (this binding prevails)
  gl.bindBuffer(gl.ARRAY_BUFFER, meshData.uvBuffer_2);
  gl.vertexAttribPointer(uvLoc, 2, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(uvLoc);
  gl.activeTexture(gl.TEXTURE1);
  gl.bindTexture(gl.TEXTURE_2D, meshData.texture_2);
  gl.uniform1i(bitmap2Loc, 1);
  gl.drawElements(gl.TRIANGLES, meshData.indices.length, gl.UNSIGNED_SHORT, 0);
  gl.useProgram(null);
  requestAnimationFrame(function () { render(gl, meshData, now); });
}
// Kick everything off: build GL resources, then start the render loop.
main();
The HTML:
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>webgl</title>
<link rel="stylesheet" href="../webgl.css" type="text/css">
</head>
<body>
<!-- Render target for the WebGL context created in main() -->
<canvas id="glcanvas" width="640" height="480"></canvas>
<!-- gl-matrix supplies the mat4 helpers used by the app -->
<script src="gl-matrix.js"></script>
<script>const TEXTURES_FOLDER = 'textures/';</script>
<script src="webgl.js"></script>
</body>
</html>
Your shaders only use one set of UV coordinates. 您的着色器仅使用一组 UV 坐标。
// (Answer, quoting the question) The original vertex shader: note the single
// shared `uv` attribute — only one UV channel ever reaches the fragment stage.
const vertexShaderCode = `
precision mediump float;
attribute vec4 vertex;
attribute vec2 uv;
varying vec2 UV;
uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
void main()
{
UV = uv;
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vertex;
}`;
// (Answer, quoting the question) The original fragment shader: both samplers
// read the same interpolated UV, so they cannot use separate UV maps.
const fragmentShaderCode = `
precision mediump float;
varying vec2 UV;
uniform sampler2D bitmap_1;
uniform sampler2D bitmap_2;
void main() {
// Mix the bitmaps in equal proportions
vec4 px1 = texture2D(bitmap_1, UV) * 0.5;
vec4 px2 = texture2D(bitmap_2, UV) * 0.5;
gl_FragColor = px1 + px2;
}`;
If you want to use 2 sets of UV coordinates you need shaders that use 2 sets of UV coordinates如果要使用 2 组 UV 坐标,则需要使用 2 组 UV 坐标的着色器
// (Answer) Fixed vertex shader: one attribute and one varying PER UV channel,
// so each texture can be sampled with its own coordinates.
const vertexShaderCode = `
precision mediump float;
attribute vec4 vertex;
attribute vec2 uv1;
attribute vec2 uv2;
varying vec2 UV1;
varying vec2 UV2;
uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
void main()
{
UV1 = uv1;
UV2 = uv2;
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vertex;
}`;
// (Answer) Fixed fragment shader: each sampler reads its own varying (UV1/UV2).
const fragmentShaderCode = `
precision mediump float;
varying vec2 UV1;
varying vec2 UV2;
uniform sampler2D bitmap_1;
uniform sampler2D bitmap_2;
void main() {
// Mix the bitmaps in equal proportions
vec4 px1 = texture2D(bitmap_1, UV1) * 0.5;
vec4 px2 = texture2D(bitmap_2, UV2) * 0.5;
gl_FragColor = px1 + px2;
}`;
And then of course look up the new attribute locations然后当然要查找新的属性位置
// (Answer) Bind each UV buffer to its OWN attribute location ('uv1' / 'uv2')
// First texture: its UV buffer now feeds the dedicated 'uv1' attribute
// so it is no longer clobbered by the second binding
gl.bindBuffer(gl.ARRAY_BUFFER, meshData.uvBuffer_1);
gl.vertexAttribPointer(gl.getAttribLocation(meshData.material.shaderProgram, 'uv1'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData.material.shaderProgram, 'uv1'));
...
// Second texture: bound to the separate 'uv2' attribute
// (the rest of the draw code stays as in the question)
gl.bindBuffer(gl.ARRAY_BUFFER, meshData.uvBuffer_2);
gl.vertexAttribPointer(gl.getAttribLocation(meshData.material.shaderProgram, 'uv2'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData.material.shaderProgram, 'uv2'));
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.