WebGL (Web Graphics Library) is a JavaScript API for rendering 2D and 3D graphics in any compatible web browser without the need for plug-ins. It hooks into the page through the HTML5 <canvas> element, allowing developers to render complex 2D and 3D graphics directly in a web page.
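As a minimal sketch of that interaction (assuming the page already contains a <canvas> element with the id "glcanvas"), obtaining and using a rendering context looks like this:

const canvas = document.getElementById('glcanvas');
const gl = canvas.getContext('webgl'); // returns null when WebGL is unavailable
if (gl) {
  gl.clearColor(0.0, 0.0, 0.0, 1.0); // every drawing command goes through this context object
  gl.clear(gl.COLOR_BUFFER_BIT);
}

The complete example below builds on this context to load an image and draw it as a texture on a full-screen quad.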
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>WebGL Image Drawing</title>
<style>
canvas { width: 100%; height: 100%; }
</style>
</head>
<body>
<canvas id="glcanvas"></canvas>
<script>
function loadTexture(gl, url) {
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// The image has not loaded yet, so fill the texture with a single blue placeholder pixel
const level = 0;
const internalFormat = gl.RGBA;
const width = 1;
const height = 1;
const border = 0;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([0, 0, 255, 255]); // opaque blue
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border, srcFormat, srcType, pixel);
const image = new Image();
image.onload = function() {
gl.bindTexture(gl.TEXTURE_2D, texture);
// Flip the image's Y axis so it matches WebGL's texture coordinate system
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, srcFormat, srcType, image);
// WebGL 1.0 can only generate mipmaps for power-of-two textures,
// so fall back to edge clamping and linear filtering otherwise
if (isPowerOf2(image.width) && isPowerOf2(image.height)) {
  gl.generateMipmap(gl.TEXTURE_2D);
} else {
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
};
image.src = url;
return texture;
}
function isPowerOf2(value) {
return (value & (value - 1)) === 0;
}
function main() {
const canvas = document.getElementById('glcanvas');
// Try the standard context first, then fall back to the legacy experimental name
const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
if (!gl) {
alert('Your browser does not support WebGL');
return;
}
// Vertex shader source
const vertexShaderSource = `
attribute vec4 a_position;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
void main() {
gl_Position = a_position;
v_texCoord = a_texCoord;
}
`;
// Fragment shader source
const fragmentShaderSource = `
precision mediump float;
varying vec2 v_texCoord;
uniform sampler2D u_image;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
`;
// 创建并编译着色器程序
const shaderProgram = initShaderProgram(gl, vertexShaderSource, fragmentShaderSource);
const programInfo = {
program: shaderProgram,
attribLocations: {
position: gl.getAttribLocation(shaderProgram, 'a_position'),
texCoord: gl.getAttribLocation(shaderProgram, 'a_texCoord'),
},
uniformLocations: {
u_image: gl.getUniformLocation(shaderProgram, 'u_image'),
},
};
// Set up the vertex positions and texture coordinates
const buffers = initBuffers(gl);
// Load the image as a texture
const texture = loadTexture(gl, 'path/to/your/image.jpg');
// Render loop
function render() {
drawScene(gl, programInfo, buffers, texture);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
function initBuffers(gl) {
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
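// Four clip-space vertices of a full-screen quad, ordered for TRIANGLE_STRIP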
const positions = [
1.0, 1.0,
-1.0, 1.0,
1.0, -1.0,
-1.0, -1.0,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
const texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
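// One texture coordinate per vertex, mapping the image onto the quad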
const texCoords = [
1.0, 1.0,
0.0, 1.0,
1.0, 0.0,
0.0, 0.0,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW);
return {
position: positionBuffer,
texCoord: texCoordBuffer,
};
}
function drawScene(gl, programInfo, buffers, texture) {
gl.clearColor(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque
gl.clearDepth(1.0); // Clear everything
gl.enable(gl.DEPTH_TEST); // Enable depth testing
gl.depthFunc(gl.LEQUAL); // Near things obscure far things
// Clear the canvas before we start drawing on it.
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// The quad's vertices are defined directly in clip space, so this example
// needs no projection or model-view matrix.
// Tell WebGL how to pull out the positions from the position
// buffer into the vertexPosition attribute.
{
const numComponents = 2; // pull out 2 values per iteration
const type = gl.FLOAT; // the data in the buffer is 32bit floats
const normalize = false; // don't normalize
const stride = 0; // how many bytes to get from one set of values to the next
// 0 = use type and numComponents above
const offset = 0; // how many bytes inside the buffer to start from
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.position);
gl.vertexAttribPointer(
programInfo.attribLocations.position,
numComponents,
type,
normalize,
stride,
offset);
gl.enableVertexAttribArray(
programInfo.attribLocations.position);
}
// Tell WebGL how to pull out the texture coordinates from
// the texture coordinate buffer into the texCoord attribute.
{
const numComponents = 2;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.texCoord);
gl.vertexAttribPointer(
programInfo.attribLocations.texCoord,
numComponents,
type,
normalize,
stride,
offset);
gl.enableVertexAttribArray(
programInfo.attribLocations.texCoord);
}
// Tell WebGL to use our program when drawing
gl.useProgram(programInfo.program);
// Bind the texture to texture unit 0
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
// Tell the shader we bound the texture to texture unit 0
gl.uniform1i(programInfo.uniformLocations.u_image, 0);
{
const offset = 0;
const vertexCount = 4;
gl.drawArrays(gl.TRIANGLE_STRIP, offset, vertexCount);
}
}
function initShaderProgram(gl, vsSource, fsSource) {
const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);
// Create the shader program
const shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
// If creating the shader program failed, alert
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
return null;
}
return shaderProgram;
}
function loadShader(gl, type, source) {
const shader = gl.createShader(type);
// Send the source to the shader object
gl.shaderSource(shader, source);
// Compile the shader program
gl.compileShader(shader);
// See if it compiled successfully
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
return shader;
}
</script>
</body>
</html>
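One practical detail: the CSS above stretches the canvas to fill the page, but its drawing buffer keeps the default 300×150 resolution, so the result may look blurry. A minimal sketch of one way to handle this (assuming it runs in main(), after the WebGL context is obtained and before the first draw call):

canvas.width = canvas.clientWidth;   // match the drawing buffer to the displayed size
canvas.height = canvas.clientHeight;
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight); // map clip space to the full buffer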
WebGL is widely used for browser-based 3D games, data visualization, online maps and virtual globes, and interactive product showcases. With the code example and application scenarios above, you can start drawing images with WebGL and explore further refinements and optimizations as needed.