I am trying to draw an image (as a texture) into a framebuffer, apply a sharpen filter (driven by a slider in the UI), then read the result back from the framebuffer and copy it onto a plain 2D canvas (not WebGL): with the framebuffer bound I call readPixels, get the array of pixels, and copy them into ImageData.data. However, what I get back is the original, unfiltered texture.
Maybe someone can explain this to me, because as I understand it, what ends up on screen should be exactly the contents of the framebuffer.
Sorry for the large amount of code, but I hope it helps to show what I am doing.
(function () {
  var anotherContext = null;
  var canvas = $("#canvas");

  main();
  setupCanvas();

  function setupCanvas () {
    anotherContext = document.getElementById("anothercanvas").getContext("2d");
  }

  function main () {
    var image = new Image();
    image.src = "http://localhost:9292/img/ava.jpg";
    image.onload = function () {
      render(image);
    }
  }

  function render (image) {
    //-----get contexts----
    var canvas = document.getElementById('canvas');
    var gl = canvas.getContext('experimental-webgl');

    //----define shaders-----
    var vs = document.getElementById('vshader').textContent;
    var fs = document.getElementById('fshader').textContent;

    //----create program-----
    var program = createProgram(vs, fs);
    gl.useProgram(program);

    //----setup vertex data-----
    var positionLocation = gl.getAttribLocation(program, "a_position");
    var texCoordLocation = gl.getAttribLocation(program, "a_texCoord");

    //----setup texture-----
    var texCoordBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
      0.0, 0.0,
      1.0, 0.0,
      0.0, 1.0,
      0.0, 1.0,
      1.0, 0.0,
      1.0, 1.0]), gl.STATIC_DRAW);
    gl.enableVertexAttribArray(texCoordLocation);
    gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);

    // Create a texture.
    var texture = createAndSetupTexture();
    // Upload the image into the texture.
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);

    //---framebuffer----
    var framebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
    var canRead = (gl.checkFramebufferStatus(gl.FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE);
    console.log("Can read: ", canRead);

    //----lookup uniforms and set the resolution-----
    var resolutionLocation = gl.getUniformLocation(program, "u_resolution");
    var textureSizeLocation = gl.getUniformLocation(program, "u_textureSize");
    var kernelLocation = gl.getUniformLocation(program, "u_kernel[0]");
    gl.uniform2f(textureSizeLocation, image.width, image.height);

    //----kernels-----
    var kernel = [
      0, 0, 0,
      0, 1, 0,
      0, 0, 0
    ];
    var sharpnessKernel = [
       0, -1,  0,
      -1,  5, -1,
       0, -1,  0
    ];

    //-----bind buffer------
    var vertexBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
    gl.enableVertexAttribArray(positionLocation);
    gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
    setRectangle(gl, 0, 0, image.width, image.height);

    draw(kernel);

    function draw (krn) {
      // gl.bindTexture(gl.TEXTURE_2D, texture);
      setFramebuffer(framebuffer);
      drawWithKernel(krn);
      copyImage();
      // gl.bindTexture(gl.TEXTURE_2D, texture);
      setFramebuffer(null);
      gl.drawArrays(gl.TRIANGLES, 0, 6);
    }

    function setFramebuffer (fbuf) {
      gl.bindFramebuffer(gl.FRAMEBUFFER, fbuf);
      gl.uniform2f(resolutionLocation, canvas.width, canvas.height);
      gl.viewport(0, 0, canvas.width, canvas.height);
    }

    function drawWithKernel (kernel) {
      gl.uniform1fv(kernelLocation, kernel);
      //---draw
      gl.drawArrays(gl.TRIANGLES, 0, 6);
    }

    function createAndSetupTexture () {
      var texture = gl.createTexture();
      gl.bindTexture(gl.TEXTURE_2D, texture);
      // Set the parameters so we can render any size image.
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
      return texture;
    }

    function setRectangle (gl, x, y, width, height) {
      var x1 = x;
      var x2 = x + width;
      var y1 = y;
      var y2 = y + height;
      gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
        x1, y1,
        x2, y1,
        x1, y2,
        x1, y2,
        x2, y1,
        x2, y2]), gl.STATIC_DRAW);
    }

    function createShader (str, type) {
      var shader = gl.createShader(type);
      gl.shaderSource(shader, str);
      gl.compileShader(shader);
      return shader;
    }

    function createProgram (vstr, fstr) {
      var program = gl.createProgram();
      var vshader = createShader(vstr, gl.VERTEX_SHADER);
      var fshader = createShader(fstr, gl.FRAGMENT_SHADER);
      gl.attachShader(program, vshader);
      gl.attachShader(program, fshader);
      gl.linkProgram(program);
      return program;
    }

    function copyImage () {
      var pixels = new Uint8Array(image.width * image.height * 4);
      gl.readPixels(0, 0, image.width, image.height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
      var imageData = anotherContext.createImageData(image.width, image.height);
      for (var i = pixels.length - 1; i >= 0; i--) {
        imageData.data[i] = pixels[i];
      }
      // console.log(imageData.data);
      anotherContext.putImageData(imageData, 0, 0);
    }

    $("#slider").slider({
      min: 0,
      max: 99,
      slide: function (event, ui) {
        var currentKernel = null;
        // do not use any filtering if slider is on 0 position
        if (ui.value == 0) {
          currentKernel = kernel;
        } else {
          currentKernel = sharpnessKernel.slice(0);
          currentKernel[4] -= (ui.value / 100);
        }
        draw(currentKernel);
      }
    });
  }
})()
Answer 0 (score: -2)
Your current function and bracket layout is completely broken, which also suggests that you are not using Firebug or any other web console to debug your JavaScript.
First of all, you should move some of those functions out of the main function, and get a modern editor, since most of them show which brackets belong together.
Edit: this looks similar to the code from WebGL fundamentals. To answer your question: drawing only reaches the screen when the bound framebuffer is null (as you can read on that site).
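To make that concrete, here is a minimal sketch of the two-pass pattern (not your code: gl, width, height and drawScene() are placeholder names, and drawScene() is assumed to bind its own source texture and draw a quad with an already-configured program):

// Sketch only -- gl, width, height and drawScene() are placeholders, not from the question.
// Pass 1: render into a texture attached to a framebuffer (nothing appears on screen here).
var fbTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, fbTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);

var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, fbTexture, 0);

gl.viewport(0, 0, width, height);
drawScene();                                   // pixels land in fbTexture

// readPixels reads from whatever framebuffer is currently bound:
var pixels = new Uint8Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);

// Pass 2: only after binding null does a draw reach the visible canvas.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
drawScene();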
In any case, you might find these helpful: Creating texture from getImageData (Javascript), or possibly fabric.js image filters.
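Regarding the first of those links, the basic idea is that texImage2D accepts an ImageData object directly, so going from a 2D canvas back into WebGL looks roughly like this (a sketch; ctx2d and gl are assumed, pre-existing 2D and WebGL contexts):

// Sketch only -- ctx2d (a CanvasRenderingContext2D) and gl (a WebGL context) are assumed.
var imgData = ctx2d.getImageData(0, 0, ctx2d.canvas.width, ctx2d.canvas.height);

var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// WebGL 1 accepts ImageData (as well as images and canvases) as the pixel source here:
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, imgData);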