While exploring JavaScript I ran into a confusing problem. The preamble: I use the ImageBitmap interface to convert images of various MIME types (mostly png/jpg) into bitmaps, transfer them to a worker to convert them to blobs on a separate thread (for that I first draw them into an offscreen canvas context), and then save them to IDB while the main thread keeps loading new images. Along the way, to broaden my horizons, I decided to use a webgl2 rendering context for the canvas, since GL is something I had never touched before.
To apply the bitmap to the canvas I use the texImage2D function, which I don't seem to understand. There I can specify the format of the data as it is stored in memory and presented to GLSL (since the bitmap is created without premultiplying alpha, that should be RGB, right?), plus the internalformat and the type. Since the valid format/internalformat/type combinations are laid out in the spec, I tried several of them and wanted to pick the best one (quality vs. file size) for my purposes. Because the images I convert to bitmaps are mostly black and white, I figured LUMINANCE was what I needed. But first I used the standard RGB format:
gl.texImage2D(
  gl.TEXTURE_2D, 0, gl.RGB, bitmap.width, bitmap.height, 0, gl.RGB, gl.UNSIGNED_BYTE, bitmap
);
Then I used RGB565 with the UNSIGNED_SHORT_5_6_5 data type and saw no quality loss, while the blob size dropped by about 30% compared to RGB. As I understand it, that's because RGB565 is one unsigned short (2 bytes) per pixel, right? Next I tried RGB5_A1 with UNSIGNED_SHORT_5_5_5_1, and the blob came out about 43% smaller than standard RGB. Even smaller than RGB565! But the gradients in the image turned funky, so I dropped 5551 RGBA. Still, I don't understand the big size difference between 5551 RGBA and RGB565. Even more confusing: with LUMINANCE, using the type/format/internalformat combination from the spec, the blob was only about 5% smaller than standard RGB. Why does RGB565 shrink it by ~30% while LUMINANCE shrinks it by only 5%?
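For context, here are the raw, uncompressed per-pixel sizes I was reasoning from (my own back-of-the-envelope numbers; what the driver actually allocates may differ):
// Raw texture size I expected for each format, ignoring any driver padding/expansion
const bytesPerPixel = {
  RGB8: 3,      // three UNSIGNED_BYTEs per pixel
  RGB565: 2,    // one UNSIGNED_SHORT per pixel
  RGB5_A1: 2,   // one UNSIGNED_SHORT per pixel
  RGBA4: 2,     // one UNSIGNED_SHORT per pixel
  LUMINANCE: 1, // one UNSIGNED_BYTE per pixel
};
const width = 1024, height = 768; // example dimensions
for (const [name, bpp] of Object.entries(bytesPerPixel)) {
  console.log(`${name}: ${width * height * bpp} bytes raw`);
}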
Throughout, I used the same float sampler in the fragment shader:
#version 300 es
precision mediump float;
precision mediump sampler2D;
uniform sampler2D sampler;
uniform vec2 dimensions;
out vec4 color;
void main() {
  color = texture(sampler, vec2(gl_FragCoord.x/dimensions.x, 1.0 - (gl_FragCoord.y/dimensions.y)));
}
and the same pixelStorei and texParameteri settings:
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
As the snippet below shows, LUMINANCE does not change the blob's file size at all when the image is black and white, and when the image is colored it does shrink it, yet by less than RGBA4 does, even though RGBA4 is 2 bytes per pixel and LUMA is 1.
(async() => {
function createImage(src) {
return new Promise((rs, rj) => {
var img = new Image();
img.crossOrigin = 'anonymous';
img.src = src;
img.onload = () => rs(img);
img.onerror = e => rj(e);
});
};
var jpeg = await createImage('https://upload.wikimedia.org/wikipedia/commons/a/aa/5inchHowitzerFiringGallipoli1915.jpeg');
var png = await createImage('https://upload.wikimedia.org/wikipedia/commons/2/2c/6.d%C3%ADl_html_m2fdede78.png');
var jpgClr = await createImage('https://upload.wikimedia.org/wikipedia/commons/thumb/e/ed/%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg/117px-%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg');
var format = {
standard: {
internalFormat: 'RGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
},
rgb565: {
internalFormat: 'RGB565',
format: 'RGB',
type: 'UNSIGNED_SHORT_5_6_5',
},
rgb9e5: {
internalFormat: 'RGB9_E5',
format: 'RGB',
type: 'FLOAT',
},
srgb: {
internalFormat: 'SRGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
},
rgba32f: {
internalFormat: 'RGB32F',
format: 'RGB',
type: 'FLOAT',
},
rgba4: {
internalFormat: 'RGBA4',
format: 'RGBA',
type: 'UNSIGNED_SHORT_4_4_4_4',
},
rgb5a1: {
internalFormat: 'RGB5_A1',
format: 'RGBA',
type: 'UNSIGNED_SHORT_5_5_5_1',
},
luma: {
internalFormat: 'LUMINANCE',
format: 'LUMINANCE',
type: 'UNSIGNED_BYTE',
},
};
function compareFormatSize(image) {
return new Promise((r, _) => {
createImageBitmap(image, {
premultiplyAlpha: 'none',
colorSpaceConversion: 'none',
}).then(async bitmap => {
var text = String(image.src.match(/(?<=\.)\w{3,4}$/)).toUpperCase();
console.log(`${text === 'JPG' ? 'Colored jpg' : text}:`);
for (let val of Object.values(format)) {
await logBlobSize(bitmap, val);
if(val.format === 'LUMINANCE') r();
}
}).catch(console.warn);
});
};
compareFormatSize(jpeg).then(_ => compareFormatSize(png)).then(_ => compareFormatSize(jpgClr));
function logBlobSize(bitmap, { internalFormat, format, type }) {
return new Promise(r => {
drawCanvas(bitmap, internalFormat, format, type).convertToBlob({
type: `image/webp`
}).then(blob => { console.log(`Blob from ${internalFormat} is ${blob.size}b`); r(); });
})
}
function drawCanvas(bitmap, internalFormat, format, type) {
const gl = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("webgl2", {
antialias: false,
alpha: false,
depth: false,
});
function createShader(gl, type, glsl) {
const shader = gl.createShader(type);
gl.shaderSource(shader, glsl)
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return;
}
return shader;
}
const vs = createShader(
gl,
gl.VERTEX_SHADER,
`#version 300 es
#define POSITION_LOCATION 0
layout(location = POSITION_LOCATION) in vec2 position;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
}`,
);
const fs = createShader(
gl,
gl.FRAGMENT_SHADER,
`#version 300 es
precision mediump float;
precision mediump sampler2D;
uniform sampler2D sampler;
uniform vec2 dimensions;
out vec4 color;
void main()
{
color = texture(sampler, vec2(gl_FragCoord.x/dimensions.x, 1.0 - (gl_FragCoord.y/dimensions.y)));
}`,
);
const program = gl.createProgram();
gl.attachShader(program, vs);
gl.attachShader(program, fs);
gl.linkProgram(program);
const sampler = gl.getUniformLocation(program, 'sampler');
const dimensions = gl.getUniformLocation(program, 'dimensions');
const position = 0; // GLSL location
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(position);
const vxBuffer = gl.createBuffer();
const vertices = new Float32Array([
-1.0,-1.0,
1.0,-1.0,
-1.0, 1.0,
1.0, 1.0,
]);
gl.bindBuffer(gl.ARRAY_BUFFER, vxBuffer);
gl.vertexAttribPointer(position, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
const texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl[internalFormat],
bitmap.width,
bitmap.height,
0,
gl[format],
gl[type],
bitmap
);
gl.useProgram(program);
gl.uniform1i(sampler, 0);
gl.uniform2f(dimensions, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
gl.deleteTexture(texture);
gl.deleteVertexArray(vao);
gl.deleteBuffer(vxBuffer);
gl.deleteProgram(program);
return gl.canvas;
}
})()
Thanks!
Answer (score: 2):
The canvas is always RGBA, 8 bits per channel (32-bit color). There has been talk of adding options to make canvases deeper to support HD color displays, but nothing has shipped yet.
So calling canvas.convertToBlob will always give you an RGBA 32-bit PNG (or JPEG). Creating a LUMINANCE texture gives you a black-and-white texture, but it gets drawn into an RGBA 32-bit canvas. There is no way to get a 1-channel PNG.
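Here's a minimal standalone sketch (separate from the snippet further down) showing that the readback/encode path is always RGBA8, no matter what internalformat the texture used:
const canvas = new OffscreenCanvas(2, 2);
const gl = canvas.getContext('webgl2');
gl.clearColor(0.5, 0.5, 0.5, 1.0);        // a flat gray, standing in for a luminance image
gl.clear(gl.COLOR_BUFFER_BIT);
const pixels = new Uint8Array(2 * 2 * 4); // width * height * 4 bytes
gl.readPixels(0, 0, 2, 2, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
console.log(pixels);                      // [~128, ~128, ~128, 255, ...] – always 4 channels per pixel
canvas.convertToBlob({ type: 'image/webp' })
  .then(blob => console.log(blob.type, blob.size)); // encoded from that RGBA data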
As for RGB565, RGBA5551, etc., the hardware may or may not support those formats directly; the spec allows the driver to choose a format of higher resolution, and I'd guess most desktops expand the data to RGBA8 on upload, so you would not save any memory there.
On the other hand, the WebGL spec does require that when you upload an image as RGB565 or RGBA5551 the image is first converted to that format, so the browser takes your image and effectively quantizes it down to those color depths, which means you lose colors. That quantized image is then drawn back into the canvas and saved, so of course it compresses better: there are more similar colors.
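To make that concrete, here is roughly the round trip a single 8-bit channel takes through RGB565's 5-bit red field; it's essentially the same math the fn callbacks in the snippet below use:
const r8 = 200;                         // original 8-bit value
const r5 = r8 >> 3;                     // quantized to 5 bits -> 25
const back = Math.round(r5 * 255 / 31); // expanded back to 8 bits -> 206
console.log(r8, '->', r5, '->', back);  // 200 -> 25 -> 206; eight nearby values collapse to the same color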
From the WebGL spec, for the ImageBitmap version of texImage2D:
The source image data is conceptually first converted to the data type and format specified by the format and type arguments, and then transferred to the WebGL implementation. Format conversion is performed according to the table below. If a packed pixel format is specified which would imply loss of bits of precision from the image data, this loss of precision must occur.
Let's try it without WebGL:
(async() => {
function createImage(src) {
return new Promise((rs, rj) => {
const img = new Image();
img.crossOrigin = 'anonymous';
img.src = src;
img.onload = () => rs(img);
img.onerror = rj;
});
};
const jpeg = await createImage('https://upload.wikimedia.org/wikipedia/commons/a/aa/5inchHowitzerFiringGallipoli1915.jpeg');
const png = await createImage('https://upload.wikimedia.org/wikipedia/commons/2/2c/6.d%C3%ADl_html_m2fdede78.png');
const jpgClr = await createImage('https://upload.wikimedia.org/wikipedia/commons/thumb/e/ed/%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg/117px-%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg');
const format = {
standard: {
internalFormat: 'RGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
fn: p => [p[0], p[1], p[2], 255],
},
rgb565: {
internalFormat: 'RGB565',
format: 'RGB',
type: 'UNSIGNED_SHORT_5_6_5',
fn: p => [
(p[0] >> 3) * 255 / 31,
(p[1] >> 2) * 255 / 63,
(p[2] >> 3) * 255 / 31,
255,
],
},
rgba4: {
internalFormat: 'RGBA4',
format: 'RGBA',
type: 'UNSIGNED_SHORT_4_4_4_4',
fn: p => [
(p[0] >> 4) * 255 / 15,
(p[1] >> 4) * 255 / 15,
(p[2] >> 4) * 255 / 15,
(p[3] >> 4) * 255 / 15,
],
},
rgb5a1: {
internalFormat: 'RGB5_A1',
format: 'RGBA',
type: 'UNSIGNED_SHORT_5_5_5_1',
fn: p => [
(p[0] >> 3) * 255 / 31,
(p[1] >> 3) * 255 / 31,
(p[2] >> 3) * 255 / 31,
(p[3] >> 7) * 255 / 1,
],
},
luma: {
internalFormat: 'LUMINANCE',
format: 'LUMINANCE',
type: 'UNSIGNED_BYTE',
fn: p => [p[0], p[0], p[0], 255],
},
};
async function compareFormatSize(image) {
const bitmap = await createImageBitmap(image, {
premultiplyAlpha: 'none',
colorSpaceConversion: 'none',
});
const text = String(image.src.match(/(?<=\.)\w{3,4}$/)).toUpperCase();
log(`${text === 'JPG' ? 'Colored jpg' : text}:`);
for (const val of Object.values(format)) {
await logBlobSize(bitmap, val);
}
};
await compareFormatSize(jpeg);
await compareFormatSize(png);
await compareFormatSize(jpgClr);
async function logBlobSize(bitmap, {
internalFormat,
format,
type,
fn,
}) {
const canvas = drawCanvas(bitmap, internalFormat, format, type);
const blob = await canvas.convertToBlob({
type: `image/webp`
});
const canvas2 = drawFn(bitmap, fn);
const blob2 = await canvas2.convertToBlob({
type: `image/webp`
});
log(`Blob from ${internalFormat} is ${blob.size}b(webgl) vs ${blob2.size}b(code)`);
if (false) {
const img = new Image();
img.src = URL.createObjectURL(blob);
document.body.appendChild(img);
const img2 = new Image();
img2.src = URL.createObjectURL(blob2);
document.body.appendChild(img2);
}
}
function drawFn(bitmap, fn) {
const ctx = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("2d");
ctx.drawImage(bitmap, 0, 0);
const imageData = ctx.getImageData(0, 0, bitmap.width, bitmap.height);
const pixels = imageData.data;
for (let i = 0; i < pixels.length; i += 4) {
const n = fn(pixels.subarray(i, i + 4));
pixels.set(n, i);
}
ctx.putImageData(imageData, 0, 0);
return ctx.canvas;
}
function drawCanvas(bitmap, internalFormat, format, type) {
const gl = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("webgl2", {
antialias: false,
alpha: false,
depth: false,
});
function createShader(gl, type, glsl) {
const shader = gl.createShader(type);
gl.shaderSource(shader, glsl)
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return;
}
return shader;
}
const vs = createShader(
gl,
gl.VERTEX_SHADER,
`#version 300 es
#define POSITION_LOCATION 0
layout(location = POSITION_LOCATION) in vec2 position;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
}`,
);
const fs = createShader(
gl,
gl.FRAGMENT_SHADER,
`#version 300 es
precision mediump float;
precision mediump sampler2D;
uniform sampler2D sampler;
uniform vec2 dimensions;
out vec4 color;
void main()
{
color = texture(sampler, vec2(gl_FragCoord.x/dimensions.x, 1.0 - (gl_FragCoord.y/dimensions.y)));
}`,
);
const program = gl.createProgram();
gl.attachShader(program, vs);
gl.attachShader(program, fs);
gl.linkProgram(program);
const sampler = gl.getUniformLocation(program, 'sampler');
const dimensions = gl.getUniformLocation(program, 'dimensions');
const position = 0; // GLSL location
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(position);
const vxBuffer = gl.createBuffer();
const vertices = new Float32Array([
  -1.0, -1.0,
   1.0, -1.0,
  -1.0,  1.0,
   1.0,  1.0,
]);
gl.bindBuffer(gl.ARRAY_BUFFER, vxBuffer);
gl.vertexAttribPointer(position, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
const texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl[internalFormat],
bitmap.width,
bitmap.height,
0,
gl[format],
gl[type],
bitmap
);
gl.useProgram(program);
gl.uniform1i(sampler, 0);
gl.uniform2f(dimensions, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
gl.deleteTexture(texture);
gl.deleteVertexArray(vao);
gl.deleteBuffer(vxBuffer);
gl.deleteProgram(program);
return gl.canvas;
}
})()
function log(...args) {
const elem = document.createElement('pre');
elem.textContent = [...args].join(' ');
document.body.appendChild(elem);
}
pre { margin: 0; }
Why does setting the format in gl.texImage2D to gl.LUMINANCE instead of gl.RGB make the blob from the canvas only ~5% smaller in file size?
I don't see those results. In your example the black-and-white images come out the same size for RGB vs LUMINANCE, and the colored image comes out about half the size. But of course, whether a black-and-white 32-bit image compresses to something smaller than a colored 32-bit image is up to the compression algorithm, since in every case the canvas is 32-bit when convertToBlob is called.
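If you want to test that part in isolation, here's a rough sketch (it assumes the 2D context's filter property is supported) that encodes the same bitmap once in color and once in grayscale and compares the webp sizes; both canvases are still RGBA 32-bit, only the pixel content differs:
async function compareColorVsGray(bitmap) {
  const sizes = {};
  for (const gray of [false, true]) {
    const ctx = new OffscreenCanvas(bitmap.width, bitmap.height).getContext('2d');
    if (gray) ctx.filter = 'grayscale(100%)'; // desaturate before drawing
    ctx.drawImage(bitmap, 0, 0);
    const blob = await ctx.canvas.convertToBlob({ type: 'image/webp' });
    sizes[gray ? 'gray' : 'color'] = blob.size;
  }
  console.log(sizes); // whether 'gray' ends up smaller is entirely up to the codec
}
// e.g. createImageBitmap(jpgClr).then(compareColorVsGray);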