I've built an image service in C# that takes a base layer image (JPG), layers one or more transparent 32-bit PNGs on top of it, and outputs a final JPG. I'm trying to squeeze the last milliseconds out of this function, and the bottleneck is the DrawImage call in GDI+. Managed code:
// Load base image and create graphics
Image image = LoadImage(renderSettings.RenderedImageDirectory + baseLayer);
Graphics graphics = Graphics.FromImage(image);
graphics.CompositingQuality = System.Drawing.Drawing2D.CompositingQuality.HighQuality;
graphics.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.HighQualityBicubic;
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
graphics.PixelOffsetMode = System.Drawing.Drawing2D.PixelOffsetMode.HighSpeed;
// Draw additional layers to final image
for (int i = 1; i < renderLayers.Count; i++) {
// SLOW -- LoadImage is just a utility method that returns an Image from disk or cache
graphics.DrawImage(LoadImage(renderSettings.RenderedImageDirectory + renderLayers[i]), 0, 0, image.Width, image.Height);
}
if (graphics != null) graphics.Dispose();
Now, I've read about the performance gains of calling GDI directly via P/Invoke, and I'm trying to replace the DrawImage call. I created a unit test that attempts to replicate the same functionality: load a JPG, then layer a transparent PNG on top of it.
Reference: http://social.msdn.microsoft.com/Forums/en-US/winforms/thread/29582142-0068-40dd-bd99-4b3883a76350
Bitmap sourceImage = new Bitmap("c:\\base.jpg");
Bitmap overlayImage = new Bitmap("c:\\layer1.png");
// NOTE: ImageHelper is a utility class containing all the P/Invoke stuff
// Get source image in memory
Graphics sourceImageGraphics = Graphics.FromImage(sourceImage);
IntPtr sourceImageHDC = sourceImageGraphics.GetHdc();
IntPtr sourceImageCDC = ImageHelper.CreateCompatibleDC(sourceImageHDC);
IntPtr sourceImageHandle = sourceImage.GetHbitmap();
ImageHelper.SelectObject(sourceImageCDC, sourceImageHandle);
// Get overlay image in memory
Graphics overlayImageGraphics = Graphics.FromImage(overlayImage);
IntPtr overlayImageHDC = overlayImageGraphics.GetHdc();
IntPtr overlayImageCDC = ImageHelper.CreateCompatibleDC(overlayImageHDC);
IntPtr overlayImageHandle = overlayImage.GetHbitmap();
ImageHelper.SelectObject(overlayImageCDC, overlayImageHandle);
ImageHelper.BitBlt(sourceImageHDC, 0, 0, sourceImage.Width, sourceImage.Height, overlayImageCDC, 0, 0, ImageHelper.TernaryRasterOperations.SRCAND);
ImageHelper.AlphaBlend(sourceImageHDC, 0, 0, sourceImage.Width, sourceImage.Height, overlayImageCDC, 0, 0, sourceImage.Width, sourceImage.Height, new ImageHelper.BLENDFUNCTION(ImageHelper.AC_SRC_OVER, 0, 0xff, ImageHelper.AC_SRC_ALPHA));
// Release source Image memory.
ImageHelper.DeleteDC(sourceImageCDC);
ImageHelper.DeleteObject(sourceImageHandle);
sourceImageGraphics.ReleaseHdc(sourceImageHDC);
sourceImageGraphics.Dispose();
// Release overlay Image memory.
ImageHelper.DeleteDC(overlayImageCDC);
ImageHelper.DeleteObject(overlayImageHandle);
overlayImageGraphics.ReleaseHdc(overlayImageHDC);
overlayImageGraphics.Dispose();
// Save to jpg
sourceImage.Save("c:\\output.jpg", ImageFormat.Jpeg);
But this doesn't produce a layered image; I just get the PNG without the base JPG underneath. What should I be doing instead? I'm a bit out of my league when it comes to calling GDI directly.
Answer 0 (Score: 8)
I ended up using SharpDX to access the WIC and Direct2D APIs. The results were impressive, to say the least. I'm seeing a 400-500% performance improvement over GDI+ when compositing with Direct2D.
I also tried GDI+ with the Task Parallel Library, breaking the image into four quadrants and doing the compositing work on separate cores. The results were not as dramatic as with SharpDX.
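For reference, here is a rough sketch of that TPL variant. It is not the exact code I used: it assumes both layers are the same size, locks them as 32bpp ARGB, and parallelizes a straight-alpha blend per row rather than strictly per quadrant.
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Threading.Tasks;

static void BlendParallel(Bitmap baseImage, Bitmap overlay)
{
    var rect = new Rectangle(0, 0, baseImage.Width, baseImage.Height);
    // Lock both layers as raw 32bpp ARGB so the blend can run on multiple cores.
    BitmapData dstData = baseImage.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
    BitmapData srcData = overlay.LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
    try
    {
        int bytes = dstData.Stride * dstData.Height;
        byte[] dst = new byte[bytes];
        byte[] src = new byte[bytes];
        Marshal.Copy(dstData.Scan0, dst, 0, bytes);
        Marshal.Copy(srcData.Scan0, src, 0, bytes);

        // One row per iteration; the TPL spreads the rows across the available cores.
        Parallel.For(0, dstData.Height, y =>
        {
            int row = y * dstData.Stride;
            for (int x = 0; x < dstData.Width; x++)
            {
                int i = row + x * 4;
                int a = src[i + 3]; // overlay alpha
                dst[i]     = (byte)((src[i]     * a + dst[i]     * (255 - a)) / 255); // B
                dst[i + 1] = (byte)((src[i + 1] * a + dst[i + 1] * (255 - a)) / 255); // G
                dst[i + 2] = (byte)((src[i + 2] * a + dst[i + 2] * (255 - a)) / 255); // R
            }
        });

        // Write the blended pixels back into the base bitmap.
        Marshal.Copy(dst, 0, dstData.Scan0, bytes);
    }
    finally
    {
        baseImage.UnlockBits(dstData);
        overlay.UnlockBits(srcData);
    }
}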
Here is the code I ended up using. The reference to "renderSettings" is just a configuration object; substitute the renderLayers image list as needed.
/* SharpDX */
using SharpDX;
using SharpDX.Direct2D1;
using SharpDX.DirectWrite;
using SharpDX.DXGI;
using SharpDX.IO;
using SharpDX.WIC;
using AlphaMode = SharpDX.Direct2D1.AlphaMode;
using WicBitmap = SharpDX.WIC.Bitmap;
using D2DPixelFormat = SharpDX.Direct2D1.PixelFormat;
using WicPixelFormat = SharpDX.WIC.PixelFormat;
using Rectangle = System.Drawing.Rectangle;
using Bitmap = System.Drawing.Bitmap;
public Image FlattenImageDirect2d()
{
List<string> renderLayers = new List<string>()
{
"image1.jpg", "image1.png", "image2.png", "image3.png", "image4.png", "image5.png", "image6.png", "image7.png"
};
// Base image
string baseLayer = renderLayers[0];
// Create WIC and D2D factories
var wicFactory = new ImagingFactory();
var ddFactory = new SharpDX.Direct2D1.Factory();
// Get image size using WIC
int baseWidth, baseHeight;
using (var wicStream = new WICStream(wicFactory, renderDirectory + baseLayer, NativeFileAccess.Read)) {
var jpegDecoder = new JpegBitmapDecoder(wicFactory);
jpegDecoder.Initialize(wicStream, DecodeOptions.CacheOnDemand);
var frame = jpegDecoder.GetFrame(0);
baseWidth = frame.Size.Width;
baseHeight = frame.Size.Height;
frame.Dispose();
jpegDecoder.Dispose();
}
// Resize image?
bool resizeImage = (baseWidth != renderSettings.RenderWidth) || (baseHeight != renderSettings.RenderHeight);
// Bitmaps and render target settings
var wicBitmap = new WicBitmap(wicFactory, renderSettings.RenderWidth, renderSettings.RenderHeight, SharpDX.WIC.PixelFormat.Format32bppBGR, BitmapCreateCacheOption.CacheOnLoad);
var renderTargetProperties = new RenderTargetProperties(RenderTargetType.Default, new D2DPixelFormat(Format.Unknown, AlphaMode.Unknown), 0, 0, RenderTargetUsage.None, FeatureLevel.Level_DEFAULT);
var wicRenderTarget = new WicRenderTarget(ddFactory, wicBitmap, renderTargetProperties);
// Create bitmap render target used to draw all images to
SharpDX.Direct2D1.BitmapRenderTarget bitmapRenderTarget = new SharpDX.Direct2D1.BitmapRenderTarget(wicRenderTarget, CompatibleRenderTargetOptions.None, new D2DPixelFormat(Format.Unknown, AlphaMode.Premultiplied));
// Draw render layers
for (int i = 0; i < renderLayers.Count; i++) {
// First layer is always a JPEG; all subsequent layers are PNGs
ImageFormat imageFormat = (i == 0) ? ImageFormat.Jpeg : ImageFormat.Png;
using (SharpDX.WIC.BitmapSource bitmapSource = LoadWicBitmap(wicFactory, renderDirectory + renderLayers[i], imageFormat, resizeImage, renderSettings.RenderWidth, renderSettings.RenderHeight)) {
// Convert WIC pixel format to D2D1 format
var formatConverter = new FormatConverter(wicFactory);
formatConverter.Initialize(bitmapSource, SharpDX.WIC.PixelFormat.Format32bppPBGRA, BitmapDitherType.None, null, 0f, BitmapPaletteType.MedianCut);
// Create direct 2d bitmap from wic bitmap
SharpDX.Direct2D1.Bitmap direct2DBitmap = SharpDX.Direct2D1.Bitmap.FromWicBitmap(bitmapRenderTarget, formatConverter);
// Draw direct2d image to bitmap render target
wicRenderTarget.BeginDraw();
wicRenderTarget.DrawBitmap(direct2DBitmap, 1.0f, SharpDX.Direct2D1.BitmapInterpolationMode.Linear);
wicRenderTarget.EndDraw();
// Clean up
formatConverter.Dispose();
direct2DBitmap.Dispose();
}
}
// Final image data
byte[] imageData;
// Create streams to write output to.
using (var memoryStream = new MemoryStream()) {
using (var wicStream = new WICStream(wicFactory, memoryStream)) {
// Encode wic bitmap
var encoder = new JpegBitmapEncoder(wicFactory);
encoder.Initialize(wicStream);
var frameEncoder = new BitmapFrameEncode(encoder);
frameEncoder.Initialize();
frameEncoder.SetSize(renderSettings.RenderWidth, renderSettings.RenderHeight);
frameEncoder.PixelFormat = WicPixelFormat.FormatDontCare;
frameEncoder.WriteSource(wicBitmap);
frameEncoder.Commit();
encoder.Commit();
// Set image data
memoryStream.Position = 0;
imageData = memoryStream.ToArray();
// Clean up
frameEncoder.Dispose();
encoder.Dispose();
wicBitmap.Dispose();
wicRenderTarget.Dispose();
bitmapRenderTarget.Dispose();
ddFactory.Dispose();
wicFactory.Dispose();
frameEncoder = null;
encoder = null;
wicBitmap = null;
wicRenderTarget = null;
bitmapRenderTarget = null;
ddFactory = null;
wicFactory = null;
}
}
return Image.FromStream(new MemoryStream(imageData));
}
private BitmapSource LoadWicBitmap(ImagingFactory wicFactory, string path, ImageFormat imageFormat, bool resize, int resizeWidth = 0, int resizeHeight = 0)
{
PngBitmapDecoder pngDecoder;
JpegBitmapDecoder jpegDecoder;
BitmapFrameDecode bitmapFrameDecode;
var stream = new WICStream(wicFactory, path, NativeFileAccess.Read);
// Load the appropriate decoder
if (imageFormat == ImageFormat.Jpeg) {
jpegDecoder = new JpegBitmapDecoder(wicFactory);
jpegDecoder.Initialize(stream, DecodeOptions.CacheOnLoad);
bitmapFrameDecode = jpegDecoder.GetFrame(0);
jpegDecoder.Dispose();
}
else {
pngDecoder = new PngBitmapDecoder(wicFactory);
pngDecoder.Initialize(stream, DecodeOptions.CacheOnDemand);
bitmapFrameDecode = pngDecoder.GetFrame(0);
pngDecoder.Dispose();
}
// Clean up
stream.Dispose();
// Resize if necessary
if (resize) {
// Prepare scaler
var scaler = new BitmapScaler(wicFactory);
scaler.Initialize(bitmapFrameDecode, resizeWidth, resizeHeight, SharpDX.WIC.BitmapInterpolationMode.Fant);
return (BitmapSource)scaler;
}
return (BitmapSource)bitmapFrameDecode;
}
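Calling it is straightforward; for example (the output path here is just a placeholder):
using (Image flattened = FlattenImageDirect2d())
{
    // Persist the flattened composite; any System.Drawing-supported format would do.
    flattened.Save("c:\\output.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
}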
Answer 1 (Score: -1)
This should work:
private Bitmap GetImage() {
//##################### Get the Bitmaps ############################
Bitmap sourceImage = new Bitmap("c:\\1.png");
Bitmap overlayImage = new Bitmap("c:\\2.png");
//##################### Get Hdc from baselayer ############################
Graphics sourceImageGraphics = Graphics.FromImage(sourceImage);
IntPtr sourceImageHDC = sourceImageGraphics.GetHdc();
//##################### Get Cdc from second layer ############################
IntPtr overlayImageCDC = CreateCompatibleDC(sourceImageHDC);
IntPtr overlayImageHandle = overlayImage.GetHbitmap();
SelectObject(overlayImageCDC, overlayImageHandle);
/*
* BitBlt from sourceImage is not necessary,
* because Graphics.FromImage(sourceImage) already did it for you
*/
//##################### Draw the second layer ############################
AlphaBlend(sourceImageHDC, 0, 0, overlayImage.Width, overlayImage.Height, overlayImageCDC, 0, 0, overlayImage.Width, overlayImage.Height, new BLENDFUNCTION(AC_SRC_OVER, 0, 0xff, AC_SRC_ALPHA));
//##################### Release everything ############################
sourceImageGraphics.ReleaseHdc(sourceImageHDC);
sourceImageGraphics.Dispose();
DeleteDC(overlayImageCDC);
DeleteObject(overlayImageHandle);
//##################### Return Image ############################
return sourceImage;
}
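This assumes the usual GDI32/Msimg32 P/Invoke declarations (the ImageHelper wrappers from the question). A minimal sketch of those signatures, in case you need them; the exact names in your helper class may differ:
using System;
using System.Runtime.InteropServices;

// Place these inside the same class as GetImage (or your ImageHelper)
// so the unqualified calls above resolve.
private const byte AC_SRC_OVER = 0x00;
private const byte AC_SRC_ALPHA = 0x01;

[StructLayout(LayoutKind.Sequential)]
private struct BLENDFUNCTION
{
    public byte BlendOp;
    public byte BlendFlags;
    public byte SourceConstantAlpha;
    public byte AlphaFormat;

    public BLENDFUNCTION(byte op, byte flags, byte alpha, byte format)
    {
        BlendOp = op;
        BlendFlags = flags;
        SourceConstantAlpha = alpha;
        AlphaFormat = format;
    }
}

[DllImport("gdi32.dll")]
private static extern IntPtr CreateCompatibleDC(IntPtr hdc);

[DllImport("gdi32.dll")]
private static extern IntPtr SelectObject(IntPtr hdc, IntPtr hgdiobj);

[DllImport("gdi32.dll")]
private static extern bool DeleteDC(IntPtr hdc);

[DllImport("gdi32.dll")]
private static extern bool DeleteObject(IntPtr hObject);

// Note: AlphaBlend lives in msimg32.dll, not gdi32.dll.
[DllImport("msimg32.dll")]
private static extern bool AlphaBlend(IntPtr hdcDest, int xDest, int yDest, int wDest, int hDest,
    IntPtr hdcSrc, int xSrc, int ySrc, int wSrc, int hSrc, BLENDFUNCTION blendFunction);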