从CMSampleBuffer创建UIImage

时间:2013-03-31 05:49:14

标签: ios core-image quartz-core

这与将CMSampleBuffer转换为UIImage的无数问题不同。我只是想知道为什么我不能这样转换它:

CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
CIImage * imageFromCoreImageLibrary = [CIImage imageWithCVPixelBuffer: pixelBuffer];
UIImage * imageForUI = [UIImage imageWithCIImage: imageFromCoreImageLibrary];

它似乎更简单,因为它适用于YCbCr色彩空间,以及RGBA等。该代码有问题吗?

8 个答案:

答案 0 :(得分:22)

对于JPEG图像:

Swift 4:

// NOTE(review): jpegPhotoDataRepresentation(forJPEGSampleBuffer:previewPhotoSampleBuffer:)
// was deprecated in iOS 11 in favour of AVCapturePhoto.fileDataRepresentation() —
// confirm against the deployment target.
let buff: CMSampleBuffer ...            // assume you already have a CMSampleBuffer
if let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: buff, previewPhotoSampleBuffer: nil) {
    let image = UIImage(data: imageData) // here you have a UIImage
}

答案 1 :(得分:20)

使用Swift 3和iOS 10 AVCapturePhotoOutput: 包括:

import UIKit
import CoreData
import CoreMotion
import AVFoundation

创建一个UIView进行预览并将其链接到Main Class

  @IBOutlet var preview: UIView!

创建此项以设置相机会话( kCVPixelFormatType_32BGRA 非常重要!!):

  /// Capture session, created lazily on first access with the high-quality preset.
  lazy var cameraSession: AVCaptureSession = {
    let session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetHigh
    return session
  }()

  /// Preview layer bound to `cameraSession`, sized to fill the preview view.
  lazy var previewLayer: AVCaptureVideoPreviewLayer = {
    let layer = AVCaptureVideoPreviewLayer(session: self.cameraSession)
    layer.frame = self.preview.bounds
    return layer
  }()

  /// Configures the capture session: default video device as input, plus a
  /// 32BGRA video-data output whose frames are delivered on a serial queue.
  /// Errors from creating the device input are logged, not rethrown.
  func setupCameraSession() {
    let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) as AVCaptureDevice

    do {
      let deviceInput = try AVCaptureDeviceInput(device: captureDevice)

      cameraSession.beginConfiguration()

      if cameraSession.canAddInput(deviceInput) {
        cameraSession.addInput(deviceInput)
      }

      let dataOutput = AVCaptureVideoDataOutput()
      // BUG FIX: the original line contained Markdown bold markers
      // (**kCVPixelFormatType_32BGRA**) leaked into the code, which does not
      // compile. 32BGRA is the format the conversion code below expects.
      dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
      dataOutput.alwaysDiscardsLateVideoFrames = true

      if cameraSession.canAddOutput(dataOutput) {
        cameraSession.addOutput(dataOutput)
      }

      cameraSession.commitConfiguration()

      // Sample buffers are delivered to the delegate off the main thread.
      let queue = DispatchQueue(label: "fr.popigny.videoQueue", attributes: [])
      dataOutput.setSampleBufferDelegate(self, queue: queue)

    }
    catch let error as NSError {
      NSLog("\(error), \(error.localizedDescription)")
    }
  }

WillAppear:

  /// Configures the camera session each time the view is about to appear.
  override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    setupCameraSession()
  }

在 viewDidAppear 中:

  /// Attaches the preview layer and starts capture once the view is on
  /// screen (layout is settled, so the lazily created layer gets final bounds).
  override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    preview.layer.addSublayer(previewLayer)
    cameraSession.startRunning()
  }

创建一个捕获输出的函数:

  /// AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked on the
  /// delegate queue for every captured video frame.
  func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

    // Here you collect each frame and process it
    // Presentation timestamp of the frame (fetched but unused in this example).
    let ts:CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    self.mycapturedimage = imageFromSampleBuffer(sampleBuffer: sampleBuffer)
}

以下是将 kCVPixelFormatType_32BGRA 格式的 CMSampleBuffer 转换为 UIImage 的代码。关键在于 bitmapInfo:它必须与 32BGRA 对应,即 premultipliedFirst(预乘 alpha 在前)加上 byteOrder32Little(32 位小端字节序):

  /// Converts a `kCVPixelFormatType_32BGRA` sample buffer into a `UIImage`
  /// by wrapping the raw pixel bytes in a Core Graphics bitmap context.
  /// - Parameter sampleBuffer: a captured video frame; assumed BGRA —
  ///   TODO confirm it matches the session's configured pixel format.
  /// - Returns: the rendered frame. Crashes if the buffer carries no image
  ///   data (same force-unwrap behaviour as the original).
  func imageFromSampleBuffer(sampleBuffer : CMSampleBuffer) -> UIImage
  {
    // Core Video pixel buffer backing this frame.
    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!

    // The base address is only valid while the buffer stays locked.
    CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly)

    let base = CVPixelBufferGetBaseAddress(pixelBuffer)
    let rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer)
    let pixelWidth = CVPixelBufferGetWidth(pixelBuffer)
    let pixelHeight = CVPixelBufferGetHeight(pixelBuffer)

    // 32BGRA ⇒ little-endian 32-bit with premultiplied alpha first.
    let info = CGBitmapInfo.byteOrder32Little.rawValue
        | (CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue)

    let bitmapContext = CGContext(data: base,
                                  width: pixelWidth,
                                  height: pixelHeight,
                                  bitsPerComponent: 8,
                                  bytesPerRow: rowBytes,
                                  space: CGColorSpaceCreateDeviceRGB(),
                                  bitmapInfo: info)

    let quartzImage = bitmapContext?.makeImage()

    CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly)

    return UIImage(cgImage: quartzImage!)
  }

答案 2 :(得分:13)

使用以下代码从PixelBuffer转换图像 选项1:

// Option 1: render the pixel buffer through Core Image.
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];

CIContext *context = [CIContext contextWithOptions:nil];
CGImageRef myImage = [context
                         createCGImage:ciImage
                         fromRect:CGRectMake(0, 0,
                                             CVPixelBufferGetWidth(pixelBuffer),
                                             CVPixelBufferGetHeight(pixelBuffer))];

UIImage *uiImage = [UIImage imageWithCGImage:myImage];

// BUG FIX: -createCGImage:fromRect: returns a +1 CGImageRef the caller owns;
// without this release every converted frame leaks the image's backing store.
CGImageRelease(myImage);

选项2:

// Option 2: copy the raw pixel bytes into a bitmap context by hand.
int w = CVPixelBufferGetWidth(pixelBuffer);
int h = CVPixelBufferGetHeight(pixelBuffer);
int r = CVPixelBufferGetBytesPerRow(pixelBuffer);
int bytesPerPixel = r/w;

// BUG FIX: the base address is only valid while the pixel buffer is locked;
// the original read it without ever locking the buffer.
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
unsigned char *buffer = CVPixelBufferGetBaseAddress(pixelBuffer);

UIGraphicsBeginImageContext(CGSizeMake(w, h));

CGContextRef c = UIGraphicsGetCurrentContext();

unsigned char* data = CGBitmapContextGetData(c);
if (data != NULL) {
    int maxY = h;
    for(int y = 0; y<maxY; y++) {
        for(int x = 0; x<w; x++) {
            int offset = bytesPerPixel*((w*y)+x);
            // NOTE(review): channels are copied positionally; if the source is
            // BGRA and the context is RGBA, red and blue end up swapped — verify.
            data[offset] = buffer[offset];     // R
            data[offset+1] = buffer[offset+1]; // G
            data[offset+2] = buffer[offset+2]; // B
            data[offset+3] = buffer[offset+3]; // A
        }
    }
}

// Unlock as soon as the copy is done.
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

UIImage *img = UIGraphicsGetImageFromCurrentImageContext();

UIGraphicsEndImageContext();

答案 3 :(得分:7)

我写了一个简单的扩展名,用于 Swift 4.x / 3.x ,以便从UIImage生成CMSampleBuffer

这也可以处理缩放和方向,但如果它们适合你,你可以接受默认值。

import UIKit
import AVFoundation

extension CMSampleBuffer {
    /// Renders this sample buffer as a `UIImage`, or `nil` when the buffer
    /// carries no image data.
    /// - Parameters:
    ///   - orientation: orientation applied to the result (default `.up`).
    ///   - scale: scale factor applied to the result (default `1.0`).
    func image(orientation: UIImageOrientation = .up,
               scale: CGFloat = 1.0) -> UIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(self) else {
            return nil
        }
        return UIImage(ciImage: CIImage(cvPixelBuffer: pixelBuffer),
                       scale: scale,
                       orientation: orientation)
    }
}
  1. 如果它可以从图像中获取缓冲区数据,它将继续,否则返回nil
  2. 使用缓冲区初始化CIImage
  3. 用该 ciImage 以及 scale 和 orientation 值初始化并返回 UIImage。如果未提供,则分别使用默认值 .up 和 1.0

答案 4 :(得分:2)

这将与iOS 10 AVCapturePhotoOutput类有关。假设用户想要拍照并且您拨打capturePhoto(with:delegate:)并且您的设置包含对预览图像的请求。这是获得预览图像的绝佳方式,但是如何在界面中显示它?预览图像在委托方法的实现中作为CMSampleBuffer到达:

func capture(_ output: AVCapturePhotoOutput, 
    didFinishProcessingPhotoSampleBuffer buff: CMSampleBuffer?, 
    previewPhotoSampleBuffer: CMSampleBuffer?, 
    resolvedSettings: AVCaptureResolvedPhotoSettings, 
    bracketSettings: AVCaptureBracketedStillImageSettings?, 
    error: Error?) {

您需要将CMSampleBuffer previewPhotoSampleBuffer转换为UIImage。你打算怎么做?像这样:

// Unwrap the preview buffer and its pixel data in one condition list.
if let prev = previewPhotoSampleBuffer,
   let buff = CMSampleBufferGetImageBuffer(prev) {
    let cim = CIImage(cvPixelBuffer: buff)
    let im = UIImage(ciImage: cim)
    // and now you have a UIImage! do something with it ...
}

答案 5 :(得分:1)

全部:请勿使用以下方法:

    // A CIContext is expensive to create; a single instance is reused.
    private let context = CIContext()

    /// Converts a sample buffer to `UIImage` via Core Image. Returns `nil`
    /// when the buffer has no image data or rendering fails.
    private func imageFromSampleBuffer2(_ sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        return context.createCGImage(ciImage, from: ciImage.extent)
            .map { UIImage(cgImage: $0) }
    }

他们吃了更多的cpu,转换的时间更多

请使用 https://stackoverflow.com/a/40193359/7767664 中的解决方案。

不要忘记为AVCaptureVideoDataOutput

设置下一个设置
    // The video-data output must be configured for 32BGRA so the
    // CGContext-based conversion below interprets the bytes correctly.
    videoOutput = AVCaptureVideoDataOutput()

    videoOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
    //videoOutput.alwaysDiscardsLateVideoFrames = true

    // Frames are delivered to the delegate on this serial queue.
    videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "MyQueue"))

转换方法

    /// Converts a 32BGRA `CMSampleBuffer` to a `UIImage` by wrapping the raw
    /// pixel bytes in a CGContext. Keeps the original's force-unwrap
    /// behaviour (crashes on a buffer without image data).
    func imageFromSampleBuffer(_ sampleBuffer : CMSampleBuffer) -> UIImage {
        // Core Video pixel buffer for this frame; the base address is only
        // valid while the buffer is locked.
        let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)

        let base = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)

        // 32BGRA ⇒ little-endian with premultiplied alpha first.
        let bitmapInfo = CGBitmapInfo.byteOrder32Little.rawValue
            | (CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue)

        let cgContext = CGContext(data: base,
                                  width: width,
                                  height: height,
                                  bitsPerComponent: 8,
                                  bytesPerRow: rowBytes,
                                  space: CGColorSpaceCreateDeviceRGB(),
                                  bitmapInfo: bitmapInfo)
        let quartzImage = cgContext?.makeImage()

        CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)

        return UIImage(cgImage: quartzImage!)
    }

答案 6 :(得分:1)

Swift 5.0

// Swift 5: render the pixel buffer to a CGImage through a CIContext first.
if let cvImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
   let ciimage = CIImage(cvImageBuffer: cvImageBuffer)
   // NOTE(review): creating a CIContext per frame is expensive — hoist it out
   // if this runs inside a capture callback.
   let context = CIContext()

   if let cgImage = context.createCGImage(ciimage, from: ciimage.extent) {
      let uiImage = UIImage(cgImage: cgImage) // resulting image (unused in this example)
   }
}

答案 7 :(得分:-1)

Swift 4 / iOS 11版本的Popigny回答:

(此处的代码片段未能正确显示;完整代码请参见下方链接的示例项目。)

在上下文中查看此内容的完整示例项目:https://github.com/cruinh/CameraCapture