Error: Use of undeclared type 'Accelerate'

Time: 2019-09-26 05:19:06

Tags: ios swift opentok accelerate

I am using the Accelerate framework to quickly convert YUV420 to RGB. The call to vImageConvert_420Yp8_Cb8_Cr8ToARGB8888 raises:

Thread 1: EXC_BAD_ACCESS (code=1, address=0x108bc9000)

When I debug the source and destination pointers, it shows this message:

Printing description of yPlaneBuffer:
expression produced error: swift:1:65: error: use of undeclared type 'Accelerate'
Swift._DebuggerSupport.stringForPrintObject(Swift.UnsafePointer(bitPattern: 0x108788240)!.pointee)

The same error appears for all three source buffers: yPlaneBuffer, uPlaneBuffer and vPlaneBuffer. Here is my code.

import Foundation
import Accelerate.vImage
import UIKit
import OpenTok

class I420Converter {

    // Conversion info generated once in init() and reused for every frame.
    var infoYpCbCrToARGB = vImage_YpCbCrToARGB()

    init() {
        configureYpCbCrToARGBInfo()
    }

    func configureYpCbCrToARGBInfo() -> vImage_Error {
        print("Configuring")
        var pixelRange = vImage_YpCbCrPixelRange(Yp_bias: 0,
                                                 CbCr_bias: 128,
                                                 YpRangeMax: 255,
                                                 CbCrRangeMax: 255,
                                                 YpMax: 255,
                                                 YpMin: 1,
                                                 CbCrMax: 255,
                                                 CbCrMin: 0)

        let error = vImageConvert_YpCbCrToARGB_GenerateConversion(
            kvImage_YpCbCrToARGBMatrix_ITU_R_601_4!,
            &pixelRange,
            &infoYpCbCrToARGB,
            kvImage420Yp8_Cb8_Cr8,
            kvImageARGB8888,
            vImage_Flags(kvImagePrintDiagnosticsToConsole))

        print("Configuration done \(error)")
        return error
    }

    // Creates a pixel buffer, runs the vImage conversion into it, then builds a UIImage via Core Image.
    public func convertFrameVImageYUV(toUIImage frame: OTVideoFrame) -> UIImage {
        var result: UIImage? = nil
        let width = frame.format?.imageWidth ?? 0
        let height = frame.format?.imageHeight ?? 0
        var pixelBuffer: CVPixelBuffer? = nil
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(width), Int(height), kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, nil, &pixelBuffer)

        convertFrameVImageYUV(frame, to: pixelBuffer)

        var ciImage: CIImage? = nil
        if let pixelBuffer = pixelBuffer {
            ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        }

        let temporaryContext = CIContext(options: nil)
        var uiImage: CGImage? = nil
        if let ciImage = ciImage {
            uiImage = temporaryContext.createCGImage(ciImage, from: CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer!), height: CVPixelBufferGetHeight(pixelBuffer!)))
        }

        if let uiImage = uiImage {
            result = UIImage(cgImage: uiImage)
        }
        //CGImageRelease(uiImage!)
        print("done")
        return result!
    }

    func convertFrameVImageYUV(_ frame: OTVideoFrame, to pixelBufferRef: CVPixelBuffer?) -> vImage_Error {
        if pixelBufferRef == nil {
            print("No PixelBuffer reference found")
            return vImage_Error(kvImageInvalidParameter)
        }

        let width = frame.format?.imageWidth ?? 0
        let height = frame.format?.imageHeight ?? 0
        let subsampledWidth = frame.format!.imageWidth / 2
        let subsampledHeight = frame.format!.imageHeight / 2
        print("subsample height \(subsampledHeight) \(subsampledWidth)")
        let planeSize = calculatePlaneSize(forFrame: frame)

        // Copy the three I420 planes out of the OpenTok frame into newly allocated buffers.
        let yPlane = UnsafeMutablePointer<GLubyte>.allocate(capacity: planeSize.ySize)
        let uPlane = UnsafeMutablePointer<GLubyte>.allocate(capacity: planeSize.uSize)
        let vPlane = UnsafeMutablePointer<GLubyte>.allocate(capacity: planeSize.vSize)

        memcpy(yPlane, frame.planes?.pointer(at: 0), planeSize.ySize)
        memcpy(uPlane, frame.planes?.pointer(at: 1), planeSize.uSize)
        memcpy(vPlane, frame.planes?.pointer(at: 2), planeSize.vSize)

        print("192")

        var yPlaneBuffer = vImage_Buffer(data: yPlane, height: vImagePixelCount(height), width: vImagePixelCount(width), rowBytes: planeSize.ySize)
        var uPlaneBuffer = vImage_Buffer(data: uPlane, height: vImagePixelCount(subsampledHeight), width: vImagePixelCount(subsampledHeight), rowBytes: planeSize.uSize)
        var vPlaneBuffer = vImage_Buffer(data: vPlane, height: vImagePixelCount(subsampledHeight), width: vImagePixelCount(subsampledWidth), rowBytes: planeSize.vSize)

        CVPixelBufferLockBaseAddress(pixelBufferRef!, .readOnly)
        let pixelBufferData = CVPixelBufferGetBaseAddress(pixelBufferRef!)
        let rowBytes = CVPixelBufferGetBytesPerRow(pixelBufferRef!)

        var destinationImageBuffer = vImage_Buffer()
        destinationImageBuffer.data = pixelBufferData
        destinationImageBuffer.height = vImagePixelCount(height)
        destinationImageBuffer.width = vImagePixelCount(width)
        destinationImageBuffer.rowBytes = rowBytes

        var permuteMap: [UInt8] = [3, 2, 1, 0] // BGRA

        let convertError = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&yPlaneBuffer, &uPlaneBuffer, &vPlaneBuffer, &destinationImageBuffer, &infoYpCbCrToARGB, &permuteMap, 255, vImage_Flags(kvImagePrintDiagnosticsToConsole))

        CVPixelBufferUnlockBaseAddress(pixelBufferRef!, [])
        print("is error \(convertError)")
        return convertError
    }

    // Y plane is width * height bytes; the U and V planes are each a quarter of that for 4:2:0 data.
    fileprivate func calculatePlaneSize(forFrame frame: OTVideoFrame)
        -> (ySize: Int, uSize: Int, vSize: Int)
    {
        guard let frameFormat = frame.format
            else {
                return (0, 0, 0)
        }
        let baseSize = Int(frameFormat.imageWidth * frameFormat.imageHeight) * MemoryLayout<GLubyte>.size
        return (baseSize, baseSize / 4, baseSize / 4)
    }
}
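For comparison, below is a minimal, self-contained sketch of how vImage_Buffer descriptors for tightly packed 8-bit planes and a 4-byte-per-pixel destination are commonly set up. In vImage, rowBytes is the stride from the start of one pixel row to the next, so for an unpadded 8-bit plane it is normally the plane's width in pixels rather than the plane's total byte count. The helper names (makePlaneBuffer, makeBGRADestination) are mine, the no-padding assumption may not hold for every source, and this only illustrates the buffer layout the convert call expects; it is not a verified fix for the crash above.

import Accelerate.vImage
import CoreVideo

// Describes one tightly packed 8-bit plane (hypothetical helper, not from the code above).
func makePlaneBuffer(data: UnsafeMutableRawPointer, width: Int, height: Int) -> vImage_Buffer {
    return vImage_Buffer(data: data,
                         height: vImagePixelCount(height),
                         width: vImagePixelCount(width),
                         rowBytes: width) // stride of one row in bytes, assuming no row padding
}

// Creates a 32-bit BGRA CVPixelBuffer and a vImage_Buffer pointing at its memory.
// The pixel buffer is returned locked; the caller unlocks it after the conversion.
func makeBGRADestination(width: Int, height: Int) -> (CVPixelBuffer, vImage_Buffer)? {
    var pb: CVPixelBuffer?
    guard CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                              kCVPixelFormatType_32BGRA, nil, &pb) == kCVReturnSuccess,
          let pixelBuffer = pb else { return nil }
    CVPixelBufferLockBaseAddress(pixelBuffer, [])
    let destination = vImage_Buffer(data: CVPixelBufferGetBaseAddress(pixelBuffer),
                                    height: vImagePixelCount(height),
                                    width: vImagePixelCount(width),
                                    rowBytes: CVPixelBufferGetBytesPerRow(pixelBuffer))
    return (pixelBuffer, destination)
}

Descriptors built this way could then be passed to vImageConvert_420Yp8_Cb8_Cr8ToARGB8888 in the same manner as in the code above.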
