I'm trying to get the RGB color of the point the user taps on the camera view. My approach is shown below, but even when I tap a brighter part of the camera view I only get 0, 0, 0, 0. The code I use to print it is print(color). I believe the color is given in sRGB.

Update: the output I get when printing is

[ (kCGColorSpaceICCBased; kCGColorSpaceModelRGB; sRGB IEC61966-2.1; extended range)](0 0 0 0)

How can I fix this?
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    let touch = touches.first
    if let point = touch?.location(in: view) {
        let color: CGColor = self.colorOfPoint(point: point)
        print(color)
    }
}

// Renders previewLayer into a 1x1 RGBA bitmap positioned over `point`
// and reads the four bytes back as a color.
func colorOfPoint(point: CGPoint) -> CGColor {
    let pixel = UnsafeMutablePointer<CUnsignedChar>.allocate(capacity: 4)
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedLast.rawValue)
    let context = CGContext(data: pixel, width: 1, height: 1, bitsPerComponent: 8,
                            bytesPerRow: 4, space: colorSpace, bitmapInfo: bitmapInfo.rawValue)
    context!.translateBy(x: -point.x, y: -point.y)
    self.previewLayer.render(in: context!)
    let red: CGFloat = CGFloat(pixel[0]) / 255.0
    let green: CGFloat = CGFloat(pixel[1]) / 255.0
    let blue: CGFloat = CGFloat(pixel[2]) / 255.0
    let alpha: CGFloat = CGFloat(pixel[3]) / 255.0
    let color = UIColor(red: red, green: green, blue: blue, alpha: alpha)
    return color.cgColor
}
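To narrow it down, I could run the same 1x1-bitmap sampling against an ordinary, solid-colored layer; if that returns the expected color, the sampling code itself is fine and the zeros come from rendering the preview layer. A minimal sketch, where the layer-parameterised variant and the red test view exist only for this test:

// Hypothetical diagnostic: the same 1x1-bitmap sampling, parameterised on the
// layer so it can be pointed at a plain solid-colored view first.
func colorOfPoint(_ point: CGPoint, in layer: CALayer) -> UIColor {
    let pixel = UnsafeMutablePointer<UInt8>.allocate(capacity: 4)
    pixel.initialize(repeating: 0, count: 4)
    defer { pixel.deallocate() }

    let context = CGContext(data: pixel, width: 1, height: 1,
                            bitsPerComponent: 8, bytesPerRow: 4,
                            space: CGColorSpaceCreateDeviceRGB(),
                            bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)!
    context.translateBy(x: -point.x, y: -point.y)
    layer.render(in: context)

    return UIColor(red: CGFloat(pixel[0]) / 255.0,
                   green: CGFloat(pixel[1]) / 255.0,
                   blue: CGFloat(pixel[2]) / 255.0,
                   alpha: CGFloat(pixel[3]) / 255.0)
}

// A solid red view's layer should come back as roughly (1 0 0 1); if it does,
// the zeros are specific to previewLayer rather than to the sampling code.
let redView = UIView(frame: CGRect(x: 0, y: 0, width: 100, height: 100))
redView.backgroundColor = .red
print(colorOfPoint(CGPoint(x: 10, y: 10), in: redView.layer))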
Update 2:
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    let dataOutput = AVCaptureVideoDataOutput()
    var captureDevice: AVCaptureDevice!
    var takePhoto = false

    override func viewDidLoad() {
        super.viewDidLoad()

        // Pick the back wide-angle camera.
        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                  mediaType: AVMediaTypeVideo,
                                                                  position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        }

        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print(error.localizedDescription)
        }

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            self.view.layer.addSublayer(self.previewLayer)
            self.previewLayer.frame = self.view.layer.frame
            captureSession.startRunning()

            // Deliver raw frames as 32BGRA to the sample-buffer delegate.
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
            }
            captureSession.commitConfiguration()

            let queue = DispatchQueue(label: "com.brianadvent.captureQueue")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }

        ...
    }
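Since dataOutput is configured for kCVPixelFormatType_32BGRA, the raw frame bytes end up at the sample-buffer delegate. For reference, a minimal sketch of reading one pixel out of such a CMSampleBuffer; the helper is hypothetical and assumes the point is already expressed in the pixel coordinates of the delivered frame, not in view coordinates:

import AVFoundation
import UIKit

// Hypothetical helper: reads a single pixel from a 32BGRA sample buffer.
func pixelColor(at point: CGPoint, in sampleBuffer: CMSampleBuffer) -> UIColor? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }

    CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

    guard let base = CVPixelBufferGetBaseAddress(pixelBuffer) else { return nil }
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    let x = Int(point.x), y = Int(point.y)
    guard x >= 0, x < width, y >= 0, y < height else { return nil }

    // kCVPixelFormatType_32BGRA: 4 bytes per pixel, ordered blue, green, red, alpha.
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    let pixel = base.advanced(by: y * bytesPerRow + x * 4).assumingMemoryBound(to: UInt8.self)
    return UIColor(red: CGFloat(pixel[2]) / 255.0,
                   green: CGFloat(pixel[1]) / 255.0,
                   blue: CGFloat(pixel[0]) / 255.0,
                   alpha: CGFloat(pixel[3]) / 255.0)
}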