我有一些使用AVKit和AVCapturePhotoCaptureDelegate的Camera功能。
import UIKit
import AVKit
/// Captures a still photo via `AVCapturePhotoOutput` and saves it to the photo album.
///
/// Note on the asker's error: "Cannot assign value of type 'Data?' to type 'Data?'"
/// happens when another declaration named `Data` exists somewhere in the project and
/// shadows Foundation's `Data`. Fully qualifying the type as `Foundation.Data`
/// resolves the ambiguity without changing the stored type.
class CaptureImageClass: NSObject, AVCapturePhotoCaptureDelegate {
    // Holds the most recently captured photo's encoded bytes until capture finishes.
    var photoData: Foundation.Data?

    /// iOS 11+ / Swift 4 callback: cache the processed photo's data.
    /// `fileDataRepresentation()` is only available on iOS 11 and later.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let error = error {
            print("Error capturing photo: \(error)")
        } else {
            photoData = photo.fileDataRepresentation()
        }
    }

    /// Swift 3-era "capture finished" callback (kept under its original selector so the
    /// iOS 10 / Swift 3 project described in the question still compiles): decode the
    /// cached data and write the image to the saved-photos album.
    func capture(_ output: AVCapturePhotoOutput, didFinishCaptureForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
        guard let photoData = photoData else {
            print("No photo data resource")
            return
        }
        // UIImage(data:) returns nil for undecodable bytes, so bind before saving.
        if let image = UIImage(data: photoData, scale: 1.0) {
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        }
    }
}
当我创建独立项目时,此代码编译没有问题。但是当我尝试申请另一个项目时,它会出现诸如Cannot assign value of type 'Data?' to type 'Data?'
和Cannot convert value of type 'Data' to expected argument type 'Data'
之类的错误
这个问题是由不同的Swift版本引起的吗?
注意:
此“另一个项目”的部署目标是 iOS 10,使用 Swift 3,因此只能使用带 didFinishCaptureForResolvedSettings 的 func capture,
而不能使用 func photoOutput
我的独立项目正在使用Swift 4运行,并且部署目标是iOS 11.3
答案 0 :(得分:0)
检查错误是否不是由设备的iOS版本引起的。
请记住
func photoOutput(_ output: AVCapturePhotoOutput,
didFinishProcessingPhoto
photo: AVCapturePhoto,
error: Error?) {
仅适用于 iOS 11 及更高版本;在 iOS 10 上,您应使用
optional func photoOutput(_ output: AVCapturePhotoOutput,
didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?,
previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
resolvedSettings: AVCaptureResolvedPhotoSettings,
bracketSettings: AVCaptureBracketedStillImageSettings?,
error: Error?)
也
fileDataRepresentation()
可用于iOS11和更高版本
希望有帮助...
答案 1 :(得分:0)
项目中某处已重新声明了名为 Data 的类型,它遮蔽了 Foundation 的 Data,因此才会出现“Cannot assign value of type 'Data?' to type 'Data?'”这类看似自相矛盾的错误。您可以改用完全限定名:
Foundation.Data
答案 2 :(得分:0)
// Solved by using the Swift 3 / iOS 10 delegate method instead:
/// Swift 3 / iOS 10 callback: converts the captured sample buffer to JPEG data,
/// decodes it into a `UIImage`, and saves it to the photo album.
/// (Signature reconstructed: the page garbled the parameter labels; the body's
/// `previewPhotoSampleBuffer` reference requires the lowercase label below.)
func capture(_ captureOutput: AVCapturePhotoOutput,
             didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?,
             previewPhotoSampleBuffer: CMSampleBuffer?,
             resolvedSettings: AVCaptureResolvedPhotoSettings,
             bracketSettings: AVCaptureBracketedStillImageSettings?,
             error: Error?) {
    // Make sure we actually received a photo sample buffer.
    guard error == nil,
        let photoSampleBuffer = photoSampleBuffer else {
        print("Error capturing photo: \(String(describing: error))")
        return
    }
    // Convert the sample buffer to JPEG data using AVCapturePhotoOutput
    // (deprecated in iOS 11, but it is the correct API for an iOS 10 target).
    guard let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(
        forJPEGSampleBuffer: photoSampleBuffer,
        previewPhotoSampleBuffer: previewPhotoSampleBuffer) else {
        return
    }
    // Decode without force-unwrapping: bail out cleanly if either the data
    // provider or the JPEG decode fails instead of crashing.
    guard let dataProvider = CGDataProvider(data: imageData as CFData),
          let cgImage = CGImage(jpegDataProviderSource: dataProvider,
                                decode: nil,
                                shouldInterpolate: true,
                                intent: CGColorRenderingIntent.absoluteColorimetric) else {
        print("Could not decode captured JPEG data")
        return
    }
    let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: UIImageOrientation.right)
    UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}