Black screen when taking a screenshot and sharing it in Swift 4

Asked: 2018-02-07 21:48:41

Tags: ios swift4

I'm trying to take a screenshot and share it in Swift 4, but all I get is a black image, and I can't figure out why. I've tried several approaches, but none of them made it any better. The code I'm using is below. I suspect it has something to do with layers, but I'm just starting to learn. I'd really appreciate any help.

Code:

import UIKit
import AVFoundation
import Vision

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

  let session = AVCaptureSession()

  var previewLayer: AVCaptureVideoPreviewLayer!

  let captureQueue = DispatchQueue(label: "captureQueue")

  var gradientLayer: CAGradientLayer!

  var visionRequests = [VNRequest]()

    var recognitionThreshold : Float = 0.25

    @IBOutlet weak var thresholdStackView: UIStackView!
    @IBOutlet weak var threshholdLabel: UILabel!
    @IBOutlet weak var threshholdSlider: UISlider!
    @IBOutlet weak var share: UIButton!


    @IBOutlet weak var previewView: UIView!
  @IBOutlet weak var resultView: UILabel!

  override func viewDidLoad() {
    super.viewDidLoad()

    guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
      fatalError("No video camera available")
    }
    do {

      previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
      previewView.layer.addSublayer(previewLayer)

      gradientLayer = CAGradientLayer()
      gradientLayer.colors = [
        UIColor.init(red: 0, green: 0, blue: 0, alpha: 0.7).cgColor,
        UIColor.init(red: 0, green: 0, blue: 0, alpha: 0.0).cgColor,
      ]
      gradientLayer.locations = [0.0, 0.3]
      self.previewView.layer.addSublayer(gradientLayer)

      let cameraInput = try AVCaptureDeviceInput(device: camera)

      let videoOutput = AVCaptureVideoDataOutput()
      videoOutput.setSampleBufferDelegate(self, queue: captureQueue)
      videoOutput.alwaysDiscardsLateVideoFrames = true
      videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
      session.sessionPreset = .high

      session.addInput(cameraInput)
      session.addOutput(videoOutput)

      let conn = videoOutput.connection(with: .video)
      conn?.videoOrientation = .portrait

      session.startRunning()

      guard let resNet50Model = try? VNCoreMLModel(for: food().model) else {
        fatalError("Could not load model")
      }

      let classificationRequest = VNCoreMLRequest(model: resNet50Model, completionHandler: handleClassifications)
      classificationRequest.imageCropAndScaleOption = .centerCrop
      visionRequests = [classificationRequest]
    } catch {
      fatalError(error.localizedDescription)
    }

    updateThreshholdLabel()
  }

    func updateThreshholdLabel () {
        self.threshholdLabel.text = "Threshold: " + String(format: "%.2f", recognitionThreshold)
    }

  override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    previewLayer.frame = self.previewView.bounds
    gradientLayer.frame = self.previewView.bounds

    let orientation: UIDeviceOrientation = UIDevice.current.orientation
    switch (orientation) {
    case .portrait:
        previewLayer?.connection?.videoOrientation = .portrait
    case .landscapeRight:
        previewLayer?.connection?.videoOrientation = .landscapeLeft
    case .landscapeLeft:
        previewLayer?.connection?.videoOrientation = .landscapeRight
    case .portraitUpsideDown:
        previewLayer?.connection?.videoOrientation = .portraitUpsideDown
    default:
        previewLayer?.connection?.videoOrientation = .portrait
    }
  }

  func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
      return
    }

    connection.videoOrientation = .portrait

    var requestOptions:[VNImageOption: Any] = [:]

    if let cameraIntrinsicData = CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, nil) {
      requestOptions = [.cameraIntrinsics: cameraIntrinsicData]
    }


    let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .upMirrored, options: requestOptions)
    do {
      try imageRequestHandler.perform(self.visionRequests)
    } catch {
      print(error)
    }
  }

    @IBAction func userTapped(sender: Any) {
        self.thresholdStackView.isHidden = !self.thresholdStackView.isHidden
    }

    @IBAction func share2(_ sender: Any) {
        //Set the default sharing message.
        let message = "Hello!"
        let link = NSURL(string: "http://url.com/")
        // Screenshot:
        UIGraphicsBeginImageContextWithOptions(self.view.frame.size, true, 0.0)
        self.view.drawHierarchy(in: self.view.frame, afterScreenUpdates: false)
        let img = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        //Set the link, message, image to share.
        if let link = link, let img = img {
            let objectsToShare = [message,link,img] as [Any]
            let activityVC = UIActivityViewController(activityItems: objectsToShare, applicationActivities: nil)
            activityVC.excludedActivityTypes = [UIActivityType.airDrop, UIActivityType.addToReadingList]
            self.present(activityVC, animated: true, completion: nil)

        }

    }

    @IBAction func sliderValueChanged(slider: UISlider) {
        self.recognitionThreshold = slider.value
        updateThreshholdLabel()
    }

  func handleClassifications(request: VNRequest, error: Error?) {
    if let theError = error {
      print("Error: \(theError.localizedDescription)")
      return
    }
    guard let observations = request.results else {
      print("No results")
      return
    }

    let classifications = observations[0...4] // top 5 results
        .flatMap({ $0 as? VNClassificationObservation })
        .flatMap({$0.confidence > recognitionThreshold ? $0 : nil})
      .map({ "\($0.identifier) \(String(format:"%.2f", $0.confidence))" })
        .joined(separator: "\n")

    DispatchQueue.main.async {
        self.resultView.text = classifications
    }

  }
}

1 Answer:

Answer 0 (score: 1)

Try replacing:

UIGraphicsBeginImageContextWithOptions(self.view.frame.size, true, 0.0)

with:

UIGraphicsBeginImageContextWithOptions(self.view.bounds.size, false, UIScreen.main.scale)
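Of the three arguments, the meaningful changes are using bounds instead of frame (frame is expressed in the superview's coordinate space) and passing false for opaque, so undrawn or transparent regions aren't forced to solid black; a scale of 0.0 and UIScreen.main.scale are equivalent, both meaning the main screen's scale. Below is a minimal sketch of the asker's share2 action with the replacement applied. Passing bounds to drawHierarchy and afterScreenUpdates: true go beyond the one-line answer and are assumptions, included here because they are the common snapshot idiom:

@IBAction func share2(_ sender: Any) {
    let message = "Hello!"
    let link = NSURL(string: "http://url.com/")

    // Render the view hierarchy into an image context sized to the
    // view's bounds, non-opaque, at the screen's native scale.
    UIGraphicsBeginImageContextWithOptions(self.view.bounds.size, false, UIScreen.main.scale)
    // afterScreenUpdates: true (assumption, not part of the answer) waits
    // for pending screen updates so the snapshot reflects the latest frame.
    self.view.drawHierarchy(in: self.view.bounds, afterScreenUpdates: true)
    let img = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    if let link = link, let img = img {
        let objectsToShare = [message, link, img] as [Any]
        let activityVC = UIActivityViewController(activityItems: objectsToShare, applicationActivities: nil)
        activityVC.excludedActivityTypes = [UIActivityType.airDrop, UIActivityType.addToReadingList]
        self.present(activityVC, animated: true, completion: nil)
    }
}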