I want to take a screenshot of the ongoing VideoCaptureView during a call in iOS Swift. I am using QuickBlox.
The code below returns a black image:
public extension UIView {
    public func snapshotImage() -> UIImage? {
        UIGraphicsBeginImageContextWithOptions(bounds.size, isOpaque, 0)
        drawHierarchy(in: bounds, afterScreenUpdates: false)
        let snapshotImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return snapshotImage
    }

    public func snapshotView() -> UIView? {
        if let snapshotImage = snapshotImage() {
            return UIImageView(image: snapshotImage)
        } else {
            return nil
        }
    }
}
let snapshot = view.snapshotView()
Answer 0 (score: 0)
Try this:
fileprivate func captureUIImageFromUIView(_ view: UIView?) -> UIImage {
    guard let view = view else {
        // if the view is nil (it's happened to me) return an alternative image
        let errorImage = UIImage(named: "Logo.png")
        return errorImage!
    }
    // if the view is all good then render the view's contents into a UIImage
    if #available(iOS 10.0, *) {
        let renderer = UIGraphicsImageRenderer(size: view.bounds.size)
        let capturedImage = renderer.image { _ in
            view.drawHierarchy(in: view.bounds, afterScreenUpdates: true)
        }
        return capturedImage
    } else {
        UIGraphicsBeginImageContextWithOptions(view.bounds.size, view.isOpaque, 0.0)
        // afterScreenUpdates: true ensures pending view updates are rendered before the snapshot
        view.drawHierarchy(in: view.bounds, afterScreenUpdates: true)
        let capturedImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return capturedImage!
    }
}
Call it with:
let pdfImage = self.captureUIImageFromUIView(self.containerView)
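If you also need to keep the snapshot, one small follow-up (an addition of mine, not part of the original answer) is to hand it to the photo library; this assumes the NSPhotoLibraryAddUsageDescription key is present in Info.plist:
// Assumed follow-up: persist the captured snapshot to the photo library.
// Requires the NSPhotoLibraryAddUsageDescription key in Info.plist.
UIImageWriteToSavedPhotosAlbum(pdfImage, nil, nil, nil)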
//
//  ViewController.swift
//  GooglePlace-AutoComplete
//
//  Created by infos on 7/10/18.
//  Copyright © 2018 infos. All rights reserved.
//
import UIKit
import GoogleMaps
import GooglePlaces
import SwiftyJSON
import Alamofire
enum Location {
case startLocation
case destinationLocation
}
class ViewController: UIViewController, GMSMapViewDelegate, CLLocationManagerDelegate, UITextFieldDelegate {
@IBOutlet weak var googleMaps: GMSMapView!
@IBOutlet weak var startLocation: UITextField!
@IBOutlet weak var destinationLocation: UITextField!
var locationManager = CLLocationManager()
var locationSelected = Location.startLocation
var polyline = GMSPolyline()
var locationStart = CLLocation()
var locationEnd = CLLocation()
override func viewDidLoad() {
super.viewDidLoad()
GMSPlacesClient.provideAPIKey("AIzaSyC55Dq1qPH7EM_uiAVf-8QuxJtf2W1viQs")
GMSServices.provideAPIKey("AIzaSyC55Dq1qPH7EM_uiAVf-8QuxJtf2W1viQs")
// Create a GMSCameraPosition that tells the map to display the
let camera = GMSCameraPosition.camera(withLatitude: 13.082680,
longitude: 80.270718,
zoom: 10.0,
bearing: 30,
viewingAngle: 40)
//Setting the googleView
googleMaps.camera = camera
googleMaps.delegate = self
googleMaps.isMyLocationEnabled = true
googleMaps.settings.myLocationButton = true
googleMaps.settings.compassButton = true
googleMaps.settings.zoomGestures = true
googleMaps.animate(to: camera)
self.view.addSubview(googleMaps)
//Setting the start and end location
let origin = "\(13.082680),\(80.270718)"
let destination = "\(15.912900),\(79.739987)"
let url = "https://maps.googleapis.com/maps/api/directions/json?origin=\(origin)&destination=\(destination)&mode=driving"
//Rrequesting Alamofire and SwiftyJSON
Alamofire.request(url).responseJSON { response in
print(response.request as Any) // original URL request
print(response.response as Any) // HTTP URL response
print(response.data as Any) // server data
print(response.result) // result of response serialization
do {
let json = try JSON(data: response.data!)
let routes = json["routes"].arrayValue
for route in routes
{
let routeOverviewPolyline = route["overview_polyline"].dictionary
let points = routeOverviewPolyline?["points"]?.stringValue
let path = GMSPath.init(fromEncodedPath: points!)
let polyline = GMSPolyline.init(path: path)
polyline.strokeColor = UIColor.blue
polyline.strokeWidth = 2
polyline.map = self.googleMaps
}
}
catch {
print("Failed to parse directions response: \(error)")
}
}
// Creates a marker in the center of the map.
let marker = GMSMarker()
marker.position = CLLocationCoordinate2D(latitude: 28.524555, longitude: 77.275111)
marker.title = "Mobiloitte"
marker.snippet = "India"
marker.map = googleMaps
//28.643091, 77.218280
let marker1 = GMSMarker()
marker1.position = CLLocationCoordinate2D(latitude: 28.643091, longitude: 77.218280)
marker1.title = "NewDelhi"
marker1.snippet = "India"
marker1.map = googleMaps
}
// MARK: function for create a marker pin on map
func createMarker(titleMarker: String, iconMarker: UIImage, latitude: CLLocationDegrees, longitude: CLLocationDegrees) {
let marker = GMSMarker()
marker.position = CLLocationCoordinate2DMake(latitude, longitude)
marker.isDraggable=true
marker.title = titleMarker
marker.icon = iconMarker
marker.map = googleMaps
}
//MARK: - Location Manager delegates
func locationManager(_ manager: CLLocationManager, didFailWithError error: Error) {
print("Error to get location : \(error)")
}
func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
let location = locations.last
// let camera = GMSCameraPosition.camera(withLatitude: (location?.coordinate.latitude)!, longitude: (location?.coordinate.longitude)!, zoom: 17.0)
let locationMobi = CLLocation(latitude: 28.524555, longitude: 77.275111)
drawPath(startLocation: location!, endLocation: locationMobi)
//self.googleMaps?.animate(to: camera)
self.locationManager.stopUpdatingLocation()
}
// MARK: - GMSMapViewDelegate
func mapView(_ mapView: GMSMapView, idleAt position: GMSCameraPosition) {
googleMaps.isMyLocationEnabled = true
}
func mapView(_ mapView: GMSMapView, willMove gesture: Bool) {
googleMaps.isMyLocationEnabled = true
if (gesture) {
mapView.selectedMarker = nil
}
}
func mapView(_ mapView: GMSMapView, didTap marker: GMSMarker) -> Bool {
googleMaps.isMyLocationEnabled = true
return false
}
func mapView(_ mapView: GMSMapView, didTapAt coordinate: CLLocationCoordinate2D) {
print("COORDINATE \(coordinate)") // when you tapped coordinate
}
func didTapMyLocationButton(for mapView: GMSMapView) -> Bool {
googleMaps.isMyLocationEnabled = true
googleMaps.selectedMarker = nil
return false
}
//MARK: - Marker Delegate
func mapView(_ mapView: GMSMapView, didDrag marker: GMSMarker) {
}
func mapView(_ mapView: GMSMapView, didBeginDragging marker: GMSMarker) {
}
func mapView(_ mapView: GMSMapView, didEndDragging marker: GMSMarker) {
self.googleMaps.reloadInputViews()
//self.polyline.map = nil;
print("marker dragged to location: \(marker.position.latitude),\(marker.position.longitude)")
let locationMobi = CLLocation(latitude: marker.position.latitude, longitude: marker.position.longitude)
self.drawPath(startLocation: locationMobi, endLocation: locationEnd)
}
//MARK: - this is function for create direction path, from start location to desination location
func drawPath(startLocation: CLLocation, endLocation: CLLocation)
{
let origin = "\(startLocation.coordinate.latitude),\(startLocation.coordinate.longitude)"
let destination = "\(endLocation.coordinate.latitude),\(endLocation.coordinate.longitude)"
self.polyline.map = nil
//self.googleMaps.clear()
let url = "https://maps.googleapis.com/maps/api/directions/json?origin=\(origin)&destination=\(destination)&mode=driving"
Alamofire.request(url).responseJSON { response in
print(response.request as Any) // original URL request
print(response.response as Any) // HTTP URL response
print(response.data as Any) // server data
print(response.result as Any) // result of response serialization
do {
let json = try JSON(data: response.data!)
let routes = json["routes"].arrayValue
// print route using Polyline
for route in routes
{
let routeOverviewPolyline = route["overview_polyline"].dictionary
let points = routeOverviewPolyline?["points"]?.stringValue
let path = GMSPath.init(fromEncodedPath: points!)
self.polyline = GMSPolyline.init(path: path)
self.polyline.strokeWidth = 2
self.polyline.strokeColor = UIColor.red
self.polyline.map = self.googleMaps
}
} catch {
print("Failed to parse directions response: \(error)")
}
}
}
// MARK: when start location tap, this will open the search location
@IBAction func openStartLocation(_ sender: UIButton) {
let autoCompleteController = GMSAutocompleteViewController()
autoCompleteController.delegate = self
// selected location
locationSelected = .startLocation
// Change text color
UISearchBar.appearance().setTextColor(color: UIColor.black)
self.locationManager.stopUpdatingLocation()
self.present(autoCompleteController, animated: true, completion: nil)
}
// MARK: when destination location tap, this will open the search location
@IBAction func openDestinationLocation(_ sender: UIButton) {
let autoCompleteController = GMSAutocompleteViewController()
autoCompleteController.delegate = self
// selected location
locationSelected = .destinationLocation
// Change text color
UISearchBar.appearance().setTextColor(color: UIColor.black)
self.locationManager.stopUpdatingLocation()
self.present(autoCompleteController, animated: true, completion: nil)
}
// MARK: SHOW DIRECTION WITH BUTTON
@IBAction func showDirection(_ sender: UIButton) {
// when button direction tapped, must call drawpath func
self.drawPath(startLocation: locationStart, endLocation: locationEnd)
}
}
// MARK: - GMSAutocompleteViewController delegate, for the autocomplete location search
extension ViewController: GMSAutocompleteViewControllerDelegate {
func viewController(_ viewController: GMSAutocompleteViewController, didFailAutocompleteWithError error: Error) {
print("Error \(error)")
}
func viewController(_ viewController: GMSAutocompleteViewController, didAutocompleteWith place: GMSPlace) {
// Change map location
let camera = GMSCameraPosition.camera(withLatitude: place.coordinate.latitude, longitude: place.coordinate.longitude, zoom: 16.0)
// set coordinate to text
if locationSelected == .startLocation {
if (self.locationManager.location?.coordinate.longitude) != nil {
startLocation.text = "\(place.coordinate.latitude), \(place.coordinate.longitude)"
locationStart = CLLocation(latitude: place.coordinate.latitude, longitude: place.coordinate.longitude)
createMarker(titleMarker: "Location Start", iconMarker: #imageLiteral(resourceName: "images"), latitude: place.coordinate.latitude, longitude: place.coordinate.longitude)
}else {
// handle the error by declaring default value
}
} else {
if (self.locationManager.location?.coordinate.longitude) != nil {
destinationLocation.text = "\(place.coordinate.latitude), \(place.coordinate.longitude)"
locationEnd = CLLocation(latitude: place.coordinate.latitude, longitude: place.coordinate.longitude)
createMarker(titleMarker: "Location End", iconMarker: #imageLiteral(resourceName: "images"), latitude: place.coordinate.latitude, longitude: place.coordinate.longitude)
}else {
// handle the error by declaring default value
}
}
self.googleMaps.camera = camera
self.dismiss(animated: true, completion: nil)
}
func wasCancelled(_ viewController: GMSAutocompleteViewController) {
self.dismiss(animated: true, completion: nil)
}
func didRequestAutocompletePredictions(_ viewController: GMSAutocompleteViewController) {
UIApplication.shared.isNetworkActivityIndicatorVisible = true
}
func didUpdateAutocompletePredictions(_ viewController: GMSAutocompleteViewController) {
UIApplication.shared.isNetworkActivityIndicatorVisible = false
}
}
public func textFieldShouldReturn(_ textField: UITextField) -> Bool {
textField.resignFirstResponder()
return true
}
public extension UISearchBar {
public func setTextColor(color: UIColor) {
let svs = subviews.flatMap { $0.subviews }
guard let tf = (svs.filter { $0 is UITextField }).first as? UITextField else { return }
tf.textColor = color
}
}
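Traversing subviews to reach the search bar's text field is fragile across iOS releases; from iOS 13 the text field is exposed directly as searchTextField. A possible variant (an addition of mine, not part of the original answer; setTextColorCompat is an illustrative name):
public extension UISearchBar {
    // On iOS 13+ the text field is public API, so no subview traversal is needed.
    func setTextColorCompat(color: UIColor) {
        if #available(iOS 13.0, *) {
            searchTextField.textColor = color
        } else {
            let textFields = subviews.flatMap { $0.subviews }.compactMap { $0 as? UITextField }
            textFields.first?.textColor = color
        }
    }
}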
Answer 1 (score: 0)
Here is working code to take a screenshot of the ongoing video during a QuickBlox call:
@IBOutlet weak var stackView: UIStackView!

// videoCapture is used below but was not declared in the original snippet
var videoCapture: QBRTCCameraCapture!
let stillImageOutput = AVCaptureStillImageOutput()

override func viewDidLoad() {
    super.viewDidLoad()
    QBRTCClient.initializeRTC()
    QBRTCClient.instance().add(self)
    configureVideo()
}

func configureVideo() {
    QBRTCConfig.mediaStreamConfiguration().videoCodec = .H264
    QBRTCConfig.setAnswerTimeInterval(30)
    QBRTCConfig.setStatsReportTimeInterval(5)

    let videoFormat = QBRTCVideoFormat.init()
    videoFormat.frameRate = 30
    videoFormat.pixelFormat = .format420f
    videoFormat.width = 640
    videoFormat.height = 480

    self.videoCapture = QBRTCCameraCapture.init(videoFormat: videoFormat, position: .front)
    self.videoCapture.previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    self.videoCapture.startSession {
        // Attach a still-image output to the capture session QuickBlox already runs,
        // so frames can be grabbed straight from the camera instead of snapshotting the view
        self.stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if self.videoCapture.captureSession.canAddOutput(self.stillImageOutput) {
            self.videoCapture.captureSession.addOutput(self.stillImageOutput)
        }
        let localView = LocalVideoView.init(withPreviewLayer: self.videoCapture.previewLayer)
        self.stackView.addArrangedSubview(localView)
    }
}
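LocalVideoView is not defined in this snippet; it comes from the QuickBlox sample project. A rough sketch of what such a preview-hosting view could look like (an assumption of mine, the real class may differ):
// Hypothetical stand-in for the sample's LocalVideoView: a UIView that hosts
// the camera preview layer and keeps it sized to the view's bounds.
// (Requires import UIKit and import AVFoundation.)
class LocalVideoView: UIView {
    private let previewLayer: AVCaptureVideoPreviewLayer

    init(withPreviewLayer layer: AVCaptureVideoPreviewLayer) {
        self.previewLayer = layer
        super.init(frame: .zero)
        self.layer.addSublayer(layer)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        previewLayer.frame = bounds
    }
}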
Take-photo button tap:
@IBAction func TakePhotoTapped(_ sender: Any) {
    if let videoConnection = stillImageOutput.connection(with: AVMediaType.video) {
        stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { (imageDataSampleBuffer, error) -> Void in
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer!)
            if let image = UIImage(data: imageData!) {
                // Your image is here
            }
        }
    }
}
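AVCaptureStillImageOutput has been deprecated since iOS 10. A minimal sketch of the same frame-grab idea using AVCapturePhotoOutput (iOS 11+); FrameGrabber and PhotoCaptureDelegate are illustrative names I'm introducing, not QuickBlox or AVFoundation API:
import AVFoundation
import UIKit

// Receives the finished photo and hands back a UIImage (or nil on failure).
@available(iOS 11.0, *)
final class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
    private let completion: (UIImage?) -> Void

    init(completion: @escaping (UIImage?) -> Void) {
        self.completion = completion
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        guard error == nil,
              let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else {
            completion(nil)
            return
        }
        completion(image)
    }
}

@available(iOS 11.0, *)
final class FrameGrabber {
    private let photoOutput = AVCapturePhotoOutput()
    private var inFlightDelegate: PhotoCaptureDelegate?

    // Attach the output to the session QuickBlox exposes (e.g. videoCapture.captureSession).
    func attach(to session: AVCaptureSession) {
        if session.canAddOutput(photoOutput) {
            session.addOutput(photoOutput)
        }
    }

    // Capture a single frame; the delegate is kept alive until its callback fires.
    func capture(completion: @escaping (UIImage?) -> Void) {
        let delegate = PhotoCaptureDelegate { [weak self] image in
            completion(image)
            self?.inFlightDelegate = nil
        }
        inFlightDelegate = delegate
        photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: delegate)
    }
}
Wiring it up would mean calling something like frameGrabber.attach(to: self.videoCapture.captureSession) inside startSession, and frameGrabber.capture { image in ... } from the button handler, in place of the AVCaptureStillImageOutput code above.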