问题:点击视图后,无法从视图内的图像中取得点击位置对应的正确像素数据。
/// Handles a tap on the screen: maps the tap location to the corresponding
/// pixel of `pickerImage.image` and displays that pixel's color.
///
/// Fix: the original read the location in `self.view` and compensated with a
/// hard-coded x scale (0.91777) plus a status-bar offset. Reading the location
/// in the image view itself makes both corrections unnecessary, and the
/// view-to-image scale is computed from the actual sizes instead of a magic
/// constant.
func handleTap(gestureRecognizer: UIGestureRecognizer) {
    guard let image = pickerImage.image else { return }

    // Location in the image view's own coordinate space — already excludes
    // the status bar and any horizontal inset of the container.
    let point = gestureRecognizer.locationInView(pickerImage)
    print("You tapped at \(point)")
    print(image.size.height)
    print(image.size.width)

    // Convert view points -> image points: scale each axis by
    // imageSize / viewSize.
    // NOTE(review): assumes contentMode is .ScaleToFill; for .ScaleAspectFit
    // the letterbox offset must also be subtracted — confirm.
    let xScale = image.size.width / pickerImage.bounds.size.width
    let yScale = image.size.height / pickerImage.bounds.size.height
    let imagePoint = CGPointMake(point.x * xScale, point.y * yScale)

    let color = image.getPixelColor(imagePoint)
    colorText.backgroundColor = color
    colorText2.text = color.htmlRGBColor
}
容器高度为 300,点击得到的坐标(例如 "You tapped at (369.0, 214.33332824707)")与容器内照片的像素坐标并不一致。
与屏幕上实际显示的图像对比后发现:y 方向大约偏移了 35px(顶部布局参考线,即状态栏所在的位置),x 方向的缩放比例约为 0.9177。
以下是图片容器的声明:
if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.SavedPhotosAlbum) {
print("Button capture")
imagePicker.delegate = self
imagePicker.sourceType = UIImagePickerControllerSourceType.SavedPhotosAlbum;
imagePicker.allowsEditing = false
self.presentViewController(imagePicker, animated: true, completion: nil)
}
}
/// UIImagePickerController delegate callback: dismisses the picker and
/// installs the chosen photo, resized to a fixed 300pt height, into the
/// image view.
func imagePickerController(picker: UIImagePickerController!, didFinishPickingImage image: UIImage!, editingInfo: NSDictionary!){
    // Close the picker; nothing needs to happen on completion.
    self.dismissViewControllerAnimated(true, completion: nil)

    // Scale the picked photo down so it fits the 300pt-tall container.
    let targetHeight = CGFloat(300)
    pickerImage.image = resizeImage(image, newHeight: targetHeight)
}
以下是位于文件外部的扩展(extension)代码:
import Foundation
import UIKit
extension UIImage {
    /// Returns the color of the pixel at `pos`, expressed in the bitmap's
    /// pixel coordinates.
    ///
    /// Fixes two indexing bugs in the original:
    /// 1. It used `self.size.width` (points) as the row width; the bitmap's
    ///    pixel width differs whenever `scale != 1`, so every row after the
    ///    first was read from the wrong offset.
    /// 2. It assumed each row is exactly `width * 4` bytes; CGImage rows may
    ///    be padded, so `CGImageGetBytesPerRow` must be used instead.
    ///
    /// NOTE(review): assumes an 8-bit-per-channel RGBA byte order; a BGRA
    /// bitmap would swap red/blue — confirm via CGImageGetBitmapInfo if the
    /// colors come out wrong.
    func getPixelColor(pos: CGPoint) -> UIColor {
        let cgImage = self.CGImage
        let pixelData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage))
        let data: UnsafePointer<UInt8> = CFDataGetBytePtr(pixelData)

        // Index with the bitmap's real geometry, not the point size.
        let bytesPerRow = CGImageGetBytesPerRow(cgImage)
        let bytesPerPixel = CGImageGetBitsPerPixel(cgImage) / 8
        let pixelInfo = Int(pos.y) * bytesPerRow + Int(pos.x) * bytesPerPixel

        let r = CGFloat(data[pixelInfo]) / CGFloat(255.0)
        let g = CGFloat(data[pixelInfo + 1]) / CGFloat(255.0)
        let b = CGFloat(data[pixelInfo + 2]) / CGFloat(255.0)
        let a = CGFloat(data[pixelInfo + 3]) / CGFloat(255.0)
        return UIColor(red: r, green: g, blue: b, alpha: a)
    }
}
extension UIColor {
    /// Packs the color into a single ARGB integer
    /// (bits 24-31 alpha, 16-23 red, 8-15 green, 0-7 blue).
    ///
    /// - Returns: the packed value, or `nil` if the color is not in a
    ///   compatible color space and its RGBA components cannot be read.
    ///
    /// Fix: the original returned the magic value `4` on failure, which is
    /// indistinguishable from a legitimate packed color; `nil` is the correct
    /// failure value for an `Int?` return.
    func rgb() -> Int? {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        guard self.getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) else {
            // Could not extract RGBA components.
            return nil
        }
        let iRed = Int(fRed * 255.0)
        let iGreen = Int(fGreen * 255.0)
        let iBlue = Int(fBlue * 255.0)
        let iAlpha = Int(fAlpha * 255.0)
        return (iAlpha << 24) + (iRed << 16) + (iGreen << 8) + iBlue
    }
}
上面的扩展来自另一个回答。我想实现的是:只让高度为 300 的容器(UIImage 位于其中)响应触摸;但把用户在容器内点击得到的坐标映射到该图像的像素数据时,结果是错的。
已尝试过:按 x 方向的比例把较大的图像重新缩放到高度 300;把 y 坐标减去 30(顶部状态栏的高度);按坐标大小换算图像的 x 方向缩放比例。