Swift CIFilter in a collectionView

Date: 2017-10-14 14:38:47

Tags: ios swift uiimage core-image ciimage

I am working on an app that lets the user pick a photo, apply a filter, and save it. It is basically a copy of the Instagram camera: the user selects a photo from the library, applies a filter, and then posts it.

The app works fine; my only problem is that loading the collectionView with the filtered images is a bit slow. I have read up on this (I am new to Swift and this is my first time using Core Image) and tried to apply all the advice I found, but I still think the app runs slowly.

So here is what I did.

I created a model for the filters:

public protocol Filter {
    // Reference to the Core Image filter.
    var filter: CIFilter { get }
    // Output of the filter.
    var outputImage: CIImage? { get }
}

// Common to all filters.
extension Filter {
    public var outputImage: CIImage? { return self.filter.outputImage }
}

// Bloom
public class Bloom: Filter {

    public let filter: CIFilter

    public init(inputImage: CIImage, inputRadius: CGFloat = 10.0,
                inputIntensity: CGFloat = 1.0) {
        let parameters: [String: Any] = [
            "inputImage": inputImage,
            "inputRadius": inputRadius,
            "inputIntensity": inputIntensity
        ]
        guard let filter = CIFilter(name: "CIBloom", withInputParameters: parameters) else { fatalError() }
        self.filter = filter
    }
}

// Box Blur
public class BoxBlur: Filter {

    public let filter: CIFilter

    public init(inputImage: CIImage, inputRadius: CGFloat = 10.0) {
        let parameters: [String: Any] = [
            "inputImage": inputImage,
            "inputRadius": inputRadius
        ]
        guard let filter = CIFilter(name: "CIBoxBlur", withInputParameters: parameters) else { fatalError() }
        self.filter = filter
    }
}
...........

And so on for all the filters I need.
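
(The SepiaTone filter referenced further down is not shown in the post; assuming it follows the same pattern as Bloom and BoxBlur above, it would look roughly like this sketch.)

// Sepia Tone: a sketch assuming the same pattern as the filters above;
// not part of the original post.
public class SepiaTone: Filter {

    public let filter: CIFilter

    public init(inputImage: CIImage, inputIntensity: CGFloat = 1.0) {
        let parameters: [String: Any] = [
            "inputImage": inputImage,
            "inputIntensity": inputIntensity
        ]
        guard let filter = CIFilter(name: "CISepiaTone", withInputParameters: parameters) else { fatalError() }
        self.filter = filter
    }
}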

I created a class that builds an array of UIImages containing all the filtered images:

class filteredImages {

    static var filterToApply: Filter!
    static var filterNames = [
        "Original", "Sepia", "Poster", ....
    ]

    static var filteredImages = [UIImage]()
    static var filteredImage: UIImage!

    static func createImageArray(inputImage: CIImage, onSuccess: @escaping () -> ()) {
        filteredImages.removeAll()
        for filter in filterNames {
            switch filter {
            case "Original":
                filteredImage = UIImage(ciImage: inputImage)
                filteredImages.append(filteredImage)
            case "Sepia":
                filterToApply = SepiaTone(inputImage: inputImage, inputIntensity: 0.8)
                filteredImage = UIImage(ciImage: filterToApply.outputImage!)
                filteredImages.append(filteredImage)
            ............
            }
            onSuccess()
        }
    }
}

And finally, in my controller:

var filteredImagesArray = [UIImage]()
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(true)
    let inputImage = CIImage(image: (originalImage.image?.resized(toWidth: 120))!)!
    filteredImages.createImageArray(inputImage: inputImage) {
        self.filteredImagesArray = filteredImages.filteredImages
    }
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
    let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "FilterCell", for: indexPath) as! FilterCell

    cell.filteredImage.image = filteredImagesArray[indexPath.row]
    cell.filterName.text = filteredImages.filterNames[indexPath.row]
    cell.layer.borderWidth = 0.7
    cell.layer.borderColor = UIColor.white.cgColor
    return cell
}

I also have an extension that downscales the "original" image:

extension UIImage {
    func resized(toWidth width: CGFloat) -> UIImage? {
        let canvasSize = CGSize(width: width, height: CGFloat(ceil(width / size.width * size.height)))
        UIGraphicsBeginImageContextWithOptions(canvasSize, false, scale)
        defer { UIGraphicsEndImageContext() }
        draw(in: CGRect(origin: .zero, size: canvasSize))
        return UIGraphicsGetImageFromCurrentImageContext()
    }
}

Is there any way to make this faster? Am I missing something?

Thank you.

----------- UPDATE as requested by Ladislav

This is the code I use to fetch the pictures from the photo library:

enum Section: Int {
    case allPhotos = 0
    case smartAlbums
    case userCollections

    static let count = 3
}

var allPhotos: PHFetchResult<PHAsset>!
var allPhotosFromAlbum = PHFetchResult<PHAsset>()
var smartAlbums: [PHAssetCollection] = []
var userCollections: PHFetchResult<PHAssetCollection>!

let subtypes: [PHAssetCollectionSubtype] = [
    .smartAlbumFavorites,
    .smartAlbumPanoramas,
    .smartAlbumScreenshots,
    .smartAlbumSelfPortraits,
    .smartAlbumVideos,
    .smartAlbumRecentlyAdded,
    .smartAlbumSelfPortraits
]

override func viewDidLoad() {
    super.viewDidLoad()
    // Create a PHFetchResult object for each section in the table view.
    let allPhotosOptions = PHFetchOptions()
    allPhotosOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
    allPhotos = PHAsset.fetchAssets(with: allPhotosOptions)

    let options = PHFetchOptions()
    options.predicate = NSPredicate(format: "estimatedAssetCount > 0")
    options.sortDescriptors = [NSSortDescriptor(key: "localizedTitle", ascending: false)]
    smartAlbums = fetchSmartCollections(with: .smartAlbum, subtypes: subtypes)
    userCollections = PHAssetCollection.fetchAssetCollections(with: .album, subtype: .albumRegular, options: options)
}

And here is the function I use to fetch the PHAssetCollections with the given subtypes:

private func fetchSmartCollections(with: PHAssetCollectionType, subtypes: [PHAssetCollectionSubtype]) -> [PHAssetCollection] {
    var collections:[PHAssetCollection] = []
    let options = PHFetchOptions()
    options.includeHiddenAssets = false

    for subtype in subtypes {
        if let collection = PHAssetCollection.fetchAssetCollections(with: with, subtype: subtype, options: options).firstObject, collection.photosCount > 0 {
            collections.append(collection)
        }
    }

    return collections
}

I show the results in a tableView and then, on selection, pass all the images of the chosen album to another controller.

Here is the code of that controller (let's call it mMainVC), where I actually display the images to choose from:

fileprivate let imageManager = PHCachingImageManager()
var fetchResult: PHFetchResult<PHAsset>!
var assetCollection: PHAssetCollection!

override func viewDidLoad() {
    super.viewDidLoad()
    if fetchResult == nil {
        let allPhotosOptions = PHFetchOptions()
        allPhotosOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        fetchResult = PHAsset.fetchAssets(with: allPhotosOptions)
    }
}

And here is the function that gets an image from the fetchResult:

func selectImageFromAssetAtIndex(index: Int) {
    let asset = fetchResult.object(at: index)
    let size = scrollViewImage.frame.size.width

    PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: size, height: size), contentMode: .aspectFill, options: nil, resultHandler: { (image, info) in
        DispatchQueue.main.async {
            self.displayImageInScrollView(image: image!)
        }
    })
}
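
(displayImageInScrollView is not shown in the post; a minimal sketch of what it might do, assuming scrollViewImage is a UIScrollView and the embedded image view is a hypothetical detail.)

// Hypothetical implementation: puts the selected image into an image view
// that fills the scroll view, so it can be panned/cropped later.
func displayImageInScrollView(image: UIImage) {
    let imageView = UIImageView(image: image)
    imageView.frame = CGRect(origin: .zero, size: scrollViewImage.frame.size)
    imageView.contentMode = .scaleAspectFill
    imageView.clipsToBounds = true

    scrollViewImage.subviews.forEach { $0.removeFromSuperview() }
    scrollViewImage.addSubview(imageView)
    scrollViewImage.contentSize = imageView.frame.size
}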

--------------------- SCREENSHOT AND VIDEO


Here is a link to the video: https://drive.google.com/file/d/0B6U8olIA_ZS8U0tpN0hKOWdrVm8/view?usp=sharing

1 Answer:

Answer (score: 1):

I think everything you are doing is fine; the only problem is that


override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(true)
    let inputImage = CIImage(image: (originalImage.image?.resized(toWidth: 120))!)!
    filteredImages.createImageArray(inputImage: inputImage) {
        self.filteredImagesArray = filteredImages.filteredImages
    }
}

is called on the main thread, and that is why your app freezes.

I would add an activityIndicator to each cell of the collectionView, and the cells would stay in a loading state until you get the results back from the createImageArray call; then you would populate the cells with the filtered images, like this:

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(true)
    DispatchQueue.global(qos: .userInitiated).async { [weak self] in
        guard let self = self else { return }
        let inputImage = CIImage(image: (self.originalImage.image?.resized(toWidth: 120))!)!
        filteredImages.createImageArray(inputImage: inputImage) {
            self.filteredImagesArray = filteredImages.filteredImages
            DispatchQueue.main.async {
                let indexPaths = self.collectionView.indexPathsForVisibleItems
                self.collectionView.reloadItems(at: indexPaths)
            }
        }
    }
}

So when the visible items are reloaded, self.filteredImagesArray will already be set, and you can disable the loading state and show the filtered images in the cells.
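
(The answer does not show the loading-state code itself; a minimal sketch of what the suggested cell could look like, assuming FilterCell gains a UIActivityIndicatorView outlet, which is not in the original post. With this approach the item count would come from filterNames.count, and cellForItemAt would pass nil while the filtered image is not yet available.)

class FilterCell: UICollectionViewCell {
    @IBOutlet weak var filteredImage: UIImageView!
    @IBOutlet weak var filterName: UILabel!
    // Hypothetical outlet, not in the original post.
    @IBOutlet weak var activityIndicator: UIActivityIndicatorView!

    // Shows a spinner while the filtered image is still being generated.
    func configure(with image: UIImage?, name: String) {
        filterName.text = name
        if let image = image {
            filteredImage.image = image
            activityIndicator.stopAnimating()
        } else {
            filteredImage.image = nil
            activityIndicator.startAnimating()
        }
    }
}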