I am trying to come up with a way to perform a live blur during an AVVideoCompositionCoreAnimationTool export. I have tried UIVisualEffectView and stealing the layer of its underlying view. That works in preview, but as soon as the layer is used with AVVideoCompositionCoreAnimationTool it renders black. So I started building a CALayer that does the blur itself, but it does not update often enough. What can I do to make it draw more frequently, or what else might work for live blur with AVVideoCompositionCoreAnimationTool on iOS? Here is the layer I built (a sketch of how such a layer gets attached during export follows the playground code below).
import UIKit
import PlaygroundSupport

// Draws a blurred snapshot of targetLayer, refreshed by a timer.
class CABlurLayer: CALayer {

    let maxBlurRadius: CGFloat = 20
    var currentImageIndex: Float = 0
    var blur: Int = 10
    var context: CGContext?
    var link: Timer?
    var snap: UIImage?
    var targetLayer: CALayer?

    override init() {
        super.init()
    }

    convenience init(targetLayer: CALayer?) {
        self.init()
        self.targetLayer = targetLayer
        self.drawsAsynchronously = true
        if let tl = targetLayer {
            self.masksToBounds = tl.masksToBounds
        }
        updateSnapShots()
        link = Timer.scheduledTimer(timeInterval: 1/60, target: self, selector: #selector(updateBlur), userInfo: nil, repeats: true)
    }

    @objc func updateBlur() {
        updateSnapShots()
        DispatchQueue.main.async {
            self.setNeedsDisplay()
        }
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    func updateSnapShots() {
        guard let tl = targetLayer else { return }
        UIGraphicsBeginImageContextWithOptions(self.bounds.size, false, 0)
        // Always balance the image context, even on the early return.
        defer { UIGraphicsEndImageContext() }
        guard let ctx = UIGraphicsGetCurrentContext() else { return }
        tl.render(in: ctx)
        let snapshot = UIGraphicsGetImageFromCurrentImageContext()
        snap = snapshot?.applyBlurWithRadius(CGFloat(blur), tintColor: UIColor().withAlphaComponent(0), saturationDeltaFactor: 1.4)
    }

    override func draw(in ctx: CGContext) {
        guard let blurredImage = snap,
              let tl = targetLayer else { return }
        var origin = tl.frame.origin
        if let pres = tl.presentation() {
            origin = pres.frame.origin
        }
        UIGraphicsPushContext(ctx)
        blurredImage.draw(at: origin)
        UIGraphicsPopContext()
    }
}
class MyViewController: UIViewController {
    override func loadView() {
        let view = UIView()
        view.backgroundColor = .white
        self.view = view

        let ur = URL(string: "https://images.pexels.com/photos/457882/pexels-photo-457882.jpeg?auto=compress&cs=tinysrgb&dpr=2&w=500")
        URLSession.shared.dataTask(with: ur!) { (dt, response, error) in
            if let data = dt {
                print("we have a response")
                let img = UIImage(data: data)
                DispatchQueue.main.async {
                    let layer = CALayer()
                    layer.frame = CGRect(x: 0, y: 0, width: 500, height: 500)
                    view.layer.addSublayer(layer)

                    let imageLayer = CALayer()
                    imageLayer.masksToBounds = true
                    imageLayer.frame = CGRect(x: 0, y: 150, width: 400, height: 300)
                    imageLayer.contentsGravity = .resizeAspectFill
                    imageLayer.contents = img?.cgImage
                    layer.addSublayer(imageLayer)

                    let blur = CABlurLayer(targetLayer: imageLayer)
                    blur.frame = layer.bounds
                    layer.addSublayer(blur)
                    blur.blur = 20

                    let pos = CABasicAnimation(keyPath: "position.x")
                    pos.toValue = imageLayer.position.x
                    pos.fromValue = imageLayer.position.x - 100
                    pos.duration = 2
                    pos.repeatCount = 100
                    pos.autoreverses = true
                    imageLayer.add(pos, forKey: nil)
                }
            }
        }.resume()
    }
}

// Present the view controller in the Live View window
PlaygroundPage.current.liveView = MyViewController()
PlaygroundPage.current.needsIndefiniteExecution = true
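For context, this is roughly how a layer like this gets hooked up when exporting. It is a minimal sketch; the asset, render size, and preset are placeholders rather than my exact setup.

import AVFoundation
import UIKit

// Sketch only: attach an overlay layer (for example the CABlurLayer above) to an export.
// The asset, render size, and preset are placeholders, not my actual values.
func makeExportSession(for asset: AVAsset, overlay: CALayer, renderSize: CGSize) -> AVAssetExportSession? {
    // AVVideoCompositionCoreAnimationTool composites videoLayer (the video frames)
    // together with everything else in parentLayer into the exported frames.
    let videoLayer = CALayer()
    let parentLayer = CALayer()
    videoLayer.frame = CGRect(origin: .zero, size: renderSize)
    parentLayer.frame = videoLayer.frame
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(overlay)

    let composition = AVMutableVideoComposition(propertiesOf: asset)
    composition.animationTool = AVVideoCompositionCoreAnimationTool(
        postProcessingAsVideoLayer: videoLayer, in: parentLayer)

    let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
    session?.videoComposition = composition
    return session
}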
The UIImage extension:
import UIKit
import Accelerate

public extension UIImage {

    func applyLightEffect() -> UIImage? {
        return applyBlurWithRadius(30, tintColor: UIColor(white: 1.0, alpha: 0.3), saturationDeltaFactor: 1.8)
    }

    func applyExtraLightEffect() -> UIImage? {
        return applyBlurWithRadius(20, tintColor: UIColor(white: 0.97, alpha: 0.82), saturationDeltaFactor: 1.8)
    }

    func applyDarkEffect() -> UIImage? {
        return applyBlurWithRadius(20, tintColor: UIColor(white: 0.11, alpha: 0.73), saturationDeltaFactor: 1.8)
    }

    func applyTintEffectWithColor(_ tintColor: UIColor) -> UIImage? {
        let effectColorAlpha: CGFloat = 0.6
        var effectColor = tintColor
        let componentCount = tintColor.cgColor.numberOfComponents
        if componentCount == 2 {
            var b: CGFloat = 0
            if tintColor.getWhite(&b, alpha: nil) {
                effectColor = UIColor(white: b, alpha: effectColorAlpha)
            }
        } else {
            var red: CGFloat = 0
            var green: CGFloat = 0
            var blue: CGFloat = 0
            if tintColor.getRed(&red, green: &green, blue: &blue, alpha: nil) {
                effectColor = UIColor(red: red, green: green, blue: blue, alpha: effectColorAlpha)
            }
        }
        return applyBlurWithRadius(10, tintColor: effectColor, saturationDeltaFactor: -1.0, maskImage: nil)
    }

    func applyBlurWithRadius(_ blurRadius: CGFloat, tintColor: UIColor?, saturationDeltaFactor: CGFloat, maskImage: UIImage? = nil) -> UIImage? {
        // Check pre-conditions.
        if size.width < 1 || size.height < 1 {
            print("*** error: invalid size: \(size.width) x \(size.height). Both dimensions must be >= 1: \(self)")
            return nil
        }
        guard let cgImage = self.cgImage else {
            print("*** error: image must be backed by a CGImage: \(self)")
            return nil
        }
        if maskImage != nil && maskImage!.cgImage == nil {
            print("*** error: maskImage must be backed by a CGImage: \(String(describing: maskImage))")
            return nil
        }

        let __FLT_EPSILON__ = CGFloat(Float.ulpOfOne)
        let screenScale = UIScreen.main.scale
        let imageRect = CGRect(origin: CGPoint.zero, size: size)
        var effectImage = self

        let hasBlur = blurRadius > __FLT_EPSILON__
        let hasSaturationChange = abs(saturationDeltaFactor - 1.0) > __FLT_EPSILON__

        if hasBlur || hasSaturationChange {
            func createEffectBuffer(_ context: CGContext) -> vImage_Buffer {
                let data = context.data
                let width = vImagePixelCount(context.width)
                let height = vImagePixelCount(context.height)
                let rowBytes = context.bytesPerRow
                return vImage_Buffer(data: data, height: height, width: width, rowBytes: rowBytes)
            }

            UIGraphicsBeginImageContextWithOptions(size, false, screenScale)
            guard let effectInContext = UIGraphicsGetCurrentContext() else { return nil }
            effectInContext.scaleBy(x: 1.0, y: -1.0)
            effectInContext.translateBy(x: 0, y: -size.height)
            effectInContext.draw(cgImage, in: imageRect)
            var effectInBuffer = createEffectBuffer(effectInContext)

            UIGraphicsBeginImageContextWithOptions(size, false, screenScale)
            guard let effectOutContext = UIGraphicsGetCurrentContext() else { return nil }
            var effectOutBuffer = createEffectBuffer(effectOutContext)
            if hasBlur {
                // A description of how to compute the box kernel width from the Gaussian
                // radius (aka standard deviation) appears in the SVG spec:
                // http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement
                //
                // For larger values of 's' (s >= 2.0), an approximation can be used: Three
                // successive box-blurs build a piece-wise quadratic convolution kernel, which
                // approximates the Gaussian kernel to within roughly 3%.
                //
                // let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)
                //
                // ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.
                //
                let inputRadius = blurRadius * screenScale
                let d = floor(inputRadius * 3.0 * CGFloat(sqrt(2 * Double.pi)) / 4 + 0.5)
                var radius = UInt32(d)
                if radius % 2 != 1 {
                    radius += 1 // force radius to be odd so that the three box-blur methodology works.
                }

                let imageEdgeExtendFlags = vImage_Flags(kvImageEdgeExtend)
                vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, nil, 0, 0, radius, radius, nil, imageEdgeExtendFlags)
                vImageBoxConvolve_ARGB8888(&effectOutBuffer, &effectInBuffer, nil, 0, 0, radius, radius, nil, imageEdgeExtendFlags)
                vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, nil, 0, 0, radius, radius, nil, imageEdgeExtendFlags)
            }

            var effectImageBuffersAreSwapped = false

            if hasSaturationChange {
                let s: CGFloat = saturationDeltaFactor
                let floatingPointSaturationMatrix: [CGFloat] = [
                    0.0722 + 0.9278 * s, 0.0722 - 0.0722 * s, 0.0722 - 0.0722 * s, 0,
                    0.7152 - 0.7152 * s, 0.7152 + 0.2848 * s, 0.7152 - 0.7152 * s, 0,
                    0.2126 - 0.2126 * s, 0.2126 - 0.2126 * s, 0.2126 + 0.7873 * s, 0,
                    0,                   0,                   0,                   1
                ]

                let divisor: CGFloat = 256
                let matrixSize = floatingPointSaturationMatrix.count
                var saturationMatrix = [Int16](repeating: 0, count: matrixSize)
                for i in 0 ..< matrixSize {
                    saturationMatrix[i] = Int16(round(floatingPointSaturationMatrix[i] * divisor))
                }

                if hasBlur {
                    vImageMatrixMultiply_ARGB8888(&effectOutBuffer, &effectInBuffer, saturationMatrix, Int32(divisor), nil, nil, vImage_Flags(kvImageNoFlags))
                    effectImageBuffersAreSwapped = true
                } else {
                    vImageMatrixMultiply_ARGB8888(&effectInBuffer, &effectOutBuffer, saturationMatrix, Int32(divisor), nil, nil, vImage_Flags(kvImageNoFlags))
                }
            }

            if !effectImageBuffersAreSwapped {
                effectImage = UIGraphicsGetImageFromCurrentImageContext()!
            }
            UIGraphicsEndImageContext()

            if effectImageBuffersAreSwapped {
                effectImage = UIGraphicsGetImageFromCurrentImageContext()!
            }
            UIGraphicsEndImageContext()
        }

        // Set up output context.
        UIGraphicsBeginImageContextWithOptions(size, false, screenScale)
        guard let outputContext = UIGraphicsGetCurrentContext() else { return nil }
        outputContext.scaleBy(x: 1.0, y: -1.0)
        outputContext.translateBy(x: 0, y: -size.height)

        // Draw base image.
        outputContext.draw(cgImage, in: imageRect)

        // Draw effect image.
        if hasBlur {
            outputContext.saveGState()
            if let maskCGImage = maskImage?.cgImage {
                outputContext.clip(to: imageRect, mask: maskCGImage)
            }
            outputContext.draw(effectImage.cgImage!, in: imageRect)
            outputContext.restoreGState()
        }

        // Add in color tint.
        if let color = tintColor {
            outputContext.saveGState()
            outputContext.setFillColor(color.cgColor)
            outputContext.fill(imageRect)
            outputContext.restoreGState()
        }

        // Output image is ready.
        let outputImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        return outputImage
    }

    func blurImage() -> UIImage? {
        return self.applyBlurWithRadius(20, tintColor: UIColor().withAlphaComponent(0), saturationDeltaFactor: 1.4)
    }
}
Answer (score: 0)
It has been a while, so I figured I would share my solution. I stole the CABackdropLayer out of a UIVisualEffectView to get a live blur. You can initialize a layer of this type yourself, but the class is private. However, since a public view uses that layer and I am only taking it from the view, I do not have to touch the private API in an overly direct way.
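A minimal sketch of that idea follows; it is not my exact code. It finds the backdrop layer inside a UIVisualEffectView by class name and moves it into the layer tree used for rendering. The findBackdropLayer helper, the "CABackdropLayer" name check, and parentLayer are illustrative assumptions, and the private layer hierarchy can change between iOS versions.

import UIKit

// Sketch only: locate the private backdrop layer without referencing its type directly.
// The "CABackdropLayer" class-name check is an assumption about UIKit's private
// layer hierarchy and may change between iOS versions.
func findBackdropLayer(in layer: CALayer) -> CALayer? {
    if String(describing: type(of: layer)) == "CABackdropLayer" {
        return layer
    }
    for sublayer in layer.sublayers ?? [] {
        if let found = findBackdropLayer(in: sublayer) {
            return found
        }
    }
    return nil
}

// Usage: build a public UIVisualEffectView, then pull the backdrop layer out of it.
// The backdrop sublayer may only exist once the view has an effect set (and possibly
// after it has been laid out).
let effectView = UIVisualEffectView(effect: UIBlurEffect(style: .regular))
effectView.frame = CGRect(x: 0, y: 0, width: 400, height: 300)

let parentLayer = CALayer() // stands in for the animation layer handed to
                            // AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer:in:)
if let backdrop = findBackdropLayer(in: effectView.layer) {
    parentLayer.addSublayer(backdrop)
}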