【问题标题】:How do you apply Core Image filters to an onscreen image using Swift/MacOS or iOS and Core Image如何使用 Swift/MacOS 或 iOS 和 Core Image 将 Core Image 过滤器应用于屏幕图像
【发布时间】:2026-01-04 17:50:01
【问题描述】:

照片应用的编辑调整功能可以在用户调整参数的同时，实时显示调整后的画面效果。我找不到任何有关如何实现这一点的示例。所有示例似乎都表明：先通过各种管道应用过滤器，然后获取结果图像，再用结果更新屏幕。请参阅下面的代码。

照片似乎显示了应用于屏幕图像的调整。他们是如何做到这一点的?

/// Applies the adjustment pipeline (exposure → vibrance → unsharp mask → sharpen)
/// to `inputImage` on a background queue, then publishes the rasterized result
/// to `self.cgImage` on the main queue (which presumably drives the UI).
///
/// - Parameter inputImage: The source image to filter.
func editImage(inputImage: CGImage) {
        
        // Heavy filter work happens off the main thread.
        DispatchQueue.global().async {
            
            let beginImage = CIImage(cgImage: inputImage)
            
            // Each filter helper returns nil on failure; bail out silently in
            // that case (best-effort behavior preserved from the original).
            guard let exposureOutput = self.exposureFilter(beginImage, ev: self.brightness) else {
                return
            }
            guard let vibranceOutput = self.vibranceFilter(exposureOutput, amount: self.vibranceAmount) else {
                return
            }
            guard let unsharpMaskOutput = self.unsharpMaskFilter(vibranceOutput, intensity: self.unsharpMaskIntensity, radius: self.unsharpMaskRadius) else {
                return
            }
            // NOTE(review): the sharpen stage reuses `unsharpMaskIntensity` as its
            // sharpness amount — confirm a dedicated sharpness property isn't intended.
            guard let sharpnessOutput = self.sharpenFilter(unsharpMaskOutput, sharpness: self.unsharpMaskIntensity) else {
                return
            }
            
            // FIX: render using the FINAL image's own extent. The original passed
            // `vibranceOutput.extent`, so the rasterized region could mismatch the
            // image actually being rendered once later filters change the extent.
            if let cgimg = self.context.createCGImage(sharpnessOutput, from: sharpnessOutput.extent) {
                
                // UI-facing state must be mutated on the main queue.
                DispatchQueue.main.async {
                    self.cgImage = cgimg
                }
            }
        }
        
    }

【问题讨论】:

  • “他们是如何做到这一点的？”——这里的“这”指的是什么？你的意思是，使产生的效果永久化？
  • 这是实现对显示应用效果的图像的实时更新 - 顺便说一句,我想我找到了答案 - MTKView 并使用 draw() 函数在应用效果时渲染图像。有点让它工作,但图像没有填满整个视图 - 所以我想我必须计算出金属坐标系以及如何缩放图像以适应视图。它从来没有像他们说的那么容易!

标签: ios swift macos core-image


【解决方案1】:

好的,我刚刚找到了答案 - 使用 MTKView,除了让图像正确填充视图之外,它工作正常!

为了其他人的利益,这里是基础知识...我还没有弄清楚如何在视图中正确定位图像 - 但我可以看到实时应用的过滤器!

  // NOTE(review): this snippet is incomplete as posted — the `....` placeholder
  // elides members, and the class's closing brace is not shown. Comments below
  // annotate the visible code only.
  // View controller that renders a CIImage live into an MTKView, re-applying
  // the exposure filter every frame via `draw(in:)`.
  class ViewController: NSViewController, MTKViewDelegate {
        
        ....
      // Setting `cgImage` re-wraps it as a CIImage so `draw(in:)` can filter it.
      @objc dynamic var cgImage: CGImage? {
            didSet {
                if let cgimg = cgImage {
                    
                ciImage = CIImage(cgImage: cgimg)
    
                }
            }
        }
        // The current source image consumed by `draw(in:)` each frame.
        var ciImage: CIImage?    
    
            // Metal resources
            var device: MTLDevice!
            var commandQueue: MTLCommandQueue!
            var sourceTexture: MTLTexture!                         // 2
            let colorSpace = CGColorSpaceCreateDeviceRGB()
            var context: CIContext!
            var textureLoader: MTKTextureLoader!
            
            override func viewDidLoad() {
                super.viewDidLoad()
        
           // Do view setup here.
                let metalView = MTKView()
                
                metalView.translatesAutoresizingMaskIntoConstraints = false
                // NOTE(review): the metal view is added as a subview of `imageView`
                // but constrained to `view`'s anchors below — this mismatch is a
                // likely cause of the "image doesn't fill the view" symptom.
                self.imageView.addSubview(metalView)
               
                NSLayoutConstraint.activate([
                    metalView.bottomAnchor.constraint(equalTo: view.bottomAnchor),
                    metalView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
                    metalView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
                    metalView.topAnchor.constraint(equalTo: view.topAnchor)
                ])
                
                device = MTLCreateSystemDefaultDevice()            
                commandQueue = device.makeCommandQueue()
                
                metalView.delegate = self
                metalView.device = device
                // framebufferOnly must be false so CIContext can write to the drawable's texture.
                metalView.framebufferOnly = false
                
                // NOTE(review): consider CIContext(mtlDevice: device) so Core Image
                // shares the same GPU device instead of creating its own context.
                context = CIContext()
                
                textureLoader = MTKTextureLoader(device: device) 
    
         }
        
           // MTKViewDelegate callback: filters the current image and renders it
           // into the drawable every frame — this is what makes the effect "live".
           public func draw(in view: MTKView) {
            if let ciImage = self.ciImage  {
                if let currentDrawable = view.currentDrawable {              
                    // NOTE(review): makeCommandBuffer() returns an Optional; it is
                    // passed unwrapped to render(_:to:...) below — verify that API
                    // overload accepts an optional buffer, or guard-unwrap it.
                    let commandBuffer = commandQueue.makeCommandBuffer()
                    
                    let inputImage = ciImage     // 2
                    exposureFilter.setValue(inputImage, forKey: kCIInputImageKey)
                    exposureFilter.setValue(ev, forKey: kCIInputEVKey)
                    
                    // NOTE(review): force-unwrapping outputImage will crash if the
                    // filter inputs are invalid; the drawable-sized bounds also do
                    // not account for the image's aspect ratio (fill/position issue).
                    context.render(exposureFilter.outputImage!,                      
                                   to: currentDrawable.texture,
                                   commandBuffer: commandBuffer,
                                   bounds: CGRect(origin: .zero, size: view.drawableSize),
                                   colorSpace: colorSpace)
                    
                    commandBuffer?.present(currentDrawable)                
                    commandBuffer?.commit()
                }
            }
        }

【讨论】: