【问题标题】:How to run tflite model with ARKit session captured image?（如何使用 ARKit 会话捕获的图像运行 tflite 模型？）
【发布时间】:2020-10-26 06:21:19
【问题描述】:

我有一个 tflite 模型，我想使用 ARKit 会话捕获的图像来运行该模型。运行时报错提示源像素格式无效（invalid source pixel format）。同样的模型用 AVCapture 会话捕获的帧可以正常运行：

// Not working with ARKit frame
/// ARKit per-frame callback: runs TFLite inference on the captured camera image.
/// Skips the frame unless no inference is already in flight (`currentBuffer == nil`)
/// and camera tracking is stable.
func session(_ session: ARSession, didUpdate frame: ARFrame) {
    guard currentBuffer == nil, case .normal = frame.camera.trackingState else { return }
    // Bug fix: the guard above guarantees `currentBuffer` is nil, so passing it
    // to runModel ran inference on nothing. The pixel data for this frame lives
    // in `frame.capturedImage` (a CVPixelBuffer).
    runModel(with : frame.capturedImage)
}

// Working fine with AVCapture session

/// AVCapture per-frame callback: forwards the sample buffer's image data to the model.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // Extract the CVPixelBuffer backing this sample and hand it to inference.
    runModel(with : CMSampleBufferGetImageBuffer(sampleBuffer))
}

【问题讨论】:

    标签: ios swift arkit tensorflow-lite cvpixelbuffer


    【解决方案1】:
    // Reused for every frame: creating a CIContext per render is expensive.
    let context = CIContext()

    /// ARKit per-frame callback: converts the captured camera image into the
    /// model's expected input (upright, 256x256 CVPixelBuffer) and runs inference.
    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        // Skip frames while a previous buffer is still being processed
        // or while camera tracking is not stable.
        guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
            return
        }

        // ARKit delivers the camera image rotated; orient it upright first.
        let upright = CIImage(cvPixelBuffer: frame.capturedImage).oriented(.right)
        // Scale/crop to the model's fixed 256x256 input size.
        let scaled = upright.resize(size: CGSize(width: 256, height: 256))
        // Materialize the CIImage into a CVPixelBuffer TFLite can consume.
        guard let modelInput = scaled.toPixelBuffer(context: context) else {
            return
        }

        runModel(with : modelInput)
    }
    
    
    
    
    
    
    extension CIImage {

        /// Uniformly scales the image so its shorter side matches the shorter
        /// side of `size`, then center-crops to exactly `size`.
        /// - Parameter size: Target output size in pixels.
        /// - Returns: An image of extent (0, 0, size.width, size.height).
        func resize(size : CGSize) -> CIImage {
            // Scale factor that makes the smaller dimension fill the target.
            let scale = min(size.width, size.height) / min(self.extent.size.width, self.extent.size.height)
            let resizedImage = self.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
            let width = resizedImage.extent.size.width
            let height = resizedImage.extent.size.height
            // Offsets of the centered target rect inside the scaled image.
            let xOffset = (width - size.width) / 2.0
            let yOffset = (height - size.height) / 2.0
            let rect = CGRect(x: xOffset, y: yOffset, width: size.width, height: size.height)
            // Bug fix: the original did `.clamped(to: rect)` followed by
            // `.cropped(to: CGRect(x: 0, y: 0, ...))`, which keeps the
            // bottom-left corner region (padded with edge-extended pixels)
            // instead of the centered crop. Crop the centered rect and
            // translate it back so the extent origin is (0, 0) as callers
            // (toPixelBuffer / the render bounds) expect.
            return resizedImage
                .cropped(to: rect)
                .transformed(by: CGAffineTransform(translationX: -xOffset, y: -yOffset))
        }

        /// Renders the image into a newly allocated 32BGRA `CVPixelBuffer`.
        /// - Parameters:
        ///   - context: Reusable `CIContext` that performs the render.
        ///   - inSize: Unused — the buffer is always sized to `self.extent`.
        ///     NOTE(review): kept only for source compatibility; confirm
        ///     whether any caller expects it to control the output size.
        ///   - gray: When true (default), renders with a device-gray color
        ///     space, producing grayscale content inside the BGRA buffer.
        /// - Returns: The rendered pixel buffer, or nil if allocation fails.
        func toPixelBuffer(context : CIContext, size inSize:CGSize? = nil, gray : Bool = true) -> CVPixelBuffer? {
            let attributes = [
                kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue,
                kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue ] as CFDictionary
            var nullablePixelBuffer : CVPixelBuffer? = nil
            // Allocate a BGRA buffer matching the image's extent.
            let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(self.extent.size.width), Int(self.extent.size.height), kCVPixelFormatType_32BGRA, attributes, &nullablePixelBuffer)
            guard status == kCVReturnSuccess, let pixelBuffer = nullablePixelBuffer else {
                return nil
            }
            // Lock while Core Image writes into the buffer's base address.
            CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
            context.render(self, to: pixelBuffer, bounds: CGRect(x: 0, y: 0, width: self.extent.size.width, height: self.extent.size.height), colorSpace: gray ? CGColorSpaceCreateDeviceGray() : self.colorSpace)
            CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
            return pixelBuffer
        }
    }
    

    【讨论】:

      猜你喜欢
      • 1970-01-01
      • 1970-01-01
      • 1970-01-01
      • 2022-09-27
      • 1970-01-01
      • 1970-01-01
      • 2015-07-07
      • 2020-03-01
      • 1970-01-01
      相关资源
      最近更新 更多