【问题标题】:AVCaptureMetadataObjectDelegate not receiving callback(AVCaptureMetadataObjectDelegate 未收到回调)
【发布时间】:2019-05-02 09:47:45
【问题描述】:

我正在制作二维码扫描仪。当所有代码都写在ViewController 内的一个地方时,我的代码正在工作,但是当我将它模块化时,我没有在AVCaptureMetadataOutputObjectsDelegate 内得到回调。

import Foundation
import UIKit
import AVFoundation

/// Thin wrapper around an `AVCaptureSession` and its input device, metadata
/// output and preview layer, exposed through Java-style get/set accessors.
/// Instances are assembled step-by-step via the nested `Builder`.
///
/// NOTE(review): `setMetadataObjectsDelegate(_:queue:)` stores its delegate
/// weakly inside AVFoundation — the caller must keep that delegate object
/// alive, otherwise the metadata callback silently stops arriving (this is
/// the root cause of the problem described in the question).
class CameraSource : NSObject {

    // Capture-pipeline pieces; all optionals because the Builder populates
    // them one at a time.
    private var session                     : AVCaptureSession?
    private var inputDevice             : AVCaptureDeviceInput?
    private var videoPreviewLayer   : AVCaptureVideoPreviewLayer?

    private var captureMetadataOutput : AVCaptureMetadataOutput?

    /// Discards any existing metadata output and creates a fresh one.
    func setCaptureMetadataOutput() {
        self.captureMetadataOutput = nil
        self.captureMetadataOutput = AVCaptureMetadataOutput()
    }

    /// Returns the current metadata output, or `nil` if none was created yet.
    func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
        return self.captureMetadataOutput
    }

    /// Stores (or clears, when `nil`) the camera input device.
    func setInputDevice(inputDevice : AVCaptureDeviceInput?) {
        self.inputDevice = inputDevice
    }

    func getInputDevice() -> AVCaptureDeviceInput? {
        return self.inputDevice
    }

    /// Stores (or clears, when `nil`) the capture session.
    func setSession(session : AVCaptureSession?) {
        self.session = session
    }

    func getSession() -> AVCaptureSession? {
        return self.session
    }

    /// Registers `delegate` for metadata callbacks on the main queue and
    /// restricts recognition to `metaObjects` (e.g. `[.qr]`).
    /// Precondition: `setCaptureMetadataOutput()` must have been called.
    /// NOTE(review): the delegate is NOT retained here — keep a strong
    /// reference to it at the call site.
    func setMetadataObjects(metaObjects : [AVMetadataObject.ObjectType], delegate : AVCaptureMetadataOutputObjectsDelegate) {
        assert(self.captureMetadataOutput != nil)
        self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        self.captureMetadataOutput!.metadataObjectTypes = metaObjects
    }

    /// Creates the preview layer for the current session.
    /// ("Viewo" is a typo in the original API name, preserved here so that
    /// existing callers keep compiling.)
    func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) {
        assert(session != nil)

        videoPreviewLayer                                                           = AVCaptureVideoPreviewLayer(session: session!)
        videoPreviewLayer!.videoGravity                                 = videoGravity
        videoPreviewLayer!.connection!.videoOrientation = orientation
    }

    /// Adds the preview layer as a sublayer of `imageView` and sizes it to
    /// the view's current bounds. Precondition: `initViewoPreviewLayer` ran.
    func addVideoLayerToImageView(imageView : UIImageView) {
        assert(self.videoPreviewLayer != nil)

        imageView.layer.addSublayer(self.videoPreviewLayer!)
        self.videoPreviewLayer!.frame = imageView.bounds
    }

    /// Starts capturing. NOTE(review): `startRunning()` blocks the calling
    /// thread until the session starts — presumably invoked on the main
    /// thread here; consider a background queue (verify against callers).
    func startSession() {
        assert(session != nil)
        self.session!.startRunning()
    }


    /*==========================================================================
    STATIC FUNCTIONS
    ==========================================================================*/

    // Force-unwraps: crashes on devices without the requested camera
    // (e.g. Simulator). Callers should check isCameraAvailable() first.
    static func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
    }

    static func getFrontCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
    }

    /// `true` when at least one video-capable capture device exists, using
    /// the iOS 10+ discovery-session API where available.
    static func isCameraAvailable() -> Bool {
        if #available(iOS 10.0, *) {
            let count : Int = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                                                                 mediaType: AVMediaType.video,
                                                                                                                 position: .unspecified).devices.count
            if count > 0 { return true }
        }
        else {
            let count = AVCaptureDevice.devices(for: AVMediaType.video).count
            if count > 0 { return true }
        }
        return false
    }


    /*==========================================================================
    CAMERA BUILDER CLASS
    ==========================================================================*/

    /// Fluent builder that assembles a `CameraSource`:
    /// createSession → setSessionPreset → attachInputDevice →
    /// addOutputToSessionForMetaData (returns the finished CameraSource).
    class Builder {

        var cameraSource : CameraSource

        init() {
            cameraSource = CameraSource()
        }

        /// Installs a fresh `AVCaptureSession`, discarding any previous one.
        func createSession() -> Builder {
            if (cameraSource.getSession() != nil) {
                cameraSource.setSession(session: nil)
            }
            cameraSource.setSession(session: AVCaptureSession())
            return self
        }

        /// Applies a quality preset; must run after `createSession()`.
        func setSessionPreset(preset : AVCaptureSession.Preset) -> Builder {
            assert(cameraSource.getSession() != nil)

            cameraSource.getSession()!.sessionPreset = preset
            return self
        }

        /// Wraps `camera` in an `AVCaptureDeviceInput` and attaches it to
        /// the session. Throws `AppErrorCode.cameraError` on failure.
        func attachInputDevice(camera : AVCaptureDevice) throws -> Builder {

            try self.prepareInputDevice(camera: camera)
            try self.addInputToSession()

            assert(cameraSource.inputDevice != nil)
            return self
        }

        /// Terminal step: attaches a metadata output and returns the
        /// finished `CameraSource`. Throws if the session rejects the output.
        func addOutputToSessionForMetaData() throws -> CameraSource {
            cameraSource.setCaptureMetadataOutput()

            assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)

            if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
                throw AppErrorCode.cameraError("Unable to attach output to camera session")
            }
            cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)

            return self.cameraSource
        }

        /*==========================================================================
        BUILDER PRIVATE FUNCTIONS
        ==========================================================================*/

        /// Creates the device input; converts AVFoundation errors into the
        /// app's own `AppErrorCode.cameraError`.
        private func prepareInputDevice(camera : AVCaptureDevice) throws {
            do {
                let inputDevice = try AVCaptureDeviceInput(device: camera)
                cameraSource.setInputDevice(inputDevice: inputDevice)

            } catch let error as NSError {
                print(error.localizedDescription)
                throw AppErrorCode.cameraError("Unable to attach input to camera session")
            }
        }

        /// Adds the prepared input to the session; throws when no session
        /// exists yet.
        private func addInputToSession() throws {
            if(cameraSource.getSession() == nil) {
                throw AppErrorCode.cameraError("Unable to create camera session")
            }

            assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))

            cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
        }

    }


}

我的二维码扫描代码看起来像

import UIKit
import Foundation
import AVFoundation

/// Receives the decoded payload of a successfully scanned QR code.
///
/// Class-bound (`AnyObject`) so conformers can be referenced weakly,
/// preventing a retain cycle when a view controller owns a `QRScanner`
/// and is also its delegate.
protocol QRScannerDelegate : AnyObject {
    func scannedData(_ scannedString : String)
}

/// Fluent facade over `CameraSource` that configures the capture pipeline
/// for QR scanning and forwards decoded strings to `delegate`.
///
/// NOTE(review): callers must hold a strong reference to this object
/// (e.g. a stored property on the view controller). AVFoundation keeps the
/// metadata-output delegate weakly, so a scanner that is created as a
/// temporary is deallocated immediately and
/// `metadataOutput(_:didOutput:from:)` is never called — exactly the
/// symptom described in this question.
class QRScanner : NSObject {

    private var cameraSource : CameraSource?

    /// Weak to break the cycle "view controller → scanner → delegate"
    /// (the delegate is typically the owning view controller).
    weak var delegate : QRScannerDelegate?

    /// Builds the capture session (back camera, `.photo` preset) and
    /// registers `self` for QR-metadata callbacks.
    /// - Throws: `AppErrorCode.cameraError` when any setup step fails.
    /// - Returns: `self`, for fluent chaining.
    func prepareCamera (delegate : QRScannerDelegate) throws -> QRScanner {
        do {
            self.delegate = delegate
            self.cameraSource = try CameraSource
                .Builder()
                .createSession()
                .setSessionPreset(preset: .photo)
                .attachInputDevice(camera: CameraSource.getBackCamera())
                .addOutputToSessionForMetaData()

            // `self` already conforms to AVCaptureMetadataOutputObjectsDelegate
            // (see the extension in this file); the explicit cast was redundant.
            self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self)

        } catch let err as NSError {
            print(err.localizedDescription)
            self.cameraSource = nil
            throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
        }

        return self
    }

    /// Creates the video preview layer. Must follow `prepareCamera`.
    /// ("Viewo" typo kept — renaming would break existing callers.)
    func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) -> QRScanner{
        assert(cameraSource != nil)

        self.cameraSource!.initViewoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
        return self
    }

    /// Attaches the preview layer to `imageView` so the camera feed is visible.
    func addVideoLayerToImageView(imageView : UIImageView) -> QRScanner{
        assert(cameraSource != nil)

        self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
        return self
    }

    /// Starts the underlying capture session.
    func startSession() {
        assert(cameraSource != nil)
        self.cameraSource!.startSession()
    }
}

// MARK: - AVCaptureMetadataOutputObjectsDelegate

extension QRScanner : AVCaptureMetadataOutputObjectsDelegate {

    /// Invoked by AVFoundation (on the main queue, as configured in
    /// `CameraSource.setMetadataObjects`) whenever metadata objects are
    /// recognized. Forwards the first QR payload to `delegate`.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {

        print("Delegate called")

        // Preserve the original contract: an empty batch is reported to the
        // delegate as the sentinel string "No Data".
        guard let first = metadataObjects.first else {
            self.delegate?.scannedData("No Data")
            return
        }

        // Safe downcast replaces the original force cast (`as!`), which
        // would crash if a non-machine-readable metadata object were ever
        // delivered. Non-QR or payload-less objects are ignored, as before.
        guard let readable = first as? AVMetadataMachineReadableCodeObject,
              readable.type == AVMetadataObject.ObjectType.qr,
              let payload = readable.stringValue else {
            return
        }

        print("Scanner Getting data: \(payload)")
        self.delegate?.scannedData(payload)
    }
}

我已经在我的ViewController 中实现了QRScannerDelegate,但我没有得到任何东西。此外,我甚至没有在AVCaptureMetadataOutputObjectsDelegate 内收到回调。

我尝试将ViewController 实例作为AVCaptureMetadataOutputObjectsDelegate 传递,然后我收到带有扫描信息的回调。

所以我的问题是为什么会这样?

1) 当我将一个普通类(非 UIViewController 的 NSObject 子类)作为AVCaptureMetadataOutputObjectsDelegate 传递时,我没有收到回调。但是:

2) 当我将UIViewController 实例作为AVCaptureMetadataOutputObjectsDelegate 传递时,我可以得到回调。

更新

这就是我从View Controller 调用 prepareCamera 的方式

// NOTE(review): `QRScanner()` is created as a temporary here — nothing
// stores it, so ARC is free to deallocate it as soon as this statement
// finishes. Since AVFoundation holds the metadata-output delegate weakly,
// a deallocated scanner never receives the scan callback. Keep the scanner
// in a stored property instead (see the accepted answer below).
override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try QRScanner().prepareCamera(delegate: self)
                    .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

【问题讨论】:

    标签: swift avfoundation avcapturesession avcapture avcaptureoutput


    【解决方案1】:

    如果不知道您是如何调用 prepareCamera 的,很难确定(因为正是这一调用触发了 setMetadataObjectsDelegate),但在我看来,您可能没有在 ViewController 中对 QRScanner 保持强引用(即没有把它实例化并保存在一个实例变量中)。这也可以解释为什么当您的 ViewController 充当 AVCaptureMetadataOutputObjectsDelegate 时回调能够正常触发——因为 ViewController 本身一直保留在内存中。

    还值得注意的是,如果 ViewController 是您的 QRScannerDelegate,您需要将委托定义为 weak var delegate : QRScannerDelegate? 以防止内存泄漏。

    编辑: 改变

    // Problematic version: the QRScanner is a temporary that nothing
    // retains, so it can be deallocated before any scan callback fires.
    override func viewDidAppear(_ animated: Bool) {
            super.viewDidAppear(animated)
            do {
    
                try QRScanner().prepareCamera(delegate: self)
                        .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                        .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                        .startSession()
    
            } catch {
    
                print("Some Camera Error")
    
            }
            self.createOverlay()
        }
    
    

    // Fixed version: storing the scanner in an instance property keeps it
    // alive for the view controller's lifetime, so the weakly-held metadata
    // delegate remains valid and the callback is delivered.
    var qrScanner = QRScanner()
    override func viewDidAppear(_ animated: Bool) {
            super.viewDidAppear(animated)
            do {
    
                try self.qrScanner.prepareCamera(delegate: self)
                        .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                        .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                        .startSession()
    
            } catch {
    
                print("Some Camera Error")
    
            }
            self.createOverlay()
        }
    
    

    改变

    // Before: with no class constraint the protocol may also be adopted by
    // value types, so declaring a `weak var` of this type will not compile.
    protocol QRScannerDelegate {
        func scannedData(_ scannedString : String)
    }
    

    // After: the `class` constraint (spelled `AnyObject` in current Swift)
    // restricts conformance to reference types, making `weak var` legal.
    protocol QRScannerDelegate: class {
        func scannedData(_ scannedString : String)
    }
    

    允许弱委托

    AVCaptureMetadataOutputObjectsDelegate 很难,但你可以用它做一些非常酷的事情!所以坚持下去。

    我提取了我不久前编写的一些 QRScanner 代码,如果您想查看它,请将其放入要点中。它比您拥有的要精简一些,但您可能会发现它很有帮助。 https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2

    【讨论】:

    • 我已经用我如何调用准备相机更新了我的问题。你能给我更多的见解吗?
    • 是的,我使用 ViewController 作为 QRScannerDelegate。我尝试将变量保留为弱引用,但 xcode 显示错误。所以我不得不删除它。
    • 你能告诉我为什么我们必须在协议中添加“类”来引用它作为弱变量,它有什么帮助?
    • 如果您的协议中不包含class,则该协议可以通过值类型或引用类型来实现。从本质上讲,值类型不可能有弱引用。在协议中添加class 会告诉编译器只有引用类型才能实现协议,从而使弱定义合法。由于您的视图控制器拥有委托,并且委托具有对视图控制器的引用,如果委托对视图控制器的引用不弱,则会发生内存泄漏。
    • 这里有更多关于值与引用类型的信息developer.apple.com/swift/blog/?id=10
    猜你喜欢
    • 1970-01-01
    • 1970-01-01
    • 2016-04-08
    • 2018-10-07
    • 2018-01-02
    • 1970-01-01
    • 1970-01-01
    • 1970-01-01
    • 1970-01-01
    相关资源
    最近更新 更多