
Cannot set AVCaptureVideoDataOutputSampleBufferDelegate to self

I am new to Swift and I have been trying to build a real-time video application with OpenCV. I am using Swift, iOS 9 and Xcode 7.

I am having trouble capturing the frames, and I put together the following code from several tutorials.

  1. In the following code I keep getting an error:

    func setupCameraSession() { 
        let devices = AVCaptureDevice.devices() 
        var captureDevice:AVCaptureDevice? 
    
        do { 
         if cameraType == CameraType.Front { 
          for device in devices { 
           if device.position == AVCaptureDevicePosition.Front { 
            captureDevice = device as? AVCaptureDevice 
            break 
           } 
          } 
         } 
         else { 
          captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) as AVCaptureDevice 
         } 
    
         let deviceInput = try AVCaptureDeviceInput(device: captureDevice) 
    
         cameraSession.beginConfiguration() 
    
         if (cameraSession.canAddInput(deviceInput) == true) { 
          cameraSession.addInput(deviceInput) 
         } 
    
         let dataOutput = AVCaptureVideoDataOutput() 
         dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(unsignedInt: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] 
         dataOutput.alwaysDiscardsLateVideoFrames = true 
         dataOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL)) 
    
         if (cameraSession.canAddOutput(dataOutput) == true) { 
          cameraSession.addOutput(dataOutput) 
         } 
    
         cameraSession.commitConfiguration() 
    
        } 
        catch let error as NSError { 
         NSLog("\(error), \(error.localizedDescription)") 
        } 
    } 
    

with the following error:

FirstViewController.swift:137:48: 
Cannot convert value of type 'FirstViewController' to expected argument 
type 'AVCaptureVideoDataOutputSampleBufferDelegate!' 

The error is caused by passing 'self' to the setSampleBufferDelegate function.

I believe this is the key to capturing each frame, but I am not exactly sure what it does.

  2. I would also like to know how I should use the following functions to capture the frames and process them as a UIImage:

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
        // Here you collect each frame and process it 
        print("frame received") 
    } 
    
    func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
        // Here you can count how many frames are dropped 
        print("frame dropped") 
    } 
    

When and where are these functions called, and how should I convert each captured frame into a UIImage?
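For reference, the kind of conversion I have seen suggested elsewhere looks roughly like the sketch below (the helper name imageFromSampleBuffer is only illustrative and not part of my project; it goes through Core Image and assumes the buffer's pixel format can be wrapped in a CIImage). The file already imports UIKit and AVFoundation, which should be enough for these types:

    // Illustrative helper, not from the original project: convert a CMSampleBuffer to a UIImage via Core Image. 
    func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? { 
        // Grab the pixel buffer that backs this video frame 
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil } 
    
        // Wrap it in a CIImage and render it to a CGImage 
        let ciImage = CIImage(CVPixelBuffer: pixelBuffer) 
        let context = CIContext(options: nil) 
        let rect = CGRect(x: 0, y: 0, 
                          width: CGFloat(CVPixelBufferGetWidth(pixelBuffer)), 
                          height: CGFloat(CVPixelBufferGetHeight(pixelBuffer))) 
        let cgImage = context.createCGImage(ciImage, fromRect: rect) 
    
        return UIImage(CGImage: cgImage) 
    } 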

Below is the entire code of the FirstViewController class so far:

// 
// FirstViewController.swift 
// nVisoDemoApp 
// 
// Created by Timothy Llewellynn on 30/06/16. 
// Copyright © 2016 Timothy Llewellynn. All rights reserved. 
// 

import UIKit 
import AVFoundation 

class FirstViewController: UIViewController, UITabBarControllerDelegate { 

    @IBOutlet weak var OpenCVVersion: UILabel! 
    @IBOutlet weak var OpenCVDisplay: UIImageView! 

    @IBOutlet weak var SadnessValue: UILabel! 
    @IBOutlet weak var NeutralValue: UILabel! 
    @IBOutlet weak var DisgustValue: UILabel! 
    @IBOutlet weak var AngerValue: UILabel! 
    @IBOutlet weak var SurpriseValue: UILabel! 
    @IBOutlet weak var FearValue: UILabel! 
    @IBOutlet weak var HappinessValue: UILabel! 

    enum CameraType { 
     case Front 
     case Back 
    } 

    var cameraType = CameraType.Front 

    override func viewDidLoad() { 
     super.viewDidLoad() 
     // Do any additional setup after loading the view, typically from a nib. 
//  SadnessValue.text = "[Value]" 
//  NeutralValue.text = "[Value]" 
//  DisgustValue.text = "[Value]" 
//  AngerValue.text = "[Value]" 
//  SurpriseValue.text = "[Value]" 
//  FearValue.text = "[Value]" 
//  HappinessValue.text = "[Value]" 

//  OpenCVDisplay.image = 

     self.view.sendSubviewToBack(OpenCVDisplay) 
     setupCameraSession() 
     OpenCVVersion.text = CVWrapper.versionOpenCV() 
     OpenCVDisplay.layer.addSublayer(previewLayer) 
     cameraSession.startRunning() 

     let leftSwipe = UISwipeGestureRecognizer(target: self, action: Selector("handleSwipes:")) 
     leftSwipe.direction = .Left 
     view.addGestureRecognizer(leftSwipe) 
    } 

    func handleSwipes(sender:UISwipeGestureRecognizer) { 
     if (sender.direction == .Left) { 
      let selectedIndex: Int = self.tabBarController!.selectedIndex 
      self.tabBarController!.selectedIndex = selectedIndex + 1 
     } 

     if (sender.direction == .Right) { 

     } 
    } 

    override func viewDidAppear(animated: Bool) { 
     super.viewDidAppear(animated) 

     self.view.sendSubviewToBack(OpenCVDisplay) 
     setupCameraSession() 
     OpenCVVersion.text = CVWrapper.versionOpenCV() 
     OpenCVDisplay.layer.addSublayer(previewLayer) 
     cameraSession.startRunning() 
    } 

    override func viewWillDisappear(animated: Bool) { 
     super.viewWillDisappear(animated) 

     cameraSession.stopRunning() 
     previewLayer.removeFromSuperlayer() 

     let currentCameraInput: AVCaptureInput = cameraSession.inputs[0] as! AVCaptureInput 
     cameraSession.removeInput(currentCameraInput) 
    } 

    override func didReceiveMemoryWarning() { 
     super.didReceiveMemoryWarning() 
     // Dispose of any resources that can be recreated. 
    } 

    lazy var cameraSession: AVCaptureSession = { 
     let s = AVCaptureSession() 
     s.sessionPreset = AVCaptureSessionPresetHigh 
     return s 
    }() 

    lazy var previewLayer: AVCaptureVideoPreviewLayer = { 
     let preview = AVCaptureVideoPreviewLayer(session: self.cameraSession) 
     preview.bounds = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height) 
     preview.position = CGPoint(x: CGRectGetMidX(self.view.bounds), y: CGRectGetMidY(self.view.bounds)) 
     preview.videoGravity = AVLayerVideoGravityResize 
     return preview 
    }() 

    func setupCameraSession() { 
     let devices = AVCaptureDevice.devices() 
     var captureDevice:AVCaptureDevice? 

     do { 
      if cameraType == CameraType.Front { 
       for device in devices { 
        if device.position == AVCaptureDevicePosition.Front { 
         captureDevice = device as? AVCaptureDevice 
         break 
        } 
       } 
      } 
      else { 
       captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) as AVCaptureDevice 
      } 

      let deviceInput = try AVCaptureDeviceInput(device: captureDevice) 

      cameraSession.beginConfiguration() 

      if (cameraSession.canAddInput(deviceInput) == true) { 
       cameraSession.addInput(deviceInput) 
      } 

      let dataOutput = AVCaptureVideoDataOutput() 
      dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(unsignedInt: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] 
      dataOutput.alwaysDiscardsLateVideoFrames = true 

//   let queue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL) 
//   dataOutput.setSampleBufferDelegate(self, queue: queue) 
      dataOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL)) 
      if (cameraSession.canAddOutput(dataOutput) == true) { 
       cameraSession.addOutput(dataOutput) 
      } 
      // FirstViewController.swift:137:48: Cannot convert value of type 'FirstViewController' to expected argument type 'AVCaptureVideoDataOutputSampleBufferDelegate!' (reported on the setSampleBufferDelegate(self, ...) call above) 
      cameraSession.commitConfiguration() 

     } 
     catch let error as NSError { 
      NSLog("\(error), \(error.localizedDescription)") 
     } 
    } 

// func capturePicture(){ 
//   
//  print("Capturing image") 
//  var stillImageOutput = AVCaptureStillImageOutput() 
//  stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG] 
//  cameraSession.addOutput(stillImageOutput) 
//   
//  if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo){ 
//   stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { 
//    (sampleBuffer, error) in 
//    var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) 
//    var dataProvider = CGDataProviderCreateWithCFData(imageData) 
//    var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault) 
////    var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right) 
//     
////    var imageView = UIImageView(image: image) 
////    imageView.frame = CGRect(x:0, y:0, width:self.screenSize.width, height:self.screenSize.height) 
////     
////    //Show the captured image to 
////    self.view.addSubview(imageView) 
////     
////    //Save the captured preview to image 
////    UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil) 
//     
//   }) 
//  } 
// } 

    @IBAction func SwitchCameraAction(sender: UIButton) { 
     cameraType = cameraType == CameraType.Back ? CameraType.Front : CameraType.Back 
     cameraSession.stopRunning() 
     previewLayer.removeFromSuperlayer() 

     let currentCameraInput: AVCaptureInput = cameraSession.inputs[0] as! AVCaptureInput 
     cameraSession.removeInput(currentCameraInput) 

     setupCameraSession() 
     OpenCVDisplay.layer.addSublayer(previewLayer) 
     cameraSession.startRunning() 
    } 

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
     // Here you collect each frame and process it 
     print("frame received") 
    } 

    func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
     // Here you can count how many frames are dropped 
     print("frame dropped") 
    } 
} 

Any insights?

Answer

Simply change

    class FirstViewController: UIViewController, UITabBarControllerDelegate 

to

    class FirstViewController: UIViewController, UITabBarControllerDelegate, AVCaptureVideoDataOutputSampleBufferDelegate 
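Once the class declares that conformance, setSampleBufferDelegate(self, queue:) compiles, and captureOutput(_:didOutputSampleBuffer:fromConnection:) is then called on the "cameraQueue" serial queue for every frame. A minimal sketch of using it from there (assuming a helper like the imageFromSampleBuffer sketch in the question above; UIKit must only be touched on the main queue):

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
        // Called on "cameraQueue" for every captured frame 
        guard let image = imageFromSampleBuffer(sampleBuffer) else { return } 
    
        // UIKit is not thread-safe, so hop back to the main queue before updating the image view 
        dispatch_async(dispatch_get_main_queue()) { 
            self.OpenCVDisplay.image = image 
        } 
    } 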