Ich versuche, eine App zu erstellen, die Video von einem iPhone nehmen und es über Apple Multipeer-Schnittstelle an ein iPad senden wird.Swift Multipeer Video App liest keine Daten
//
// ViewController.swift
// Multipeer Video
//
//
import UIKit
import MultipeerConnectivity
import CoreMotion
import SceneKit
import AVFoundation
import AVKit
import MediaPlayer
class ViewController: UIViewController, MCNearbyServiceAdvertiserDelegate, MCNearbyServiceBrowserDelegate, MCSessionDelegate, NSStreamDelegate, AVCaptureVideoDataOutputSampleBufferDelegate
{
//Video
let transferredSession = AVCaptureSession()
let captureSession = AVCaptureSession()
var transferredLayer : AVCaptureVideoPreviewLayer?
var previewLayer : AVCaptureVideoPreviewLayer?
var captureDevice : AVCaptureDevice?
var videoDeviceOutput: AVCaptureVideoDataOutput!
var sessionQueue: dispatch_queue_t!
var data = NSData()
var movieplayer = MPMoviePlayerController()
//MultiPeer
let label = UILabel()
var displayLink: CADisplayLink?
let serviceType = "motion-control"
let peerID = MCPeerID(displayName: UIDevice.currentDevice().name)
var serviceAdvertiser : MCNearbyServiceAdvertiser!
var serviceBrowser : MCNearbyServiceBrowser!
lazy var session : MCSession =
{
let session = MCSession(peer: self.peerID, securityIdentity: nil, encryptionPreference: MCEncryptionPreference.Required)
session.delegate = self
return session
}()
override func viewDidLoad()
{
super.viewDidLoad()
//Video
captureSession.sessionPreset = AVCaptureSessionPresetHigh
let devices = AVCaptureDevice.devices()
// Loop through all the capture devices on this phone
for device in devices {
// Make sure this particular device supports video
if (device.hasMediaType(AVMediaTypeVideo)) {
// Finally check the position and confirm we've got the back camera
if(device.position == AVCaptureDevicePosition.Back) {
captureDevice = device as? AVCaptureDevice
if captureDevice != nil {
print("Capture device found")
}
}
}
}
label.textAlignment = NSTextAlignment.Center
view.addSubview(label)
if UIDevice.currentDevice().userInterfaceIdiom == UIUserInterfaceIdiom.Pad
{
label.text = "iPad"
view.backgroundColor = UIColor.blackColor()
label.textColor = UIColor.whiteColor()
initialiseAdvertising()
//need to setup recieving video
}
else
{
label.text = "iPhone"
initialiseBrowsing()
beginVideoSession()
}
}
func beginVideoSession() {
configureDevice()
do {
//try captureSession.addInput(input: captureDevice)
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
updateDeviceSettings(0.0, isoValue: 0.0)
} catch {
//error message etc.
print("Capture device not initialisable")
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.view.layer.addSublayer(previewLayer!)
previewLayer?.frame = self.view.layer.frame
self.view.layer.insertSublayer(previewLayer!, atIndex: 0)
captureSession.startRunning()
}
func configureDevice() {
if let device = captureDevice {
do {
try device.lockForConfiguration()
device.focusMode = .Locked
device.unlockForConfiguration()
} catch {
//error message etc.
print("Capture device not configurable")
}
}
}
// set ISO
func updateDeviceSettings(focusValue : Float, isoValue : Float) {
if let device = captureDevice {
do {
try device.lockForConfiguration()
device.setFocusModeLockedWithLensPosition(focusValue, completionHandler: { (time) -> Void in
})
let minISO = device.activeFormat.minISO
let maxISO = device.activeFormat.maxISO
let clampedISO = isoValue * (maxISO - minISO) + minISO
device.setExposureModeCustomWithDuration(AVCaptureExposureDurationCurrent, ISO: clampedISO, completionHandler: { (time) -> Void in
//
})
device.unlockForConfiguration()
} catch {
print("Can't update device settings")
}
}
}
// MARK: MCNearbyServiceBrowserDelegate (iPhone is browser)
var streamTargetPeer: MCPeerID?
var outputStream: NSOutputStream?
func initialiseBrowsing()
{
serviceBrowser = MCNearbyServiceBrowser(peer: peerID, serviceType: serviceType)
serviceBrowser.delegate = self
serviceBrowser.startBrowsingForPeers()
}
func browser(browser: MCNearbyServiceBrowser, foundPeer peerID: MCPeerID, withDiscoveryInfo info: [String : String]?)
{
print("Found Peer! \(peerID)")
streamTargetPeer = peerID
browser.invitePeer(peerID, toSession: session, withContext: nil, timeout: 120)
displayLink = CADisplayLink(target: self, selector: #selector(ViewController.step))
displayLink?.addToRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
print("displayLink")
}
//Initializes SampleBufferDelegate and videoDeviceOutput
func addVideoOutput() {
videoDeviceOutput = AVCaptureVideoDataOutput()
videoDeviceOutput.alwaysDiscardsLateVideoFrames = true
self.sessionQueue = dispatch_queue_create("Camera Session", DISPATCH_QUEUE_SERIAL)
videoDeviceOutput.setSampleBufferDelegate(self, queue: sessionQueue)
if captureSession.canAddOutput(videoDeviceOutput) {
captureSession.addOutput(videoDeviceOutput)
}
}
//Grabbing frames from camera
func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
print("frame received")
let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
CVPixelBufferLockBaseAddress(imageBuffer, 0)
let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
let height = CVPixelBufferGetHeight(imageBuffer)
let src_buff = CVPixelBufferGetBaseAddress(imageBuffer)
data = NSData(bytes: src_buff, length: bytesPerRow * height)
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
func startStream()
{
guard let streamTargetPeer = streamTargetPeer where outputStream == nil else
{
return
}
do
{
print("stream started")
outputStream = try session.startStreamWithName("MotionControlStream", toPeer: streamTargetPeer)
outputStream?.scheduleInRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
outputStream?.open()
}
catch
{
print("unable to start stream!! \(error)")
}
}
func step()
{
startStream()
print("step")
guard let outputStream = outputStream else
{
print("no stream")
return
}
if outputStream.hasSpaceAvailable
{
print("writing to output")
outputStream.write(UnsafePointer<UInt8>(data.bytes), maxLength: data.length)
}
else
{
print("no space availale")
}
}
func browser(browser: MCNearbyServiceBrowser, lostPeer peerID: MCPeerID)
{
label.text = "Lost Peer!"
}
// MARK: MCNearbyServiceAdvertiserDelegate (iPad is advertiser)
func initialiseAdvertising()
{
serviceAdvertiser = MCNearbyServiceAdvertiser(peer: peerID, discoveryInfo: nil, serviceType: serviceType)
serviceAdvertiser.delegate = self
serviceAdvertiser.startAdvertisingPeer()
}
// MARK: MCSessionDelegate
func session(session: MCSession, peer peerID: MCPeerID, didChangeState state: MCSessionState)
{
let stateName:String
switch state
{
case MCSessionState.Connected:
stateName = "connected"
case MCSessionState.Connecting:
stateName = "connecting"
case MCSessionState.NotConnected:
stateName = "not connected"
}
let deviceName:String
switch UIDevice.currentDevice().userInterfaceIdiom
{
case UIUserInterfaceIdiom.Pad:
deviceName = "iPad"
case UIUserInterfaceIdiom.Phone:
deviceName = "iPhone"
default:
deviceName = "Unspecified"
}
dispatch_async(dispatch_get_main_queue())
{
self.label.text = "\(deviceName) didChangeState: \(stateName)"
}
}
func session(_: MCSession, didReceiveStream stream: NSInputStream, withName streamName: String, fromPeer peerID: MCPeerID)
{
print("did recieve")
stream.scheduleInRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
stream.delegate = self
stream.open()
}
func stream(stream: NSStream, handleEvent eventCode: NSStreamEvent)
{
print(eventCode)
if eventCode == NSStreamEvent.EndEncountered{
print("end")
}
print("stream started")
if let inputStream = stream as? NSInputStream //where eventCode == NSStreamEvent.HasBytesAvailable
{
print("Does this byte?")
/* let bufferSize = 1024
var buffer = [UInt8](count: bufferSize, repeatedValue: 0)
let bytesRead = inputStream.read(&buffer, maxLength: bufferSize)
print(bytesRead)
if bytesRead >= 0 {
let output = NSString(bytes: &buffer, length: bytesRead, encoding: NSUTF8StringEncoding)
print(output)
}
*/
var bytes = [UInt8](count:12, repeatedValue: 0)
inputStream.read(&bytes, maxLength: data.length)
let dataIn: NSData = "Stream".dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: true)!
let stream: NSInputStream = NSInputStream(data: dataIn)
var buffer = [UInt8](count: 8, repeatedValue: 0)
stream.open()
if stream.hasBytesAvailable {
print("stream has bytes!")
let result: Int = stream.read(&buffer, maxLength: buffer.count)
print("result: \(result)")
let dataString: NSString = NSString(data: dataIn, encoding: NSUTF8StringEncoding)!
print(dataString)
let movieURL = NSURL.init(string: dataString as String)
print(movieURL)
} else {
print("stream has no bytes")
}
dispatch_async(dispatch_get_main_queue())
{
}
//*/
}
if let outstream = stream as? NSOutputStream {
print("This is output")
print(outstream)
}
}
func session(session: MCSession, didFinishReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, atURL localURL: NSURL, withError error: NSError?)
{
}
func session(session: MCSession, didStartReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, withProgress progress: NSProgress)
{
}
// MARK: Layout
func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID)
{
}
override func viewDidLayoutSubviews()
{
if UIDevice.currentDevice().userInterfaceIdiom == UIUserInterfaceIdiom.Pad
{
label.frame = CGRect(x: 0, y: topLayoutGuide.length, width: view.frame.width, height: label.intrinsicContentSize().height)
}
else
{
label.frame = view.bounds
}
}
func advertiser(advertiser: MCNearbyServiceAdvertiser, didReceiveInvitationFromPeer peerID: MCPeerID, withContext context: NSData?, invitationHandler: (Bool, MCSession) -> Void)
{
invitationHandler(true, self.session)
}
}
Es scheint, als ob ich in der Lage bin Daten mit dem iPhone richtig zu senden, aber ich habe nicht in der Lage gewesen, um herauszufinden, was mit dem iPad ich falsch mache. Ich nehme an, es hat mit der Laufschleife zu tun, aber während der Fehlersuche habe ich bemerkt, dass das iPad einen Ende-Ereigniscode erhält.
Vielen Dank für Ihre Hilfe!