Camera view rotated 90 degrees in Swift

I am building a custom camera app in Swift, but when I try to access the camera, the view in the camera preview is rotated 90 degrees. I have been trying to find a solution. One fix I found is to add a fixOrientation function to correct the view, but it does not work... Here is my full code:

let CIHueAdjust = "CIHueAdjust" 
let CIHueAdjustFilter = CIFilter(name: "CIHueAdjust", withInputParameters: ["inputAngle" : 1.24]) 

let Filters = [CIHueAdjust: CIHueAdjustFilter] 

let FilterNames = [String](Filters.keys).sort() 

class LiveCamViewController : UIViewController,AVCaptureVideoDataOutputSampleBufferDelegate{ 
let mainGroup = UIStackView() 
let imageView = UIImageView(frame: CGRectZero) 
let filtersControl = UISegmentedControl(items: FilterNames) 

override func viewDidLoad() 
{ 
    super.viewDidLoad() 

    view.addSubview(mainGroup) 
    mainGroup.axis = UILayoutConstraintAxis.Vertical 
    mainGroup.distribution = UIStackViewDistribution.Fill 

    mainGroup.addArrangedSubview(imageView) 
    mainGroup.addArrangedSubview(filtersControl) 

    imageView.contentMode = UIViewContentMode.ScaleAspectFit 

    filtersControl.selectedSegmentIndex = 0 

    let captureSession = AVCaptureSession() 
    captureSession.sessionPreset = AVCaptureSessionPresetPhoto 

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) 

    do 
    { 
     let input = try AVCaptureDeviceInput(device: backCamera) 

     captureSession.addInput(input) 
    } 
    catch 
    { 
     print("can't access camera") 
     return 
    } 

    //get captureOutput invoked 
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
    view.layer.addSublayer(previewLayer) 

    let videoOutput = AVCaptureVideoDataOutput() 

    videoOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)) 
    if captureSession.canAddOutput(videoOutput) 
    { 
     captureSession.addOutput(videoOutput) 
    } 

    captureSession.startRunning() 
} 

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) 
{ 
    guard let filter = Filters[FilterNames[filtersControl.selectedSegmentIndex]] else 
    { 
     return 
    } 

    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) 
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!) 

    filter!.setValue(cameraImage, forKey: kCIInputImageKey) 

    let filteredImage = UIImage(CIImage: filter!.valueForKey(kCIOutputImageKey) as! CIImage!) 
    let fixedImage = fixOrientation(filteredImage) 

    dispatch_async(dispatch_get_main_queue()) 
    { 
     self.imageView.image = fixedImage 
    } 
} 

func fixOrientation(image: UIImage) -> UIImage { 
    if (image.imageOrientation == UIImageOrientation.Up) { 
     return image; 
    } 

    print(image.imageOrientation) 

    var transform = CGAffineTransformIdentity 

    switch (image.imageOrientation) { 
    case .Down, .DownMirrored: 
     transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height) 
     transform = CGAffineTransformRotate(transform, CGFloat(M_PI)) 
     break 
    case .Left, .LeftMirrored: 
     transform = CGAffineTransformTranslate(transform, image.size.width, 0) 
     transform = CGAffineTransformRotate(transform, CGFloat(M_PI_2)) 
     break 
    case .Right, .RightMirrored: 
     transform = CGAffineTransformTranslate(transform, 0, image.size.height) 
     transform = CGAffineTransformRotate(transform, CGFloat(-M_PI_2)) 
     break 
    case .Up, .UpMirrored: 
     break 
    } 

    switch (image.imageOrientation) { 
    case .UpMirrored, .DownMirrored: 
     transform = CGAffineTransformTranslate(transform, image.size.width, 0) 
     transform = CGAffineTransformScale(transform, -1, 1) 
     break 
    case .LeftMirrored, .RightMirrored: 
     transform = CGAffineTransformTranslate(transform, image.size.height, 0) 
     transform = CGAffineTransformScale(transform, -1, 1) 
     break 
    case .Up, .Down, .Left, .Right: 
     break 
    } 

    //Draw the underlying CGImage into a new context, applying the transform 
    let ctx = CGBitmapContextCreate(nil, Int(image.size.width), Int(image.size.height), CGImageGetBitsPerComponent(image.CGImage), 0, CGImageGetColorSpace(image.CGImage), UInt32(CGImageGetBitmapInfo(image.CGImage).rawValue)) 

    CGContextConcatCTM(ctx, transform); 

    switch (image.imageOrientation) { 
    case .Left, .LeftMirrored, .Right, .RightMirrored: 
     CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.height, image.size.width), image.CGImage) 
     break 
    default: 
     CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage) 
     break 
    } 

    let cgimg = CGBitmapContextCreateImage(ctx) 
    let img = UIImage(CGImage:cgimg!) 

    return img 
} 

override func viewDidLayoutSubviews() 
{ 
    mainGroup.frame = CGRect(x: 37, y: 115, width: 301, height: 481) 
} 

}

I set a breakpoint to test it, and the code only seems to run until

if (image.imageOrientation == UIImageOrientation.Up) { 
     return image; 
    } 

and then it returns the same view...

Can anyone help me? Thank you!!!
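
One common way to avoid the rotation entirely is to set the video orientation on the capture connection once the output has been added to the session. A minimal Swift 2 sketch, assuming the videoOutput constant from the viewDidLoad above and a portrait-only UI (the .Portrait value is an assumption):

// Ask the data output's video connection to rotate buffers to portrait,
// so frames no longer arrive turned 90 degrees.
if let connection = videoOutput.connectionWithMediaType(AVMediaTypeVideo) {
    if connection.supportsVideoOrientation {
        connection.videoOrientation = .Portrait
    }
}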

So image.imageOrientation == UIImageOrientation.Up is true; comment out that part if you want to test the logic below. – LoVo

@LoVo Hi, thanks for the reply. I commented that part out, but I still get a few errors, such as CGBitmapContextCreate: unsupported parameter combination, CGContextConcatCTM: invalid context 0x0, CGContextDrawImage: invalid context 0x0, and CGBitmapContextCreateImage: invalid context 0x0. I have no idea what is wrong :( – dididaisy

Could you check the values of 'ctx' and 'transform' right before it crashes? – LoVo
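
The "invalid context 0x0" and "unsupported parameter combination" messages above are what Core Graphics prints when image.CGImage is nil: a UIImage created with UIImage(CIImage:) has no CGImage backing, so CGBitmapContextCreate receives zero bits per component and a nil color space. A minimal Swift 2 sketch of one way around this, rendering the filtered CIImage through a CIContext first (the ciContext constant and the helper name are assumptions, not part of the original code):

// Create the CIContext once; building one per frame is expensive.
let ciContext = CIContext()

// Render a CIImage into a real bitmap so the resulting UIImage
// has a non-nil CGImage that the CGBitmapContext* calls can use.
func cgBackedImage(ciImage: CIImage) -> UIImage {
    let cgImage = ciContext.createCGImage(ciImage, fromRect: ciImage.extent)
    return UIImage(CGImage: cgImage)
}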

Answer

Try this method (from here):

import Darwin 

class func rotateCameraImageToProperOrientation(imageSource : UIImage, maxResolution : CGFloat) -> UIImage { 

let imgRef = imageSource.CGImage; 

let width = CGFloat(CGImageGetWidth(imgRef)); 
let height = CGFloat(CGImageGetHeight(imgRef)); 

var bounds = CGRectMake(0, 0, width, height) 

var scaleRatio : CGFloat = 1 
if (width > maxResolution || height > maxResolution) { 

    scaleRatio = min(maxResolution/bounds.size.width, maxResolution/bounds.size.height) 
    bounds.size.height = bounds.size.height * scaleRatio 
    bounds.size.width = bounds.size.width * scaleRatio 
} 

var transform = CGAffineTransformIdentity 
let orient = imageSource.imageOrientation 
let imageSize = CGSizeMake(CGFloat(CGImageGetWidth(imgRef)), CGFloat(CGImageGetHeight(imgRef))) 


switch(imageSource.imageOrientation) { 
case .Up : 
    transform = CGAffineTransformIdentity 

case .UpMirrored : 
    transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0); 
    transform = CGAffineTransformScale(transform, -1.0, 1.0); 

case .Down : 
    transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height); 
    transform = CGAffineTransformRotate(transform, CGFloat(M_PI)); 

case .DownMirrored : 
    transform = CGAffineTransformMakeTranslation(0.0, imageSize.height); 
    transform = CGAffineTransformScale(transform, 1.0, -1.0); 

case .Left : 
    let storedHeight = bounds.size.height 
    bounds.size.height = bounds.size.width; 
    bounds.size.width = storedHeight; 
    transform = CGAffineTransformMakeTranslation(0.0, imageSize.width); 
    transform = CGAffineTransformRotate(transform, 3.0 * CGFloat(M_PI)/2.0); 

case .LeftMirrored : 
    let storedHeight = bounds.size.height 
    bounds.size.height = bounds.size.width; 
    bounds.size.width = storedHeight; 
    transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width); 
    transform = CGAffineTransformScale(transform, -1.0, 1.0); 
    transform = CGAffineTransformRotate(transform, 3.0 * CGFloat(M_PI)/2.0); 

case .Right : 
    let storedHeight = bounds.size.height 
    bounds.size.height = bounds.size.width; 
    bounds.size.width = storedHeight; 
    transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0); 
    transform = CGAffineTransformRotate(transform, CGFloat(M_PI)/2.0); 

case .RightMirrored : 
    let storedHeight = bounds.size.height 
    bounds.size.height = bounds.size.width; 
    bounds.size.width = storedHeight; 
    transform = CGAffineTransformMakeScale(-1.0, 1.0); 
    transform = CGAffineTransformRotate(transform, CGFloat(M_PI)/2.0); 

default :() 
} 

UIGraphicsBeginImageContext(bounds.size) 
let context = UIGraphicsGetCurrentContext() 

if orient == .Right || orient == .Left { 
    CGContextScaleCTM(context, -scaleRatio, scaleRatio); 
    CGContextTranslateCTM(context, -height, 0); 
} else { 
    CGContextScaleCTM(context, scaleRatio, -scaleRatio); 
    CGContextTranslateCTM(context, 0, -height); 
} 

CGContextConcatCTM(context, transform); 
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef); 

let imageCopy = UIGraphicsGetImageFromCurrentImageContext(); 
UIGraphicsEndImageContext(); 

return imageCopy; 
} 
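
A minimal usage sketch, assuming the method above is moved into LiveCamViewController as an instance method, that filteredImage already has a CGImage backing (see the note before the answer), and an arbitrary maxResolution of 640:

// Normalize the frame before display.
let fixedImage = rotateCameraImageToProperOrientation(filteredImage, maxResolution: 640)

// Push the result to the UI on the main queue, as in the question.
dispatch_async(dispatch_get_main_queue()) {
    self.imageView.image = fixedImage
}
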
Sorry, it still doesn't work for me. Before I added the fixOrientation method, the view was rotated 90 degrees counterclockwise, and it displayed correctly when I turned the phone 90 degrees counterclockwise (i.e. held it horizontally). After adding the method, the view is still rotated 90 degrees counterclockwise no matter which way I turn the phone. – dididaisy

Sorry, my mistake. Actually this method doesn't change anything. – dididaisy

Because your UIImageOrientation.Up is still true... – LoVo
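
This is the crux: a UIImage created straight from a CIImage reports .Up, so any orientation-based fix returns early and changes nothing. A minimal Swift 2 sketch of one workaround, tagging the frame with an explicit orientation when the UIImage is built; the .Right value is an assumption that matches a back camera held in portrait, and cgImage would come from a CIContext render as sketched above:

// The buffer's pixels are landscape; declaring the image as .Right makes
// UIImageView draw it rotated into the expected portrait orientation.
let orientedImage = UIImage(CGImage: cgImage, scale: 1.0, orientation: .Right)

dispatch_async(dispatch_get_main_queue()) {
    self.imageView.image = orientedImage
}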

For anyone who needs @LoVo's code in Swift 3:

func rotateCameraImageToProperOrientation(imageSource : UIImage, maxResolution : CGFloat) -> UIImage { 

    guard let imgRef = imageSource.cgImage else { 
     return imageSource 
    } 

    let width = CGFloat(imgRef.width); 
    let height = CGFloat(imgRef.height); 

    var bounds = CGRect(x: 0, y: 0, width: width, height: height) 

    var scaleRatio : CGFloat = 1 
    if (width > maxResolution || height > maxResolution) { 

     scaleRatio = min(maxResolution/bounds.size.width, maxResolution/bounds.size.height) 
     bounds.size.height = bounds.size.height * scaleRatio 
     bounds.size.width = bounds.size.width * scaleRatio 
    } 

    var transform = CGAffineTransform.identity 
    let orient = imageSource.imageOrientation 
    let imageSize = CGSize(width: CGFloat(imgRef.width), height: CGFloat(imgRef.height)) 


    switch(imageSource.imageOrientation) { 
    case .up : 
     transform = CGAffineTransform.identity 

    case .upMirrored : 
     transform = CGAffineTransform(translationX: imageSize.width, y: 0.0) 
     transform = transform.scaledBy(x: -1.0, y: 1.0) 

    case .down : 
     transform = CGAffineTransform(translationX: imageSize.width, y: imageSize.height) 
     transform = transform.rotated(by: CGFloat(M_PI)) 

    case .downMirrored : 
     transform = CGAffineTransform(translationX: 0.0, y: imageSize.height) 
     transform = transform.scaledBy(x: 1.0, y: -1.0) 

    case .left : 
     let storedHeight = bounds.size.height 
     bounds.size.height = bounds.size.width 
     bounds.size.width = storedHeight 
     transform = CGAffineTransform(translationX: 0.0, y: imageSize.width) 
     transform = transform.rotated(by: 3.0 * CGFloat(M_PI)/2.0) 

    case .leftMirrored : 
     let storedHeight = bounds.size.height 
     bounds.size.height = bounds.size.width 
     bounds.size.width = storedHeight 
     transform = CGAffineTransform(translationX: imageSize.height, y: imageSize.width) 
     transform = transform.scaledBy(x: -1.0, y: 1.0) 
     transform = transform.rotated(by: 3.0 * CGFloat(M_PI)/2.0) 

    case .right : 
     let storedHeight = bounds.size.height 
     bounds.size.height = bounds.size.width 
     bounds.size.width = storedHeight 
     transform = CGAffineTransform(translationX: imageSize.height, y: 0.0) 
     transform = transform.rotated(by: CGFloat(M_PI)/2.0) 

    case .rightMirrored : 
     let storedHeight = bounds.size.height 
     bounds.size.height = bounds.size.width 
     bounds.size.width = storedHeight 
     transform = CGAffineTransform(scaleX: -1.0, y: 1.0) 
     transform = transform.rotated(by: CGFloat(M_PI)/2.0) 
    } 

    UIGraphicsBeginImageContext(bounds.size) 
    guard let context = UIGraphicsGetCurrentContext() else { 
     return imageSource 
    } 

    if orient == .right || orient == .left { 
     context.scaleBy(x: -scaleRatio, y: scaleRatio) 
     context.translateBy(x: -height, y: 0) 
    } else { 
     context.scaleBy(x: scaleRatio, y: -scaleRatio) 
     context.translateBy(x: 0, y: -height) 
    } 

    context.concatenate(transform); 
    context.draw(imgRef, in: CGRect(x: 0, y: 0, width: width, height: height)) 

    guard let imageCopy = UIGraphicsGetImageFromCurrentImageContext() else { 
     return imageSource 
    } 
    UIGraphicsEndImageContext(); 

    return imageCopy; 
}
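
For completeness, a sketch of how the Swift 3 version might be driven from the per-frame callback, mirroring the question's captureOutput; ciContext, imageView, the .right orientation, and the maxResolution value of 640 are assumptions:

// Inside the sample buffer delegate callback:
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

// Render to a CGImage so the UIImage has a real bitmap backing.
guard let cgImage = ciContext.createCGImage(cameraImage, from: cameraImage.extent) else { return }

// Tag the frame as .right (back camera, portrait) and normalize it.
let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
let fixed = rotateCameraImageToProperOrientation(imageSource: uiImage, maxResolution: 640)

DispatchQueue.main.async {
    self.imageView.image = fixed
}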