Unkomprimiertes UIImage aus AVCaptureStillImageOutput erhalten — das habe ich bisher für die Konfiguration der Kamera versucht:
// Session setup: InputPriority preset so the manually selected
// activeFormat on the device is not overridden by a session preset.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session setSessionPreset:AVCaptureSessionPresetInputPriority];

AVCaptureDevice *videoDevice = [AVCamViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];

// Look for the full-resolution 2592x1936 (5 MP) sensor format.
NSError *errorVideo = nil;
AVCaptureDeviceFormat *deviceFormat = nil;
for (AVCaptureDeviceFormat *format in videoDevice.formats) {
    CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    if (dim.width == 2592 && dim.height == 1936) {
        deviceFormat = format;
        break;
    }
}

// BUG FIX: check the return value of -lockForConfiguration: — the device
// must not be configured when the lock could not be acquired.
if ([videoDevice lockForConfiguration:&errorVideo]) {
    if (deviceFormat) {
        videoDevice.activeFormat = deviceFormat;
        if ([videoDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
            [videoDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
        }
        if ([videoDevice isAutoFocusRangeRestrictionSupported]) {
            [videoDevice setAutoFocusRangeRestriction:AVCaptureAutoFocusRangeRestrictionFar];
        }
    }
    [videoDevice unlockForConfiguration];
} else {
    NSLog(@"Could not lock device for configuration: %@", errorVideo);
}

// BUG FIX: the original passed &error here, but the variable declared above
// is errorVideo — `error` was never declared in this scope.
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&errorVideo];
if (videoDeviceInput && [session canAddInput:videoDeviceInput]) {
    [session addInput:videoDeviceInput];
} else {
    NSLog(@"Could not create/add video input: %@", errorVideo);
}

AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([session canAddOutput:stillImageOutput]) {
    // Uncompressed BGRA output so CMSampleBufferGetImageBuffer() yields a
    // CVPixelBuffer (with a compressed codec such as JPEG it would be NULL).
    [stillImageOutput setOutputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)}];
    [session addOutput:stillImageOutput];
}
So habe ich versucht, das UIImage aus dem CMSampleBuffer zu erhalten:
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if (imageDataSampleBuffer && !error) {
        // BUG FIX: the sample buffer is only guaranteed to be valid for the
        // duration of this completion handler, and ARC does not retain CF
        // types captured by blocks. Retain it before hopping to the main
        // queue, otherwise the buffer may already be recycled when the
        // dispatched block runs — which makes CMSampleBufferGetImageBuffer()
        // return NULL.
        CFRetain(imageDataSampleBuffer);
        dispatch_async(dispatch_get_main_queue(), ^{
            UIImage *image = [self imageFromSampleBuffer:imageDataSampleBuffer];
            // NOTE(review): `image` was previously discarded immediately —
            // assign/display it here as needed.
            CFRelease(imageDataSampleBuffer);
        });
    }
}];
Dies ist ein Apple-Beispielcode:
/// Converts a BGRA pixel-buffer-backed sample buffer into a UIImage.
/// @param sampleBuffer A sample buffer whose media data is an uncompressed
///        32BGRA CVPixelBuffer (as configured in the output settings).
/// @return The rendered image, or nil if the sample buffer carries no
///         CVPixelBuffer (e.g. when the output delivers compressed data) or
///         the bitmap context could not be created.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // BUG FIX: guard against a NULL image buffer — compressed sample buffers
    // (or an already-invalidated buffer) have no CVPixelBuffer, and the code
    // below must not dereference NULL.
    if (imageBuffer == NULL) {
        return nil;
    }
    // Lock the base address so the raw pixel data can be read on the CPU.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Create a bitmap graphics context with the sample buffer data.
    // kCGBitmapByteOrder32Little + premultiplied-first matches 32BGRA.
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // BUG FIX: context creation can fail (e.g. unsupported pixel layout);
    // bail out cleanly instead of passing NULL to CGBitmapContextCreateImage.
    if (context == NULL) {
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        CGColorSpaceRelease(colorSpace);
        return nil;
    }
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    // Release the Quartz image (UIImage retained it)
    CGImageRelease(quartzImage);
    return image;
}
Aber das Bild ist immer nil. Nach etwas Debugging habe ich festgestellt, dass die Funktion CMSampleBufferGetImageBuffer(sampleBuffer) immer NULL zurückgibt.
Kann jemand helfen?
Sind Sie sicher, dass Sie alle Eingaben richtig eingerichtet haben? – SeanLintern88
Eine abgespeckte Version des Codes mit allen Konfigurationen wurde hinzugefügt. Für die Kamera und Ausgabe – Bogus