Poor camera2 video recording performance
I am trying to record video with a MediaRecorder and the Camera2 API, and I am having trouble getting good-quality recordings out of it.
I am also getting a few different errors in my logcat, such as "App passed NULL surface", when the TextureView starts.
Otherwise it seems to work, but the recorded videos are zoomed in and have a very low frame rate of roughly 2 fps.
This is the code I am working with:
public class VideoTest extends AppCompatActivity {
private TextureView mTextureView;
private CameraDevice mCameraDevice;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession;
private Size mPreviewSize;
private Handler backgroundHandler;
private HandlerThread thread;
private MediaRecorder mMediaRecorder;
private String mVideoPath;
private boolean mIsRecordingVideo;
private static final String TAG = "VideoTest";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_test);
mTextureView = (TextureView) findViewById(R.id.texture);
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
mMediaRecorder = new MediaRecorder();
}
@Override
public void onResume() {
super.onResume();
registerReceiver(buttonReceiver, new IntentFilter("ACTION_PRESSED"));
}
@Override
public void onPause() {
Log.d(TAG, "onPause");
super.onPause();
closeCamera();
stopBackgroundThread();
}
private BroadcastReceiver buttonReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Log.d(TAG, "Got Button Press!");
try {
if (mIsRecordingVideo) {
i2cRequest(IndicatorControlReceiver.INDICATOR_OFF);
stopRecordingVideo();
} else {
i2cRequest(IndicatorControlReceiver.INDICATOR_ON);
startRecordingVideo();
}
} catch (Exception ex) {
Log.d(TAG, "ERROR BLAH CAMERA SUX");
}
}
};
private void openCamera() {
CameraManager camManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.d(TAG, "Opening Camera");
try {
String camId = camManager.getCameraIdList()[0];
CameraCharacteristics cameraChars = camManager.getCameraCharacteristics(camId);
StreamConfigurationMap map = cameraChars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[14]; // hard-coded index into the reported output sizes
camManager.openCamera(camId, cameraStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e(TAG, "onSurfaceTextureAvailable, width="+width+",height="+height);
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
//Log.e(TAG, "onSurfaceTextureSizeChanged");
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
//Log.e(TAG, "onSurfaceTextureUpdated");
}
};
private CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.d(TAG, "onOpened");
mCameraDevice = camera;
startPreview();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.d(TAG, "onDisconnected");
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "onError code: " + error);
}
};
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "Error Starting Preview. ABORTED!");
return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
if(null == texture) {
Log.e(TAG, "Cannot create texture. ABORTED!");
return;
}
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
try {
mCameraDevice.createCaptureSession(Collections.singletonList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
i2cRequest(I2CRequestReceiver.VIDEO_READY);
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "onConfigureFailed");
i2cRequest(I2CRequestReceiver.E_CAMERA_ERROR);
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if(null == mCameraDevice) {
Log.e(TAG, "Camera Device is Null! ABORT!");
return;
}
/* mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE,CameraMetadata.CONTROL_AF_MODE_OFF);*/
// NOTE: this device reports AE target FPS ranges in units of 1/1000 fps (see the comments below),
// so [30000, 30000] corresponds to a fixed 30 fps.
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30000,30000));
thread = new HandlerThread("CameraPreview");
thread.start();
backgroundHandler = new Handler(thread.getLooper());
try {
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
thread.quitSafely();
try {
thread.join();
thread = null;
backgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (null != mPreviewSession) {
mPreviewSession.close();
}
}
private void closeCamera(){
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
/*
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
*/
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P);
profile.audioBitRate = 128000;
profile.audioCodec = MediaRecorder.AudioEncoder.AAC;
profile.fileFormat = MediaRecorder.OutputFormat.MPEG_4;
profile.videoCodec = MediaRecorder.VideoEncoder.H264;
profile.videoBitRate = 2048000;
profile.videoFrameRate = 30;
mMediaRecorder.setProfile(profile);
/* mMediaRecorder.setVideoEncodingBitRate(2048000);
mMediaRecorder.setAudioEncodingBitRate(128000);
mMediaRecorder.setVideoSize(1280, 720);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);*/
if (mVideoPath == null || mVideoPath.isEmpty()) {
mVideoPath = getVideoFilePath();
}
mMediaRecorder.setOutputFile(mVideoPath);
mMediaRecorder.prepare();
}
@SuppressLint("SdCardPath")
private String getVideoFilePath() {
return "/sdcard/LIVE/video/" + System.currentTimeMillis() + ".mp4";
}
Surface recorderSurface;
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "Cannot bind camera, textureView, or previewSize");
return;
}
try {
closePreviewSession();
setupMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(1280, 720);
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
new Thread(new Runnable() {
@Override
public void run() {
i2cRequest(I2CRequestReceiver.VIDEO_RECORDING);
mIsRecordingVideo = true;
mMediaRecorder.start();
}
}).start();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "Capture failed!");
runOnUiThread(new Runnable() {
@Override
public void run() {
i2cRequest(I2CRequestReceiver.E_CAMERA_ERROR);
mIsRecordingVideo = false;
}
});
}
}, backgroundHandler);
} catch (IOException | CameraAccessException e) {
e.printStackTrace();
}
}
private void stopRecordingVideo() {
try {
mPreviewSession.abortCaptures();
} catch (CameraAccessException e) {
e.printStackTrace();
}
mIsRecordingVideo = false;
mMediaRecorder.stop();
mMediaRecorder.reset();
Log.d(TAG, "Video saved: " + mVideoPath);
}
private void i2cRequest(String request) {
Intent sendI2cRequest = new Intent();
sendI2cRequest.setAction(I2CRequestReceiver.NOWSPEAK_REQUEST_ACTION);
switch (request) {
case I2CRequestReceiver.VIDEO_READY:
sendI2cRequest.putExtra(I2CRequestReceiver.EXTRA_SPEAK, I2CRequestReceiver.VIDEO_READY);
Log.d(TAG, "VIDEO READY!!");
break;
case I2CRequestReceiver.E_CAMERA_ERROR:
sendI2cRequest.putExtra(I2CRequestReceiver.EXTRA_SPEAK, I2CRequestReceiver.E_CAMERA_ERROR);
Log.d(TAG, "VIDEO ERROR!!");
break;
case IndicatorControlReceiver.INDICATOR_ON:
sendI2cRequest.setAction(IndicatorControlReceiver.INDICATOR_CONTROL_ACTION);
sendI2cRequest.putExtra(IndicatorControlReceiver.EXTRA_INDICATOR, IndicatorControlReceiver.INDICATOR_ON);
break;
case IndicatorControlReceiver.INDICATOR_OFF:
sendI2cRequest.setAction(IndicatorControlReceiver.INDICATOR_CONTROL_ACTION);
sendI2cRequest.putExtra(IndicatorControlReceiver.EXTRA_INDICATOR, IndicatorControlReceiver.INDICATOR_OFF);
break;
}
LocalBroadcastManager.getInstance(this).sendBroadcast(sendI2cRequest);
}
}
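For what it's worth, here is a minimal sketch of how the preview size could be picked from the sizes the StreamConfigurationMap actually reports instead of the hard-coded index [14] in openCamera(); the helper name chooseOutputSize and the 1280x720 target are my own illustrative assumptions, and I have not verified that this alone fixes the zoomed-in look:
private static Size chooseOutputSize(StreamConfigurationMap map) {
    // Sketch only: prefer the largest 16:9 size that fits within the 1280x720 recording target.
    Size best = null;
    for (Size size : map.getOutputSizes(SurfaceTexture.class)) {
        boolean is16to9 = size.getWidth() * 9 == size.getHeight() * 16;
        boolean fitsTarget = size.getWidth() <= 1280 && size.getHeight() <= 720;
        if (is16to9 && fitsTarget && (best == null || size.getWidth() > best.getWidth())) {
            best = size;
        }
    }
    // Fall back to the first reported size rather than an arbitrary index.
    return best != null ? best : map.getOutputSizes(SurfaceTexture.class)[0];
}
openCamera() would then use mPreviewSize = chooseOutputSize(map); instead of indexing into the array.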
On my platform they seem to be in units of 1/1000 fps, and your suggested fix produced the warning "W/LegacyRequestMapper: Unsupported FPS range set [30,30]". 'D/VideoTest: Cam2-FPS-Range:: LOWER: 15000 UPPER: 15000 D/VideoTest: Cam2-FPS-Range:: LOWER: 24000 UPPER: 24000 D/VideoTest: Cam2-FPS-Range:: LOWER: 7500 UPPER: 30000 D/VideoTest: Cam2-FPS-Range:: LOWER: 30000 UPPER: 30000' Thanks for the suggestion, though. – RobGries
@RobGries: Eddy Tavala has forgotten more about Camera2 than I will ever know. What platform is this that is so far out of spec? – CommonsWare
@CommonsWare I am using the Intrinsyc OpenQ 410 platform - https://www.intrinsyc.com/snapdragon-embedded-development-kits/snapdragon-410-development-kit/ – RobGries
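Based on the FPS ranges listed in the comment above, here is a minimal sketch of how the AE target range could be read from CameraCharacteristics instead of being hard-coded; the helper name pickFpsRange is illustrative, and the selection rule (prefer a fixed-rate range with the highest upper bound) is an assumption, not a verified fix:
private static Range<Integer> pickFpsRange(CameraCharacteristics cameraChars) {
    // Sketch only: log what the device advertises and prefer a fixed-rate range
    // (lower == upper) with the highest upper bound, e.g. [30000, 30000] on the
    // platform above or [30, 30] on devices that report plain fps values.
    Range<Integer>[] ranges = cameraChars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    if (ranges == null || ranges.length == 0) {
        return null; // caller keeps the template default
    }
    Range<Integer> best = ranges[0];
    for (Range<Integer> range : ranges) {
        Log.d(TAG, "AE FPS range: LOWER: " + range.getLower() + " UPPER: " + range.getUpper());
        boolean fixed = range.getLower().equals(range.getUpper());
        boolean bestFixed = best.getLower().equals(best.getUpper());
        if ((fixed && !bestFixed) || (fixed == bestFixed && range.getUpper() > best.getUpper())) {
            best = range;
        }
    }
    return best;
}
updatePreview() could then pass the returned range (guarding against null) to mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, ...), so the request always uses a range the HAL claims to support.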