2014-06-18 8 views
7

Abrufen/Ändern von Bildern während der Aufzeichnung von MP4 mit MediaRecorder

Ich nehme MP4 mit dem folgenden Code auf. Aber während ich aufnehme:

1- ist es möglich, Frames von Video in Echtzeit in Array-Daten oder ein anderes Format zu bekommen?

2- ist es möglich, Frames zu ändern? Zum Beispiel, um die Farben zu invertieren und sie negativ aussehen zu lassen.

/**
 * Records MP4 video using the platform {@link MediaRecorder}, previewing on a
 * SurfaceView. Tapping the preview toggles recording on/off.
 *
 * Note: MediaRecorder gives no access to the raw frames while recording; to
 * inspect or modify frames in real time, use Camera.PreviewCallback with an
 * external encoder instead (see the JavaCV-based answer below).
 */
public class MainActivity extends Activity implements OnClickListener, SurfaceHolder.Callback {

    // Single recorder instance reused across recording sessions;
    // released in surfaceDestroyed().
    MediaRecorder recorder;
    SurfaceHolder holder;
    boolean recording = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

        recorder = new MediaRecorder();
        initRecorder();
        setContentView(R.layout.activity_main);

        SurfaceView cameraView = (SurfaceView) findViewById(R.id.surfaceView1);
        holder = cameraView.getHolder();
        holder.addCallback(this);
        // Deprecated (no-op from API 11 on) but required on older devices.
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

        cameraView.setClickable(true);
        cameraView.setOnClickListener(this);
    }

    /**
     * Configures sources, quality profile, output file and limits.
     * Must run while the recorder is in the Initial state, i.e. right after
     * construction or after {@code reset()} — setAudioSource() throws
     * IllegalStateException otherwise.
     */
    private void initRecorder() {
        recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
        recorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);

        CamcorderProfile cpHigh = CamcorderProfile
                .get(CamcorderProfile.QUALITY_HIGH);
        recorder.setProfile(cpHigh);

        recorder.setOutputFile(Environment.getExternalStorageDirectory().getPath() + "/video.mp4");
        recorder.setMaxDuration(50 * 1000); // 50 seconds
        recorder.setMaxFileSize(5 * 1000000); // Approximately 5 megabytes
    }

    /** Attaches the preview surface and moves the recorder to the Prepared state. */
    private void prepareRecorder() {
        recorder.setPreviewDisplay(holder.getSurface());

        try {
            recorder.prepare();
        } catch (IllegalStateException e) {
            e.printStackTrace();
            finish();
        } catch (IOException e) {
            e.printStackTrace();
            finish();
        }
    }

    /** Toggles recording. Click while idle starts; click while recording stops. */
    public void onClick(View v) {
        if (recording) {
            recorder.stop();
            recording = false;

            // FIX: after stop() the recorder must be reset() before it can be
            // reconfigured. Without this, the setAudioSource() call inside
            // initRecorder() throws IllegalStateException on the second
            // recording attempt (MediaRecorder state machine).
            recorder.reset();

            // Re-initialize so we can record again.
            initRecorder();
            prepareRecorder();
        } else {
            recording = true;
            recorder.start();
        }
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The surface now exists, so the preview display can be attached.
        prepareRecorder();
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Stop an in-progress recording before releasing the recorder.
        if (recording) {
            recorder.stop();
            recording = false;
        }
        recorder.release();
        finish();
    }

}

Antwort

1

Ich habe die JavaCV-Bibliothek für die Bearbeitung von Frames verwendet (sie kapselt FFmpeg und OpenCV). Erfassen Sie die Frames in onPreviewFrame und zeichnen Sie sie über den FFmpeg-Recorder auf.

Folgendes ist das RecordActivity.java-Beispiel.

/**
 * JavaCV sample: captures camera preview frames via Camera.PreviewCallback and
 * microphone audio via AudioRecord on a dedicated thread, and encodes both with
 * FFmpegFrameRecorder into an FLV file. Because every video frame passes
 * through onPreviewFrame() as a raw byte[], frames can be inspected or modified
 * there before recorder.record() is called — which answers the question above.
 */
public class RecordActivity extends Activity implements OnClickListener { 

    private final static String CLASS_LABEL = "RecordActivity"; 
    private final static String LOG_TAG = CLASS_LABEL; 

    // Keeps the screen bright while this activity is in the foreground.
    private PowerManager.WakeLock mWakeLock; 

    // Hard-coded output target for the FFmpeg recorder (FLV container).
    private String ffmpeg_link = "/mnt/sdcard/stream.flv"; 

    // Wall-clock time when recording started; used to derive frame timestamps.
    long startTime = 0; 
    boolean recording = false; 

    // volatile: written on the UI thread (initRecorder/stopRecording) and read
    // concurrently by the audio thread and the camera preview callback.
    private volatile FFmpegFrameRecorder recorder; 

    private boolean isPreviewOn = false; 

    private int sampleAudioRateInHz = 44100; 
    private int imageWidth = 320; 
    private int imageHeight = 240; 
    private int frameRate = 30; 

    /* audio data getting thread */ 
    private AudioRecord audioRecord; 
    private AudioRecordRunnable audioRecordRunnable; 
    private Thread audioThread; 
    // Loop condition for the audio thread; volatile so the write in
    // stopRecording() is visible to that thread.
    volatile boolean runAudioThread = true; 

    /* video data getting thread */ 
    private Camera cameraDevice; 
    private CameraView cameraView; 

    // Reusable image buffer the preview bytes are copied into before encoding.
    private IplImage yuvIplimage = null; 

    /* layout setting */ 
    // Fixed design-time dimensions; initLayout() scales them to the actual
    // screen size to position the preview inside a background layout.
    private final int bg_screen_bx = 232; 
    private final int bg_screen_by = 128; 
    private final int bg_screen_width = 700; 
    private final int bg_screen_height = 500; 
    private final int bg_width = 1123; 
    private final int bg_height = 715; 
    private final int live_width = 640; 
    private final int live_height = 480; 
    private int screenWidth, screenHeight; 
    private Button btnRecorderControl; 

    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); 

     setContentView(R.layout.main); 

     // Acquire a bright-screen wake lock for the life of the activity
     // (released in onPause/onDestroy, re-acquired in onResume).
     PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE); 
     mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL); 
     mWakeLock.acquire(); 

     initLayout(); 
     initRecorder(); 
    } 


    @Override 
    protected void onResume() { 
     super.onResume(); 

     // Re-acquire the wake lock if onPause() released it.
     if (mWakeLock == null) { 
      PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE); 
      mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL); 
      mWakeLock.acquire(); 
     } 
    } 

    @Override 
    protected void onPause() { 
     super.onPause(); 

     if (mWakeLock != null) { 
      mWakeLock.release(); 
      mWakeLock = null; 
     } 
    } 

    @Override 
    protected void onDestroy() { 
     super.onDestroy(); 

     // Stop feeding frames, then shut down preview and release the camera.
     recording = false; 

     if (cameraView != null) { 
      cameraView.stopPreview(); 
     } 

     if(cameraDevice != null) { 
      cameraDevice.stopPreview(); 
      cameraDevice.release(); 
      cameraDevice = null; 
     } 

     if (mWakeLock != null) { 
      mWakeLock.release(); 
      mWakeLock = null; 
     } 
    } 


    /**
     * Builds the UI in code: inflates R.layout.main into a full-screen
     * RelativeLayout, wires the start/stop button, opens the camera and adds
     * the CameraView preview scaled/positioned from the design-time constants.
     */
    private void initLayout() { 

     /* get size of screen */ 
     Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); 
     screenWidth = display.getWidth(); 
     screenHeight = display.getHeight(); 
     RelativeLayout.LayoutParams layoutParam = null; 
     LayoutInflater myInflate = null; 
     myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); 
     RelativeLayout topLayout = new RelativeLayout(this); 
     // NOTE(review): this replaces the content view already set in onCreate().
     setContentView(topLayout); 
     LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.main, null); 
     layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight); 
     topLayout.addView(preViewLayout, layoutParam); 

     /* add control button: start and stop */ 
     btnRecorderControl = (Button) findViewById(R.id.recorder_control); 
     btnRecorderControl.setText("Start"); 
     btnRecorderControl.setOnClickListener(this); 

     /* add camera view */ 
     // Scale the design-time preview rectangle to this screen, preserving the
     // live_width:live_height aspect ratio (letterbox on the longer side).
     int display_width_d = (int) (1.0 * bg_screen_width * screenWidth/bg_width); 
     int display_height_d = (int) (1.0 * bg_screen_height * screenHeight/bg_height); 
     int prev_rw, prev_rh; 
     if (1.0 * display_width_d/display_height_d > 1.0 * live_width/live_height) { 
      prev_rh = display_height_d; 
      prev_rw = (int) (1.0 * display_height_d * live_width/live_height); 
     } else { 
      prev_rw = display_width_d; 
      prev_rh = (int) (1.0 * display_width_d * live_height/live_width); 
     } 
     layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh); 
     layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight/bg_height); 
     layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth/bg_width); 

     cameraDevice = Camera.open(); 
     Log.i(LOG_TAG, "cameara open"); 
     cameraView = new CameraView(this, cameraDevice); 
     topLayout.addView(cameraView, layoutParam); 
     Log.i(LOG_TAG, "cameara preview start: OK"); 
    } 

    //--------------------------------------- 
    // initialize ffmpeg_recorder 
    //--------------------------------------- 
    private void initRecorder() { 

     Log.w(LOG_TAG,"init recorder"); 

     if (yuvIplimage == null) { 
      // 8-bit, 2-channel image => imageWidth*imageHeight*2 bytes, which is
      // large enough to hold a preview frame's byte[] copied in
      // onPreviewFrame(). NOTE(review): the 2-channel layout looks chosen
      // for a YUV packing expected by FFmpegFrameRecorder — confirm against
      // the camera's actual preview pixel format.
      yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2); 
      Log.i(LOG_TAG, "create yuvIplimage"); 
     } 

     Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link); 
     // Last ctor argument is presumably the audio channel count (1 = mono),
     // matching the mono AudioRecord below — TODO confirm in JavaCV docs.
     recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1); 
     recorder.setFormat("flv"); 
     recorder.setSampleRate(sampleAudioRateInHz); 
     // Set in the surface changed method 
     recorder.setFrameRate(frameRate); 

     Log.i(LOG_TAG, "recorder initialize success"); 

     // Audio thread is created here but only started in startRecording().
     audioRecordRunnable = new AudioRecordRunnable(); 
     audioThread = new Thread(audioRecordRunnable); 
     runAudioThread = true; 
    } 

    /** Starts the FFmpeg recorder and the audio capture thread. */
    public void startRecording() { 

     try { 
      recorder.start(); 
      startTime = System.currentTimeMillis(); 
      recording = true; 
      audioThread.start(); 

     } catch (FFmpegFrameRecorder.Exception e) { 
      e.printStackTrace(); 
     } 
    } 

    /**
     * Signals the audio thread to exit, waits for it, then stops and releases
     * the recorder. Order matters: audio must stop feeding samples before
     * recorder.stop() is called.
     */
    public void stopRecording() { 

     runAudioThread = false; 
     try { 
      audioThread.join(); 
     } catch (InterruptedException e) { 
      e.printStackTrace(); 
     } 
     audioRecordRunnable = null; 
     audioThread = null; 

     if (recorder != null && recording) { 
      recording = false; 
      Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder"); 
      try { 
       recorder.stop(); 
       recorder.release(); 
      } catch (FFmpegFrameRecorder.Exception e) { 
       e.printStackTrace(); 
      } 
      recorder = null; 

     } 
    } 

    @Override 
    public boolean onKeyDown(int keyCode, KeyEvent event) { 

     // Back key: stop any in-progress recording cleanly before finishing.
     if (keyCode == KeyEvent.KEYCODE_BACK) { 
      if (recording) { 
       stopRecording(); 
      } 

      finish(); 

      return true; 
     } 

     return super.onKeyDown(keyCode, event); 
    } 


    //--------------------------------------------- 
    // audio thread, gets and encodes audio data 
    //--------------------------------------------- 
    class AudioRecordRunnable implements Runnable { 

     @Override 
     public void run() { 
      android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO); 

      // Audio 
      int bufferSize; 
      short[] audioData; 
      int bufferReadResult; 

      // Mono 16-bit PCM at sampleAudioRateInHz, with the platform's minimum
      // legal buffer size.
      bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz, 
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); 
      audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz, 
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize); 

      audioData = new short[bufferSize]; 

      Log.d(LOG_TAG, "audioRecord.startRecording()"); 
      audioRecord.startRecording(); 

      /* ffmpeg_audio encoding loop */ 
      while (runAudioThread) { 
       //Log.v(LOG_TAG,"recording? " + recording); 
       bufferReadResult = audioRecord.read(audioData, 0, audioData.length); 
       if (bufferReadResult > 0) { 
        Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult); 
        // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!! 
        // Why? Good question... 
        if (recording) { 
         try { 
          // Only the samples actually read are wrapped and encoded.
          recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult)); 
          //Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024); 
         } catch (FFmpegFrameRecorder.Exception e) { 
          Log.v(LOG_TAG,e.getMessage()); 
          e.printStackTrace(); 
         } 
        } 
       } 
      } 
      Log.v(LOG_TAG,"AudioThread Finished, release audioRecord"); 

      /* encoding finish, release recorder */ 
      if (audioRecord != null) { 
       audioRecord.stop(); 
       audioRecord.release(); 
       audioRecord = null; 
       Log.v(LOG_TAG,"audioRecord released"); 
      } 
     } 
    } 

    //--------------------------------------------- 
    // camera thread, gets and encodes video data 
    //--------------------------------------------- 
    class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback { 

     private SurfaceHolder mHolder; 
     private Camera mCamera; 

     public CameraView(Context context, Camera camera) { 
      super(context); 
      Log.w("camera","camera view"); 
      mCamera = camera; 
      mHolder = getHolder(); 
      mHolder.addCallback(CameraView.this); 
      mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 
      // Route every preview frame to onPreviewFrame() below.
      mCamera.setPreviewCallback(CameraView.this); 
     } 

     @Override 
     public void surfaceCreated(SurfaceHolder holder) { 
      try { 
       stopPreview(); 
       mCamera.setPreviewDisplay(holder); 
      } catch (IOException exception) { 
       // Surface could not be attached; give up the camera entirely.
       mCamera.release(); 
       mCamera = null; 
      } 
     } 

     public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { 
      // Force the preview to the exact size/rate the recorder was
      // configured with, so frame buffers match yuvIplimage.
      Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate); 
      Camera.Parameters camParams = mCamera.getParameters(); 
      camParams.setPreviewSize(imageWidth, imageHeight); 

      Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate()); 

      camParams.setPreviewFrameRate(frameRate); 
      mCamera.setParameters(camParams); 
      startPreview(); 
     } 

     @Override 
     public void surfaceDestroyed(SurfaceHolder holder) { 
      try { 
       mHolder.addCallback(null); 
       mCamera.setPreviewCallback(null); 
      } catch (RuntimeException e) { 
       // The camera has probably just been released, ignore. 
      } 
     } 

     public void startPreview() { 
      if (!isPreviewOn && mCamera != null) { 
       isPreviewOn = true; 
       mCamera.startPreview(); 
      } 
     } 

     public void stopPreview() { 
      if (isPreviewOn && mCamera != null) { 
       isPreviewOn = false; 
       mCamera.stopPreview(); 
      } 
     } 

     @Override 
     public void onPreviewFrame(byte[] data, Camera camera) { 
      /* get video data */ 
      // 'data' is the raw preview frame — this is the hook point where
      // frames can be read or modified (e.g. color inversion) before they
      // are copied into yuvIplimage and encoded.
      if (yuvIplimage != null && recording) { 
       yuvIplimage.getByteBuffer().put(data); 

       Log.v(LOG_TAG,"Writing Frame"); 
       try { 
        // Elapsed ms since start * 1000; NOTE(review): presumably the
        // recorder's timestamps are in microseconds — confirm in the
        // FFmpegFrameRecorder API. Timestamps must be monotonic, hence
        // the guard below.
        long t = 1000 * (System.currentTimeMillis() - startTime); 
        if (t > recorder.getTimestamp()) { 
         recorder.setTimestamp(t); 
        } 
        recorder.record(yuvIplimage); 
       } catch (FFmpegFrameRecorder.Exception e) { 
        Log.v(LOG_TAG,e.getMessage()); 
        e.printStackTrace(); 
       } 
      } 
     } 
    } 

    @Override 
    public void onClick(View v) { 
     // Single button toggles between start and stop.
     if (!recording) { 
      startRecording(); 
      Log.w(LOG_TAG, "Start Button Pushed"); 
      btnRecorderControl.setText("Stop"); 
     } else { 
      // This will trigger the audio recording loop to stop and then set isRecorderStart = false; 
      stopRecording(); 
      Log.w(LOG_TAG, "Stop Button Pushed"); 
      btnRecorderControl.setText("Start"); 
     } 
    } 
}