Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Capturing camera and storing in sequential segmented files. #111

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ requires API level 23 (Marshmallow) or greater.
The shader used for the filters is not optimized, but seems to perform well on most devices
(the original Nexus 7 (2012) being a notable exception). Demo
here: http://www.youtube.com/watch?v=kH9kCP2T5Gg
- The output is a video-only MP4 file ("camera-test.mp4").
- The output is a video-only MP4 file ("camera-test.mp4"), or the video can be stored in sequential segmented files ("camera-test-1.mp4", "camera-test-2.mp4", ...). To store the recording across multiple files, you must set `Output Segment Duration` (in seconds) in the UI.

[Simple Canvas in TextureView](app/src/main/java/com/android/grafika/TextureViewCanvasActivity.java). Exercises software rendering to a `TextureView` with a `Canvas`.
- Renders as quickly as possible. Because it's using software rendering, this will likely
Expand Down
26 changes: 20 additions & 6 deletions app/src/main/java/com/android/grafika/CameraCaptureActivity.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,15 @@

package com.android.grafika;

import android.app.Activity;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.NonNull;
Expand All @@ -34,11 +38,9 @@
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import android.app.Activity;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.widget.Toast;

import com.android.grafika.gles.FullFrameRect;
Expand Down Expand Up @@ -138,6 +140,7 @@ public class CameraCaptureActivity extends Activity
static final int FILTER_EDGE_DETECT = 4;
static final int FILTER_EMBOSS = 5;

private EditText mSegmentDurationEditText;
private GLSurfaceView mGLView;
private CameraSurfaceRenderer mRenderer;
private Camera mCamera;
Expand All @@ -154,10 +157,12 @@ protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera_capture);

File outputFile = new File(getFilesDir(), "camera-test.mp4");
File outputFile = new File(getExternalFilesDir(Environment.DIRECTORY_DCIM), "camera-test.mp4");
TextView fileText = (TextView) findViewById(R.id.cameraOutputFile_text);
fileText.setText(outputFile.toString());

mSegmentDurationEditText = findViewById(R.id.segmentDuration_edit);

Spinner spinner = (Spinner) findViewById(R.id.cameraFilter_spinner);
ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this,
R.array.cameraFilterNames, android.R.layout.simple_spinner_item);
Expand Down Expand Up @@ -353,6 +358,7 @@ public void clickToggleRecording(@SuppressWarnings("unused") View unused) {
mGLView.queueEvent(new Runnable() {
@Override public void run() {
// notify the renderer that we want to change the encoder's state
mRenderer.setSegmentDuration(Integer.parseInt(mSegmentDurationEditText.getText().toString()));
mRenderer.changeRecordingState(mRecordingEnabled);
}
});
Expand Down Expand Up @@ -494,7 +500,7 @@ class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
private int mCurrentFilter;
private int mNewFilter;


private int mSegmentDuration;
/**
* Constructs CameraSurfaceRenderer.
* <p>
Expand Down Expand Up @@ -692,7 +698,7 @@ public void onDrawFrame(GL10 unused) {
Log.d(TAG, "START recording");
// start recording
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 640, 480, 1000000, EGL14.eglGetCurrentContext()));
mOutputFile, 640, 480, 1000000, EGL14.eglGetCurrentContext(), mSegmentDuration));
mRecordingStatus = RECORDING_ON;
break;
case RECORDING_RESUMED:
Expand Down Expand Up @@ -771,4 +777,12 @@ private void drawBox() {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
}

/**
 * Returns the configured segment duration in seconds; 0 means the
 * recording is written to a single (non-segmented) output file.
 */
public int getSegmentDuration() {
    return this.mSegmentDuration;
}

/**
 * Sets the segment duration used for the next recording.
 *
 * @param segmentDuration segment length in seconds; 0 disables
 *                        segmentation (single output file)
 */
public void setSegmentDuration(int segmentDuration) {
    mSegmentDuration = segmentDuration;
}
}
5 changes: 2 additions & 3 deletions app/src/main/java/com/android/grafika/MainActivity.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,9 @@

package com.android.grafika;

import android.os.Bundle;
import android.app.ListActivity;
import android.content.Intent;
import android.util.Log;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
Expand Down Expand Up @@ -80,7 +79,7 @@ public class MainActivity extends ListActivity {
"Exercises SurfaceFlinger PTS handling",
"ScheduledSwapActivity" },
{ "Show + capture camera",
"Shows camera preview, records when requested",
"Shows camera preview, records when requested. You can record in a single file or sequential segmented files",
"CameraCaptureActivity" },
{ "Simple GL in TextureView",
"Renders with GL as quickly as possible",
Expand Down
10 changes: 10 additions & 0 deletions app/src/main/java/com/android/grafika/MiscUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -100,4 +100,14 @@ public static long getDisplayRefreshNsec(Activity activity) {
Log.d(TAG, "refresh rate is " + displayFps + " fps --> " + refreshNs + " ns");
return refreshNs;
}

/**
 * Returns the greatest common divisor (gcd) of two integers.
 * <p>
 * Uses the iterative Euclidean algorithm. The result is always
 * non-negative, and {@code gcd(a, 0) == |a|}. Inputs of
 * {@code Integer.MIN_VALUE} are not supported ({@code Math.abs} overflows).
 *
 * @param a first value
 * @param b second value
 * @return the non-negative greatest common divisor of {@code a} and {@code b}
 */
public static int gcd(int a, int b)
{
    // Normalize signs so the result is non-negative for any input order.
    a = Math.abs(a);
    b = Math.abs(b);
    // Iterative Euclid: avoids recursion and handles b == 0 naturally.
    while (b != 0) {
        int r = a % b;
        a = b;
        b = r;
    }
    return a;
}
}
13 changes: 10 additions & 3 deletions app/src/main/java/com/android/grafika/TextureMovieEncoder.java
Original file line number Diff line number Diff line change
Expand Up @@ -101,14 +101,21 @@ public static class EncoderConfig {
final int mHeight;
final int mBitRate;
final EGLContext mEglContext;
final int mSegmentDuration;

/**
 * Backward-compatible constructor: delegates with a segment duration of 0,
 * i.e. the encoder writes a single (non-segmented) output file.
 */
public EncoderConfig(File outputFile, int width, int height, int bitRate,
EGLContext sharedEglContext) {
this(outputFile, width, height, bitRate, sharedEglContext, 0);
}

/**
 * Constructs an encoder configuration.
 *
 * @param outputFile       destination file (base name when segmenting)
 * @param width            video width in pixels
 * @param height           video height in pixels
 * @param bitRate          target bit rate in bits per second
 * @param sharedEglContext EGL context shared with the renderer thread
 * @param segmentDuration  segment length in seconds; 0 for a single file
 */
public EncoderConfig(File outputFile, int width, int height, int bitRate,
EGLContext sharedEglContext, int segmentDuration) {
mOutputFile = outputFile;
mWidth = width;
mHeight = height;
mBitRate = bitRate;
mEglContext = sharedEglContext;
mSegmentDuration = segmentDuration;
}

@Override
Expand Down Expand Up @@ -310,7 +317,7 @@ private void handleStartRecording(EncoderConfig config) {
Log.d(TAG, "handleStartRecording " + config);
mFrameNum = 0;
prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate,
config.mOutputFile);
config.mOutputFile, config.mSegmentDuration);
}

/**
Expand Down Expand Up @@ -376,9 +383,9 @@ private void handleUpdateSharedContext(EGLContext newSharedContext) {
}

private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate,
File outputFile) {
File outputFile, int segmentDuration) {
try {
mVideoEncoder = new VideoEncoderCore(width, height, bitRate, outputFile);
mVideoEncoder = new VideoEncoderCore(width, height, bitRate, outputFile, segmentDuration);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
Expand Down
59 changes: 56 additions & 3 deletions app/src/main/java/com/android/grafika/VideoEncoderCore.java
Original file line number Diff line number Diff line change
Expand Up @@ -52,13 +52,24 @@ public class VideoEncoderCore {
private MediaCodec.BufferInfo mBufferInfo;
private int mTrackIndex;
private boolean mMuxerStarted;

private final File mOutputFile;
private int mSegmentIndex = 1;
private final long mSegmentDurationUsec;
private long mLastPresentationTimeUs = -1;

/**
 * Configures encoder and muxer state, and prepares the input Surface.
 * <p>
 * Backward-compatible form: delegates with segmentDurationSec = 0, so the
 * whole recording goes to a single output file.
 */
public VideoEncoderCore(int width, int height, int bitRate, File outputFile)
throws IOException {
this(width, height, bitRate, outputFile, 0);
}

/**
* Configures encoder and muxer state, and prepares the input Surface.
*/
public VideoEncoderCore(int width, int height, int bitRate, File outputFile, int segmentDurationSec)
throws IOException {
mBufferInfo = new MediaCodec.BufferInfo();

MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
Expand All @@ -69,7 +80,7 @@ public VideoEncoderCore(int width, int height, int bitRate, File outputFile)
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, MiscUtils.gcd(IFRAME_INTERVAL, segmentDurationSec));
if (VERBOSE) Log.d(TAG, "format: " + format);

// Create a MediaCodec encoder, and configure it with our format. Get a Surface
Expand All @@ -85,7 +96,9 @@ public VideoEncoderCore(int width, int height, int bitRate, File outputFile)
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
mMuxer = new MediaMuxer(outputFile.toString(),
mSegmentDurationUsec = segmentDurationSec * 1000000;
mOutputFile = outputFile;
mMuxer = new MediaMuxer(getNextOutputFileName(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

mTrackIndex = -1;
Expand All @@ -99,6 +112,23 @@ public Surface getInputSurface() {
return mInputSurface;
}

/**
 * Returns the path of the next muxer output file.
 * <p>
 * When segmented output is disabled (segment duration == 0) this is always
 * the configured output file. Otherwise the base name gets a "-N" suffix
 * before the extension ("camera-test-1.mp4", "camera-test-2.mp4", ...),
 * and the segment counter is advanced as a side effect.
 */
private String getNextOutputFileName() {
    if (mSegmentDurationUsec <= 0) {
        // Single-file mode: same path every time.
        return mOutputFile.toString();
    }
    String name = mOutputFile.getName();
    String base = name;
    String extension = "";
    int dot = name.lastIndexOf('.');
    if (dot > 0) {
        // Split "camera-test.mp4" into "camera-test" + ".mp4".
        base = name.substring(0, dot);
        extension = name.substring(dot);
    }
    String segmentName = base + "-" + mSegmentIndex + extension;
    mSegmentIndex++;
    return new File(mOutputFile.getParent(), segmentName).toString();
}

/**
* Releases encoder resources.
*/
Expand Down Expand Up @@ -189,6 +219,29 @@ public void drainEncoder(boolean endOfStream) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

if(mLastPresentationTimeUs == -1)
mLastPresentationTimeUs = mBufferInfo.presentationTimeUs;

if(mSegmentDurationUsec > 0 && (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) > 0 &&
mBufferInfo.presentationTimeUs - mLastPresentationTimeUs >= mSegmentDurationUsec) {
mLastPresentationTimeUs = mBufferInfo.presentationTimeUs;

mMuxer.stop();
mMuxer.release();
mMuxer = null;

try {
String newMuxerPath = getNextOutputFileName();
if (VERBOSE) Log.d(TAG, "creating new muxer: " + newMuxerPath);
mMuxer = new MediaMuxer(newMuxerPath,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
throw new RuntimeException("Can not create new muxer", e);
}
mTrackIndex = mMuxer.addTrack(mEncoder.getOutputFormat());
mMuxer.start();
}

mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
Expand Down
20 changes: 18 additions & 2 deletions app/src/main/res/layout/activity_camera_capture.xml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
android:layout_alignLeft="@+id/toggleRecording_button"
android:layout_alignParentBottom="true"
android:layout_alignParentRight="true"
android:layout_below="@+id/toggleRecording_button"
android:layout_below="@+id/segmentDuration_edit"
android:layout_centerInParent="true" >

<android.opengl.GLSurfaceView
Expand All @@ -49,6 +49,22 @@
android:text="@string/toggleRecordingOn"
android:onClick="clickToggleRecording" />

<TextView
android:id="@+id/outputSegment_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/toggleRecording_button"
android:text="Output Segment Duration:"/>

<EditText
android:id="@+id/segmentDuration_edit"
android:text="0"
android:layout_width="60dp"
android:inputType="numberDecimal"
android:layout_height="wrap_content"
android:layout_below="@+id/toggleRecording_button"
android:layout_toRightOf="@id/outputSegment_text"/>

<Spinner
android:id="@+id/cameraFilter_spinner"
android:layout_width="wrap_content"
Expand All @@ -61,7 +77,7 @@
android:id="@+id/cameraParams_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@id/toggleRecording_button"
android:layout_alignTop="@id/cameraPreview_afl"
android:text="[?]"
android:textAppearance="?android:attr/textAppearanceMedium" />

Expand Down
2 changes: 1 addition & 1 deletion build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ buildscript {
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.3.0'
classpath 'com.android.tools.build:gradle:3.5.3'
}
}

Expand Down
2 changes: 1 addition & 1 deletion gradle/wrapper/gradle-wrapper.properties
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.1-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip