Skip to content

Commit

Permalink
implementing video record
Browse files Browse the repository at this point in the history
  • Loading branch information
Shaji Khan committed Dec 1, 2023
1 parent 444ba61 commit cd89bf6
Show file tree
Hide file tree
Showing 2 changed files with 288 additions and 1 deletion.
256 changes: 256 additions & 0 deletions app/src/main/cpp/camera_engine.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
#include <cstring>
#include <fcntl.h>   // ::open() flags for the encoder output file
#include <unistd.h>  // ::close() for the muxer file descriptor

#include "native_debug.h"
#include "logging_macros.h"

CameraAppEngine::CameraAppEngine(JNIEnv* env, jobject instance, jint w, jint h)
: env_(env),
Expand All @@ -37,6 +38,7 @@ CameraAppEngine::CameraAppEngine(JNIEnv* env, jobject instance, jint w, jint h)
ASSERT(camera_, "Failed to Create CameraObject");
camera_->MatchCaptureSizeRequest(requestWidth_, requestHeight_,
&compatibleCameraRes_);
createEncoder();
}

CameraAppEngine::~CameraAppEngine() {
Expand Down Expand Up @@ -85,6 +87,260 @@ int CameraAppEngine::GetCameraSensorOrientation(int32_t requestFacing) {
return 0;
}

void CameraAppEngine::createEncoder () {
format = AMediaFormat_new() ;
AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_WIDTH,requestWidth_);
AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_HEIGHT,requestHeight_);

AMediaFormat_setString(format,AMEDIAFORMAT_KEY_MIME,"video/avc"); // H.264 Advanced Video Coding
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, 21); // #21 COLOR_FormatYUV420SemiPlanar (NV12)
AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_BIT_RATE,500000);
AMediaFormat_setFloat(format,AMEDIAFORMAT_KEY_FRAME_RATE,mFPS);
AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,5);

mEncoder = AMediaCodec_createEncoderByType("video/avc");
if(mEncoder == nullptr){
LOGE("Unable to create encoder");
}

media_status_t err = AMediaCodec_configure(mEncoder, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
if(err != AMEDIA_OK){
LOGE( "Error occurred: %d", err );
}

err = AMediaCodec_start(mEncoder);
if(err != AMEDIA_OK){
LOGE( "Error occurred: %d", err);
}

fd = fileno (fopen (filename.c_str(), "w"));
mMuxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);

if(mMuxer == nullptr){
LOGE("Unable to create Muxer");
}

mTrackIndex = -1;
mMuxerStarted = false;
mFrameCounter = 0;
isRunning = true;
LOGD ("Encoder ready!");

}


// Convert a frame index into a presentation timestamp in microseconds,
// assuming a constant frame rate of mFPS frames per second.
int64_t CameraAppEngine::frame2Time(int64_t frameNo){
    const int64_t kMicrosPerSecond = 1000000;
    // Multiply before dividing to keep integer precision.
    return frameNo * kMicrosPerSecond / mFPS;
}

long long CameraAppEngine::computePresentationTimeNsec() {
mFrameCounter++;
double timePerFrame = 1000000.0/mFPS;
return static_cast<long long>(mFrameCounter*timePerFrame);
}


void CameraAppEngine::writeEnd(){
LOGD("End of recording called!");
// Send the termination frame
ssize_t inBufferIdx = AMediaCodec_dequeueInputBuffer(mEncoder, TIMEOUT_USEC);
size_t out_size;
uint8_t* inBuffer = AMediaCodec_getInputBuffer(mEncoder, inBufferIdx, &out_size);
int64_t presentationTimeNs = computePresentationTimeNsec();
LOGD( "Sending EOS");
media_status_t status = AMediaCodec_queueInputBuffer(mEncoder, inBufferIdx, 0, out_size, presentationTimeNs, AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
// send end-of-stream to encoder, and drain remaining output

drainEncoder(true);

releaseEncoder();

// To test the result, open the file with MediaExtractor, and get the format. Pass
// that into the MediaCodec decoder configuration, along with a SurfaceTexture surface,
// and examine the output with glReadPixels.
}

void CameraAppEngine::releaseEncoder() {
LOGW( "releasing encoder objects");
if (mEncoder != nullptr) {
AMediaCodec_stop(mEncoder);
}

if (mMuxer != nullptr) {
AMediaMuxer_stop(mMuxer);
}

if (mEncoder != nullptr) {
AMediaCodec_delete(mEncoder);
mEncoder = nullptr;
}

if (mMuxer != nullptr) {
AMediaMuxer_delete(mMuxer);
mMuxer = nullptr;
}

isRunning = false;
LOGD("recording finished");
}

bool CameraAppEngine::writeFrame(int * data, const long long timestamp){
// Feed any pending encoder output into the muxer.
drainEncoder(false);

// Generate a new frame of input.

/**
* Get the index of the next available input buffer. An app will typically use this with
* getInputBuffer() to get a pointer to the buffer, then copy the data to be encoded or decoded
* into the buffer before passing it to the codec.
*/
ssize_t inBufferIdx = AMediaCodec_dequeueInputBuffer(mEncoder, TIMEOUT_USEC);

/**
* Get an input buffer. The specified buffer index must have been previously obtained from
* dequeueInputBuffer, and not yet queued.
*/
size_t out_size;
uint8_t* inBuffer = AMediaCodec_getInputBuffer(mEncoder, inBufferIdx, &out_size);

// here we actually copy the data.
memcpy(inBuffer, data, out_size);

/**
* Send the specified buffer to the codec for processing.
*/
//int64_t presentationTimeNs = timestamp;
int64_t presentationTimeNs = computePresentationTimeNsec();

media_status_t status = AMediaCodec_queueInputBuffer(mEncoder, inBufferIdx, 0, out_size, presentationTimeNs, mat.empty() ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);

if(status == AMEDIA_OK){
//qDebug() << "Successfully pushed frame to input buffer";
}
else{
LOGW("Something went wrong while pushing frame to input buffer: %d", status);
return false;
}

// Submit it to the encoder. The eglSwapBuffers call will block if the input
// is full, which would be bad if it stayed full until we dequeued an output
// buffer (which we can't do, since we're stuck here). So long as we fully drain
// the encoder before supplying additional input, the system guarantees that we
// can supply another frame without blocking.
//qDebug() << "sending frame " << i << " to encoder";
//AMediaCodec_flush(mEncoder);
return true;
}

void CameraAppEngine::drainEncoder(bool endOfStream) {

if (endOfStream) {
LOGD( "Draining encoder to EOS");
// only API >= 26
// Send an empty frame with the end-of-stream flag set.
// AMediaCodec_signalEndOfInputStream();
// Instead, we construct that frame manually.
}




while (true) {
ssize_t encoderStatus = AMediaCodec_dequeueOutputBuffer(mEncoder, &mBufferInfo, TIMEOUT_USEC);


if (encoderStatus == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
return;
//break; // out of while
}
if(endOfStream){
LOGD("no output available, spinning to await EOS");
return;
}

} else if (encoderStatus == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
} else if (encoderStatus == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
LOGW( "ERROR: format changed twice");
}
AMediaFormat* newFormat = AMediaCodec_getOutputFormat(mEncoder);

if(newFormat == nullptr){
LOGW( "Unable to set new format.");
}

LOGW( "%s", std::string ("encoder output format changed: " + std::string (AMediaFormat_toString(newFormat))).c_str());

// now that we have the Magic Goodies, start the muxer
mTrackIndex = AMediaMuxer_addTrack(mMuxer, newFormat);
media_status_t err = AMediaMuxer_start(mMuxer);

if(err != AMEDIA_OK){
LOGW( "Error occurred: %d", err );
}

mMuxerStarted = true;
} else if (encoderStatus < 0) {
LOGW( "unexpected result from encoder.dequeueOutputBuffer: %d", encoderStatus);
// let's ignore it
} else {

size_t out_size;
uint8_t* encodedData = AMediaCodec_getOutputBuffer(mEncoder, encoderStatus, &out_size);

if(out_size <= 0){
LOGW( "Encoded data of size 0.");
}

if (encodedData == nullptr) {
LOGW("encoderOutputBuffer was null");
}


if ((mBufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
LOGW( "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}

if (mBufferInfo.size != 0) {
if (!mMuxerStarted) {
LOGW ( "muxer hasn't started");
}


// adjust the ByteBuffer values to match BufferInfo (not needed?)
//encodedData.position(mBufferInfo.offset);
//encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

AMediaMuxer_writeSampleData(mMuxer, mTrackIndex, encodedData, &mBufferInfo);
//qDebug() << "sent " + QString::number(mBufferInfo.size) + " bytes to muxer";
}
else{
LOGW( "mBufferInfo empty %d" , mBufferInfo.size);
}

AMediaCodec_releaseOutputBuffer(mEncoder, encoderStatus, false);

if ((mBufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
LOGW( "reached end of stream unexpectedly");
} else {
LOGD( "end of stream reached");

}
break; // out of while
}
}
}
}

/**
*
* @param start is true to start preview, false to stop preview
Expand Down
33 changes: 32 additions & 1 deletion app/src/main/cpp/camera_engine.h
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,11 @@
#include <android/native_activity.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include "media/NdkMediaCodec.h"
#include "media/NdkMediaError.h"
#include "media/NdkMediaFormat.h"
#include "media/NdkMediaMuxer.h"

#include <jni.h>

#include <functional>
Expand All @@ -41,14 +46,40 @@ class CameraAppEngine {
const ImageFormat& GetCompatibleCameraRes() const;
int32_t GetCameraSensorOrientation(int32_t facing);
jobject GetSurfaceObject();
AMediaFormat* format = NULL ;
AMediaCodec* mEncoder;
AMediaMuxer* mMuxer;
AMediaCodecBufferInfo mBufferInfo;
int mTrackIndex;
bool mMuxerStarted;
const static int TIMEOUT_USEC = 10000;
int mFPS = 30;
int mFrameCounter = 0;
std::string filename ;
bool isRunning = false;
int fd = -1 ;

private:
private:
JNIEnv* env_;
jobject javaInstance_;
int32_t requestWidth_;
int32_t requestHeight_;
jobject surface_;
NDKCamera* camera_;
ImageFormat compatibleCameraRes_;

void createEncoder();

long long int computePresentationTimeNsec();

int64_t frame2Time(int64_t frameNo);

void drainEncoder(bool endOfStream);

bool writeFrame(int *data, const long long int timestamp);

void writeEnd();

void releaseEncoder();
};
#endif // __CAMERA_ENGINE_H__

0 comments on commit cd89bf6

Please sign in to comment.