diff --git a/app/src/main/cpp/Engine.cpp b/app/src/main/cpp/Engine.cpp
index 8d38e3a2..6770fd0d 100644
--- a/app/src/main/cpp/Engine.cpp
+++ b/app/src/main/cpp/Engine.cpp
@@ -561,4 +561,9 @@ void Engine::test () {
 
 
 int Engine::setTuner (buffer_t * buffer) {
+}
+
+
+int Engine::pushToVideo (buffer_t * buffer) {
+
 }
\ No newline at end of file
diff --git a/app/src/main/cpp/Engine.h b/app/src/main/cpp/Engine.h
index 3d1b9018..d003c3ec 100644
--- a/app/src/main/cpp/Engine.h
+++ b/app/src/main/cpp/Engine.h
@@ -106,6 +106,8 @@
     std::string LIBRARY_PATH ;
 
     int setTuner(buffer_t *buffer);
+
+    int pushToVideo(buffer_t *buffer);
 } ;
 
 #endif // __ENGINE__H
\ No newline at end of file
diff --git a/app/src/main/cpp/Meter.cpp b/app/src/main/cpp/Meter.cpp
index 64dfb280..724a7133 100644
--- a/app/src/main/cpp/Meter.cpp
+++ b/app/src/main/cpp/Meter.cpp
@@ -17,6 +17,7 @@ bool Meter::engine_running = false ;
 //LockFreeQueue Meter::lockFreeQueue;
 int Meter::bufferUsed = 0;
 bool Meter::tunerEnabled = false;
+bool Meter::videoRecording = false ;
 int Meter::bufferUsedOutput = 0;
 float Meter::tunerBuffer [1024 * 4] ;
 int Meter::tunerIndex = 0;
@@ -29,6 +30,7 @@ jmethodID Meter::setMixerMeter ;
 jclass Meter::mainActivity ;
 jmethodID Meter::setMixerMeterOutput ;
 jmethodID Meter::setTuner ;
+jmethodID Meter::pushToVideo ;
 jclass Meter::mainActivityOutput ;
 JNIEnv * Meter::env = NULL;
 JNIEnv * Meter::envOutput = NULL;
@@ -104,10 +106,6 @@ int Meter::updateMeterOutput (AudioBuffer * buffer) {
     }
     */
 
-    if (! engine_running) {
-        return 0;
-    }
-
     if (envOutput == nullptr) {
         LOGD("MeterOutput thread id: %d", gettid ());
         envOutput = getEnv();
@@ -128,9 +126,8 @@ int Meter::updateMeterOutput (AudioBuffer * buffer) {
 
         setMixerMeterOutput = envOutput->GetStaticMethodID(mainActivityOutput, "setMixerMeterSwitch",
                                                            "(FZ)V");
-
         setTuner = envOutput->GetStaticMethodID(mainActivityOutput, "setTuner",
-                                                "([F)V");
+                                                "([FI)V");
         if (setMixerMeterOutput == nullptr) {
             LOGF("cannot find method!");
         }
@@ -143,6 +140,21 @@ int Meter::updateMeterOutput (AudioBuffer * buffer) {
         jfloatArray1 = envOutput->NewFloatArray(TUNER_ARRAY_SIZE);
         jfloatArray1_index = 0 ;
         return 0 ;
+    } else {
+        if (tunerEnabled or videoRecording) {
+            if ((jfloatArray1_index + samples) >= TUNER_ARRAY_SIZE) {
+                envOutput->CallStaticVoidMethod(mainActivityOutput, setTuner, jfloatArray1, samples, false);
+                jfloatArray1_index = 0 ;
+            } else {
+                envOutput->SetFloatArrayRegion(jfloatArray1, jfloatArray1_index, samples, raw);
+                jfloatArray1_index += samples;
+            }
+        }
+    }
+
+
+    if (! engine_running) {
+        return 0;
     }
 
     if (envOutput == nullptr) {
@@ -180,27 +192,6 @@ int Meter::updateMeterOutput (AudioBuffer * buffer) {
     envOutput->CallStaticVoidMethod(mainActivityOutput, setMixerMeterOutput, (jfloat) max, false);
     envOutput->CallStaticVoidMethod(mainActivityOutput, setMixerMeterOutput, (jfloat) imax, true);
 
-    if (tunerEnabled) {
-        /*
-        if (samples > jfloatArray1_Size) {
-            LOGW("increased float array size from %d to %d", jfloatArray1_Size, samples);
-            jboolean copy = true ;
-            jfloat * elems = envOutput->GetFloatArrayElements( jfloatArray1, &copy);
-            envOutput->ReleaseFloatArrayElements(jfloatArray1, elems, 0);
-
-            jfloatArray1 = envOutput->NewFloatArray(samples + 1);
-            jfloatArray1_Size = samples + 1 ;
-        }
-        */
-
-        if ((jfloatArray1_index + samples) >= TUNER_ARRAY_SIZE) {
-            envOutput->CallStaticVoidMethod(mainActivityOutput, setTuner, jfloatArray1, false);
-            jfloatArray1_index = 0 ;
-        } else {
-            envOutput->SetFloatArrayRegion(jfloatArray1, jfloatArray1_index, samples, raw);
-            jfloatArray1_index += samples;
-        }
-    }
 
     return 0;
 }
diff --git a/app/src/main/cpp/Meter.h b/app/src/main/cpp/Meter.h
index 40377dde..676b7e3b 100644
--- a/app/src/main/cpp/Meter.h
+++ b/app/src/main/cpp/Meter.h
@@ -213,6 +213,8 @@ class Meter {
     static _jfloatArray *jfloatArray1;
     static int jfloatArray1_Size;
    static int jfloatArray1_index;
+    static jmethodID pushToVideo;
+    static bool videoRecording;
 };
 
 #endif //AMP_RACK_METER_H
diff --git a/app/src/main/cpp/native-lib.cpp b/app/src/main/cpp/native-lib.cpp
index 8c15cb0b..8d373809 100644
--- a/app/src/main/cpp/native-lib.cpp
+++ b/app/src/main/cpp/native-lib.cpp
@@ -823,4 +823,11 @@ JNIEXPORT jstring JNICALL
 Java_com_shajikhan_ladspa_amprack_AudioEngine_tuneLatency(JNIEnv *env, jclass clazz) {
     if (engine == NULL) return NULL ;
     return env->NewStringUTF (engine -> tuneLatency ().c_str());
+}
+extern "C"
+JNIEXPORT void JNICALL
+Java_com_shajikhan_ladspa_amprack_AudioEngine_toggleVideoRecording(JNIEnv *env, jclass clazz,
+                                                                   jboolean toggle) {
+    if (engine == nullptr) return;
+    engine->meter->videoRecording = toggle;
 }
\ No newline at end of file
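Note on the C++ hunks above: Meter::updateMeterOutput now batches samples into jfloatArray1 whenever the tuner or video recording is active, and looks up setTuner with the JNI descriptor "([FI)V". Below is a minimal sketch of the Java method that descriptor has to resolve to; the real implementation is in the MainActivity hunk further down, and the class wrapper here is illustrative only. (The new Engine::setTuner and Engine::pushToVideo bodies are still empty stubs even though both are declared to return int.)

```java
// Sketch of the Java counterpart resolved by
// GetStaticMethodID(mainActivityOutput, "setTuner", "([FI)V").
// Descriptor mapping: "[F" -> float[], "I" -> int, trailing "V" -> void return.
public class MainActivity {
    // Receives one batched window of samples from Meter::updateMeterOutput:
    // 'data' holds the samples, 'size' the number of valid frames in it.
    static void setTuner(float[] data, int size) {
        // feed the tuner and/or the video recorder (see the Java hunks below)
    }
}
```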
diff --git a/app/src/main/java/com/shajikhan/ladspa/amprack/AudioEngine.java b/app/src/main/java/com/shajikhan/ladspa/amprack/AudioEngine.java
index 174e959e..ae7eed29 100644
--- a/app/src/main/java/com/shajikhan/ladspa/amprack/AudioEngine.java
+++ b/app/src/main/java/com/shajikhan/ladspa/amprack/AudioEngine.java
@@ -111,4 +111,5 @@ static void warnLowLatency (Context context) {
     }
 
     static native int getSampleRate () ;
+    static native void toggleVideoRecording (boolean toggle);
 }
diff --git a/app/src/main/java/com/shajikhan/ladspa/amprack/Camera2.java b/app/src/main/java/com/shajikhan/ladspa/amprack/Camera2.java
index 8647b828..9785d4d2 100644
--- a/app/src/main/java/com/shajikhan/ladspa/amprack/Camera2.java
+++ b/app/src/main/java/com/shajikhan/ladspa/amprack/Camera2.java
@@ -31,6 +31,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -45,14 +46,18 @@ public class Camera2 {
     private static final int FRAME_RATE = 30;               // 15fps
     private static final int IFRAME_INTERVAL = 1;           // 10 seconds between I-frames
     private int mWidth = -1;
+    public long presentationTimeUs = 0 ;
     private int mHeight = -1;
     // bit rate, in bits per second
     private int mBitRate = -1;
-    private MediaCodec mEncoder;
+    public MediaCodec mEncoder, audioEncoder;
+    ByteBuffer[] audioInputBuffers ;
+
     private Surface mInputSurface;
     private MediaMuxer mMuxer;
     private int mTrackIndex;
-    private boolean mMuxerStarted;
+    public int audioIndex ;
+    public boolean mMuxerStarted;
 
     // allocate one of these up front so we don't need to do it every time
     private MediaCodec.BufferInfo mBufferInfo;
@@ -259,6 +264,11 @@ private void prepareEncoder() {
         format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
         Log.d(TAG, "format: " + format);
 
+        MediaFormat outputFormat = MediaFormat.createAudioFormat("audio/mp4a-latm", AudioEngine.getSampleRate(), 1);
+        outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
+        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, 160000);
+        outputFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
+
         // Create a MediaCodec encoder, and configure it with our format.  Get a Surface
         // we can use for input and wrap it with a class that handles the EGL work.
         //
@@ -268,13 +278,19 @@ private void prepareEncoder() {
         // take eglGetCurrentContext() as the share_context argument.
         try {
             mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
+            audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
 
         mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+        audioEncoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
         mInputSurface = mEncoder.createInputSurface();
-        mEncoder.setCallback(new EncoderCallback());
+        mEncoder.setCallback(new EncoderCallback(true));
+        audioEncoder.setCallback(new EncoderCallback(false));
+
+        audioEncoder.start();
         mEncoder.start();
 
         // Output filename.  Ideally this would use Context.getFilesDir() rather than a
@@ -310,11 +326,19 @@ private void prepareEncoder() {
     private void releaseEncoder() {
         Log.d(TAG, "releaseEncoder: stopping encoder");
         mEncoder.signalEndOfInputStream();
+
        if (mEncoder != null) {
             mEncoder.stop();
             mEncoder.release();
             mEncoder = null;
         }
+
+        if (audioEncoder != null) {
+            audioEncoder.stop();
+            audioEncoder.release();
+            audioEncoder = null;
+        }
+
         if (mInputSurface != null) {
             mInputSurface.release();
             mInputSurface = null;
@@ -323,6 +347,7 @@ private void releaseEncoder() {
         if (mMuxer != null) {
             mMuxer.stop();
             mMuxer.release();
             mMuxer = null;
+            mMuxerStarted = false;
         }
     }
@@ -413,30 +438,60 @@ private void drainEncoder(boolean endOfStream) {
     }
 
     class EncoderCallback extends MediaCodec.Callback {
-        ByteBuffer outPutByteBuffer;
+        ByteBuffer outPutByteBuffer, inputByteBuffer;
+        MainActivity.AVBuffer floatBuffer;
+        boolean isVideo ;
+
+        EncoderCallback (boolean video) {
+            isVideo = video;
+        }
+
 
         @Override
         public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
+            if (! isVideo) return;
+            int eos = 0 ;
+
+            if (! mainActivity.videoRecording)
+                eos = MediaCodec.BUFFER_FLAG_END_OF_STREAM ;
+
+            floatBuffer = mainActivity.avBuffer.pop() ;
+            inputByteBuffer = codec.getInputBuffer(index);
+            inputByteBuffer.asFloatBuffer().put(floatBuffer.floatBuffer);
+            presentationTimeUs = 1000000l * index / 48000;
+            mainActivity.camera2.audioEncoder.queueInputBuffer(index, 0, floatBuffer.size, presentationTimeUs, eos);
+            if (eos != 0) {
+                audioEncoder.signalEndOfInputStream();
+                audioEncoder.stop();
+            }
 
         }
 
         @Override
         public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
-            if (! mMuxerStarted) {
+            if (! mMuxerStarted && isVideo) {
                 MediaFormat newFormat = mEncoder.getOutputFormat();
                 Log.d(TAG, "encoder output format changed: " + newFormat);
 
                 // now that we have the Magic Goodies, start the muxer
                 mTrackIndex = mMuxer.addTrack(newFormat);
+                mMuxer.addTrack(audioEncoder.getOutputFormat());
+                mMuxer.setOrientationHint(cameraCharacteristicsHashMap.get(cameraId).get(CameraCharacteristics.SENSOR_ORIENTATION));
                 mMuxer.start();
                 mMuxerStarted = true;
             }
 
+            if (!mMuxerStarted && ! isVideo)
+                return;
+
             outPutByteBuffer = codec.getOutputBuffer(index);
 //            byte[] outDate = new byte[info.size];
 //            outPutByteBuffer.get(outDate);
 
-            mMuxer.writeSampleData(mTrackIndex, outPutByteBuffer, info);
+            if (isVideo)
+                mMuxer.writeSampleData(mTrackIndex, outPutByteBuffer, info);
+            else
+                mMuxer.writeSampleData(mTrackIndex + 1, outPutByteBuffer, info);
 
             codec.releaseOutputBuffer(index, false);
         }
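In the Camera2 hunks above, only the video track index returned by MediaMuxer.addTrack() is stored; audio samples are later written to mTrackIndex + 1, and the audio presentation time is derived from the codec's input-buffer index. Below is a hedged sketch of an alternative that records both track indices (the audioIndex field added above is otherwise unused) and derives timestamps from the number of frames queued; totalFramesQueued, startMuxerTracks and nextAudioPtsUs are hypothetical names, not part of this patch. Note also that the AAC encoder is configured without MediaFormat.KEY_PCM_ENCODING, so by default it expects 16-bit PCM input while the callback writes 32-bit floats; setting KEY_PCM_ENCODING to AudioFormat.ENCODING_PCM_FLOAT (API 24+) or converting the samples may be necessary.

```java
// Sketch only, reusing the Camera2 fields from this patch (mMuxer, mEncoder,
// audioEncoder, mTrackIndex, audioIndex, mMuxerStarted); the helpers are hypothetical.
private long totalFramesQueued = 0;

private void startMuxerTracks() {
    // addTrack() returns the track index, so capture both return values
    // instead of assuming the audio track lands at mTrackIndex + 1.
    mTrackIndex = mMuxer.addTrack(mEncoder.getOutputFormat());
    audioIndex = mMuxer.addTrack(audioEncoder.getOutputFormat());
    mMuxer.start();
    mMuxerStarted = true;
}

private long nextAudioPtsUs(int frames) {
    // Timestamp from the frames queued so far at the engine sample rate,
    // rather than from the input-buffer index handed to the callback.
    long ptsUs = 1_000_000L * totalFramesQueued / AudioEngine.getSampleRate();
    totalFramesQueued += frames;
    return ptsUs;
}
```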
diff --git a/app/src/main/java/com/shajikhan/ladspa/amprack/MainActivity.java b/app/src/main/java/com/shajikhan/ladspa/amprack/MainActivity.java
index d4194467..318d1d88 100644
--- a/app/src/main/java/com/shajikhan/ladspa/amprack/MainActivity.java
+++ b/app/src/main/java/com/shajikhan/ladspa/amprack/MainActivity.java
@@ -141,6 +141,7 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -155,7 +156,17 @@ public class MainActivity extends AppCompatActivity implements ActivityCompat.On
     public boolean headphoneWarning = true;
     static Context context;
     static MainActivity mainActivity;
+    boolean videoRecording = false ;
     Camera2 camera2 ;
+    static class AVBuffer {
+        FloatBuffer floatBuffer ;
+        int size ;
+    }
+    static LinkedList<AVBuffer> avBuffer = new LinkedList<>();
+    static int avEncoderIndex = 0 ;
+    static long presentationTimeUs = 0;
+    int totalBytesRead = 0;
+
     ExtendedFloatingActionButton fab ;
     Button hidePanel;
     SwitchMaterial onOff = null ;
@@ -2703,7 +2714,26 @@ static void setMixerMeter (float inputValue, float outputValue) {
         outputMeter.setProgress((int) (outputValue * 100));
     }
 
-    static void setTuner (float [] data) {
+    static void setTuner (float [] data, int size) {
+        if (mainActivity.videoRecording && mainActivity.camera2.mMuxerStarted) {
+            /*
+            int inputBufferId = mainActivity.camera2.audioEncoder.dequeueInputBuffer(5000);
+            if (inputBufferId >= 0) {
+                ByteBuffer inputBuffer = mainActivity.camera2.audioEncoder.getInputBuffer (avEncoderIndex);
+                inputBuffer.asFloatBuffer().put(data);
+                presentationTimeUs = 1000000l * avEncoderIndex / 48000;
+                mainActivity.camera2.audioEncoder.queueInputBuffer(inputBufferId, 0, size, presentationTimeUs, 0);
+            }
+
+            */
+
+            AVBuffer buffer = new AVBuffer();
+            buffer.size = size;
+            buffer.floatBuffer = FloatBuffer.wrap(data);
+
+            avBuffer.addLast(buffer);
+        }
+
         if (! mainActivity.tunerEnabled)
             return ;
         double freq = pitch.computePitchFrequency(data);
@@ -3410,4 +3440,7 @@ int getCameraSensorOrientation(CameraCharacteristics characteristics) {
         return (360 - (cameraOrientation != null ? cameraOrientation : 0)) % 360;
     }
 
+    public static void pushToVideo (float [] data, int nframes) {
+
+    }
 }
\ No newline at end of file
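setTuner() runs on the native meter thread while Camera2's encoder callback pops from the same avBuffer on a codec handler thread, and LinkedList.pop() throws NoSuchElementException once the queue runs dry. Below is a sketch of a thread-safe variant under those assumptions; AVBufferQueue, enqueueForVideo and dequeueOrNull are hypothetical names. Since the native side refills a single jfloatArray for every call, the sketch also copies the samples instead of wrapping the caller's array directly.

```java
import java.nio.FloatBuffer;
import java.util.Arrays;
import java.util.concurrent.ConcurrentLinkedQueue;

// Sketch of a thread-safe producer/consumer pair for the AVBuffer queue.
class AVBufferQueue {
    static final ConcurrentLinkedQueue<MainActivity.AVBuffer> avQueue =
            new ConcurrentLinkedQueue<>();

    // Producer: called from setTuner() on the audio/meter thread.
    static void enqueueForVideo(float[] data, int size) {
        MainActivity.AVBuffer buffer = new MainActivity.AVBuffer();
        buffer.size = size;
        // Defensive copy: the native side reuses the same array on every call.
        buffer.floatBuffer = FloatBuffer.wrap(Arrays.copyOf(data, size));
        avQueue.add(buffer);
    }

    // Consumer: called from the audio encoder callback thread.
    static MainActivity.AVBuffer dequeueOrNull() {
        return avQueue.poll();   // returns null when empty instead of throwing
    }
}
```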
diff --git a/app/src/main/java/com/shajikhan/ladspa/amprack/Rack.java b/app/src/main/java/com/shajikhan/ladspa/amprack/Rack.java
index 102f83cb..65a5dd8a 100644
--- a/app/src/main/java/com/shajikhan/ladspa/amprack/Rack.java
+++ b/app/src/main/java/com/shajikhan/ladspa/amprack/Rack.java
@@ -192,12 +192,16 @@ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                     videoPreview.setVisibility(View.GONE);
                     mainActivity.camera2.closeCamera();
                     mainActivity.showMediaPlayerDialog();
+                    mainActivity.avBuffer.clear();
                 } else {
                     videoPreview.setVisibility(View.VISIBLE);
                     mainActivity.camera2.openCamera();
                 }
+
+                AudioEngine.toggleVideoRecording(isChecked);
+                mainActivity.videoRecording = isChecked;
             }
         });
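One ordering note on the switch handler above: when recording is turned off, avBuffer is cleared before the native flag and videoRecording are flipped, so the meter thread and the encoder callback can still be producing and consuming while the queue is emptied. A sketch of one possible ordering for the "turned off" branch follows, using the same identifiers as the hunk above; this is a suggestion, not part of the patch.

```java
// Stop the producer first, then tear down the UI, and clear the queue last.
AudioEngine.toggleVideoRecording(false);   // native meter stops pushing samples
mainActivity.videoRecording = false;       // lets the audio encoder callback flag EOS
videoPreview.setVisibility(View.GONE);
mainActivity.camera2.closeCamera();
mainActivity.showMediaPlayerDialog();
mainActivity.avBuffer.clear();             // safe once nothing is producing or consuming
```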
diff --git a/app/src/main/res/layout/media_player_dialog.xml b/app/src/main/res/layout/media_player_dialog.xml
index 9a31b150..38f2a3ac 100644
--- a/app/src/main/res/layout/media_player_dialog.xml
+++ b/app/src/main/res/layout/media_player_dialog.xml
@@ -63,7 +63,7 @@
                 android:layout_weight="1"
                 android:textAlignment="viewStart"
                 android:id="@+id/share_file"
-                android:text=" Share"
+                android:text=" Share"
                 android:fontFamily="@font/comfortaa"
                 android:drawableLeft="@drawable/ic_baseline_share_24"/>