diff --git a/README.md b/README.md index 9adc0c1..bf98167 100644 --- a/README.md +++ b/README.md @@ -12,9 +12,9 @@ A React Native package featuring native modules for generating and rendering aud ## 🎬 Preview -| Audio Playback Waveform | Audio Record Waveform | -| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| AudioPlaybackWaveform | AudioRecordWaveform | +| Audio Playback Waveform | Audio Record Waveform | Audio Waveform with Speed | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| AudioPlaybackWaveform | AudioRecordWaveform | AudioRecordWaveform | ## Quick Access @@ -132,6 +132,7 @@ You can check out the full example at [Example](./example/src/App.tsx). | mode\* | - | ✅ | ✅ | 'live' or 'static' | Type of waveform. It can be either `static` for the resource file or `live` if you want to record audio | | ref\* | - | ✅ | ✅ | IWaveformRef | Type of ref provided to waveform component. If waveform mode is `static`, some methods from ref will throw error and same for `live`.
Check [IWaveformRef](#iwaveformref-methods) for more details about which methods these refs provides. | | path\* | - | ✅ | ❌ | string | Used for `static` type. It is the resource path of an audio source file. | +| playbackSpeed | 1.0 | ✅ | ❌ | 1.0 / 1.5 / 2.0 | The playback speed of the audio player. Note: Playback speed currently supports only Normal (1.0x), Faster (1.5x), and Fastest (2.0x); any value greater than 2.0 is automatically adjusted back to normal (1.0x) playback speed | | candleSpace | 2 | ✅ | ✅ | number | Space between two candlesticks of waveform | | candleWidth | 5 | ✅ | ✅ | number | Width of single candlestick of waveform | | candleHeightScale | 3 | ✅ | ✅ | number | Scaling height of candlestick of waveform | diff --git a/android/src/main/java/com/audiowaveform/AudioPlayer.kt b/android/src/main/java/com/audiowaveform/AudioPlayer.kt index fabc3cc..2635601 100644 --- a/android/src/main/java/com/audiowaveform/AudioPlayer.kt +++ b/android/src/main/java/com/audiowaveform/AudioPlayer.kt @@ -105,7 +105,18 @@ class AudioPlayer( } } - fun start(finishMode: Int?, promise: Promise) { + private fun validateAndSetPlaybackSpeed(player: Player, speed: Float?): Boolean { + // Validate the speed: if null or less than or equal to 0, set to 1f + val validSpeed = if (speed == null || speed <= 0f) 1f else speed + + // Set the playback speed on the player + val playbackParameters = player.playbackParameters.withSpeed(validSpeed) + player.playbackParameters = playbackParameters + + return true // Indicate success + } + + fun start(finishMode: Int?, speed: Float?, promise: Promise) { try { if (finishMode != null && finishMode == 0) { this.finishMode = FinishMode.Loop @@ -114,6 +125,9 @@ } else { this.finishMode = FinishMode.Stop } + + validateAndSetPlaybackSpeed(player, speed) + player.playWhenReady = true player.play() promise.resolve(true) @@ -158,6 +172,18 @@ } } + fun setPlaybackSpeed(speed: Float?, promise: Promise) { + try { + // Call the custom function to validate and set the playback speed + val success = validateAndSetPlaybackSpeed(player, speed) + promise.resolve(success) // Resolve the promise with success + + } catch (e: Exception) { + // Handle any exceptions and reject the promise + promise.reject("setPlaybackSpeed Error", e.toString()) + } + } + private fun startListening(promise: Promise) { try { audioPlaybackListener = object : CountDownTimer(player.duration, UpdateFrequency.Low.value) { diff --git a/android/src/main/java/com/audiowaveform/AudioWaveformModule.kt b/android/src/main/java/com/audiowaveform/AudioWaveformModule.kt index 596a7fd..8d5e7e5 100644 --- a/android/src/main/java/com/audiowaveform/AudioWaveformModule.kt +++ b/android/src/main/java/com/audiowaveform/AudioWaveformModule.kt @@ -133,8 +133,9 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav fun startPlayer(obj: ReadableMap, promise: Promise) { val finishMode = obj.getInt(Constants.finishMode) val key = obj.getString(Constants.playerKey) + val speed = obj.getDouble(Constants.speed) if (key != null) { - audioPlayers[key]?.start(finishMode ?: 2, promise) + audioPlayers[key]?.start(finishMode ?: 2, speed.toFloat(), promise) } else { promise.reject("startPlayer Error", "Player key can't be null") } @@ -222,6 +223,23 @@ } } + @ReactMethod + fun setPlaybackSpeed(obj: ReadableMap, promise: Promise) { + // If the key doesn't exist or if the value is null or
undefined, set default speed to 1.0 + val speed = if (!obj.hasKey(Constants.speed) || obj.isNull(Constants.speed)) { + 1.0f // Set default speed to 1.0 if null, undefined, or missing + } else { + obj.getDouble(Constants.speed).toFloat() + } + + val key = obj.getString(Constants.playerKey) + if (key != null) { + audioPlayers[key]?.setPlaybackSpeed(speed, promise) + } else { + promise.reject("setPlaybackSpeed Error", "Player key can't be null") + } + } + private fun initPlayer(playerKey: String) { if (audioPlayers[playerKey] == null) { val newPlayer = AudioPlayer( diff --git a/android/src/main/java/com/audiowaveform/Utils.kt b/android/src/main/java/com/audiowaveform/Utils.kt index 93fb607..852b606 100644 --- a/android/src/main/java/com/audiowaveform/Utils.kt +++ b/android/src/main/java/com/audiowaveform/Utils.kt @@ -43,6 +43,7 @@ object Constants { const val currentDecibel = "currentDecibel" const val bitRate = "bitRate" const val sampleRate = "sampleRate" + const val speed = "speed" } enum class FinishMode(val value:Int) { diff --git a/assets/audio_playback.gif b/assets/audio_playback.gif index bb6c4f1..f172ffa 100644 Binary files a/assets/audio_playback.gif and b/assets/audio_playback.gif differ diff --git a/assets/audio_playback_with_speed.gif b/assets/audio_playback_with_speed.gif new file mode 100644 index 0000000..ed839a9 Binary files /dev/null and b/assets/audio_playback_with_speed.gif differ diff --git a/assets/audio_record.gif b/assets/audio_record.gif index cb68fff..c692925 100644 Binary files a/assets/audio_record.gif and b/assets/audio_record.gif differ diff --git a/example/ios/AudioWaveformExample.xcodeproj/project.pbxproj b/example/ios/AudioWaveformExample.xcodeproj/project.pbxproj index 467a596..1bdd484 100644 --- a/example/ios/AudioWaveformExample.xcodeproj/project.pbxproj +++ b/example/ios/AudioWaveformExample.xcodeproj/project.pbxproj @@ -14,11 +14,11 @@ 13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; }; 4A3054DD28F34283AA8FEE0D /* file_example_mp3_15s.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = C2A26BD00D7D401B9804E1AA /* file_example_mp3_15s.mp3 */; }; 578F19E664D14A79A203A29B /* file_example_mp3_12s.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = D09FCEB6D8A04D349E9422A5 /* file_example_mp3_12s.mp3 */; }; + 6393E563479648F7B8E27E90 /* file_example_mp3_700kb.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 2026064602944A9C9A1F84D3 /* file_example_mp3_700kb.mp3 */; }; 7699B88040F8A987B510C191 /* libPods-AudioWaveformExample-AudioWaveformExampleTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 19F6CBCC0A4E27FBF8BF4A61 /* libPods-AudioWaveformExample-AudioWaveformExampleTests.a */; }; 81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; }; - 75825DA638CD44CCB89B4B48 /* index.ts in Resources */ = {isa = PBXBuildFile; fileRef = 4879BE3405234CBFAFAD88DB /* index.ts */; }; + DDA8C5D92054463296965254 /* index.ts in Resources */ = {isa = PBXBuildFile; fileRef = DEA7EA5ABBD543DFB81B93A1 /* index.ts */; }; F754817CD5E04214A63ED292 /* file_example_mp3_1mg.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 3D92446673D84104B7DBB5E8 /* file_example_mp3_1mg.mp3 */; }; - 6393E563479648F7B8E27E90 /* file_example_mp3_700kb.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 2026064602944A9C9A1F84D3 /* file_example_mp3_700kb.mp3 */; }; /* End PBXBuildFile section */ /* Begin 
PBXContainerItemProxy section */ @@ -42,7 +42,9 @@ 13B07FB61A68108700A75B9A /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = AudioWaveformExample/Info.plist; sourceTree = ""; }; 13B07FB71A68108700A75B9A /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = main.m; path = AudioWaveformExample/main.m; sourceTree = ""; }; 19F6CBCC0A4E27FBF8BF4A61 /* libPods-AudioWaveformExample-AudioWaveformExampleTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-AudioWaveformExample-AudioWaveformExampleTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 2026064602944A9C9A1F84D3 /* file_example_mp3_700kb.mp3 */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = undefined; includeInIndex = 0; lastKnownFileType = unknown; name = file_example_mp3_700kb.mp3; path = ../src/assets/audio/file_example_mp3_700kb.mp3; sourceTree = ""; }; 3B4392A12AC88292D35C810B /* Pods-AudioWaveformExample.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-AudioWaveformExample.debug.xcconfig"; path = "Target Support Files/Pods-AudioWaveformExample/Pods-AudioWaveformExample.debug.xcconfig"; sourceTree = ""; }; + 3D92446673D84104B7DBB5E8 /* file_example_mp3_1mg.mp3 */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = undefined; includeInIndex = 0; lastKnownFileType = unknown; name = file_example_mp3_1mg.mp3; path = ../src/assets/audio/file_example_mp3_1mg.mp3; sourceTree = ""; }; 5709B34CF0A7D63546082F79 /* Pods-AudioWaveformExample.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-AudioWaveformExample.release.xcconfig"; path = "Target Support Files/Pods-AudioWaveformExample/Pods-AudioWaveformExample.release.xcconfig"; sourceTree = ""; }; 5B7EB9410499542E8C5724F5 /* Pods-AudioWaveformExample-AudioWaveformExampleTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-AudioWaveformExample-AudioWaveformExampleTests.debug.xcconfig"; path = "Target Support Files/Pods-AudioWaveformExample-AudioWaveformExampleTests/Pods-AudioWaveformExample-AudioWaveformExampleTests.debug.xcconfig"; sourceTree = ""; }; 5DCACB8F33CDC322A6C60F78 /* libPods-AudioWaveformExample.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-AudioWaveformExample.a"; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -50,10 +52,8 @@ 89C6BE57DB24E9ADA2F236DE /* Pods-AudioWaveformExample-AudioWaveformExampleTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-AudioWaveformExample-AudioWaveformExampleTests.release.xcconfig"; path = "Target Support Files/Pods-AudioWaveformExample-AudioWaveformExampleTests/Pods-AudioWaveformExample-AudioWaveformExampleTests.release.xcconfig"; sourceTree = ""; }; C2A26BD00D7D401B9804E1AA /* file_example_mp3_15s.mp3 */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = undefined; includeInIndex = 0; lastKnownFileType = unknown; name = file_example_mp3_15s.mp3; path = ../src/assets/audio/file_example_mp3_15s.mp3; sourceTree = ""; }; D09FCEB6D8A04D349E9422A5 /* file_example_mp3_12s.mp3 */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = undefined; includeInIndex = 0; lastKnownFileType = unknown; name = 
file_example_mp3_12s.mp3; path = ../src/assets/audio/file_example_mp3_12s.mp3; sourceTree = ""; }; + DEA7EA5ABBD543DFB81B93A1 /* index.ts */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = undefined; includeInIndex = 0; lastKnownFileType = unknown; name = index.ts; path = ../src/assets/audio/index.ts; sourceTree = ""; }; ED297162215061F000B7C4FE /* JavaScriptCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = JavaScriptCore.framework; path = System/Library/Frameworks/JavaScriptCore.framework; sourceTree = SDKROOT; }; - 4879BE3405234CBFAFAD88DB /* index.ts */ = {isa = PBXFileReference; name = "index.ts"; path = "../src/assets/audio/index.ts"; sourceTree = ""; fileEncoding = undefined; lastKnownFileType = unknown; explicitFileType = undefined; includeInIndex = 0; }; - 3D92446673D84104B7DBB5E8 /* file_example_mp3_1mg.mp3 */ = {isa = PBXFileReference; name = "file_example_mp3_1mg.mp3"; path = "../src/assets/audio/file_example_mp3_1mg.mp3"; sourceTree = ""; fileEncoding = undefined; lastKnownFileType = unknown; explicitFileType = undefined; includeInIndex = 0; }; - 2026064602944A9C9A1F84D3 /* file_example_mp3_700kb.mp3 */ = {isa = PBXFileReference; name = "file_example_mp3_700kb.mp3"; path = "../src/assets/audio/file_example_mp3_700kb.mp3"; sourceTree = ""; fileEncoding = undefined; lastKnownFileType = unknown; explicitFileType = undefined; includeInIndex = 0; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -111,9 +111,9 @@ children = ( D09FCEB6D8A04D349E9422A5 /* file_example_mp3_12s.mp3 */, C2A26BD00D7D401B9804E1AA /* file_example_mp3_15s.mp3 */, - 4879BE3405234CBFAFAD88DB /* index.ts */, 3D92446673D84104B7DBB5E8 /* file_example_mp3_1mg.mp3 */, 2026064602944A9C9A1F84D3 /* file_example_mp3_700kb.mp3 */, + DEA7EA5ABBD543DFB81B93A1 /* index.ts */, ); name = Resources; path = ""; @@ -270,9 +270,9 @@ 13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */, 578F19E664D14A79A203A29B /* file_example_mp3_12s.mp3 in Resources */, 4A3054DD28F34283AA8FEE0D /* file_example_mp3_15s.mp3 in Resources */, - 75825DA638CD44CCB89B4B48 /* index.ts in Resources */, F754817CD5E04214A63ED292 /* file_example_mp3_1mg.mp3 in Resources */, 6393E563479648F7B8E27E90 /* file_example_mp3_700kb.mp3 in Resources */, + DDA8C5D92054463296965254 /* index.ts in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; diff --git a/example/src/App.tsx b/example/src/App.tsx index e50e622..33a33b0 100644 --- a/example/src/App.tsx +++ b/example/src/App.tsx @@ -1,3 +1,14 @@ +import { + FinishMode, + IWaveformRef, + PermissionStatus, + PlaybackSpeedType, + PlayerState, + RecorderState, + UpdateFrequency, + Waveform, + useAudioPermission, +} from '@simform_solutions/react-native-audio-waveform'; import React, { Dispatch, SetStateAction, @@ -13,25 +24,20 @@ import { Pressable, ScrollView, StatusBar, + Text, View, } from 'react-native'; -import { - FinishMode, - IWaveformRef, - PermissionStatus, - PlayerState, - RecorderState, - UpdateFrequency, - Waveform, - useAudioPermission, -} from '@simform_solutions/react-native-audio-waveform'; import { GestureHandlerRootView } from 'react-native-gesture-handler'; import { SafeAreaProvider, useSafeAreaInsets, } from 'react-native-safe-area-context'; import { Gifs, Icons } from './assets'; -import { generateAudioList, type ListItem } from './constants'; +import { + generateAudioList, + playbackSpeedSequence, + type ListItem, +} from './constants'; import stylesheet from './styles'; import { 
Colors } from './theme'; @@ -41,11 +47,15 @@ const RenderListItem = React.memo( currentPlaying, setCurrentPlaying, onPanStateChange, + currentPlaybackSpeed, + changeSpeed, }: { item: ListItem; currentPlaying: string; setCurrentPlaying: Dispatch>; onPanStateChange: (value: boolean) => void; + currentPlaybackSpeed: PlaybackSpeedType; + changeSpeed: () => void; }) => { const ref = useRef(null); const [playerState, setPlayerState] = useState(PlayerState.stopped); @@ -100,6 +110,7 @@ const RenderListItem = React.memo( containerStyle={styles.staticWaveformView} mode="static" key={item.path} + playbackSpeed={currentPlaybackSpeed} ref={ref} path={item.path} candleSpace={2} @@ -132,6 +143,15 @@ const RenderListItem = React.memo( setIsLoading(state); }} /> + {playerState === PlayerState.playing ? ( + + {`${currentPlaybackSpeed}x`} + + ) : ( + + )} @@ -187,7 +207,7 @@ const LivePlayerComponent = ({ containerStyle={styles.liveWaveformView} ref={ref} candleSpace={2} - candleWidth={2} + candleWidth={4} waveColor={Colors.pink} onRecorderStateChange={setRecorderState} /> @@ -210,6 +230,8 @@ const AppContainer = () => { const [shouldScroll, setShouldScroll] = useState(true); const [currentPlaying, setCurrentPlaying] = useState(''); const [list, setList] = useState([]); + const [currentPlaybackSpeed, setCurrentPlaybackSpeed] = + useState(1.0); const { top, bottom } = useSafeAreaInsets(); const styles = stylesheet({ top, bottom }); @@ -222,6 +244,16 @@ const AppContainer = () => { }); }, []); + const changeSpeed = () => { + setCurrentPlaybackSpeed( + prev => + playbackSpeedSequence[ + (playbackSpeedSequence.indexOf(prev) + 1) % + playbackSpeedSequence.length + ] ?? 1.0 + ); + }; + return ( { setCurrentPlaying={setCurrentPlaying} item={item} onPanStateChange={value => setShouldScroll(!value)} + {...{ currentPlaybackSpeed, changeSpeed }} /> ))} diff --git a/example/src/assets/icons/index.ts b/example/src/assets/icons/index.ts index 5300a39..c28aa2c 100644 --- a/example/src/assets/icons/index.ts +++ b/example/src/assets/icons/index.ts @@ -3,4 +3,5 @@ export const Icons = { stop: require('./stop.png'), simform: require('./simform.png'), mic: require('./mic.png'), + logo: require('./logo.png'), }; diff --git a/example/src/assets/icons/logo.png b/example/src/assets/icons/logo.png new file mode 100644 index 0000000..ddd51a0 Binary files /dev/null and b/example/src/assets/icons/logo.png differ diff --git a/example/src/assets/icons/logo@2x.png b/example/src/assets/icons/logo@2x.png new file mode 100644 index 0000000..964e5de Binary files /dev/null and b/example/src/assets/icons/logo@2x.png differ diff --git a/example/src/assets/icons/logo@3x.png b/example/src/assets/icons/logo@3x.png new file mode 100644 index 0000000..442f419 Binary files /dev/null and b/example/src/assets/icons/logo@3x.png differ diff --git a/example/src/constants/StaticData.ts b/example/src/constants/StaticData.ts new file mode 100644 index 0000000..b052905 --- /dev/null +++ b/example/src/constants/StaticData.ts @@ -0,0 +1,3 @@ +import { PlaybackSpeedType } from '@simform_solutions/react-native-audio-waveform'; + +export const playbackSpeedSequence: PlaybackSpeedType[] = [1.0, 1.5, 2.0]; diff --git a/example/src/constants/index.ts b/example/src/constants/index.ts index ac5ea48..02c79d9 100644 --- a/example/src/constants/index.ts +++ b/example/src/constants/index.ts @@ -1,2 +1,3 @@ export * from './Audios'; +export { playbackSpeedSequence } from './StaticData'; export { default as Strings } from './Strings'; diff --git a/example/src/styles.ts 
b/example/src/styles.ts index a62f002..8589dea 100644 --- a/example/src/styles.ts +++ b/example/src/styles.ts @@ -34,6 +34,8 @@ const styles = (params: StyleSheetParams = {}) => borderRadius: scale(10), alignItems: 'center', overflow: 'hidden', + columnGap: scale(8), + paddingHorizontal: scale(8), }, listItemContainer: { marginTop: scale(16), @@ -43,18 +45,18 @@ const styles = (params: StyleSheetParams = {}) => width: '90%', }, buttonImage: { - height: '100%', - width: '100%', + height: scale(22), + width: scale(22), + alignSelf: 'flex-end', }, staticWaveformView: { flex: 1, height: scale(75), - paddingEnd: scale(10), }, playBackControlPressable: { height: scale(30), width: scale(30), - padding: scale(5), + justifyContent: 'center', }, recordAudioPressable: { height: scale(40), @@ -90,6 +92,21 @@ const styles = (params: StyleSheetParams = {}) => loadingText: { color: Colors.black, }, + speedBox: { + height: scale(28), + width: scale(28), + borderRadius: scale(14), + justifyContent: 'center', + }, + whiteBackground: { + backgroundColor: Colors.white, + }, + speed: { + color: Colors.black, + fontSize: scale(10), + textAlign: 'center', + fontWeight: '600', + }, }); export default styles; diff --git a/ios/AudioPlayer.swift b/ios/AudioPlayer.swift index 84e2ba3..362720d 100644 --- a/ios/AudioPlayer.swift +++ b/ios/AudioPlayer.swift @@ -41,6 +41,7 @@ class AudioPlayer: NSObject, AVAudioPlayerDelegate { player?.prepareToPlay() player?.volume = Float(volume ?? 100.0) player?.currentTime = Double(time / 1000) + player?.enableRate = true resolve(true) } catch let error as NSError { reject(Constants.audioWaveforms, error.localizedDescription, error) @@ -77,7 +78,7 @@ class AudioPlayer: NSObject, AVAudioPlayerDelegate { EventEmitter.sharedInstance.dispatch(name: withName, body: body) } - func startPlyer(_ finishMode: Int?, result: RCTPromiseResolveBlock) { + func startPlyer(_ finishMode: Int?, speed: Float, result: RCTPromiseResolveBlock) { if(finishMode != nil && finishMode == 0) { self.finishMode = FinishMode.loop } else if(finishMode != nil && finishMode == 1) { @@ -87,6 +88,7 @@ class AudioPlayer: NSObject, AVAudioPlayerDelegate { } player?.play() player?.delegate = self + player?.rate = Float(speed) startListening() result(player?.isPlaying) } @@ -141,6 +143,16 @@ class AudioPlayer: NSObject, AVAudioPlayerDelegate { strongSelf.timer = Timer.scheduledTimer(timeInterval: TimeInterval((Float(strongSelf.updateFrequency.rawValue) / 1000)), target: strongSelf, selector: #selector(strongSelf.timerUpdate(_:)), userInfo: nil, repeats: true) } } + + func setPlaybackSpeed(_ speed: Float, _ result: @escaping RCTPromiseResolveBlock) { + if let player = player { + player.enableRate = true + player.rate = Float(speed) + result(true) + } else { + result(false) + } + } func stopListening() { timer?.invalidate() diff --git a/ios/AudioWaveform.m b/ios/AudioWaveform.m index 9bf86ee..94b9a75 100644 --- a/ios/AudioWaveform.m +++ b/ios/AudioWaveform.m @@ -60,4 +60,7 @@ @interface RCT_EXTERN_MODULE(AudioWaveform, RCTEventEmitter) rejecter: (RCTPromiseRejectBlock)reject) RCT_EXTERN_METHOD(stopAllPlayers:(RCTPromiseResolveBlock)resolve rejecter: (RCTPromiseRejectBlock)reject) +RCT_EXTERN_METHOD(setPlaybackSpeed:(NSDictionary *)args + resolver: (RCTPromiseResolveBlock)resolve + rejecter: (RCTPromiseRejectBlock)reject) @end diff --git a/ios/AudioWaveform.swift b/ios/AudioWaveform.swift index 8169698..4dff437 100644 --- a/ios/AudioWaveform.swift +++ b/ios/AudioWaveform.swift @@ -153,8 +153,10 @@ class AudioWaveform: 
RCTEventEmitter { @objc func startPlayer(_ args: NSDictionary?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { let key = args?[Constants.playerKey] as? String let finishMode = args?[Constants.finishMode] as? Int + let speed = (args?[Constants.speed] as? NSNumber)?.floatValue ?? 1.0 + if(key != nil){ - audioPlayers[key!]?.startPlyer(finishMode, result:resolve) + audioPlayers[key!]?.startPlyer(finishMode, speed: speed, result:resolve) } else { reject(Constants.audioWaveforms, "Can not start player", NSError()) } @@ -239,5 +241,17 @@ class AudioWaveform: RCTEventEmitter { audioPlayers[playerKey] = newPlayer } } - + + @objc func setPlaybackSpeed(_ args: NSDictionary?, + resolver resolve: @escaping RCTPromiseResolveBlock, + rejecter reject: @escaping RCTPromiseRejectBlock) -> Void { + let key = args?[Constants.playerKey] as? String + let speed = (args?[Constants.speed] as? NSNumber)?.floatValue ?? 1.0 + + if(key != nil){ + audioPlayers[key!]?.setPlaybackSpeed(speed, resolve) + } else { + reject(Constants.audioWaveforms, "Can not pause player, Player key is null", NSError()) + } + } } diff --git a/ios/Utils.swift b/ios/Utils.swift index 2bbecf2..503e124 100644 --- a/ios/Utils.swift +++ b/ios/Utils.swift @@ -60,6 +60,7 @@ struct Constants { static let stopAllPlayers = "stopAllPlayers" static let onDidFinishPlayingAudio = "onDidFinishPlayingAudio" static let finishMode = "finishMode" + static let speed = "speed" static let finishType = "finishType" static let extractWaveformData = "extractWaveformData" static let noOfSamples = "noOfSamples" diff --git a/src/components/Waveform/Waveform.tsx b/src/components/Waveform/Waveform.tsx index 6421d6a..eeceb7f 100644 --- a/src/components/Waveform/Waveform.tsx +++ b/src/components/Waveform/Waveform.tsx @@ -18,6 +18,7 @@ import { DurationType, FinishMode, PermissionStatus, + playbackSpeedThreshold, PlayerState, RecorderState, UpdateFrequency, @@ -45,6 +46,8 @@ export const Waveform = forwardRef((props, ref) => { mode, path, volume = 3, + // The playback speed of the audio player. A value of 1.0 represents normal playback speed. + playbackSpeed = 1.0, candleSpace = 2, candleWidth = 5, containerStyle = {}, @@ -71,6 +74,8 @@ export const Waveform = forwardRef((props, ref) => { const [panMoving, setPanMoving] = useState(false); const [playerState, setPlayerState] = useState(PlayerState.stopped); const [recorderState, setRecorderState] = useState(RecorderState.stopped); + const audioSpeed: number = + playbackSpeed > playbackSpeedThreshold ? 1.0 : playbackSpeed; const { extractWaveformData, @@ -83,6 +88,7 @@ export const Waveform = forwardRef((props, ref) => { onCurrentDuration, onDidFinishPlayingAudio, onCurrentRecordingWaveformData, + setPlaybackSpeed, } = useAudioPlayer(); const { startRecording, stopRecording, pauseRecording, resumeRecording } = @@ -90,6 +96,26 @@ export const Waveform = forwardRef((props, ref) => { const { checkHasAudioRecorderPermission } = useAudioPermission(); + /** + * Updates the playback speed of the audio player. + * + * @param speed - The new playback speed to set. + * @returns A Promise that resolves when the playback speed has been updated. + * @throws An error if there was a problem updating the playback speed. 
+ */ + const updatePlaybackSpeed = async (speed: number) => { + try { + await setPlaybackSpeed({ speed, playerKey: `PlayerFor${path}` }); + } catch (error) { + console.error('Error updating playback speed', error); + } + }; + + useEffect(() => { + updatePlaybackSpeed(audioSpeed); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [audioSpeed]); + const preparePlayerForPath = async (progress?: number) => { if (!isNil(path) && !isEmpty(path)) { try { @@ -213,6 +239,7 @@ export const Waveform = forwardRef((props, ref) => { finishMode: FinishMode.stop, playerKey: `PlayerFor${path}`, path: path, + speed: audioSpeed, ...args, }); diff --git a/src/components/Waveform/WaveformTypes.ts b/src/components/Waveform/WaveformTypes.ts index c3f3a88..f3a5dd3 100644 --- a/src/components/Waveform/WaveformTypes.ts +++ b/src/components/Waveform/WaveformTypes.ts @@ -4,6 +4,8 @@ import type { IStartRecording } from '../../types'; type StaticOrLive = 'static' | 'live'; +export type PlaybackSpeedType = 1.0 | 1.5 | 2.0; + interface BaseWaveform { candleSpace?: number; candleWidth?: number; @@ -26,6 +28,7 @@ export interface StaticWaveform extends BaseWaveform { songDuration: number ) => void; onChangeWaveformLoadState?: (state: boolean) => void; + playbackSpeed?: PlaybackSpeedType; } export interface LiveWaveform extends BaseWaveform { diff --git a/src/constants/index.ts b/src/constants/index.ts index 7f5e301..7955112 100644 --- a/src/constants/index.ts +++ b/src/constants/index.ts @@ -41,3 +41,5 @@ export enum UpdateFrequency { medium = 500.0, low = 1000.0, } + +export const playbackSpeedThreshold = 2.0; diff --git a/src/hooks/useAudioPlayer.tsx b/src/hooks/useAudioPlayer.tsx index fa20ab0..9e90db6 100644 --- a/src/hooks/useAudioPlayer.tsx +++ b/src/hooks/useAudioPlayer.tsx @@ -11,6 +11,7 @@ import { type IPausePlayer, type IPreparePlayer, type ISeekPlayer, + type ISetPlaybackSpeed, type ISetVolume, type IStartPlayer, type IStopPlayer, @@ -72,6 +73,9 @@ export const useAudioPlayer = () => { result => callback(result) ); + const setPlaybackSpeed = (args: ISetPlaybackSpeed) => + AudioWaveform.setPlaybackSpeed(args); + return { extractWaveformData, pausePlayer, @@ -86,5 +90,6 @@ export const useAudioPlayer = () => { onCurrentExtractedWaveformData, getDuration, onCurrentRecordingWaveformData, + setPlaybackSpeed, }; }; diff --git a/src/index.ts b/src/index.ts index ef2daf2..d056c9c 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,8 @@ -export { Waveform, type IWaveformRef } from './components'; +export { + Waveform, + type IWaveformRef, + type PlaybackSpeedType, +} from './components'; export { FinishMode, PermissionStatus, diff --git a/src/types/AudioWaveformTypes.ts b/src/types/AudioWaveformTypes.ts index dd65a5e..5aa904f 100644 --- a/src/types/AudioWaveformTypes.ts +++ b/src/types/AudioWaveformTypes.ts @@ -34,6 +34,7 @@ export interface IPreparePlayer extends IPlayerKey, IPlayerPath { export interface IStartPlayer extends IPlayerKey { finishMode?: FinishMode; + speed?: number; path?: string; } @@ -70,6 +71,10 @@ export interface IOnCurrentRecordingWaveForm { currentDecibel: number; } +export interface ISetPlaybackSpeed extends IPlayerKey { + speed: number; +} + /** * Represents the interface for the AudioWaveforms module. */ @@ -196,4 +201,11 @@ export interface IAudioWaveforms extends NativeModule { * @returns A promise that resolves to a boolean indicating if all players were stopped successfully. */ stopAllPlayers(): Promise; + + /** + * Sets the playback speed of the audio. 
+ @param args - An object containing the player key and the playback speed to set, where 1.0 is normal speed. + @returns A promise that resolves to a boolean indicating if the playback speed was set successfully. + */ + setPlaybackSpeed(args: ISetPlaybackSpeed): Promise<boolean>; }
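
Usage note: a minimal consumer-side sketch of the new `playbackSpeed` support introduced in this diff. It only relies on what the diff itself adds or documents (`PlaybackSpeedType`, the `playbackSpeed` prop on the static `Waveform`, `IWaveformRef`, and the 1.0 → 1.5 → 2.0 cycling pattern used in the example app); the component name `SpeedableWaveform` and the local `speeds` array are illustrative, not part of the package.

```tsx
import React, { useRef, useState } from 'react';
import { Pressable, Text } from 'react-native';
import {
  Waveform,
  type IWaveformRef,
  type PlaybackSpeedType,
} from '@simform_solutions/react-native-audio-waveform';

// Supported speeds; values above 2.0 fall back to normal (1x) playback.
const speeds: PlaybackSpeedType[] = [1.0, 1.5, 2.0];

const SpeedableWaveform = ({ path }: { path: string }) => {
  const ref = useRef<IWaveformRef>(null);
  const [speed, setSpeed] = useState<PlaybackSpeedType>(1.0);

  // Cycle 1x -> 1.5x -> 2x -> 1x, mirroring playbackSpeedSequence in the example app.
  const cycleSpeed = () =>
    setSpeed(prev => speeds[(speeds.indexOf(prev) + 1) % speeds.length] ?? 1.0);

  return (
    <>
      {/* playbackSpeed only applies to the static (file-based) waveform mode. */}
      <Waveform
        mode="static"
        ref={ref}
        path={path}
        playbackSpeed={speed}
        candleSpace={2}
        candleWidth={5}
      />
      <Pressable onPress={cycleSpeed}>
        <Text>{`${speed}x`}</Text>
      </Pressable>
    </>
  );
};

export default SpeedableWaveform;
```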