diff --git a/README.md b/README.md index bf98167..bbad02d 100644 --- a/README.md +++ b/README.md @@ -33,13 +33,13 @@ Here's how to get started with react-native-audio-waveform in your React Native ##### 1. Install the package ```sh -npm install @simform_solutions/react-native-audio-waveform react-native-gesture-handler +npm install @simform_solutions/react-native-audio-waveform react-native-blob-util react-native-gesture-handler ``` ###### --- or --- ```sh -yarn add @simform_solutions/react-native-audio-waveform react-native-gesture-handler +yarn add @simform_solutions/react-native-audio-waveform react-native-blob-util react-native-gesture-handler ``` ##### 2. Install CocoaPods in the iOS project @@ -48,7 +48,7 @@ yarn add @simform_solutions/react-native-audio-waveform react-native-gesture-han npx pod-install ``` -##### Know more about [react-native-gesture-handler](https://www.npmjs.com/package/react-native-gesture-handler) +##### Know more about [react-native-blob-util](https://www.npmjs.com/package/react-native-blob-util) and [react-native-gesture-handler](https://www.npmjs.com/package/react-native-gesture-handler) ##### 3. Add audio recording permissions @@ -90,7 +90,35 @@ const ref = useRef(null); console.log(playerState)} + onPanStateChange={isMoving => console.log(isMoving)} +/>; +``` + +When you want to show a waveform for a external audio URL, you need to use `static` mode for the waveform and set isExternalUrl to true. + +Check the example below for more information. + +```tsx +import { + Waveform, + type IWaveformRef, +} from '@simform_solutions/react-native-audio-waveform'; + +const url = 'https://www2.cs.uic.edu/~i101/SoundFiles/taunt.wav'; // URL to the audio file for which you want to show waveform +const ref = useRef(null); + console.log(state)} + onDownloadProgressChange={progress => console.log(progress)} candleSpace={2} candleWidth={4} scrubColor="white" @@ -133,6 +161,9 @@ You can check out the full example at [Example](./example/src/App.tsx). 
| ref\* | - | ✅ | ✅ | IWaveformRef | Type of ref provided to waveform component. If waveform mode is `static`, some methods from ref will throw error and same for `live`.
Check [IWaveformRef](#iwaveformref-methods) for more details about which methods these refs provides. | | path\* | - | ✅ | ❌ | string | Used for `static` type. It is the resource path of an audio source file. | | playbackSpeed | 1.0 | ✅ | ❌ | 1.0 / 1.5 / 2.0 | The playback speed of the audio player. Note: Currently playback speed only supports, Normal (1x) Faster(1.5x) and Fastest(2.0x), any value passed to playback speed greater than 2.0 will be automatically adjusted to normal playback speed | +| volume | 3 | ✅ | ❌ | number | Used for `static` type. It is a volume level for the media player, ranging from 1 to 10. | +| isExternalUrl | false | ✅ | ❌ | boolean | Used for `static` type. If the resource path of an audio file is a URL, then pass true; otherwise, pass false. | +| autoDownloadExternalAudio | false | ✅ | ❌ | boolean | Used for `static` type. Indicates whether the external media should be auto downloaded or not. | | candleSpace | 2 | ✅ | ✅ | number | Space between two candlesticks of waveform | | candleWidth | 5 | ✅ | ✅ | number | Width of single candlestick of waveform | | candleHeightScale | 3 | ✅ | ✅ | number | Scaling height of candlestick of waveform | @@ -145,6 +176,8 @@ You can check out the full example at [Example](./example/src/App.tsx). | onRecorderStateChange | - | ❌ | ✅ | ( recorderState : RecorderState ) => void | callback function which returns the recorder state whenever the recorder state changes. Check RecorderState for more details | | onCurrentProgressChange | - | ✅ | ❌ | ( currentProgress : number, songDuration: number ) => void | callback function, which returns current progress of audio and total song duration. | | onChangeWaveformLoadState | - | ✅ | ❌ | ( state : boolean ) => void | callback function which returns the loading state of waveform candlestick. | +| onDownloadingStateChange | - | ✅ | ❌ | ( state : boolean ) => void | A callback function that returns the loading state of a file download from an external URL. 
| +| onDownloadProgressChange | - | ✅ | ❌ | ( currentProgress : number ) => void | Used when isExternalUrl is true; a callback function that returns the current progress of a file download from an external URL | | onError | - | ✅ | ❌ | ( error : Error ) => void | callback function which returns the error for static audio waveform | ##### Know more about [ViewStyle](https://reactnative.dev/docs/view-style-props), [PlayerState](#playerstate), and [RecorderState](#recorderstate) @@ -191,6 +224,14 @@ resumePlayer(): Promise It returns a boolean indicating whether playback is resumed again. +#### downloadExternalAudio() + +```ts +downloadExternalAudio(): Promise +``` + +It returns a boolean indicating whether download and cache operation was successful + #### For Live mode #### startRecord() diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index 11db637..897329a 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -387,10 +387,12 @@ PODS: - React-jsinspector (0.72.7) - React-logger (0.72.7): - glog - - react-native-audio-waveform (1.0.0): + - react-native-audio-waveform (2.1.2): - RCT-Folly (= 2021.07.22.00) - React-Core - - react-native-safe-area-context (4.11.0): + - react-native-blob-util (0.19.11): + - React-Core + - react-native-safe-area-context (4.14.0): - React-Core - React-NativeModulesApple (0.72.7): - hermes-engine @@ -502,15 +504,13 @@ PODS: - React-jsi (= 0.72.7) - React-logger (= 0.72.7) - React-perflogger (= 0.72.7) - - rn-fetch-blob (0.12.0): - - React-Core - RNFastImage (8.6.3): - React-Core - SDWebImage (~> 5.11.1) - SDWebImageWebPCoder (~> 0.8.4) - RNFS (2.20.0): - React-Core - - RNGestureHandler (2.19.0): + - RNGestureHandler (2.21.2): - RCT-Folly (= 2021.07.22.00) - React-Core - SDWebImage (5.11.1): @@ -571,6 +571,7 @@ DEPENDENCIES: - React-jsinspector (from `../node_modules/react-native/ReactCommon/jsinspector`) - React-logger (from `../node_modules/react-native/ReactCommon/logger`) - react-native-audio-waveform 
(from `../..`) + - react-native-blob-util (from `../node_modules/react-native-blob-util`) - react-native-safe-area-context (from `../node_modules/react-native-safe-area-context`) - React-NativeModulesApple (from `../node_modules/react-native/ReactCommon/react/nativemodule/core/platform/ios`) - React-perflogger (from `../node_modules/react-native/ReactCommon/reactperflogger`) @@ -589,7 +590,6 @@ DEPENDENCIES: - React-runtimescheduler (from `../node_modules/react-native/ReactCommon/react/renderer/runtimescheduler`) - React-utils (from `../node_modules/react-native/ReactCommon/react/utils`) - ReactCommon/turbomodule/core (from `../node_modules/react-native/ReactCommon`) - - rn-fetch-blob (from `../node_modules/rn-fetch-blob`) - RNFastImage (from `../node_modules/react-native-fast-image`) - RNFS (from `../node_modules/react-native-fs`) - RNGestureHandler (from `../node_modules/react-native-gesture-handler`) @@ -661,6 +661,8 @@ EXTERNAL SOURCES: :path: "../node_modules/react-native/ReactCommon/logger" react-native-audio-waveform: :path: "../.." 
+ react-native-blob-util: + :path: "../node_modules/react-native-blob-util" react-native-safe-area-context: :path: "../node_modules/react-native-safe-area-context" React-NativeModulesApple: @@ -697,8 +699,6 @@ EXTERNAL SOURCES: :path: "../node_modules/react-native/ReactCommon/react/utils" ReactCommon: :path: "../node_modules/react-native/ReactCommon" - rn-fetch-blob: - :path: "../node_modules/rn-fetch-blob" RNFastImage: :path: "../node_modules/react-native-fast-image" RNFS: @@ -743,8 +743,9 @@ SPEC CHECKSUMS: React-jsiexecutor: c49502e5d02112247ee4526bc3ccfc891ae3eb9b React-jsinspector: 8baadae51f01d867c3921213a25ab78ab4fbcd91 React-logger: 8edc785c47c8686c7962199a307015e2ce9a0e4f - react-native-audio-waveform: 7cdb6e4963eeae907240396975b9c79713591758 - react-native-safe-area-context: 851c62c48dce80ccaa5637b6aa5991a1bc36eca9 + react-native-audio-waveform: 99f401dee91ac357ce40cba147a31a18b539d312 + react-native-blob-util: 39a20f2ef11556d958dc4beb0aa07d1ef2690745 + react-native-safe-area-context: 4532f1a0c5d34a46b9324ccaaedcb5582a302b7d React-NativeModulesApple: b6868ee904013a7923128892ee4a032498a1024a React-perflogger: 31ea61077185eb1428baf60c0db6e2886f141a5a React-RCTActionSheet: 392090a3abc8992eb269ef0eaa561750588fc39d @@ -762,10 +763,9 @@ SPEC CHECKSUMS: React-runtimescheduler: 7649c3b46c8dee1853691ecf60146a16ae59253c React-utils: 56838edeaaf651220d1e53cd0b8934fb8ce68415 ReactCommon: 5f704096ccf7733b390f59043b6fa9cc180ee4f6 - rn-fetch-blob: f065bb7ab7fb48dd002629f8bdcb0336602d3cba RNFastImage: 5c9c9fed9c076e521b3f509fe79e790418a544e8 RNFS: 4ac0f0ea233904cb798630b3c077808c06931688 - RNGestureHandler: 7ad14a6c7b491add489246611d324f10009083ac + RNGestureHandler: 0972b77a2cab0f1a61ca1a3ce348a424614f65c2 SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 diff --git a/example/package.json b/example/package.json index 48c932f..395530b 100644 --- 
a/example/package.json +++ b/example/package.json @@ -12,11 +12,11 @@ "dependencies": { "react": "18.2.0", "react-native": "0.72.7", + "react-native-blob-util": "^0.19.11", "react-native-fast-image": "^8.6.3", "react-native-fs": "^2.20.0", "react-native-gesture-handler": "^2.13.4", - "react-native-safe-area-context": "^4.9.0", - "rn-fetch-blob": "^0.12.0" + "react-native-safe-area-context": "^4.11.0" }, "devDependencies": { "@babel/core": "^7.20.0", diff --git a/example/src/App.tsx b/example/src/App.tsx index 7e7c666..0c0950c 100644 --- a/example/src/App.tsx +++ b/example/src/App.tsx @@ -25,6 +25,7 @@ import { ScrollView, StatusBar, Text, + TouchableOpacity, View, } from 'react-native'; import { GestureHandlerRootView } from 'react-native-gesture-handler'; @@ -51,16 +52,19 @@ const RenderListItem = React.memo( onPanStateChange, currentPlaybackSpeed, changeSpeed, + isExternalUrl = false, }: { item: ListItem; onPanStateChange: (value: boolean) => void; currentPlaybackSpeed: PlaybackSpeedType; changeSpeed: () => void; + isExternalUrl?: boolean; }) => { const ref = useRef(null); const [playerState, setPlayerState] = useState(PlayerState.stopped); const styles = stylesheet({ currentUser: item.fromCurrentUser }); - const [isLoading, setIsLoading] = useState(true); + const [isLoading, setIsLoading] = useState(isExternalUrl ? 
false : true); + const [isAudioDownloaded, setIsAudioDownloaded] = useState(false); const handlePlayPauseAction = async () => { // If we are recording do nothing @@ -75,9 +79,13 @@ const RenderListItem = React.memo( if (ref.current?.currentState === PlayerState.paused) { await ref.current?.resumePlayer(); } else { - await ref.current?.startPlayer({ - finishMode: FinishMode.stop, - }); + try { + await ref.current?.startPlayer({ + finishMode: FinishMode.stop, + }); + } catch (error) { + console.log('Error starting player', error); + } } }; @@ -105,9 +113,21 @@ const RenderListItem = React.memo( const handleStopAction = async () => { ref.current?.stopPlayer(); }; + const handleDownloadPress = (): void => { + setIsLoading(true); + ref.current?.downloadExternalAudio(); + }; return ( - + { console.log(error, 'we are in example'); }} + onDownloadingStateChange={state => { + console.log('Download State', state); + }} + onDownloadProgressChange={progress => { + console.log('Download Progress', `${progress}%`); + if (progress === 100) { + setIsAudioDownloaded(true); + } + }} onCurrentProgressChange={(currentProgress, songDuration) => { console.log( `currentProgress ${currentProgress}, songDuration ${songDuration}` @@ -184,6 +215,15 @@ const RenderListItem = React.memo( )} + {isExternalUrl && !isAudioDownloaded && !isLoading ? 
( + + + + ) : null} ); } @@ -366,6 +406,7 @@ const AppContainer = () => { setShouldScroll(!value)} {...{ currentPlaybackSpeed, changeSpeed }} /> diff --git a/example/src/assets/icons/download.png b/example/src/assets/icons/download.png new file mode 100644 index 0000000..6948d47 Binary files /dev/null and b/example/src/assets/icons/download.png differ diff --git a/example/src/assets/icons/index.ts b/example/src/assets/icons/index.ts index b4833bb..f464659 100644 --- a/example/src/assets/icons/index.ts +++ b/example/src/assets/icons/index.ts @@ -6,4 +6,5 @@ export const Icons = { logo: require('./logo.png'), delete: require('./delete.png'), pause: require('./pause.png'), + download: require('./download.png'), }; diff --git a/example/src/constants/Audios.ts b/example/src/constants/Audios.ts index daee5be..d7c8d61 100644 --- a/example/src/constants/Audios.ts +++ b/example/src/constants/Audios.ts @@ -1,11 +1,12 @@ import fs from 'react-native-fs'; -import RNFetchBlob from 'rn-fetch-blob'; +import RNFetchBlob from 'react-native-blob-util'; import { globalMetrics } from '../../src/theme'; import { Platform } from 'react-native'; export interface ListItem { fromCurrentUser: boolean; path: string; + isExternalUrl?: boolean; } /** @@ -70,16 +71,26 @@ const audioAssetArray = [ 'file_example_mp3_15s.mp3', ]; +const externalAudioAssetArray = [ + 'https://codeskulptor-demos.commondatastorage.googleapis.com/GalaxyInvaders/theme_01.mp3', + 'https://codeskulptor-demos.commondatastorage.googleapis.com/pang/paza-moduless.mp3', +]; + /** * Retrieve previously recorded audio files from the cache/document directory. 
- * @returns + * @returns */ export const getRecordedAudios = async (): Promise => { - const recordingSavingPath = Platform.select({ ios: fs.DocumentDirectoryPath, default: fs.CachesDirectoryPath }) + const recordingSavingPath = Platform.select({ + ios: fs.DocumentDirectoryPath, + default: fs.CachesDirectoryPath, + }); - const items = await fs.readDir(recordingSavingPath) - return items.filter(item => item.path.endsWith('.m4a')).map(item => item.path) -} + const items = await fs.readDir(recordingSavingPath); + return items + .filter(item => item.path.endsWith('.m4a')) + .map(item => item.path); +}; /** * Generate a list of file objects with information about successfully copied files (Android) @@ -87,13 +98,29 @@ export const getRecordedAudios = async (): Promise => { * @returns {Promise} A Promise that resolves to the list of file objects. */ export const generateAudioList = async (): Promise => { - const audioAssetPaths = (await copyFilesToNativeResources()).map(value => `${filePath}/${value}`); - const recordedAudios = await getRecordedAudios() + const audioAssetPaths = (await copyFilesToNativeResources()).map( + value => `${filePath}/${value}` + ); + const recordedAudios = await getRecordedAudios(); // Generate the final list based on the copied or available files - return [...audioAssetPaths, ...recordedAudios].map?.((value, index) => ({ - fromCurrentUser: index % 2 !== 0, + const localAssetList = [...audioAssetPaths, ...recordedAudios].map?.( + value => ({ + path: value, + }) + ); + + const externalAudioList = externalAudioAssetArray.map(value => ({ path: value, + isExternalUrl: true, })); + const finalAudios = [...localAssetList, ...externalAudioList].map( + (value, index) => ({ + ...value, + fromCurrentUser: index % 2 !== 0, + }) + ); + + return finalAudios; }; diff --git a/example/src/styles.ts b/example/src/styles.ts index bea55af..ef14adf 100644 --- a/example/src/styles.ts +++ b/example/src/styles.ts @@ -43,10 +43,16 @@ const styles = (params: 
StyleSheetParams = {}) => }, listItemContainer: { marginTop: scale(16), - alignItems: params.currentUser ? 'flex-end' : 'flex-start', + flexDirection: 'row', + justifyContent: params.currentUser ? 'flex-end' : 'flex-start', + alignItems: 'center', + }, + listItemReverseContainer: { + flexDirection: 'row-reverse', + alignSelf: 'flex-end', }, listItemWidth: { - width: '90%', + width: '88%', }, buttonImage: { height: scale(22), @@ -134,6 +140,13 @@ const styles = (params: StyleSheetParams = {}) => textAlign: 'center', fontWeight: '600', }, + downloadIcon: { + width: 20, + height: 20, + tintColor: Colors.pink, + marginLeft: 10, + marginRight: 10, + }, }); export default styles; diff --git a/package.json b/package.json index 444524f..a7408e7 100644 --- a/package.json +++ b/package.json @@ -110,6 +110,7 @@ ] }, "dependencies": { - "lodash": "^4.17.21" + "lodash": "^4.17.21", + "react-native-blob-util": "^0.19.11" } } diff --git a/src/components/Waveform/Waveform.tsx b/src/components/Waveform/Waveform.tsx index 0961a15..25e2b26 100644 --- a/src/components/Waveform/Waveform.tsx +++ b/src/components/Waveform/Waveform.tsx @@ -12,11 +12,13 @@ import React, { } from 'react'; import { PanResponder, + Platform, ScrollView, View, type LayoutRectangle, type NativeTouchEvent, } from 'react-native'; +import RNFetchBlob, { type FetchBlobResponse } from 'react-native-blob-util'; import { DurationType, FinishMode, @@ -42,6 +44,12 @@ import { type StaticWaveform, } from './WaveformTypes'; +// Cache directory based on the platform +const cacheDir: string = + Platform.OS === 'ios' + ? RNFetchBlob.fs.dirs.DocumentDir + : RNFetchBlob.fs.dirs.CacheDir; + export const Waveform = forwardRef((props, ref) => { const { // The maximum number of candles set in the waveform. Once this limit is reached, the oldest candle will be removed as a new one is added to the waveform. @@ -51,6 +59,8 @@ export const Waveform = forwardRef((props, ref) => { volume = 3, // The playback speed of the audio player. 
A value of 1.0 represents normal playback speed. playbackSpeed = 1.0, + isExternalUrl = false, + autoDownloadExternalAudio = false, candleSpace = 2, candleWidth = 5, containerStyle = {}, @@ -63,11 +73,17 @@ export const Waveform = forwardRef((props, ref) => { onCurrentProgressChange = () => {}, candleHeightScale = 3, onChangeWaveformLoadState, + onDownloadingStateChange, + onDownloadProgressChange, } = props as StaticWaveform & LiveWaveform; const viewRef = useRef(null); + const [audioPath, setAudioPath] = useState( + !isExternalUrl ? path : undefined + ); const scrollRef = useRef(null); const isLayoutCalculated = useRef(false); const isAutoPaused = useRef(false); + const audioPathRef = useRef(undefined); const [waveform, setWaveform] = useState([]); const [viewLayout, setViewLayout] = useState(null); const [seekPosition, setSeekPosition] = useState( @@ -111,23 +127,107 @@ export const Waveform = forwardRef((props, ref) => { */ const updatePlaybackSpeed = async (speed: number) => { try { - await setPlaybackSpeed({ speed, playerKey: `PlayerFor${path}` }); + await setPlaybackSpeed({ speed, playerKey: `PlayerFor${audioPath}` }); } catch (error) { console.error('Error updating playback speed', error); } }; useEffect(() => { - updatePlaybackSpeed(audioSpeed); + if (audioPath !== undefined) { + updatePlaybackSpeed(audioSpeed); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [audioSpeed, audioPath]); + + const setExternalAudioPath = (filePath: string): void => { + setAudioPath(filePath); + audioPathRef.current = filePath; + (onDownloadingStateChange as Function)?.(false); + (onDownloadProgressChange as Function)?.(100); + }; + + // Replace special characters with _ and remove extension from the URL and make file name lowercase + const formatUrlToFileName = (url: string): string => { + return url + ?.replace?.(/[:\/\.\%20\-~\?=&@#\!\$\^\*\(\)\{\}\[\],\'"]/g, '_') + ?.replace?.(/\.[^/.]+$/, '') + ?.toLowerCase?.(); + }; + + /** + * Downloads the audio 
file and caches it. + * @returns A promise that resolves to a boolean indicating if the download and cache operation was successful. + */ + const downloadAndCacheFile = async (): Promise => { + const fileUrl: string = path; + const fileName: string = formatUrlToFileName(path); + const filePath: string = `${cacheDir}/${fileName}`; + + try { + const fileExists: boolean = await RNFetchBlob.fs.exists(filePath); + + if (fileExists) { + setExternalAudioPath(filePath); + return Promise.resolve(true); + } + + // File doesn't exist, download it + (onDownloadingStateChange as Function)?.(true); + return await RNFetchBlob.config({ + path: filePath, + fileCache: true, + }) + .fetch('GET', fileUrl) + .progress((received: string, total: string) => { + let progressPercentage: number = Number( + ((Number(received) / Number(total)) * 100)?.toFixed?.(2) + ); + (onDownloadProgressChange as Function)?.(progressPercentage); + }) + .then((response: FetchBlobResponse) => { + const tempFilePath: string = response.path(); + setExternalAudioPath(tempFilePath); + return Promise.resolve(true); + }) + .catch(error => { + console.error(error); + (onDownloadingStateChange as Function)?.(false); + return Promise.resolve(false); + }); + } catch (error) { + console.error(error); + return Promise.resolve(false); + } + }; + + const checkIsFileDownloaded = async (fileName: string): Promise => { + const filePath: string = `${cacheDir}/${fileName}`; + const fileExists: boolean = await RNFetchBlob.fs.exists(filePath); + if (fileExists) { + setExternalAudioPath(filePath); + } + }; + + useEffect(() => { + const fileName: string = formatUrlToFileName(path); + + if (isExternalUrl && path && autoDownloadExternalAudio) { + downloadAndCacheFile(); + } else if (isExternalUrl && path) { + checkIsFileDownloaded(fileName); + } else { + (onDownloadingStateChange as Function)?.(false); + } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [audioSpeed]); + }, [isExternalUrl, path, 
autoDownloadExternalAudio]); const preparePlayerForPath = async (progress?: number) => { - if (!isNil(path) && !isEmpty(path)) { try { const prepare = await preparePlayer({ - path, - playerKey: `PlayerFor${path}`, + path: audioPath, + playerKey: `PlayerFor${audioPath}`, updateFrequency: UpdateFrequency.medium, volume: volume, progress, @@ -138,7 +238,7 @@ export const Waveform = forwardRef((props, ref) => { } } else { return Promise.reject( - new Error(`Can not start player for path: ${path}`) + new Error(`Can not start player for path: ${audioPath ?? path}`) ); } }; @@ -146,7 +246,7 @@ export const Waveform = forwardRef((props, ref) => { const getAudioDuration = async () => { try { const duration = await getDuration({ - playerKey: `PlayerFor${path}`, + playerKey: `PlayerFor${audioPath}`, durationType: DurationType.max, }); if (!isNil(duration)) { @@ -155,7 +255,7 @@ export const Waveform = forwardRef((props, ref) => { return Promise.resolve(audioDuration); } else { return Promise.reject( - new Error(`Could not get duration for path: ${path}`) + new Error(`Could not get duration for path: ${audioPath}`) ); } } catch (err) { @@ -179,13 +279,13 @@ export const Waveform = forwardRef((props, ref) => { }; const getAudioWaveFormForPath = async (noOfSample: number) => { - if (!isNil(path) && !isEmpty(path)) { + if (!isNil(audioPath) && !isEmpty(audioPath)) { try { (onChangeWaveformLoadState as Function)?.(true); const result = await extractWaveformData({ - path: path, - playerKey: `PlayerFor${path}`, - noOfSamples: Math.max(noOfSample, 1), + path: audioPath, + playerKey: `PlayerFor${audioPath}`, + noOfSamples: Math.max(noOfSample, 1), }); (onChangeWaveformLoadState as Function)?.(false); @@ -203,9 +303,11 @@ export const Waveform = forwardRef((props, ref) => { } } else { (onError as Function)( - `Can not find waveform for mode ${mode} path: ${path}` + `Can not find waveform for mode ${mode} path: ${audioPath}` + ); + console.error( + 
`Can not find waveform for mode ${mode} path: ${audioPath}` ); - console.error(`Can not find waveform for mode ${mode} path: ${path}`); } }; @@ -213,7 +315,7 @@ export const Waveform = forwardRef((props, ref) => { if (mode === 'static') { try { const result = await stopPlayer({ - playerKey: `PlayerFor${path}`, + playerKey: `PlayerFor${audioPath}`, }); if (!isNil(result) && result) { if (resetProgress) { @@ -224,7 +326,7 @@ export const Waveform = forwardRef((props, ref) => { return Promise.resolve(result); } else { return Promise.reject( - new Error(`error in stopping player for path: ${path}`) + new Error(`error in stopping player for path: ${audioPath}`) ); } } catch (err) { @@ -246,8 +348,8 @@ export const Waveform = forwardRef((props, ref) => { const play = await playPlayer({ finishMode: FinishMode.stop, - playerKey: `PlayerFor${path}`, - path: path, + playerKey: `PlayerFor${audioPath}`, + path: audioPath, speed: audioSpeed, ...args, }); @@ -257,7 +359,7 @@ export const Waveform = forwardRef((props, ref) => { return Promise.resolve(true); } else { return Promise.reject( - new Error(`error in starting player for path: ${path}`) + new Error(`error in starting player for path: ${audioPath}`) ); } } catch (error) { @@ -279,7 +381,7 @@ export const Waveform = forwardRef((props, ref) => { if (mode === 'static') { try { const pause = await pausePlayer({ - playerKey: `PlayerFor${path}`, + playerKey: `PlayerFor${audioPath}`, }); if (pause) { if (changePlayerState) { @@ -289,7 +391,7 @@ export const Waveform = forwardRef((props, ref) => { return Promise.resolve(true); } else { return Promise.reject( - new Error(`error in pause player for path: ${path}`) + new Error(`error in pause player for path: ${audioPath}`) ); } } catch (error) { @@ -416,7 +518,7 @@ export const Waveform = forwardRef((props, ref) => { }; useEffect(() => { - if (!isNil(viewLayout?.width)) { + if (!isNil(viewLayout?.width) && audioPath !== undefined) { const getNumberOfSamples = floor( 
(viewLayout?.width ?? 0) / (candleWidth + candleSpace) ); @@ -432,10 +534,10 @@ export const Waveform = forwardRef((props, ref) => { } } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [viewLayout?.width, mode, candleWidth, candleSpace]); + }, [viewLayout?.width, mode, candleWidth, candleSpace, audioPath]); const seekToPlayerAction = async () => { - if (!isNil(seekPosition)) { + if (!isNil(seekPosition) && audioPath !== undefined) { if (mode === 'static') { const seekAmount = (seekPosition?.pageX - (viewLayout?.x ?? 0)) / @@ -445,7 +547,7 @@ export const Waveform = forwardRef((props, ref) => { if (!panMoving) { try { await seekToPlayer({ - playerKey: `PlayerFor${path}`, + playerKey: `PlayerFor${audioPath}`, progress: clampedSeekAmount * songDuration, }); } catch (e) { @@ -468,30 +570,40 @@ export const Waveform = forwardRef((props, ref) => { useEffect(() => { seekToPlayerAction(); // eslint-disable-next-line react-hooks/exhaustive-deps - }, [seekPosition, panMoving, mode, songDuration]); + }, [seekPosition, panMoving, mode, songDuration, audioPath]); useEffect(() => { - const tracePlayerState = onDidFinishPlayingAudio(async data => { - if (data.playerKey === `PlayerFor${path}`) { - if (data.finishType === FinishMode.stop) { - setPlayerState(PlayerState.stopped); - setCurrentProgress(0); + if (audioPath !== undefined) { + const tracePlayerState = onDidFinishPlayingAudio(async data => { + if (data.playerKey === `PlayerFor${audioPath}`) { + if (data.finishType === FinishMode.stop) { + setPlayerState(PlayerState.stopped); + setCurrentProgress(0); + } } - } - }); + }); - const tracePlaybackValue = onCurrentDuration(data => { - if (data.playerKey === `PlayerFor${path}`) { - const currentAudioDuration = Number(data.currentDuration); + const tracePlaybackValue = onCurrentDuration(data => { + if (data.playerKey === `PlayerFor${audioPath}`) { + const currentAudioDuration = Number(data.currentDuration); - if (!isNaN(currentAudioDuration)) { - 
setCurrentProgress(currentAudioDuration); - } else { - setCurrentProgress(0); + if (!isNaN(currentAudioDuration)) { + setCurrentProgress(currentAudioDuration); + } else { + setCurrentProgress(0); + } } - } - }); + }); + return () => { + tracePlayerState.remove(); + tracePlaybackValue.remove(); + }; + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [audioPath]); + + useEffect(() => { const traceRecorderWaveformValue = onCurrentRecordingWaveformData( result => { if (mode === 'live') { @@ -515,9 +627,8 @@ export const Waveform = forwardRef((props, ref) => { } } ); + return () => { - tracePlayerState.remove(); - tracePlaybackValue.remove(); traceRecorderWaveformValue.remove(); markPlayerAsUnmounted(); }; @@ -539,20 +650,22 @@ export const Waveform = forwardRef((props, ref) => { }, [recorderState]); useEffect(() => { - if (panMoving) { - if (playerState === PlayerState.playing) { - pausePlayerAction(false); - isAutoPaused.current = true; - } - } else { - if (playerState === PlayerState.paused && isAutoPaused.current) { - startPlayerAction(); - } + if (audioPath !== undefined) { + if (panMoving) { + if (playerState === PlayerState.playing) { + pausePlayerAction(false); + isAutoPaused.current = true; + } + } else { + if (playerState === PlayerState.paused && isAutoPaused.current) { + startPlayerAction(); + } - isAutoPaused.current = false; + isAutoPaused.current = false; + } } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [panMoving]); + }, [panMoving, audioPath]); const calculateLayout = (): void => { viewRef.current?.measureInWindow((x, y, width, height) => { @@ -580,14 +693,19 @@ export const Waveform = forwardRef((props, ref) => { }, onPanResponderStart: () => {}, onPanResponderMove: event => { - setSeekPosition(event.nativeEvent); + if (audioPathRef.current || !isExternalUrl) { + setSeekPosition(event.nativeEvent); + } }, onPanResponderEnd: () => { (onPanStateChange as Function)(false); setPanMoving(false); }, onPanResponderRelease: e 
=> { - setSeekPosition(e.nativeEvent); + if (audioPathRef.current || !isExternalUrl) { + setSeekPosition(e.nativeEvent); + } + (onPanStateChange as Function)(false); setPanMoving(false); }, @@ -611,6 +729,7 @@ export const Waveform = forwardRef((props, ref) => { resumeRecord: resumeRecordingAction, currentState: mode === 'static' ? playerState : recorderState, playerKey: path, + downloadExternalAudio: downloadAndCacheFile, })); return ( diff --git a/src/components/Waveform/WaveformTypes.ts b/src/components/Waveform/WaveformTypes.ts index 3d73c88..b447dda 100644 --- a/src/components/Waveform/WaveformTypes.ts +++ b/src/components/Waveform/WaveformTypes.ts @@ -20,6 +20,8 @@ export interface StaticWaveform extends BaseWaveform { path: string; volume?: number; scrubColor?: string; + isExternalUrl?: boolean; + autoDownloadExternalAudio?: boolean; onPlayerStateChange?: (playerState: PlayerState) => void; onPanStateChange?: (panMoving: boolean) => void; onError?: (error: string) => void; @@ -29,6 +31,8 @@ export interface StaticWaveform extends BaseWaveform { ) => void; onChangeWaveformLoadState?: (state: boolean) => void; playbackSpeed?: PlaybackSpeedType; + onDownloadingStateChange?: (state: boolean) => void; + onDownloadProgressChange?: (currentProgress: number) => void; } export interface LiveWaveform extends BaseWaveform { @@ -53,4 +57,5 @@ export interface IWaveformRef { resumeRecord: () => Promise; currentState: PlayerState | RecorderState; playerKey: string; + downloadExternalAudio: () => Promise; }