diff --git a/README.md b/README.md index bf98167..bd08976 100644 --- a/README.md +++ b/README.md @@ -33,13 +33,13 @@ Here's how to get started with react-native-audio-waveform in your React Native ##### 1. Install the package ```sh -npm install @simform_solutions/react-native-audio-waveform react-native-gesture-handler +npm install @simform_solutions/react-native-audio-waveform rn-fetch-blob react-native-gesture-handler ``` ###### --- or --- ```sh -yarn add @simform_solutions/react-native-audio-waveform react-native-gesture-handler +yarn add @simform_solutions/react-native-audio-waveform rn-fetch-blob react-native-gesture-handler ``` ##### 2. Install CocoaPods in the iOS project @@ -48,7 +48,7 @@ yarn add @simform_solutions/react-native-audio-waveform react-native-gesture-han ```sh npx pod-install ``` -##### Know more about [react-native-gesture-handler](https://www.npmjs.com/package/react-native-gesture-handler) +##### Know more about [rn-fetch-blob](https://www.npmjs.com/package/rn-fetch-blob) and [react-native-gesture-handler](https://www.npmjs.com/package/react-native-gesture-handler) ##### 3. Add audio recording permissions @@ -90,7 +90,34 @@ const ref = useRef(null); console.log(playerState)} + onPanStateChange={isMoving => console.log(isMoving)} +/>; +``` + +When you want to show a waveform for an external audio URL, you need to use `static` mode for the waveform and set isExternalUrl to true. + +Check the example below for more information. + +```tsx +import { + Waveform, + type IWaveformRef, +} from '@simform_solutions/react-native-audio-waveform'; + +const url = 'https://www2.cs.uic.edu/~i101/SoundFiles/taunt.wav'; // URL to the audio file for which you want to show waveform +const ref = useRef(null); console.log(state)} + onDownloadProgressChange={progress => console.log(progress)} candleSpace={2} candleWidth={4} scrubColor="white" @@ -133,6 +160,9 @@ You can check out the full example at [Example](./example/src/App.tsx). 
| ref\* | - | ✅ | ✅ | IWaveformRef | Type of ref provided to waveform component. If waveform mode is `static`, some methods from ref will throw error and same for `live`.
Check [IWaveformRef](#iwaveformref-methods) for more details about which methods these refs provides. | | path\* | - | ✅ | ❌ | string | Used for `static` type. It is the resource path of an audio source file. | | playbackSpeed | 1.0 | ✅ | ❌ | 1.0 / 1.5 / 2.0 | The playback speed of the audio player. Note: Currently playback speed only supports, Normal (1x) Faster(1.5x) and Fastest(2.0x), any value passed to playback speed greater than 2.0 will be automatically adjusted to normal playback speed | +| volume | 3 | ✅ | ❌ | number | Used for `static` type. It is a volume level for the media player, ranging from 1 to 10. | +| isExternalUrl | false | ✅ | ❌ | boolean | Used for `static` type. If the resource path of an audio file is a URL, then pass true; otherwise, pass false. | +| downloadExternalAudio | true | ✅ | ❌ | boolean | Used for `static` type. Indicates whether the external media should be downloaded. | | candleSpace | 2 | ✅ | ✅ | number | Space between two candlesticks of waveform | | candleWidth | 5 | ✅ | ✅ | number | Width of single candlestick of waveform | | candleHeightScale | 3 | ✅ | ✅ | number | Scaling height of candlestick of waveform | @@ -145,6 +175,8 @@ You can check out the full example at [Example](./example/src/App.tsx). | onRecorderStateChange | - | ❌ | ✅ | ( recorderState : RecorderState ) => void | callback function which returns the recorder state whenever the recorder state changes. Check RecorderState for more details | | onCurrentProgressChange | - | ✅ | ❌ | ( currentProgress : number, songDuration: number ) => void | callback function, which returns current progress of audio and total song duration. | | onChangeWaveformLoadState | - | ✅ | ❌ | ( state : boolean ) => void | callback function which returns the loading state of waveform candlestick. | +| onDownloadStateChange | - | ✅ | ❌ | ( state : boolean ) => void | Used when isExternalUrl is true; a callback function that returns the loading state of a file download from an external URL. 
| +| onDownloadProgressChange | - | ✅ | ❌ | ( currentProgress : number ) => void | Used when isExternalUrl is true; a callback function that returns the current progress of a file download from an external URL | | onError | - | ✅ | ❌ | ( error : Error ) => void | callback function which returns the error for static audio waveform | ##### Know more about [ViewStyle](https://reactnative.dev/docs/view-style-props), [PlayerState](#playerstate), and [RecorderState](#recorderstate) diff --git a/example/src/App.tsx b/example/src/App.tsx index fb85122..bbbb74b 100644 --- a/example/src/App.tsx +++ b/example/src/App.tsx @@ -25,6 +25,7 @@ import { ScrollView, StatusBar, Text, + TouchableOpacity, View, } from 'react-native'; import { GestureHandlerRootView } from 'react-native-gesture-handler'; @@ -52,6 +53,7 @@ const RenderListItem = React.memo( onPanStateChange, currentPlaybackSpeed, changeSpeed, + isExternalUrl = false, }: { item: ListItem; currentPlaying: string; @@ -59,13 +61,16 @@ const RenderListItem = React.memo( onPanStateChange: (value: boolean) => void; currentPlaybackSpeed: PlaybackSpeedType; changeSpeed: () => void; + isExternalUrl?: boolean; }) => { const ref = useRef(null); const [playerState, setPlayerState] = useState(PlayerState.stopped); const styles = stylesheet({ currentUser: item.fromCurrentUser }); - const [isLoading, setIsLoading] = useState(true); + const [isLoading, setIsLoading] = useState(isExternalUrl ? 
false : true); + const [downloadExternalAudio, setDownloadExternalAudio] = useState(false); + const [isAudioDownloaded, setIsAudioDownloaded] = useState(false); - const handleButtonAction = () => { + const handleButtonAction = (): void => { if (playerState === PlayerState.stopped) { setCurrentPlaying(item.path); } else { @@ -73,6 +78,15 @@ const RenderListItem = React.memo( } }; + const handleDownloadPress = (): void => { + setDownloadExternalAudio(true); + if (currentPlaying === item.path) { + setCurrentPlaying(''); + } + + setIsLoading(true); + }; + useEffect(() => { if (currentPlaying !== item.path) { ref.current?.stopPlayer(); @@ -82,7 +96,15 @@ const RenderListItem = React.memo( }, [currentPlaying]); return ( - + {isLoading ? ( - + ) : ( { setPlayerState(state); if ( @@ -124,10 +147,20 @@ const RenderListItem = React.memo( setCurrentPlaying(''); } }} + isExternalUrl={isExternalUrl} onPanStateChange={onPanStateChange} onError={error => { console.log(error, 'we are in example'); }} + onDownloadStateChange={state => { + console.log('Download State', state); + }} + onDownloadProgressChange={progress => { + console.log('Download Progress', `${progress}%`); + if (progress === 100) { + setIsAudioDownloaded(true); + } + }} onCurrentProgressChange={(currentProgress, songDuration) => { console.log( 'currentProgress ', @@ -151,6 +184,15 @@ const RenderListItem = React.memo( )} + {isExternalUrl && !downloadExternalAudio && !isAudioDownloaded ? 
( + + + + ) : null} ); } @@ -328,6 +370,7 @@ const AppContainer = () => { currentPlaying={currentPlaying} setCurrentPlaying={setCurrentPlaying} item={item} + isExternalUrl={item.isExternalUrl} onPanStateChange={value => setShouldScroll(!value)} {...{ currentPlaybackSpeed, changeSpeed }} /> diff --git a/example/src/assets/icons/download.png b/example/src/assets/icons/download.png new file mode 100644 index 0000000..6948d47 Binary files /dev/null and b/example/src/assets/icons/download.png differ diff --git a/example/src/assets/icons/index.ts b/example/src/assets/icons/index.ts index 996a449..f111abb 100644 --- a/example/src/assets/icons/index.ts +++ b/example/src/assets/icons/index.ts @@ -5,4 +5,5 @@ export const Icons = { mic: require('./mic.png'), logo: require('./logo.png'), delete: require('./delete.png'), + download: require('./download.png'), }; diff --git a/example/src/constants/Audios.ts b/example/src/constants/Audios.ts index daee5be..9a6bcc5 100644 --- a/example/src/constants/Audios.ts +++ b/example/src/constants/Audios.ts @@ -6,6 +6,7 @@ import { Platform } from 'react-native'; export interface ListItem { fromCurrentUser: boolean; path: string; + isExternalUrl?: boolean; } /** @@ -70,16 +71,26 @@ const audioAssetArray = [ 'file_example_mp3_15s.mp3', ]; +const externalAudioAssetArray = [ + 'https://codeskulptor-demos.commondatastorage.googleapis.com/GalaxyInvaders/theme_01.mp3', + 'https://codeskulptor-demos.commondatastorage.googleapis.com/pang/paza-moduless.mp3', +]; + /** * Retrieve previously recorded audio files from the cache/document directory. 
- * @returns + * @returns */ export const getRecordedAudios = async (): Promise => { - const recordingSavingPath = Platform.select({ ios: fs.DocumentDirectoryPath, default: fs.CachesDirectoryPath }) + const recordingSavingPath = Platform.select({ + ios: fs.DocumentDirectoryPath, + default: fs.CachesDirectoryPath, + }); - const items = await fs.readDir(recordingSavingPath) - return items.filter(item => item.path.endsWith('.m4a')).map(item => item.path) -} + const items = await fs.readDir(recordingSavingPath); + return items + .filter(item => item.path.endsWith('.m4a')) + .map(item => item.path); +}; /** * Generate a list of file objects with information about successfully copied files (Android) @@ -87,13 +98,29 @@ export const getRecordedAudios = async (): Promise => { * @returns {Promise} A Promise that resolves to the list of file objects. */ export const generateAudioList = async (): Promise => { - const audioAssetPaths = (await copyFilesToNativeResources()).map(value => `${filePath}/${value}`); - const recordedAudios = await getRecordedAudios() + const audioAssetPaths = (await copyFilesToNativeResources()).map( + value => `${filePath}/${value}` + ); + const recordedAudios = await getRecordedAudios(); // Generate the final list based on the copied or available files - return [...audioAssetPaths, ...recordedAudios].map?.((value, index) => ({ - fromCurrentUser: index % 2 !== 0, + const localAssetList = [...audioAssetPaths, ...recordedAudios].map?.( + value => ({ + path: value, + }) + ); + + const externalAudioList = externalAudioAssetArray.map(value => ({ path: value, + isExternalUrl: true, })); + const finalAudios = [...localAssetList, ...externalAudioList].map( + (value, index) => ({ + ...value, + fromCurrentUser: index % 2 !== 0, + }) + ); + + return finalAudios; }; diff --git a/example/src/styles.ts b/example/src/styles.ts index 8cb9eb3..9bae9c6 100644 --- a/example/src/styles.ts +++ b/example/src/styles.ts @@ -43,10 +43,16 @@ const styles = (params: 
StyleSheetParams = {}) => }, listItemContainer: { marginTop: scale(16), - alignItems: params.currentUser ? 'flex-end' : 'flex-start', + flexDirection: 'row', + justifyContent: params.currentUser ? 'flex-end' : 'flex-start', + alignItems: 'center', + }, + listItemReverseContainer: { + flexDirection: 'row-reverse', + alignSelf: 'flex-end', }, listItemWidth: { - width: '90%', + width: '88%', }, buttonImage: { height: scale(22), @@ -129,6 +135,13 @@ const styles = (params: StyleSheetParams = {}) => textAlign: 'center', fontWeight: '600', }, + downloadIcon: { + width: 20, + height: 20, + tintColor: Colors.pink, + marginLeft: 10, + marginRight: 10, + }, }); export default styles; diff --git a/package.json b/package.json index 5d9eaee..3aed694 100644 --- a/package.json +++ b/package.json @@ -110,6 +110,7 @@ ] }, "dependencies": { - "lodash": "^4.17.21" + "lodash": "^4.17.21", + "rn-fetch-blob": "^0.12.0" } -} \ No newline at end of file +} diff --git a/src/components/Waveform/Waveform.tsx b/src/components/Waveform/Waveform.tsx index 7934eea..a076942 100644 --- a/src/components/Waveform/Waveform.tsx +++ b/src/components/Waveform/Waveform.tsx @@ -12,11 +12,13 @@ import React, { } from 'react'; import { PanResponder, + Platform, ScrollView, View, type LayoutRectangle, type NativeTouchEvent, } from 'react-native'; +import RNFetchBlob, { type FetchBlobResponse } from 'rn-fetch-blob'; import { DurationType, FinishMode, @@ -42,6 +44,12 @@ import { type StaticWaveform, } from './WaveformTypes'; +// Cache directory based on the platform +const cacheDir: string = + Platform.OS === 'ios' + ? RNFetchBlob.fs.dirs.DocumentDir + : RNFetchBlob.fs.dirs.CacheDir; + export const Waveform = forwardRef((props, ref) => { const { // The maximum number of candles set in the waveform. Once this limit is reached, the oldest candle will be removed as a new one is added to the waveform. 
@@ -51,6 +59,8 @@ export const Waveform = forwardRef((props, ref) => { volume = 3, // The playback speed of the audio player. A value of 1.0 represents normal playback speed. playbackSpeed = 1.0, + isExternalUrl = false, + downloadExternalAudio = true, candleSpace = 2, candleWidth = 5, containerStyle = {}, @@ -63,10 +73,16 @@ export const Waveform = forwardRef((props, ref) => { onCurrentProgressChange = () => {}, candleHeightScale = 3, onChangeWaveformLoadState, + onDownloadStateChange, + onDownloadProgressChange, } = props as StaticWaveform & LiveWaveform; const viewRef = useRef(null); + const [audioPath, setAudioPath] = useState( + !isExternalUrl ? path : undefined + ); const scrollRef = useRef(null); const isLayoutCalculated = useRef(false); + const audioPathRef = useRef(undefined); const [waveform, setWaveform] = useState([]); const [viewLayout, setViewLayout] = useState(null); const [seekPosition, setSeekPosition] = useState( @@ -110,23 +126,107 @@ export const Waveform = forwardRef((props, ref) => { */ const updatePlaybackSpeed = async (speed: number) => { try { - await setPlaybackSpeed({ speed, playerKey: `PlayerFor${path}` }); + await setPlaybackSpeed({ speed, playerKey: `PlayerFor${audioPath}` }); } catch (error) { console.error('Error updating playback speed', error); } }; useEffect(() => { - updatePlaybackSpeed(audioSpeed); + if (audioPath !== undefined) { + updatePlaybackSpeed(audioSpeed); + } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [audioSpeed]); + }, [audioSpeed, audioPath]); + + const setExternalAudioPath = (filePath: string): void => { + setAudioPath(filePath); + audioPathRef.current = filePath; + (onDownloadStateChange as Function)?.(false); + (onDownloadProgressChange as Function)?.(100); + }; + + /** + * Downloads the audio file and caches it in the cache directory. + * @param fileUrl - The URL of the audio file to download. + * @param fileName - The name of the audio file to use in the cache directory. 
+ * @returns A Promise that resolves when the audio file has been downloaded and cached. + */ + const downloadAndCacheFile = async ( + fileUrl: string, + fileName: string + ): Promise => { + const filePath: string = `${cacheDir}/${fileName}`; + + try { + const fileExists: boolean = await RNFetchBlob.fs.exists(filePath); + + if (fileExists) { + setExternalAudioPath(filePath); + return; + } + + // File doesn't exist, download it + (onDownloadStateChange as Function)?.(true); + await RNFetchBlob.config({ + path: filePath, + fileCache: true, + }) + .fetch('GET', fileUrl) + .progress((received: number, total: number) => { + let progressPercentage: number = Number( + ((received / total) * 100)?.toFixed?.(2) + ); + (onDownloadProgressChange as Function)?.(progressPercentage); + }) + .then((response: FetchBlobResponse) => { + const tempFilePath: string = response.path(); + setExternalAudioPath(tempFilePath); + }) + .catch(error => { + console.error(error); + (onDownloadStateChange as Function)?.(false); + }); + } catch (error) { + console.error(error); + } + }; + + const checkIsFileDownloaded = async (fileName: string): Promise => { + const filePath: string = `${cacheDir}/${fileName}`; + const fileExists: boolean = await RNFetchBlob.fs.exists(filePath); + if (fileExists) { + setExternalAudioPath(filePath); + } + }; + + // Replace special characters with _ and remove extension from the URL and make file name lowercase + const formatUrlToFileName = (url: string): string => { + return url + ?.replace?.(/[:\/\.\%20\-~\?=&@#\!\$\^\*\(\)\{\}\[\],\'"]/g, '_') + ?.replace?.(/\.[^/.]+$/, '') + ?.toLowerCase?.(); + }; + + useEffect(() => { + const fileName: string = formatUrlToFileName(path); + + if (isExternalUrl && path && downloadExternalAudio) { + downloadAndCacheFile(path, fileName); + } else if (isExternalUrl && path) { + checkIsFileDownloaded(fileName); + } else { + (onDownloadStateChange as Function)?.(false); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + 
}, [isExternalUrl, path, downloadExternalAudio]); const preparePlayerForPath = async (progress?: number) => { - if (!isNil(path) && !isEmpty(path)) { + if (!isNil(audioPath) && !isEmpty(audioPath)) { try { const prepare = await preparePlayer({ - path, - playerKey: `PlayerFor${path}`, + path: audioPath, + playerKey: `PlayerFor${audioPath}`, updateFrequency: UpdateFrequency.medium, volume: volume, progress, @@ -137,7 +237,7 @@ export const Waveform = forwardRef((props, ref) => { } } else { return Promise.reject( - new Error(`Can not start player for path: ${path}`) + new Error(`Can not start player for path: ${audioPath}`) ); } }; @@ -145,7 +245,7 @@ export const Waveform = forwardRef((props, ref) => { const getAudioDuration = async () => { try { const duration = await getDuration({ - playerKey: `PlayerFor${path}`, + playerKey: `PlayerFor${audioPath}`, durationType: DurationType.max, }); if (!isNil(duration)) { @@ -154,7 +254,7 @@ export const Waveform = forwardRef((props, ref) => { return Promise.resolve(audioDuration); } else { return Promise.reject( - new Error(`Could not get duration for path: ${path}`) + new Error(`Could not get duration for path: ${audioPath}`) ); } } catch (err) { @@ -178,13 +278,13 @@ export const Waveform = forwardRef((props, ref) => { }; const getAudioWaveFormForPath = async (noOfSample: number) => { - if (!isNil(path) && !isEmpty(path)) { + if (!isNil(audioPath) && !isEmpty(audioPath)) { try { (onChangeWaveformLoadState as Function)?.(true); const result = await extractWaveformData({ - path: path, - playerKey: `PlayerFor${path}`, - noOfSamples: Math.max(noOfSample, 1), + path: audioPath, + playerKey: `PlayerFor${audioPath}`, + noOfSamples: Math.max(noOfSample, 1), }); (onChangeWaveformLoadState as Function)?.(false); @@ -202,9 +302,11 @@ export const Waveform = forwardRef((props, ref) => { } } else { (onError as Function)( - `Can not find waveform for mode ${mode} path: ${path}` + `Can not find waveform for mode ${mode} path: ${audioPath}` + ); + 
console.error( + `Can not find waveform for mode ${mode} path: ${audioPath}` ); - console.error(`Can not find waveform for mode ${mode} path: ${path}`); } }; @@ -212,7 +314,7 @@ export const Waveform = forwardRef((props, ref) => { if (mode === 'static') { try { const result = await stopPlayer({ - playerKey: `PlayerFor${path}`, + playerKey: `PlayerFor${audioPath}`, }); if (!isNil(result) && result) { setCurrentProgress(0); @@ -220,7 +322,7 @@ export const Waveform = forwardRef((props, ref) => { return Promise.resolve(result); } else { return Promise.reject( - new Error(`error in stopping player for path: ${path}`) + new Error(`error in stopping player for path: ${audioPath}`) ); } } catch (err) { @@ -242,8 +344,8 @@ export const Waveform = forwardRef((props, ref) => { const play = await playPlayer({ finishMode: FinishMode.stop, - playerKey: `PlayerFor${path}`, - path: path, + playerKey: `PlayerFor${audioPath}`, + path: audioPath, speed: audioSpeed, ...args, }); @@ -253,7 +355,7 @@ export const Waveform = forwardRef((props, ref) => { return Promise.resolve(true); } else { return Promise.reject( - new Error(`error in starting player for path: ${path}`) + new Error(`error in starting player for path: ${audioPath}`) ); } } catch (error) { @@ -270,14 +372,14 @@ export const Waveform = forwardRef((props, ref) => { if (mode === 'static') { try { const pause = await pausePlayer({ - playerKey: `PlayerFor${path}`, + playerKey: `PlayerFor${audioPath}`, }); if (pause) { setPlayerState(PlayerState.paused); return Promise.resolve(true); } else { return Promise.reject( - new Error(`error in pause player for path: ${path}`) + new Error(`error in pause player for path: ${audioPath}`) ); } } catch (error) { @@ -404,7 +506,7 @@ export const Waveform = forwardRef((props, ref) => { }; useEffect(() => { - if (!isNil(viewLayout?.width)) { + if (!isNil(viewLayout?.width) && audioPath !== undefined) { const getNumberOfSamples = floor( (viewLayout?.width ?? 
0) / (candleWidth + candleSpace) ); @@ -420,10 +522,10 @@ export const Waveform = forwardRef((props, ref) => { } } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [viewLayout?.width, mode, candleWidth, candleSpace]); + }, [viewLayout?.width, mode, candleWidth, candleSpace, audioPath]); useEffect(() => { - if (!isNil(seekPosition)) { + if (!isNil(seekPosition) && audioPath !== undefined) { if (mode === 'static') { const seekAmount = (seekPosition?.pageX - (viewLayout?.x ?? 0)) / @@ -432,7 +534,7 @@ export const Waveform = forwardRef((props, ref) => { if (!panMoving) { seekToPlayer({ - playerKey: `PlayerFor${path}`, + playerKey: `PlayerFor${audioPath}`, progress: clampedSeekAmount * songDuration, }); if (playerState === PlayerState.playing) { @@ -444,30 +546,40 @@ export const Waveform = forwardRef((props, ref) => { } } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [seekPosition, panMoving, mode, songDuration]); + }, [seekPosition, panMoving, mode, songDuration, audioPath]); useEffect(() => { - const tracePlayerState = onDidFinishPlayingAudio(async data => { - if (data.playerKey === `PlayerFor${path}`) { - if (data.finishType === FinishMode.stop) { - setPlayerState(PlayerState.stopped); - setCurrentProgress(0); + if (audioPath !== undefined) { + const tracePlayerState = onDidFinishPlayingAudio(async data => { + if (data.playerKey === `PlayerFor${audioPath}`) { + if (data.finishType === FinishMode.stop) { + setPlayerState(PlayerState.stopped); + setCurrentProgress(0); + } } - } - }); + }); - const tracePlaybackValue = onCurrentDuration(data => { - if (data.playerKey === `PlayerFor${path}`) { - const currentAudioDuration = Number(data.currentDuration); + const tracePlaybackValue = onCurrentDuration(data => { + if (data.playerKey === `PlayerFor${audioPath}`) { + const currentAudioDuration = Number(data.currentDuration); - if (!isNaN(currentAudioDuration)) { - setCurrentProgress(currentAudioDuration); - } else { - setCurrentProgress(0); + if 
(!isNaN(currentAudioDuration)) { + setCurrentProgress(currentAudioDuration); + } else { + setCurrentProgress(0); + } } - } - }); + }); + return () => { + tracePlayerState.remove(); + tracePlaybackValue.remove(); + }; + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [audioPath]); + + useEffect(() => { const traceRecorderWaveformValue = onCurrentRecordingWaveformData( result => { if (mode === 'live') { @@ -491,9 +603,8 @@ export const Waveform = forwardRef((props, ref) => { } } ); + return () => { - tracePlayerState.remove(); - tracePlaybackValue.remove(); traceRecorderWaveformValue.remove(); markPlayerAsUnmounted(); }; @@ -515,17 +626,19 @@ export const Waveform = forwardRef((props, ref) => { }, [recorderState]); useEffect(() => { - if (panMoving) { - if (playerState === PlayerState.playing) { - pausePlayerAction(); - } - } else { - if (playerState === PlayerState.paused) { - startPlayerAction(); + if (audioPath !== undefined) { + if (panMoving) { + if (playerState === PlayerState.playing) { + pausePlayerAction(); + } + } else { + if (playerState === PlayerState.paused) { + startPlayerAction(); + } } } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [panMoving]); + }, [panMoving, audioPath]); const calculateLayout = (): void => { viewRef.current?.measureInWindow((x, y, width, height) => { diff --git a/src/components/Waveform/WaveformTypes.ts b/src/components/Waveform/WaveformTypes.ts index f3a5dd3..404ec61 100644 --- a/src/components/Waveform/WaveformTypes.ts +++ b/src/components/Waveform/WaveformTypes.ts @@ -20,6 +20,8 @@ export interface StaticWaveform extends BaseWaveform { path: string; volume?: number; scrubColor?: string; + isExternalUrl?: boolean; + downloadExternalAudio?: boolean; onPlayerStateChange?: (playerState: PlayerState) => void; onPanStateChange?: (panMoving: boolean) => void; onError?: (error: string) => void; @@ -29,6 +31,8 @@ export interface StaticWaveform extends BaseWaveform { ) => void; 
onChangeWaveformLoadState?: (state: boolean) => void; playbackSpeed?: PlaybackSpeedType; + onDownloadStateChange?: (state: boolean) => void; + onDownloadProgressChange?: (currentProgress: number) => void; } export interface LiveWaveform extends BaseWaveform {