// AudioPlayer.tsx
  1. import React, { useCallback, useEffect, useRef, useState } from 'react'
  2. import { t } from 'i18next'
  3. import styles from './AudioPlayer.module.css'
  4. import Toast from '@/app/components/base/toast'
  5. type AudioPlayerProps = {
  6. src: string
  7. }
  8. const AudioPlayer: React.FC<AudioPlayerProps> = ({ src }) => {
  9. const [isPlaying, setIsPlaying] = useState(false)
  10. const [currentTime, setCurrentTime] = useState(0)
  11. const [duration, setDuration] = useState(0)
  12. const [waveformData, setWaveformData] = useState<number[]>([])
  13. const [bufferedTime, setBufferedTime] = useState(0)
  14. const audioRef = useRef<HTMLAudioElement>(null)
  15. const canvasRef = useRef<HTMLCanvasElement>(null)
  16. const [hasStartedPlaying, setHasStartedPlaying] = useState(false)
  17. const [hoverTime, setHoverTime] = useState(0)
  18. const [isAudioAvailable, setIsAudioAvailable] = useState(true)
  19. useEffect(() => {
  20. const audio = audioRef.current
  21. if (!audio)
  22. return
  23. const handleError = () => {
  24. setIsAudioAvailable(false)
  25. }
  26. const setAudioData = () => {
  27. setDuration(audio.duration)
  28. }
  29. const setAudioTime = () => {
  30. setCurrentTime(audio.currentTime)
  31. }
  32. const handleProgress = () => {
  33. if (audio.buffered.length > 0)
  34. setBufferedTime(audio.buffered.end(audio.buffered.length - 1))
  35. }
  36. const handleEnded = () => {
  37. setIsPlaying(false)
  38. }
  39. audio.addEventListener('loadedmetadata', setAudioData)
  40. audio.addEventListener('timeupdate', setAudioTime)
  41. audio.addEventListener('progress', handleProgress)
  42. audio.addEventListener('ended', handleEnded)
  43. audio.addEventListener('error', handleError)
  44. // Preload audio metadata
  45. audio.load()
  46. // Delayed generation of waveform data
  47. // eslint-disable-next-line @typescript-eslint/no-use-before-define
  48. const timer = setTimeout(() => generateWaveformData(src), 1000)
  49. return () => {
  50. audio.removeEventListener('loadedmetadata', setAudioData)
  51. audio.removeEventListener('timeupdate', setAudioTime)
  52. audio.removeEventListener('progress', handleProgress)
  53. audio.removeEventListener('ended', handleEnded)
  54. audio.removeEventListener('error', handleError)
  55. clearTimeout(timer)
  56. }
  57. }, [src])
  58. const generateWaveformData = async (audioSrc: string) => {
  59. if (!window.AudioContext && !(window as any).webkitAudioContext) {
  60. setIsAudioAvailable(false)
  61. Toast.notify({
  62. type: 'error',
  63. message: 'Web Audio API is not supported in this browser',
  64. })
  65. return null
  66. }
  67. const url = new URL(src)
  68. const isHttp = url.protocol === 'http:' || url.protocol === 'https:'
  69. if (!isHttp) {
  70. setIsAudioAvailable(false)
  71. return null
  72. }
  73. const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)()
  74. const samples = 70
  75. try {
  76. const response = await fetch(audioSrc, { mode: 'cors' })
  77. if (!response || !response.ok) {
  78. setIsAudioAvailable(false)
  79. return null
  80. }
  81. const arrayBuffer = await response.arrayBuffer()
  82. const audioBuffer = await audioContext.decodeAudioData(arrayBuffer)
  83. const channelData = audioBuffer.getChannelData(0)
  84. const blockSize = Math.floor(channelData.length / samples)
  85. const waveformData: number[] = []
  86. for (let i = 0; i < samples; i++) {
  87. let sum = 0
  88. for (let j = 0; j < blockSize; j++)
  89. sum += Math.abs(channelData[i * blockSize + j])
  90. // Apply nonlinear scaling to enhance small amplitudes
  91. waveformData.push((sum / blockSize) * 5)
  92. }
  93. // Normalized waveform data
  94. const maxAmplitude = Math.max(...waveformData)
  95. const normalizedWaveform = waveformData.map(amp => amp / maxAmplitude)
  96. setWaveformData(normalizedWaveform)
  97. setIsAudioAvailable(true)
  98. }
  99. catch (error) {
  100. const waveform: number[] = []
  101. let prevValue = Math.random()
  102. for (let i = 0; i < samples; i++) {
  103. const targetValue = Math.random()
  104. const interpolatedValue = prevValue + (targetValue - prevValue) * 0.3
  105. waveform.push(interpolatedValue)
  106. prevValue = interpolatedValue
  107. }
  108. const maxAmplitude = Math.max(...waveform)
  109. const randomWaveform = waveform.map(amp => amp / maxAmplitude)
  110. setWaveformData(randomWaveform)
  111. setIsAudioAvailable(true)
  112. }
  113. finally {
  114. await audioContext.close()
  115. }
  116. }
  117. const togglePlay = useCallback(() => {
  118. const audio = audioRef.current
  119. if (audio && isAudioAvailable) {
  120. if (isPlaying) {
  121. setHasStartedPlaying(false)
  122. audio.pause()
  123. }
  124. else {
  125. setHasStartedPlaying(true)
  126. audio.play().catch(error => console.error('Error playing audio:', error))
  127. }
  128. setIsPlaying(!isPlaying)
  129. }
  130. else {
  131. Toast.notify({
  132. type: 'error',
  133. message: 'Audio element not found',
  134. })
  135. setIsAudioAvailable(false)
  136. }
  137. }, [isAudioAvailable, isPlaying])
  138. const handleCanvasInteraction = useCallback((e: React.MouseEvent | React.TouchEvent) => {
  139. e.preventDefault()
  140. const getClientX = (event: React.MouseEvent | React.TouchEvent): number => {
  141. if ('touches' in event)
  142. return event.touches[0].clientX
  143. return event.clientX
  144. }
  145. const updateProgress = (clientX: number) => {
  146. const canvas = canvasRef.current
  147. const audio = audioRef.current
  148. if (!canvas || !audio)
  149. return
  150. const rect = canvas.getBoundingClientRect()
  151. const percent = Math.min(Math.max(0, clientX - rect.left), rect.width) / rect.width
  152. const newTime = percent * duration
  153. // Removes the buffer check, allowing drag to any location
  154. audio.currentTime = newTime
  155. setCurrentTime(newTime)
  156. if (!isPlaying) {
  157. setIsPlaying(true)
  158. audio.play().catch((error) => {
  159. Toast.notify({
  160. type: 'error',
  161. message: `Error playing audio: ${error}`,
  162. })
  163. setIsPlaying(false)
  164. })
  165. }
  166. }
  167. updateProgress(getClientX(e))
  168. }, [duration, isPlaying])
  169. const formatTime = (time: number) => {
  170. const minutes = Math.floor(time / 60)
  171. const seconds = Math.floor(time % 60)
  172. return `${minutes}:${seconds.toString().padStart(2, '0')}`
  173. }
  174. const drawWaveform = useCallback(() => {
  175. const canvas = canvasRef.current
  176. if (!canvas)
  177. return
  178. const ctx = canvas.getContext('2d')
  179. if (!ctx)
  180. return
  181. const width = canvas.width
  182. const height = canvas.height
  183. const data = waveformData
  184. ctx.clearRect(0, 0, width, height)
  185. const barWidth = width / data.length
  186. const playedWidth = (currentTime / duration) * width
  187. const cornerRadius = 2
  188. // Draw waveform bars
  189. data.forEach((value, index) => {
  190. let color
  191. if (index * barWidth <= playedWidth)
  192. color = '#296DFF'
  193. else if ((index * barWidth / width) * duration <= hoverTime)
  194. color = 'rgba(21,90,239,.40)'
  195. else
  196. color = 'rgba(21,90,239,.20)'
  197. const barHeight = value * height
  198. const rectX = index * barWidth
  199. const rectY = (height - barHeight) / 2
  200. const rectWidth = barWidth * 0.5
  201. const rectHeight = barHeight
  202. ctx.lineWidth = 1
  203. ctx.fillStyle = color
  204. if (ctx.roundRect) {
  205. ctx.beginPath()
  206. ctx.roundRect(rectX, rectY, rectWidth, rectHeight, cornerRadius)
  207. ctx.fill()
  208. }
  209. else {
  210. ctx.fillRect(rectX, rectY, rectWidth, rectHeight)
  211. }
  212. })
  213. }, [currentTime, duration, hoverTime, waveformData])
  214. useEffect(() => {
  215. drawWaveform()
  216. }, [drawWaveform, bufferedTime, hasStartedPlaying])
  217. const handleMouseMove = useCallback((e: React.MouseEvent) => {
  218. const canvas = canvasRef.current
  219. const audio = audioRef.current
  220. if (!canvas || !audio)
  221. return
  222. const rect = canvas.getBoundingClientRect()
  223. const percent = Math.min(Math.max(0, e.clientX - rect.left), rect.width) / rect.width
  224. const time = percent * duration
  225. // Check if the hovered position is within a buffered range before updating hoverTime
  226. for (let i = 0; i < audio.buffered.length; i++) {
  227. if (time >= audio.buffered.start(i) && time <= audio.buffered.end(i)) {
  228. setHoverTime(time)
  229. break
  230. }
  231. }
  232. }, [duration])
  233. return (
  234. <div className={styles.audioPlayer}>
  235. <audio ref={audioRef} src={src} preload="auto"/>
  236. <button className={styles.playButton} onClick={togglePlay} disabled={!isAudioAvailable}>
  237. {isPlaying
  238. ? (
  239. <svg viewBox="0 0 24 24" width="16" height="16">
  240. <rect x="7" y="6" width="3" height="12" rx="1.5" ry="1.5"/>
  241. <rect x="15" y="6" width="3" height="12" rx="1.5" ry="1.5"/>
  242. </svg>
  243. )
  244. : (
  245. <svg viewBox="0 0 24 24" width="16" height="16">
  246. <path d="M8 5v14l11-7z" fill="currentColor"/>
  247. </svg>
  248. )}
  249. </button>
  250. <div className={isAudioAvailable ? styles.audioControls : styles.audioControls_disabled} hidden={!isAudioAvailable}>
  251. <div className={styles.progressBarContainer}>
  252. <canvas
  253. ref={canvasRef}
  254. className={styles.waveform}
  255. onClick={handleCanvasInteraction}
  256. onMouseMove={handleMouseMove}
  257. onMouseDown={handleCanvasInteraction}
  258. />
  259. {/* <div className={styles.currentTime} style={{ left: `${(currentTime / duration) * 81}%`, bottom: '29px' }}>
  260. {formatTime(currentTime)}
  261. </div> */}
  262. <div className={styles.timeDisplay}>
  263. <span className={styles.duration}>{formatTime(duration)}</span>
  264. </div>
  265. </div>
  266. </div>
  267. <div className={styles.source_unavailable} hidden={isAudioAvailable}>{t('common.operation.audioSourceUnavailable')}</div>
  268. </div>
  269. )
  270. }
  271. export default AudioPlayer