AudioPlayer.tsx 10 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320
  1. import React, { useCallback, useEffect, useRef, useState } from 'react'
  2. import { t } from 'i18next'
  3. import {
  4. RiPauseCircleFill,
  5. RiPlayLargeFill,
  6. } from '@remixicon/react'
  7. import Toast from '@/app/components/base/toast'
  8. import useTheme from '@/hooks/use-theme'
  9. import { Theme } from '@/types/app'
  10. import cn from '@/utils/classnames'
// Props for the standalone audio player widget.
type AudioPlayerProps = {
  // URL of the audio resource to load, play, and visualise as a waveform.
  src: string
}
  14. const AudioPlayer: React.FC<AudioPlayerProps> = ({ src }) => {
  15. const [isPlaying, setIsPlaying] = useState(false)
  16. const [currentTime, setCurrentTime] = useState(0)
  17. const [duration, setDuration] = useState(0)
  18. const [waveformData, setWaveformData] = useState<number[]>([])
  19. const [bufferedTime, setBufferedTime] = useState(0)
  20. const audioRef = useRef<HTMLAudioElement>(null)
  21. const canvasRef = useRef<HTMLCanvasElement>(null)
  22. const [hasStartedPlaying, setHasStartedPlaying] = useState(false)
  23. const [hoverTime, setHoverTime] = useState(0)
  24. const [isAudioAvailable, setIsAudioAvailable] = useState(true)
  25. const { theme } = useTheme()
  26. useEffect(() => {
  27. const audio = audioRef.current
  28. if (!audio)
  29. return
  30. const handleError = () => {
  31. setIsAudioAvailable(false)
  32. }
  33. const setAudioData = () => {
  34. setDuration(audio.duration)
  35. }
  36. const setAudioTime = () => {
  37. setCurrentTime(audio.currentTime)
  38. }
  39. const handleProgress = () => {
  40. if (audio.buffered.length > 0)
  41. setBufferedTime(audio.buffered.end(audio.buffered.length - 1))
  42. }
  43. const handleEnded = () => {
  44. setIsPlaying(false)
  45. }
  46. audio.addEventListener('loadedmetadata', setAudioData)
  47. audio.addEventListener('timeupdate', setAudioTime)
  48. audio.addEventListener('progress', handleProgress)
  49. audio.addEventListener('ended', handleEnded)
  50. audio.addEventListener('error', handleError)
  51. // Preload audio metadata
  52. audio.load()
  53. // Delayed generation of waveform data
  54. // eslint-disable-next-line ts/no-use-before-define
  55. const timer = setTimeout(() => generateWaveformData(src), 1000)
  56. return () => {
  57. audio.removeEventListener('loadedmetadata', setAudioData)
  58. audio.removeEventListener('timeupdate', setAudioTime)
  59. audio.removeEventListener('progress', handleProgress)
  60. audio.removeEventListener('ended', handleEnded)
  61. audio.removeEventListener('error', handleError)
  62. clearTimeout(timer)
  63. }
  64. }, [src])
  65. const generateWaveformData = async (audioSrc: string) => {
  66. if (!window.AudioContext && !(window as any).webkitAudioContext) {
  67. setIsAudioAvailable(false)
  68. Toast.notify({
  69. type: 'error',
  70. message: 'Web Audio API is not supported in this browser',
  71. })
  72. return null
  73. }
  74. const url = new URL(src)
  75. const isHttp = url.protocol === 'http:' || url.protocol === 'https:'
  76. if (!isHttp) {
  77. setIsAudioAvailable(false)
  78. return null
  79. }
  80. const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)()
  81. const samples = 70
  82. try {
  83. const response = await fetch(audioSrc, { mode: 'cors' })
  84. if (!response || !response.ok) {
  85. setIsAudioAvailable(false)
  86. return null
  87. }
  88. const arrayBuffer = await response.arrayBuffer()
  89. const audioBuffer = await audioContext.decodeAudioData(arrayBuffer)
  90. const channelData = audioBuffer.getChannelData(0)
  91. const blockSize = Math.floor(channelData.length / samples)
  92. const waveformData: number[] = []
  93. for (let i = 0; i < samples; i++) {
  94. let sum = 0
  95. for (let j = 0; j < blockSize; j++)
  96. sum += Math.abs(channelData[i * blockSize + j])
  97. // Apply nonlinear scaling to enhance small amplitudes
  98. waveformData.push((sum / blockSize) * 5)
  99. }
  100. // Normalized waveform data
  101. const maxAmplitude = Math.max(...waveformData)
  102. const normalizedWaveform = waveformData.map(amp => amp / maxAmplitude)
  103. setWaveformData(normalizedWaveform)
  104. setIsAudioAvailable(true)
  105. }
  106. catch (error) {
  107. const waveform: number[] = []
  108. let prevValue = Math.random()
  109. for (let i = 0; i < samples; i++) {
  110. const targetValue = Math.random()
  111. const interpolatedValue = prevValue + (targetValue - prevValue) * 0.3
  112. waveform.push(interpolatedValue)
  113. prevValue = interpolatedValue
  114. }
  115. const maxAmplitude = Math.max(...waveform)
  116. const randomWaveform = waveform.map(amp => amp / maxAmplitude)
  117. setWaveformData(randomWaveform)
  118. setIsAudioAvailable(true)
  119. }
  120. finally {
  121. await audioContext.close()
  122. }
  123. }
  124. const togglePlay = useCallback(() => {
  125. const audio = audioRef.current
  126. if (audio && isAudioAvailable) {
  127. if (isPlaying) {
  128. setHasStartedPlaying(false)
  129. audio.pause()
  130. }
  131. else {
  132. setHasStartedPlaying(true)
  133. audio.play().catch(error => console.error('Error playing audio:', error))
  134. }
  135. setIsPlaying(!isPlaying)
  136. }
  137. else {
  138. Toast.notify({
  139. type: 'error',
  140. message: 'Audio element not found',
  141. })
  142. setIsAudioAvailable(false)
  143. }
  144. }, [isAudioAvailable, isPlaying])
  145. const handleCanvasInteraction = useCallback((e: React.MouseEvent | React.TouchEvent) => {
  146. e.preventDefault()
  147. const getClientX = (event: React.MouseEvent | React.TouchEvent): number => {
  148. if ('touches' in event)
  149. return event.touches[0].clientX
  150. return event.clientX
  151. }
  152. const updateProgress = (clientX: number) => {
  153. const canvas = canvasRef.current
  154. const audio = audioRef.current
  155. if (!canvas || !audio)
  156. return
  157. const rect = canvas.getBoundingClientRect()
  158. const percent = Math.min(Math.max(0, clientX - rect.left), rect.width) / rect.width
  159. const newTime = percent * duration
  160. // Removes the buffer check, allowing drag to any location
  161. audio.currentTime = newTime
  162. setCurrentTime(newTime)
  163. if (!isPlaying) {
  164. setIsPlaying(true)
  165. audio.play().catch((error) => {
  166. Toast.notify({
  167. type: 'error',
  168. message: `Error playing audio: ${error}`,
  169. })
  170. setIsPlaying(false)
  171. })
  172. }
  173. }
  174. updateProgress(getClientX(e))
  175. }, [duration, isPlaying])
  176. const formatTime = (time: number) => {
  177. const minutes = Math.floor(time / 60)
  178. const seconds = Math.floor(time % 60)
  179. return `${minutes}:${seconds.toString().padStart(2, '0')}`
  180. }
  181. const drawWaveform = useCallback(() => {
  182. const canvas = canvasRef.current
  183. if (!canvas)
  184. return
  185. const ctx = canvas.getContext('2d')
  186. if (!ctx)
  187. return
  188. const width = canvas.width
  189. const height = canvas.height
  190. const data = waveformData
  191. ctx.clearRect(0, 0, width, height)
  192. const barWidth = width / data.length
  193. const playedWidth = (currentTime / duration) * width
  194. const cornerRadius = 2
  195. // Draw waveform bars
  196. data.forEach((value, index) => {
  197. let color
  198. if (index * barWidth <= playedWidth)
  199. color = theme === Theme.light ? '#296DFF' : '#84ABFF'
  200. else if ((index * barWidth / width) * duration <= hoverTime)
  201. color = theme === Theme.light ? 'rgba(21,90,239,.40)' : 'rgba(200, 206, 218, 0.28)'
  202. else
  203. color = theme === Theme.light ? 'rgba(21,90,239,.20)' : 'rgba(200, 206, 218, 0.14)'
  204. const barHeight = value * height
  205. const rectX = index * barWidth
  206. const rectY = (height - barHeight) / 2
  207. const rectWidth = barWidth * 0.5
  208. const rectHeight = barHeight
  209. ctx.lineWidth = 1
  210. ctx.fillStyle = color
  211. if (ctx.roundRect) {
  212. ctx.beginPath()
  213. ctx.roundRect(rectX, rectY, rectWidth, rectHeight, cornerRadius)
  214. ctx.fill()
  215. }
  216. else {
  217. ctx.fillRect(rectX, rectY, rectWidth, rectHeight)
  218. }
  219. })
  220. }, [currentTime, duration, hoverTime, theme, waveformData])
  221. useEffect(() => {
  222. drawWaveform()
  223. }, [drawWaveform, bufferedTime, hasStartedPlaying])
  224. const handleMouseMove = useCallback((e: React.MouseEvent) => {
  225. const canvas = canvasRef.current
  226. const audio = audioRef.current
  227. if (!canvas || !audio)
  228. return
  229. const rect = canvas.getBoundingClientRect()
  230. const percent = Math.min(Math.max(0, e.clientX - rect.left), rect.width) / rect.width
  231. const time = percent * duration
  232. // Check if the hovered position is within a buffered range before updating hoverTime
  233. for (let i = 0; i < audio.buffered.length; i++) {
  234. if (time >= audio.buffered.start(i) && time <= audio.buffered.end(i)) {
  235. setHoverTime(time)
  236. break
  237. }
  238. }
  239. }, [duration])
  240. return (
  241. <div className='flex h-9 min-w-[240px] max-w-[420px] items-end gap-2 rounded-[10px] border border-components-panel-border-subtle bg-components-chat-input-audio-bg-alt p-2 shadow-xs backdrop-blur-sm'>
  242. <audio ref={audioRef} src={src} preload="auto"/>
  243. <button className='inline-flex shrink-0 cursor-pointer items-center justify-center border-none text-text-accent transition-all hover:text-text-accent-secondary disabled:text-components-button-primary-bg-disabled' onClick={togglePlay} disabled={!isAudioAvailable}>
  244. {isPlaying
  245. ? (
  246. <RiPauseCircleFill className='h-5 w-5' />
  247. )
  248. : (
  249. <RiPlayLargeFill className='h-5 w-5' />
  250. )}
  251. </button>
  252. <div className={cn(isAudioAvailable && 'grow')} hidden={!isAudioAvailable}>
  253. <div className='flex h-8 items-center justify-center'>
  254. <canvas
  255. ref={canvasRef}
  256. className='relative flex h-6 w-full grow cursor-pointer items-center justify-center'
  257. onClick={handleCanvasInteraction}
  258. onMouseMove={handleMouseMove}
  259. onMouseDown={handleCanvasInteraction}
  260. />
  261. <div className='system-xs-medium inline-flex min-w-[50px] items-center justify-center text-text-accent-secondary'>
  262. <span className='rounded-[10px] px-0.5 py-1'>{formatTime(duration)}</span>
  263. </div>
  264. </div>
  265. </div>
  266. <div className='absolute left-0 top-0 flex h-full w-full items-center justify-center text-text-quaternary' hidden={isAudioAvailable}>{t('common.operation.audioSourceUnavailable')}</div>
  267. </div>
  268. )
  269. }
  270. export default AudioPlayer