index.tsx

import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useParams, usePathname } from 'next/navigation'
import cn from 'classnames'
import Recorder from 'js-audio-recorder'
import { useRafInterval } from 'ahooks'
import { convertToMp3 } from './utils'
import s from './index.module.css'
import { StopCircle } from '@/app/components/base/icons/src/vender/solid/mediaAndDevices'
import { Loading02, XClose } from '@/app/components/base/icons/src/vender/line/general'
import { audioToText } from '@/service/share'

type VoiceInputTypes = {
  onConverted: (text: string) => void
  onCancel: () => void
}
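// Voice-input overlay: records from the microphone with js-audio-recorder, draws a live
// waveform on a canvas while recording, then encodes the buffer as MP3 and sends it to an
// audio-to-text endpoint. The recognized text is handed back through onConverted.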
const VoiceInput = ({
  onCancel,
  onConverted,
}: VoiceInputTypes) => {
  const { t } = useTranslation()
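  // 16-bit, 16 kHz, mono recording; compiling is disabled, so no real-time transcoding happens while recording.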
  const recorder = useRef(new Recorder({
    sampleBits: 16,
    sampleRate: 16000,
    numChannels: 1,
    compiling: false,
  }))
  const canvasRef = useRef<HTMLCanvasElement | null>(null)
  const ctxRef = useRef<CanvasRenderingContext2D | null>(null)
  const drawRecordId = useRef<number | null>(null)
  const [originDuration, setOriginDuration] = useState(0)
  const [startRecord, setStartRecord] = useState(false)
  const [startConvert, setStartConvert] = useState(false)
  const pathname = usePathname()
  const params = useParams()
  const clearInterval = useRafInterval(() => {
    setOriginDuration(originDuration + 1)
  }, 1000)
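  // Waveform renderer: on every animation frame, average the analyser samples into bars and
  // draw them as 2px-wide rounded rectangles along the bottom of the canvas.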
  const drawRecord = useCallback(() => {
    drawRecordId.current = requestAnimationFrame(drawRecord)
    const canvas = canvasRef.current!
    const ctx = ctxRef.current!
    const dataUnit8Array = recorder.current.getRecordAnalyseData()
    const dataArray = [].slice.call(dataUnit8Array)
    const lineLength = parseInt(`${canvas.width / 3}`)
    const gap = parseInt(`${1024 / lineLength}`)
    ctx.clearRect(0, 0, canvas.width, canvas.height)
    ctx.beginPath()
    let x = 0
    for (let i = 0; i < lineLength; i++) {
      let v = dataArray.slice(i * gap, i * gap + gap).reduce((prev: number, next: number) => {
        return prev + next
      }, 0) / gap
      if (v < 128)
        v = 128
      if (v > 178)
        v = 178
      const y = (v - 128) / 50 * canvas.height
      ctx.moveTo(x, 16)
      if (ctx.roundRect)
        ctx.roundRect(x, 16 - y, 2, y, [1, 1, 0, 0])
      else
        ctx.rect(x, 16 - y, 2, y)
      ctx.fill()
      x += 3
    }
    ctx.closePath()
  }, [])
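  // Stop recording: halt the timer and the waveform loop, clear the canvas, encode the buffer
  // as MP3 and upload it for speech-to-text. On failure, report an empty string so the caller
  // can still close the overlay.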
  const handleStopRecorder = useCallback(async () => {
    clearInterval()
    setStartRecord(false)
    setStartConvert(true)
    recorder.current.stop()
    drawRecordId.current && cancelAnimationFrame(drawRecordId.current)
    drawRecordId.current = null
    const canvas = canvasRef.current!
    const ctx = ctxRef.current!
    ctx.clearRect(0, 0, canvas.width, canvas.height)
    const mp3Blob = convertToMp3(recorder.current)
    const mp3File = new File([mp3Blob], 'temp.mp3', { type: 'audio/mp3' })
    const formData = new FormData()
    formData.append('file', mp3File)
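    // Pick the endpoint from the route: a share token means the public web-app API,
    // otherwise use the installed-app or app-scoped route.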
    let url = ''
    let isPublic = false
    if (params.token) {
      url = '/audio-to-text'
      isPublic = true
    }
    else if (params.appId) {
      if (pathname.search('explore/installed') > -1)
        url = `/installed-apps/${params.appId}/audio-to-text`
      else
        url = `/apps/${params.appId}/audio-to-text`
    }
    try {
      const audioResponse = await audioToText(url, isPublic, formData)
      onConverted(audioResponse.text)
      onCancel()
    }
    catch (e) {
      onConverted('')
      onCancel()
    }
  }, [])
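  // Request microphone access and start recording; kick off the waveform loop once the canvas
  // context is ready. If permission is denied or the recorder fails to start, close the overlay.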
  const handleStartRecord = async () => {
    try {
      await recorder.current.start()
      setStartRecord(true)
      setStartConvert(false)
      if (canvasRef.current && ctxRef.current)
        drawRecord()
    }
    catch (e) {
      onCancel()
    }
  }
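  // Scale the waveform canvas by devicePixelRatio so it stays crisp on HiDPI screens.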
  const initCanvas = () => {
    const dpr = window.devicePixelRatio || 1
    const canvas = document.getElementById('voice-input-record') as HTMLCanvasElement
    if (canvas) {
      const { width: cssWidth, height: cssHeight } = canvas.getBoundingClientRect()
      canvas.width = dpr * cssWidth
      canvas.height = dpr * cssHeight
      canvasRef.current = canvas
      const ctx = canvas.getContext('2d')
      if (ctx) {
        ctx.scale(dpr, dpr)
        ctx.fillStyle = 'rgba(209, 224, 255, 1)'
        ctxRef.current = ctx
      }
    }
  }
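  // Hard limit: automatically stop and convert once the recording reaches 120 seconds.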
  if (originDuration >= 120 && startRecord)
    handleStopRecorder()

  useEffect(() => {
    initCanvas()
    handleStartRecord()
  }, [])
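  // Elapsed time formatted as m:ss; the label below turns red once the duration passes 110 seconds.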
  const minutes = parseInt(`${parseInt(`${originDuration}`) / 60}`)
  const seconds = parseInt(`${originDuration}`) % 60

  return (
    <div className={cn(s.wrapper, 'absolute inset-0 rounded-xl')}>
      <div className='absolute inset-[1.5px] flex items-center pl-[14.5px] pr-[6.5px] py-[14px] bg-primary-25 rounded-[10.5px] overflow-hidden'>
        <canvas id='voice-input-record' className='absolute left-0 bottom-0 w-full h-4' />
        {
          startConvert && <Loading02 className='animate-spin mr-2 w-4 h-4 text-primary-700' />
        }
        <div className='grow'>
          {
            startRecord && (
              <div className='text-sm text-gray-500'>
                {t('common.voiceInput.speaking')}
              </div>
            )
          }
          {
            startConvert && (
              <div className={cn(s.convert, 'text-sm')}>
                {t('common.voiceInput.converting')}
              </div>
            )
          }
        </div>
        {
          startRecord && (
            <div
              className='flex justify-center items-center mr-1 w-8 h-8 hover:bg-primary-100 rounded-lg cursor-pointer'
              onClick={handleStopRecorder}
            >
              <StopCircle className='w-5 h-5 text-primary-600' />
            </div>
          )
        }
        {
          startConvert && (
            <div
              className='flex justify-center items-center mr-1 w-8 h-8 hover:bg-gray-200 rounded-lg cursor-pointer'
              onClick={onCancel}
            >
              <XClose className='w-4 h-4 text-gray-500' />
            </div>
          )
        }
        <div className={`w-[45px] pl-1 text-xs font-medium ${originDuration > 110 ? 'text-[#F04438]' : 'text-gray-700'}`}>{`0${minutes.toFixed(0)}:${seconds >= 10 ? seconds : `0${seconds}`}`}</div>
      </div>
    </div>
  )
}

export default VoiceInput
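
// Usage sketch (illustrative only): the parent component, its state, and setQuery below are
// assumptions, not part of this file. The overlay is typically mounted conditionally and
// removed again through onCancel.
//
// const [voiceInputShow, setVoiceInputShow] = useState(false)
//
// {voiceInputShow && (
//   <VoiceInput
//     onConverted={text => setQuery(text)} // hypothetical: feed the transcript into a text input
//     onCancel={() => setVoiceInputShow(false)}
//   />
// )}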