CzRger 1 month ago
parent
revision
3e0057236c

BIN
src/assets/images/chat/bg-1.png


BIN
src/assets/images/chat/bg-2.png


+ 1 - 1
src/views/chat/answer/index.vue

@@ -107,7 +107,7 @@ import DOMPurify from 'dompurify'
 import thinkCom from './think.vue'
 import { copy } from '@/utils/czr-util'
 import { ElMessage } from 'element-plus'
-import useTextToSpeech from './useTextToSpeech'
+import useTextToSpeech from '../audio/useTextToSpeech'
 
 const { speak, stop } = useTextToSpeech()
 const md = new MarkdownIt({

+ 1 - 1
src/views/chat/audio/sound-wave.vue

@@ -43,7 +43,7 @@ const updateWave = (volume) => {
 const animateHeight = (item) => {
   const animate = () => {
     const diff = item.targetPercent - item.percent
-    item.percent += diff * 0.5 // smoothing factor; buffers the rise and fall of the bars
+    item.percent += diff * 0.4 // smoothing factor; buffers the rise and fall of the bars
     if (Math.abs(diff) > 10) {
       requestAnimationFrame(animate)
     }
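Each frame moves item.percent 40% of the remaining distance toward item.targetPercent, so the gap shrinks geometrically: starting from a gap of 100, successive frames leave 60, 36, 21.6, …, i.e. (1 - 0.4)^n of the original after n frames. Lowering the coefficient from 0.5 to 0.4 therefore gives the bars a slightly longer, softer ease toward their target height.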

+ 130 - 0
src/views/chat/audio/useAudio.ts

@@ -0,0 +1,130 @@
+import { reactive, ref } from 'vue'
+import { ElMessage } from 'element-plus'
+
+export default function useAudio(url, playEnd = () => {}) {
+  return new Promise((resolve, reject) => {
+    const state: any = reactive({
+      source: null,
+      analyser: null,
+      dataArray: null,
+      animationId: null,
+      pauseTime: 0,
+    })
+    const volume = ref(0)
+    const audioContext = new window.AudioContext()
+    let audioDom: any = document.createElement('audio')
+    audioDom.setAttribute('src', url)
+    audioDom.setAttribute('crossorigin', 'anonymous')
+    // audioDom.
+    document.body.appendChild(audioDom)
+    // create the analyser node
+    state.analyser = audioContext.createAnalyser()
+    state.analyser.fftSize = 256
+    state.dataArray = new Uint8Array(state.analyser.frequencyBinCount)
+    // create a media element source node
+    state.source = audioContext.createMediaElementSource(audioDom)
+    // connect the nodes: source -> analyser -> destination (speakers)
+    state.source.connect(state.analyser)
+    state.analyser.connect(audioContext.destination)
+    const pause = () => {
+      cancelAnimationFrame(state.animationId)
+      audioDom.pause()
+      volume.value = 0
+    }
+    const stop = () => {
+      pause()
+      document.body.removeChild(audioDom)
+      audioDom = null
+      playEnd()
+    }
+    audioDom.oncanplay = () => {
+      resolve({
+        volume,
+        play: () => {
+          audioDom?.play()
+          updateVolume()
+        },
+        pause,
+        stop,
+      })
+    }
+    audioDom.onended = () => {
+      stop()
+    }
+    // const init = async () => {
+    //   try {
+    //     const audioContext = new window.AudioContext()
+    //     console.log(audioContext)
+    //     // fetch the audio file
+    //     const response = await fetch(url)
+    //     const arrayBuffer = await response.arrayBuffer()
+    //     const audioBuffer = await audioContext.decodeAudioData(arrayBuffer)
+    //     // create the audio source
+    //     state.source = audioContext.createBufferSource()
+    //     state.source.buffer = audioBuffer
+    //     state.source.onended = () => {
+    //       console.log(444)
+    //       cancelAnimationFrame(state.animationId)
+    //       playEnd()
+    //     }
+    //     // create the analyser node
+    //     state.analyser = audioContext.createAnalyser()
+    //     state.analyser.fftSize = 256
+    //     state.dataArray = new Uint8Array(state.analyser.frequencyBinCount)
+    //     // connect the nodes: source -> analyser -> destination (speakers)
+    //     state.source.connect(state.analyser)
+    //     state.analyser.connect(audioContext.destination)
+    //     // start playback
+    //     console.log(state.source)
+    //     // start the volume-detection loop
+    //     updateVolume()
+    //     resolve({
+    //       volume,
+    //       play: () => {
+    //         // if resuming playback after a pause
+    //         if (state.pauseTime > 0) {
+    //           console.log(state.pauseTime)
+    //           state.source = audioContext.createBufferSource()
+    //           state.source.buffer = audioBuffer
+    //           state.source.connect(state.analyser)
+    //           console.log(Math.min(0, state.pauseTime - 1))
+    //           state.source.start(0, state.pauseTime - 1)
+    //           updateVolume()
+    //         } else {
+    //           state.source.start(0)
+    //         }
+    //       },
+    //       stop: () => {
+    //         state.source.stop()
+    //         state.source.disconnect()
+    //       },
+    //       pause: () => {
+    //         state.pauseTime = audioContext.currentTime
+    //         // stop the current playback
+    //         state.source.stop()
+    //         state.source.disconnect()
+    //       },
+    //     })
+    //   } catch (err: any) {
+    //     ElMessage.error('Unable to play audio: ' + err.message)
+    //     console.error('Audio playback error:', err)
+    //     reject(err)
+    //   }
+    // }
+    // init()
+    const updateVolume = () => {
+      if (!state.analyser) return
+      state.analyser.getByteFrequencyData(state.dataArray)
+      // compute the average volume
+      let sum = 0
+      for (let i = 0; i < state.dataArray.length; i++) {
+        sum += state.dataArray[i]
+      }
+      const average = sum / state.dataArray.length
+      // map the volume to the 0-100 range
+      volume.value = Math.min(100, Math.max(0, Math.round(average / 2.55)))
+      // schedule the next animation frame
+      state.animationId = requestAnimationFrame(updateVolume)
+    }
+  })
+}
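For orientation, a minimal consumption sketch of the composable above, assuming only the resolved shape shown in this hunk ({ volume, play, pause, stop }); the clip URL below is a placeholder, and the real call site is in online.vue further down:

import useAudio from '@/views/chat/audio/useAudio'

// Hypothetical caller: play a clip and mirror its live volume (0-100) into the sound wave.
useAudio('/placeholder-clip.mp3', () => {
  // second argument: invoked by stop(), i.e. when playback ends or is interrupted
  console.log('playback finished')
}).then(({ volume, play, pause, stop }: any) => {
  play() // starts the <audio> element and the requestAnimationFrame volume loop
  // volume is a reactive ref fed each frame from the AnalyserNode frequency data
  // pause() halts playback and zeroes the volume; stop() also removes the element and fires the callback
})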

+ 1 - 3
src/utils/useSpeechToAudio.ts

@@ -76,20 +76,18 @@ export default function useSpeechToAudio({
   }
   const stop = () => {
     state.mediaRecorder?.stop()
+    // state.analyser?.stop()
   }
   // 更新音量显示
   const updateVolume = () => {
     if (!state.analyser) return
-
     state.analyser.getByteFrequencyData(state.dataArray)
-
    // compute the average volume
     let sum = 0
     for (let i = 0; i < state.dataArray.length; i++) {
       sum += state.dataArray[i]
     }
     const average = sum / state.dataArray.length
-
    // map the volume to the 0-100 range
     volume.value = Math.min(100, Math.max(0, Math.round(average / 2.55)))
     if (timeout > 0) {
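The 2.55 divisor used by both updateVolume helpers (here and in useAudio.ts above) is simply the 0-255 byte range of getByteFrequencyData rescaled to 0-100: volume = round(average × 100 / 255) = round(average / 2.55), then clamped to [0, 100].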

src/utils/useTextToSpeech.ts → src/views/chat/audio/useTextToSpeech.ts


+ 2 - 2
src/views/chat/normal.vue

@@ -1,9 +1,9 @@
 <template>
   <div
-    class="chat-normal flex h-full w-full flex-col overflow-hidden bg-[#ffffff] p-4"
+    class="chat-normal flex h-full w-full flex-col overflow-hidden bg-[url('@/assets/images/chat/bg-1.png')] bg-[length:100%_100%] bg-no-repeat px-6 py-4"
   >
     <div
-      class="chat-msg -mr-4 flex flex-1 flex-col gap-4 overflow-y-auto pr-4"
+      class="chat-msg -mr-6 flex flex-1 flex-col gap-4 overflow-y-auto pr-6"
       ref="ref_chatMsg"
     >
       <template v-for="(item, index) in state.chats">

+ 47 - 10
src/views/chat/online.vue

@@ -1,7 +1,7 @@
 <template>
   <div
     tabindex="0"
-    class="chat-online relative flex size-full flex-col items-center justify-center overflow-hidden rounded-lg bg-[url('@/assets/images/knowledge/knowledge-back-test.png')] bg-[length:100%_100%] bg-no-repeat"
+    class="chat-online relative flex size-full flex-col items-center justify-center overflow-hidden rounded-lg bg-[url('@/assets/images/chat/bg-2.png')] bg-[length:100%_100%] bg-no-repeat"
     @focus="state.isFocus = true"
     @blur="state.isFocus = false"
     @mousedown.left="onSpeakStart"
@@ -18,10 +18,11 @@
     <template v-if="!state.isWaiting">
       <div class="flex flex-col items-center">
         <div>
-          <soundWave :volume="state.isAsk ? volume : 0" :num="30" />
+          <soundWave :volume="volumeCpt" :num="30" />
         </div>
         <div class="mt-4 h-7 text-lg font-bold text-[var(--czr-main-color)]">
          <template v-if="state.isAsk">Listening {{ durationCpt }}</template>
+          <template v-else-if="state.isThink">Thinking…</template>
          <template v-else-if="state.isAnswer">Answering</template>
         </div>
         <div class="mt-3 flex items-center text-sm text-[#909399]">
@@ -32,6 +33,7 @@
             class="mr-1"
           />
          <template v-if="state.isAsk"> Release the left mouse button/Space to ask </template>
+          <template v-else-if="state.isThink"> Thinking, please wait… </template>
          <template v-else-if="state.isAnswer"> Left mouse button/Space to interrupt </template>
          <template v-else> Hold the left mouse button/Space to start asking </template>
         </div>
@@ -71,18 +73,22 @@
 <script setup lang="ts">
 import { computed, reactive } from 'vue'
 import soundWave from './audio/sound-wave.vue'
-import useSpeechToAudio from '@/utils/useSpeechToAudio'
+import useSpeechToAudio from '@/views/chat/audio/useSpeechToAudio'
 import { ElMessage } from 'element-plus'
+import useAudio from '@/views/chat/audio/useAudio'
 
 const emit = defineEmits(['hangUp'])
 const state: any = reactive({
  isWaiting: true, // waiting for the call to be picked up
  isAsk: false, // recording a question
  isAnswer: false, // playing back the answer
+  isThink: false, // thinking about the answer
  isFocus: false, // whether the voice chat is focused
  duration: 0, // duration of the question recording
  timer: null, // debounce timer for starting a question
  clickSpace: false, // holding Space fires keydown repeatedly, so guard it with a flag
+  answerVolume: 0, // volume of the answer audio
+  answerAudioControl: null,
 })
 const durationCpt = computed(() => {
   const minutes = Math.floor(state.duration / 60)
@@ -95,29 +101,58 @@ const onSpeak = ({ duration }) => {
 }
 const onEnd = (audio) => {
   state.isAsk = false
-  state.isAnswer = true
   if (state.duration < 2) {
    ElMessage.warning('The question was too short, please keep asking!')
    return
   }
   console.log(audio)
+  state.isThink = true
+  setTimeout(() => {
+    state.isThink = false
+    useAudio(
+      'https://lf-bot-studio-plugin-resource.coze.cn/obj/bot-studio-platform-plugin-tos/sami_podcast/tts/134d9d71475043199ef7372567fa9689.mp3',
+      // 'https://lf-bot-studio-plugin-resource.coze.cn/obj/bot-studio-platform-plugin-tos/artist/image/32afe00820aa466192ce0f7b6495e946.mp3',
+      () => {
+        state.isAnswer = false
+      },
+    ).then(({ volume, play, stop, pause }: any) => {
+      state.isAnswer = true
+      state.answerAudioControl = {
+        play,
+        stop,
+        pause,
+      }
+      state.answerVolume = volume
+      play()
+    })
+  }, 1000)
 }
 const { speak, stop, volume } = useSpeechToAudio({
   onEnd,
   onSpeak,
   timeout: 0,
 })
+const volumeCpt = computed(() => {
+  if (state.isAsk) {
+    return volume.value
+  }
+  if (state.isAnswer) {
+    return state.answerVolume
+  }
+  return 0
+})
 const onCall = () => {
   state.isWaiting = false
   state.isAsk = false
+  state.isThink = false
   state.isAnswer = false
 }
 const onSpeakStart = () => {
-  if (state.isAnswer) {
-    onStopAnswer()
-  }
   if (!state.isWaiting && !state.isAsk && !state.clickSpace) {
     state.timer = setTimeout(() => {
+      if (state.isAnswer) {
+        onStopAnswer()
+      }
       speak()
     }, 300)
   }
@@ -129,8 +164,10 @@ const onSpeakStop = () => {
   }
 }
 const onSpaceSpeakStart = () => {
-  onSpeakStart()
-  state.clickSpace = true
+  if (!state.isThink) {
+    onSpeakStart()
+    state.clickSpace = true
+  }
 }
 const onSpaceSpeakStop = () => {
   onSpeakStop()
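Read together, the handlers in this hunk form a small push-to-talk state machine. A condensed sketch (the Phase type is hypothetical, not part of the commit; transitions are inferred from the handlers shown above):

// type Phase = 'waiting' | 'idle' | 'ask' | 'think' | 'answer'
// waiting -> idle     onCall() clears all flags
// idle    -> ask      hold mouse-left/Space for 300 ms (speak()); an in-flight answer is stopped first
// ask     -> think    release (onEnd); the commit waits a fixed 1 s before fetching the answer audio
// think   -> answer   useAudio() resolves and play() starts; playback volume drives the wave via volumeCpt
// answer  -> idle     playback ends (the callback sets isAnswer = false), or a new press interrupts it
// Space input is ignored while thinking, and clickSpace suppresses keydown auto-repeat.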

+ 1 - 1
src/views/manage/app/index_audio.vue

@@ -35,7 +35,7 @@
 <script setup lang="ts">
 import { onMounted, reactive, ref, watch } from 'vue'
 import { ElMessage } from 'element-plus'
-import useSpeechToAudio from '@/utils/useSpeechToAudio'
+import useSpeechToAudio from '@/views/chat/audio/useSpeechToAudio'
 
 const props = defineProps({})
 const state: any = reactive({

+ 2 - 16
src/views/manage/home/app/index.vue

@@ -67,7 +67,7 @@
         </div>
       </div>
       <div
-        class="chat-block m-1 flex-1 rounded-lg border-1 border-[var(--czr-main-color)]/5 shadow"
+        class="m-1 flex-1 rounded-lg border-1 border-[var(--czr-main-color)]/5 shadow"
       >
         <chat :online="state.isOnline" :ID="state.ID" />
       </div>
@@ -123,18 +123,4 @@ const initDictionary = () => {
 }
 </script>
 
-<style lang="scss" scoped>
-:deep(.chat-block) {
-  .chat-normal {
-    padding: 1rem 1.5rem;
-    background-color: transparent;
-    background-image: url('@/assets/images/dialog-bg.png');
-    background-size: 100% 100%;
-    background-repeat: no-repeat;
-    .chat-msg {
-      margin-right: -1.5rem;
-      padding-right: 1.5rem;
-    }
-  }
-}
-</style>
+<style lang="scss" scoped></style>
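The scoped SCSS removed here was folded into the Tailwind utilities added to src/views/chat/normal.vue earlier in this commit: padding: 1rem 1.5rem becomes px-6 py-4 (1.5 rem / 1 rem on the default spacing scale), margin-right: -1.5rem / padding-right: 1.5rem on .chat-msg becomes -mr-6 / pr-6, and the background-image rules become bg-[url('@/assets/images/chat/bg-1.png')] bg-[length:100%_100%] bg-no-repeat, with the new bg-1.png replacing dialog-bg.png.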