Commit 9e1b9591 authored by 水玉婷

feat: optimize voice input

parent 5f5b0e28
......@@ -18,6 +18,7 @@ instance.interceptors.request.use(
const userToken = extMap.sessionId;
if (userToken) {
try {
config.headers['Token'] = userToken || ''
config.headers['x-session-id'] = userToken || ''
} catch (error) {
console.warn('解析用户信息失败', error)
......
......@@ -70,15 +70,9 @@
<div class="chat-input-container">
<div class="chat-input">
<!-- Voice recognition button -->
<VoiceRecognition ref="voiceRecognitionRef" :disabled="loading" :debug="true" @audio="handleVoiceAudio"
@error="handleVoiceError" class="voice-recognition-wrapper" />
<textarea ref="textarea" v-model="messageText" placeholder="输入消息..." @keypress="handleKeyPress"
@input="adjustTextareaHeight" :disabled="loading"></textarea>
<button @click="sendMessage" :disabled="loading">
......@@ -176,7 +170,7 @@ const contentTemplates = {
},
// Simplified iframe template - fullscreen removed, width/height fixed at 100%
iframe: (iframeData: any) => {
const { tips, title, url } = iframeData || {};
console.log('iframeData', iframeData);
return `<div class="message-iframe iframe-loading">
<!-- 加载状态 -->
......@@ -208,16 +202,31 @@ const contentTemplates = {
audio: (audioData: any) => {
const { audioUrl, audioBlob } = audioData;
let src = audioUrl;
// If a Blob was provided but no URL, create an object URL for it
if (audioBlob && !audioUrl) {
src = URL.createObjectURL(audioBlob);
}
// Generate a unique ID for this audio player instance
const audioId = `audio_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
return `<div class="audio-message" data-audio-id="${audioId}">
<div class="audio-player" data-audio-src="${src}">
<div class="audio-icon">
<span class="play-icon">▶</span>
<span class="pause-icon" style="display: none;">❚❚</span>
</div>
<div class="audio-wave">
<div class="wave-bar"></div>
<div class="wave-bar"></div>
<div class="wave-bar"></div>
<div class="wave-bar"></div>
<div class="wave-bar"></div>
</div>
<div class="audio-duration">0:00</div>
</div>
<audio id="${audioId}" src="${src}" preload="metadata" style="display: none;"></audio>
</div>`;
}
};
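// Illustration (not part of this commit): calling the audio template above, e.g.
// contentTemplates.audio({ audioUrl: '/ifile/AI_TEMP/rec.wav' }), yields markup like
//   <div class="audio-message" data-audio-id="audio_1700000000000_ab12cd34e">
//     <div class="audio-player" data-audio-src="/ifile/AI_TEMP/rec.wav">...</div>
//     <audio id="audio_1700000000000_ab12cd34e" src="/ifile/AI_TEMP/rec.wav" preload="metadata"></audio>
//   </div>
// setupAudioPlayers() further below looks up .audio-player and data-audio-id in this markup
// to attach its click and play/pause handlers. The sample path is hypothetical.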
......@@ -244,9 +253,9 @@ interface Message {
// Helper to check whether a message is an audio message
const isAudioMessage = (messageData: any): boolean => {
return messageData.questionType === 'audio' ||
(messageData.question && typeof messageData.question === 'object' &&
(messageData.question.audioUrl || messageData.question.audioData));
};
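// For reference (shapes inferred from the check above, not new logic), both of these
// payloads are treated as audio messages:
//   { questionType: 'audio', question: '...' }
//   { question: { audioUrl: '/ifile/AI_TEMP/rec.wav' } }   // question.audioData also matches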
interface SSEData {
......@@ -276,12 +285,12 @@ const isReconnecting = ref(false);
const timeArr = ref([]);
const hasStartedConversation = ref(false); // whether the conversation has started
// Voice event handler - now receives the URL returned by the server
const handleVoiceAudio = (audioUrl: string, audioBlob?: Blob) => {
console.log('收到音频URL:', audioUrl);
// Start the conversation
startConversation();
// Append the audio message to the chat history
messages.value.push({
messageType: 'sent',
......@@ -293,7 +302,7 @@ const handleVoiceAudio = (audioBlob: Blob) => {
date: dayjs().format('HH:mm'),
contentBlocks: [
{
content: contentTemplates.audio({ audioBlob }),
content: contentTemplates.audio({ audioUrl, audioBlob }),
thinkContent: '',
hasThinkBox: false,
thinkBoxExpanded: false,
......@@ -301,13 +310,15 @@ const handleVoiceAudio = (audioBlob: Blob) => {
],
});
// If an audio URL was returned, send it to the server
if (audioUrl) {
sendAudioMessage(audioUrl);
}
// Scroll to the bottom
nextTick(() => {
scrollToBottom();
});
};
const handleVoiceError = (error: string) => {
......@@ -315,27 +326,25 @@ const handleVoiceError = (error: string) => {
// An error toast could be shown here
};
// Send an audio message - simplified to mirror sendMessage
const sendAudioMessage = async (audioUrl: string) => {
loading.value = true;
try {
// Start the conversation
startConversation();
// Use the externally provided send handler if one was passed in
if (props.onMessageSend) {
console.log('调用外部音频发送函数');
await props.onMessageSend(audioUrl);
} else {
// Default API call - same flow as sendMessage, only the parameters differ
console.log('默认音频API调用逻辑');
const response = await post(`${props.apiBaseUrl}/aiService/ask/app/${props.params?.appId}`, {
questionLocalAudioFilePath: audioUrl,
...props.params,
}, {
headers: {
Token: props.token || '',
'x-session-id': props.token || '',
......@@ -377,14 +386,14 @@ const simulateOptionData = () => {
}
};
// Second message: show a single option (goes through the iframe flow)
const secondOptionData = {
status: 3,
type: 'option',
message: {
tips: "这是单个报表的预览:",
options: [
{
title: "销售指标看板",
url: "/WeChatOauth2/MobileReport_Monthly/MonthlyReport_index.aspx?postage=384b67414b334f2f693177644246313756704a724d513d3d&company=器械整体&typename=整体指标"
}
......@@ -427,7 +436,7 @@ const simulateOptionData = () => {
messages.value.push(secondResult.updatedResponse);
nextTick(() => {
scrollToBottom();
});
});
}
};
......@@ -471,7 +480,7 @@ const sendMessage = async () => {
try {
messageText.value = '';
// Optional: simulate option data being returned after the message is sent
// Uncomment the code below to enable the simulation
setTimeout(() => {
......@@ -566,7 +575,7 @@ const processSSEMessage = (
});
break;
case 'option': // option data
const { tips, options } = messageContent;
if (options?.length) {
if (options?.length === 1) {
// Single option: render it via the iframe flow
......@@ -704,13 +713,11 @@ const reconnectSSE = (newDialogSessionId: string) => {
console.log('正在重连中,跳过重复重连');
return;
}
isReconnecting.value = true;
console.log('开始重连SSE,新的dialogSessionId:', newDialogSessionId);
closeSSE();
dialogSessionId.value = newDialogSessionId;
// Throttle reconnection to avoid reconnecting too frequently
const reconnectTimeout = setTimeout(() => {
initSSE();
......@@ -841,7 +848,6 @@ const initSSE = () => {
console.log('收到新的 dialogSessionId:', result.newDialogSessionId);
dialogSessionId.value = result.newDialogSessionId;
}
await nextTick();
scrollToBottom();
} catch (error) {
......@@ -885,7 +891,7 @@ const processHistoryData = (data: any): Message[] => {
// Handle the question message
if (data.question) {
let questionContent = '';
// Check whether this is an audio message
if (isAudioMessage(data)) {
// Handle the audio message
......@@ -1056,8 +1062,113 @@ onMounted(() => {
if (props.dialogSessionId) {
getChatRecord(props.dialogSessionId);
}
// Initialize audio player event listeners
initAudioPlayers();
});
// Initialize audio players
const initAudioPlayers = () => {
// Watch for message changes and attach listeners to newly added audio messages
watch(messages, () => {
nextTick(() => {
setupAudioPlayers();
});
}, { deep: true });
};
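// Note: this relies on `watch` being imported from 'vue' in this component; the import
// statement is outside the hunks shown in this commit, so that is assumed rather than verified.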
// Wire up audio player events
const setupAudioPlayers = () => {
const audioPlayers = document.querySelectorAll('.audio-player');
audioPlayers.forEach((player) => {
// Replace the node with a clone to drop previously attached listeners and avoid double binding
const newPlayer = player.cloneNode(true) as HTMLElement;
player.parentNode?.replaceChild(newPlayer, player);
const audioMessage = newPlayer.closest('.audio-message');
const audioId = audioMessage?.getAttribute('data-audio-id');
// Re-query the audio element after the DOM replacement
const audioElement = audioId ? (document.getElementById(audioId) as HTMLAudioElement | null) : null;
if (!audioElement) {
console.warn('未找到音频元素,audioId:', audioId);
return;
}
console.log('音频元素:', audioElement);
// When playback ends, reset the player to its idle state
audioElement.addEventListener('ended', () => {
newPlayer.classList.remove('playing');
const playIcon = newPlayer.querySelector<HTMLElement>('.play-icon');
const pauseIcon = newPlayer.querySelector<HTMLElement>('.pause-icon');
if (playIcon && pauseIcon) {
playIcon.style.display = 'inline';
pauseIcon.style.display = 'none';
}
});
// Toggle play/pause on click
newPlayer.addEventListener('click', (e) => {
e.stopPropagation();
if (audioElement.paused) {
// Pause any other audio that is currently playing
pauseAllOtherAudios(audioElement);
audioElement.play().catch(error => {
console.error('播放音频失败:', error);
});
newPlayer.classList.add('playing');
} else {
audioElement.pause();
newPlayer.classList.remove('playing');
}
});
// Play event
audioElement.addEventListener('play', () => {
newPlayer.classList.add('playing');
const playIcon = newPlayer.querySelector<HTMLElement>('.play-icon');
const pauseIcon = newPlayer.querySelector<HTMLElement>('.pause-icon');
if (playIcon && pauseIcon) {
playIcon.style.display = 'none';
pauseIcon.style.display = 'inline';
}
});
// Pause event
audioElement.addEventListener('pause', () => {
newPlayer.classList.remove('playing');
const playIcon = newPlayer.querySelector<HTMLElement>('.play-icon');
const pauseIcon = newPlayer.querySelector<HTMLElement>('.pause-icon');
if (playIcon && pauseIcon) {
playIcon.style.display = 'inline';
pauseIcon.style.display = 'none';
}
});
});
};
// Pause all other currently playing audio elements
const pauseAllOtherAudios = (currentAudio: HTMLAudioElement) => {
const allAudios = document.querySelectorAll('audio');
allAudios.forEach((audio) => {
if (audio !== currentAudio && !audio.paused) {
audio.pause();
const player = audio.closest('.audio-message')?.querySelector('.audio-player');
if (player) {
player.classList.remove('playing');
const playIcon = player.querySelector<HTMLElement>('.play-icon');
const pauseIcon = player.querySelector<HTMLElement>('.pause-icon');
if (playIcon && pauseIcon) {
playIcon.style.display = 'inline';
pauseIcon.style.display = 'none';
}
}
}
});
};
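// Sketch (suggestion, not part of this commit): the icon toggling above is repeated in the
// 'ended', 'play', 'pause' and click handlers; it could be factored into one helper like this.
const setPlayingState = (player: Element, playing: boolean) => {
  player.classList.toggle('playing', playing);
  const playIcon = player.querySelector<HTMLElement>('.play-icon');
  const pauseIcon = player.querySelector<HTMLElement>('.pause-icon');
  if (playIcon && pauseIcon) {
    playIcon.style.display = playing ? 'none' : 'inline';
    pauseIcon.style.display = playing ? 'inline' : 'none';
  }
};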
onBeforeUnmount(() => {
closeSSE();
// Clear the reconnect timeout
......
<template>
<div class="audio-player" :class="{ playing: isPlaying }" @click="togglePlay">
<div class="audio-icon">
<span class="play-icon" :style="{ display: isPlaying ? 'none' : 'inline' }">▶</span>
<span class="pause-icon" :style="{ display: isPlaying ? 'inline' : 'none' }">❚❚</span>
</div>
<div class="audio-wave">
<div class="wave-bar wave-bar-1"></div>
<div class="wave-bar wave-bar-2"></div>
<div class="wave-bar wave-bar-3"></div>
</div>
<audio ref="audioElement" :src="audioUrl" preload="metadata" @play="onPlay" @pause="onPause" @ended="onEnded"
@timeupdate="onTimeUpdate"></audio>
</div>
</template>
<script setup lang="ts">
import { ref, onMounted, onUnmounted, nextTick } from 'vue'
interface Props {
audioUrl: string
}
const props = defineProps<Props>()
const audioElement = ref<HTMLAudioElement>()
const isPlaying = ref(false)
// Toggle play/pause
const togglePlay = async () => {
if (!audioElement.value) return
try {
if (audioElement.value.paused) {
// Pause any other audio that is currently playing
pauseAllOtherAudios()
await audioElement.value.play()
} else {
audioElement.value.pause()
}
} catch (error) {
console.error('音频播放失败:', error)
}
}
// Play event
const onPlay = () => {
isPlaying.value = true
}
// Pause event
const onPause = () => {
isPlaying.value = false
}
// Ended event
const onEnded = () => {
isPlaying.value = false
}
// Time update event
const onTimeUpdate = () => {
// Progress display logic could be added here if needed
}
// Pause all other currently playing audio elements
const pauseAllOtherAudios = () => {
const allAudios = document.querySelectorAll('audio')
allAudios.forEach((audio) => {
if (audio !== audioElement.value && !audio.paused) {
audio.pause()
const player = audio.closest('.audio-player')
if (player) {
player.classList.remove('playing')
}
}
})
}
// Initialize on mount
onMounted(() => {
nextTick(() => {
// Make sure the audio element is mounted
if (audioElement.value) {
// Logic for when the audio finishes loading could go here
}
})
})
// Clean up on unmount
onUnmounted(() => {
if (audioElement.value) {
audioElement.value.pause()
audioElement.value.src = ''
}
})
// Expose methods to the parent component
defineExpose({
play: () => audioElement.value?.play(),
pause: () => audioElement.value?.pause(),
getCurrentTime: () => audioElement.value?.currentTime || 0,
getDuration: () => audioElement.value?.duration || 0,
isPlaying: () => !!audioElement.value && !audioElement.value.paused
})
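// Illustrative parent usage of the exposed API (component and ref names are hypothetical):
//   <AudioPlayer ref="playerRef" :audio-url="msg.audioUrl" />
//   playerRef.value?.play()
//   const seconds = playerRef.value?.getDuration()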
</script>
<style scoped lang="less">
.audio-player {
display: flex;
align-items: center;
cursor: pointer;
transition: all 0.3s ease;
user-select: none;
box-sizing: border-box;
&.playing {
.audio-wave .wave-bar {
animation: wechatWaveAnimation 1.2s ease-in-out infinite;
&.wave-bar-1 {
animation-delay: 0s;
animation-duration: 1.4s;
}
&.wave-bar-2 {
animation-delay: 0.2s;
animation-duration: 1.2s;
}
&.wave-bar-3 {
animation-delay: 0.4s;
animation-duration: 1.0s;
}
}
}
.audio-icon {
width: 24px;
height: 24px;
display: flex;
align-items: center;
justify-content: center;
margin-right: 8px;
color: #ffffff; // white icon
.play-icon,
.pause-icon {
font-size: 12px;
font-weight: bold;
}
}
.audio-wave {
display: flex;
align-items: flex-end;
gap: 1px;
margin-right: 8px;
height: 16px;
.wave-bar {
width: 3px;
background: #ffffff; // white wave bars
border-radius: 2px 2px 0 0; // rounded top, square bottom
transition: all 0.3s ease;
&.wave-bar-1 {
height: 6px;
border-radius: 3px 3px 0 0;
}
&.wave-bar-2 {
height: 10px;
border-radius: 3px 3px 0 0;
}
&.wave-bar-3 {
height: 14px;
border-radius: 3px 3px 0 0;
}
}
}
audio {
display: none; // hide the native audio controls
}
}
@keyframes wechatWaveAnimation {
0%,
100% {
transform: scaleY(0.3);
opacity: 0.6;
}
25% {
transform: scaleY(0.7);
opacity: 0.8;
}
50% {
transform: scaleY(1);
opacity: 1;
}
75% {
transform: scaleY(0.7);
opacity: 0.8;
}
}
</style>
......@@ -6,7 +6,7 @@
:class="{ 'recording': isRecording, 'disabled': disabled }"
@click="toggleRecording"
:disabled="disabled"
:title="isRecording ? '停止录音' : '开始说话'"
>
<!-- The voice icon is always visible -->
<span class="voice-icon">
......@@ -31,6 +31,7 @@
<script setup lang="ts">
import { ref, computed, onMounted, onUnmounted, nextTick } from 'vue'
import { AudioOutlined } from '@ant-design/icons-vue'
import { post } from '@/utils/axios' // the project's axios wrapper
// Component props
interface Props {
......@@ -45,7 +46,7 @@ const props = withDefaults(defineProps<Props>(), {
// Component events
const emit = defineEmits<{
audio: [audioUrl: string, audioBlob: Blob]
error: [error: string]
}>()
......@@ -217,8 +218,36 @@ const stopRecording = () => {
}
}
// Upload the audio file to the server using the project's axios wrapper
const uploadAudioFile = async (audioBlob: Blob): Promise<string> => {
try {
const formData = new FormData();
formData.append('file', audioBlob, 'recording.wav');
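// Note: the recorded Blob is audio/webm (Opus) - see sendRecordedAudio below - even though it is
// uploaded under a .wav filename; whether the backend cares about the extension or transcodes
// the file is not covered by this commit.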
formData.append('fileFolder', 'AI_TEMP');
// Call the upload endpoint with the project's axios post helper
const result = await post('/pedapi/platformService/upload/v2', formData, {
headers: {
'Content-Type': 'multipart/form-data',
'x-app-code': 'ped.qywx'
}
});
console.log('上传接口返回数据:', result);
if (result.data.code === 0) {
const filePath = result.data.data.filePath;
return filePath;
} else {
throw new Error('上传接口返回数据格式错误');
}
} catch (error) {
console.error('音频上传失败:', error);
throw error;
}
}
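// Assumed response envelope for /pedapi/platformService/upload/v2, inferred only from the checks
// above (result.data.code === 0, result.data.data.filePath); the actual contract is not shown here:
// { data: { code: 0, data: { filePath: '<server-side path to the uploaded file>' } } }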
// Send the recorded audio
const sendRecordedAudio = async () => {
if (audioChunks.value.length === 0) {
showStatusMessage('录音数据为空');
return;
......@@ -226,15 +255,29 @@ const sendRecordedAudio = () => {
const audioBlob = new Blob(audioChunks.value, { type: 'audio/webm;codecs=opus' });
try {
showStatusMessage('正在上传音频...');
// First upload the file to obtain its URL
const audioUrl = await uploadAudioFile(audioBlob);
console.log('上传接口返回的filePath:', audioUrl);
// On success, emit the audio event with both the URL and the Blob
emit('audio', audioUrl, audioBlob);
showStatusMessage('音频已发送');
if (props.debug) {
console.log('音频上传成功,URL:', audioUrl);
console.log('音频发送完成,大小:', Math.round(audioBlob.size / 1024), 'KB');
}
} catch (error) {
console.error('音频上传失败:', error);
const errorMsg = '音频上传失败,请重试';
showStatusMessage(errorMsg);
emit('error', errorMsg);
} finally {
// Clean up the recorded chunks
audioChunks.value = [];
}
}
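// End-to-end flow introduced by this commit, for orientation:
//   1. sendRecordedAudio() builds the Blob and calls uploadAudioFile(blob) -> filePath
//   2. emit('audio', filePath, blob) -> the chat component's handleVoiceAudio(url, blob)
//   3. handleVoiceAudio renders contentTemplates.audio({ audioUrl, audioBlob }) into the chat
//   4. sendAudioMessage(url) posts { questionLocalAudioFilePath: url, ...params } to
//      /aiService/ask/app/{appId}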
......
......@@ -187,7 +187,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
flex-direction: row-reverse;
justify-content: flex-end;
}
}
}
}
.avatar-container {
......@@ -426,7 +426,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
max-width: 100%;
margin: 8px 0;
// Table container
.table-container {
width: 100%;
overflow-x: auto;
......@@ -447,8 +447,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
&::-webkit-scrollbar-thumb {
background: #c1c1c1;
border-radius: 4px;
&:hover {
background: #a8a8a8;
}
}
......@@ -458,7 +457,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
width: auto;
min-width: 100%;
border-collapse: collapse;
background-color: @white;
table-layout: auto;
// Column type styles
......@@ -508,7 +507,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
height: 35px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
vertical-align: middle;
min-width: 80px;
}
......@@ -563,7 +562,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
width: 100%;
max-width: 100%;
margin: 8px 0;
border-radius: 8px;
background-color: @white;
border: 1px solid @blue-light-3;
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
......@@ -614,7 +613,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
&.iframe-loading {
iframe {
opacity: 0;
pointer-events: none;
min-height: 400px;
}
......@@ -626,7 +625,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
z-index: 10;
......@@ -642,7 +641,7 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
.iframe-loading {
display: none;
}
}
}
// Loading animation
.loading-spinner {
......@@ -848,39 +847,88 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
}
}
// Audio message styles
.message-audio {
background: linear-gradient(135deg, #f0f9ff, #e6f7ff);
border: 1px solid #91d5ff;
border-radius: 12px;
padding: 12px;
margin: 8px 0;
// Audio message styles - white theme, no background color
:deep(.audio-message) {
display: inline-block;
width: -webkit-fill-available;
audio {
display: none; // hide the native audio controls
}
.audio-indicator {
.audio-player {
display: flex;
align-items: center;
gap: 8px;
margin-bottom: 8px;
cursor: pointer;
transition: all 0.3s ease;
user-select: none;
box-sizing: border-box;
&.playing {
.audio-wave .wave-bar {
animation: waveAnimation 1.2s ease-in-out infinite;
&:nth-child(1) { animation-delay: 0s; }
&:nth-child(2) { animation-delay: 0.2s; }
&:nth-child(3) { animation-delay: 0.4s; }
&:nth-child(4) { animation-delay: 0.6s; }
&:nth-child(5) { animation-delay: 0.8s; }
}
}
.audio-icon {
font-size: 16px;
width: 24px;
height: 24px;
display: flex;
align-items: center;
justify-content: center;
margin-right: 8px;
color: #ffffff; // white icon
.play-icon, .pause-icon {
font-size: 12px;
font-weight: bold;
}
}
.audio-text {
font-size: 14px;
font-weight: 500;
color: #1890ff;
.audio-wave {
display: flex;
align-items: center;
gap: 2px;
margin-right: 8px;
.wave-bar {
width: 2px;
height: 12px;
background: #ffffff; // white wave bars
border-radius: 1px;
transition: all 0.3s ease;
&:nth-child(1) { height: 4px; }
&:nth-child(2) { height: 8px; }
&:nth-child(3) { height: 12px; }
&:nth-child(4) { height: 8px; }
&:nth-child(5) { height: 4px; }
}
}
.audio-duration {
font-size: 12px;
color: #ffffff; // white duration text
min-width: 30px;
text-align: center;
}
}
.audio-transcript {
font-size: 14px;
line-height: 1.4;
color: #595959;
background: rgba(255, 255, 255, 0.7);
padding: 8px;
border-radius: 6px;
border-left: 3px solid #1890ff;
}
@keyframes waveAnimation {
0%, 100% {
transform: scaleY(0.3);
opacity: 0.5;
}
50% {
transform: scaleY(1);
opacity: 1;
}
}
......@@ -935,4 +983,14 @@ p, h1, h2, h3, h4, h5, h6, ul, ol, li {
margin-right: 15px;
}
}
}
@keyframes waveAnimation {
0%, 100% {
transform: scaleY(0.3);
opacity: 0.5;
}
50% {
transform: scaleY(1);
opacity: 1;
}
}
\ No newline at end of file
......@@ -32,18 +32,15 @@ export default defineConfig({
});
}
},
'/ifile': {
target: 'http://peddev.cmic.com.cn',
changeOrigin: true, // handle cross-origin requests
secure: false, // allow insecure SSL connections
},
'/WeChatOauth2': {
target: 'http://peddev.cmic.com.cn',
changeOrigin: true, // handle cross-origin requests
secure: false, // allow insecure SSL connections
configure: (proxy, options) => {
proxy.on('error', (err, req, res) => {
console.log('代理错误:', err);
});
proxy.on('proxyReq', (proxyReq, req, res) => {
console.log('发送请求到:', options.target);
});
}
}
},
},
......