Calls can be made in the RTC channel, but the call content is not displayed.

commit 6a7b8ef363 (parent 7c473b9ed1)
Author: fsy
Date: 2025-03-20 15:13:29 +08:00
3 changed files with 253 additions and 29 deletions


@@ -9,15 +9,25 @@ class AgoraService {
this.remoteUsers = {};
this.volumeIndicator = null;
this.vadEnabled = true;
// Join the same RTC channel
this.appid = '01a1debc964a4c6a8df1de2a6ce7aa4d';
this.token = '007eJxTYHi+XWtqBkPmn71LGvdmPds7sfiQfomBxpt3XMfOC53fcjVegcHAMNEwJTUp2dLMJNEk2SzRIiUNyDdKNEtONU9MNEkpibmZ3hDIyPDYXo2JkQECQXwRhuT8vLLETCBZnJ+TGm9oYmluYMDAAACcPigI';
this.channel = 'convaiconsole_149700';
this.token = '007eJxTYEibc7f9w4Ebac5HtT9ej/CL7KzPrGb+GZmn6/G+kLVp8XsFBgPDRMOU1KRkSzOTRJNks0SLlDQg3yjRLDnVPDHRJGV67630hkBGhhvth1kZGSAQxBdhSM7PK0vMBJLF+Tmp8YZGRmZGJgwMAIF3KEg=';
this.channel = 'convaiconsole_122624';
// VAD parameters, tuned for detecting speech segments
this.vadParams = {
interruptDurationMs: 160,
prefixPaddingMs: 300,
silenceDurationMs: 480,
threshold: 0.5
interruptDurationMs: 800, // treat an 800 ms break in speech as the end of an utterance
prefixPaddingMs: 300, // silence padding kept before the detected start of speech
silenceDurationMs: 1200, // 1.2 s of silence means the user has stopped speaking
threshold: 0.5 // confidence threshold for classifying audio as speech
};
this.isListening = false; // whether we are currently listening
this.isSpeaking = false; // whether the user is currently speaking
this.segmentBuffer = null; // buffer holding the current speech segment
this.recordedChunks = []; // recorded speech segments
this.inConversation = false; // whether we are in conversation mode
this.speechSegmentCallback = null; // callback invoked for each speech segment
}
/**
@@ -76,11 +86,22 @@ class AgoraService {
try {
// Join the channel
this.uid = await this.client.join(this.appid, this.channel, this.token, uid);
console.log("successful! this.uid is ", this.uid);
this.isJoined = true;
// Enable volume indicator
this.client.enableAudioVolumeIndicator();
return true;
} catch (error) {
console.error('Error joining channel:', error);
return false;
}
}
async startAudioPublishing(){
try{
// Create and publish local audio track
this.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack({
AEC: true,
@@ -91,23 +112,21 @@ class AgoraService {
// Enable VAD (Voice Activity Detection)
if (this.vadEnabled && this.localAudioTrack.setVADMode) {
this.localAudioTrack.setVADMode(true, this.vadParams);
// Set up the VAD callback to detect speech segments
if (this.inConversation) {
this.setupVADCallback();
}
}
// Publish local audio track
await this.client.publish([this.localAudioTrack]);
// Enable volume indicator
this.client.enableAudioVolumeIndicator();
console.log("status:", this.status);
return true;
} catch (error) {
console.error('Error joining channel:', error);
} catch (error) {
console.error("Error creating and publishing local audio track:", error);
return false;
}
}
/**
* Leave the channel and release resources
*/
@@ -165,6 +184,104 @@ class AgoraService {
this.localAudioTrack.setVADMode(enabled, this.vadParams);
}
}
/**
* Start conversation mode
* @param {Function} callback - callback invoked for each speech segment
*/
startConversation(callback) {
this.inConversation = true;
this.speechSegmentCallback = callback;
// If an audio track already exists, set up the VAD callback
if (this.localAudioTrack && this.vadEnabled) {
this.setupVADCallback();
} else {
// Otherwise create and publish the audio track first
this.startAudioPublishing().then(success => {
if (success && this.vadEnabled) {
this.setupVADCallback();
}
});
}
}
/**
* End conversation mode
*/
endConversation() {
this.inConversation = false;
this.speechSegmentCallback = null;
// If a speech segment is still buffered, deliver it
if (this.segmentBuffer && this.segmentBuffer.trim()) {
this.processSpeechSegment(this.segmentBuffer);
}
// Clear the buffers
this.segmentBuffer = null;
this.recordedChunks = [];
}
/**
* Set up the VAD callback
*/
setupVADCallback() {
if (!this.localAudioTrack) return;
// Register the VAD callback
this.localAudioTrack.on('vad', (result) => {
if (!this.inConversation) return;
if (result.state === 'speech_start') {
// Speech started
this.isSpeaking = true;
this.segmentBuffer = '';
// Dispatch event for UI listeners
const event = new CustomEvent('speech-start');
window.dispatchEvent(event);
}
else if (result.state === 'speech_end') {
// Speech ended
this.isSpeaking = false;
// Process the completed speech segment
if (this.segmentBuffer && this.segmentBuffer.trim()) {
this.processSpeechSegment(this.segmentBuffer);
}
// Clear the buffer
this.segmentBuffer = null;
// Dispatch event
const event = new CustomEvent('speech-end');
window.dispatchEvent(event);
}
else if (result.state === 'speech') {
// Still speaking; update the transcription result
if (result.text) {
this.segmentBuffer = result.text;
// Dispatch event
const event = new CustomEvent('speech-update', {
detail: { text: result.text }
});
window.dispatchEvent(event);
}
}
});
}
/**
* Process a speech segment
* @param {string} text - transcribed text of the speech segment
*/
processSpeechSegment(text) {
if (this.speechSegmentCallback && typeof this.speechSegmentCallback === 'function') {
this.speechSegmentCallback(text);
}
}
}
export default new AgoraService();
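
As a quick orientation for reviewers, below is a minimal sketch of how a caller might drive the conversation API added in this commit. It only uses members visible in this diff (startConversation, endConversation, and the speech-start / speech-update / speech-end window events). The import path is assumed since the changed file name is not shown here, and joining the channel beforehand is assumed to happen elsewhere.

// Hypothetical consumer sketch, not part of this commit.
// Assumes the client has already joined the RTC channel elsewhere,
// and that './AgoraService' is the path of the changed file.
import agoraService from './AgoraService';

// React to the custom DOM events dispatched by setupVADCallback().
window.addEventListener('speech-start', () => console.log('user started speaking'));
window.addEventListener('speech-update', (e) => console.log('partial transcript:', e.detail.text));
window.addEventListener('speech-end', () => console.log('user stopped speaking'));

// Enter conversation mode; the callback receives each finished speech segment.
agoraService.startConversation((text) => {
  console.log('speech segment:', text);
  // e.g. forward the text to a dialogue backend here
});

// Later, when the call ends:
// agoraService.endConversation();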