// AgoraService — Agora RTC audio client wrapper
import AgoraRTC from 'agora-rtc-sdk-ng';
/**
 * Thin wrapper around the Agora RTC NG SDK that manages a single audio-only
 * client: joining a fixed channel, publishing the local microphone track,
 * playing remote users' audio, and (where the SDK supports it) using VAD
 * events to slice the user's speech into segments for a conversation loop.
 */
class AgoraService {
  constructor() {
    this.client = null;          // AgoraRTC client, created in init()
    this.localAudioTrack = null; // local microphone track, created on publish
    this.uid = null;             // uid assigned by the SDK on join
    this.isJoined = false;
    this.remoteUsers = {};       // uid -> remote user, tracked while published
    this.volumeIndicator = null;
    this.vadEnabled = true;

    // Every client joins the same fixed RTC channel.
    // SECURITY NOTE(review): app id / token / channel are hard-coded in
    // client-side source; RTC tokens expire and should be fetched from a
    // token server instead of being shipped with the app.
    this.appid = '01a1debc964a4c6a8df1de2a6ce7aa4d';
    this.token = '007eJxTYChiW7ib2sPm1rFVvPqf854sNXniin39cd3pu931SlwGBgmGiYkpqUbGlmkmiSbJZokZIG5BslmiWnmicmmqRcenA7vSGQkaFhbQArIwMEgvgiDMn5eWWJmUCyOD8nNd7QwtDAxJyBAQDAkSkW';
    this.channel = 'convaiconsole_181047';

    // VAD tuning parameters for detecting speech segments.
    this.vadParams = {
      interruptDurationMs: 800, // a pause of 800 ms ends the current utterance
      prefixPaddingMs: 300,     // silence padding kept before detected speech
      silenceDurationMs: 1200,  // 1.2 s of silence => user stopped speaking
      threshold: 0.5            // score above which audio counts as speech
    };

    this.isListening = false;          // whether we are listening for speech
    this.isSpeaking = false;           // whether the user is currently speaking
    this.segmentBuffer = null;         // transcript of the in-progress segment
    this.recordedChunks = [];          // recorded speech segments
    this.inConversation = false;       // whether a conversation session is active
    this.speechSegmentCallback = null; // invoked with each finished segment
  }

  /**
   * Initialize the Agora RTC client and attach event listeners.
   * @returns {Object} the created AgoraRTC client
   */
  init() {
    this.client = AgoraRTC.createClient({ mode: 'rtc', codec: 'vp8' });
    this.setupEventListeners();
    return this.client;
  }

  /**
   * Set up event listeners for the Agora client: remote publish/unpublish,
   * user departure, and the local volume indicator (re-dispatched as a
   * 'local-volume' CustomEvent on window).
   */
  setupEventListeners() {
    this.client.on('user-published', async (user, mediaType) => {
      await this.client.subscribe(user, mediaType);
      if (mediaType === 'audio') {
        // Never play the local user's own audio back (avoids echo).
        if (user.uid !== this.uid) {
          user.audioTrack.play();
        } else {
          console.log("Prevented local audio playback");
        }
        this.remoteUsers[user.uid] = user;
      }
    });

    this.client.on('user-unpublished', (user) => {
      if (user.audioTrack) {
        user.audioTrack.stop();
      }
      delete this.remoteUsers[user.uid];
    });

    this.client.on('user-left', (user) => {
      delete this.remoteUsers[user.uid];
    });

    this.client.on('volume-indicator', (volumes) => {
      volumes.forEach((volume) => {
        // Forward only the local user's level to the UI layer.
        if (volume.uid === this.uid) {
          const event = new CustomEvent('local-volume', {
            detail: { level: volume.level }
          });
          window.dispatchEvent(event);
        }
      });
    });
  }

  /**
   * Join the fixed channel using the hard-coded app id / token.
   * NOTE(review): agent_id, create_ts and status are currently unused; they
   * are kept in the signature for caller compatibility.
   * @param {*} agent_id - unused
   * @param {*} create_ts - unused
   * @param {*} status - unused
   * @param {string|number|null} uid - optional uid; SDK assigns one if null
   * @returns {Promise<boolean>} true on success, false on failure
   */
  async join(agent_id, create_ts, status, uid = null) {
    try {
      this.uid = await this.client.join(this.appid, this.channel, this.token, uid);
      this.isJoined = true;
      // Enable volume reporting so 'volume-indicator' events fire.
      this.client.enableAudioVolumeIndicator();
      return true;
    } catch (error) {
      console.error('Error joining channel:', error);
      return false;
    }
  }

  /**
   * Create the local microphone track (with echo cancellation, auto gain and
   * noise suppression), disable local playback, optionally enable VAD, and
   * publish the track to the channel.
   * @returns {Promise<boolean>} true on success, false on failure
   */
  async startAudioPublishing() {
    try {
      this.localAudioTrack = await AgoraRTC.createMicrophoneAudioTrack({
        AEC: true, // acoustic echo cancellation
        AGC: true, // automatic gain control
        ANS: true, // automatic noise suppression
        encoderConfig: {
          sampleRate: 48000,
          stereo: false,
          bitrate: 128
        }
      });

      // Disable local monitoring so the user never hears their own voice.
      this.localAudioTrack.play = function () {
        console.log("Local audio playback disabled");
        return;
      };

      // Enable VAD when the SDK build exposes setVADMode.
      if (this.vadEnabled && this.localAudioTrack.setVADMode) {
        this.localAudioTrack.setVADMode(true, this.vadParams);
        // Attach the segment callback if a conversation is already running.
        if (this.inConversation) {
          this.setupVADCallback();
        }
      }

      console.log("this.localAudioTrack:", this.localAudioTrack);

      await this.client.publish([this.localAudioTrack]);
      return true;
    } catch (error) {
      console.log("Error create and publish local audio track!", error);
      return false;
    }
  }

  /**
   * Leave the channel and release local resources.
   * Safe to call before init(): the client is only left if it exists.
   */
  async leave() {
    if (this.localAudioTrack) {
      this.localAudioTrack.close();
      this.localAudioTrack = null;
    }
    // Guard added: leave() before init() previously threw on a null client.
    if (this.client) {
      await this.client.leave();
    }
    this.isJoined = false;
    this.remoteUsers = {};
  }

  /**
   * Mute or unmute the local audio track.
   * @param {boolean} mute - true to mute, false to unmute
   */
  muteAudio(mute) {
    if (this.localAudioTrack) {
      this.localAudioTrack.setEnabled(!mute);
    }
  }

  /**
   * Check if the local audio is muted.
   * NOTE(review): reads `localAudioTrack.enabled`, which is not a documented
   * public getter in agora-rtc-sdk-ng — verify against the SDK version in use.
   * @returns {boolean} whether the audio is muted (true when no track exists)
   */
  isAudioMuted() {
    return this.localAudioTrack ? !this.localAudioTrack.enabled : true;
  }

  /**
   * Merge new VAD parameters into the current set and re-apply them to the
   * live track when VAD is available.
   * @param {Object} params - partial VAD parameters to merge
   */
  setVADParams(params) {
    this.vadParams = { ...this.vadParams, ...params };
    if (this.localAudioTrack && this.localAudioTrack.setVADMode) {
      this.localAudioTrack.setVADMode(this.vadEnabled, this.vadParams);
    }
  }

  /**
   * Enable or disable VAD on the live track.
   * @param {boolean} enabled - whether to enable VAD
   */
  enableVAD(enabled) {
    this.vadEnabled = enabled;
    if (this.localAudioTrack && this.localAudioTrack.setVADMode) {
      this.localAudioTrack.setVADMode(enabled, this.vadParams);
    }
  }

  /**
   * Enter conversation mode: register the segment callback and make sure a
   * published audio track with a VAD listener exists.
   * @param {Function} callback - invoked with the text of each speech segment
   */
  startConversation(callback) {
    this.inConversation = true;
    this.speechSegmentCallback = callback;

    if (this.localAudioTrack && this.vadEnabled) {
      // Track already exists: just hook up the VAD listener.
      this.setupVADCallback();
    } else {
      // No track yet: create and publish one first.
      this.startAudioPublishing().then(success => {
        if (success && this.vadEnabled) {
          this.setupVADCallback();
        }
      });
    }
  }

  /**
   * Leave conversation mode, flushing any in-progress speech segment to the
   * registered callback before it is cleared.
   */
  endConversation() {
    // Fix: flush the pending segment while the callback is still registered.
    // Previously the callback was nulled first, so the final segment of every
    // conversation was silently dropped.
    if (this.segmentBuffer && this.segmentBuffer.trim()) {
      this.processSpeechSegment(this.segmentBuffer);
    }

    this.inConversation = false;
    this.speechSegmentCallback = null;
    this.segmentBuffer = null;
    this.recordedChunks = [];
  }

  /**
   * Attach the 'vad' event listener that buffers transcribed text during a
   * segment and dispatches speech-start / speech-update / speech-end
   * CustomEvents on window. No-op when there is no track or the SDK build
   * lacks VAD support.
   */
  setupVADCallback() {
    if (!this.localAudioTrack) {
      return;
    }
    if (typeof this.localAudioTrack.setVADMode !== 'function') {
      console.error("当前 Agora SDK 版本不支持 VAD 功能");
      // Fix: bail out — previously this fell through and attached a 'vad'
      // listener that could never fire on an unsupported SDK build.
      return;
    }

    this.localAudioTrack.on('vad', (result) => {
      console.log("VAD 事件触发,result.state:", result.state, "result:", result);

      if (!this.inConversation) return;

      if (result.state === 'speech_start') {
        // Speech started: reset the segment buffer.
        this.isSpeaking = true;
        this.segmentBuffer = '';
        window.dispatchEvent(new CustomEvent('speech-start'));
      }
      else if (result.state === 'speech_end') {
        // Speech ended: deliver the finished segment, then clear the buffer.
        this.isSpeaking = false;
        if (this.segmentBuffer && this.segmentBuffer.trim()) {
          this.processSpeechSegment(this.segmentBuffer);
        }
        this.segmentBuffer = null;
        window.dispatchEvent(new CustomEvent('speech-end'));
      }
      else if (result.state === 'speech') {
        // Mid-speech: keep the latest transcript and notify the UI.
        if (result.text) {
          this.segmentBuffer = result.text;
          const event = new CustomEvent('speech-update', {
            detail: { text: result.text }
          });
          window.dispatchEvent(event);
        }
      }
    });
  }

  /**
   * Deliver a finished speech segment to the registered callback, if any.
   * @param {string} text - the segment's transcribed text
   */
  processSpeechSegment(text) {
    if (this.speechSegmentCallback && typeof this.speechSegmentCallback === 'function') {
      this.speechSegmentCallback(text);
    }
  }
}
// Shared singleton: the whole app talks to one AgoraService instance.
export default new AgoraService();