Conversation messages now display on both sides; UI polish and long-JSON display are still missing

commit 9f58286bfe
parent 711ed1f13e
Author: fsy
Date:   2025-03-21 17:52:02 +08:00
3 changed files with 115 additions and 71 deletions

View File

@@ -10,8 +10,8 @@ class AgoraService {
     this.volumeIndicator = null;
     // Join the same RTC channel
     this.appid = '01a1debc964a4c6a8df1de2a6ce7aa4d';
-    this.token = '007eJxTYChiW7ib6cSzPW7fP2xqFVvPqf854sNXniin39cd3pu931SlwGBgmGiYkpqUbGlmkmiSbJZokZIG5BslmiWnmicmmqRcenA7vSGQkaFhbQArIwMEgvgiDMn5eWWJmUCyOD8nNd7QwtDAxJyBAQDAkSkW';
-    this.channel = 'convaiconsole_181047';
+    this.token = '007eJxTYOhQ/af5fwa3YP9mzYq4WWxKco1TJJqEC9L6z39c0cf2xkuBwcAw0TAlNSnZ0swk0STZLNEiJQ3IN0o0S041T0w0SQk0u5veEMjIwFi9lhFIgiGIL8KQnJ9XlpgJIvzc1LjDQ3NjE3MGRgACCIjzA==';
+    this.channel = 'convaiconsole_116347';
     this.isListening = false; // whether we are currently listening
     this.isSpeaking = false; // whether the user is currently speaking
@@ -19,6 +19,7 @@ class AgoraService {
     this.recordedChunks = []; // stores the recorded speech segments
     this.inConversation = false; // whether we are in a conversation
     this.speechSegmentCallback = null; // speech-segment callback function
+    this.readableTextCallback = null; // readable-text callback function
   }
   /**
@@ -35,6 +36,52 @@ class AgoraService {
   * Set up event listeners for the Agora client
   */
  setupEventListeners() {
+    let jsonBuffer = '';
+    this.client.on('stream-message', (uid, message) => {
+      try {
+        // Make sure what we received is a byte stream (Uint8Array)
+        if (message instanceof Uint8Array) {
+          const decodedMessage = new TextDecoder().decode(message);
+          // console.log('Decoded message (Raw String):', decodedMessage);
+          const parts = decodedMessage.split('|');
+          const base64Data = parts[parts.length - 1];
+          // console.log('Extracted Base64 data:', base64Data);
+          const jsonString = atob(base64Data);
+          console.log('Decoded JSON string:', jsonString);
+          jsonBuffer += jsonString;
+          if (isCompleteJson(jsonBuffer)) {
+            const jsonData = JSON.parse(jsonBuffer); // parse the accumulated buffer into a JSON object
+            console.log('Parsed JSON object:', jsonData);
+            if (this.readableTextCallback && typeof this.readableTextCallback === 'function') {
+              this.readableTextCallback(jsonData.text); // hand the decoded text to the callback
+            }
+            jsonBuffer = '';
+            return jsonData.text;
+          }
+        } else {
+          console.log('Received message is not Uint8Array:', message);
+        }
+      } catch (error) {
+        console.error('Error processing stream-message:', error);
+      }
+    });
+    function isCompleteJson(jsonString) {
+      try {
+        JSON.parse(jsonString); // parses only if the string is valid JSON
+        return true;  // it parsed, so the JSON is complete
+      } catch (error) {
+        return false; // parse error, so the JSON is still incomplete
+      }
+    }
     this.client.on('user-published', async (user, mediaType) => {
       await this.client.subscribe(user, mediaType);
       if (mediaType === 'audio') {
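Note: the stream-message handler added above reassembles the agent's readable text, which arrives in chunks whose UTF-8 form is a '|'-delimited frame ending in a base64 payload; one JSON object may span several chunks. A minimal standalone sketch of the same decode-and-buffer path (the helper name createStreamTextDecoder and the wiring comment are illustrative, not part of this commit):

// Sketch only: a chunk decoder equivalent to the logic inside setupEventListeners().
function createStreamTextDecoder(onText) {
  let jsonBuffer = '';
  return (message) => {
    if (!(message instanceof Uint8Array)) return;       // ignore non-binary payloads
    const decoded = new TextDecoder().decode(message);  // bytes -> UTF-8 string
    const parts = decoded.split('|');                   // "<meta>|...|<base64 payload>"
    jsonBuffer += atob(parts[parts.length - 1]);        // base64 -> JSON fragment
    try {
      const jsonData = JSON.parse(jsonBuffer);          // succeeds only once the object is complete
      jsonBuffer = '';
      if (typeof onText === 'function') onText(jsonData.text);
    } catch (error) {
      // JSON still incomplete; keep buffering until the next chunk arrives
    }
  };
}

// Hypothetical wiring:
// const onChunk = createStreamTextDecoder(text => console.log('agent text:', text));
// client.on('stream-message', (uid, message) => onChunk(message));

Parsing the accumulated buffer rather than only the latest fragment is what lets a long JSON payload that spans several chunks come through intact.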
@@ -164,11 +211,13 @@ class AgoraService {
   /**
    * Start the conversation state
-   * @param {Function} callback - speech-segment callback function
+   * @param {Function} speechCallback - speech-segment callback function
+   * @param {Function} textCallback - readable-text callback function
    */
-  startConversation(callback) {
+  startConversation(speechCallback, textCallback) {
     this.inConversation = true;
-    this.speechSegmentCallback = callback;
+    this.speechSegmentCallback = speechCallback;
+    this.readableTextCallback = textCallback;
   }
   /**
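For reference, a short sketch of how a caller uses the new two-callback signature (the real wiring is in the chat store change further down; the inline arrow functions here are placeholders):

// Sketch: register both callbacks for the duration of a conversation.
agoraService.startConversation(
  segment => console.log('speech segment:', segment), // speech-segment callback
  text => console.log('readable text:', text)         // readable-text callback
);
// ...
agoraService.endConversation(); // clears both callbacks again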
@@ -177,6 +226,7 @@ class AgoraService {
   endConversation() {
     this.inConversation = false;
     this.speechSegmentCallback = null;
+    this.readableTextCallback = null;
     // If a speech segment is still being processed, send it
     if (this.segmentBuffer && this.segmentBuffer.trim()) {

View File

@@ -35,10 +35,10 @@ class ApiService {
       const response = await this.client.post(
         `${this.baseUrl}/projects/${this.projectId}/join/`,
         {
-          "name": "convaiconsole_181047",
+          "name": "convaiconsole_116347",
           "properties": {
-            "channel": "convaiconsole_181047",
-            "agent_rtc_uid": "28794",
+            "channel": "convaiconsole_116347",
+            "agent_rtc_uid": "87578",
             "remote_rtc_uids": [
               "*"
             ],
@@ -93,7 +93,7 @@ class ApiService {
"enable_metrics": true, "enable_metrics": true,
"audio_scenario": "default" "audio_scenario": "default"
}, },
"token": "007eJxTYChiW7ib6cSzPW7fP2xqFVvPqf854sNXniin39cd3pu931SlwGBgmGiYkpqUbGlmkmiSbJZokZIG5BslmiWnmicmmqRcenA7vSGQkaFhbQArIwMEgvgiDMn5eWWJmUCyOD8nNd7QwtDAxJyBAQDAkSkW", "token": "007eJxTYOhQ/af5fwa3YP9mzYq4WWxKco1TJJqEC9L6z39c0cf2xkuBwcAw0TAlNSnZ0swk0STZLNEiJQ3IN0o0S041T0w0SQk0u5veEMjIwFi9lhFIgiGIL8KQnJ9XlpgJJIvzc1LjDQ3NjE3MGRgACCIjzA==",
"advanced_features": { "advanced_features": {
"enable_aivad": false "enable_aivad": false
} }
@@ -109,31 +109,6 @@ class ApiService {
     }
   }
-  /**
-   * Send a message to the AI agent
-   * @param {string} message - The message to send
-   * @returns {Promise} - The response from the API
-   */
-  async sendMessage(message) {
-    if (!this.sessionId) {
-      throw new Error('No active session. Please join a project first.');
-    }
-    try {
-      const response = await this.client.post(
-        `${this.baseUrl}/sessions/${this.sessionId}/messages/`,
-        {
-          type: 'text',
-          content: message
-        }
-      );
-      return response.data;
-    } catch (error) {
-      console.error('Error sending message:', error);
-      throw error;
-    }
-  }
   /**
    * End the current session
    * @returns {Promise} - The response from the API

View File

@@ -47,13 +47,11 @@ export const useChatStore = defineStore('chat', {
       const status =response.status;
       await agoraService.join(agent_id, create_ts, status)
-      // await agoraService.join(token, channel, agentRtcUid);
       this.isConnected = true;
       this.error = null;
       // Add greeting message
-      // const greetingMessage = response.properties.llm.greeting_message;
       const greetingMessage = "你好呀,有什么可以帮您?";
       if (greetingMessage) {
         this.addMessage({
@@ -82,8 +80,11 @@ export const useChatStore = defineStore('chat', {
       this.isListening = true;
       this.currentTranscript = '';
-      // Set the speech-segment callback
-      agoraService.startConversation(this.handleSpeechSegment.bind(this));
+      // Set the speech-segment and readable-text callbacks
+      agoraService.startConversation(
+        this.handleSpeechSegment.bind(this),
+        this.handleReadableText.bind(this)
+      );
       // Start publishing audio
       if (!agoraService.localAudioTrack) {
@@ -95,6 +96,22 @@ export const useChatStore = defineStore('chat', {
       return true;
     },
+    /**
+     * Handle readable text
+     * @param {string} text - the readable text received from Agora
+     */
+    handleReadableText(text) {
+      if (!text || !text.trim()) return;
+      const isFromAI = text.includes('\n\n') || text.includes('<think>\n');
+      this.addMessage({
+        id: Date.now(),
+        content: text,
+        sender: isFromAI ? 'ai' : 'user',
+        timestamp: new Date().toISOString(),
+      });
+    },
     /**
      * End the conversation state and stop continuous listening
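The sender of each readable-text message is inferred from the text itself: a blank line or a '<think>\n' marker is taken to mean the message came from the agent. The same heuristic as a pure helper (hypothetical name, convenient for unit testing):

// Sketch: the sender heuristic from handleReadableText, extracted on its own.
function classifySender(text) {
  const isFromAI = text.includes('\n\n') || text.includes('<think>\n');
  return isFromAI ? 'ai' : 'user';
}
// classifySender('你好')                        -> 'user'
// classifySender('<think>\n...</think>\n\n好的') -> 'ai'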
@@ -159,6 +176,8 @@ export const useChatStore = defineStore('chat', {
      * @param {string} text - the speech segment text
      */
     handleSpeechSegment(text) {
+      console.log("我在这里!");
       if (!text || !text.trim()) return;
       // Update the current transcript
@@ -202,44 +221,44 @@ export const useChatStore = defineStore('chat', {
      * Send a message to the AI
      * @param {string} content - The message content
      */
-    async sendMessage(content) {
-      try {
-        this.isProcessing = true;
-        console.log("user send content:",content);
-        // Add user message to the list
-        this.addMessage({
-          id: Date.now(),
-          content: content,
-          sender: 'user',
-          timestamp: new Date().toISOString(),
-        });
-        // Send message to API
-        const response = await apiService.sendMessage(content);
-        // Check what the response returned
-        console.log("AI response:",response);
-        // Add AI response to the list
-        if (response && response.content) {
-          this.addMessage({
-            id: Date.now(),
-            content: response.content,
-            sender: 'ai',
-            timestamp: new Date().toISOString(),
-          });
-        }
-        this.isProcessing = false;
-        return true;
-      } catch (error) {
-        this.error = error.message || 'Failed to send message';
-        this.isProcessing = false;
-        console.error('Error sending message:', error);
-        return false;
-      }
-    },
+    // async sendMessage(content) {
+    //   try {
+    //     this.isProcessing = true;
+    //     console.log("user send content:",content);
+    //     // Add user message to the list
+    //     this.addMessage({
+    //       id: Date.now(),
+    //       content: content,
+    //       sender: 'user',
+    //       timestamp: new Date().toISOString(),
+    //     });
+    //     // Send message to API
+    //     const response = await apiService.sendMessage(content);
+    //     // Check what the response returned
+    //     console.log("AI response:",response);
+    //     // Add AI response to the list
+    //     if (response && response.content) {
+    //       this.addMessage({
+    //         id: Date.now(),
+    //         content: response.content,
+    //         sender: 'ai',
+    //         timestamp: new Date().toISOString(),
+    //       });
+    //     }
+    //     this.isProcessing = false;
+    //     return true;
+    //   } catch (error) {
+    //     this.error = error.message || 'Failed to send message';
+    //     this.isProcessing = false;
+    //     console.error('Error sending message:', error);
+    //     return false;
+    //   }
+    // },
     /**
      * Add a message to the list