This commit is contained in:
jiang 2025-02-14 14:30:35 +08:00
commit d19d4ef44b
24 changed files with 1083 additions and 0 deletions

5
.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.tmp" />
<excludeFolder url="file://$MODULE_DIR$/temp" />
<excludeFolder url="file://$MODULE_DIR$/tmp" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

8
.idea/modules.xml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/FloatingBotExtension.iml" filepath="$PROJECT_DIR$/.idea/FloatingBotExtension.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

122
1.html Normal file
View File

@ -0,0 +1,122 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>语音录制与播放</title>
    <style>
        .button-row {
            display: flex;
            justify-content: space-around;
            margin-bottom: 10px;
        }
        button {
            padding: 10px 20px;
            font-size: 16px;
            margin: 5px;
        }
    </style>
</head>
<body>
<div class="button-row">
    <button id="startRecord">1-开始录制语音</button>
    <button id="stopRecord">2-停止录制语音</button>
    <button id="playRecord">3-播放录制的声音</button>
</div>
<div class="button-row">
    <button id="startScreenRecord">4-获取系统扬声器声音</button>
    <button id="stopScreenRecord">5-停止获取系统扬声器声音</button>
    <button id="playScreenRecord">6-播放系统扬声器声音</button>
</div>
<audio id="audioPlayback" controls style="display:none;"></audio>
<audio id="audioPlayback2" controls style="display:none;"></audio>
<script>
    let mediaRecorder;
    let screenMediaRecorder;
    // Fix: each recorder now has its own chunk buffer. Previously both the
    // microphone and the screen recorder pushed into a single shared array,
    // so overlapping recordings produced interleaved, corrupted audio.
    let audioChunks = [];
    let screenAudioChunks = [];

    // 1 - start recording from the microphone.
    document.getElementById('startRecord').addEventListener('click', async () => {
        try {
            const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
            mediaRecorder = new MediaRecorder(stream);
            mediaRecorder.ondataavailable = event => {
                audioChunks.push(event.data);
            };
            mediaRecorder.onstop = () => {
                // NOTE(review): MediaRecorder typically emits webm/ogg, not
                // wav; the label is kept since <audio> playback sniffs the
                // container anyway — confirm if the blob is consumed elsewhere.
                const audioBlob = new Blob(audioChunks, { 'type': 'audio/wav' });
                const audioUrl = URL.createObjectURL(audioBlob);
                document.getElementById('audioPlayback').src = audioUrl;
                document.getElementById('audioPlayback').style.display = 'block';
                audioChunks = []; // Reset for next recording
                mediaRecorder = null;
                // Release the microphone.
                stream.getTracks().forEach(track => {
                    track.stop();
                });
            };
            mediaRecorder.start();
        } catch (err) {
            console.error("获取用户语音失败: ", err);
        }
    });

    // 2 - stop the microphone recording (triggers onstop above).
    document.getElementById('stopRecord').addEventListener('click', () => {
        if (mediaRecorder) {
            mediaRecorder.stop();
        }
    });

    // 3 - toggle playback of the recorded microphone audio.
    document.getElementById('playRecord').addEventListener('click', () => {
        const audioPlayback = document.getElementById('audioPlayback');
        if (audioPlayback.paused) {
            audioPlayback.play();
        } else {
            audioPlayback.pause();
        }
    });

    // 4 - start capturing system (speaker) audio via screen capture.
    document.getElementById('startScreenRecord').addEventListener('click', async () => {
        try {
            // Request the user's display media stream (audio track only is
            // requested; some browsers require sharing a tab/screen to get it).
            const stream = await navigator.mediaDevices.getDisplayMedia({ audio: true });
            screenMediaRecorder = new MediaRecorder(stream);
            screenMediaRecorder.ondataavailable = event => {
                screenAudioChunks.push(event.data);
            };
            screenMediaRecorder.onstop = () => {
                const audioBlob = new Blob(screenAudioChunks, { 'type': 'audio/wav' });
                const audioUrl = URL.createObjectURL(audioBlob);
                document.getElementById('audioPlayback2').src = audioUrl;
                document.getElementById('audioPlayback2').style.display = 'block';
                screenAudioChunks = []; // Reset for next recording
                screenMediaRecorder = null;
                stream.getTracks().forEach(track => {
                    track.stop();
                });
            };
            screenMediaRecorder.start();
        } catch (err) {
            // Fix: this path captures system audio, not the user's voice —
            // the previous message was copy-pasted from the mic handler.
            console.error("获取系统扬声器声音失败: ", err);
        }
    });

    // 5 - stop the system-audio recording.
    document.getElementById('stopScreenRecord').addEventListener('click', () => {
        if (screenMediaRecorder) {
            screenMediaRecorder.stop();
        }
    });

    // 6 - toggle playback of the captured system audio.
    document.getElementById('playScreenRecord').addEventListener('click', () => {
        const audioPlayback = document.getElementById('audioPlayback2');
        if (audioPlayback.paused) {
            audioPlayback.play();
        } else {
            audioPlayback.pause();
        }
    });
</script>
</body>
</html>

69
IatRecorder.js Normal file
View File

@ -0,0 +1,69 @@
var crypto = require('crypto');
var WebSocketClient = require('websocket').client;
var fs = require('fs');
// AIUI websocket service endpoint.
var BASE_URL = "wss://wsapi.xfyun.cn/v1/aiui";
var ORIGIN = "http://wsapi.xfyun.cn";
// Application ID, created and configured on the AIUI open platform.
// NOTE(review): "xxxx" is a placeholder — this script cannot authenticate
// until real credentials are supplied.
var APPID = "xxxx";
// API key, shown in the AIUI open platform console.
var APIKEY = "xxxx";
// Business parameters (JSON string): raw 16 kHz audio, 'main_box' scene,
// plain-text results; the nested context advertises client TTS support.
var PARAM = "{\"auth_id\":\"f8948af1d2d6547eaf09bc2f20ebfcc6\",\"data_type\":\"audio\",\"scene\":\"main_box\",\"sample_rate\":\"16000\",\"aue\":\"raw\",\"result_level\":\"plain\",\"context\":\"{\\\"sdk_support\\\":[\\\"tts\\\"]}\"}";
// 计算握手参数
/**
 * Compute the AIUI websocket handshake query string.
 * checksum = md5(APIKEY + curtime + base64(PARAM)), per the AIUI protocol.
 * @returns {string} query string beginning with '?'
 */
function getHandshakeParams(){
    // Fix: Buffer.from() replaces the deprecated (and removal-scheduled)
    // `new Buffer()` constructor.
    var paramBase64 = Buffer.from(PARAM).toString('base64');
    var curtime = Math.floor(Date.now()/1000); // unix seconds
    var originStr = APIKEY + curtime + paramBase64;
    var checksum = crypto.createHash('md5').update(originStr).digest("hex");
    var handshakeParams = "?appid="+APPID+"&checksum="+checksum+"&curtime="+curtime+"&param="+paramBase64;
    console.log(handshakeParams);
    return handshakeParams;
}
// Create the websocket client (from the `websocket` npm package).
var client = new WebSocketClient();
client.on('connectFailed', function(error) {
    console.log('Connect Error: ' + error.toString());
});
client.on('connect', function(connection) {
    console.log('WebSocket client connected');
    connection.on('error', function(error) {
        console.log("Connection Error: " + error.toString());
    });
    connection.on('close', function() {
        console.log('echo-protocol Connection Closed');
    });
    connection.on('message', function(message) {
        // AIUI returns recognition/answer results as UTF-8 text frames.
        if (message.type === 'utf8') {
            console.log("Received: '" + message.utf8Data + "'");
        }
    });
    // Stream the local PCM file to the server chunk by chunk, then send the
    // literal "--end--" marker that signals end of audio.
    function sendMsg() {
        if (connection.connected) {
            let audioFile = fs.createReadStream('./weather.pcm');
            let idx = 0;
            audioFile.on("data", function(data) {
                console.log("发送音频块 ", idx++);
                connection.sendBytes(data);
            });
            audioFile.on("close", function() {
                connection.sendUTF("--end--");
            });
        }
    }
    // Start streaming as soon as the connection is established.
    sendMsg();
});
// Open the connection; the handshake query string carries auth + session config.
client.connect(BASE_URL+getHandshakeParams(), "", ORIGIN);

4
background.js Normal file
View File

@ -0,0 +1,4 @@
// background.js — MV3 service worker; logs once when the extension is installed.
chrome.runtime.onInstalled.addListener(() => {
    console.log("Floating Bot Extension 已安装");
});

54
content.js Normal file
View File

@ -0,0 +1,54 @@
// Content script: injects a small draggable "bot" window into every page.
(function () {
    // Build the floating window and append it to the page body.
    let botWindow = document.createElement("div");
    botWindow.id = "floating-bot";
    botWindow.innerHTML = `
<div id="bot-header">🤖 小机器人</div>
<div id="bot-body">你好我是你的网页助手</div>
`;
    document.body.appendChild(botWindow);
    // Inject the widget's styles. The window starts anchored to the
    // bottom-right corner via `bottom`/`right`.
    let style = document.createElement("style");
    style.innerHTML = `
#floating-bot {
position: fixed;
width: 200px;
height: 150px;
bottom: 50px;
right: 50px;
background: white;
border: 2px solid #ccc;
border-radius: 10px;
box-shadow: 2px 2px 10px rgba(0,0,0,0.2);
z-index: 9999;
font-family: Arial, sans-serif;
}
#bot-header {
background: #007bff;
color: white;
padding: 10px;
cursor: move;
border-top-left-radius: 10px;
border-top-right-radius: 10px;
}
#bot-body {
padding: 10px;
}
`;
    document.head.appendChild(style);
    // Drag handling: only the header initiates a drag; the offset keeps the
    // grab point fixed under the cursor.
    let isDragging = false, offsetX, offsetY;
    botWindow.querySelector("#bot-header").addEventListener("mousedown", (e) => {
        isDragging = true;
        offsetX = e.clientX - botWindow.offsetLeft;
        offsetY = e.clientY - botWindow.offsetTop;
    });
    // NOTE(review): dragging sets inline left/top while the stylesheet anchors
    // with bottom/right; with explicit width/height the inline values win, but
    // confirm there is no visual jump on the first drag.
    document.addEventListener("mousemove", (e) => {
        if (isDragging) {
            botWindow.style.left = `${e.clientX - offsetX}px`;
            botWindow.style.top = `${e.clientY - offsetY}px`;
        }
    });
    document.addEventListener("mouseup", () => {
        isDragging = false;
    });
})();

BIN
icons/icon128.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 80 KiB

BIN
icons/icon16.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 80 KiB

BIN
icons/icon48.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 80 KiB

121
js/audio.js Normal file
View File

@ -0,0 +1,121 @@
/**
 * Convert raw Float32 audio samples into a plain byte array:
 * downsample to 16 kHz, encode as 16-bit little-endian PCM, then flatten
 * the resulting bytes into a regular Array.
 */
function transcode(audioData) {
    const resampled = to16kHz(audioData);
    const pcmView = to16BitPCM(resampled);
    return Array.from(new Uint8Array(pcmView.buffer));
}
/**
 * Resample audio to 16 kHz using linear interpolation.
 *
 * Generalized: the input sample rate, previously hard-coded to 44100, is now
 * a parameter defaulting to 44100 (backward compatible).
 * Also guards the fitCount <= 1 edge cases that previously computed an
 * Infinity step (the observable results are preserved).
 *
 * @param {Float32Array|number[]} audioData - input samples
 * @param {number} [sourceRate=44100] - sample rate of the input
 * @returns {Float32Array} samples resampled to 16 kHz
 */
function to16kHz(audioData, sourceRate = 44100) {
    // Copy so the caller's buffer is never modified.
    let data = new Float32Array(audioData);
    // Number of samples at the target rate.
    let fitCount = Math.round(data.length * (16000 / sourceRate));
    let newData = new Float32Array(fitCount);
    if (fitCount === 0) {
        return newData; // empty input -> empty output
    }
    if (fitCount === 1) {
        // Matches the original behavior: the single output sample ends up
        // being the last input sample.
        newData[0] = data[data.length - 1];
        return newData;
    }
    // Fractional step through the source for each output sample.
    let springFactor = (data.length - 1) / (fitCount - 1);
    newData[0] = data[0]; // first sample passes through
    for (let i = 1; i < fitCount - 1; i++) {
        let tmp = i * springFactor;
        // Neighboring source indices around the fractional position.
        let before = Math.floor(tmp);
        let after = Math.ceil(tmp);
        // Linear interpolation weight.
        let atPoint = tmp - before;
        newData[i] = data[before] + (data[after] - data[before]) * atPoint;
    }
    newData[fitCount - 1] = data[data.length - 1]; // last sample passes through
    return newData;
}
/**
 * Encode Float32 samples (expected in [-1, 1]) as 16-bit little-endian PCM.
 * Values outside [-1, 1] are clamped.
 * @param {Float32Array} input - float samples
 * @returns {DataView} view over the encoded PCM bytes (2 bytes per sample)
 */
function to16BitPCM(input) {
    const view = new DataView(new ArrayBuffer(input.length * 2));
    for (let i = 0; i < input.length; i++) {
        // Clamp, then scale asymmetrically: negatives map onto [-0x8000, 0),
        // non-negatives onto [0, 0x7fff].
        const clamped = Math.max(-1, Math.min(1, input[i]));
        const sample = clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff;
        view.setInt16(i * 2, sample, true); // little-endian
    }
    return view;
}
/**
 * Decode Base64-encoded 16-bit PCM, normalize to floats, and resample.
 * NOTE(review): the default toRate of 22505 looks like a typo for the
 * standard 22050 Hz — kept as-is to preserve behavior; confirm with callers.
 * @param {string} audioDataStr - Base64 PCM payload
 * @param {number} [fromRate=16000] - source sample rate
 * @param {number} [toRate=22505] - target sample rate
 * @returns {number[]} resampled float samples as a plain array
 */
function transToAudioData(audioDataStr, fromRate = 16000, toRate = 22505) {
    const s16 = base64ToS16(audioDataStr);
    const f32 = transS16ToF32(s16);
    const resampled = transSamplingRate(f32, fromRate, toRate);
    return Array.from(resampled);
}
/**
 * Resample float audio from one rate to another via linear interpolation.
 * First and last input samples always pass through unchanged.
 * @param {Float32Array} data - input samples
 * @param {number} [fromRate=44100] - source sample rate
 * @param {number} [toRate=16000] - target sample rate
 * @returns {Float32Array} resampled data
 */
function transSamplingRate(data, fromRate = 44100, toRate = 16000) {
    // Output length at the target rate.
    const fitCount = Math.round(data.length * (toRate / fromRate));
    const resampled = new Float32Array(fitCount);
    // Fractional stride through the source per output sample.
    const step = (data.length - 1) / (fitCount - 1);
    resampled[0] = data[0];
    for (let i = 1; i < fitCount - 1; i++) {
        const pos = i * step;
        const lo = Math.floor(pos);
        const hi = Math.ceil(pos);
        const frac = pos - lo; // interpolation weight
        resampled[i] = data[lo] + (data[hi] - data[lo]) * frac;
    }
    resampled[fitCount - 1] = data[data.length - 1];
    return resampled;
}
/**
 * Normalize signed 16-bit samples to floats in [-1, 1].
 * Negative values divide by 0x8000, non-negative by 0x7fff, so both
 * extremes map exactly to -1 and 1.
 * @param {Int16Array} input - PCM samples
 * @returns {Float32Array} normalized samples
 */
function transS16ToF32(input) {
    const out = new Float32Array(input.length);
    for (let i = 0; i < input.length; i++) {
        const v = input[i];
        out[i] = v < 0 ? v / 0x8000 : v / 0x7fff;
    }
    return out;
}
/**
 * Decode a Base64 string into signed 16-bit samples.
 * The decoded bytes are reinterpreted in place (platform/little endian,
 * matching the buffers produced by to16BitPCM).
 * @param {string} base64AudioData - Base64-encoded PCM bytes
 * @returns {Int16Array} view over the decoded samples
 */
function base64ToS16(base64AudioData) {
    const binary = atob(base64AudioData);
    const bytes = new Uint8Array(binary.length);
    for (let i = 0; i < binary.length; ++i) {
        bytes[i] = binary.charCodeAt(i);
    }
    // Reinterpret the byte buffer as 16-bit samples (the intermediate
    // DataView in the original added nothing: it exposed the same buffer).
    return new Int16Array(bytes.buffer);
}

101
js/constant.js Normal file
View File

@ -0,0 +1,101 @@
// Enum of interaction modes supported by the client.
const InteractMode = {
    // Voice interaction.
    AUDIO: 'audio',
    // Text interaction.
    TEXT: 'text',
};
// Enum of deployment environment flags (string values sent to the backend).
const FormalEnvironment = {
    // '0' — test environment.
    TEST: '0',
    // '1' — production environment.
    PROD: '1'
};
// Constant: client configuration for the interaction service.
// WARNING(review): the endpoint IP, bot id, org code and app id are
// hard-coded and committed to source control — consider moving them to
// configuration and rotating anything sensitive.
const Constant = {
    // Interaction-cloud WebSocket endpoint.
    // (An earlier comment referenced port 26002; the actual value uses 26003.)
    INTERACT_SOCKET_URL: 'ws://103.8.34.136:26003/createRec',
    // Unique bot identifier.
    INTERACT_BOT_ID: '2557854368452906',
    // Organization code used in request auth — treat as confidential.
    INTERACT_ORG_CODE: '6b9fe858-1efc-43e7-abf1-ab085a086ebf',
    // Application id.
    INTERACT_APP_ID: 'ef014ded',
    // Scene name identifying the interaction context.
    INTERACT_SCENE: 'main_box',
    // Reported user location (free-form text, e.g. a station or city name).
    INTERACT_LOCATION: '合肥',
    // Active environment flag; see FormalEnvironment.
    INTERACT_FORMAL_ENVIRONMENT: FormalEnvironment.PROD,
};
// Resolve a connection target from an interaction type name.
// NOTE(review): despite the name, this returns the socket URL for 'AUDIO';
// every other type (including 'TEXT') is rejected — confirm intended scope.
function getInteractType(type) {
    if (type === 'AUDIO') {
        return Constant.INTERACT_SOCKET_URL;
    }
    throw new Error('未知的交互类型');
}

1
js/crypto-js.min.js vendored Normal file

File diff suppressed because one or more lines are too long

167
js/handleMessage.js Normal file
View File

@ -0,0 +1,167 @@
/**
 * Build the JSON payload for one chunk of microphone audio sent to the
 * interaction socket (iat = speech recognition).
 * @param {number[]} data - transcoded PCM bytes (see transcode())
 * @param {string} sid - session id, also used as trace id
 * @param {*} endFlag - NOTE(review): unused; iatParams hard-codes
 *     endFlag: true below — confirm whether this parameter should be wired in
 * @param {string} languageType - '2' => cn, '3' => en, anything else => cn
 * @returns {string} JSON-encoded payload string
 */
function buildAudioPayload(data, sid, endFlag, languageType) {
    // NOTE(review): customParams is built but never referenced — unlike
    // buildTextPayload, no 'dics' ability entry carries it; confirm whether
    // it should be added to abilityList or removed.
    let customParams = {
        botId: Constant.INTERACT_BOT_ID,
        orgCode: Constant.INTERACT_ORG_CODE,
        sid: sid,
        deviceId: '123',
        location: Constant.INTERACT_LOCATION,
        languageType: languageType === '2' ? 'cn' : languageType === '3' ? 'en' : 'cn',
    };
    // Recognition (iat) parameters: progressive results (wpgs/apd), VAD off,
    // long end-of-speech timeout, JSON results.
    let iatParams = {
        sid: sid,
        aue: 'raw',
        dwa: 'wpgs',
        pgs: 'apd',
        endFlag: true,
        vad_switch: 'false',
        eos: '60000',
        rse: 'utf-8',
        rst: 'json',
        engine_param: `pproc_param_puncproc=false;wdec_param_LanguageTypeChoice=${languageType || '1'}`,
    };
    let payload = {
        // Audio bytes are Base64-encoded for transport.
        data: toBase64(data),
        appid: Constant.INTERACT_APP_ID,
        scene: Constant.INTERACT_SCENE,
        debug: true,
        sessionParams: {
            id: sid,
            traceId: sid,
            reset: false,
            abilityList: [
                {
                    abilityCode: 'iat',
                    serviceName: 'iat',
                    param: JSON.stringify(iatParams),
                },
            ],
        },
    };
    return JSON.stringify(payload);
}
/**
 * Build the JSON payload for a text request (dics dialogue + tts synthesis).
 *
 * Fix: the bot/org/app/scene/location fields previously held the literal
 * placeholder strings 'INTERACT_BOT_ID', 'INTERACT_ORG_CODE', 'INTERACT_APPID',
 * 'INTERACT_SCENE' and a hard-coded location instead of the configured values —
 * they are now read from Constant, consistent with buildAudioPayload.
 *
 * @param {string} data - text to send (Base64-encoded into the payload)
 * @param {string} sid - session id, also used as trace id
 * @returns {string} JSON-encoded payload string
 */
function buildTextPayload(data, sid) {
    let customParams = {
        botId: Constant.INTERACT_BOT_ID,
        orgCode: Constant.INTERACT_ORG_CODE,
        sid: sid,
        location: Constant.INTERACT_LOCATION,
        deviceId: '123',
    };
    // TTS parameters: raw 16 kHz audio at volume 20.
    let ttsParams = {
        audio_coding: 'raw',
        sample_rate: '16000',
        sid: sid,
        volume: '20',
    };
    let payload = {
        data: btoa(data),
        appid: Constant.INTERACT_APP_ID,
        scene: Constant.INTERACT_SCENE,
        debug: true,
        sessionParams: {
            id: sid,
            traceId: sid,
            reset: false,
            abilityList: [
                {
                    abilityCode: 'dics',
                    param: JSON.stringify(customParams),
                },
                {
                    abilityCode: 'tts',
                    param: JSON.stringify(ttsParams),
                },
            ],
        },
    };
    return JSON.stringify(payload);
}
/**
 * Base64-encode a byte buffer (ArrayBuffer or array-like of bytes).
 * @param {ArrayBuffer|ArrayLike<number>} buffer - raw bytes
 * @returns {string} Base64 string
 */
function toBase64(buffer) {
    const bytes = new Uint8Array(buffer);
    let binary = '';
    for (const b of bytes) {
        binary += String.fromCharCode(b);
    }
    return btoa(binary);
}
// Rolling recognition state shared across decodeMessage() calls:
// iatText holds the current hypothesis text, iatAppendText the confirmed
// prefix that 'apd' (append) frames build on.
let iatText = '';
let iatAppendText = '';
/**
 * Decode one websocket message from the AIUI service into an AIUIResult.
 * Stateful: progressive-recognition frames mutate iatText/iatAppendText.
 * @param {string} messageData - raw JSON message text
 * @returns {AIUIResult} ERROR on parse failure or unknown sub-type,
 *     IAT_FINAL on the last segment, IAT_REALTIME otherwise
 */
function decodeMessage(messageData) {
    // Default to an ERROR result; replaced on successful parse.
    let aiuiResult = new AIUIResult(AIUI_TYPE.ERROR, messageData);
    try {
        const result = JSON.parse(messageData);
        if (result) {
            // result.result is itself a JSON string: an array of ability results.
            const json = JSON.parse(result.result);
            let sub = json[0].sub;
            let data = json[0].data;
            if (sub === 'iat' && data && data !== '') {
                let iat = JSON.parse(data);
                let pgs = iat.pgs; // progressive mode: 'rpl' replace / 'apd' append
                let ls = iat.ls;   // last-segment flag
                if (ls) {
                    // Final frame: emit the accumulated text and reset state.
                    aiuiResult = new AIUIResult(AIUI_TYPE.IAT_FINAL, messageData);
                    aiuiResult.setFinalText(iatText);
                    iatText = '';
                    return aiuiResult;
                }
                if (pgs === 'rpl') {
                    // Replace: drop the current hypothesis, keep the confirmed prefix.
                    iatText = '';
                    iatText += iatAppendText;
                } else if (pgs === 'apd') {
                    // Append: promote the current text to the confirmed prefix.
                    iatAppendText = '';
                    if (iatText != null) {
                        iatAppendText += iatText;
                    }
                    iatText = '';
                }
                if (iat.ws && iat.ws.length > 0) {
                    // Concatenate the best candidate word of each segment.
                    iat.ws.forEach((item) => {
                        iatText += item.cw[0].w;
                    });
                }
                aiuiResult = new AIUIResult(AIUI_TYPE.IAT_REALTIME, messageData);
                aiuiResult.setRealtimeText(iatText);
                return aiuiResult;
            }
        }
    } catch (e) {
        console.error(e);
    }
    return aiuiResult;
}
// Result categories produced by decodeMessage().
const AIUI_TYPE = {
    IAT_REALTIME: 'iat_realtime', // intermediate recognition hypothesis
    IAT_FINAL: 'iat_final',       // final recognition text
    ERROR: 'error',               // unparseable or unexpected message
};
// Wrapper around one decoded AIUI message: carries its category, the raw
// JSON text, and (for iat results) the extracted recognition text.
class AIUIResult {
    constructor(type, json) {
        this.type = type;
        this.json = json;
    }
    setRealtimeText(text) { this.realtimeText = text; }
    setFinalText(text) { this.finalText = text; }
    getReal() { return this.realtimeText; }
    getFinal() { return this.finalText; }
    getJSON() { return this.json; }
}

74
js/iat.js Normal file
View File

@ -0,0 +1,74 @@
// WARNING(review): real-looking AIUI credentials are committed here — rotate
// them and load from secure configuration instead of source control.
const APPID = "6a744d1b";
const APIKEY = "27d1b3d2f5d1bd93e7c94330b1675ee2";
// Handshake parameters: raw 16 kHz audio into the 'main_box' scene.
const PARAM = {
    auth_id: "f8948af1d2d6547eaf09bc2f20ebfcc6",
    data_type: "audio",
    scene: "main_box",
    aue: "raw",
    sample_rate: "16000",
};
/**
 * Build the websocket handshake query string:
 * checksum = MD5(APIKEY + curtime + base64(JSON(PARAM))).
 * @returns {string} query string beginning with '?'
 */
function getHandshakeParams() {
    const encodedParam = btoa(JSON.stringify(PARAM));
    const timestamp = Math.floor(Date.now() / 1000); // unix seconds
    const digest = CryptoJS.MD5(APIKEY + timestamp + encodedParam).toString();
    return `?appid=${APPID}&checksum=${digest}&curtime=${timestamp}&param=${encodedParam}`;
}
/**
 * Encode Int16Array PCM samples as a Base64 string of little-endian bytes.
 * @param {Int16Array} buffer - PCM samples
 * @returns {string} Base64 string
 */
function pcmToBase64(buffer) {
    const chars = [];
    for (const sample of buffer) {
        // Low byte first, then high byte (little-endian).
        chars.push(String.fromCharCode(sample & 0xff, (sample >> 8) & 0xff));
    }
    return btoa(chars.join(""));
}
/**
 * Record from the microphone and stream transcoded PCM frames to the
 * interaction websocket as iat (recognition) payloads.
 * NOTE(review): frames are sent as soon as the processor produces them, even
 * before the socket reports OPEN — WebSocketUtil.send() silently drops
 * frames while the connection is not open; confirm this is acceptable.
 */
async function startMicrophoneStreaming() {
    const wsUtil = new WebSocketUtil(Constant.INTERACT_SOCKET_URL + getHandshakeParams());
    const audioProcessor = new WebRTCAudioProcessor();
    // Start capturing; each PCM chunk is transcoded and wrapped in a payload.
    audioProcessor.start((pcmData)=>{
        wsUtil.send(buildAudioPayload(transcode(pcmData),"","","cn"))
    })
    .then(() => {
        console.log('Audio processor started successfully.');
    })
    .catch((error) => {
        console.error('Failed to start audio processor:', error);
    });
    // Wire websocket lifecycle callbacks, then connect.
    wsUtil.onOpen(() => {
        console.log("✅ WebSocket 连接成功");
        /*mic.startRecording((pcmData) =>{
            console.log("📤 发送音频数据...");
            console.log(pcmData)
        })*/
    });
    wsUtil.onMessage((event) =>{});
    wsUtil.onError((event) => console.error("❌ WebSocket 错误:", event));
    wsUtil.onClose(() => console.log("🔴 WebSocket 连接已关闭"));
    wsUtil.connect();
}
// NOTE(review): capture starts on script load — the microphone permission
// prompt appears immediately when this script is loaded (e.g. popup open).
startMicrophoneStreaming();

91
js/log.js Normal file
View File

@ -0,0 +1,91 @@
// Current local time formatted as "YYYY-MM-DD HH:MM:SS:mmm".
function getDateTime() {
    const now = new Date();
    const pad = (value, width = 2) => String(value).padStart(width, '0');
    const datePart = `${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}`;
    const timePart = `${pad(now.getHours())}:${pad(now.getMinutes())}:${pad(now.getSeconds())}:${pad(now.getMilliseconds(), 3)}`;
    return `${datePart} ${timePart}`;
}
// StaticLogger: formatted console logging helper.
// Fixes: (1) the four level methods were copy-pasted with identical branch
// logic — consolidated into one private dispatch (#emit); (2) the old
// comments claimed `%c` color styling that the code never applied.
class StaticLogger {
    // Global on/off switch for all log output.
    static enabled = true;

    /**
     * Format one log line.
     * With a message: "🚀 [tag] [timestamp] message".
     * Without (falsy) message: "[timestamp] 🚀 [tag]".
     * @param {string} tag - log tag
     * @param {*} [message] - log content (omitted when logging objects)
     * @returns {string} formatted line
     */
    static formatMessage(tag, message) {
        const timeStamp = `[${getDateTime()}]`;
        const taggedLabel = `🚀 [${tag}]`;
        return message ? `${taggedLabel} ${timeStamp} ${message}` : `${timeStamp} ${taggedLabel}`;
    }

    // Shared implementation for all levels; `level` names a console method.
    static #emit(level, tag, message) {
        if (!this.enabled) return;
        if (message instanceof Object) {
            // Objects are passed as a second console argument so the
            // devtools can render them expandably.
            console[level](this.formatMessage(tag), message);
        } else {
            console[level](this.formatMessage(tag, message));
        }
    }

    static log(tag, message) { this.#emit('log', tag, message); }
    static info(tag, message) { this.#emit('info', tag, message); }
    static warn(tag, message) { this.#emit('warn', tag, message); }
    static error(tag, message) { this.#emit('error', tag, message); }

    // Enable or disable all logging at runtime.
    static setEnabled(enabled) {
        this.enabled = enabled;
    }
}
// Expose StaticLogger globally so sibling <script> files can use it directly.
window.StaticLogger = StaticLogger;

0
js/tts.js Normal file
View File

View File

@ -0,0 +1,79 @@
/**
 * Captures microphone audio via getUserMedia and forwards raw PCM chunks
 * (mono Float32Array, 4096-sample buffers) to a caller-supplied callback.
 */
class WebRTCAudioProcessor {
    constructor() {
        this.audioContext = null;
        this.scriptProcessor = null;
        this.source = null;
        this.onProcessAudio = null; // externally supplied PCM callback
    }

    /**
     * Start capturing audio and streaming PCM chunks to the callback.
     * No-op (with a warning) if already running.
     * @param {Function} onProcessAudio - receives each Float32Array chunk
     * @returns {Promise<void>}
     * @throws {Error} if onProcessAudio is not a function, or mic access fails
     */
    async start(onProcessAudio) {
        if (this.audioContext) {
            console.warn('Audio processor is already running.');
            return;
        }
        if (typeof onProcessAudio !== 'function') {
            throw new Error('onProcessAudio must be a function');
        }
        this.onProcessAudio = onProcessAudio;
        try {
            const micStream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
            this._setupAudioContext(micStream);
        } catch (error) {
            console.error('Error accessing microphone:', error);
            throw error;
        }
    }

    /**
     * Wire stream -> ScriptProcessorNode -> destination and forward each
     * PCM buffer to the registered callback.
     * @param {MediaStream} stream - microphone stream
     */
    _setupAudioContext(stream) {
        const ContextCtor = window.AudioContext || window.webkitAudioContext;
        this.audioContext = new ContextCtor();
        this.source = this.audioContext.createMediaStreamSource(stream);
        // 4096-frame buffers, 1 input channel, 1 output channel.
        this.scriptProcessor = this.audioContext.createScriptProcessor(4096, 1, 1);
        this.source.connect(this.scriptProcessor);
        this.scriptProcessor.connect(this.audioContext.destination);
        this.scriptProcessor.onaudioprocess = (event) => {
            // Channel 0 only — the node is created as mono.
            this.onProcessAudio(event.inputBuffer.getChannelData(0));
        };
    }

    /**
     * Stop processing and release audio resources.
     */
    stop() {
        if (this.scriptProcessor) {
            this.scriptProcessor.disconnect();
            this.scriptProcessor = null;
        }
        if (this.source) {
            this.source.disconnect();
            this.source = null;
        }
        if (this.audioContext) {
            // close() is async; clear the reference once it settles.
            this.audioContext.close().then(() => {
                this.audioContext = null;
            });
        }
        this.onProcessAudio = null;
    }
}

105
js/webSocketUtil.js Normal file
View File

@ -0,0 +1,105 @@
/**
 * Thin wrapper around the browser WebSocket with pluggable lifecycle callbacks.
 */
class WebSocketUtil {
    /**
     * @param {string} url WebSocket server address
     */
    constructor(url) {
        this.url = url;
        this.websocket = null;
        this.onOpenCallback = null;
        this.onMessageCallback = null;
        this.onErrorCallback = null;
        this.onCloseCallback = null;
    }
    /**
     * Open the connection and install the event handlers.
     * Calling connect() while a socket exists is a logged no-op.
     */
    connect() {
        if (this.websocket) {
            StaticLogger.info("WebSocket connect","WebSocket 已经初始化。")
            return;
        }
        this.websocket = new WebSocket(this.url);
        this.websocket.onopen = (event) => {
            StaticLogger.info("WebSocket onopen","WebSocket 连接已打开。")
            if (this.onOpenCallback) this.onOpenCallback(event);
        };
        this.websocket.onmessage = (event) => {
            // NOTE(review): decodeMessage() is stateful (it accumulates the
            // current recognition text); calling it here just to build the log
            // line also advances that state before onMessageCallback runs —
            // confirm this double-decode is intended.
            StaticLogger.info("WebSocket onmessage","WebSocket 接收到消息事件:"+ decodeMessage(event.data).realtimeText);
            if (this.onMessageCallback) this.onMessageCallback(event);
        };
        this.websocket.onerror = (event) => {
            StaticLogger.error("WebSocket onerror","WebSocket 发生错误:")
            if (this.onErrorCallback) this.onErrorCallback(event);
        };
        this.websocket.onclose = (event) => {
            StaticLogger.info("WebSocket onclose","WebSocket 连接已关闭:")
            if (this.onCloseCallback) this.onCloseCallback(event);
        };
    }
    /**
     * Send a message when the socket is OPEN; otherwise the message is
     * dropped with a warning (no buffering or retry).
     * @param {string} message message to send
     */
    send(message) {
        StaticLogger.info("WebSocket send","WebSocket 发送消息")
        if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
            this.websocket.send(message);
        } else {
            StaticLogger.warn("WebSocket send","WebSocket 连接未打开,无法发送消息。")
        }
    }
    /**
     * Close the connection if one exists.
     */
    close() {
        if (this.websocket) {
            this.websocket.close();
        }
    }
    /**
     * @param {function} callback invoked when the connection opens
     */
    onOpen(callback) {
        this.onOpenCallback = callback;
    }
    /**
     * @param {function} callback invoked for each received message
     */
    onMessage(callback) {
        this.onMessageCallback = callback;
    }
    /**
     * @param {function} callback invoked on socket error
     */
    onError(callback) {
        this.onErrorCallback = callback;
    }
    /**
     * @param {function} callback invoked when the connection closes
     */
    onClose(callback) {
        this.onCloseCallback = callback;
    }
}

25
manifest.json Normal file
View File

@ -0,0 +1,25 @@
{
  "manifest_version": 3,
  "name": "Floating Bot Extension",
  "version": "1.0",
  "description": "一个可拖动的网页悬浮机器人窗口",
  "permissions": ["activeTab", "scripting", "storage"],
  "host_permissions": ["<all_urls>"],
  "background": {
    "service_worker": "background.js"
  },
  "content_scripts": [
    {
      "matches": ["<all_urls>"],
      "js": ["content.js"]
    }
  ],
  "action": {
    "default_popup": "popup.html",
    "default_icon": {
      "16": "icons/icon16.png",
      "48": "icons/icon48.png",
      "128": "icons/icon128.png"
    }
  }
}

24
popup.html Normal file
View File

@ -0,0 +1,24 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Floating Bot</title>
    <style>
        body { font-family: Arial, sans-serif; padding: 10px; }
        button { padding: 10px; cursor: pointer; }
    </style>
</head>
<body>
<h2>浮动机器人</h2>
<!-- NOTE(review): #button1/#button2 have no click handlers anywhere in the
     extension, and popup.js (which expects a #toggleBot element) is not
     loaded by this page — confirm the intended wiring. -->
<button id="button1">开启麦克风</button>
<button id="button2">关闭麦克风</button>
<!-- Load order matters: crypto/constants/log/audio helpers first, then the
     websocket + recognition pipeline; iat.js starts the microphone stream
     immediately on load. -->
<script src="js/crypto-js.min.js"></script>
<script src="js/constant.js"></script>
<script src="js/log.js"></script>
<script src="js/audio.js"></script>
<script src="js/webRTCAudioProcessor.js"></script>
<script src="js/handleMessage.js"></script>
<script src="js/webSocketUtil.js"></script>
<script src="js/iat.js"></script>
</body>
</html>

15
popup.js Normal file
View File

@ -0,0 +1,15 @@
// popup.js — toggles the floating bot window in the active tab.
// Fixes: (1) guard against the #toggleBot element being absent — popup.html
// does not currently render it, so the unguarded addEventListener threw on
// every popup open; (2) use the MV3 `func` key for executeScript instead of
// the deprecated `function` alias. Requires the "scripting" permission.
const toggleButton = document.getElementById("toggleBot");
if (toggleButton) {
    toggleButton.addEventListener("click", () => {
        chrome.tabs.query({ active: true, currentWindow: true }, (tabs) => {
            chrome.scripting.executeScript({
                target: { tabId: tabs[0].id },
                func: toggleBot,
            });
        });
    });
}

// Runs in the page context: show/hide the injected #floating-bot element.
function toggleBot() {
    let bot = document.getElementById("floating-bot");
    if (bot) {
        bot.style.display = bot.style.display === "none" ? "block" : "none";
    }
}

BIN
weather.pcm Normal file

Binary file not shown.