2 Commits

Author SHA1 Message Date
MatrixSeven
b7485059cf feat: update README 2025-11-24 18:06:43 +08:00
MatrixSeven
04d4af5ef1 feat: desktop sharing supports starting a voice session, improving practicality 2025-11-24 18:05:15 +08:00
19 changed files with 1132 additions and 262 deletions

View File

@@ -5,7 +5,7 @@ PORT=8080
# FRONTEND_DIR=./dist
# TURN server configuration
TURN_ENABLED=true
TURN_ENABLED=false
TURN_PORT=3478
TURN_USERNAME=chuan
TURN_PASSWORD=chuan123
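
For context, these TURN values are what a browser client would feed into its RTCPeerConnection ICE configuration. A minimal sketch, assuming the TURN server is reachable at a placeholder host with the port and credentials above (the hostname is illustrative, and the entry would be omitted while `TURN_ENABLED=false`):

```typescript
// Minimal sketch: wiring the TURN settings above into a peer connection.
// The hostname is a placeholder; port and credentials mirror the .env values.
const rtcConfig: RTCConfiguration = {
  iceServers: [
    {
      urls: 'turn:example.com:3478', // TURN_PORT
      username: 'chuan',             // TURN_USERNAME
      credential: 'chuan123',        // TURN_PASSWORD
    },
  ],
};

const pc = new RTCPeerConnection(rtcConfig);
```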

5
.gitignore vendored
View File

@@ -109,4 +109,7 @@ backup/
./chuan/.next
./internal/web/frontend/*
./file-transfer-server
file-transfer-server
file-transfer-server
./chuan-vue
./chuan-vue/*
chuan-vue

View File

@@ -30,6 +30,8 @@
- 🖥️ **Multi-platform support** - Single-file deployment on Linux/macOS/Windows
## 🔄 Recent Updates
### 2025-11-24
- **Desktop sharing** - Desktop sharing now supports voice, improving practicality
### 2025-09-05
- **WebRTC connection recovery** - After the page is closed and reopened, the data connection is restored
@@ -179,6 +181,7 @@ make dev
cd chuan-next && yarn && yarn dev
```
## 📄 License
MIT License
@@ -192,3 +195,5 @@ MIT License
[![Star History Chart](https://api.star-history.com/svg?repos=MatrixSeven/file-transfer-go&type=timeline)]
</div>
[![Powered by DartNode](https://dartnode.com/branding/DN-Open-Source-sm.png)](https://dartnode.com "Powered by DartNode - Free VPS for Open Source")

View File

@@ -22,16 +22,11 @@ const getConnectionStatus = (
isPeerConnected: boolean;
isConnecting: boolean;
error: string | null;
currentConnectType: 'webrtc' | 'websocket';
currentConnectType: string;
isJoinedRoom: boolean;
}
) => {
const isWebSocketConnected = connection.isWebSocketConnected;
const isPeerConnected = connection.isPeerConnected;
const isConnecting = connection.isConnecting;
const error = connection.error;
const currentConnectType = connection.currentConnectType;
const isJoinedRoom = connection.isJoinedRoom;
const { isWebSocketConnected, isPeerConnected, isConnecting, error, currentConnectType, isJoinedRoom } = connection;
if (!currentRoom) {
return {
@@ -205,10 +200,6 @@ export function ConnectionStatus(props: ConnectionStatusProps) {
// Use the global WebRTC state
const webrtcState = useWebRTCStore();
// Get the connection state
const { getConnectState } = useReadConnectState();
const connectionState = getConnectState();
// Build a connection object for compatibility with existing code
const connection = {
isWebSocketConnected: webrtcState.isWebSocketConnected,
@@ -216,11 +207,9 @@ export function ConnectionStatus(props: ConnectionStatusProps) {
isConnecting: webrtcState.isConnecting,
error: webrtcState.error,
currentConnectType: webrtcState.currentConnectType,
isJoinedRoom: connectionState?.isJoinedRoom || false,
isJoinedRoom: webrtcState.isJoinedRoom,
};
const isConnected = webrtcState.isWebSocketConnected && webrtcState.isPeerConnected;
// In inline mode, return only the status text
if (inline) {
return <span className={cn('text-sm text-slate-600', className)}>{getConnectionStatusText(connection)}</span>;

View File

@@ -0,0 +1,92 @@
import React from 'react';
import { Mic, MicOff } from 'lucide-react';
interface VoiceIndicatorProps {
volume: number; // 0-100
isSpeaking: boolean;
isMuted?: boolean;
className?: string;
}
export function VoiceIndicator({
volume,
isSpeaking,
isMuted = false,
className = '',
}: VoiceIndicatorProps) {
// Scale the ripple according to the volume
const rippleScale = 1 + (volume / 100) * 0.8; // 1.0 to 1.8
// Number of volume bars: 5
const barCount = 5;
const activeBars = Math.ceil((volume / 100) * barCount);
return (
<div className={`flex items-center space-x-2 ${className}`}>
{/* Microphone icon and ripple effect */}
<div className="relative flex items-center justify-center">
{/* Ripple animation - shown only while speaking */}
{isSpeaking && !isMuted && (
<>
<div
className="absolute w-10 h-10 rounded-full bg-green-500 opacity-20 animate-ping"
style={{
animationDuration: '1s',
transform: `scale(${rippleScale})`,
}}
/>
<div
className="absolute w-10 h-10 rounded-full bg-green-400 opacity-30"
style={{
transform: `scale(${rippleScale})`,
transition: 'transform 0.1s ease-out',
}}
/>
</>
)}
{/* Microphone icon */}
<div
className={`relative z-10 w-8 h-8 rounded-full flex items-center justify-center transition-colors ${
isMuted
? 'bg-red-100 text-red-600'
: isSpeaking
? 'bg-green-100 text-green-600'
: 'bg-slate-100 text-slate-600'
}`}
>
{isMuted ? (
<MicOff className="w-4 h-4" />
) : (
<Mic className="w-4 h-4" />
)}
</div>
</div>
{/* Volume bars - 5 levels */}
<div className="flex items-center space-x-0.5">
{Array.from({ length: barCount }).map((_, index) => {
const isActive = index < activeBars && !isMuted;
const height = 8 + index * 1.5; // increasing heights: 8, 9.5, 11, 12.5, 14
return (
<div
key={index}
className={`w-1 rounded-full transition-all duration-150 ${
isActive
? isSpeaking
? 'bg-green-500'
: 'bg-slate-400'
: 'bg-slate-200'
}`}
style={{
height: `${height}px`,
opacity: isActive ? 1 : 0.3,
}}
/>
);
})}
</div>
</div>
);
}
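
A hypothetical way to drive this component from a live MediaStream, using the useAudioVisualizer hook added later in this change (the wrapper component and its props are illustrative):

```typescript
// Hypothetical usage: feed the indicator from live microphone data.
import React from 'react';
import { VoiceIndicator } from '@/components/VoiceIndicator';
import { useAudioVisualizer } from '@/hooks/desktop-share/useAudioVisualizer';

function MicStatus({ stream, isMuted }: { stream: MediaStream | null; isMuted: boolean }) {
  const { volume, isSpeaking } = useAudioVisualizer(stream);
  return <VoiceIndicator volume={volume} isSpeaking={isSpeaking} isMuted={isMuted} />;
}
```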

View File

@@ -1,11 +1,13 @@
"use client";
import React, { useState, useCallback, useEffect } from 'react';
import React, { useState, useCallback, useEffect, useRef } from 'react';
import { Button } from '@/components/ui/button';
import { Input } from '@/components/ui/input';
import { Monitor, Square } from 'lucide-react';
import { Monitor, Square, Mic, MicOff } from 'lucide-react';
import { useToast } from '@/components/ui/toast-simple';
import { useDesktopShareBusiness } from '@/hooks/desktop-share';
import { useVoiceChatBusiness } from '@/hooks/desktop-share/useVoiceChatBusiness';
import { VoiceIndicator } from '@/components/VoiceIndicator';
import DesktopViewer from '@/components/DesktopViewer';
import { ConnectionStatus } from '@/components/ConnectionStatus';
@@ -24,6 +26,26 @@ export default function WebRTCDesktopReceiver({ className, initialCode, onConnec
// Desktop-share business logic
const desktopShare = useDesktopShareBusiness();
// Voice-chat business logic
const voiceChat = useVoiceChatBusiness(desktopShare.webRTCConnection);
// Ref to the remote audio element
const remoteAudioRef = useRef<HTMLAudioElement | null>(null);
// Debug: watch voice state changes (state only, not live volume)
useEffect(() => {
console.log('[DesktopShareReceiver] 🎤 Voice state changed:', {
isVoiceEnabled: voiceChat.isVoiceEnabled,
isRemoteVoiceActive: voiceChat.isRemoteVoiceActive,
debug: voiceChat._debug
});
}, [
voiceChat.isVoiceEnabled,
voiceChat.isRemoteVoiceActive
// Deliberately not watching localVolume, remoteVolume, localIsSpeaking, remoteIsSpeaking:
// they change every frame (~60fps) and would cause excessive re-renders
]);
// Notify the parent component of connection-state changes
useEffect(() => {
@@ -117,7 +139,7 @@ export default function WebRTCDesktopReceiver({ className, initialCode, onConnec
}
}, [desktopShare, inputCode, isJoiningRoom, showToast]);
// Stop viewing
// Stop viewing the desktop
const handleStopViewing = useCallback(async () => {
try {
setIsLoading(true);
@@ -132,6 +154,34 @@ export default function WebRTCDesktopReceiver({ className, initialCode, onConnec
}
}, [desktopShare, showToast]);
// Enable voice
const handleEnableVoice = useCallback(async () => {
try {
console.log('[DesktopShareReceiver] User clicked to enable voice');
await voiceChat.enableVoice();
showToast('Voice enabled', 'success');
} catch (error) {
console.error('[DesktopShareReceiver] Failed to enable voice:', error);
let errorMessage = 'Failed to enable voice';
if (error instanceof Error) {
if (error.message.includes('Microphone permission') || error.message.includes('Permission')) {
errorMessage = 'Cannot access the microphone; please check browser permission settings';
} else if (error.message.includes('P2P connection')) {
errorMessage = 'Please wait for the connection to be established first';
} else if (error.message.includes('NotFoundError') || error.message.includes('device')) {
errorMessage = 'No microphone device detected';
} else if (error.message.includes('NotAllowedError')) {
errorMessage = 'Microphone permission denied; please allow microphone access in your browser settings';
} else {
errorMessage = error.message;
}
}
showToast(errorMessage, 'error');
}
}, [voiceChat, showToast]);
// If an initial code exists and we have not joined yet, try to join automatically
React.useEffect(() => {
console.log('[WebRTCDesktopReceiver] useEffect fired, params:', {
@@ -320,50 +370,143 @@ export default function WebRTCDesktopReceiver({ className, initialCode, onConnec
/>
</div>
{/* Control panel while viewing */}
<div className="flex justify-center mb-4">
<div className="bg-white rounded-lg p-3 shadow-lg border flex items-center space-x-4">
<div className="flex items-center space-x-2 text-green-600">
<Monitor className="w-4 h-4" />
<span className="font-semibold"></span>
{/* Control panel while viewing - mobile optimized */}
<div className="mb-4">
<div className="bg-white rounded-lg p-3 shadow-lg border">
<div className="flex flex-col sm:flex-row sm:items-center gap-3 sm:gap-4">
{/* Status indicator */}
<div className="flex items-center space-x-2 text-green-600">
<Monitor className="w-4 h-4" />
<span className="font-semibold"></span>
</div>
{/* Remote-speaking hint - full width on mobile */}
{voiceChat.isRemoteVoiceActive && voiceChat.remoteIsSpeaking && (
<div className="flex items-center space-x-2 bg-green-50 text-green-700 px-3 py-1.5 rounded-lg border border-green-200 animate-pulse">
<div className="w-2 h-2 bg-green-500 rounded-full animate-ping"></div>
<Mic className="w-3.5 h-3.5" />
<span className="text-sm font-medium"></span>
</div>
)}
{/* Button group - full-width row on mobile */}
<div className="flex gap-2 sm:ml-auto w-full sm:w-auto">
<Button
onClick={voiceChat.isVoiceEnabled ? () => voiceChat.disableVoice() : handleEnableVoice}
variant="outline"
size="sm"
className={`flex-1 sm:flex-initial ${
voiceChat.isVoiceEnabled
? "text-green-600 border-green-300"
: "text-slate-600 border-slate-300"
}`}
disabled={!desktopShare.isPeerConnected && !voiceChat.isVoiceEnabled}
>
{voiceChat.isVoiceEnabled ? (
<>
<Mic className="w-4 h-4 sm:mr-1" />
<span className="hidden sm:inline"></span>
</>
) : (
<>
<MicOff className="w-4 h-4 sm:mr-1" />
<span className="hidden sm:inline"></span>
</>
)}
</Button>
<Button
onClick={handleStopViewing}
disabled={isLoading}
variant="destructive"
size="sm"
className="flex-1 sm:flex-initial"
>
<Square className="w-4 h-4 sm:mr-2" />
<span className="hidden sm:inline">{isLoading ? '退出中...' : '退出观看'}</span>
</Button>
</div>
</div>
<Button
onClick={handleStopViewing}
disabled={isLoading}
variant="destructive"
size="sm"
>
<Square className="w-4 h-4 mr-2" />
{isLoading ? 'Exiting...' : 'Exit viewing'}
</Button>
</div>
</div>
{/* Desktop display area */}
{desktopShare.remoteStream ? (
<DesktopViewer
stream={desktopShare.remoteStream}
isConnected={desktopShare.isViewing}
connectionCode={inputCode}
onDisconnect={handleStopViewing}
/>
) : (
<div className="bg-white/80 backdrop-blur-sm rounded-xl p-8 border border-slate-200">
<div className="text-center">
<Monitor className="w-16 h-16 mx-auto text-slate-400 mb-4" />
<p className="text-slate-600 mb-2">...</p>
<p className="text-sm text-slate-500"></p>
<div className="flex items-center justify-center space-x-2 mt-4">
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-purple-500"></div>
<span className="text-sm text-purple-600">...</span>
</div>
<div className="relative">
{desktopShare.remoteStream ? (
<DesktopViewer
stream={desktopShare.remoteStream}
isConnected={desktopShare.isViewing}
connectionCode={inputCode}
onDisconnect={handleStopViewing}
/>
) : (
<div className="bg-white/80 backdrop-blur-sm rounded-xl p-8 border border-slate-200">
<div className="text-center">
<Monitor className="w-16 h-16 mx-auto text-slate-400 mb-4" />
<p className="text-slate-600 mb-2">...</p>
<p className="text-sm text-slate-500"></p>
<div className="flex items-center justify-center space-x-2 mt-4">
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-purple-500"></div>
<span className="text-sm text-purple-600">...</span>
</div>
</div>
</div>
</div>
)}
)}
{/* Voice status indicator - always visible, click to toggle */}
{desktopShare.remoteStream && (
<div className="mt-4">
<div
className="bg-gradient-to-br from-slate-50 to-white rounded-xl p-3 shadow-lg border border-slate-200 cursor-pointer hover:shadow-xl transition-shadow"
onClick={voiceChat.isVoiceEnabled ? () => voiceChat.disableVoice() : handleEnableVoice}
title={voiceChat.isVoiceEnabled ? "点击关闭发言" : "点击开启发言"}
>
<div className="flex items-center justify-between">
<div className="flex items-center space-x-3">
<div className={`w-8 h-8 rounded-full flex items-center justify-center ${
voiceChat.isVoiceEnabled ? 'bg-blue-100' : 'bg-slate-100'
}`}>
{voiceChat.isVoiceEnabled ? (
<Mic className="w-4 h-4 text-blue-600" />
) : (
<MicOff className="w-4 h-4 text-slate-400" />
)}
</div>
<div className="flex flex-col">
<span className={`text-sm font-medium ${
voiceChat.isVoiceEnabled ? 'text-slate-700' : 'text-slate-500'
}`}></span>
<span className="text-xs text-slate-500">
{voiceChat.isVoiceEnabled ? 'Click to disable' : 'Click to enable'}
</span>
</div>
</div>
{voiceChat.isVoiceEnabled && (
<VoiceIndicator
volume={voiceChat.localVolume}
isSpeaking={voiceChat.localIsSpeaking}
isMuted={false}
/>
)}
</div>
</div>
</div>
)}
</div>
</div>
)}
</div>
{/* Hidden audio element for playing remote audio */}
<audio
ref={(el) => {
remoteAudioRef.current = el;
voiceChat.setRemoteAudioRef(el);
}}
autoPlay
style={{ display: 'none' }}
/>
</div>
</div>
);

View File

@@ -5,7 +5,9 @@ import RoomInfoDisplay from '@/components/RoomInfoDisplay';
import { Button } from '@/components/ui/button';
import { useToast } from '@/components/ui/toast-simple';
import { useDesktopShareBusiness } from '@/hooks/desktop-share';
import { Monitor, Repeat, Share, Square } from 'lucide-react';
import { useVoiceChatBusiness } from '@/hooks/desktop-share/useVoiceChatBusiness';
import { VoiceIndicator } from '@/components/VoiceIndicator';
import { Monitor, Repeat, Share, Square, Mic, MicOff } from 'lucide-react';
import { useCallback, useEffect, useRef, useState } from 'react';
interface WebRTCDesktopSenderProps {
@@ -19,6 +21,23 @@ export default function WebRTCDesktopSender({ className, onConnectionChange }: W
// Desktop-share business logic
const desktopShare = useDesktopShareBusiness();
// Voice-chat business logic - pass in the same connection instance
const voiceChat = useVoiceChatBusiness(desktopShare.webRTCConnection);
// Debug: watch voice state changes (state only, not live volume)
useEffect(() => {
console.log('[DesktopShareSender] 🎤 Voice state changed:', {
isVoiceEnabled: voiceChat.isVoiceEnabled,
isRemoteVoiceActive: voiceChat.isRemoteVoiceActive,
debug: voiceChat._debug
});
}, [
voiceChat.isVoiceEnabled,
voiceChat.isRemoteVoiceActive
// Deliberately not watching localVolume, remoteVolume, localIsSpeaking, remoteIsSpeaking:
// they change every frame (~60fps) and would cause excessive re-renders
]);
// 调试监控localStream状态变化
useEffect(() => {
@@ -34,6 +53,11 @@ export default function WebRTCDesktopSender({ className, onConnectionChange }: W
// Keep a ref to the local video element
const localVideoRef = useRef<HTMLVideoElement | null>(null);
// Callback for setting the remote audio element
const setRemoteAudioRef = useCallback((audioElement: HTMLAudioElement | null) => {
voiceChat.setRemoteAudioRef(audioElement);
}, [voiceChat]);
// Handle local-stream changes so the video displays correctly
useEffect(() => {
if (localVideoRef.current && desktopShare.localStream) {
@@ -213,6 +237,34 @@ export default function WebRTCDesktopSender({ className, onConnectionChange }: W
}
}, [desktopShare, showToast]);
// Enable voice
const handleEnableVoice = useCallback(async () => {
try {
console.log('[DesktopShareSender] User clicked to enable voice');
await voiceChat.enableVoice();
showToast('Voice enabled', 'success');
} catch (error) {
console.error('[DesktopShareSender] Failed to enable voice:', error);
let errorMessage = 'Failed to enable voice';
if (error instanceof Error) {
if (error.message.includes('Microphone permission') || error.message.includes('Permission')) {
errorMessage = 'Cannot access the microphone; please check browser permission settings';
} else if (error.message.includes('P2P connection')) {
errorMessage = 'Please wait for the other side to join first';
} else if (error.message.includes('NotFoundError') || error.message.includes('device')) {
errorMessage = 'No microphone device detected';
} else if (error.message.includes('NotAllowedError')) {
errorMessage = 'Microphone permission denied; please allow microphone access in your browser settings';
} else {
errorMessage = error.message;
}
}
showToast(errorMessage, 'error');
}
}, [voiceChat, showToast]);
return (
<div className={`space-y-4 sm:space-y-6 ${className || ''}`}>
<div className="bg-white/80 backdrop-blur-sm rounded-xl p-4 sm:p-6 shadow-lg border border-white/20 animate-fade-in-up">
@@ -293,16 +345,16 @@ export default function WebRTCDesktopSender({ className, onConnectionChange }: W
{/* Control buttons */}
{desktopShare.isSharing && (
<div className="flex items-center space-x-2">
<Button
onClick={handleSwitchDesktop}
disabled={isLoading}
variant="outline"
size="sm"
className="text-slate-700 border-slate-300"
>
<Repeat className="w-4 h-4 mr-1" />
</Button>
<Button
onClick={handleSwitchDesktop}
disabled={isLoading}
variant="outline"
size="sm"
className="text-slate-700 border-slate-300"
>
<Repeat className="w-4 h-4 mr-1" />
</Button>
<Button
onClick={handleStopSharing}
disabled={isLoading}
@@ -313,6 +365,30 @@ export default function WebRTCDesktopSender({ className, onConnectionChange }: W
<Square className="w-4 h-4 mr-1" />
</Button>
{/* Voice control button */}
<Button
onClick={voiceChat.isVoiceEnabled ? voiceChat.disableVoice : handleEnableVoice}
disabled={isLoading}
variant="outline"
size="sm"
className={voiceChat.isVoiceEnabled
? "text-green-700 border-green-300 hover:bg-green-50"
: "text-slate-700 border-slate-300 hover:bg-slate-50"
}
>
{voiceChat.isVoiceEnabled ? (
<>
<Mic className="w-4 h-4 mr-1" />
</>
) : (
<>
<MicOff className="w-4 h-4 mr-1" />
</>
)}
</Button>
</div>
)}
</div>
@@ -347,9 +423,44 @@ export default function WebRTCDesktopSender({ className, onConnectionChange }: W
</div>
</div>
)}
{/* Voice status indicator - always visible, click to toggle */}
<div className="absolute bottom-2 right-2 z-10">
<div
className="bg-gradient-to-br from-slate-50/95 to-white/95 backdrop-blur rounded-xl p-3 shadow-xl border border-slate-200/50 cursor-pointer hover:shadow-2xl transition-shadow"
onClick={voiceChat.isVoiceEnabled ? voiceChat.disableVoice : handleEnableVoice}
title={voiceChat.isVoiceEnabled ? "Click to stop speaking" : "Click to start speaking"}
>
<div className="flex items-center space-x-3">
<div className={`w-8 h-8 rounded-full flex items-center justify-center flex-shrink-0 ${
voiceChat.isVoiceEnabled ? 'bg-blue-100' : 'bg-slate-100'
}`}>
{voiceChat.isVoiceEnabled ? (
<Mic className="w-4 h-4 text-blue-600" />
) : (
<MicOff className="w-4 h-4 text-slate-400" />
)}
</div>
<div className="flex flex-col">
<span className={`text-xs font-medium ${
voiceChat.isVoiceEnabled ? 'text-slate-700' : 'text-slate-500'
}`}></span>
<span className="text-[10px] text-slate-500">
{voiceChat.isVoiceEnabled ? 'Click to disable' : 'Click to enable'}
</span>
</div>
{voiceChat.isVoiceEnabled && (
<VoiceIndicator
volume={voiceChat.localVolume}
isSpeaking={voiceChat.localIsSpeaking}
isMuted={false}
/>
)}
</div>
</div>
</div>
</div>
)}
</div>
</div>
)}
@@ -376,6 +487,14 @@ export default function WebRTCDesktopSender({ className, onConnectionChange }: W
showToast('Viewing link copied', 'success');
}}
/>
{/* Hidden remote-audio element - plays the viewer's voice */}
<audio
ref={setRemoteAudioRef}
autoPlay
playsInline
className="hidden"
/>
</div>
)}
</div>

View File

@@ -49,7 +49,7 @@ export interface IWebConnection extends IRegisterEventHandler, IGetConnectState
// Media track methods
addTrack: (track: MediaStreamTrack, stream: MediaStream) => RTCRtpSender | null;
removeTrack: (sender: RTCRtpSender) => void;
onTrack: (callback: (event: RTCTrackEvent) => void) => void;
onTrack: (callback: (event: RTCTrackEvent) => void) => () => void; // returns a cleanup function
getPeerConnection: () => RTCPeerConnection | null;
createOfferNow: () => Promise<boolean>;
@@ -108,8 +108,8 @@ export interface WebRTCTrackManager {
// Remove a media track
removeTrack: (sender: RTCRtpSender) => void;
// Set a track handler
onTrack: (handler: (event: RTCTrackEvent) => void) => void;
// Set a track handler - returns a cleanup function that removes it
onTrack: (handler: (event: RTCTrackEvent) => void) => () => void;
// Request renegotiation (notify the Core layer that a new Offer is needed)
requestOfferRenegotiation: () => Promise<boolean>;
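
With the new signature, callers can register a handler in a React effect and drop it on unmount, which is how the hooks later in this change consume it. A minimal sketch, assuming a `connection: IWebConnection` is available:

```typescript
// Minimal sketch: subscribe to remote tracks and clean up on unmount.
import { useEffect } from 'react';

function useRemoteTrackLogger(connection: IWebConnection | null) {
  useEffect(() => {
    if (!connection) return;
    const cleanup = connection.onTrack((event: RTCTrackEvent) => {
      console.log('remote track:', event.track.kind, event.track.id);
    });
    return () => cleanup?.();
  }, [connection]);
}
```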

View File

@@ -35,7 +35,6 @@ export function useConnectManager(): IWebConnection & IRegisterEventHandler & IG
useEffect(() => {
const targetConnection = currentConnectType === 'webrtc' ? webrtcConnection : wsConnection;
if (currentConnectionRef.current !== targetConnection) {
console.log('[ConnectManager] 🔄 Syncing connection ref to:', currentConnectType);
currentConnectionRef.current = targetConnection;
}
}, [currentConnectType, webrtcConnection, wsConnection]);
@@ -253,10 +252,8 @@ export function useConnectManager(): IWebConnection & IRegisterEventHandler & IG
}, []);
const onTrack = useCallback((callback: (event: RTCTrackEvent) => void) => {
console.log('[ConnectManager] 🎧 Setting onTrack handler, current connection type:', currentConnectType);
console.log('[ConnectManager] Current connection ref:', currentConnectionRef.current === webrtcConnection ? 'WebRTC' : 'WebSocket');
currentConnectionRef.current.onTrack(callback);
}, [currentConnectType, webrtcConnection]);
return currentConnectionRef.current.onTrack(callback);
}, []); // empty deps; the ref always yields the latest connection
const getPeerConnection = useCallback(() => {
return currentConnectionRef.current.getPeerConnection();

View File

@@ -103,7 +103,36 @@ export function useWebRTCConnectionCore(
// Create an Offer (signaling should be handled in the Core layer)
const createOffer = useCallback(async (pc: RTCPeerConnection, ws: WebSocket) => {
try {
console.log('[ConnectionCore] 🎬 Creating offer, current track count:', pc.getSenders().length);
// Clean up every sender without a track (avoid empty senders interfering)
const allSenders = pc.getSenders();
console.log('[ConnectionCore] 🎬 Creating offer, total sender count:', allSenders.length);
// Remove every sender whose track is null
const emptyRemovals = allSenders.filter(sender => !sender.track).map(async sender => {
try {
await pc.removeTrack(sender);
console.log('[ConnectionCore] 🗑️ Removed an empty sender');
} catch (e) {
console.warn('[ConnectionCore] ⚠️ Failed to remove empty sender:', e);
}
});
if (emptyRemovals.length > 0) {
await Promise.all(emptyRemovals);
console.log('[ConnectionCore] 🧹 Removed', emptyRemovals.length, 'empty sender(s)');
}
// Get the remaining active senders
const activeSenders = pc.getSenders().filter(s => s.track);
console.log('[ConnectionCore] 📊 Active track count:', activeSenders.length);
activeSenders.forEach((sender, index) => {
console.log(`[ConnectionCore] Sender ${index}:`, {
kind: sender.track?.kind,
id: sender.track?.id,
enabled: sender.track?.enabled,
readyState: sender.track?.readyState
});
});
// Ensure the connection state is stable
if (pc.connectionState !== 'connecting' && pc.connectionState !== 'new') {
@@ -186,8 +215,14 @@ export function useWebRTCConnectionCore(
// Set up track reception handling (for the receiver)
// Note: this handler is overridden by business logic via TrackManager.onTrack()
pc.ontrack = (event) => {
console.log('[ConnectionCore] 🎥 PeerConnection received track:', event.track.kind, event.track.id, 'state:', event.track.readyState);
console.log('[ConnectionCore] Associated stream count:', event.streams.length);
console.log('[ConnectionCore] 📥 PeerConnection received remote track:', {
kind: event.track.kind,
id: event.track.id,
enabled: event.track.enabled,
readyState: event.track.readyState,
streamCount: event.streams.length,
streamId: event.streams[0]?.id
});
console.log('[ConnectionCore] ⚠️ Default track handler - the business layer should set its own handler via TrackManager.onTrack()');
};
@@ -469,9 +504,40 @@ export function useWebRTCConnectionCore(
await new Promise(resolve => setTimeout(resolve, 100));
}
if (pcOffer && pcOffer.signalingState === 'stable') {
if (pcOffer) {
const currentState = pcOffer.signalingState;
console.log('[ConnectionCore] Current signaling state:', currentState, 'role:', role);
// Perfect Negotiation: the receiver is polite, the sender is impolite
const isPolite = role === 'receiver';
// Handle offer collisions (glare)
if (currentState === 'have-local-offer') {
if (isPolite) {
// The polite peer (receiver) rolls back its own offer
console.log('[ConnectionCore] 🔄 [Polite-Receiver] Glare detected, rolling back local offer');
await pcOffer.setLocalDescription({ type: 'rollback' });
} else {
// The impolite peer (sender) also needs to accept the remote offer here
// The previous logic was wrong: it should not simply break, but roll back or wait as well
console.log('[ConnectionCore] 🔄 [Impolite-Sender] Glare detected, rolling back and accepting the remote offer');
await pcOffer.setLocalDescription({ type: 'rollback' });
}
}
await pcOffer.setRemoteDescription(new RTCSessionDescription(message.payload));
console.log('[ConnectionCore] ✅ Remote description set');
// Log the current local tracks
const currentSenders = pcOffer.getSenders();
console.log('[ConnectionCore] 📊 Local track count before creating answer:', currentSenders.length);
currentSenders.forEach((sender, index) => {
console.log(`[ConnectionCore] Local sender ${index}:`, {
kind: sender.track?.kind,
id: sender.track?.id,
enabled: sender.track?.enabled
});
});
const answer = await pcOffer.createAnswer();
await pcOffer.setLocalDescription(answer);
@@ -480,7 +546,7 @@ export function useWebRTCConnectionCore(
ws.send(JSON.stringify({ type: 'answer', payload: answer }));
console.log('[ConnectionCore] 📤 Sent answer');
} else {
console.warn('[ConnectionCore] ⚠️ PeerConnection is not stable or does not exist:', pcOffer?.signalingState);
console.warn('[ConnectionCore] ⚠️ PeerConnection does not exist');
}
break;
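
For comparison, the canonical Perfect Negotiation pattern (as documented on MDN) resolves glare by having only the polite peer roll back while the impolite peer ignores the colliding offer; this change instead rolls back on both sides. A condensed sketch of the canonical form, where `polite` would map to role === 'receiver' here (this is a reference sketch, not the project's exact logic):

```typescript
// Condensed canonical Perfect Negotiation sketch (MDN pattern).
async function handleRemoteOffer(
  pc: RTCPeerConnection,
  offer: RTCSessionDescriptionInit,
  polite: boolean,
  makingOffer: boolean
): Promise<RTCSessionDescriptionInit | null> {
  const collision = makingOffer || pc.signalingState !== 'stable';
  if (collision && !polite) {
    return null; // impolite peer ignores the colliding offer
  }
  if (collision && polite) {
    await pc.setLocalDescription({ type: 'rollback' }); // polite peer yields
  }
  await pc.setRemoteDescription(offer);
  const answer = await pc.createAnswer();
  await pc.setLocalDescription(answer);
  return answer; // send back over the signaling channel
}
```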
@@ -505,21 +571,12 @@ export function useWebRTCConnectionCore(
const signalingState = pcAnswer.signalingState;
console.log('[ConnectionCore] Current signaling state:', signalingState, 'role:', role);
// If we are the sender and the state is stable, media tracks already exist; send a new offer instead of processing the answer
if (role === 'sender' && signalingState === 'stable') {
console.log('[ConnectionCore] 🎬 Sender is stable; sending a new offer containing media tracks');
try {
await createOffer(pcAnswer, ws);
console.log('[ConnectionCore] ✅ Media offer sent');
} catch (error) {
console.error('[ConnectionCore] ❌ Failed to send media offer:', error);
}
} else if (signalingState === 'have-local-offer') {
if (signalingState === 'have-local-offer') {
// Normal answer handling
await pcAnswer.setRemoteDescription(new RTCSessionDescription(message.payload));
console.log('[ConnectionCore] ✅ Answer processed');
} else {
console.warn('[ConnectionCore] ⚠️ Unexpected PeerConnection state:', signalingState, 'skipping answer handling');
console.warn('[ConnectionCore] ⚠️ PeerConnection state is not have-local-offer:', signalingState, 'skipping answer handling');
}
}
} catch (error) {
@@ -532,6 +589,28 @@ export function useWebRTCConnectionCore(
}
break;
case 'renegotiate-request':
// The receiver requested renegotiation (e.g., after adding/removing an audio track)
console.log('[ConnectionCore] 🔄 Renegotiation request received:', message.payload);
if (role === 'sender') {
// Only the sender may respond to a renegotiation request
const pcRenegotiate = pcRef.current;
if (pcRenegotiate) {
console.log('[ConnectionCore] 📡 [Sender] Responding to renegotiation request, creating a new offer');
try {
await createOffer(pcRenegotiate, ws);
console.log('[ConnectionCore] ✅ [Sender] Renegotiation offer sent');
} catch (error) {
console.error('[ConnectionCore] ❌ [Sender] Renegotiation failed:', error);
}
} else {
console.warn('[ConnectionCore] ⚠️ [Sender] PeerConnection does not exist; cannot renegotiate');
}
} else {
console.warn('[ConnectionCore] ⚠️ [Receiver] Received a renegotiation request but the role is not sender');
}
break;
case 'ice-candidate':
let pcIce = pcRef.current;
if (!pcIce) {
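
The client-side counterpart that sends this `renegotiate-request` message is not shown in these hunks; presumably the receiver posts it over the signaling WebSocket after changing its local track set. A hedged sketch of what that send could look like (the payload shape is an assumption):

```typescript
// Hedged sketch: the receiver asks the sender to renegotiate after
// adding or removing a local track. The payload shape is illustrative.
function requestRenegotiation(ws: WebSocket, reason: string): void {
  if (ws.readyState !== WebSocket.OPEN) return;
  ws.send(JSON.stringify({ type: 'renegotiate-request', payload: { reason } }));
}
```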

View File

@@ -27,12 +27,7 @@ export function useWebRTCTrackManager(
}
try {
console.log('[TrackManager] 📡 Requesting renegotiation - media tracks updated');
// The Core layer should be notified via a callback or event to recreate the Offer
// Called directly for now; a better design would go through an event system
// Fire a renegotiation event (the Core layer should listen for it)
console.log('[TrackManager] ⚠️ Core-layer support for a renegotiation callback mechanism is needed');
return true;
} catch (error) {
console.error('[TrackManager] Renegotiation request failed:', error);
@@ -71,23 +66,32 @@ export function useWebRTCTrackManager(
}
}, []);
// Set a track handler
const onTrack = useCallback((handler: (event: RTCTrackEvent) => void) => {
// Store multiple track handlers
const trackHandlersRef = useRef<Set<(event: RTCTrackEvent) => void>>(new Set());
// Set a track handler - returns a cleanup function
const onTrack = useCallback((handler: (event: RTCTrackEvent) => void): (() => void) => {
// Add to the handler set
trackHandlersRef.current.add(handler);
const pc = pcRef.current;
if (!pc) {
console.warn('[TrackManager] PeerConnection not ready yet; onTrack will be set once the connection is established');
// Check whether a retry is already running to avoid multiple retry loops
if (retryInProgressRef.current) {
console.log('[TrackManager] A retry loop is already running; skipping duplicate retry');
return;
// Return the cleanup function
return () => {
trackHandlersRef.current.delete(handler);
console.log('[TrackManager] 🗑️ Removed track handler; remaining handlers:', trackHandlersRef.current.size);
};
}
// Check the WebSocket state; only try to set up after it is connected
const state = stateManager.getState();
if (!state.isWebSocketConnected) {
console.log('[TrackManager] WebSocket not connected; waiting for the connection...');
return;
// Return the cleanup function
return () => {
trackHandlersRef.current.delete(handler);
};
}
retryInProgressRef.current = true;
@@ -99,25 +103,20 @@ export function useWebRTCTrackManager(
const checkAndSetTrackHandler = () => {
const currentPc = pcRef.current;
if (currentPc) {
console.log('[TrackManager] ✅ PeerConnection ready; setting onTrack handler');
currentPc.ontrack = handler;
retryInProgressRef.current = false; // reset the flag after success
// If remote tracks already exist, handle them immediately
const receivers = currentPc.getReceivers();
console.log(`[TrackManager] 📡 Currently ${receivers.length} receiver(s)`);
receivers.forEach(receiver => {
if (receiver.track) {
console.log(`[TrackManager] 🎥 Found existing track: ${receiver.track.kind}, ${receiver.track.id}, state: ${receiver.track.readyState}`);
}
});
// Install the multiplexing handler
currentPc.ontrack = (event: RTCTrackEvent) => {
trackHandlersRef.current.forEach(h => {
try {
h(event);
} catch (error) {
console.error('[TrackManager] A track handler threw an error:', error);
}
});
};
retryInProgressRef.current = false;
} else {
retryCount++;
if (retryCount < maxRetries) {
// Log every 5 retries to reduce noise
if (retryCount % 5 === 0) {
console.log(`[TrackManager] ⏳ Waiting for PeerConnection... (attempt ${retryCount}/${maxRetries})`);
}
setTimeout(checkAndSetTrackHandler, 100);
} else {
console.error('[TrackManager] ❌ PeerConnection not ready after a long wait; stopping retries');
@@ -126,20 +125,28 @@ export function useWebRTCTrackManager(
}
};
checkAndSetTrackHandler();
return;
// Return the cleanup function
return () => {
trackHandlersRef.current.delete(handler);
};
}
console.log('[TrackManager] ✅ Setting onTrack handler immediately');
pc.ontrack = handler;
// Check for existing tracks
const receivers = pc.getReceivers();
console.log(`[TrackManager] 📡 Currently ${receivers.length} receiver(s)`);
receivers.forEach(receiver => {
if (receiver.track) {
console.log(`[TrackManager] 🎥 Found existing track: ${receiver.track.kind}, ${receiver.track.id}, state: ${receiver.track.readyState}`);
}
});
// Install the multiplexing handler
pc.ontrack = (event: RTCTrackEvent) => {
trackHandlersRef.current.forEach(h => {
try {
h(event);
} catch (error) {
console.error('[TrackManager] A track handler threw an error:', error);
}
});
};
// Return the cleanup function
return () => {
trackHandlersRef.current.delete(handler);
};
}, [stateManager]);
// Trigger renegotiation immediately (used after media tracks are added)
@@ -153,7 +160,6 @@ export function useWebRTCTrackManager(
}
try {
console.log('[TrackManager] 📡 Triggering media renegotiation');
// The actual offer creation should be handled by the Core layer
// This is just a trigger signaling that renegotiation is needed
return true;

View File

@@ -310,8 +310,10 @@ export function useWebSocketConnection(): IWebConnection & { injectWebSocket: (w
console.warn('[WebSocket] WebSocket does not support media tracks');
}, []);
const onTrack = useCallback(() => {
const onTrack = useCallback((callback: (event: RTCTrackEvent) => void): (() => void) => {
console.warn('[WebSocket] WebSocket does not support media tracks');
// Return a cleanup function to satisfy the interface contract
return () => {};
}, []);
const getPeerConnection = useCallback(() => {

View File

@@ -1,2 +1,4 @@
// Desktop-share related hooks
export { useDesktopShareBusiness } from './useDesktopShareBusiness';
export { useVoiceChatBusiness } from './useVoiceChatBusiness';
export { useAudioVisualizer } from './useAudioVisualizer';

View File

@@ -0,0 +1,122 @@
import { useEffect, useRef, useState } from 'react';
interface AudioVisualizerState {
volume: number; // 0-100
isSpeaking: boolean;
}
export function useAudioVisualizer(stream: MediaStream | null) {
const [state, setState] = useState<AudioVisualizerState>({
volume: 0,
isSpeaking: false,
});
const audioContextRef = useRef<AudioContext | null>(null);
const analyserRef = useRef<AnalyserNode | null>(null);
const animationFrameRef = useRef<number | null>(null);
useEffect(() => {
if (!stream) {
// Reset state
setState({ volume: 0, isSpeaking: false });
return;
}
const audioTracks = stream.getAudioTracks();
if (audioTracks.length === 0) {
return;
}
try {
// Create the audio context
const audioContext = new AudioContext();
audioContextRef.current = audioContext;
// Create the analyser node
const analyser = audioContext.createAnalyser();
analyser.fftSize = 256;
analyser.smoothingTimeConstant = 0.8;
analyserRef.current = analyser;
// Connect the audio stream to the analyser
const source = audioContext.createMediaStreamSource(stream);
source.connect(analyser);
// Allocate the data array
const dataArray = new Uint8Array(analyser.frequencyBinCount);
// Volume detection thresholds
const SPEAKING_THRESHOLD = 10; // speaking threshold
const SILENCE_FRAMES = 10; // consecutive frames below the threshold before speech counts as stopped
let silenceFrameCount = 0;
// Analyse the audio data
const analyzeAudio = () => {
if (!analyserRef.current) return;
analyser.getByteFrequencyData(dataArray);
// Compute the average volume
let sum = 0;
for (let i = 0; i < dataArray.length; i++) {
sum += dataArray[i];
}
const average = sum / dataArray.length;
// Normalize to 0-100
const normalizedVolume = Math.min(100, Math.round((average / 255) * 100));
// Determine whether the user is speaking
const currentlySpeaking = normalizedVolume > SPEAKING_THRESHOLD;
if (currentlySpeaking) {
silenceFrameCount = 0;
setState(prev => ({
volume: normalizedVolume,
isSpeaking: true,
}));
} else {
silenceFrameCount++;
if (silenceFrameCount >= SILENCE_FRAMES) {
setState(prev => ({
volume: normalizedVolume,
isSpeaking: false,
}));
} else {
// Keep the speaking state but update the volume
setState(prev => ({
volume: normalizedVolume,
isSpeaking: prev.isSpeaking,
}));
}
}
animationFrameRef.current = requestAnimationFrame(analyzeAudio);
};
// Start analysing
analyzeAudio();
// Cleanup
return () => {
if (animationFrameRef.current !== null) {
cancelAnimationFrame(animationFrameRef.current);
animationFrameRef.current = null;
}
if (audioContextRef.current) {
audioContextRef.current.close();
audioContextRef.current = null;
}
analyserRef.current = null;
setState({ volume: 0, isSpeaking: false });
};
} catch (error) {
console.error('[AudioVisualizer] Failed to initialize the audio analyser:', error);
}
}, [stream]);
return state;
}
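
The speaking flag above uses simple hysteresis: the volume must stay below SPEAKING_THRESHOLD for SILENCE_FRAMES consecutive frames before isSpeaking drops, which prevents flicker when sampled every animation frame. The same logic as a standalone sketch:

```typescript
// Standalone sketch of the hook's speaking-state hysteresis.
function createSpeakingDetector(threshold = 10, silenceFrames = 10) {
  let silentCount = 0;
  let speaking = false;
  return (volume: number): boolean => {
    if (volume > threshold) {
      silentCount = 0;   // any loud frame resets the silence counter
      speaking = true;
    } else if (++silentCount >= silenceFrames) {
      speaking = false;  // only flip off after sustained silence
    }
    return speaking;
  };
}
```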

View File

@@ -43,65 +43,81 @@ export function useDesktopShareBusiness() {
}, []); // drop the updateState dependency; use setState directly
// Set up the remote track handler (always listening)
useEffect(() => {
console.log('[DesktopShare] 🎧 Setting up remote track handler');
const handleRemoteTrack = useCallback((event: RTCTrackEvent) => {
// Handle video tracks only; audio tracks are handled by useVoiceChatBusiness
if (event.track.kind !== 'video') {
console.log('[DesktopShare] ⏭️ Skipping non-video track:', event.track.kind, event.track.id);
return;
}
const trackHandler = (event: RTCTrackEvent) => {
console.log('[DesktopShare] 🎥 Remote track received:', event.track.kind, event.track.id, 'state:', event.track.readyState);
console.log('[DesktopShare] Remote stream count:', event.streams.length);
console.log('[DesktopShare] 🎥 Remote video track received:', event.track.id, 'state:', event.track.readyState);
console.log('[DesktopShare] Remote stream count:', event.streams.length);
if (event.streams.length > 0) {
const remoteStream = event.streams[0];
console.log('[DesktopShare] 🎬 Setting remote stream, track count:', remoteStream.getTracks().length);
remoteStream.getTracks().forEach(track => {
console.log('[DesktopShare] Remote track:', track.kind, track.id, 'enabled:', track.enabled, 'state:', track.readyState);
});
// Ensure the tracks are enabled
remoteStream.getTracks().forEach(track => {
if (event.streams.length > 0) {
const remoteStream = event.streams[0];
console.log('[DesktopShare] 🎬 Setting remote stream, track count:', remoteStream.getTracks().length);
// Extract only the video tracks and build a new video-only stream
const videoTracks = remoteStream.getVideoTracks();
if (videoTracks.length > 0) {
const videoStream = new MediaStream(videoTracks);
console.log('[DesktopShare] 📹 Created video-only stream, video track count:', videoTracks.length);
videoTracks.forEach(track => {
console.log('[DesktopShare] Video track:', track.id, 'enabled:', track.enabled, 'state:', track.readyState);
// Ensure the track is enabled
if (!track.enabled) {
console.log('[DesktopShare] 🔓 Enabling remote track:', track.id);
console.log('[DesktopShare] 🔓 Enabling video track:', track.id);
track.enabled = true;
}
});
// Use setState directly instead of handleRemoteStream to avoid dependency issues
setState(prev => ({ ...prev, remoteStream }));
// Use setState directly
setState(prev => ({ ...prev, remoteStream: videoStream }));
// If a video element ref exists, attach the stream
if (remoteVideoRef.current) {
remoteVideoRef.current.srcObject = remoteStream;
}
} else {
console.warn('[DesktopShare] ⚠️ Track received without an associated stream');
// Try to build a stream from the track
try {
const newStream = new MediaStream([event.track]);
console.log('[DesktopShare] 🔄 Created new stream from track:', newStream.id);
// Ensure the tracks are enabled
newStream.getTracks().forEach(track => {
if (!track.enabled) {
console.log('[DesktopShare] 🔓 Enabling track in the new stream:', track.id);
track.enabled = true;
}
});
// Use setState directly
setState(prev => ({ ...prev, remoteStream: newStream }));
// If a video element ref exists, attach the stream
if (remoteVideoRef.current) {
remoteVideoRef.current.srcObject = newStream;
}
} catch (error) {
console.error('[DesktopShare] ❌ Failed to create a stream from the track:', error);
remoteVideoRef.current.srcObject = videoStream;
}
}
};
} else {
console.warn('[DesktopShare] ⚠️ Video track received without an associated stream');
// Try to build a stream from the track
try {
const newStream = new MediaStream([event.track]);
console.log('[DesktopShare] 🔄 Created new stream from video track:', newStream.id);
webRTC.onTrack(trackHandler);
}, [webRTC]); // depend only on webRTC; the handleRemoteStream dependency was removed
// Ensure the track is enabled
if (!event.track.enabled) {
console.log('[DesktopShare] 🔓 Enabling video track:', event.track.id);
event.track.enabled = true;
}
// Use setState directly
setState(prev => ({ ...prev, remoteStream: newStream }));
// If a video element ref exists, attach the stream
if (remoteVideoRef.current) {
remoteVideoRef.current.srcObject = newStream;
}
} catch (error) {
console.error('[DesktopShare] ❌ Failed to create a stream from the track:', error);
}
}
}, []);
useEffect(() => {
if (!webRTC) return;
const cleanup = webRTC.onTrack(handleRemoteTrack);
// Return the cleanup function
return () => {
if (cleanup) {
cleanup();
}
};
}, [webRTC, handleRemoteTrack]); // depends on webRTC and the stable handler function
// Get the desktop-share stream
const getDesktopStream = useCallback(async (): Promise<MediaStream> => {

View File

@@ -0,0 +1,317 @@
import { useCallback, useEffect, useRef, useState } from 'react';
import { IWebConnection } from '../connection/types';
import { useAudioVisualizer } from './useAudioVisualizer';
interface VoiceChatState {
isVoiceEnabled: boolean;
isMuted: boolean;
isRemoteVoiceActive: boolean;
localAudioStream: MediaStream | null;
remoteAudioStream: MediaStream | null;
error: string | null;
}
export function useVoiceChatBusiness(connection: IWebConnection) {
const [state, setState] = useState<VoiceChatState>({
isVoiceEnabled: false,
isMuted: false,
isRemoteVoiceActive: false,
localAudioStream: null,
remoteAudioStream: null,
error: null,
});
const localAudioStreamRef = useRef<MediaStream | null>(null);
const audioSenderRef = useRef<RTCRtpSender | null>(null);
const remoteAudioRef = useRef<HTMLAudioElement | null>(null);
// Audio visualization
const localAudioVisualizer = useAudioVisualizer(state.localAudioStream);
const remoteAudioVisualizer = useAudioVisualizer(state.remoteAudioStream);
const updateState = useCallback((updates: Partial<VoiceChatState>) => {
setState(prev => ({ ...prev, ...updates }));
}, []);
// Listen for remote audio tracks
const handleRemoteAudioTrack = useCallback((event: RTCTrackEvent, currentTrackRef: { current: MediaStreamTrack | null }) => {
if (event.track.kind !== 'audio') return;
// Remove listeners from the old track
if (currentTrackRef.current) {
currentTrackRef.current.onended = null;
currentTrackRef.current.onmute = null;
currentTrackRef.current.onunmute = null;
}
currentTrackRef.current = event.track;
if (event.streams.length > 0) {
const remoteStream = event.streams[0];
event.track.enabled = true;
// Update state
setState(prev => ({
...prev,
remoteAudioStream: remoteStream,
isRemoteVoiceActive: true
}));
// Listen for the track's ended event
event.track.onended = () => {
setState(prev => ({ ...prev, isRemoteVoiceActive: false }));
};
// Listen for track mute events
event.track.onmute = () => {
// remote audio track muted
};
event.track.onunmute = () => {
// remote audio track unmuted
};
// After setting state, use setTimeout to make sure the audio element has updated
setTimeout(() => {
if (remoteAudioRef.current && remoteStream.active) {
remoteAudioRef.current.srcObject = remoteStream;
remoteAudioRef.current.play().catch(err => {
// Ignore AbortError; it is a benign race condition
if (err.name !== 'AbortError') {
console.error('[VoiceChat] Failed to play remote audio:', err);
}
});
}
}, 0);
}
}, []); // empty dependency array; the function reference never changes
useEffect(() => {
if (!connection) return;
const currentTrackRef = { current: null as MediaStreamTrack | null };
const trackHandler = (event: RTCTrackEvent) => {
if (event.track.kind === 'audio') {
handleRemoteAudioTrack(event, currentTrackRef);
}
};
const cleanup = connection.onTrack(trackHandler);
return () => {
if (currentTrackRef.current) {
currentTrackRef.current.onended = null;
currentTrackRef.current.onmute = null;
currentTrackRef.current.onunmute = null;
}
if (cleanup) {
cleanup();
}
};
}, [connection, handleRemoteAudioTrack]); // re-register only when connection or the handler changes
// Get the local audio stream
const getLocalAudioStream = useCallback(async (): Promise<MediaStream> => {
try {
const stream = await navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true,
},
video: false,
});
return stream;
} catch (error) {
console.error('[VoiceChat] Failed to get the local audio stream:', error);
// Provide a more specific message based on the error type
if (error instanceof DOMException) {
if (error.name === 'NotAllowedError') {
throw new Error('Microphone permission denied; please allow microphone access in your browser settings');
} else if (error.name === 'NotFoundError') {
throw new Error('No microphone device detected; please connect one and try again');
} else if (error.name === 'NotReadableError') {
throw new Error('The microphone is in use by another application; please close other programs that are using it');
} else if (error.name === 'OverconstrainedError') {
throw new Error('The microphone does not support the requested audio settings');
} else if (error.name === 'AbortError') {
throw new Error('Microphone access was interrupted');
} else if (error.name === 'SecurityError') {
throw new Error('Security restriction: cannot access the microphone; please use HTTPS');
}
}
throw new Error('Could not get microphone permission; please make sure microphone access is allowed');
}
}, []);
// Enable voice chat
const enableVoice = useCallback(async () => {
if (state.isVoiceEnabled || !connection) {
return;
}
try {
updateState({ error: null });
// Check the P2P connection state
const connectState = connection.getConnectState();
if (!connectState.isPeerConnected) {
throw new Error('P2P connection not yet established; cannot enable voice');
}
// Get the local audio stream
const stream = await getLocalAudioStream();
localAudioStreamRef.current = stream;
console.log('[VoiceChat] ✅ Local audio stream acquired:', {
streamId: stream.id,
audioTracks: stream.getAudioTracks().length,
trackEnabled: stream.getAudioTracks()[0]?.enabled,
trackReadyState: stream.getAudioTracks()[0]?.readyState
});
// Add the audio track to the P2P connection
const audioTrack = stream.getAudioTracks()[0];
if (audioTrack) {
const role = connection.currentRoom?.role;
console.log('[VoiceChat] 📤 Adding audio track to the P2P connection, current role:', role);
const sender = connection.addTrack(audioTrack, stream);
audioSenderRef.current = sender;
if (sender) {
console.log('[VoiceChat] 📊 Sender info:', {
track: sender.track?.id,
trackEnabled: sender.track?.enabled,
trackReadyState: sender.track?.readyState
});
}
// Important: after adding an audio track, this side must create the offer itself,
// because the remote peer has no way of knowing that we added a new track
console.log('[VoiceChat] 📡 [' + role + '] Creating offer to renegotiate (audio track added)');
const negotiated = await connection.createOfferNow();
console.log('[VoiceChat] 📡 [' + role + '] Renegotiation result:', negotiated);
}
updateState({
isVoiceEnabled: true,
localAudioStream: stream,
isMuted: false,
});
} catch (error) {
console.error('[VoiceChat] Failed to enable voice:', error);
const errorMsg = error instanceof Error ? error.message : 'Failed to enable voice';
updateState({ error: errorMsg });
throw error;
}
}, [connection, getLocalAudioStream, state.isVoiceEnabled, updateState]);
// Disable voice chat
const disableVoice = useCallback(async () => {
if (!state.isVoiceEnabled) return;
const role = connection.currentRoom?.role;
// Remove the audio track
if (audioSenderRef.current) {
connection.removeTrack(audioSenderRef.current);
audioSenderRef.current = null;
// Important: after removing the audio track, this side must create the offer itself,
// because the remote peer has no way of knowing that we removed the track
console.log('[VoiceChat] 📡 [' + role + '] Renegotiating after removing the audio track');
try {
await connection.createOfferNow();
} catch (error) {
console.error('[VoiceChat] Renegotiation failed:', error);
}
}
// Stop the local audio stream
if (localAudioStreamRef.current) {
localAudioStreamRef.current.getTracks().forEach(track => {
track.stop();
});
localAudioStreamRef.current = null;
}
updateState({
isVoiceEnabled: false,
localAudioStream: null,
isMuted: false,
});
}, [connection, state.isVoiceEnabled, updateState]);
// Toggle mute
const toggleMute = useCallback(() => {
if (!localAudioStreamRef.current) {
return;
}
const audioTracks = localAudioStreamRef.current.getAudioTracks();
if (audioTracks.length === 0) {
return;
}
const newMutedState = !state.isMuted;
audioTracks.forEach(track => {
track.enabled = !newMutedState;
});
updateState({ isMuted: newMutedState });
}, [state.isMuted, updateState]);
// Set the remote audio element ref
const setRemoteAudioRef = useCallback((element: HTMLAudioElement | null) => {
remoteAudioRef.current = element;
if (element && state.remoteAudioStream && state.remoteAudioStream.active) {
element.srcObject = state.remoteAudioStream;
element.play().catch(err => {
// Ignore AbortError; it is a benign race condition
if (err.name !== 'AbortError') {
console.error('[VoiceChat] Failed to play remote audio:', err);
}
});
}
}, [state.remoteAudioStream]);
// Cleanup on unmount
useEffect(() => {
return () => {
if (localAudioStreamRef.current) {
localAudioStreamRef.current.getTracks().forEach(track => track.stop());
}
};
}, []);
return {
// State
isVoiceEnabled: state.isVoiceEnabled,
isMuted: state.isMuted,
isRemoteVoiceActive: state.isRemoteVoiceActive,
error: state.error,
// Audio visualization data
localVolume: localAudioVisualizer.volume,
localIsSpeaking: localAudioVisualizer.isSpeaking,
remoteVolume: remoteAudioVisualizer.volume,
remoteIsSpeaking: remoteAudioVisualizer.isSpeaking,
// Methods
enableVoice,
disableVoice,
toggleMute,
setRemoteAudioRef,
// Debug info
_debug: {
hasRemoteStream: !!state.remoteAudioStream,
remoteStreamId: state.remoteAudioStream?.id,
remoteTrackCount: state.remoteAudioStream?.getTracks().length || 0,
}
};
}
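
toggleMute is exported but not yet wired into the sender/receiver panels in this change; a hypothetical mute button could sit on top of the hook's public API like this:

```typescript
// Hypothetical mute toggle built on the hook's public API.
import React from 'react';
import { Mic, MicOff } from 'lucide-react';
import { Button } from '@/components/ui/button';
import { useVoiceChatBusiness } from '@/hooks/desktop-share/useVoiceChatBusiness';

function MuteButton({ voiceChat }: { voiceChat: ReturnType<typeof useVoiceChatBusiness> }) {
  if (!voiceChat.isVoiceEnabled) return null;
  return (
    <Button variant="outline" size="sm" onClick={voiceChat.toggleMute}>
      {voiceChat.isMuted ? <MicOff className="w-4 h-4" /> : <Mic className="w-4 h-4" />}
    </Button>
  );
}
```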

View File

@@ -385,26 +385,19 @@ export function useFileTransferBusiness(connection: IWebConnection) {
retryCount = 0
): Promise<boolean> => {
return new Promise((resolve) => {
// Improved connection-state check before data transfer
// Primarily check the data channel, since it is the actual channel for file transfer
const channelState = connection.getConnectState();
const isChannelUsable =
channelState.state === 'open' ||
channelState.isDataChannelConnected ||
channelState.isPeerConnected ||
(channelState.isWebSocketConnected && channelState.currentConnectType === 'websocket');
if (!isChannelUsable) {
console.warn(`Chunk send failed, transfer channel unavailable ${chunkIndex}:`, {
state: channelState.state,
isDataChannelConnected: channelState.isDataChannelConnected,
isPeerConnected: channelState.isPeerConnected,
isWebSocketConnected: channelState.isWebSocketConnected,
currentConnectType: channelState.currentConnectType
});
if (channelState.state === 'closed') {
console.warn(`Data channel closed; stopping send of file chunk ${chunkIndex}`);
resolve(false);
return;
}
// If the connection drops briefly but the data channel is usable, we can still try to send
if (!channelState.isConnected && channelState.state === 'connecting') {
console.warn(`WebRTC connection briefly down, but the data channel is connecting; continuing to send file chunk ${chunkIndex}`);
}
const chunkKey = `${fileId}-${chunkIndex}`;
// Set the acknowledgement callback
@@ -450,27 +443,8 @@ export function useFileTransferBusiness(connection: IWebConnection) {
// Send a file safely
const sendFileSecure = useCallback(async (file: File, fileId?: string) => {
// Improved connection check - use a more comprehensive readiness test
const connectState = connection.getConnectState();
const isReadyToSend =
connectState.state === 'open' || // data channel open
connectState.isDataChannelConnected || // data channel connected
connectState.isPeerConnected || // P2P connection established
(connectState.isWebSocketConnected && connectState.currentConnectType === 'websocket'); // WebSocket fallback mode
console.log('Connection check before sending file:', {
state: connectState.state,
isDataChannelConnected: connectState.isDataChannelConnected,
isPeerConnected: connectState.isPeerConnected,
isWebSocketConnected: connectState.isWebSocketConnected,
currentConnectType: connectState.currentConnectType,
isReadyToSend
});
if (!isReadyToSend) {
const errorMsg = `Connection not ready - state: ${connectState.state}, data channel: ${connectState.isDataChannelConnected}, P2P: ${connectState.isPeerConnected}, WebSocket: ${connectState.isWebSocketConnected}`;
console.error(errorMsg);
updateState({ error: errorMsg });
if (connection.getConnectState().state !== 'open') {
updateState({ error: 'Connection not ready' });
return;
}
@@ -514,23 +488,16 @@ export function useFileTransferBusiness(connection: IWebConnection) {
let retryCount = 0;
while (!success && retryCount <= MAX_RETRIES) {
// Improved connection check during transfer
// Check the data-channel state; it is the actual channel for file transfer
const channelState = connection.getConnectState();
const isChannelUsable =
channelState.state === 'open' ||
channelState.isDataChannelConnected ||
channelState.isPeerConnected ||
(channelState.isWebSocketConnected && channelState.currentConnectType === 'websocket');
if (channelState.state === 'closed') {
console.warn(`Data channel closed; stopping file transfer`);
throw new Error('Data channel closed');
}
if (!isChannelUsable) {
console.warn(`Transfer channel unavailable; stopping file transfer:`, {
state: channelState.state,
isDataChannelConnected: channelState.isDataChannelConnected,
isPeerConnected: channelState.isPeerConnected,
isWebSocketConnected: channelState.isWebSocketConnected,
currentConnectType: channelState.currentConnectType
});
throw new Error('Transfer channel unavailable');
// If the connection drops briefly but the data channel is usable, we can still try to send
if (!connection.getConnectState().isConnected && channelState.state === 'connecting') {
console.warn(`WebRTC connection briefly down, but the data channel is connecting; continuing to send file chunk ${chunkIndex}`);
}
const start = chunkIndex * chunkSize;
@@ -629,26 +596,26 @@ export function useFileTransferBusiness(connection: IWebConnection) {
// Send the file list
const sendFileList = useCallback((fileList: FileInfo[]) => {
// Improved connection-state check logic
// Check the connection state - prefer the data-channel state, since the P2P connection may be up before the flags update
const channelState = connection.getConnectState();
const isReadyToSend =
channelState.state === 'open' ||
channelState.isDataChannelConnected ||
channelState.isPeerConnected ||
(channelState.isWebSocketConnected && channelState.currentConnectType === 'websocket') ||
channelState.isConnected;
const peerConnected = channelState.isPeerConnected;
const dataChannelConnected = channelState.isDataChannelConnected;
const channelReadyState = channelState.state;
console.log('File-list send check:', {
state: channelState.state,
isDataChannelConnected: channelState.isDataChannelConnected,
isPeerConnected: channelState.isPeerConnected,
isWebSocketConnected: channelState.isWebSocketConnected,
currentConnectType: channelState.currentConnectType,
isConnected: channelState.isConnected,
isReadyToSend,
channelState,
peerConnected,
dataChannelConnected,
channelReadyState,
fileListLength: fileList.length
});
// Use a looser readiness condition
const isReadyToSend = channelReadyState === 'open' ||
dataChannelConnected ||
peerConnected ||
channelState.isConnected;
if (isReadyToSend) {
console.log('Sending file list:', fileList.map(f => f.name));
@@ -662,36 +629,42 @@ export function useFileTransferBusiness(connection: IWebConnection) {
// Do not retry immediately; let the upper layer handle retries
}
} else {
console.log('Connection not ready; will send the file list once connected');
console.log('Connection not ready; will send the file list once connected:', {
channelReadyState,
dataChannelConnected,
peerConnected,
isConnected: channelState.isConnected
});
}
}, [connection]);
// Request a file
const requestFile = useCallback((fileId: string, fileName: string) => {
const channelState = connection.getConnectState();
// Unified connection-state check logic
const isReadyToRequest =
channelState.state === 'open' ||
channelState.isDataChannelConnected ||
channelState.isPeerConnected ||
(channelState.isWebSocketConnected && channelState.currentConnectType === 'websocket') ||
channelState.isConnected;
const isChannelOpen = channelState.state === 'open';
const isDataChannelConnected = channelState.isDataChannelConnected;
const isPeerConnected = channelState.isPeerConnected;
const isConnected = channelState.isConnected;
console.log('Connection check before requesting file:', {
fileName,
fileId,
state: channelState.state,
isDataChannelConnected: channelState.isDataChannelConnected,
isPeerConnected: channelState.isPeerConnected,
isWebSocketConnected: channelState.isWebSocketConnected,
currentConnectType: channelState.currentConnectType,
isConnected: channelState.isConnected,
isReadyToRequest
isChannelOpen,
isDataChannelConnected,
isPeerConnected,
isConnected
});
// Use a looser readiness condition
const isReadyToRequest = isChannelOpen || isDataChannelConnected || isPeerConnected || isConnected;
if (!isReadyToRequest) {
console.error('Data channel not ready; cannot request file');
console.error('Data channel not ready; cannot request file:', {
isChannelOpen,
isDataChannelConnected,
isPeerConnected,
isConnected
});
return;
}
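
After this simplification, per-chunk readiness boils down to the data channel's own state: abort on 'closed', keep trying while 'connecting', send when 'open'. The same policy as a standalone sketch:

```typescript
// Standalone sketch of the simplified per-chunk policy above.
type ChunkDecision = 'send' | 'retry' | 'abort';

function decideChunkSend(channelState: string): ChunkDecision {
  if (channelState === 'closed') return 'abort'; // channel gone; stop the transfer
  if (channelState === 'open') return 'send';    // ready to transmit
  return 'retry';                                // e.g. 'connecting': keep trying
}
```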

2
go.mod
View File

@@ -6,6 +6,7 @@ require (
github.com/go-chi/chi/v5 v5.0.10
github.com/go-chi/cors v1.2.1
github.com/gorilla/websocket v1.5.3
github.com/pion/turn/v3 v3.0.3
)
require (
@@ -15,7 +16,6 @@ require (
github.com/pion/stun/v2 v2.0.0 // indirect
github.com/pion/transport/v2 v2.2.1 // indirect
github.com/pion/transport/v3 v3.0.2 // indirect
github.com/pion/turn/v3 v3.0.3 // indirect
golang.org/x/crypto v0.21.0 // indirect
golang.org/x/sys v0.18.0 // indirect
)

5
go.sum
View File

@@ -1,4 +1,5 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk=
github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
@@ -21,6 +22,7 @@ github.com/pion/transport/v3 v3.0.2 h1:r+40RJR25S9w3jbA6/5uEPTzcdn7ncyU44RWCbHkL
github.com/pion/transport/v3 v3.0.2/go.mod h1:nIToODoOlb5If2jF9y2Igfx3PFYWfuXi37m0IlWa/D0=
github.com/pion/turn/v3 v3.0.3 h1:1e3GVk8gHZLPBA5LqadWYV60lmaKUaHCkm9DX9CkGcE=
github.com/pion/turn/v3 v3.0.3/go.mod h1:vw0Dz420q7VYAF3J4wJKzReLHIo2LGp4ev8nXQexYsc=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
@@ -30,6 +32,7 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
@@ -49,6 +52,7 @@ golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -87,4 +91,5 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=