Skip to content

Commit af3ceb7

Browse files
committed
add three opus decoders; every rtc connection can choose one to send now
use a separate gain node to control mute/unmute of peer and main output
1 parent d68dba2 commit af3ceb7

File tree

18 files changed

+289
-137
lines changed

18 files changed

+289
-137
lines changed

README.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,10 @@ yarn dev
4141
yarn build
4242
```
4343

44+
### start two instances for testing
45+
run vscode task `Run Default on 5173 & A on 5174`,
46+
this will start two dev instances in one terminal.
47+
4448
## Config files
4549
- `config.json`: Main configuration file for RelayX
4650
- `.env`: Tailscale authentication key and hostname (the empty env file will be created even if the user doesn't use an authkey to log in)

app/user-config.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,8 @@
88
"output": "default"
99
},
1010
"windowBounds": {
11-
"x": 7,
12-
"y": 48,
11+
"x": 824,
12+
"y": 140,
1313
"width": 1400,
1414
"height": 800
1515
},

app/user-configA.json

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,10 @@
88
"output": "default"
99
},
1010
"windowBounds": {
11-
"x": 190,
12-
"y": 125,
11+
"x": 364,
12+
"y": 108,
1313
"width": 1400,
1414
"height": 783
15-
}
15+
},
16+
"loginMethod": "key"
1617
}

package.json

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,13 @@
11
{
2-
"name": "electron-react-vite-ts-template",
3-
"author": "klz",
4-
"description": "xxx",
2+
"name": "relayx",
3+
"author": "Need_an_AwP",
4+
"description": "A serverless voice chat application based on Tailscale",
55
"private": true,
6-
"version": "0.0.1",
6+
"version": "0.0.2",
77
"type": "module",
88
"main": "app/index.mjs",
99
"scripts": {
10-
"downloadCPA": "yarn add download-cli -D && download https://github.com/Need-an-AwP/process-audio-capture-stdio/releases/download/v0.0.2/process-audio-capture.exe --out app/",
10+
"downloadCPA": "yarn add download-cli -D && download https://github.com/Need-an-AwP/process-audio-capture-stdio/releases/latest/download/process-audio-capture.exe --out app/",
1111
"dev": "concurrently -k \"vite\" \"cross-env DEV=true electron app/index.mjs\"",
1212
"dev:build": "yarn build:go && yarn dev",
1313
"build:go": "go build -C twg -o ../app",

src/AudioManager/nodes/MainAudioNodes.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,15 @@ export class MainAudioNodes {
55
private audioContext: AudioContext;
66
public mainGainNode: GainNode;
77
public mainAnalyserNode: AnalyserNode;
8+
public mainMuteGainNode: GainNode;
89
public mainDestination: MediaStreamAudioDestinationNode;
910

1011
constructor(audioContext: AudioContext) {
1112
this.audioContext = audioContext;
1213
this.mainDestination = this.audioContext.createMediaStreamDestination();
1314
this.mainGainNode = this.audioContext.createGain();
1415
this.mainAnalyserNode = AudioAnalyser.createAnalyserNode(this.audioContext);
16+
this.mainMuteGainNode = this.audioContext.createGain();
1517
this.mainGainNode.gain.value = 1.0;
1618

1719
// play global audio only when local user is in chat
@@ -26,7 +28,8 @@ export class MainAudioNodes {
2628
}
2729
)
2830

29-
this.mainAnalyserNode.connect(this.mainDestination);
31+
this.mainAnalyserNode.connect(this.mainMuteGainNode);
32+
this.mainMuteGainNode.connect(this.mainDestination);
3033

3134
// this.playRandomNoise(); // random noise for testing
3235
}
@@ -71,7 +74,7 @@ export class MainAudioNodes {
7174
* @param muted whether to mute
7275
*/
7376
public setMainOutputMuted(muted: boolean): void {
74-
this.mainGainNode.gain.value = muted ? 0 : this.mainGainNode.gain.value;
77+
this.mainMuteGainNode.gain.value = muted ? 0 : 1;
7578
}
7679

7780
// getter 方法

src/AudioManager/nodes/PeerNodeManager.ts

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,12 +32,13 @@ export class PeerNodeManager {
3232
try {
3333
const gainNode = this.audioContext.createGain();
3434
const analyserNode = AudioAnalyser.createAnalyserNode(this.audioContext);
35-
gainNode.gain.value = 1.0;
35+
const muteGainNode = this.audioContext.createGain();
3636

3737
gainNode.connect(analyserNode);
38-
analyserNode.connect(this.mainNodes.mainGainNode);
38+
analyserNode.connect(muteGainNode);
39+
muteGainNode.connect(this.mainNodes.mainGainNode);
3940

40-
this.peerNodes[peerIP] = { gainNode, analyserNode };
41+
this.peerNodes[peerIP] = { gainNode, analyserNode, muteGainNode };
4142
useAudioStore.getState().setPeerAnalyser(peerIP, analyserNode);
4243
return true;
4344
} catch (error) {
@@ -75,7 +76,7 @@ export class PeerNodeManager {
7576
return;
7677
}
7778

78-
this.peerNodes[peerIP].gainNode.gain.value = muted ? 0 : 1;
79+
this.peerNodes[peerIP].muteGainNode.gain.value = muted ? 0 : 1;
7980
}
8081

8182
/**

src/AudioManager/types.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
export interface PeerAudioNodes {
22
gainNode: GainNode;
33
analyserNode: AnalyserNode;
4+
muteGainNode: GainNode;
45
}
56

67
export interface TrackAudioNodes {

src/MediaTrackManager/input/audioEncoder.ts

Lines changed: 94 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@ export default class InputAudioProcessor {
1414
private trackID: TrackIDType;
1515
private ws: WebSocket;
1616
private encoder: AudioEncoder | null = null;
17+
private static bitrateList = [32_000, 64_000, 128_000];
18+
private encoders: Record<number, AudioEncoder> = {};
1719
private state: ProcessorStateType = ProcessorState.IDLE;
1820
private audioConfig: AudioEncoderConfig | null = null;
1921

@@ -58,32 +60,37 @@ export default class InputAudioProcessor {
5860
return;
5961
}
6062

63+
for (const bitrate of InputAudioProcessor.bitrateList) {
64+
const encoder = new AudioEncoder({
65+
output: (chunk: EncodedAudioChunk, metadata?: EncodedAudioChunkMetadata) => {
66+
this.handleMultipleEncoders(chunk, bitrate, metadata);
67+
},
68+
error: (error) => {
69+
console.error(`${bitrate} AudioEncoder error:`, error);
70+
if (this.state !== ProcessorState.STOPPING) {
71+
delete this.encoders[bitrate];
72+
}
73+
},
74+
});
75+
const cfg = { ...config, bitrate };
76+
encoder.configure(cfg);
77+
this.encoders[bitrate] = encoder;
78+
}
6179

62-
this.encoder = new AudioEncoder({
63-
output: this.handleEncodedChunk.bind(this),
64-
error: (error) => {
65-
console.error('AudioEncoder error:', error);
66-
// 只有在非停止状态时才重置编码器状态
67-
if (this.state !== ProcessorState.STOPPING) {
68-
this.state = ProcessorState.STOPPED;
69-
this.encoder = null;
70-
}
71-
},
72-
});
80+
// this.encoder = new AudioEncoder({
81+
// output: this.handleEncodedChunk.bind(this),
82+
// error: (error) => {
83+
// console.error('AudioEncoder error:', error);
84+
// // 只有在非停止状态时才重置编码器状态
85+
// if (this.state !== ProcessorState.STOPPING) {
86+
// this.state = ProcessorState.STOPPED;
87+
// this.encoder = null;
88+
// }
89+
// },
90+
// });
91+
// this.encoder.configure(config);
7392

74-
this.encoder.configure(config);
7593
this.state = ProcessorState.RUNNING;
76-
77-
// 配置多个不同目标码率的opus,尝试同步
78-
// let n = 0;
79-
// setInterval(() => {
80-
// this.encoder!.configure({
81-
// ...config,
82-
// bitrate: n % 2 === 0 ? 32_000 : 128_000,
83-
// })
84-
// console.log(`[audio encoder] Reconfigured encoder, bitrate: ${n % 2 === 0 ? 32_000 : 128_000}`);
85-
// n++;
86-
// }, 10 * 1000);
8794
} catch (error) {
8895
console.error('[audio encoder] Failed to initialize AudioEncoder:', error);
8996
this.state = ProcessorState.STOPPED;
@@ -120,6 +127,38 @@ export default class InputAudioProcessor {
120127
}
121128
}
122129

130+
private handleMultipleEncoders(chunk: EncodedAudioChunk, bitrate: number, metadata?: EncodedAudioChunkMetadata) {
131+
const buffer = new Uint8Array(chunk.byteLength);
132+
chunk.copyTo(buffer);
133+
134+
const headerSize = 1 + 8 + 4; // trackID + duration + bitrate
135+
const totalSize = headerSize + buffer.length;
136+
const packet = new ArrayBuffer(totalSize);
137+
const view = new DataView(packet);
138+
139+
let offset = 0;
140+
view.setUint8(offset, this.trackID); // 轨道ID
141+
offset += 1;
142+
143+
const duration = chunk.duration || 0;
144+
view.setBigUint64(offset, BigInt(duration), true); // duration
145+
offset += 8;
146+
147+
view.setUint32(offset, bitrate, true); // 比特率
148+
offset += 4;
149+
150+
const dataView = new Uint8Array(packet, offset);
151+
dataView.set(buffer);
152+
153+
// 发送多比特率编码数据
154+
if (this.ws.readyState === WebSocket.OPEN) {
155+
this.ws.send(packet);
156+
} else {
157+
console.warn('mediaWs is not open. Unable to send multi-bitrate audio data.');
158+
}
159+
}
160+
161+
123162

124163
private async encodeFromAudioTrack(track: MediaStreamAudioTrack) {
125164
if (!('MediaStreamTrackProcessor' in window)) {
@@ -142,9 +181,13 @@ export default class InputAudioProcessor {
142181
await this.init(value);
143182
}
144183

145-
if (this.encoder && this.state === ProcessorState.RUNNING) {
184+
if (this.state === ProcessorState.RUNNING) {
146185
try {
147-
this.encoder.encode(value);
186+
// this.encoder.encode(value);
187+
188+
this.encoders && Object.values(this.encoders).forEach(enc => {
189+
enc.encode(value);
190+
});
148191
} catch (error) {
149192
console.error('Error encoding frame:', error);
150193
}
@@ -165,23 +208,42 @@ export default class InputAudioProcessor {
165208
public stop() {
166209
this.state = ProcessorState.STOPPING;
167210

211+
// 停止主编码器
168212
if (this.encoder) {
169-
// Flush any pending frames and close the encoder
170213
this.encoder.flush()
171214
.then(() => {
172215
this.encoder?.close();
173216
this.encoder = null;
174-
this.state = ProcessorState.STOPPED;
175217
})
176218
.catch(error => {
177-
console.error('Error flushing encoder:', error);
178-
this.state = ProcessorState.STOPPED;
219+
console.error('Error flushing main encoder:', error);
179220
});
180-
} else {
181-
this.state = ProcessorState.STOPPED;
182221
}
183222

184-
console.log('Audio processing stopped');
223+
// 停止所有多比特率编码器
224+
const flushPromises = Object.entries(this.encoders).map(([bitrate, encoder]) => {
225+
return encoder.flush()
226+
.then(() => {
227+
encoder.close();
228+
console.log(`Closed encoder for ${bitrate}bps`);
229+
})
230+
.catch(error => {
231+
console.error(`Error flushing encoder ${bitrate}bps:`, error);
232+
});
233+
});
234+
235+
// 等待所有编码器完成
236+
Promise.all(flushPromises)
237+
.then(() => {
238+
this.encoders = {};
239+
this.state = ProcessorState.STOPPED;
240+
console.log('All audio encoders stopped');
241+
})
242+
.catch(error => {
243+
console.error('Error stopping audio encoders:', error);
244+
this.encoders = {};
245+
this.state = ProcessorState.STOPPED;
246+
});
185247
}
186248
}
187249

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,18 @@
1-
import { useLatencyStore } from "@/stores";
1+
import { useLatencyStore, useLocalUserStateStore, useRemoteUsersStore } from "@/stores";
22

33
export default function LatencyDisplay({ peerIP }: { peerIP: string }) {
4-
const { latencies } = useLatencyStore();
4+
const { latencies, targetBitrates } = useLatencyStore();
5+
const { userState } = useLocalUserStateStore();
6+
const { peers } = useRemoteUsersStore();
57

68
return (
7-
<div className="flex items-center text-xs text-muted-foreground space-x-1">
8-
<span className="font-medium">{latencies[peerIP] || "--"}</span>
9+
<div className="flex flex-col items-end text-xs text-muted-foreground">
10+
<span className="font-medium" title="current latency">
11+
{latencies[peerIP] || "--"}
12+
</span>
13+
{userState.isInChat && peers[peerIP].isInChat && <span className="font-medium" title="available outbound bandwidth">
14+
{targetBitrates[peerIP] ? (targetBitrates[peerIP] / 1000) + " kbps" : "--"}
15+
</span>}
916
</div>
1017
);
1118
}

src/components/RightPanel/CardDisplay/userCards/index.tsx

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
export { default as UserThumbnailCard } from "./UserThumbnailCard"
22

3+
34
import { useEffect, useRef, useState } from "react"
45
import { Card } from "@/components/ui/card"
56
import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"
@@ -8,6 +9,8 @@ import { ContextMenu, ContextMenuContent, ContextMenuItem, ContextMenuTrigger, C
89
import UserAudioSpectrum from "@/components/UserAudioSpectrum";
910
import { type PeerState, TrackID } from "@/types"
1011
import { useVideoStreamStore, useAudioStore } from "@/stores"
12+
import { AudioContextManager } from "@/AudioManager"
13+
1114

1215
type peerIP = string;
1316

@@ -27,8 +30,16 @@ export function UserCard({ maximiumCard,
2730
const [isDisplayingSpectrum, setIsDisplayingSpectrum] = useState(false)
2831
const [isDisplayingAvatar, setIsDisplayingAvatar] = useState(true)
2932
const [hasVideoTrack, setHasVideoTrack] = useState(false);
33+
const [isMuted, setIsMuted] = useState(false)
3034
const videoStream = useVideoStreamStore(state => state.streamsByPeer[peerIP]?.find(vs => vs.trackID === TrackID.SCREEN_SHARE_VIDEO) || null);
3135
const analyser = useAudioStore(state => state.peerAnalysers[peerIP]);
36+
const { mutedPeers, setMutedPeer } = useAudioStore();
37+
38+
const PeerNodeManager = AudioContextManager.getInstance().peerManager
39+
useEffect(() => {
40+
PeerNodeManager.setPeerMuted(peerIP, isMuted)
41+
setMutedPeer(peerIP, isMuted)
42+
}, [isMuted])
3243

3344
useEffect(() => {
3445
if (videoRef.current && videoStream && !isDisplayingSpectrum) {
@@ -88,8 +99,9 @@ export function UserCard({ maximiumCard,
8899
<UserAudioSpectrum
89100
renderId={`user-audio-card-${peerIP}`}
90101
analyser={analyser}
91-
className="absolute w-full h-full top-0 left-0 opacity-60
92-
group-hover:opacity-100 transition-opacity duration-300"
102+
className={`absolute w-full h-full top-0 left-0 opacity-60
103+
group-hover:opacity-100 transition-opacity duration-300
104+
${mutedPeers.includes(peerIP) ? 'grayscale contrast-50' : ''}`}
93105
verticalAlignment='center'
94106
/>}
95107
</>}
@@ -106,6 +118,9 @@ export function UserCard({ maximiumCard,
106118
}}>
107119
Switch to display {isDisplayingSpectrum ? 'video' : 'spectrum'}
108120
</ContextMenuItem>}
121+
<ContextMenuItem onClick={() => { setIsMuted(!isMuted) }}>
122+
{isMuted ? 'Unmute this User' : 'Mute this User'}
123+
</ContextMenuItem>
109124
<ContextMenuSeparator />
110125
<span className='block max-w-[200px] px-2 text-xs text-muted-foreground overflow-hidden truncate'>
111126
{peerState.userName}

0 commit comments

Comments
 (0)