Skip to content

Commit aab21da

Browse files
authored
Merge pull request #30 from waterbustech/feat/switch-device-for-media
Feat: Switch device for media
2 parents 2348ebf + 5564dc1 commit aab21da

16 files changed: +316 additions, −69 deletions

lib/core/webrtc/webrtc_manager.dart

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,11 +36,13 @@ abstract class WebRTCManager {
3636

3737
// ====== Media & Device Control ======
3838
Future<void> initializeMediaDevices();
39-
Future<void> applyMediaSettings(MediaConfig setting);
39+
Future<void> updateMediaConfig(MediaConfig setting);
4040

4141
Future<void> toggleAudioInput({bool? forceValue});
4242
Future<void> toggleVideoInput();
4343
Future<void> toggleSpeakerOutput({bool? forceValue});
44+
Future<void> changeAudioInputDevice({required String deviceId});
45+
Future<void> changeVideoInputDevice({required String deviceId});
4446
Future<void> switchCameraInput();
4547

4648
// ====== Screen Sharing ======

lib/core/webrtc/webrtc_manager_impl.dart

Lines changed: 69 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -372,7 +372,7 @@ class WebRTCManagerIpml extends WebRTCManager {
372372
}
373373

374374
@override
375-
Future<void> applyMediaSettings(MediaConfig setting) async {
375+
Future<void> updateMediaConfig(MediaConfig setting) async {
376376
if (_currentCallSetting.videoConfig.videoQuality ==
377377
setting.videoConfig.videoQuality) {
378378
if (_currentCallSetting.e2eeEnabled != setting.e2eeEnabled) {
@@ -425,10 +425,9 @@ class WebRTCManagerIpml extends WebRTCManager {
425425
bool? forceValue,
426426
bool ignoreUpdateValue = false,
427427
}) async {
428-
if (_mParticipant == null ||
429-
(_mParticipant!.isSharingScreen && WebRTC.platformIsMobile)) {
430-
return;
431-
}
428+
if (_mParticipant == null) return;
429+
430+
if (_mParticipant!.isSharingScreen && WebRTC.platformIsMobile) return;
432431

433432
final tracks = _localCameraStream?.getVideoTracks() ?? [];
434433
final newValue = forceValue ?? !_mParticipant!.isVideoEnabled;
@@ -494,6 +493,50 @@ class WebRTCManagerIpml extends WebRTCManager {
494493
_notify(CallbackEvents.shouldBeUpdateState);
495494
}
496495

496+
@override
497+
Future<void> changeAudioInputDevice({required String deviceId}) async {
498+
if (_mParticipant == null) return;
499+
500+
_currentCallSetting = _currentCallSetting.copyWith(
501+
audioConfig: _currentCallSetting.audioConfig.copyWith(deviceId: deviceId),
502+
);
503+
504+
final MediaStream? newStream = await _getUserMedia(onlyStream: true);
505+
506+
if (newStream == null) return;
507+
508+
final MediaStreamTrack? audioTrack = newStream.getAudioTracks().firstOrNull;
509+
510+
if (audioTrack == null) return;
511+
512+
_localCameraStream = newStream;
513+
await _replaceAudioTrack(audioTrack);
514+
515+
_mParticipant?.setSrcObject(newStream);
516+
}
517+
518+
@override
519+
Future<void> changeVideoInputDevice({required String deviceId}) async {
520+
if (_mParticipant == null) return;
521+
522+
_currentCallSetting = _currentCallSetting.copyWith(
523+
videoConfig: _currentCallSetting.videoConfig.copyWith(deviceId: deviceId),
524+
);
525+
526+
final MediaStream? newStream = await _getUserMedia(onlyStream: true);
527+
528+
if (newStream == null) return;
529+
530+
final MediaStreamTrack? videoTrack = newStream.getVideoTracks().firstOrNull;
531+
532+
if (videoTrack == null) return;
533+
534+
_localCameraStream = newStream;
535+
await _replaceVideoTrack(videoTrack);
536+
537+
_mParticipant?.setSrcObject(newStream);
538+
}
539+
497540
@override
498541
Future<void> switchCameraInput() async {
499542
if (_localCameraStream == null) {
@@ -1271,6 +1314,26 @@ class WebRTCManagerIpml extends WebRTCManager {
12711314
_localCameraStream = newStream;
12721315
}
12731316

1317+
Future<void> _replaceAudioTrack(
1318+
MediaStreamTrack track, {
1319+
List<RTCRtpSender>? sendersList,
1320+
}) async {
1321+
final List<RTCRtpSender> senders =
1322+
(sendersList ?? await _mParticipant!.peerConnection.getSenders())
1323+
.where(
1324+
(sender) => sender.track?.kind == RtcTrackKind.audio.kind,
1325+
)
1326+
.toList();
1327+
1328+
if (senders.isEmpty) return;
1329+
1330+
final sender = senders.first;
1331+
1332+
await sender.replaceTrack(track);
1333+
1334+
await _applyEncryption(_currentCallSetting.e2eeEnabled, senders: [sender]);
1335+
}
1336+
12741337
Future<void> _replaceVideoTrack(
12751338
MediaStreamTrack track, {
12761339
List<RTCRtpSender>? sendersList,
@@ -1286,7 +1349,7 @@ class WebRTCManagerIpml extends WebRTCManager {
12861349

12871350
final sender = senders.first;
12881351

1289-
sender.replaceTrack(track);
1352+
await sender.replaceTrack(track);
12901353

12911354
await _applyEncryption(_currentCallSetting.e2eeEnabled, senders: [sender]);
12921355
}

lib/flutter_waterbus_sdk.dart

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,14 @@ class WaterbusSdk {
115115
await _sdk.toggleAudio();
116116
}
117117

118+
Future<void> changeAudioInputDevice({required String deviceId}) async {
119+
await _sdk.changeAudioInputDevice(deviceId: deviceId);
120+
}
121+
122+
Future<void> changeVideoInputDevice({required String deviceId}) async {
123+
await _sdk.changeVideoInputDevice(deviceId: deviceId);
124+
}
125+
118126
void toggleRaiseHand() {
119127
_sdk.toggleRaiseHand();
120128
}
@@ -127,8 +135,8 @@ class WaterbusSdk {
127135
_sdk.setSubscribeSubtitle(isEnabled);
128136
}
129137

130-
Future<void> changeCallSetting(MediaConfig setting) async {
131-
await _sdk.changeCallSettings(setting);
138+
Future<void> updateMediaConfig(MediaConfig setting) async {
139+
await _sdk.updateMediaConfig(setting);
132140
}
133141

134142
Future<void> enableVirtualBackground({

lib/stats/webrtc_audio_stats.dart

Lines changed: 55 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import 'package:injectable/injectable.dart';
55

66
import 'package:waterbus_sdk/types/index.dart';
77
import 'package:waterbus_sdk/utils/extensions/duration_extension.dart';
8+
import 'package:waterbus_sdk/utils/logger/logger.dart';
89

910
@singleton
1011
class WebRTCAudioStats {
@@ -47,15 +48,18 @@ class WebRTCAudioStats {
4748
);
4849

4950
if (index < 0) return;
50-
5151
_receivers.removeAt(index);
5252
}
5353

5454
void initialize() {
5555
_timer ??= Timer.periodic(1.seconds, (timer) {
56-
if (_sender != null) _monitorAudio(params: _sender!);
56+
if (_sender != null) {
57+
_monitorAudio(params: _sender!);
58+
}
5759

58-
for (final params in _receivers) {
60+
// Create a copy to avoid concurrent modification
61+
final receiversCopy = List<AudioStatsParams>.from(_receivers);
62+
for (final params in receiversCopy) {
5963
_monitorAudio(params: params, type: 'inbound-rtp');
6064
}
6165
});
@@ -73,35 +77,57 @@ class WebRTCAudioStats {
7377
required AudioStatsParams params,
7478
String type = 'media-source',
7579
}) async {
76-
final List<StatsReport> stats = [];
77-
78-
if (type == 'media-source') {
79-
if (params.pc == null) return;
80-
final List<RTCRtpSender> senders = (await params.pc!.getSenders())
81-
.where((sender) => sender.track?.kind == 'audio')
82-
.toList();
83-
84-
for (final rtpSender in senders) {
85-
final senderStats = await rtpSender.getStats();
86-
stats.addAll(senderStats);
87-
}
88-
} else {
89-
final List<RTCRtpReceiver> rtpReceivers = params.receivers;
90-
91-
for (final rtpReceiver in rtpReceivers) {
92-
final receiverStats = await rtpReceiver.getStats();
93-
stats.addAll(receiverStats);
80+
try {
81+
final List<StatsReport> stats = [];
82+
83+
if (type == 'media-source') {
84+
if (params.pc == null) return;
85+
86+
if (params.pc!.connectionState ==
87+
RTCPeerConnectionState.RTCPeerConnectionStateClosed ||
88+
params.pc!.connectionState ==
89+
RTCPeerConnectionState.RTCPeerConnectionStateFailed) {
90+
return;
91+
}
92+
93+
final List<RTCRtpSender> senders = (await params.pc!.getSenders())
94+
.where((sender) => sender.track?.kind == 'audio')
95+
.toList();
96+
97+
for (final rtpSender in senders) {
98+
try {
99+
final senderStats = await rtpSender.getStats();
100+
stats.addAll(senderStats);
101+
} catch (e) {
102+
continue;
103+
}
104+
}
105+
} else {
106+
final List<RTCRtpReceiver> rtpReceivers = params.receivers;
107+
for (final rtpReceiver in rtpReceivers) {
108+
try {
109+
final receiverStats = await rtpReceiver.getStats();
110+
stats.addAll(receiverStats);
111+
} catch (e) {
112+
continue;
113+
}
114+
}
94115
}
95-
}
96-
97-
for (final v in stats) {
98-
if (v.type == type && v.values['kind'] == 'audio') {
99-
final num? audioLevel = getNumValFromReport(v.values, 'audioLevel');
100-
101-
if (audioLevel == null) return;
102116

103-
params.callBack(audioLevel.level);
117+
for (final v in stats) {
118+
if (v.type == type && v.values['kind'] == 'audio') {
119+
final num? audioLevel = getNumValFromReport(v.values, 'audioLevel');
120+
if (audioLevel == null) continue;
121+
122+
try {
123+
params.callBack(audioLevel.level);
124+
} catch (e) {
125+
continue;
126+
}
127+
}
104128
}
129+
} catch (error) {
130+
WaterbusLogger.instance.bug('Error in _monitorAudio: $error');
105131
}
106132
}
107133
}

lib/stats/webrtc_video_stats.dart

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,14 +82,17 @@ class WebRTCVideoStats {
8282
}
8383

8484
Future<void> _monitorSenderStats() async {
85-
for (final senders in _senders.entries) {
85+
final sendersEntries = _senders.entries.toList();
86+
87+
for (final senders in sendersEntries) {
88+
if (!_senders.containsKey(senders.key)) continue;
89+
8690
for (final sender in senders.value.senders) {
8791
try {
8892
final List<StatsReport> statsReport = await sender.getStats();
8993
final List<VideoSenderStats> stats =
9094
await _getSenderStats(statsReport);
9195

92-
// Check if stats is empty before proceeding
9396
if (stats.isEmpty) continue;
9497

9598
final Map<String, VideoSenderStats> statsMap = {};

lib/types/error/app_exception.dart

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,12 @@ import 'package:waterbus_sdk/types/error/failures.dart';
44

55
enum AppException {
66
// Room
7-
roomNotFound("Room Not Found", RoomNotFound.new),
7+
roomNotFound("Room with Code ", RoomNotFound.new),
88
notAllowedToUpdateRoom(
99
'User not allowed to update rooom',
1010
NotAllowedToUpdateRoom.new,
1111
),
12-
wrongPassword('Wrong password!', WrongPassword.new),
12+
wrongPassword('Password is not correct', WrongPassword.new),
1313
notAllowToJoinDirectly(
1414
'User not allow to join directly',
1515
NotAllowToJoinDirectly.new,
@@ -57,7 +57,7 @@ enum AppException {
5757
extension AppExceptionX on String {
5858
Failure get toFailure {
5959
final match = AppException.values.firstWhereOrNull(
60-
(e) => e.message == this,
60+
(e) => contains(e.message),
6161
);
6262

6363
return match?.failure ?? ServerFailure();

lib/types/externals/models/audio_config.dart

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
import 'package:flutter/foundation.dart';
2+
13
import 'package:freezed_annotation/freezed_annotation.dart';
24

35
part "audio_config.freezed.dart";
@@ -6,6 +8,7 @@ part "audio_config.g.dart";
68
@freezed
79
abstract class AudioConfig with _$AudioConfig {
810
const factory AudioConfig({
11+
String? deviceId,
912
@Default(false) bool isLowBandwidthMode,
1013
@Default(false) bool isAudioMuted,
1114
@Default(true) bool echoCancellationEnabled,
@@ -16,3 +19,23 @@ abstract class AudioConfig with _$AudioConfig {
1619
factory AudioConfig.fromJson(Map<String, Object?> json) =>
1720
_$AudioConfigFromJson(json);
1821
}
22+
23+
extension AudioConfigX on AudioConfig {
24+
Map<String, dynamic> get configDeviceId {
25+
final Map<String, dynamic> constraints = {};
26+
if (deviceId != null && deviceId!.isNotEmpty) {
27+
if (kIsWeb) {
28+
constraints['deviceId'] = {
29+
'exact': deviceId,
30+
'ideal': deviceId,
31+
};
32+
} else {
33+
constraints['optional'] = [
34+
{'sourceId': deviceId},
35+
];
36+
}
37+
}
38+
39+
return constraints;
40+
}
41+
}

0 commit comments

Comments (0)