Skip to content

Feat: Switch device for media #30

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits into from
Jun 11, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion lib/core/webrtc/webrtc_manager.dart
Original file line number Diff line number Diff line change
Expand Up @@ -36,11 +36,13 @@ abstract class WebRTCManager {

// ====== Media & Device Control ======
Future<void> initializeMediaDevices();
Future<void> applyMediaSettings(MediaConfig setting);
Future<void> updateMediaConfig(MediaConfig setting);

Future<void> toggleAudioInput({bool? forceValue});
Future<void> toggleVideoInput();
Future<void> toggleSpeakerOutput({bool? forceValue});
Future<void> changeAudioInputDevice({required String deviceId});
Future<void> changeVideoInputDevice({required String deviceId});
Future<void> switchCameraInput();

// ====== Screen Sharing ======
Expand Down
75 changes: 69 additions & 6 deletions lib/core/webrtc/webrtc_manager_impl.dart
Original file line number Diff line number Diff line change
Expand Up @@ -372,7 +372,7 @@ class WebRTCManagerIpml extends WebRTCManager {
}

@override
Future<void> applyMediaSettings(MediaConfig setting) async {
Future<void> updateMediaConfig(MediaConfig setting) async {
if (_currentCallSetting.videoConfig.videoQuality ==
setting.videoConfig.videoQuality) {
if (_currentCallSetting.e2eeEnabled != setting.e2eeEnabled) {
Expand Down Expand Up @@ -425,10 +425,9 @@ class WebRTCManagerIpml extends WebRTCManager {
bool? forceValue,
bool ignoreUpdateValue = false,
}) async {
if (_mParticipant == null ||
(_mParticipant!.isSharingScreen && WebRTC.platformIsMobile)) {
return;
}
if (_mParticipant == null) return;

if (_mParticipant!.isSharingScreen && WebRTC.platformIsMobile) return;

final tracks = _localCameraStream?.getVideoTracks() ?? [];
final newValue = forceValue ?? !_mParticipant!.isVideoEnabled;
Expand Down Expand Up @@ -494,6 +493,50 @@ class WebRTCManagerIpml extends WebRTCManager {
_notify(CallbackEvents.shouldBeUpdateState);
}

@override
Future<void> changeAudioInputDevice({required String deviceId}) async {
  // Nothing to do without a local participant.
  if (_mParticipant == null) return;

  // Persist the chosen microphone in the current call settings so the
  // next getUserMedia call picks it up via the audio constraints.
  _currentCallSetting = _currentCallSetting.copyWith(
    audioConfig: _currentCallSetting.audioConfig.copyWith(deviceId: deviceId),
  );

  // Re-acquire local media with the updated device constraints.
  final MediaStream? capturedStream = await _getUserMedia(onlyStream: true);
  final MediaStreamTrack? capturedAudio =
      capturedStream?.getAudioTracks().firstOrNull;

  // Bail out if capture failed or yielded no audio track; the previous
  // stream stays active in that case.
  if (capturedStream == null || capturedAudio == null) return;

  // NOTE(review): the previous _localCameraStream is not disposed here —
  // presumably its video track is still held by the video sender; confirm
  // the old audio track is released elsewhere.
  _localCameraStream = capturedStream;
  await _replaceAudioTrack(capturedAudio);

  _mParticipant?.setSrcObject(capturedStream);
}

@override
Future<void> changeVideoInputDevice({required String deviceId}) async {
  // Nothing to do without a local participant.
  if (_mParticipant == null) return;

  // Persist the chosen camera in the current call settings so the next
  // getUserMedia call picks it up via the video constraints.
  _currentCallSetting = _currentCallSetting.copyWith(
    videoConfig: _currentCallSetting.videoConfig.copyWith(deviceId: deviceId),
  );

  // Re-acquire local media with the updated device constraints.
  final MediaStream? capturedStream = await _getUserMedia(onlyStream: true);
  final MediaStreamTrack? capturedVideo =
      capturedStream?.getVideoTracks().firstOrNull;

  // Bail out if capture failed or yielded no video track; the previous
  // stream stays active in that case.
  if (capturedStream == null || capturedVideo == null) return;

  // NOTE(review): the previous _localCameraStream is not disposed here —
  // presumably its audio track is still held by the audio sender; confirm
  // the old video track is released elsewhere.
  _localCameraStream = capturedStream;
  await _replaceVideoTrack(capturedVideo);

  _mParticipant?.setSrcObject(capturedStream);
}

@override
Future<void> switchCameraInput() async {
if (_localCameraStream == null) {
Expand Down Expand Up @@ -1271,6 +1314,26 @@ class WebRTCManagerIpml extends WebRTCManager {
_localCameraStream = newStream;
}

Future<void> _replaceAudioTrack(
  MediaStreamTrack track, {
  List<RTCRtpSender>? sendersList,
}) async {
  // Assumes _mParticipant is non-null — all current callers guard on it.
  final List<RTCRtpSender> allSenders =
      sendersList ?? await _mParticipant!.peerConnection.getSenders();

  // Keep only the senders currently carrying an audio track.
  final List<RTCRtpSender> audioSenders = allSenders
      .where((s) => s.track?.kind == RtcTrackKind.audio.kind)
      .toList();

  if (audioSenders.isEmpty) return;

  final RTCRtpSender target = audioSenders.first;

  // Swap the outgoing audio track in place (no renegotiation needed),
  // then re-apply E2EE so the new track is encrypted when enabled.
  await target.replaceTrack(track);
  await _applyEncryption(_currentCallSetting.e2eeEnabled, senders: [target]);
}

Future<void> _replaceVideoTrack(
MediaStreamTrack track, {
List<RTCRtpSender>? sendersList,
Expand All @@ -1286,7 +1349,7 @@ class WebRTCManagerIpml extends WebRTCManager {

final sender = senders.first;

sender.replaceTrack(track);
await sender.replaceTrack(track);

await _applyEncryption(_currentCallSetting.e2eeEnabled, senders: [sender]);
}
Expand Down
12 changes: 10 additions & 2 deletions lib/flutter_waterbus_sdk.dart
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,14 @@ class WaterbusSdk {
await _sdk.toggleAudio();
}

/// Switches the active audio input (microphone) to the device
/// identified by [deviceId], delegating to the underlying SDK.
Future<void> changeAudioInputDevice({required String deviceId}) async {
  await _sdk.changeAudioInputDevice(deviceId: deviceId);
}

/// Switches the active video input (camera) to the device
/// identified by [deviceId], delegating to the underlying SDK.
Future<void> changeVideoInputDevice({required String deviceId}) async {
  await _sdk.changeVideoInputDevice(deviceId: deviceId);
}

/// Toggles the local participant's raise-hand state via the underlying SDK.
void toggleRaiseHand() {
  _sdk.toggleRaiseHand();
}
Expand All @@ -127,8 +135,8 @@ class WaterbusSdk {
_sdk.setSubscribeSubtitle(isEnabled);
}

Future<void> changeCallSetting(MediaConfig setting) async {
await _sdk.changeCallSettings(setting);
Future<void> updateMediaConfig(MediaConfig setting) async {
await _sdk.updateMediaConfig(setting);
}

Future<void> enableVirtualBackground({
Expand Down
84 changes: 55 additions & 29 deletions lib/stats/webrtc_audio_stats.dart
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import 'package:injectable/injectable.dart';

import 'package:waterbus_sdk/types/index.dart';
import 'package:waterbus_sdk/utils/extensions/duration_extension.dart';
import 'package:waterbus_sdk/utils/logger/logger.dart';

@singleton
class WebRTCAudioStats {
Expand Down Expand Up @@ -47,15 +48,18 @@ class WebRTCAudioStats {
);

if (index < 0) return;

_receivers.removeAt(index);
}

void initialize() {
_timer ??= Timer.periodic(1.seconds, (timer) {
if (_sender != null) _monitorAudio(params: _sender!);
if (_sender != null) {
_monitorAudio(params: _sender!);
}

for (final params in _receivers) {
// Create a copy to avoid concurrent modification
final receiversCopy = List<AudioStatsParams>.from(_receivers);
for (final params in receiversCopy) {
_monitorAudio(params: params, type: 'inbound-rtp');
}
});
Expand All @@ -73,35 +77,57 @@ class WebRTCAudioStats {
required AudioStatsParams params,
String type = 'media-source',
}) async {
final List<StatsReport> stats = [];

if (type == 'media-source') {
if (params.pc == null) return;
final List<RTCRtpSender> senders = (await params.pc!.getSenders())
.where((sender) => sender.track?.kind == 'audio')
.toList();

for (final rtpSender in senders) {
final senderStats = await rtpSender.getStats();
stats.addAll(senderStats);
}
} else {
final List<RTCRtpReceiver> rtpReceivers = params.receivers;

for (final rtpReceiver in rtpReceivers) {
final receiverStats = await rtpReceiver.getStats();
stats.addAll(receiverStats);
try {
final List<StatsReport> stats = [];

if (type == 'media-source') {
if (params.pc == null) return;

if (params.pc!.connectionState ==
RTCPeerConnectionState.RTCPeerConnectionStateClosed ||
params.pc!.connectionState ==
RTCPeerConnectionState.RTCPeerConnectionStateFailed) {
return;
}

final List<RTCRtpSender> senders = (await params.pc!.getSenders())
.where((sender) => sender.track?.kind == 'audio')
.toList();

for (final rtpSender in senders) {
try {
final senderStats = await rtpSender.getStats();
stats.addAll(senderStats);
} catch (e) {
continue;
}
}
} else {
final List<RTCRtpReceiver> rtpReceivers = params.receivers;
for (final rtpReceiver in rtpReceivers) {
try {
final receiverStats = await rtpReceiver.getStats();
stats.addAll(receiverStats);
} catch (e) {
continue;
}
}
}
}

for (final v in stats) {
if (v.type == type && v.values['kind'] == 'audio') {
final num? audioLevel = getNumValFromReport(v.values, 'audioLevel');

if (audioLevel == null) return;

params.callBack(audioLevel.level);
for (final v in stats) {
if (v.type == type && v.values['kind'] == 'audio') {
final num? audioLevel = getNumValFromReport(v.values, 'audioLevel');
if (audioLevel == null) continue;

try {
params.callBack(audioLevel.level);
} catch (e) {
continue;
}
}
}
} catch (error) {
WaterbusLogger.instance.bug('Error in _monitorAudio: $error');
}
}
}
7 changes: 5 additions & 2 deletions lib/stats/webrtc_video_stats.dart
Original file line number Diff line number Diff line change
Expand Up @@ -82,14 +82,17 @@ class WebRTCVideoStats {
}

Future<void> _monitorSenderStats() async {
for (final senders in _senders.entries) {
final sendersEntries = _senders.entries.toList();

for (final senders in sendersEntries) {
if (!_senders.containsKey(senders.key)) continue;

for (final sender in senders.value.senders) {
try {
final List<StatsReport> statsReport = await sender.getStats();
final List<VideoSenderStats> stats =
await _getSenderStats(statsReport);

// Check if stats is empty before proceeding
if (stats.isEmpty) continue;

final Map<String, VideoSenderStats> statsMap = {};
Expand Down
6 changes: 3 additions & 3 deletions lib/types/error/app_exception.dart
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@ import 'package:waterbus_sdk/types/error/failures.dart';

enum AppException {
// Room
roomNotFound("Room Not Found", RoomNotFound.new),
roomNotFound("Room with Code ", RoomNotFound.new),
notAllowedToUpdateRoom(
'User not allowed to update rooom',
NotAllowedToUpdateRoom.new,
),
wrongPassword('Wrong password!', WrongPassword.new),
wrongPassword('Password is not correct', WrongPassword.new),
notAllowToJoinDirectly(
'User not allow to join directly',
NotAllowToJoinDirectly.new,
Expand Down Expand Up @@ -57,7 +57,7 @@ enum AppException {
extension AppExceptionX on String {
Failure get toFailure {
final match = AppException.values.firstWhereOrNull(
(e) => e.message == this,
(e) => contains(e.message),
);

return match?.failure ?? ServerFailure();
Expand Down
23 changes: 23 additions & 0 deletions lib/types/externals/models/audio_config.dart
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import 'package:flutter/foundation.dart';

import 'package:freezed_annotation/freezed_annotation.dart';

part "audio_config.freezed.dart";
Expand All @@ -6,6 +8,7 @@ part "audio_config.g.dart";
@freezed
abstract class AudioConfig with _$AudioConfig {
const factory AudioConfig({
String? deviceId,
@Default(false) bool isLowBandwidthMode,
@Default(false) bool isAudioMuted,
@Default(true) bool echoCancellationEnabled,
Expand All @@ -16,3 +19,23 @@ abstract class AudioConfig with _$AudioConfig {
factory AudioConfig.fromJson(Map<String, Object?> json) =>
_$AudioConfigFromJson(json);
}

extension AudioConfigX on AudioConfig {
  /// Builds the `getUserMedia` constraint fragment that selects the
  /// configured audio input device.
  ///
  /// Returns an empty map when no device is configured.
  Map<String, dynamic> get configDeviceId {
    final String? id = deviceId;
    if (id == null || id.isEmpty) return <String, dynamic>{};

    // Web takes the standard W3C `deviceId` constraint; native platforms
    // use the legacy `optional`/`sourceId` form.
    return kIsWeb
        ? <String, dynamic>{
            'deviceId': {
              'exact': id,
              'ideal': id,
            },
          }
        : <String, dynamic>{
            'optional': [
              {'sourceId': id},
            ],
          };
  }
}
Loading
Loading