From 573e887558465d92b67761abf031b8227ec2c926 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Tue, 22 Jul 2025 03:43:09 +0900 Subject: [PATCH 1/6] Organize imports --- lib/src/core/engine.dart | 14 +++++++++++--- lib/src/data_stream/stream_writer.dart | 6 +++--- lib/src/livekit.dart | 2 +- lib/src/track/audio_visualizer.dart | 2 +- lib/src/track/audio_visualizer_web.dart | 2 +- lib/src/track/local/video.dart | 5 ++--- lib/src/types/participant_permissions.dart | 3 +-- 7 files changed, 20 insertions(+), 14 deletions(-) diff --git a/lib/src/core/engine.dart b/lib/src/core/engine.dart index 331bf3299..25b371af8 100644 --- a/lib/src/core/engine.dart +++ b/lib/src/core/engine.dart @@ -16,23 +16,31 @@ import 'dart:async'; -import 'package:flutter/foundation.dart'; - import 'package:collection/collection.dart'; import 'package:connectivity_plus/connectivity_plus.dart'; +import 'package:flutter/foundation.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc; import 'package:meta/meta.dart'; -import 'package:livekit_client/livekit_client.dart'; +import '../events.dart'; +import '../exceptions.dart'; import '../extensions.dart'; import '../internal/events.dart'; import '../internal/types.dart'; +import '../logger.dart' show logger; +import '../managers/event.dart'; +import '../options.dart'; import '../proto/livekit_models.pb.dart' as lk_models; import '../proto/livekit_rtc.pb.dart' as lk_rtc; +import '../publication/local.dart'; import '../support/disposable.dart'; +import '../support/platform.dart' show lkPlatformIsTest, lkPlatformIs, PlatformType; import '../support/region_url_provider.dart'; import '../support/websocket.dart'; +import '../track/local/local.dart'; +import '../track/local/video.dart'; import '../types/internal.dart'; +import '../types/other.dart'; import 'signal_client.dart'; import 'transport.dart'; diff --git a/lib/src/data_stream/stream_writer.dart b/lib/src/data_stream/stream_writer.dart index c60e37e89..1d2832a80 100644 --- a/lib/src/data_stream/stream_writer.dart +++ b/lib/src/data_stream/stream_writer.dart @@ -3,11 +3,11 @@ import 'dart:typed_data'; import 'package:fixnum/fixnum.dart'; -import 'package:livekit_client/src/core/engine.dart'; -import 'package:livekit_client/src/types/other.dart'; -import 'package:livekit_client/src/utils.dart'; +import '../core/engine.dart'; import '../proto/livekit_models.pb.dart' as lk_models; import '../types/data_stream.dart'; +import '../types/other.dart'; +import '../utils.dart'; class BaseStreamWriter { final StreamWriter writableStream; diff --git a/lib/src/livekit.dart b/lib/src/livekit.dart index acfd4fa50..428240184 100644 --- a/lib/src/livekit.dart +++ b/lib/src/livekit.dart @@ -14,8 +14,8 @@ import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc; -import 'package:livekit_client/livekit_client.dart'; import 'support/native.dart'; +import 'support/platform.dart' show lkPlatformIsMobile; /// Main entry point to connect to a room. 
 /// {@category Room}
diff --git a/lib/src/track/audio_visualizer.dart b/lib/src/track/audio_visualizer.dart
index 13718e771..9f4982ae0 100644
--- a/lib/src/track/audio_visualizer.dart
+++ b/lib/src/track/audio_visualizer.dart
@@ -1,6 +1,6 @@
 import 'package:uuid/uuid.dart' as uuid;
 
-import 'package:livekit_client/src/support/disposable.dart';
+import '../support/disposable.dart';
 import '../events.dart' show AudioVisualizerEvent;
 import '../managers/event.dart' show EventsEmittable;
 import 'local/local.dart' show AudioTrack;
diff --git a/lib/src/track/audio_visualizer_web.dart b/lib/src/track/audio_visualizer_web.dart
index c57c64ef8..fc34140e6 100644
--- a/lib/src/track/audio_visualizer_web.dart
+++ b/lib/src/track/audio_visualizer_web.dart
@@ -5,7 +5,7 @@ import 'dart:typed_data';
 
 import 'package:flutter_webrtc/flutter_webrtc.dart';
 
-import 'package:livekit_client/src/events.dart' show AudioVisualizerEvent;
+import '../events.dart' show AudioVisualizerEvent;
 import '../logger.dart' show logger;
 import 'audio_visualizer.dart';
 import 'local/local.dart' show AudioTrack;
diff --git a/lib/src/track/local/video.dart b/lib/src/track/local/video.dart
index 44d3afc94..f2da07c3e 100644
--- a/lib/src/track/local/video.dart
+++ b/lib/src/track/local/video.dart
@@ -12,14 +12,13 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import 'package:flutter/foundation.dart';
-
 import 'package:collection/collection.dart';
+import 'package:flutter/foundation.dart';
 import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc;
 
-import 'package:livekit_client/src/extensions.dart';
 import '../../events.dart';
 import '../../exceptions.dart';
+import '../../extensions.dart';
 import '../../logger.dart';
 import '../../options.dart';
 import '../../proto/livekit_models.pb.dart' as lk_models;
diff --git a/lib/src/types/participant_permissions.dart b/lib/src/types/participant_permissions.dart
index 36d6ce41c..22ac7faab 100644
--- a/lib/src/types/participant_permissions.dart
+++ b/lib/src/types/participant_permissions.dart
@@ -14,7 +14,6 @@
 
 import 'package:meta/meta.dart';
 
-import 'package:livekit_client/src/proto/livekit_models.pbenum.dart';
 import '../proto/livekit_models.pb.dart' as lk_models;
 
 @immutable
@@ -24,7 +23,7 @@ class ParticipantPermissions {
   final bool canPublishData;
   final bool hidden;
   final bool canUpdateMetadata;
-  final List<TrackSource> canPublishSources;
+  final List<lk_models.TrackSource> canPublishSources;
 
   const ParticipantPermissions({
     this.canSubscribe = false,

From 4655b0be782df237769adb26ceb08b3cbcc5a330 Mon Sep 17 00:00:00 2001
From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com>
Date: Tue, 22 Jul 2025 03:47:33 +0900
Subject: [PATCH 2/6] Update options.dart

---
 lib/src/options.dart | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/lib/src/options.dart b/lib/src/options.dart
index 7d896ebf3..1f64401d1 100644
--- a/lib/src/options.dart
+++ b/lib/src/options.dart
@@ -14,12 +14,9 @@
 
 import 'constants.dart';
 import 'e2ee/options.dart';
-import 'proto/livekit_models.pb.dart';
-import 'publication/remote.dart';
 import 'track/local/audio.dart';
 import 'track/local/video.dart';
 import 'track/options.dart';
-import 'track/track.dart';
 import 'types/other.dart';
 import 'types/video_encoding.dart';
 import 'types/video_parameters.dart';

From 808c18bc050310a5e545a1fcab68161f8fc8a215 Mon Sep 17 00:00:00 2001
From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com>
Date: Tue, 22 Jul 2025 20:43:51 +0900
Subject: [PATCH 3/6] Impl

---
 ios/Classes/AudioRenderer.swift
| 1 + macos/Classes/AudioRenderer.swift | 1 + shared_swift/AudioRenderer.swift | 68 +++++ shared_swift/LiveKitPlugin.swift | 417 ++++++++++++++++++------------ 4 files changed, 318 insertions(+), 169 deletions(-) create mode 120000 ios/Classes/AudioRenderer.swift create mode 120000 macos/Classes/AudioRenderer.swift create mode 100644 shared_swift/AudioRenderer.swift diff --git a/ios/Classes/AudioRenderer.swift b/ios/Classes/AudioRenderer.swift new file mode 120000 index 000000000..a1bce0cda --- /dev/null +++ b/ios/Classes/AudioRenderer.swift @@ -0,0 +1 @@ +../../shared_swift/AudioRenderer.swift \ No newline at end of file diff --git a/macos/Classes/AudioRenderer.swift b/macos/Classes/AudioRenderer.swift new file mode 120000 index 000000000..a1bce0cda --- /dev/null +++ b/macos/Classes/AudioRenderer.swift @@ -0,0 +1 @@ +../../shared_swift/AudioRenderer.swift \ No newline at end of file diff --git a/shared_swift/AudioRenderer.swift b/shared_swift/AudioRenderer.swift new file mode 100644 index 000000000..71d8b479e --- /dev/null +++ b/shared_swift/AudioRenderer.swift @@ -0,0 +1,68 @@ +/* + * Copyright 2024 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import AVFoundation +import WebRTC + +#if os(macOS) + import Cocoa + import FlutterMacOS +#else + import Flutter + import UIKit +#endif + +public class AudioRenderer: NSObject { + private var eventSink: FlutterEventSink? + private var channel: FlutterEventChannel? + + private weak var _track: AudioTrack? + + public init(track: AudioTrack?, + binaryMessenger: FlutterBinaryMessenger, + rendererId: String) + { + _track = track + super.init() + _track?.add(audioRenderer: self) + + let channelName = "io.livekit.audio.renderer/eventchannel-" + rendererId + channel = FlutterEventChannel(name: channelName, binaryMessenger: binaryMessenger) + channel?.setStreamHandler(self) + } + + deinit { + _track?.remove(audioRenderer: self) + } +} + +extension AudioRenderer: FlutterStreamHandler { + public func onListen(withArguments _: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? { + eventSink = events + return nil + } + + public func onCancel(withArguments _: Any?) -> FlutterError? { + eventSink = nil + return nil + } +} + +extension AudioRenderer: RTCAudioRenderer { + public func render(pcmBuffer _: AVAudioPCMBuffer) { + eventSink?("audio_renderer_event") + } +} diff --git a/shared_swift/LiveKitPlugin.swift b/shared_swift/LiveKitPlugin.swift index aed2f4478..9e6cac6fd 100644 --- a/shared_swift/LiveKitPlugin.swift +++ b/shared_swift/LiveKitPlugin.swift @@ -12,36 +12,48 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import WebRTC import flutter_webrtc +import WebRTC #if os(macOS) -import Cocoa -import FlutterMacOS + import Cocoa + import FlutterMacOS #else -import Flutter -import UIKit -import Combine + import Combine + import Flutter + import UIKit #endif +let trackIdKey = "visualizerId" +let visualizerIdKey = "visualizerId" +let rendererIdKey = "rendererId" + +class AudioProcessors { + var track: AudioTrack + var visualizers: [String: Visualizer] = [:] + var renderers: [String: AudioRenderer] = [:] + + init(track: AudioTrack) { + self.track = track + } +} + @available(iOS 13.0, *) public class LiveKitPlugin: NSObject, FlutterPlugin { - - var processors: Dictionary = [:] - var tracks: Dictionary = [:] + // TrackId: AudioProcessors + var audioProcessors: [String: AudioProcessors] = [:] var binaryMessenger: FlutterBinaryMessenger? #if os(iOS) - var cancellable = Set() + var cancellable = Set() #endif public static func register(with registrar: FlutterPluginRegistrar) { - #if os(macOS) - let messenger = registrar.messenger + let messenger = registrar.messenger #else - let messenger = registrar.messenger() + let messenger = registrar.messenger() #endif let channel = FlutterMethodChannel(name: "livekit_client", binaryMessenger: messenger) @@ -50,198 +62,265 @@ public class LiveKitPlugin: NSObject, FlutterPlugin { registrar.addMethodCallDelegate(instance, channel: channel) #if os(iOS) - BroadcastManager.shared.isBroadcastingPublisher - .sink { isBroadcasting in - channel.invokeMethod("broadcastStateChanged", arguments: isBroadcasting) - } - .store(in: &instance.cancellable) + BroadcastManager.shared.isBroadcastingPublisher + .sink { isBroadcasting in + channel.invokeMethod("broadcastStateChanged", arguments: isBroadcasting) + } + .store(in: &instance.cancellable) #endif } #if !os(macOS) - // https://developer.apple.com/documentation/avfaudio/avaudiosession/category - let categoryMap: [String: AVAudioSession.Category] = [ - "ambient": .ambient, - "multiRoute": .multiRoute, - "playAndRecord": .playAndRecord, - "playback": .playback, - "record": .record, - "soloAmbient": .soloAmbient - ] - - // https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions - let categoryOptionsMap: [String: AVAudioSession.CategoryOptions] = [ - "mixWithOthers": .mixWithOthers, - "duckOthers": .duckOthers, - "interruptSpokenAudioAndMixWithOthers": .interruptSpokenAudioAndMixWithOthers, - "allowBluetooth": .allowBluetooth, - "allowBluetoothA2DP": .allowBluetoothA2DP, - "allowAirPlay": .allowAirPlay, - "defaultToSpeaker": .defaultToSpeaker - // @available(iOS 14.5, *) - // "overrideMutedMicrophoneInterruption": .overrideMutedMicrophoneInterruption, - ] - - // https://developer.apple.com/documentation/avfaudio/avaudiosession/mode - let modeMap: [String: AVAudioSession.Mode] = [ - "default": .default, - "gameChat": .gameChat, - "measurement": .measurement, - "moviePlayback": .moviePlayback, - "spokenAudio": .spokenAudio, - "videoChat": .videoChat, - "videoRecording": .videoRecording, - "voiceChat": .voiceChat, - "voicePrompt": .voicePrompt - ] - - private func categoryOptions(fromFlutter options: [String]) -> AVAudioSession.CategoryOptions { - var result: AVAudioSession.CategoryOptions = [] - for option in categoryOptionsMap { - if options.contains(option.key) { - result.insert(option.value) + // https://developer.apple.com/documentation/avfaudio/avaudiosession/category + let categoryMap: [String: AVAudioSession.Category] = [ + "ambient": .ambient, + "multiRoute": .multiRoute, + "playAndRecord": .playAndRecord, 
+ "playback": .playback, + "record": .record, + "soloAmbient": .soloAmbient, + ] + + // https://developer.apple.com/documentation/avfaudio/avaudiosession/categoryoptions + let categoryOptionsMap: [String: AVAudioSession.CategoryOptions] = [ + "mixWithOthers": .mixWithOthers, + "duckOthers": .duckOthers, + "interruptSpokenAudioAndMixWithOthers": .interruptSpokenAudioAndMixWithOthers, + "allowBluetooth": .allowBluetooth, + "allowBluetoothA2DP": .allowBluetoothA2DP, + "allowAirPlay": .allowAirPlay, + "defaultToSpeaker": .defaultToSpeaker, + // @available(iOS 14.5, *) + // "overrideMutedMicrophoneInterruption": .overrideMutedMicrophoneInterruption, + ] + + // https://developer.apple.com/documentation/avfaudio/avaudiosession/mode + let modeMap: [String: AVAudioSession.Mode] = [ + "default": .default, + "gameChat": .gameChat, + "measurement": .measurement, + "moviePlayback": .moviePlayback, + "spokenAudio": .spokenAudio, + "videoChat": .videoChat, + "videoRecording": .videoRecording, + "voiceChat": .voiceChat, + "voicePrompt": .voicePrompt, + ] + + private func categoryOptions(fromFlutter options: [String]) -> AVAudioSession.CategoryOptions { + var result: AVAudioSession.CategoryOptions = [] + for option in categoryOptionsMap { + if options.contains(option.key) { + result.insert(option.value) + } } + return result } - return result - } #endif - public func handleStartAudioVisualizer(args: [String: Any?], result: @escaping FlutterResult) { + private func audioProcessors(for trackId: String) -> AudioProcessors? { + if let existing = audioProcessors[trackId] { + return existing + } + let webrtc = FlutterWebRTCPlugin.sharedSingleton() - let trackId = args["trackId"] as? String - let visualizerId = args["visualizerId"] as? String + var audioTrack: AudioTrack? + if let track = webrtc?.localTracks![trackId] as? LocalAudioTrack { + audioTrack = LKLocalAudioTrack(name: trackId, track: track) + } else if let track = webrtc?.remoteTrack(forId: trackId) as? RTCAudioTrack { + audioTrack = LKRemoteAudioTrack(name: trackId, track: track) + } + + guard let audioTrack else { + return nil + } + + let processor = AudioProcessors(track: audioTrack) + audioProcessors[trackId] = processor + return processor + } + + public func handleStartAudioVisualizer(args: [String: Any?], result: @escaping FlutterResult) { + // Required params + let trackId = args[trackIdKey] as? String + let visualizerId = args[visualizerIdKey] as? String + + guard let trackId else { + result(FlutterError(code: trackIdKey, message: "\(trackIdKey) is required", details: nil)) + return + } + + guard let visualizerId else { + result(FlutterError(code: visualizerIdKey, message: "\(visualizerIdKey) is required", details: nil)) + return + } + + // Optional params let barCount = args["barCount"] as? Int ?? 7 let isCentered = args["isCentered"] as? Bool ?? true let smoothTransition = args["smoothTransition"] as? Bool ?? true - if visualizerId == nil { - result(FlutterError(code: "visualizerId", message: "visualizerId is required", details: nil)) + guard let processors = audioProcessors(for: trackId) else { + result(FlutterError(code: trackIdKey, message: "No such track", details: nil)) return } - if let unwrappedTrackId = trackId { - let unwrappedVisualizerId = visualizerId! - - let localTrack = webrtc?.localTracks![unwrappedTrackId] - if let audioTrack = localTrack as? 
LocalAudioTrack { - let lkLocalTrack = LKLocalAudioTrack(name: unwrappedTrackId, track: audioTrack); - let processor = Visualizer(track: lkLocalTrack, - binaryMessenger: self.binaryMessenger!, - bandCount: barCount, - isCentered: isCentered, - smoothTransition: smoothTransition, - visualizerId: unwrappedVisualizerId) - - tracks[unwrappedTrackId] = lkLocalTrack - processors[unwrappedVisualizerId] = processor - - } - - let track = webrtc?.remoteTrack(forId: unwrappedTrackId) - if let audioTrack = track as? RTCAudioTrack { - let lkRemoteTrack = LKRemoteAudioTrack(name: unwrappedTrackId, track: audioTrack); - let processor = Visualizer(track: lkRemoteTrack, - binaryMessenger: self.binaryMessenger!, - bandCount: barCount, - isCentered: isCentered, - smoothTransition: smoothTransition, - visualizerId: unwrappedVisualizerId) - tracks[unwrappedTrackId] = lkRemoteTrack - processors[unwrappedVisualizerId] = processor - } + // Already exists + if processors.visualizers[visualizerId] != nil { + result(true) + return } + let visualizer = Visualizer(track: processors.track, + binaryMessenger: binaryMessenger!, + bandCount: barCount, + isCentered: isCentered, + smoothTransition: smoothTransition, + visualizerId: visualizerId) + // Retain + processors.visualizers[visualizerId] = visualizer result(true) } public func handleStopAudioVisualizer(args: [String: Any?], result: @escaping FlutterResult) { - let trackId = args["trackId"] as? String - let visualizerId = args["visualizerId"] as? String - if let unwrappedTrackId = trackId { - for key in tracks.keys { - if key == unwrappedTrackId { - tracks.removeValue(forKey: key) - } - } + // let trackId = args["trackId"] as? String + let visualizerId = args[visualizerIdKey] as? String + + guard let visualizerId else { + result(FlutterError(code: visualizerIdKey, message: "\(visualizerIdKey) is required", details: nil)) + return } - if let unwrappedVisualizerId = visualizerId { - processors.removeValue(forKey: unwrappedVisualizerId) + + for processors in audioProcessors.values { + processors.visualizers.removeValue(forKey: visualizerId) } + result(true) } - public func handleConfigureNativeAudio(args: [String: Any?], result: @escaping FlutterResult) { + public func handleStartAudioRenderer(args: [String: Any?], result: @escaping FlutterResult) { + // Required params + let trackId = args[trackIdKey] as? String + let rendererId = args[rendererIdKey] as? String - #if os(macOS) - result(FlutterMethodNotImplemented) - #else - - let configuration = RTCAudioSessionConfiguration.webRTC() + guard let trackId else { + result(FlutterError(code: trackIdKey, message: "\(trackIdKey) is required", details: nil)) + return + } - // Category - if let string = args["appleAudioCategory"] as? String, - let category = categoryMap[string] { - configuration.category = category.rawValue - print("[LiveKit] Configuring category: ", configuration.category) + guard let rendererId else { + result(FlutterError(code: rendererIdKey, message: "\(rendererIdKey) is required", details: nil)) + return } - // CategoryOptions - if let strings = args["appleAudioCategoryOptions"] as? [String] { - configuration.categoryOptions = categoryOptions(fromFlutter: strings) - print("[LiveKit] Configuring categoryOptions: ", strings) + guard let processors = audioProcessors(for: trackId) else { + result(FlutterError(code: trackIdKey, message: "No such track", details: nil)) + return } - // Mode - if let string = args["appleAudioMode"] as? 
String, - let mode = modeMap[string] { - configuration.mode = mode.rawValue - print("[LiveKit] Configuring mode: ", configuration.mode) + // Already exists + if processors.visualizers[rendererId] != nil { + result(true) + return } - // get `RTCAudioSession` and lock - let rtcSession = RTCAudioSession.sharedInstance() - rtcSession.lockForConfiguration() + let renderer = AudioRenderer(track: processors.track, + binaryMessenger: binaryMessenger!, + rendererId: rendererId) + // Retain + processors.renderers[rendererId] = renderer - var isLocked: Bool = true - let unlock = { - guard isLocked else { - print("[LiveKit] not locked, ignoring unlock") - return - } - rtcSession.unlockForConfiguration() - isLocked = false + result(true) + } + + public func handleStopAudioRenderer(args: [String: Any?], result: @escaping FlutterResult) { + let rendererId = args[rendererIdKey] as? String + + guard let rendererId else { + result(FlutterError(code: rendererIdKey, message: "\(rendererIdKey) is required", details: nil)) + return } - // always `unlock()` when exiting scope, calling multiple times has no side-effect - defer { - unlock() + for processors in audioProcessors.values { + processors.renderers.removeValue(forKey: rendererId) } - do { - try rtcSession.setConfiguration(configuration, active: true) - // unlock here before configuring `AVAudioSession` - // unlock() - print("[LiveKit] RTCAudioSession Configure success") - - // also configure longFormAudio - // let avSession = AVAudioSession.sharedInstance() - // try avSession.setCategory(AVAudioSession.Category(rawValue: configuration.category), - // mode: AVAudioSession.Mode(rawValue: configuration.mode), - // policy: .default, - // options: configuration.categoryOptions) - // print("[LiveKit] AVAudioSession Configure success") - - // preferSpeakerOutput - if let preferSpeakerOutput = args["preferSpeakerOutput"] as? Bool { - try rtcSession.overrideOutputAudioPort(preferSpeakerOutput ? .speaker : .none) + result(true) + } + + public func handleConfigureNativeAudio(args: [String: Any?], result: @escaping FlutterResult) { + #if os(macOS) + result(FlutterMethodNotImplemented) + #else + + let configuration = RTCAudioSessionConfiguration.webRTC() + + // Category + if let string = args["appleAudioCategory"] as? String, + let category = categoryMap[string] + { + configuration.category = category.rawValue + print("[LiveKit] Configuring category: ", configuration.category) + } + + // CategoryOptions + if let strings = args["appleAudioCategoryOptions"] as? [String] { + configuration.categoryOptions = categoryOptions(fromFlutter: strings) + print("[LiveKit] Configuring categoryOptions: ", strings) + } + + // Mode + if let string = args["appleAudioMode"] as? 
String, + let mode = modeMap[string] + { + configuration.mode = mode.rawValue + print("[LiveKit] Configuring mode: ", configuration.mode) + } + + // get `RTCAudioSession` and lock + let rtcSession = RTCAudioSession.sharedInstance() + rtcSession.lockForConfiguration() + + var isLocked = true + let unlock = { + guard isLocked else { + print("[LiveKit] not locked, ignoring unlock") + return + } + rtcSession.unlockForConfiguration() + isLocked = false + } + + // always `unlock()` when exiting scope, calling multiple times has no side-effect + defer { + unlock() + } + + do { + try rtcSession.setConfiguration(configuration, active: true) + // unlock here before configuring `AVAudioSession` + // unlock() + print("[LiveKit] RTCAudioSession Configure success") + + // also configure longFormAudio + // let avSession = AVAudioSession.sharedInstance() + // try avSession.setCategory(AVAudioSession.Category(rawValue: configuration.category), + // mode: AVAudioSession.Mode(rawValue: configuration.mode), + // policy: .default, + // options: configuration.categoryOptions) + // print("[LiveKit] AVAudioSession Configure success") + + // preferSpeakerOutput + if let preferSpeakerOutput = args["preferSpeakerOutput"] as? Bool { + try rtcSession.overrideOutputAudioPort(preferSpeakerOutput ? .speaker : .none) + } + result(true) + } catch { + print("[LiveKit] Configure audio error: ", error) + result(FlutterError(code: "configure", message: error.localizedDescription, details: nil)) } - result(true) - } catch let error { - print("[LiveKit] Configure audio error: ", error) - result(FlutterError(code: "configure", message: error.localizedDescription, details: nil)) - } #endif } @@ -258,7 +337,7 @@ public class LiveKitPlugin: NSObject, FlutterPlugin { if osVersion.patchVersion != 0 { versions.append(osVersion.patchVersion) } - return versions.map({ String($0) }).joined(separator: ".") + return versions.map { String($0) }.joined(separator: ".") } public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { @@ -278,12 +357,12 @@ public class LiveKitPlugin: NSObject, FlutterPlugin { case "osVersionString": result(LiveKitPlugin.osVersionString()) #if os(iOS) - case "broadcastRequestActivation": - BroadcastManager.shared.requestActivation() - result(true) - case "broadcastRequestStop": - BroadcastManager.shared.requestStop() - result(true) + case "broadcastRequestActivation": + BroadcastManager.shared.requestActivation() + result(true) + case "broadcastRequestStop": + BroadcastManager.shared.requestStop() + result(true) #endif default: print("[LiveKit] method not found: ", call.method) From d5b84229348995eb61dfcde2d903312ace5c4df5 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Tue, 22 Jul 2025 20:51:59 +0900 Subject: [PATCH 4/6] Buffer --- shared_swift/AudioRenderer.swift | 76 +++++++++++++++++++++++++++++++- 1 file changed, 74 insertions(+), 2 deletions(-) diff --git a/shared_swift/AudioRenderer.swift b/shared_swift/AudioRenderer.swift index 71d8b479e..2d7b7ae41 100644 --- a/shared_swift/AudioRenderer.swift +++ b/shared_swift/AudioRenderer.swift @@ -62,7 +62,79 @@ extension AudioRenderer: FlutterStreamHandler { } extension AudioRenderer: RTCAudioRenderer { - public func render(pcmBuffer _: AVAudioPCMBuffer) { - eventSink?("audio_renderer_event") + public func render(pcmBuffer: AVAudioPCMBuffer) { + guard let eventSink = eventSink else { return } + + // Extract audio format information + let sampleRate = pcmBuffer.format.sampleRate + let channelCount = 
pcmBuffer.format.channelCount + let frameLength = pcmBuffer.frameLength + + // The format of the data: + // { + // "sampleRate": 48000.0, + // "channelCount": 2, + // "frameLength": 480, + // "format": "float32", // or "int16", "int32", "unknown" + // "data": [ + // [/* channel 0 audio samples */], + // [/* channel 1 audio samples */] + // ] + // } + + // Create the result dictionary to send to Flutter + var result: [String: Any] = [ + "sampleRate": sampleRate, + "channelCount": channelCount, + "frameLength": frameLength, + ] + + // Extract audio data based on the buffer format + if let floatChannelData = pcmBuffer.floatChannelData { + // Buffer contains float data + var channelsData: [[Float]] = [] + + for channel in 0 ..< Int(channelCount) { + let channelPointer = floatChannelData[channel] + let channelArray = Array(UnsafeBufferPointer(start: channelPointer, count: Int(frameLength))) + channelsData.append(channelArray) + } + + result["data"] = channelsData + result["format"] = "float32" + } else if let int16ChannelData = pcmBuffer.int16ChannelData { + // Buffer contains int16 data + var channelsData: [[Int16]] = [] + + for channel in 0 ..< Int(channelCount) { + let channelPointer = int16ChannelData[channel] + let channelArray = Array(UnsafeBufferPointer(start: channelPointer, count: Int(frameLength))) + channelsData.append(channelArray) + } + + result["data"] = channelsData + result["format"] = "int16" + } else if let int32ChannelData = pcmBuffer.int32ChannelData { + // Buffer contains int32 data + var channelsData: [[Int32]] = [] + + for channel in 0 ..< Int(channelCount) { + let channelPointer = int32ChannelData[channel] + let channelArray = Array(UnsafeBufferPointer(start: channelPointer, count: Int(frameLength))) + channelsData.append(channelArray) + } + + result["data"] = channelsData + result["format"] = "int32" + } else { + // Fallback - send minimal info if no recognizable data format + result["data"] = [] + result["format"] = "unknown" + } + + // Send the result to Flutter on the main thread + DispatchQueue.main.async { + eventSink(result) + } } } From 2440c3712f60c741b56c8afbf5b1e7a8b423322e Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Tue, 22 Jul 2025 21:03:39 +0900 Subject: [PATCH 5/6] start stop --- shared_swift/LiveKitPlugin.swift | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/shared_swift/LiveKitPlugin.swift b/shared_swift/LiveKitPlugin.swift index 9e6cac6fd..53fd115bb 100644 --- a/shared_swift/LiveKitPlugin.swift +++ b/shared_swift/LiveKitPlugin.swift @@ -354,6 +354,10 @@ public class LiveKitPlugin: NSObject, FlutterPlugin { handleStartAudioVisualizer(args: args, result: result) case "stopVisualizer": handleStopAudioVisualizer(args: args, result: result) + case "startAudioRenderer": + handleStartAudioRenderer(args: args, result: result) + case "stopAudioRenderer": + handleStopAudioRenderer(args: args, result: result) case "osVersionString": result(LiveKitPlugin.osVersionString()) #if os(iOS) From ae642445cbfbf263597fbc421a0043b2e7c8aaa4 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Tue, 12 Aug 2025 17:38:57 +0900 Subject: [PATCH 6/6] pre-connect impl --- lib/livekit_client.dart | 2 + lib/src/core/room.dart | 4 + lib/src/core/room_preconnect.dart | 26 +++ lib/src/options.dart | 9 +- lib/src/participant/local.dart | 8 + .../preconnect/pre_connect_audio_buffer.dart | 205 ++++++++++++++++++ lib/src/support/native.dart | 36 +++ pubspec.lock | 8 +- 
 shared_swift/AudioRenderer.swift | 19 +-
 shared_swift/LiveKitPlugin.swift | 11 +-
 10 files changed, 313 insertions(+), 15 deletions(-)
 create mode 100644 lib/src/core/room_preconnect.dart
 create mode 100644 lib/src/preconnect/pre_connect_audio_buffer.dart

diff --git a/lib/livekit_client.dart b/lib/livekit_client.dart
index f3415b4de..bf0f1915a 100644
--- a/lib/livekit_client.dart
+++ b/lib/livekit_client.dart
@@ -17,6 +17,7 @@ library livekit_client;
 
 export 'src/constants.dart';
 export 'src/core/room.dart';
+export 'src/core/room_preconnect.dart';
 export 'src/data_stream/stream_reader.dart';
 export 'src/data_stream/stream_writer.dart';
 export 'src/e2ee/e2ee_manager.dart';
@@ -34,6 +35,7 @@ export 'src/options.dart';
 export 'src/participant/local.dart';
 export 'src/participant/participant.dart';
 export 'src/participant/remote.dart';
+export 'src/preconnect/pre_connect_audio_buffer.dart';
 export 'src/publication/local.dart';
 export 'src/publication/remote.dart';
 export 'src/publication/track_publication.dart';
diff --git a/lib/src/core/room.dart b/lib/src/core/room.dart
index 58027f909..f6e36927f 100644
--- a/lib/src/core/room.dart
+++ b/lib/src/core/room.dart
@@ -34,6 +34,7 @@ import '../options.dart';
 import '../participant/local.dart';
 import '../participant/participant.dart';
 import '../participant/remote.dart';
+import '../preconnect/pre_connect_audio_buffer.dart';
 import '../proto/livekit_models.pb.dart' as lk_models;
 import '../proto/livekit_rtc.pb.dart' as lk_rtc;
 import '../support/disposable.dart';
@@ -135,6 +136,9 @@ class Room extends DisposableChangeNotifier with EventsEmittable<RoomEvent> {
   final Map _textStreamHandlers = {};
 
+  @internal
+  late final preConnectAudioBuffer = PreConnectAudioBuffer(this);
+
   // for testing
   @internal
   Map get rpcHandlers => _rpcHandlers;
diff --git a/lib/src/core/room_preconnect.dart b/lib/src/core/room_preconnect.dart
new file mode 100644
index 000000000..681df9db2
--- /dev/null
+++ b/lib/src/core/room_preconnect.dart
@@ -0,0 +1,26 @@
+import 'dart:async';
+
+import '../logger.dart';
+import '../preconnect/pre_connect_audio_buffer.dart';
+import 'room.dart';
+
+extension RoomPreConnect on Room {
+  /// Wrap an async operation while a pre-connect audio buffer records.
+  /// Stops and flushes on error.
+  Future<T> withPreConnectAudio<T>(
+    Future<T> Function() operation, {
+    Duration timeout = const Duration(seconds: 10),
+    PreConnectOnError? onError,
+  }) async {
+    await preConnectAudioBuffer.startRecording(timeout: timeout);
+    try {
+      final result = await operation();
+      return result;
+    } catch (error) {
+      logger.warning('[Preconnect] operation failed with error: $error');
+      rethrow;
+    } finally {
+      await preConnectAudioBuffer.reset();
+    }
+  }
+}
diff --git a/lib/src/options.dart b/lib/src/options.dart
index 1f64401d1..d3873db47 100644
--- a/lib/src/options.dart
+++ b/lib/src/options.dart
@@ -326,12 +326,17 @@ class AudioPublishOptions extends PublishOptions {
   /// max audio bitrate
   final int audioBitrate;
 
+  /// Mark this audio as originating from a pre-connect buffer.
+  /// Used to populate protobuf audioFeatures (TF_PRECONNECT_BUFFER).
+  final bool preConnect;
+
   const AudioPublishOptions({
     super.name,
     super.stream,
     this.dtx = true,
     this.red = true,
     this.audioBitrate = AudioPreset.music,
+    this.preConnect = false,
   });
 
   AudioPublishOptions copyWith({
@@ -340,6 +345,7 @@
     String? name,
     String? stream,
     bool? red,
+    bool? preConnect,
   }) =>
       AudioPublishOptions(
         dtx: dtx ??
this.dtx, @@ -347,11 +353,12 @@ class AudioPublishOptions extends PublishOptions { name: name ?? this.name, stream: stream ?? this.stream, red: red ?? this.red, + preConnect: preConnect ?? this.preConnect, ); @override String toString() => - '${runtimeType}(dtx: ${dtx}, audioBitrate: ${audioBitrate}, red: ${red})'; + '${runtimeType}(dtx: ${dtx}, audioBitrate: ${audioBitrate}, red: ${red}, preConnect: ${preConnect})'; } final backupCodecs = ['vp8', 'h264']; diff --git a/lib/src/participant/local.dart b/lib/src/participant/local.dart index 823513819..25dd1f3b5 100644 --- a/lib/src/participant/local.dart +++ b/lib/src/participant/local.dart @@ -127,6 +127,14 @@ class LocalParticipant extends Participant { encryption: room.roomOptions.lkEncryptionType, ); + // Populate audio features (e.g., TF_NO_DTX, TF_PRECONNECT_BUFFER) + req.audioFeatures.addAll([ + if (!publishOptions.dtx) + lk_models.AudioTrackFeature.TF_NO_DTX, + if (publishOptions.preConnect) + lk_models.AudioTrackFeature.TF_PRECONNECT_BUFFER, + ]); + Future negotiate() async { track.transceiver = await room.engine .createTransceiverRTCRtpSender(track, publishOptions!, encodings); diff --git a/lib/src/preconnect/pre_connect_audio_buffer.dart b/lib/src/preconnect/pre_connect_audio_buffer.dart new file mode 100644 index 000000000..3b3ba91b5 --- /dev/null +++ b/lib/src/preconnect/pre_connect_audio_buffer.dart @@ -0,0 +1,205 @@ +// Copyright 2025 LiveKit, Inc. +// Lightweight pre-connect audio buffer (scaffold). Captures bytes externally +// and uploads via byte stream once an agent is ready. + +import 'dart:async'; +import 'dart:developer'; +import 'dart:typed_data'; + +import 'package:flutter/services.dart'; +import 'package:livekit_client/livekit_client.dart'; +import 'package:uuid/uuid.dart'; + +import '../support/native.dart'; + +typedef PreConnectOnError = void Function(Object error); + +class AudioFrame { + final List data; + final int sampleRate; + final int channelCount; + final int frameLength; + final String format; + + AudioFrame({ + required this.data, + required this.sampleRate, + required this.channelCount, + required this.frameLength, + required this.format, + }); + + factory AudioFrame.fromMap(Map map) => AudioFrame( + data: (map['data'] as List) + .map((channel) => (channel as List).map((e) => e as int).toList() as Int16List) + .toList(), + sampleRate: (map['sampleRate'] as int), + channelCount: (map['channelCount'] as int), + frameLength: (map['frameLength'] as int), + format: map['format'] as String, + ); +} + +class PreConnectAudioBuffer { + static const String dataTopic = 'lk.agent.pre-connect-audio-buffer'; + + static const int defaultMaxSize = 10 * 1024 * 1024; // 10MB + static const int defaultSampleRate = 24000; // Hz + + // Reference to the room + final Room _room; + + // Internal states + bool _isRecording = false; + bool _isSent = false; + String? _rendererId; + + LocalAudioTrack? _localTrack; + EventChannel? _eventChannel; + StreamSubscription? _streamSubscription; + + final PreConnectOnError? _onError; + final int _sampleRate; + + final BytesBuilder _bytes = BytesBuilder(copy: false); + Timer? _timeoutTimer; + CancelListenFunc? _participantStateListener; + CancelListenFunc? _remoteSubscribedListener; + + PreConnectAudioBuffer( + this._room, { + PreConnectOnError? 
onError, + int sampleRate = defaultSampleRate, + }) : _onError = onError, + _sampleRate = sampleRate; + + // Getters + bool get isRecording => _isRecording; + int get bufferedSize => _bytes.length; + + Future startRecording({ + Duration timeout = const Duration(seconds: 10), + }) async { + if (_isRecording) { + logger.warning('Already recording'); + return; + } + _isRecording = true; + + _localTrack = await LocalAudioTrack.create(); + print('localTrack: ${_localTrack!.mediaStreamTrack.id}'); + + final rendererId = Uuid().v4(); + logger.info('Starting audio renderer with rendererId: $rendererId'); + + final result = await Native.startAudioRenderer( + trackId: _localTrack!.mediaStreamTrack.id!, + rendererId: rendererId, + ); + + _rendererId = rendererId; + + logger.info('startAudioRenderer result: $result'); + + _eventChannel = EventChannel('io.livekit.audio.renderer/channel-$rendererId'); + _streamSubscription = _eventChannel?.receiveBroadcastStream().listen((event) { + try { + // logger.info('event: $event'); + // {sampleRate: 32000, format: int16, frameLength: 320, channelCount: 1} + final dataChannels = event['data'] as List; + final monoData = dataChannels[0].cast(); + _bytes.add(monoData); + log('bufferedSize: ${_bytes.length}'); + } catch (e) { + logger.warning('Error parsing event: $e'); + } + }); + + // Listen for agent readiness; when active, attempt to send buffer once. + _participantStateListener = _room.events.on((event) async { + if (event.participant.kind == ParticipantKind.AGENT && event.state == ParticipantState.active) { + logger.info('Agent is active: ${event.participant.identity}'); + try { + await sendAudioData(agents: [event.participant.identity]); + } catch (e) { + _onError?.call(e); + } finally { + await reset(); + } + } + }); + + _remoteSubscribedListener = _room.events.on((event) async { + logger.info('Remote track subscribed: ${event.trackSid}'); + await stopRecording(); + }); + } + + Future stopRecording() async { + if (!_isRecording) { + logger.warning('Not recording'); + return; + } + _isRecording = false; + + // Cancel the stream subscription. + await _streamSubscription?.cancel(); + _streamSubscription = null; + + // Dispose the event channel. 
+ _eventChannel = null; + + final rendererId = _rendererId; + if (rendererId == null) { + logger.warning('No rendererId'); + return; + } + + await Native.stopAudioRenderer( + rendererId: rendererId, + ); + + _rendererId = null; + } + + // Clean-up & reset for re-use + Future reset() async { + await stopRecording(); + _timeoutTimer?.cancel(); + _participantStateListener?.call(); + _participantStateListener = null; + _remoteSubscribedListener?.call(); + _remoteSubscribedListener = null; + _bytes.clear(); + _localTrack = null; + } + + Future sendAudioData({ + required List agents, + String topic = dataTopic, + }) async { + if (_isSent) return; + if (agents.isEmpty) return; + + final data = _bytes.takeBytes(); + if (data.length <= 1024) { + throw StateError('Audio data too small to send (${data.length} bytes)'); + } + _isSent = true; + + final streamOptions = StreamBytesOptions( + topic: topic, + attributes: { + 'sampleRate': '$_sampleRate', + 'channels': '1', + 'trackId': _localTrack!.mediaStreamTrack.id!, + }, + destinationIdentities: agents, + ); + + final writer = await _room.localParticipant!.streamBytes(streamOptions); + await writer.write(data); + await writer.close(); + logger.info('[preconnect] sent ${(data.length / 1024).toStringAsFixed(1)}KB of audio to ${agents.length} agent(s)'); + } +} diff --git a/lib/src/support/native.dart b/lib/src/support/native.dart index c40400885..f7384e3cd 100644 --- a/lib/src/support/native.dart +++ b/lib/src/support/native.dart @@ -91,6 +91,42 @@ class Native { } } + @internal + static Future startAudioRenderer({ + required String trackId, + required String rendererId, + }) async { + try { + final result = await channel.invokeMethod( + 'startAudioRenderer', + { + 'trackId': trackId, + 'rendererId': rendererId, + }, + ); + return result == true; + } catch (error) { + logger.warning('startAudioRenderer did throw $error'); + return false; + } + } + + @internal + static Future stopAudioRenderer({ + required String rendererId, + }) async { + try { + await channel.invokeMethod( + 'stopAudioRenderer', + { + 'rendererId': rendererId, + }, + ); + } catch (error) { + logger.warning('stopAudioRenderer did throw $error'); + } + } + /// Returns OS's version as a string /// Currently only for iOS, macOS @internal diff --git a/pubspec.lock b/pubspec.lock index 858715d7c..4652acd5c 100644 --- a/pubspec.lock +++ b/pubspec.lock @@ -141,10 +141,10 @@ packages: dependency: "direct main" description: name: dart_webrtc - sha256: "5b76fd85ac95d6f5dee3e7d7de8d4b51bfbec1dc73804647c6aebb52d6297116" + sha256: a2ae542cdadc21359022adedc26138fa3487cc3b3547c24ff4f556681869e28c url: "https://pub.dev" source: hosted - version: "1.5.3+hotfix.2" + version: "1.5.3+hotfix.4" dbus: dependency: transitive description: @@ -220,10 +220,10 @@ packages: dependency: "direct main" description: name: flutter_webrtc - sha256: dd47ca103b5b6217771e6277882674276d9621bbf9eb23da3c03898b507844e3 + sha256: "69095ba39b83da3de48286dfc0769aa8e9f10491f70058dc8d8ecc960ef7a260" url: "https://pub.dev" source: hosted - version: "0.14.1" + version: "1.0.0" glob: dependency: transitive description: diff --git a/shared_swift/AudioRenderer.swift b/shared_swift/AudioRenderer.swift index 2d7b7ae41..d550542eb 100644 --- a/shared_swift/AudioRenderer.swift +++ b/shared_swift/AudioRenderer.swift @@ -30,22 +30,27 @@ public class AudioRenderer: NSObject { private var channel: FlutterEventChannel? private weak var _track: AudioTrack? 
- - public init(track: AudioTrack?, + public let rendererId: String + public init(track: AudioTrack, binaryMessenger: FlutterBinaryMessenger, rendererId: String) { _track = track + self.rendererId = rendererId super.init() _track?.add(audioRenderer: self) - let channelName = "io.livekit.audio.renderer/eventchannel-" + rendererId + let channelName = "io.livekit.audio.renderer/channel-" + rendererId channel = FlutterEventChannel(name: channelName, binaryMessenger: binaryMessenger) channel?.setStreamHandler(self) } + func detach() { + _track?.remove(audioRenderer: self) + } + deinit { - _track?.remove(audioRenderer: self) + detach() } } @@ -84,9 +89,9 @@ extension AudioRenderer: RTCAudioRenderer { // Create the result dictionary to send to Flutter var result: [String: Any] = [ - "sampleRate": sampleRate, - "channelCount": channelCount, - "frameLength": frameLength, + "sampleRate": UInt(sampleRate), + "channelCount": UInt(channelCount), + "frameLength": UInt(frameLength), ] // Extract audio data based on the buffer format diff --git a/shared_swift/LiveKitPlugin.swift b/shared_swift/LiveKitPlugin.swift index 53fd115bb..d2719a7fe 100644 --- a/shared_swift/LiveKitPlugin.swift +++ b/shared_swift/LiveKitPlugin.swift @@ -24,7 +24,7 @@ import WebRTC import UIKit #endif -let trackIdKey = "visualizerId" +let trackIdKey = "trackId" let visualizerIdKey = "visualizerId" let rendererIdKey = "rendererId" @@ -221,7 +221,7 @@ public class LiveKitPlugin: NSObject, FlutterPlugin { } // Already exists - if processors.visualizers[rendererId] != nil { + if processors.renderers[rendererId] != nil { result(true) return } @@ -232,6 +232,8 @@ public class LiveKitPlugin: NSObject, FlutterPlugin { // Retain processors.renderers[rendererId] = renderer + AudioManager.sharedInstance().startLocalRecording() + result(true) } @@ -244,7 +246,10 @@ public class LiveKitPlugin: NSObject, FlutterPlugin { } for processors in audioProcessors.values { - processors.renderers.removeValue(forKey: rendererId) + if let renderer = processors.renderers[rendererId] { + renderer.detach() + processors.renderers.removeValue(forKey: rendererId) + } } result(true)
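Usage sketch (reviewer addition, not part of the series): how the PATCH 6 pieces are meant to compose, in Dart. The `url` and `token` values are placeholders, and the `publishOptions` parameter name on `publishAudioTrack` is assumed from the existing publish API; only `withPreConnectAudio` and `AudioPublishOptions.preConnect` come from the patches above.

    import 'package:livekit_client/livekit_client.dart';

    Future<void> main() async {
      const url = 'wss://your-livekit-server'; // placeholder
      const token = '<token>'; // placeholder

      final room = Room();

      // withPreConnectAudio() calls PreConnectAudioBuffer.startRecording()
      // before running the closure and reset() in a finally block afterwards.
      // Mic audio captured while connecting is buffered and, once an agent
      // participant becomes active, flushed to it over a byte stream with
      // topic 'lk.agent.pre-connect-audio-buffer'.
      await room.withPreConnectAudio(() async {
        await room.connect(url, token);

        // preConnect: true adds TF_PRECONNECT_BUFFER to the track's
        // audioFeatures, letting the agent stitch the buffered audio in
        // front of the live track.
        final track = await LocalAudioTrack.create();
        await room.localParticipant?.publishAudioTrack(
          track,
          publishOptions: const AudioPublishOptions(preConnect: true),
        );
      });
    }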