[bug] Share screen not starting on iOS
Describe the bug When trying to start screen sharing the system dialog is shown, but the screen recording never starts
https://github.com/user-attachments/assets/0476f493-05bd-4b9d-9a78-666994aa50d8
To Reproduce
void _shareScreen() async {
if (defaultTargetPlatform == TargetPlatform.android) {
bool hasPermissions = await FlutterBackground.hasPermissions;
androidConfig = const FlutterBackgroundAndroidConfig(
notificationTitle: "Experty",
notificationText: "Experty sta condividendo lo schermo",
notificationImportance: AndroidNotificationImportance.Default,
notificationIcon:
AndroidResource(name: 'background_icon', defType: 'drawable'),
);
await FlutterBackground.initialize(androidConfig: androidConfig!);
await FlutterBackground.enableBackgroundExecution();
await Helper.requestCapturePermission();
} else {
ReplayKitChannel.startReplayKit();
await mediaDevices.getDisplayMedia({'deviceId': 'broadcast'});
}
await room.localParticipant!.setCameraEnabled(false);
await room.localParticipant!.setScreenShareEnabled(true,
captureScreenAudio: false,
screenShareCaptureOptions:
const ScreenShareCaptureOptions(useiOSBroadcastExtension: true));
setState(() {
isLocalScreenShared = !isLocalScreenShared;
});}
SampleHandler.swift
import ReplayKit
import OSLog
let broadcastLogger = OSLog(subsystem: "io.livekit.example.flutter", category: "Broadcast")
private enum Constants {
// the App Group ID value that the app and the broadcast extension targets are setup with. It differs for each app.
static let appGroupIdentifier = "group.expertyscreensharing"}
class SampleHandler: RPBroadcastSampleHandler {
private var clientConnection: SocketConnection?
private var uploader: SampleUploader?
private var frameCount: Int = 0
var socketFilePath: String {
let sharedContainer = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: Constants.appGroupIdentifier)
return sharedContainer?.appendingPathComponent("rtc_SSFD").path ?? ""
}
override init() {
super.init()
if let connection = SocketConnection(filePath: socketFilePath) {
clientConnection = connection
setupConnection()
uploader = SampleUploader(connection: connection)
}
os_log(.debug, log: broadcastLogger, "%{public}s", socketFilePath)
}
override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
// User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.
frameCount = 0
DarwinNotificationCenter.shared.postNotification(.broadcastStarted)
openConnection()
startReplayKit()
}
override func broadcastPaused() {
// User has requested to pause the broadcast. Samples will stop being delivered.
}
override func broadcastResumed() {
// User has requested to resume the broadcast. Samples delivery will resume.
}
override func broadcastFinished() {
// User has requested to finish the broadcast.
DarwinNotificationCenter.shared.postNotification(.broadcastStopped)
clientConnection?.close()
closeReplayKit()
}
override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
switch sampleBufferType {
case RPSampleBufferType.video:
uploader?.send(sample: sampleBuffer)
default:
break
}
}}
private extension SampleHandler {
func setupConnection() {
clientConnection?.didClose = { [weak self] error in
if #available(iOSApplicationExtension 14.0, *) {
os_log(.debug, log: broadcastLogger, "client connection did close \(String(describing: error))")
} else {
// Fallback on earlier versions
}
if let error = error {
self?.finishBroadcastWithError(error)
} else {
// the displayed failure message is more user friendly when using NSError instead of Error
let JMScreenSharingStopped = 10001
let customError = NSError(domain: RPRecordingErrorDomain, code: JMScreenSharingStopped, userInfo: [NSLocalizedDescriptionKey: "Screen sharing stopped"])
self?.finishBroadcastWithError(customError)
}
}
}
func openConnection() {
let queue = DispatchQueue(label: "broadcast.connectTimer")
let timer = DispatchSource.makeTimerSource(queue: queue)
timer.schedule(deadline: .now(), repeating: .milliseconds(100), leeway: .milliseconds(500))
timer.setEventHandler { [weak self] in
guard self?.clientConnection?.open() == true else {
return
}
timer.cancel()
}
timer.resume()
}
func startReplayKit() {
let group=UserDefaults(suiteName: Constants.appGroupIdentifier)
group!.set(false, forKey: "closeReplayKitFromNative")
group!.set(false, forKey: "closeReplayKitFromFlutter")
group!.set(true, forKey: "hasSampleBroadcast")
}
func closeReplayKit() {
let group = UserDefaults(suiteName: Constants.appGroupIdentifier)
group!.set(true, forKey:"closeReplayKitFromNative")
group!.set(false, forKey: "hasSampleBroadcast")
}
}
SampleUploader.swift
import Foundation
import ReplayKit
import OSLog
private enum Constants {
static let bufferMaxLength = 10240
}
class SampleUploader {
private static var imageContext = CIContext(options: nil)
@Atomic private var isReady = false
private var connection: SocketConnection
private var dataToSend: Data?
private var byteIndex = 0
private let serialQueue: DispatchQueue
init(connection: SocketConnection) {
self.connection = connection
self.serialQueue = DispatchQueue(label: "org.jitsi.meet.broadcast.sampleUploader")
setupConnection()
}
@discardableResult func send(sample buffer: CMSampleBuffer) -> Bool {
guard isReady else {
return false
}
isReady = false
dataToSend = prepare(sample: buffer)
byteIndex = 0
serialQueue.async { [weak self] in
self?.sendDataChunk()
}
return true
}
}
private extension SampleUploader {
func setupConnection() {
connection.didOpen = { [weak self] in
self?.isReady = true
}
connection.streamHasSpaceAvailable = { [weak self] in
self?.serialQueue.async {
if let success = self?.sendDataChunk() {
self?.isReady = !success
}
}
}
}
@discardableResult func sendDataChunk() -> Bool {
guard let dataToSend = dataToSend else {
return false
}
var bytesLeft = dataToSend.count - byteIndex
var length = bytesLeft > Constants.bufferMaxLength ? Constants.bufferMaxLength : bytesLeft
length = dataToSend[byteIndex..<(byteIndex + length)].withUnsafeBytes {
guard let ptr = $0.bindMemory(to: UInt8.self).baseAddress else {
return 0
}
return connection.writeToStream(buffer: ptr, maxLength: length)
}
if length > 0 {
byteIndex += length
bytesLeft -= length
if bytesLeft == 0 {
self.dataToSend = nil
byteIndex = 0
}
} else {
os_log(.debug, log: broadcastLogger, "writeBufferToStream failure")
}
return true
}
func prepare(sample buffer: CMSampleBuffer) -> Data? {
guard let imageBuffer = CMSampleBufferGetImageBuffer(buffer) else {
os_log(.debug, log: broadcastLogger, "image buffer not available")
return nil
}
CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
let scaleFactor = 1.0
let width = CVPixelBufferGetWidth(imageBuffer)/Int(scaleFactor)
let height = CVPixelBufferGetHeight(imageBuffer)/Int(scaleFactor)
let orientation = CMGetAttachment(buffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil)?.uintValue ?? 0
let scaleTransform = CGAffineTransform(scaleX: CGFloat(1.0/scaleFactor), y: CGFloat(1.0/scaleFactor))
let bufferData = self.jpegData(from: imageBuffer, scale: scaleTransform)
CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly)
guard let messageData = bufferData else {
os_log(.debug, log: broadcastLogger, "corrupted image buffer")
return nil
}
let httpResponse = CFHTTPMessageCreateResponse(nil, 200, nil, kCFHTTPVersion1_1).takeRetainedValue()
CFHTTPMessageSetHeaderFieldValue(httpResponse, "Content-Length" as CFString, String(messageData.count) as CFString)
CFHTTPMessageSetHeaderFieldValue(httpResponse, "Buffer-Width" as CFString, String(width) as CFString)
CFHTTPMessageSetHeaderFieldValue(httpResponse, "Buffer-Height" as CFString, String(height) as CFString)
CFHTTPMessageSetHeaderFieldValue(httpResponse, "Buffer-Orientation" as CFString, String(orientation) as CFString)
CFHTTPMessageSetBody(httpResponse, messageData as CFData)
let serializedMessage = CFHTTPMessageCopySerializedMessage(httpResponse)?.takeRetainedValue() as Data?
return serializedMessage
}
func jpegData(from buffer: CVPixelBuffer, scale scaleTransform: CGAffineTransform) -> Data? {
let image = CIImage(cvPixelBuffer: buffer).transformed(by: scaleTransform)
guard let colorSpace = image.colorSpace else {
return nil
}
let options: [CIImageRepresentationOption: Float] = [kCGImageDestinationLossyCompressionQuality as CIImageRepresentationOption: 1.0]
return SampleUploader.imageContext.jpegRepresentation(of: image, colorSpace: colorSpace, options: options)
}
}
SocketConnection.swift
import Foundation
import OSLog
class SocketConnection: NSObject {
var didOpen: (() -> Void)?
var didClose: ((Error?) -> Void)?
var streamHasSpaceAvailable: (() -> Void)?
private let filePath: String
private var socketHandle: Int32 = -1
private var address: sockaddr_un?
private var inputStream: InputStream?
private var outputStream: OutputStream?
private var networkQueue: DispatchQueue?
private var shouldKeepRunning = false
init?(filePath path: String) {
filePath = path
socketHandle = Darwin.socket(AF_UNIX, SOCK_STREAM, 0)
guard socketHandle != -1 else {
os_log(.debug, log: broadcastLogger, "failure: create socket")
return nil
}
}
func open() -> Bool {
os_log(.debug, log: broadcastLogger, "open socket connection")
guard FileManager.default.fileExists(atPath: filePath) else {
os_log(.debug, log: broadcastLogger, "failure: socket file missing")
return false
}
guard setupAddress() == true else {
return false
}
guard connectSocket() == true else {
return false
}
setupStreams()
inputStream?.open()
outputStream?.open()
return true
}
func close() {
unscheduleStreams()
inputStream?.delegate = nil
outputStream?.delegate = nil
inputStream?.close()
outputStream?.close()
inputStream = nil
outputStream = nil
}
func writeToStream(buffer: UnsafePointer<UInt8>, maxLength length: Int) -> Int {
outputStream?.write(buffer, maxLength: length) ?? 0
}
}
extension SocketConnection: StreamDelegate {
func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
switch eventCode {
case .openCompleted:
os_log(.debug, log: broadcastLogger, "client stream open completed")
if aStream == outputStream {
didOpen?()
}
case .hasBytesAvailable:
if aStream == inputStream {
var buffer: UInt8 = 0
let numberOfBytesRead = inputStream?.read(&buffer, maxLength: 1)
if numberOfBytesRead == 0 && aStream.streamStatus == .atEnd {
os_log(.debug, log: broadcastLogger, "server socket closed")
close()
notifyDidClose(error: nil)
}
}
case .hasSpaceAvailable:
if aStream == outputStream {
streamHasSpaceAvailable?()
}
case .errorOccurred:
if #available(iOSApplicationExtension 14.0, *) {
os_log(.debug, log: broadcastLogger, "client stream error occured: \(String(describing: aStream.streamError))")
} else {
// Fallback on earlier versions
}
close()
notifyDidClose(error: aStream.streamError)
default:
break
}
}
}
private extension SocketConnection {
func setupAddress() -> Bool {
var addr = sockaddr_un()
guard filePath.count < MemoryLayout.size(ofValue: addr.sun_path) else {
os_log(.debug, log: broadcastLogger, "failure: fd path is too long")
return false
}
_ = withUnsafeMutablePointer(to: &addr.sun_path.0) { ptr in
filePath.withCString {
strncpy(ptr, $0, filePath.count)
}
}
address = addr
return true
}
func connectSocket() -> Bool {
guard var addr = address else {
return false
}
let status = withUnsafePointer(to: &addr) { ptr in
ptr.withMemoryRebound(to: sockaddr.self, capacity: 1) {
Darwin.connect(socketHandle, $0, socklen_t(MemoryLayout<sockaddr_un>.size))
}
}
guard status == noErr else {
if #available(iOSApplicationExtension 14.0, *) {
os_log(.debug, log: broadcastLogger, "failure: \(status)")
} else {
// Fallback on earlier versions
}
return false
}
return true
}
func setupStreams() {
var readStream: Unmanaged<CFReadStream>?
var writeStream: Unmanaged<CFWriteStream>?
CFStreamCreatePairWithSocket(kCFAllocatorDefault, socketHandle, &readStream, &writeStream)
inputStream = readStream?.takeRetainedValue()
inputStream?.delegate = self
inputStream?.setProperty(kCFBooleanTrue, forKey: Stream.PropertyKey(kCFStreamPropertyShouldCloseNativeSocket as String))
outputStream = writeStream?.takeRetainedValue()
outputStream?.delegate = self
outputStream?.setProperty(kCFBooleanTrue, forKey: Stream.PropertyKey(kCFStreamPropertyShouldCloseNativeSocket as String))
scheduleStreams()
}
func scheduleStreams() {
shouldKeepRunning = true
networkQueue = DispatchQueue.global(qos: .userInitiated)
networkQueue?.async { [weak self] in
self?.inputStream?.schedule(in: .current, forMode: .common)
self?.outputStream?.schedule(in: .current, forMode: .common)
RunLoop.current.run()
var isRunning = false
repeat {
isRunning = self?.shouldKeepRunning ?? false && RunLoop.current.run(mode: .default, before: .distantFuture)
} while (isRunning)
}
}
func unscheduleStreams() {
networkQueue?.sync { [weak self] in
self?.inputStream?.remove(from: .current, forMode: .common)
self?.outputStream?.remove(from: .current, forMode: .common)
}
shouldKeepRunning = false
}
func notifyDidClose(error: Error?) {
if didClose != nil {
didClose?(error)
}
}
}
DarwinNotificationCenter.swift
import Foundation
enum DarwinNotification: String {
case broadcastStarted = "iOS_BroadcastStarted"
case broadcastStopped = "iOS_BroadcastStopped"
}
class DarwinNotificationCenter {
static let shared = DarwinNotificationCenter()
private let notificationCenter: CFNotificationCenter
init() {
notificationCenter = CFNotificationCenterGetDarwinNotifyCenter()
}
func postNotification(_ name: DarwinNotification) {
CFNotificationCenterPostNotification(notificationCenter, CFNotificationName(rawValue: name.rawValue as CFString), nil, nil, true)
}
}
Atomic.swift
import Foundation
@propertyWrapper
struct Atomic<Value> {
private var value: Value
private let lock = NSLock()
init(wrappedValue value: Value) {
self.value = value
}
var wrappedValue: Value {
get { load() }
set { store(newValue: newValue) }
}
func load() -> Value {
lock.lock()
defer { lock.unlock() }
return value
}
mutating func store(newValue: Value) {
lock.lock()
defer { lock.unlock() }
value = newValue
}
}
Expected behavior
Platform information
- Flutter version: [✓] Flutter (Channel stable, 3.24.2, on macOS 14.6.1 23G93 darwin-arm64, locale it-IT) [✓] Android toolchain - develop for Android devices (Android SDK version 34.0.0) [✓] Xcode - develop for iOS and macOS (Xcode 15.4) [✓] Chrome - develop for the web [✓] Android Studio (version 2023.1) [✓] IntelliJ IDEA Ultimate Edition (version 2023.3.6) [✓] VS Code (version 1.92.2) [✓] Connected device (5 available) ! Error: Browsing on the local area network for iPhone di Federico (XR). Ensure the device is unlocked and attached with a cable or associated with the same local area network as this Mac. The device must be opted into Developer Mode to connect wirelessly. (code -27) [✓] Network resources
• No issues found!
- Plugin version: 2.2.4
- Flutter target OS: (physical) iPhone 12
- Flutter target OS version: iOS 17.5.1
- Flutter console log: No errors in logs
Have you tried changing RTCAppGroupIdentifier to your group id?
https://github.com/livekit/client-sdk-flutter/blob/main/example/ios/Runner/Info.plist#L7-L8
The main app and Broadcast-Extension actually communicate through the app group identifier
Yes, I already modified the Info.plist and added the group identifier to both the app and the extension
These two lines of code need to be removed. when you call getDisplayMedia but do not use this screenVideoTrack, participant.setScreenShareEnabled will fail to enable screen sharing.
else {
ReplayKitChannel.startReplayKit();
await mediaDevices.getDisplayMedia({'deviceId': 'broadcast'});
}
This is the effective code to enable screen sharing
await room.localParticipant!.setScreenShareEnabled(true,
captureScreenAudio: false,
screenShareCaptureOptions:
const ScreenShareCaptureOptions(useiOSBroadcastExtension: true));
void _shareScreen() async {
await room.localParticipant!.setCameraEnabled(false);
if (defaultTargetPlatform == TargetPlatform.android) {
bool hasPermissions = await FlutterBackground.hasPermissions;
androidConfig = const FlutterBackgroundAndroidConfig(
notificationTitle: "Experty",
notificationText: "Experty sta condividendo lo schermo",
notificationImportance: AndroidNotificationImportance.Default,
notificationIcon:
AndroidResource(name: 'background_icon', defType: 'drawable'),
);
await FlutterBackground.initialize(androidConfig: androidConfig!);
await FlutterBackground.enableBackgroundExecution();
await Helper.requestCapturePermission();
// for android
await room.localParticipant!.setScreenShareEnabled(true);
} else if (defaultTargetPlatform == TargetPlatform.ios) {
// for iOS
await room.localParticipant!.setScreenShareEnabled(true,
captureScreenAudio: false,
screenShareCaptureOptions:
const ScreenShareCaptureOptions(useiOSBroadcastExtension: true));
}
setState(() {
isLocalScreenShared = !isLocalScreenShared;
});}
Hi @cloudwebrtc ,
Thank you for your previous response. I have implemented all the suggested changes, and while the app shows that screen sharing has started, nothing is actually being shared. Below are the details of the code and logs:
Changes:
if (lkPlatformIs(PlatformType.iOS)) {
// for iOS
await participant?.setScreenShareEnabled(true,
captureScreenAudio: false,
screenShareCaptureOptions:
const ScreenShareCaptureOptions(useiOSBroadcastExtension: true));
return;
}
if (t.isScreenShare) {
// Uncommented based on previous advice but still no sharing
// if (lkPlatformIs(PlatformType.iOS) && !_flagStartedReplayKit) {
// _flagStartedReplayKit = true;
// ReplayKitChannel.startReplayKit();
// }
screenTracks.add(ParticipantTrack(
participant: localParticipant,
type: ParticipantTrackType.kScreenShare,
));
}
Logs:
start broadcast capture
codec video/VP8
codec video/H264
codec video/rtx
codec video/H264
codec video/VP9
codec video/AV1
codec video/red
codec video/ulpfec
codec video/flexfec-03
stop broadcast capture, trackID 882E3F84-95BF-4F1E-A377-E1CE171BC089
video capturer stopped, trackID = 882E3F84-95BF-4F1E-A377-E1CE171BC089
Hello @ashifali3147 ,
I managed to make screen sharing work with the following code:
void _shareScreen() async {
if (isLocalScreenShared) {
if (defaultTargetPlatform == TargetPlatform.android) {
await FlutterBackground.initialize(androidConfig: androidConfig!);
await FlutterBackground.disableBackgroundExecution();
} else {
ReplayKitChannel.closeReplayKit();
}
await room.localParticipant!.setScreenShareEnabled(false);
await room.localParticipant!.setCameraEnabled(true);
} else {
androidConfig = const FlutterBackgroundAndroidConfig(
notificationTitle: "Experty",
notificationText: "Experty sta condividendo lo schermo",
notificationImportance: AndroidNotificationImportance.Default,
notificationIcon:
AndroidResource(name: 'background_icon', defType: 'drawable'),
);
if (defaultTargetPlatform == TargetPlatform.android) {
await FlutterBackground.hasPermissions;
await FlutterBackground.initialize(androidConfig: androidConfig!);
await Helper.requestCapturePermission();
await FlutterBackground.enableBackgroundExecution();
} else {
ReplayKitChannel.startReplayKit();
mediaDevices.getDisplayMedia({
'video': {'deviceId': 'broadcast'},
'audio': false
});
}
await room.localParticipant!.setCameraEnabled(false);
await room.localParticipant!.setScreenShareEnabled(true,
captureScreenAudio: false,
screenShareCaptureOptions:
const ScreenShareCaptureOptions(useiOSBroadcastExtension: true));
}
setState(() {
isLocalScreenShared = !isLocalScreenShared;
});
}
Hi @rimedinaif,
I tried your method but it's not working. Also getting error-
Logs:
failure: path too long
start broadcast capture
[ERROR:flutter/runtime/dart_vm_initializer.cc(41)] Unhandled Exception: MissingPluginException(No implementation found for method startReplayKit on channel io.livekit.example.flutter/replaykit-channel)
#0 MethodChannel._invokeMethod (package:flutter/src/services/platform_channel.dart:332:7)
<asynchronous suspension>
failure: path too long
start broadcast capture
codec video/VP8
codec video/H264
codec video/rtx
codec video/H264
codec video/VP9
codec video/AV1
codec video/red
codec video/ulpfec
codec video/flexfec-03
flutter: Attempting to reconnect 1/10, (0ms delay until next attempt)
flutter: Attempting to reconnect 1/10, (0ms delay until next attempt)
flutter: Attempting to reconnect 1/10, (0ms delay until next attempt)
flutter: Attempting to reconnect 1/10, (0ms delay until next attempt)
flutter: Attempting to reconnect 1/10, (0ms delay until next attempt)
flutter: Attempting to reconnect 1/10, (0ms delay until next attempt)
I also implemented the startReplayKit method. this is the relative code in my AppDelegate.swift
var replayKitChannel: FlutterMethodChannel! = nil
override func application(
_ application: UIApplication,
didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
) -> Bool {
guard let controller = window?.rootViewController as? FlutterViewController else {
return super.application(application, didFinishLaunchingWithOptions: launchOptions)
}
// Creating the method channel for ReplayKit communication
replayKitChannel = FlutterMethodChannel(name: "expertyscreensharing/replaykit-channel",
binaryMessenger: controller.binaryMessenger)
// Setting method call handler for handling Flutter calls
replayKitChannel.setMethodCallHandler { [weak self] (call: FlutterMethodCall, result: @escaping FlutterResult) in
self?.handleReplayKitFromFlutter(result: result, call: call)
}
func handleReplayKitFromFlutter(result: FlutterResult, call: FlutterMethodCall) {
switch call.method {
case "startReplayKit":
self.hasEmittedFirstSample = false
if let group = UserDefaults(suiteName: "group.expertyscreensharing") {
// Resetting flags when starting ReplayKit
group.set(false, forKey: "closeReplayKitFromNative")
group.set(false, forKey: "closeReplayKitFromFlutter")
self.observeReplayKitStateChanged()
}
result(true)
case "closeReplayKit":
if let group = UserDefaults(suiteName: "group.expertyscreensharing") {
// Setting flag to close ReplayKit from Flutter side
group.set(true, forKey: "closeReplayKitFromFlutter")
result(true)
}
default:
result(FlutterMethodNotImplemented)
}
}
// Method to observe ReplayKit state changes
func observeReplayKitStateChanged() {
if self.observeTimer != nil {
return // Timer is already active, no need to start another one
}
let group = UserDefaults(suiteName: "group.expertyscreensharing")
self.observeTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] timer in
guard let self = self, let group = group else { return }
// Checking whether to close ReplayKit from native or if a sample was broadcast
let closeReplayKitFromNative = group.bool(forKey: "closeReplayKitFromNative")
let hasSampleBroadcast = group.bool(forKey: "hasSampleBroadcast")
if closeReplayKitFromNative {
// If the broadcast should be closed natively
self.hasEmittedFirstSample = false
self.replayKitChannel.invokeMethod("closeReplayKitFromNative", arguments: true)
} else if hasSampleBroadcast, !self.hasEmittedFirstSample {
// If a sample was broadcast for the first time
self.hasEmittedFirstSample = true
self.replayKitChannel.invokeMethod("hasSampleBroadcast", arguments: true)
}
}
}
and this is the class from which I call the method:
class ReplayKitChannel {
static const String kReplayKitChannel =
'expertyscreensharing/replaykit-channel';
static const MethodChannel _replayKitChannel =
MethodChannel(kReplayKitChannel);
static void listenMethodChannel(Room room) {
_replayKitChannel.setMethodCallHandler((call) async {
if (call.method == 'closeReplayKitFromNative') {
if (!(room.localParticipant?.isScreenShareEnabled() ?? false)) {
return;
}
await room.localParticipant?.setScreenShareEnabled(false);
} else if (call.method == 'hasSampleBroadcast') {
if (room.localParticipant?.isScreenShareEnabled() ?? true) return;
await room.localParticipant?.setScreenShareEnabled(true);
}
});
}
static void startReplayKit() {
if (!Platform.isIOS) return;
_replayKitChannel.invokeMethod('startReplayKit');
}
static void closeReplayKit() {
if (!Platform.isIOS) return;
_replayKitChannel.invokeMethod('closeReplayKit');
}
}
Hi @rimedinaif
Thank you for the solution! I implemented it and ensured to set the suiteName with my app's group identifier. However, I'm still facing an issue where the ScreenShare Broadcast popup does not appear.
Do I need to configure anything else apart from updating the suiteName? For example:
Any additional Info.plist changes? Specific entitlements required for the ReplayKit extension? Any steps in configuring the iOS Broadcast Upload Extension target?
please refer to this page to enable screen share on iOS https://github.com/flutter-webrtc/flutter-webrtc/wiki/iOS-Screen-Sharing