Description
Hi @cloudwebrtc, I am using an SFU client package whose core uses flutter_webrtc (0.12.12+hotfix+1). I am hitting a silent freeze on iOS when producing media that should be wrapped by a FrameCryptor, even though the same code works on Android.
Everything works fine on iOS without the FrameCryptor, but the freeze appears as soon as I apply it according to the documentation provided here:
https://github.com/flutter-webrtc/flutter-webrtc/blob/main/Documentation/E2EE.md
Receiving encrypted media from other users works fine on iOS; the problem only occurs when producing/sending my own video in encrypted form.
@cloudwebrtc, it would be great if you could take a look at this!
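For reference, the `frameCryptorFactory` and `keyProvider` used in the code below are created roughly along the lines of the E2EE doc above; this is only a minimal sketch with a placeholder helper name and placeholder salt/key values, not my exact setup:

```dart
// Sketch only: how the frameCryptorFactory / keyProvider used below are
// created, loosely following the E2EE doc. Salt and key values are placeholders.
import 'dart:convert';
import 'dart:typed_data';

import 'package:flutter_webrtc/flutter_webrtc.dart';

final FrameCryptorFactory frameCryptorFactory = FrameCryptorFactory.instance;
late KeyProvider keyProvider;

Future<void> initE2ee(String socketid) async {
  keyProvider = await frameCryptorFactory.createDefaultKeyProvider(
    KeyProviderOptions(
      sharedKey: true,
      ratchetSalt: Uint8List.fromList(utf8.encode('placeholder-ratchet-salt')),
      ratchetWindowSize: 16,
    ),
  );
  // The same key must be set on the receiving side for this participantId.
  await keyProvider.setKey(
    participantId: 'video-$socketid',
    index: 0,
    key: Uint8List.fromList(utf8.encode('placeholder-shared-key')),
  );
}
```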
Package code (modified)
```dart
Future _produce(ProduceArguments arguments) async {
try {
List<RtpEncodingParameters> normalizedEncodings = [];
if (arguments.encodings.isEmpty) {
normalizedEncodings = [];
} else if (arguments.encodings.isNotEmpty) {
normalizedEncodings =
arguments.encodings.map((RtpEncodingParameters encoding) {
RtpEncodingParameters normalizedEncoding =
RtpEncodingParameters(active: true);
if (encoding.active == false) {
normalizedEncoding.active = false;
}
if (encoding.dtx != null) {
normalizedEncoding.dtx = encoding.dtx;
}
if (encoding.scalabilityMode != null) {
normalizedEncoding.scalabilityMode = encoding.scalabilityMode;
}
if (encoding.scaleResolutionDownBy != null) {
normalizedEncoding.scaleResolutionDownBy =
encoding.scaleResolutionDownBy;
}
if (encoding.maxBitrate != null) {
normalizedEncoding.maxBitrate = encoding.maxBitrate;
}
if (encoding.maxFramerate != null) {
normalizedEncoding.maxFramerate = encoding.maxFramerate;
}
if (encoding.adaptivePtime != null) {
normalizedEncoding.adaptivePtime = encoding.adaptivePtime;
}
if (encoding.priority != null) {
normalizedEncoding.priority = encoding.priority;
}
if (encoding.networkPriority != null) {
normalizedEncoding.networkPriority = encoding.networkPriority;
}
return normalizedEncoding;
}).toList();
}
HandlerSendResult sendResult = await _handler.send(HandlerSendOptions(
track: arguments.track,
encodings: normalizedEncodings,
codecOptions: arguments.codecOptions,
codec: arguments.codec,
stream: arguments.stream,
));
// 👇 TEMPORARY PRODUCER BEFORE SIGNALING
final tempProducer = Producer(
id: '', // Placeholder
localId: sendResult.localId,
rtpSender: sendResult.rtpSender,
track: arguments.track,
rtpParameters: sendResult.rtpParameters,
stopTracks: arguments.stopTracks,
disableTrackOnPause: arguments.disableTrackOnPause,
zeroRtpOnPause: arguments.zeroRtpOnPause,
appData: arguments.appData,
stream: arguments.stream,
source: arguments.source,
);
// ✅ INVOKE EARLY CALLBACK FOR FrameCryptor
if (onWillProduce != null) {
try {
await onWillProduce!(tempProducer);
} catch (e, st) {
_logger.error('onWillProduce failed: $e\n$st');
rethrow; // Prevent continue if cryptor setup fails
}
}
try {
// This will fill rtpParameters's missing fields with default values.
Ortc.validateRtpParameters(sendResult.rtpParameters);
// 🔐 FRAMECRYPTOR SETUP (before DTLS)
final sender = sendResult.rtpSender;
if (sender == null) {
print('RTCRtpSender is null. Cannot create FrameCryptor.');
throw Exception('RTCRtpSender is null. Cannot create FrameCryptor.');
}
// ✅ Wait before cryptor setup (allow RTCRtpSender stability)
await Future.delayed(const Duration(milliseconds: 300));
// ✅ Use Future.delayed instead of Future.microtask on iOS
try {
print("🛠 [iOS] Stabilizing... delaying frameCryptor creation");
// Create cryptor slightly delayed to give iOS main thread time
final cryptor = await Future.delayed(Duration.zero, () {
return arguments.frameCryptorFactory!.createFrameCryptorForRtpSender(
participantId: arguments.participantId!,
sender: sender,
algorithm: Algorithm.kAesGcm,
keyProvider: arguments.keyProvider!,
);
});
await cryptor.setEnabled(true);
await cryptor.setKeyIndex(0);
cryptor.onFrameCryptorStateChanged = (pid, state) {
print("🛡️ FrameCryptor[$pid] state changed: $state");
};
print("✅ FrameCryptor setup complete for ${arguments.participantId}");
} catch (e) {
print("❌ FrameCryptor setup failed: $e");
rethrow;
}
String id = await safeEmitAsFuture('produce', {
'kind': arguments.track.kind,
'rtpParameters': sendResult.rtpParameters,
'appData': arguments.appData,
});
final producer = Producer(
id: id,
localId: sendResult.localId,
rtpSender: sendResult.rtpSender,
track: arguments.track,
rtpParameters: sendResult.rtpParameters,
stopTracks: arguments.stopTracks,
disableTrackOnPause: arguments.disableTrackOnPause,
zeroRtpOnPause: arguments.zeroRtpOnPause,
appData: arguments.appData,
stream: arguments.stream,
source: arguments.source,
);
_producers[producer.id] = producer;
_handleProducer(producer);
// Emit observer event.
_observer.safeEmit('newProducer', {
'producer': producer,
});
producerCallback?.call(producer);
// ✅ Return the producer to the caller
arguments.completer.complete(producer);
} catch (error) {
_handler.stopSending(sendResult.localId);
arguments.completer.completeError(error);
rethrow;
}
} catch (error) {
// This catch is needed to stop the given track if the command above
// failed due to closed Transport.
if (arguments.stopTracks) {
try {
arguments.track.stop();
} catch (error2) {}
}
rethrow;
}
}
```
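Since the modified `_produce` dereferences `arguments.frameCryptorFactory!`, `arguments.keyProvider!` and `arguments.participantId!`, my understanding is that `produce()` now has to be called with those extra fields as well; a sketch of that call under this assumption (the extra named parameters are assumed, mirroring the fields read above):

```dart
// Sketch only: assumed call shape for the modified produce(), passing the
// FrameCryptor-related fields that _produce dereferences with `!`.
final producer = await producerTransport.produce(
  stream: localStream,
  track: localtrack,
  source: 'camera',
  appData: {'source': 'camera'},
  frameCryptorFactory: frameCryptorFactory, // assumed extra parameter
  keyProvider: keyProvider,                 // assumed extra parameter
  participantId: 'video-$socketid',         // assumed extra parameter
  encodings: [
    RtpEncodingParameters(maxBitrate: 500000, scalabilityMode: 'L1T1'),
  ],
);
```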
My code below follows the package logic.
My flow is: connectSendTransportVideo -> transport 'connect' -> producer callback (rough call order sketched below).
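In other words, the call order on my side is roughly as follows (`localStream` / `localVideoTrack` are placeholders for my camera stream and track):

```dart
// Rough call order (sketch): create the send transport first, then produce
// the camera track. The transport 'connect' and 'produce' events fire during
// produce(), and _producerCallback runs once the Producer exists.
await createSendTransport();
await connectSendTransportVideo(localStream, localVideoTrack);
```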
```dart
void _producerCallback(Producer producer) async {
final sender = producer.rtpSender;
final stats = await sender?.getStats();
print("Sender stats: $stats");
if (sender == null) {
Logger().e("RTP Sender is null");
return;
}
try {
final frameCryptor = await frameCryptorFactory.createFrameCryptorForRtpSender(
participantId: 'video-$socketid',
sender: sender,
algorithm: Algorithm.kAesGcm,
keyProvider: keyProvider,
);
frameCryptor.onFrameCryptorStateChanged = (participantId, state) =>
Logger().d('Producer onFrameCryptorStateChanged $participantId $state');
await frameCryptor.setEnabled(true);
await frameCryptor.setKeyIndex(0);
Logger().f("🔐 E2EE enabled on sender for track: ${producer.track.id} with video-$socketid");
} catch (e) {
Logger().e("❌ Failed to create FrameCryptor: $e");
}
producer.on('trackended', () {
print("Producer track ended");
// Handle track ending (e.g., close the video).
});
producer.on('transportclose', () {
print("Transport closed for this producer");
// Handle transport closing (e.g., clean up resources).
});
}
Future createSendTransport() async {
print("Entering Into CreateSendTransport ---------");
// Emit event to create WebRTC transport on the server for sending media
socket.emitWithAck('createWebRtcTransport', {'consumer': false},
ack: (data) {
print("CalllBack Complish");
if (data['error'] != null) {
print('Error creating WebRTC transport: ${data['error']}');
return;
}
var transportInfo = data['params'];
producerTransport = device.createSendTransportFromMap(
transportInfo,
producerCallback: _producerCallback,
);
producerTransport.on('connect', (Map data) {
print("Transport connect event triggered");
var callback = data['callback'];
var errback = data['errback'];
try {
// Emit transport connect event without waiting for acknowledgment
socket.emit('transport-connect', {
'transportId': producerTransport.id,
'dtlsParameters': data['dtlsParameters'].toMap(),
});
print("checking the transport callback");
// Call the success callback
callback();
} catch (error) {
// Handle errors by calling the error callback
print("Error during transport connect: $error");
errback(error);
}
});
producerTransport.on('produce', (Map data) {
print("Produce event received: $data");
var callback = data['callback'];
var errback = data['errback'];
try {
// Extract necessary fields from data
var kind = data['kind'];
var rtpParameters = data['rtpParameters'];
var appData = data['appData'];
// Emit transport produce event with acknowledgment
socket.emitWithAck('transport-produce', {
'transportId': producerTransport.id,
'kind': kind,
'rtpParameters': rtpParameters.toMap(),
'appData':
appData != null ? Map<String, dynamic>.from(appData) : null,
}, ack: (response) {
// Handle the acknowledgment response
print("transport-produce successful: ${response['id']}");
final storeid = {
'id': response['id'],
'source': response['source']
};
storeidandsource.add(storeid);
// Call the success callback with the producer ID
callback(response['id']);
});
} catch (error) {
// Handle errors by calling the error callback
print("Error during transport produce: $error");
errback(error);
}
});
});
}
Future connectSendTransportVideo(
MediaStream localStream, MediaStreamTrack localtrack) async {
print("enter into connectSendTransportVideo");
print("Checking localStream ${localStream}");
print("Checking localtrack ${localtrack}");
print("Checking my Producer Transport ${producerTransport}");
// await _prepareVideoFrameCryptor(localStream);
try {
final producer = await producerTransport.produce(
stream: localStream,
track: localtrack,
source: 'camera',
appData: {
'source': 'camera',
},
codecOptions: ProducerCodecOptions(
videoGoogleStartBitrate: 1000,
),
encodings: [
RtpEncodingParameters(
maxBitrate: 500000,
scalabilityMode: 'L1T1',
),
],
);
print("connectSendTransportVideo: ${producerTransport.id}");
producerTransport.on('trackended', () {
print("video track ended");
// Close video track
});
producerTransport.on('transportclose', () {
print('video transport ended');
// Close video track
});
} catch (e) {
print('Error during video production: $e');
}
}
```