Skip to content

Commit a6f4958

Browse files
authored
Merge pull request #200 from flutter-webrtc/feat/desktop-capture-testing
Screen sharing support
2 parents 45be971 + a92cc75 commit a6f4958

File tree

5 files changed

+408
-17
lines changed

5 files changed

+408
-17
lines changed

android/app/build.gradle

+1
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
2626

2727
android {
2828
compileSdkVersion 28
29+
ndkVersion "21.4.7075529"
2930

3031
lintOptions {
3132
disable 'InvalidPackage'

lib/src/call_sample/call_sample.dart

+55-8
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import 'package:flutter/material.dart';
22
import 'dart:core';
3+
import '../widgets/screen_select_dialog.dart';
34
import 'signaling.dart';
45
import 'package:flutter_webrtc/flutter_webrtc.dart';
56

@@ -136,15 +137,19 @@ class _CallSampleState extends State<CallSample> {
136137
title: Text("title"),
137138
content: Text("accept?"),
138139
actions: <Widget>[
139-
TextButton(
140-
child: Text("reject"),
140+
MaterialButton(
141+
child: Text(
142+
'Reject',
143+
style: TextStyle(color: Colors.red),
144+
),
141145
onPressed: () => Navigator.of(context).pop(false),
142146
),
143-
TextButton(
144-
child: Text("accept"),
145-
onPressed: () {
146-
Navigator.of(context).pop(true);
147-
},
147+
MaterialButton(
148+
child: Text(
149+
'Accept',
150+
style: TextStyle(color: Colors.green),
151+
),
152+
onPressed: () => Navigator.of(context).pop(true),
148153
),
149154
],
150155
);
@@ -201,6 +206,41 @@ class _CallSampleState extends State<CallSample> {
201206
_signaling?.switchCamera();
202207
}
203208

209+
Future<void> selectScreenSourceDialog(BuildContext context) async {
210+
MediaStream? screenStream;
211+
if (WebRTC.platformIsDesktop) {
212+
final source = await showDialog<DesktopCapturerSource>(
213+
context: context,
214+
builder: (context) => ScreenSelectDialog(),
215+
);
216+
if (source != null) {
217+
try {
218+
var stream =
219+
await navigator.mediaDevices.getDisplayMedia(<String, dynamic>{
220+
'video': {
221+
'deviceId': {'exact': source.id},
222+
'mandatory': {'frameRate': 30.0}
223+
}
224+
});
225+
stream.getVideoTracks()[0].onEnded = () {
226+
print(
227+
'By adding a listener on onEnded you can: 1) catch stop video sharing on Web');
228+
};
229+
screenStream = stream;
230+
} catch (e) {
231+
print(e);
232+
}
233+
}
234+
} else if (WebRTC.platformIsWeb) {
235+
screenStream =
236+
await navigator.mediaDevices.getDisplayMedia(<String, dynamic>{
237+
'audio': false,
238+
'video': true,
239+
});
240+
}
241+
if (screenStream != null) _signaling?.switchToScreenSharing(screenStream);
242+
}
243+
204244
_muteMic() {
205245
_signaling?.muteMic();
206246
}
@@ -254,14 +294,20 @@ class _CallSampleState extends State<CallSample> {
254294
floatingActionButtonLocation: FloatingActionButtonLocation.centerFloat,
255295
floatingActionButton: _inCalling
256296
? SizedBox(
257-
width: 200.0,
297+
width: 240.0,
258298
child: Row(
259299
mainAxisAlignment: MainAxisAlignment.spaceBetween,
260300
children: <Widget>[
261301
FloatingActionButton(
262302
child: const Icon(Icons.switch_camera),
303+
tooltip: 'Camera',
263304
onPressed: _switchCamera,
264305
),
306+
FloatingActionButton(
307+
child: const Icon(Icons.desktop_mac),
308+
tooltip: 'Screen Sharing',
309+
onPressed: () => selectScreenSourceDialog(context),
310+
),
265311
FloatingActionButton(
266312
onPressed: _hangUp,
267313
tooltip: 'Hangup',
@@ -270,6 +316,7 @@ class _CallSampleState extends State<CallSample> {
270316
),
271317
FloatingActionButton(
272318
child: const Icon(Icons.mic_off),
319+
tooltip: 'Mute Mic',
273320
onPressed: _muteMic,
274321
)
275322
]))

lib/src/call_sample/signaling.dart

+44-8
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,11 @@ enum CallState {
2626
CallStateBye,
2727
}
2828

29+
enum VideoSource {
30+
Camera,
31+
Screen,
32+
}
33+
2934
class Session {
3035
Session({required this.sid, required this.pid});
3136
String pid;
@@ -49,6 +54,8 @@ class Signaling {
4954
Map<String, Session> _sessions = {};
5055
MediaStream? _localStream;
5156
List<MediaStream> _remoteStreams = <MediaStream>[];
57+
List<RTCRtpSender> _senders = <RTCRtpSender>[];
58+
VideoSource _videoSource = VideoSource.Camera;
5259

5360
Function(SignalingState state)? onSignalingStateChange;
5461
Function(Session session, CallState state)? onCallStateChange;
@@ -60,8 +67,7 @@ class Signaling {
6067
onDataChannelMessage;
6168
Function(Session session, RTCDataChannel dc)? onDataChannel;
6269

63-
String get sdpSemantics =>
64-
WebRTC.platformIsWindows ? 'plan-b' : 'unified-plan';
70+
String get sdpSemantics => 'unified-plan';
6571

6672
Map<String, dynamic> _iceServers = {
6773
'iceServers': [
@@ -99,7 +105,29 @@ class Signaling {
99105

100106
void switchCamera() {
101107
if (_localStream != null) {
102-
Helper.switchCamera(_localStream!.getVideoTracks()[0]);
108+
if (_videoSource != VideoSource.Camera) {
109+
_senders.forEach((sender) {
110+
if (sender.track!.kind == 'video') {
111+
sender.replaceTrack(_localStream!.getVideoTracks()[0]);
112+
}
113+
});
114+
_videoSource = VideoSource.Camera;
115+
onLocalStream?.call(_localStream!);
116+
} else {
117+
Helper.switchCamera(_localStream!.getVideoTracks()[0]);
118+
}
119+
}
120+
}
121+
122+
void switchToScreenSharing(MediaStream stream) {
123+
if (_localStream != null && _videoSource != VideoSource.Screen) {
124+
_senders.forEach((sender) {
125+
if (sender.track!.kind == 'video') {
126+
sender.replaceTrack(stream.getVideoTracks()[0]);
127+
}
128+
});
129+
onLocalStream?.call(stream);
130+
_videoSource = VideoSource.Screen;
103131
}
104132
}
105133

@@ -193,7 +221,6 @@ class Signaling {
193221
newSession.remoteCandidates.clear();
194222
}
195223
onCallStateChange?.call(newSession, CallState.CallStateNew);
196-
197224
onCallStateChange?.call(newSession, CallState.CallStateRinging);
198225
}
199226
break;
@@ -381,8 +408,8 @@ class Signaling {
381408
onAddRemoteStream?.call(newSession, event.streams[0]);
382409
}
383410
};
384-
_localStream!.getTracks().forEach((track) {
385-
pc.addTrack(track, _localStream!);
411+
_localStream!.getTracks().forEach((track) async {
412+
_senders.add(await pc.addTrack(track, _localStream!));
386413
});
387414
break;
388415
}
@@ -492,7 +519,7 @@ class Signaling {
492519
try {
493520
RTCSessionDescription s =
494521
await session.pc!.createOffer(media == 'data' ? _dcConstraints : {});
495-
await session.pc!.setLocalDescription(s);
522+
await session.pc!.setLocalDescription(_fixSdp(s));
496523
_send('offer', {
497524
'to': session.pid,
498525
'from': _selfId,
@@ -505,11 +532,18 @@ class Signaling {
505532
}
506533
}
507534

535+
RTCSessionDescription _fixSdp(RTCSessionDescription s) {
536+
var sdp = s.sdp;
537+
s.sdp =
538+
sdp!.replaceAll('profile-level-id=640c1f', 'profile-level-id=42e032');
539+
return s;
540+
}
541+
508542
Future<void> _createAnswer(Session session, String media) async {
509543
try {
510544
RTCSessionDescription s =
511545
await session.pc!.createAnswer(media == 'data' ? _dcConstraints : {});
512-
await session.pc!.setLocalDescription(s);
546+
await session.pc!.setLocalDescription(_fixSdp(s));
513547
_send('answer', {
514548
'to': session.pid,
515549
'from': _selfId,
@@ -565,5 +599,7 @@ class Signaling {
565599

566600
await session.pc?.close();
567601
await session.dc?.close();
602+
_senders.clear();
603+
_videoSource = VideoSource.Camera;
568604
}
569605
}

0 commit comments

Comments
 (0)